diff --git a/.circleci/add_pr_comment.py b/.circleci/add_pr_comment.py
new file mode 100644
index 000000000000..bb019961f8b6
--- /dev/null
+++ b/.circleci/add_pr_comment.py
@@ -0,0 +1,19 @@
+import os
+
+from demisto_sdk.commands.test_content.execute_test_content import (
+    ParallelLoggingManager,
+    _add_pr_comment,
+)
+
+# CircleCI artifact URL of the HTML coverage report produced by this workflow job.
+JOB_ID = os.environ.get("CIRCLE_WORKFLOW_JOB_ID")
+COVERAGE_LINK = (
+    f'https://output.circle-artifacts.com/output/job/{JOB_ID}/artifacts/0/artifacts/coverage_report/html/'
+    f'index.html'
+)
+COVERAGE_REPORT_COMMENT = f'Link to the unit tests coverage report: \n {COVERAGE_LINK}'
+
+
+if __name__ == "__main__":
+    logging_manager = ParallelLoggingManager('UT_coverage_report.log')
+    _add_pr_comment(COVERAGE_REPORT_COMMENT, logging_manager)
diff --git a/.circleci/analyze_non_packs_files.sh b/.circleci/analyze_non_packs_files.sh
new file mode 100755
index 000000000000..60b3fc088a36
--- /dev/null
+++ b/.circleci/analyze_non_packs_files.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+
+# Run mypy and ruff on all non-Packs python files. Packs are handled in pre-commit.
+errors=0
+all_dirs=$(find . -type d -not \( -path "*cache*" -o -path "./.*" -o -path "./Templates*" -o -path "./TestPlaybooks*" -o -path "./node_modules*" -o -path "./venv*" -o -path "./Packs*" -o -path "./artifacts*" -o -path "*infrastructure_tests*" -o -path "*scripts/awsinstancetool*" -o -path "./docs*" \))
+all_1_depth_dirs=$(find . -maxdepth 1 -type d -not \( -path "*cache*" -o -path . -o -path ./Packs -o -path ./venv -o -path ./Templates -o -path ./TestPlaybooks -o -path ./node_modules -o -path "./artifacts*" -o -path "./.*" -o -path ./docs \))
+
+echo -e "Top level folders to scan (used by mypy and ruff):\n${all_1_depth_dirs}\n"
+echo -e "All folders found (informational only):\n${all_dirs}\n"
+
+./.circleci/mypy.sh $all_1_depth_dirs || errors=$?
+python3 -m ruff $all_1_depth_dirs --select=E,F,PLC,PLE --ignore=PLC1901 || errors=$?
+
+
+echo 'analyze non-packs files exit code:' $errors
+if [[ $errors -ne 0 ]]; then
+  exit 1
+fi
diff --git a/.circleci/circleci_spell_checker.py b/.circleci/circleci_spell_checker.py
new file mode 100644
index 000000000000..28d8b3f3d202
--- /dev/null
+++ b/.circleci/circleci_spell_checker.py
@@ -0,0 +1,75 @@
+import re
+import sys
+
+from spell_checker import spell_checker
+from demisto_sdk.commands.common.tools import run_command, find_type
+from demisto_sdk.commands.common.constants import DESCRIPTION_REGEX, FileType
+
+
+IGNORED_FILES = ['.devcontainer/devcontainer.json', '.vscode/extensions.json']
+
+
+def get_modified_files(files_string):
+    """Get lists of the modified files in your branch according to the files string.
+
+    Args:
+        files_string (string): String that was calculated by git using `git diff` command.
+
+    Returns:
+        (yml_files, md_files). Tuple of sets.
+    """
+    all_files = files_string.split('\n')
+    yml_files = set()
+    md_files = set()
+    for f in all_files:
+        file_data = f.split()
+        if not file_data:
+            continue
+
+        file_status = file_data[0].lower()
+        file_path = file_data[1]
+        if file_path in IGNORED_FILES:
+            continue
+        if file_path.endswith('.js') or file_path.endswith('.py'):
+            continue
+        # Renamed files ("R<score>") list the new path as the third field.
+        if file_status.startswith('r'):
+            file_path = file_data[2]
+
+        if file_status in ('m', 'a') or file_status.startswith('r'):
+            if find_type(file_path) in [FileType.INTEGRATION, FileType.BETA_INTEGRATION, FileType.SCRIPT,
+                                        FileType.PLAYBOOK]:
+                yml_files.add(file_path)
+            elif re.match(DESCRIPTION_REGEX, file_path, re.IGNORECASE):
+                md_files.add(file_path)
+
+    return yml_files, md_files
+
+
+def check_changed_files():
+    """Spell-check every modified yml/md file on this branch; exit 1 on any finding."""
+    branch_name = sys.argv[1]
+
+    if branch_name == "master":
+        print("Not checking for spelling errors in master branch")
+        return
+
+    all_changed_files_string = run_command("git diff --name-status origin/master...{}".format(branch_name))
+    yml_files, md_files = get_modified_files(all_changed_files_string)
+    exit_code = 0
+    for yml_file in yml_files:
+        print("Checking the file - {}".format(yml_file))
+        exit_code |= spell_checker(yml_file)
+
+    for md_file in md_files:
+        print("Checking the file - {}".format(md_file))
+        exit_code |= spell_checker(md_file, is_md=True)
+
+    # spell_checker returns 1 on problematic words; previously its return value
+    # was discarded so the CI job could never fail on spelling errors.
+    if exit_code:
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    check_changed_files()
diff --git a/.circleci/comment_on_pr.py b/.circleci/comment_on_pr.py
new file mode 100755
index 000000000000..30fdb1c4509c
--- /dev/null
+++ b/.circleci/comment_on_pr.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+import argparse
+import os
+import sys
+
+import requests
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Add a comment to a pull request in the repo.')
+    parser.add_argument('-p', '--pr_number', help='Pull request number', required=True)
+    parser.add_argument('-c', '--comment', help='The comment to add', required=True)
+    args = parser.parse_args()
+
+    pr_number = args.pr_number
+    comment = args.comment
+    token = os.environ['CONTENT_GITHUB_TOKEN']
+
+    # Authenticate every API call - unauthenticated requests are rate-limited
+    # per IP, which shared CI runners exhaust quickly.
+    headers = {'Authorization': 'Bearer ' + token}
+    comments_url = get_pr_comments_url(pr_number, headers)
+
+    response = requests.post(comments_url, json={'body': comment}, headers=headers)
+    response.raise_for_status()
+
+    print('Successfully added the comment to the PR.')
+
+
+def get_pr_comments_url(pr_number: str, headers=None) -> str:
+    """
+    Get the comments URL for a PR. If the PR contains a comment about an instance test (for contrib PRs),
+    it will use that comment.
+    Args:
+        pr_number: The pull request number
+        headers: Optional request headers (e.g. authorization) used for the API calls.
+
+    Returns:
+        The comments URL for the PR.
+    """
+    pr_url = f'https://api.github.com/repos/demisto/content/pulls/{pr_number}'
+    response = requests.get(pr_url, headers=headers)
+    response.raise_for_status()
+    pr = response.json()
+    if not pr:
+        print('Could not find the pull request to reply on.')
+        sys.exit(1)
+    page = 1
+    comments_url = pr['comments_url']
+    while True:
+        response = requests.get(comments_url, params={'page': str(page)}, headers=headers)
+        response.raise_for_status()
+        comments = response.json()
+        if not comments:
+            break
+
+        link_comments = [comment for comment in comments if 'Instance is ready.' in comment.get('body', '')]
+        if link_comments:
+            comments_url = link_comments[0]['url']
+            break
+        page += 1
+
+    return comments_url
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.circleci/config.yml b/.circleci/config.yml
index ed5d47a62cd9..cfbf911bda3b 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -224,66 +224,26 @@ references:
# poll for neo4j status until available
while ! curl --fail http://127.0.0.1:7474 &> /dev/null; do sleep 1; done
- ./Tests/scripts/linters_runner.sh
- ./Tests/scripts/validate.sh
-
- run_unit_testing_and_lint: &run_unit_testing_and_lint
- run:
- parameters:
- dockerimageflag:
- type: string
- name: Run Unit Testing And Lint - Docker Image:<< parameters.dockerimageflag >>
- when: always
- no_output_timeout: 5h
- command: |
- if [[ "$(echo "$GCS_MARKET_BUCKET" | tr '[:upper:]' '[:lower:]')" != "marketplace-dist" ]]; then
- echo "Skipping validations when uploading to a test bucket."
- exit 0
- fi
-
- echo "demisto-sdk version: $(demisto-sdk --version)"
- echo "mypy version: $(mypy --version)"
- echo "flake8 py3 version: $(python3 -m flake8 --version)"
- echo "bandit py3 version: $(python3 -m bandit --version 2>&1)"
- echo "vulture py3 version: $(python3 -m vulture --version 2>&1)"
- mkdir ./unit-tests
-
- neo4j start
- # poll for neo4j status until available
- while ! curl --fail http://127.0.0.1:7474 &> /dev/null; do sleep 1; done
-
- demisto-sdk lint -p 8 -g --test-xml ./unit-tests --log-path ./artifacts --failure-report ./artifacts --coverage-report $ARTIFACTS_FOLDER/coverage_report --docker-image << parameters.dockerimageflag >> --check-dependent-api-module
-
- generate_coverage_reports: &generate_coverage_reports
- run:
- name: Generate coverage reports
- when: always
- no_output_timeout: 1h
- command: |
- EXIT_CODE=0
- if [[ -f $ARTIFACTS_FOLDER/coverage_report/.coverage ]]; then
- demisto-sdk coverage-analyze -i $ARTIFACTS_FOLDER/coverage_report/.coverage --report-dir $ARTIFACTS_FOLDER/coverage_report --report-type all --previous-coverage-report-url https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json || EXIT_CODE=1
- # Checks if the $XSOAR_BOT_TEST_CONTENT exist. for security reasons only non forked pr's have access to it.
- if [[ -n $XSOAR_BOT_TEST_CONTENT && -e $ARTIFACTS_FOLDER/coverage_report/html/index.html ]]; then
- echo "Adding unit tests coverage comment to the pr"
- python3 ./Tests/scripts/add_pr_comment.py
- fi
- exit $EXIT_CODE
- fi
-
- infrastructure_testing: &infrastructure_testing
- run:
- name: Infrastructure testing
- when: always
- command: |
- python3 -m pytest ./Tests/scripts/infrastructure_tests/ -v
- python3 -m pytest ./Tests/Marketplace/Tests/ -v
- python3 -m pytest ./Tests/tests -v
- python3 -m pytest Utils -v
-
- if [ -n "${DEMISTO_SDK_NIGHTLY}" ] ; then
- ./Tests/scripts/sdk_pylint_check.sh
- fi
+ ./.circleci/analyze_non_packs_files.sh
+ ./.circleci/validate.sh
+
+
+ # generate_coverage_reports: &generate_coverage_reports
+ # run:
+ # name: Generate coverage reports
+ # when: always
+ # no_output_timeout: 1h
+ # command: |
+ # EXIT_CODE=0
+ # if [[ -f $ARTIFACTS_FOLDER/coverage_report/.coverage ]]; then
+ # demisto-sdk coverage-analyze -i $ARTIFACTS_FOLDER/coverage_report/.coverage --report-dir $ARTIFACTS_FOLDER/coverage_report --report-type all --previous-coverage-report-url https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json || EXIT_CODE=1
+ # # Checks if the $XSOAR_BOT_TEST_CONTENT exist. for security reasons only non forked pr's have access to it.
+ # if [[ -n $XSOAR_BOT_TEST_CONTENT && -e $ARTIFACTS_FOLDER/coverage_report/html/index.html ]]; then
+ # echo "Adding unit tests coverage comment to the pr"
+ # python3 ./.circleci/add_pr_comment.py
+ # fi
+ # exit $EXIT_CODE
+ # fi
get_contribution_pack: &get_contribution_pack
when:
@@ -296,7 +256,7 @@ references:
USERNAME=$(echo $CONTRIB_BRANCH | cut -d ":" -f 1)
BRANCH=$(echo $CONTRIB_BRANCH | cut -d ":" -f 2)
$CONTRIB_REPO="content"
- python3 ./Utils/update_contribution_pack_in_base_branch.py -p $PULL_REQUEST_NUMBER -b $BRANCH -u $USERNAME -c $CONTRIB_REPO -gt $GITHUB_TOKEN
+ python3 ./.circleci/update_contribution_pack_in_base_branch.py -p $PULL_REQUEST_NUMBER -b $BRANCH -u $USERNAME -c $CONTRIB_REPO -gt $GITHUB_TOKEN
comment_on_contrib_pr: &comment_on_contrib_pr
when:
@@ -307,19 +267,11 @@ references:
when: always
command: |
SERVER_URL=$(jq -r 'select(.[].Role == "Server Master") | .[].InstanceDNS' $ENV_RESULTS_PATH)
- python3 ./Utils/comment_on_pr.py -p $PULL_REQUEST_NUMBER -c "Instance is ready. Server link: https://$SERVER_URL, Build link: $CIRCLE_BUILD_URL"
+ python3 ./.circleci/comment_on_pr.py -p $PULL_REQUEST_NUMBER -c "Instance is ready. Server link: https://$SERVER_URL, Build link: $CIRCLE_BUILD_URL"
nightly_jobs: &nightly_jobs
- Setup Environment:
context: nightly_env
- - Run Unit Testing And Lint:
- context: nightly_env
- requires:
- - Setup Environment
- matrix:
- parameters:
- dockerimageflag: [ "native:ga", "native:maintenance", "native:dev", "from-yml" ]
- name: Run Unit Testing And Lint - Docker Image:<< matrix.dockerimageflag >>
- Run Validations:
requires:
- Setup Environment
@@ -338,26 +290,6 @@ jobs:
- *get_contribution_pack
- *persist_to_workspace
- Run Unit Testing And Lint:
- <<: *container_config
- resource_class: large
- <<: *environment
- parameters:
- dockerimageflag:
- type: string
- steps:
- - *attach_workspace
- - *remote_docker
- - *install_build_dependencies
- - *install_node_ci
- - *install_neo4j
- - *prepare_environment
- - *infrastructure_testing
- - *run_unit_testing_and_lint
- - *generate_coverage_reports
- - store_test_results:
- path: ./unit-tests
- - *store_artifacts
Run Validations:
<<: *container_config
@@ -374,14 +306,14 @@ jobs:
- run:
name: Spell Checks
command: |
- python3 ./Tests/scripts/circleci_spell_checker.py $CIRCLE_BRANCH
+ python3 ./.circleci/circleci_spell_checker.py $CIRCLE_BRANCH
- run:
name: Verify Base Branch for Contribution
when: always
command: |
if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]] ;
then
- python3 ./Tests/scripts/verify_base_branch_for_contribution.py $CIRCLE_BRANCH
+ python3 ./.circleci/verify_base_branch_for_contribution.py $CIRCLE_BRANCH
fi
- run:
name: Validate landingPageSections.json
@@ -395,7 +327,7 @@ jobs:
UNZIP_PATH=$(mktemp -d)
unzip $INDEX_PATH -d $UNZIP_PATH
- python3 Tests/Marketplace/validate_landing_page_sections.py -i $UNZIP_PATH
+ python3 ./.circleci/validate_landing_page_sections.py -i $UNZIP_PATH
- *store_artifacts
- store_artifacts:
path: $ARTIFACTS_FOLDER
@@ -412,13 +344,6 @@ workflows:
value: << pipeline.git.branch >>
jobs:
- Setup Environment
- - Run Unit Testing And Lint:
- requires:
- - Setup Environment
- matrix:
- parameters:
- dockerimageflag: [ "native:ga", "native:maintenance", "native:dev", "native:candidate", "from-yml" ]
- name: Run Unit Testing And Lint - Docker Image:<< matrix.dockerimageflag >>
- Run Validations:
requires:
- Setup Environment
diff --git a/.circleci/git_pull_master_into_fork.sh b/.circleci/git_pull_master_into_fork.sh
new file mode 100755
index 000000000000..8b851775a464
--- /dev/null
+++ b/.circleci/git_pull_master_into_fork.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# this file has been deprecated and relocated to the contribution/utils directory
+# Be aware, only contributors should run this script.
+
+echo "This file has been deprecated and relocated to the contribution/utils directory"
+
+CONTENT_URL='https://github.com/demisto/content.git'
+
+# Use the current branch unless one was passed as the first argument.
+if [ -z "$1" ]
+then
+  CURRENT=$(git branch --show-current)
+else
+  CURRENT=$1
+fi
+
+(
+  git remote add upstream_content $CONTENT_URL ||
+  git remote set-url upstream_content $CONTENT_URL
+) &&
+git fetch upstream_content &&
+git checkout master &&
+git rebase upstream_content/master &&
+git push -f origin master &&
+git checkout "$CURRENT" &&
+git pull origin master
+
diff --git a/.circleci/is_file_up_to_date.sh b/.circleci/is_file_up_to_date.sh
index a4b64e28767c..53e3d4547585 100755
--- a/.circleci/is_file_up_to_date.sh
+++ b/.circleci/is_file_up_to_date.sh
@@ -31,7 +31,7 @@ if [[ $(git diff origin/master -G"." -- ${FILE_TO_CHECK}) ]]; then
fi
if [[ $BRANCH =~ pull/[0-9]+ ]]; then
- echo "Run ./Utils/git_pull_master_into_fork.sh or merge manually from upstream demisto content"
+ echo "Run ./contribution/utils/git_pull_master_into_fork.sh or merge manually from upstream demisto content"
fi
exit 1
diff --git a/.circleci/mypy.sh b/.circleci/mypy.sh
new file mode 100755
index 000000000000..04d7a0305e4a
--- /dev/null
+++ b/.circleci/mypy.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+# Run mypy over every directory passed as an argument and fail the build
+# if mypy reports type errors in any of them.
+
+errors=0
+
+echo "Starting mypy run"
+
+for dir in "$@"; do
+  # if dir is PWD or no python files in the directory, skip
+  if [[ $dir == "." || $(find $dir -name "*.py" | wc -l) -eq 0 ]]; then
+    continue
+  fi
+  mypy_out=$(python3 -m mypy $dir 2>&1)
+  rc=$?
+  # rc 0: clean. rc 2: mypy usage/internal error, deliberately ignored here.
+  # Any other rc means type errors were found in this directory.
+  if [[ $rc -ne 0 && $rc -ne 2 ]]; then
+    echo "$mypy_out"
+    errors=1
+  fi
+done
+
+if [[ $errors -ne 0 ]]; then
+  echo "*** Finished mypy run, please fix the above errors ***"
+  exit 1
+fi
+
+echo "Finished mypy run - no errors were found"
+
diff --git a/.circleci/spell_checker.py b/.circleci/spell_checker.py
new file mode 100644
index 000000000000..b2057334e8ff
--- /dev/null
+++ b/.circleci/spell_checker.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+import sys
+import yaml
+import argparse
+
+from spellchecker import SpellChecker
+
+DISPLAYABLE_LINES = [
+ "description",
+ "name",
+ "display",
+ "comment"
+]
+
+SCRIPT_ARGS = 'scriptarguments'
+
+
+def check_yaml(spellchecker, yml_info, unknown_words):
+ for key, value in yml_info.items():
+ if key in DISPLAYABLE_LINES and isinstance(value, str):
+ for word in value.split():
+ if word.isalpha() and spellchecker.unknown([word]):
+ unknown_words.add(word)
+
+ else:
+ if isinstance(value, dict):
+ if key != SCRIPT_ARGS:
+ check_yaml(spellchecker, value, unknown_words)
+ elif isinstance(value, list):
+ for sub_list in value:
+ if isinstance(sub_list, dict):
+ check_yaml(spellchecker, sub_list, unknown_words)
+
+
+def check_md_file(spellchecker, md_data, unknown_words):
+ for line in md_data:
+ for word in line.split():
+ if word.isalpha() and spellchecker.unknown([word]):
+ unknown_words.add(word)
+
+
+def spell_checker(path, is_md=False):
+ unknown_words: set = set([])
+ spellchecker = SpellChecker()
+ spellchecker.word_frequency.load_text_file('Tests/known_words.txt')
+
+ if is_md:
+ with open(path, 'r') as md_file:
+ md_data = md_file.readlines()
+
+ check_md_file(spellchecker, md_data, unknown_words)
+ else:
+ with open(path, 'r') as yaml_file:
+ yml_info = yaml.safe_load(yaml_file)
+
+ check_yaml(spellchecker, yml_info, unknown_words)
+
+ if unknown_words:
+ print(u"Found the problematic words:\n{}".format('\n'.join(unknown_words)))
+ return 1
+
+ print("No problematic words found")
+ return 0
+
+
+if __name__ == "__main__":
+ description = """Run spell check on a given yml/md file. """
+ parser = argparse.ArgumentParser(description=description, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument("-p", "--path", help="Specify path of yml/md file", required=True)
+ parser.add_argument("-i", "--isMD", help="Whether the path is to a yml file or an md.", action='store_true')
+
+ args = parser.parse_args()
+ sys.exit(spell_checker(args.path, args.isMD))
diff --git a/.circleci/update_contribution_pack_in_base_branch.py b/.circleci/update_contribution_pack_in_base_branch.py
new file mode 100755
index 000000000000..669d8b9a1256
--- /dev/null
+++ b/.circleci/update_contribution_pack_in_base_branch.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+import argparse
+import os
+from collections.abc import Iterable
+from urllib.parse import urljoin
+
+import requests
+
+PER_PAGE = 100  # value of `per_page` request parameter
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Deploy a pack from a contribution PR to a branch')
+    parser.add_argument('-p', '--pr_number', help='Contrib PR number')
+    parser.add_argument('-b', '--branch', help='The contrib branch')
+    parser.add_argument('-c', '--contrib_repo', help='The contrib repo')
+    parser.add_argument('-u', '--username', help='The contrib user name')
+    parser.add_argument("-gt", "--github_token", help="The Github token")
+    args = parser.parse_args()
+
+    pr_number = args.pr_number
+    username = args.username
+    repo = args.contrib_repo
+    branch = args.branch
+    github_token = args.github_token
+
+    print(
+        f"args received in .circleci/update_contribution_pack_in_base_branch.py script: {pr_number=}, {username=}, {repo=}, {branch=}"
+    )
+
+    packs_dir_names = get_files_from_github(
+        username, branch, pr_number, repo, github_token
+    )
+    if packs_dir_names:
+        print('Successfully updated the base branch '  # noqa: T201
+              'with the following contrib packs: Packs/'
+              f'{", Packs/".join(packs_dir_names)}')
+
+
+def get_pr_files(pr_number: str, github_token: str) -> Iterable[str]:
+    """
+    Get changed files names from a contribution pull request.
+    Args:
+        pr_number: The contrib PR
+        github_token: Token used to authenticate against the GitHub API
+
+    Returns:
+        A generator of changed file names (under the Packs dir), if found.
+    """
+    page = 1
+    while True:
+        response = requests.get(
+            f"https://api.github.com/repos/demisto/content/pulls/{pr_number}/files",
+            params={"page": str(page), "per_page": str(PER_PAGE)},
+            headers={"Authorization": f"Bearer {github_token}"},
+        )
+        response.raise_for_status()
+        files = response.json()
+        if not files:
+            break
+        for pr_file in files:
+            if pr_file['filename'].startswith('Packs/'):
+                yield pr_file['filename']
+        page += 1
+
+
+def get_files_from_github(
+    username: str, branch: str, pr_number: str, repo: str, github_token: str
+) -> list[str]:
+    """
+    Write the changed files content repo
+    Args:
+        username: The username of the contributor (e.g. demisto / xsoar-bot)
+        branch: The contributor branch
+        pr_number: The contrib PR
+        repo: The contrib repository
+        github_token: Token used to authenticate against the GitHub API
+    Returns:
+        A list of packs names, if found.
+    """
+    print("Getting files from Github")
+    content_path = os.getcwd()
+    print(f"content_path: {content_path}")
+    files_list = set()
+    chunk_size = 1024 * 500  # 500 Kb
+    base_url = f'https://raw.githubusercontent.com/{username}/{repo}/{branch}/'
+    print(f"base url: {base_url}")
+    for file_path in get_pr_files(pr_number, github_token):
+        print(f"file_path: {file_path}")
+        abs_file_path = os.path.join(content_path, file_path)
+        print(f"abs_file_path: {abs_file_path}")
+        abs_dir = os.path.dirname(abs_file_path)
+        print(f"abs_dir: {abs_dir}")
+        os.makedirs(abs_dir, exist_ok=True)
+        with open(abs_file_path, "wb") as changed_file, requests.get(
+            urljoin(base_url, file_path),
+            stream=True,
+            headers={"Authorization": f"Bearer {github_token}"},
+        ) as file_content:
+            # mypy didn't like the request being used as context manager
+            file_content.raise_for_status()  # type:ignore[attr-defined]
+            for data in file_content.iter_content(chunk_size=chunk_size):  # type:ignore[attr-defined]
+                changed_file.write(data)
+
+        files_list.add(file_path.split(os.path.sep)[1])
+    print(f"list(files_list): {list(files_list)}")
+    return list(files_list)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.circleci/validate.sh b/.circleci/validate.sh
new file mode 100755
index 000000000000..b640f5beaa61
--- /dev/null
+++ b/.circleci/validate.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+set -ex
+
+
+echo "CI_COMMIT_BRANCH: $CI_COMMIT_BRANCH CI: $CI DEMISTO_README_VALIDATION: $DEMISTO_README_VALIDATION, CI_COMMIT_SHA: $CI_COMMIT_SHA, LAST_UPLOAD_COMMIT: $LAST_UPLOAD_COMMIT"
+if [[ $CI_COMMIT_BRANCH = master ]] || [[ -n "${NIGHTLY}" ]] || [[ -n "${BUCKET_UPLOAD}" ]] || [[ -n "${DEMISTO_SDK_NIGHTLY}" ]]; then
+  if [[ -n "${PACKS_TO_UPLOAD}" ]]; then
+    echo "Packs upload - Validating only the supplied packs"
+    PACKS_TO_UPLOAD_SPACED=${PACKS_TO_UPLOAD//,/ }
+    for item in $PACKS_TO_UPLOAD_SPACED; do
+      python3 -m demisto_sdk validate -i Packs/"$item" --post-commit --graph --skip-pack-dependencies --run-old-validate --skip-new-validate
+    done
+  else
+    if [[ -n "${NIGHTLY}" && "${CI_COMMIT_BRANCH}" == "master" ]]; then
+      PREV_VER=$LAST_UPLOAD_COMMIT
+    else
+      PREV_VER="origin/master"
+    fi
+    python3 -m demisto_sdk validate -a --graph --skip-pack-dependencies --prev-ver $PREV_VER --run-old-validate --skip-new-validate
+  fi
+elif [[ $CI_COMMIT_BRANCH =~ pull/[0-9]+ ]]; then
+  python3 -m demisto_sdk validate -g --post-commit --graph --skip-pack-dependencies --run-old-validate --skip-new-validate
+elif [[ $CI_COMMIT_BRANCH = demisto/python3 ]]; then
+  python3 -m demisto_sdk validate -g --post-commit --no-conf-json --allow-skipped --graph --skip-pack-dependencies --run-old-validate --skip-new-validate
+else
+  python3 -m demisto_sdk validate -g --post-commit --graph --skip-pack-dependencies --run-old-validate --skip-new-validate
+fi
diff --git a/.circleci/validate_landing_page_sections.py b/.circleci/validate_landing_page_sections.py
new file mode 100644
index 000000000000..c013dbc7552f
--- /dev/null
+++ b/.circleci/validate_landing_page_sections.py
@@ -0,0 +1,74 @@
+import argparse
+import json
+import os
+import sys
+from glob import glob
+
+import logging
+logger = logging.getLogger(__file__)
+
+LANDING_PAGE_SECTIONS_PAGE_PATH = 'Tests/Marketplace/landingPage_sections.json'
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Validates landingPage_sections.json file")
+    parser.add_argument('-i', '--index-path', help="Path of the unzipped content of the index.zip file", required=True)
+    options = parser.parse_args()
+
+    landing_page_sections_json: dict = parse_landing_page_sections_to_json()
+    validate_file_keys(landing_page_sections_json)
+
+    # A pack is considered valid if it exists either in the bucket (index.zip) or in the content repo.
+    bucket_pack_names = {os.path.basename(pack_name) for pack_name in glob(f'{options.index_path}/index/*')}
+    content_repo_pack_names = {os.path.basename(pack_name) for pack_name in glob('Packs/*')}
+    valid_packs = bucket_pack_names | content_repo_pack_names
+    validate_valid_packs_in_sections(landing_page_sections_json, valid_packs)
+    logger.info('Validation finished successfully')
+
+
+def validate_valid_packs_in_sections(landing_page_sections_json: dict, valid_pack_names: set) -> None:
+    """
+    Validates all packs in the sections of the file are valid packs according to the latest index.zip file
+    Args:
+        landing_page_sections_json: The content of the landingPage_sections.json file
+        valid_pack_names: A set containing all valid pack names from latest index.zip file and content repo
+    """
+    logger.info('validating packs in sections appear in latest index.zip file')
+    for section_name, packs_in_section in landing_page_sections_json.items():
+        if section_name in {'description', 'sections'}:
+            continue
+        for pack_name in packs_in_section:
+            assert pack_name in valid_pack_names, f'Pack {pack_name} was not found in latest index.zip file, ' \
+                                                  f'Make sure you uploaded the pack'
+
+
+def validate_file_keys(landing_page_sections_json: dict) -> None:
+    """
+    Validates that besides the 'description' and 'sections' keys - all keys in the file are sections names that appear
+    in the 'sections' part of the file.
+    Raises: Exception if the file has non allowed key.
+    Args:
+        landing_page_sections_json: The content of the landingPage_sections.json file
+    """
+    logger.info('Validating file keys are valid sections')
+    allowed_keys = {'description', 'sections'}
+    allowed_keys.update(landing_page_sections_json['sections'])
+    not_allowed_key = [key for key in landing_page_sections_json.keys() if key not in allowed_keys]
+    assert not not_allowed_key, f'Unsupported keys found: {not_allowed_key}, please add ' \
+                                f'these keys under the "sections" key or remove them.'
+
+
+def parse_landing_page_sections_to_json():
+    """Load and parse the landingPage_sections.json file; exit with rc 1 if unreadable."""
+    try:
+        with open(LANDING_PAGE_SECTIONS_PAGE_PATH, 'r') as file:
+            return json.load(file)
+    except Exception:
+        logger.critical('Could not parse the file as json file')
+        sys.exit(1)
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+    # logging has no handler by default, so INFO messages would be dropped; make them visible in CI.
+    logging.basicConfig(level=logging.INFO, format='%(message)s')
+    main()
diff --git a/.circleci/verify_base_branch_for_contribution.py b/.circleci/verify_base_branch_for_contribution.py
new file mode 100644
index 000000000000..365369630ab7
--- /dev/null
+++ b/.circleci/verify_base_branch_for_contribution.py
@@ -0,0 +1,64 @@
+import requests
+import sys
+import re
+from demisto_sdk.commands.common.constants import EXTERNAL_PR_REGEX
+
+
+def get_base_branch(pr_num):
+    """Fetches the base branch name of PR num {pr_num}
+
+    Args:
+        pr_num (string): The string representation of the pr number
+
+    Returns:
+        string. The name of the base branch of the pr if succeeds, '' otherwise.
+    """
+
+    # Disable insecure warnings
+    import urllib3
+    urllib3.disable_warnings()
+
+    url = f'https://api.github.com/repos/demisto/content/pulls/{pr_num}'
+
+    try:
+        res = requests.get(url, verify=False)
+        res.raise_for_status()
+        pr = res.json()
+        if pr and isinstance(pr, list) and len(pr) == 1:
+            # github usually returns a list of PRs, if not pr is a dict
+            pr = pr[0]
+        return pr.get('base', {}).get('ref', '')
+
+    except (requests.exceptions.HTTPError, ValueError) as e:
+        # If we didn't succeed to fetch pr for any http error / res.json() we raise an error
+        # then we don't want the build to fail
+        print(f'Unable to fetch pull request #{pr_num}.\nError: {e}')
+        return ''
+
+
+def verify_base_branch(pr_num):
+    """Checks if the base branch is master or not
+
+    Args:
+        pr_num (string): The string representation of the pr number
+
+    Returns:
+        (message, is_valid) tuple - an explanation for the user and the verdict.
+    """
+
+    print(f'Fetching the base branch of pull request #{pr_num}.')
+    if get_base_branch(pr_num) == 'master':
+        return 'Cannot merge a contribution directly to master, the pull request reviewer will handle that soon.', False
+    return f'Verified pull request #{pr_num} base branch successfully.', True
+
+
+if __name__ == '__main__':
+    circle_branch = sys.argv[1]
+    pr_numbers_list = re.findall(EXTERNAL_PR_REGEX, circle_branch, re.IGNORECASE)
+    if not pr_numbers_list:
+        print('Unable to fetch pull request.')
+    else:
+        msg, is_valid = verify_base_branch(pr_numbers_list[0])
+        print(msg)
+        if not is_valid:
+            sys.exit(1)
diff --git a/.github/content_roles.json b/.github/content_roles.json
index 9a28cb4711c7..7151244b0281 100644
--- a/.github/content_roles.json
+++ b/.github/content_roles.json
@@ -7,8 +7,8 @@
"CONTRIBUTION_TL": "BEAdi",
"CONTRIBUTION_SECURITY_REVIEWER": "ssokolovich",
"ON_CALL_DEVS": [
- "AradCarmi",
- "yaakovpraisler"
+ "acarmi",
+ "ypreisler"
],
"DOC_REVIEWER": "ShirleyDenkberg",
"TIM_REVIEWER": "MLainer1"
diff --git a/.github/workflows/check-nightly-ok-label.yml b/.github/workflows/check-nightly-ok-label.yml
deleted file mode 100644
index 6dc10a80a336..000000000000
--- a/.github/workflows/check-nightly-ok-label.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: Check nightly-ok label
-
-on:
- pull_request:
- types: [opened, synchronize, labeled, unlabeled]
-
-jobs:
- check_label:
- runs-on: ubuntu-latest
- if: github.repository == 'demisto/content' && github.event.pull_request.head.repo.fork == false
-
- steps:
- - name: Checkout repo
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Check if files under .gitlab directory are changed
- id: check-changes
- run: |
- CHANGED_FILES=$(git diff --name-only origin/master...origin/${{ github.head_ref || github.ref_name }})
- echo "All changed files:"
- echo "${CHANGED_FILES}"
- GITLAB_CHANGED_FILES=$( [[ $CHANGED_FILES == *".gitlab/ci"* ]] && echo true || echo false)
- echo "Files in the .gitlab folder have changed: ${GITLAB_CHANGED_FILES}"
- echo "gitlab_changed_files=$GITLAB_CHANGED_FILES" >> $GITHUB_OUTPUT
- if [[ $GITLAB_CHANGED_FILES == true ]]; then
- echo 'Files under .gitlab folder has changed, Will check if the PR has the `nightly-ok` label.'
- else
- echo 'Files in the .gitlab folder have not been changed.'
- fi
-
- - name: Check if PR has the nightly-ok label
- uses: actions/github-script@v7
- id: check-label
- with:
- script: |
- const gitlabChangedFiles = ${{ steps.check-changes.outputs.gitlab_changed_files }};
- if(gitlabChangedFiles) {
- console.log('Files under .gitlab folder has changed, Will check if the PR has the `nightly-ok` label.');
- const labels = context.payload.pull_request.labels.map(label => label.name);
- const hasLabel = labels.includes('nightly-ok');
- if (hasLabel) {
- console.log('All good, the PR has the `nightly-ok` label.');
- } else {
- console.log('PR does not have the `nightly-ok` label. It is required when changing files under the `.gitlab` directory. Please run nightly using the Utils/gitlab_triggers/trigger_content_nightly_build.sh script, check that succeeded, and add the `nightly-ok` label');
- process.exit(1); // Exit with failure status if label is missing
- }
- } else {
- console.log('Files in the .gitlab folder have not been changed.');
- }
diff --git a/.github/workflows/clean_stale_branches.yml b/.github/workflows/clean_stale_branches.yml
index c07ffc962d16..c49ee1c948be 100644
--- a/.github/workflows/clean_stale_branches.yml
+++ b/.github/workflows/clean_stale_branches.yml
@@ -6,6 +6,7 @@ on:
env:
DAY_BEFORE_STALE: 30
DAY_BEFORE_CLOSE: 15
+ OPERATION_PER_RUN: 1000
EXEMPT_LABELS: "Ignore Stale,External PR"
jobs:
@@ -23,7 +24,8 @@ jobs:
days-before-issue-close: -1
days-before-pr-stale: ${{env.DAY_BEFORE_STALE}}
days-before-pr-close: ${{env.DAY_BEFORE_CLOSE}}
+ operations-per-run: ${{env.OPERATION_PER_RUN}}
stale-pr-message: "This PR is marked as 'Stale' because it has been open for ${{env.DAY_BEFORE_STALE}} days with no activity, it will be automatically closed in ${{env.DAY_BEFORE_CLOSE}} days if no activity will be done. To reset the counter just remove the 'Stale' label or make changes to update this PR. If you wish this PR will never be marked as 'Stale' add the 'Ignore Stale'"
delete-branch: true
remove-pr-stale-when-updated: true
- exempt-pr-labels: ${{env.EXEMPT_LABELS}}
\ No newline at end of file
+ exempt-pr-labels: ${{env.EXEMPT_LABELS}}
diff --git a/.github/workflows/handle-new-external-pr.yml b/.github/workflows/handle-new-external-pr.yml
index 63cab0acc72e..e08a67d020f6 100644
--- a/.github/workflows/handle-new-external-pr.yml
+++ b/.github/workflows/handle-new-external-pr.yml
@@ -43,7 +43,18 @@ jobs:
cd Utils/github_workflow_scripts
poetry run ./handle_external_pr.py
echo "Finished Handling External PR"
+
+ - name: Add Contribution Form Filled for Marketplace Contributions
+ if: contains(github.event.pull_request.title, '[Marketplace Contribution]')
+ run: gh pr edit "$PR_NUMBER" --add-label "$LABEL"
+ env:
+ GH_TOKEN: ${{ secrets.CONTENTBOT_GH_ADMIN_TOKEN }}
+ GH_REPO: ${{ github.repository }}
+ PR_NUMBER: ${{ github.event.pull_request.number }}
+ LABEL: "Contribution Form Filled"
+
- name: Send Notification
+ if: startsWith(github.event.pull_request.title, 'test') != true
env:
CONTENTBOT_GH_ADMIN_TOKEN: ${{ secrets.CONTENTBOT_GH_ADMIN_TOKEN }}
EVENT_PAYLOAD: ${{ toJson(github.event) }}
diff --git a/.github/workflows/pre-commit-reuse.yml b/.github/workflows/pre-commit-reuse.yml
index fe8844ba08c1..b3eeec0d8b55 100644
--- a/.github/workflows/pre-commit-reuse.yml
+++ b/.github/workflows/pre-commit-reuse.yml
@@ -42,9 +42,16 @@ jobs:
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
+ BRANCH_NAME: ${{ github.head_ref }}
run: |
source .venv/bin/activate
- demisto-sdk pre-commit -g --validate --no-secrets --show-diff-on-failure --verbose --mode=ci
+ if [[ "$BRANCH_NAME" =~ ^AUD-demisto/.* ]]; then
+ echo "In docker auto update branch, pre commit on docker_autoupdate mode."
+ demisto-sdk pre-commit -g --mode=docker_autoupdate --show-diff-on-failure --verbose
+ else
+ echo "Not in docker auto update branch, pre commit on CI mode."
+ demisto-sdk pre-commit -g --validate --no-secrets --show-diff-on-failure --verbose --mode=ci
+ fi
- name: "Check coverage.xml exists"
if: always()
diff --git a/.github/workflows/protect-infra-directories.yml b/.github/workflows/protect-infra-directories.yml
index 1a4762bf9a3c..346e8920a034 100644
--- a/.github/workflows/protect-infra-directories.yml
+++ b/.github/workflows/protect-infra-directories.yml
@@ -25,4 +25,4 @@ jobs:
- name: Check for changes in protected directories
run: |
- python Utils/check_protected_directories.py ${{ steps.changed-files.outputs.all_changed_files }}
+ python Utils/github_workflow_scripts/check_protected_directories.py ${{ steps.changed-files.outputs.all_changed_files }}
diff --git a/.gitignore b/.gitignore
index 507298122772..8553f5cf0c14 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,7 +32,7 @@ Integrations/*/*_unified.yml
Beta_Integrations/*/*_unified.yml
Packs/*/*/*/*_unified.yml
conftest.py
-!Tests/scripts/pytest/conftest.py
+!Tests/scripts/dev_envs/pytest/conftest.py
!Tests/tests_e2e/conftest.py
venv
failed_unittests.txt
diff --git a/.pre-commit-config_template.yaml b/.pre-commit-config_template.yaml
index 12c0909dc322..9ade9c5e25a5 100644
--- a/.pre-commit-config_template.yaml
+++ b/.pre-commit-config_template.yaml
@@ -41,6 +41,7 @@ repos:
- --config=nightly_ruff.toml
args:ci:
- --config=nightly_ruff.toml
+ skip:docker_autoupdate: true
- repo: https://github.com/pre-commit/mirrors-autopep8
rev: v2.0.4
hooks:
@@ -60,7 +61,7 @@ repos:
- --show-error-codes
- --follow-imports=silent
- --allow-redefinition
- exclude: test_data|tests_data|.venv|.*_test.py$|infrastructure_tests|.vulture_whitelist.py|demistomock.py|Templates|conftest.py
+ exclude: test_data|tests_data|.venv|.*_test.py$|infrastructure_tests|.vulture_whitelist.py|demistomock.py|Templates|conftest.py|Utils/download_packs_and_docker_images.py
language: system
entry: mypy
@@ -76,6 +77,7 @@ repos:
language: system
# skip nightly since we don't care about warnings in nightly
skip:nightly: true
+ skip:docker_autoupdate: true
- id: pylint-in-docker
name: pylint-in-docker
@@ -129,8 +131,17 @@ repos:
- --junitxml=/src/.pre-commit/pytest-junit/.report_pytest.xml
- --color=yes
- --files
+ args:docker_autoupdate:
+ - /src/Tests/scripts/script_runner.py
+ - pytest
+ - -v
+ - --override-ini='asyncio_mode=auto'
+ - --rootdir=/src
+ - --junitxml=/src/.pre-commit/pytest-junit/.report_pytest.xml
+ - --color=yes
+ - --files
copy_files:
- - Tests/scripts/pytest/conftest.py
+ - Tests/scripts/dev_envs/pytest/conftest.py
skip:commit: true
pass_docker_extra_args:ci: --rm=false --network=none
pass_docker_extra_args:nightly: --rm=false --network=none
@@ -160,15 +171,17 @@ repos:
- --junitxml=/src/.pre-commit/pytest-junit/.report_pytest.xml
- --color=yes
copy_files:
- - Tests/scripts/pytest/conftest.py
+ - Tests/scripts/dev_envs/pytest/conftest.py
skip:commit: true
run_isolated: true
+ skip:docker_autoupdate: true
pass_docker_extra_args:ci: --rm=false
pass_docker_extra_args:nightly: --rm=false
- id: validate-deleted-files
name: validate-deleted-files
entry: validate-deleted-files
+ skip:docker_autoupdate: true
language: system
require_serial: true
pass_filenames: false
@@ -180,6 +193,7 @@ repos:
- id: validate-content-paths
name: validate-content-paths
+ skip:docker_autoupdate: true
entry: validate-content-path
language: system
require_serial: true
@@ -198,6 +212,7 @@ repos:
files: Tests/conf.json
skip:commit: true
skip:nightly: true
+ skip:docker_autoupdate: true
entry: validate-conf-json
pass_filenames: false
language: system
@@ -257,6 +272,7 @@ repos:
require_serial: true
skip:commit: true
skip:nightly: true
+ skip:docker_autoupdate: true
- id: format
name: format
@@ -269,6 +285,7 @@ repos:
language: system
pass_filenames: false
require_serial: true
+ skip:docker_autoupdate: true
- id: secrets
name: secrets
@@ -290,32 +307,33 @@ repos:
needs:
- pytest-in-docker
- # - id: coverage-pytest-analyze
- # name: coverage-pytest-analyze
- # entry: demisto-sdk coverage-analyze
- # description: Running demisto-sdk coverage-analyze and showing a coverage report.
- # language: system
- # pass_filenames: false
- # args:
- # - -i
- # - .coverage
- # - --report-dir
- # - coverage_report
- # - --report-type
- # - all
- # - --previous-coverage-report-url
- # - https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json
- # args:nightly:
- # - -i
- # - .coverage
- # - --report-dir
- # - coverage_report
- # - --report-type
- # - all
- # - --allowed-coverage-degradation-percentage
- # - '100'
- # needs:
- # - pytest-in-docker
+ - id: coverage-pytest-analyze
+ name: coverage-pytest-analyze
+ entry: demisto-sdk coverage-analyze
+ description: Running demisto-sdk coverage-analyze and showing a coverage report.
+ language: system
+ pass_filenames: false
+ args:
+ - -i
+ - .coverage
+ - --report-dir
+ - coverage_report
+ - --report-type
+ - all
+ - --previous-coverage-report-url
+ - https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json
+ args:nightly:
+ - -i
+ - .coverage
+ - --report-dir
+ - coverage_report
+ - --report-type
+ - all
+ - --allowed-coverage-degradation-percentage
+ - '100'
+ needs:
+ - pytest-in-docker
+
- repo: https://github.com/sourcery-ai/sourcery
rev: v1.6.0
hooks:
diff --git a/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md b/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md
index 2149882f4bc1..03426befd82a 100644
--- a/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md
+++ b/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md
@@ -1,6 +1,8 @@
This is the Cisco AMP event collector integration for Cortex XSIAM.
This integration was integrated and tested with version v1 of CiscoAMPEventCollector.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Cisco AMP Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/AMP/pack_metadata.json b/Packs/AMP/pack_metadata.json
index 83e5834aff03..be21f34b192f 100644
--- a/Packs/AMP/pack_metadata.json
+++ b/Packs/AMP/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "CiscoAMPEventCollector"
}
\ No newline at end of file
diff --git a/Packs/AWS-EC2/ReleaseNotes/1_4_8.md b/Packs/AWS-EC2/ReleaseNotes/1_4_8.md
new file mode 100644
index 000000000000..dda3f96d4327
--- /dev/null
+++ b/Packs/AWS-EC2/ReleaseNotes/1_4_8.md
@@ -0,0 +1,3 @@
+## AWS - EC2
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AWS-EC2/pack_metadata.json b/Packs/AWS-EC2/pack_metadata.json
index d3b5ad596722..5ad2d5dad66a 100644
--- a/Packs/AWS-EC2/pack_metadata.json
+++ b/Packs/AWS-EC2/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - EC2",
"description": "Amazon Web Services Elastic Compute Cloud (EC2)",
"support": "xsoar",
- "currentVersion": "1.4.7",
+ "currentVersion": "1.4.8",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md
index bb10ba49f8e6..da9179929697 100644
--- a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md
+++ b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md
@@ -1,5 +1,7 @@
Amazon Web Services Guard Duty Service Event Collector integration for Cortex XSIAM.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure AWS - GuardDuty Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automation and Feed Integrations**.
diff --git a/Packs/AWS-GuardDuty/ModelingRules/AWSGuardDutyModelingRules_1_3/AWSGuardDutyModelingRules_1_3.xif b/Packs/AWS-GuardDuty/ModelingRules/AWSGuardDutyModelingRules_1_3/AWSGuardDutyModelingRules_1_3.xif
index eab4c84de5c3..f23e80657ffa 100644
--- a/Packs/AWS-GuardDuty/ModelingRules/AWSGuardDutyModelingRules_1_3/AWSGuardDutyModelingRules_1_3.xif
+++ b/Packs/AWS-GuardDuty/ModelingRules/AWSGuardDutyModelingRules_1_3/AWSGuardDutyModelingRules_1_3.xif
@@ -4,13 +4,14 @@ alter
targetIP2 = json_extract_scalar(Service, "$.Action.KubernetesApiCallAction.RemoteIpDetails.IpAddressV4"),
username1 = trim(json_extract_scalar(Resource,"$.AccessKeyDetails.UserName"), "\""),
username2 = json_extract_scalar(Resource, "$.KubernetesDetails.KubernetesUserDetails.Username"),
- userType = json_extract_scalar(Resource, "$.AccessKeyDetails.UserType")
+ userType = json_extract_scalar(Resource, "$.AccessKeyDetails.UserType"),
+ severity = to_float(Severity)
| alter
xdm.alert.category = json_extract_scalar(Resource, "$.ResourceType"),
xdm.alert.subcategory = Type,
xdm.alert.description = Description,
xdm.event.outcome_reason = Title,
- xdm.alert.severity = to_string(Severity),
+ xdm.alert.severity = if( severity >= 7, "High", severity >= 4 and severity < 6.9 , "Medium", severity >= 1 and severity < 3.9 , "Low", to_string(Severity)),
xdm.target.resource.id = AccountId,
xdm.target.host.hostname = json_extract_scalar(Resource, "$.EksClusterDetails.Name"),
xdm.source.user.user_type = if(userType in("Root","IAMUser","Role","FederatedUser","AWSAccount"),XDM_CONST.USER_TYPE_REGULAR , userType in("Directory","AWSService") ,XDM_CONST.USER_TYPE_SERVICE_ACCOUNT,userType in("AssumedRole") ,XDM_CONST.USER_TYPE_MACHINE_ACCOUNT ,to_string(userType)),
diff --git a/Packs/AWS-GuardDuty/ReleaseNotes/1_3_49.md b/Packs/AWS-GuardDuty/ReleaseNotes/1_3_49.md
new file mode 100644
index 000000000000..fe6c869bab9c
--- /dev/null
+++ b/Packs/AWS-GuardDuty/ReleaseNotes/1_3_49.md
@@ -0,0 +1,3 @@
+#### Modeling Rules
+##### AWSGuardDuty Modeling Rule
+Updated the mapping of *xdm.alert.severity* to comply with the string standard instead of numerical values.
diff --git a/Packs/AWS-GuardDuty/pack_metadata.json b/Packs/AWS-GuardDuty/pack_metadata.json
index fd720fa60928..c17430295aeb 100644
--- a/Packs/AWS-GuardDuty/pack_metadata.json
+++ b/Packs/AWS-GuardDuty/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - GuardDuty",
"description": "Amazon Web Services Guard Duty Service (gd)",
"support": "xsoar",
- "currentVersion": "1.3.48",
+ "currentVersion": "1.3.49",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -20,5 +20,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "AWS - GuardDuty Event Collector"
}
\ No newline at end of file
diff --git a/Packs/AWS-IAM/ReleaseNotes/1_1_62.md b/Packs/AWS-IAM/ReleaseNotes/1_1_62.md
new file mode 100644
index 000000000000..91a43390a263
--- /dev/null
+++ b/Packs/AWS-IAM/ReleaseNotes/1_1_62.md
@@ -0,0 +1,3 @@
+## AWS - IAM
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AWS-IAM/pack_metadata.json b/Packs/AWS-IAM/pack_metadata.json
index 7ed76c68c188..d3ae5517c6df 100644
--- a/Packs/AWS-IAM/pack_metadata.json
+++ b/Packs/AWS-IAM/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Amazon Web Services Identity and Access Management (IAM)",
"support": "xsoar",
"author": "Cortex XSOAR",
- "currentVersion": "1.1.61",
+ "currentVersion": "1.1.62",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
"created": "2020-04-14T00:00:00Z",
diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py
index 0ece4385446d..a624dfdace91 100755
--- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py
+++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py
@@ -1052,10 +1052,8 @@ def main(): # pragma: no cover
command=demisto.command()))
if demisto.command() == 'test-module':
# This is the call made when pressing the integration test button.
- client = aws_session()
- response = client.REPLACE_WITH_TEST_FUNCTION()
- if response['ResponseMetadata']['HTTPStatusCode'] == 200:
- demisto.results('ok')
+ response = list_firewalls_command(args)
+ demisto.results('ok')
elif demisto.command() == 'aws-network-firewall-associate-firewall-policy':
human_readable, outputs, response = associate_firewall_policy_command(
diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
index b87ebc972560..5f66c6c341a6 100755
--- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
+++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml
@@ -1521,7 +1521,7 @@ script:
- contextPath: AWS-NetworkFirewall.SubnetChangeProtection
description: A setting indicating whether the firewall is protected against changes to the subnet associations. Use this setting to protect against accidentally modifying the subnet associations for a firewall that is in use. When you create a firewall, the operation initializes this setting to TRUE.
type: Unknown
- dockerimage: demisto/boto3py3:1.0.0.87655
+ dockerimage: demisto/boto3py3:1.0.0.95377
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md
new file mode 100644
index 000000000000..11706bd0df2c
--- /dev/null
+++ b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AWS Network Firewall
+- Updated the Docker image to: *demisto/boto3py3:1.0.0.95377*.
+- Fixed an issue where test connection was failing.
diff --git a/Packs/AWS-NetworkFirewall/pack_metadata.json b/Packs/AWS-NetworkFirewall/pack_metadata.json
index cf8701dd5fbd..c89f169fe11c 100644
--- a/Packs/AWS-NetworkFirewall/pack_metadata.json
+++ b/Packs/AWS-NetworkFirewall/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - Network Firewall",
"description": "Amazon Web Services Network Firewall",
"support": "xsoar",
- "currentVersion": "1.0.6",
+ "currentVersion": "1.0.7",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-Route53/ReleaseNotes/1_1_34.md b/Packs/AWS-Route53/ReleaseNotes/1_1_34.md
new file mode 100644
index 000000000000..811cd448d769
--- /dev/null
+++ b/Packs/AWS-Route53/ReleaseNotes/1_1_34.md
@@ -0,0 +1,3 @@
+## AWS - Route53
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AWS-Route53/pack_metadata.json b/Packs/AWS-Route53/pack_metadata.json
index 13e225c7b4c1..b797842d69c8 100644
--- a/Packs/AWS-Route53/pack_metadata.json
+++ b/Packs/AWS-Route53/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - Route53",
"description": "Amazon Web Services Managed Cloud DNS Service.",
"support": "xsoar",
- "currentVersion": "1.1.33",
+ "currentVersion": "1.1.34",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-S3/ReleaseNotes/1_2_24.md b/Packs/AWS-S3/ReleaseNotes/1_2_24.md
new file mode 100644
index 000000000000..e933a273ba4d
--- /dev/null
+++ b/Packs/AWS-S3/ReleaseNotes/1_2_24.md
@@ -0,0 +1,3 @@
+## AWS - S3
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AWS-S3/pack_metadata.json b/Packs/AWS-S3/pack_metadata.json
index cc2db1a8eb35..b2fa0670fc45 100644
--- a/Packs/AWS-S3/pack_metadata.json
+++ b/Packs/AWS-S3/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - S3",
"description": "Amazon Web Services Simple Storage Service (S3)",
"support": "xsoar",
- "currentVersion": "1.2.23",
+ "currentVersion": "1.2.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md
index 4051699216e5..74d6391b5561 100644
--- a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md
+++ b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md
@@ -1,5 +1,7 @@
An XSIAM event collector for AWS Security Hub.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure AWS Security Hub Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
diff --git a/Packs/AWS-SecurityHub/pack_metadata.json b/Packs/AWS-SecurityHub/pack_metadata.json
index 5220455d3a56..2bd7bc38249c 100644
--- a/Packs/AWS-SecurityHub/pack_metadata.json
+++ b/Packs/AWS-SecurityHub/pack_metadata.json
@@ -21,5 +21,6 @@
],
"itemPrefix": [
"AWS Security Hub"
- ]
+ ],
+ "defaultDataSource": "AWS Security Hub Event Collector"
}
\ No newline at end of file
diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml
index c737e1bfc55e..5a74f7587690 100644
--- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml
+++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml
@@ -17,7 +17,8 @@ configuration:
type: 0
defaultvalue: 1 day
required: false
- hidden: true
+ hidden:
+ - marketplacev2
- display: Use system proxy settings
name: proxy
type: 8
diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md
index 2b2b0ba8a835..b4e7566c9556 100644
--- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md
+++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md
@@ -1,6 +1,8 @@
Abnormal Security Event Collector integration for XSIAM.
This integration was integrated and tested with version 01 of Abnormal Security Event Collector
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Abnormal Security Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/AbnormalSecurity/ReleaseNotes/2_2_10.md b/Packs/AbnormalSecurity/ReleaseNotes/2_2_10.md
new file mode 100644
index 000000000000..31ce63a54111
--- /dev/null
+++ b/Packs/AbnormalSecurity/ReleaseNotes/2_2_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Abnormal Security Event Collector
+
+- Fixed a validation error related to the *First fetch time interval* parameter.
diff --git a/Packs/AbnormalSecurity/pack_metadata.json b/Packs/AbnormalSecurity/pack_metadata.json
index 291eeef562e5..67e79f41a33a 100644
--- a/Packs/AbnormalSecurity/pack_metadata.json
+++ b/Packs/AbnormalSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Abnormal Security",
"description": "Abnormal Security detects and protects against the whole spectrum of email attacks",
"support": "partner",
- "currentVersion": "2.2.9",
+ "currentVersion": "2.2.10",
"author": "Abnormal Security",
"url": "",
"email": "support@abnormalsecurity.com",
@@ -23,5 +23,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Abnormal Security Event Collector"
}
\ No newline at end of file
diff --git a/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.py b/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.py
index 11a3c5d23b5b..38e6e82c676c 100644
--- a/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.py
+++ b/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.py
@@ -1,9 +1,8 @@
-from typing import Dict, Union
from CommonServerPython import *
from JSONFeedApiModule import * # noqa: E402
-def custom_build_iterator(client: Client, feed: Dict, limit, **kwargs) -> List:
+def custom_build_iterator(client: Client, feed: dict, limit, **kwargs) -> List:
"""
Implement the http_request with API that works with pagination and filtering. Uses the integration context to
save last fetch time to each indicator type
@@ -79,7 +78,7 @@ def custom_build_iterator(client: Client, feed: Dict, limit, **kwargs) -> List:
return result
-def create_fetch_configuration(indicators_type: list, filters: dict, params: dict) -> Dict[str, dict]:
+def create_fetch_configuration(indicators_type: list, filters: dict, params: dict) -> dict[str, dict]:
mapping_by_indicator_type = { # pragma: no cover
'IP': {
'last_seen_as': 'malwaretypes',
@@ -119,7 +118,7 @@ def create_fetch_configuration(indicators_type: list, filters: dict, params: dic
return indicators_configuration
-def build_feed_filters(params: dict) -> Dict[str, Optional[Union[str, list]]]:
+def build_feed_filters(params: dict) -> dict[str, Optional[str | list]]:
filters = {'severity.from': params.get('severity'),
'threat_types.values': params.get('threat_type'),
'confidence.from': params.get('confidence_from'),
@@ -131,7 +130,7 @@ def build_feed_filters(params: dict) -> Dict[str, Optional[Union[str, list]]]:
def main(): # pragma: no cover
params = demisto.params()
- filters: Dict[str, Optional[Union[str, list]]] = build_feed_filters(params)
+ filters: dict[str, Optional[str | list]] = build_feed_filters(params)
indicators_type: list = argToList(params.get('indicator_type', []))
params['feed_name_to_config'] = create_fetch_configuration(indicators_type, filters, params)
diff --git a/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.yml b/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.yml
index cf4656803bd1..260df6234623 100644
--- a/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.yml
+++ b/Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.yml
@@ -150,7 +150,7 @@ script:
name: limit
description: Gets the feed indicators.
name: acti-get-indicators
- dockerimage: demisto/py3-tools:1.0.0.86612
+ dockerimage: demisto/py3-tools:1.0.0.96102
feed: true
runonce: false
script: '-'
diff --git a/Packs/AccentureCTI_Feed/ReleaseNotes/1_1_36.md b/Packs/AccentureCTI_Feed/ReleaseNotes/1_1_36.md
new file mode 100644
index 000000000000..a46828bcfa93
--- /dev/null
+++ b/Packs/AccentureCTI_Feed/ReleaseNotes/1_1_36.md
@@ -0,0 +1,8 @@
+
+#### Integrations
+
+##### ACTI Indicator Feed
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
diff --git a/Packs/AccentureCTI_Feed/pack_metadata.json b/Packs/AccentureCTI_Feed/pack_metadata.json
index eb3bda844e45..1298b8cc9951 100644
--- a/Packs/AccentureCTI_Feed/pack_metadata.json
+++ b/Packs/AccentureCTI_Feed/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Accenture CTI Feed",
"description": "Accenture Cyber Threat Intelligence Feed",
"support": "partner",
- "currentVersion": "1.1.35",
+ "currentVersion": "1.1.36",
"author": "Accenture",
"url": "https://www.accenture.com/us-en/services/security/cyber-defense",
"email": "CTI.AcctManagement@accenture.com",
diff --git a/Packs/AccessInvestigation/ReleaseNotes/1_2_9.md b/Packs/AccessInvestigation/ReleaseNotes/1_2_9.md
new file mode 100644
index 000000000000..806f5468cbf3
--- /dev/null
+++ b/Packs/AccessInvestigation/ReleaseNotes/1_2_9.md
@@ -0,0 +1,3 @@
+## Access Investigation
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AccessInvestigation/pack_metadata.json b/Packs/AccessInvestigation/pack_metadata.json
index b03b5272a43a..cf0097aee1e8 100644
--- a/Packs/AccessInvestigation/pack_metadata.json
+++ b/Packs/AccessInvestigation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Access Investigation",
"description": "This Content Pack automates response to unauthorised access incidents and contains customer access incident views and layouts to aid investigation.",
"support": "xsoar",
- "currentVersion": "1.2.8",
+ "currentVersion": "1.2.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Active_Directory_Query/ReleaseNotes/1_6_35.md b/Packs/Active_Directory_Query/ReleaseNotes/1_6_35.md
new file mode 100644
index 000000000000..a3c67f80fab8
--- /dev/null
+++ b/Packs/Active_Directory_Query/ReleaseNotes/1_6_35.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### IAMInitADUser
+
+- Replaced the *pyminizip* library with *pyzipper*.
+- Updated the Docker image to *demisto/py3-tools:1.0.0.95440*.
diff --git a/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.py b/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.py
index 90a85b899b45..66d1de9c3748 100644
--- a/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.py
+++ b/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.py
@@ -2,7 +2,7 @@
from CommonServerPython import * # noqa: F401
import uuid
-import pyminizip
+from pyzipper import AESZipFile, ZIP_DEFLATED, WZ_AES
DEFAULT_PWD_GENERATION_SCRIPT = "GeneratePassword"
TEXT_FILE_NAME = "AD_Password" # File name for the text file (within the zip file) to use
@@ -214,7 +214,10 @@ def create_zip_with_password(args: dict, generated_password: str, zip_password:
with open(text_file_name, 'w') as text_file:
text_file.write(generated_password)
- pyminizip.compress(text_file_name, '', zip_file_name, zip_password, 1)
+ demisto.debug(f'zipping {text_file_name=}')
+ with AESZipFile(zip_file_name, mode='w', compression=ZIP_DEFLATED, encryption=WZ_AES) as zf:
+ zf.pwd = bytes(zip_password, 'utf-8')
+ zf.write(text_file_name)
with open(zip_file_name, 'rb') as zip_file:
zip_content = zip_file.read()
diff --git a/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.yml b/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.yml
index df5fa120c65c..2a4065a922cf 100644
--- a/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.yml
+++ b/Packs/Active_Directory_Query/Scripts/IAMInitADUser/IAMInitADUser.yml
@@ -80,7 +80,7 @@ tags:
- active directory
- Utility
type: python
-dockerimage: demisto/py3-tools:1.0.0.87415
+dockerimage: demisto/py3-tools:1.0.0.95440
runas: DBotWeakRole
tests:
- Active Directory Test
diff --git a/Packs/Active_Directory_Query/pack_metadata.json b/Packs/Active_Directory_Query/pack_metadata.json
index 4d2607f74187..33d8acd061b3 100644
--- a/Packs/Active_Directory_Query/pack_metadata.json
+++ b/Packs/Active_Directory_Query/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Active Directory Query",
"description": "Active Directory Query integration enables you to access and manage Active Directory objects (users, contacts, and computers).",
"support": "xsoar",
- "currentVersion": "1.6.34",
+ "currentVersion": "1.6.35",
"author": "Cortex XSOAR",
"url": "",
"email": "",
diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py
index e4db59b46765..00307f4f6b12 100644
--- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py
+++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py
@@ -238,7 +238,7 @@ def update_change(self,
# Created by C.L.
- def acknowledge_warning(self, change_path: str) -> dict:
+ def acknowledge_warning(self, change_path: str, allowed_input_type_param: str = 'post-verification-warnings-ack') -> dict:
"""
Acknowledge the warning message after updating a enrollment change
@@ -255,7 +255,7 @@ def acknowledge_warning(self, change_path: str) -> dict:
payload = '{"acknowledgement": "acknowledge"}'
return self._http_request(
method='POST',
- url_suffix=f"{change_path}/input/update/post-verification-warnings-ack",
+ url_suffix=f"{change_path}/input/update/{allowed_input_type_param}",
headers=headers,
data=payload
)
@@ -519,7 +519,7 @@ def update_property(self, property_type: str, domain_name: str, property_name: s
]
}
]
- trafficTargets: list[dict] = []
+ trafficTargets: List[dict] = []
elif property_type == "failover":
staticRRSets = []
trafficTargets = []
@@ -650,8 +650,8 @@ def get_network_list_by_id(self, network_list_id: str) -> dict:
url_suffix=f'/network-list/v2/network-lists/{network_list_id}',
params=params)
- def create_network_list(self, list_name: str, list_type: str, elements: list | str | None,
- description: str | None = None) -> dict:
+ def create_network_list(self, list_name: str, list_type: str, elements: Union[list, str] = None,
+ description: str = None) -> dict:
"""
Create network list
Args:
@@ -686,7 +686,7 @@ def delete_network_list(self, network_list_id: str) -> dict:
url_suffix=f'/network-list/v2/network-lists/{network_list_id}',
resp_type='response')
- def update_network_list_elements(self, network_list_id: str, elements: list | str) -> dict:
+ def update_network_list_elements(self, network_list_id: str, elements: Union[list, str]) -> dict:
"""
Update network list by ID
Args:
@@ -745,8 +745,8 @@ def update_network_list_elements(self, network_list_id: str, elements: list | st
f'{network_list_id}?extended=true&includeElements=true',
json_data=body)
- def activate_network_list(self, network_list_id: str, env: str, comment: str | None,
- notify: list | None) -> dict:
+ def activate_network_list(self, network_list_id: str, env: str, comment: str = None,
+ notify: list = None) -> dict:
"""
Activating network list in STAGING or PRODUCTION
Args:
@@ -768,7 +768,7 @@ def activate_network_list(self, network_list_id: str, env: str, comment: str | N
json_data=body,
resp_type='response')
- def add_elements_to_network_list(self, network_list_id: str, elements: list | str | None) -> dict:
+ def add_elements_to_network_list(self, network_list_id: str, elements: Union[list, str] = None) -> dict:
"""
Add elements to network list
Args:
@@ -1558,7 +1558,7 @@ def get_papi_edgehostname_creation_status(self,
def modify_appsec_config_selected_hosts(self, config_id: int,
config_version: int,
- hostname_list: list[dict[str, Any]],
+ hostname_list: List[dict],
mode: str
) -> dict:
"""
@@ -2106,7 +2106,7 @@ def get_cps_change_status(self,
''' HELPER FUNCTIONS '''
-def get_network_lists_ec(raw_response: list | None) -> tuple[list, list]:
+def get_network_lists_ec(raw_response: list = None) -> tuple[list, list]:
"""
Get raw response list of networks from Akamai and parse to ec
Args:
@@ -2146,7 +2146,7 @@ def get_network_lists_ec(raw_response: list | None) -> tuple[list, list]:
return entry_context, human_readable
-def get_list_from_file(entry_id: str | None) -> list:
+def get_list_from_file(entry_id: str = None) -> list:
"""
Get list of IPs and Geo from txt file
Args:
@@ -2354,7 +2354,6 @@ def get_cps_enrollment_by_cnname(raw_response: dict, cnname: str) -> dict:
Returns:
full enrollment info for given common name
"""
-
for enrollment in raw_response.get("enrollments", []):
if enrollment.get("csr").get("cn") == cnname:
return enrollment
@@ -2945,8 +2944,8 @@ def list_siteshield_maps_ec(raw_response: dict) -> tuple[list, list]:
entry_context = []
human_readable = []
if raw_response:
- entry_context.append(raw_response.get('siteShieldMaps', [])[0])
- human_readable.append(raw_response.get('siteShieldMaps', [])[0])
+ entry_context = raw_response.get('siteShieldMaps', [])
+ human_readable = raw_response.get('siteShieldMaps', [])
return entry_context, human_readable
@@ -3034,7 +3033,7 @@ def try_parsing_date(date: str, arr_fmt: list):
@logger
-def check_group_command(client: Client, checking_group_name: str) -> tuple[object, dict, list | dict]:
+def check_group_command(client: Client, checking_group_name: str) -> tuple[object, dict, Union[list, dict]]:
raw_response: dict = client.list_groups()
if raw_response:
human_readable = f'{INTEGRATION_NAME} - List Groups'
@@ -3073,7 +3072,7 @@ def check_group_command(client: Client, checking_group_name: str) -> tuple[objec
@logger
-def list_groups_command(client: Client) -> tuple[object, dict, list | dict]:
+def list_groups_command(client: Client) -> tuple[object, dict, Union[list, dict]]:
"""
List the information of all groups
@@ -3092,7 +3091,7 @@ def list_groups_command(client: Client) -> tuple[object, dict, list | dict]:
@logger
-def get_group_command(client: Client, group_id: int = 0) -> tuple[object, dict, list | dict]:
+def get_group_command(client: Client, group_id: int = 0) -> tuple[object, dict, Union[list, dict]]:
"""
Get the information of a group
Args:
@@ -3147,7 +3146,7 @@ def create_enrollment_command(client: Client,
ra: str = "third-party",
validation_type: str = "third-party",
sans: list = []
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
Create an enrollment
Args:
@@ -3218,7 +3217,7 @@ def create_enrollment_command(client: Client,
return f'{INTEGRATION_NAME} - Could not find any results for given query', {}, {}
-def list_enrollments_command(client: Client, contract_id: str) -> tuple[object, dict, list | dict]:
+def list_enrollments_command(client: Client, contract_id: str) -> tuple[object, dict, Union[list, dict]]:
"""
List enrollments
Args:
@@ -3239,7 +3238,7 @@ def list_enrollments_command(client: Client, contract_id: str) -> tuple[object,
# Created by C.L.
@logger
def get_enrollment_by_cn_command(client: Client, target_cn: str, contract_id: str = ""
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
List enrollments
Args:
@@ -3271,7 +3270,7 @@ def get_enrollment_by_cn_command(client: Client, target_cn: str, contract_id: st
@logger
def get_change_command(client: Client, enrollment_path: str, allowed_input_type_param: str = "third-party-csr"
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
Get change
Args:
@@ -3296,7 +3295,7 @@ def update_change_command(client: Client, change_path: str,
certificate: str, trust_chain: str,
allowed_input_type_param: str = "third-party-cert-and-trust-chain",
key_algorithm: str = "RSA"
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
Update a change
Args:
@@ -3322,9 +3321,25 @@ def update_change_command(client: Client, change_path: str,
# Created by C.L.
@logger
-def acknowledge_warning_command(client: Client, change_path: str) -> tuple[object, dict, list | dict]:
+def acknowledge_warning_command(client: Client,
+ change_path: str,
+ allowed_input_type_param: str = 'post-verification-warnings-ack'
+ ) -> tuple[object, dict, Union[list, dict]]:
+ """
+    Acknowledge the warning message after updating an enrollment change
- raw_response: dict = client.acknowledge_warning(change_path)
+ Args:
+ change_path: The path that includes enrollmentId and changeId: e.g. /cps/v2/enrollments/enrollmentId/changes/changeId
+        allowed_input_type_param: Enum found as the last part of Change.allowedInput[].update hypermedia URL.
+ supported values include:
+ change-management-ack,
+ lets-encrypt-challenges-completed,
+ post-verification-warnings-ack,
+ pre-verification-warnings-ack.
+ Returns:
+ Json response as dictionary
+ """
+ raw_response: dict = client.acknowledge_warning(change_path, allowed_input_type_param)
if raw_response:
human_readable = f'{INTEGRATION_NAME} - Acknowledge_warning'
@@ -3336,7 +3351,7 @@ def acknowledge_warning_command(client: Client, change_path: str) -> tuple[objec
# Created by C.L.
@logger
-def acknowledge_pre_verification_warning_command(client: Client, change_path: str) -> tuple[object, dict, list | dict]:
+def acknowledge_pre_verification_warning_command(client: Client, change_path: str) -> tuple[object, dict, Union[list, dict]]:
raw_response: dict = client.acknowledge_pre_verification_warning(change_path)
@@ -3351,7 +3366,7 @@ def acknowledge_pre_verification_warning_command(client: Client, change_path: st
# Created by C.L. Oct-06-22
-def get_production_deployment_command(client: Client, enrollment_id: str) -> tuple[object, dict, list | dict]:
+def get_production_deployment_command(client: Client, enrollment_id: str) -> tuple[object, dict, Union[list, dict]]:
raw_response: dict = client.get_production_deployment(enrollment_id)
@@ -3365,7 +3380,7 @@ def get_production_deployment_command(client: Client, enrollment_id: str) -> tup
# Created by C.L. Oct-06-22
-def get_change_history_command(client: Client, enrollment_id: str) -> tuple[object, dict, list | dict]:
+def get_change_history_command(client: Client, enrollment_id: str) -> tuple[object, dict, Union[list, dict]]:
raw_response: dict = client.get_change_history(enrollment_id)
@@ -3379,7 +3394,7 @@ def get_change_history_command(client: Client, enrollment_id: str) -> tuple[obje
# Created by C.L.
@logger
-def create_group_command(client: Client, group_path: str = '') -> tuple[object, dict, list | dict]:
+def create_group_command(client: Client, group_path: str = '') -> tuple[object, dict, Union[list, dict]]:
"""
Create a new group
Args:
@@ -3416,7 +3431,7 @@ def create_group_command(client: Client, group_path: str = '') -> tuple[object,
# Created by C.L.
-def get_domains_command(client: Client) -> tuple[object, dict, list | dict]:
+def get_domains_command(client: Client) -> tuple[object, dict, Union[list, dict]]:
"""
Get all of the existing domains
@@ -3432,7 +3447,7 @@ def get_domains_command(client: Client) -> tuple[object, dict, list | dict]:
return f'{INTEGRATION_NAME} - Could not find any results for given query', {}, {}
-def get_domain_command(client: Client, domain_name: str) -> tuple[object, dict, list | dict]:
+def get_domain_command(client: Client, domain_name: str) -> tuple[object, dict, Union[list, dict]]:
"""
Get information of a specific domain
Args:
@@ -3451,7 +3466,7 @@ def get_domain_command(client: Client, domain_name: str) -> tuple[object, dict,
@logger
-def create_domain_command(client: Client, group_id: int, domain_name: str) -> tuple[object, dict, list | dict]:
+def create_domain_command(client: Client, group_id: int, domain_name: str) -> tuple[object, dict, Union[list, dict]]:
"""
Creating domains
Args:
@@ -3474,7 +3489,7 @@ def create_domain_command(client: Client, group_id: int, domain_name: str) -> tu
# Created by C.L.
@logger
def create_datacenter_command(client: Client, domain_name: str, dc_name: str = "", dc_country: str = "US"
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
Updating or adding datacenter to existing GTM domain
Args:
@@ -3502,7 +3517,7 @@ def create_datacenter_command(client: Client, domain_name: str, dc_name: str = "
def update_property_command(client: Client, property_type: str, domain_name: str, property_name: str,
static_type: str = "", property_comments: str = "", static_server: str = "", server_1: str = "",
server_2: str = "", weight_1: int = 50, weight_2: int = 50, dc1_id: int = 3131, dc2_id: int = 3132
- ) -> tuple[object, dict, list | dict]:
+ ) -> tuple[object, dict, Union[list, dict]]:
"""
Updating or adding properties to existing GTM domain
@@ -3560,7 +3575,7 @@ def get_network_lists_command(
list_type: str = None,
extended: str = 'true',
include_elements: str = 'true',
-) -> tuple[object, dict, list | dict]:
+) -> tuple[object, dict, Union[list, dict]]:
"""Get network lists
Args:
@@ -3593,7 +3608,7 @@ def get_network_lists_command(
@logger
-def get_network_list_by_id_command(client: Client, network_list_id: str) -> tuple[object, dict, list | dict]:
+def get_network_list_by_id_command(client: Client, network_list_id: str) -> tuple[object, dict, Union[list, dict]]:
"""Get network list by ID
Args:
@@ -3621,9 +3636,9 @@ def get_network_list_by_id_command(client: Client, network_list_id: str) -> tupl
@logger
-def create_network_list_command(client: Client, list_name: str, list_type: str, description: str | None = None,
- entry_id: str | None = None, elements: str | list | None = None) \
- -> tuple[object, dict, list | dict]:
+def create_network_list_command(client: Client, list_name: str, list_type: str, description: str = None,
+ entry_id: str = None, elements: Union[str, list] = None) \
+ -> tuple[object, dict, Union[list, dict]]:
"""
Create network list
@@ -3663,7 +3678,7 @@ def create_network_list_command(client: Client, list_name: str, list_type: str,
@logger
-def delete_network_list_command(client: Client, network_list_id: str) -> tuple[object, dict, list | dict]:
+def delete_network_list_command(client: Client, network_list_id: str) -> tuple[object, dict, Union[list, dict]]:
"""Delete network list by ID
Args:
@@ -3682,8 +3697,8 @@ def delete_network_list_command(client: Client, network_list_id: str) -> tuple[o
@logger
-def update_network_list_elements_command(client: Client, network_list_id: str, elements: str | list | None = None) \
- -> tuple[object, dict, list | dict]:
+def update_network_list_elements_command(client: Client, network_list_id: str, elements: Union[str, list] = None) \
+ -> tuple[object, dict, Union[list, dict]]:
"""Update network list by ID
Args:
@@ -3707,8 +3722,8 @@ def update_network_list_elements_command(client: Client, network_list_id: str, e
@logger
-def activate_network_list_command(client: Client, network_list_ids: str, env: str, comment: str | None = None,
- notify: str | None = None) -> tuple[object, dict, list | dict]:
+def activate_network_list_command(client: Client, network_list_ids: str, env: str, comment: str = None,
+ notify: str = None) -> tuple[object, dict, Union[list, dict]]:
"""Activate network list by ID
Args:
@@ -3743,9 +3758,9 @@ def activate_network_list_command(client: Client, network_list_ids: str, env: st
@logger
-def add_elements_to_network_list_command(client: Client, network_list_id: str, entry_id: str | None = None,
- elements: str | list | None = None) \
- -> tuple[object, dict, list | dict]:
+def add_elements_to_network_list_command(client: Client, network_list_id: str, entry_id: str = None,
+ elements: Union[str, list] = None) \
+ -> tuple[object, dict, Union[list, dict]]:
"""Add elements to network list by ID
Args:
@@ -3775,7 +3790,7 @@ def add_elements_to_network_list_command(client: Client, network_list_id: str, e
@logger
def remove_element_from_network_list_command(client: Client, network_list_id: str, element: str) -> \
- tuple[object, dict, list | dict]:
+ tuple[object, dict, Union[list, dict]]:
"""Remove element from network list by ID
Args:
@@ -3796,8 +3811,8 @@ def remove_element_from_network_list_command(client: Client, network_list_id: st
@logger
-def get_activation_status_command(client: Client, network_list_ids: str | list, env: str) \
- -> tuple[str, dict[str, Any], list | dict]:
+def get_activation_status_command(client: Client, network_list_ids: Union[str, list], env: str) \
+ -> tuple[str, dict, Union[list, dict]]:
"""Get activation status
Args:
@@ -3859,7 +3874,7 @@ def clone_papi_property_command(client: Client,
group_id: str,
property_id: str,
version: str,
- check_existence_before_create="yes") -> tuple[str, dict[str, Any], list | dict]:
+ check_existence_before_create="yes") -> tuple[str, dict, Union[list, dict]]:
"""
Post clone property command
Args:
@@ -3919,7 +3934,7 @@ def add_papi_property_hostname_command(client: Client,
cname_from: str,
edge_hostname_id: str,
sleep_time: str = '30'
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
add hostname papi property
@@ -3971,7 +3986,7 @@ def add_papi_property_hostname_command(client: Client,
def list_papi_edgehostname_bygroup_command(client: Client,
contract_id: str,
group_id: str,
- domain_prefix: str) -> tuple[str, dict[str, Any], list | dict]:
+ domain_prefix: str) -> tuple[str, dict, Union[list, dict]]:
"""
add papi edge hostname command
Args:
@@ -4020,7 +4035,7 @@ def new_papi_edgehostname_command(client: Client,
secure: str,
secure_network: str,
cert_enrollment_id: str,
- check_existence_before_create="yes") -> tuple[str, dict[str, Any], list | dict]:
+ check_existence_before_create="yes") -> tuple[str, dict, Union[list, dict]]:
"""
add papi edge hostname command
@@ -4091,7 +4106,7 @@ def new_papi_edgehostname_command(client: Client,
def get_cps_enrollmentid_by_cnname_command(client: Client,
contract_id: str,
cnname: str,
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
get CPS EnrollmentID by Common Name
@@ -4105,7 +4120,6 @@ def get_cps_enrollmentid_by_cnname_command(client: Client,
"""
raw_response: dict = client.list_cps_enrollments(contract_id=contract_id)
-
enrollment: dict = get_cps_enrollment_by_cnname(raw_response=raw_response, cnname=cnname)
title = f'{INTEGRATION_NAME} - Get cps enrollmentid by cnname command'
entry_context, human_readable_ec = get_cps_enrollment_by_cnname_ec(enrollment)
@@ -4129,7 +4143,7 @@ def new_papi_cpcode_command(client: Client,
group_id: str,
cpcode_name: str,
check_existence_before_create="yes"
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
get papi property All Versions by group_id and property_id command
Args:
@@ -4189,7 +4203,7 @@ def patch_papi_property_rule_cpcode_command(client: Client,
path: str,
cpcode_id: str,
name: str,
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
get papi property All Versions by group_id and property_id command
Args:
@@ -4254,7 +4268,7 @@ def patch_papi_property_rule_origin_command(client: Client,
external_url: str,
gzip_compression: str,
sleep_time: str = '30',
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
get papi property All Versions by group_id and property_id command
Args:
@@ -4395,7 +4409,7 @@ def activate_papi_property_command(client: Client,
notify_emails: str,
property_version: str,
note: str,
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
activate an property command
Args:
@@ -4442,7 +4456,7 @@ def clone_security_policy_command(client: Client,
create_from_security_policy: str,
policy_name: str,
policy_prefix: str = '',
- check_existence_before_create="yes") -> tuple[str, dict[str, Any], list | dict]:
+ check_existence_before_create="yes") -> tuple[str, dict, Union[list, dict]]:
"""
Clone security policy property command
Args:
@@ -4542,7 +4556,7 @@ def new_match_target_command(client: Client,
file_paths: str,
hostnames: str,
policy_id: str
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
New match target command
Args:
@@ -4599,7 +4613,7 @@ def activate_appsec_config_version_command(client: Client,
notification_emails: str,
action: str,
network: str,
- note: str) -> tuple[str, dict[str, Any], list | dict]:
+ note: str) -> tuple[str, dict, Union[list, dict]]:
"""
Activate appsec config version command
Args:
@@ -4644,7 +4658,7 @@ def activate_appsec_config_version_command(client: Client,
def get_appsec_config_activation_status_command(client: Client,
activation_id: str,
sleep_time: str,
- retries: str) -> tuple[str, dict[str, Any], list | dict]:
+ retries: str) -> tuple[str, dict, Union[list, dict]]:
"""
Get appsec config version activation status command
Args:
@@ -4686,7 +4700,7 @@ def get_appsec_config_latest_version_command(client: Client,
sec_config_name: str,
sleep_time: str,
retries: str,
- skip_consistency_check: str) -> tuple[str, dict[str, Any], list | dict]:
+ skip_consistency_check: str) -> tuple[str, dict, Union[list, dict]]:
"""
1) Get appsec config Id and latestVersion.
2) Check latestVersion and stagingVersion, productionVersion consistency
@@ -4738,7 +4752,7 @@ def get_security_policy_id_by_name_command(client: Client,
config_id: str,
config_version: str,
policy_name: str,
- is_baseline_policy: str) -> tuple[str, dict[str, Any], list | dict]:
+ is_baseline_policy: str) -> tuple[str, dict, Union[list, dict]]:
"""
get a security policy ID by Policy name
It is also used to get the policy ID of "Baseline Security Policy"
@@ -4787,7 +4801,7 @@ def clone_appsec_config_version_command(client: Client,
create_from_version: str,
do_not_clone: str,
rule_update: bool = True,
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Appsec Configurtion - create a new version by clone the latest version
Args:
@@ -4835,7 +4849,7 @@ def patch_papi_property_rule_httpmethods_command(client: Client,
operation: str,
path: str,
value: dict,
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Patch papi property All Versions by group_id and property_id command
Args:
@@ -4892,7 +4906,7 @@ def get_papi_property_activation_status_command(client: Client,
activation_id: int,
property_id: int,
sleep_time: str,
- retries: str) -> tuple[str, dict[str, Any], list | dict]:
+ retries: str) -> tuple[str, dict, Union[list, dict]]:
"""
Get papi property activation status command - retry if the status is not "activate"
Args:
@@ -4936,7 +4950,7 @@ def get_papi_edgehostname_creation_status_command(client: Client,
edgehostname_id: str,
options: str,
sleep_time: str,
- retries: str) -> tuple[str, dict[str, Any], list | dict]:
+ retries: str) -> tuple[str, dict, Union[list, dict]]:
"""
Get papi property activation status command - retry if the status is not "activate"
Args:
@@ -4984,7 +4998,7 @@ def modify_appsec_config_selected_hosts_command(client: Client,
config_version: int,
hostname_list: list,
mode: str
- ) -> tuple[str, dict, list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Update the list of selected hostnames for a configuration version.
@@ -5027,7 +5041,7 @@ def patch_papi_property_rule_siteshield_command(client: Client,
operation: str,
path: str,
ssmap: str
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Patch papi property default rule's site shield command
Args:
@@ -5081,7 +5095,7 @@ def patch_papi_property_rule_siteshield_command(client: Client,
def update_appsec_config_version_notes_command(client: Client,
config_id: int,
config_version: int,
- notes: str) -> tuple[str, dict[str, Any], list | dict]:
+ notes: str) -> tuple[str, dict, Union[list, dict]]:
"""
Update application secuirty configuration version notes command
Args:
@@ -5117,7 +5131,7 @@ def new_or_renew_match_target_command(client: Client,
file_paths: str,
hostnames: str,
policy_id: str
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
New match target if no existing found otherwise update the existing match target hostnames
If there are multiple match targets found, the first one in the list will be updated
@@ -5207,7 +5221,7 @@ def patch_papi_property_rule_command(client: Client,
path: str,
value: str,
value_to_json: str
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Generic JSON patch command for Papi Property default rule
Args:
@@ -5264,7 +5278,7 @@ def get_papi_property_rule_command(client: Client,
property_version: int,
group_id: str,
validate_rules: str
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Get Papi Property default rule
Args:
@@ -5306,7 +5320,7 @@ def get_papi_property_rule_command(client: Client,
def get_papi_property_by_name_command(client: Client,
contract_id: str,
group_id: str,
- property_name: str,) -> tuple[str, dict[str, Any], list | dict]:
+ property_name: str,) -> tuple[str, dict, Union[list, dict]]:
"""
Get papi property within a group by property name
Args:
@@ -5352,7 +5366,7 @@ def get_papi_property_by_name_command(client: Client,
def get_papi_property_by_id_command(client: Client,
contract_id: str,
group_id: str,
- property_id: str,) -> tuple[str, dict[str, Any], list | dict]:
+ property_id: str,) -> tuple[str, dict, Union[list, dict]]:
"""
Get papi property within a group by property name
Args:
@@ -5387,7 +5401,7 @@ def list_papi_property_by_group_command(client: Client,
contract_id: str,
group_id: str,
context_path: str = 'PapiProperty.ByGroup',
- ) -> tuple[str, dict[str, Any], list | dict]:
+ ) -> tuple[str, dict, Union[list, dict]]:
"""
Lists properties available for the current contract and group.
Args:
@@ -5423,7 +5437,7 @@ def new_papi_property_version_command(client: Client,
contract_id: str,
property_id: str,
group_id: str,
- create_from_version: str) -> tuple[str, dict[str, Any], list | dict]:
+ create_from_version: str) -> tuple[str, dict, Union[list, dict]]:
"""
Create a new property version based on any previous version.
All data from the createFromVersion populates the new version, including its rules and hostnames.
@@ -5461,7 +5475,7 @@ def new_papi_property_version_command(client: Client,
def list_papi_property_activations_command(client: Client,
contract_id: str,
property_id: str,
- group_id: str,) -> tuple[str, dict[str, Any], list | dict]:
+ group_id: str,) -> tuple[str, dict, Union[list, dict]]:
"""
This lists all activations for all versions of a property, on both production and staging networks.
@@ -5494,7 +5508,7 @@ def list_papi_property_activations_command(client: Client,
@logger
def list_appsec_configuration_activation_history_command(client: Client,
- config_id: int,) -> tuple[str, dict[str, Any], list | dict]:
+ config_id: int,) -> tuple[str, dict, Union[list, dict]]:
"""
Lists the activation history for a configuration.
The history is an array in descending order of submitDate.
@@ -5529,7 +5543,7 @@ def list_papi_property_by_hostname_command(client: Client,
hostname: str,
network: str = None,
contract_id: str = None,
- group_id: str = None,) -> tuple[str, dict[str, Any], list | dict]:
+ group_id: str = None,) -> tuple[str, dict, Union[list, dict]]:
"""
This operation lists active property hostnames for all properties available in an account.
@@ -5565,7 +5579,7 @@ def list_papi_property_by_hostname_command(client: Client,
# Created by D.S. 2023-03-30
@logger
-def list_siteshield_maps_command(client: Client) -> tuple[str, dict[str, Any], list | dict]:
+def list_siteshield_maps_command(client: Client) -> tuple[str, dict, Union[list, dict]]:
"""
Returns a list of all Site Shield maps that belong to your account.
@@ -5583,7 +5597,6 @@ def list_siteshield_maps_command(client: Client) -> tuple[str, dict[str, Any], l
context_entry: dict = {
f"{INTEGRATION_CONTEXT_NAME}.SiteShieldMaps": entry_context
}
-
human_readable = tableToMarkdown(
name=title,
t=human_readable_ec,
@@ -5596,7 +5609,7 @@ def list_siteshield_maps_command(client: Client) -> tuple[str, dict[str, Any], l
@logger
def get_cps_enrollment_deployment_command(client: Client,
enrollment_id: int,
- environment: str = 'production',) -> tuple[str, dict[str, Any], list | dict]:
+ environment: str = 'production',) -> tuple[str, dict, Union[list, dict]]:
"""
Returns the certification/Enarollment deployment status for specific a environtment: production or staging.
@@ -5631,7 +5644,7 @@ def get_cps_enrollment_deployment_command(client: Client,
@logger
def list_cidr_blocks_command(client: Client,
last_action: str = '',
- effective_date_gt: str = '') -> tuple[str, dict[str, Any], list | dict]:
+ effective_date_gt: str = '') -> tuple[str, dict, Union[list, dict]]:
"""
List all CIDR blocks for all services you are subscribed to.
To see additional CIDR blocks, subscribe yourself to more services and run this operation again.
@@ -5682,7 +5695,7 @@ def update_cps_enrollment_command(client: Client,
deploy_not_before: str = "",
force_renewal: str = 'false',
renewal_date_check_override: str = 'true',
- allow_missing_certificate_addition: str = 'false') -> tuple[str, dict[str, Any], list | dict]:
+ allow_missing_certificate_addition: str = 'false') -> tuple[str, dict, Union[list, dict]]:
import json
"""
Updates an enrollment with changes. Response type will vary depending on the type and impact of change.
@@ -5793,7 +5806,7 @@ def update_cps_enrollment_schedule_command(client: Client,
enrollment_id: str = '',
change_id: str = '',
deploy_not_before: str = '',
- deploy_not_after: str = None) -> tuple[str, dict[str, Any], list | dict]:
+ deploy_not_after: str = None) -> tuple[str, dict, Union[list, dict]]:
"""
Updates the current deployment schedule.
Reference: https://techdocs.akamai.com/cps/reference/put-change-deployment-schedule
@@ -5860,7 +5873,7 @@ def update_cps_enrollment_schedule_command(client: Client,
def get_cps_change_status_command(client: Client,
enrollment_path: str = "",
enrollment_id: str = "",
- change_id: str = "",) -> tuple[str, dict[str, Any], list | dict]:
+ change_id: str = "",) -> tuple[str, dict, Union[list, dict]]:
"""
Gets the status of a pending change.
diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml
index 95dffaf7edd7..092af00bb75d 100644
--- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml
+++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml
@@ -28,16 +28,19 @@ configuration:
hiddenusername: true
type: 9
required: false
+ display: ''
- displaypassword: Access token
name: credentials_access_token
hiddenusername: true
type: 9
required: false
+ display: ''
- displaypassword: Client secret
name: credentials_client_secret
hiddenusername: true
type: 9
required: false
+ display: ''
- display: Trust any certificate (not secure)
name: insecure
type: 8
@@ -1110,6 +1113,15 @@ script:
name: change_path
required: true
type: textArea
+ - auto: PREDEFINED
+ defaultValue: post-verification-warnings-ack
+ description: Enum found as the last part of Change.allowedInput[].update hypermedia URL.
+ name: allowed_input_type_param
+ predefined:
+ - change-management-ack
+ - lets-encrypt-challenges-completed
+ - post-verification-warnings-ack
+ - pre-verification-warnings-ack
description: Acknowledge the warning message for uploading the certs and trust chains of enrollments.
- arguments:
- description: A unique identifier for each configuration.
diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py
index a472e8b11168..72ae7e3c887e 100644
--- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py
+++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py
@@ -196,3 +196,57 @@ def test_try_parsing_date():
with pytest.raises(ValueError) as e:
try_parsing_date(date4, arr_fmt)
assert value_error == str(e.value)
+
+
+def test_list_siteshield_maps_command(mocker, akamai_waf_client):
+ """
+ When:
+ - running the command list_siteshield_maps_command.
+ Then:
+ - The returned value is correct.
+ """
+ from Akamai_WAF import list_siteshield_maps_command
+
+ test_data = util_load_json('test_data/list_siteshild_maps_test.json')
+ expected_raw_response = test_data.get('raw_response')
+ expected_human_readable = test_data.get('human_readable')
+ expected_context_entry = test_data.get('context_entry')
+
+ mocker.patch.object(akamai_waf_client, 'list_siteshield_maps', return_value=expected_raw_response)
+
+ human_readable, context_entry, raw_response = list_siteshield_maps_command(client=akamai_waf_client)
+ assert expected_raw_response == raw_response
+ assert expected_human_readable == human_readable
+ assert expected_context_entry == context_entry
+
+
+def test_acknowledge_warning_command(mocker, akamai_waf_client):
+ """
+ Given:
+    - A change_path.
+    When:
+    - running the command acknowledge_warning_command.
+ Then:
+ - The returned value is correct.
+ """
+ from Akamai_WAF import acknowledge_warning_command
+
+ change_path = "/cps/v2/enrollments/10002/changes/10002"
+ expected_raw_response = {
+ "change": "/cps/v2/enrollments/10002/changes/10002"
+ }
+ expected_human_readable = "Akamai WAF - Acknowledge_warning"
+ expected_context_entry = {
+ 'Akamai.Acknowledge':
+ {
+ 'change': '/cps/v2/enrollments/10002/changes/10002'
+ }
+ }
+
+ mocker.patch.object(akamai_waf_client, 'acknowledge_warning', return_value=expected_raw_response)
+
+ human_readable, context_entry, raw_response = acknowledge_warning_command(client=akamai_waf_client,
+ change_path=change_path)
+ assert expected_raw_response == raw_response
+ assert expected_human_readable == human_readable
+ assert expected_context_entry == context_entry
diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md b/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md
index 9d00af6c4604..f37d8639a161 100644
--- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md
+++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md
@@ -22,6 +22,7 @@ This is the modified version where a new command "akamai-update-network-list-ele
4. Click **Test** to validate the URLs, token, and connection.
+
## Commands
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
@@ -1075,10 +1076,12 @@ Acknowledge the warning message for uploading the certs and trust chains of enro
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| change_path | The path of the changed certificate. | Required |
+| allowed_input_type_param | Enum found as the last part of Change.allowedInput[].update hypermedia URL. Possible values are: change-management-ack, lets-encrypt-challenges-completed, post-verification-warnings-ack, pre-verification-warnings-ack. Default is post-verification-warnings-ack. | Optional |
#### Context Output
There is no context output for this command.
+
### akamai-modify-appsec-config-selected-hosts
***
diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/list_siteshild_maps_test.json b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/list_siteshild_maps_test.json
new file mode 100644
index 000000000000..433efb984667
--- /dev/null
+++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/list_siteshild_maps_test.json
@@ -0,0 +1,143 @@
+{
+ "raw_response": {
+ "siteShieldMaps": [
+ {
+ "acknowledgeRequiredBy": 1679661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "jsmith",
+ "acknowledgedOn": 1649661984000,
+ "contacts": [
+ "jsmith@example.com"
+ ],
+ "currentCidrs": [
+ "1.1.1.1/1"
+ ],
+ "id": 123456789,
+ "latestTicketId": 2456,
+ "mapAlias": "ABC Company",
+ "mcmMapRuleId": 12345,
+ "proposedCidrs": [
+ "2.2.2.2/2"
+ ],
+ "ruleName": "ss.akamai.net",
+ "service": "S",
+ "shared": false,
+ "sureRouteName": "route.name.example.com",
+ "type": "Production"
+ },
+ {
+ "acknowledgeRequiredBy": 1569661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "bmack",
+ "acknowledgedOn": 1569661984000,
+ "contacts": [
+ "new@email.com"
+ ],
+ "currentCidrs": [
+ "1.1.1.1/1"
+ ],
+ "id": 7964,
+ "latestTicketId": 5884,
+ "mapAlias": "Capybara Corp",
+ "mcmMapRuleId": 957,
+ "proposedCidrs": [],
+ "ruleName": "e;s9.akamaiedge.net",
+ "service": "S",
+ "shared": false,
+ "type": "Production"
+ },
+ {
+ "acknowledgeRequiredBy": 1684661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "sclaus",
+ "acknowledgedOn": 1647261984000,
+ "contacts": [
+ "test@test.com"
+ ],
+ "currentCidrs": [],
+ "id": 65,
+ "latestTicketId": 883,
+ "mapAlias": "Kookaburra Inc secure",
+ "mcmMapRuleId": 4255,
+ "proposedCidrs": [
+ "1.1.1.1/1"
+ ],
+ "ruleName": "e;s1.akamaiedge.net",
+ "service": "S",
+ "shared": false,
+ "sureRouteName": "route.name.example.com",
+ "type": "Production"
+ }
+ ]
+ },
+ "human_readable": "### Akamai WAF - list siteshield map command\n|acknowledgeRequiredBy|acknowledged|acknowledgedBy|acknowledgedOn|contacts|currentCidrs|id|latestTicketId|mapAlias|mcmMapRuleId|proposedCidrs|ruleName|service|shared|sureRouteName|type|\n|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|\n| 1679661685000 | true | jsmith | 1649661984000 | jsmith@example.com | 1.1.1.1/1 | 123456789 | 2456 | ABC Company | 12345 | 2.2.2.2/2 | ss.\u200bakamai.\u200bnet | S | false | route.name.example.com | Production |\n| 1569661685000 | true | bmack | 1569661984000 | new@email.com | 1.1.1.1/1 | 7964 | 5884 | Capybara Corp | 957 | | e;s9.akamaiedge.net | S | false | | Production |\n| 1684661685000 | true | sclaus | 1647261984000 | test@test.com | | 65 | 883 | Kookaburra Inc secure | 4255 | 1.1.1.1/1 | e;s1.akamaiedge.net | S | false | route.name.example.com | Production |\n",
+ "context_entry": {
+ "Akamai.SiteShieldMaps":
+ [{
+ "acknowledgeRequiredBy": 1679661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "jsmith",
+ "acknowledgedOn": 1649661984000,
+ "contacts": [
+ "jsmith@example.com"
+ ],
+ "currentCidrs": [
+ "1.1.1.1/1"
+ ],
+ "id": 123456789,
+ "latestTicketId": 2456,
+ "mapAlias": "ABC Company",
+ "mcmMapRuleId": 12345,
+ "proposedCidrs": [
+ "2.2.2.2/2"
+ ],
+ "ruleName": "ss.\u200bakamai.\u200bnet",
+ "service": "S",
+ "shared": false,
+ "sureRouteName": "route.name.example.com",
+ "type": "Production"
+ },
+ {
+ "acknowledgeRequiredBy": 1569661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "bmack",
+ "acknowledgedOn": 1569661984000,
+ "contacts": [
+ "new@email.com"
+ ],
+ "currentCidrs": [
+ "1.1.1.1/1"
+ ],
+ "id": 7964,
+ "latestTicketId": 5884,
+ "mapAlias": "Capybara Corp",
+ "mcmMapRuleId": 957,
+ "proposedCidrs": [],
+ "ruleName": "e;s9.akamaiedge.net",
+ "service": "S",
+ "shared": false,
+ "type": "Production"
+ },
+ {
+ "acknowledgeRequiredBy": 1684661685000,
+ "acknowledged": true,
+ "acknowledgedBy": "sclaus",
+ "acknowledgedOn": 1647261984000,
+ "contacts": [
+ "test@test.com"
+ ],
+ "currentCidrs": [],
+ "id": 65,
+ "latestTicketId": 883,
+ "mapAlias": "Kookaburra Inc secure",
+ "mcmMapRuleId": 4255,
+ "proposedCidrs": [
+ "1.1.1.1/1"
+ ],
+ "ruleName": "e;s1.akamaiedge.net",
+ "service": "S",
+ "shared": false,
+ "sureRouteName": "route.name.example.com",
+ "type": "Production"
+ }]}
+}
\ No newline at end of file
diff --git a/Packs/Akamai_WAF/ReleaseNotes/2_0_11.md b/Packs/Akamai_WAF/ReleaseNotes/2_0_11.md
new file mode 100644
index 000000000000..58544f0434e5
--- /dev/null
+++ b/Packs/Akamai_WAF/ReleaseNotes/2_0_11.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Akamai WAF
+
+Fixed an issue with the ***akamai-list-siteshield-map***command, which only returned the first map.
+Added support for the *allowed_input_type_param* argument in the ***akamai-acknowledge-warning-command*** command.
diff --git a/Packs/Akamai_WAF/pack_metadata.json b/Packs/Akamai_WAF/pack_metadata.json
index 82b6ba313cea..de330bb2ea05 100644
--- a/Packs/Akamai_WAF/pack_metadata.json
+++ b/Packs/Akamai_WAF/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Akamai WAF",
"description": "Use the Akamai WAF integration to manage common sets of lists used by various Akamai security products and features.",
"support": "xsoar",
- "currentVersion": "2.0.10",
+ "currentVersion": "2.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml b/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml
index 986af5d727b5..938854340f43 100644
--- a/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml
+++ b/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml
@@ -79,7 +79,7 @@ tasks:
- "3"
scriptarguments:
ip:
- simple: 8.8.8.8
+ simple: 1.2.3.4
threshold: {}
continueonerror: true
separatecontext: false
@@ -216,7 +216,7 @@ tasks:
- "7"
scriptarguments:
domain:
- simple: example.com
+ simple: paloaltonetworks.com
threshold: {}
continueonerror: true
separatecontext: false
@@ -894,7 +894,7 @@ tasks:
- "27"
scriptarguments:
indicator:
- simple: 8.8.8.8
+ simple: 1.2.3.4
indicator_type:
simple: IPv4
continueonerror: true
@@ -1022,7 +1022,7 @@ tasks:
- "31"
scriptarguments:
indicator:
- simple: 8.8.8.8
+ simple: 1.2.3.4
indicator-type:
simple: IPv4
indicator_type:
@@ -1543,7 +1543,7 @@ tasks:
- "47"
scriptarguments:
indicator:
- simple: 8.8.8.8
+ simple: 1.2.3.4
indicator-type:
simple: IPv4
indicator_type:
diff --git a/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.json b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.json
new file mode 100644
index 000000000000..69a2b3294c46
--- /dev/null
+++ b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) Anomali ThreatStream v3 will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.md b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.md
new file mode 100644
index 000000000000..673e4daee7a8
--- /dev/null
+++ b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_19.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Anomali ThreatStream v3
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now Anomali ThreatStream v3 will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_20.md b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_20.md
new file mode 100644
index 000000000000..9ac996d4e564
--- /dev/null
+++ b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Anomali ThreatStream v3
+
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.93223*.
diff --git a/Packs/Anomali_ThreatStream/pack_metadata.json b/Packs/Anomali_ThreatStream/pack_metadata.json
index 62f095e9fc95..d42aa334e2b6 100644
--- a/Packs/Anomali_ThreatStream/pack_metadata.json
+++ b/Packs/Anomali_ThreatStream/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Anomali ThreatStream",
"description": "Use Anomali ThreatStream to query and submit threats.",
"support": "xsoar",
- "currentVersion": "2.2.18",
+ "currentVersion": "2.2.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AnsibleLinux/.pack-ignore b/Packs/AnsibleLinux/.pack-ignore
index 1be001011081..3ddf53cc756d 100644
--- a/Packs/AnsibleLinux/.pack-ignore
+++ b/Packs/AnsibleLinux/.pack-ignore
@@ -1,5 +1,5 @@
[file:README.md]
-ignore=RM100,RM113
+ignore=RM113
[file:AnsibleLinux.yml]
ignore=BA124,IN145
diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
index 219d0d58c0d6..bd002c6786e5 100644
--- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
+++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py
@@ -4,7 +4,7 @@
import copy
import re
from operator import itemgetter
-
+import json
from typing import Tuple, Callable
# Disable insecure warnings
@@ -142,6 +142,10 @@
"resourceType",
"tenantId",
}
+RBAC_VALIDATIONS_VERSION = '8.6.0'
+RBAC_VALIDATIONS_BUILD_NUMBER = '992980'
+FORWARD_USER_RUN_RBAC = is_xsiam() and is_demisto_version_ge(version=RBAC_VALIDATIONS_VERSION,
+ build_number=RBAC_VALIDATIONS_BUILD_NUMBER)
class CoreClient(BaseClient):
@@ -150,6 +154,79 @@ def __init__(self, base_url: str, headers: dict, timeout: int = 120, proxy: bool
super().__init__(base_url=base_url, headers=headers, proxy=proxy, verify=verify)
self.timeout = timeout
+ def _http_request(self, method, url_suffix='', full_url=None, headers=None, json_data=None,
+ params=None, data=None, timeout=None, raise_on_status=False, ok_codes=None,
+ error_handler=None, with_metrics=False, resp_type='json'):
+ '''
+ """A wrapper for requests lib to send our requests and handle requests and responses better.
+
+ :type method: ``str``
+ :param method: The HTTP method, for example: GET, POST, and so on.
+
+
+ :type url_suffix: ``str``
+ :param url_suffix: The API endpoint.
+
+
+ :type full_url: ``str``
+ :param full_url:
+ Bypasses the use of self._base_url + url_suffix. This is useful if you need to
+ make a request to an address outside of the scope of the integration
+ API.
+
+
+ :type headers: ``dict``
+ :param headers: Headers to send in the request. If None, will use self._headers.
+
+
+ :type params: ``dict``
+ :param params: URL parameters to specify the query.
+
+
+ :type data: ``dict``
+ :param data: The data to send in a 'POST' request.
+
+
+ :type raise_on_status ``bool``
+ :param raise_on_status: Similar meaning to ``raise_on_redirect``:
+ whether we should raise an exception, or return a response,
+ if status falls in ``status_forcelist`` range and retries have
+ been exhausted.
+
+
+ :type timeout: ``float`` or ``tuple``
+ :param timeout:
+ The amount of time (in seconds) that a request will wait for a client to
+ establish a connection to a remote machine before a timeout occurs.
+ can be only float (Connection Timeout) or a tuple (Connection Timeout, Read Timeout).
+ '''
+ if not FORWARD_USER_RUN_RBAC:
+ return BaseClient._http_request(self, # we use the standard base_client http_request without overriding it
+ method=method,
+ url_suffix=url_suffix,
+ full_url=full_url,
+ headers=headers,
+ json_data=json_data, params=params, data=data,
+ timeout=timeout,
+ raise_on_status=raise_on_status,
+ ok_codes=ok_codes,
+ error_handler=error_handler,
+ with_metrics=with_metrics,
+ resp_type=resp_type)
+ headers = headers if headers else self._headers
+ data = json.dumps(json_data) if json_data else data
+ address = full_url if full_url else urljoin(self._base_url, url_suffix)
+ response = demisto._apiCall(
+ method=method,
+ path=address,
+ data=data,
+ headers=headers,
+ timeout=timeout
+ )
+ if ok_codes and response.get('status') not in ok_codes:
+ self._handle_error(error_handler, response, with_metrics)
+ return json.loads(response['data'])
+
def get_incidents(self, incident_id_list=None, lte_modification_time=None, gte_modification_time=None,
lte_creation_time=None, gte_creation_time=None, status=None, starred=None,
starred_incidents_fetch_window=None, sort_by_modification_time=None, sort_by_creation_time=None,
@@ -349,15 +426,13 @@ def get_endpoints(self,
request_data['filters'] = filters
- reply = self._http_request(
+ response = self._http_request(
method='POST',
url_suffix='/endpoints/get_endpoint/',
json_data={'request_data': request_data},
timeout=self.timeout
)
- demisto.debug(f"get_endpoints response = {reply}")
-
- endpoints = reply.get('reply').get('endpoints', [])
+ endpoints = response.get('reply', {}).get('endpoints', [])
return endpoints
def set_endpoints_alias(self, filters: list[dict[str, str]], new_alias_name: str | None) -> dict: # pragma: no cover
@@ -470,7 +545,6 @@ def get_distribution_versions(self):
json_data={},
timeout=self.timeout
)
-
return reply.get('reply')
def create_distribution(self, name, platform, package_type, agent_version, description):
diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
index 6da6314511e2..92a1db7fff8c 100644
--- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
+++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py
@@ -9,7 +9,7 @@
import demistomock
import demistomock as demisto
-from CommonServerPython import Common, tableToMarkdown, pascalToSpace, DemistoException
+from CommonServerPython import Common, tableToMarkdown, pascalToSpace, DemistoException, BaseClient
from CoreIRApiModule import CoreClient, handle_outgoing_issue_closure, XSOAR_RESOLVED_STATUS_TO_XDR
from CoreIRApiModule import add_tag_to_endpoints_command, remove_tag_from_endpoints_command, quarantine_files_command, \
isolate_endpoint_command, list_user_groups_command, parse_user_groups, list_users_command, list_roles_command, \
@@ -4053,3 +4053,45 @@ def test_xsoar_to_xdr_flexible_close_reason_mapping(capfd, mocker, custom_mappin
assert remote_args.delta.get('status')
assert remote_args.delta['status'] == expected_resolved_status[i]
+
+
+def test_http_request_demisto_call(mocker):
+ """
+ Given:
+ - An XSIAM machine with a build version that supports demisto._apiCall() with RBAC validations.
+ When:
+ - Calling the http_request method.
+ Then:
+ - Make sure demisto._apiCall() is being called and the method returns the expected result.
+ """
+ from CoreIRApiModule import CoreClient
+ client = CoreClient(
+ base_url=f'{Core_URL}/public_api/v1', headers={}
+ )
+ mocker.patch("CoreIRApiModule.FORWARD_USER_RUN_RBAC", new=True)
+ mocker.patch.object(demisto, "_apiCall", return_value={'name': '/api/webapp/public_api/v1/distributions/get_versions/',
+ 'status': 200,
+ 'data': '{"reply":[{"container": ["1.1.1.1"]}]}'})
+ res = client._http_request(method="POST",
+ url_suffix="/distributions/get_versions/")
+ assert res == {"reply": [{"container": ["1.1.1.1"]}]}
+
+
+def test_http_request_base_client(mocker):
+ """
+ Given:
+ - An XSIAM machine with a build version that supports demisto._apiCall() with RBAC validations.
+ When
+ - Calling the http_request method.
+ Then
+ - Make sure demisto._apiCall() is being called and the method returns the expected result.
+ """
+ from CoreIRApiModule import CoreClient
+ client = CoreClient(
+ base_url=f'{Core_URL}/public_api/v1', headers={}
+ )
+ mocker.patch("CoreIRApiModule.FORWARD_USER_RUN_RBAC", new=False)
+ mocker.patch.object(BaseClient, "_http_request", return_value={'data': {"reply": [{"container": ["1.1.1.1"]}]}})
+ res = client._http_request(method="POST",
+ url_suffix="/distributions/get_versions/")
+ assert res['data'] == {"reply": [{"container": ["1.1.1.1"]}]}
diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
index a2245ebf7675..811b033ff685 100644
--- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
+++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
@@ -20,6 +20,7 @@ class Scopes:
class Resources:
graph = 'https://graph.microsoft.com/'
security_center = 'https://api.securitycenter.microsoft.com/'
+ security = 'https://api.security.microsoft.com/'
management_azure = 'https://management.azure.com/' # resource_manager
manage_office = 'https://manage.office.com/'
@@ -40,7 +41,7 @@ class Resources:
# Deprecated, prefer using AZURE_CLOUDS
TOKEN_RETRIEVAL_ENDPOINTS = {
'com': 'https://login.microsoftonline.com',
- 'gcc': 'https://login.microsoftonline.us',
+ 'gcc': 'https://login.microsoftonline.com',
'gcc-high': 'https://login.microsoftonline.us',
'dod': 'https://login.microsoftonline.us',
'de': 'https://login.microsoftonline.de',
@@ -120,7 +121,7 @@ class Resources:
'geo-us': 'https://securitycenter.onmicrosoft.com',
'geo-eu': 'https://securitycenter.onmicrosoft.com',
'geo-uk': 'https://securitycenter.onmicrosoft.com',
- 'gcc': 'https://securitycenter.onmicrosoft.us',
+ 'gcc': 'https://securitycenter.onmicrosoft.com',
'gcc-high': 'https://securitycenter.onmicrosoft.us',
'dod': 'https://securitycenter.onmicrosoft.us',
}
@@ -140,7 +141,7 @@ class Resources:
MICROSOFT_DEFENDER_FOR_APPLICATION_TOKEN_RETRIEVAL_ENDPOINTS = {
'com': 'https://login.microsoftonline.com',
- 'gcc': 'https://login.microsoftonline.us',
+ 'gcc': 'https://login.microsoftonline.com',
'gcc-high': 'https://login.microsoftonline.us',
}
@@ -329,7 +330,7 @@ def __init__(self,
sql_management='https://management.core.usgovcloudapi.net:8443/',
batch_resource_id='https://batch.core.usgovcloudapi.net/',
gallery='https://gallery.usgovcloudapi.net/',
- active_directory='https://login.microsoftonline.us',
+ active_directory='https://login.microsoftonline.com',
active_directory_resource_id='https://management.core.usgovcloudapi.net/',
active_directory_graph_resource_id='https://graph.windows.net/',
microsoft_graph_resource_id='https://graph.microsoft.us/',
@@ -1496,7 +1497,7 @@ def generate_login_url(client: MicrosoftClient,
login_url = urljoin(login_url, f'{client.tenant_id}/oauth2/v2.0/authorize?'
f'response_type=code&scope=offline_access%20{client.scope.replace(" ", "%20")}'
- f'&client_id={client.client_id}&redirect_uri={client.redirect_uri}&prompt=consent')
+ f'&client_id={client.client_id}&redirect_uri={client.redirect_uri}')
result_msg = f"""### Authorization instructions
1. Click on the [login URL]({login_url}) to sign in and grant Cortex XSOAR permissions for your Azure Service Management.
diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml
index b487e679c8e7..e71fdbce5379 100644
--- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml
+++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml
@@ -13,7 +13,7 @@ system: true
scripttarget: 0
dependson: {}
timeout: 0s
-dockerimage: demisto/crypto:1.0.0.94037
+dockerimage: demisto/crypto:1.0.0.96042
fromversion: 5.0.0
tests:
- No test
diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
index 40c135e09e0b..88420692c887 100644
--- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
+++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule_test.py
@@ -724,7 +724,7 @@ def test_generate_login_url():
expected_url = f'[login URL](https://login.microsoftonline.com/{TENANT}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20https://graph.microsoft.com/.default' \
- f'&client_id={CLIENT_ID}&redirect_uri=https://localhost/myapp&prompt=consent)'
+ f'&client_id={CLIENT_ID}&redirect_uri=https://localhost/myapp)'
assert expected_url in result.readable_output, "Login URL is incorrect"
diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
index f0f4a236fb50..02d1d18bfbb4 100644
--- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
+++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py
@@ -381,6 +381,11 @@ def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_
demisto.debug(f'No such indicator type: {xsoar_type} in stix format.')
return {}, {}, {}
+ indicator_value = xsoar_indicator.get("value")
+ if (stix_type == "file") and (get_hash_type(indicator_value) == "Unknown"):
+ demisto.debug(f"Skip indicator of type 'file' with value: '{indicator_value}', as it is not a valid hash.")
+ return {}, {}, {}
+
created_parsed = parse(xsoar_indicator.get('timestamp')).strftime(STIX_DATE_FORMAT) # type: ignore[arg-type]
try:
@@ -399,7 +404,7 @@ def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_
stix_object['object_refs'] = [ref['objectstixid']
for ref in xsoar_indicator['CustomFields'].get('reportobjectreferences', [])]
if is_sdo:
- stix_object['name'] = xsoar_indicator.get('value')
+ stix_object['name'] = indicator_value
stix_object = self.add_sdo_required_field_2_1(stix_object, xsoar_indicator)
stix_object = self.add_sdo_required_field_2_0(stix_object, xsoar_indicator)
else:
diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
index 9fae74d3d4b3..efb58fedecec 100644
--- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
+++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py
@@ -1922,6 +1922,25 @@ def test_create_stix_object(xsoar_indicator, xsoar_type, expected_stix_object, e
assert extensions_dict == {}
+def test_create_stix_object_unknown_file_hash():
+ """
+ Given:
+ - A XSOAR indicator of type 'File' and the value is an invalid hash.
+ When:
+ - Creating a stix object.
+ Then:
+ - Ensure the stix object is empty.
+ """
+ cilent = XSOAR2STIXParser(server_version='2.1', fields_to_present={'name', 'type'}, types_for_indicator_sdo=[],
+ namespace_uuid=PAWN_UUID)
+ xsoar_indicator = {"value": "invalidhash"}
+ xsoar_type = FeedIndicatorType.File
+ stix_object, extension_definition, extensions_dict = cilent.create_stix_object(xsoar_indicator, xsoar_type)
+ assert stix_object == {}
+ assert extension_definition == {}
+ assert extensions_dict == {}
+
+
def test_init_client_with_wrong_version():
"""
Given:
diff --git a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.py b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.py
index 662fee9db5bb..6c87f4592a2d 100644
--- a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.py
+++ b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.py
@@ -5,7 +5,7 @@
import urllib3
import traceback
import requests
-from typing import Any, Dict
+from typing import Any
# Disable insecure warnings
urllib3.disable_warnings()
@@ -151,7 +151,7 @@ def get_jobs(client: Client) -> CommandResults:
)
-def post_event(client: Client, args: Dict[str, Any]) -> CommandResults:
+def post_event(client: Client, args: dict[str, Any]) -> CommandResults:
title = args.get("title")
job_id = args.get("job_id", None)
@@ -171,7 +171,7 @@ def post_event(client: Client, args: Dict[str, Any]) -> CommandResults:
)
-def get_event_status(client: Client, args: Dict[str, Any]) -> CommandResults:
+def get_event_status(client: Client, args: dict[str, Any]) -> CommandResults:
job_id = args.get("job_id", None)
if not job_id:
job_id = client.get_default_job_id()
@@ -198,7 +198,7 @@ def get_default_job_id(client: Client) -> CommandResults:
)
-def get_feedback_field(params: Dict[str, Any]) -> CommandResults:
+def get_feedback_field(params: dict[str, Any]) -> CommandResults:
response = params.get("closing_reason_field")
readable_output = f' ## Get feedback returned results: {response}'
@@ -209,7 +209,7 @@ def get_feedback_field(params: Dict[str, Any]) -> CommandResults:
)
-def set_default_job_id(client: Client, args: Dict[str, Any]) -> CommandResults:
+def set_default_job_id(client: Client, args: dict[str, Any]) -> CommandResults:
job_id = args.get("job_id")
client.set_default_job_id(job_id)
return get_default_job_id(client)
@@ -218,7 +218,7 @@ def set_default_job_id(client: Client, args: Dict[str, Any]) -> CommandResults:
''' MAIN FUNCTION '''
-def send_event_feedback(client: Client, feature_mapping_field: str, args: Dict[str, Any]) -> CommandResults:
+def send_event_feedback(client: Client, feature_mapping_field: str, args: dict[str, Any]) -> CommandResults:
job_id = args.get("job_id", None)
if not job_id:
job_id = client.get_default_job_id()
@@ -244,7 +244,7 @@ def send_event_feedback(client: Client, feature_mapping_field: str, args: Dict[s
)
-def send_bulk_events(client: Client, feature_mapping_field: str, args: Dict[str, Any]) -> CommandResults:
+def send_bulk_events(client: Client, feature_mapping_field: str, args: dict[str, Any]) -> CommandResults:
job_id = args.get("job_id")
events = argToList(args.get("events"))
mappings = parse_mappings(feature_mapping_field)
diff --git a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.yml b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.yml
index 1b650e54d63d..4c073e4b0a45 100644
--- a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.yml
+++ b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI.yml
@@ -39,57 +39,57 @@ configuration:
name: closing_reason_field
type: 0
required: false
-description: Arcanna integration for using the power of AI in SOC
+description: Arcanna integration for using the power of AI in SOC.
display: Arcanna.AI
name: Arcanna.AI
script:
commands:
- - description: Get jobs list
+ - description: Get jobs list.
name: arcanna-get-jobs
outputs:
- contextPath: Arcanna.Jobs.job_id
- description: Arcanna Job id
+ description: Arcanna Job id.
type: Number
- contextPath: Arcanna.Jobs.data_type
- description: Arcanna Job type
+ description: Arcanna Job type.
type: String
- contextPath: Arcanna.Jobs.title
- description: Arcanna Job title
+ description: Arcanna Job title.
type: String
- contextPath: Arcanna.Jobs.status
- description: Arcanna job status
+ description: Arcanna job status.
type: String
- arguments:
- - description: An Arcanna running job_id
+ - description: An Arcanna running job_id.
name: job_id
- description: json event for arcanna to inference.
name: event_json
required: true
- - description: event title
+ - description: event title.
name: title
required: true
- - description: event severity
+ - description: event severity.
name: severity
- description: Sends a raw event to Arcanna
+ description: Sends a raw event to Arcanna.
name: arcanna-send-event
outputs:
- contextPath: Arcanna.Event.event_id
- description: Arcanna event id
+ description: Arcanna event id.
type: Number
- contextPath: Arcanna.Event.status
- description: Arcanna ingestion status
+ description: Arcanna ingestion status.
type: String
- contextPath: Arcanna.Event.ingest_timestamp
- description: Arcanna ingestion timestamp
+ description: Arcanna ingestion timestamp.
type: date
- contextPath: Arcanna.Event.error_message
- description: Arcanna error message if any
+ description: Arcanna error message if any.
type: String
- contextPath: Arcanna.Event.job_id
description: An Arcanna Job id used for sending.
type: Number
- arguments:
- - description: Arcanna Job Id
+ - description: Arcanna Job Id.
name: job_id
- description: Arcanna generated unique event id.
name: event_id
@@ -98,7 +98,7 @@ script:
name: arcanna-get-event-status
outputs:
- contextPath: Arcanna.Event.event_id
- description: Arcanna event id
+ description: Arcanna event id.
type: String
- contextPath: Arcanna.Event.ingest_timestamp
description: Arcanna ingestion timestamp.
@@ -107,7 +107,7 @@ script:
description: Arcanna ML confidence_level.
type: Number
- contextPath: Arcanna.Event.result
- description: Arcanna event result
+ description: Arcanna event result.
type: String
- contextPath: Arcanna.Event.is_duplicated
description: Arcanna signalling if event is duplicated by another alert.
@@ -122,10 +122,10 @@ script:
name: arcanna-get-default-job-id
outputs:
- contextPath: Arcanna.Default_Job_Id
- description: Arcanna Default Job id
+ description: Arcanna Default Job id.
type: String
- arguments:
- - description: job_id
+ - description: job_id.
name: job_id
required: true
description: Sets Arcanna Default Job id.
@@ -168,7 +168,7 @@ script:
name: arcanna-send-bulk-events
outputs:
- contextPath: Arcanna.Bulk.status
- description: Arcanna response status for bulk events indexing
+ description: Arcanna response status for bulk events indexing.
type: String
- description: Returns the Feedback field set on integration.
name: arcanna-get-feedback-field
@@ -176,7 +176,7 @@ script:
- contextPath: Arcanna.FeedbackField
description: XSOAR field used to signal feedback/label for Arcanna.
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI_test.py b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI_test.py
index 760ceab9906b..991a142e78d7 100644
--- a/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI_test.py
+++ b/Packs/Arcanna/Integrations/ArcannaAI/ArcannaAI_test.py
@@ -1,5 +1,4 @@
import json
-import io
import demistomock as demisto
from ArcannaAI import Client, get_jobs, post_event, get_default_job_id, set_default_job_id, get_event_status, \
@@ -9,7 +8,7 @@
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
diff --git a/Packs/Arcanna/ReleaseNotes/1_1_11.md b/Packs/Arcanna/ReleaseNotes/1_1_11.md
new file mode 100644
index 000000000000..e807062a1922
--- /dev/null
+++ b/Packs/Arcanna/ReleaseNotes/1_1_11.md
@@ -0,0 +1,18 @@
+
+#### Integrations
+
+##### Arcanna.AI
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+
+#### Scripts
+
+##### ArcannaFeedbackPostProcessing
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### PrepareArcannaRawJson
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Arcanna/Scripts/ArcannaFeedbackPostProcessingScript/ArcannaFeedbackPostProcessingScript.yml b/Packs/Arcanna/Scripts/ArcannaFeedbackPostProcessingScript/ArcannaFeedbackPostProcessingScript.yml
index 1c13ab061e09..c3a286f4e2da 100644
--- a/Packs/Arcanna/Scripts/ArcannaFeedbackPostProcessingScript/ArcannaFeedbackPostProcessingScript.yml
+++ b/Packs/Arcanna/Scripts/ArcannaFeedbackPostProcessingScript/ArcannaFeedbackPostProcessingScript.yml
@@ -9,7 +9,7 @@ args:
description: Closing Notes for manual overriding.
name: closing_notes
- default: true
- description: aaa
+ description: aaa.
name: closing_reason
comment: |-
Arcanna.Ai post-processing script for sending feedback back to Arcanna about the closed incident. Additional modification might be required depending on each Cortex setup.
@@ -25,7 +25,7 @@ tags:
- post-processing
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
dependson:
should:
diff --git a/Packs/Arcanna/Scripts/PrepareArcannaRawJson/PrepareArcannaRawJson.yml b/Packs/Arcanna/Scripts/PrepareArcannaRawJson/PrepareArcannaRawJson.yml
index 225022eacdb2..7795e63727e7 100644
--- a/Packs/Arcanna/Scripts/PrepareArcannaRawJson/PrepareArcannaRawJson.yml
+++ b/Packs/Arcanna/Scripts/PrepareArcannaRawJson/PrepareArcannaRawJson.yml
@@ -1,18 +1,18 @@
args:
- default: true
- description: Input string to transform to JSON escaped object
+ description: Input string to transform to JSON escaped object.
name: input
required: true
comment: Loads a json from string input, and returns a json escaped result.
commonfields:
id: PrepareArcannaRawJson
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: PrepareArcannaRawJson
outputs:
- contextPath: JsonObject
- description: The JSON object loaded from input and escaped for Arcanna to be used
+ description: The JSON object loaded from input and escaped for Arcanna to be used.
runas: DBotWeakRole
script: ''
scripttarget: 0
diff --git a/Packs/Arcanna/pack_metadata.json b/Packs/Arcanna/pack_metadata.json
index aa300b6f3a8d..6fb7ff47919b 100644
--- a/Packs/Arcanna/pack_metadata.json
+++ b/Packs/Arcanna/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ArcannaAI",
"description": "Siscale Arcanna.Ai Cognitive automation platform that provides AI assistance to IT & Cybersecurity teams",
"support": "partner",
- "currentVersion": "1.1.10",
+ "currentVersion": "1.1.11",
"author": "Siscale Engineering",
"created": "2021-06-16T20:46:43Z",
"url": "https://www.arcanna.ai/contact",
diff --git a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.py b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.py
index c647e8b9ceeb..569e82357811 100644
--- a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.py
+++ b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.py
@@ -1089,6 +1089,9 @@ def generate_field_value(client, field_name, field_data, field_val, depth):
if field_type == 4:
field_data = client.get_field_value_list(field_data["FieldId"], depth)
list_ids = []
+ other_text = None
+ if isinstance(field_val, dict) and (other_text := field_val.get('OtherText', None)):
+ field_val = field_val.get('ValuesList')
if not isinstance(field_val, list):
field_val = [field_val]
for item in field_val:
@@ -1101,7 +1104,10 @@ def generate_field_value(client, field_name, field_data, field_val, depth):
raise Exception(
f"Failed to create the field: {field_name} with the value: {item}"
)
- return "Value", {"ValuesListIds": list_ids}
+ res = {"ValuesListIds": list_ids}
+ if other_text:
+ res['OtherText'] = other_text
+ return "Value", res
# when field type is External Links
# for example: {"Patch URL":[{"value":"github", "link": "https://github.com"}]}
diff --git a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.yml b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.yml
index 94cbd0924700..8479de4699fa 100644
--- a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.yml
+++ b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2.yml
@@ -417,7 +417,7 @@ script:
- arguments: []
description: Prints the Archer's integration cache.
name: archer-print-cache
- dockerimage: demisto/python3-deb:3.10.13.87666
+ dockerimage: demisto/python3-deb:3.10.14.93258
isfetch: true
script: ''
subtype: python3
diff --git a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2_test.py b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2_test.py
index e6fea9690dbe..880e547b99ce 100644
--- a/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2_test.py
+++ b/Packs/ArcherRSA/Integrations/ArcherV2/ArcherV2_test.py
@@ -614,7 +614,7 @@ def test_get_field_value_list(self, requests_mock):
requests_mock.get(BASE_URL + 'api/core/system/valueslistvalue/valueslist/62', json=VALUE_LIST_RES)
client = Client(BASE_URL, '', '', '', '', 400)
field_data = client.get_field_value_list(304, 1)
- assert VALUE_LIST_FIELD_DATA == field_data
+ assert field_data == VALUE_LIST_FIELD_DATA
@pytest.mark.parametrize('args, expected_response', [(0, RES_DEPTH_0), (1, RES_DEPTH_1), (2, RES_DEPTH_2)])
def test_get_field_value_list_nested_response(self, requests_mock, args, expected_response):
@@ -681,6 +681,27 @@ def test_generate_field_users_groups_input(self):
assert field_key == 'Value'
assert field_value == {"UserList": [{"ID": 20}], "GroupList": [{"ID": 30}]}
+ def test_generate_field_values_list_with_other(self, requests_mock, mocker):
+ """
+ Given:
+ list values with "OtherText" from dictionary type under "fieldsToValues" argument
+
+ When:
+ - running archer-update-record
+
+ Then:
+ - assert fields are generated correctly
+
+ """
+ mocker.patch.object(Client, 'get_field_value_list', return_value={'ValuesList': [{"Name": "NA", "Id": 222}]})
+
+ client = Client(BASE_URL, '', '', '', '', 400)
+ field_key, field_value = generate_field_value(client, "", {'Type': 4, 'FieldId': 1234},
+ {"ValuesList": ["NA"], "OtherText": "test"},
+ {"depth": 1})
+ assert field_key == 'Value'
+ assert field_value == {'ValuesListIds': [222], 'OtherText': 'test'}
+
def test_generate_invalid_field_users_groups_input(self):
"""
Given:
diff --git a/Packs/ArcherRSA/Integrations/ArcherV2/README.md b/Packs/ArcherRSA/Integrations/ArcherV2/README.md
index a0350442067b..64734464e936 100644
--- a/Packs/ArcherRSA/Integrations/ArcherV2/README.md
+++ b/Packs/ArcherRSA/Integrations/ArcherV2/README.md
@@ -423,6 +423,7 @@ Creates a new content record in the given application.
Note: When creating a new record, make sure the values are sent through the *fieldsToValues* argument properly.
- Example for the *Values List* field type: {"Type": ["Switch"], fieldname: [value1, value2]}
+- Example for the *Values List* field type with *OtherText* property: {"Patch Type": {"ValuesList": ["Custom Type"], "OtherText": "actual text"}, field_name_without_other: [value1, value2]}
- Example for the *External Links* field type: {"Patch URL": [{"value":"github", "link": "https://github.com"}]}
- Example for the *Users/Groups List* field type: {"Policy Owner":{"users": [20],"groups": [30]}}
- Example for the *Cross- Reference* field type: {"Area Reference(s)": [20]}
diff --git a/Packs/ArcherRSA/ReleaseNotes/1_2_17.md b/Packs/ArcherRSA/ReleaseNotes/1_2_17.md
new file mode 100644
index 000000000000..749d232ba644
--- /dev/null
+++ b/Packs/ArcherRSA/ReleaseNotes/1_2_17.md
@@ -0,0 +1,9 @@
+
+#### Integrations
+
+##### RSA Archer v2
+
+- Fixed an issue where the ***archer-update-record*** command would fail in case of a *Values List* field with *Other* property enabled.
+- Updated the Docker image to: *demisto/python3-deb:3.10.14.93258*.
+
+
diff --git a/Packs/ArcherRSA/pack_metadata.json b/Packs/ArcherRSA/pack_metadata.json
index d98608e5ff24..8dbdb676ad92 100644
--- a/Packs/ArcherRSA/pack_metadata.json
+++ b/Packs/ArcherRSA/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "RSA Archer",
"description": "The RSA Archer GRC Platform provides a common foundation for managing policies, controls, risks, assessments and deficiencies across lines of business.",
"support": "xsoar",
- "currentVersion": "1.2.16",
+ "currentVersion": "1.2.17",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.py b/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.py
index 5b4425e487a2..e0087f3aa29a 100644
--- a/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.py
+++ b/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.py
@@ -2,7 +2,7 @@
from CommonServerPython import * # noqa: F401
import urllib3
import traceback
-from typing import Any, Dict
+from typing import Any
# Disable insecure warnings
@@ -60,7 +60,7 @@ def get_vulnerabities(self, firmwareId, deviceId, pageSize, page, sortField, sor
"returnFields": returnFields,
})
- def get_authentication_token(self, authentication_url: str, headers: Dict, payload: Dict):
+ def get_authentication_token(self, authentication_url: str, headers: dict, payload: dict):
return self._http_request(
method='POST',
full_url=authentication_url,
@@ -93,7 +93,7 @@ def editIssue(issue):
key = returnFields[0]
if key == "risk":
issue = str(round(issue * 100, 2)) + '%'
- data = dict()
+ data = {}
data[key] = issue
return data
else:
@@ -104,7 +104,7 @@ def editIssue(issue):
return editIssue
-def arcusteam_get_devices(client: Client, args: Dict[str, Any]):
+def arcusteam_get_devices(client: Client, args: dict[str, Any]):
"""
Search for matching devices giving a device name
:param device_name: device name to search for in the DB.
@@ -122,7 +122,7 @@ def arcusteam_get_devices(client: Client, args: Dict[str, Any]):
)
-def arcusteam_get_vulnerabilities(client: Client, args: Dict[str, Any]) -> CommandResults:
+def arcusteam_get_vulnerabilities(client: Client, args: dict[str, Any]) -> CommandResults:
returnFields = str(args.get("return_fields", 'risk,cve')).split(',')
firmwareId = args.get("firmware_id", "")
diff --git a/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.yml b/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.yml
index a2fdda78c5e8..67a273e602c5 100644
--- a/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.yml
+++ b/Packs/ArcusTeam/Integrations/ArcusTeam/ArcusTeam.yml
@@ -38,62 +38,62 @@ script:
arguments:
- name: vendor
required: true
- description: Device vendor
+ description: Device vendor.
- name: model
- description: Device model
+ description: Device model.
- name: series
- description: ' Device series'
+ description: 'Device series.'
- name: firmware_version
- description: Firmware version
+ description: Firmware version.
outputs:
- contextPath: ArcusTeamDevices.devices.categories
- description: ' Device categories'
+ description: 'Device categories.'
type: Unknown
- contextPath: ArcusTeamDevices.devices.device_key
- description: ' Arcus Team Device ID'
+ description: 'Arcus Team Device ID.'
type: string
- contextPath: ArcusTeamDevices.devices.model
- description: ' Device model'
+ description: 'Device model.'
type: string
- contextPath: ArcusTeamDevices.devices.series
- description: ' Device series'
+ description: 'Device series.'
type: string
- contextPath: ArcusTeamDevices.devices.vendor
- description: ' Device vendor'
+ description: 'Device vendor.'
type: string
- contextPath: ArcusTeamDevices.devices.score
- description: The similarity score
+ description: The similarity score.
type: number
- contextPath: ArcusTeamDevices.devices.firmware.firmwareid
- description: ' Firmware ID'
+ description: 'Firmware ID.'
type: string
- contextPath: ArcusTeamDevices.devices.firmware.name
- description: ' Firmware name'
+ description: 'Firmware name.'
type: string
- contextPath: ArcusTeamDevices.devices.firmware.version
- description: Firmware version
+ description: Firmware version.
type: string
- description: ' Find ArcusTeam Device'
+ description: 'Find ArcusTeam Device.'
- name: arcusteam-get-vulnerabilities
arguments:
- name: firmware_id
required: true
- description: 'ArcusTeam Firmware ID (as returned by the arcusteam-get-devices command)'
+ description: 'ArcusTeam Firmware ID (as returned by the arcusteam-get-devices command).'
- name: device_id
required: true
- description: 'ArcusTeam Device ID (as returned by the arcusteam-get-devices command)'
+ description: 'ArcusTeam Device ID (as returned by the arcusteam-get-devices command).'
- name: page_size
description: Page size. Minimum page size is 1, maximum is 100.
defaultValue: "10"
- name: page_number
- description: ' Page number'
+ description: 'Page number.'
defaultValue: "1"
- name: sort_order
auto: PREDEFINED
predefined:
- desc
- asc
- description: ' Sorting order (“asc”,”desc”)'
+ description: 'Sorting order (“asc”,”desc”).'
defaultValue: desc
- name: sort_field
auto: PREDEFINED
@@ -106,7 +106,7 @@ script:
- exploit_published
- exploit_used
- modified_date
- description: ' Sorting field'
+ description: 'Sorting field.'
defaultValue: risk
- name: return_fields
auto: PREDEFINED
@@ -119,54 +119,54 @@ script:
- exploit_published
- exploit_used
- modified_date
- description: ' The fields to return'
+ description: 'The fields to return.'
isArray: true
defaultValue: cve,risk
outputs:
- contextPath: ArcusTeamVulnerabilities.max_items
- description: Number of results
+ description: Number of results.
type: number
- contextPath: ArcusTeamVulnerabilities.has_next
- description: If there is another page
+ description: If there is another page.
type: boolean
- contextPath: ArcusTeamVulnerabilities.results.cve
- description: CVE name
+ description: CVE name.
type: string
- contextPath: ArcusTeamVulnerabilities.results.risk
- description: CVE risk
+ description: CVE risk.
type: number
- contextPath: ArcusTeamVulnerabilities.results.description
- description: CVE description
+ description: CVE description.
type: string
- contextPath: ArcusTeamVulnerabilities.results.codename
- description: CVE codename
+ description: CVE codename.
type: string
- contextPath: ArcusTeamVulnerabilities.results.cwe
- description: CVE cwe
+ description: CVE cwe.
type: string
- contextPath: ArcusTeamVulnerabilities.results.exploit_published
- description: If exploit was published
+ description: If exploit was published.
type: string
- contextPath: ArcusTeamVulnerabilities.results.exploit_used
- description: If exploit was used
+ description: If exploit was used.
type: string
- contextPath: ArcusTeamVulnerabilities.results.modified_date
- description: If date was modified
+ description: If date was modified.
type: string
- contextPath: ArcusTeamVulnerabilities.results.ownership
- description: CVE ownership
+ description: CVE ownership.
type: string
- contextPath: ArcusTeamVulnerabilities.results.published_date
- description: 'The date the CVE was published '
+ description: 'The date the CVE was published.'
type: string
- contextPath: ArcusTeamVulnerabilities.results.title
- description: CVE title
+ description: CVE title.
type: string
- contextPath: ArcusTeamVulnerabilities.results.url
- description: CVE url
+ description: CVE url.
type: string
- description: ' Retrieve CVEs for an ArcusTeam device'
- dockerimage: demisto/python3:3.10.12.63474
+ description: 'Retrieve CVEs for an ArcusTeam device.'
+ dockerimage: demisto/python3:3.10.14.95956
subtype: python3
fromversion: 6.0.0
tests:
diff --git a/Packs/ArcusTeam/ReleaseNotes/1_0_10.md b/Packs/ArcusTeam/ReleaseNotes/1_0_10.md
new file mode 100644
index 000000000000..3a747e6ac89d
--- /dev/null
+++ b/Packs/ArcusTeam/ReleaseNotes/1_0_10.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### ArcusTeam
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/ArcusTeam/pack_metadata.json b/Packs/ArcusTeam/pack_metadata.json
index e96a52c20fb5..10a2605cff09 100644
--- a/Packs/ArcusTeam/pack_metadata.json
+++ b/Packs/ArcusTeam/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ArcusTeam",
"description": "ArcusTeam's DeviceTotal Platform helps to identify and manage vulnerabilities found on IoT devices",
"support": "partner",
- "currentVersion": "1.0.9",
+ "currentVersion": "1.0.10",
"author": "ArcusTeam",
"url": "https://arcusteam.com/pa-partnership/",
"email": "support@arcusteam.com",
@@ -26,4 +26,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_dark.svg b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_dark.svg
new file mode 100644
index 000000000000..3000133e8d2d
--- /dev/null
+++ b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_dark.svg
@@ -0,0 +1,16 @@
+
diff --git a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_light.svg b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_light.svg
new file mode 100644
index 000000000000..5f062d02f372
--- /dev/null
+++ b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_light.svg
@@ -0,0 +1,16 @@
+
diff --git a/Packs/Armis/Integrations/ArmisEventCollector/README.md b/Packs/Armis/Integrations/ArmisEventCollector/README.md
index 3c5918015dc7..d58ab2a4af05 100644
--- a/Packs/Armis/Integrations/ArmisEventCollector/README.md
+++ b/Packs/Armis/Integrations/ArmisEventCollector/README.md
@@ -1,6 +1,8 @@
Collects alerts, devices and activities from Armis resources.
This integration was integrated and tested with API V.1.8 of Armis API.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Armis Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/Armis/ReleaseNotes/1_1_14.md b/Packs/Armis/ReleaseNotes/1_1_14.md
new file mode 100644
index 000000000000..c1a4e40491cc
--- /dev/null
+++ b/Packs/Armis/ReleaseNotes/1_1_14.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Armis Event Collector
+
+- Updated the logos for display on the XSIAM dashboard.
diff --git a/Packs/Armis/pack_metadata.json b/Packs/Armis/pack_metadata.json
index fc35894466f0..d7aa144e3948 100755
--- a/Packs/Armis/pack_metadata.json
+++ b/Packs/Armis/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Armis",
"description": "Agentless and passive security platform that sees, identifies, and classifies every device, tracks behavior, identifies threats, and takes action automatically to protect critical information and systems",
"support": "partner",
- "currentVersion": "1.1.13",
+ "currentVersion": "1.1.14",
"author": "Armis Corporation",
"url": "https://support.armis.com/",
"email": "support@armis.com",
@@ -17,5 +17,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "ArmisEventCollector"
}
\ No newline at end of file
diff --git a/Packs/Asset/ReleaseNotes/1_0_9.md b/Packs/Asset/ReleaseNotes/1_0_9.md
new file mode 100644
index 000000000000..b73d9031f0c9
--- /dev/null
+++ b/Packs/Asset/ReleaseNotes/1_0_9.md
@@ -0,0 +1,3 @@
+## Asset
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Asset/pack_metadata.json b/Packs/Asset/pack_metadata.json
index 4f5d4a24bd66..48ef391997dd 100644
--- a/Packs/Asset/pack_metadata.json
+++ b/Packs/Asset/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Asset",
"description": "Base pack for any packs using asset fields.",
"support": "xsoar",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.py b/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.py
index 7f7edfc9b2e1..652cd8aa1b4f 100644
--- a/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.py
+++ b/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.py
@@ -542,7 +542,7 @@ def date_to_epoch(date):
users[user] = [group_name]
all_users = []
- for user in sorted(list(users.keys()), key=lambda x: x.lower()):
+ for user in sorted(users.keys(), key=lambda x: x.lower()):
user_entry = {'User': user, 'Groups': ", ".join(users[user])}
all_users.append(user_entry)
diff --git a/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.yml b/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.yml
index 758972111923..8193b61749f7 100644
--- a/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.yml
+++ b/Packs/AttivoBotsink/Integrations/AttivoBotsink/AttivoBotsink.yml
@@ -47,81 +47,81 @@ script:
arguments:
- name: user
required: true
- description: User to validate
+ description: User to validate.
outputs:
- contextPath: Attivo.User.IsDeceptive
- description: Is the user part of the Deception environment
+ description: Is the user part of the Deception environment.
type: boolean
- contextPath: Attivo.User.Groups
- description: If the user is part of the Deception environment, the member is a member of these groups
- description: Checks whether a user is deceptive
+ description: If the user is part of the Deception environment, the member is a member of these groups.
+ description: Checks whether a user is deceptive.
- name: attivo-check-host
arguments:
- name: host
required: true
default: true
- description: Hostname or IP to validate
+ description: Hostname or IP to validate.
outputs:
- contextPath: Attivo.Host.IsDeceptive
- description: Is the IP or hostname part of the Decpetion environment
+ description: Is the IP or hostname part of the Deception environment.
type: boolean
- contextPath: Attivo.Host.IPAddress
- description: IP Address of the deceptive host
+ description: IP Address of the deceptive host.
- contextPath: Attivo.Host.Name
- description: Hostname of the deceptive host
+ description: Hostname of the deceptive host.
- contextPath: Attivo.Host.MACAddress
- description: MAC address of the deceptive host
+ description: MAC address of the deceptive host.
- contextPath: Attivo.Host.VLAN
- description: VLAN of the deceptive host
+ description: VLAN of the deceptive host.
- contextPath: Attivo.Host.UserDefined
- description: Was this host manually defined
+ description: Was this host manually defined.
type: boolean
- contextPath: Attivo.Host.DHCP
- description: Does the host have a dynamic IP address
+ description: Does the host have a dynamic IP address.
type: boolean
- contextPath: Attivo.Host.ThreatDirect.Name
- description: Name of the ThreatDirect device projecting this deceptive host
+ description: Name of the ThreatDirect device projecting this deceptive host.
- contextPath: Attivo.Host.ThreatDirect.Type
- description: The type of ThreatDirect device projecting the deceptive host
- description: Checks whether a host is deceptive
+ description: The type of ThreatDirect device projecting the deceptive host.
+ description: Checks whether a host is deceptive.
- name: attivo-run-playbook
arguments:
- name: playbook_name
required: true
- description: The name of the prebuilt playbook
+ description: The name of the prebuilt playbook.
- name: attacker_ip
required: true
- description: Malicious source IP
+ description: Malicious source IP.
outputs:
- contextPath: Attivo.Playbook.Message
- description: Complete status message
+ description: Complete status message.
type: string
- contextPath: Attivo.Playbook.Status
- description: Was the playbook successful
+ description: Was the playbook successful.
type: boolean
- description: Runs a prebuilt playbook on the BOTsink appliance
+ description: Runs a prebuilt playbook on the BOTsink appliance.
- name: attivo-deploy-decoy
arguments:
- name: vulnerable_ip
required: true
- description: Used to determine which subnet to deploy to
+ description: Used to determine which subnet to deploy to.
- name: decoy_number
default: true
- description: The number of decoys to deploy, default is "1"
+ description: The number of decoys to deploy, default is "1".
defaultValue: "1"
outputs:
- contextPath: Attivo.DeployDecoy.Status
- description: Was the network decoy successfully deployed
+ description: Was the network decoy successfully deployed.
type: boolean
- contextPath: Attivo.DeployDecoy.Message
- description: Complete status message
+ description: Complete status message.
type: string
- description: 'Deploys a new network decoy '
+ description: 'Deploys a new network decoy.'
- name: attivo-get-events
arguments:
- name: attacker_ip
required: true
- description: Source IP address
+ description: Source IP address.
- name: severity
auto: PREDEFINED
predefined:
@@ -131,58 +131,58 @@ script:
- Low
- VeryLow
- SystemActivity
- description: The minimum Attivo severity for the events, default is "Medium"; "VeryHigh", "High", "Medium", "Low", "VeryLow", "SystemActivity"
+ description: The minimum Attivo severity for the events, default is "Medium"; "VeryHigh", "High", "Medium", "Low", "VeryLow", "SystemActivity".
defaultValue: Medium
- name: alerts_start_date
- description: Date and time to start looking for events, e.g., 2018-12-10 or 2018-12-10T13:59:05Z
+ description: Date and time to start looking for events, e.g., 2018-12-10 or 2018-12-10T13:59:05Z.
- name: alerts_end_date
- description: Date and time to stop looking for events, e.g., 2018-12-10 or 2018-12-10T13:59:05Z
+ description: Date and time to stop looking for events, e.g., 2018-12-10 or 2018-12-10T13:59:05Z.
outputs:
- contextPath: Attivo.Events.Count
- description: Total number of events retrieved
+ description: Total number of events retrieved.
type: number
- contextPath: Attivo.Events.List.AttackName
- description: Short name of the attack
+ description: Short name of the attack.
- contextPath: Attivo.Events.List.Attack Phase
- description: Kill chain phase of the attack
+ description: Kill chain phase of the attack.
type: string
- contextPath: Attivo.Events.List.Server
- description: Internal name of the target decoy
+ description: Internal name of the target decoy.
type: string
- contextPath: Attivo.Events.List.Target
- description: Display name of the target decoy
+ description: Display name of the target decoy.
type: string
- contextPath: Attivo.Events.List.TargetOS
- description: Operating system of the target decoy
+ description: Operating system of the target decoy.
type: string
- contextPath: Attivo.Events.List.Attacker
- description: Attacker IP address
+ description: Attacker IP address.
type: string
- contextPath: Attivo.Events.List.Service
- description: The attacked service
+ description: The attacked service.
type: string
- contextPath: Attivo.Events.List.Timestamp
- description: Time of the attack
+ description: Time of the attack.
type: string
- contextPath: Attivo.Events.List.TargetIP
- description: IP address of the target decoy
+ description: IP address of the target decoy.
type: string
- contextPath: Attivo.Events.List.Severity
- description: Attivo severity of the attack
+ description: Attivo severity of the attack.
type: string
- description: Retrieves events for a specified source IP
+ description: Retrieves events for a specified source IP.
- name: attivo-list-playbooks
arguments: []
- description: List information about playbooks configured on the Attivo device
+ description: List information about playbooks configured on the Attivo device.
- name: attivo-list-hosts
arguments: []
- description: List information about network decoys
+ description: List information about network decoys.
- name: attivo-list-users
arguments: []
- description: List all deceptive users
+ description: List all deceptive users.
isfetch: true
runonce: false
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
tests:
- AttivoBotsinkTest
fromversion: 5.0.0
diff --git a/Packs/AttivoBotsink/ReleaseNotes/1_0_15.md b/Packs/AttivoBotsink/ReleaseNotes/1_0_15.md
new file mode 100644
index 000000000000..6d42e4f73766
--- /dev/null
+++ b/Packs/AttivoBotsink/ReleaseNotes/1_0_15.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Attivo Botsink
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/AttivoBotsink/pack_metadata.json b/Packs/AttivoBotsink/pack_metadata.json
index 010df5adf10e..fc7023c4d963 100644
--- a/Packs/AttivoBotsink/pack_metadata.json
+++ b/Packs/AttivoBotsink/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Attivo Botsink",
"description": "Network-based Threat Deception for Post-Compromise Threat Detection.",
"support": "partner",
- "currentVersion": "1.0.14",
+ "currentVersion": "1.0.15",
"author": "Attivo Networks",
"url": "https://www.attivonetworks.com",
"email": "support@attivonetworks.com",
diff --git a/Packs/AutoFocus/ReleaseNotes/2_2_2.md b/Packs/AutoFocus/ReleaseNotes/2_2_2.md
new file mode 100644
index 000000000000..9d90cfb13a7f
--- /dev/null
+++ b/Packs/AutoFocus/ReleaseNotes/2_2_2.md
@@ -0,0 +1,3 @@
+## AutoFocus by Palo Alto Networks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AutoFocus/pack_metadata.json b/Packs/AutoFocus/pack_metadata.json
index 54323b701ab1..5078c7e8e627 100644
--- a/Packs/AutoFocus/pack_metadata.json
+++ b/Packs/AutoFocus/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AutoFocus by Palo Alto Networks",
"description": "Use the Palo Alto Networks AutoFocus integration to distinguish the most\n important threats from everyday commodity attacks.",
"support": "xsoar",
- "currentVersion": "2.2.1",
+ "currentVersion": "2.2.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_17.md b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_17.md
new file mode 100644
index 000000000000..02c8239b6921
--- /dev/null
+++ b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_17.md
@@ -0,0 +1,3 @@
+## Azure Enrichment and Remediation
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Azure-Enrichment-Remediation/pack_metadata.json b/Packs/Azure-Enrichment-Remediation/pack_metadata.json
index 9a1f6ccf89b4..075647812fc0 100644
--- a/Packs/Azure-Enrichment-Remediation/pack_metadata.json
+++ b/Packs/Azure-Enrichment-Remediation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Enrichment and Remediation",
"description": "Playbooks using multiple Azure content packs for enrichment and remediation purposes",
"support": "xsoar",
- "currentVersion": "1.1.16",
+ "currentVersion": "1.1.17",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureActiveDirectory/ReleaseNotes/1_3_23.md b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_23.md
new file mode 100644
index 000000000000..b82e8b1f023a
--- /dev/null
+++ b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_23.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Active Directory Identity Protection (Deprecated)
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureActiveDirectory/ReleaseNotes/1_3_24.md b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_24.md
new file mode 100644
index 000000000000..2319348ff743
--- /dev/null
+++ b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_24.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Identity Protection (Deprecated)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureActiveDirectory/pack_metadata.json b/Packs/AzureActiveDirectory/pack_metadata.json
index 07f498b371dd..bc4bd61940af 100644
--- a/Packs/AzureActiveDirectory/pack_metadata.json
+++ b/Packs/AzureActiveDirectory/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Deprecated. Use Microsoft Graph Identity and Access instead.",
"support": "xsoar",
"hidden": true,
- "currentVersion": "1.3.22",
+ "currentVersion": "1.3.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_25.md b/Packs/AzureCompute/ReleaseNotes/1_2_25.md
new file mode 100644
index 000000000000..7536ae25abfe
--- /dev/null
+++ b/Packs/AzureCompute/ReleaseNotes/1_2_25.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Compute v2
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_26.md b/Packs/AzureCompute/ReleaseNotes/1_2_26.md
new file mode 100644
index 000000000000..f2e7859f30d5
--- /dev/null
+++ b/Packs/AzureCompute/ReleaseNotes/1_2_26.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Compute v2
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_27.md b/Packs/AzureCompute/ReleaseNotes/1_2_27.md
new file mode 100644
index 000000000000..313768e75854
--- /dev/null
+++ b/Packs/AzureCompute/ReleaseNotes/1_2_27.md
@@ -0,0 +1,3 @@
+## Azure Compute
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AzureCompute/pack_metadata.json b/Packs/AzureCompute/pack_metadata.json
index 8b9f178b4d1a..599c15354fa1 100644
--- a/Packs/AzureCompute/pack_metadata.json
+++ b/Packs/AzureCompute/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Compute",
"description": "Create and Manage Azure Virtual Machines",
"support": "xsoar",
- "currentVersion": "1.2.24",
+ "currentVersion": "1.2.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
index 1edbbb9dffc7..1702da9a51fe 100644
--- a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
+++ b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py
@@ -322,6 +322,6 @@ def test_generate_login_url(mocker):
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code' \
f'&scope=offline_access%20{cluster_url}/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureDataExplorer.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureDataExplorer/ReleaseNotes/1_2_42.md b/Packs/AzureDataExplorer/ReleaseNotes/1_2_42.md
new file mode 100644
index 000000000000..8ed0edb603cb
--- /dev/null
+++ b/Packs/AzureDataExplorer/ReleaseNotes/1_2_42.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Data Explorer
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureDataExplorer/ReleaseNotes/1_2_43.md b/Packs/AzureDataExplorer/ReleaseNotes/1_2_43.md
new file mode 100644
index 000000000000..75759dcd8b22
--- /dev/null
+++ b/Packs/AzureDataExplorer/ReleaseNotes/1_2_43.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Data Explorer
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureDataExplorer/pack_metadata.json b/Packs/AzureDataExplorer/pack_metadata.json
index 3885997431df..d6184a59af62 100644
--- a/Packs/AzureDataExplorer/pack_metadata.json
+++ b/Packs/AzureDataExplorer/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Data Explorer",
"description": "Use Azure Data Explorer integration to collect and analyze data inside clusters of Azure Data Explorer and manage search queries.",
"support": "xsoar",
- "currentVersion": "1.2.41",
+ "currentVersion": "1.2.43",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps.yml b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps.yml
index 76cec0d59309..c94dcff5e4c7 100644
--- a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps.yml
+++ b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps.yml
@@ -2986,7 +2986,7 @@ script:
- description: Generate the login url used for Authorization code flow.
name: azure-devops-generate-login-url
arguments: []
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
isremotesyncout: true
ismappable: true
isfetch: true
diff --git a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
index 7b4259f89c07..be5dc03b6ccb 100644
--- a/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
+++ b/Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps_test.py
@@ -822,7 +822,7 @@ def test_generate_login_url(mocker):
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code' \
'&scope=offline_access%20499b84ac-1321-427f-aa17-267ca6975798/user_impersonation%20offline_access' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureDevOps.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureDevOps/ReleaseNotes/1_3_19.md b/Packs/AzureDevOps/ReleaseNotes/1_3_19.md
new file mode 100644
index 000000000000..e14a68a7ee3b
--- /dev/null
+++ b/Packs/AzureDevOps/ReleaseNotes/1_3_19.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### AzureDevOps
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureDevOps/ReleaseNotes/1_3_20.md b/Packs/AzureDevOps/ReleaseNotes/1_3_20.md
new file mode 100644
index 000000000000..8370886baeec
--- /dev/null
+++ b/Packs/AzureDevOps/ReleaseNotes/1_3_20.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### AzureDevOps
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureDevOps/pack_metadata.json b/Packs/AzureDevOps/pack_metadata.json
index 38fc5bbdd404..38bbc1b13f9d 100644
--- a/Packs/AzureDevOps/pack_metadata.json
+++ b/Packs/AzureDevOps/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AzureDevOps",
"description": "Create and manage Git repositories in Azure DevOps Services.",
"support": "xsoar",
- "currentVersion": "1.3.18",
+ "currentVersion": "1.3.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureFirewall/ReleaseNotes/1_1_40.md b/Packs/AzureFirewall/ReleaseNotes/1_1_40.md
new file mode 100644
index 000000000000..a466336fb83d
--- /dev/null
+++ b/Packs/AzureFirewall/ReleaseNotes/1_1_40.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Firewall
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureFirewall/ReleaseNotes/1_1_41.md b/Packs/AzureFirewall/ReleaseNotes/1_1_41.md
new file mode 100644
index 000000000000..0ced0881583d
--- /dev/null
+++ b/Packs/AzureFirewall/ReleaseNotes/1_1_41.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Firewall
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureFirewall/pack_metadata.json b/Packs/AzureFirewall/pack_metadata.json
index b7126cd66d51..7aa9f6486fa8 100644
--- a/Packs/AzureFirewall/pack_metadata.json
+++ b/Packs/AzureFirewall/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Firewall",
"description": "Azure Firewall is a cloud-native and intelligent network firewall security service that provides breed threat protection for cloud workloads running in Azure.It's a fully stateful, firewall as a service with built-in high availability and unrestricted cloud scalability.",
"support": "xsoar",
- "currentVersion": "1.1.39",
+ "currentVersion": "1.1.41",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault.yml b/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault.yml
index cd72cbe1cb0c..0619a58f2008 100644
--- a/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault.yml
+++ b/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault.yml
@@ -964,7 +964,7 @@ script:
description: Run this command if for some reason you need to rerun the authentication process.
execution: false
name: azure-key-vault-auth-reset
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault_test.py b/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault_test.py
index b6e12769fd2f..123951e8611c 100644
--- a/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault_test.py
+++ b/Packs/AzureKeyVault/Integrations/AzureKeyVault/AzureKeyVault_test.py
@@ -41,7 +41,7 @@ def load_mock_response(file_name: str) -> str:
Returns:
str: Mock file content.
"""
- with open(f'test_data/{file_name}', mode='r', encoding='utf-8') as mock_file:
+ with open(f'test_data/{file_name}', encoding='utf-8') as mock_file:
return mock_file.read()
diff --git a/Packs/AzureKeyVault/ReleaseNotes/1_1_44.md b/Packs/AzureKeyVault/ReleaseNotes/1_1_44.md
new file mode 100644
index 000000000000..817d2e52d881
--- /dev/null
+++ b/Packs/AzureKeyVault/ReleaseNotes/1_1_44.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Key Vault
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureKeyVault/ReleaseNotes/1_1_45.md b/Packs/AzureKeyVault/ReleaseNotes/1_1_45.md
new file mode 100644
index 000000000000..48ec7d5e819a
--- /dev/null
+++ b/Packs/AzureKeyVault/ReleaseNotes/1_1_45.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Key Vault
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureKeyVault/pack_metadata.json b/Packs/AzureKeyVault/pack_metadata.json
index 435268020553..37922438a9aa 100644
--- a/Packs/AzureKeyVault/pack_metadata.json
+++ b/Packs/AzureKeyVault/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Key Vault",
"description": "Use Key Vault to safeguard and manage cryptographic keys and secrets used by cloud applications and services.",
"support": "xsoar",
- "currentVersion": "1.1.43",
+ "currentVersion": "1.1.45",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices.yml b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices.yml
index 6f00529662aa..f71cfad7a749 100644
--- a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices.yml
+++ b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices.yml
@@ -393,7 +393,7 @@ script:
- contextPath: AzureKS.ResourceGroup.tags.type
description: 'The type tag associated with the Azure Kubernetes resource group.'
type: String
- dockerimage: demisto/crypto:1.0.0.67955
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
index d21bb6845694..f90505115bf7 100644
--- a/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
+++ b/Packs/AzureKubernetesServices/Integrations/AzureKubernetesServices/AzureKubernetesServices_test.py
@@ -1,4 +1,3 @@
-import io
import json
import demistomock as demisto
import pytest
@@ -19,7 +18,7 @@ def client(mocker):
def load_test_data(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
@@ -193,6 +192,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureKubernetesServices.return_results.call_args[0][0].readable_output
assert expected_url in res, "Login URL is incorrect"
diff --git a/Packs/AzureKubernetesServices/ReleaseNotes/1_1_25.md b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_25.md
new file mode 100644
index 000000000000..e7284a4d07bf
--- /dev/null
+++ b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_25.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Kubernetes Services
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureKubernetesServices/ReleaseNotes/1_1_26.md b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_26.md
new file mode 100644
index 000000000000..93f9bdd5cdc0
--- /dev/null
+++ b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_26.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Kubernetes Services
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureKubernetesServices/pack_metadata.json b/Packs/AzureKubernetesServices/pack_metadata.json
index 74d03e31029b..811bc57b6608 100644
--- a/Packs/AzureKubernetesServices/pack_metadata.json
+++ b/Packs/AzureKubernetesServices/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Kubernetes Services",
"description": "Deploy and manage containerized applications with a fully managed Kubernetes service.",
"support": "xsoar",
- "currentVersion": "1.1.24",
+ "currentVersion": "1.1.26",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureLogAnalytics/.pack-ignore b/Packs/AzureLogAnalytics/.pack-ignore
index d3bc015cc001..b17c467abf29 100644
--- a/Packs/AzureLogAnalytics/.pack-ignore
+++ b/Packs/AzureLogAnalytics/.pack-ignore
@@ -2,4 +2,7 @@
ignore=RM104
[file:AzureLogAnalytics_image.png]
-ignore=IM111
\ No newline at end of file
+ignore=IM111
+
+[known_words]
+GCC
\ No newline at end of file
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.py b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.py
index dace765fadd7..af3f80c20f14 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.py
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.py
@@ -1,4 +1,5 @@
from collections.abc import Callable
+
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
@@ -23,18 +24,23 @@
]
LOG_ANALYTICS_RESOURCE = 'https://api.loganalytics.io'
AZURE_MANAGEMENT_RESOURCE = 'https://management.azure.com'
-AUTH_CODE_SCOPE = 'https://api.loganalytics.io/Data.Read%20https://management.azure.com/user_impersonation'
+AUTH_CODE_SCOPE = "https://api.loganalytics.io/Data.Read%20https://management.azure.com/user_impersonation"
class Client:
def __init__(self, self_deployed, refresh_token, auth_and_token_url, enc_key, redirect_uri, auth_code,
subscription_id, resource_group_name, workspace_name, verify, proxy, certificate_thumbprint,
- private_key, client_credentials, managed_identities_client_id=None):
+ private_key, client_credentials, azure_cloud, managed_identities_client_id=None):
tenant_id = refresh_token if self_deployed else ''
refresh_token = get_integration_context().get('current_refresh_token') or refresh_token
- base_url = f'https://management.azure.com/subscriptions/{subscription_id}/resourceGroups/' \
- f'{resource_group_name}/providers/Microsoft.OperationalInsights/workspaces/{workspace_name}'
+ self.azure_cloud = azure_cloud or AZURE_WORLDWIDE_CLOUD
+ suffix = (
+ f"subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/"
+ + f"providers/Microsoft.OperationalInsights/workspaces/{workspace_name}"
+ )
+ base_url = urljoin(url=self.azure_cloud.endpoints.resource_manager, suffix=suffix)
+
self.ms_client = MicrosoftClient(
self_deployed=self_deployed,
auth_id=auth_and_token_url, # client_id for client credential
@@ -58,6 +64,7 @@ def __init__(self, self_deployed, refresh_token, auth_and_token_url, enc_key, re
managed_identities_client_id=managed_identities_client_id,
managed_identities_resource_uri=Resources.management_azure,
command_prefix="azure-log-analytics",
+ azure_cloud=azure_cloud
)
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
@@ -660,6 +667,7 @@ def main():
certificate_thumbprint=certificate_thumbprint,
private_key=private_key,
client_credentials=client_credentials,
+ azure_cloud=get_azure_cloud(params, 'Azure Log Analytics'),
managed_identities_client_id=managed_identities_client_id,
)
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.yml b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.yml
index b78b8f12e82f..eedf4cf7dcdb 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.yml
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics.yml
@@ -3,88 +3,113 @@ commonfields:
id: Azure Log Analytics
version: -1
configuration:
+- additionalinfo: When selecting the Custom option, the Server URL parameter must be filled. More information about National clouds can be found here - https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#using-national-cloud
+ display: Azure Cloud
+ name: azure_cloud
+ type: 15
+ required: false
+ defaultvalue: Worldwide
+ options:
+ - Worldwide
+ - US GCC
+ - US GCC-High
+ - DoD
+ - Germany
+ - China
+ - Custom
+ section: Connect
+ advanced: true
+- name: server_url
+ type: 0
+ required: false
+ section: Connect
+ display: Server URL
+ advanced: true
+ defaultvalue: https://management.azure.com
+ hidden: false
+ additionalinfo: Use this option when required to customize the URL to the Azure management endpoint. More information can be found here - https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#using-national-cloud
- additionalinfo: Received from the authorization process or from the self-deployed configuration process (find the tenant ID in your app overview page in the Azure portal)
display: Token / Tenant ID
name: refresh_token
- hidden: true
type: 4
required: false
-- displaypassword: Token / Tenant ID
- name: credentials_refresh_token
- hiddenusername: true
+ hidden: true
+- name: credentials_refresh_token
type: 9
required: false
+ displaypassword: Token / Tenant ID
+ hiddenusername: true
section: Connect
-- additionalinfo: Received from the authorization process or from the self-deployed configuration process.
- display: ID / Client ID
+- displaypassword: Key / Client Secret
name: credentials
type: 9
+ additionalinfo: Received from the authorization process or from the self-deployed configuration process.
required: false
- displaypassword: Key / Client Secret
section: Connect
+ display: ID / Client ID
- additionalinfo: Used for certificate authentication. As appears in the "Certificates & secrets" page of the app.
display: Certificate Thumbprint
name: certificate_thumbprint
- hidden: true
type: 4
required: false
-- displaypassword: Certificate Thumbprint
+ hidden: true
+- additionalinfo: Used for certificate authentication. As appears in the "Certificates & secrets" page of the app.
name: credentials_certificate_thumbprint
- hiddenusername: true
type: 9
- additionalinfo: Used for certificate authentication. As appears in the "Certificates & secrets" page of the app.
required: false
section: Connect
+ displaypassword: Certificate Thumbprint
+ hiddenusername: true
- additionalinfo: Used for certificate authentication. The private key of the registered certificate.
display: Private Key
name: private_key
type: 14
required: false
section: Connect
-- additionalinfo: Check when authenticating using the Authorization Code flow.
- display: Use a self-deployed Azure application - Authorization Code flow
+- display: Use a self-deployed Azure application - Authorization Code flow
name: self_deployed
type: 8
required: false
section: Connect
+ additionalinfo: Check when authenticating using the Authorization Code flow.
- additionalinfo: Check when authenticating using the Client Credentials flow.
display: Use a self-deployed Azure application - Client Credentials Flow
name: client_credentials
type: 8
required: false
section: Connect
-- display: Application redirect URI (for self-deployed mode)
- name: redirect_uri
+- name: redirect_uri
type: 0
required: false
section: Connect
+ display: Application redirect URI (for self-deployed mode)
- additionalinfo: Get the Authorization code from steps 3-5 in the self deployed authorization process.
display: Authorization code
name: auth_code
type: 4
- hidden: true
required: false
-- displaypassword: Authorization code
+ hidden: true
+- additionalinfo: Get the Authorization code from steps 3-5 in the self deployed authorization process.
+ displaypassword: Authorization code
name: credentials_auth_code
hiddenusername: true
type: 9
- additionalinfo: Get the Authorization code from steps 3-5 in the self deployed authorization process.
required: false
section: Connect
-- additionalinfo: Relevant only if the integration is running on Azure VM. If selected, authenticates based on the value provided for the Azure Managed Identities Client ID field. If no value is provided for the Azure Managed Identities Client ID field, authenticates based on the System Assigned Managed Identity. For additional information, see the Help tab.
- display: Use Azure Managed Identities
+- display: Use Azure Managed Identities
name: use_managed_identities
- type: 8
required: false
+ type: 8
+ additionalinfo: Relevant only if the integration is running on Azure VM. If selected, authenticates based on the value provided for the Azure Managed Identities Client ID field. If no value is provided for the Azure Managed Identities Client ID field, authenticates based on the System Assigned Managed Identity. For additional information, see the Help tab.
section: Connect
advanced: true
-- additionalinfo: The Managed Identities client id for authentication - relevant only if the integration is running on Azure VM.
- displaypassword: Azure Managed Identities Client ID
- name: managed_identities_client_id
- hiddenusername: true
- type: 9
+- name: managed_identities_client_id
required: false
+ type: 9
+ additionalinfo: The Managed Identities client id for authentication - relevant only if the integration is running on Azure VM.
section: Connect
+ displaypassword: Azure Managed Identities Client ID
+ hiddenusername: true
advanced: true
- display: Default Subscription ID
name: subscriptionID
@@ -100,16 +125,16 @@ configuration:
section: Connect
- display: Default Workspace Name
name: workspaceName
- required: true
type: 0
- additionalinfo: The parameter can be saved as 000-000 and added as an argument to each command.
+ required: true
section: Connect
+ additionalinfo: The parameter can be saved as 000-000 and added as an argument to each command.
- display: Default Workspace ID (the UUID of the workspace, e.g. 123e4567-e89b-12d3-a456-426614174000)
name: workspaceID
- required: true
type: 0
- additionalinfo: The parameter can be saved as 000-000 and added as an argument to each command, but Test button will fail.
+ required: true
section: Connect
+ additionalinfo: The parameter can be saved as 000-000 and added as an argument to each command, but Test button will fail.
- display: Trust any certificate (not secure)
name: insecure
type: 8
@@ -683,7 +708,7 @@ script:
name: workspace_name
description: Delete a Log Analytics workspace table. We recommend you delete the search job when you're done querying the table. This reduces workspace clutter and extra charges for data retention.
name: azure-log-analytics-delete-search-job
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.97029
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
index 4749e216e742..5975792f06f9 100644
--- a/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
+++ b/Packs/AzureLogAnalytics/Integrations/AzureLogAnalytics/AzureLogAnalytics_test.py
@@ -1,22 +1,25 @@
import json
from collections.abc import Callable
-import pytest
from pathlib import Path
-from pytest_mock import MockerFixture
-from requests_mock import MockerCore
-from CommonServerPython import CommandResults, ScheduledCommand, DemistoException
+
+import pytest
from AzureLogAnalytics import (
Client,
- execute_query_command,
- list_saved_searches_command,
- tags_arg_to_request_format,
- get_saved_search_by_id_command,
create_or_update_saved_search_command,
delete_saved_search_command,
- run_search_job_command,
+ delete_search_job_command,
+ execute_query_command,
+ get_saved_search_by_id_command,
get_search_job_command,
- delete_search_job_command
+ list_saved_searches_command,
+ run_search_job_command,
+ tags_arg_to_request_format,
)
+from pytest_mock import MockerFixture
+from requests_mock import MockerCore
+
+from CommonServerPython import CommandResults, DemistoException, ScheduledCommand
+from MicrosoftApiModule import * # noqa: E402
def util_load_json(path: str) -> dict:
@@ -118,6 +121,7 @@ def authorization_mock(requests_mock: MockerCore) -> None:
proxy=False,
certificate_thumbprint=None,
private_key=None,
+ azure_cloud=AZURE_WORLDWIDE_CLOUD,
client_credentials=False,
)
@@ -259,8 +263,9 @@ def test_test_module_command_with_managed_identities(
Then:
- Ensure the output are as expected
"""
- from AzureLogAnalytics import main, MANAGED_IDENTITIES_TOKEN_URL
import AzureLogAnalytics
+ from AzureLogAnalytics import MANAGED_IDENTITIES_TOKEN_URL, main
+
import demistomock as demisto
mock_token = {"access_token": "test_token", "expires_in": "86400"}
@@ -297,9 +302,10 @@ def test_generate_login_url(mocker: MockerFixture) -> None:
- Ensure the generated url are as expected.
"""
# prepare
- import demistomock as demisto
- from AzureLogAnalytics import main
import AzureLogAnalytics
+ from AzureLogAnalytics import main
+
+ import demistomock as demisto
redirect_uri = "redirect_uri"
tenant_id = "tenant_id"
@@ -327,7 +333,7 @@ def test_generate_login_url(mocker: MockerFixture) -> None:
f"[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?"
"response_type=code&scope=offline_access%20https://api.loganalytics.io/Data.Read"
"%20https://management.azure.com/user_impersonation"
- f"&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)"
+ f"&client_id={client_id}&redirect_uri={redirect_uri})"
)
res = AzureLogAnalytics.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_31.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_31.md
new file mode 100644
index 000000000000..0b33af95a78e
--- /dev/null
+++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_31.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Log Analytics
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_32.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_32.md
new file mode 100644
index 000000000000..1355879fe47e
--- /dev/null
+++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_32.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Log Analytics
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_33.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_33.md
new file mode 100644
index 000000000000..6c6130f24193
--- /dev/null
+++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_33.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Log Analytics
+
+- Added support for Microsoft GCC high instances.
+- Updated the Docker image to: *demisto/crypto:1.0.0.97029*.
diff --git a/Packs/AzureLogAnalytics/pack_metadata.json b/Packs/AzureLogAnalytics/pack_metadata.json
index 37b16939a711..0fb5639b580d 100644
--- a/Packs/AzureLogAnalytics/pack_metadata.json
+++ b/Packs/AzureLogAnalytics/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Log Analytics",
"description": "Log Analytics is a service that helps you collect and analyze data generated by resources in your cloud and on-premises environments.",
"support": "xsoar",
- "currentVersion": "1.1.30",
+ "currentVersion": "1.1.33",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.py b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.py
index 76c61aafcb01..f26094bb4c5d 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.py
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.py
@@ -11,12 +11,15 @@
''' CONSTANTS '''
-
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
API_VERSION = '2022-09-01'
-GRANT_BY_CONNECTION = {'Device Code': DEVICE_CODE, 'Authorization Code': AUTHORIZATION_CODE}
+GRANT_BY_CONNECTION = {'Device Code': DEVICE_CODE,
+ 'Authorization Code': AUTHORIZATION_CODE,
+ 'Client Credentials': CLIENT_CREDENTIALS}
SCOPE_BY_CONNECTION = {'Device Code': "https://management.azure.com/user_impersonation offline_access user.read",
- 'Authorization Code': "https://management.azure.com/.default"}
+ 'Authorization Code': "https://management.azure.com/.default",
+ 'Client Credentials': "https://management.azure.com/.default"}
+
DEFAULT_LIMIT = 50
PREFIX_URL = 'https://management.azure.com/subscriptions/'
''' CLIENT CLASS '''
@@ -34,11 +37,11 @@ def __init__(self, app_id, subscription_id, resource_group_name, verify, proxy,
integration_context.update(current_refresh_token=refresh_token)
set_integration_context(integration_context)
base_url = f'{PREFIX_URL}{subscription_id}/' \
- f'resourceGroups/{resource_group_name}/providers/Microsoft.Network/networkSecurityGroups'
+ f'resourceGroups/{resource_group_name}/providers/Microsoft.Network/networkSecurityGroups'
client_args = assign_params(
self_deployed=True, # We always set the self_deployed key as True because when not using a self
- # deployed machine, the DEVICE_CODE flow should behave somewhat like a self deployed
- # flow and most of the same arguments should be set, as we're !not! using OProxy.
+ # deployed machine, the DEVICE_CODE flow should behave somewhat like a self deployed
+ # flow and most of the same arguments should be set, as we're !not! using OProxy.
auth_id=app_id,
token_retrieval_url='https://login.microsoftonline.com/organizations/oauth2/v2.0/token' if 'Device Code' in
connection_type else None,
@@ -47,7 +50,7 @@ def __init__(self, app_id, subscription_id, resource_group_name, verify, proxy,
verify=verify,
proxy=proxy,
resource='https://management.core.windows.net' if 'Device Code' in connection_type
- else None, # disable-secrets-detection
+ else None, # disable-secrets-detection
scope=SCOPE_BY_CONNECTION.get(connection_type),
ok_codes=(200, 201, 202, 204),
azure_ad_endpoint=azure_ad_endpoint,
@@ -529,7 +532,7 @@ def test_module(client: AzureNSGClient) -> str:
"and `!azure-nsg-auth-complete` to log in."
"You can validate the connection by running `!azure-nsg-auth-test`\n"
"For more details press the (?) button.")
- elif client.connection_type == 'Azure Managed Identities':
+ elif client.connection_type == 'Azure Managed Identities' or client.connection_type == 'Client Credentials':
client.ms_client.get_access_token()
return 'ok'
@@ -541,7 +544,7 @@ def test_module(client: AzureNSGClient) -> str:
''' MAIN FUNCTION '''
-def main() -> None: # pragma: no cover
+def main() -> None: # pragma: no cover
params = demisto.params()
command = demisto.command()
args = demisto.args()
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.yml b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.yml
index fb3feb3592df..85ee3056307e 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.yml
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups.yml
@@ -56,27 +56,28 @@ configuration:
additionalinfo: Type of authentication - can be Authorization Code Flow (recommended), Device Code Flow or Azure Managed Identities.
options:
- Authorization Code
+ - Client Credentials
- Device Code
- Azure Managed Identities
section: Connect
- name: tenant_id
- display: Tenant ID (for user-auth mode)
+ display: Tenant ID
defaultvalue:
type: 0
additionalinfo: ""
section: Connect
required: false
- name: credentials
- display: Client Secret (for user-auth mode)
+ display: Client Secret
defaultvalue:
type: 9
additionalinfo: ""
- displaypassword: Client Secret (for user-auth mode)
+ displaypassword: Client Secret
hiddenusername: true
section: Connect
required: false
- name: redirect_uri
- display: Application redirect URI (for user-auth mode)
+ display: Application redirect URI
defaultvalue:
type: 0
additionalinfo: ""
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_description.md b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_description.md
index faff7c3c0018..128617fa1484 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_description.md
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_description.md
@@ -1,54 +1,85 @@
In order to connect to the Azure Network Security Groups use one of the following methods:
1. *Authorization Code Flow* (Recommended).
-2. *Device Code Flow*.
-3. *Azure Managed Identities Flow*.
+2. *Client Credentials Flow*.
+3. *Device Code Flow*.
+4. *Azure Managed Identities Flow*.
+
+## Self-Deployed Azure App
+
+To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal.
+
+To add the registration, refer to the following [Microsoft article](https://learn.microsoft.com/en-us/defender-xdr/api-create-app-web?view=o365-worldwide) steps 1-8.
+
+### Required permissions
+
+- Azure Service Management - permission `user_impersonation` of type Delegated
+- Microsoft Graph - permission `offline_access` of type Delegated
+
+To add a permission:
+
+1. Navigate to **Home** > **App registrations**.
+2. Search for your app under 'all applications'.
+3. Click **API permissions** > **Add permission**.
+4. Search for the specific Microsoft API and select the specific permission of type Delegated.
### Authentication Using the Authorization Code Flow (recommended)
-1. To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal. To add the registration, refer to the following [Microsoft article](https://docs.microsoft.com/en-us/microsoft-365/security/defender/api-create-app-web?view=o365-worldwide#create-an-app) steps 1-8.
-2. In the **Authentication Type** field, select the **Authorization Code** option.
-3. In the **Application ID** field, enter your Client/Application ID.
-4. In the **Client Secret** field, enter your Client Secret.
-5. In the **Tenant ID** field, enter your Tenant ID .
-6. In the **Application redirect URI** field, enter your Application redirect URI.
-7. Save the instance.
-8. Run the `!azure-nsg-generate-login-url` command in the War Room and follow the instruction.
+1. In the *Authentication Type* field, select the **Authorization Code** option.
+2. In the *Application ID* field, enter your Client/Application ID.
+3. In the *Client Secret* field, enter your Client Secret.
+4. In the *Tenant ID* field, enter your Tenant ID.
+5. In the *Application redirect URI* field, enter your Application redirect URI.
+6. Save the instance.
+7. Run the `!azure-nsg-generate-login-url` command in the War Room and follow the instructions.
+
+### Authentication Using the Client Credentials Flow
+
+1. Assign Azure roles using the Azure portal [Microsoft article](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal)
+
+ *Note:* In the *Select members* section, assign the application you created earlier.
+
+2. To configure a Microsoft integration that uses this authorization flow with a self-deployed Azure application:
+ a. In the *Authentication Type* field, select the **Client Credentials** option.
+ b. In the *Application ID* field, enter your Client/Application ID.
+ c. In the *Subscription ID* field, enter your Subscription ID.
+  d. In the *Resource Group Name* field, enter your Resource Group Name.
+  e. In the *Tenant ID* field, enter your Tenant ID.
+  f. In the *Client Secret* field, enter your Client Secret.
+  g. Click **Test** to validate the URLs, token, and connection.
+ h. Save the instance.
### Authentication Using the Device Code Flow
Use the [device authorization grant flow](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-device-code).
In order to connect to the Azure Network Security Group using either Cortex XSOAR Azure App or the Self-Deployed Azure App:
+
1. Fill in the required parameters.
-2. Run the ***!azure-nsg-auth-start*** command.
+2. Run the ***!azure-nsg-auth-start*** command.
3. Follow the instructions that appear.
4. Run the ***!azure-nsg-auth-complete*** command.
-At end of the process you'll see a message that you've logged in successfully.
+At the end of the process you'll see a message that you've logged in successfully.
#### Cortex XSOAR Azure App
In order to use the Cortex XSOAR Azure application, use the default application ID (d4736600-e3d5-4c97-8e65-57abd2b979fe).
-You only need to fill in your subscription ID and resource group name.
-
-#### Self-Deployed Azure App
-
-To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal.
-
-The application must have *user_impersonation* permission and must allow public client flows (can be found under the **Authentication** section of the app).
+You only need to fill in your subscription ID and resource group name.
### Azure Managed Identities Authentication
+
##### Note: This option is relevant only if the integration is running on Azure VM.
+
Follow one of these steps for authentication based on Azure Managed Identities:
- ##### To use System Assigned Managed Identity
- - In the **Authentication Type** drop-down list, select **Azure Managed Identities** and leave the **Azure Managed Identities Client ID** field empty.
+ - In the *Authentication Type* drop-down list, select **Azure Managed Identities** and leave the *Azure Managed Identities Client ID* field empty.
- ##### To use User Assigned Managed Identity
- 1. Go to [Azure Portal](https://portal.azure.com/) -> **Managed Identities**
- 2. Select your User Assigned Managed Identity -> copy the Client ID -> paste it in the **Azure Managed Identities client ID** field in the instance configuration.
- 3. In the **Authentication Type** drop-down list, select **Azure Managed Identities**.
+ 1. Go to [Azure Portal](https://portal.azure.com/) > **Managed Identities**
+ 2. Select your User Assigned Managed Identity > copy the Client ID and paste it in the *Azure Managed Identities client ID* field in the instance configuration.
+ 3. In the *Authentication Type* drop-down list, select **Azure Managed Identities**.
For information about Azure Managed Identities see [Managed identities for Azure resources](https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview)
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
index e447d040a7a5..79e1d906d8b4 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/AzureNetworkSecurityGroups_test.py
@@ -5,6 +5,11 @@
import demistomock as demisto
from AzureNetworkSecurityGroups import AzureNSGClient
+AUTHORIZATION_CODE = 'Authorization Code'
+CLIENT_CREDENTIALS_FLOW = 'Client Credentials'
+SNAKED_CASE_AUTHORIZATION_CODE = 'authorization_code'
+SNAKED_CASE_CLIENT_CREDENTIALS_FLOW = 'client_credentials'
+
def mock_client(mocker, http_request_result=None):
mocker.patch.object(demisto, 'getIntegrationContext', return_value={'current_refresh_token': 'refresh_token'})
@@ -61,7 +66,7 @@ def test_create_rule_command(mocker):
from AzureNetworkSecurityGroups import create_rule_command
client = mock_client(mocker, util_load_json("test_data/list_network_groups_result.json"))
create_rule_command(client, args={'security_group_name': 'securityGroup', 'security_rule_name': 'test_rule',
- 'direction': 'Inbound', 'action': 'Allow', 'protocol': 'Any', 'source': 'Any',
+ 'direction': 'Inbound', 'action': 'Allow', 'protocol': 'Any', 'source': 'Any',
'source_ports': '900-1000', 'destination_ports': '1,2,3,4-6'},
params={'subscription_id': 'subscriptionID',
'resource_group_name': 'resourceGroupName'})
@@ -84,7 +89,7 @@ def test_update_rule_command(mocker):
from AzureNetworkSecurityGroups import update_rule_command
client = mock_client(mocker, util_load_json("test_data/get_rule_result.json"))
update_rule_command(client, args={'security_group_name': 'securityGroup', 'security_rule_name': 'wow', 'direction': 'Inbound',
- 'action': 'Allow', 'protocol': 'Any', 'source': 'Any', 'source_ports': '900-1000',
+ 'action': 'Allow', 'protocol': 'Any', 'source': 'Any', 'source_ports': '900-1000',
'destination_ports': '1,2,3,4-6'}, params={'subscription_id': 'subscriptionID',
'resource_group_name': 'resourceGroupName'})
properties = client.http_request.call_args_list[1][1].get('data').get('properties')
@@ -193,7 +198,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureNetworkSecurityGroups.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/README.md b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/README.md
index 1d50303d97a8..7ede592c6d72 100644
--- a/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/README.md
+++ b/Packs/AzureNetworkSecurityGroups/Integrations/AzureNetworkSecurityGroups/README.md
@@ -1,59 +1,136 @@
Azure network security groups are used to filter network traffic to and from Azure resources in an Azure virtual network.
This integration was integrated and tested with version 2022-09-01 of Azure Network Security Groups.
+
+# Authorization
+
+In order to connect to Azure Network Security Groups, use either the Cortex XSOAR Azure App or the Self-Deployed Azure App.
+Use one of the following methods:
+
+1. *Authorization Code Flow* (Recommended).
+2. *Client Credentials Flow*.
+3. *Device Code Flow*.
+4. *Azure Managed Identities Flow*.
+
+## Self-Deployed Azure App
+
+To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal.
+
+To add the registration, refer to the following [Microsoft article](https://learn.microsoft.com/en-us/defender-xdr/api-create-app-web?view=o365-worldwide) steps 1-8.
+
+### Required permissions
+
+- Azure Service Management - permission `user_impersonation` of type Delegated
+- Microsoft Graph - permission `offline_access` of type Delegated
+
+To add a permission:
+1. Navigate to **Home** > **App registrations**.
+2. Search for your app under 'all applications'.
+3. Click **API permissions** > **Add permission**.
+4. Search for the specific Microsoft API and select the specific permission of type Delegated.
+
+### Authentication Using the Authorization Code Flow (recommended)
+
+1. In the *Authentication Type* field, select the **Authorization Code** option.
+2. In the *Application ID* field, enter your Client/Application ID.
+3. In the *Client Secret* field, enter your Client Secret.
+4. In the *Tenant ID* field, enter your Tenant ID.
+5. In the *Application redirect URI* field, enter your Application redirect URI.
+6. Save the instance.
+7. Run the `!azure-nsg-generate-login-url` command in the War Room and follow the instructions.
+
+### Authentication Using the Client Credentials Flow
+
+1. Assign Azure roles using the Azure portal [Microsoft article](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal)
+
+ *Note:* In the *Select members* section, assign the application you created earlier.
+
+2. To configure a Microsoft integration that uses this authorization flow with a self-deployed Azure application:
+ a. In the *Authentication Type* field, select the **Client Credentials** option.
+ b. In the *Application ID* field, enter your Client/Application ID.
+ c. In the *Subscription ID* field, enter your Subscription ID.
+  d. In the *Resource Group Name* field, enter your Resource Group Name.
+  e. In the *Tenant ID* field, enter your Tenant ID.
+  f. In the *Client Secret* field, enter your Client Secret.
+  g. Click **Test** to validate the URLs, token, and connection.
+ h. Save the instance.
+
+### Authentication Using the Device Code Flow
+
+Use the [device authorization grant flow](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-device-code).
+
+In order to connect to the Azure Network Security Group using either Cortex XSOAR Azure App or the Self-Deployed Azure App:
+
+1. Fill in the required parameters.
+2. Run the ***!azure-nsg-auth-start*** command.
+3. Follow the instructions that appear.
+4. Run the ***!azure-nsg-auth-complete*** command.
+
+At the end of the process you'll see a message that you've logged in successfully.
+
+#### Cortex XSOAR Azure App
+
+In order to use the Cortex XSOAR Azure application, use the default application ID (d4736600-e3d5-4c97-8e65-57abd2b979fe).
+
+You only need to fill in your subscription ID and resource group name.
+
## Configure Azure Network Security Groups on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
2. Search for Azure Network Security Groups.
3. Click **Add instance** to create and configure a new integration instance.
- | **Parameter** | **Description** | **Required** |
- | --- | --- | --- |
- | Application ID | | False |
- | Default Subscription ID | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
- | Default Resource Group Name |There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
- | Azure AD endpoint | Azure AD endpoint associated with a national cloud. | False |
- | Trust any certificate (not secure) | | False |
- | Use system proxy settings | | False |
- | Authentication Type | Type of authentication - can be Authorization Code flow \(recommended\), Device Code Flow, or Azure Managed Identities. | True |
- | Tenant ID (for user-auth mode) | | False |
- | Client Secret (for user-auth mode) | | False |
- | Application redirect URI (for user-auth mode) | | False |
- | Authorization code | For user-auth mode - received from the authorization step. See Detailed Instructions \(?\) section. | False |
- | Azure Managed Identities Client ID | The Managed Identities client ID for authentication - relevant only if the integration is running on Azure VM. |False |
+ | **Parameter** | **Description** | **Required** |
+ |------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+ | Application ID | | False |
+ | Default Subscription ID | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
+ | Default Resource Group Name | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
+ | Azure AD endpoint | Azure AD endpoint associated with a national cloud. | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+ | Authentication Type | Type of authentication - can be Authorization Code flow \(recommended\), Client Credentials, Device Code Flow, or Azure Managed Identities. | True |
+ | Tenant ID | | False |
+ | Client Secret | | False |
+ | Application redirect URI | | False |
+ | Authorization code | For user-auth mode - received from the authorization step. See Detailed Instructions \(?\) section. | False |
+ | Azure Managed Identities Client ID | The Managed Identities client ID for authentication - relevant only if the integration is running on Azure VM. | False |
4. Click **Test** to validate the URLs, token, and connection.
+
## Commands
+
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
### azure-nsg-security-groups-list
+
***
List all network security groups.
-
#### Base Command
`azure-nsg-security-groups-list`
+
#### Input
-| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
-| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.SecurityGroup.name | String | The security group's name. |
-| AzureNSG.SecurityGroup.id | String | The security group's ID. |
-| AzureNSG.SecurityGroup.etag | String | The security group's ETag. |
-| AzureNSG.SecurityGroup.type | String | The security group's type. |
-| AzureNSG.SecurityGroup.location | String | The security group's location. |
-| AzureNSG.SecurityGroup.tags | String | The security group's tags. |
-
+| **Path** | **Type** | **Description** |
+|---------------------------------|----------|--------------------------------|
+| AzureNSG.SecurityGroup.name | String | The security group's name. |
+| AzureNSG.SecurityGroup.id | String | The security group's ID. |
+| AzureNSG.SecurityGroup.etag | String | The security group's ETag. |
+| AzureNSG.SecurityGroup.type | String | The security group's type. |
+| AzureNSG.SecurityGroup.location | String | The security group's location. |
+| AzureNSG.SecurityGroup.tags | String | The security group's tags. |
#### Command Example
+
```!azure-nsg-security-groups-list```
#### Context Example
+
```json
{
"AzureNSG": {
@@ -71,56 +148,56 @@ List all network security groups.
#### Human Readable Output
->### Network Security Groups
+> ### Network Security Groups
>|etag|id|location|name|tags|type|
>|---|---|---|---|---|---|
>| W/"fdba51cf-46b3-44af-8da5-16666aa578cc" | /subscriptions/123456789/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Network/networkSecurityGroups/alerts-nsg | westeurope | alerts-nsg | | Microsoft.Network/networkSecurityGroups |
-
### azure-nsg-security-rules-list
+
***
List all rules of the specified security groups.
-
#### Base Command
`azure-nsg-security-rules-list`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | A comma-separated list of the names of the security groups. | Required |
-| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
-| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional |
-| limit | The maximum number of rules to display. Default is 50. | Optional |
-| offset | The index of the first rule to display. Used for pagination. Default is 0. | Optional |
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | A comma-separated list of the names of the security groups. | Required |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional |
+| limit | The maximum number of rules to display. Default is 50. | Optional |
+| offset | The index of the first rule to display. Used for pagination. Default is 0. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*"". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of source ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be either "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be either "Inbound" or "Outbound". |
-
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*"". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of source ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be either "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be either "Inbound" or "Outbound". |
#### Command Example
+
```!azure-nsg-security-rules-list security_group_name=alerts-nsg```
#### Context Example
+
```json
{
"AzureNSG": {
@@ -152,20 +229,20 @@ List all rules of the specified security groups.
#### Human Readable Output
->### Rules in alerts-nsg
+> ### Rules in alerts-nsg
>|access|destinationAddressPrefix|destinationPortRange|direction|etag|id|name|priority|protocol|provisioningState|sourceAddressPrefix|sourcePortRanges|type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| Allow | 1.1.1.1 | * | Inbound | W/"fdba51cf-46b3-44af-8da5-16666aa578cc" | /subscriptions/123456789/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Network/networkSecurityGroups/alerts-nsg/securityRules/wow | wow | 3323 | * | Succeeded | 8.8.8.8 | 1, 2, 3 | Microsoft.Network/networkSecurityGroups/securityRules |
-
### azure-nsg-auth-test
+
***
Tests the connectivity to the Azure Network Security Groups.
-
#### Base Command
`azure-nsg-auth-test`
+
#### Input
There are no input arguments for this command.
@@ -175,94 +252,96 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
-```!azure-nsg-auth-test```
+```!azure-nsg-auth-test```
#### Human Readable Output
->✅ Success!
+> ✅ Success!
### azure-nsg-security-rules-delete
+
***
Delete a security rule.
-
#### Base Command
`azure-nsg-security-rule-delete`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be deleted. | Required |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. |Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be deleted. | Required |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+ resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional |
#### Context Output
There is no context output for this command.
#### Command Example
+
```!azure-nsg-security-rules-delete security_group_name=alerts-nsg security_rule_name=wow```
#### Human Readable Output
->Rule wow deleted.
+> Rule wow deleted.
### azure-nsg-security-rules-create
+
***
Create a security rule.
-
#### Base Command
`azure-nsg-security-rule-create`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be created. | Required |
-| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Required |
-| action | Whether to allow the traffic. Possible values are: "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
-| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP" and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
-| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. Default is "Any". | Optional |
-| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. Default is "4096".| Optional |
-| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. Default is "*". | Optional |
-| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
-| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| description | A description to add to the rule. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. |Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be created. | Required |
+| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Required |
+| action | Whether to allow the traffic. Possible values are: "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
+| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP" and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
+| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. Default is "Any". | Optional |
+| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. Default is "4096". | Optional |
+| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. Default is "*". | Optional |
+| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
+| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| description | A description to add to the rule. | Optional |
+| subscription_id     | The subscription ID. Note: This argument will override the instance parameter 'Default Subscription ID'.                                                                                                                                                                                                 | Optional     |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter 'Default Resource Group Name'.                                                                                                                                                                                          | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
-
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
#### Command Example
+
```!azure-nsg-security-rules-create direction=Inbound security_group_name=alerts-nsg security_rule_name=rulerule source=1.1.1.1```
#### Context Example
+
```json
{
"AzureNSG": {
@@ -291,64 +370,64 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Rules rulerule
+> ### Rules rulerule
>|access|destinationAddressPrefix|destinationPortRange|direction|etag|id|name|priority|protocol|provisioningState|sourceAddressPrefix|sourcePortRange|type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| Allow | * | * | Inbound | W/"276dc93a-488d-47a1-8971-19a1171242a9" | /subscriptions/123456789/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Network/networkSecurityGroups/alerts-nsg/securityRules/rulerule | rulerule | 4096 | * | Updating | 1.1.1.1 | * | Microsoft.Network/networkSecurityGroups/securityRules |
-
### azure-nsg-security-rules-update
+
***
Update a security rule. If one does not exist, it will be created.
-
#### Base Command
`azure-nsg-security-rule-update`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be updated. | Required |
-| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Optional |
-| action | Whether to allow the traffic. Possible values are "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
-| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP", and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
-| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. Default is "Any". | Optional |
-| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. Default is "4096". | Optional |
-| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. Default is "*".| Optional |
-| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
-| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| description | A description to add to the rule. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. |Optional|
-resource_group_name|The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. |Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be updated. | Required |
+| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Optional |
+| action | Whether to allow the traffic. Possible values are "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
+| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP", and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
+| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. Default is "Any". | Optional |
+| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. Default is "4096". | Optional |
+| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. Default is "*". | Optional |
+| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
+| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| description | A description to add to the rule. | Optional |
+| subscription_id     | The subscription ID. Note: This argument will override the instance parameter 'Default Subscription ID'.                                                                                                                                                                                                 | Optional     |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter 'Default Resource Group Name'.                                                                                                                                                                                          | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
-
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
#### Command Example
+
```!azure-nsg-security-rules-update security_group_name=alerts-nsg security_rule_name=XSOAR_Rule action=Allow description=description```
#### Context Example
+
```json
{
"AzureNSG": {
@@ -378,55 +457,55 @@ resource_group_name|The resource group name. Note: This argument will override t
#### Human Readable Output
->### Rules XSOAR_Rule
+> ### Rules XSOAR_Rule
>|access|description|destinationAddressPrefix|destinationPortRange|direction|etag|id|name|priority|protocol|provisioningState|sourceAddressPrefix|sourcePortRange|type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| Allow | description | 11.0.0.0/8 | 8080 | Outbound | W/"9fad6036-4c3a-4d60-aac9-18281dba3305" | /subscriptions/123456789/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Network/networkSecurityGroups/alerts-nsg/securityRules/XSOAR_Rule | XSOAR_Rule | 100 | * | Succeeded | 10.0.0.0/8 | * | Microsoft.Network/networkSecurityGroups/securityRules |
-
### azure-nsg-security-rules-get
+
***
Get a specific rule.
-
#### Base Command
`azure-nsg-security-rule-get`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Optional |
-| security_rule_name | A comma-separated list of the names of the rules to get. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. |Optional|
-resource_group_name| The name of the resource group. Note: This argument will override the instance parameter ‘Default Resource Group Name'. |Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|-------------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Optional |
+| security_rule_name | A comma-separated list of the names of the rules to get. | Optional |
+| subscription_id     | The subscription ID. Note: This argument will override the instance parameter 'Default Subscription ID'.                 | Optional     |
+| resource_group_name | The name of the resource group. Note: This argument will override the instance parameter 'Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
-
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
#### Command Example
+
```!azure-nsg-security-rules-get security_group_name=alerts-nsg security_rule_name=wow```
#### Context Example
+
```json
{
"AzureNSG": {
@@ -458,20 +537,20 @@ resource_group_name| The name of the resource group. Note: This argument will ov
#### Human Readable Output
->### Rules wow
+> ### Rules wow
>|access|destinationAddressPrefix|destinationPortRange|direction|etag|id|name|priority|protocol|provisioningState|sourceAddressPrefix|sourcePortRanges|type|
>|---|---|---|---|---|---|---|---|---|---|---|---|---|
>| Allow | 1.1.1.1 | * | Inbound | W/"fdba51cf-46b3-44af-8da5-16666aa578cc" | /subscriptions/123456789/resourceGroups/cloud-shell-storage-eastus/providers/Microsoft.Network/networkSecurityGroups/alerts-nsg/securityRules/wow | wow | 3323 | * | Succeeded | 8.8.8.8 | 1, 2, 3 | Microsoft.Network/networkSecurityGroups/securityRules |
-
### azure-nsg-auth-start
+
***
Run this command to start the authorization process and follow the instructions in the command results.
-
#### Base Command
`azure-nsg-auth-start`
+
#### Input
There are no input arguments for this command.
@@ -481,22 +560,24 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
+
```!azure-nsg-auth-start ```
#### Human Readable Output
->To sign in, use a web browser to open the page https://microsoft.com/devicelogin
-and enter the code CODECODE to authenticate.
-Run the !azure-nsg-auth-complete command in the War Room.
+> To sign in, use a web browser to open the page https://microsoft.com/devicelogin
+> and enter the code CODECODE to authenticate.
+> Run the ***!azure-nsg-auth-complete*** command in the War Room.
### azure-nsg-auth-complete
+
***
Run this command to complete the authorization process. Should be used after running the azure-nsg-auth-start command.
-
#### Base Command
`azure-nsg-auth-complete`
+
#### Input
There are no input arguments for this command.
@@ -506,21 +587,22 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
-```!azure-nsg-auth-complete```
+```!azure-nsg-auth-complete```
#### Human Readable Output
->✅ Authorization completed successfully.
+> ✅ Authorization completed successfully.
### azure-nsg-auth-reset
+
***
Run this command if for some reason you need to rerun the authentication process.
-
#### Base Command
`azure-nsg-auth-reset`
+
#### Input
There are no input arguments for this command.
@@ -530,163 +612,168 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
-```!azure-nsg-auth-reset```
+```!azure-nsg-auth-reset```
#### Human Readable Output
->Authorization was reset successfully. You can now run **!azure-nsg-auth-start** and **!azure-nsg-auth-complete**.
+> Authorization was reset successfully. You can now run **!azure-nsg-auth-start** and **!azure-nsg-auth-complete**.
+
### azure-nsg-security-rule-delete
+
***
Delete a security rule.
-
#### Base Command
`azure-nsg-security-rule-delete`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be deleted. | Required |
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|-------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be deleted. | Required |
#### Context Output
There is no context output for this command.
+
### azure-nsg-security-rule-create
+
***
Create a security rule.
-
#### Base Command
`azure-nsg-security-rule-create`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be created. | Required |
-| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Required |
-| action | Whether to allow the traffic. Possible values are: "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
-| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP" and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
-| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. | Optional |
-| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. | Optional |
-| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
-| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| description | A description to add to the rule. | Optional |
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be created. | Required |
+| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Required |
+| action | Whether to allow the traffic. Possible values are: "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
+| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP" and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
+| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. | Optional |
+| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. | Optional |
+| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
+| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| description | A description to add to the rule. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", or "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
+
### azure-nsg-security-rule-update
+
***
Update a security rule. If one does not exist, it will be created.
-
#### Base Command
`azure-nsg-security-rule-update`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Required |
-| security_rule_name | The name of the rule to be updated. | Required |
-| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Optional |
-| action | Whether to allow the traffic. Possible values are "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
-| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP", and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
-| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. | Optional |
-| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. | Optional |
-| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
-| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
-| description | A description to add to the rule. | Optional |
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Required |
+| security_rule_name | The name of the rule to be updated. | Required |
+| direction | The direction of the rule. Possible values are: "Inbound" and "Outbound". Possible values are: Inbound, Outbound. | Optional |
+| action | Whether to allow the traffic. Possible values are "Allow" and "Deny". Possible values are: Allow, Deny. | Optional |
+| protocol | The protocol on which to apply the rule. Possible values are: "Any", "TCP", "UDP", and "ICMP". Possible values are: Any, TCP, UDP, ICMP. | Optional |
+| source | The source IP address range from which incoming traffic will be allowed or denied by this rule. Possible values are "Any", an IP address range, an application security group, or a default tag. | Optional |
+| priority | The priority by which the rules will be processed. The lower the number, the higher the priority. We recommend leaving gaps between rules - 100, 200, 300, etc. - so that it is easier to add new rules without having to edit existing rules. | Optional |
+| source_ports | The source ports from which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| destination | The specific destination IP address range for outgoing traffic that will be allowed or denied by this rule. The destination filter can be "Any", an IP address range, an application security group, or a default tag. | Optional |
+| destination_ports | The destination ports for which traffic will be allowed or denied by this rule. Provide a single port, such as 80; a port range, such as 1024-65535; or a comma-separated list of single ports and/or port ranges, such as 80,1024-65535. Use an asterisk (*) to allow traffic on any port. | Optional |
+| description | A description to add to the rule. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
+
### azure-nsg-security-rule-get
+
***
Get a specific rule.
-
#### Base Command
`azure-nsg-security-rule-get`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| security_group_name | The name of the security group. | Optional |
-| security_rule_name | A comma-separated list of the names of the rules to get. | Optional |
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|----------------------------------------------------------|--------------|
+| security_group_name | The name of the security group. | Optional |
+| security_rule_name | A comma-separated list of the names of the rules to get. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Rule.name | String | The rule's name. |
-| AzureNSG.Rule.id | String | The rule's ID. |
-| AzureNSG.Rule.etag | String | The rule's ETag. |
-| AzureNSG.Rule.type | String | The rule's type. |
-| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
-| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
-| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
-| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
-| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
-| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
-| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
-| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
-| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
-| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
-| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
-
+| **Path** | **Type** | **Description** |
+|----------------------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| AzureNSG.Rule.name | String | The rule's name. |
+| AzureNSG.Rule.id | String | The rule's ID. |
+| AzureNSG.Rule.etag | String | The rule's ETag. |
+| AzureNSG.Rule.type | String | The rule's type. |
+| AzureNSG.Rule.provisioningState | String | The rule's provisioning state. |
+| AzureNSG.Rule.protocol | String | The protocol. Can be "TCP", "UDP", "ICMP", "\*". |
+| AzureNSG.Rule.sourcePortRange | String | For a single port, the source port or a range of ports. Note that for multiple ports, \`sourcePortRanges\` will appear instead. |
+| AzureNSG.Rule.sourcePortRanges | String | For multiple ports, a list of these ports. Note that for single ports, \`sourcePortRange\` will appear instead. |
+| AzureNSG.Rule.destinationPortRange | String | For a single port, the destination port or range of ports. Note that for multiple ports, \`destinationPortRanges\` will appear instead. |
+| AzureNSG.Rule.destinationPortRanges | String | For multiple ports, a list of destination ports. Note that for single ports, \`destinationPortRange\` will appear instead. |
+| AzureNSG.Rule.sourceAddressPrefix | String | The source address. |
+| AzureNSG.Rule.destinationAddressPrefix | String | The destination address. |
+| AzureNSG.Rule.access | String | The rule's access. Can be "Allow" or "Deny". |
+| AzureNSG.Rule.priority | Number | The rule's priority. Can be from 100 to 4096. |
+| AzureNSG.Rule.direction | String | The rule's direction. Can be "Inbound" or "Outbound". |
### azure-nsg-generate-login-url
+
***
-Generate the login url used for Authorization code flow.
+Generate the login URL used for the Authorization code.
#### Base Command
`azure-nsg-generate-login-url`
+
#### Input
There are no input arguments for this command.
@@ -696,20 +783,23 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
+
```azure-nsg-generate-login-url```
#### Human Readable Output
->### Authorization instructions
+> ### Authorization instructions
>1. Click on the [login URL]() to sign in and grant Cortex XSOAR permissions for your Azure Service Management.
-You will be automatically redirected to a link with the following structure:
-```REDIRECT_URI?code=AUTH_CODE&session_state=SESSION_STATE```
+ You will be automatically redirected to a link with the following structure:
+ ```REDIRECT_URI?code=AUTH_CODE&session_state=SESSION_STATE```
>2. Copy the `AUTH_CODE` (without the `code=` prefix, and the `session_state` parameter)
-and paste it in your instance configuration under the **Authorization code** parameter.
+ and paste it in your instance configuration under the **Authorization code** parameter.
### azure-nsg-subscriptions-list
+
***
Gets all subscriptions for a tenant.
+
#### Base Command
`azure-nsg-subscriptions-list`
@@ -720,24 +810,27 @@ There are no input arguments for this command.
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.Subscription.id | String | The unique identifier of the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.authorizationSource | String | The source of authorization for the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.managedByTenants | Unknown | The tenants that have access to manage the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.subscriptionId | String | The ID of the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.tenantId | String | The ID of the tenant associated with the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.displayName | String | The display name of the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.state | String | The current state of the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.subscriptionPolicies.locationPlacementId | String | The ID of the location placement policy for the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.subscriptionPolicies.quotaId | String | The ID of the quota policy for the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.subscriptionPolicies.spendingLimit | String | The spending limit policy for the Azure Network Security Groups subscription. |
-| AzureNSG.Subscription.count.type | String | The type of the Azure Network Security Groups subscription count. |
-| AzureNSG.Subscription.count.value | Number | The value of the Azure Network Security Groups subscription count. |
+| **Path** | **Type** | **Description** |
+|----------------------------------------------------------------|----------|---------------------------------------------------------------------------------------------|
+| AzureNSG.Subscription.id | String | The unique identifier of the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.authorizationSource | String | The source of authorization for the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.managedByTenants | Unknown | The tenants that have access to manage the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.subscriptionId | String | The ID of the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.tenantId | String | The ID of the tenant associated with the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.displayName | String | The display name of the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.state | String | The current state of the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.subscriptionPolicies.locationPlacementId | String | The ID of the location placement policy for the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.subscriptionPolicies.quotaId | String | The ID of the quota policy for the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.subscriptionPolicies.spendingLimit | String | The spending limit policy for the Azure Network Security Groups subscription. |
+| AzureNSG.Subscription.count.type | String | The type of the Azure Network Security Groups subscription count. |
+| AzureNSG.Subscription.count.value | Number | The value of the Azure Network Security Groups subscription count. |
#### Command example
+
```!azure-nsg-subscriptions-list```
+
#### Context Example
+
```json
{
"AzureNSG": {
@@ -777,14 +870,12 @@ There are no input arguments for this command.
#### Human Readable Output
->### Azure Network Security Groups Subscriptions list
+> ### Azure Network Security Groups Subscriptions list
>|subscriptionId|tenantId|displayName|state|
>|---|---|---|---|
>| 057b1785-fd7b-4 | ebac1a16-81bf-449 | Access to Azure Active Directory | Enabled |
>| 0f907ea4-bc8b-4 | ebac1a16-81bf-449 | Pay-As-You-Go | Enabled |
-
-
### azure-nsg-resource-group-list
***
@@ -796,32 +887,35 @@ Gets all resource groups for a subscription.
#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
-| limit | Limit on the number of resource groups to return. Default is 50. | Optional |
-| tag | A single tag in the form of '{"Tag Name":"Tag Value"}' to filter the list by. | Optional |
+| **Argument Name** | **Description** | **Required** |
+|-------------------|----------------------------------------------------------------------------------------------------------|--------------|
+| subscription_id   | The subscription ID. Note: This argument will override the instance parameter 'Default Subscription ID'.  | Optional     |
+| limit | Limit on the number of resource groups to return. Default is 50. | Optional |
+| tag | A single tag in the form of '{"Tag Name":"Tag Value"}' to filter the list by. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureNSG.ResourceGroup.id | String | The unique identifier of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.name | String | The name of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.type | String | The type of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.location | String | The location of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.properties.provisioningState | String | The provisioning state of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags.Owner | String | The owner tag of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags | Unknown | The tags associated with the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags.Name | String | The name tag of the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.managedBy | String | The entity that manages the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags.aNSG-managed-cluster-name | String | The ANSG managed cluster name tag associated with the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags.aNSG-managed-cluster-rg | String | The ANSG managed cluster resource group tag associated with the Azure Network Security Groups resource group. |
-| AzureNSG.ResourceGroup.tags.type | String | The type tag associated with the Azure Network Security Groups resource group. |
+| **Path** | **Type** | **Description** |
+|-------------------------------------------------------|----------|---------------------------------------------------------------------------------------------------------------|
+| AzureNSG.ResourceGroup.id | String | The unique identifier of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.name | String | The name of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.type | String | The type of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.location | String | The location of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.properties.provisioningState | String | The provisioning state of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags.Owner | String | The owner tag of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags | Unknown | The tags associated with the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags.Name | String | The name tag of the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.managedBy | String | The entity that manages the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags.aNSG-managed-cluster-name | String | The ANSG managed cluster name tag associated with the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags.aNSG-managed-cluster-rg | String | The ANSG managed cluster resource group tag associated with the Azure Network Security Groups resource group. |
+| AzureNSG.ResourceGroup.tags.type | String | The type tag associated with the Azure Network Security Groups resource group. |
#### Command example
+
```!azure-nsg-resource-group-list```
+
#### Context Example
+
```json
{
"AzureNSG": {
@@ -846,7 +940,7 @@ Gets all resource groups for a subscription.
"Owner": "Demi"
},
"type": "Microsoft.Resources/resourceGroups"
- },
+ }
]
}
}
@@ -854,7 +948,7 @@ Gets all resource groups for a subscription.
#### Human Readable Output
->### Resource Groups List
+> ### Resource Groups List
>|Name|Location|Tags|
>|---|---|---|
>| cloud-shell-storage-eastus | eastus | |
diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_29.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_29.md
new file mode 100644
index 000000000000..c272ab275963
--- /dev/null
+++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_29.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Network Security Groups
+
+Added support for the *Client Credentials* authentication flow.
\ No newline at end of file
diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_30.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_30.md
new file mode 100644
index 000000000000..7eba3029fe2c
--- /dev/null
+++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_30.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Network Security Groups
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_31.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_31.md
new file mode 100644
index 000000000000..492f4c321a95
--- /dev/null
+++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_31.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Network Security Groups
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_32.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_32.md
new file mode 100644
index 000000000000..1d2b7caed54e
--- /dev/null
+++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_32.md
@@ -0,0 +1,3 @@
+## Azure Network Security Groups
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/AzureNetworkSecurityGroups/pack_metadata.json b/Packs/AzureNetworkSecurityGroups/pack_metadata.json
index b682f9b02fe3..686963c64489 100644
--- a/Packs/AzureNetworkSecurityGroups/pack_metadata.json
+++ b/Packs/AzureNetworkSecurityGroups/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Network Security Groups",
"description": "Azure Network Security Groups are used to filter network traffic to and from Azure resources in an Azure virtual network",
"support": "xsoar",
- "currentVersion": "1.2.28",
+ "currentVersion": "1.2.32",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureRiskyUsers/ReleaseNotes/1_1_35.md b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_35.md
new file mode 100644
index 000000000000..f686d7917655
--- /dev/null
+++ b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_35.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Risky Users
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureRiskyUsers/ReleaseNotes/1_1_36.md b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_36.md
new file mode 100644
index 000000000000..c53ca85fb849
--- /dev/null
+++ b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_36.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Risky Users
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureRiskyUsers/pack_metadata.json b/Packs/AzureRiskyUsers/pack_metadata.json
index fa036e589bd5..1a63916c015d 100644
--- a/Packs/AzureRiskyUsers/pack_metadata.json
+++ b/Packs/AzureRiskyUsers/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Risky Users",
"description": "Azure Risky Users provides access to all at-risk users and risk detections in Azure AD environment.",
"support": "xsoar",
- "currentVersion": "1.1.34",
+ "currentVersion": "1.1.36",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement.yml b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement.yml
index c32e3f291c3d..38a2b0001202 100644
--- a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement.yml
+++ b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement.yml
@@ -651,7 +651,7 @@ script:
- contextPath: AzureSQL.ResourceGroup.tags
description: The tags attached to the resource group.
type: String
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
index b6d1c9f9ac50..4b1e7f03b7f3 100644
--- a/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
+++ b/Packs/AzureSQLManagement/Integrations/AzureSQLManagement/AzureSQLManagement_test.py
@@ -1,12 +1,11 @@
import json
-import io
import pytest
import demistomock as demisto
from AzureSQLManagement import Client
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
@@ -303,7 +302,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureSQLManagement.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureSQLManagement/ReleaseNotes/1_1_45.md b/Packs/AzureSQLManagement/ReleaseNotes/1_1_45.md
new file mode 100644
index 000000000000..ba4d13bfad12
--- /dev/null
+++ b/Packs/AzureSQLManagement/ReleaseNotes/1_1_45.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure SQL Management
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureSQLManagement/ReleaseNotes/1_1_46.md b/Packs/AzureSQLManagement/ReleaseNotes/1_1_46.md
new file mode 100644
index 000000000000..74fe4829d459
--- /dev/null
+++ b/Packs/AzureSQLManagement/ReleaseNotes/1_1_46.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure SQL Management
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureSQLManagement/pack_metadata.json b/Packs/AzureSQLManagement/pack_metadata.json
index 362084c66d8e..df8eb5570ea1 100644
--- a/Packs/AzureSQLManagement/pack_metadata.json
+++ b/Packs/AzureSQLManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure SQL Management",
"description": "Microsoft Azure SQL Database is a managed cloud database provided as part of Microsoft Azure",
"support": "xsoar",
- "currentVersion": "1.1.44",
+ "currentVersion": "1.1.46",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml
index 29de2e176c36..8fbbb7925212 100644
--- a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml
+++ b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml
@@ -466,7 +466,7 @@ script:
type: Unknown
- contextPath: Azure.ResourceGroupName.properties.provisioningState
description: Resource group provisioning state.
- dockerimage: demisto/crypto:1.0.0.86361
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
type: python
diff --git a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2_test.py b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2_test.py
index f0e6dec57449..cf0a10f012dd 100644
--- a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2_test.py
+++ b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2_test.py
@@ -60,7 +60,7 @@ def test_get_atp_command(mocker):
"setting_name": 'test',
"storage_account": 'test'}
_, ec, _ = get_atp_command(client, args)
- assert EXPECTED_GET_ATP_COMMAND_CONTEXT == ec
+ assert ec == EXPECTED_GET_ATP_COMMAND_CONTEXT
def test_update_atp_command(mocker):
@@ -70,21 +70,21 @@ def test_update_atp_command(mocker):
"is_enabled": "test",
"storage_account": "test"}
_, ec, _ = update_atp_command(client, args)
- assert EXPECTED_UPDATE_ATP_CONTEXT == ec
+ assert ec == EXPECTED_UPDATE_ATP_CONTEXT
def test_get_aps_command(mocker):
mocker.patch.object(client, 'get_aps', return_value=GET_APS_RAW_RESPONSE)
args = {"setting_name": 'test'}
_, ec, _ = get_aps_command(client, args)
- assert EXPECTED_GET_APS_CONTEXT == ec
+ assert ec == EXPECTED_GET_APS_CONTEXT
def test_get_secure_score_command(mocker):
mocker.patch.object(client, 'get_secure_scores', return_value=GET_SECURE_SCORE_RAW_RESPONSE)
args = {"secure_score_name": 'ascScore'}
_, ec, _ = get_secure_scores_command(client, args)
- assert EXPECTED_GET_SECURE_SCORE_CONTEXT == ec
+ assert ec == EXPECTED_GET_SECURE_SCORE_CONTEXT
@pytest.mark.parametrize(argnames='client_id', argvalues=['test_client_id', None])
diff --git a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector.yml b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector.yml
index 00d2e093075d..faafa70878c8 100644
--- a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector.yml
+++ b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector.yml
@@ -103,7 +103,7 @@ script:
- description: Run this command if for some reason you need to rerun the authentication process.
name: ms-defender-for-cloud-auth-reset
arguments: []
- dockerimage: demisto/crypto:1.0.0.72229
+ dockerimage: demisto/crypto:1.0.0.96042
isfetchevents: true
script: '-'
subtype: python3
diff --git a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_description.md b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_description.md
index 849910107f78..b30b5552629d 100644
--- a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_description.md
+++ b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_description.md
@@ -10,7 +10,14 @@ Self-deployed configuration:
After you finish configuring your application,
add a “Security Reader” role to the application from the subscription.
-In order to add a role to a subscription, refer to:
+1. In the Azure portal, go to the subscription > **Access control (IAM)**.
+2. Click **Add**.
+3. Click **Add role assignment**.
+4. In the *Role* tab, search for and select **Security Reader**.
+5. In the *Members* tab, click **Select members** and select the created application.
+6. Click **Review + assign**.
+
+For additional information about roles, refer to:
* [Azure AD built-in roles](https://learn.microsoft.com/en-us/azure/active-directory/roles/permissions-reference)
#### Additional information
diff --git a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_test.py b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_test.py
index d9d8ed2d2a9d..59ffd0cbb3ce 100644
--- a/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_test.py
+++ b/Packs/AzureSecurityCenter/Integrations/MicrosoftDefenderForCloudEventCollector/MicrosoftDefenderForCloudEventCollector_test.py
@@ -23,7 +23,7 @@ def read_json_util(path: str):
"""
Read json util functions
"""
- with open(path, 'r') as f:
+ with open(path) as f:
json_file = json.load(f)
return json_file
diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_25.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_25.md
new file mode 100644
index 000000000000..6570bf77775e
--- /dev/null
+++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_25.md
@@ -0,0 +1,13 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud Event Collector
+
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Defender for Cloud
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_26.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_26.md
new file mode 100644
index 000000000000..7d903bfa3c35
--- /dev/null
+++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_26.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
+
+##### Microsoft Defender for Cloud Event Collector
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_27.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_27.md
new file mode 100644
index 000000000000..9a5b3e30843a
--- /dev/null
+++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_27.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud Event Collector
+
+- Documentation and metadata improvements.
diff --git a/Packs/AzureSecurityCenter/pack_metadata.json b/Packs/AzureSecurityCenter/pack_metadata.json
index fc52e50fe290..1814d51a6ca4 100644
--- a/Packs/AzureSecurityCenter/pack_metadata.json
+++ b/Packs/AzureSecurityCenter/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Defender for Cloud",
"description": "Unified security management and advanced threat protection across hybrid cloud workloads.",
"support": "xsoar",
- "currentVersion": "2.0.24",
+ "currentVersion": "2.0.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_44.md b/Packs/AzureSentinel/ReleaseNotes/1_5_44.md
new file mode 100644
index 000000000000..daeda1f4e9e0
--- /dev/null
+++ b/Packs/AzureSentinel/ReleaseNotes/1_5_44.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Sentinel
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_45.md b/Packs/AzureSentinel/ReleaseNotes/1_5_45.md
new file mode 100644
index 000000000000..246fdeee2ba8
--- /dev/null
+++ b/Packs/AzureSentinel/ReleaseNotes/1_5_45.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Sentinel
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureSentinel/pack_metadata.json b/Packs/AzureSentinel/pack_metadata.json
index 79e2017052a1..79c35b628492 100644
--- a/Packs/AzureSentinel/pack_metadata.json
+++ b/Packs/AzureSentinel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Sentinel",
"description": "Microsoft Sentinel is a cloud-native security information and event manager (SIEM) platform that uses built-in AI to help analyze large volumes of data across an enterprise.",
"support": "xsoar",
- "currentVersion": "1.5.43",
+ "currentVersion": "1.5.45",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.py b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.py
index 17514c81d0fe..eb5fbb9a14c2 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.py
+++ b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.py
@@ -8,9 +8,12 @@
urllib3.disable_warnings()
API_VERSION = '2022-09-01'
-GRANT_BY_CONNECTION = {'Device Code': DEVICE_CODE, 'Authorization Code': AUTHORIZATION_CODE}
+GRANT_BY_CONNECTION = {'Device Code': DEVICE_CODE,
+ 'Authorization Code': AUTHORIZATION_CODE,
+ 'Client Credentials': CLIENT_CREDENTIALS}
SCOPE_BY_CONNECTION = {'Device Code': "https://management.azure.com/user_impersonation offline_access user.read",
- 'Authorization Code': "https://management.azure.com/.default"}
+ 'Authorization Code': "https://management.azure.com/.default",
+ 'Client Credentials': "https://management.azure.com/.default"}
PREFIX_URL = 'https://management.azure.com/subscriptions/'
@@ -28,7 +31,8 @@ def __init__(self, app_id: str, subscription_id: str, resource_group_name: str,
client_args = assign_params(
self_deployed=True,
auth_id=app_id,
- token_retrieval_url='https://login.microsoftonline.com/organizations/oauth2/v2.0/token',
+ token_retrieval_url='https://login.microsoftonline.com/organizations/oauth2/v2.0/token' if 'Device Code' in
+ connection_type else None,
grant_type=GRANT_BY_CONNECTION.get(connection_type),
base_url=f'{PREFIX_URL}{subscription_id}',
verify=verify,
@@ -388,6 +392,7 @@ def list_resource_groups_request(self, subscription_id: str | None,
params={'$filter': filter_by_tag, '$top': limit,
'api-version': API_VERSION})
+
# Storage Account Commands
@@ -606,11 +611,12 @@ def storage_blob_service_properties_set(client: ASClient, params: Dict, args: Di
'Azure Storage Blob Service Properties',
readable_output,
['Name', 'Account Name', 'Subscription ID', 'Resource Group', 'Change Feed', 'Delete Retention Policy',
- 'Versioning'],
+ 'Versioning'],
),
raw_response=response
)
+
# Blob Containers Commands
@@ -895,7 +901,7 @@ def test_module(client: ASClient) -> str:
"You can validate the connection by running `!azure-storage-auth-test`\n"
"For more details press the (?) button.")
- elif client.connection_type == 'Azure Managed Identities':
+ elif client.connection_type == 'Azure Managed Identities' or client.connection_type == 'Client Credentials':
client.ms_client.get_access_token()
return 'ok'
else:
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.yml b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.yml
index 45719c0f7ba1..3f4710796df1 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.yml
+++ b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage.yml
@@ -43,27 +43,28 @@ configuration:
additionalinfo: Type of authentication - can be Authorization Code Flow (recommended), Device Code Flow, or Azure Managed Identities.
options:
- Authorization Code
+ - Client Credentials
- Device Code
- Azure Managed Identities
section: Connect
- name: tenant_id
- display: Tenant ID (for user-auth mode)
+ display: Tenant ID
defaultvalue:
type: 0
additionalinfo: ""
section: Connect
required: false
- name: credentials
- display: Client Secret (for user-auth mode)
+ display: Client Secret
defaultvalue:
type: 9
additionalinfo: ""
- displaypassword: Client Secret (for user-auth mode)
+ displaypassword: Client Secret
hiddenusername: true
section: Connect
required: false
- name: redirect_uri
- display: Application redirect URI (for user-auth mode)
+ display: Application redirect URI
defaultvalue:
type: 0
additionalinfo: ""
@@ -116,7 +117,7 @@ script:
name: subscription_id
required: false
secret: false
- description: Run this command to get the all or specific account storage details
+ description: Run this command to get all or specific account storage details.
name: azure-storage-account-list
outputs:
- contextPath: AzureStorage.StorageAccount.id
@@ -126,10 +127,10 @@ script:
description: Gets the Kind.
type: String
- contextPath: AzureStorage.StorageAccount.location
- description: The geo-location where the resource lives
+ description: The geo-location where the resource lives.
type: String
- contextPath: AzureStorage.StorageAccount.name
- description: The name of the resource
+ description: The name of the resource.
type: String
- contextPath: AzureStorage.StorageAccount.properties.isHnsEnabled
description: Account HierarchicalNamespace enabled if sets to true.
@@ -237,7 +238,7 @@ script:
description: Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS.
type: String
- contextPath: AzureStorage.StorageAccount.properties.statusOfPrimary
- description: Gets the status indicating whether the primary location of the storage account is available or unavailable
+ description: Gets the status indicating whether the primary location of the storage account is available or unavailable.
type: String
- contextPath: AzureStorage.StorageAccount.properties.statusOfSecondary
description: Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS.
@@ -414,9 +415,9 @@ script:
- None
- description: Specifies the default action of allow or deny when no other rules match.
name: network_ruleset_default_action
- - description: Sets the IP ACL rules
+ - description: Sets the IP ACL rules.
name: network_ruleset_ipRules
- - description: Sets the virtual network rules
+ - description: Sets the virtual network rules.
name: virtual_network_rules
description: |-
Run this command to create or update a specific
@@ -541,7 +542,7 @@ script:
description: Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS.
type: String
- contextPath: AzureStorage.StorageAccount.properties.statusOfPrimary
- description: Gets the status indicating whether the primary location of the storage account is available or unavailable
+ description: Gets the status indicating whether the primary location of the storage account is available or unavailable.
type: String
- contextPath: AzureStorage.StorageAccount.properties.statusOfSecondary
description: Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS.
@@ -586,7 +587,7 @@ script:
description: Fully qualified resource ID for the resource.
type: String
- contextPath: AzureStorage.BlobServiceProperties.name
- description: The name of the resource
+ description: The name of the resource.
type: String
- contextPath: AzureStorage.BlobServiceProperties.type
description: The type of the resource.
@@ -674,7 +675,7 @@ script:
name: restore_policy_days
description: |-
Run this command to set properties for
- the blob service in a specific account storage
+ the blob service in a specific account storage.
name: azure-storage-blob-service-properties-set
outputs:
- contextPath: AzureStorage.BlobServiceProperties.id
@@ -761,7 +762,7 @@ script:
- Blob
- Container
- None
- description: Run this command to create a blob container
+ description: Run this command to create a blob container.
name: azure-storage-blob-containers-create
outputs:
- contextPath: AzureStorage.BlobContainer.id
@@ -1012,7 +1013,7 @@ script:
- description: Generate the login url used for Authorization code flow.
name: azure-storage-generate-login-url
arguments: []
- dockerimage: demisto/crypto:1.0.0.66562
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_description.md b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_description.md
index 84d0eec9173f..5c150896ef3c 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_description.md
+++ b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_description.md
@@ -2,25 +2,69 @@ In order to connect to the Azure Storage Accounts and the Blob Service use eithe
Use one of the following methods:
1. *Authorization Code Flow* (Recommended).
-2. *Device Code Flow*.
+2. *Client Credentials Flow*
+3. *Device Code Flow*
+
+## Self-Deployed Azure App
+
+To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal.
+
+To add the registration, refer to the following [Microsoft article](https://learn.microsoft.com/en-us/defender-xdr/api-create-app-web?view=o365-worldwide) steps 1-8.
+
+### Required permissions
+
+- Azure Service Management - permission `user_impersonation` of type Delegated
+- Microsoft Graph - permission `offline_access` of type Delegated
+
+To add a permission:
+
+1. Navigate to **Home** > **App registrations**.
+2. Search for your app under 'all applications'.
+3. Click **API permissions** > **Add permission**.
+4. Search for the specific Microsoft API and select the specific permission of type Delegated.
### Authentication Using the Authorization Code Flow (recommended)
1. To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal. To add the registration, refer to the following [Microsoft article](https://docs.microsoft.com/en-us/microsoft-365/security/defender/api-create-app-web?view=o365-worldwide#create-an-app) steps 1-8.
-2. In the **Authentication Type** field, select the **Authorization Code** option.
-3. In the **Application ID** field, enter your Client/Application ID.
-4. In the **Client Secret** field, enter your Client Secret.
-5. In the **Tenant ID** field, enter your Tenant ID .
-6. In the **Application redirect URI** field, enter your Application redirect URI.
+2. In the *Authentication Type* field, select the **Authorization Code** option.
+3. In the *Application ID* field, enter your Client/Application ID.
+4. In the *Client Secret* field, enter your Client Secret.
+5. In the *Tenant ID* field, enter your Tenant ID.
+6. In the *Application redirect URI* field, enter your Application redirect URI.
7. Save the instance.
8. Run the `!azure-storage-generate-login-url` command in the War Room and follow the instruction.
+### Authentication Using the Client Credentials Flow
+
+1. Assign Azure roles using the Azure portal [Microsoft article](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal)
+
+ *Note:* At the 'Select members' section, assign the application you created before.
+
+2. To configure a Microsoft integration that uses this authorization flow with a self-deployed Azure application:
+
+ a. In the **Authentication Type** field, select the **Client Credentials** option.
+
+ b. In the **Application ID** field, enter your Client/Application ID.
+
+ c. In the **Subscription ID** field, enter your Subscription ID.
+
+ d. In the **Resource Group Name** field, enter your Resource Group Name.
+
+ e. In the **Tenant ID** field, enter your Tenant ID.
+
+ f. In the **Client Secret** field, enter your Client Secret.
+
+ g. Click **Test** to validate the URLs, token, and connection.
+
+ h. Save the instance.
+
+
### Authentication Using the Device Code Flow
Use the [device authorization grant flow](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-device-code).
1. Fill in the required parameters.
-2. Run the ***!azure-storage-auth-start*** command.
+2. Run the ***!azure-storage-auth-start*** command.
3. Follow the instructions that appear.
4. Run the ***!azure-storage-auth-complete*** command.
@@ -39,15 +83,17 @@ To use a self-configured Azure application, you need to add a new Azure App Regi
The application must have *user_impersonation* permission and must allow public client flows (can be found under the **Authentication** section of the app).
### Azure Managed Identities Authentication
+
##### Note: This option is relevant only if the integration is running on Azure VM.
+
Follow one of these steps for authentication based on Azure Managed Identities:
- ##### To use System Assigned Managed Identity
- - In the **Authentication Type** drop-down list, select **Azure Managed Identities** and leave the **Azure Managed Identities Client ID** field empty.
+ - In the *Authentication Type* drop-down list, select **Azure Managed Identities** and leave the *Azure Managed Identities Client ID* field empty.
- ##### To use User Assigned Managed Identity
- 1. Go to [Azure Portal](https://portal.azure.com/) -> **Managed Identities**.
- 2. Select your User Assigned Managed Identity -> copy the Client ID -> paste it in the **Azure Managed Identities client ID** field in the instance configuration.
- 3. In the **Authentication Type** drop-down list, select **Azure Managed Identities**.
+ 1. Go to [Azure Portal](https://portal.azure.com/) > **Managed Identities**
+ 2. Select your User Assigned Managed Identity > copy the Client ID and paste it in the *Azure Managed Identities client ID* field in the instance configuration.
+ 3. In the *Authentication Type* drop-down list, select **Azure Managed Identities**.
For more information, see [Managed identities for Azure resources](https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview).
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
index 1e26a1ce6305..4774ebabbc5e 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
+++ b/Packs/AzureStorage/Integrations/AzureStorage/AzureStorage_test.py
@@ -1,4 +1,3 @@
-import io
import json
import requests
@@ -20,7 +19,7 @@ def client(mocker):
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
@@ -126,7 +125,7 @@ def test_storage_blob_service_properties_get(client, mocker):
params={'subscription_id': subscription_id,
'resource_group_name': resource_group_name})
expected_hr = '### Azure Storage Blob Service Properties\n' \
- '|Name|Account Name|Subscription ID|Resource Group|Change Feed|Delete Retention Policy|Versioning|\n'\
+ '|Name|Account Name|Subscription ID|Resource Group|Change Feed|Delete Retention Policy|Versioning|\n' \
'|---|---|---|---|---|---|---|\n' \
'| default | account_name | subscription_id | resource_group_name | | false ' \
'| |\n'
@@ -361,6 +360,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureStorage.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureStorage/Integrations/AzureStorage/README.md b/Packs/AzureStorage/Integrations/AzureStorage/README.md
index 82e180822bd2..fb16f2ff161e 100644
--- a/Packs/AzureStorage/Integrations/AzureStorage/README.md
+++ b/Packs/AzureStorage/Integrations/AzureStorage/README.md
@@ -2,29 +2,74 @@ This integration enables you to deploy and manage storage accounts and blob serv
This integration was integrated and tested with version 2022-09-01 of Azure Storage
# Authorization
+
In order to connect to the Azure Storage Accounts and the Blob Service use either the Cortex XSOAR Azure App or the Self-Deployed Azure App.
Use one of the following methods:
1. *Authorization Code Flow* (Recommended).
-2. *Device Code Flow*.
+2. *Client Credentials Flow*.
+3. *Device Code Flow*.
+
+## Self-Deployed Azure App
+
+To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal.
+
+To add the registration, refer to the following [Microsoft article](https://learn.microsoft.com/en-us/defender-xdr/api-create-app-web?view=o365-worldwide) steps 1-8.
+
+### Required permissions
+
+- Azure Service Management - permission `user_impersonation` of type Delegated
+- Microsoft Graph - permission `offline_access` of type Delegated
+
+To add a permission:
+
+1. Navigate to **Home** > **App registrations**.
+2. Search for your app under 'all applications'.
+3. Click **API permissions** > **Add permission**.
+4. Search for the specific Microsoft API and select the specific permission of type Delegated.
### Authentication Using the Authorization Code Flow (recommended)
1. To use a self-configured Azure application, you need to add a new Azure App Registration in the Azure Portal. To add the registration, refer to the following [Microsoft article](https://docs.microsoft.com/en-us/microsoft-365/security/defender/api-create-app-web?view=o365-worldwide#create-an-app) steps 1-8.
-2. In the **Authentication Type** field, select the **Authorization Code** option.
-3. In the **Application ID** field, enter your Client/Application ID.
-4. In the **Client Secret** field, enter your Client Secret.
-5. In the **Tenant ID** field, enter your Tenant ID .
-6. In the **Application redirect URI** field, enter your Application redirect URI.
+2. In the *Authentication Type* field, select the **Authorization Code** option.
+3. In the *Application ID* field, enter your Client/Application ID.
+4. In the *Client Secret* field, enter your Client Secret.
+5. In the *Tenant ID* field, enter your Tenant ID.
+6. In the *Application redirect URI* field, enter your Application redirect URI.
7. Save the instance.
8. Run the `!azure-storage-generate-login-url` command in the War Room and follow the instruction.
+### Authentication Using the Client Credentials Flow
+
+1. Assign Azure roles using the Azure portal, as described in this [Microsoft article](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal).
+
+   *Note:* In the 'Select members' section, assign the application you created earlier.
+
+2. To configure a Microsoft integration that uses this authorization flow with a self-deployed Azure application:
+
+ a. In the **Authentication Type** field, select the **Client Credentials** option.
+
+ b. In the **Application ID** field, enter your Client/Application ID.
+
+ c. In the **Subscription ID** field, enter your Subscription ID.
+
+   d. In the **Resource Group Name** field, enter your Resource Group Name.
+
+ e. In the **Tenant ID** field, enter your Tenant ID.
+
+ f. In the **Client Secret** field, enter your Client Secret.
+
+   g. Click **Test** to validate the URLs, token, and connection.
+
+ h. Save the instance.
+
+
### Authentication Using the Device Code Flow
Use the [device authorization grant flow](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-device-code).
1. Fill in the required parameters.
-2. Run the ***!azure-storage-auth-start*** command.
+2. Run the ***!azure-storage-auth-start*** command.
3. Follow the instructions that appear.
4. Run the ***!azure-storage-auth-complete*** command.
@@ -42,174 +87,179 @@ To use a self-configured Azure application, you need to add a new Azure App Regi
The application must have *user_impersonation* permission and must allow public client flows (can be found under the **Authentication** section of the app).
-
## Configure Azure Storage on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
2. Search for Azure Storage Management.
3. Click **Add instance** to create and configure a new integration instance.
- | **Parameter** | **Description** | **Required** |
- | --- | --- | --- |
- | Application ID | | False |
- | Default Subscription ID | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
- | Default Resource Group Name | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
- | Trust any certificate (not secure) | | False |
- | Use system proxy settings | | False |
- | Authentication Type | Type of authentication - can be Authorization Code flow \(recommended\), Device Code Flow, or Azure Managed Identities. | True |
- | Tenant ID (for user-auth mode) | | False |
- | Client Secret (for user-auth mode) | | False |
- | Azure Managed Identities Client ID | The Managed Identities client ID for authentication - relevant only if the integration is running on Azure VM. | False |
- | Application redirect URI (for user-auth mode) | | False |
- | Authorization code | For user-auth mode - received from the authorization step. See Detailed Instructions \(?\) section. | False |
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Application ID | | False |
+ | Default Subscription ID | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
+ | Default Resource Group Name | There are two options to set the specified value, either in the configuration or directly within the commands. However, setting values in both places will cause an override by the command value. | True |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+ | Authentication Type | Type of authentication - can be Authorization Code flow \(recommended\), Device Code Flow, or Azure Managed Identities. | True |
+ | Tenant ID (for user-auth mode) | | False |
+ | Client Secret (for user-auth mode) | | False |
+ | Azure Managed Identities Client ID | The Managed Identities client ID for authentication - relevant only if the integration is running on Azure VM. | False |
+ | Application redirect URI (for user-auth mode) | | False |
+ | Authorization code | For user-auth mode - received from the authorization step. See Detailed Instructions \(?\) section. | False |
1. Click **Test** to validate the URLs, token, and connection.
+
## Commands
+
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
### azure-storage-auth-test
+
***
Tests the connectivity to Azure.
-
#### Base Command
`azure-storage-auth-test`
+
#### Input
There are no input arguments for this command.
#### Human Readable Output
->✅ Success!
+> ✅ Success!
### azure-storage-auth-start
+
***
Run this command to start the authorization process and follow the instructions in the command results.
-
#### Base Command
`azure-storage-auth-start`
+
#### Input
There are no input arguments for this command.
#### Human Readable Output
->### Authorization instructions
+
+> ### Authorization instructions
> 1. To sign in, use a web browser to open the page:
> [https://microsoft.com/devicelogin](https://microsoft.com/devicelogin)
> and enter the code **XXXXXXXX** to authenticate.
> 2. Run the ***!azure-storage-auth-complete*** command in the War Room.
-
-
### azure-storage-auth-complete
+
***
Run this command to complete the authorization process. Should be used after running the ***azure-storage-auth-start*** command.
-
#### Base Command
`azure-storage-auth-complete`
+
#### Input
There are no input arguments for this command.
#### Human Readable Output
->✅ Authorization completed successfully.
+> ✅ Authorization completed successfully.
### azure-storage-auth-reset
+
***
Run this command if for some reason you need to rerun the authentication process.
-
#### Base Command
`azure-storage-auth-reset`
+
#### Input
There are no input arguments for this command.
#### Human Readable Output
->Authorization was reset successfully. You can now run ***!azure-storage-auth-start*** and ***!azure-storage-auth-complete***.
-
+> Authorization was reset successfully. You can now run ***!azure-storage-auth-start*** and ***!azure-storage-auth-complete***.
### azure-storage-account-list
+
***
Run this command to get the all or specific account storage details.
-
#### Base Command
`azure-storage-account-list`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account, optional. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account, optional. | Optional |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.StorageAccount.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.StorageAccount.kind | String | Gets the Kind. |
-| AzureStorage.StorageAccount.location | String | The geo-location where the resource lives |
-| AzureStorage.StorageAccount.name | String | The name of the resource |
-| AzureStorage.StorageAccount.properties.isHnsEnabled | Boolean | Account HierarchicalNamespace enabled if sets to true. |
-| AzureStorage.StorageAccount.properties.allowBlobPublicAccess | Boolean | Allow or disallow public access to all blobs or containers in the storage account. The default interpretation is true for this property. |
-| AzureStorage.StorageAccount.properties.minimumTlsVersion | String | Set the minimum TLS version to be permitted on requests to storage. The default interpretation is TLS 1.0 for this property. |
-| AzureStorage.StorageAccount.properties.allowSharedKeyAccess | Boolean | Indicates whether the storage account permits requests to be authorized with the account access key via Shared Key. If false, then all requests, including shared access signatures, must be authorized with Azure Active Directory \(Azure AD\). |
-| AzureStorage.StorageAccount.properties.creationTime | Date | Gets the creation date and time of the storage account in UTC. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.web | String | Gets the web endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.dfs | String | Gets the dfs endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.blob | String | Gets the blob endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.file | String | Gets the file endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.queue | String | Gets the queue endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.table | String | Gets the table endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.web | String | Gets the web microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.dfs | String | Gets the dfs microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.blob | String | Gets the blob microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.file | String | Gets the file microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.queue | String | Gets the queue microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.table | String | Gets the table microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.web | String | Gets the web internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.dfs | String | Gets the dfs internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.blob | String | Gets the blob internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.file | String | Gets the file internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryLocation | String | Gets the location of the primary data center for the storage account. |
-| AzureStorage.StorageAccount.properties.provisioningState | String | Gets the status of the storage account at the time the operation was called. |
-| AzureStorage.StorageAccount.properties.routingPreference.routingChoice | String | Routing Choice defines the kind of network routing opted by the user. |
-| AzureStorage.StorageAccount.properties.routingPreference.publishMicrosoftEndpoints | Boolean | A boolean flag which indicates whether microsoft routing storage endpoints are to be published. |
-| AzureStorage.StorageAccount.properties.routingPreference.publishInternetEndpoints | Boolean | A boolean flag which indicates whether internet routing storage endpoints are to be published. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
-| AzureStorage.StorageAccount.properties.encryption.requireInfrastructureEncryption | Boolean | A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest. |
-| AzureStorage.StorageAccount.properties.encryption.keySource | String | The encryption keySource \(provider\). Possible values \(case-insensitive\): Microsoft.Storage, Microsoft.Keyvault. |
-| AzureStorage.StorageAccount.properties.secondaryLocation | String | Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS. |
-| AzureStorage.StorageAccount.properties.statusOfPrimary | String | Gets the status indicating whether the primary location of the storage account is available or unavailable |
-| AzureStorage.StorageAccount.properties.statusOfSecondary | String | Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. |
-| AzureStorage.StorageAccount.properties.supportsHttpsTrafficOnly | Boolean | Allows https traffic only to storage service if sets to true. |
-| AzureStorage.StorageAccount.sku.name | String | The SKU name. Required for account creation; optional for update. |
-| AzureStorage.StorageAccount.sku.tier | String | The SKU tier. This is based on the SKU name. |
-| AzureStorage.StorageAccount.tags | Unknown | Resource tags. |
-| AzureStorage.StorageAccount.type | String | The type of the resource. |
-
+| **Path** | **Type** | **Description** |
+|------------------------------------------------------------------------------------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.StorageAccount.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.StorageAccount.kind | String | Gets the Kind. |
+| AzureStorage.StorageAccount.location | String | The geo-location where the resource lives |
+| AzureStorage.StorageAccount.name | String | The name of the resource |
+| AzureStorage.StorageAccount.properties.isHnsEnabled | Boolean | Account HierarchicalNamespace enabled if sets to true. |
+| AzureStorage.StorageAccount.properties.allowBlobPublicAccess | Boolean | Allow or disallow public access to all blobs or containers in the storage account. The default interpretation is true for this property. |
+| AzureStorage.StorageAccount.properties.minimumTlsVersion | String | Set the minimum TLS version to be permitted on requests to storage. The default interpretation is TLS 1.0 for this property. |
+| AzureStorage.StorageAccount.properties.allowSharedKeyAccess | Boolean | Indicates whether the storage account permits requests to be authorized with the account access key via Shared Key. If false, then all requests, including shared access signatures, must be authorized with Azure Active Directory \(Azure AD\). |
+| AzureStorage.StorageAccount.properties.creationTime | Date | Gets the creation date and time of the storage account in UTC. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.web | String | Gets the web endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.dfs | String | Gets the dfs endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.blob | String | Gets the blob endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.file | String | Gets the file endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.queue | String | Gets the queue endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.table | String | Gets the table endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.web | String | Gets the web microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.dfs | String | Gets the dfs microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.blob | String | Gets the blob microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.file | String | Gets the file microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.queue | String | Gets the queue microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.table | String | Gets the table microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.web | String | Gets the web internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.dfs | String | Gets the dfs internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.blob | String | Gets the blob internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.file | String | Gets the file internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryLocation | String | Gets the location of the primary data center for the storage account. |
+| AzureStorage.StorageAccount.properties.provisioningState | String | Gets the status of the storage account at the time the operation was called. |
+| AzureStorage.StorageAccount.properties.routingPreference.routingChoice | String | Routing Choice defines the kind of network routing opted by the user. |
+| AzureStorage.StorageAccount.properties.routingPreference.publishMicrosoftEndpoints | Boolean | A boolean flag which indicates whether microsoft routing storage endpoints are to be published. |
+| AzureStorage.StorageAccount.properties.routingPreference.publishInternetEndpoints | Boolean | A boolean flag which indicates whether internet routing storage endpoints are to be published. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
+| AzureStorage.StorageAccount.properties.encryption.requireInfrastructureEncryption | Boolean | A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest. |
+| AzureStorage.StorageAccount.properties.encryption.keySource | String | The encryption keySource \(provider\). Possible values \(case-insensitive\): Microsoft.Storage, Microsoft.Keyvault. |
+| AzureStorage.StorageAccount.properties.secondaryLocation | String | Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS. |
+| AzureStorage.StorageAccount.properties.statusOfPrimary | String | Gets the status indicating whether the primary location of the storage account is available or unavailable |
+| AzureStorage.StorageAccount.properties.statusOfSecondary | String | Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. |
+| AzureStorage.StorageAccount.properties.supportsHttpsTrafficOnly | Boolean | Allows https traffic only to storage service if sets to true. |
+| AzureStorage.StorageAccount.sku.name | String | The SKU name. Required for account creation; optional for update. |
+| AzureStorage.StorageAccount.sku.tier | String | The SKU tier. This is based on the SKU name. |
+| AzureStorage.StorageAccount.tags | Unknown | Resource tags. |
+| AzureStorage.StorageAccount.type | String | The type of the resource. |
#### Command Example
+
```!azure-storage-account-list```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -273,109 +323,109 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Account List
+> ### Azure Storage Account List
>|Account Name|Subscription ID|Resource Group|Kind|Status Primary|Status Secondary|Location|
>|---|---|---|---|---|---|---|
>| acount_name1 | subscription_id1 | resource_group_name1 | Storage | available | available | eastus |
>| acount_name2 | subscription_id2 | resource_group_name2 | BlobStorage | available | available | eastus |
>| acount_name3 | subscription_id3 | resource_group_name3 | Storage | available | | westeurope |
-
### azure-storage-account-create-update
+
***
Run this command to create or update a specific
account storage.
-
#### Base Command
`azure-storage-account-create-update`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| sku | Gets or sets the SKU name, Required for account creation; optional for update. Possible values are: Premium_LRS, Premium_ZRS, Standard_GRS, Standard_GZRS, Standard_LRS, Standard_RAGRS, Standard_RAGZRS, Standard_ZRS. | Required |
-| kind | Indicates the type of storage account. Possible values are: Storage, StorageV2, BlobStorage, FileStorage, BlockBlobStorage. | Required |
-| location | Gets or sets the location of the resource. The geo region of a resource cannot be changed once it is created, but if an identical geo region is specified on update, the request will succeed. Possible values are: eastus, eastus2, westus, westeurope, eastasia, southeastasia, japaneast, japanwest, northcentralus, southcentralus, centralus, northeurope, brazilsouth, australiaeast, australiasoutheast, southindia, centralindia, westindia, canadaeast, canadacentral, westus2, westcentralus, uksouth, ukwest, koreacentral, koreasouth, francecentral, australiacentral, southafricanorth, uaenorth, switzerlandnorth, germanywestcentral, norwayeast. | Required |
-| tags | Gets or sets a list of tags that describe the resource. | Optional |
-| custom_domain_name | Gets or sets the custom domain name assigned to the storage account. | Optional |
-| use_sub_domain_name | Indicates whether indirect CName validation is enabled. Possible values are: true, false. | Optional |
-| enc_key_source | The encryption keySource. Possible values are: Microsoft.Storage, Microsoft.Keyvault. | Optional |
-| enc_requireInfrastructureEncryption | Indicating whether the service applies a secondary layer of encryption with platform managed keys for data at rest. Possible values are: true, false. | Optional |
-| enc_keyvault_key_name | The name of KeyVault key. | Optional |
-| enc_keyvault_key_version | The version of KeyVault key. | Optional |
-| enc_keyvault_uri | The Uri of KeyVault. | Optional |
-| access_tier | The access tier for the account. Required where kind = BlobStorage. Possible values are: Hot, Cool. | Optional |
-| supports_https_traffic_only | Allows https traffic only to storage service if sets to true. Possible values are: true, false. | Optional |
-| is_hns_enabled | Account HierarchicalNamespace enabled if sets to true. Possible values are: true, false. | Optional |
-| large_file_shares_state | Allow large file shares if sets to Enabled. Possible values are: Disabled, Enabled. | Optional |
-| allow_blob_public_access | Allow or disallow public access to all blobs or containers in the storage account. Possible values are: true, false. | Optional |
-| minimum_tls_version | Set the minimum TLS version to be permitted on requests to storage. Possible values are: TLS1_0, TLS1_1, TLS1_2. | Optional |
-| network_ruleset_bypass | Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible values are: AzureServices, Logging, Metrics, None. | Optional |
-| network_ruleset_default_action | Specifies the default action of allow or deny when no other rules match. | Optional |
-| network_ruleset_ipRules | Sets the IP ACL rules. | Optional |
-| virtual_network_rules | Sets the virtual network rules. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|-------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| sku | Gets or sets the SKU name, Required for account creation; optional for update. Possible values are: Premium_LRS, Premium_ZRS, Standard_GRS, Standard_GZRS, Standard_LRS, Standard_RAGRS, Standard_RAGZRS, Standard_ZRS. | Required |
+| kind | Indicates the type of storage account. Possible values are: Storage, StorageV2, BlobStorage, FileStorage, BlockBlobStorage. | Required |
+| location | Gets or sets the location of the resource. The geo region of a resource cannot be changed once it is created, but if an identical geo region is specified on update, the request will succeed. Possible values are: eastus, eastus2, westus, westeurope, eastasia, southeastasia, japaneast, japanwest, northcentralus, southcentralus, centralus, northeurope, brazilsouth, australiaeast, australiasoutheast, southindia, centralindia, westindia, canadaeast, canadacentral, westus2, westcentralus, uksouth, ukwest, koreacentral, koreasouth, francecentral, australiacentral, southafricanorth, uaenorth, switzerlandnorth, germanywestcentral, norwayeast. | Required |
+| tags | Gets or sets a list of tags that describe the resource. | Optional |
+| custom_domain_name | Gets or sets the custom domain name assigned to the storage account. | Optional |
+| use_sub_domain_name | Indicates whether indirect CName validation is enabled. Possible values are: true, false. | Optional |
+| enc_key_source | The encryption keySource. Possible values are: Microsoft.Storage, Microsoft.Keyvault. | Optional |
+| enc_requireInfrastructureEncryption | Indicating whether the service applies a secondary layer of encryption with platform managed keys for data at rest. Possible values are: true, false. | Optional |
+| enc_keyvault_key_name | The name of KeyVault key. | Optional |
+| enc_keyvault_key_version | The version of KeyVault key. | Optional |
+| enc_keyvault_uri | The Uri of KeyVault. | Optional |
+| access_tier | The access tier for the account. Required where kind = BlobStorage. Possible values are: Hot, Cool. | Optional |
+| supports_https_traffic_only | Allows https traffic only to storage service if sets to true. Possible values are: true, false. | Optional |
+| is_hns_enabled | Account HierarchicalNamespace enabled if sets to true. Possible values are: true, false. | Optional |
+| large_file_shares_state | Allow large file shares if sets to Enabled. Possible values are: Disabled, Enabled. | Optional |
+| allow_blob_public_access | Allow or disallow public access to all blobs or containers in the storage account. Possible values are: true, false. | Optional |
+| minimum_tls_version | Set the minimum TLS version to be permitted on requests to storage. Possible values are: TLS1_0, TLS1_1, TLS1_2. | Optional |
+| network_ruleset_bypass | Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible values are: AzureServices, Logging, Metrics, None. | Optional |
+| network_ruleset_default_action | Specifies the default action of allow or deny when no other rules match. | Optional |
+| network_ruleset_ipRules | Sets the IP ACL rules. | Optional |
+| virtual_network_rules | Sets the virtual network rules. | Optional |
+| subscription_id                     | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                  | Optional     |
+| resource_group_name                 | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.                                                                                                                                                                                                                                                                                                                                                                                                                                                          | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.StorageAccount.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.StorageAccount.kind | String | Gets the Kind. |
-| AzureStorage.StorageAccount.location | String | The geo-location where the resource lives. |
-| AzureStorage.StorageAccount.name | String | The name of the resource. |
-| AzureStorage.StorageAccount.properties.isHnsEnabled | Boolean | Account HierarchicalNamespace enabled if sets to true. |
-| AzureStorage.StorageAccount.properties.allowBlobPublicAccess | Boolean | Allow or disallow public access to all blobs or containers in the storage account. The default interpretation is true for this property. |
-| AzureStorage.StorageAccount.properties.minimumTlsVersion | String | Set the minimum TLS version to be permitted on requests to storage. The default interpretation is TLS 1.0 for this property. |
-| AzureStorage.StorageAccount.properties.allowSharedKeyAccess | Boolean | Indicates whether the storage account permits requests to be authorized with the account access key via Shared Key. If false, then all requests, including shared access signatures, must be authorized with Azure Active Directory \(Azure AD\). |
-| AzureStorage.StorageAccount.properties.creationTime | Date | Gets the creation date and time of the storage account in UTC. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.web | String | Gets the web endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.dfs | String | Gets the dfs endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.blob | String | Gets the blob endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.file | String | Gets the file endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.queue | String | Gets the queue endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.table | String | Gets the table endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.web | String | Gets the web microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.dfs | String | Gets the dfs microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.blob | String | Gets the blob microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.file | String | Gets the file microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.queue | String | Gets the queue microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.table | String | Gets the table microsoft endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.web | String | Gets the web internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.dfs | String | Gets the dfs internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.blob | String | Gets the blob internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.file | String | Gets the file internet endpoint. |
-| AzureStorage.StorageAccount.properties.primaryLocation | String | Gets the location of the primary data center for the storage account. |
-| AzureStorage.StorageAccount.properties.provisioningState | String | Gets the status of the storage account at the time the operation was called. |
-| AzureStorage.StorageAccount.properties.routingPreference.routingChoice | String | Routing Choice defines the kind of network routing opted by the user. |
-| AzureStorage.StorageAccount.properties.routingPreference.publishMicrosoftEndpoints | Boolean | A boolean flag which indicates whether microsoft routing storage endpoints are to be published. |
-| AzureStorage.StorageAccount.properties.routingPreference.publishInternetEndpoints | Boolean | A boolean flag which indicates whether internet routing storage endpoints are to be published. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
-| AzureStorage.StorageAccount.properties.encryption.services.file.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
-| AzureStorage.StorageAccount.properties.encryption.services.blob.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
-| AzureStorage.StorageAccount.properties.encryption.requireInfrastructureEncryption | Boolean | A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest. |
-| AzureStorage.StorageAccount.properties.encryption.keySource | String | The encryption keySource \(provider\). Possible values \(case-insensitive\): Microsoft.Storage, Microsoft.Keyvault. |
-| AzureStorage.StorageAccount.properties.secondaryLocation | String | Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS. |
-| AzureStorage.StorageAccount.properties.statusOfPrimary | String | Gets the status indicating whether the primary location of the storage account is available or unavailable |
-| AzureStorage.StorageAccount.properties.statusOfSecondary | String | Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. |
-| AzureStorage.StorageAccount.properties.supportsHttpsTrafficOnly | Boolean | Allows https traffic only to storage service if sets to true. |
-| AzureStorage.StorageAccount.sku.name | String | The SKU name. Required for account creation; optional for update. |
-| AzureStorage.StorageAccount.sku.tier | String | The SKU tier. This is based on the SKU name. |
-| AzureStorage.StorageAccount.tags | Unknown | Resource tags. |
-| AzureStorage.StorageAccount.type | String | The type of the resource. |
-
+| **Path** | **Type** | **Description** |
+|------------------------------------------------------------------------------------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.StorageAccount.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.StorageAccount.kind | String | Gets the Kind. |
+| AzureStorage.StorageAccount.location | String | The geo-location where the resource lives. |
+| AzureStorage.StorageAccount.name | String | The name of the resource. |
+| AzureStorage.StorageAccount.properties.isHnsEnabled | Boolean | Account HierarchicalNamespace enabled if sets to true. |
+| AzureStorage.StorageAccount.properties.allowBlobPublicAccess | Boolean | Allow or disallow public access to all blobs or containers in the storage account. The default interpretation is true for this property. |
+| AzureStorage.StorageAccount.properties.minimumTlsVersion | String | Set the minimum TLS version to be permitted on requests to storage. The default interpretation is TLS 1.0 for this property. |
+| AzureStorage.StorageAccount.properties.allowSharedKeyAccess | Boolean | Indicates whether the storage account permits requests to be authorized with the account access key via Shared Key. If false, then all requests, including shared access signatures, must be authorized with Azure Active Directory \(Azure AD\). |
+| AzureStorage.StorageAccount.properties.creationTime | Date | Gets the creation date and time of the storage account in UTC. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.web | String | Gets the web endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.dfs | String | Gets the dfs endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.blob | String | Gets the blob endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.file | String | Gets the file endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.queue | String | Gets the queue endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.table | String | Gets the table endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.web | String | Gets the web microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.dfs | String | Gets the dfs microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.blob | String | Gets the blob microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.file | String | Gets the file microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.queue | String | Gets the queue microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.microsoftEndpoints.table | String | Gets the table microsoft endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.web | String | Gets the web internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.dfs | String | Gets the dfs internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.blob | String | Gets the blob internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryEndpoints.internetEndpoints.file | String | Gets the file internet endpoint. |
+| AzureStorage.StorageAccount.properties.primaryLocation | String | Gets the location of the primary data center for the storage account. |
+| AzureStorage.StorageAccount.properties.provisioningState | String | Gets the status of the storage account at the time the operation was called. |
+| AzureStorage.StorageAccount.properties.routingPreference.routingChoice | String | Routing Choice defines the kind of network routing opted by the user. |
+| AzureStorage.StorageAccount.properties.routingPreference.publishMicrosoftEndpoints | Boolean | A boolean flag which indicates whether microsoft routing storage endpoints are to be published. |
+| AzureStorage.StorageAccount.properties.routingPreference.publishInternetEndpoints | Boolean | A boolean flag which indicates whether internet routing storage endpoints are to be published. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
+| AzureStorage.StorageAccount.properties.encryption.services.file.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.keyType | String | Encryption key type to be used for the encryption service. 'Account' key type implies that an account-scoped encryption key will be used. 'Service' key type implies that a default service key is used. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.enabled | Boolean | A boolean indicating whether or not the service encrypts the data as it is stored. |
+| AzureStorage.StorageAccount.properties.encryption.services.blob.lastEnabledTime | Date | Gets a rough estimate of the date/time when the encryption was last enabled by the user. |
+| AzureStorage.StorageAccount.properties.encryption.requireInfrastructureEncryption | Boolean | A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest. |
+| AzureStorage.StorageAccount.properties.encryption.keySource | String | The encryption keySource \(provider\). Possible values \(case-insensitive\): Microsoft.Storage, Microsoft.Keyvault. |
+| AzureStorage.StorageAccount.properties.secondaryLocation | String | Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS. |
+| AzureStorage.StorageAccount.properties.statusOfPrimary                             | String   | Gets the status indicating whether the primary location of the storage account is available or unavailable.                                                                                                                                                         |
+| AzureStorage.StorageAccount.properties.statusOfSecondary | String | Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. |
+| AzureStorage.StorageAccount.properties.supportsHttpsTrafficOnly | Boolean | Allows https traffic only to storage service if sets to true. |
+| AzureStorage.StorageAccount.sku.name | String | The SKU name. Required for account creation; optional for update. |
+| AzureStorage.StorageAccount.sku.tier | String | The SKU tier. This is based on the SKU name. |
+| AzureStorage.StorageAccount.tags | Unknown | Resource tags. |
+| AzureStorage.StorageAccount.type | String | The type of the resource. |
#### Command Example
+
```!azure-storage-account-create-update account_name=account_name1 kind=BlobStorage location=eastus sku=Standard_GRS```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -435,51 +485,51 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Account
+> ### Azure Storage Account
>|Account Name|Subscription ID|Resource Group|Kind|Status Primary|Status Secondary|Location|
>|---|---|---|---|---|---|---|
>| acount_name1 | subscription_id1 | resource_group_name1 | BlobStorage | available | available | eastus |
-
### azure-storage-blob-service-properties-get
+
***
Run this command to get the blob service properties of a specific account storage.
-
#### Base Command
`azure-storage-blob-service-properties-get`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.BlobServiceProperties.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.BlobServiceProperties.name | String | The name of the resource |
-| AzureStorage.BlobServiceProperties.type | String | The type of the resource. |
-| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.enable | Boolean | When set to true last access time based tracking is enabled. |
-| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.name | String | Name of the policy. The valid value is AccessTimeTracking. |
-| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.trackingGranularityInDays | Number | The field specifies blob object tracking granularity in days, typically how often the blob object should be tracked. |
-| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.blobType | String | An array of predefined supported blob types. Only blockBlob is the supported value. |
-
+| **Path** | **Type** | **Description** |
+|------------------------------------------------------------------------------------------------------|----------|----------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.BlobServiceProperties.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.BlobServiceProperties.name | String | The name of the resource |
+| AzureStorage.BlobServiceProperties.type | String | The type of the resource. |
+| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.enable | Boolean | When set to true last access time based tracking is enabled. |
+| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.name | String | Name of the policy. The valid value is AccessTimeTracking. |
+| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.trackingGranularityInDays | Number | The field specifies blob object tracking granularity in days, typically how often the blob object should be tracked. |
+| AzureStorage.BlobServiceProperties.properties.lastAccessTimeTrackingPolicy.blobType | String | An array of predefined supported blob types. Only blockBlob is the supported value. |
#### Command Example
+
```!azure-storage-blob-service-properties-get account_name=account_name1```
#### Context Example
+
```json
{
"AzureStorage": {
"BlobServiceProperties": {
- "id": "/subscriptions/sub_id/resourceGroups/resource_g_name/providers/Microsoft.Storage/storageAccounts/account_name/blobServices/default",
+ "id": "/subscriptions/sub_id/resourceGroups/resource_g_name/providers/Microsoft.Storage/storageAccounts/account_name/blobServices/default",
"name": "default",
"properties": {
"changeFeed": {
@@ -505,69 +555,69 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Blob Service Properties
+> ### Azure Storage Blob Service Properties
>|Name|Account Name|Subscription ID|Resource Group|Change Feed|Delete Retention Policy|Versioning|
>|---|---|---|---|---|---|---|
>| default | account_name | subscription_id | resource_group_name | change_feed_enabled | delete_retention_policy_enabled | is_versioning_enabled |
-
### azure-storage-blob-service-properties-set
+
***
Run this command to set properties for
the blob service in a specific account storage
-
#### Base Command
`azure-storage-blob-service-properties-set`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| change_feed_enabled | Indicates whether change feed event logging is enabled for the Blob service. Possible values are: true, false. | Optional |
-| change_feed_retention_days | Indicates the duration of changeFeed retention in days. | Optional |
-| container_delete_rentention_policy_enabled | Indicates whether DeleteRetentionPolicy is enabled. Possible values are: true, false. | Optional |
-| container_delete_rentention_policy_days | Indicates the number of days that the deleted item should be retained. | Optional |
-| delete_rentention_policy_enabled | Indicates whether DeleteRetentionPolicy is enabled. Possible values are: true, false. | Optional |
-| delete_rentention_policy_days | Indicates the number of days that the deleted item should be retained. | Optional |
-| versioning | Versioning is enabled if set to true. Possible values are: true, false. | Optional |
-| last_access_time_tracking_policy_enabled | When set to true last access time based tracking is enabled. Possible values are: true, false. | Optional |
-| last_access_time_tracking_policy_blob_types | An array of predefined supported blob types. | Optional |
-| last_access_time_tracking_policy_days | The field specifies blob object tracking granularity in days. | Optional |
-| restore_policy_enabled | Blob restore is enabled if set to true. Possible values are: true, false. | Optional |
-| restore_policy_min_restore_time | The minimum date and time that the restore can be started. | Optional |
-| restore_policy_days | how long this blob can be restored. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------------------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| change_feed_enabled | Indicates whether change feed event logging is enabled for the Blob service. Possible values are: true, false. | Optional |
+| change_feed_retention_days | Indicates the duration of changeFeed retention in days. | Optional |
+| container_delete_rentention_policy_enabled | Indicates whether DeleteRetentionPolicy is enabled. Possible values are: true, false. | Optional |
+| container_delete_rentention_policy_days | Indicates the number of days that the deleted item should be retained. | Optional |
+| delete_rentention_policy_enabled | Indicates whether DeleteRetentionPolicy is enabled. Possible values are: true, false. | Optional |
+| delete_rentention_policy_days | Indicates the number of days that the deleted item should be retained. | Optional |
+| versioning | Versioning is enabled if set to true. Possible values are: true, false. | Optional |
+| last_access_time_tracking_policy_enabled | When set to true last access time based tracking is enabled. Possible values are: true, false. | Optional |
+| last_access_time_tracking_policy_blob_types | An array of predefined supported blob types. | Optional |
+| last_access_time_tracking_policy_days | The field specifies blob object tracking granularity in days. | Optional |
+| restore_policy_enabled | Blob restore is enabled if set to true. Possible values are: true, false. | Optional |
+| restore_policy_min_restore_time | The minimum date and time that the restore can be started. | Optional |
+| restore_policy_days                         | How long this blob can be restored.                                                                              | Optional     |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name                         | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.BlobServiceProperties.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.BlobServiceProperties.name | String | The name of the resource. |
-| AzureStorage.BlobServiceProperties.type | String | The type of the resource. |
-| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedOrigins | String | Required if CorsRule element is present. A list of origin domains that will be allowed via CORS, or "\*" to allow all domains. |
-| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedMethods | String | Required if CorsRule element is present. A list of HTTP methods that are allowed to be executed by the origin. |
-| AzureStorage.BlobServiceProperties.properties.cors.corsRules.maxAgeInSeconds | Number | Required if CorsRule element is present. The number of seconds that the client/browser should cache a preflight response. |
-| AzureStorage.BlobServiceProperties.properties.cors.corsRules.exposedHeaders | String | Required if CorsRule element is present. A list of response headers to expose to CORS clients. |
-| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedHeaders | String | Required if CorsRule element is present. A list of headers allowed to be part of the cross-origin request. |
-| AzureStorage.BlobServiceProperties.properties.defaultServiceVersion | Date | Indicates the default version to use for requests to the Blob service if an incoming request\\u2019s version is not specified. Possible values include version 2008-10-27 and all more recent versions. |
-| AzureStorage.BlobServiceProperties.properties.deleteRetentionPolicy.enabled | Boolean | Indicates whether DeleteRetentionPolicy is enabled. |
-| AzureStorage.BlobServiceProperties.properties.deleteRetentionPolicy.days | Number | Indicates the number of days that the deleted item should be retained. The minimum specified value can be 1 and the maximum value can be 365. |
-| AzureStorage.BlobServiceProperties.properties.isVersioningEnabled | Boolean | Versioning is enabled if set to true. |
-| AzureStorage.BlobServiceProperties.properties.changeFeed.enabled | Boolean | Indicates whether change feed event logging is enabled for the Blob service. |
-| AzureStorage.BlobServiceProperties.properties.changeFeed.retentionInDays | Number | Indicates the duration of changeFeed retention in days. Minimum value is 1 day and maximum value is 146000 days. |
-| AzureStorage.BlobServiceProperties.sku.name | String | The SKU name. |
-| AzureStorage.BlobServiceProperties.sku.tier | String | The SKU tier. |
-
+| **Path** | **Type** | **Description** |
+|------------------------------------------------------------------------------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.BlobServiceProperties.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.BlobServiceProperties.name | String | The name of the resource. |
+| AzureStorage.BlobServiceProperties.type | String | The type of the resource. |
+| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedOrigins | String | Required if CorsRule element is present. A list of origin domains that will be allowed via CORS, or "\*" to allow all domains. |
+| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedMethods | String | Required if CorsRule element is present. A list of HTTP methods that are allowed to be executed by the origin. |
+| AzureStorage.BlobServiceProperties.properties.cors.corsRules.maxAgeInSeconds | Number | Required if CorsRule element is present. The number of seconds that the client/browser should cache a preflight response. |
+| AzureStorage.BlobServiceProperties.properties.cors.corsRules.exposedHeaders | String | Required if CorsRule element is present. A list of response headers to expose to CORS clients. |
+| AzureStorage.BlobServiceProperties.properties.cors.corsRules.allowedHeaders | String | Required if CorsRule element is present. A list of headers allowed to be part of the cross-origin request. |
+| AzureStorage.BlobServiceProperties.properties.defaultServiceVersion | Date | Indicates the default version to use for requests to the Blob service if an incoming request\\u2019s version is not specified. Possible values include version 2008-10-27 and all more recent versions. |
+| AzureStorage.BlobServiceProperties.properties.deleteRetentionPolicy.enabled | Boolean | Indicates whether DeleteRetentionPolicy is enabled. |
+| AzureStorage.BlobServiceProperties.properties.deleteRetentionPolicy.days | Number | Indicates the number of days that the deleted item should be retained. The minimum specified value can be 1 and the maximum value can be 365. |
+| AzureStorage.BlobServiceProperties.properties.isVersioningEnabled | Boolean | Versioning is enabled if set to true. |
+| AzureStorage.BlobServiceProperties.properties.changeFeed.enabled | Boolean | Indicates whether change feed event logging is enabled for the Blob service. |
+| AzureStorage.BlobServiceProperties.properties.changeFeed.retentionInDays | Number | Indicates the duration of changeFeed retention in days. Minimum value is 1 day and maximum value is 146000 days. |
+| AzureStorage.BlobServiceProperties.sku.name | String | The SKU name. |
+| AzureStorage.BlobServiceProperties.sku.tier | String | The SKU tier. |
#### Command Example
+
```!azure-storage-blob-service-properties-set account_name=account_name1 delete_rentention_policy_enabled=false```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -587,46 +637,46 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Blob Service Properties
+> ### Azure Storage Blob Service Properties
>|Name|Account Name|Subscription ID|Resource Group|Change Feed|Delete Retention Policy|Versioning|
>|---|---|---|---|---|---|---|
>| default | account_name | subscription_id | resource_group_name | change_feed_enabled | delete_retention_policy_enabled | is_versioning_enabled |
-
### azure-storage-blob-containers-create
+
***
Run this command to create a blob container.
-
#### Base Command
`azure-storage-blob-containers-create`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| container_name | The name of the container. | Required |
-| default_encryption_scope | Default the container to use specified encryption scope for all writes. | Optional |
-| deny_encryption_scope_override | Block override of encryption scope from the container default. Possible values are: true, false. | Optional |
-| public_access | Specifies the level of access. Possible values are: Blob, Container, None. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|--------------------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| container_name | The name of the container. | Required |
+| default_encryption_scope | Default the container to use specified encryption scope for all writes. | Optional |
+| deny_encryption_scope_override | Block override of encryption scope from the container default. Possible values are: true, false. | Optional |
+| public_access | Specifies the level of access. Possible values are: Blob, Container, None. | Optional |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name            | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.BlobContainer.name | String | The name of the resource. |
-| AzureStorage.BlobContainer.type | String | The type of the resource. |
-
+| **Path** | **Type** | **Description** |
+|---------------------------------|----------|-----------------------------------------------|
+| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.BlobContainer.name | String | The name of the resource. |
+| AzureStorage.BlobContainer.type | String | The type of the resource. |
#### Command Example
+
```!azure-storage-blob-containers-create account_name=account_name container_name=container_name```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -647,50 +697,51 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Blob Containers Properties
+> ### Azure Storage Blob Containers Properties
>|Name|Account Name|Subscription ID|Resource Group|Public Access|
>|---|---|---|---|---|
>| container_name | account_name | subscription_id | resource_group | |
-
### azure-storage-blob-containers-update
+
***
Run this command to update a specific
blob container.
-
#### Base Command
`azure-storage-blob-containers-update`
+
#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| container_name | The name of the container. | Required |
-| default_encryption_scope | Default the container to use specified encryption scope for all writes. | Optional |
-| deny_encryption_scope_override | Block override of encryption scope from the container default. Possible values are: true, false. | Optional |
-| public_access | Specifies the level of access. Possible values are: Blob, Container, None. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+| **Argument Name** | **Description** | **Required** |
+|--------------------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| container_name | The name of the container. | Required |
+| default_encryption_scope | Default the container to use specified encryption scope for all writes. | Optional |
+| deny_encryption_scope_override | Block override of encryption scope from the container default. Possible values are: true, false. | Optional |
+| public_access | Specifies the level of access. Possible values are: Blob, Container, None. | Optional |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name            | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.BlobContainer.name | String | The name of the resource. |
-| AzureStorage.BlobContainer.type | String | The type of the resource. |
-| AzureStorage.BlobContainer.properties.metadata.metadata | String | A name-value pair to associate with the container as metadata. |
-| AzureStorage.BlobContainer.properties.publicAccess | String | Specifies whether data in the container may be accessed publicly and the level of access. |
-| AzureStorage.BlobContainer.properties.hasImmutabilityPolicy | Boolean | The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public property is set to false by SRP if ImmutabilityPolicy has not been created for this container. |
-| AzureStorage.BlobContainer.properties.hasLegalHold | Boolean | The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. |
-
+| **Path** | **Type** | **Description** |
+|-------------------------------------------------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.BlobContainer.name | String | The name of the resource. |
+| AzureStorage.BlobContainer.type | String | The type of the resource. |
+| AzureStorage.BlobContainer.properties.metadata.metadata | String | A name-value pair to associate with the container as metadata. |
+| AzureStorage.BlobContainer.properties.publicAccess | String | Specifies whether data in the container may be accessed publicly and the level of access. |
+| AzureStorage.BlobContainer.properties.hasImmutabilityPolicy | Boolean | The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public property is set to false by SRP if ImmutabilityPolicy has not been created for this container. |
+| AzureStorage.BlobContainer.properties.hasLegalHold | Boolean | The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. |
#### Command Example
+
```!azure-storage-blob-containers-update account_name=account_name container_name=container_name```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -711,51 +762,51 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Blob Containers Properties
+> ### Azure Storage Blob Containers Properties
>|Name|Account Name|Subscription ID|Resource Group|Public Access|
>|---|---|---|---|---|
>| container_name | account_name | subscription_id | resource_group | |
-
### azure-storage-blob-containers-list
+
***
Run this command to get the all or specific blob container details.
-
#### Base Command
`azure-storage-blob-containers-list`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| container_name | The name of the container. | Optional |
-| include_deleted | Specifies whether include the properties for soft deleted blob containers. Possible values are: true, false. | Optional |
-| maxpagesize | Specified maximum number of containers that can be included in the list. | Optional |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| container_name | The name of the container. | Optional |
+| include_deleted     | Specifies whether to include the properties for soft deleted blob containers. Possible values are: true, false.  | Optional     |
+| maxpagesize | Specified maximum number of containers that can be included in the list. | Optional |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
-| AzureStorage.BlobContainer.name | String | The name of the resource. |
-| AzureStorage.BlobContainer.type | String | The type of the resource. |
-| AzureStorage.BlobContainer.properties.publicAccess | String | Specifies whether data in the container may be accessed publicly and the level of access. |
-| AzureStorage.BlobContainer.properties.leaseStatus | String | The lease status of the container. |
-| AzureStorage.BlobContainer.properties.leaseState | String | Lease state of the container. |
-| AzureStorage.BlobContainer.properties.lastModifiedTime | Date | Returns the date and time the container was last modified. |
-| AzureStorage.BlobContainer.properties.hasImmutabilityPolicy | Boolean | The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public property is set to false by SRP if ImmutabilityPolicy has not been created for this container. |
-| AzureStorage.BlobContainer.properties.hasLegalHold | Boolean | The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. |
-
+| **Path** | **Type** | **Description** |
+|-------------------------------------------------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| AzureStorage.BlobContainer.id | String | Fully qualified resource ID for the resource. |
+| AzureStorage.BlobContainer.name | String | The name of the resource. |
+| AzureStorage.BlobContainer.type | String | The type of the resource. |
+| AzureStorage.BlobContainer.properties.publicAccess | String | Specifies whether data in the container may be accessed publicly and the level of access. |
+| AzureStorage.BlobContainer.properties.leaseStatus | String | The lease status of the container. |
+| AzureStorage.BlobContainer.properties.leaseState | String | Lease state of the container. |
+| AzureStorage.BlobContainer.properties.lastModifiedTime | Date | Returns the date and time the container was last modified. |
+| AzureStorage.BlobContainer.properties.hasImmutabilityPolicy | Boolean | The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public property is set to false by SRP if ImmutabilityPolicy has not been created for this container. |
+| AzureStorage.BlobContainer.properties.hasLegalHold | Boolean | The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. |
#### Command Example
+
```!azure-storage-blob-containers-list account_name=account_name```
#### Context Example
+
```json
{
"AzureStorage": {
@@ -788,7 +839,7 @@ resource_group_name| The resource group name. Note: This argument will override
"hasLegalHold": false,
"lastModifiedTime": "2021-03-31T06:45:30.0000000Z",
"leaseState": "Available",
- "leaseStatus": "Unlocked",
+ "leaseStatus": "Unlocked",
"publicAccess": "None",
"remainingRetentionDays": 0
},
@@ -801,49 +852,51 @@ resource_group_name| The resource group name. Note: This argument will override
#### Human Readable Output
->### Azure Storage Blob Containers list
+> ### Azure Storage Blob Containers list
>|Container Name|Account Name|Subscription ID|Resource Group|Public Access|Lease State|Last Modified Time|
>|---|---|---|---|---|---|---|
>| container_name1 | account_name | subscription_id | resource_group | None | Available | 2021-03-31T06:49:57.0000000Z |
>| container_name2 | account_name | subscription_id | resource_group | None | Available | 2021-03-31T06:45:30.0000000Z |
### azure-storage-blob-container-delete
+
***
Run this command to delete a specific blob container.
-
#### Base Command
`azure-storage-blob-container-delete`
-#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| account_name | The name of the storage account. | Required |
-| container_name | The name of the container. | Required |
-|subscription_id|The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'.|Optional|
-resource_group_name| The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'.|Optional|
+#### Input
+| **Argument Name** | **Description** | **Required** |
+|---------------------|------------------------------------------------------------------------------------------------------------------|--------------|
+| account_name | The name of the storage account. | Required |
+| container_name | The name of the container. | Required |
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| resource_group_name | The resource group name. Note: This argument will override the instance parameter ‘Default Resource Group Name'. | Optional     |
#### Context Output
There is no context output for this command.
#### Command Example
+
```!azure-storage-blob-container-delete account_name=account_name container_name=container_name```
#### Human Readable Output
->The request to delete the blob container was sent successfully.
-
+> The request to delete the blob container was sent successfully.
### azure-storage-generate-login-url
+
***
Generate the login url used for Authorization code flow.
#### Base Command
`azure-storage-generate-login-url`
+
#### Input
There are no input arguments for this command.
@@ -853,16 +906,18 @@ There are no input arguments for this command.
There is no context output for this command.
#### Command Example
+
```azure-storage-generate-login-url```
#### Human Readable Output
->### Authorization instructions
+> ### Authorization instructions
>1. Click on the [login URL]() to sign in and grant Cortex XSOAR permissions for your Azure Service Management.
-You will be automatically redirected to a link with the following structure:
-```REDIRECT_URI?code=AUTH_CODE&session_state=SESSION_STATE```
+ You will be automatically redirected to a link with the following structure:
+ ```REDIRECT_URI?code=AUTH_CODE&session_state=SESSION_STATE```
>2. Copy the `AUTH_CODE` (without the `code=` prefix, and the `session_state` parameter)
-and paste it in your instance configuration under the **Authorization code** parameter.
+ and paste it in your instance configuration under the **Authorization code** parameter.
+
### azure-storage-subscriptions-list
***
@@ -878,24 +933,27 @@ There are no input arguments for this command.
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.Subscription.id | String | The unique identifier of the Azure storage subscription. |
-| AzureStorage.Subscription.authorizationSource | String | The source of authorization for the Azure storage subscription. |
-| AzureStorage.Subscription.managedByTenants | Unknown | The tenants that have access to manage the Azure storage subscription. |
-| AzureStorage.Subscription.subscriptionId | String | The ID of the Azure storage subscription. |
-| AzureStorage.Subscription.tenantId | String | The ID of the tenant associated with the Azure storage subscription. |
-| AzureStorage.Subscription.displayName | String | The display name of the Azure storage subscription. |
-| AzureStorage.Subscription.state | String | The current state of the Azure storage subscription. |
-| AzureStorage.Subscription.subscriptionPolicies.locationPlacementId | String | The ID of the location placement policy for the Azure storage subscription. |
-| AzureStorage.Subscription.subscriptionPolicies.quotaId | String | The ID of the quota policy for the Azure storage subscription. |
-| AzureStorage.Subscription.subscriptionPolicies.spendingLimit | String | The spending limit policy for the Azure storage subscription. |
-| AzureStorage.Subscription.count.type | String | The type of the Azure storage subscription count. |
-| AzureStorage.Subscription.count.value | Number | The value of the Azure storage subscription count. |
+| **Path** | **Type** | **Description** |
+|--------------------------------------------------------------------|----------|-----------------------------------------------------------------------------|
+| AzureStorage.Subscription.id | String | The unique identifier of the Azure storage subscription. |
+| AzureStorage.Subscription.authorizationSource | String | The source of authorization for the Azure storage subscription. |
+| AzureStorage.Subscription.managedByTenants | Unknown | The tenants that have access to manage the Azure storage subscription. |
+| AzureStorage.Subscription.subscriptionId | String | The ID of the Azure storage subscription. |
+| AzureStorage.Subscription.tenantId | String | The ID of the tenant associated with the Azure storage subscription. |
+| AzureStorage.Subscription.displayName | String | The display name of the Azure storage subscription. |
+| AzureStorage.Subscription.state | String | The current state of the Azure storage subscription. |
+| AzureStorage.Subscription.subscriptionPolicies.locationPlacementId | String | The ID of the location placement policy for the Azure storage subscription. |
+| AzureStorage.Subscription.subscriptionPolicies.quotaId | String | The ID of the quota policy for the Azure storage subscription. |
+| AzureStorage.Subscription.subscriptionPolicies.spendingLimit | String | The spending limit policy for the Azure storage subscription. |
+| AzureStorage.Subscription.count.type | String | The type of the Azure storage subscription count. |
+| AzureStorage.Subscription.count.value | Number | The value of the Azure storage subscription count. |
#### Command example
+
```!azure-storage-subscriptions-list```
+
#### Context Example
+
```json
{
"AzureStorage": {
@@ -935,7 +993,7 @@ There are no input arguments for this command.
#### Human Readable Output
->### Azure Storage Subscriptions list
+> ### Azure Storage Subscriptions list
>|subscriptionId|tenantId|displayName|state|
>|---|---|---|---|
>| 00000000000000000000 | 00000000000000000 | Access to Azure Active Directory | Enabled |
@@ -952,32 +1010,35 @@ Gets all resource groups for a subscription.
#### Input
-| **Argument Name** | **Description** | **Required** |
-| --- | --- | --- |
-| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
-| limit | Limit on the number of resource groups to return. Default value is 50. Default is 50. | Optional |
-| tag | A single tag in the form of `{"Tag Name":"Tag Value"}` to filter the list by. | Optional |
+| **Argument Name** | **Description** | **Required** |
+|-------------------|----------------------------------------------------------------------------------------------------------|--------------|
+| subscription_id | The subscription ID. Note: This argument will override the instance parameter ‘Default Subscription ID'. | Optional |
+| limit | Limit on the number of resource groups to return. Default value is 50. Default is 50. | Optional |
+| tag | A single tag in the form of `{"Tag Name":"Tag Value"}` to filter the list by. | Optional |
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| AzureStorage.ResourceGroup.id | String | The unique identifier of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.name | String | The name of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.type | String | The type of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.location | String | The location of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.properties.provisioningState | String | The provisioning state of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags.Owner | String | The owner tag of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags | Unknown | The tags associated with the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags.Name | String | The name tag of the Azure storage resource group. |
-| AzureStorage.ResourceGroup.managedBy | String | The entity that manages the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags.aks-managed-cluster-name | String | The AKS managed cluster name tag associated with the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags.aks-managed-cluster-rg | String | The AKS managed cluster resource group tag associated with the Azure storage resource group. |
-| AzureStorage.ResourceGroup.tags.type | String | The type tag associated with the Azure storage resource group. |
+| **Path** | **Type** | **Description** |
+|----------------------------------------------------------|----------|----------------------------------------------------------------------------------------------|
+| AzureStorage.ResourceGroup.id | String | The unique identifier of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.name | String | The name of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.type | String | The type of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.location | String | The location of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.properties.provisioningState | String | The provisioning state of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags.Owner | String | The owner tag of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags | Unknown | The tags associated with the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags.Name | String | The name tag of the Azure storage resource group. |
+| AzureStorage.ResourceGroup.managedBy | String | The entity that manages the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags.aks-managed-cluster-name | String | The AKS managed cluster name tag associated with the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags.aks-managed-cluster-rg | String | The AKS managed cluster resource group tag associated with the Azure storage resource group. |
+| AzureStorage.ResourceGroup.tags.type | String | The type tag associated with the Azure storage resource group. |
#### Command example
+
```!azure-storage-resource-group-list```
+
#### Context Example
+
```json
{
"AzureStorage": {
@@ -1002,7 +1063,7 @@ Gets all resource groups for a subscription.
"Owner": "Demi"
},
"type": "Microsoft.Resources/resourceGroups"
- },
+ }
]
}
}
@@ -1010,7 +1071,7 @@ Gets all resource groups for a subscription.
#### Human Readable Output
->### Resource Groups List
+> ### Resource Groups List
>|Name|Location|Tags|
>|---|---|---|
>| cloud-shell-storage-eastus | eastus | |
diff --git a/Packs/AzureStorage/ReleaseNotes/1_2_25.md b/Packs/AzureStorage/ReleaseNotes/1_2_25.md
new file mode 100644
index 000000000000..9e732264a34d
--- /dev/null
+++ b/Packs/AzureStorage/ReleaseNotes/1_2_25.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Storage Management
+
+- Added support for *Client Credentials* authentication flow.
+- Updated the Docker image to: *demisto/crypto:1.0.0.94037*.
\ No newline at end of file
diff --git a/Packs/AzureStorage/ReleaseNotes/1_2_26.md b/Packs/AzureStorage/ReleaseNotes/1_2_26.md
new file mode 100644
index 000000000000..76fe715eeb08
--- /dev/null
+++ b/Packs/AzureStorage/ReleaseNotes/1_2_26.md
@@ -0,0 +1,6 @@
+#### Integrations
+
+##### Azure Storage Management
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureStorage/ReleaseNotes/1_2_27.md b/Packs/AzureStorage/ReleaseNotes/1_2_27.md
new file mode 100644
index 000000000000..0a4781f03773
--- /dev/null
+++ b/Packs/AzureStorage/ReleaseNotes/1_2_27.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Storage Management
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureStorage/pack_metadata.json b/Packs/AzureStorage/pack_metadata.json
index 01637f02aec0..0bebfd368d6f 100644
--- a/Packs/AzureStorage/pack_metadata.json
+++ b/Packs/AzureStorage/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure Storage Management",
"description": "Deploy and manage storage accounts and blob service properties.",
"support": "xsoar",
- "currentVersion": "1.2.24",
+ "currentVersion": "1.2.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF.yml b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF.yml
index 088ce6822c69..1934e7f62e87 100644
--- a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF.yml
+++ b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF.yml
@@ -514,9 +514,11 @@ script:
- contextPath: AzureWAF.ResourceGroup.tags
description: Resource group tags.
type: String
- dockerimage: demisto/crypto:1.0.0.66562
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
type: python
fromversion: 5.0.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
index 8312ca8c0871..ed1e04c25a77 100644
--- a/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
+++ b/Packs/AzureWAF/Integrations/AzureWAF/AzureWAF_test.py
@@ -379,7 +379,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20{Scopes.management_azure}' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = AzureWAF.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/AzureWAF/ReleaseNotes/1_1_23.md b/Packs/AzureWAF/ReleaseNotes/1_1_23.md
new file mode 100644
index 000000000000..af860e86fbf3
--- /dev/null
+++ b/Packs/AzureWAF/ReleaseNotes/1_1_23.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Web Application Firewall
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/AzureWAF/ReleaseNotes/1_1_24.md b/Packs/AzureWAF/ReleaseNotes/1_1_24.md
new file mode 100644
index 000000000000..055424e71e1c
--- /dev/null
+++ b/Packs/AzureWAF/ReleaseNotes/1_1_24.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Web Application Firewall
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/AzureWAF/pack_metadata.json b/Packs/AzureWAF/pack_metadata.json
index f60fc73e222c..2ca908941407 100644
--- a/Packs/AzureWAF/pack_metadata.json
+++ b/Packs/AzureWAF/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Azure WAF",
"description": "Azure Web Application Firewall is used to detect web related attacks targeting your web servers hosted in azure and allow quick respond to threats",
"support": "xsoar",
- "currentVersion": "1.1.22",
+ "currentVersion": "1.1.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Base/.pack-ignore b/Packs/Base/.pack-ignore
index 0af5e65fab1e..dc5e874ecbec 100644
--- a/Packs/Base/.pack-ignore
+++ b/Packs/Base/.pack-ignore
@@ -86,4 +86,5 @@ clickable
[tests_require_network]
CommonServerPython
SanePdfReports
-CheckDockerImageAvailable
\ No newline at end of file
+CheckDockerImageAvailable
+enrichers
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_34_10.md b/Packs/Base/ReleaseNotes/1_34_10.md
new file mode 100644
index 000000000000..0dbf9fa857c1
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_10.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Fixed an issue where the script information was not available in cloud environments.
diff --git a/Packs/Base/ReleaseNotes/1_34_11.md b/Packs/Base/ReleaseNotes/1_34_11.md
new file mode 100644
index 000000000000..0d34ec44f800
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_11.md
@@ -0,0 +1,3 @@
+## Base
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_34_6.md b/Packs/Base/ReleaseNotes/1_34_6.md
new file mode 100644
index 000000000000..53693d741ca2
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_6.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServer
+
+Fixed the *tableToMarkdown* function to handle text with bad formatting.
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_34_7.md b/Packs/Base/ReleaseNotes/1_34_7.md
new file mode 100644
index 000000000000..abdd94494b76
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_7.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### SanePdfReports
+
+- Updated the Docker image to: *demisto/sane-pdf-reports:1.0.0.95847*.
+- Fixed an issue where SLA fields would be missing information from table sections when exported in CSV format.
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_34_8.json b/Packs/Base/ReleaseNotes/1_34_8.json
new file mode 100644
index 000000000000..6bdc004ee428
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_8.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Fixed an issue which caused email enrichers to save results in the wrong context key. `Email` key was changed to `Account.Email`. This change might affect the following integrations which output Email indicators to context:\n 1. Anomali Threat Stream V3\n 2. Cofense Intelligence V2\n 3. Eclectic IQ Intelligence Center V3\n 4. Email Hippo\n 5. IP Quality Score\n 6. MISP V3\n 7. Reversing Labs Titanium Cloud V2\n 8. SEKOIA Intelligence Center\n 9. Threat Crowd V2\n 10. Threat Zone\n 11. VM Ray"
+}
\ No newline at end of file
diff --git a/Packs/Base/ReleaseNotes/1_34_8.md b/Packs/Base/ReleaseNotes/1_34_8.md
new file mode 100644
index 000000000000..da97f5d68da7
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_8.md
@@ -0,0 +1,17 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+Fixed an issue which caused email enrichers to save results in the wrong context key. `Email` key was changed to `Account.Email`. This change might affect the following integrations which output Email indicators to context:
+- Anomali Threat Stream V3
+- Cofense Intelligence V2
+- Eclectic IQ Intelligence Center V3
+- Email Hippo
+- IP Quality Score
+- MISP V3
+- Reversing Labs Titanium Cloud V2
+- SEKOIA Intelligence Center
+- Threat Crowd V2
+- Threat Zone
+- VM Ray
diff --git a/Packs/Base/ReleaseNotes/1_34_9.md b/Packs/Base/ReleaseNotes/1_34_9.md
new file mode 100644
index 000000000000..7941cc15f848
--- /dev/null
+++ b/Packs/Base/ReleaseNotes/1_34_9.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### CommonServerPython
+
+- Added support for determining which script runs in the Docker container.
\ No newline at end of file
diff --git a/Packs/Base/Scripts/CommonServer/CommonServer.js b/Packs/Base/Scripts/CommonServer/CommonServer.js
index 945d22b78145..10fc6b17e3b4 100644
--- a/Packs/Base/Scripts/CommonServer/CommonServer.js
+++ b/Packs/Base/Scripts/CommonServer/CommonServer.js
@@ -278,19 +278,12 @@ function tableToMarkdown(name, t, headers, cellDelimiter, headerTransform) {
for(var i=0; i 1) {
- mdResults += newHeaders.join('|') + '\n';
- } else {
- mdResults += newHeaders[0] + '|' + '\n';
- }
+ mdResults += '| ' + newHeaders.join(' | ') + ' |' + '\n';
var sep = [];
headers.forEach(function(h){
sep.push('---');
});
- if (sep.length === 1) {
- sep[0] = sep[0]+'|';
- }
- mdResults += sep.join('|') + '\n';
+ mdResults += '| ' + sep.join(' | ') + ' |' + '\n';
t.forEach(function(entry){
var vals = [];
if(typeof(entry) !== 'object' && !(entry instanceof Array)){
@@ -305,10 +298,8 @@ function tableToMarkdown(name, t, headers, cellDelimiter, headerTransform) {
vals.push(stringEscapeMD(formatCell(entry[h], cellDelimiter), true, true) || ' ');
}
});
- if (vals.length === 1) {
- vals[0] = vals[0]+'|';
- }
- mdResults += vals.join(' | ') + '\n';
+
+ mdResults += '| ' + vals.join(' | ') + ' |' + '\n';
});
} else{
mdResults += 'No data returned\n';
diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
index c2292aff7940..4eacefb369bf 100644
--- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
+++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py
@@ -53,6 +53,17 @@ def __line__():
SEND_PREFIX = "send: b'"
SAFE_SLEEP_START_TIME = datetime.now()
+try:
+ if 'ExecutedCommands' in demisto.callingContext['context'] \
+ and demisto.callingContext['context']['ExecutedCommands'] is not None \
+ and len(demisto.callingContext['context']['ExecutedCommands']) > 0 \
+ and 'name' in demisto.callingContext['context']['ExecutedCommands'][0]:
+ context_executed_commands_name = demisto.callingContext['context']['ExecutedCommands'][0]['name']
+ with open('script_info.txt', 'w') as file_demisto_info:
+ file_demisto_info.write(context_executed_commands_name)
+except Exception as exc_script_info:
+ demisto.info('failed to save the script info.\nError: {}'.format(exc_script_info))
+
def register_module_line(module_name, start_end, line, wrapper=0):
"""
@@ -4256,7 +4267,7 @@ class EMAIL(Indicator):
:return: None
:rtype: ``None``
"""
- CONTEXT_PATH = 'Email(val.Address && val.Address == obj.Address)'
+ CONTEXT_PATH = 'Account(val.Email.Address && val.Email.Address == obj.Email.Address)'
def __init__(self, address, dbot_score, domain=None, blocked=None, relationships=None, description=None,
internal=None, stix_id=None, tags=None, traffic_light_protocol=None):
@@ -4282,7 +4293,7 @@ def __init__(self, address, dbot_score, domain=None, blocked=None, relationships
def to_context(self):
email_context = {
- 'Address': self.address
+ 'Email': {'Address': self.address}
}
if self.blocked:
diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
index ca2f4ebbbcbe..56789a9593b2 100644
--- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
+++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py
@@ -6,6 +6,7 @@
import re
import sys
import urllib
+import uuid
import warnings
import dateparser
@@ -1272,9 +1273,10 @@ def test_get_error_need_raise_error_on_non_error_input():
(b"binary data\x15\x00", b"binary data\x15\x00", "test.txt"),
]) # noqa: E124
def test_fileResult(mocker, request, data, data_expected, filename):
- mocker.patch.object(demisto, 'uniqueFile', return_value="test_file_result")
- mocker.patch.object(demisto, 'investigation', return_value={'id': '1'})
- file_name = "1_test_file_result"
+ file_id = str(uuid.uuid4())
+ mocker.patch.object(demisto, 'uniqueFile', return_value="fileresult")
+ mocker.patch.object(demisto, 'investigation', return_value={'id': file_id})
+ file_name = "{}_fileresult".format(file_id)
def cleanup():
try:
@@ -6904,13 +6906,13 @@ def test_email_indicator_type(self, mocker):
traffic_light_protocol='traffic_light_protocol_test'
)
assert email_context.to_context()[email_context.CONTEXT_PATH] == \
- {'Address': 'user@example.com',
+ {"Email": {'Address': 'user@example.com'},
'Domain': 'example.com',
- 'Description': 'test',
- 'Internal': True,
- 'STIXID': 'stix_id_test',
- 'Tags': ['tag1', 'tag2'],
- 'TrafficLightProtocol': 'traffic_light_protocol_test'}
+ 'Description': 'test',
+ 'Internal': True,
+ 'STIXID': 'stix_id_test',
+ 'Tags': ['tag1', 'tag2'],
+ 'TrafficLightProtocol': 'traffic_light_protocol_test'}
@pytest.mark.parametrize('item', [
'CommunityNotes', 'Publications', 'ThreatTypes'
diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
index 3bb422b6c638..04f6a4f5f1f5 100644
--- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
+++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml
@@ -51,7 +51,7 @@ tags:
- pdf
timeout: '0'
type: python
-dockerimage: demisto/sane-pdf-reports:1.0.0.93953
+dockerimage: demisto/sane-pdf-reports:1.0.0.95847
runas: DBotWeakRole
tests:
- No Test
diff --git a/Packs/Base/pack_metadata.json b/Packs/Base/pack_metadata.json
index 5dafc82af113..9a9cdf294bbe 100644
--- a/Packs/Base/pack_metadata.json
+++ b/Packs/Base/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Base",
"description": "The base pack for Cortex XSOAR.",
"support": "xsoar",
- "currentVersion": "1.34.5",
+ "currentVersion": "1.34.11",
"author": "Cortex XSOAR",
"serverMinVersion": "6.0.0",
"url": "https://www.paloaltonetworks.com/cortex",
diff --git a/Packs/Box/Integrations/BoxEventsCollector/README.md b/Packs/Box/Integrations/BoxEventsCollector/README.md
index eca4b8e20e36..3df285f024fa 100644
--- a/Packs/Box/Integrations/BoxEventsCollector/README.md
+++ b/Packs/Box/Integrations/BoxEventsCollector/README.md
@@ -7,6 +7,8 @@ Collect events from Box's logs.
The command is using the [events endpoint](https://developer.box.com/reference/get-events/) with enterprise login.
The user making the API call will need to have admin privileges, and the application will need to have the scope manage enterprise properties checked.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Box Event Collector on Cortex XSOAR
To acquire the "Credential JSON", you need to get a JWT token and an app from Box.
diff --git a/Packs/Box/pack_metadata.json b/Packs/Box/pack_metadata.json
index a431e6d859a5..40a037ac6cec 100644
--- a/Packs/Box/pack_metadata.json
+++ b/Packs/Box/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "BoxEventsCollector"
}
\ No newline at end of file
diff --git a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md
index b4c2d74b97d5..30aa1ac39e94 100644
--- a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md
+++ b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md
@@ -1,5 +1,7 @@
Endpoint Standard (formerly called Carbon Black Defense), a Next-Generation Anti-Virus + EDR. Collect Anti-Virus & EDR alerts and Audit Log Events.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Carbon Black Endpoint Standard Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/CarbonBlackDefense/pack_metadata.json b/Packs/CarbonBlackDefense/pack_metadata.json
index 8c6b0c1b76ea..b7023f6955fa 100644
--- a/Packs/CarbonBlackDefense/pack_metadata.json
+++ b/Packs/CarbonBlackDefense/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "CarbonBlackEndpointStandardEventCollector"
}
\ No newline at end of file
diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2.yml b/Packs/Censys/Integrations/CensysV2/CensysV2.yml
index 14e8d84c483a..1a0d8e7e6de7 100644
--- a/Packs/Censys/Integrations/CensysV2/CensysV2.yml
+++ b/Packs/Censys/Integrations/CensysV2/CensysV2.yml
@@ -29,7 +29,8 @@ configuration:
name: premium_access
type: 8
required: false
- section: Connect
+ section: Collect
+ advanced: true
additionalinfo: |-
Censys API provides reputation data exclusively to paid subscribers.
When set to True, the integration will use labels to determine the IP score.
@@ -60,6 +61,7 @@ configuration:
Labels to classify IP as Malicious.
Input can be an array or comma-separated values.
section: Collect
+ advanced: true
- display: IP Suspicious labels
name: suspicious_labels
type: 16
@@ -87,18 +89,21 @@ configuration:
Labels to classify IP as Suspicious.
Input can be an array or comma-separated values.
section: Collect
+ advanced: true
- display: Malicious labels threshold
name: malicious_labels_threshold
type: 0
required: false
additionalinfo: Determines the minimum number of labels returned that are classified as malicious for IP.
section: Collect
+ advanced: true
- display: Suspicious labels threshold
name: suspicious_labels_threshold
type: 0
required: false
additionalinfo: Determines the minimum number of labels returned that are classified as suspicious for IP.
section: Collect
+ advanced: true
- display: Source Reliability
name: integration_reliability
defaultvalue: C - Fairly reliable
@@ -112,7 +117,7 @@ configuration:
- E - Unreliable
- F - Reliability cannot be judged
additionalinfo: Reliability of the source providing the intelligence data.
- section: collect
+ section: Connect
description: Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the internet. Driven by internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, and certificates are configured and deployed.
display: Censys v2
name: CensysV2
diff --git a/Packs/Censys/ReleaseNotes/2_0_30.md b/Packs/Censys/ReleaseNotes/2_0_30.md
new file mode 100644
index 000000000000..7982fd1ff2ca
--- /dev/null
+++ b/Packs/Censys/ReleaseNotes/2_0_30.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Censys v2
+
+- Documentation and metadata improvements.
diff --git a/Packs/Censys/pack_metadata.json b/Packs/Censys/pack_metadata.json
index 2b44b209fb72..36093ec01b28 100644
--- a/Packs/Censys/pack_metadata.json
+++ b/Packs/Censys/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Censys",
"description": "Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the Internet. Driven by Internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, websites, and certificates are configured and deployed.",
"support": "xsoar",
- "currentVersion": "2.0.29",
+ "currentVersion": "2.0.30",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CertStream/Integrations/CertStream/CertStream_description.md b/Packs/CertStream/Integrations/CertStream/CertStream_description.md
index daec8e719d9e..e13d4715ca8a 100644
--- a/Packs/CertStream/Integrations/CertStream/CertStream_description.md
+++ b/Packs/CertStream/Integrations/CertStream/CertStream_description.md
@@ -4,6 +4,29 @@
CertStream is a service that provides real-time certificate transparency log updates. This integration allows ingesting CertStream data into our platform to detect new domain certificates in real-time.
+## Prerequisites
+
+Before using the `CertStream` integration, ensure that you have completed the following steps:
+
+1. **Create Domain's Homographs List**: Run the `Create list for PTH` playbook in the playground to generate a list of domains and their homographs or create the list manually with the expected format:
+
+```json
+{
+ "domain1": [
+ "domain1_homograph1",
+ "domain1_homograph2",
+ "domain1_homograph3"
+ ],
+ "domain2": [
+ "domain2_homograph1",
+ "domain2_homograph2",
+ "domain2_homograph3"
+ ]
+}
+```
+
+After the list is created in the valid format, proceed with configuring the integration instance.
+
## Usage
The integration connects to the CertStream public API server and watch the certificate transparency log.
diff --git a/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH.yml b/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH.yml
index d19243cf731c..119775bcbc73 100644
--- a/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH.yml
+++ b/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH.yml
@@ -17,129 +17,14 @@ tasks:
description: ""
nexttasks:
'#none#':
- - "5"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 450,
- "y": -160
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "1":
- id: "1"
- taskid: 13686d12-2dfd-4f15-8d6c-3cd04047eaba
- type: collection
- task:
- id: 13686d12-2dfd-4f15-8d6c-3cd04047eaba
- version: -1
- name: How would you like to create a list for PTH?
- description: ''
- type: collection
- iscommand: false
- brand: ""
- nexttasks:
- '#none#':
- - "2"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 450,
- "y": 200
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- message:
- to:
- subject:
- body:
- methods: []
- format: ""
- bcc:
- cc:
- timings:
- retriescount: 2
- retriesinterval: 360
- completeafterreplies: 1
- completeafterv2: true
- completeaftersla: false
- form:
- questions:
- - id: "0"
- label: ""
- labelarg:
- simple: Who would you like to create a list for PTH?
- required: false
- gridcolumns: []
- defaultrows: []
- type: singleSelect
- options: []
- optionsarg:
- - simple: Manual
- - simple: Xpanse
- fieldassociated: ""
- placeholder: ""
- tooltip: ""
- readonly: false
- title: How would you like to create a list for PTH
- description: ""
- sender: ""
- expired: false
- totalanswers: 0
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "2":
- id: "2"
- taskid: d3220b37-400f-4ae4-8ba9-51b9484c6a90
- type: condition
- task:
- id: d3220b37-400f-4ae4-8ba9-51b9484c6a90
- version: -1
- name: Check the answer
- description: ''
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "15"
- Manual:
- - "3"
+ - "28"
separatecontext: false
- conditions:
- - label: Manual
- condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: How would you like to create a list for PTH.Answers
- accessor: "0"
- iscontext: true
- right:
- value:
- simple: manual
- ignorecase: true
continueonerrortype: ""
view: |-
{
"position": {
"x": 450,
- "y": 400
+ "y": -850
}
}
note: false
@@ -151,66 +36,31 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 58b13177-4614-4efc-8bde-8e4f8568df27
- type: collection
+ taskid: 6e306b51-9434-4e89-87e3-8ca7848f70b2
+ type: title
task:
- id: 58b13177-4614-4efc-8bde-8e4f8568df27
+ id: 6e306b51-9434-4e89-87e3-8ca7848f70b2
version: -1
name: Add your website manually
- description: ''
- type: collection
+ type: title
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- - "20"
+ - "30"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 130,
- "y": 570
+ "y": 50
}
}
note: false
timertriggers: []
ignoreworker: false
- message:
- to:
- subject:
- body:
- methods: []
- format: ""
- bcc:
- cc:
- timings:
- retriescount: 2
- retriesinterval: 360
- completeafterreplies: 1
- completeafterv2: true
- completeaftersla: false
- form:
- questions:
- - id: "0"
- label: ""
- labelarg:
- simple: 'Add your website manually - use the command delimiter '
- required: false
- gridcolumns: []
- defaultrows: []
- type: shortText
- options: []
- optionsarg: []
- fieldassociated: ""
- placeholder: ""
- tooltip: ""
- readonly: false
- title: Add manually your website
- description: ""
- sender: ""
- expired: false
- totalanswers: 0
skipunavailable: false
quietmode: 0
isoversize: false
@@ -232,7 +82,7 @@ tasks:
'#default#':
- "3"
"yes":
- - "1"
+ - "45"
scriptarguments:
brandname:
simple: Cortex Xpanse
@@ -242,55 +92,7 @@ tasks:
{
"position": {
"x": 450,
- "y": -20
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "7":
- id: "7"
- taskid: f5374823-9891-470f-8003-f355274f8a85
- type: regular
- task:
- id: f5374823-9891-470f-8003-f355274f8a85
- version: -1
- name: 'Create List '
- description: commands.local.cmd.list.create
- script: Builtin|||createList
- type: regular
- iscommand: true
- brand: Builtin
- nexttasks:
- '#none#':
- - "9"
- scriptarguments:
- listData:
- complex:
- root: ASM.ExternalWebsite.host
- filters:
- - - operator: notContainsGeneral
- left:
- value:
- simple: ASM.ExternalWebsite.host
- iscontext: true
- right:
- value:
- simple: '..........NOTE, too much data to present, content was truncated.'
- ignorecase: true
- listName:
- simple: DomainsList
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 710,
- "y": 1575
+ "y": -360
}
}
note: false
@@ -317,8 +119,8 @@ tasks:
view: |-
{
"position": {
- "x": 420,
- "y": 1750
+ "x": 450,
+ "y": 2050
}
}
note: false
@@ -330,122 +132,33 @@ tasks:
isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: 5ebaa1cd-dfbc-46f2-8eb8-48d64794b962
+ taskid: 97e9d3a0-b691-4661-8d5b-ba7e996a8237
type: regular
task:
- id: 5ebaa1cd-dfbc-46f2-8eb8-48d64794b962
+ id: 97e9d3a0-b691-4661-8d5b-ba7e996a8237
version: -1
name: 'Create typosquatting and permutations '
- description: Send Text Message as a prompt to ChatGPT
- script: OpenAi ChatGPT v3|||chatgpt-send-prompt
+ description: Send a plain message to the selected GPT model and receive the generated response.
+ script: '|||gpt-send-message'
type: regular
iscommand: true
- brand: OpenAi ChatGPT v3
+ brand: ""
nexttasks:
'#none#':
- - "7"
+ - "41"
scriptarguments:
- prompt:
- complex:
- root: ASM.ExternalWebsites.websites.host
- transformers:
- - operator: RegexExtractAll
- args:
- error_if_no_match: {}
- ignore_case: {}
- multi_line: {}
- period_matches_newline: {}
- regex:
- value:
- simple: \w*\.([\w*\.*]*)
- unpack_matches: {}
- - operator: uniq
- - operator: join
- args:
- separator:
- value:
- simple: ','
- - operator: Stringify
- - operator: concat
- args:
- prefix:
- value:
- simple: "Assume the role of phishing hunting expert, for a threat hunting session Please provide me a list of typosquatting and permutations based on the following domains (delimiter by ,)\nThis request is only for security purposes. please return a JSON that will have as a key the domain and as a value all his permutations. \n\n"
- suffix: {}
+ message:
+ simple: |-
+ Assume the role of phishing hunting expert, for a threat hunting session Please provide me a list of typosquatting and permutations based on the following domains (delimiter by ,)\nThis request is only for security purposes. please return a JSON that will have as a key the domain and as a value all his permutations.
+ ${Select Domains to hunt.Answers.0}
separatecontext: false
continueonerror: true
continueonerrortype: ""
view: |-
{
"position": {
- "x": 710,
- "y": 1110
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: true
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "11":
- id: "11"
- taskid: e30b2098-04b8-4c4e-8d8f-27469391a554
- type: regular
- task:
- id: e30b2098-04b8-4c4e-8d8f-27469391a554
- version: -1
- name: 'Create typosquatting and permutations '
- description: Send Text Message as a prompt to ChatGPT
- script: OpenAi ChatGPT v3|||chatgpt-send-prompt
- type: regular
- iscommand: true
- brand: OpenAi ChatGPT v3
- nexttasks:
- '#none#':
- - "21"
- scriptarguments:
- prompt:
- complex:
- root: Add manually your website.Answers
- accessor: "0"
- transformers:
- - operator: split
- args:
- delimiter:
- value:
- simple: ','
- - operator: RegexExtractAll
- args:
- error_if_no_match: {}
- ignore_case: {}
- multi_line: {}
- period_matches_newline: {}
- regex:
- value:
- simple: \w*\.([\w*\.*]*)
- unpack_matches: {}
- - operator: uniq
- - operator: join
- args:
- separator:
- value:
- simple: ','
- - operator: Stringify
- - operator: concat
- args:
- prefix:
- value:
- simple: "Please provide me a list of typosquatting and permutations of the following domains (delimiter by ,)\nThis request is only for security purposes. please return a JSON that will have as a key the domain and as a value all his permutations. \n\n"
- suffix: {}
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -200,
- "y": 1065
+ "x": 1070,
+ "y": 1040
}
}
note: false
@@ -457,48 +170,19 @@ tasks:
isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: 02dabf52-2089-4f4d-8ef7-e129fbabe0d4
- type: regular
+ taskid: 9dcbd640-9dac-4404-87a1-2dc94e33651c
+ type: title
task:
- id: 02dabf52-2089-4f4d-8ef7-e129fbabe0d4
+ id: 9dcbd640-9dac-4404-87a1-2dc94e33651c
version: -1
- name: Create array list
+ name: Create Domains Homographs List
description: 'Will create an array object in context from given string input '
- scriptName: CreateArray
- type: regular
+ type: title
iscommand: false
brand: ""
nexttasks:
'#none#':
- "14"
- scriptarguments:
- arrayData:
- complex:
- root: Add manually your website.Answers
- accessor: "0"
- contextKey:
- complex:
- root: Add manually your website.Answers
- accessor: "0"
- transformers:
- - operator: RegexReplace
- args:
- action_dt: {}
- ignore_case: {}
- multi_line: {}
- output_format:
- value:
- simple: \1
- period_matches_newline: {}
- regex:
- value:
- simple: (^\S+)\.(com|net|co\.il)
- - operator: concat
- args:
- prefix:
- value:
- simple: DomainsList.
- suffix: {}
separatecontext: false
continueonerrortype: ""
view: |-
@@ -517,12 +201,12 @@ tasks:
isautoswitchedtoquietmode: false
"14":
id: "14"
- taskid: 100cddc4-8f20-4bd7-8458-0cf5c4e05019
+ taskid: 585f5b4b-2ebf-465d-846c-29adf1b2752a
type: regular
task:
- id: 100cddc4-8f20-4bd7-8458-0cf5c4e05019
+ id: 585f5b4b-2ebf-465d-846c-29adf1b2752a
version: -1
- name: 'Create List '
+ name: Create List Manually
description: commands.local.cmd.list.create
script: Builtin|||createList
type: regular
@@ -533,16 +217,16 @@ tasks:
- "9"
scriptarguments:
listData:
- simple: ${DomainsList}
+ simple: ${ListData}
listName:
- simple: DomainsList
+ simple: ${inputs.DomainsListName}
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 130,
- "y": 1575
+ "y": 1880
}
}
note: false
@@ -554,10 +238,10 @@ tasks:
isautoswitchedtoquietmode: false
"15":
id: "15"
- taskid: 03eaff10-f16c-45ff-8103-c4309934205b
+ taskid: 198862df-328f-45ab-8118-ac4abd68f570
type: regular
task:
- id: 03eaff10-f16c-45ff-8103-c4309934205b
+ id: 198862df-328f-45ab-8118-ac4abd68f570
version: -1
name: Get login pages from xpanse
description: Get external websites assets.
@@ -567,7 +251,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "22"
+ - "37"
scriptarguments:
authentication:
complex:
@@ -580,25 +264,25 @@ tasks:
view: |-
{
"position": {
- "x": 710,
- "y": 570
+ "x": 800,
+ "y": 50
}
}
note: false
timertriggers: []
ignoreworker: false
- skipunavailable: false
+ skipunavailable: true
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: a44b5181-763a-4acb-8170-e5383a2a1470
+ taskid: 92e8c889-2606-4dc6-878d-f0853e929e31
type: condition
task:
- id: a44b5181-763a-4acb-8170-e5383a2a1470
+ id: 92e8c889-2606-4dc6-878d-f0853e929e31
version: -1
- name: Manual\Xpanse permutation
+ name: Generate Homographs using LLM
description: ''
type: condition
iscommand: false
@@ -607,7 +291,7 @@ tasks:
'#default#':
- "13"
Use LLM Permutations:
- - "11"
+ - "42"
separatecontext: false
conditions:
- label: Use LLM Permutations
@@ -639,12 +323,12 @@ tasks:
isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 7bbae5e5-bd39-4d9f-8dd2-5fbd6c6826bd
+ taskid: c3e8ca3b-61d6-4376-8c77-cfb23be4f6d1
type: title
task:
- id: 7bbae5e5-bd39-4d9f-8dd2-5fbd6c6826bd
+ id: c3e8ca3b-61d6-4376-8c77-cfb23be4f6d1
version: -1
- name: Use LLM Permutations
+ name: Use LLM Homographs
type: title
iscommand: false
brand: ""
@@ -652,13 +336,14 @@ tasks:
nexttasks:
'#none#':
- "19"
+ - "42"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 130,
- "y": 750
+ "y": 720
}
}
note: false
@@ -668,43 +353,636 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "21":
- id: "21"
- taskid: 33d463ef-b2e8-47e6-80c8-474888850b30
- type: regular
+ "22":
+ id: "22"
+ taskid: 8d4c0b06-4b6f-4df5-838f-5d131004eb94
+ type: title
task:
- id: 33d463ef-b2e8-47e6-80c8-474888850b30
+ id: 8d4c0b06-4b6f-4df5-838f-5d131004eb94
version: -1
- name: Set to Context
- description: |-
- Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
-
- This automation runs using the default Limited User role, unless you explicitly change the permissions.
- For more information, see the section about permissions here:
- https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
- scriptName: SetAndHandleEmpty
- type: regular
+ name: Use LLM Homographs
+ type: title
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- - "13"
+ - "23"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 800,
+ "y": 720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: b7264512-7905-4a24-80cf-c6c42990e27a
+ type: condition
+ task:
+ id: b7264512-7905-4a24-80cf-c6c42990e27a
+ version: -1
+ name: Generate Homographs using LLM
+ description: ""
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "38"
+ Use LLM Permutations:
+ - "10"
+ separatecontext: false
+ conditions:
+ - label: Use LLM Permutations
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: inputs.LLMHomogrpahEnable
+ iscontext: true
+ right:
+ value:
+ simple: "true"
+ ignorecase: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 800,
+ "y": 870
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "28":
+ id: "28"
+ taskid: 85557240-fe2c-4300-8df3-ea44ca95ecd8
+ type: condition
+ task:
+ id: 85557240-fe2c-4300-8df3-ea44ca95ecd8
+ version: -1
+ name: Does the list already exist?
+ description: Check if the list already exists in the XSOAR lists.
+ scriptName: IsListExist
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "44"
+ "No":
+ - "5"
+ scriptarguments:
+ listName:
+ simple: ${inputs.DomainsListName}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -700
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "30":
+ id: "30"
+ taskid: 7e27f484-a492-4d28-8d28-a47cb89dddb2
+ type: condition
+ task:
+ id: 7e27f484-a492-4d28-8d28-a47cb89dddb2
+ version: -1
+ name: Add single/multiple domain
+ description: ""
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ Multiple:
+ - "33"
+ Single:
+ - "31"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 130,
+ "y": 180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ simple: Add single or multiple domain?
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - Single
+ - Multiple
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "31":
+ id: "31"
+ taskid: 5672824c-759a-4e36-83b0-c564b213fda6
+ type: collection
+ task:
+ id: 5672824c-759a-4e36-83b0-c564b213fda6
+ version: -1
+ name: Add a single domain to hunt
+ description: ""
+ type: collection
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "34"
+ scriptarguments:
+ extend-context:
+ simple: Answers=.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 340,
+ "y": 350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: Please provide the primary domain name?
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: shortText
+ options: []
+ optionsarg: []
+ fieldassociated: ""
+ placeholder: exampleDomain
+ tooltip: The domain name for which to hunt homographs. e.g. exampleDomain
+ readonly: false
+ - id: "1"
+ label: ""
+ labelarg:
+ simple: Please provide the list of homographs to match?
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: shortText
+ options: []
+ optionsarg: []
+ fieldassociated: ""
+ placeholder: ""
+ tooltip: 'Note: If using LLM, this field should be empty. The list of primary
+ domain''s homographs to match. e.g. ```examp1e, ex4mple, exampl3, xample```'
+ readonly: false
+ title: Add a single domain to hunt
+ description: ""
+ sender: ""
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "33":
+ id: "33"
+ taskid: c9a81353-1447-494d-8e8b-2c2701da63e0
+ type: collection
+ task:
+ id: c9a81353-1447-494d-8e8b-2c2701da63e0
+ version: -1
+ name: Add multiple domains to hunt
+ description: ""
+ type: collection
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ scriptarguments:
+ extend-context:
+ simple: ListData=.
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -90,
+ "y": 350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: Add your website manually - in JSON format (see example above)
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: shortText
+ options: []
+ optionsarg: []
+ fieldassociated: ""
+ placeholder: 'List of homographs to hunt. provided in the following format:
+ ```json { "domain1": [ "domain1_homograph1", "domain1_homograph2", "domain1_homograph3"], "domain2":
+ [ "domain2_homograph1", "domain2_homograph2", "domain2_homograph3" ] } ```'
+ tooltip: 'Note: If using LLM, Please change only the domain names and leave
+ the homographs lists with the examples'
+ readonly: false
+ title: Add multiple domains to hunt
+ description: |-
+ List of homographs to hunt. provided in the following format:
+ ```json
+ {
+ "domain1": [
+ "domain1_homograph1",
+ "domain1_homograph2",
+ "domain1_homograph3"
+ ],
+ "domain2": [
+ "domain2_homograph1",
+ "domain2_homograph2",
+ "domain2_homograph3"
+ ]
+ }
+ ```
+ sender: ""
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: 138e67da-b659-4c8d-8080-158073e0d7ff
+ type: regular
+ task:
+ id: 138e67da-b659-4c8d-8080-158073e0d7ff
+ version: -1
+ name: Set Answers to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "20"
+ scriptarguments:
+ append:
+ simple: "false"
+ key:
+ simple: ListData
+ value:
+ complex:
+ root: Add a single domain to hunt.Answers
+ accessor: "1"
+ transformers:
+ - operator: splitAndTrim
+ args:
+ delimiter:
+ value:
+ simple: ','
+ - operator: StringifyArray
+ - operator: concat
+ args:
+ prefix:
+ value:
+ simple: '":'
+ suffix: {}
+ - operator: concat
+ args:
+ prefix:
+ value:
+ simple: ${Add a single domain to hunt.Answers.0}
+ iscontext: true
+ suffix: {}
+ - operator: concat
+ args:
+ prefix:
+ value:
+ simple: '{"'
+ suffix:
+ value:
+ simple: '}'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 340,
+ "y": 510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "35":
+ id: "35"
+ taskid: d389df96-65c5-4052-896f-fecceea5e25e
+ type: regular
+ task:
+ id: d389df96-65c5-4052-896f-fecceea5e25e
+ version: -1
+ name: Set Answer to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "20"
+ scriptarguments:
+ append:
+ simple: "false"
+ key:
+ simple: ListData
+ value:
+ simple: ${Add multiple domains to hunt.Answers.0}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -90,
+ "y": 510
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "37":
+ id: "37"
+ taskid: 62ca251a-b45e-4cf0-8cb7-57f68f20ed78
+ type: collection
+ task:
+ id: 62ca251a-b45e-4cf0-8cb7-57f68f20ed78
+ version: -1
+ name: Which Domains should be added to DomainsList?
+ description: ""
+ type: collection
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "40"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 800,
+ "y": 350
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ form:
+ questions:
+ - id: "0"
+ label: ""
+ labelarg:
+ simple: Which Domains should be added to DomainsList?
+ required: false
+ gridcolumns: []
+ defaultrows: []
+ type: multiSelect
+ options: []
+ optionsarg:
+ - complex:
+ root: ASM.ExternalWebsite
+ accessor: host
+ transformers:
+ - operator: RegexExtractAll
+ args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: \w+\.\w+$
+ unpack_matches: {}
+ - operator: uniq
+ fieldassociated: ""
+ placeholder: ""
+ tooltip: ""
+ readonly: false
+ title: Select Domains to hunt
+ description: ""
+ sender: ""
+ expired: false
+ totalanswers: 0
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "38":
+ id: "38"
+ taskid: 985a7f3e-5bf2-40b7-8624-c31a5c22f405
+ type: regular
+ task:
+ id: 985a7f3e-5bf2-40b7-8624-c31a5c22f405
+ version: -1
+ name: Create List using Xpanse
+ description: commands.local.cmd.list.create
+ script: Builtin|||createList
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "46"
+ scriptarguments:
+ listData:
+ complex:
+ root: ListData
+ transformers:
+ - operator: concat
+ args:
+ prefix:
+ value:
+ simple: '{'
+ suffix:
+ value:
+ simple: '}'
+ listName:
+ simple: ${inputs.DomainsListName}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 800,
+ "y": 1630
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "40":
+ id: "40"
+ taskid: 6118deac-1287-4e72-857d-12cd04290c4f
+ type: regular
+ task:
+ id: 6118deac-1287-4e72-857d-12cd04290c4f
+ version: -1
+ name: Set Answers to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "22"
scriptarguments:
append:
simple: "false"
key:
- simple: Add manually your website.Answers.0
+ simple: ListData
+ stringify:
+ simple: "true"
value:
complex:
- root: ChatGPTResponse
- accessor: ChatGPT Response
+ root: Select Domains to hunt.Answers
+ accessor: "0"
+ transformers:
+ - operator: concat
+ args:
+ prefix:
+ value:
+ simple: '"'
+ suffix:
+ value:
+ simple: '":["homograph"]'
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": -200,
- "y": 1230
+ "x": 800,
+ "y": 510
}
}
note: false
@@ -714,28 +992,49 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "22":
- id: "22"
- taskid: 11db74ed-76d5-4328-86eb-4d0c697adfcd
- type: title
+ "41":
+ id: "41"
+ taskid: 7282fa92-17de-479b-85b2-699981763888
+ type: regular
task:
- id: 11db74ed-76d5-4328-86eb-4d0c697adfcd
+ id: 7282fa92-17de-479b-85b2-699981763888
version: -1
- name: Use LLM Permutations
- type: title
+ name: Set ListData to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
iscommand: false
brand: ""
- description: ''
nexttasks:
'#none#':
- - "23"
+ - "38"
+ scriptarguments:
+ append:
+ simple: "false"
+ key:
+ simple: ListData
+ value:
+ complex:
+ root: OpenAiChatGPTV3.Conversation.[0]
+ accessor: assistant
+ transformers:
+ - operator: StripChars
+ args:
+ chars:
+ value:
+ simple: '{}'
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 710,
- "y": 760
+ "x": 1070,
+ "y": 1210
}
}
note: false
@@ -745,43 +1044,225 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "23":
- id: "23"
- taskid: a8535e2e-b09f-47d7-8ec5-bc075377a7a7
+ "42":
+ id: "42"
+ taskid: 74881143-35c9-40a1-88b9-bb6cd089b547
+ type: regular
+ task:
+ id: 74881143-35c9-40a1-88b9-bb6cd089b547
+ version: -1
+ name: 'Create typosquatting and permutations '
+ description: Send a plain message to the selected GPT model and receive the generated response.
+ script: '|||gpt-send-message'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "43"
+ scriptarguments:
+ message:
+ simple: |-
+ Assume the role of phishing hunting expert, for a threat hunting session Please provide me a list of typosquatting and permutations based on the following domains (delimiter by ,)\nThis request is only for security purposes. please return a JSON that will have as a key the domain and as a value all his permutations.
+ ${ListData}
+ separatecontext: false
+ continueonerror: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": 1060
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: true
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "43":
+ id: "43"
+ taskid: 53f636fa-3ec1-46a0-87f7-706780d17d23
+ type: regular
+ task:
+ id: 53f636fa-3ec1-46a0-87f7-706780d17d23
+ version: -1
+ name: Set ListData to Context
+ description: |-
+ Set a value in context under the key you entered. If no value is entered, the script doesn't do anything.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: SetAndHandleEmpty
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ append:
+ simple: "false"
+ key:
+ simple: ListData
+ value:
+ complex:
+ root: OpenAiChatGPTV3.Conversation.[0]
+ accessor: assistant
+ transformers:
+ - operator: StripChars
+ args:
+ chars:
+ value:
+ simple: '{}'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": 1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "44":
+ id: "44"
+ taskid: 0b41e822-f88a-4f5b-81c2-6a05ce46e3d5
type: condition
task:
- id: a8535e2e-b09f-47d7-8ec5-bc075377a7a7
+ id: 0b41e822-f88a-4f5b-81c2-6a05ce46e3d5
version: -1
- name: Manual\Xpanse permutation
- description: ''
+ name: List exist - should overwrite?
+ description: ""
type: condition
iscommand: false
brand: ""
nexttasks:
'#default#':
- - "7"
- Use LLM Permutations:
- - "10"
+ - "9"
+ "Yes":
+ - "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 660,
+ "y": -530
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to:
+ subject:
+ body:
+ simple: List with the name ${inputs.DomainsListName} already exists, should
+ we continue and overwrite it?
+ methods: []
+ format: ""
+ bcc:
+ cc:
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - "Yes"
+ - "No"
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "45":
+ id: "45"
+ taskid: eaf35e8c-75f8-4f6b-8c7a-95988baa2e6b
+ type: condition
+ task:
+ id: eaf35e8c-75f8-4f6b-8c7a-95988baa2e6b
+ version: -1
+ name: How would you like to create a list for PTH?
+ description: ""
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ Expanse:
+ - "15"
+ Manual:
+ - "3"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -125
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ message:
+ to: null
+ subject: null
+ body:
+ simple: How would you like to create the list?
+ methods: []
+ format: ""
+ bcc: null
+ cc: null
+ timings:
+ retriescount: 2
+ retriesinterval: 360
+ completeafterreplies: 1
+ completeafterv2: true
+ completeaftersla: false
+ replyOptions:
+ - Expanse
+ - Manual
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "46":
+ id: "46"
+ taskid: 463488fd-2958-48dc-8092-95fa6cc14a84
+ type: regular
+ task:
+ id: 463488fd-2958-48dc-8092-95fa6cc14a84
+ version: -1
+ name: Add Domain Homographs Manually
+ description: "Almost Done! Now go over to the XSOAR Lists page and manually
+ replace the \"homograph\" placeholder for each domain to hunt. \n\nThe List
+ Name: ${inputs.DomainsListName}\nExpected List Format:\n{\n \"domain1\":
+ [\n \"d0main1\",\n \"doma1n1\",\n \"domainn1\"\n ],\n \"domain2\":
+ [\n \"dOmain2\",\n \"domaוm2\",\n \"6oma!n2\"\n ]\n}"
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "9"
separatecontext: false
- conditions:
- - label: Use LLM Permutations
- condition:
- - - operator: isEqualString
- left:
- value:
- complex:
- root: inputs.LLMHomogrpahEnable
- iscontext: true
- right:
- value:
- simple: "true"
- ignorecase: true
continueonerrortype: ""
view: |-
{
"position": {
- "x": 710,
- "y": 890
+ "x": 800,
+ "y": 1880
}
}
note: false
@@ -791,19 +1272,23 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+system: true
view: |-
{
"linkLabelsPosition": {
- "19_11_Use LLM Permutations": 0.59,
+ "19_42_Use LLM Permutations": 0.5,
"23_10_Use LLM Permutations": 0.5,
- "2_3_Manual": 0.56
+ "28_44_#default#": 0.59,
+ "28_5_No": 0.4,
+ "30_31_Single": 0.48,
+ "30_33_Multiple": 0.54
},
"paper": {
"dimensions": {
- "height": 1975,
- "width": 1290,
- "x": -200,
- "y": -160
+ "height": 2965,
+ "width": 1590,
+ "x": -140,
+ "y": -850
}
}
}
@@ -815,7 +1300,8 @@ inputs:
description: Number of websites results to return
playbookInputQuery:
- key: AuthenticationType
- value: {}
+ value:
+ simple: Form
required: false
description: The authentication type of the returned websites. default is all
playbookInputQuery:
@@ -825,7 +1311,27 @@ inputs:
required: false
description: Enable/Disable using LLM (default to chatGPT) to generate homographic permutations of the domain to hunt
playbookInputQuery:
+- key: DomainsListName
+ value:
+ simple: DomainsList
+ required: false
+ description: The name for the list of domain homographs
+ playbookInputQuery:
outputs: []
tests:
- No tests (auto formatted)
-fromversion: 6.10.0
\ No newline at end of file
+fromversion: 6.10.0
+contentitemexportablefields:
+ contentitemfields: {}
+inputSections:
+- inputs:
+ - ResultsLimit
+ - AuthenticationType
+ - LLMHomogrpahEnable
+ - DomainsListName
+ name: General (Inputs group)
+ description: Generic group for inputs
+outputSections:
+- outputs: []
+ name: General (Outputs group)
+ description: Generic group for outputs
diff --git a/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH_README.md b/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH_README.md
index 71405e257119..003164a96813 100644
--- a/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH_README.md
+++ b/Packs/CertStream/Playbooks/playbook-Create_list_for_PTH_README.md
@@ -10,19 +10,19 @@ This playbook does not use any sub-playbooks.
### Integrations
-* OpenAi ChatGPT v3
+This playbook does not use any integrations.
### Scripts
-* SetAndHandleEmpty
* IsIntegrationAvailable
-* CreateArray
+* SetAndHandleEmpty
+* IsListExist
### Commands
-* asm-list-external-websites
* createList
-* chatgpt-send-prompt
+* asm-list-external-websites
+* gpt-send-message
## Playbook Inputs
@@ -31,8 +31,9 @@ This playbook does not use any sub-playbooks.
| **Name** | **Description** | **Default Value** | **Required** |
| --- | --- | --- | --- |
| ResultsLimit | Number of websites results to return | 100 | Optional |
-| AuthenticationType | The authentication type of the returned websites. default is all | | Optional |
+| AuthenticationType | The authentication type of the returned websites. default is all | Form | Optional |
| LLMHomogrpahEnable | Enable/Disable using LLM \(default to chatGPT\) to generate homographic permutations of the domain to hunt | False | Optional |
+| DomainsListName | The name for the list of domain homographs | DomainsList | Optional |
## Playbook Outputs
diff --git a/Packs/CertStream/README.md b/Packs/CertStream/README.md
index dcfaa8f03fc2..361e5f523c11 100644
--- a/Packs/CertStream/README.md
+++ b/Packs/CertStream/README.md
@@ -12,4 +12,48 @@ What does this pack do?
- Triggers incidents for certificates with high suspicion scores
- Provides analysts with detailed certificate information to investigate incidents
-This pack contains a Certstream integration, parsing scripts, threat intel integrations, and a playbook to generate list of domain names to streamline the end-to-end workflow.
\ No newline at end of file
+This pack contains a Certstream integration, parsing scripts, threat intel integrations, and a playbook to generate list of domain names to streamline the end-to-end workflow.
+
+## Prerequisites
+
+Before using the Certstream Pack and integration, ensure that you have completed the following steps:
+
+1. **Create Domain's Homographs List**: Run the `Create list for PTH` playbook in playground to generate a list of domains and their homographs or create the list manually in the expected format:
+
+```json
+{
+ "domain1": [
+ "domain1_homograph1",
+ "domain1_homograph2",
+ "domain1_homograph3"
+ ],
+ "domain2": [
+ "domain2_homograph1",
+ "domain2_homograph2",
+ "domain2_homograph3"
+ ]
+}
+```
+
+After the list is created in the valid format, proceed with configuring the integration instance.
+
+## Integration Configuration
+
+To configure the Certstream integration in XSOAR, follow these steps:
+
+1. Access the **Settings** tab in XSOAR.
+2. Select **Integrations** > **Instances** and search for the Certstream integration.
+3. Click on the **Add instance** button and configure the instance by providing the following information:
+ - **Homograph list name**: Specify the name of the list generated manually or using the playbook in the prerequisites. This list contains the domains and homographs to be matched against the Certstream Transparency Log.
+ - **Homograph list update time interval**: Optionally, you can set the time interval at which the integration checks for changes in the list.
+ - **Levenshtein distance threshold**: Optionally, change the default threshold for matching homographs against the certificates' domain names
+
+That's it! Once the integration is properly configured, it will continuously monitor the Certstream Transparency Log for any matches with the provided list of domains and homographs.
+
+## Incident Creation
+
+When a match is found between the Certstream Transparency Log and the list of domains and homographs, a new incident will be automatically created in XSOAR. The incident will contain all the relevant data related to the identified suspicious domain, allowing your team to promptly respond and mitigate any potential threats.
+
+Please note that it is essential to regularly update and maintain the list of domains and homographs to ensure accurate and effective monitoring of potential security risks.
+
+Please consider downloading and using the `Suspicious Domain Hunting` pack to automate the handling and investigation of this type of incident.
\ No newline at end of file
diff --git a/Packs/CertStream/ReleaseNotes/1_0_1.md b/Packs/CertStream/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..5e9a0875bb16
--- /dev/null
+++ b/Packs/CertStream/ReleaseNotes/1_0_1.md
@@ -0,0 +1,14 @@
+
+#### Playbooks
+
+##### Create list for PTH
+
+- Fixed an issue in the playbook logic which resulted in a broken format of the domains' list
+- Improved the implementation to provide a better user experience
+
+#### Integrations
+
+##### Kali Dog Security CertStream
+
+- Updated the integration description.
+
diff --git a/Packs/CertStream/doc_files/Create_list_for_PTH.png b/Packs/CertStream/doc_files/Create_list_for_PTH.png
index 15742fda73e4..b5480acc3dde 100644
Binary files a/Packs/CertStream/doc_files/Create_list_for_PTH.png and b/Packs/CertStream/doc_files/Create_list_for_PTH.png differ
diff --git a/Packs/CertStream/pack_metadata.json b/Packs/CertStream/pack_metadata.json
index 60e22b01772b..12d58f5f3e17 100644
--- a/Packs/CertStream/pack_metadata.json
+++ b/Packs/CertStream/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CertStream",
"description": "Gets a stream of newly created certificates from Certificate Transparency (https://certificate.transparency.dev/)",
"support": "community",
- "currentVersion": "1.0.0",
+ "currentVersion": "1.0.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -19,5 +19,29 @@
],
"marketplaces": [
"xsoar"
+ ],
+ "dependencies": {
+ "CortexXpanse": {
+ "mandatory": false,
+ "display_name": "Cortex Xpanse"
+ },
+ "CommonScripts": {
+ "mandatory": false,
+ "display_name": "Common Scripts"
+ },
+ "FiltersAndTransformers": {
+ "mandatory": true,
+ "display_name": "Filters And Transformers"
+ },
+ "OpenAI": {
+ "mandatory": false,
+ "display_name": "OpenAI"
+ }
+ },
+ "displayedImages": [
+ "CortexXpanse",
+ "CommonScripts",
+ "FiltersAndTransformers",
+ "OpenAI"
]
}
\ No newline at end of file
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py
index d69282147ec6..493327ba477a 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py
@@ -416,11 +416,14 @@ def fetch_incidents(client: Client, first_fetch: str, saas_apps: List[str], stat
def checkpointhec_get_entity(client: Client, entity: str) -> CommandResults:
result = client.get_entity(entity)
- if entities := result['responseData']:
+ if entities := result.get('responseData'):
+ entity = entities[0]['entityPayload']
+ human_readable = tableToMarkdown('entity', entity, removeNull=True)
return CommandResults(
outputs_prefix='CheckPointHEC.Entity',
outputs_key_field='internetMessageId',
- outputs=entities[0]['entityPayload']
+ readable_output=human_readable,
+ outputs=entity,
)
else:
return CommandResults(
@@ -435,11 +438,14 @@ def checkpointhec_get_events(client: Client, start_date: str, end_date: str = No
start_date=start_date, end_date=end_date, saas_apps=saas_apps, states=states, severities=severities,
threat_types=threat_types
)
- if events := result['responseData']:
+ if events := result.get('responseData'):
+ _events = events[:min(limit, len(events))]
+ human_readable = tableToMarkdown('events', _events, removeNull=True)
return CommandResults(
outputs_prefix='CheckPointHEC.Event',
outputs_key_field='eventId',
- outputs=events[:min(limit, len(events))]
+ readable_output=human_readable,
+ outputs=_events,
)
else:
return CommandResults(
@@ -450,7 +456,7 @@ def checkpointhec_get_events(client: Client, start_date: str, end_date: str = No
def checkpointhec_get_scan_info(client: Client, entity: str) -> CommandResults:
result = client.get_entity(entity)
outputs = {}
- if entities := result['responseData']:
+ if entities := result.get('responseData'):
sec_result = entities[0]['entitySecurityResult']
for tool, verdict in sec_result['combinedVerdict'].items():
if verdict not in (None, 'clean'):
@@ -540,7 +546,7 @@ def checkpointhec_search_emails(client: Client, date_last: str = None, date_from
def checkpointhec_send_action(client: Client, entities: list, entity_type: str, action: str) -> CommandResults:
result = client.send_action(entities, entity_type, action)
- if resp := result['responseData']:
+ if resp := result.get('responseData'):
return CommandResults(
outputs_prefix='CheckPointHEC.Task',
outputs={'task': resp[0]['taskId']}
@@ -553,7 +559,7 @@ def checkpointhec_send_action(client: Client, entities: list, entity_type: str,
def checkpointhec_get_action_result(client: Client, task: str) -> CommandResults:
result = client.get_task(task)
- if resp := result['responseData']:
+ if resp := result.get('responseData'):
return CommandResults(
outputs_prefix='CheckPointHEC.ActionResult',
outputs=resp
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
index 8ad018354c8d..928a4824ef4c 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml
@@ -11,10 +11,18 @@ configuration:
options:
- https://smart-api-production-1-us.avanan.net
- https://smart-api-production-1-eu.avanan.net
+ - https://smart-api-production-5-ap.avanan.net
- https://smart-api-production-1-ca.avanan.net
- - https://cloudinfra-gw.portal.checkpoint.com
+ - https://smart-api-production-1-euw2.avanan.net
+ - https://smart-api-production-1-mec1.avanan.net
+ - https://smart-api-production-1-aps1.avanan.net
- https://cloudinfra-gw-us.portal.checkpoint.com
+ - https://cloudinfra-gw.portal.checkpoint.com
+ - https://cloudinfra-gw.ca.portal.checkpoint.com
- https://cloudinfra-gw.ap.portal.checkpoint.com
+ - https://cloudinfra-gw.uk.portal.checkpoint.com
+ - https://cloudinfra-gw.me.portal.checkpoint.com
+ - https://cloudinfra-gw.in.portal.checkpoint.com
required: true
- section: Collect
display: Fetch incidents
@@ -638,7 +646,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.14.91134
+ dockerimage: demisto/python3:3.10.14.95137
fromversion: 6.9.0
tests:
- No tests (auto formatted)
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
index 5c5f4288f7e9..4efbed4ebf99 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC_description.md
@@ -2,15 +2,23 @@
To set up a Check Point HEC instance, please provide the SMART API url based on your portal's region:
-* US: https://smart-api-production-1-us.avanan.net
-* EU: https://smart-api-production-1-eu.avanan.net
-* CA: https://smart-api-production-1-ca.avanan.net
+* USA: https://smart-api-production-1-us.avanan.net
+* Europe: https://smart-api-production-1-eu.avanan.net
+* Australia: https://smart-api-production-5-ap.avanan.net
+* Canada: https://smart-api-production-1-ca.avanan.net
+* United Kingdom (UK): https://smart-api-production-1-euw2.avanan.net
+* United Arab Emirates (UAE): https://smart-api-production-1-mec1.avanan.net
+* India: https://smart-api-production-1-aps1.avanan.net
You can also use Check Point Infinity API Credentials instead of the SMART API. To do so, please provide one of the following urls based on your region:
-* EU: https://cloudinfra-gw.portal.checkpoint.com
-* US: https://cloudinfra-gw-us.portal.checkpoint.com
-* AU: https://cloudinfra-gw.ap.portal.checkpoint.com
+* USA: https://cloudinfra-gw-us.portal.checkpoint.com
+* Europe: https://cloudinfra-gw.portal.checkpoint.com
+* Australia: https://cloudinfra-gw.ap.portal.checkpoint.com
+* Canada: https://cloudinfra-gw.ca.portal.checkpoint.com
+* United Kingdom (UK): https://cloudinfra-gw.uk.portal.checkpoint.com
+* United Arab Emirates (UAE): https://cloudinfra-gw.me.portal.checkpoint.com
+* India: https://cloudinfra-gw.in.portal.checkpoint.com
If you have several portals in different regions, you will need to use an instance per region
diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
index 71648e0b07bd..97611ae7cdc0 100644
--- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
+++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md
@@ -1,5 +1,5 @@
The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc.
-This integration was integrated and tested with version 1.1.1 of CheckPointHEC
+This integration was integrated and tested with version 1.1.3 of CheckPointHEC
## Configure Check Point Harmony Email and Collaboration (HEC) on Cortex XSOAR
@@ -27,6 +27,7 @@ This integration was integrated and tested with version 1.1.1 of CheckPointHEC
4. Click **Test** to validate the URLs, token, and connection.
+
## Commands
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
@@ -349,4 +350,3 @@ Retrieve security events.
| CheckPointHEC.Event.actions | unknown | Performed actions related to the security event. |
| CheckPointHEC.Event.senderAddress | String | Sender of email related to the security event. |
| CheckPointHEC.Event.entityLink | String | Email link. |
-
diff --git a/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md b/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md
new file mode 100644
index 000000000000..61cb616f8020
--- /dev/null
+++ b/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md
@@ -0,0 +1,27 @@
+
+#### Scripts
+
+##### SendCPAction
+
+- Fixed an issue where the script would not work properly when there are multiple instances running.
+- Updated the Docker image to: *demisto/python3:3.10.14.95137*.
+
+##### ShowCPEmailInfo
+
+- Fixed an issue where the script would not work properly when there are multiple instances running.
+- Updated the Docker image to: *demisto/python3:3.10.14.95137*.
+
+##### ShowCPScanInfo
+
+- Fixed an issue where the script would not work properly when there are multiple instances running.
+- Updated the Docker image to: *demisto/python3:3.10.14.95137*.
+
+
+#### Integrations
+
+##### Check Point Harmony Email and Collaboration (HEC)
+
+- Updated the human-readable section in the following commands:
+  - **checkpointhec-get-entity**
+  - **checkpointhec-get-events**
+- Updated the Docker image to: *demisto/python3:3.10.14.95137*.
diff --git a/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md b/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md
new file mode 100644
index 000000000000..3606865a1994
--- /dev/null
+++ b/Packs/CheckPointHEC/ReleaseNotes/1_1_3.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Check Point Harmony Email and Collaboration (HEC)
+
+- Updated the URL options in the Connect section of the instance configuration to support more regions.
+
diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py
index 56cbab980d44..ef243833b075 100644
--- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py
+++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py
@@ -1,12 +1,13 @@
from CommonServerPython import *
-def send_action_and_update_incident(entity: str, action: str):
+def send_action_and_update_incident(entity: str, action: str, incident: str):
result = demisto.executeCommand(
"checkpointhec-send-action",
{
'entity': entity,
'action': action,
+ 'using': incident
}
)
demisto.executeCommand(
@@ -22,10 +23,11 @@ def send_action_and_update_incident(entity: str, action: str):
def main(): # pragma: no cover
try:
+ incident = demisto.incident()['sourceInstance']
args = demisto.args()
entity = args.get('entity')
action = args.get('action')
- return_results(send_action_and_update_incident(entity, action))
+ return_results(send_action_and_update_incident(entity, action, incident))
except Exception as ex:
demisto.error(traceback.format_exc())
return_error(f'Failed to execute BaseScript. Error: {str(ex)}')
diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml
index 97ae8386a505..7c5c36d3de39 100644
--- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml
+++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml
@@ -25,7 +25,7 @@ dependson:
must:
- CheckPointHEC|||checkpointhec-send-action
runonce: false
-dockerimage: demisto/python3:3.10.14.91134
+dockerimage: demisto/python3:3.10.14.95137
runas: DBotWeakRole
fromversion: 6.9.0
tests:
diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py
index 89bfedfda279..a75531af48d6 100644
--- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py
+++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py
@@ -14,5 +14,5 @@ def execute_command(name, args):
mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
- result = send_action_and_update_incident('0000', 'quarantine')
+ result = send_action_and_update_incident('0000', 'quarantine', 'CheckPointHEC-instance-1')
assert result == [{'Contents': {'task': 1}}]
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py
index 6129adfaad39..2e736b1ffc9c 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py
+++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py
@@ -30,21 +30,29 @@ def dict_to_md(info: dict) -> str:
return '\n'.join(lines)
-def get_email_info(entity: str):
+def get_email_info(entity: str, instance: str) -> tuple[bool, str]:
email_info = demisto.executeCommand(
"checkpointhec-get-entity",
- {'entity': entity}
+ {'entity': entity, 'using': instance}
)[0]['Contents']
- return dict_to_md(email_info)
+ if isinstance(email_info, str):
+ return False, email_info
+
+ return True, dict_to_md(email_info)
def main(): # pragma: no cover
try:
- custom_fields = demisto.incident()['CustomFields']
+ incident = demisto.incident()
+ instance = incident['sourceInstance']
+ custom_fields = incident['CustomFields']
if not (email_info := custom_fields.get(EMAIL_INFO_FIELD)):
entity = custom_fields.get('checkpointhecentity')
- email_info = get_email_info(entity)
+ success, email_info = get_email_info(entity, instance)
+ if not success:
+ raise Exception(email_info)
+
demisto.executeCommand(
"setIncident",
{
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml
index 9100b7682ab3..d0ad6bdd5520 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml
+++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml
@@ -13,7 +13,7 @@ dependson:
must:
- CheckPointHEC|||checkpointhec-get-entity
runonce: false
-dockerimage: demisto/python3:3.10.14.91134
+dockerimage: demisto/python3:3.10.14.95137
runas: DBotWeakRole
fromversion: 6.9.0
tests:
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py
index f9307351b51f..19b5b6c97787 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py
+++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py
@@ -9,7 +9,23 @@ def util_load_json(path):
return json.loads(f.read())
-def test_get_email_info(mocker):
+def test_get_email_info_error(mocker):
+ error = 'Error: Entity not found'
+
+ def execute_command(name, args):
+ if name == 'checkpointhec-get-entity':
+ return [{'Contents': error}]
+
+ raise ValueError(f'Error: Unknown command or command/argument pair: {name} {args!r}')
+
+ mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
+
+ success, email_info = get_email_info('0000', 'CheckPointHEC-instance-1')
+ assert success is False
+ assert email_info == error
+
+
+def test_get_email_info_success(mocker):
mock_response = util_load_json('./test_data/checkpointhec-get_entity.json')
def execute_command(name, args):
@@ -23,7 +39,8 @@ def execute_command(name, args):
mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
- email_info = get_email_info('0000')
+ success, email_info = get_email_info('0000', 'CheckPointHEC-instance-1')
+ assert success is True
assert email_info == dict_to_md(mock_response['responseData'][0]['entityPayload'])
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py
index 43c982e8542b..f2ff757b32ac 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py
+++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py
@@ -3,24 +3,32 @@
SCAN_INFO_FIELD = 'checkpointhecscaninfo'
-def get_scan_info(entity: str) -> str:
+def get_scan_info(entity: str, instance: str) -> tuple[bool, str]:
scan_info = demisto.executeCommand(
"checkpointhec-get-scan-info",
- {'entity': entity}
+ {'entity': entity, 'using': instance}
)[0]['Contents']
+ if isinstance(scan_info, str):
+ return False, scan_info
+
for k, v in scan_info.items():
scan_info[k] = json.loads(v)
- return json.dumps(scan_info)
+ return True, json.dumps(scan_info)
def main(): # pragma: no cover
try:
- custom_fields = demisto.incident()['CustomFields']
+ incident = demisto.incident()
+ instance = incident['sourceInstance']
+ custom_fields = incident['CustomFields']
if not (scan_info := custom_fields.get(SCAN_INFO_FIELD)):
entity = custom_fields.get('checkpointhecentity')
- scan_info = get_scan_info(entity)
+ success, scan_info = get_scan_info(entity, instance)
+ if not success:
+ raise Exception(scan_info)
+
demisto.executeCommand(
"setIncident",
{
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml
index e0784aa78a96..e26220b550eb 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml
+++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml
@@ -13,7 +13,7 @@ dependson:
must:
- CheckPointHEC|||checkpointhec-get-scan-info
runonce: false
-dockerimage: demisto/python3:3.10.14.91134
+dockerimage: demisto/python3:3.10.14.95137
runas: DBotWeakRole
fromversion: 6.9.0
tests:
diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py
index 5de10417f43d..9dc68579ffdc 100644
--- a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py
+++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py
@@ -9,7 +9,23 @@ def util_load_json(path):
return json.loads(f.read())
-def test_get_scan_info(mocker):
+def test_get_scan_info_error(mocker):
+ error = 'Error: Entity not found'
+
+ def execute_command(name, args):
+ if name == 'checkpointhec-get-scan-info':
+ return [{'Contents': error}]
+
+ raise ValueError(f'Error: Unknown command or command/argument pair: {name} {args!r}')
+
+ mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
+
+ success, scan_info = get_scan_info('0000', 'CheckPointHEC-instance-1')
+ assert success is False
+ assert scan_info == error
+
+
+def test_get_scan_info_success(mocker):
mock_response = util_load_json('./test_data/checkpointhec-get_entity.json')
def execute_command(name, args):
@@ -20,5 +36,6 @@ def execute_command(name, args):
mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
- result = get_scan_info('0000')
- assert result == json.dumps({'av': mock_response['responseData'][0]['entitySecurityResult']['av']})
+ success, scan_info = get_scan_info('0000', 'CheckPointHEC-instance-1')
+ assert success is True
+ assert scan_info == json.dumps({'av': mock_response['responseData'][0]['entitySecurityResult']['av']})
diff --git a/Packs/CheckPointHEC/pack_metadata.json b/Packs/CheckPointHEC/pack_metadata.json
index f588d515a413..04166aaee2f3 100644
--- a/Packs/CheckPointHEC/pack_metadata.json
+++ b/Packs/CheckPointHEC/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Check Point Harmony Email and Collaboration (HEC)",
"description": "The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc.",
"support": "partner",
- "currentVersion": "1.1.1",
+ "currentVersion": "1.1.3",
"author": "Check Point Harmony Email & Collaboration (HEC)",
"url": "https://supportcenter.checkpoint.com/",
"email": "EmailSecurity_Support@checkpoint.com",
diff --git a/Packs/CiscoASA/ModelingRules/CiscoASA_1_4/CiscoASA_1_4.xif b/Packs/CiscoASA/ModelingRules/CiscoASA_1_4/CiscoASA_1_4.xif
index 0d117e8d8b2a..e12b42dd6dac 100644
--- a/Packs/CiscoASA/ModelingRules/CiscoASA_1_4/CiscoASA_1_4.xif
+++ b/Packs/CiscoASA/ModelingRules/CiscoASA_1_4/CiscoASA_1_4.xif
@@ -677,7 +677,8 @@ config case_sensitive = true
src_group2 = arrayindex(regextract(_raw_log ,"GroupPolicy\s\<([^\>]+)\>"),0),
tunnel_group = arrayindex(regextract(_raw_log,"\sTunnelGroup\s\<([^\>]+)\>"),0),
src_username = arrayindex(regextract(_raw_log ,"User\s\<([^\>]+)\>"),0),
- src_ipv4 = arrayindex(regextract(_raw_log ,"IP\s\<(\d+\.\d+\.\d+\.\d+)\>"),0),
+ IP = arrayindex(regextract(_raw_log ,"IP\s\<(\d+\.\d+\.\d+\.\d+)\>"),0),
+ src_ipv4 = arrayindex(regextract(_raw_log ,"IPv4\sAddress\s\<([^\>]+)\>"),0),
src_ipv6 = arrayindex(regextract(_raw_log ,"IPv6\saddress\s\<([^\>]+)\>"),0),
protocol1 = arrayindex(regextract(_raw_log ,"IP\s\<[^\>]+\>\s([UDP|TCP]+)\s"),0),
protocol2 = arrayindex(regextract(_raw_log ,"IP\s\<[^\>]+\>\s\w+\s([UDP|TCP]+)\s"),0),
@@ -699,8 +700,10 @@ config case_sensitive = true
xdm.source.user_agent = user_agent,
xdm.event.operation_sub_type = if(type_num = 0, "Normal", type_num = 16, "Logout", type_num = 17 ,"Closed due to error", type_num = 18, "Closed due to rekey", null ),
xdm.event.log_level = if(event_log_level = "INFO", XDM_CONST.LOG_LEVEL_INFORMATIONAL ,event_log_level = "ERROR",XDM_CONST.LOG_LEVEL_ERROR ,event_log_level = "NOTICE",XDM_CONST.LOG_LEVEL_NOTICE,event_log_level = "WARNING",XDM_CONST.LOG_LEVEL_WARNING , null),
- xdm.source.ipv6 = src_ipv6,
- xdm.source.ipv4 = src_ipv4,
+ xdm.source.ipv4 = if(IP ~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", IP, null),
+ xdm.source.ipv6 = if(IP ~= "[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}[a-fA-F0-9\:]{1,5}", IP, null),
+ xdm.source.host.ipv4_addresses = arraycreate(src_ipv4),
+ xdm.source.host.ipv6_addresses = if(src_ipv6 != "::", arraycreate(src_ipv6), null),
xdm.event.outcome = if(authentication_outcome = "successful", XDM_CONST.OUTCOME_SUCCESS,authentication_outcome = "rejected",XDM_CONST.OUTCOME_FAILED, to_String(authentication_outcome )),
xdm.network.ip_protocol = if(protocol="HOPOPT",XDM_CONST.IP_PROTOCOL_HOPOPT, protocol="ICMP",XDM_CONST.IP_PROTOCOL_ICMP, protocol="IGMP",XDM_CONST.IP_PROTOCOL_IGMP, protocol="GGP",XDM_CONST.IP_PROTOCOL_GGP, protocol="IP",XDM_CONST.IP_PROTOCOL_IP, protocol="ST",XDM_CONST.IP_PROTOCOL_ST, protocol="TCP",XDM_CONST.IP_PROTOCOL_TCP, protocol="CBT",XDM_CONST.IP_PROTOCOL_CBT, protocol="EGP",XDM_CONST.IP_PROTOCOL_EGP, protocol="IGP",XDM_CONST.IP_PROTOCOL_IGP, protocol="BBN_RCC_MON",XDM_CONST.IP_PROTOCOL_BBN_RCC_MON, protocol="NVP_II",XDM_CONST.IP_PROTOCOL_NVP_II, protocol="PUP",XDM_CONST.IP_PROTOCOL_PUP, protocol="ARGUS",XDM_CONST.IP_PROTOCOL_ARGUS, protocol="EMCON",XDM_CONST.IP_PROTOCOL_EMCON, protocol="XNET",XDM_CONST.IP_PROTOCOL_XNET, protocol="CHAOS",XDM_CONST.IP_PROTOCOL_CHAOS, protocol="UDP",XDM_CONST.IP_PROTOCOL_UDP, protocol="MUX",XDM_CONST.IP_PROTOCOL_MUX, protocol="DCN_MEAS",XDM_CONST.IP_PROTOCOL_DCN_MEAS, protocol="HMP",XDM_CONST.IP_PROTOCOL_HMP, protocol="PRM",XDM_CONST.IP_PROTOCOL_PRM, protocol="XNS_IDP",XDM_CONST.IP_PROTOCOL_XNS_IDP, protocol="TRUNK_1",XDM_CONST.IP_PROTOCOL_TRUNK_1, protocol="TRUNK_2",XDM_CONST.IP_PROTOCOL_TRUNK_2, protocol="LEAF_1",XDM_CONST.IP_PROTOCOL_LEAF_1, protocol="LEAF_2",XDM_CONST.IP_PROTOCOL_LEAF_2, protocol="RDP",XDM_CONST.IP_PROTOCOL_RDP, protocol="IRTP",XDM_CONST.IP_PROTOCOL_IRTP, protocol="ISO_TP4",XDM_CONST.IP_PROTOCOL_ISO_TP4, protocol="NETBLT",XDM_CONST.IP_PROTOCOL_NETBLT, protocol="MFE_NSP",XDM_CONST.IP_PROTOCOL_MFE_NSP, protocol="MERIT_INP",XDM_CONST.IP_PROTOCOL_MERIT_INP, protocol="DCCP",XDM_CONST.IP_PROTOCOL_DCCP, protocol="3PC",XDM_CONST.IP_PROTOCOL_3PC, protocol="IDPR",XDM_CONST.IP_PROTOCOL_IDPR, protocol="XTP",XDM_CONST.IP_PROTOCOL_XTP, protocol="DDP",XDM_CONST.IP_PROTOCOL_DDP, protocol="IDPR_CMTP",XDM_CONST.IP_PROTOCOL_IDPR_CMTP, protocol="TP",XDM_CONST.IP_PROTOCOL_TP, protocol="IL",XDM_CONST.IP_PROTOCOL_IL, protocol="IPV6",XDM_CONST.IP_PROTOCOL_IPV6, protocol="SDRP",XDM_CONST.IP_PROTOCOL_SDRP, 
protocol="IPV6_ROUTE",XDM_CONST.IP_PROTOCOL_IPV6_ROUTE, protocol="IPV6_FRAG",XDM_CONST.IP_PROTOCOL_IPV6_FRAG, protocol="IDRP",XDM_CONST.IP_PROTOCOL_IDRP, protocol="RSVP",XDM_CONST.IP_PROTOCOL_RSVP, protocol="GRE",XDM_CONST.IP_PROTOCOL_GRE, protocol="DSR",XDM_CONST.IP_PROTOCOL_DSR, protocol="BNA",XDM_CONST.IP_PROTOCOL_BNA, protocol="ESP",XDM_CONST.IP_PROTOCOL_ESP, protocol="AH",XDM_CONST.IP_PROTOCOL_AH, protocol="I_NLSP",XDM_CONST.IP_PROTOCOL_I_NLSP, protocol="SWIPE",XDM_CONST.IP_PROTOCOL_SWIPE, protocol="NARP",XDM_CONST.IP_PROTOCOL_NARP, protocol="MOBILE",XDM_CONST.IP_PROTOCOL_MOBILE, protocol="TLSP",XDM_CONST.IP_PROTOCOL_TLSP, protocol="SKIP",XDM_CONST.IP_PROTOCOL_SKIP, protocol="IPV6_ICMP",XDM_CONST.IP_PROTOCOL_IPV6_ICMP, protocol="IPV6_NONXT",XDM_CONST.IP_PROTOCOL_IPV6_NONXT, protocol="IPV6_OPTS",XDM_CONST.IP_PROTOCOL_IPV6_OPTS, protocol="CFTP",XDM_CONST.IP_PROTOCOL_CFTP, protocol="SAT_EXPAK",XDM_CONST.IP_PROTOCOL_SAT_EXPAK, protocol="KRYPTOLAN",XDM_CONST.IP_PROTOCOL_KRYPTOLAN, protocol="RVD",XDM_CONST.IP_PROTOCOL_RVD, protocol="IPPC",XDM_CONST.IP_PROTOCOL_IPPC, protocol="SAT_MON",XDM_CONST.IP_PROTOCOL_SAT_MON, protocol="VISA",XDM_CONST.IP_PROTOCOL_VISA, protocol="IPCV",XDM_CONST.IP_PROTOCOL_IPCV, protocol="CPNX",XDM_CONST.IP_PROTOCOL_CPNX, protocol="CPHB",XDM_CONST.IP_PROTOCOL_CPHB, protocol="WSN",XDM_CONST.IP_PROTOCOL_WSN, protocol="PVP",XDM_CONST.IP_PROTOCOL_PVP, protocol="BR_SAT_MON",XDM_CONST.IP_PROTOCOL_BR_SAT_MON, protocol="SUN_ND",XDM_CONST.IP_PROTOCOL_SUN_ND, protocol="WB_MON",XDM_CONST.IP_PROTOCOL_WB_MON, protocol="WB_EXPAK",XDM_CONST.IP_PROTOCOL_WB_EXPAK, protocol="ISO_IP",XDM_CONST.IP_PROTOCOL_ISO_IP, protocol="VMTP",XDM_CONST.IP_PROTOCOL_VMTP, protocol="SECURE_VMTP",XDM_CONST.IP_PROTOCOL_SECURE_VMTP, protocol="VINES",XDM_CONST.IP_PROTOCOL_VINES, protocol="TTP",XDM_CONST.IP_PROTOCOL_TTP, protocol="NSFNET_IGP",XDM_CONST.IP_PROTOCOL_NSFNET_IGP, protocol="DGP",XDM_CONST.IP_PROTOCOL_DGP, protocol="TCF",XDM_CONST.IP_PROTOCOL_TCF, 
protocol="EIGRP",XDM_CONST.IP_PROTOCOL_EIGRP, protocol="OSPFIGP",XDM_CONST.IP_PROTOCOL_OSPFIGP, protocol="SPRITE_RPC",XDM_CONST.IP_PROTOCOL_SPRITE_RPC, protocol="LARP",XDM_CONST.IP_PROTOCOL_LARP, protocol="MTP",XDM_CONST.IP_PROTOCOL_MTP, protocol="AX25",XDM_CONST.IP_PROTOCOL_AX25, protocol="IPIP",XDM_CONST.IP_PROTOCOL_IPIP, protocol="MICP",XDM_CONST.IP_PROTOCOL_MICP, protocol="SCC_SP",XDM_CONST.IP_PROTOCOL_SCC_SP, protocol="ETHERIP",XDM_CONST.IP_PROTOCOL_ETHERIP, protocol="ENCAP",XDM_CONST.IP_PROTOCOL_ENCAP, protocol="GMTP",XDM_CONST.IP_PROTOCOL_GMTP, protocol="IFMP",XDM_CONST.IP_PROTOCOL_IFMP, protocol="PNNI",XDM_CONST.IP_PROTOCOL_PNNI, protocol="PIM",XDM_CONST.IP_PROTOCOL_PIM, protocol="ARIS",XDM_CONST.IP_PROTOCOL_ARIS, protocol="SCPS",XDM_CONST.IP_PROTOCOL_SCPS, protocol="QNX",XDM_CONST.IP_PROTOCOL_QNX, protocol="AN",XDM_CONST.IP_PROTOCOL_AN, protocol="IPCOMP",XDM_CONST.IP_PROTOCOL_IPCOMP, protocol="COMPAQ_PEER",XDM_CONST.IP_PROTOCOL_COMPAQ_PEER, protocol="IPX_IN_IP",XDM_CONST.IP_PROTOCOL_IPX_IN_IP, protocol="VRRP",XDM_CONST.IP_PROTOCOL_VRRP, protocol="PGM",XDM_CONST.IP_PROTOCOL_PGM, protocol="L2TP",XDM_CONST.IP_PROTOCOL_L2TP, protocol="DDX",XDM_CONST.IP_PROTOCOL_DDX, protocol="IATP",XDM_CONST.IP_PROTOCOL_IATP, protocol="STP",XDM_CONST.IP_PROTOCOL_STP, protocol="SRP",XDM_CONST.IP_PROTOCOL_SRP, protocol="UTI",XDM_CONST.IP_PROTOCOL_UTI, protocol="SMP",XDM_CONST.IP_PROTOCOL_SMP, protocol="SM",XDM_CONST.IP_PROTOCOL_SM, protocol="PTP",XDM_CONST.IP_PROTOCOL_PTP, protocol="ISIS",XDM_CONST.IP_PROTOCOL_ISIS, protocol="FIRE",XDM_CONST.IP_PROTOCOL_FIRE, protocol="CRTP",XDM_CONST.IP_PROTOCOL_CRTP, protocol="CRUDP",XDM_CONST.IP_PROTOCOL_CRUDP, protocol="SSCOPMCE",XDM_CONST.IP_PROTOCOL_SSCOPMCE, protocol="IPLT",XDM_CONST.IP_PROTOCOL_IPLT, protocol="SPS",XDM_CONST.IP_PROTOCOL_SPS, protocol="PIPE",XDM_CONST.IP_PROTOCOL_PIPE, protocol="SCTP",XDM_CONST.IP_PROTOCOL_SCTP, protocol="FC",XDM_CONST.IP_PROTOCOL_FC, protocol="RSVP_E2E_IGNORE",XDM_CONST.IP_PROTOCOL_RSVP_E2E_IGNORE, 
protocol="MOBILITY",XDM_CONST.IP_PROTOCOL_MOBILITY, protocol="UDPLITE",XDM_CONST.IP_PROTOCOL_UDPLITE, protocol="MPLS_IN_IP",XDM_CONST.IP_PROTOCOL_MPLS_IN_IP,to_string(protocol));
diff --git a/Packs/CiscoASA/ReleaseNotes/1_1_6.md b/Packs/CiscoASA/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..1608f8897375
--- /dev/null
+++ b/Packs/CiscoASA/ReleaseNotes/1_1_6.md
@@ -0,0 +1,5 @@
+#### Modeling Rules
+##### Cisco ASA Modeling Rule
+- Improved implementation of xdm.source.ipv4/6 (Group events).
+- Updated the previous mapping xdm.source.ipv6 to xdm.source.host.ipv6_addresses (Group events).
+- Added support for "IPv4 Address" mapping to xdm.source.host.ipv4_addresses (Group events).
diff --git a/Packs/CiscoASA/pack_metadata.json b/Packs/CiscoASA/pack_metadata.json
index b148e0fcfa4f..f79cf3d2369b 100644
--- a/Packs/CiscoASA/pack_metadata.json
+++ b/Packs/CiscoASA/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cisco ASA",
"description": "Cisco Adaptive Security Appliance Software is the core operating system for the Cisco ASA Family. It delivers enterprise-class firewall capabilities for ASA devices.",
"support": "xsoar",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ClarotyXDome/.pack-ignore b/Packs/ClarotyXDome/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ClarotyXDome/.secrets-ignore b/Packs/ClarotyXDome/.secrets-ignore
new file mode 100644
index 000000000000..3147caeb8de4
--- /dev/null
+++ b/Packs/ClarotyXDome/.secrets-ignore
@@ -0,0 +1,12 @@
+https://api.claroty.com/
+62.172.138.35
+https://not.really.api.claroty.com
+https://not.really.api.claroty.com/api/v1/
+https://not.really.vulnerability.source.url
+1a:2b:3c:d4:e5:f6
+6.4.4.25
+6.5.4.19
+8.3.0.15
+8.5.0.12
+8.6.0.8
+8.7.1.2
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/Author_image.png b/Packs/ClarotyXDome/Author_image.png
new file mode 100644
index 000000000000..84f2edda7b70
Binary files /dev/null and b/Packs/ClarotyXDome/Author_image.png differ
diff --git a/Packs/ClarotyXDome/Classifiers/classifier-XDome.json b/Packs/ClarotyXDome/Classifiers/classifier-XDome.json
new file mode 100644
index 000000000000..e07e50e847dc
--- /dev/null
+++ b/Packs/ClarotyXDome/Classifiers/classifier-XDome.json
@@ -0,0 +1,14 @@
+{
+ "id": "xDome - Classifier",
+ "name": "xDome - Classifier",
+ "type": "classification",
+ "description": "Classifies xDome incidents (device-alert-relations) that are fetched via the built-in XSOAR fetch mechanism.",
+ "defaultIncidentType": "Claroty xDome Alert",
+ "keyTypeMap": {},
+ "transformer": {
+ "complex": null,
+ "simple": ""
+ },
+ "version": -1,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/Classifiers/classifier-mapper-incoming-XDome.json b/Packs/ClarotyXDome/Classifiers/classifier-mapper-incoming-XDome.json
new file mode 100644
index 000000000000..d6a6875cae4f
--- /dev/null
+++ b/Packs/ClarotyXDome/Classifiers/classifier-mapper-incoming-XDome.json
@@ -0,0 +1,130 @@
+{
+ "feed": false,
+ "id": "xDome - Incoming Mapper",
+ "name": "xDome - Incoming Mapper",
+ "description": "Maps incoming xDome incidents fields for use in Integration Playbooks.",
+ "type": "mapping-incoming",
+ "mapping": {
+ "Claroty xDome Alert": {
+ "dontMapEventToLabels": false,
+ "internalMapping": {
+ "Alert Category": {
+ "simple": "alert_category"
+ },
+ "Alert ID": {
+ "simple": "alert_id"
+ },
+ "Alert Name": {
+ "simple": "alert_name"
+ },
+ "Device Id": {
+ "simple": "device_uid"
+ },
+ "Device Name": {
+ "simple": "device_name"
+ },
+ "Claroty xDome Alert Assignees": {
+ "simple": "alert_assignees"
+ },
+ "Claroty xDome Alert Class": {
+ "simple": "alert_class"
+ },
+ "Claroty xDome Alert Labels": {
+ "simple": "alert_labels"
+ },
+ "Claroty xDome Alert Type": {
+ "simple": "alert_type_name"
+ },
+ "Claroty xDome Device Assignees": {
+ "simple": "device_assignees"
+ },
+ "Claroty xDome Device Category": {
+ "simple": "device_category"
+ },
+ "Claroty xDome Device Effective Likelihood Subscore": {
+ "simple": "device_effective_likelihood_subscore"
+ },
+ "Claroty xDome Device Effective Likelihood Subscore Points": {
+ "simple": "device_effective_likelihood_subscore_points"
+ },
+ "Claroty xDome Device First Seen": {
+ "simple": "device_first_seen_list"
+ },
+ "Claroty xDome Device IP": {
+ "simple": "device_ip_list"
+ },
+ "Claroty xDome Device Impact Subscore": {
+ "simple": "device_impact_subscore"
+ },
+ "Claroty xDome Device Impact Subscore Points": {
+ "simple": "device_impact_subscore_points"
+ },
+ "Claroty xDome Device Insecure Protocols": {
+ "simple": "device_insecure_protocols"
+ },
+ "Claroty xDome Device Insecure Protocols Points": {
+ "simple": "device_insecure_protocols_points"
+ },
+ "Claroty xDome Device Internet Communication": {
+ "simple": "device_internet_communication"
+ },
+ "Claroty xDome Device Known Vulnerabilities": {
+ "simple": "device_known_vulnerabilities"
+ },
+ "Claroty xDome Device Known Vulnerabilities Points": {
+ "simple": "device_known_vulnerabilities_points"
+ },
+ "Claroty xDome Device Labels": {
+ "simple": "device_labels"
+ },
+ "Claroty xDome Device Last Seen": {
+ "simple": "device_last_seen_list"
+ },
+ "Claroty xDome Device Likelihood Subscore": {
+ "simple": "device_likelihood_subscore"
+ },
+ "Claroty xDome Device Likelihood Subscore Points": {
+ "simple": "device_likelihood_subscore_points"
+ },
+ "Claroty xDome Device MAC": {
+ "simple": "device_mac_list"
+ },
+ "Claroty xDome Device Manufacturer": {
+ "simple": "device_manufacturer"
+ },
+ "Claroty xDome Device Network": {
+ "simple": "device_network_list"
+ },
+ "Claroty xDome Device Purdue Level": {
+ "simple": "device_purdue_level"
+ },
+ "Claroty xDome Device Risk Score": {
+ "simple": "device_risk_score"
+ },
+ "Claroty xDome Device Risk Score Points": {
+ "simple": "device_risk_score_points"
+ },
+ "Claroty xDome Device Site Name": {
+ "simple": "device_site_name"
+ },
+ "Claroty xDome Device Subcategory": {
+ "simple": "device_subcategory"
+ },
+ "Claroty xDome Device Type": {
+ "simple": "device_type"
+ },
+ "Claroty xDome Device-Alert Detected Time": {
+ "simple": "device_alert_detected_time"
+ },
+ "Claroty xDome Device-Alert Status": {
+ "simple": "device_alert_status"
+ },
+ "Claroty xDome Device-Alert Updated Time": {
+ "simple": "device_alert_updated_time"
+ }
+ }
+ }
+ },
+ "version": -1,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Assignees.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Assignees.json
new file mode 100644
index 000000000000..38295ff7ee6f
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Assignees.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomealertassignees",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.209772537Z",
+ "name": "Claroty xDome Alert Assignees",
+ "ownerOnly": false,
+ "description": "The users and/or groups the alert is assigned to",
+ "cliName": "clarotyxdomealertassignees",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Class.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Class.json
new file mode 100644
index 000000000000..06e32ea5bb75
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Class.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomealertclass",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.349053927Z",
+ "name": "Claroty xDome Alert Class",
+ "ownerOnly": false,
+ "description": "The alert class, such as \"Pre-Defined Alerts\" and \"Custom Alerts\"",
+ "cliName": "clarotyxdomealertclass",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Labels.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Labels.json
new file mode 100644
index 000000000000..534eb857dcf9
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Labels.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomealertlabels",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.501042153Z",
+ "name": "Claroty xDome Alert Labels",
+ "ownerOnly": false,
+ "description": "The labels added to the alert manually or automatically",
+ "cliName": "clarotyxdomealertlabels",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Type.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Type.json
new file mode 100644
index 000000000000..bca075f66172
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Alert_Type.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomealerttype",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.644755497Z",
+ "name": "Claroty xDome Alert Type",
+ "ownerOnly": false,
+ "description": "An alert type such as \"Outdated Firmware\"",
+ "cliName": "clarotyxdomealerttype",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Detected_Time.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Detected_Time.json
new file mode 100644
index 000000000000..e78369e0eef3
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Detected_Time.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicealertdetectedtime",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.988452273Z",
+ "name": "Claroty xDome Device-Alert Detected Time",
+ "ownerOnly": false,
+ "description": "Date and time when the Alert was first detected",
+ "cliName": "clarotyxdomedevicealertdetectedtime",
+ "type": "date",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Status.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Status.json
new file mode 100644
index 000000000000..200fe8c183bc
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Status.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_clarotyxdomedevicealertstatus",
+ "version": -1,
+ "modified": "2024-04-18T10:35:38.070435248Z",
+ "name": "Claroty xDome Device-Alert Status",
+ "ownerOnly": false,
+ "description": "Alert status such as \"Resolved\" or \"Acknowledged\"",
+ "cliName": "clarotyxdomedevicealertstatus",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Resolved",
+ "Unresolved",
+ "Acknowledged"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Updated_Time.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Updated_Time.json
new file mode 100644
index 000000000000..081af7979a4d
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Alert_Updated_Time.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicealertupdatedtime",
+ "version": -1,
+ "modified": "2024-04-18T10:35:38.14304194Z",
+ "name": "Claroty xDome Device-Alert Updated Time",
+ "ownerOnly": false,
+ "description": "Date and time of last Alert update",
+ "cliName": "clarotyxdomedevicealertupdatedtime",
+ "type": "date",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Assignees.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Assignees.json
new file mode 100644
index 000000000000..67540d0c26ed
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Assignees.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceassignees",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.719928492Z",
+ "name": "Claroty xDome Device Assignees",
+ "ownerOnly": false,
+ "description": "The users and/or groups the device is assigned to",
+ "cliName": "clarotyxdomedeviceassignees",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Category.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Category.json
new file mode 100644
index 000000000000..0cc1f97f6159
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Category.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicecategory",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.79129949Z",
+ "name": "Claroty xDome Device Category",
+ "ownerOnly": false,
+ "description": "The device category group (see \"About Device Categorization\" in the Knowledge Base)",
+ "cliName": "clarotyxdomedevicecategory",
+ "type": "shortText",
+ "closeForm": true,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore.json
new file mode 100644
index 000000000000..ae4988dd43eb
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedeviceeffectivelikelihoodsubscore",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.868044049Z",
+ "name": "Claroty xDome Device Effective Likelihood Subscore",
+ "ownerOnly": false,
+ "description": "The calculated effective likelihood subscore level of a device, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedeviceeffectivelikelihoodsubscore",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore_Points.json
new file mode 100644
index 000000000000..95dffc188c13
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Effective_Likelihood_Subscore_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceeffectivelikelihoodsubscorepoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:35.94630875Z",
+ "name": "Claroty xDome Device Effective Likelihood Subscore Points",
+ "ownerOnly": false,
+ "description": "The calculated effective likelihood subscore points of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedeviceeffectivelikelihoodsubscorepoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_First_Seen.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_First_Seen.json
new file mode 100644
index 000000000000..67e3e0fec4ae
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_First_Seen.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicefirstseen",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.061254113Z",
+ "name": "Claroty xDome Device First Seen",
+ "ownerOnly": false,
+ "description": "The date and time a device's NIC was first seen",
+ "cliName": "clarotyxdomedevicefirstseen",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_IP.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_IP.json
new file mode 100644
index 000000000000..987e1d6355f0
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_IP.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceip",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.132820607Z",
+ "name": "Claroty xDome Device IP",
+ "ownerOnly": false,
+ "description": "IP address associated with the device. IPs may be suffixed by a / (annotation), where annotation may be a child device ID or (Last Known IP)",
+ "cliName": "clarotyxdomedeviceip",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore.json
new file mode 100644
index 000000000000..8844f41e7250
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedeviceimpactsubscore",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.202540569Z",
+ "name": "Claroty xDome Device Impact Subscore",
+ "ownerOnly": false,
+ "description": "The calculated impact subscore level of a device, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedeviceimpactsubscore",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore_Points.json
new file mode 100644
index 000000000000..7fbc57f45bef
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Impact_Subscore_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceimpactsubscorepoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.273687642Z",
+ "name": "Claroty xDome Device Impact Subscore Points",
+ "ownerOnly": false,
+ "description": "The calculated impact subscore points of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedeviceimpactsubscorepoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols.json
new file mode 100644
index 000000000000..2bb36f3213c1
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedeviceinsecureprotocols",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.346255144Z",
+ "name": "Claroty xDome Device Insecure Protocols",
+ "ownerOnly": false,
+ "description": "The calculated level of the device's \"insecure protocols\" likelihood factor, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedeviceinsecureprotocols",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols_Points.json
new file mode 100644
index 000000000000..4aec4808ca00
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Insecure_Protocols_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceinsecureprotocolspoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.422639212Z",
+ "name": "Claroty xDome Device Insecure Protocols Points",
+ "ownerOnly": false,
+ "description": "The calculated points for the \"insecure protocols\" likelihood factor of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedeviceinsecureprotocolspoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Internet_Communication.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Internet_Communication.json
new file mode 100644
index 000000000000..83d5a7766c33
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Internet_Communication.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceinternetcommunication",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.495359369Z",
+ "name": "Claroty xDome Device Internet Communication",
+ "ownerOnly": false,
+ "description": "The manner of the device's communication over the internet",
+ "cliName": "clarotyxdomedeviceinternetcommunication",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities.json
new file mode 100644
index 000000000000..38e4b00b3634
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedeviceknownvulnerabilities",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.569467323Z",
+ "name": "Claroty xDome Device Known Vulnerabilities",
+ "ownerOnly": false,
+ "description": "The calculated level of the device's \"known vulnerabilities\" likelihood factor, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedeviceknownvulnerabilities",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities_Points.json
new file mode 100644
index 000000000000..7a720a970765
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Known_Vulnerabilities_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceknownvulnerabilitiespoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.650138022Z",
+ "name": "Claroty xDome Device Known Vulnerabilities Points",
+ "ownerOnly": false,
+ "description": "The calculated points for the \"known vulnerabilities\" likelihood factor of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedeviceknownvulnerabilitiespoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Labels.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Labels.json
new file mode 100644
index 000000000000..0104c8e30975
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Labels.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicelabels",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.72474268Z",
+ "name": "Claroty xDome Device Labels",
+ "ownerOnly": false,
+ "description": "The labels added to the device manually or automatically",
+ "cliName": "clarotyxdomedevicelabels",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Last_Seen.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Last_Seen.json
new file mode 100644
index 000000000000..b0e64ac903fa
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Last_Seen.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicelastseen",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.815308343Z",
+ "name": "Claroty xDome Device Last Seen",
+ "ownerOnly": false,
+ "description": "The date and time a device's NIC was last seen",
+ "cliName": "clarotyxdomedevicelastseen",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore.json
new file mode 100644
index 000000000000..93a30c4efcb8
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedevicelikelihoodsubscore",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.91207491Z",
+ "name": "Claroty xDome Device Likelihood Subscore",
+ "ownerOnly": false,
+ "description": "The calculated likelihood subscore level of a device, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedevicelikelihoodsubscore",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore_Points.json
new file mode 100644
index 000000000000..c7c8a0c99621
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Likelihood_Subscore_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicelikelihoodsubscorepoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:36.984493348Z",
+ "name": "Claroty xDome Device Likelihood Subscore Points",
+ "ownerOnly": false,
+ "description": "The calculated likelihood subscore points of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedevicelikelihoodsubscorepoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_MAC.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_MAC.json
new file mode 100644
index 000000000000..2e32eb64c9fd
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_MAC.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicemac",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.057225684Z",
+ "name": "Claroty xDome Device MAC",
+ "ownerOnly": false,
+ "description": "MAC address associated with the device",
+ "cliName": "clarotyxdomedevicemac",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Manufacturer.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Manufacturer.json
new file mode 100644
index 000000000000..a7026141c416
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Manufacturer.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicemanufacturer",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.130748724Z",
+ "name": "Claroty xDome Device Manufacturer",
+ "ownerOnly": false,
+ "description": "Manufacturer of the device, such as \"Alaris\"",
+ "cliName": "clarotyxdomedevicemanufacturer",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Network.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Network.json
new file mode 100644
index 000000000000..35b483a867aa
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Network.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicenetwork",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.270283021Z",
+ "name": "Claroty xDome Device Network",
+ "ownerOnly": false,
+ "description": "The network types, \"Corporate\" and/or \"Guest\", that the device belongs to",
+ "cliName": "clarotyxdomedevicenetwork",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": true,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Purdue_Level.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Purdue_Level.json
new file mode 100644
index 000000000000..4804be8e50b5
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Purdue_Level.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicepurduelevel",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.344431268Z",
+ "name": "Claroty xDome Device Purdue Level",
+ "ownerOnly": false,
+ "description": "The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System (ICS). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks",
+ "cliName": "clarotyxdomedevicepurduelevel",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score.json
new file mode 100644
index 000000000000..a4194e5979ab
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_clarotyxdomedeviceriskscore",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.489063158Z",
+ "name": "Claroty xDome Device Risk Score",
+ "ownerOnly": false,
+ "description": "The calculated risk level of a device, such as \"Critical\", or \"High\"",
+ "cliName": "clarotyxdomedeviceriskscore",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Very Low",
+ "Low",
+ "Medium",
+ "High",
+ "Critical"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score_Points.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score_Points.json
new file mode 100644
index 000000000000..f1db3bd3a47e
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Risk_Score_Points.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedeviceriskscorepoints",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.558979666Z",
+ "name": "Claroty xDome Device Risk Score Points",
+ "ownerOnly": false,
+ "description": "The calculated risk points of a device, such as \"54.1\"",
+ "cliName": "clarotyxdomedeviceriskscorepoints",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Site_Name.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Site_Name.json
new file mode 100644
index 000000000000..00e3bfeea028
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Site_Name.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicesitename",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.634073698Z",
+ "name": "Claroty xDome Device Site Name",
+ "ownerOnly": false,
+ "description": "The name of the site within the organization the device is associated with",
+ "cliName": "clarotyxdomedevicesitename",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Subcategory.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Subcategory.json
new file mode 100644
index 000000000000..059e505d91ac
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Subcategory.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicesubcategory",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.706705592Z",
+ "name": "Claroty xDome Device Subcategory",
+ "ownerOnly": false,
+ "description": "The device sub-category group (see \"About Device Categorization\" in the Knowledge Base)",
+ "cliName": "clarotyxdomedevicesubcategory",
+ "type": "shortText",
+ "closeForm": true,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Type.json b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Type.json
new file mode 100644
index 000000000000..e56e1e21d748
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentFields/incidentfield-Claroty_xDome_Device_Type.json
@@ -0,0 +1,32 @@
+{
+ "id": "incident_clarotyxdomedevicetype",
+ "version": -1,
+ "modified": "2024-04-18T10:35:37.786449335Z",
+ "name": "Claroty xDome Device Type",
+ "ownerOnly": false,
+ "description": "The device type group (see \"About Device Categorization\" in the Knowledge Base)",
+ "cliName": "clarotyxdomedevicetype",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "openEnded": false,
+ "associatedTypes": [
+ "Claroty xDome Alert"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/IncidentTypes/incidenttype-Claroty_XDome_Alert.json b/Packs/ClarotyXDome/IncidentTypes/incidenttype-Claroty_XDome_Alert.json
new file mode 100644
index 000000000000..f7cff6661b13
--- /dev/null
+++ b/Packs/ClarotyXDome/IncidentTypes/incidenttype-Claroty_XDome_Alert.json
@@ -0,0 +1,28 @@
+{
+ "id": "Claroty xDome Alert",
+ "version": -1,
+ "vcShouldIgnore": false,
+ "locked": false,
+ "name": "Claroty xDome Alert",
+ "prevName": "Claroty xDome Alert",
+ "color": "#8E00B0",
+ "hours": 0,
+ "days": 0,
+ "weeks": 0,
+ "hoursR": 0,
+ "daysR": 0,
+ "weeksR": 0,
+ "system": false,
+ "readonly": false,
+ "default": false,
+ "autorun": false,
+ "disabled": false,
+ "reputationCalc": 0,
+ "onChangeRepAlg": 0,
+ "detached": false,
+ "extractSettings": {
+ "mode": "All",
+ "fieldCliNameToExtractSettings": {}
+ },
+ "fromVersion": "6.10.0"
+}
\ No newline at end of file
diff --git a/Packs/ClarotyXDome/Integrations/XDome/README.md b/Packs/ClarotyXDome/Integrations/XDome/README.md
new file mode 100644
index 000000000000..56926aa63dca
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/README.md
@@ -0,0 +1,201 @@
+Use the xDome integration to manage assets and alerts.
+This integration was integrated and tested with version 1.0.0 of XDome.
+
+## Configure xDome on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for xDome.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | XDome public API base URL | | True |
+ | API Token | The API token to use for connection | True |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+ | The initial time to fetch from | | True |
+ | Fetch Only Unresolved Device-Alert Pairs | | False |
+ | Alert Types Selection | If no alert types are selected, all types will be fetched | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### xdome-get-device-alert-relations
+
+***
+Gets all device-alert pairs from xDome. You can apply a query-filter.
+
+#### Base Command
+
+`xdome-get-device-alert-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| fields | Fields to return. Possible values are: all, alert_assignees, alert_category, alert_class, alert_id, alert_labels, alert_name, alert_type_name, device_alert_detected_time, device_alert_status, device_alert_updated_time, device_assignees, device_category, device_effective_likelihood_subscore, device_effective_likelihood_subscore_points, device_first_seen_list, device_impact_subscore, device_impact_subscore_points, device_insecure_protocols, device_insecure_protocols_points, device_internet_communication, device_ip_list, device_known_vulnerabilities, device_known_vulnerabilities_points, device_labels, device_last_seen_list, device_likelihood_subscore, device_likelihood_subscore_points, device_mac_list, device_manufacturer, device_name, device_network_list, device_purdue_level, device_retired, device_risk_score, device_risk_score_points, device_site_name, device_subcategory, device_type, device_uid. Default is all. | Optional |
+| filter_by | A filter_by object, refer to the xDome API documentation. | Optional |
+| offset | An offset in the data. This can be used to fetch all data in a paginated manner, e.g., by requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc. | Optional |
+| limit | Maximum amount of items to fetch. | Optional |
+| sort_by | Default: [{"field":"device_uid","order":"asc"},{"field":"alert_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| XDome.DeviceAlert.alert_id | Number | Platform unique Alert ID. |
+| XDome.DeviceAlert.alert_name | String | The alert name, such as “Malicious Internet Communication: 62.172.138.35”. |
+| XDome.DeviceAlert.alert_type_name | String | An alert type such as "Outdated Firmware". |
+| XDome.DeviceAlert.alert_class | String | The alert class, such as “Pre-Defined Alerts” and “Custom Alerts”. |
+| XDome.DeviceAlert.alert_category | String | Alert category such as "Risk" or "Segmentation". |
+| XDome.DeviceAlert.alert_labels | String | The labels added to the alert manually or automatically. |
+| XDome.DeviceAlert.alert_assignees | String | The users and or groups the alert is assigned to. |
+| XDome.DeviceAlert.device_alert_detected_time | Date | Date and time when the Alert was first detected. |
+| XDome.DeviceAlert.device_alert_updated_time | Date | Date and time of last Alert update. |
+| XDome.DeviceAlert.device_alert_status | String | Device-Alert relation status \(Resolved or Unresolved\). |
+| XDome.DeviceAlert.device_uid | UUID | A universal unique identifier \(UUID\) for the device. |
+| XDome.DeviceAlert.device_name | String | The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc. |
+| XDome.DeviceAlert.device_ip_list | List | IP address associated with the device. IPs may be suffixed by a / \(annotation\), where annotation may be a child device ID or \(Last Known IP\). |
+| XDome.DeviceAlert.device_mac_list | List | MAC address associated with the device. |
+| XDome.DeviceAlert.device_network_list | List | The network types, "Corporate" and or "Guest", that the device belongs to. |
+| XDome.DeviceAlert.device_category | String | The device category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_subcategory | String | The device sub-category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_type | String | The device type group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_assignees | String | The users and or groups the device is assigned to. |
+| XDome.DeviceAlert.device_labels | String | The labels added to the device manually or automatically. |
+| XDome.DeviceAlert.device_retired | String | A boolean field indicating if the device is retired or not. |
+| XDome.DeviceAlert.device_purdue_level | String | The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System \(ICS\). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks. |
+| XDome.DeviceAlert.device_site_name | String | The name of the site within the organization the device is associated with. |
+| XDome.DeviceAlert.device_first_seen_list | List | The date and time a device's NIC was first seen. |
+| XDome.DeviceAlert.device_last_seen_list | List | The date and time a device's NIC was last seen. |
+| XDome.DeviceAlert.device_risk_score | String | The calculated risk level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_risk_score_points | Number | The calculated risk points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_effective_likelihood_subscore | String | The calculated effective likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_effective_likelihood_subscore_points | Number | The calculated effective likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_likelihood_subscore | String | The calculated likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_likelihood_subscore_points | Number | The calculated likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_impact_subscore | String | The calculated impact subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_impact_subscore_points | Number | The calculated impact subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_insecure_protocols | String | The calculated level of the device’s ‘insecure protocols’ likelihood factor, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_insecure_protocols_points | Number | The calculated points for ‘insecure protocols’ likelihood factor of a device, such as "54.1". |
+| XDome.DeviceAlert.device_internet_communication | String | The manner of the device's communication over the internet. |
+| XDome.DeviceAlert.device_known_vulnerabilities | String | The calculated level of the device’s ‘known vulnerabilities’ likelihood factor, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_known_vulnerabilities_points | Number | The calculated points for ‘known vulnerabilities’ likelihood factor of a device, such as "54.1". |
+| XDome.DeviceAlert.device_manufacturer | String | Manufacturer of the device, such as "Alaris". |
+
+### xdome-set-status-for-device-alert-relations
+
+***
+Set device-alert status to resolved or unresolved.
+
+#### Base Command
+
+`xdome-set-status-for-device-alert-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alert_id | Alert ID, as indicated in the id field of an alert. | Required |
+| device_uids | Device UUIDs, as indicated in the uid field of a device. | Optional |
+| status | Set the device-alert status to resolved or unresolved. Possible values are: resolved, unresolved. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+### xdome-get-device-vulnerability-relations
+
+***
+Get details of devices with their related vulnerabilities from the database. The data returned by this endpoint for each device corresponds to the vulnerabilities table in the single device page.
+
+#### Base Command
+
+`xdome-get-device-vulnerability-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| fields | Fields to return. Possible values are: all, device_network_list, device_category, device_subcategory, device_type, device_uid, device_asset_id, device_mac_list, device_ip_list, device_type_family, device_model, device_os_category, device_serial_number, device_vlan_list, device_retired, device_labels, device_assignees, device_hw_version, device_local_name, device_os_name, device_os_version, device_os_revision, device_os_subcategory, device_combined_os, device_endpoint_security_names, device_equipment_class, device_consequence_of_failure, device_management_services, device_ad_distinguished_name, device_ad_description, device_mdm_ownership, device_mdm_enrollment_status, device_mdm_compliance_status, device_last_domain_user, device_fda_class, device_mobility, device_purdue_level, device_purdue_level_source, device_dhcp_hostnames, device_http_hostnames, device_snmp_hostnames, device_windows_hostnames, device_other_hostnames, device_windows_last_seen_hostname, device_dhcp_last_seen_hostname, device_http_last_seen_hostname, device_snmp_last_seen_hostname, device_ae_titles, device_dhcp_fingerprint, device_note, device_domains, device_battery_level, device_internet_communication, device_financial_cost, device_handles_pii, device_machine_type, device_phi, device_cmms_state, device_cmms_ownership, device_cmms_asset_tag, device_cmms_campus, device_cmms_building, device_cmms_location, device_cmms_floor, device_cmms_department, device_cmms_owning_cost_center, device_cmms_asset_purchase_cost, device_cmms_room, device_cmms_manufacturer, device_cmms_model, device_cmms_serial_number, device_cmms_last_pm, device_cmms_technician, device_edr_is_up_to_date_text, device_mac_oui_list, device_ip_assignment_list, device_protocol_location_list, device_vlan_name_list, device_vlan_description_list, device_connection_type_list, device_ssid_list, device_bssid_list, device_wireless_encryption_type_list, device_ap_name_list, device_ap_location_list, device_switch_mac_list, 
device_switch_ip_list, device_switch_name_list, device_switch_port_list, device_switch_location_list, device_switch_port_description_list, device_wlc_name_list, device_wlc_location_list, device_applied_acl_list, device_applied_acl_type_list, device_collection_servers, device_edge_locations, device_number_of_nics, device_last_domain_user_activity, device_last_scan_time, device_edr_last_scan_time, device_retired_since, device_os_eol_date, device_last_seen_list, device_first_seen_list, device_wifi_last_seen_list, device_last_seen_on_switch_list, device_is_online, device_network_scope_list, device_ise_authentication_method_list, device_ise_endpoint_profile_list, device_ise_identity_group_list, device_ise_security_group_name_list, device_ise_security_group_tag_list, device_ise_logical_profile_list, device_cppm_authentication_status_list, device_cppm_roles_list, device_cppm_service_list, device_name, device_manufacturer, device_site_name, device_risk_score, device_risk_score_points, device_effective_likelihood_subscore, device_effective_likelihood_subscore_points, device_likelihood_subscore, device_likelihood_subscore_points, device_impact_subscore, device_impact_subscore_points, device_known_vulnerabilities, device_known_vulnerabilities_points, device_insecure_protocols, device_insecure_protocols_points, device_suspicious, device_switch_group_name_list, device_managed_by, device_authentication_user_list, device_collection_interfaces, device_slot_cards, device_cmms_financial_cost, device_software_or_firmware_version, device_enforcement_or_authorization_profiles_list, device_ise_security_group_description_list, device_recommended_firewall_group_name, device_recommended_zone_name, vulnerability_id, vulnerability_name, vulnerability_type, vulnerability_cve_ids, vulnerability_cvss_v2_score, vulnerability_cvss_v2_exploitability_subscore, vulnerability_cvss_v3_score, vulnerability_cvss_v3_exploitability_subscore, vulnerability_adjusted_vulnerability_score, 
vulnerability_adjusted_vulnerability_score_level, vulnerability_epss_score, vulnerability_sources, vulnerability_description, vulnerability_affected_products, vulnerability_recommendations, vulnerability_exploits_count, vulnerability_is_known_exploited, vulnerability_published_date, vulnerability_labels, vulnerability_assignees, vulnerability_note, vulnerability_last_updated, vulnerability_relevance, vulnerability_relevance_sources, vulnerability_manufacturer_remediation_info, vulnerability_manufacturer_remediation_info_source, vulnerability_overall_cvss_v3_score, device_vulnerability_detection_date, device_vulnerability_resolution_date, device_vulnerability_days_to_resolution, patch_install_date. Default is all. | Optional |
+| filter_by | A filter_by object, refer to the xDome API documentation. Input as a string and don't forget to escape quotes (\"). | Optional |
+| sort_by | Default: [{"field":"device_uid","order":"asc"}, {"field":"vulnerability_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses. | Optional |
+| offset | An offset in the data. This can be used to fetch all data in a paginated manner, e.g., by requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc. | Optional |
+| limit | Maximum amount of items to fetch. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| XDome.DeviceVulnerability.vulnerability_name | String | Name designated by Claroty's Research team, based on the advisory name or CVE ID. |
+| XDome.DeviceVulnerability.vulnerability_type | String | Type such as "Application", "Clinical", "IoT" or "Platform". |
+| XDome.DeviceVulnerability.vulnerability_cve_ids | List | Relevant Common Vulnerability Exploits for the selected vulnerability. |
+| XDome.DeviceVulnerability.vulnerability_cvss_v3_score | Number | Common Vulnerability Scoring System Version 3 score \(0-10\). In case of multiple CVEs, the highest Subscore is displayed. |
+| XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score | Number | The Adjusted Vulnerability Score represents the vulnerability score based on its impact and exploitability. |
+| XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score_level | String | The calculated Adjusted vulnerability Score \(AVS\) level of a vulnerability, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.vulnerability_epss_score | Number | A probability score between 0 to 1 indicating the likelihood of a vulnerability to be exploited in the wild, based on the Exploit Prediction Scoring System \(EPSS\) model. |
+| XDome.DeviceVulnerability.vulnerability_description | String | Details about the vulnerability. |
+| XDome.DeviceVulnerability.vulnerability_exploits_count | Number | An aggregated numeric field of the number of known exploits based on ExploitDB. |
+| XDome.DeviceVulnerability.vulnerability_is_known_exploited | Boolean | A boolean field indicating whether a vulnerability is currently exploited in-the-wild, based on the CISA Catalog of Known Exploited Vulnerabilities. |
+| XDome.DeviceVulnerability.vulnerability_published_date | Date | The date and time the vulnerability was released. |
+| XDome.DeviceVulnerability.vulnerability_relevance | String | The device vulnerability relevance reflects the confidence level of the detection process, corresponding to several components, such as the vulnerability type. |
+| XDome.DeviceVulnerability.device_vulnerability_detection_date | Date | The date when the vulnerability was initially detected on the device. A vulnerability is considered detected once marked as “confirmed” or “potentially relevant” for the respective device. |
+| XDome.DeviceVulnerability.device_network_list | List | The network types, "Corporate" and or "Guest", that the device belongs to. |
+| XDome.DeviceVulnerability.device_category | String | The device category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_subcategory | String | The device sub-category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_type | String | The device type group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_uid | String | A universal unique identifier \(UUID\) for the device. |
+| XDome.DeviceVulnerability.device_asset_id | String | Asset ID. |
+| XDome.DeviceVulnerability.device_mac_list | List | MAC address associated with the device. |
+| XDome.DeviceVulnerability.device_ip_list | List | IP address associated with the device. IPs may be suffixed by a / \(annotation\), where annotation may be a child device ID or \(Last Known IP\). |
+| XDome.DeviceVulnerability.device_type_family | String | The device type family group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_model | String | The device's model. |
+| XDome.DeviceVulnerability.device_os_category | String | The device's OS category, such as "Windows", "Linux" or "Other". |
+| XDome.DeviceVulnerability.device_serial_number | String | The device's serial number. |
+| XDome.DeviceVulnerability.device_vlan_list | List | The virtual LAN to which the device belongs. |
+| XDome.DeviceVulnerability.device_labels | List | The labels added to the device manually or automatically. |
+| XDome.DeviceVulnerability.device_assignees | List | The users and or groups the device is assigned to. |
+| XDome.DeviceVulnerability.device_hw_version | String | The hardware version of the device. |
+| XDome.DeviceVulnerability.device_local_name | String | Similar to hostname, the device name identifier is extracted from protocol traffic. |
+| XDome.DeviceVulnerability.device_combined_os | String | The aggregated value of OS name, version and revision, such as "Windows XP SP3". |
+| XDome.DeviceVulnerability.device_endpoint_security_names | List | The names of endpoint security applications installed on the device. |
+| XDome.DeviceVulnerability.device_equipment_class | String | Determines the equipment class of the device, according to The Joint Commission \(TJC\). |
+| XDome.DeviceVulnerability.device_management_services | String | Defines whether the device is managed by Active Directory, Mobile Device Management, or neither. |
+| XDome.DeviceVulnerability.device_purdue_level | String | The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System \(ICS\). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks. |
+| XDome.DeviceVulnerability.device_http_last_seen_hostname | String | The most recent unique hostname identifier of the device, extracted from HTTP protocol traffic. |
+| XDome.DeviceVulnerability.device_snmp_last_seen_hostname | String | The most recent unique hostname identifier of the device, extracted from SNMP protocol traffic. |
+| XDome.DeviceVulnerability.device_note | String | The notes added to the device. |
+| XDome.DeviceVulnerability.device_domains | List | The domain name of the network that the device belongs to. |
+| XDome.DeviceVulnerability.device_internet_communication | String | The manner of the device's communication over the internet. |
+| XDome.DeviceVulnerability.device_edr_is_up_to_date_text | String | Determines whether the endpoint security application installed on the device is up-to-date. |
+| XDome.DeviceVulnerability.device_mac_oui_list | List | The vendor of the device's NIC, according to the OUI \(Organizational Unique Identifier\) in the MAC address. |
+| XDome.DeviceVulnerability.device_ip_assignment_list | List | The device's IP assignment method, extracted from DHCP protocol traffic, such as "DHCP", "DHCP \(Static Lease\)", or "Static". |
+| XDome.DeviceVulnerability.device_vlan_name_list | List | The name of the VLAN, extracted from switch configurations. |
+| XDome.DeviceVulnerability.device_vlan_description_list | List | The description of the VLAN, extracted from switch configurations. |
+| XDome.DeviceVulnerability.device_connection_type_list | List | The connection types of a device, such as "Ethernet". |
+| XDome.DeviceVulnerability.device_ssid_list | List | The name of the wireless network the device is connected to, such as "Guest". |
+| XDome.DeviceVulnerability.device_ap_location_list | List | The location of the access point the device is connected to, extracted from Network Management integrations. |
+| XDome.DeviceVulnerability.device_switch_port_list | List | The port identifier of the switch the device is connected to. |
+| XDome.DeviceVulnerability.device_switch_location_list | List | The location of the switch the device is connected to. |
+| XDome.DeviceVulnerability.device_number_of_nics | Number | The number of network interface cards seen on the network. |
+| XDome.DeviceVulnerability.device_last_seen_list | List | The date and time a device's NIC was last seen. |
+| XDome.DeviceVulnerability.device_first_seen_list | List | The date and time a device's NIC was first seen. |
+| XDome.DeviceVulnerability.device_is_online | Boolean | A boolean field indicating whether the device is online or not. |
+| XDome.DeviceVulnerability.device_network_scope_list | List | The device's Network Scope - used to differentiate between internal networks that share the same IP subnets. |
+| XDome.DeviceVulnerability.device_name | String | The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc. |
+| XDome.DeviceVulnerability.device_manufacturer | String | Manufacturer of the device, such as "Alaris". |
+| XDome.DeviceVulnerability.device_site_name | String | The name of the site within the healthcare organization the device is associated with. |
+| XDome.DeviceVulnerability.device_risk_score | String | The calculated risk level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_risk_score_points | Number | The calculated risk points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_effective_likelihood_subscore | String | The calculated effective likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_effective_likelihood_subscore_points | Number | The calculated effective likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_likelihood_subscore | String | The calculated likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_likelihood_subscore_points | Number | The calculated likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_impact_subscore | String | The calculated impact subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_impact_subscore_points | Number | The calculated impact subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_suspicious | List | The reasons for which the device was marked as suspicious. |
+| XDome.DeviceVulnerability.device_authentication_user_list | List | The User name used to authenticate the device to the network using Radius/802.1x is extracted from the NAC integration and the traffic. |
+| XDome.DeviceVulnerability.device_software_or_firmware_version | String | The application version running on the device. |
diff --git a/Packs/ClarotyXDome/Integrations/XDome/XDome.py b/Packs/ClarotyXDome/Integrations/XDome/XDome.py
new file mode 100644
index 000000000000..682b7db7bf76
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/XDome.py
@@ -0,0 +1,779 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+from CommonServerUserPython import * # noqa
+
+import abc
+import dateparser
+from datetime import datetime, timedelta
+from typing import Dict, Any, Collection, Set, Optional, Tuple, Union, Callable, List
+import urllib3
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+
''' CONSTANTS '''

DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR

# One-second step used to advance a time cursor just past an already-scanned
# instant (see fetch_incidents / _next_tick).
SMALLEST_TIME_UNIT = timedelta(seconds=1)

# Largest page size used for a single relations request when paginating.
MAX_REQUEST_LIMIT = 5000

# Page size used when a command does not specify an explicit limit.
DEFAULT_REQUEST_LIMIT = 1000

# Bounds on the number of incidents pulled in one fetch-incidents cycle.
MAX_FETCH_LIMIT = 50_000
DEFAULT_FETCH_LIMIT = 50_000
+
# Full set of fields that can be requested from the device_alert_relations
# endpoint; used when a command asks for "all" fields and by fetch_incidents.
DEVICE_ALERT_FIELDS = {
    "alert_assignees",
    "alert_category",
    "alert_class",
    "alert_id",
    "alert_labels",
    "alert_name",
    "alert_type_name",
    "device_alert_detected_time",
    "device_alert_status",
    "device_alert_updated_time",
    "device_assignees",
    "device_category",
    "device_effective_likelihood_subscore",
    "device_effective_likelihood_subscore_points",
    "device_first_seen_list",
    "device_impact_subscore",
    "device_impact_subscore_points",
    "device_insecure_protocols",
    "device_insecure_protocols_points",
    "device_internet_communication",
    "device_ip_list",
    "device_known_vulnerabilities",
    "device_known_vulnerabilities_points",
    "device_labels",
    "device_last_seen_list",
    "device_likelihood_subscore",
    "device_likelihood_subscore_points",
    "device_mac_list",
    "device_manufacturer",
    "device_name",
    "device_network_list",
    "device_purdue_level",
    "device_retired",
    "device_risk_score",
    "device_risk_score_points",
    "device_site_name",
    "device_subcategory",
    "device_type",
    "device_uid",
}
+
# Full set of fields that can be requested from the
# device_vulnerability_relations endpoint; used when a command asks for "all".
DEVICE_VULNERABILITY_FIELDS = {
    "device_network_list",
    "device_category",
    "device_subcategory",
    "device_type",
    "device_uid",
    "device_asset_id",
    "device_mac_list",
    "device_ip_list",
    "device_type_family",
    "device_model",
    "device_os_category",
    "device_serial_number",
    "device_vlan_list",
    "device_retired",
    "device_labels",
    "device_assignees",
    "device_hw_version",
    "device_local_name",
    "device_os_name",
    "device_os_version",
    "device_os_revision",
    "device_os_subcategory",
    "device_combined_os",
    "device_endpoint_security_names",
    "device_equipment_class",
    "device_consequence_of_failure",
    "device_management_services",
    "device_ad_distinguished_name",
    "device_ad_description",
    "device_mdm_ownership",
    "device_mdm_enrollment_status",
    "device_mdm_compliance_status",
    "device_last_domain_user",
    "device_fda_class",
    "device_mobility",
    "device_purdue_level",
    "device_purdue_level_source",
    "device_dhcp_hostnames",
    "device_http_hostnames",
    "device_snmp_hostnames",
    "device_windows_hostnames",
    "device_other_hostnames",
    "device_windows_last_seen_hostname",
    "device_dhcp_last_seen_hostname",
    "device_http_last_seen_hostname",
    "device_snmp_last_seen_hostname",
    "device_ae_titles",
    "device_dhcp_fingerprint",
    "device_note",
    "device_domains",
    "device_battery_level",
    "device_internet_communication",
    "device_financial_cost",
    "device_handles_pii",
    "device_machine_type",
    "device_phi",
    "device_cmms_state",
    "device_cmms_ownership",
    "device_cmms_asset_tag",
    "device_cmms_campus",
    "device_cmms_building",
    "device_cmms_location",
    "device_cmms_floor",
    "device_cmms_department",
    "device_cmms_owning_cost_center",
    "device_cmms_asset_purchase_cost",
    "device_cmms_room",
    "device_cmms_manufacturer",
    "device_cmms_model",
    "device_cmms_serial_number",
    "device_cmms_last_pm",
    "device_cmms_technician",
    "device_edr_is_up_to_date_text",
    "device_mac_oui_list",
    "device_ip_assignment_list",
    "device_protocol_location_list",
    "device_vlan_name_list",
    "device_vlan_description_list",
    "device_connection_type_list",
    "device_ssid_list",
    "device_bssid_list",
    "device_wireless_encryption_type_list",
    "device_ap_name_list",
    "device_ap_location_list",
    "device_switch_mac_list",
    "device_switch_ip_list",
    "device_switch_name_list",
    "device_switch_port_list",
    "device_switch_location_list",
    "device_switch_port_description_list",
    "device_wlc_name_list",
    "device_wlc_location_list",
    "device_applied_acl_list",
    "device_applied_acl_type_list",
    "device_collection_servers",
    "device_edge_locations",
    "device_number_of_nics",
    "device_last_domain_user_activity",
    "device_last_scan_time",
    "device_edr_last_scan_time",
    "device_retired_since",
    "device_os_eol_date",
    "device_last_seen_list",
    "device_first_seen_list",
    "device_wifi_last_seen_list",
    "device_last_seen_on_switch_list",
    "device_is_online",
    "device_network_scope_list",
    "device_ise_authentication_method_list",
    "device_ise_endpoint_profile_list",
    "device_ise_identity_group_list",
    "device_ise_security_group_name_list",
    "device_ise_security_group_tag_list",
    "device_ise_logical_profile_list",
    "device_cppm_authentication_status_list",
    "device_cppm_roles_list",
    "device_cppm_service_list",
    "device_name",
    "device_manufacturer",
    "device_site_name",
    "device_risk_score",
    "device_risk_score_points",
    "device_effective_likelihood_subscore",
    "device_effective_likelihood_subscore_points",
    "device_likelihood_subscore",
    "device_likelihood_subscore_points",
    "device_impact_subscore",
    "device_impact_subscore_points",
    "device_known_vulnerabilities",
    "device_known_vulnerabilities_points",
    "device_insecure_protocols",
    "device_insecure_protocols_points",
    "device_suspicious",
    "device_switch_group_name_list",
    "device_managed_by",
    "device_authentication_user_list",
    "device_collection_interfaces",
    "device_slot_cards",
    "device_cmms_financial_cost",
    "device_software_or_firmware_version",
    "device_enforcement_or_authorization_profiles_list",
    "device_ise_security_group_description_list",
    "device_recommended_firewall_group_name",
    "device_recommended_zone_name",
    "vulnerability_id",
    "vulnerability_name",
    "vulnerability_type",
    "vulnerability_cve_ids",
    "vulnerability_cvss_v2_score",
    "vulnerability_cvss_v2_exploitability_subscore",
    "vulnerability_cvss_v3_score",
    "vulnerability_cvss_v3_exploitability_subscore",
    "vulnerability_adjusted_vulnerability_score",
    "vulnerability_adjusted_vulnerability_score_level",
    "vulnerability_epss_score",
    "vulnerability_sources",
    "vulnerability_description",
    "vulnerability_affected_products",
    "vulnerability_recommendations",
    "vulnerability_exploits_count",
    "vulnerability_is_known_exploited",
    "vulnerability_published_date",
    "vulnerability_labels",
    "vulnerability_assignees",
    "vulnerability_note",
    "vulnerability_last_updated",
    "vulnerability_relevance",
    "vulnerability_relevance_sources",
    "vulnerability_manufacturer_remediation_info",
    "vulnerability_manufacturer_remediation_info_source",
    "vulnerability_overall_cvss_v3_score",
    "device_vulnerability_detection_date",
    "device_vulnerability_resolution_date",
    "device_vulnerability_days_to_resolution",
    "patch_install_date",
}

# Field that orders device-alert pairs chronologically; doubles as the
# incident "occurred" time and the fetch-incidents cursor.
INCIDENT_TIMESTAMP_FIELD = "device_alert_updated_time"

# An xDome API filter clause (simple or compound), as a JSON-able dict.
QueryFilterType = Dict[str, Any]
+
+
+''' CLIENT CLASS '''
+
+
class Client(BaseClient):
    """Client class to interact with the service API.

    This Client implements API calls, and does not contain any XSOAR logic.
    Should only do requests and return data.
    It inherits from BaseClient defined in CommonServer Python.
    Most calls use _http_request() that handles proxy, SSL verification, etc.
    For this implementation, no special attributes defined.
    """

    def _force_get_all_wrapper(
        self,
        paginated_getter_func: Callable,
        items_name: str,
        fields: Collection[str],
        filter_by: Optional[QueryFilterType] = None,
        sort_by: Optional[List[Dict]] = None,
        stop_after: Optional[int] = None,
        start_from: Optional[int] = None,
    ) -> List[Dict]:
        """Page through `paginated_getter_func` and collect all of its items.

        Requests batches of MAX_REQUEST_LIMIT items; a batch that comes back
        shorter than the batch size marks the last page.

        :param paginated_getter_func: one of the get_*_relations methods below.
        :param items_name: response key holding the item list (e.g. "devices_alerts").
        :param fields: fields to request for each item.
        :param filter_by: optional filter clause, passed through to the API.
        :param sort_by: optional sort clauses, passed through to the API.
        :param stop_after: optional cap on the total number of items returned.
        :param start_from: initial offset (defaults to 0).
        :return: the accumulated items, truncated to `stop_after` when given.
        """
        offset = start_from or 0
        batch_size = MAX_REQUEST_LIMIT
        result = paginated_getter_func(
            fields=fields, filter_by=filter_by, offset=offset, limit=batch_size, sort_by=sort_by, count=True
        )
        last_fetched_items = result.get(items_name, [])
        all_items = last_fetched_items

        # A full batch implies that more pages may exist; keep paging until a
        # short batch arrives or enough items were collected.
        while (res_cnt := len(last_fetched_items)) >= batch_size and (stop_after is None or len(all_items) < stop_after):
            offset += res_cnt
            result = paginated_getter_func(
                fields=fields, filter_by=filter_by, offset=offset, limit=batch_size, sort_by=sort_by, count=True
            )
            last_fetched_items = result.get(items_name, [])
            all_items.extend(last_fetched_items)
        return all_items[:stop_after] if stop_after is not None else all_items

    def get_device_alert_relations(
        self,
        fields: Collection[str],
        filter_by: Optional[QueryFilterType] = None,
        offset: int = 0,
        limit: int = DEFAULT_REQUEST_LIMIT,
        sort_by: Optional[List[Dict]] = None,
        count: bool = False,
    ) -> Dict:
        """Request a single page of device-alert pairs.

        :param count: when True, asks the API to include the total match count.
        :return: the raw response dict (items are under "devices_alerts").
        """
        body = {"offset": offset, "limit": limit, "fields": list(fields), "include_count": count}
        if filter_by:
            body["filter_by"] = filter_by
        if sort_by:
            body["sort_by"] = sort_by
        return self._http_request("POST", url_suffix="device_alert_relations/", json_data=body)

    def get_device_vulnerability_relations(
        self,
        fields: Collection[str],
        filter_by: Optional[QueryFilterType] = None,
        offset: int = 0,
        limit: int = DEFAULT_REQUEST_LIMIT,
        sort_by: Optional[List[Dict]] = None,
        count: bool = False,
    ) -> Dict:
        """Request a single page of device-vulnerability pairs.

        :param count: when True, asks the API to include the total match count.
        :return: the raw response dict (items are under "devices_vulnerabilities").
        """
        body = {"offset": offset, "limit": limit, "fields": list(fields), "include_count": count}
        if filter_by:
            body["filter_by"] = filter_by
        if sort_by:
            body["sort_by"] = sort_by
        return self._http_request("POST", url_suffix="device_vulnerability_relations/", json_data=body)

    def force_get_all_device_vulnerability_relations(
        self,
        fields: Collection[str],
        filter_by: Optional[QueryFilterType] = None,
        sort_by: Optional[List[Dict]] = None,
        stop_after: Optional[int] = None,
        start_from: Optional[int] = None,
    ) -> List[Dict]:
        """Fetch all device-vulnerability pairs matching the filter, paginating as needed."""
        return self._force_get_all_wrapper(
            paginated_getter_func=self.get_device_vulnerability_relations,
            items_name="devices_vulnerabilities",
            fields=fields,
            filter_by=filter_by,
            sort_by=sort_by,
            stop_after=stop_after,
            start_from=start_from,
        )

    def force_get_all_device_alert_relations(
        self,
        fields: Collection[str],
        filter_by: Optional[QueryFilterType] = None,
        sort_by: Optional[List[Dict]] = None,
        stop_after: Optional[int] = None,
        start_from: Optional[int] = None,
    ) -> List[Dict]:
        """Fetch all device-alert pairs matching the filter, paginating as needed."""
        return self._force_get_all_wrapper(
            paginated_getter_func=self.get_device_alert_relations,
            items_name="devices_alerts",
            fields=fields,
            filter_by=filter_by,
            sort_by=sort_by,
            stop_after=stop_after,
            start_from=start_from,
        )

    def set_device_single_alert_relations(self, alert_id: int, device_uids: Optional[List[str]], status: str) -> Optional[Dict]:
        """Set the status of one alert, optionally restricted to the given device UIDs."""
        devices_uids_filter = _simple_filter("uid", "in", device_uids) if device_uids else None
        return self.set_device_alert_relations([alert_id], devices_uids_filter, status)

    def set_device_alert_relations(self, alert_ids: List[int], device_filter_by: Optional[Dict], status: str) -> Dict:
        """POST a status change for the given alert IDs / device filter."""
        body = {"alerts": {"alert_ids": alert_ids}, "status": status}
        if device_filter_by:
            body["devices"] = {"filter_by": device_filter_by}
        return self._http_request("POST", url_suffix="device-alert-status/set/", json_data=body)
+
+
+''' HELPER FUNCTIONS '''
+
+
+def _device_alert_relation_id(device_alert_relation: Dict) -> Tuple[int, str]:
+ return device_alert_relation["alert_id"], device_alert_relation["device_uid"]
+
+
def _device_alert_relation_id_str(device_alert_relation: Dict) -> str:
    """Serialize the device-alert pair key as '<alert_id>↔<device_uid>'."""
    alert_id = device_alert_relation["alert_id"]
    device_uid = device_alert_relation["device_uid"]
    return f"{alert_id}↔{device_uid}"
+
+
+def _split_device_alert_relation_id(device_alert_relation_id: str) -> Tuple[int, str]:
+ alert_id, device_uid = device_alert_relation_id.split("↔")
+ return int(alert_id), device_uid
+
+
+def _device_alert_relation_name(device_alert_relation: Dict) -> str:
+ return f"Alert “{device_alert_relation.get('alert_name', '')}” on Device “{device_alert_relation.get('device_name', '')}”"
+
+
def _format_date(date: Union[str, datetime], format: str = DATE_FORMAT) -> str:
    """Render `date` (a datetime, or a string dateparser understands) using `format`."""
    if isinstance(date, datetime):
        parsed = date
    else:
        parsed = dateparser.parse(date)
    assert parsed is not None
    return parsed.strftime(format)
+
+
+def _simple_filter(field: str, operation: str, value: Any):
+ return {"field": field, "operation": operation, "value": value}
+
+
def _build_alert_types_filter(alert_types: List[str]) -> QueryFilterType:
    """Filter clause matching any of the given alert type names (whitespace-trimmed)."""
    trimmed_types = [alert_type.strip() for alert_type in alert_types]
    return {"field": "alert_type_name", "operation": "in", "value": trimmed_types}
+
+
def _compound_filter(op: str, *filters: Optional[Dict]) -> Optional[QueryFilterType]:
    """Combine clauses under `op`, dropping empty ones; collapse 0/1-clause cases."""
    operands = [clause for clause in filters if clause]
    if not operands:
        return None
    if len(operands) == 1:
        return operands[0]
    return {"operation": op, "operands": operands}
+
+
def _and(*filters: Optional[Dict]) -> Optional[QueryFilterType]:
    """AND-combine clauses, dropping empty ones; collapses to None or a single clause."""
    present = [clause for clause in filters if clause]
    if len(present) > 1:
        return {"operation": "and", "operands": present}
    return present[0] if present else None
+
+
def _or(*filters: Optional[Dict]) -> Optional[QueryFilterType]:
    """OR-combine clauses, dropping empty ones; collapses to None or a single clause."""
    present = [clause for clause in filters if clause]
    if len(present) > 1:
        return {"operation": "or", "operands": present}
    return present[0] if present else None
+
+
def _device_alert_relation_to_incident(device_alert_relation: Dict[str, Any]) -> Dict[str, Any]:
    """Translate a raw device-alert pair into the XSOAR incident structure."""
    pair_id = _device_alert_relation_id_str(device_alert_relation)
    title = _device_alert_relation_name(device_alert_relation)
    occurred_at = device_alert_relation[INCIDENT_TIMESTAMP_FIELD]
    return {
        "dbotMirrorId": pair_id,
        "name": title,
        "occurred": occurred_at,
        "rawJSON": json.dumps(device_alert_relation),
    }
+
+
def _next_tick(date_time: str) -> str:
    """Return the given timestamp advanced by the smallest supported time unit (1s)."""
    parsed = dateparser.parse(date_time)
    assert parsed is not None
    bumped = parsed + SMALLEST_TIME_UNIT
    return _format_date(bumped)
+
+
+''' COMMAND FUNCTIONS '''
+
+
def test_module(client: Client) -> str:
    """Tests API connectivity and authentication.

    Performs a minimal authenticated request so the integration "Test" button
    can validate the configured URL and API key. Authorization failures are
    translated into a friendly message; any other error is re-raised.

    :type client: ``Client``
    :param client: client to use

    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    try:
        # Smallest possible request that still exercises authentication.
        client.get_device_alert_relations(fields=["device_uid", "alert_id"], limit=1)
    except DemistoException as e:
        if 'Forbidden' in str(e) or 'Authorization' in str(e):
            return 'Authorization Error: make sure API Key is correctly set'
        raise
    return 'ok'
+
+
class XDomeCommand(abc.ABC):
    """Template for the 'get relations' commands: parses the shared arguments,
    fetches the data and renders the results. Subclasses supply the field set,
    an optional constant filter, and the fetch/format steps."""

    # Name of the boolean field marking retired devices in this endpoint.
    retired_field_name: str = "retired"

    @classmethod
    @abc.abstractmethod
    def all_fields(cls) -> Set[str]:
        ...

    @classmethod
    def _constant_filter(cls) -> Optional[QueryFilterType]:
        """Extra filter a subclass always applies; None means no extra constraint."""
        return None

    @classmethod
    def exclude_retired_filter(cls):
        """Filter clause that keeps only non-retired devices."""
        return _simple_filter(cls.retired_field_name, "in", [False])

    def __init__(
        self,
        client: Client,
        fields: Optional[str],
        filter_by: Optional[str],
        offset: Optional[str],
        limit: Optional[str],
        sort_by: Optional[str],
    ):
        """Parse the raw (string) command arguments into their typed forms."""
        self._client = client
        self._raw_args = {"fields": fields, "filter_by": filter_by, "offset": offset, "limit": limit, "sort_by": sort_by}
        self._fields = self._parse_fields(fields or "all")
        self._filter_by = self._parse_filter_by(filter_by)
        self._offset = int(offset) if offset else 0
        self._limit = int(limit) if limit else DEFAULT_REQUEST_LIMIT
        self._sort_by = json.loads(sort_by) if sort_by else None

    def execute(self) -> CommandResults:
        """Run the command end to end and return the formatted results."""
        data = self._get_data()
        return self._generate_results(data)

    def _parse_fields(self, raw_fields: str) -> Collection[str]:
        """Split the comma-separated field list; 'all' expands to every known field."""
        requested = [part.strip() for part in raw_fields.split(",")]
        if "all" in requested:
            return self.all_fields()
        return requested

    def _parse_filter_by(self, raw_filter_by: Optional[str]) -> QueryFilterType:
        """Parse the user filter and AND it with the retired-exclusion and any constant filter."""
        user_filter = json.loads(raw_filter_by) if raw_filter_by else None
        combined = _and(user_filter, self.exclude_retired_filter(), self._constant_filter())
        assert combined
        return combined

    @abc.abstractmethod
    def _get_data(self) -> List:
        ...

    @abc.abstractmethod
    def _generate_results(self, raw_response: Union[List, Dict]) -> CommandResults:
        ...
+
+
class XDomeGetDeviceAlertRelationsCommand(XDomeCommand):
    """Implements `xdome-get-device-alert-relations`: lists device-alert pairs."""

    retired_field_name: str = "device_retired"

    @classmethod
    def all_fields(cls) -> Set[str]:
        return DEVICE_ALERT_FIELDS

    def _get_data(self) -> List:
        # Only honor limit/offset when the caller passed them explicitly;
        # otherwise fetch everything from the beginning.
        explicit_limit = self._raw_args.get("limit") is not None
        explicit_offset = self._raw_args.get("offset") is not None
        return self._client.force_get_all_device_alert_relations(
            fields=self._fields,
            filter_by=self._filter_by,
            sort_by=self._sort_by,
            stop_after=self._limit if explicit_limit else None,
            start_from=self._offset if explicit_offset else None,
        )

    def _generate_results(self, raw_response: Union[List, Dict]) -> CommandResults:
        pairs = raw_response
        context_key = "XDome.DeviceAlert(val.device_uid == obj.device_uid && val.alert_id == obj.alert_id)"
        return CommandResults(
            outputs_prefix='XDome.DeviceAlert',
            outputs={context_key: pairs},
            readable_output=tableToMarkdown("xDome device-alert-pairs List", pairs),
            raw_response=raw_response,
        )
+
+
class XDomeGetDeviceVulnerabilityRelationsCommand(XDomeCommand):
    """Implements `xdome-get-device-vulnerability-relations`: lists device-vulnerability pairs."""

    retired_field_name: str = "device_retired"

    @classmethod
    def all_fields(cls) -> Set[str]:
        return DEVICE_VULNERABILITY_FIELDS

    @classmethod
    def _constant_filter(cls) -> Optional[QueryFilterType]:
        # This command always restricts results to relevant vulnerabilities.
        return _simple_filter("vulnerability_relevance", "in", ["Confirmed", "Potentially Relevant"])

    def _get_data(self) -> List:
        # Only honor limit/offset when the caller passed them explicitly;
        # otherwise fetch everything from the beginning.
        explicit_limit = self._raw_args.get("limit") is not None
        explicit_offset = self._raw_args.get("offset") is not None
        return self._client.force_get_all_device_vulnerability_relations(
            fields=self._fields,
            filter_by=self._filter_by,
            sort_by=self._sort_by,
            stop_after=self._limit if explicit_limit else None,
            start_from=self._offset if explicit_offset else None,
        )

    def _generate_results(self, raw_response: Union[List, Dict]) -> CommandResults:
        pairs = raw_response
        context_key = (
            "XDome.DeviceVulnerability(val.device_uid == obj.device_uid "
            "&& val.vulnerability_id == obj.vulnerability_id)"
        )
        return CommandResults(
            outputs_prefix="XDome.DeviceVulnerability",
            outputs={context_key: pairs},
            readable_output=tableToMarkdown('xDome device-vulnerability-pairs List', pairs),
            raw_response=raw_response,
        )
+
+
def get_device_alert_relations_command(client: Client, args: Dict) -> CommandResults:
    """Entry point for `xdome-get-device-alert-relations`."""
    command = XDomeGetDeviceAlertRelationsCommand(
        client=client,
        fields=args.get("fields"),
        filter_by=args.get("filter_by"),
        offset=args.get("offset"),
        limit=args.get("limit"),
        sort_by=args.get("sort_by"),
    )
    return command.execute()
+
+
def get_device_vulnerability_relations_command(client: Client, args: dict) -> CommandResults:
    """Entry point for `xdome-get-device-vulnerability-relations`."""
    return XDomeGetDeviceVulnerabilityRelationsCommand(
        client=client,
        fields=args.get("fields"),
        filter_by=args.get("filter_by"),
        offset=args.get("offset"),
        limit=args.get("limit"),
        sort_by=args.get("sort_by"),
    ).execute()
+
+
def set_device_alert_relations_command(client: Client, args: dict) -> CommandResults:
    """Entry point for `xdome-set-status-for-device-alert-relations`.

    Sets the status of a single alert, optionally restricted to the device
    UIDs given as a comma-separated `device_uids` argument.

    :param client: the API client.
    :param args: command arguments (`alert_id`, `status`, optional `device_uids`).
    :return: the API's message when one is returned, otherwise "success".
    """
    alert_id = int(args["alert_id"])
    device_uids = args.get("device_uids")
    if device_uids:
        device_uids = [field.strip() for field in device_uids.split(",")]
    status = args["status"]

    res = client.set_device_single_alert_relations(alert_id, device_uids, status)
    # Guard against "details" being present but empty — indexing [0] on an
    # empty list would raise IndexError.
    if res and res.get("details"):
        return CommandResults(readable_output=res["details"][0].get("msg"), raw_response=res)
    return CommandResults(readable_output="success", raw_response="success")
+
+
def fetch_incidents(
    client: Client,
    last_run: Dict,
    initial_fetch_time: str,
    fetch_limit: int,
    alert_types: Optional[List[str]],
    fetch_only_unresolved: bool,
):
    """This function will execute each interval (default is 1 minute).

    Pulls device-alert pairs updated since the last run and converts them to
    XSOAR incidents, using `last_run` as a cursor.

    :param client: the API client.
    :param last_run: persisted state: {"last_fetch": <time>, "latest_ids": [<pair-id strings>]}.
    :param initial_fetch_time: where to start on the very first run (e.g. "7 days").
    :param fetch_limit: maximum number of incidents to return in this cycle.
    :param alert_types: optional whitelist of alert type names.
    :param fetch_only_unresolved: when True, fetch only "Unresolved" pairs.
    :return: a (next_run, incidents) tuple.
    """
    start_time = last_run.get("last_fetch", initial_fetch_time)
    start_time = _format_date(start_time)
    # Pairs fetched last cycle whose update time equals the cursor — used to
    # avoid re-creating incidents for them in this cycle.
    latest_ids = last_run.get("latest_ids", [])

    only_unresolved_filter = (
        _simple_filter("device_alert_status", "in", ["Unresolved"]) if fetch_only_unresolved else None
    )
    alert_types_filter = _build_alert_types_filter(alert_types) if alert_types else None
    if latest_ids:
        last_run_alert_id_device_uid_pairs = [_split_device_alert_relation_id(dar_id) for dar_id in latest_ids]
        # Keep a pair only if it differs from every previously-seen
        # (alert_id, device_uid) combination.
        not_in_last_fetched_ids_filter = _and(*(
            _or(
                _simple_filter("alert_id", "not_in", [alert_id]),
                _simple_filter("device_uid", "not_in", [device_uid]),
            )
            for alert_id, device_uid in last_run_alert_id_device_uid_pairs
        ))
        # should be the 'not_equals' or the 'greater' operation, but they're currently not working.
        # not_last_fetched_time_filter = _simple_filter(INCIDENT_TIMESTAMP_FIELD, "not_equals", start_time)
        # patch: use the 'greater_or_equal' operation on value 'Time + 1s'
        not_last_fetched_time_filter = _simple_filter(
            field=INCIDENT_TIMESTAMP_FIELD,
            operation="greater_or_equal",
            value=_next_tick(start_time),
        )
        no_last_run_dups_filter = _or(not_in_last_fetched_ids_filter, not_last_fetched_time_filter)
    else:
        no_last_run_dups_filter = None

    start_time_filter = _simple_filter(INCIDENT_TIMESTAMP_FIELD, "greater_or_equal", start_time)
    sort_by_update_time = [{"field": INCIDENT_TIMESTAMP_FIELD, "order": "asc"}]

    try:
        device_alert_relations = client.force_get_all_device_alert_relations(
            fields=DEVICE_ALERT_FIELDS,
            filter_by=_and(
                XDomeGetDeviceAlertRelationsCommand.exclude_retired_filter(),
                only_unresolved_filter,
                alert_types_filter,
                no_last_run_dups_filter,
                start_time_filter,
            ),
            sort_by=sort_by_update_time,
            stop_after=fetch_limit,
        )
    except DemistoException as e:
        # A failed fetch must not crash the instance: keep the previous state
        # and let the next cycle retry.
        demisto.error(f"An error occurred while fetching xDome incidents:\n{str(e)}")
        return last_run, []

    # Normalize timestamps so the cursor comparison below uses one format.
    for dar in device_alert_relations:
        dar[INCIDENT_TIMESTAMP_FIELD] = _format_date(dar[INCIDENT_TIMESTAMP_FIELD])

    incidents = [_device_alert_relation_to_incident(dar) for dar in device_alert_relations]

    if incidents:
        # Advance the cursor to the newest update time seen, and remember every
        # pair sharing it (they may match the next cycle's query window again).
        next_start_time = device_alert_relations[-1][INCIDENT_TIMESTAMP_FIELD]
        next_latest_ids = [
            _device_alert_relation_id_str(dar) for dar in device_alert_relations
            if dar[INCIDENT_TIMESTAMP_FIELD] == next_start_time
        ]
    else:
        # Nothing new: move the cursor one second forward so the already-scanned
        # instant is excluded next cycle.
        next_start_time = _next_tick(start_time)
        next_latest_ids = []

    next_run = {"last_fetch": next_start_time, "latest_ids": next_latest_ids}
    return next_run, incidents
+
+
+''' MAIN FUNCTION '''
+
+
def main() -> None:
    """main function, parses params and runs command functions

    Reads the integration parameters, builds the API client and dispatches the
    invoked command. All errors are surfaced via return_error.
    """
    command = demisto.command()
    params = demisto.params()
    args = demisto.args()

    api_key = params.get('credentials', {}).get('password')

    # get the service API url
    base_url = urljoin(params['url'], '/api/v1')

    verify_certificate = not params.get('insecure', False)
    proxy = params.get('proxy', False)

    demisto.debug(f'Command being called is {command}')
    try:
        headers: Dict = {"Authorization": f"Bearer {api_key}"}

        client = Client(
            base_url=base_url,
            verify=verify_certificate,
            headers=headers,
            proxy=proxy,
        )

        if command == 'test-module':
            # This is the call made when pressing the integration Test button.
            result = test_module(client)
            return_results(result)

        elif command == 'xdome-get-device-alert-relations':
            return_results(get_device_alert_relations_command(client, args))

        elif command == 'xdome-get-device-vulnerability-relations':
            return_results(get_device_vulnerability_relations_command(client, args))

        elif command == 'xdome-set-status-for-device-alert-relations':
            return_results(set_device_alert_relations_command(client, args))

        elif command == 'fetch-incidents':
            # 'first_fetch' may be missing or empty — fall back to the documented
            # default instead of crashing on None.strip().
            initial_fetch_time = (params.get('first_fetch') or '7 days').strip()
            fetch_limit = params.get('max_fetch')
            # an empty string (cleared UI field) must also fall back to the default
            fetch_limit = int(fetch_limit) if fetch_limit else DEFAULT_FETCH_LIMIT
            fetch_limit = min(fetch_limit, MAX_FETCH_LIMIT)
            alert_types = params.get('alert_types')
            fetch_only_unresolved = params.get('fetch_only_unresolved')
            next_run, incidents = fetch_incidents(
                client=client,
                last_run=demisto.getLastRun(),
                initial_fetch_time=initial_fetch_time,
                fetch_limit=fetch_limit,
                alert_types=alert_types,
                fetch_only_unresolved=fetch_only_unresolved,
            )

            demisto.setLastRun(next_run)
            demisto.incidents(incidents or [])

        else:
            raise Exception('Unrecognized command: ' + command)

    # Log exceptions and return errors
    except Exception as e:
        return_error(f'Failed to execute {command} command.\nError:\n{str(e)}')
+
+
+''' ENTRY POINT '''
+
+
# XSOAR may execute the integration under '__main__', '__builtin__' (py2) or
# 'builtins' (py3), so all three module names are treated as the entry point.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
diff --git a/Packs/ClarotyXDome/Integrations/XDome/XDome.yml b/Packs/ClarotyXDome/Integrations/XDome/XDome.yml
new file mode 100644
index 000000000000..902602e3325c
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/XDome.yml
@@ -0,0 +1,710 @@
+category: Network Security
+commonfields:
+ id: XDome
+ version: -1
+configuration:
+- display: Fetch incidents
+ name: isFetch
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ type: 13
+ required: false
+- defaultvalue: https://api.claroty.com/
+ display: XDome API Base URL
+ name: url
+ required: true
+ type: 0
+- displaypassword: API Token
+ additionalinfo: The API token to use for connection
+ name: credentials
+ required: true
+ hiddenusername: true
+ type: 9
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+- defaultvalue: 7 days
+ display: Initial Incidents Fetch Time
+ name: first_fetch
+ required: false
+ type: 0
+- display: Maximum number of incidents per fetch
+ name: max_fetch
+ defaultvalue: '50000'
+ type: 0
+ required: false
+- display: Fetch Only Unresolved Device-Alert Pairs
+ name: fetch_only_unresolved
+ type: 8
+ required: false
+ defaultvalue: 'true'
+- display: Alert Types Selection
+ name: alert_types
+ type: 16
+ additionalinfo: If no alert types are selected, all types will be fetched
+ options:
+ - Malicious Internet Communication
+ - Outdated Firmware
+ - Functionality Recall
+ - Weak/Default Password
+ - Attempted Malicious Internet Communication
+ - Multiple Failed Login Attempts
+ - External Plaintext Credentials Transmission
+ - External Unencrypted PHI Transmission
+ - Guest VLAN Segmentation Violation
+ - Corporate VLAN Segmentation Violation
+ - Industrial VLAN Segmentation Violation
+ - Out-of-Policy Communication
+ - Expired TLS Certificate
+ - SMBv1 Communication
+ - Executable File Transfer
+ - Suspicious Device Behavior
+ - Infected Device
+ - Custom Device Alert
+ - Custom Communication Alert
+ - Network Threat Signature
+ - Device End-of-Life
+ - Offline Status Change
+ - OT Activity
+ - Location Change
+ - Version Change
+ - Risk Change
+ - Network Change
+ - Utilization Change
+ - Retention Change
+ - Enforcement Change
+ required: false
+description: Use the xDome integration to manage assets and alerts.
+display: xDome
+name: XDome
+script:
+ commands:
+ - name: xdome-get-device-alert-relations
+ description: Gets all device-alert pairs from xDome. You can apply a query-filter.
+ arguments:
+ - name: fields
+ description: Fields to return.
+ required: false
+ defaultValue: all
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - all
+ - alert_assignees
+ - alert_category
+ - alert_class
+ - alert_id
+ - alert_labels
+ - alert_name
+ - alert_type_name
+ - device_alert_detected_time
+ - device_alert_status
+ - device_alert_updated_time
+ - device_assignees
+ - device_category
+ - device_effective_likelihood_subscore
+ - device_effective_likelihood_subscore_points
+ - device_first_seen_list
+ - device_impact_subscore
+ - device_impact_subscore_points
+ - device_insecure_protocols
+ - device_insecure_protocols_points
+ - device_internet_communication
+ - device_ip_list
+ - device_known_vulnerabilities
+ - device_known_vulnerabilities_points
+ - device_labels
+ - device_last_seen_list
+ - device_likelihood_subscore
+ - device_likelihood_subscore_points
+ - device_mac_list
+ - device_manufacturer
+ - device_name
+ - device_network_list
+ - device_purdue_level
+ - device_retired
+ - device_risk_score
+ - device_risk_score_points
+ - device_site_name
+ - device_subcategory
+ - device_type
+ - device_uid
+ - name: filter_by
+ description: A filter_by object, refer to the xDome API documentation.
+ required: false
+ isArray: false
+ - name: offset
+ description: An offset in the data. This can be used to fetch all data in a paginated manner, by e.g requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc.
+ required: false
+ isArray: false
+ - name: limit
+ description: Maximum amount of items to fetch.
+ required: false
+ isArray: false
+ - name: sort_by
+ description: 'Default: [{"field":"device_uid","order":"asc"},{"field":"alert_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses.'
+ required: false
+ isArray: false
+ outputs:
+ - contextPath: XDome.DeviceAlert.alert_id
+ description: Platform unique Alert ID.
+ type: Number
+ - contextPath: XDome.DeviceAlert.alert_name
+ description: 'The alert name, such as “Malicious Internet Communication: 62.172.138.35”.'
+ type: String
+ - contextPath: XDome.DeviceAlert.alert_type_name
+ description: An alert type such as "Outdated Firmware".
+ type: String
+ - contextPath: XDome.DeviceAlert.alert_class
+ description: The alert class, such as “Pre-Defined Alerts” and “Custom Alerts”.
+ type: String
+ - contextPath: XDome.DeviceAlert.alert_category
+ description: Alert category such as "Risk" or "Segmentation".
+ type: String
+ - contextPath: XDome.DeviceAlert.alert_labels
+ description: The labels added to the alert manually or automatically.
+ type: String
+ - contextPath: XDome.DeviceAlert.alert_assignees
+ description: The users and or groups the alert is assigned to.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_alert_detected_time
+ description: Date and time when the Alert was first detected.
+ type: Date
+ - contextPath: XDome.DeviceAlert.device_alert_updated_time
+ description: Date and time of last Alert update.
+ type: Date
+ - contextPath: XDome.DeviceAlert.device_alert_status
+ description: Device-Alert relation status (Resolved or Unresolved).
+ type: String
+ - contextPath: XDome.DeviceAlert.device_uid
+ description: A universal unique identifier (UUID) for the device.
+ type: UUID
+ - contextPath: XDome.DeviceAlert.device_name
+ description: The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_ip_list
+ description: IP address associated with the device. IPs may be suffixed by a / (annotation), where annotation may be a child device ID or (Last Known IP).
+ type: List
+ - contextPath: XDome.DeviceAlert.device_mac_list
+ description: MAC address associated with the device.
+ type: List
+ - contextPath: XDome.DeviceAlert.device_network_list
+ description: The network types, "Corporate" and or "Guest", that the device belongs to.
+ type: List
+ - contextPath: XDome.DeviceAlert.device_category
+ description: The device category group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceAlert.device_subcategory
+ description: The device sub-category group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceAlert.device_type
+ description: The device type group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceAlert.device_assignees
+ description: The users and/or groups the device is assigned to.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_labels
+ description: The labels added to the device manually or automatically.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_retired
+ description: A boolean field indicating if the device is retired or not.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_purdue_level
+ description: The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System (ICS). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_site_name
+ description: The name of the site within the organization the device is associated with.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_first_seen_list
+ description: The date and time a device's NIC was first seen.
+ type: List
+ - contextPath: XDome.DeviceAlert.device_last_seen_list
+ description: The date and time a device's NIC was last seen.
+ type: List
+ - contextPath: XDome.DeviceAlert.device_risk_score
+ description: The calculated risk level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_risk_score_points
+ description: The calculated risk points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_effective_likelihood_subscore
+ description: The calculated effective likelihood subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_effective_likelihood_subscore_points
+ description: The calculated effective likelihood subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_likelihood_subscore
+ description: The calculated likelihood subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_likelihood_subscore_points
+ description: The calculated likelihood subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_impact_subscore
+ description: The calculated impact subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_impact_subscore_points
+ description: The calculated impact subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_insecure_protocols
+ description: The calculated level of the device’s ‘insecure protocols’ likelihood factor, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_insecure_protocols_points
+ description: The calculated points for ‘insecure protocols’ likelihood factor of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_internet_communication
+ description: The manner of the device's communication over the internet.
+ type: String
+ - contextPath: XDome.DeviceAlert.device_known_vulnerabilities
+ description: The calculated level of the device’s ‘known vulnerabilities’ likelihood factor, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceAlert.device_known_vulnerabilities_points
+ description: The calculated points for ‘known vulnerabilities’ likelihood factor of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceAlert.device_manufacturer
+ description: Manufacturer of the device, such as "Alaris".
+ type: String
+ - name: xdome-set-status-for-device-alert-relations
+ description: Set device-alert status to resolved or unresolved.
+ arguments:
+ - name: alert_id
+ description: Alert ID, as indicated in the id field of an alert.
+ required: true
+ isArray: false
+ - name: device_uids
+ description: Device UUIDs, as indicated in the uid field of a device.
+ required: false
+ isArray: true
+ - name: status
+ description: Set the device-alert status to resolved or unresolved.
+ required: true
+ isArray: false
+ predefined:
+ - resolved
+ - unresolved
+ auto: PREDEFINED
+ - name: xdome-get-device-vulnerability-relations
+ description: Get details of devices with their related vulnerabilities from the database. The data returned by this endpoint for each device corresponds to the vulnerabilities table in the single device page.
+ arguments:
+ - name: fields
+ description: Fields to return.
+ required: false
+ defaultValue: all
+ isArray: true
+ auto: PREDEFINED
+ predefined:
+ - all
+ - device_network_list
+ - device_category
+ - device_subcategory
+ - device_type
+ - device_uid
+ - device_asset_id
+ - device_mac_list
+ - device_ip_list
+ - device_type_family
+ - device_model
+ - device_os_category
+ - device_serial_number
+ - device_vlan_list
+ - device_retired
+ - device_labels
+ - device_assignees
+ - device_hw_version
+ - device_local_name
+ - device_os_name
+ - device_os_version
+ - device_os_revision
+ - device_os_subcategory
+ - device_combined_os
+ - device_endpoint_security_names
+ - device_equipment_class
+ - device_consequence_of_failure
+ - device_management_services
+ - device_ad_distinguished_name
+ - device_ad_description
+ - device_mdm_ownership
+ - device_mdm_enrollment_status
+ - device_mdm_compliance_status
+ - device_last_domain_user
+ - device_fda_class
+ - device_mobility
+ - device_purdue_level
+ - device_purdue_level_source
+ - device_dhcp_hostnames
+ - device_http_hostnames
+ - device_snmp_hostnames
+ - device_windows_hostnames
+ - device_other_hostnames
+ - device_windows_last_seen_hostname
+ - device_dhcp_last_seen_hostname
+ - device_http_last_seen_hostname
+ - device_snmp_last_seen_hostname
+ - device_ae_titles
+ - device_dhcp_fingerprint
+ - device_note
+ - device_domains
+ - device_battery_level
+ - device_internet_communication
+ - device_financial_cost
+ - device_handles_pii
+ - device_machine_type
+ - device_phi
+ - device_cmms_state
+ - device_cmms_ownership
+ - device_cmms_asset_tag
+ - device_cmms_campus
+ - device_cmms_building
+ - device_cmms_location
+ - device_cmms_floor
+ - device_cmms_department
+ - device_cmms_owning_cost_center
+ - device_cmms_asset_purchase_cost
+ - device_cmms_room
+ - device_cmms_manufacturer
+ - device_cmms_model
+ - device_cmms_serial_number
+ - device_cmms_last_pm
+ - device_cmms_technician
+ - device_edr_is_up_to_date_text
+ - device_mac_oui_list
+ - device_ip_assignment_list
+ - device_protocol_location_list
+ - device_vlan_name_list
+ - device_vlan_description_list
+ - device_connection_type_list
+ - device_ssid_list
+ - device_bssid_list
+ - device_wireless_encryption_type_list
+ - device_ap_name_list
+ - device_ap_location_list
+ - device_switch_mac_list
+ - device_switch_ip_list
+ - device_switch_name_list
+ - device_switch_port_list
+ - device_switch_location_list
+ - device_switch_port_description_list
+ - device_wlc_name_list
+ - device_wlc_location_list
+ - device_applied_acl_list
+ - device_applied_acl_type_list
+ - device_collection_servers
+ - device_edge_locations
+ - device_number_of_nics
+ - device_last_domain_user_activity
+ - device_last_scan_time
+ - device_edr_last_scan_time
+ - device_retired_since
+ - device_os_eol_date
+ - device_last_seen_list
+ - device_first_seen_list
+ - device_wifi_last_seen_list
+ - device_last_seen_on_switch_list
+ - device_is_online
+ - device_network_scope_list
+ - device_ise_authentication_method_list
+ - device_ise_endpoint_profile_list
+ - device_ise_identity_group_list
+ - device_ise_security_group_name_list
+ - device_ise_security_group_tag_list
+ - device_ise_logical_profile_list
+ - device_cppm_authentication_status_list
+ - device_cppm_roles_list
+ - device_cppm_service_list
+ - device_name
+ - device_manufacturer
+ - device_site_name
+ - device_risk_score
+ - device_risk_score_points
+ - device_effective_likelihood_subscore
+ - device_effective_likelihood_subscore_points
+ - device_likelihood_subscore
+ - device_likelihood_subscore_points
+ - device_impact_subscore
+ - device_impact_subscore_points
+ - device_known_vulnerabilities
+ - device_known_vulnerabilities_points
+ - device_insecure_protocols
+ - device_insecure_protocols_points
+ - device_suspicious
+ - device_switch_group_name_list
+ - device_managed_by
+ - device_authentication_user_list
+ - device_collection_interfaces
+ - device_slot_cards
+ - device_cmms_financial_cost
+ - device_software_or_firmware_version
+ - device_enforcement_or_authorization_profiles_list
+ - device_ise_security_group_description_list
+ - device_recommended_firewall_group_name
+ - device_recommended_zone_name
+ - vulnerability_id
+ - vulnerability_name
+ - vulnerability_type
+ - vulnerability_cve_ids
+ - vulnerability_cvss_v2_score
+ - vulnerability_cvss_v2_exploitability_subscore
+ - vulnerability_cvss_v3_score
+ - vulnerability_cvss_v3_exploitability_subscore
+ - vulnerability_adjusted_vulnerability_score
+ - vulnerability_adjusted_vulnerability_score_level
+ - vulnerability_epss_score
+ - vulnerability_sources
+ - vulnerability_description
+ - vulnerability_affected_products
+ - vulnerability_recommendations
+ - vulnerability_exploits_count
+ - vulnerability_is_known_exploited
+ - vulnerability_published_date
+ - vulnerability_labels
+ - vulnerability_assignees
+ - vulnerability_note
+ - vulnerability_last_updated
+ - vulnerability_relevance
+ - vulnerability_relevance_sources
+ - vulnerability_manufacturer_remediation_info
+ - vulnerability_manufacturer_remediation_info_source
+ - vulnerability_overall_cvss_v3_score
+ - device_vulnerability_detection_date
+ - device_vulnerability_resolution_date
+ - device_vulnerability_days_to_resolution
+ - patch_install_date
+ - name: filter_by
+ description: 'A filter_by object, refer to the xDome API documentation. Input as a string and do not forget to escape quotes (\").'
+ required: false
+ isArray: false
+ - name: sort_by
+ description: 'Default: [{"field":"device_uid","order":"asc"}, {"field":"vulnerability_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses.'
+ required: false
+ isArray: false
+ - name: offset
+ description: An offset in the data. This can be used to fetch all data in a paginated manner, e.g. by requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc.
+ required: false
+ isArray: false
+ - name: limit
+ description: Maximum amount of items to fetch.
+ required: false
+ isArray: false
+ outputs:
+ - contextPath: XDome.DeviceVulnerability.vulnerability_name
+ description: Name designated by Claroty's Research team, based on the advisory name or CVE ID.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.vulnerability_type
+ description: Type such as "Application", "Clinical", "IoT" or "Platform".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.vulnerability_cve_ids
+ description: Relevant Common Vulnerability Exploits for the selected vulnerability.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.vulnerability_cvss_v3_score
+ description: Common Vulnerability Scoring System Version 3 score (0-10). In case of multiple CVEs, the highest Subscore is displayed.
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score
+ description: The Adjusted Vulnerability Score represents the vulnerability score based on its impact and exploitability.
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score_level
+ description: The calculated Adjusted vulnerability Score (AVS) level of a vulnerability, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.vulnerability_epss_score
+ description: A probability score between 0 and 1 indicating the likelihood of a vulnerability to be exploited in the wild, based on the Exploit Prediction Scoring System (EPSS) model.
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.vulnerability_description
+ description: Details about the vulnerability.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.vulnerability_exploits_count
+ description: An aggregated numeric field of the number of known exploits based on ExploitDB.
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.vulnerability_is_known_exploited
+ description: A boolean field indicating whether a vulnerability is currently exploited in-the-wild, based on the CISA Catalog of Known Exploited Vulnerabilities.
+ type: Boolean
+ - contextPath: XDome.DeviceVulnerability.vulnerability_published_date
+ description: The date and time the vulnerability was released.
+ type: Date
+ - contextPath: XDome.DeviceVulnerability.vulnerability_relevance
+ description: The device vulnerability relevance reflects the confidence level of the detection process, corresponding to several components, such as the vulnerability type.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_vulnerability_detection_date
+ description: The date when the vulnerability was initially detected on the device. A vulnerability is considered detected once marked as “confirmed” or “potentially relevant” for the respective device.
+ type: Date
+ - contextPath: XDome.DeviceVulnerability.device_network_list
+ description: The network types, "Corporate" and/or "Guest", that the device belongs to.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_category
+ description: The device category group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_subcategory
+ description: The device sub-category group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_type
+ description: The device type group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_uid
+ description: A universal unique identifier (UUID) for the device.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_asset_id
+ description: Asset ID.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_mac_list
+ description: MAC address associated with the device.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_ip_list
+ description: IP address associated with the device. IPs may be suffixed by a / (annotation), where annotation may be a child device ID or (Last Known IP).
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_type_family
+ description: The device type family group (see "About Device Categorization" in the Knowledge Base).
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_model
+ description: The device's model.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_os_category
+ description: The device's OS category, such as "Windows", "Linux" or "Other".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_serial_number
+ description: The device's serial number.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_vlan_list
+ description: The virtual LAN to which the device belongs.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_labels
+ description: The labels added to the device manually or automatically.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_assignees
+ description: The users and/or groups the device is assigned to.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_hw_version
+ description: The hardware version of the device.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_local_name
+ description: Similar to hostname, the device name identifier is extracted from protocol traffic.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_combined_os
+ description: The aggregated value of OS name, version and revision, such as "Windows XP SP3".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_endpoint_security_names
+ description: The names of endpoint security applications installed on the device.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_equipment_class
+ description: Determines the equipment class of the device, according to The Joint Commission (TJC).
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_management_services
+ description: Defines whether the device is managed by Active Directory, Mobile Device Management, or neither.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_purdue_level
+ description: The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System (ICS). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_http_last_seen_hostname
+ description: The most recent unique hostname identifier of the device, extracted from HTTP protocol traffic.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_snmp_last_seen_hostname
+ description: The most recent unique hostname identifier of the device, extracted from SNMP protocol traffic.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_note
+ description: The notes added to the device.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_domains
+ description: The domain name of the network that the device belongs to.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_internet_communication
+ description: The manner of the device's communication over the internet.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_edr_is_up_to_date_text
+ description: Determines whether the endpoint security application installed on the device is up-to-date.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_mac_oui_list
+ description: The vendor of the device's NIC, according to the OUI (Organizational Unique Identifier) in the MAC address.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_ip_assignment_list
+ description: The device's IP assignment method, extracted from DHCP protocol traffic, such as "DHCP", "DHCP (Static Lease)", or "Static".
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_vlan_name_list
+ description: The name of the VLAN, extracted from switch configurations.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_vlan_description_list
+ description: The description of the VLAN, extracted from switch configurations.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_connection_type_list
+ description: The connection types of a device, such as "Ethernet".
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_ssid_list
+ description: The name of the wireless network the device is connected to, such as "Guest".
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_ap_location_list
+ description: The location of the access point the device is connected to, extracted from Network Management integrations.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_switch_port_list
+ description: The port identifier of the switch the device is connected to.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_switch_location_list
+ description: The location of the switch the device is connected to.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_number_of_nics
+ description: The number of network interface cards seen on the network.
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.device_last_seen_list
+ description: The date and time a device's NIC was last seen.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_first_seen_list
+ description: The date and time a device's NIC was first seen.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_is_online
+ description: A boolean field indicating whether the device is online or not.
+ type: Boolean
+ - contextPath: XDome.DeviceVulnerability.device_network_scope_list
+ description: The device's Network Scope - used to differentiate between internal networks that share the same IP subnets.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_name
+ description: The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_manufacturer
+ description: Manufacturer of the device, such as "Alaris".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_site_name
+ description: The name of the site within the healthcare organization the device is associated with.
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_risk_score
+ description: The calculated risk level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_risk_score_points
+ description: The calculated risk points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.device_effective_likelihood_subscore
+ description: The calculated effective likelihood subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_effective_likelihood_subscore_points
+ description: The calculated effective likelihood subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.device_likelihood_subscore
+ description: The calculated likelihood subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_likelihood_subscore_points
+ description: The calculated likelihood subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.device_impact_subscore
+ description: The calculated impact subscore level of a device, such as "Critical", or "High".
+ type: String
+ - contextPath: XDome.DeviceVulnerability.device_impact_subscore_points
+ description: The calculated impact subscore points of a device, such as "54.1".
+ type: Number
+ - contextPath: XDome.DeviceVulnerability.device_suspicious
+ description: The reasons for which the device was marked as suspicious.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_authentication_user_list
+ description: The User name used to authenticate the device to the network using Radius/802.1x is extracted from the NAC integration and the traffic.
+ type: List
+ - contextPath: XDome.DeviceVulnerability.device_software_or_firmware_version
+ description: The application version running on the device.
+ type: String
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.92207
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/ClarotyXDome/Integrations/XDome/XDome_description.md b/Packs/ClarotyXDome/Integrations/XDome/XDome_description.md
new file mode 100644
index 000000000000..537d80242ce3
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/XDome_description.md
@@ -0,0 +1,20 @@
+In the XDome Instance Settings dialog box, do the following:
+* **Name**: Enter a meaningful name for the integration.
+* **Fetch incidents**: Choose this option to use the pull-based integration from Cortex XSOAR.
+* **Classifier and Mapper**:
+ * In the Classifier drop-down list, select xDome - Classifier.
+ * In the Mapper drop-down list, select xDome - Incoming Mapper.
+ * **XDome public API base URL**: Enter your Claroty base URL. For additional information, please refer to the Integration Guide inside the Claroty Dashboard.
+* **API Token**: Enter your API token (generated in the xDome dashboard)
+* **The initial time to fetch from**: Define the initial starting time to fetch incidents. For example, if you enter 7 days, the incidents from the previous seven days will be fetched.
+* **Maximum number of incidents per fetch**: Limit the maximum number of incidents to fetch per fetching-interval
+* **Fetch only unresolved Device-Alert Pairs**: Choose this option to only fetch unresolved device-alert pairs.
+* **Alert Types Selection**: Select the required alert types.
+* **Incidents Fetch Interval**: Define how often the incidents should be fetched. For example, if you enter 5 minutes, the incidents will be fetched every five minutes.
+* **Log Level**: Choose a log level from the drop-down list.
+* **Single engine**: Select No engine from the drop-down list.
+
+Click Test. Go to the Test results tab to view the results. If the test was successful, click Save & exit.
+
+
+
diff --git a/Packs/ClarotyXDome/Integrations/XDome/XDome_image.png b/Packs/ClarotyXDome/Integrations/XDome/XDome_image.png
new file mode 100644
index 000000000000..cfbe927a3f57
Binary files /dev/null and b/Packs/ClarotyXDome/Integrations/XDome/XDome_image.png differ
diff --git a/Packs/ClarotyXDome/Integrations/XDome/XDome_test.py b/Packs/ClarotyXDome/Integrations/XDome/XDome_test.py
new file mode 100644
index 000000000000..ba78285d3bd5
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/XDome_test.py
@@ -0,0 +1,370 @@
+import json
+import dateparser
+import pytest
+import demistomock as demisto
+from XDome import (
+ Client, _split_device_alert_relation_id, _format_date, _build_alert_types_filter, _or, _simple_filter, _next_tick
+)
+
+INTEGRATION_PARAMS = {
+ "url": "https://not.really.api.claroty.com",
+ "credentials": {"password": "some_api_key"},
+ "initial_fetch_time": "7 days",
+ "alert_types": None,
+ "fetch_only_unresolved": True,
+}
+
+
+@pytest.fixture(autouse=True)
+def set_mocks(mocker):
+ mocker.patch.object(demisto, 'params', return_value=INTEGRATION_PARAMS)
+
+
+def util_load_json(path):
+ with open(path, encoding='utf-8') as f:
+ return json.loads(f.read())
+
+
+DEVICE_ALERT_ERROR_RESPONSE = {
+ "detail": [
+ {
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+ }
+ ]
+}
+
+DEVICE_ALERT_SUCCESS_RESPONSE = {
+ "devices_alerts": [
+ {
+ "alert_assignees": [],
+ "alert_category": "Risk",
+ "alert_class": "predefined",
+ "alert_id": 2,
+ "alert_labels": [
+ "Top Priority"
+ ],
+ "alert_type_name": "Outdated Firmware",
+ "device_alert_detected_time": "2023-10-19T16:21:01+00:00",
+ "device_alert_status": "Unresolved",
+ "device_alert_updated_time": "2023-10-19T16:21:01+00:00",
+ "device_assignees": [
+ "Admin"
+ ],
+ "device_category": "Medical",
+ "device_first_seen_list": [
+ "2023-10-19T16:32:04.127979+00:00"
+ ],
+ "device_ip_list": [
+ "1.1.1.1"
+ ],
+ "device_labels": [],
+ "device_last_seen_list": [
+ "2023-10-19T16:32:01+00:00"
+ ],
+ "device_mac_list": [
+ "1a:2b:3c:d4:e5:f6"
+ ],
+ "device_network_list": [
+ "Corporate"
+ ],
+ "device_purdue_level": "Level 4",
+ "device_retired": False,
+ "device_risk_score": "Very Low",
+ "device_site_name": "New York General Hospital",
+ "device_subcategory": "Patient Devices",
+ "device_type": "Patient Monitor",
+ "device_uid": "f342efb7-4f4a-4ac0-8045-0711fb2c5528",
+ "alert_name": "alert name here",
+ "device_name": "device name here",
+ }
+ ]
+}
+
+DEVICE_VULNERABILITY_ERROR_RESPONSE = DEVICE_ALERT_ERROR_RESPONSE
+
+DEVICE_VULNERABILITY_SUCCESS_RESPONSE = {
+ "devices_vulnerabilities": [
+ {
+ "device_assignees": [],
+ "device_category": "Medical",
+ "device_labels": [],
+ "device_network_list": [
+ "Corporate"
+ ],
+ "device_purdue_level": "Level 4",
+ "device_retired": False,
+ "device_risk_score": "Medium",
+ "device_site_name": "Main Campus",
+ "device_subcategory": "Clinical IoT",
+ "device_type": "Nurse Call",
+ "device_uid": "811997e7-cb4f-448f-9b68-68022d745404",
+ "vulnerability_affected_products": "* All the Wi-Fi devices\n"
+ "* Aruba:\n"
+ " - ArubaOS 6.4.x: prior to 6.4.4.25\n"
+ " - ArubaOS 6.5.x: prior to 6.5.4.19\n"
+ " - ArubaOS 8.3.x: prior to 8.3.0.15\n"
+ " - ArubaOS 8.5.x: prior to 8.5.0.12\n"
+ " - ArubaOS 8.6.x: prior to 8.6.0.8\n"
+ " - ArubaOS 8.7.x: prior to 8.7.1.2\n"
+ " - Aruba instant AP\n"
+ "* SUSE:\n"
+ " - SUSE Linux Enterprise Server 15\n"
+ " - SUSE Linux Enterprise Desktop 15\n"
+ " - SUSE Linux Enterprise Server 12\n"
+ " - SUSE Linux Enterprise Desktop 12\n"
+ " - SUSE Linux Enterprise Server 11\n"
+ " - SUSE Linux Enterprise Desktop 11\n"
+ "* Synology:\n"
+ " - RT2600ac\n"
+ " - MR2200ac\n"
+ " - RT1900ac\n"
+ "* Microsoft - according to the affected versions detailed in the attached "
+ "advisories.\n"
+ "* Juniper:\n"
+ " * the following models affected in specific versions and see attached "
+ "advisory:\n"
+ " - AP12 / AP21 / AP32 / AP33 / AP41 / AP43 / AP61 / AP63 / SRX series",
+ "vulnerability_cve_ids": [
+ "CVE-2020-11111",
+ "CVE-2020-22222",
+ "CVE-2020-33333",
+ "CVE-2020-44444",
+ "CVE-2020-55555",
+ "CVE-2020-66666",
+ "CVE-2020-77777",
+ "CVE-2020-88888",
+ "CVE-2020-99999",
+ "CVE-2020-00000",
+ "CVE-2020-12121",
+ "CVE-2020-13131"
+ ],
+ "vulnerability_cvss_v2_exploitability_subscore": 6.5,
+ "vulnerability_cvss_v2_score": 3.3,
+ "vulnerability_cvss_v3_exploitability_subscore": 2.8,
+ "vulnerability_cvss_v3_score": 6.5,
+ "vulnerability_description": "A collection of new 12 security vulnerabilities that affect Wi-Fi devices.\n"
+ "An adversary that is within range of a victim's Wi-Fi network can abuse these "
+ "vulnerabilities to\n"
+ "steal user information or attack devices.\n"
+ "Three of the discovered vulnerabilities are design flaws in the Wi-Fi standard and "
+ "therefore\n"
+ "affect most devices. On top of this, several other vulnerabilities were discovered that"
+ " are\n"
+ "caused by widespread programming mistakes in Wi-Fi products.\n"
+ "Experiments indicate that every Wi-Fi product is affected by at least one "
+ "vulnerability\n"
+ "and that most products are affected by several vulnerabilities.\n"
+ "The discovered vulnerabilities affect all modern security protocols of Wi-Fi, including"
+ " the\n"
+ "latest WPA3.\n"
+ "The design flaws are hard to abuse because doing so requires user interaction or is "
+ "only possible\n"
+ "when using uncommon network settings. As a result, in practice the biggest concern are "
+ "the\n"
+ "programming mistakes in Wi-Fi products since several of them are trivial to exploit.\n"
+ "When a website is configured with HSTS to always use HTTPS as an extra layer of "
+ "security,\n"
+ "the transmitted data cannot be stolen",
+ "vulnerability_id": "ALKIFVSA",
+ "vulnerability_is_known_exploited": False,
+ "vulnerability_last_updated": "2019-08-24T18:56:24.888211+00:00",
+ "vulnerability_name": "FragAttacks",
+ "vulnerability_published_date": "2021-05-12T00:00:00.485000+00:00",
+ "vulnerability_recommendations": "some vulnerability recommendations",
+ "vulnerability_relevance": "Potentially Relevant",
+ "vulnerability_relevance_sources": [
+ "Claroty"
+ ],
+ "vulnerability_sources": [
+ {
+ "name": "vulnerability source name 1",
+ "url": "https://not.really.vulnerability.source.url"
+ }
+ ],
+ "vulnerability_type": "Platform"
+ }
+ ]
+}
+
+
+@pytest.fixture
+def xdome_client_mock(mocker):
+ def _xdome_client_mock():
+ client = Client(base_url="https://not.really.api.claroty.com/api/v1/")
+ mocker.patch.object(client, "get_device_alert_relations", return_value=DEVICE_ALERT_SUCCESS_RESPONSE)
+ mocker.patch.object(
+ client, "get_device_vulnerability_relations", return_value=DEVICE_VULNERABILITY_SUCCESS_RESPONSE
+ )
+ mocker.patch.object(client, "set_device_alert_relations", return_value=None)
+ return client
+
+ return _xdome_client_mock
+
+
+DEVICE_ALERT_VALID_RAW_ARGS = {
+ "limit": 1,
+ "filter_by": json.dumps({
+ "operation": "and",
+ "operands": [
+ {"field": "alert_id", "operation": "in", "value": [2]},
+ {"field": "device_uid", "operation": "in", "value": ["f342efb7-4f4a-4ac0-8045-0711fb2c5528"]},
+ ]
+ })
+}
+
+
+def test_get_device_alert_relations(xdome_client_mock):
+ from XDome import get_device_alert_relations_command
+
+ cmd_res = get_device_alert_relations_command(xdome_client_mock(), DEVICE_ALERT_VALID_RAW_ARGS)
+ expected_device_alert_pairs = DEVICE_ALERT_SUCCESS_RESPONSE["devices_alerts"]
+ assert cmd_res.raw_response == expected_device_alert_pairs
+ assert cmd_res.outputs == {
+ "XDome.DeviceAlert(val.device_uid == obj.device_uid && val.alert_id == obj.alert_id)": expected_device_alert_pairs
+ }
+
+
+DEVICE_VULNERABILITY_VALID_RAW_ARGS = {
+ "limit": 1,
+ "filter_by": json.dumps({
+ "operation": "and",
+ "operands": [
+ {"field": "vulnerability_id", "operation": "in", "value": ["ALKIFVSA"]},
+ {"field": "device_uid", "operation": "in", "value": ["811997e7-cb4f-448f-9b68-68022d745404"]},
+ ]
+ })
+}
+
+
+def test_get_device_vulnerability_relations(xdome_client_mock):
+ from XDome import get_device_vulnerability_relations_command
+
+ cmd_res = get_device_vulnerability_relations_command(xdome_client_mock(), DEVICE_VULNERABILITY_VALID_RAW_ARGS)
+ expected_device_vulnerability_pairs = DEVICE_VULNERABILITY_SUCCESS_RESPONSE["devices_vulnerabilities"]
+ assert cmd_res.raw_response == expected_device_vulnerability_pairs
+ assert cmd_res.outputs == {
+ "XDome.DeviceVulnerability(val.device_uid == obj.device_uid && val.vulnerability_id == obj.vulnerability_id)": (
+ expected_device_vulnerability_pairs
+ )
+ }
+
+
+RESOLVE_DEVICE_ALERT_VALID_RAW_ARGS = {
+ "alert_id": 123,
+ "device_uids": json.dumps(["asdf-asdf-asdf-asdf", "qwer-wqer-qwer-wqer"]),
+ "status": "resolve",
+}
+
+
+def test_resolve_device_alert_relations(xdome_client_mock):
+ from XDome import set_device_alert_relations_command
+
+ cmd_res = set_device_alert_relations_command(xdome_client_mock(), RESOLVE_DEVICE_ALERT_VALID_RAW_ARGS)
+ assert cmd_res.raw_response == "success"
+ assert cmd_res.readable_output == "success"
+
+
+def test_fetch_incidents(xdome_client_mock):
+ from XDome import fetch_incidents
+
+ next_run, incidents = fetch_incidents(
+ xdome_client_mock(), last_run={}, initial_fetch_time="1 day", fetch_limit=1, alert_types=None, fetch_only_unresolved=True
+ )
+
+ mock_pair = DEVICE_ALERT_SUCCESS_RESPONSE["devices_alerts"][0]
+
+ incident = incidents[0]
+ assert incident == {
+ "dbotMirrorId": f"{mock_pair['alert_id']}↔{mock_pair['device_uid']}",
+ "name": f"Alert “{mock_pair['alert_name']}” on Device “{mock_pair['device_name']}”",
+ "occurred": mock_pair["device_alert_updated_time"],
+ "rawJSON": json.dumps(mock_pair),
+ }
+ assert next_run == {"last_fetch": incident["occurred"], "latest_ids": [incident["dbotMirrorId"]]}
+
+
+def test_force_get_all_wrapper(xdome_client_mock):
+ response_len = 100_000
+ big_response_items = [{"alert_id": i, "device_uid": str(i)} for i in range(response_len)]
+
+ def big_response_mock_getter(
+ fields,
+ filter_by=None,
+ offset=0,
+ limit=1,
+ sort_by=None,
+ count=False,
+ ):
+ return {
+ "items": big_response_items[offset:min(offset + limit, response_len)]
+ }
+
+ client = xdome_client_mock()
+ res = client._force_get_all_wrapper(
+ paginated_getter_func=big_response_mock_getter,
+ items_name="items",
+ fields=["alert_id", "device_uid"],
+ )
+ assert res == big_response_items
+
+ res = client._force_get_all_wrapper(
+ paginated_getter_func=big_response_mock_getter,
+ items_name="items",
+ fields=["alert_id", "device_uid"],
+ stop_after=60_000,
+ start_from=9,
+ )
+ assert res == big_response_items[9:60_009]
+
+
+''' Test Util Functions '''
+
+
+def test_split_device_alert_relation_id():
+ alert_id, device_uid = 123, "01234567-89ab-cdef-edcb-a98765432101"
+ device_alert_relation_id_str = f"{alert_id}↔{device_uid}"
+ assert _split_device_alert_relation_id(device_alert_relation_id_str) == (alert_id, device_uid)
+
+
+def test_format_date():
+ assert isinstance(_format_date("7 days"), str)
+ str_date = "2023-10-19T16:21:01+00:00"
+ str_date_fmt = "2023-10-19T16:21:01Z"
+ assert _format_date(str_date) == str_date_fmt
+ assert _format_date(str_date_fmt) == str_date_fmt
+ assert _format_date(dateparser.parse(str_date)) == str_date_fmt
+ assert _format_date(dateparser.parse(str_date_fmt)) == str_date_fmt
+ with pytest.raises(Exception):
+ _format_date("")
+ with pytest.raises(Exception):
+ _format_date("2 corns")
+
+
+def test_build_alert_types_filter():
+ alert_types = [" at1", "at2 ", " at3 ", " at4", "at5 "]
+ assert _build_alert_types_filter(alert_types) == {
+ "field": "alert_type_name", "operation": "in", "value": ["at1", "at2", "at3", "at4", "at5"]
+ }
+
+
+def test_or_compound_filter():
+ filter1 = _simple_filter("field1", "op1", ["val11", "val12"])
+ filter2 = _simple_filter("field2", "op2", ["val21"])
+ filter3 = _simple_filter("field3", "op3", ["val31", "val32", "val33"])
+ assert _or(None, filter1, None, filter2, None, None, filter3, None) == {
+ "operation": "or",
+ "operands": [filter1, filter2, filter3]
+ }
+
+
+def test_next_tick():
+ with pytest.raises(Exception):
+ _next_tick("")
+
+ str_date = "2023-10-19T16:21:01+00:00"
+ str_date_fmt = "2023-10-19T16:21:01Z"
+ assert _next_tick(str_date) == _next_tick(str_date_fmt) == "2023-10-19T16:21:02Z" # 1 sec later
diff --git a/Packs/ClarotyXDome/Integrations/XDome/command_examples b/Packs/ClarotyXDome/Integrations/XDome/command_examples
new file mode 100644
index 000000000000..f11073978317
--- /dev/null
+++ b/Packs/ClarotyXDome/Integrations/XDome/command_examples
@@ -0,0 +1,12 @@
+
+!xdome-get-device-alert-relations fields=all limit=20
+!xdome-get-device-alert-relations fields=all limit=20 offset=20
+!xdome-get-device-alert-relations fields=alert_id,device_uid limit=20000
+!xdome-get-device-alert-relations fields=all filter_by="{\"field\": \"alert_type_name\", \"operation\": \"in\", \"value\": [\"Outdated Firmware\"]}"
+!xdome-get-device-alert-relations fields=all filter_by="{\"operation\": \"and\", \"operands\": [{\"field\": \"alert_id\", \"operation\": \"in\", \"value\": [4181]}, {\"field\": \"device_uid\", \"operation\": \"in\", \"value\": [\"a5921d65-2b08-4077-a347-578a7eff4929\"]}]}"
+!xdome-get-device-alert-relations fields=alert_id,device_uid filter_by="{\"field\": \"device_alert_updated_time\", \"operation\": \"greater_or_equal\", \"value\": \"2024-04-10T07:00:20.236776Z\"}" sort_by="[{\"field\": \"device_alert_updated_time\", \"order\": \"asc\"}, {\"field\": \"alert_id\", \"order\": \"desc\"}]"
+
+!xdome-get-device-vulnerability-relations fields=all limit=200
+
+!xdome-set-status-for-device-alert-relations status=resolved alert_id=123 device_uids=a5921d65-2b08-4077-a347-578a7eff4929,d5911d32-fb31-4027-ad4a-578a7efa4929
+!xdome-set-status-for-device-alert-relations status=unresolved alert_id=123 device_uids=a5921d65-2b08-4077-a347-578a7eff4929
diff --git a/Packs/ClarotyXDome/README.md b/Packs/ClarotyXDome/README.md
new file mode 100644
index 000000000000..56926aa63dca
--- /dev/null
+++ b/Packs/ClarotyXDome/README.md
@@ -0,0 +1,201 @@
+Use the xDome integration to manage assets and alerts.
+This integration was integrated and tested with version 1.0.0 of XDome.
+
+## Configure xDome on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for xDome.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | XDome public API base URL | | True |
+ | API Token | The API token to use for connection | True |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+ | The initial time to fetch from | | True |
+ | Fetch Only Unresolved Device-Alert Pairs | | False |
+ | Alert Types Selection | If no alert types are selected, all types will be fetched | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### xdome-get-device-alert-relations
+
+***
+Gets all device-alert pairs from xDome. You can apply a query-filter.
+
+#### Base Command
+
+`xdome-get-device-alert-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| fields | Fields to return. Possible values are: all, alert_assignees, alert_category, alert_class, alert_id, alert_labels, alert_name, alert_type_name, device_alert_detected_time, device_alert_status, device_alert_updated_time, device_assignees, device_category, device_effective_likelihood_subscore, device_effective_likelihood_subscore_points, device_first_seen_list, device_impact_subscore, device_impact_subscore_points, device_insecure_protocols, device_insecure_protocols_points, device_internet_communication, device_ip_list, device_known_vulnerabilities, device_known_vulnerabilities_points, device_labels, device_last_seen_list, device_likelihood_subscore, device_likelihood_subscore_points, device_mac_list, device_manufacturer, device_name, device_network_list, device_purdue_level, device_retired, device_risk_score, device_risk_score_points, device_site_name, device_subcategory, device_type, device_uid. Default is all. | Optional |
+| filter_by | A filter_by object, refer to the xDome API documentation. | Optional |
+| offset | An offset in the data. This can be used to fetch all data in a paginated manner, by, e.g., requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc. | Optional |
+| limit | Maximum amount of items to fetch. | Optional |
+| sort_by | Default: [{"field":"device_uid","order":"asc"},{"field":"alert_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| XDome.DeviceAlert.alert_id | Number | Platform unique Alert ID. |
+| XDome.DeviceAlert.alert_name | String | The alert name, such as “Malicious Internet Communication: 62.172.138.35”. |
+| XDome.DeviceAlert.alert_type_name | String | An alert type such as "Outdated Firmware". |
+| XDome.DeviceAlert.alert_class | String | The alert class, such as “Pre-Defined Alerts” and “Custom Alerts”. |
+| XDome.DeviceAlert.alert_category | String | Alert category such as "Risk" or "Segmentation". |
+| XDome.DeviceAlert.alert_labels | String | The labels added to the alert manually or automatically. |
+| XDome.DeviceAlert.alert_assignees | String | The users and or groups the alert is assigned to. |
+| XDome.DeviceAlert.device_alert_detected_time | Date | Date and time when the Alert was first detected. |
+| XDome.DeviceAlert.device_alert_updated_time | Date | Date and time of last Alert update. |
+| XDome.DeviceAlert.device_alert_status | String | Device-Alert relation status \(Resolved or Unresolved\). |
+| XDome.DeviceAlert.device_uid | UUID | A universal unique identifier \(UUID\) for the device. |
+| XDome.DeviceAlert.device_name | String | The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc. |
+| XDome.DeviceAlert.device_ip_list | List | IP address associated with the device. IPs may be suffixed by a / \(annotation\), where annotation may be a child device ID or \(Last Known IP\). |
+| XDome.DeviceAlert.device_mac_list | List | MAC address associated with the device. |
+| XDome.DeviceAlert.device_network_list | List | The network types, "Corporate" and or "Guest", that the device belongs to. |
+| XDome.DeviceAlert.device_category | String | The device category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_subcategory | String | The device sub-category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_type | String | The device type group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceAlert.device_assignees | String | The users and or groups the device is assigned to. |
+| XDome.DeviceAlert.device_labels | String | The labels added to the device manually or automatically. |
+| XDome.DeviceAlert.device_retired | String | A boolean field indicating if the device is retired or not. |
+| XDome.DeviceAlert.device_purdue_level | String | The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System \(ICS\). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks. |
+| XDome.DeviceAlert.device_site_name | String | The name of the site within the organization the device is associated with. |
+| XDome.DeviceAlert.device_first_seen_list | List | The date and time a device's NIC was first seen. |
+| XDome.DeviceAlert.device_last_seen_list | List | The date and time a device's NIC was last seen. |
+| XDome.DeviceAlert.device_risk_score | String | The calculated risk level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_risk_score_points | Number | The calculated risk points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_effective_likelihood_subscore | String | The calculated effective likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_effective_likelihood_subscore_points | Number | The calculated effective likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_likelihood_subscore | String | The calculated likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_likelihood_subscore_points | Number | The calculated likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_impact_subscore | String | The calculated impact subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_impact_subscore_points | Number | The calculated impact subscore points of a device, such as "54.1". |
+| XDome.DeviceAlert.device_insecure_protocols | String | The calculated level of the device’s ‘insecure protocols’ likelihood factor, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_insecure_protocols_points | Number | The calculated points for ‘insecure protocols’ likelihood factor of a device, such as "54.1". |
+| XDome.DeviceAlert.device_internet_communication | String | The manner of the device's communication over the internet. |
+| XDome.DeviceAlert.device_known_vulnerabilities | String | The calculated level of the device’s ‘known vulnerabilities’ likelihood factor, such as "Critical", or "High". |
+| XDome.DeviceAlert.device_known_vulnerabilities_points | Number | The calculated points for ‘known vulnerabilities’ likelihood factor of a device, such as "54.1". |
+| XDome.DeviceAlert.device_manufacturer | String | Manufacturer of the device, such as "Alaris". |
+
+### xdome-set-status-for-device-alert-relations
+
+***
+Set device-alert status to resolved or unresolved.
+
+#### Base Command
+
+`xdome-set-status-for-device-alert-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alert_id | Alert ID, as indicated in the id field of an alert. | Required |
+| device_uids | Device UUIDs, as indicated in the uid field of a device. | Optional |
+| status | Set the device-alert status to resolved or unresolved. Possible values are: resolved, unresolved. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+### xdome-get-device-vulnerability-relations
+
+***
+Get details of devices with their related vulnerabilities from the database. The data returned by this endpoint for each device corresponds to the vulnerabilities table in the single device page.
+
+#### Base Command
+
+`xdome-get-device-vulnerability-relations`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| fields | Fields to return. Possible values are: all, device_network_list, device_category, device_subcategory, device_type, device_uid, device_asset_id, device_mac_list, device_ip_list, device_type_family, device_model, device_os_category, device_serial_number, device_vlan_list, device_retired, device_labels, device_assignees, device_hw_version, device_local_name, device_os_name, device_os_version, device_os_revision, device_os_subcategory, device_combined_os, device_endpoint_security_names, device_equipment_class, device_consequence_of_failure, device_management_services, device_ad_distinguished_name, device_ad_description, device_mdm_ownership, device_mdm_enrollment_status, device_mdm_compliance_status, device_last_domain_user, device_fda_class, device_mobility, device_purdue_level, device_purdue_level_source, device_dhcp_hostnames, device_http_hostnames, device_snmp_hostnames, device_windows_hostnames, device_other_hostnames, device_windows_last_seen_hostname, device_dhcp_last_seen_hostname, device_http_last_seen_hostname, device_snmp_last_seen_hostname, device_ae_titles, device_dhcp_fingerprint, device_note, device_domains, device_battery_level, device_internet_communication, device_financial_cost, device_handles_pii, device_machine_type, device_phi, device_cmms_state, device_cmms_ownership, device_cmms_asset_tag, device_cmms_campus, device_cmms_building, device_cmms_location, device_cmms_floor, device_cmms_department, device_cmms_owning_cost_center, device_cmms_asset_purchase_cost, device_cmms_room, device_cmms_manufacturer, device_cmms_model, device_cmms_serial_number, device_cmms_last_pm, device_cmms_technician, device_edr_is_up_to_date_text, device_mac_oui_list, device_ip_assignment_list, device_protocol_location_list, device_vlan_name_list, device_vlan_description_list, device_connection_type_list, device_ssid_list, device_bssid_list, device_wireless_encryption_type_list, device_ap_name_list, device_ap_location_list, device_switch_mac_list, 
device_switch_ip_list, device_switch_name_list, device_switch_port_list, device_switch_location_list, device_switch_port_description_list, device_wlc_name_list, device_wlc_location_list, device_applied_acl_list, device_applied_acl_type_list, device_collection_servers, device_edge_locations, device_number_of_nics, device_last_domain_user_activity, device_last_scan_time, device_edr_last_scan_time, device_retired_since, device_os_eol_date, device_last_seen_list, device_first_seen_list, device_wifi_last_seen_list, device_last_seen_on_switch_list, device_is_online, device_network_scope_list, device_ise_authentication_method_list, device_ise_endpoint_profile_list, device_ise_identity_group_list, device_ise_security_group_name_list, device_ise_security_group_tag_list, device_ise_logical_profile_list, device_cppm_authentication_status_list, device_cppm_roles_list, device_cppm_service_list, device_name, device_manufacturer, device_site_name, device_risk_score, device_risk_score_points, device_effective_likelihood_subscore, device_effective_likelihood_subscore_points, device_likelihood_subscore, device_likelihood_subscore_points, device_impact_subscore, device_impact_subscore_points, device_known_vulnerabilities, device_known_vulnerabilities_points, device_insecure_protocols, device_insecure_protocols_points, device_suspicious, device_switch_group_name_list, device_managed_by, device_authentication_user_list, device_collection_interfaces, device_slot_cards, device_cmms_financial_cost, device_software_or_firmware_version, device_enforcement_or_authorization_profiles_list, device_ise_security_group_description_list, device_recommended_firewall_group_name, device_recommended_zone_name, vulnerability_id, vulnerability_name, vulnerability_type, vulnerability_cve_ids, vulnerability_cvss_v2_score, vulnerability_cvss_v2_exploitability_subscore, vulnerability_cvss_v3_score, vulnerability_cvss_v3_exploitability_subscore, vulnerability_adjusted_vulnerability_score, 
vulnerability_adjusted_vulnerability_score_level, vulnerability_epss_score, vulnerability_sources, vulnerability_description, vulnerability_affected_products, vulnerability_recommendations, vulnerability_exploits_count, vulnerability_is_known_exploited, vulnerability_published_date, vulnerability_labels, vulnerability_assignees, vulnerability_note, vulnerability_last_updated, vulnerability_relevance, vulnerability_relevance_sources, vulnerability_manufacturer_remediation_info, vulnerability_manufacturer_remediation_info_source, vulnerability_overall_cvss_v3_score, device_vulnerability_detection_date, device_vulnerability_resolution_date, device_vulnerability_days_to_resolution, patch_install_date. Default is all. | Optional |
+| filter_by | A filter_by object, refer to the xDome API documentation. Input as a string and don't forget to escape quotes (\"). | Optional |
+| sort_by | Default: [{"field":"device_uid","order":"asc"}, {"field":"vulnerability_id","order":"asc"}]. Specifies how the returned data should be sorted. If more than one sort clause is passed, additional clauses will be used to sort data that is equal in all previous clauses. | Optional |
+| offset | An offset in the data. This can be used to fetch all data in a paginated manner, by, e.g., requesting (offset=0, limit=100) followed by (offset=100, limit=100), (offset=200, limit=100), etc. | Optional |
+| limit | Maximum amount of items to fetch. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| XDome.DeviceVulnerability.vulnerability_name | String | Name designated by Claroty's Research team, based on the advisory name or CVE ID. |
+| XDome.DeviceVulnerability.vulnerability_type | String | Type such as "Application", "Clinical", "IoT" or "Platform". |
+| XDome.DeviceVulnerability.vulnerability_cve_ids | List | Relevant Common Vulnerability Exploits for the selected vulnerability. |
+| XDome.DeviceVulnerability.vulnerability_cvss_v3_score | Number | Common Vulnerability Scoring System Version 3 score \(0-10\). In case of multiple CVEs, the highest Subscore is displayed. |
+| XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score | Number | The Adjusted Vulnerability Score represents the vulnerability score based on its impact and exploitability. |
+| XDome.DeviceVulnerability.vulnerability_adjusted_vulnerability_score_level | String | The calculated Adjusted vulnerability Score \(AVS\) level of a vulnerability, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.vulnerability_epss_score | Number | A probability score between 0 to 1 indicating the likelihood of a vulnerability to be exploited in the wild, based on the Exploit Prediction Scoring System \(EPSS\) model. |
+| XDome.DeviceVulnerability.vulnerability_description | String | Details about the vulnerability. |
+| XDome.DeviceVulnerability.vulnerability_exploits_count | Number | An aggregated numeric field of the number of known exploits based on ExploitDB. |
+| XDome.DeviceVulnerability.vulnerability_is_known_exploited | Boolean | A boolean field indicating whether a vulnerability is currently exploited in-the-wild, based on the CISA Catalog of Known Exploited Vulnerabilities. |
+| XDome.DeviceVulnerability.vulnerability_published_date | Date | The date and time the vulnerability was released. |
+| XDome.DeviceVulnerability.vulnerability_relevance | String | The device vulnerability relevance reflects the confidence level of the detection process, corresponding to several components, such as the vulnerability type. |
+| XDome.DeviceVulnerability.device_vulnerability_detection_date | Date | The date when the vulnerability was initially detected on the device. A vulnerability is considered detected once marked as “confirmed” or “potentially relevant” for the respective device. |
+| XDome.DeviceVulnerability.device_network_list | List | The network types, "Corporate" and or "Guest", that the device belongs to. |
+| XDome.DeviceVulnerability.device_category | String | The device category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_subcategory | String | The device sub-category group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_type | String | The device type group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_uid | String | A universal unique identifier \(UUID\) for the device. |
+| XDome.DeviceVulnerability.device_asset_id | String | Asset ID. |
+| XDome.DeviceVulnerability.device_mac_list | List | MAC address associated with the device. |
+| XDome.DeviceVulnerability.device_ip_list | List | IP address associated with the device. IPs may be suffixed by a / \(annotation\), where annotation may be a child device ID or \(Last Known IP\). |
+| XDome.DeviceVulnerability.device_type_family | String | The device type family group \(see "About Device Categorization" in the Knowledge Base\). |
+| XDome.DeviceVulnerability.device_model | String | The device's model. |
+| XDome.DeviceVulnerability.device_os_category | String | The device's OS category, such as "Windows", "Linux" or "Other". |
+| XDome.DeviceVulnerability.device_serial_number | String | The device's serial number. |
+| XDome.DeviceVulnerability.device_vlan_list | List | The virtual LAN to which the device belongs. |
+| XDome.DeviceVulnerability.device_labels | List | The labels added to the device manually or automatically. |
+| XDome.DeviceVulnerability.device_assignees | List | The users and or groups the device is assigned to. |
+| XDome.DeviceVulnerability.device_hw_version | String | The hardware version of the device. |
+| XDome.DeviceVulnerability.device_local_name | String | Similar to hostname, the device name identifier is extracted from protocol traffic. |
+| XDome.DeviceVulnerability.device_combined_os | String | The aggregated value of OS name, version and revision, such as "Windows XP SP3". |
+| XDome.DeviceVulnerability.device_endpoint_security_names | List | The names of endpoint security applications installed on the device. |
+| XDome.DeviceVulnerability.device_equipment_class | String | Determines the equipment class of the device, according to The Joint Commission \(TJC\). |
+| XDome.DeviceVulnerability.device_management_services | String | Defines whether the device is managed by Active Directory, Mobile Device Management, or neither. |
+| XDome.DeviceVulnerability.device_purdue_level | String | The network layer the device belongs to, based on the Purdue Reference Model for Industrial Control System \(ICS\). The network segmentation-based model defines OT and IT systems into six levels and the logical network boundary controls for securing these networks. |
+| XDome.DeviceVulnerability.device_http_last_seen_hostname | String | The most recent unique hostname identifier of the device, extracted from HTTP protocol traffic. |
+| XDome.DeviceVulnerability.device_snmp_last_seen_hostname | String | The most recent unique hostname identifier of the device, extracted from SNMP protocol traffic. |
+| XDome.DeviceVulnerability.device_note | String | The notes added to the device. |
+| XDome.DeviceVulnerability.device_domains | List | The domain name of the network that the device belongs to. |
+| XDome.DeviceVulnerability.device_internet_communication | String | The manner of the device's communication over the internet. |
+| XDome.DeviceVulnerability.device_edr_is_up_to_date_text | String | Determines whether the endpoint security application installed on the device is up-to-date. |
+| XDome.DeviceVulnerability.device_mac_oui_list | List | The vendor of the device's NIC, according to the OUI \(Organizational Unique Identifier\) in the MAC address. |
+| XDome.DeviceVulnerability.device_ip_assignment_list | List | The device's IP assignment method, extracted from DHCP protocol traffic, such as "DHCP", "DHCP \(Static Lease\)", or "Static". |
+| XDome.DeviceVulnerability.device_vlan_name_list | List | The name of the VLAN, extracted from switch configurations. |
+| XDome.DeviceVulnerability.device_vlan_description_list | List | The description of the VLAN, extracted from switch configurations. |
+| XDome.DeviceVulnerability.device_connection_type_list | List | The connection types of a device, such as "Ethernet". |
+| XDome.DeviceVulnerability.device_ssid_list | List | The name of the wireless network the device is connected to, such as "Guest". |
+| XDome.DeviceVulnerability.device_ap_location_list | List | The location of the access point the device is connected to, extracted from Network Management integrations. |
+| XDome.DeviceVulnerability.device_switch_port_list | List | The port identifier of the switch the device is connected to. |
+| XDome.DeviceVulnerability.device_switch_location_list | List | The location of the switch the device is connected to. |
+| XDome.DeviceVulnerability.device_number_of_nics | Number | The number of network interface cards seen on the network. |
+| XDome.DeviceVulnerability.device_last_seen_list | List | The date and time a device's NIC was last seen. |
+| XDome.DeviceVulnerability.device_first_seen_list | List | The date and time a device's NIC was first seen. |
+| XDome.DeviceVulnerability.device_is_online | Boolean | A boolean field indicating whether the device is online or not. |
+| XDome.DeviceVulnerability.device_network_scope_list | List | The device's Network Scope - used to differentiate between internal networks that share the same IP subnets. |
+| XDome.DeviceVulnerability.device_name | String | The Device Name attribute is set automatically based on the priority of the Auto-Assigned Device attribute. You can also set it manually. The Device Name can be the device’s IP, hostname, etc. |
+| XDome.DeviceVulnerability.device_manufacturer | String | Manufacturer of the device, such as "Alaris". |
+| XDome.DeviceVulnerability.device_site_name | String | The name of the site within the healthcare organization the device is associated with. |
+| XDome.DeviceVulnerability.device_risk_score | String | The calculated risk level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_risk_score_points | Number | The calculated risk points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_effective_likelihood_subscore | String | The calculated effective likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_effective_likelihood_subscore_points | Number | The calculated effective likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_likelihood_subscore | String | The calculated likelihood subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_likelihood_subscore_points | Number | The calculated likelihood subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_impact_subscore | String | The calculated impact subscore level of a device, such as "Critical", or "High". |
+| XDome.DeviceVulnerability.device_impact_subscore_points | Number | The calculated impact subscore points of a device, such as "54.1". |
+| XDome.DeviceVulnerability.device_suspicious | List | The reasons for which the device was marked as suspicious. |
+| XDome.DeviceVulnerability.device_authentication_user_list | List | The User name used to authenticate the device to the network using Radius/802.1x is extracted from the NAC integration and the traffic. |
+| XDome.DeviceVulnerability.device_software_or_firmware_version | String | The application version running on the device. |
diff --git a/Packs/ClarotyXDome/pack_metadata.json b/Packs/ClarotyXDome/pack_metadata.json
new file mode 100644
index 000000000000..b7504f828e8c
--- /dev/null
+++ b/Packs/ClarotyXDome/pack_metadata.json
@@ -0,0 +1,26 @@
+{
+ "name": "Claroty xDome",
+ "description": "Use xDome to manage assets and alerts.",
+ "support": "partner",
+ "currentVersion": "1.0.0",
+ "author": "Claroty",
+ "url": "",
+ "email": "support@claroty.com",
+ "categories": [
+ "Network Security"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [
+ "Claroty",
+ "xDome",
+ "ATD"
+ ],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ],
+ "githubUser": [
+ "tomlandes"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/Code42/Integrations/Code42EventCollector/README.md b/Packs/Code42/Integrations/Code42EventCollector/README.md
index 44f5faa2a9c7..a17c3d59e9f9 100644
--- a/Packs/Code42/Integrations/Code42EventCollector/README.md
+++ b/Packs/Code42/Integrations/Code42EventCollector/README.md
@@ -1,5 +1,6 @@
Code42 Insider Risk software solutions provide the right balance of transparency, technology and training to detect and appropriately respond to data risk. Use the Code42EventCollector integration to fetch file events and audit logs.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
## Configure Code42 Event Collector on Cortex XSIAM
diff --git a/Packs/Code42/pack_metadata.json b/Packs/Code42/pack_metadata.json
index 5c5dbc943313..0aa963360031 100644
--- a/Packs/Code42/pack_metadata.json
+++ b/Packs/Code42/pack_metadata.json
@@ -64,5 +64,6 @@
"Active_Directory_Query",
"CommonTypes",
"CrowdStrikeFalcon"
- ]
+ ],
+ "defaultDataSource": "Code42 Event Collector"
}
\ No newline at end of file
diff --git a/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.json b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.json
new file mode 100644
index 000000000000..ca3ffd9a2076
--- /dev/null
+++ b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in the `Base` pack (Version `1.33.52`), Cofense Intelligence V2 will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md
new file mode 100644
index 000000000000..887561cf25b5
--- /dev/null
+++ b/Packs/CofenseIntelligenceV2/ReleaseNotes/1_1_16.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cofense Intelligence v2
+
+Fixed an issue in the `Base` pack (Version `1.33.52`) so that Cofense Intelligence V2 now correctly inputs email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/CofenseIntelligenceV2/pack_metadata.json b/Packs/CofenseIntelligenceV2/pack_metadata.json
index 0f95c1651f61..858e6c5afe78 100644
--- a/Packs/CofenseIntelligenceV2/pack_metadata.json
+++ b/Packs/CofenseIntelligenceV2/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cofense Intelligence v2",
"description": "Cofense Intelligence allows users to search for threat intelligence reports based on domains, IPs, email address, file hashes, URLs and extracted strings.",
"support": "partner",
- "currentVersion": "1.1.15",
+ "currentVersion": "1.1.16",
"author": "Cofense",
"url": "https://cofense.com/contact-support/",
"email": "support@cofense.com",
diff --git a/Packs/CofenseTriage/Integrations/CofenseTriagev3/README.md b/Packs/CofenseTriage/Integrations/CofenseTriagev3/README.md
index 7a06eadfb6ac..96ca553bb794 100644
--- a/Packs/CofenseTriage/Integrations/CofenseTriagev3/README.md
+++ b/Packs/CofenseTriage/Integrations/CofenseTriagev3/README.md
@@ -2,6 +2,8 @@ The Cofense Triage v3 integration uses the Cofense Triage v2 API (previous integ
Security teams can ingest data from Triage such as email reporters, email reports and clusters, threat indicators, and rule matching to name a few. In addition, ingest and create threat indicators, categorize reports, and obtain second stage threat indicators from malicious emails. This integration was integrated and tested with version 1.22.0 of Cofense Triage.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
Some changes have been made that might affect your existing content.
If you are upgrading from a previous of this integration, see [Breaking Changes](#Breaking-changes-from-the-previous-version-of-this-integration---Cofense-Triage-v3).
diff --git a/Packs/CofenseTriage/ReleaseNotes/2_1_25.md b/Packs/CofenseTriage/ReleaseNotes/2_1_25.md
new file mode 100644
index 000000000000..512966359e67
--- /dev/null
+++ b/Packs/CofenseTriage/ReleaseNotes/2_1_25.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### CofenseTriageThreatEnrichment
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.yml b/Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.yml
index 88810f106e86..3565585c89d8 100644
--- a/Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.yml
+++ b/Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.yml
@@ -18,7 +18,7 @@ subtype: python3
dependson:
must:
- '|||cofense-threat-indicator-list'
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/CofenseTriage/pack_metadata.json b/Packs/CofenseTriage/pack_metadata.json
index 47cd5bd812ec..8c6776058999 100644
--- a/Packs/CofenseTriage/pack_metadata.json
+++ b/Packs/CofenseTriage/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cofense Triage",
"description": "Cofense Triage allows users to fetch reports by using the fetch incidents capability. It also provides commands to get entities like reporters, rules, categories, and more.",
"support": "partner",
- "currentVersion": "2.1.24",
+ "currentVersion": "2.1.25",
"author": "Cofense",
"url": "https://cofense.com/contact-support/",
"email": "support@cofense.com",
@@ -27,5 +27,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Cofense Triage v3"
}
\ No newline at end of file
diff --git a/Packs/CofenseVision/ReleaseNotes/1_0_12.md b/Packs/CofenseVision/ReleaseNotes/1_0_12.md
new file mode 100644
index 000000000000..589dcc99441f
--- /dev/null
+++ b/Packs/CofenseVision/ReleaseNotes/1_0_12.md
@@ -0,0 +1,11 @@
+
+#### Scripts
+
+##### GetMessageIdAndRecipients
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConvertDictOfListToListOfDict
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.py b/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.py
index 980810995880..0f40a8243523 100644
--- a/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.py
+++ b/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.py
@@ -1,16 +1,15 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from typing import Dict
def main():
try:
- args: Dict = demisto.args()
+ args: dict = demisto.args()
root = args.get('key', [])
for i in root:
- if isinstance(root[i], str) or isinstance(root[i], int):
+ if isinstance(root[i], int | str):
root[i] = [root[i]]
max_len = max([len(root[i]) for i in root])
diff --git a/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.yml b/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.yml
index 5c2ef882eced..efdeac24136c 100644
--- a/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.yml
+++ b/Packs/CofenseVision/Scripts/ConvertDictOfListToListOfDict/ConvertDictOfListToListOfDict.yml
@@ -14,7 +14,7 @@ args:
description: Object Key.
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.8.36650
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.2.0
tests:
diff --git a/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.py b/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.py
index 64919cd4f8e4..ef606bf988ac 100644
--- a/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.py
+++ b/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.py
@@ -1,17 +1,16 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from typing import Dict, List
def main():
try:
- args: Dict = demisto.args()
+ args: dict = demisto.args()
root = args.get('key')
if root:
if not isinstance(root, list):
root = [root]
- t: List = []
+ t: list = []
for obj in root:
internet_message_id = obj.get('internetMessageId')
recipients = obj.get('recipients', [])
diff --git a/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.yml b/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.yml
index d5dd8ea2b2ea..d485f0674c98 100644
--- a/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.yml
+++ b/Packs/CofenseVision/Scripts/GetMessageIdAndRecipients/GetMessageIdAndRecipients.yml
@@ -6,7 +6,7 @@ script: ''
type: python
tags:
- transformer
-comment: Get the Internet Message Id and Recipient's address of Messages in a format of `internet_message_id:recipients_address.`
+comment: Get the Internet Message Id and Recipient's address of Messages in a format of `internet_message_id:recipients_address`.
enabled: true
args:
- name: key
@@ -18,7 +18,7 @@ outputs:
type: Unknown
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.8.36650
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.2.0
tests:
diff --git a/Packs/CofenseVision/pack_metadata.json b/Packs/CofenseVision/pack_metadata.json
index 64c717ace277..5dc33d3bb94a 100644
--- a/Packs/CofenseVision/pack_metadata.json
+++ b/Packs/CofenseVision/pack_metadata.json
@@ -1,37 +1,37 @@
{
- "name": "Cofense Vision",
- "description": "Cofense Vision empowers security teams to hunt for email messages and quarantine threats in mailboxes. Analysts can setup jobs to remove emerging phishing campaigns based on trusted and credible IOCs through an automated workflow.",
- "support": "partner",
- "currentVersion": "1.0.11",
- "author": "Cofense",
- "url": "https://cofense.com/contact-support/",
- "email": "support@cofense.com",
- "categories": [
- "Data Enrichment & Threat Intelligence"
- ],
- "tags": [],
- "useCases": [
- "Phishing"
- ],
- "keywords": [
- "phishing",
- "quarantine",
- "quarantine job",
- "malicious email",
- "email",
- "cofense",
- "vision",
- "triage",
- "iocs",
- "message",
- "attachments",
- "search"
- ],
- "marketplaces": [
- "xsoar",
- "marketplacev2"
- ],
- "githubUser": [
- "crestdatasystems"
- ]
+ "name": "Cofense Vision",
+ "description": "Cofense Vision empowers security teams to hunt for email messages and quarantine threats in mailboxes. Analysts can set up jobs to remove emerging phishing campaigns based on trusted and credible IOCs through an automated workflow.",
+ "support": "partner",
+ "currentVersion": "1.0.12",
+ "author": "Cofense",
+ "url": "https://cofense.com/contact-support/",
+ "email": "support@cofense.com",
+ "categories": [
+ "Data Enrichment & Threat Intelligence"
+ ],
+ "tags": [],
+ "useCases": [
+ "Phishing"
+ ],
+ "keywords": [
+ "phishing",
+ "quarantine",
+ "quarantine job",
+ "malicious email",
+ "email",
+ "cofense",
+ "vision",
+ "triage",
+ "iocs",
+ "message",
+ "attachments",
+ "search"
+ ],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ],
+ "githubUser": [
+ "crestdatasystems"
+ ]
}
\ No newline at end of file
diff --git a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md
index 5b92340a0dbc..cc6d717682dd 100644
--- a/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md
+++ b/Packs/CohesityHelios/Integrations/CohesityHeliosEventCollector/README.md
@@ -1,5 +1,7 @@
This is the Cohesity Helios Event Collector integration for XSIAM.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Cohesity Helios Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/CohesityHelios/pack_metadata.json b/Packs/CohesityHelios/pack_metadata.json
index 3387a013f40a..74bc24547dce 100644
--- a/Packs/CohesityHelios/pack_metadata.json
+++ b/Packs/CohesityHelios/pack_metadata.json
@@ -26,5 +26,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Cohesity Helios Event Collector"
}
\ No newline at end of file
diff --git a/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json b/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
index d34dc68ce604..f973c1c07cb7 100644
--- a/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
+++ b/Packs/CommonDashboards/Dashboards/dashboard-APIExecutionMetrics.json
@@ -14,99 +14,6 @@
"fromDateLicense": "0001-01-01T00:00:00Z",
"name": "API Execution Metrics",
"layout": [
- {
- "id": "e7377a00-e12b-11ee-8377-d592a71a5531",
- "forceRange": false,
- "x": 8,
- "y": 0,
- "i": "e7377a00-e12b-11ee-8377-d592a71a5531",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c06",
- "version": 3,
- "cacheVersn": 0,
- "modified": "2024-03-13T07:21:27.174372329-04:00",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for FortiSandbox2",
- "prevName": "API Call Results for FortiSandbox2",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and name:FortiSandboxv2",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
- },
- "reflectDimensions": true
- },
{
"id": "870b0d20-e0df-11ec-a7f6-39e12e3bcb7a",
"forceRange": false,
@@ -134,11 +41,11 @@
"commitMessage": "",
"shouldCommit": false,
"Cache": null,
- "name": "Successful vs. Rate Limited API Calls by Integration",
+ "name": "API Call Status by Integration",
"prevName": "Successful vs. Rate Limited API Calls by Integration",
"dataType": "metrics",
"widgetType": "bar",
- "query": "type:integration and metricType:",
+ "query": "type:integration and (apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType:GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError)",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
@@ -182,6 +89,113 @@
]
],
"name": "Success"
+ },
+ "Auth Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "AuthError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Auth Error"
+ },
+ "Connection Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ConnectionError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Connection Error"
+ },
+ "General Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "GeneralError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "General Error"
+ },
+ "Other": {
+ "name": "Other"
+ },
+ "Proxy Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ProxyError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Proxy Error"
+ },
+ "Retry Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "RetryError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Retry Error"
+ },
+ "SSL Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "SSLError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "SSL Error"
+ },
+ "Service Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ServiceError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Service Error"
+ },
+ "Timeout Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "TimeoutError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Timeout Error"
}
}
],
@@ -192,32 +206,39 @@
"keys": [
"sum|totalAPICalls"
],
+ "valuesFormat": "abbreviated",
+ "hideLegend": false,
"legend": [
{
- "color": "#01A2EC",
- "name": "urlscan.io"
+ "color": "#F50057",
+ "name": "General Error"
},
{
"color": "#229D80",
- "name": "VirusTotal (API v3)"
+ "name": "Success"
},
{
- "color": "#F50057",
- "name": "Rasterize"
+ "color": "#FD5BDE",
+ "name": "Quota Error"
},
{
- "color": "#ce5050",
- "name": "Quota Error"
+ "color": "#FFC4C6",
+ "name": "Service Error"
},
{
- "color": "#4fa327",
- "name": "Success"
+ "color": "#FF8411",
+ "name": "Auth Error"
+ },
+ {
+ "color": "#01A2EC",
+ "name": "Other"
}
],
- "limitType": "top",
- "valuesFormat": "abbreviated"
+ "limitType": "top"
},
- "category": ""
+ "category": "",
+ "sizeInBytes": 0,
+ "created": "0001-01-01T00:00:00Z"
},
"reflectDimensions": true
},
@@ -271,119 +292,26 @@
"keys": [
"sum|totalAPICalls"
],
- "valuesFormat": "abbreviated"
- },
- "category": ""
- },
- "reflectDimensions": true
- },
- {
- "id": "7f0bd220-e0e0-11ec-a7f6-39e12e3bcb7a",
- "forceRange": false,
- "x": 8,
- "y": 0,
- "i": "7f0bd220-e0e0-11ec-a7f6-39e12e3bcb7a",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c07",
- "version": 4,
- "cacheVersn": 0,
- "sequenceNumber": 409396,
- "primaryTerm": 2,
- "modified": "2022-04-27T15:34:53.64268093Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for VirusTotal",
- "prevName": "API Call Results for VirusTotal",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"VirusTotal (API v3)_instance_1\" or \"VirusTotal (API v3)_instance_1_copy\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
+ "valuesFormat": "abbreviated",
+ "colors": {
+ "isEnabled": false,
+ "items": {
+ "#1DB846": {
+ "value": 3
},
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
+ "#D13C3C": {
+ "value": 0
},
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
+ "#EF9700": {
+ "value": -1
}
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
+ },
+ "type": "above"
+ }
},
- "category": ""
+ "category": "",
+ "sizeInBytes": 0,
+ "created": "0001-01-01T00:00:00Z"
},
"reflectDimensions": true
},
@@ -413,7 +341,6 @@
"vcShouldKeepItemLegacyProdMachine": false,
"commitMessage": "Widget imported",
"shouldCommit": true,
- "size": 5,
"Cache": null,
"name": "API Execution Metrics For Enrichment Commands",
"prevName": "API Execution Metrics For Enrichment Commands",
@@ -421,7 +348,6 @@
"widgetType": "column",
"query": "type:integration and command:domain or command:url or command:ip or command:file",
"isPredefined": false,
- "description": "Errors by Incident Type per Command (top 5)",
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
"toDate": "0001-01-01T00:00:00Z",
@@ -436,6 +362,10 @@
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
+ "keys": [
+ "sum|totalAPICalls"
+ ],
+ "valuesFormat": "regular",
"customGroupBy": [
null,
{
@@ -484,9 +414,6 @@
"command",
"apiResponseType"
],
- "keys": [
- "sum|totalAPICalls"
- ],
"referenceLine": {},
"showGraphValues": true,
"tableColumns": [
@@ -521,29 +448,30 @@
"position": 5
}
],
- "valuesFormat": "regular",
- "xAxisLabel": "Enrichment Command name ",
+ "xAxisLabel": "Enrichment Command Name",
"yAxisLabel": "Error count"
},
- "category": ""
+ "category": "",
+ "sizeInBytes": 0,
+ "created": "0001-01-01T00:00:00Z",
+ "size": 5,
+ "description": "Errors by Incident Type per Command (top 5)"
},
"reflectDimensions": true
},
{
- "id": "63de01c0-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "id": "5b389a70-21b8-11ef-86d3-29ed4c950ba8",
"forceRange": false,
"x": 8,
- "y": 3,
- "i": "63de01c0-e0e1-11ec-a7f6-39e12e3bcb7a",
+ "y": 0,
+ "i": "5b389a70-21b8-11ef-86d3-29ed4c950ba8",
"w": 4,
"h": 3,
"widget": {
- "id": "0821903b-1099-4f3d-8c30-27decd8c5c07",
- "version": 5,
+ "id": "b42d7e6f-fe25-4963-8dc2-c2ca9cae8f1e",
+ "version": 2,
"cacheVersn": 0,
- "sequenceNumber": 409396,
- "primaryTerm": 2,
- "modified": "2022-05-31T12:58:36.448784342Z",
+ "modified": "2024-06-03T14:48:31.630201802Z",
"packID": "",
"packName": "",
"itemVersion": "",
@@ -555,14 +483,14 @@
"definitionId": "",
"vcShouldIgnore": false,
"vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "Widget imported",
- "shouldCommit": true,
+ "commitMessage": "",
+ "shouldCommit": false,
"Cache": null,
- "name": "API Call Results for UrlScan",
- "prevName": "API Call Results for UrlScan",
+ "name": "API Call Results for Email Category",
+ "prevName": "Email",
"dataType": "metrics",
"widgetType": "line",
- "query": "type:integration and instance:\"urlscan.io_instance_1\"",
+ "query": "type:integration and (apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType:GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError) and category:\"Email\"",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
@@ -572,675 +500,41 @@
"byTo": "",
"byFrom": "days",
"toValue": null,
- "fromValue": 0,
+ "fromValue": 7,
"field": ""
},
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Retry Timeout": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "RetryTimeout",
- "type": "string"
- }
- ]
- ],
- "name": "Retry Timeout"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {
- "type": "max"
- },
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
- },
- "reflectDimensions": true
- },
- {
- "id": "8caf2770-583a-11ed-b0d6-31717adfe334",
- "forceRange": false,
- "x": 8,
- "y": 6,
- "i": "8caf2770-583a-11ed-b0d6-31717adfe334",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Microsoft Defender for Endpoint",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:01:15.237569307Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Microsoft Defender for Endpoint",
- "prevName": "API Call Results for Microsoft Defender for Endpoint",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"Microsoft Defender Advanced Threat Protection_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
- },
- "reflectDimensions": true
- },
- {
- "id": "8d4f5f10-583a-11ed-b0d6-31717adfe334",
- "forceRange": false,
- "x": 4,
- "y": 8,
- "i": "8d4f5f10-583a-11ed-b0d6-31717adfe334",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Microsoft Graph Mail",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:05:03.146716149Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Microsoft Graph Mail",
- "prevName": "API Call Results for Microsoft Graph Mail",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"MicrosoftGraphMail_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
- },
- "reflectDimensions": true
- },
- {
- "id": "e3006ae0-23cc-11ee-bc30-99ab8f4422d3",
- "forceRange": false,
- "x": 8,
- "y": 9,
- "i": "e3006ae0-23cc-11ee-bc30-99ab8f4422d3",
- "w": 4,
- "h": 2,
- "widget": {
- "id": "c63390c0-0c5e-4906-8b44-a748e6a639ee",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2023-07-16T11:10:20.740319Z",
- "packID": "JoeSecurity",
- "packName": "Joe Security",
- "itemVersion": "1.1.9",
- "fromServerVersion": "6.5.0",
- "toServerVersion": "",
- "propagationLabels": [],
- "packPropagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Joe Security",
- "prevName": "API Call Results for Joe Security",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"JoeSecurityV2_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "",
- "byFrom": "days",
- "toValue": null,
- "fromValue": 0,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- "Auth Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "AuthError",
- "type": "string"
- }
- ]
- ],
- "name": "Auth Error"
- },
- "Connection Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "ConnectionError",
- "type": "string"
- }
- ]
- ],
- "name": "Connection Error"
- },
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
"groupBy": [
"modified(d)",
- "apiResponseType"
+ "name"
],
"keys": [
- "sum|totalAPICalls"
+ "sum|executionCount"
],
- "referenceLine": {
- "type": "max"
- },
- "showOthers": false,
"timeFrame": "days",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
+ "valuesFormat": "abbreviated"
},
- "category": ""
- },
- "reflectDimensions": true
- },
- {
- "forceRange": false,
- "h": 3,
- "i": "0a993750-ef32-11ed-a72f-dd3156f45ab2",
- "id": "0a993750-ef32-11ed-a72f-dd3156f45ab2",
- "reflectDimensions": true,
- "w": 4,
- "widget": {
- "Cache": null,
- "cacheVersn": 0,
"category": "",
- "commitMessage": "",
- "dataType": "metrics",
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "fromDateLicense": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byFrom": "days",
- "byTo": "",
- "field": "",
- "fromValue": 0,
- "toValue": null
- },
- "toDate": "0001-01-01T00:00:00Z"
- },
- "definitionId": "",
- "fromServerVersion": "6.5.0",
- "id": "c63390c0-0c5e-4906-8b44-a748e6a639ea",
- "isPredefined": true,
- "itemVersion": "3.0.0",
- "modified": "2023-05-10T12:54:41.634649071Z",
- "name": "API Call Results for Rapid7 Threat Command",
- "packID": "IntSight",
- "packName": "Rapid7 - Threat Command (IntSights)",
- "packPropagationLabels": [
- "all"
- ],
- "params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
- "groupBy": [
- "modified(d)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {
- "type": "max"
- },
- "showOthers": false,
- "timeFrame": "days",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "prevName": "API Call Results for Rapid7 Threat Command",
- "propagationLabels": [],
- "query": "type:integration and instance:\"rapid7_threat_command_instance_1\" or \"rapid7_threat_command_instance_1_copy\"",
- "shouldCommit": false,
- "toServerVersion": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "version": 3,
- "widgetType": "line"
- },
- "x": 0,
- "y": 8
- },
- {
- "id": "9e8acc20-8d3a-11ee-a736-df621984533b",
- "forceRange": false,
- "x": 4,
- "y": 11,
- "i": "9e8acc20-8d3a-11ee-a736-df621984533b",
- "w": 4,
- "h": 3,
- "widget": {
- "id": "API Call Results for Email Hippo",
- "version": 1,
- "cacheVersn": 0,
- "modified": "2022-10-30T10:01:15.237569307Z",
- "packID": "",
- "packName": "",
- "itemVersion": "",
- "fromServerVersion": "",
- "toServerVersion": "",
- "propagationLabels": [
- "all"
- ],
- "definitionId": "",
- "vcShouldIgnore": false,
- "vcShouldKeepItemLegacyProdMachine": false,
- "commitMessage": "",
- "shouldCommit": false,
- "Cache": null,
- "name": "API Call Results for Email Hippo",
- "prevName": "API Call Results for Email Hippo",
- "dataType": "metrics",
- "widgetType": "line",
- "query": "type:integration and instance:\"Email Hippo_instance_1\"",
- "isPredefined": false,
- "dateRange": {
- "fromDate": "0001-01-01T00:00:00Z",
- "toDate": "0001-01-01T00:00:00Z",
- "period": {
- "by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
- "field": ""
- },
- "fromDateLicense": "0001-01-01T00:00:00Z"
- },
- "params": {
- "customGroupBy": [
- null,
- {
- " Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": " Success"
- },
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- }
- }
- ],
- "groupBy": [
- "modified(h)",
- "apiResponseType"
- ],
- "keys": [
- "sum|totalAPICalls"
- ],
- "referenceLine": {},
- "timeFrame": "hours",
- "valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
- },
- "category": ""
+ "sizeInBytes": 0,
+ "created": "2024-06-03T14:48:11.173825958Z"
},
"reflectDimensions": true
},
{
- "id": "c4d4f820-b9fd-11ee-9dd7-37fe5ed8a2cc",
+ "id": "e6c4a200-21b8-11ef-86d3-29ed4c950ba8",
"forceRange": false,
- "x": 0,
- "y": 11,
- "i": "c4d4f820-b9fd-11ee-9dd7-37fe5ed8a2cc",
+ "x": 8,
+ "y": 3,
+ "i": "e6c4a200-21b8-11ef-86d3-29ed4c950ba8",
"w": 4,
- "h": 3,
+ "h": 5,
"widget": {
- "id": "0821903b-1099-4f3d-8c30-12345d8c5c07",
+ "id": "f99ff2ec-6e20-4b5e-8d59-664d6f80b3c0",
"version": 1,
"cacheVersn": 0,
- "modified": "2024-01-14T16:57:28.451017133Z",
+ "modified": "2024-06-03T14:52:28.706525616Z",
"packID": "",
"packName": "",
"itemVersion": "",
@@ -1255,84 +549,41 @@
"commitMessage": "",
"shouldCommit": false,
"Cache": null,
- "name": "API Call Results for Autofocus",
- "prevName": "API Call Results for Autofocus",
+ "name": "API Call Metrics",
+ "prevName": "API Metrics",
"dataType": "metrics",
"widgetType": "line",
- "query": "type:integration and name:\"AutoFocus V2\"",
+ "query": "type:integration and (apiResponseType:Successful or apiResponseType:QuotaError or apiResponseType:ConnectionError or apiResponseType:TimeoutError or apiResponseType: GeneralError or apiResponseType:AuthError or apiResponseType:RetryError or apiResponseType:SSLError or apiResponseType:ProxyError or apiResponseType:ServiceError) and -category:Email",
"isPredefined": false,
"dateRange": {
"fromDate": "0001-01-01T00:00:00Z",
"toDate": "0001-01-01T00:00:00Z",
"period": {
"by": "",
- "byTo": "days",
- "byFrom": "hours",
- "toValue": 0,
- "fromValue": 3,
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 7,
"field": ""
},
"fromDateLicense": "0001-01-01T00:00:00Z"
},
"params": {
- "customGroupBy": [
- null,
- {
- "General Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "GeneralError",
- "type": "string"
- }
- ]
- ],
- "name": "General Error"
- },
- "Quota Error": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "QuotaError",
- "type": "string"
- }
- ]
- ],
- "name": "Quota Error"
- },
- "Success": {
- "conditions": [
- [
- {
- "field": "apiResponseType",
- "operator": "isEqualCaseString",
- "right": "Successful",
- "type": "string"
- }
- ]
- ],
- "name": "Success"
- }
- }
- ],
"groupBy": [
- "modified(h)",
- "apiResponseType"
+ "modified(d)",
+ "name"
],
"keys": [
"sum|totalAPICalls"
],
- "referenceLine": {},
- "timeFrame": "hours",
+ "showGraphValues": true,
"valuesFormat": "abbreviated",
- "xAxisLabel": "Time",
- "yAxisLabel": "Request Counts"
+ "hideLegend": false,
+ "timeFrame": "days"
},
- "category": ""
+ "category": "",
+ "sizeInBytes": 0,
+ "created": "2024-06-03T14:52:28.705817022Z"
},
"reflectDimensions": true
}
@@ -1340,4 +591,4 @@
"fromVersion": "6.8.0",
"description": "",
"isPredefined": true
-}
+}
\ No newline at end of file
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_5_0.md b/Packs/CommonDashboards/ReleaseNotes/1_5_0.md
new file mode 100644
index 000000000000..704729029007
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_5_0.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### API Execution Metrics
+
+Added a widget showing API execution metrics for the **Gmail Single User** integration.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_6_0.md b/Packs/CommonDashboards/ReleaseNotes/1_6_0.md
new file mode 100644
index 000000000000..0cd1cd65b3fe
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_6_0.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### API Execution Metrics
+
+Added error types to the metrics in the **API Call Status By Integration** widget.
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_6_1.md b/Packs/CommonDashboards/ReleaseNotes/1_6_1.md
new file mode 100644
index 000000000000..539e793bb5ce
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_6_1.md
@@ -0,0 +1,3 @@
+## Common Dashboards
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonDashboards/ReleaseNotes/1_7_0.md b/Packs/CommonDashboards/ReleaseNotes/1_7_0.md
new file mode 100644
index 000000000000..42c65a0d8807
--- /dev/null
+++ b/Packs/CommonDashboards/ReleaseNotes/1_7_0.md
@@ -0,0 +1,6 @@
+
+#### Dashboards
+
+##### API Execution Metrics
+
+Replaced integration-specific widgets showing API metrics with the **API Call Metrics** and **API Call Results for Email Category** widgets.
diff --git a/Packs/CommonDashboards/pack_metadata.json b/Packs/CommonDashboards/pack_metadata.json
index a40e943d8846..e6d6af15642b 100644
--- a/Packs/CommonDashboards/pack_metadata.json
+++ b/Packs/CommonDashboards/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Dashboards",
"description": "Frequently used dashboards pack.",
"support": "xsoar",
- "currentVersion": "1.4.3",
+ "currentVersion": "1.7.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md
new file mode 100644
index 000000000000..03b567b57bd5
--- /dev/null
+++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_33.md
@@ -0,0 +1,3 @@
+## Common Playbooks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonPlaybooks/pack_metadata.json b/Packs/CommonPlaybooks/pack_metadata.json
index 1cd9f11d9cb6..478cc0b269be 100644
--- a/Packs/CommonPlaybooks/pack_metadata.json
+++ b/Packs/CommonPlaybooks/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Playbooks",
"description": "Frequently used playbooks pack.",
"support": "xsoar",
- "currentVersion": "2.6.32",
+ "currentVersion": "2.6.33",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonReports/ReleaseNotes/1_0_9.md b/Packs/CommonReports/ReleaseNotes/1_0_9.md
new file mode 100644
index 000000000000..29e7ddc36447
--- /dev/null
+++ b/Packs/CommonReports/ReleaseNotes/1_0_9.md
@@ -0,0 +1,3 @@
+## Common Reports
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonReports/pack_metadata.json b/Packs/CommonReports/pack_metadata.json
index d0896f5d82ea..c7bd4561a85e 100644
--- a/Packs/CommonReports/pack_metadata.json
+++ b/Packs/CommonReports/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Reports",
"description": "Frequently used reports pack.",
"support": "xsoar",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonScripts/.pack-ignore b/Packs/CommonScripts/.pack-ignore
index 6103e5e17988..b20d2cd2e766 100644
--- a/Packs/CommonScripts/.pack-ignore
+++ b/Packs/CommonScripts/.pack-ignore
@@ -107,6 +107,8 @@ ignore=RN114
ignore=SC106,BA124
[known_words]
+pyminizip
+pyzipper
unzipfile
zipstrings
extractinbetween
@@ -173,6 +175,7 @@ qr
cv
unescape_url
unescape
+hyperlinks
[file:ScheduleGenericPolling.yml]
ignore=BA124
diff --git a/Packs/CommonScripts/.secrets-ignore b/Packs/CommonScripts/.secrets-ignore
index 56e847aba51c..c974ff84f2f0 100644
--- a/Packs/CommonScripts/.secrets-ignore
+++ b/Packs/CommonScripts/.secrets-ignore
@@ -315,3 +315,5 @@ https://www.linkedin.com
http://en.m.wikipedia.org
https://xsoar.pan.dev/docs/concepts/demisto-sdk#secrets
http://www.yahoo.de
+https://mocktarget.apigee.net
+https://mocktarget.apigee.net/xml
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_14_49.md b/Packs/CommonScripts/ReleaseNotes/1_14_49.md
new file mode 100644
index 000000000000..0858913e4d03
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_14_49.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### StixCreator
+
+Fixed an issue in the **TAXII2ApiModule** related to the *TAXII2 server* integration.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_0.md b/Packs/CommonScripts/ReleaseNotes/1_15_0.md
new file mode 100644
index 000000000000..e628c2aa6619
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_0.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ZipFile
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.95440*.
+- Replaced the *pyminizip* library with *pyzipper*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_1.md b/Packs/CommonScripts/ReleaseNotes/1_15_1.md
new file mode 100644
index 000000000000..8ad178bf19d8
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_1.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### GetIndicatorDBotScoreFromCache
+
+- Fixed an issue where some special characters in indicator values were not escaped correctly.
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_2.md b/Packs/CommonScripts/ReleaseNotes/1_15_2.md
new file mode 100644
index 000000000000..d242da2bdf4e
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_2.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### VerdictResult
+
+- Added support for **Suspicious** verdict.
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_3.md b/Packs/CommonScripts/ReleaseNotes/1_15_3.md
new file mode 100644
index 000000000000..016bd47368ba
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_3.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetIndicatorDBotScoreFromCache
+
+- Fixed an issue where not all special characters were escaped correctly.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_4.md b/Packs/CommonScripts/ReleaseNotes/1_15_4.md
new file mode 100644
index 000000000000..6ca28f7dbd4e
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_4.md
@@ -0,0 +1,43 @@
+
+#### Scripts
+
+##### FetchIndicatorsFromFile
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### ExifRead
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### ExtractDomainAndFQDNFromUrlAndEmail
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### ExtractFQDNFromUrlAndEmail
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### LanguageDetect
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### StixCreator
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### ExtractDomainFromUrlAndEmail
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
+##### ParseExcel
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
+
+
+
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_5.md b/Packs/CommonScripts/ReleaseNotes/1_15_5.md
new file mode 100644
index 000000000000..ce28a7b1ec15
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_5.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### ExtractHyperlinksFromOfficeFiles
+- Updated the Docker image to: *demisto/office-utils:2.0.0.96781*.
+- Fixed an issue where images with hyperlinks were not extracted properly from docx files.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_6.md b/Packs/CommonScripts/ReleaseNotes/1_15_6.md
new file mode 100644
index 000000000000..2c724edb925f
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_6.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### GetListRow
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
+- Fixed an issue where new lines at the end of the list could lead to an exception.
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_7.md b/Packs/CommonScripts/ReleaseNotes/1_15_7.md
new file mode 100644
index 000000000000..ed91b4927f33
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_7.md
@@ -0,0 +1,3 @@
+## Common Scripts
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonScripts/ReleaseNotes/1_15_8.md b/Packs/CommonScripts/ReleaseNotes/1_15_8.md
new file mode 100644
index 000000000000..76e70187ac1c
--- /dev/null
+++ b/Packs/CommonScripts/ReleaseNotes/1_15_8.md
@@ -0,0 +1,7 @@
+#### Scripts
+
+##### New: PrintToAlert
+Prints a value to the specified alert's war-room.
+
+##### New: PrintToParentIncident
+Prints a value to the parent incident's war-room of the current alert.
diff --git a/Packs/CommonScripts/Scripts/ExifRead/ExifRead.py b/Packs/CommonScripts/Scripts/ExifRead/ExifRead.py
index ea89ef5fdce3..9da4d12565a7 100644
--- a/Packs/CommonScripts/Scripts/ExifRead/ExifRead.py
+++ b/Packs/CommonScripts/Scripts/ExifRead/ExifRead.py
@@ -9,7 +9,7 @@ def get_exif_tags(file_entry_id):
tags = exifread.process_file(f)
arr = []
- for tag in tags.keys():
+ for tag in tags:
arr.append({'tag': str(tag), 'value': str(tags[tag])})
md = tableToMarkdown('Exif Tags', arr)
diff --git a/Packs/CommonScripts/Scripts/ExifRead/ExifRead.yml b/Packs/CommonScripts/Scripts/ExifRead/ExifRead.yml
index ac2c69f2f9b9..c8fd35ab1c2f 100644
--- a/Packs/CommonScripts/Scripts/ExifRead/ExifRead.yml
+++ b/Packs/CommonScripts/Scripts/ExifRead/ExifRead.yml
@@ -7,22 +7,22 @@ type: python
subtype: python3
tags:
- Utility
-comment: Read image files metadata and provide Exif tags
+comment: Read image files metadata and provide Exif tags.
enabled: true
args:
- name: EntryID
required: true
default: true
- description: Entry ID of image file
+ description: Entry ID of image file.
outputs:
- contextPath: Exif.tag
- description: Exif tag name
+ description: Exif tag name.
type: string
- contextPath: Exif.value
- description: Exif tag value
+ description: Exif tag value.
type: string
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
fromversion: 6.5.0
tests:
- ExifReadTest
diff --git a/Packs/CommonScripts/Scripts/ExtractDomainAndFQDNFromUrlAndEmail/ExtractDomainAndFQDNFromUrlAndEmail.yml b/Packs/CommonScripts/Scripts/ExtractDomainAndFQDNFromUrlAndEmail/ExtractDomainAndFQDNFromUrlAndEmail.yml
index 158aa6010ece..35ea2915159a 100644
--- a/Packs/CommonScripts/Scripts/ExtractDomainAndFQDNFromUrlAndEmail/ExtractDomainAndFQDNFromUrlAndEmail.yml
+++ b/Packs/CommonScripts/Scripts/ExtractDomainAndFQDNFromUrlAndEmail/ExtractDomainAndFQDNFromUrlAndEmail.yml
@@ -15,7 +15,7 @@ tags:
- indicator-format
timeout: '0'
type: python
-dockerimage: demisto/py3-tools:1.0.0.81280
+dockerimage: demisto/py3-tools:1.0.0.96102
runas: DBotWeakRole
tests:
- ExtractDomainAndFQDNFromUrlAndEmail-Test
diff --git a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.py b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.py
index 8130aa4b86b0..580f02ff97e5 100644
--- a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.py
+++ b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.py
@@ -81,7 +81,7 @@ def extract_domain(the_input):
domain = get_fld(full_domain, fail_silently=True)
# convert None to empty string if needed
- domain = '' if not domain else domain
+ domain = domain if domain else ''
return domain
diff --git a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.yml b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.yml
index 697ac9fbc3e8..81cc88bb3844 100644
--- a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.yml
+++ b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat.yml
@@ -6,14 +6,14 @@ script: ''
type: python
tags:
- indicator-format
-comment: Extract Domain(s) from URL(s) and/or Email(s)
+comment: Extract Domain(s) from URL(s) and/or Email(s).
enabled: true
args:
- name: input
- description: The URL(s) or Email(s) to process
+ description: The URL(s) or Email(s) to process.
isArray: true
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
runas: DBotWeakRole
subtype: python3
tests:
diff --git a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat_test.py b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat_test.py
index 5c601863bd08..ccde4e4368aa 100644
--- a/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat_test.py
+++ b/Packs/CommonScripts/Scripts/ExtractDomainFromUrlFormat/ExtractDomainFromUrlFormat_test.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
from ExtractDomainFromUrlFormat import extract_domain
from ExtractDomainFromUrlFormat import unescape_url
import pytest
@@ -9,7 +8,7 @@
('http:example.com', 'example.com'),
('http:\\\\example.com', 'example.com'),
('https://caseapi.phishlabs.com', 'phishlabs.com'),
- (u'www.bücher.de', u'bücher.de'),
+ ('www.bücher.de', 'bücher.de'),
('https://urldefense.proofpoint.com/v2/url?u=http-3A__go.getpostman.com_y4wULsdG0h0DDMY0Dv00100&d=DwMFaQ&c=ywDJJevdGcjv4rm9P3FcNg&r=s5kA2oIAQRXsacJiBKmTORIWyRN39ZKhobje2GyRgNs&m=vN1dVSiZvEoM9oExtQqEptm9Dbvq9tnjACDZzrBLaWI&s=zroN7KQdBCPBOfhOmv5SP1DDzZKZ1y9I3x4STS5PbHA&e=', 'getpostman.com'), # noqa: E501
('hxxps://www[.]demisto[.]com', 'demisto.com'),
('https://emea01.safelinks.protection.outlook.com/?url=https%3A%2F%2Ftwitter.com%2FPhilipsBeLux&data=02|01||cb2462dc8640484baf7608d638d2a698|1a407a2d76754d178692b3ac285306e4|0|0|636758874714819880&sdata=dnJiphWFhnAKsk5Ps0bj0p%2FvXVo8TpidtGZcW6t8lDQ%3D&reserved=0%3E%5bcid:image003.gif@01CF4D7F.1DF62650%5d%3C', 'twitter.com'), # noqa: E501 disable-secrets-detection
diff --git a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.py b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.py
index 4e667d51ffa3..06ad7eed09f9 100644
--- a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.py
+++ b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.py
@@ -61,7 +61,7 @@ def get_fqdn(the_input):
# get the subdomain using tld.subdomain
subdomain = domain.subdomain
if (subdomain):
- fqdn = "{}.{}".format(subdomain, domain.fld)
+ fqdn = f"{subdomain}.{domain.fld}"
return fqdn
@@ -95,7 +95,7 @@ def extract_fqdn(the_input):
fqdn = get_fqdn(full_domain)
# convert None to empty string if needed
- fqdn = '' if not fqdn else fqdn
+ fqdn = fqdn if fqdn else ''
return fqdn
diff --git a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.yml b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.yml
index 97a29e59d044..76005d22b592 100644
--- a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.yml
+++ b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail.yml
@@ -14,7 +14,7 @@ tags:
- indicator-format
timeout: '0'
type: python
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
runas: DBotWeakRole
subtype: python3
tests:
diff --git a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail_test.py b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail_test.py
index a5b2e6aaa11e..1423fe83ba9d 100644
--- a/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail_test.py
+++ b/Packs/CommonScripts/Scripts/ExtractFQDNFromUrlAndEmail/ExtractFQDNFromUrlAndEmail_test.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
from ExtractFQDNFromUrlAndEmail import extract_fqdn
import pytest
@@ -8,7 +7,7 @@
[ # noqa: E501 disable-secrets-detection
("http://this.is.test.com", "this.is.test.com"),
("https://caseapi.phishlabs.com", "caseapi.phishlabs.com"),
- (u"www.bücher.de", u"www.bücher.de"),
+ ("www.bücher.de", "www.bücher.de"),
(
"https://urldefense.proofpoint.com/v2/url?u=http-3A__go.getpostman.com_y4wULsdG0h0DDMY0Dv00100&d=DwMFaQ&c=yw"
"DJJevdGcjv4rm9P3FcNg&r=s5kA2oIAQRXsacJiBKmTORIWyRN39ZKhobje2GyRgNs&m=vN1dVSiZvEoM9oExtQqEptm9Dbvq9tnjACDZzr"
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
index 8786248758d3..913c2264eaa9 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.py
@@ -5,6 +5,7 @@
from pptx import Presentation
import zipfile
import pandas as pd
+from docx.opc.constants import RELATIONSHIP_TYPE as RT
def extract_hyperlinks_from_xlsx(file_path: str) -> Set:
@@ -34,10 +35,10 @@ def extract_hyperlinks_from_xlsx(file_path: str) -> Set:
def extract_hyperlinks_from_docx(file_path: str) -> Set:
doc = Document(file_path)
links = set()
- for para in doc.paragraphs:
- for hyper in para.hyperlinks:
- if hyper.address:
- links.add(hyper.address)
+ for rel in doc.part.rels.values():
+ if rel.reltype == RT.HYPERLINK and rel.is_external:
+ links.add(rel._target)
+
return links
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
index 2842d24fe96a..e2ee7d1888d7 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles.yml
@@ -18,5 +18,5 @@ script: '-'
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/office-utils:2.0.0.88298
+dockerimage: demisto/office-utils:2.0.0.96781
fromversion: 5.5.0
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
index 7f26494c3a78..a92e7b8b6692 100644
--- a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
+++ b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/ExtractHyperlinksFromOfficeFiles_test.py
@@ -6,6 +6,7 @@
('test_data/d1.docx',
{'https://xsoar.pan.dev/', 'https://www.paloaltonetworks.com/', 'https://jobs.paloaltonetworks.com/en/'}),
('test_data/d2.docx', set()),
+ ('test_data/d3.docx', {'https://www.paloaltonetworks.com/', 'http://www.google.com'}),
('test_data/e1.xlsx', {'http://www.google.com', 'http://www.yahoo.de/'}),
('test_data/e2.xlsx', set()),
('test_data/e3.xlsx', {'https://www.paloaltonetworks.com/'}),
@@ -17,22 +18,24 @@ def test_basescript_dummy(file_path, expected_output):
Given:
1. docx file with hyperlinks on a picture and text.
2. docx file without hyperlinks
- 3. excel file with hyperlinks on a picture and inside text cell.
- 4. excel file with no hyperlinks.
- 5. excel file with hyperlinks inside text cell.
- 6. power point file with hyperlinks on a picture and text.
- 7. power point file without hyperlinks.
+ 3. docx file with hyperlinks on a picture and in the document.
+ 4. excel file with hyperlinks on a picture and inside text cell.
+ 5. excel file with no hyperlinks.
+ 6. excel file with hyperlinks inside text cell.
+ 7. power point file with hyperlinks on a picture and text.
+ 8. power point file without hyperlinks.
When:
Extracting hyperlinks from file using ExtractHyperlinksFromOfficeFiles script.
Then:
Validate that:
1. hyperlinks extracted from docx file
2. no hyperlinks extracted from docx file
- 3. hyperlinks extracted from excel file
- 4. no hyperlinks extracted from excel file
- 5. hyperlinks extracted from excel file
- 6. hyperlinks extracted from power point file
- 7. no hyperlinks extracted from power point file
+ 3. hyperlinks extracted from the docx file.
+ 4. hyperlinks extracted from excel file
+ 5. no hyperlinks extracted from excel file
+ 6. hyperlinks extracted from excel file
+ 7. hyperlinks extracted from power point file
+ 8. no hyperlinks extracted from power point file
"""
response = extract_hyperlink_by_file_type(file_name=file_path, file_path=file_path)
assert set(response.raw_response) == expected_output
diff --git a/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx
new file mode 100644
index 000000000000..83c45346f3c2
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ExtractHyperlinksFromOfficeFiles/test_data/d3.docx differ
diff --git a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
index 5b8242a6b3c5..f5736ab09db6 100644
--- a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
+++ b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.py
@@ -15,7 +15,7 @@ def csv_file_to_indicator_list(file_path, col_num, starting_row, auto_detect, de
# TODO: add run on all columns functionality
line_index = 0
- with open(file_path, 'r') as csv_file:
+ with open(file_path) as csv_file:
# csv reader can fail when encountering a NULL byte (\0) - so we go through the file and take out the NUL bytes.
file_reader = csv.reader(line.replace('\0', '') for line in csv_file)
for row in file_reader:
@@ -78,7 +78,7 @@ def xls_file_to_indicator_list(file_path, sheet_name, col_num, starting_row, aut
def txt_file_to_indicator_list(file_path, auto_detect, default_type, limit, offset):
- with open(file_path, "r") as fp:
+ with open(file_path) as fp:
file_data = fp.read()
indicator_list = []
@@ -211,7 +211,7 @@ def fetch_indicators_from_file(args):
file = demisto.getFilePath(args.get('entry_id'))
file_path = file['path']
file_name = file['name']
- auto_detect = True if args.get('auto_detect') == 'True' else False
+ auto_detect = args.get('auto_detect') == 'True'
default_type = args.get('default_type')
limit = args.get("limit")
@@ -229,7 +229,7 @@ def fetch_indicators_from_file(args):
# from which row should I start reading the indicators, it is used to avoid table headers.
starting_row = args.get('starting_row')
- if file_name.endswith('xls') or file_name.endswith('xlsx'):
+ if file_name.endswith(('xls', 'xlsx')):
indicator_list = xls_file_to_indicator_list(file_path, sheet_name, int(indicator_col_num) - 1,
int(starting_row) - 1, auto_detect, default_type,
indicator_type_col_num, limit, offset)
@@ -265,7 +265,7 @@ def main():
try:
return_outputs(*fetch_indicators_from_file(demisto.args()))
except Exception as ex:
- return_error('Failed to execute Fetch Indicators From File. Error: {}'.format(str(ex)),
+ return_error(f'Failed to execute Fetch Indicators From File. Error: {str(ex)}',
error=traceback.format_exc())
diff --git a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
index 40b143180884..0ef4522874e2 100644
--- a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
+++ b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile.yml
@@ -64,7 +64,7 @@ tags:
- indicators
timeout: '0'
type: python
-dockerimage: demisto/py3-tools:1.0.0.89345
+dockerimage: demisto/py3-tools:1.0.0.96102
fromversion: 6.5.0
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile_test.py b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile_test.py
index 2ad8e09e11d0..dae42b1f2a03 100644
--- a/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile_test.py
+++ b/Packs/CommonScripts/Scripts/FetchIndicatorsFromFile/FetchIndicatorsFromFile_test.py
@@ -63,7 +63,7 @@ def test_csv_file_to_indicator_list_1():
result = csv_file_to_indicator_list(file_path='test_data/Hashes_list.csv',
col_num=0, starting_row=0, auto_detect=True, default_type=None, type_col=None,
limit=None, offset=0)
- assert CSV_TEST_RESULTS_1 == result
+ assert result == CSV_TEST_RESULTS_1
def test_csv_file_to_indicator_list_2():
@@ -71,7 +71,7 @@ def test_csv_file_to_indicator_list_2():
result = csv_file_to_indicator_list(file_path='test_data/Hashes_list.csv',
col_num=0, starting_row=1, auto_detect=False, default_type='Domain',
type_col=None, limit=2, offset=0)
- assert CSV_TEST_RESULTS_2 == result
+ assert result == CSV_TEST_RESULTS_2
def test_xls_file_to_indicator_list_1():
@@ -124,15 +124,15 @@ def test_txt_file_to_indicator_list_2():
def test_detect_type():
from FetchIndicatorsFromFile import detect_type
- assert 'File' == detect_type('4f79697b40d0932e91105bd496908f8e02c130a0e36f6d3434d6243e79ef82e0')
- assert 'Domain' == detect_type('demisto.com')
- assert 'IP' == detect_type('8.8.8.8')
- assert 'IPv6' == detect_type('2001:db8:85a3:8d3:1319:8a2e:370:7348')
- assert 'URL' == detect_type('www.demisto.com/path')
- assert 'CIDR' == detect_type('8.8.8.8/12')
- assert 'Email' == detect_type('some@mail.com')
- assert 'DomainGlob' == detect_type('*.demisto.com')
- assert 'IPv6CIDR' == detect_type('2001:db8:85a3:8d3:1319:8a2e:370:7348/32')
+ assert detect_type('4f79697b40d0932e91105bd496908f8e02c130a0e36f6d3434d6243e79ef82e0') == 'File'
+ assert detect_type('demisto.com') == 'Domain'
+ assert detect_type('8.8.8.8') == 'IP'
+ assert detect_type('2001:db8:85a3:8d3:1319:8a2e:370:7348') == 'IPv6'
+ assert detect_type('www.demisto.com/path') == 'URL'
+ assert detect_type('8.8.8.8/12') == 'CIDR'
+ assert detect_type('some@mail.com') == 'Email'
+ assert detect_type('*.demisto.com') == 'DomainGlob'
+ assert detect_type('2001:db8:85a3:8d3:1319:8a2e:370:7348/32') == 'IPv6CIDR'
assert None is detect_type('not_an_indicator')
diff --git a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
index 606bbd897d42..044347f2954a 100644
--- a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
+++ b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.py
@@ -3,11 +3,32 @@
from CommonServerPython import * # noqa: F401
+def escape_special_characters(text: str) -> str:
+ """Add escape char.
+
+ Args:
+ text (str): indicator value.
+
+ Returns:
+ return the value with the added escape char.
+ """
+ text = text.replace('\n', '\\n')
+ text = text.replace('\t', '\\t')
+ text = text.replace('\r', '\\r')
+ text = text.replace('(', '\(')
+ text = text.replace(')', '\)')
+ text = text.replace('[', '\[')
+ text = text.replace(']', '\]')
+ text = text.replace('^', '\^')
+ text = text.replace(':', '\:')
+ return text
+
+
def main():
values: list[str] = argToList(demisto.args().get("value", None))
unique_values: set[str] = {v.lower() for v in values} # search query is case insensitive
- query = f"""value:({' '.join([f'"{value}"' for value in unique_values])})"""
+ query = f"""value:({' '.join([f'"{escape_special_characters(value)}"' for value in unique_values])})"""
demisto.debug(f'{query=}')
res = demisto.searchIndicators(
diff --git a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.yml b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.yml
index 25e26b884286..a3b83373a165 100644
--- a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.yml
+++ b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache.yml
@@ -7,7 +7,7 @@ comment: Get the overall score for the indicator as calculated by DBot.
commonfields:
id: GetIndicatorDBotScoreFromCache
version: -1
-dockerimage: demisto/python3:3.10.13.86272
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: GetIndicatorDBotScoreFromCache
runas: DBotWeakRole
diff --git a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
index a5b562df94dd..e395044b72d7 100644
--- a/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
+++ b/Packs/CommonScripts/Scripts/GetIndicatorDBotScoreFromCache/GetIndicatorDBotScoreFromCache_test.py
@@ -1,5 +1,6 @@
import GetIndicatorDBotScoreFromCache
import demistomock as demisto
+import pytest
def prepare_mocks(mocker, values, cache):
@@ -160,6 +161,25 @@ def test_query_values(mocker):
]
+def test_query_values_escape_chars(mocker):
+ """
+ Given:
+ An array of indicator value with special chars that need escape
+ When:
+ Running GetIndicatorDBotScoreFromCache script.
+ Then:
+ Ensure all values in the query to demisto.searchIndicators are correct.
+ """
+ mocker.patch.object(demisto, "args", return_value={"value": ["\nhello\rhow\tare"]})
+ mocker.patch.object(demisto, "searchIndicators")
+ GetIndicatorDBotScoreFromCache.main()
+ args_list = demisto.searchIndicators.call_args_list
+ call_query = args_list[0][1]['query']
+ assert call_query in [
+ 'value:("\\nhello\\rhow\\tare")'
+ ]
+
+
def test_no_iocs_returned_from_search_indicators(mocker):
"""
Given:
@@ -179,3 +199,22 @@ def test_no_iocs_returned_from_search_indicators(mocker):
expected_result = set()
indicators_results = return_results_calls[0][1]
assert {i["Indicator"] for i in indicators_results} == expected_result
+
+
+@pytest.mark.parametrize('input, expected_res', [("hello\nhow", "hello\\nhow"),
+ ('a', 'a'),
+ ('', ''),
+ ('\t\r\n', '\\t\\r\\n'),
+ ('([', '\(\['),
+ ('^ASDF:', '\^ASDF\:')])
+def test_escape_special_characters(input, expected_res):
+ """
+ Given:
+ A string value.
+ When:
+ parsing the indicator value
+ Then:
+        Ensure a backslash is added for the special characters \n, \t, \r, (, ), [, ], ^ and :.
+ """
+ from GetIndicatorDBotScoreFromCache import escape_special_characters
+ assert expected_res == escape_special_characters(input)
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
similarity index 96%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
index 452e578add24..faa6560fa0b7 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.py
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.py
@@ -17,7 +17,8 @@ def validate_header_exists(headers, header):
return_error("Error: The supplied header name was not found.")
-def list_to_headers_and_lines(list_data, list_separator: str):
+def list_to_headers_and_lines(list_data: str, list_separator: str):
+ list_data = list_data.strip()
lines_and_headers = [(line.replace("\r", "") if line.endswith("\r") else line).split(list_separator)
for line in list_data.split('\n')]
headers = lines_and_headers[0]
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
similarity index 97%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
index 5c3ce08266d3..6d0a4760f8a1 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow.yml
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow.yml
@@ -42,7 +42,7 @@ script: '-'
subtype: python3
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.13.80593
+dockerimage: demisto/python3:3.10.14.96411
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py b/Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
similarity index 92%
rename from Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py
rename to Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
index 9967ee8f9033..3e3f48244de2 100644
--- a/Packs/CommonScripts/Scripts/GetlistRow/GetListRow_test.py
+++ b/Packs/CommonScripts/Scripts/GetListRow/GetListRow_test.py
@@ -208,3 +208,22 @@ def test_list_to_headers_and_lines(list_data, expected_headers, expected_lines):
headers, lines = list_to_headers_and_lines(list_data, ",")
assert expected_headers == headers
assert expected_lines == lines
+
+
+def test_parse_list_with_new_line_at_the_end(mocker):
+ """
+ Given:
+ - A list with a new line at the end.
+ When:
+ - Parsing the list.
+ Then:
+ - Make sure that no exception is raised and the code finished gracefully.
+ """
+ list_with_new_line_at_the_end = """,mapping_framework,mapping_framework_version,capability_group,capability_id
+0,veris,1.3.7,action.hacking
+
+"""
+ from GetListRow import parse_list
+ mocker.patch.object(demisto, "executeCommand", return_value=[{"Contents": list_with_new_line_at_the_end}])
+ res = parse_list(parse_all='false', header="mapping_framework", value="veris", list_name='test_list', list_separator=',')
+ assert res
diff --git a/Packs/CommonScripts/Scripts/GetlistRow/README.md b/Packs/CommonScripts/Scripts/GetListRow/README.md
similarity index 100%
rename from Packs/CommonScripts/Scripts/GetlistRow/README.md
rename to Packs/CommonScripts/Scripts/GetListRow/README.md
diff --git a/Packs/CommonScripts/Scripts/LanguageDetect/LanguageDetect.yml b/Packs/CommonScripts/Scripts/LanguageDetect/LanguageDetect.yml
index 7f0616ee5ae1..aa5fa1b94551 100644
--- a/Packs/CommonScripts/Scripts/LanguageDetect/LanguageDetect.yml
+++ b/Packs/CommonScripts/Scripts/LanguageDetect/LanguageDetect.yml
@@ -13,14 +13,14 @@ args:
- name: text
required: true
default: true
- description: Text to analyse for language detection
+ description: Text to analyse for language detection.
outputs:
- contextPath: langDetect.lang
- description: Language detection
+ description: Language detection.
- contextPath: langDetect.probability
- description: Probability of language detection
+ description: Probability of language detection.
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
fromversion: 5.0.0
tests:
- - LanguageDetect-Test
\ No newline at end of file
+- LanguageDetect-Test
diff --git a/Packs/CommonScripts/Scripts/ParseExcel/ParseExcel.yml b/Packs/CommonScripts/Scripts/ParseExcel/ParseExcel.yml
index d7d543049ab0..1dfda45ce03f 100644
--- a/Packs/CommonScripts/Scripts/ParseExcel/ParseExcel.yml
+++ b/Packs/CommonScripts/Scripts/ParseExcel/ParseExcel.yml
@@ -6,17 +6,17 @@ script: ''
type: python
subtype: python3
tags: []
-comment: The automation takes Excel file (entryID) as an input and parses its content to the war room and context
+comment: The automation takes Excel file (entryID) as an input and parses its content to the war room and context.
enabled: true
args:
- name: entryId
required: true
- description: The entry id of the excel file
+ description: The entry id of the excel file.
outputs:
- contextPath: ParseExcel
- description: ParseExcel
+ description: ParseExcel.
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
tests:
- ParseExcel-test
fromversion: 5.0.0
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py
new file mode 100644
index 000000000000..77ae224561a9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.py
@@ -0,0 +1,42 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+def print_to_alert_command(current_alert_id: str, value: str, alert_id: str) -> None:
+ """Prints a value to the specified alert ID.
+
+ Args:
+ current_alert_id (str): The alert ID running the script.
+ value (str): The value to print.
+ alert_id (str): The alert ID to print to.
+ """
+ entry_note = json.dumps(
+ [{"Type": 1, "ContentsFormat": EntryFormat.MARKDOWN, "Contents": f"Entry from alert #{current_alert_id}:\n{value}"}]
+ )
+ entry_tags_res: list[dict[str, Any]] = demisto.executeCommand(
+ "addEntries", {"entries": entry_note, "id": alert_id, "reputationCalcAsync": True}
+ )
+ if isError(entry_tags_res[0]):
+ return_error(get_error(entry_tags_res))
+ else:
+ return_results(CommandResults(readable_output=f"Successfully printed to alert {alert_id}."))
+
+
+def main(): # pragma: no cover
+ try:
+ current_alert: dict[str, Any] = demisto.incident()
+ current_alert_id: str = current_alert["id"]
+ args = demisto.args()
+ value: str = args["value"]
+ alert_id = args["alert_id"]
+ print_to_alert_command(
+ current_alert_id=current_alert_id,
+ value=value,
+ alert_id=alert_id,
+ )
+ except Exception as ex:
+ return_error(f"Failed to execute PrintToAlert. Error: {str(ex)}")
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+ main()
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml
new file mode 100644
index 000000000000..19db3a7aefea
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert.yml
@@ -0,0 +1,26 @@
+args:
+- description: The value to print to the war-room of the specified alert.
+ name: value
+ required: true
+- description: The alert ID to print to.
+ name: alert_id
+ required: true
+comment: Prints a value to the specified alert's war-room. The alert must be in status "Under Investigation".
+commonfields:
+ id: PrintToAlert
+ version: -1
+name: PrintToAlert
+script: '-'
+tags: []
+enabled: true
+scripttarget: 0
+timeout: '0'
+runas: DBotWeakRole
+type: python
+subtype: python3
+dockerimage: demisto/python3:3.10.14.97374
+fromversion: 8.7.0
+marketplaces:
+- marketplacev2
+tests:
+- No test - unit test
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py
new file mode 100644
index 000000000000..275e4ff12e18
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/PrintToAlert_test.py
@@ -0,0 +1,75 @@
+import pytest
+from pytest_mock import MockerFixture
+import demistomock as demisto
+from CommonServerPython import EntryType
+
+
+def test_print_to_alert(mocker: MockerFixture):
+ """Tests print_to_alert_command when the executeCommand command succeeds.
+
+ Checks that the addEntries command is called with the right arguments.
+ """
+ from PrintToAlert import print_to_alert_command
+
+ execute_command_mocker = mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.NOTE,
+ "Contents": "done",
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ print_to_alert_command(
+ current_alert_id="5",
+ value="Hello",
+ alert_id="4",
+ )
+ # Right command is called
+ assert execute_command_mocker.call_args[0][0] == "addEntries"
+ # Right arguments are given
+ assert execute_command_mocker.call_args[0][1] == {
+ "entries": '[{"Type": 1, "ContentsFormat": "markdown", "Contents": "Entry from alert #5:\\nHello"}]',
+ "id": "4",
+ "reputationCalcAsync": True,
+ }
+ assert demisto.results.call_args[0][0]["HumanReadable"] == "Successfully printed to alert 4."
+
+
+def test_print_to_alert_error(mocker: MockerFixture):
+ """Tests print_to_alert_command when the executeCommand command fails.
+
+    Checks that the system exits and an error message is returned.
+ """
+ from PrintToAlert import print_to_alert_command
+
+ error_message = "Something went wrong"
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.ERROR,
+ "Contents": error_message,
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ with pytest.raises(SystemExit):
+ print_to_alert_command(
+ current_alert_id="5",
+ value="Hello",
+ alert_id="4",
+ )
+ assert demisto.results.call_args[0][0] == {
+ "Type": EntryType.ERROR,
+ "ContentsFormat": "text",
+ "Contents": error_message,
+ "EntryContext": None,
+ }
diff --git a/Packs/CommonScripts/Scripts/PrintToAlert/README.md b/Packs/CommonScripts/Scripts/PrintToAlert/README.md
new file mode 100644
index 000000000000..7db8b25ccbd9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToAlert/README.md
@@ -0,0 +1,35 @@
+Prints a value to the specified alert's war-room. The alert must be in status "Under Investigation".
+
+## Script Data
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | |
+| Cortex XSOAR Version | 8.7.0 |
+
+## Inputs
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| value | The value to print to the war-room of the specified alert. |
+| alert_id | The alert ID to print to. |
+
+## Outputs
+---
+There are no outputs for this script.
+
+
+## Script Example
+```!PrintToAlert alert_id=5 value="Hello from the other side"```
+
+## Context Example
+```json
+{}
+```
+
+## Human Readable Output
+
+>Successfully printed to alert 5.
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py
new file mode 100644
index 000000000000..58f2f700cad9
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.py
@@ -0,0 +1,63 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+def print_to_parent_incident(alert_id: str, value: str, parent_incident_id: str) -> None:
+ """Prints a value to the alert's parent incident.
+
+ Args:
+ alert_id (str): The alert ID running the script.
+ value (str): The value to print.
+ parent_incident_id (str): The parent incident's ID of the alert.
+ """
+ entry_note = json.dumps(
+ [{"Type": 1, "ContentsFormat": EntryFormat.MARKDOWN, "Contents": f"Entry from alert #{alert_id}:\n{value}"}]
+ )
+ entry_tags_res: list[dict[str, Any]] = demisto.executeCommand(
+ "addEntries", {"entries": entry_note, "id": parent_incident_id, "reputationCalcAsync": True}
+ )
+ if isError(entry_tags_res[0]):
+ return_error(get_error(entry_tags_res))
+ else:
+ return_results(CommandResults(readable_output=f"Successfully printed to parent incident {parent_incident_id}."))
+
+
+def validate_parent_incident_id(parent_incident_id: str, alert_id: str) -> str:
+ """Validates if the parent incident ID of the alert is not empty, and return it.
+
+ Args:
+ parent_incident_id (str): The parent incident ID of the alert.
+ alert_id (str): The alert ID running the script.
+
+ Raises:
+ DemistoException: If the parent incident ID is an empty string, meaning it couldn't be found.
+
+ Returns:
+ str: The parent incident ID if not empty.
+ """
+ if not parent_incident_id:
+ raise DemistoException(f"No parent incident was found for {alert_id =}")
+ return parent_incident_id
+
+
+def main(): # pragma: no cover
+ try:
+ args = demisto.args()
+ value: str = args["value"]
+ current_alert: dict[str, Any] = demisto.incident()
+ alert_id: str = current_alert["id"]
+ parent_incident_id: str = validate_parent_incident_id(
+ parent_incident_id=current_alert.get("parentXDRIncident", ""),
+ alert_id=alert_id,
+ )
+ print_to_parent_incident(
+ alert_id=alert_id,
+ value=value,
+ parent_incident_id=parent_incident_id,
+ )
+ except Exception as ex:
+ return_error(f"Failed to execute PrintToParentIncident. Error: {str(ex)}")
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+ main()
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml
new file mode 100644
index 000000000000..d07b297cf704
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident.yml
@@ -0,0 +1,23 @@
+args:
+- description: The value to print to the parent incident's war-room.
+ name: value
+ required: true
+comment: Prints a value to the parent incident's war-room of the current alert.
+commonfields:
+ id: PrintToParentIncident
+ version: -1
+name: PrintToParentIncident
+script: '-'
+tags: []
+enabled: true
+scripttarget: 0
+timeout: '0'
+runas: DBotWeakRole
+type: python
+subtype: python3
+dockerimage: demisto/python3:3.10.14.97374
+fromversion: 8.7.0
+marketplaces:
+- marketplacev2
+tests:
+- No test - unit test
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py
new file mode 100644
index 000000000000..73fa22ba5d98
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/PrintToParentIncident_test.py
@@ -0,0 +1,83 @@
+import pytest
+from pytest_mock import MockerFixture
+import demistomock as demisto
+from CommonServerPython import EntryType, DemistoException
+
+
+def test_print_to_parent_incident(mocker: MockerFixture):
+ """Tests print_to_parent_incident when the executeCommand command succeeds.
+
+ Checks that the addEntries command is called with the right arguments.
+ """
+ from PrintToParentIncident import print_to_parent_incident
+
+ execute_command_mocker = mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.NOTE,
+ "Contents": "done",
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ print_to_parent_incident(
+ alert_id="4",
+ value="Hello",
+ parent_incident_id="INCIDENT-5",
+ )
+ # Right command is called
+ assert execute_command_mocker.call_args[0][0] == "addEntries"
+ # Right arguments are given
+ assert execute_command_mocker.call_args[0][1] == {
+ "entries": '[{"Type": 1, "ContentsFormat": "markdown", "Contents": "Entry from alert #4:\\nHello"}]',
+ "id": "INCIDENT-5",
+ "reputationCalcAsync": True,
+ }
+ assert demisto.results.call_args[0][0]["HumanReadable"] == "Successfully printed to parent incident INCIDENT-5."
+
+
+def test_print_to_alert_error(mocker: MockerFixture):
+ """Tests print_to_parent_incident when the executeCommand command fails.
+
+    Checks that the system exits and an error message is returned.
+ """
+ from PrintToParentIncident import print_to_parent_incident
+
+ error_message = "Something went wrong"
+ mocker.patch.object(
+ demisto,
+ "executeCommand",
+ return_value=[
+ {
+ "Type": EntryType.ERROR,
+ "Contents": error_message,
+ "HumanReadable": None,
+ "EntryContext": None,
+ }
+ ],
+ )
+ mocker.patch.object(demisto, "results")
+ with pytest.raises(SystemExit):
+ print_to_parent_incident(
+ alert_id="4",
+ value="Hello",
+ parent_incident_id="INCIDENT-5",
+ )
+ assert demisto.results.call_args[0][0] == {
+ "Type": EntryType.ERROR,
+ "ContentsFormat": "text",
+ "Contents": error_message,
+ "EntryContext": None,
+ }
+
+
+def test_no_parent_incident_error():
+ """Check that we return an error when no parent incident is found"""
+ from PrintToParentIncident import validate_parent_incident_id
+
+ with pytest.raises(DemistoException):
+ validate_parent_incident_id(parent_incident_id="", alert_id=4)
diff --git a/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md b/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md
new file mode 100644
index 000000000000..f806a65e6367
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/PrintToParentIncident/README.md
@@ -0,0 +1,34 @@
+Prints a value to the parent incident's war-room of the current alert.
+
+## Script Data
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | |
+| Cortex XSOAR Version | 8.7.0 |
+
+## Inputs
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| value | The value to print to the parent incident's war-room. |
+
+## Outputs
+---
+There are no outputs for this script.
+
+
+## Script Example
+```!PrintToParentIncident value="Parent of 6 I assume?"```
+
+## Context Example
+```json
+{}
+```
+
+## Human Readable Output
+
+>Successfully printed to parent incident INCIDENT-5.
diff --git a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
index 908f64d09f55..886e259bd4dd 100644
--- a/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
+++ b/Packs/CommonScripts/Scripts/StixCreator/StixCreator.yml
@@ -38,7 +38,7 @@ outputs:
description: The date/time that the indicator was last seen.
type: date
scripttarget: 0
-dockerimage: demisto/py3-tools:1.0.0.91504
+dockerimage: demisto/py3-tools:1.0.0.96102
subtype: python3
runas: DBotWeakRole
tests:
diff --git a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.py b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.py
index 92345cb8aa81..73bfda2c9bde 100644
--- a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.py
+++ b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.py
@@ -1,7 +1,9 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
+
RED_HTML_STYLE = "color:#FF1744;text-align:center;font-size:300%;padding-top:1em>"
GREEN_HTML_STYLE = "color:#00CD33;text-align:center;font-size:300%;padding-top:1em>"
+ORANGE_HTML_STYLE = "color:#FF9000;text-align:center;font-size:300%;padding-top:1em>"
GREY_HTML_STYLE = "color:#808080;text-align:center;font-size:300%;padding-top:1em>"
@@ -11,6 +13,9 @@ def main():
if investigationverdict == 'Malicious':
html = f"
"
+ elif investigationverdict == 'Suspicious':
+ html = f"
"
+
elif investigationverdict == 'Non-Malicious':
html = f"
"
diff --git a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.yml b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.yml
index 409cae170bad..b036b4819c9f 100644
--- a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.yml
+++ b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult.yml
@@ -2,7 +2,7 @@ comment: This widget displays the incident verdict or the alert verdict based on
commonfields:
id: VerdictResult
version: -1
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
engineinfo: {}
name: VerdictResult
diff --git a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult_test.py b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult_test.py
index 073db15585ce..2d4fbac08092 100644
--- a/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult_test.py
+++ b/Packs/CommonScripts/Scripts/VerdictResult/VerdictResult_test.py
@@ -8,6 +8,7 @@
[
('Malicious', '
Malicious
'),
('Non-Malicious', '
Non-Malicious
'),
+ ('Suspicious', '
Suspicious
'),
('blabla', '
Not Determined
')
]
)
diff --git a/Packs/CommonScripts/Scripts/ZipFile/README.md b/Packs/CommonScripts/Scripts/ZipFile/README.md
index ceceb3808122..44f44bbfa80c 100644
--- a/Packs/CommonScripts/Scripts/ZipFile/README.md
+++ b/Packs/CommonScripts/Scripts/ZipFile/README.md
@@ -36,3 +36,7 @@ Supported Cortex XSOAR versions: 5.0.0 and later.
| File.SSDeep | The ssdeep hash of the file \(same as displayed in file entries\). | String |
| File.Extension | The file extension, for example: 'xls'. | String |
| File.Type | The file type, as determined by libmagic \(same as displayed in file entries\). | String |
+
+### Troubleshooting
+For security reasons, only AES encryption is supported. Note that AES-encrypted zip files cannot be opened on Windows without a third-party unzip application. For more information about the encryption methods, see https://en.wikipedia.org/wiki/ZIP_(file_format)#Encryption.
+
diff --git a/Packs/CommonScripts/Scripts/ZipFile/ZipFile.py b/Packs/CommonScripts/Scripts/ZipFile/ZipFile.py
index 752ddd4166e6..f6fbc25e4d9e 100644
--- a/Packs/CommonScripts/Scripts/ZipFile/ZipFile.py
+++ b/Packs/CommonScripts/Scripts/ZipFile/ZipFile.py
@@ -4,13 +4,41 @@
import re
import shutil
-import zipfile
-import pyminizip
+import pyzipper
from os.path import isfile
ESCAPE_CHARACTERS = r'[/\<>"|?*]'
+def test_compression_succeeded(zip_name: str, password: str = None):
+ with pyzipper.AESZipFile(zip_name) as zf:
+ # testing for file integrity
+ if password:
+ zf.setpassword(bytes(password, 'utf-8'))
+ ret = zf.testzip()
+ if ret is not None:
+ demisto.info('zf.testzip() failed')
+ raise DemistoException('There was a problem with zipping the file: ' + ret + ' is corrupted')
+
+
+def compress_multiple(file_names: List[str], zip_name: str, password: str = None):
+ """
+ Compress multiple files into a zip file.
+ :param file_names: list of file names to compress
+ :param zip_name: name of the zip file to create
+ :param password: password to use for encryption
+ """
+ compression = pyzipper.ZIP_DEFLATED
+ encryption = pyzipper.WZ_AES if password else None
+ demisto.debug(f'zipping {file_names=}')
+ with pyzipper.AESZipFile(zip_name, mode='w', compression=compression, encryption=encryption) as zf:
+ zf.pwd = bytes(password, 'utf-8') if password else None
+ for file_name in file_names:
+ zf.write(file_name)
+ test_compression_succeeded(zip_name, password)
+ zf.close()
+
+
def escape_illegal_characters_in_file_name(file_name: str) -> str:
if file_name:
file_name = re.sub(ESCAPE_CHARACTERS, '-', file_name)
@@ -20,10 +48,6 @@ def escape_illegal_characters_in_file_name(file_name: str) -> str:
def main():
- try: # in order to support compression of the file
- compression = zipfile.ZIP_DEFLATED
- except Exception:
- compression = zipfile.ZIP_STORED
try:
args = demisto.args()
zipName = None
@@ -75,21 +99,7 @@ def main():
zipName = fileCurrentName + '.zip'
# zipping the file
- if password:
- pyminizip.compress_multiple(file_names, ['./'] * len(file_names), zipName, password, 5)
-
- else:
- zf = zipfile.ZipFile(zipName, mode='w')
- try:
- for file_name in file_names:
- zf.write(file_name, compress_type=compression)
- # testing for file integrity
- ret = zf.testzip()
- if ret is not None:
- raise DemistoException('There was a problem with the zipping, file: ' + ret + ' is corrupted')
-
- finally:
- zf.close()
+ compress_multiple(file_names, zipName, password)
with open(zipName, 'rb') as f:
file_data = f.read()
diff --git a/Packs/CommonScripts/Scripts/ZipFile/ZipFile.yml b/Packs/CommonScripts/Scripts/ZipFile/ZipFile.yml
index ea26261fa2b4..680eb9ba654c 100644
--- a/Packs/CommonScripts/Scripts/ZipFile/ZipFile.yml
+++ b/Packs/CommonScripts/Scripts/ZipFile/ZipFile.yml
@@ -3,11 +3,11 @@ args:
isArray: true
name: entryID
required: true
-- description: 'Name of the output file, for example: zipName="test" would result in output file "test.zip"'
+- description: 'Name of the output file, for example: zipName="test" would result in output file "test.zip".'
name: zipName
-- description: 'Used to create a password protected zip file. Example: password="abcd"'
+- description: 'Used to create a password protected zip file. Example: password="abcd".'
name: password
-comment: Zip a file and upload to war room
+comment: Zip a file and upload to war room.
commonfields:
id: ZipFile
version: -1
@@ -54,7 +54,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/py3-tools:1.0.0.49703
+dockerimage: demisto/py3-tools:1.0.0.95440
fromversion: 5.0.0
tests:
-- ZipFile-Test
\ No newline at end of file
+- ZipFile-Test
diff --git a/Packs/CommonScripts/Scripts/ZipFile/ZipFile_test.py b/Packs/CommonScripts/Scripts/ZipFile/ZipFile_test.py
index c744c828a42c..d62e3fe08a27 100644
--- a/Packs/CommonScripts/Scripts/ZipFile/ZipFile_test.py
+++ b/Packs/CommonScripts/Scripts/ZipFile/ZipFile_test.py
@@ -1,5 +1,8 @@
+import os
import pytest
-from ZipFile import escape_illegal_characters_in_file_name
+import pyzipper
+from ZipFile import escape_illegal_characters_in_file_name, compress_multiple
+import tempfile
ESCAPE_CHARACTERS_PACK = [
('/Users/user/Downloads/b/a/testingfile.txt', '-Users-user-Downloads-b-a-testingfile.txt'),
@@ -9,6 +12,80 @@
]
+def unzip(zip_file_path: str, password: str = None):
+ with tempfile.TemporaryDirectory() as unzip_dir, pyzipper.AESZipFile(zip_file_path) as zf:
+ zf.pwd = bytes(password, 'utf-8') if password else None
+ zf.extractall(path=unzip_dir)
+
+
@pytest.mark.parametrize(('input_name', 'output_name'), ESCAPE_CHARACTERS_PACK)
def test_escape_characters_in_file_name(input_name, output_name):
    """Ensure illegal path characters are replaced as expected for each fixture pair."""
    sanitized = escape_illegal_characters_in_file_name(input_name)
    assert sanitized == output_name
+
+
def test_compress_multiple_with_password():
    """
    Given:
        - A directory with files to zip.
    When:
        - Calling the function compress_multiple.
    Then:
        - The function should not raise an exception.
    """
    data_dir = './test_data'
    files_to_zip = []
    for entry in os.listdir(data_dir):
        candidate = os.path.join(data_dir, entry)
        if os.path.isfile(candidate):
            files_to_zip.append(candidate)
    # The temporary file is deleted automatically when the context exits.
    with tempfile.NamedTemporaryFile(suffix='.zip') as out_zip:
        compress_multiple(
            file_names=files_to_zip,
            zip_name=out_zip.name,
            password='123',
        )
+
+
def test_zip_and_unzip_with_password():
    """
    Given:
        - A directory with files to zip.
    When:
        - Calling the function compress_multiple with a password.
    Then:
        - We can unzip the file with the correct password.
    """
    data_dir = './test_data'
    files_to_zip = [
        os.path.join(data_dir, entry)
        for entry in os.listdir(data_dir)
        if os.path.isfile(os.path.join(data_dir, entry))
    ]
    with tempfile.NamedTemporaryFile(suffix='.zip') as out_zip:
        compress_multiple(
            file_names=files_to_zip,
            zip_name=out_zip.name,
            password='123',
        )
        # Round-trip: extraction with the same password must succeed.
        unzip(out_zip.name, '123')
+
+
def test_unzip_wrong_password():
    """
    Given:
        - A directory with files to zip.
    When:
        - Calling the function compress_multiple with a password.
    Then:
        - We cannot unzip the file with the wrong password.
    """
    data_dir = './test_data'
    files_to_zip = [
        os.path.join(data_dir, entry)
        for entry in os.listdir(data_dir)
        if os.path.isfile(os.path.join(data_dir, entry))
    ]
    with tempfile.NamedTemporaryFile(suffix='.zip') as out_zip:
        compress_multiple(
            file_names=files_to_zip,
            zip_name=out_zip.name,
            password='123',
        )
        # Extraction with a different password must fail with a "Bad password" error.
        with pytest.raises(Exception) as exc_info:
            unzip(out_zip.name, '1234')

        assert 'Bad password' in exc_info.value.args[0]
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_image.png b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_image.png
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_image.svg b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_image.svg
new file mode 100644
index 000000000000..c55c7ad33364
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_image.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.docx b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.docx
new file mode 100644
index 000000000000..47d20de11fca
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.docx differ
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pages b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pages
new file mode 100644
index 000000000000..c6a9efcc3e6a
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pages differ
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pdf b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pdf
new file mode 100644
index 000000000000..92d6584eeb57
Binary files /dev/null and b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.pdf differ
diff --git a/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.txt b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.txt
new file mode 100644
index 000000000000..9daeafb9864c
--- /dev/null
+++ b/Packs/CommonScripts/Scripts/ZipFile/test_data/test_txt.txt
@@ -0,0 +1 @@
+test
diff --git a/Packs/CommonScripts/TestPlaybooks/playbook-HttpV2-Test.yml b/Packs/CommonScripts/TestPlaybooks/playbook-HttpV2-Test.yml
index ab920650de4f..2344afb8a761 100644
--- a/Packs/CommonScripts/TestPlaybooks/playbook-HttpV2-Test.yml
+++ b/Packs/CommonScripts/TestPlaybooks/playbook-HttpV2-Test.yml
@@ -1,29 +1,29 @@
id: HttpV2-test
version: -1
-vcShouldKeepItemLegacyProdMachine: false
name: HttpV2-test
description: Test playbook for HttpV2 script.
starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 5aafd664-6a5c-47e7-873a-148f26bf3a66
+ taskid: d28cd856-4513-4801-8d1b-ebc8ff3a2440
type: start
task:
- id: 5aafd664-6a5c-47e7-873a-148f26bf3a66
+ id: d28cd856-4513-4801-8d1b-ebc8ff3a2440
version: -1
name: ""
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- - "6"
+ - "1"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 265,
+ "x": 50,
"y": 50
}
}
@@ -36,12 +36,12 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: 0d291b16-44f3-414c-8cb7-5e1a209de353
+ taskid: 6b7a5d1e-ca69-4abf-8d43-b4b62f95973e
type: regular
task:
- id: 0d291b16-44f3-414c-8cb7-5e1a209de353
+ id: 6b7a5d1e-ca69-4abf-8d43-b4b62f95973e
version: -1
- name: Test Get request
+ name: GET
description: Sends http request
scriptName: HttpV2
type: regular
@@ -51,8 +51,6 @@ tasks:
'#none#':
- "8"
scriptarguments:
- auth_credentials:
- simple: '{TOMMED}j2Itfo6MBOf51amxJ3lzpvzpIR1nk5exU9+f8vAp3B8bWbjzUE7L93zLz/uIRc6ZUTlfbmPSgC+WfR++rMiqni8uaUixoDoB/Qc4oovucYmVPb2Tgo8AdrPUJJ1NtkT2'
method:
simple: GET
parse_response_as:
@@ -61,17 +59,15 @@ tasks:
simple: json
response_content_type:
simple: json
- unsecure:
- simple: "True"
url:
- simple: https://paloaltonfr3.jamfcloud.com/JSSResource/computers/id/1/subset/General
+ simple: https://reqres.in/api/users/2
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 480,
- "y": 360
+ "x": 50,
+ "y": 195
}
}
note: false
@@ -81,14 +77,14 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "2":
- id: "2"
- taskid: 71af1c89-8a24-4ab5-8d9d-4066bece6855
+ "3":
+ id: "3"
+ taskid: dc53d99d-12cd-49d4-86a1-9aedb9275607
type: regular
task:
- id: 71af1c89-8a24-4ab5-8d9d-4066bece6855
+ id: dc53d99d-12cd-49d4-86a1-9aedb9275607
version: -1
- name: Test post XML body request
+ name: Test GET - Save to file
description: Sends http request
scriptName: HttpV2
type: regular
@@ -96,28 +92,22 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "10"
+ - "15"
scriptarguments:
- auth_credentials:
- simple: '{TOMMED}31q0XK2yezxbkvSH7BuD61zitBApz4wPKWW9+teSmU4nh7vbNRg9QE08t8p+dx6OJMc4yf0mpd8s3Y6HsedRt//eok0QoOiqIXO4K+eOPIYrd0ufMy6+zuI9la3N+aYv'
- body:
- simple: 123456138
- headers:
- keyvalue:
- - key: Content-Type
- value:
- simple: application/xml
- - key: Accept
- value:
- simple: application/xml
+ filename:
+ simple: GetHttpFile
method:
- simple: POST
+ simple: GET
parse_response_as:
- simple: xml
- unsecure:
- simple: "True"
+ simple: json
+ request_content_type:
+ simple: json
+ response_content_type:
+ simple: json
+ save_as_file:
+ simple: "yes"
url:
- simple: https://paloaltonfr3.jamfcloud.com/JSSResource/computercommands/command/DeviceLock
+ simple: https://reqres.in/api/users/2
separatecontext: false
continueonerrortype: ""
view: |-
@@ -134,14 +124,42 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "3":
- id: "3"
- taskid: 106dd435-4d20-411d-8678-6e72e5bc0eee
+ "4":
+ id: "4"
+ taskid: bdcd39a9-fdf6-4afb-8d6e-9c2ee908e9ce
+ type: title
+ task:
+ id: bdcd39a9-fdf6-4afb-8d6e-9c2ee908e9ce
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2295
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: 9da19814-556b-4a2b-83e8-6340c4f4226c
type: regular
task:
- id: 106dd435-4d20-411d-8678-6e72e5bc0eee
+ id: 9da19814-556b-4a2b-83e8-6340c4f4226c
version: -1
- name: Test Get request- Save to file
+ name: Test redirect=False
description: Sends http request
scriptName: HttpV2
type: regular
@@ -149,33 +167,23 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "7"
+ - "9"
scriptarguments:
- auth_credentials:
- simple: '{TOMMED}fhSbZguI0bPFrCdoRev70KJMa8qXaFAPedj2VE08u3f5w2FT2xMMmC6hvHtI441SEMvOeJlu5XFn3KkCtfY4ZD6wJh0q+6WMwb66oO0HOen+fcnIvTaovEUeNViHVXKM'
- filename:
- simple: GetHttpFile
+ enable_redirect:
+ simple: "False"
method:
simple: GET
parse_response_as:
- simple: json
- request_content_type:
- simple: json
- response_content_type:
- simple: json
- save_as_file:
- simple: "yes"
- unsecure:
- simple: "True"
+ simple: raw response
url:
- simple: https://paloaltonfr3.jamfcloud.com/JSSResource/computers/id/1/subset/General
+ simple: https://ynet.co.il
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 480,
- "y": 720
+ "x": 50,
+ "y": 1420
}
}
note: false
@@ -185,24 +193,128 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "4":
- id: "4"
- taskid: 23acf0ba-8f6e-4bbc-8ade-1498f5017af7
- type: title
+ "8":
+ id: "8"
+ taskid: 8731ce03-4225-4172-860d-b5c2f4f64de7
+ type: condition
task:
- id: 23acf0ba-8f6e-4bbc-8ade-1498f5017af7
+ id: 8731ce03-4225-4172-860d-b5c2f4f64de7
version: -1
- name: Done
- type: title
+ name: Verify Outputs
+ type: condition
iscommand: false
brand: ""
+ nexttasks:
+ "yes":
+ - "11"
separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: HttpRequest.Response.StatusCode
+ iscontext: true
+ right:
+ value:
+ simple: "200"
continueonerrortype: ""
view: |-
{
"position": {
- "x": 265,
- "y": 1245
+ "x": 50,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: ff38dbd3-0990-415b-8496-3f6a0e5eea76
+ type: condition
+ task:
+ id: ff38dbd3-0990-415b-8496-3f6a0e5eea76
+ version: -1
+ name: Verify status code 301
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "13"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: HttpRequest.Response.StatusCode
+ iscontext: true
+ right:
+ value:
+ simple: "301"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1595
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: a833130a-b650-41d9-8e92-c347aec86a8a
+ type: condition
+ task:
+ id: a833130a-b650-41d9-8e92-c347aec86a8a
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "4"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: HttpRequest.Response.StatusCode
+ iscontext: true
+ right:
+ value:
+ simple: "200"
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${HttpRequest.Response.ParsedBody.root.firstName}
+ iscontext: true
+ right:
+ value:
+ simple: John
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2120
}
}
note: false
@@ -212,54 +324,37 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "5":
- id: "5"
- taskid: be99e227-c05c-4d4f-8a34-f60c0515e69b
+ "11":
+ id: "11"
+ taskid: 967d58f3-64a8-47c8-8211-3fcc16a80eee
type: regular
task:
- id: be99e227-c05c-4d4f-8a34-f60c0515e69b
+ id: 967d58f3-64a8-47c8-8211-3fcc16a80eee
version: -1
- name: Test post - Save to file
- description: Sends http request
- scriptName: HttpV2
+ name: '--- Delete context ---'
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
type: regular
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "4"
+ - "3"
scriptarguments:
- auth_credentials:
- simple: '{TOMMED}/GcCBEtsf13+e4wYDSs/gd9/6eMw6HJdfGf1s1ziB/SH/a5wfq3kRrhSzM0bKHEbKQEqqac7ZQGNVyyZ08E4HlafejTKqVR/N8nUMPtjZRaHmio3WI+aFt2Zx6Dt1pOv'
- body:
- simple: 123456138
- filename:
- simple: PostHttpFile
- headers:
- keyvalue:
- - key: Content-Type
- value:
- simple: application/xml
- - key: Accept
- value:
- simple: application/xml
- method:
- simple: POST
- parse_response_as:
- simple: xml
- save_as_file:
+ all:
simple: "yes"
- unsecure:
- simple: "True"
- url:
- simple: https://paloaltonfr3.jamfcloud.com/JSSResource/computercommands/command/DeviceLock
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 1070
+ "y": 545
}
}
note: false
@@ -269,14 +364,14 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "6":
- id: "6"
- taskid: ea8b832c-ef54-4568-8275-6f4ccc790fa6
+ "12":
+ id: "12"
+ taskid: 8af68004-75b9-4995-82b1-dd70a7524fef
type: regular
task:
- id: ea8b832c-ef54-4568-8275-6f4ccc790fa6
+ id: 8af68004-75b9-4995-82b1-dd70a7524fef
version: -1
- name: Delete context
+ name: '--- Delete context ---'
description: |-
Delete field from context.
@@ -289,8 +384,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "1"
- - "2"
+ - "7"
scriptarguments:
all:
simple: "yes"
@@ -299,8 +393,8 @@ tasks:
view: |-
{
"position": {
- "x": 265,
- "y": 195
+ "x": 50,
+ "y": 1245
}
}
note: false
@@ -310,38 +404,37 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "7":
- id: "7"
- taskid: 6953ef2a-ac65-4143-8c4c-ca6767e29ced
+ "13":
+ id: "13"
+ taskid: 7a5e3eef-0b94-4174-82f4-40ccebc706ec
type: regular
task:
- id: 6953ef2a-ac65-4143-8c4c-ca6767e29ced
+ id: 7a5e3eef-0b94-4174-82f4-40ccebc706ec
version: -1
- name: Test redirect=False
- description: Sends http request
- scriptName: HttpV2
+ name: '--- Delete context ---'
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
type: regular
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "9"
+ - "17"
scriptarguments:
- enable_redirect:
- simple: "False"
- method:
- simple: GET
- parse_response_as:
- simple: raw response
- url:
- simple: https://ynet.co.il
+ all:
+ simple: "yes"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 480,
- "y": 895
+ "x": 50,
+ "y": 1770
}
}
note: false
@@ -351,20 +444,20 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "8":
- id: "8"
- taskid: 62888d12-9fa4-45bb-81af-3b706f851cb8
+ "15":
+ id: "15"
+ taskid: 6ae49e17-e11b-4929-892e-e7da478e37e6
type: condition
task:
- id: 62888d12-9fa4-45bb-81af-3b706f851cb8
+ id: 6ae49e17-e11b-4929-892e-e7da478e37e6
version: -1
- name: Verify Outputs
+ name: Check downloaded file name
type: condition
iscommand: false
brand: ""
nexttasks:
"yes":
- - "3"
+ - "16"
separatecontext: false
conditions:
- label: "yes"
@@ -372,17 +465,17 @@ tasks:
- - operator: isEqualString
left:
value:
- simple: HttpRequest.Response.StatusCode
+ simple: ${File.Name}
iscontext: true
right:
value:
- simple: "200"
+ simple: GetHttpFile
continueonerrortype: ""
view: |-
{
"position": {
- "x": 480,
- "y": 545
+ "x": 50,
+ "y": 895
}
}
note: false
@@ -392,37 +485,37 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "9":
- id: "9"
- taskid: 2753f23c-0eb8-4494-8b01-f245b58d7e2f
+ "16":
+ id: "16"
+ taskid: 633390b4-b095-4398-87a5-4501e25e24a9
type: condition
task:
- id: 2753f23c-0eb8-4494-8b01-f245b58d7e2f
+ id: 633390b4-b095-4398-87a5-4501e25e24a9
version: -1
- name: Verify Outputs
+ name: Check downloaded file size
type: condition
iscommand: false
brand: ""
nexttasks:
"yes":
- - "4"
+ - "12"
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: isEqualString
+ - - operator: isEqualNumber
left:
value:
- simple: HttpRequest.Response.StatusCode
+ simple: ${File.Size}
iscontext: true
right:
value:
- simple: "301"
+ simple: "280"
continueonerrortype: ""
view: |-
{
"position": {
- "x": 480,
+ "x": 50,
"y": 1070
}
}
@@ -433,38 +526,36 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "10":
- id: "10"
- taskid: e7eaf722-9f59-4709-803b-e4bdcfe61bb5
- type: condition
+ "17":
+ id: "17"
+ taskid: bf09a886-208d-47e9-8901-0cc122ed18f8
+ type: regular
task:
- id: e7eaf722-9f59-4709-803b-e4bdcfe61bb5
+ id: bf09a886-208d-47e9-8901-0cc122ed18f8
version: -1
- name: Verify Outputs
- type: condition
+ name: GET xml
+ description: Sends a HTTP request with advanced capabilities
+ scriptName: HttpV2
+ type: regular
iscommand: false
brand: ""
nexttasks:
- "yes":
- - "5"
+ '#none#':
+ - "10"
+ scriptarguments:
+ method:
+ simple: GET
+ parse_response_as:
+ simple: xml
+ url:
+ simple: https://mocktarget.apigee.net/xml
separatecontext: false
- conditions:
- - label: "yes"
- condition:
- - - operator: isEqualString
- left:
- value:
- simple: HttpRequest.Response.StatusCode
- iscontext: true
- right:
- value:
- simple: "201"
continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 895
+ "y": 1945
}
}
note: false
@@ -479,8 +570,8 @@ view: |-
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 1260,
- "width": 810,
+ "height": 2310,
+ "width": 380,
"x": 50,
"y": 50
}
@@ -488,4 +579,4 @@ view: |-
}
inputs: []
outputs: []
-fromversion: 6.5.0
\ No newline at end of file
+fromversion: 6.5.0
diff --git a/Packs/CommonScripts/pack_metadata.json b/Packs/CommonScripts/pack_metadata.json
index dc976d337751..3391455b29b3 100644
--- a/Packs/CommonScripts/pack_metadata.json
+++ b/Packs/CommonScripts/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Scripts",
"description": "Frequently used scripts pack.",
"support": "xsoar",
- "currentVersion": "1.14.48",
+ "currentVersion": "1.15.8",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonTypes/ReleaseNotes/3_5_4.md b/Packs/CommonTypes/ReleaseNotes/3_5_4.md
new file mode 100644
index 000000000000..068f305dd4b0
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_5_4.md
@@ -0,0 +1,4 @@
+
+#### Indicator Types
+
+- **Email**
diff --git a/Packs/CommonTypes/ReleaseNotes/3_5_5.md b/Packs/CommonTypes/ReleaseNotes/3_5_5.md
new file mode 100644
index 000000000000..10b832ef9b32
--- /dev/null
+++ b/Packs/CommonTypes/ReleaseNotes/3_5_5.md
@@ -0,0 +1,3 @@
+## Common Types
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonTypes/pack_metadata.json b/Packs/CommonTypes/pack_metadata.json
index 22cff7cb58ad..a356f1f32352 100644
--- a/Packs/CommonTypes/pack_metadata.json
+++ b/Packs/CommonTypes/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Types",
"description": "This Content Pack will get you up and running in no-time and provide you with the most commonly used incident & indicator fields and types.",
"support": "xsoar",
- "currentVersion": "3.5.3",
+ "currentVersion": "3.5.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CommonWidgets/ReleaseNotes/1_2_50.md b/Packs/CommonWidgets/ReleaseNotes/1_2_50.md
new file mode 100644
index 000000000000..f9e87e14b2b5
--- /dev/null
+++ b/Packs/CommonWidgets/ReleaseNotes/1_2_50.md
@@ -0,0 +1,3 @@
+## Common Widgets
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CommonWidgets/ReleaseNotes/1_2_51.md b/Packs/CommonWidgets/ReleaseNotes/1_2_51.md
new file mode 100644
index 000000000000..619e37f18ef8
--- /dev/null
+++ b/Packs/CommonWidgets/ReleaseNotes/1_2_51.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### MyToDoTasksWidget
+
+- Fixed an issue where the incident link was broken in SaaS platforms.
+- Updated the Docker image to: *demisto/python3:3.10.14.97100*.
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
index 02a6f104a9d8..22a86f0e1d33 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.py
@@ -5,6 +5,13 @@
MAX_ENTRIES = 30
def get_clickable_incident_id(incident_id):
    """Render *incident_id* as a markdown link to its case-info page.

    On XSOAR 6 the route is hash-based ('#/...'); on XSIAM / XSOAR SaaS the
    relative path is used as-is.
    """
    relative_url = os.path.join("Custom/caseinfoid", incident_id)
    if is_xsiam_or_xsoar_saas():
        target = relative_url
    else:
        target = f'#/{relative_url}'
    return f'[{incident_id}]({target})'
+
+
def get_open_to_do_tasks_of_current_user() -> List[Dict]:
body = {
"dataType": "todos",
@@ -28,8 +35,6 @@ def get_open_to_do_tasks_of_current_user() -> List[Dict]:
title = task.get('title', '')
description = task.get('description', '')
task_id = task.get('id', '')
- incident_id = task.get('incidentId', '')
- clickable_incident_id = f'[{incident_id}]({os.path.join("#/Custom/caseinfoid", incident_id)})'
if sla := task.get('dueDate', ''):
sla_dt = parse(sla)
assert sla_dt is not None, f'could not parse {sla}'
@@ -41,7 +46,7 @@ def get_open_to_do_tasks_of_current_user() -> List[Dict]:
'Task ID': task_id,
'SLA': sla,
'Opened By': opened_by,
- 'Incident ID': clickable_incident_id
+ 'Incident ID': get_clickable_incident_id(incident_id=task.get('incidentId', ''))
})
else:
demisto.error(f'Failed running POST query to /v2/statistics/widgets/query.\n{str(todo_tasks_query_res)}')
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
index 3800bfec00f9..3c0fcc694ef5 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget.yml
@@ -9,7 +9,7 @@ tags:
timeout: '0'
type: python
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.97100
fromversion: 6.1.0
tests:
- No test
diff --git a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
index 6e90c90ebba5..6b346f64d177 100644
--- a/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
+++ b/Packs/CommonWidgets/Scripts/MyToDoTasksWidget/MyToDoTasksWidget_test.py
@@ -1,7 +1,8 @@
import json
import demistomock as demisto
-from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user
+from MyToDoTasksWidget import get_open_to_do_tasks_of_current_user, get_clickable_incident_id
+import pytest
def test_open_to_do_tasks_of_current_user(mocker):
@@ -91,3 +92,18 @@ def test_no_open_to_do_tasks(mocker):
table = get_open_to_do_tasks_of_current_user()
assert len(table) == 0
+
+
@pytest.mark.parametrize('is_xsoar_8_or_xsiam', [True, False])
def test_clickable_incident_id(mocker, is_xsoar_8_or_xsiam):
    """
    Given:
        - An incident id to create a clickable incident link from.
    When:
        - Running get_clickable_incident_id on XSIAM/XSOAR 8 and on XSOAR 6.
    Then:
        - Ensure '#/' is in the created link only on XSOAR 6.
    """
    import MyToDoTasksWidget
    mocker.patch.object(MyToDoTasksWidget, 'is_xsiam_or_xsoar_saas', return_value=is_xsoar_8_or_xsiam)
    link = get_clickable_incident_id('1234')
    assert ('#/' in link) == (not is_xsoar_8_or_xsiam)
diff --git a/Packs/CommonWidgets/pack_metadata.json b/Packs/CommonWidgets/pack_metadata.json
index 7eb06011ed8e..0ca859d3be24 100644
--- a/Packs/CommonWidgets/pack_metadata.json
+++ b/Packs/CommonWidgets/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Common Widgets",
"description": "Frequently used widgets pack.",
"support": "xsoar",
- "currentVersion": "1.2.49",
+ "currentVersion": "1.2.51",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Confluera/ReleaseNotes/1_0_14.md b/Packs/Confluera/ReleaseNotes/1_0_14.md
new file mode 100644
index 000000000000..3c570bd99bc0
--- /dev/null
+++ b/Packs/Confluera/ReleaseNotes/1_0_14.md
@@ -0,0 +1,39 @@
+
+#### Scripts
+
+##### ConflueraDetectionsData
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraProgressionsDataWarroom
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraDetectionsCount
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraProgressionsCount
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraProgressionsData
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraDetectionsSummary
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraDetectionsDataWarroom
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### ConflueraDetectionsSummaryWarroom
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### IqHubLog
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsCount/ConflueraDetectionsCount.yml b/Packs/Confluera/Scripts/ConflueraDetectionsCount/ConflueraDetectionsCount.yml
index 54542528dc59..4401f00d01b8 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsCount/ConflueraDetectionsCount.yml
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsCount/ConflueraDetectionsCount.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs detections count
+ Logs detections count.
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-detections
should:
- Confluera|||confluera-fetch-detections
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsData/ConflueraDetectionsData.yml b/Packs/Confluera/Scripts/ConflueraDetectionsData/ConflueraDetectionsData.yml
index 8f4cb1418b3a..af1ebb717f9a 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsData/ConflueraDetectionsData.yml
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsData/ConflueraDetectionsData.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs detections data ( detection vs risk-contribution )
+ Logs detections data ( detection vs risk-contribution ).
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-detections
should:
- Confluera|||confluera-fetch-detections
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsDataWarroom/ConflueraDetectionsDataWarroom.yml b/Packs/Confluera/Scripts/ConflueraDetectionsDataWarroom/ConflueraDetectionsDataWarroom.yml
index 9e741f1879d6..84c8a1964a23 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsDataWarroom/ConflueraDetectionsDataWarroom.yml
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsDataWarroom/ConflueraDetectionsDataWarroom.yml
@@ -7,7 +7,7 @@ type: python
tags:
- dynamic-section
comment: |-
- Logs detections data ( detection vs risk-contribution )
+ Logs detections data ( detection vs risk-contribution ).
enabled: true
scripttarget: 0
subtype: python3
@@ -16,7 +16,7 @@ dependson:
- Confluera|||confluera-fetch-detections
should:
- Confluera|||confluera-fetch-detections
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.py b/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.py
index f25e240a2088..3b134d0f5907 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.py
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.py
@@ -28,7 +28,7 @@
'#cdb8a8',
'#3cc861'])
-for idx, ioc in enumerate(detections):
+for _idx, ioc in enumerate(detections):
element = [item for item in data if item['name'] == ioc['iocTactic']]
if element and len(element) != 0:
element[0]['data'][0] += 1
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.yml b/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.yml
index 6aaa8687157f..3de0f7fe00e6 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.yml
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsSummary/ConflueraDetectionsSummary.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs detections data ( categories of detections )
+ Logs detections data ( categories of detections ).
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-detections
should:
- Confluera|||confluera-fetch-detections
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.py b/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.py
index 9ab91a0b1ab5..d1dd358195e6 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.py
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.py
@@ -27,7 +27,7 @@
'#cdb8a8',
'#3cc861'])
-for idx, ioc in enumerate(detections):
+for _idx, ioc in enumerate(detections):
element = [item for item in data if item['name'] == ioc['iocTactic']]
if element and len(element) != 0:
diff --git a/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.yml b/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.yml
index 6f5cbb0ce23d..9014f8690152 100644
--- a/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.yml
+++ b/Packs/Confluera/Scripts/ConflueraDetectionsSummaryWarroom/ConflueraDetectionsSummaryWarroom.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs detections data ( categories of detections )
+ Logs detections data ( categories of detections ).
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-detections
should:
- Confluera|||confluera-fetch-detections
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraProgressionsCount/ConflueraProgressionsCount.yml b/Packs/Confluera/Scripts/ConflueraProgressionsCount/ConflueraProgressionsCount.yml
index 5f4a9adeeeae..3a77dac3448f 100644
--- a/Packs/Confluera/Scripts/ConflueraProgressionsCount/ConflueraProgressionsCount.yml
+++ b/Packs/Confluera/Scripts/ConflueraProgressionsCount/ConflueraProgressionsCount.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs progressions count
+ Logs progressions count.
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-progressions
should:
- Confluera|||confluera-fetch-progressions
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.py b/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.py
index babb7d835ef5..d1e4ff07bc24 100644
--- a/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.py
+++ b/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.py
@@ -13,7 +13,7 @@
data = []
-for idx, progression in enumerate(progressions):
+for _idx, progression in enumerate(progressions):
if progression['riskScore'] == 0:
color = "blue"
elif progression['riskScore'] < 25:
diff --git a/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.yml b/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.yml
index bdbb7e66bdb8..2265aecedb11 100644
--- a/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.yml
+++ b/Packs/Confluera/Scripts/ConflueraProgressionsData/ConflueraProgressionsData.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs progressions data ( progression vs risk-score )
+ Logs progressions data ( progression vs risk-score ).
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-progressions
should:
- Confluera|||confluera-fetch-progressions
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.py b/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.py
index 2a63dedcc077..fe677fa97d7a 100644
--- a/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.py
+++ b/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.py
@@ -11,7 +11,7 @@
data = []
-for idx, progression in enumerate(progressions):
+for _idx, progression in enumerate(progressions):
if progression['riskScore'] == 0:
color = "blue"
elif progression['riskScore'] < 25:
diff --git a/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.yml b/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.yml
index eba7ef084927..1405d40cfe3d 100644
--- a/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.yml
+++ b/Packs/Confluera/Scripts/ConflueraProgressionsDataWarroom/ConflueraProgressionsDataWarroom.yml
@@ -6,7 +6,7 @@ script: '-'
type: python
tags: []
comment: |-
- Logs progressions data ( progression vs risk-score )
+ Logs progressions data ( progression vs risk-score ).
enabled: true
scripttarget: 0
subtype: python3
@@ -15,7 +15,7 @@ dependson:
- Confluera|||confluera-fetch-progressions
should:
- Confluera|||confluera-fetch-progressions
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/Scripts/IqHubLog/IqHubLog.yml b/Packs/Confluera/Scripts/IqHubLog/IqHubLog.yml
index 145a9a66ad2e..b37d84ead2ef 100644
--- a/Packs/Confluera/Scripts/IqHubLog/IqHubLog.yml
+++ b/Packs/Confluera/Scripts/IqHubLog/IqHubLog.yml
@@ -5,9 +5,9 @@ name: IqHubLog
script: '-'
type: python
tags: []
-comment: |-
- Logs detection and progression count with respective links to confluera's IQ-Hub portal
- in tabular format
+comment: 'Logs detection and progression count with respective links to confluera''s IQ-Hub portal
+
+ in tabular format.'
enabled: true
scripttarget: 0
subtype: python3
@@ -18,7 +18,7 @@ dependson:
should:
- Confluera|||confluera-fetch-detections
- Confluera|||confluera-fetch-progressions
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Confluera/pack_metadata.json b/Packs/Confluera/pack_metadata.json
index e671e51ad242..a0c136961065 100644
--- a/Packs/Confluera/pack_metadata.json
+++ b/Packs/Confluera/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Confluera",
"description": "This content pack uses the Confluera integration to fetch detections & progressions from confluera's Iq-Hub portal",
"support": "partner",
- "currentVersion": "1.0.13",
+ "currentVersion": "1.0.14",
"author": "confluera",
"url": "https://www.confluera.com",
"email": "support@confluera.com",
diff --git a/Packs/Coralogix/Integrations/Coralogix/Coralogix.yml b/Packs/Coralogix/Integrations/Coralogix/Coralogix.yml
index d139171f5061..c02c437c040f 100644
--- a/Packs/Coralogix/Integrations/Coralogix/Coralogix.yml
+++ b/Packs/Coralogix/Integrations/Coralogix/Coralogix.yml
@@ -77,7 +77,7 @@ configuration:
name: incidents_max_fetch
type: 0
required: false
-description: Fetch incidents, search for supporting data and tag interesting datapoints in/from your Coralogix account
+description: Fetch incidents, search for supporting data and tag interesting datapoints in/from your Coralogix account.
display: Coralogix
name: Coralogix
script:
@@ -88,31 +88,31 @@ script:
required: true
- description: The point in time to tag in Coralogix.
name: timestamp
- - description: A link to an icon file (png, jpeg, SVG) that will be used as the tag image in Coralogix. Cannot exceed 50KB
+ - description: A link to an icon file (png, jpeg, SVG) that will be used as the tag image in Coralogix. Cannot exceed 50KB.
name: icon_url
- description: Tag timestamp in Coralogix
+ description: Tag timestamp in Coralogix.
name: coralogix-tag
- arguments:
- - description: A Coralogix Lucene query
+ - description: A Coralogix Lucene query.
name: query
required: true
- - description: Narrows the search to the specified Coralogix Application name
+ - description: Narrows the search to the specified Coralogix Application name.
name: app_name
- - description: Narrows the search to the specified Coralogix Subsystem name
+ - description: Narrows the search to the specified Coralogix Subsystem name.
name: subsystem_name
- - description: Narrows the search to the specified Coralogix severity value
+ - description: Narrows the search to the specified Coralogix severity value.
name: severity
- - description: Limit the search to the data that arrived after a certain timestamp. If not set will search for all the data matching the query
+ - description: Limit the search to the data that arrived after a certain timestamp. If not set will search for all the data matching the query.
name: since_timestamp
- - description: Limit the search to data that arrived until a certain timestamp.If not set will search for all the data matching the query
+ - description: Limit the search to data that arrived until a certain timestamp. If not set will search for all the data matching the query.
name: to_timestamp
- defaultValue: '50'
- description: The maximum number of events to retrieve per query
+ description: The maximum number of events to retrieve per query.
name: max_items_to_retrieve
required: true
- description: Query Coralogix for information
+ description: Query Coralogix for information.
name: coralogix-search
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Coralogix/ReleaseNotes/1_0_8.md b/Packs/Coralogix/ReleaseNotes/1_0_8.md
new file mode 100644
index 000000000000..e38e2f9ae856
--- /dev/null
+++ b/Packs/Coralogix/ReleaseNotes/1_0_8.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Coralogix
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Coralogix/pack_metadata.json b/Packs/Coralogix/pack_metadata.json
index f85aeb530493..9fa7131d2b3a 100644
--- a/Packs/Coralogix/pack_metadata.json
+++ b/Packs/Coralogix/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Coralogix",
"description": "Coralogix Integration can be used for searching incidents and other data from Coralogix as well as tagging interesting timestamps at Coralogix from Cortex XSOAR",
"support": "partner",
- "currentVersion": "1.0.7",
+ "currentVersion": "1.0.8",
"author": "Coralogix",
"url": "",
"email": "support@coralogix.com",
diff --git a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.py b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.py
index 89e221376a19..2405e1cc9cec 100644
--- a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.py
+++ b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.py
@@ -4,37 +4,38 @@
import secrets
import tempfile
from datetime import timezone
-from typing import Dict, List, Tuple, Union
from dateparser import parse
from urllib3 import disable_warnings
from math import ceil
from google.cloud import storage
+from CoreIRApiModule import *
disable_warnings()
DEMISTO_TIME_FORMAT: str = '%Y-%m-%dT%H:%M:%SZ'
-core_types_to_demisto: Dict = {
+
+core_types_to_demisto: dict = {
"DOMAIN_NAME": 'Domain',
"HASH": 'File',
"IP": 'IP'
}
-core_reputation_to_demisto: Dict = {
+core_reputation_to_demisto: dict = {
'GOOD': 1,
'SUSPICIOUS': 2,
'BAD': 3
}
-demisto_score_to_core: Dict[int, str] = {
+demisto_score_to_core: dict[int, str] = {
1: 'GOOD',
2: 'SUSPICIOUS',
3: 'BAD'
}
-class Client:
+class Client(CoreClient):
severity: str = ''
query: str = 'reputation:Bad and (type:File or type:Domain or type:IP)'
tag = 'Cortex Core'
tlp_color = None
- error_codes: Dict[int, str] = {
+ error_codes: dict[int, str] = {
500: 'XDR internal server error.',
401: 'Unauthorized access. An issue occurred during authentication. This can indicate an ' # noqa: W504
+ 'incorrect key, id, or other invalid authentication parameters.',
@@ -44,18 +45,22 @@ class Client:
413: 'Request entity too large. Please reach out to the XDR support team.'
}
- def __init__(self, params: Dict):
- url = params.get('url')
- if not url:
- url = "http://" + demisto.getLicenseCustomField("Core.ApiHost") + "/api/webapp/"
+ def __init__(self, params: dict):
+ url = "/api/webapp/"
+ if not FORWARD_USER_RUN_RBAC:
+ url = params.get('url', '')
+ if not url:
+ url = "http://" + demisto.getLicenseCustomField("Core.ApiHost") + "/api/webapp/" # type: ignore
self._base_url: str = urljoin(url, '/public_api/v1/indicators/')
self._verify_cert: bool = not params.get('insecure', False)
self._params = params
handle_proxy()
- def http_request(self, url_suffix: str, requests_kwargs=None) -> Dict:
+ def http_request(self, url_suffix: str, requests_kwargs=None) -> dict:
+ if FORWARD_USER_RUN_RBAC:
+ return CoreClient._http_request(self, method='POST', url_suffix=url_suffix, data=requests_kwargs)
if requests_kwargs is None:
- requests_kwargs = dict()
+ requests_kwargs = {}
res = requests.post(url=self._base_url + url_suffix,
verify=self._verify_cert,
headers=self._headers,
@@ -77,7 +82,7 @@ def _headers(self):
return get_headers(self._params)
-def get_headers(params: Dict) -> Dict:
+def get_headers(params: dict) -> dict:
api_key: str = str(params.get('apikey'))
api_key_id: str = str(params.get('apikey_id'))
if not api_key or not api_key_id:
@@ -98,28 +103,29 @@ def get_headers(params: Dict) -> Dict:
return headers
-def get_requests_kwargs(_json=None) -> Dict:
+def get_requests_kwargs(_json=None) -> dict:
if _json is not None:
- return {'data': json.dumps({"request_data": _json})}
+ return {"request_data": _json} if FORWARD_USER_RUN_RBAC else \
+ {'data': json.dumps({"request_data": _json})}
else:
return {}
-def prepare_get_changes(time_stamp: int) -> Tuple[str, Dict]:
+def prepare_get_changes(time_stamp: int) -> tuple[str, dict]:
url_suffix: str = 'get_changes'
- _json: Dict = {'last_update_ts': time_stamp}
+ _json: dict = {'last_update_ts': time_stamp}
return url_suffix, _json
-def prepare_enable_iocs(iocs: str) -> Tuple[str, List]:
+def prepare_enable_iocs(iocs: str) -> tuple[str, list]:
url_suffix: str = 'enable_iocs'
- _json: List = argToList(iocs)
+ _json: list = argToList(iocs)
return url_suffix, _json
-def prepare_disable_iocs(iocs: str) -> Tuple[str, List]:
+def prepare_disable_iocs(iocs: str) -> tuple[str, list]:
url_suffix: str = 'disable_iocs'
- _json: List = argToList(iocs)
+ _json: list = argToList(iocs)
return url_suffix, _json
@@ -127,8 +133,8 @@ def create_file_iocs_to_keep(file_path, batch_size: int = 200):
with open(file_path, 'a') as _file:
total_size: int = get_iocs_size()
for i in range(0, ceil(total_size / batch_size)):
- iocs: List = get_iocs(page=i, size=batch_size)
- for ios in map(lambda x: x.get('value', ''), iocs):
+ iocs: list = get_iocs(page=i, size=batch_size)
+ for ios in (x.get('value', '') for x in iocs):
_file.write(ios + '\n')
@@ -136,8 +142,8 @@ def create_file_sync(file_path, batch_size: int = 200):
with open(file_path, 'a') as _file:
total_size: int = get_iocs_size()
for i in range(0, ceil(total_size / batch_size)):
- iocs: List = get_iocs(page=i, size=batch_size)
- for ioc in map(lambda x: demisto_ioc_to_core(x), iocs):
+ iocs: list = get_iocs(page=i, size=batch_size)
+ for ioc in (demisto_ioc_to_core(x) for x in iocs):
if ioc:
_file.write(json.dumps(ioc) + '\n')
@@ -150,7 +156,7 @@ def get_iocs_size(query=None) -> int:
.get('total', 0)
-def get_iocs(page=0, size=200, query=None) -> List:
+def get_iocs(page=0, size=200, query=None) -> list:
search_indicators = IndicatorsSearcher(page=page)
query = query if query else Client.query
query = f'expirationStatus:active AND ({query})'
@@ -176,8 +182,8 @@ def demisto_reliability_to_core(reliability: str) -> str:
return 'F'
-def demisto_vendors_to_core(demisto_vendors) -> List[Dict]:
- core_vendors: List[Dict] = []
+def demisto_vendors_to_core(demisto_vendors) -> list[dict]:
+ core_vendors: list[dict] = []
for module_id, data in demisto_vendors.items():
reliability = demisto_reliability_to_core(data.get('reliability'))
reputation = demisto_score_to_core.get(data.get('score'), 'UNKNOWN')
@@ -200,9 +206,9 @@ def demisto_types_to_core(_type: str) -> str:
return core_type
-def demisto_ioc_to_core(ioc: Dict) -> Dict:
+def demisto_ioc_to_core(ioc: dict) -> dict:
try:
- core_ioc: Dict = {
+ core_ioc: dict = {
'indicator': ioc['value'],
'severity': Client.severity,
'type': demisto_types_to_core(str(ioc['indicator_type'])),
@@ -210,7 +216,7 @@ def demisto_ioc_to_core(ioc: Dict) -> Dict:
'expiration_date': demisto_expiration_to_core(ioc.get('expiration'))
}
# get last 'IndicatorCommentRegular'
- comment: Dict = next(filter(lambda x: x.get('type') == 'IndicatorCommentRegular', reversed(ioc.get('comments', []))), {})
+ comment: dict = next(filter(lambda x: x.get('type') == 'IndicatorCommentRegular', reversed(ioc.get('comments', []))), {})
if comment:
core_ioc['comment'] = comment.get('content')
if ioc.get('aggregatedReliability'):
@@ -254,7 +260,7 @@ def sync(client: Client):
def iocs_to_keep(client: Client):
- if not datetime.utcnow().hour in range(1, 3):
+ if datetime.utcnow().hour not in range(1, 3):
raise DemistoException('iocs_to_keep runs only between 01:00 and 03:00.')
temp_file_path: str = get_temp_file()
try:
@@ -271,12 +277,12 @@ def create_last_iocs_query(from_date, to_date):
return f'modified:>={from_date} and modified:<{to_date} and ({Client.query})'
-def get_last_iocs(batch_size=200) -> List:
+def get_last_iocs(batch_size=200) -> list:
current_run: str = datetime.utcnow().strftime(DEMISTO_TIME_FORMAT)
- last_run: Dict = get_integration_context()
+ last_run: dict = get_integration_context()
query = create_last_iocs_query(from_date=last_run['time'], to_date=current_run)
total_size = get_iocs_size(query)
- iocs: List = []
+ iocs: list = []
for i in range(0, ceil(total_size / batch_size)):
iocs.extend(get_iocs(query=query, page=i, size=batch_size))
last_run['time'] = current_run
@@ -284,7 +290,7 @@ def get_last_iocs(batch_size=200) -> List:
return iocs
-def get_indicators(indicators: str) -> List:
+def get_indicators(indicators: str) -> list:
if indicators:
iocs: list = []
not_found = []
@@ -310,7 +316,7 @@ def tim_insert_jsons(client: Client):
iocs = get_indicators(indicators)
if iocs:
path = 'tim_insert_jsons/'
- requests_kwargs: Dict = get_requests_kwargs(_json=list(map(lambda ioc: demisto_ioc_to_core(ioc), iocs)))
+ requests_kwargs: dict = get_requests_kwargs(_json=[demisto_ioc_to_core(ioc) for ioc in iocs])
client.http_request(url_suffix=path, requests_kwargs=requests_kwargs)
return_outputs('push done.')
@@ -322,12 +328,12 @@ def iocs_command(client: Client):
path, iocs = prepare_enable_iocs(indicators)
else: # command == 'disable'
path, iocs = prepare_disable_iocs(indicators)
- requests_kwargs: Dict = get_requests_kwargs(_json=iocs)
+ requests_kwargs: dict = get_requests_kwargs(_json=iocs)
client.http_request(url_suffix=path, requests_kwargs=requests_kwargs)
return_outputs(f'indicators {indicators} {command}d.')
-def core_ioc_to_timeline(iocs: List) -> Dict:
+def core_ioc_to_timeline(iocs: list) -> dict:
ioc_time_line = {
'Value': ','.join(iocs),
'Message': 'indicator updated in Cortex.',
@@ -336,7 +342,7 @@ def core_ioc_to_timeline(iocs: List) -> Dict:
return ioc_time_line
-def core_expiration_to_demisto(expiration) -> Union[str, None]:
+def core_expiration_to_demisto(expiration) -> str | None:
if expiration:
if expiration == -1:
return 'Never'
@@ -348,7 +354,7 @@ def core_expiration_to_demisto(expiration) -> Union[str, None]:
def module_test(client: Client):
ts = int(datetime.now(timezone.utc).timestamp() * 1000) - 1
path, requests_kwargs = prepare_get_changes(ts)
- requests_kwargs: Dict = get_requests_kwargs(_json=requests_kwargs)
+ requests_kwargs: dict = get_requests_kwargs(_json=requests_kwargs)
client.http_request(url_suffix=path, requests_kwargs=requests_kwargs).get('reply', [])
demisto.results('ok')
@@ -395,7 +401,7 @@ def get_indicator_core_score(indicator: str, core_server: int):
if ioc:
ioc = ioc[0]
score = ioc.get('score', 0)
- temp: Dict = next(filter(is_core_data, ioc.get('moduleToFeedMap', {}).values()), {})
+ temp: dict = next(filter(is_core_data, ioc.get('moduleToFeedMap', {}).values()), {})
core_local = temp.get('score', 0)
if core_server != score:
return core_server
@@ -417,7 +423,7 @@ def get_sync_file():
temp_file_path = get_temp_file()
try:
create_file_sync(temp_file_path)
- with open(temp_file_path, 'r') as _tmpfile:
+ with open(temp_file_path) as _tmpfile:
return_results(fileResult('core-sync-file', _tmpfile.read()))
finally:
os.remove(temp_file_path)
diff --git a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
index 005f9ea62fb5..00ced1fd7d9b 100644
--- a/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
+++ b/Packs/Core/Integrations/CoreIOCs/CoreIOCs.yml
@@ -18,6 +18,13 @@ configuration:
type: 4
hidden: true
required: false
+- display: API Key ID
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: false
+ section: Connect
+ displaypassword: API Key
- additionalinfo: Map the severity of each indicator that will be synced to Cortex.
display: Cortex Severity
name: severity
@@ -80,7 +87,7 @@ script:
required: true
description: Disables IOCs in the Cortex server.
name: core-iocs-disable
- dockerimage: demisto/google-cloud-storage:1.0.0.78223
+ dockerimage: demisto/google-cloud-storage:1.0.0.96060
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
index 3dd76cc9140b..5cb9c5c61873 100644
--- a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
+++ b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.py
@@ -148,27 +148,30 @@ def main(): # pragma: no cover
args = demisto.args()
args["integration_context_brand"] = INTEGRATION_CONTEXT_BRAND
args["integration_name"] = INTEGRATION_NAME
- api_key = demisto.params().get('apikey')
- api_key_id = demisto.params().get('apikey_id')
- url = demisto.params().get('url')
+ headers = {}
url_suffix = '/xsiam' if command in PREVALENCE_COMMANDS else "/public_api/v1"
-
- if not api_key or not api_key_id or not url:
- headers = {
- "HOST": demisto.getLicenseCustomField("Core.ApiHostName"),
- demisto.getLicenseCustomField("Core.ApiHeader"): demisto.getLicenseCustomField("Core.ApiKey"),
- "Content-Type": "application/json"
- }
- url = "http://" + demisto.getLicenseCustomField("Core.ApiHost") + "/api/webapp/"
- add_sensitive_log_strs(demisto.getLicenseCustomField("Core.ApiKey"))
+ if not FORWARD_USER_RUN_RBAC:
+ api_key = demisto.params().get('apikey')
+ api_key_id = demisto.params().get('apikey_id')
+ url = demisto.params().get('url')
+
+ if not api_key or not api_key_id or not url:
+ headers = {
+ "HOST": demisto.getLicenseCustomField("Core.ApiHostName"),
+ demisto.getLicenseCustomField("Core.ApiHeader"): demisto.getLicenseCustomField("Core.ApiKey"),
+ "Content-Type": "application/json"
+ }
+ url = "http://" + demisto.getLicenseCustomField("Core.ApiHost") + "/api/webapp/"
+ add_sensitive_log_strs(demisto.getLicenseCustomField("Core.ApiKey"))
+ else:
+ headers = {
+ "Content-Type": "application/json",
+ "x-xdr-auth-id": str(api_key_id),
+ "Authorization": api_key
+ }
+ add_sensitive_log_strs(api_key)
else:
- headers = {
- "Content-Type": "application/json",
- "x-xdr-auth-id": str(api_key_id),
- "Authorization": api_key
- }
- add_sensitive_log_strs(api_key)
-
+ url = "/api/webapp/"
base_url = urljoin(url, url_suffix)
proxy = demisto.params().get('proxy')
verify_cert = not demisto.params().get('insecure', False)
@@ -178,7 +181,6 @@ def main(): # pragma: no cover
except ValueError as e:
demisto.debug(f'Failed casting timeout parameter to int, falling back to 120 - {e}')
timeout = 120
-
client = Client(
base_url=base_url,
proxy=proxy,
diff --git a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
index 55535c36e8cc..a93ca0608bda 100644
--- a/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
+++ b/Packs/Core/Integrations/CortexCoreIR/CortexCoreIR.yml
@@ -23,6 +23,13 @@ configuration:
type: 4
hidden: true
required: false
+- display: API Key ID
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: false
+ section: Connect
+ displaypassword: API Key
- additionalinfo: The timeout of the HTTP requests sent to Cortex API (in seconds).
defaultvalue: '120'
display: HTTP Timeout
@@ -2936,7 +2943,7 @@ script:
script: '-'
subtype: python3
type: python
- dockerimage: demisto/python3:3.10.14.92207
+ dockerimage: demisto/google-cloud-storage:1.0.0.96060
tests:
- No tests
fromversion: 6.2.0
diff --git a/Packs/Core/Playbooks/playbook-IOC_Alert.yml b/Packs/Core/Playbooks/playbook-IOC_Alert.yml
index 1cb6ccfe4e18..15946f1ebf5a 100644
--- a/Packs/Core/Playbooks/playbook-IOC_Alert.yml
+++ b/Packs/Core/Playbooks/playbook-IOC_Alert.yml
@@ -219,71 +219,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3720
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "43":
- id: "43"
- taskid: b772f110-939d-4f2b-8850-088e8cf36343
- type: condition
- task:
- id: b772f110-939d-4f2b-8850-088e8cf36343
- version: -1
- name: Should continue with the investigation?
- description: Asks the user whether the alert investigation should continue.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "44"
- "yes":
- - "60"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 410,
- "y": 2530
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "44":
- id: "44"
- taskid: a0697551-115b-4674-8204-ba39af9247f5
- type: regular
- task:
- id: a0697551-115b-4674-8204-ba39af9247f5
- version: -1
- name: Continue with the incident investigation
- description: Continue with the incident investigation.
- type: regular
- iscommand: false
- brand: ""
- nexttasks:
- '#none#':
- - "60"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 710,
- "y": 2700
+ "y": 3380
}
}
note: false
@@ -338,7 +274,7 @@ tasks:
brand: ""
nexttasks:
'#default#':
- - "43"
+ - "60"
"yes":
- "83"
separatecontext: false
@@ -387,7 +323,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 2870
+ "y": 2530
}
}
note: false
@@ -689,7 +625,7 @@ tasks:
{
"position": {
"x": 750,
- "y": 3540
+ "y": 3200
}
}
note: false
@@ -735,7 +671,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3005
+ "y": 2665
}
}
note: false
@@ -781,7 +717,7 @@ tasks:
{
"position": {
"x": 410,
- "y": 3360
+ "y": 3020
}
}
note: false
@@ -827,7 +763,7 @@ tasks:
{
"position": {
"x": 760,
- "y": 3180
+ "y": 2840
}
}
note: false
@@ -963,7 +899,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "43"
+ - "60"
scriptarguments:
AutoContainment:
complex:
@@ -1055,7 +991,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "43"
+ - "60"
scriptarguments:
AutoEradicate:
complex:
@@ -1218,10 +1154,7 @@ tasks:
id: f77aa4eb-f400-4a50-829f-4ce59cad479f
version: -1
name: Ticket Management - Generic
- description: "`Ticket Management - Generic` allows you to open new tickets or
- update comments to the existing ticket in the following ticketing systems:\n-ServiceNow
- \n-Zendesk \nusing the following sub-playbooks:\n-`ServiceNow - Ticket Management`\n-`Zendesk
- - Ticket Management`\n"
+ description: "`Ticket Management - Generic` allows you to open new tickets or update comments to the existing ticket in the following ticketing systems:\n-ServiceNow \n-Zendesk \nusing the following sub-playbooks:\n-`ServiceNow - Ticket Management`\n-`Zendesk - Ticket Management`\n"
playbookName: Ticket Management - Generic
type: playbook
iscommand: false
@@ -1309,9 +1242,6 @@ view: |-
{
"linkLabelsPosition": {
"10_12_#default#": 0.52,
- "43_44_#default#": 0.49,
- "43_60_yes": 0.54,
- "54_43_#default#": 0.19,
"54_83_yes": 0.41,
"73_74_#default#": 0.44,
"73_75_yes": 0.41,
@@ -1321,7 +1251,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 3895,
+ "height": 3555,
"width": 1770,
"x": -230,
"y": -110
@@ -1538,4 +1468,4 @@ outputs: []
tests:
- No tests (auto formatted)
marketplaces: ["marketplacev2"]
-fromversion: 6.6.0
+fromversion: 6.6.0
\ No newline at end of file
diff --git a/Packs/Core/Playbooks/playbook-IOC_Alert_README.md b/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
index 6faf870a9001..0427ca362496 100644
--- a/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
+++ b/Packs/Core/Playbooks/playbook-IOC_Alert_README.md
@@ -28,13 +28,13 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
+* Endpoint Investigation Plan
* Recovery Plan
* Eradication Plan
-* Ticket Management - Generic
-* Endpoint Investigation Plan
* Enrichment for Verdict
-* Containment Plan
* Handle False Positive Alerts
+* Ticket Management - Generic
+* Containment Plan
### Integrations
@@ -47,8 +47,8 @@ This playbook does not use any scripts.
### Commands
* extractIndicators
-* closeInvestigation
* setParentIncidentFields
+* closeInvestigation
## Playbook Inputs
diff --git a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
index d0ecc4cab148..00c0949218cf 100644
--- a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
+++ b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling.yml
@@ -1225,10 +1225,10 @@ tasks:
isautoswitchedtoquietmode: false
"52":
id: "52"
- taskid: bb00ca3d-82a5-4b82-8ce2-a5593eecb43b
+ taskid: 0ea2b397-c024-4c57-81d1-ccdf630043f6
type: regular
task:
- id: bb00ca3d-82a5-4b82-8ce2-a5593eecb43b
+ id: 0ea2b397-c024-4c57-81d1-ccdf630043f6
version: -1
name: Set Number of Related Alerts
description: |-
@@ -1268,7 +1268,7 @@ tasks:
fieldMapping:
- incidentfield: Number Of Found Related Alerts
output:
- simple: NumOfRelatedAlerts
+ simple: ${NumOfRelatedAlerts}
- incidentfield: Alert Search Results
output:
complex:
@@ -1306,14 +1306,15 @@ tasks:
- incidentfield: Failed Logon Events
output:
complex:
- root: AzureFailLoginCount
+ root: NumOfOktaFailedLogon
transformers:
- operator: append
args:
item:
value:
- simple: NumOfOktaFailedLogon
+ simple: AzureFailLoginCount
iscontext: true
+ - operator: SumList
- incidentfield: Email
output:
complex:
@@ -1644,4 +1645,4 @@ outputs: []
tests:
- No tests (auto formatted)
marketplaces: ["marketplacev2"]
-fromversion: 6.10.0
+fromversion: 6.10.0
\ No newline at end of file
diff --git a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
index d5669b650f4d..9227114a53e1 100644
--- a/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
+++ b/Packs/Core/Playbooks/playbook-Identity_Analytics_-_Alert_Handling_README.md
@@ -21,12 +21,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Sub-playbooks
-* Azure - User Investigation
-* Okta - User Investigation
* Cloud IAM Enrichment - Generic
+* Azure - User Investigation
+* Cloud Credentials Rotation - Azure
* Containment Plan
+* Okta - User Investigation
* Account Enrichment - Generic v2.1
-* Cloud Credentials Rotation - Azure
* Get entity alerts by MITRE tactics
### Integrations
@@ -36,14 +36,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
### Scripts
-* Set
* SetAndHandleEmpty
+* Set
### Commands
-* core-get-cloud-original-alerts
-* ip
* closeInvestigation
+* ip
+* core-get-cloud-original-alerts
## Playbook Inputs
diff --git a/Packs/Core/ReleaseNotes/3_0_33.md b/Packs/Core/ReleaseNotes/3_0_33.md
new file mode 100644
index 000000000000..c3b6c81f65c8
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_33.md
@@ -0,0 +1,16 @@
+
+#### Integrations
+
+##### Investigation & Response
+- Updated the Docker image to: *demisto/google-cloud-storage:1.0.0.96060*.
+
+
+- Improved implementation of the authorization process.
+
+
+##### Indicators detection
+- Updated the Docker image to: *demisto/google-cloud-storage:1.0.0.96060*.
+
+
+- Improved implementation of the authorization process.
+
diff --git a/Packs/Core/ReleaseNotes/3_0_34.md b/Packs/Core/ReleaseNotes/3_0_34.md
new file mode 100644
index 000000000000..f93731ab7831
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_34.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+Updated the output value for the **'Number Of Found Related Alerts'** alert field mapping rule in the *'Set Number of Related Alerts'* task.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_35.md b/Packs/Core/ReleaseNotes/3_0_35.md
new file mode 100644
index 000000000000..43d17a0fa830
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_35.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### IOC Alert
+
+- Deleted two unnecessary manual tasks titled *'Should continue with the investigation?'* and *'Continue with the incident investigation'* to optimize playbook flow.
diff --git a/Packs/Core/ReleaseNotes/3_0_36.md b/Packs/Core/ReleaseNotes/3_0_36.md
new file mode 100644
index 000000000000..0fe4d3b8fab7
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_36.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Identity Analytics - Alert Handling
+
+- Updated the mapping rule for the **'Failed Logon Events'** alert field in the *'Set Number of Related Alerts'* playbook task.
\ No newline at end of file
diff --git a/Packs/Core/ReleaseNotes/3_0_37.md b/Packs/Core/ReleaseNotes/3_0_37.md
new file mode 100644
index 000000000000..4a973ee9c9a7
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_37.md
@@ -0,0 +1,3 @@
+## Core - Investigation and Response
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Core/doc_files/IOC_Alert.png b/Packs/Core/doc_files/IOC_Alert.png
index 87cf7dbe8c14..4d92033b0679 100644
Binary files a/Packs/Core/doc_files/IOC_Alert.png and b/Packs/Core/doc_files/IOC_Alert.png differ
diff --git a/Packs/Core/pack_metadata.json b/Packs/Core/pack_metadata.json
index e7ec73e45069..1bb93db0e32c 100644
--- a/Packs/Core/pack_metadata.json
+++ b/Packs/Core/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core - Investigation and Response",
"description": "Automates incident response",
"support": "xsoar",
- "currentVersion": "3.0.32",
+ "currentVersion": "3.0.37",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md b/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md
new file mode 100644
index 000000000000..650557e14e61
--- /dev/null
+++ b/Packs/CoreAlertFields/ReleaseNotes/1_0_35.md
@@ -0,0 +1,3 @@
+## Core Alert Fields
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CoreAlertFields/pack_metadata.json b/Packs/CoreAlertFields/pack_metadata.json
index 299acde06b75..79d12adf4799 100644
--- a/Packs/CoreAlertFields/pack_metadata.json
+++ b/Packs/CoreAlertFields/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Core Alert Fields",
"description": "This Content Pack will provide you with the core alert fields.",
"support": "xsoar",
- "currentVersion": "1.0.34",
+ "currentVersion": "1.0.35",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml
new file mode 100644
index 000000000000..9bc421b21f88
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment.yml
@@ -0,0 +1,782 @@
+description: Playbook to enrich certificate information.
+id: 'Cortex ASM - Certificate Enrichment'
+inputSections:
+- description: Generic group for inputs
+ inputs:
+ - Hostname
+ name: General (Inputs group)
+inputs:
+- description: Input for Certificate enrichment
+ key: Hostname
+ playbookInputQuery:
+ required: false
+ value:
+ simple: ${alert.hostname}
+name: Cortex ASM - Certificate Enrichment
+outputSections:
+- description: Generic group for outputs
+ name: General (Outputs group)
+ outputs: []
+outputs: []
+starttaskid: "0"
+tasks:
+ "0":
+ continueonerrortype: ""
+ id: "0"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "16"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 0804bf72-274c-489c-86fa-0c78f7930d42
+ iscommand: false
+ name: ""
+ version: -1
+ description: ''
+ taskid: 0804bf72-274c-489c-86fa-0c78f7930d42
+ timertriggers: []
+ type: start
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -80
+ }
+ }
+ "1":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.brand
+ operator: isEqualString
+ right:
+ value:
+ simple: VenafiTLSProtect
+ - - left:
+ iscontext: true
+ value:
+ simple: modules.state
+ operator: isEqualString
+ right:
+ value:
+ simple: active
+ root: modules
+ operator: isExists
+ right:
+ value: {}
+ - - left:
+ iscontext: true
+ value:
+ simple: inputs.Hostname
+ operator: isExists
+ label: "yes"
+ continueonerrortype: ""
+ id: "1"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "2"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+      description: Checks if there’s an active instance of the Venafi TLS Protect integration enabled and whether the Hostname input is defined.
+ id: de7247d4-2023-4a4b-8094-57e373d36ffd
+ iscommand: false
+ name: Is Venafi TLS Protect enabled and Input defined?
+ type: condition
+ version: -1
+ taskid: de7247d4-2023-4a4b-8094-57e373d36ffd
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 215
+ }
+ }
+ "2":
+ continueonerrortype: ""
+ id: "2"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "9"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ CN:
+ simple: ${inputs.Hostname}
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: VenafiTLSProtect
+ description: 'Gets Venafi certificates query. All dates are in 2016-11-12T00:00:00.0000000Z format. For additional field information, see: https://ao-tlspd.dev.ven-eco.com/aperture/help/Content/SDK/WebSDK/r-SDK-Certificates-search-attribute.htm and https://ao-tlspd.dev.ven-eco.com/aperture/help/Content/SDK/WebSDK/r-SDK-Certificates-search-status.htm'
+ id: c8fc2b77-e986-4817-8daa-8386909b3e4b
+ iscommand: true
+ name: Get Certificate ID from Venafi
+ script: VenafiTLSProtect|||venafi-get-certificates
+ type: regular
+ version: -1
+ taskid: c8fc2b77-e986-4817-8daa-8386909b3e4b
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 440
+ }
+ }
+ "3":
+ continueonerrortype: ""
+ id: "3"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 6e37cbda-0468-468d-8bc2-fa3f2f2040d8
+ iscommand: false
+ name: Done
+ type: title
+ version: -1
+ description: ''
+ taskid: 6e37cbda-0468-468d-8bc2-fa3f2f2040d8
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 2510
+ }
+ }
+ "5":
+ continueonerrortype: ""
+ id: "5"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "17"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ guid:
+ simple: ${Venafi.Certificate.ID}
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: VenafiTLSProtect
+ description: Uses a certificate GUID to extract more details from the certificate store.
+ id: d5cc8b99-47db-4db3-8c2e-f9098ced9989
+ iscommand: true
+      name: Extract additional details from the Venafi certificate store.
+ script: VenafiTLSProtect|||venafi-get-certificate-details
+ type: regular
+ version: -1
+ taskid: d5cc8b99-47db-4db3-8c2e-f9098ced9989
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 840
+ }
+ }
+ "7":
+ continueonerrortype: ""
+ id: "7"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmenrichmentstatus
+ keys:
+ simple: source,record_exists,timestamp
+ val1:
+ simple: Certificate
+ val2:
+ simple: "true"
+ val3:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Instead of a value you can enter `TIMESTAMP` to get the current timestamp in ISO format. For example:
+      `!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfield="gridfield"`
+ id: 06891dbc-f1d9-4683-83cd-e6c877ee86c3
+ iscommand: false
+ name: Set ASM enrichment status to true
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 06891dbc-f1d9-4683-83cd-e6c877ee86c3
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -900,
+ "y": 2260
+ }
+ }
+ "8":
+ continueonerrortype: ""
+ id: "8"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmenrichmentstatus
+ keys:
+ simple: source,record_exists,timestamp
+ val1:
+ simple: Certificate
+ val2:
+ simple: "false"
+ val3:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Instead of a value you can enter `TIMESTAMP` to get the current timestamp in ISO format. For example:
+      `!GridFieldSetup keys=ip,src,timestamp val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" val3="TIMESTAMP" gridfield="gridfield"`
+ id: 183d7b27-a308-455c-8bca-f5715de02c12
+ iscommand: false
+ name: Set ASM enrichment status to false
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 183d7b27-a308-455c-8bca-f5715de02c12
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -390,
+ "y": 2260
+ }
+ }
+ "9":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: Venafi.Certificate.X509.CN
+ operator: inList
+ right:
+ iscontext: true
+ value:
+ simple: inputs.Hostname
+ root: Venafi.Certificate.X509.CN
+ operator: isNotEmpty
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "9"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "5"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Check if there are results from Venafi.
+ id: e02f629f-b980-4090-84a3-3601fffe17ca
+ iscommand: false
+ name: Are there results from Venafi?
+ type: condition
+ version: -1
+ taskid: e02f629f-b980-4090-84a3-3601fffe17ca
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 630
+ }
+ }
+ "10":
+ continueonerrortype: ""
+ id: "10"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "15"
+ - "3"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "true"
+ key:
+ simple: asm_fields_set_for_certificate
+ value:
+ simple: "true"
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: b4946827-072f-43c9-8afd-55c42b9ba184
+ iscommand: false
+ name: Set true flag for completed enrichment
+ script: Set
+ type: regular
+ version: -1
+ taskid: b4946827-072f-43c9-8afd-55c42b9ba184
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1800
+ }
+ }
+ "11":
+ continueonerrortype: ""
+ id: "11"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "12"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 6efaf535-f561-4465-8dc5-ebffce303c83
+ iscommand: false
+ name: Service Owner
+ type: title
+ version: -1
+ description: ''
+ taskid: 6efaf535-f561-4465-8dc5-ebffce303c83
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1140
+ }
+ }
+ "12":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: Venafi.Certificate.CertificateDetails.Subject
+ operator: containsGeneral
+ right:
+ value:
+ simple: '@'
+ root: Venafi.Certificate.CertificateDetails.Subject
+ transformers:
+ - args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: '[\w-\.]+@[\w-]+\.+[\w-]{2,4}'
+ unpack_matches: {}
+ operator: RegexExtractAll
+ operator: isNotEmpty
+ label: "yes"
+ continueonerrortype: ""
+ id: "12"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "14"
+ "yes":
+ - "13"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Check if there are any emails in Subject.
+ id: d67f3e5e-8afb-4521-8ab2-2514809b5846
+ iscommand: false
+ name: Are there emails in Certificate Subject?
+ type: condition
+ version: -1
+ taskid: d67f3e5e-8afb-4521-8ab2-2514809b5846
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1320
+ }
+ }
+ "13":
+ continueonerrortype: ""
+ id: "13"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmserviceownerunrankedraw
+ keys:
+ simple: name,email,source,timestamp
+ val1:
+ simple: n/a
+ val2:
+ complex:
+ accessor: Subject
+ root: Venafi.Certificate.CertificateDetails
+ transformers:
+ - args:
+ error_if_no_match: {}
+ ignore_case: {}
+ multi_line: {}
+ period_matches_newline: {}
+ regex:
+ value:
+ simple: '[\w-\.]+@[\w-]+\.+[\w-]{2,4}'
+ unpack_matches: {}
+ operator: RegexExtractAll
+ val3:
+ simple: Certificate-Venafi
+ val4:
+ simple: TIMESTAMP
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. For example:
+      `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 436ba802-82ee-47f1-823e-19f0c231ef09
+ iscommand: false
+ name: Set service owner grid field
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 436ba802-82ee-47f1-823e-19f0c231ef09
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 90,
+ "y": 1565
+ }
+ }
+ "14":
+ continueonerrortype: ""
+ id: "14"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "15"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 000c1044-7744-4fb0-8168-5a2e14e6d6d9
+ iscommand: false
+ name: Closing stage
+ type: title
+ version: -1
+ description: ''
+ taskid: 000c1044-7744-4fb0-8168-5a2e14e6d6d9
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": -660,
+ "y": 1825
+ }
+ }
+ "15":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: asm_fields_set_for_certificate
+ operator: isTrue
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "15"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "8"
+ "yes":
+ - "7"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Check if enrichment is performed.
+ id: d9180488-10ff-4a1e-85ea-81a5b09ef5f0
+ iscommand: false
+ name: Was enrichment performed?
+ type: condition
+ version: -1
+ taskid: d9180488-10ff-4a1e-85ea-81a5b09ef5f0
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": -660,
+ "y": 2080
+ }
+ }
+ "16":
+ continueonerrortype: ""
+ id: "16"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "1"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 22628f06-3656-4c77-8876-eeb4ebe83a25
+ iscommand: false
+ name: Venafi enrichment
+ type: title
+ version: -1
+ description: ''
+ taskid: 22628f06-3656-4c77-8876-eeb4ebe83a25
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 60
+ }
+ }
+ "17":
+ continueonerrortype: ""
+ id: "17"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "11"
+ - "18"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 9e6da600-66c4-47f4-8b6a-ecd99f4470e7
+ iscommand: false
+ name: Set fields
+ type: title
+ version: -1
+ description: ''
+ taskid: 9e6da600-66c4-47f4-8b6a-ecd99f4470e7
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1000
+ }
+ }
+ "18":
+ continueonerrortype: ""
+ id: "18"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "19"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 9aa996ab-38c4-4e2c-85d9-07f65777702d
+ iscommand: false
+ name: System IDs
+ type: title
+ version: -1
+ description: ''
+ taskid: 9aa996ab-38c4-4e2c-85d9-07f65777702d
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 750,
+ "y": 1140
+ }
+ }
+ "19":
+ continueonerrortype: ""
+ id: "19"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "10"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: asmsystemids
+ keys:
+ simple: type,id,link
+ val1:
+ simple: CERTIFICATE-VENAFI-ID
+ val2:
+ simple: ${Venafi.Certificate.ID}
+ val3:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+      `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfield="gridfield"`
+ id: 578b389a-0877-4f32-88aa-127256ad227b
+ iscommand: false
+ name: Set system IDs grid field (Certificate ID)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 578b389a-0877-4f32-88aa-127256ad227b
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 750,
+ "y": 1320
+ }
+ }
+version: -1
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 2655,
+ "width": 2030,
+ "x": -900,
+ "y": -80
+ }
+ }
+ }
+tests:
+- No tests (auto formatted)
+fromversion: 6.10.0
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md
new file mode 100644
index 000000000000..99854e35ea5b
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Certificate_Enrichment_README.md
@@ -0,0 +1,42 @@
+Playbook to enrich certificate information.
+
+## Dependencies
+
+This playbook uses the following sub-playbooks, integrations, and scripts.
+
+### Sub-playbooks
+
+This playbook does not use any sub-playbooks.
+
+### Integrations
+
+* VenafiTLSProtect
+
+### Scripts
+
+* Set
+* GridFieldSetup
+
+### Commands
+
+* venafi-get-certificates
+* venafi-get-certificate-details
+
+## Playbook Inputs
+
+---
+
+| **Name** | **Description** | **Default Value** | **Required** |
+| --- | --- | --- | --- |
+| Hostname | Input for Certificate enrichment | ${alert.hostname} | Optional |
+
+## Playbook Outputs
+
+---
+There are no outputs for this playbook.
+
+## Playbook Image
+
+---
+
+![Cortex ASM - Certificate Enrichment](../doc_files/Cortex_ASM_-_Certificate_Enrichment.png)
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
index f3dd4dd5aa26..d45174ef68f6 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 66f369ad-a7a1-4875-8411-a50d64741291
+ taskid: 744b37b8-b653-4703-8f32-a0e2b9d75916
type: start
task:
- id: 66f369ad-a7a1-4875-8411-a50d64741291
+ id: 744b37b8-b653-4703-8f32-a0e2b9d75916
version: -1
name: ""
iscommand: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: c3f3c01d-aa4c-4c4d-88af-4d79f497f147
+ taskid: 032b86e0-f734-4bfc-828b-506b3f38a87a
type: condition
task:
- id: c3f3c01d-aa4c-4c4d-88af-4d79f497f147
+ id: 032b86e0-f734-4bfc-828b-506b3f38a87a
version: -1
name: Is there an IP address?
description: Determines if the IP address has been supplied to proceed with cloud enrichment.
@@ -91,10 +91,10 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 7d47dd96-27ca-427d-8018-bc23630b713d
+ taskid: bb873f5f-e58e-47d1-84fc-5b88c2f11e7c
type: title
task:
- id: 7d47dd96-27ca-427d-8018-bc23630b713d
+ id: bb873f5f-e58e-47d1-84fc-5b88c2f11e7c
version: -1
name: ServiceNow Enrichment
type: title
@@ -122,10 +122,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: 85521f3f-c1eb-4e92-86c7-69ddd3d82e54
+ taskid: 0d23935f-2f92-4bf1-853c-d81a4b68886e
type: condition
task:
- id: 85521f3f-c1eb-4e92-86c7-69ddd3d82e54
+ id: 0d23935f-2f92-4bf1-853c-d81a4b68886e
version: -1
name: Was there a result?
description: Determines if there was a result from the previous command to continue cloud enrichment.
@@ -165,10 +165,10 @@ tasks:
isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: 4c9b1a4b-a562-449a-803a-e0939cd6b5ad
+ taskid: ea81e04a-2e71-4da4-893f-a72fc6d12df1
type: condition
task:
- id: 4c9b1a4b-a562-449a-803a-e0939cd6b5ad
+ id: ea81e04a-2e71-4da4-893f-a72fc6d12df1
version: -1
name: What provider is this service?
description: Determines which cloud provider the service is in order to direct to the correct enrichment.
@@ -321,10 +321,10 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: bf678446-745d-46e1-8044-afe38da7b09c
+ taskid: 05b6f167-bf9b-48e2-8b4e-5619aa2993a8
type: condition
task:
- id: bf678446-745d-46e1-8044-afe38da7b09c
+ id: 05b6f167-bf9b-48e2-8b4e-5619aa2993a8
version: -1
name: Is Cortex ASM enabled and is there a service?
description: Determines if the "Cortex Attack Surface Management" integration instance is configured and that there is a service to continue with enrichment.
@@ -389,10 +389,10 @@ tasks:
isautoswitchedtoquietmode: false
"35":
id: "35"
- taskid: 41731e1d-66f2-4769-835c-9cd48e4a7baa
+ taskid: c584c4e7-34dc-40b5-8689-cbbc271cba76
type: title
task:
- id: 41731e1d-66f2-4769-835c-9cd48e4a7baa
+ id: c584c4e7-34dc-40b5-8689-cbbc271cba76
version: -1
name: Cloud Enrichment
type: title
@@ -420,10 +420,10 @@ tasks:
isautoswitchedtoquietmode: false
"38":
id: "38"
- taskid: a12624d9-2014-4ca2-8a55-816d0c509b05
+ taskid: 8cd46d49-a0e1-4f1a-8431-fcfdb649a1e3
type: title
task:
- id: a12624d9-2014-4ca2-8a55-816d0c509b05
+ id: 8cd46d49-a0e1-4f1a-8431-fcfdb649a1e3
version: -1
name: Complete
type: title
@@ -436,7 +436,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5770
+ "y": 6080
}
}
note: false
@@ -448,10 +448,10 @@ tasks:
isautoswitchedtoquietmode: false
"61":
id: "61"
- taskid: e4455d18-e7a6-498f-8d71-773abbd250fa
+ taskid: d3c24b8e-eb31-4b78-8556-d0dde1462c97
type: playbook
task:
- id: e4455d18-e7a6-498f-8d71-773abbd250fa
+ id: d3c24b8e-eb31-4b78-8556-d0dde1462c97
version: -1
name: Cortex ASM - ServiceNow CMDB Enrichment
type: playbook
@@ -489,10 +489,10 @@ tasks:
isautoswitchedtoquietmode: false
"62":
id: "62"
- taskid: e88c0ff6-68e2-4851-8fbd-ad487ed5a3ab
+ taskid: afed84f0-457f-40ac-8b2c-237668c34d89
type: title
task:
- id: e88c0ff6-68e2-4851-8fbd-ad487ed5a3ab
+ id: afed84f0-457f-40ac-8b2c-237668c34d89
version: -1
name: Tenable.io Enrichment
type: title
@@ -520,10 +520,10 @@ tasks:
isautoswitchedtoquietmode: false
"63":
id: "63"
- taskid: e12638c0-cee1-4666-8377-d5eb6b08eac3
+ taskid: 43ca061a-8750-4536-82e7-af3001b49845
type: playbook
task:
- id: e12638c0-cee1-4666-8377-d5eb6b08eac3
+ id: 43ca061a-8750-4536-82e7-af3001b49845
version: -1
name: Cortex ASM - Tenable.io Enrichment
description: Given the IP address this playbook enriches Tenable.io information relevant to ASM alerts.
@@ -563,10 +563,10 @@ tasks:
isautoswitchedtoquietmode: false
"66":
id: "66"
- taskid: 89573dea-4626-4d8f-8ea4-259828f9392b
+ taskid: 9b65a442-4af7-4d64-80e0-d3aed6284a96
type: regular
task:
- id: 89573dea-4626-4d8f-8ea4-259828f9392b
+ id: 9b65a442-4af7-4d64-80e0-d3aed6284a96
version: -1
name: Get external service information
description: Get service details according to the service ID.
@@ -606,10 +606,10 @@ tasks:
isautoswitchedtoquietmode: false
"67":
id: "67"
- taskid: 91d43e90-31d9-4a14-83c6-456e71a371f9
+ taskid: ed07dbf6-da24-4f6a-880c-6661cca1e249
type: regular
task:
- id: 91d43e90-31d9-4a14-83c6-456e71a371f9
+ id: ed07dbf6-da24-4f6a-880c-6661cca1e249
version: -1
name: Set protocol
description: commands.local.cmd.set.incident
@@ -643,10 +643,10 @@ tasks:
isautoswitchedtoquietmode: false
"68":
id: "68"
- taskid: effa61bc-0398-40a0-83f6-4a9b53e90669
+ taskid: 547c9c5b-f2c2-4742-87a6-7a747d916e58
type: regular
task:
- id: effa61bc-0398-40a0-83f6-4a9b53e90669
+ id: 547c9c5b-f2c2-4742-87a6-7a747d916e58
version: -1
name: Infer whether service is used for development (vs. production)
description: Identify whether the service is a "development" server. Development servers have no external users and run no production workflows. These servers might be named "dev", but they might also be named "qa", "pre-production", "user acceptance testing", or use other non-production terms. This automation uses both public data visible to anyone (`active_classifications` as derived by Xpanse ASM) as well as checking internal data for AI-learned indicators of development systems (`asm_tags` as derived from integrations with non-public systems).
@@ -688,7 +688,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5420
+ "y": 5730
}
}
note: false
@@ -700,10 +700,10 @@ tasks:
isautoswitchedtoquietmode: false
"69":
id: "69"
- taskid: 118f3ab7-3a3b-490c-84f3-65f3c4727262
+ taskid: 35daf2a7-7116-4c04-82f2-fdebe9d1aad5
type: playbook
task:
- id: 118f3ab7-3a3b-490c-84f3-65f3c4727262
+ id: 35daf2a7-7116-4c04-82f2-fdebe9d1aad5
version: -1
name: Cortex ASM - Azure Enrichment
description: Given the IP address, this playbook enriches Azure information relevant to ASM alerts.
@@ -741,10 +741,10 @@ tasks:
isautoswitchedtoquietmode: false
"70":
id: "70"
- taskid: 2aeb3492-5b5d-44d4-8164-4e9dfd5ef0c4
+ taskid: bdae52ae-233c-4a58-806a-b111be48239e
type: title
task:
- id: 2aeb3492-5b5d-44d4-8164-4e9dfd5ef0c4
+ id: bdae52ae-233c-4a58-806a-b111be48239e
version: -1
name: Splunk Enrichment
type: title
@@ -772,10 +772,10 @@ tasks:
isautoswitchedtoquietmode: false
"71":
id: "71"
- taskid: 2e978533-d06c-4ff8-8ebe-d367dce58690
+ taskid: b52ba66b-4088-416c-8d8f-19e32d460833
type: playbook
task:
- id: 2e978533-d06c-4ff8-8ebe-d367dce58690
+ id: b52ba66b-4088-416c-8d8f-19e32d460833
version: -1
name: Cortex ASM - Splunk Enrichment
description: 'Given the IP address this playbook enriches information from Splunk results relevant to ASM alerts. '
@@ -815,10 +815,10 @@ tasks:
isautoswitchedtoquietmode: false
"72":
id: "72"
- taskid: e849c4e9-d53d-4548-85dd-5cd7704dfc8b
+ taskid: 1f4edd55-a394-4fa5-8286-b1199ecd75fb
type: playbook
task:
- id: e849c4e9-d53d-4548-85dd-5cd7704dfc8b
+ id: 1f4edd55-a394-4fa5-8286-b1199ecd75fb
version: -1
name: Cortex ASM - Rapid7 Enrichment
description: Given the IP address this playbook enriches Rapid7 InsightVM (Nexpose) information relevant to ASM alerts.
@@ -858,10 +858,10 @@ tasks:
isautoswitchedtoquietmode: false
"73":
id: "73"
- taskid: 918cca53-7ffd-4aa1-8eb8-305ebaa60ff4
+ taskid: 6fd8b6d1-162e-493f-837f-8be3c652bc54
type: title
task:
- id: 918cca53-7ffd-4aa1-8eb8-305ebaa60ff4
+ id: 6fd8b6d1-162e-493f-837f-8be3c652bc54
version: -1
name: Rapid7 Enrichment
type: title
@@ -889,10 +889,10 @@ tasks:
isautoswitchedtoquietmode: false
"74":
id: "74"
- taskid: 234ac145-3757-4cb1-8501-319f48302242
+ taskid: 3b3e5e95-1326-4e54-87d4-874f97d089b8
type: title
task:
- id: 234ac145-3757-4cb1-8501-319f48302242
+ id: 3b3e5e95-1326-4e54-87d4-874f97d089b8
version: -1
name: Qualys Enrichment
type: title
@@ -920,10 +920,10 @@ tasks:
isautoswitchedtoquietmode: false
"75":
id: "75"
- taskid: d9b7ea21-8bc1-4907-8e5f-b6e2ec013c84
+ taskid: 6800c14b-ef4d-4cc6-8913-1dea41f0da00
type: playbook
task:
- id: d9b7ea21-8bc1-4907-8e5f-b6e2ec013c84
+ id: 6800c14b-ef4d-4cc6-8913-1dea41f0da00
version: -1
name: Cortex ASM - Qualys Enrichment
description: Given the IP address this playbook enriches information from Qualys assets.
@@ -963,10 +963,10 @@ tasks:
isautoswitchedtoquietmode: false
"76":
id: "76"
- taskid: e110e2fe-ca56-4ec8-8ad9-f534f67e89d2
+ taskid: 832fc7bd-8848-447e-8b74-154e1a19a150
type: playbook
task:
- id: e110e2fe-ca56-4ec8-8ad9-f534f67e89d2
+ id: 832fc7bd-8848-447e-8b74-154e1a19a150
version: -1
name: Cortex ASM - GCP Enrichment
description: Given the IP address this playbook enriches GCP information relevant to ASM alerts.
@@ -995,10 +995,10 @@ tasks:
isautoswitchedtoquietmode: false
"78":
id: "78"
- taskid: 6d560875-540c-4624-8246-384bd62bb57c
+ taskid: 0e9be24c-1be3-4998-8f38-85c309d65072
type: playbook
task:
- id: 6d560875-540c-4624-8246-384bd62bb57c
+ id: 0e9be24c-1be3-4998-8f38-85c309d65072
version: -1
name: Cortex ASM - Service Ownership
type: playbook
@@ -1015,7 +1015,7 @@ tasks:
{
"position": {
"x": 110,
- "y": 5600
+ "y": 5910
}
}
note: false
@@ -1027,10 +1027,10 @@ tasks:
isautoswitchedtoquietmode: false
"79":
id: "79"
- taskid: 31483d73-2007-41cc-8866-2f99563d049c
+ taskid: 56a54faf-ee4a-493a-80a1-fe4087457bed
type: playbook
task:
- id: 31483d73-2007-41cc-8866-2f99563d049c
+ id: 56a54faf-ee4a-493a-80a1-fe4087457bed
version: -1
name: Cortex ASM - Prisma Cloud Enrichment
description: Given the IP address this playbook enriches information from Prisma Cloud.
@@ -1074,10 +1074,10 @@ tasks:
isautoswitchedtoquietmode: false
"80":
id: "80"
- taskid: 7679ffb5-fe99-4842-8b58-c651b5f3418b
+ taskid: 6610fa32-db2d-46be-86be-c2d2ccf23db8
type: condition
task:
- id: 7679ffb5-fe99-4842-8b58-c651b5f3418b
+ id: 6610fa32-db2d-46be-86be-c2d2ccf23db8
version: -1
name: Are there any emails in tags?
description: Checks if there is email in the tags.
@@ -1137,10 +1137,10 @@ tasks:
isautoswitchedtoquietmode: false
"81":
id: "81"
- taskid: e57b4529-9731-4ac9-8edb-b2ed951efa1c
+ taskid: 428ebd93-7405-4d3f-80d7-ed88a14e8d4c
type: title
task:
- id: e57b4529-9731-4ac9-8edb-b2ed951efa1c
+ id: 428ebd93-7405-4d3f-80d7-ed88a14e8d4c
version: -1
name: Service Owner from Tags
type: title
@@ -1168,10 +1168,10 @@ tasks:
isautoswitchedtoquietmode: false
"83":
id: "83"
- taskid: 618150ab-c218-432b-8453-0f870c7ca88f
+ taskid: 2a9e6074-cb1c-49fd-8d7a-a0276165b3eb
type: regular
task:
- id: 618150ab-c218-432b-8453-0f870c7ca88f
+ id: 2a9e6074-cb1c-49fd-8d7a-a0276165b3eb
version: -1
name: Set service owners from Tag grid field
description: |-
@@ -1256,10 +1256,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 5b4e9439-349c-47c4-85e6-ec133c44402f
+ taskid: 65488c43-6f4c-47a6-8b71-0fc785792485
type: playbook
task:
- id: 5b4e9439-349c-47c4-85e6-ec133c44402f
+ id: 65488c43-6f4c-47a6-8b71-0fc785792485
version: -1
name: Cortex ASM - AWS Enrichment
type: playbook
@@ -1303,10 +1303,10 @@ tasks:
isautoswitchedtoquietmode: false
"85":
id: "85"
- taskid: 160ce35c-1f12-40fc-8048-1e8b6da77d36
+ taskid: 674f71b4-615a-4ca0-8181-115f7cfc7325
type: regular
task:
- id: 160ce35c-1f12-40fc-8048-1e8b6da77d36
+ id: 674f71b4-615a-4ca0-8181-115f7cfc7325
version: -1
name: Sleep for 1 hour
description: Sleep for X seconds
@@ -1340,10 +1340,10 @@ tasks:
isautoswitchedtoquietmode: false
"86":
id: "86"
- taskid: a208d92b-e40d-413a-835d-0fd47eb16143
+ taskid: 2e66d2cb-fc68-4ec0-8886-4320d80612da
type: condition
task:
- id: a208d92b-e40d-413a-835d-0fd47eb16143
+ id: 2e66d2cb-fc68-4ec0-8886-4320d80612da
version: -1
name: Was there a result?
description: Determines if there was a result from the previous command to continue cloud enrichment.
@@ -1383,10 +1383,10 @@ tasks:
isautoswitchedtoquietmode: false
"87":
id: "87"
- taskid: 8d4c19d4-c500-49c1-8fd4-61f1b131b42f
+ taskid: b3ec329a-381f-414f-8181-4b094102fbf4
type: regular
task:
- id: 8d4c19d4-c500-49c1-8fd4-61f1b131b42f
+ id: b3ec329a-381f-414f-8181-4b094102fbf4
version: -1
name: Get external service information
description: Get service details according to the service ID.
@@ -1426,10 +1426,10 @@ tasks:
isautoswitchedtoquietmode: false
'88':
id: '88'
- taskid: 376af535-c66f-459c-886f-406efe90345f
+ taskid: 25042dc4-d44e-4cce-8bdf-931ac8c3571c
type: playbook
task:
- id: 376af535-c66f-459c-886f-406efe90345f
+ id: 25042dc4-d44e-4cce-8bdf-931ac8c3571c
version: -1
name: Cortex ASM - On Prem Enrichment
type: playbook
@@ -1475,10 +1475,10 @@ tasks:
isautoswitchedtoquietmode: false
'89':
id: '89'
- taskid: fa3e165b-ff83-4c4d-8de3-7c4d4531f66f
+ taskid: 0e10ac8f-f7da-41bd-871f-a9e472a2db2b
type: playbook
task:
- id: fa3e165b-ff83-4c4d-8de3-7c4d4531f66f
+ id: 0e10ac8f-f7da-41bd-871f-a9e472a2db2b
version: -1
name: Cortex ASM - ServiceNow ITSM Enrichment
type: playbook
@@ -1563,13 +1563,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: e943b4d8-cc63-484a-811c-ca45bffb05ae
+ id: 0dd047f2-f372-459e-8324-5833abaedeff
iscommand: false
name: Prisma Cloud Enrichment
type: title
version: -1
description: ''
- taskid: e943b4d8-cc63-484a-811c-ca45bffb05ae
+ taskid: 0dd047f2-f372-459e-8324-5833abaedeff
timertriggers: []
type: title
view: |-
@@ -1603,13 +1603,13 @@ tasks:
task:
brand: ""
description: This playbook is used to pull information from Cortex Endpoint (XSIAM/XDR) systems for enrichment purposes.
- id: cc58ddab-6536-4f67-8d3e-eaba90832c78
+ id: 8b02b5a9-d5f3-44f7-84b4-3c8886d6f379
iscommand: false
name: Cortex ASM - Cortex Endpoint Enrichment
playbookId: Cortex ASM - Cortex Endpoint Enrichment
type: playbook
version: -1
- taskid: cc58ddab-6536-4f67-8d3e-eaba90832c78
+ taskid: 8b02b5a9-d5f3-44f7-84b4-3c8886d6f379
timertriggers: []
type: playbook
view: |-
@@ -1634,13 +1634,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: 9d50af6a-4665-4fc3-84d9-9ad558fc031e
+ id: 3bf7ff76-98e9-42aa-84f0-03be04298a7c
iscommand: false
name: Cortex Endpoint Enrichment
type: title
version: -1
description: ''
- taskid: 9d50af6a-4665-4fc3-84d9-9ad558fc031e
+ taskid: 3bf7ff76-98e9-42aa-84f0-03be04298a7c
timertriggers: []
type: title
view: |-
@@ -1663,7 +1663,7 @@ tasks:
wait: 1
nexttasks:
'#none#':
- - "68"
+ - "99"
note: false
quietmode: 0
scriptarguments:
@@ -1674,13 +1674,13 @@ tasks:
task:
brand: ""
description: Playbook to enriches Service ownership info in Azure and On-Prem Active Directory.
- id: 52aab316-1470-447a-8517-3ef2b5bf05bf
+ id: 4d50a364-ef5f-48bd-829a-b9a203560745
iscommand: false
name: Cortex ASM - Active Directory Enrichment
playbookId: Cortex ASM - Active Directory Enrichment
type: playbook
version: -1
- taskid: 52aab316-1470-447a-8517-3ef2b5bf05bf
+ taskid: 4d50a364-ef5f-48bd-829a-b9a203560745
timertriggers: []
type: playbook
view: |-
@@ -1705,13 +1705,13 @@ tasks:
skipunavailable: false
task:
brand: ""
- id: d0aa855c-4b6a-48bb-8610-974a8667ee1e
+ id: 785f967c-452c-4ffa-847f-1684751fabb9
iscommand: false
name: Active Directory Enrichment
type: title
version: -1
description: ''
- taskid: d0aa855c-4b6a-48bb-8610-974a8667ee1e
+ taskid: 785f967c-452c-4ffa-847f-1684751fabb9
timertriggers: []
type: title
view: |-
@@ -1721,6 +1721,69 @@ tasks:
"y": 5100
}
}
+ "98":
+ continueonerrortype: ""
+ id: "98"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "68"
+ note: false
+ quietmode: 0
+ separatecontext: true
+ skipunavailable: false
+ task:
+ brand: ""
+ id: de7fd69a-c449-4f9f-82f3-c03e2184606d
+ iscommand: false
+ name: 'Cortex ASM - Certificate Enrichment'
+ playbookId: 'Cortex ASM - Certificate Enrichment'
+ type: playbook
+ version: -1
+ description: ''
+ taskid: de7fd69a-c449-4f9f-82f3-c03e2184606d
+ timertriggers: []
+ type: playbook
+ view: |-
+ {
+ "position": {
+ "x": 110,
+ "y": 5560
+ }
+ }
+ "99":
+ continueonerrortype: ""
+ id: "99"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "98"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ id: 68ab2492-9ebc-4ed8-871e-be2b5e6954ce
+ iscommand: false
+ name: Certificate Enrichment
+ type: title
+ version: -1
+ description: ''
+ taskid: 68ab2492-9ebc-4ed8-871e-be2b5e6954ce
+ timertriggers: []
+ type: title
+ view: |-
+ {
+ "position": {
+ "x": 110,
+ "y": 5420
+ }
+ }
view: |-
{
"linkLabelsPosition": {
@@ -1734,7 +1797,7 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 5965,
+ "height": 6275,
"width": 1610,
"x": 110,
"y": -130
diff --git a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
index 75f8814c0cb3..0d7ba212c1bc 100644
--- a/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
+++ b/Packs/CortexAttackSurfaceManagement/Playbooks/Cortex_ASM_-_Enrichment_README.md
@@ -9,6 +9,7 @@ This playbook uses the following sub-playbooks, integrations, and scripts.
* Cortex ASM - AWS Enrichment
* Cortex ASM - Active Directory Enrichment
* Cortex ASM - Azure Enrichment
+* Cortex ASM - Certificate Enrichment
* Cortex ASM - Cortex Endpoint Enrichment_Core_Combo
* Cortex ASM - GCP Enrichment
* Cortex ASM - On Prem Enrichment
diff --git a/Packs/CortexAttackSurfaceManagement/README.md b/Packs/CortexAttackSurfaceManagement/README.md
index 883e26d7ffbf..94ba81e4477d 100644
--- a/Packs/CortexAttackSurfaceManagement/README.md
+++ b/Packs/CortexAttackSurfaceManagement/README.md
@@ -25,7 +25,7 @@ Aditionally, [a list of integrations used for the Active Response playbook can b
### Demo Video
-[![Active Response in Cortex Xpanse](https://raw.githubusercontent.com/demisto/content/98ead849e9e32921f64f7ac07fda2bff1b5f7c0b/Packs/CortexAttackSurfaceManagement/doc_files/Active_Response_in_Cortex_Xpanse.jpg)](https://www.youtube.com/watch?v=rryAQ23uuqw "Active Response in Cortex Xpanse")
+[![Active Response in Cortex Xpanse](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Active_Response_in_Cortex_Xpanse.jpg)](https://www.youtube.com/watch?v=rryAQ23uuqw "Active Response in Cortex Xpanse")
### Automated Remediation Requirements
@@ -55,6 +55,7 @@ Automated remediation is only possible when the right conditions are met. These
- Active Directory
- AWS IAM
- Azure IAM
+ - Venafi
- Cortex Endpoint (XSIAM/XDR)
- Email addresses found in tags
- GCP IAM
@@ -81,6 +82,7 @@ The main active response playbook is the `Cortex ASM - ASM Alert` playbook. This
- [Cortex ASM - ASM Alert](#cortex-asm---asm-alert)
- [Cortex ASM - AWS Enrichment](#cortex-asm---aws-enrichment)
- [Cortex ASM - Azure Enrichment](#cortex-asm---azure-enrichment)
+ - [Cortex ASM - Certificate Enrichment](#cortex-asm---certificate-enrichment)
- [Cortex ASM - Cortex Endpoint Enrichment](#cortex-asm---cortex-endpoint-enrichment)
- [Cortex ASM - Cortex Endpoint Remediation](#cortex-asm---cortex-endpoint-remediation)
- [Cortex ASM - Detect Service](#cortex-asm---detect-service)
@@ -118,163 +120,171 @@ The main active response playbook is the `Cortex ASM - ASM Alert` playbook. This
A playbook that given the email address enriches Service owner in Azure and On-Prem directory.
-![Cortex ASM - Active Directory Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Active_Directory_Enrichment.png)
+
+![Cortex ASM - Active Directory Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Active_Directory_Enrichment.png)
+
#### Cortex ASM - ASM Alert
A playbook that enriches asset information for ASM alerts and provides the means for remediation.
-![Cortex ASM - ASM Alert](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png)
+![Cortex ASM - ASM Alert](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ASM_Alert.png)
#### Cortex ASM - AWS Enrichment
A playbook that given the IP address enriches AWS information relevant to ASM alerts.
-![Cortex ASM - AWS Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png)
+![Cortex ASM - AWS Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_AWS_Enrichment.png)
#### Cortex ASM - Azure Enrichment
A playbook that given the IP address enriches Azure information relevant to ASM alerts.
-![Cortex ASM - Azure Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Azure_Enrichment.png)
+![Cortex ASM - Azure Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Azure_Enrichment.png)
+
+#### Cortex ASM - Certificate Enrichment
+
+A playbook to enrich certificate information.
+
+![Cortex ASM - Certificate Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png)
#### Cortex ASM - Cortex Endpoint Enrichment
This playbook is used to pull information from Cortex Endpoint (XSIAM/XDR) systems for enrichment purposes.
-![Cortex ASM - Cortex Endpoint Enrichment](https://raw.githubusercontent.com/demisto/content/935a77339c2b1ecde3b9ea64992018bd625c61ed/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Enrichment.png)
+![Cortex ASM - Cortex Endpoint Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Enrichment.png)
#### Cortex ASM - Cortex Endpoint Remediation
This playbook is used for remediating a single exposed Cortex Endpoint (XSIAM/XDR) by isolating the endpoint from the network using the "Isolate Endpoint" feature in XSIAM ([see XSIAM details](https://docs-cortex.paloaltonetworks.com/r/Cortex-XSIAM/Cortex-XSIAM-Administrator-Guide/Isolate-an-Endpoint)) and XDR ([see XDR details](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Isolate-an-Endpoint)).
-![Cortex ASM - Cortex Endpoint Remediation](https://raw.githubusercontent.com/demisto/content/c421d6d3de62992a3ac3afbce09e82224e505641/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Remediation.png)
+![Cortex ASM - Cortex Endpoint Remediation](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Cortex_Endpoint_Remediation.png)
#### Cortex ASM - Detect Service
A playbook that utilizes the Remediation Confirmation Scan service to check for mitigated vulnerabilities.
-![Cortex ASM - Detect Service](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Detect_Service.png)
+![Cortex ASM - Detect Service](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Detect_Service.png)
#### Cortex ASM - Email Notification
A playbook that is used to send email notifications to service owners to notify them of their internet exposures.
-![Cortex ASM - Email Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Email_Notification.png)
+![Cortex ASM - Email Notification](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Email_Notification.png)
#### Cortex ASM - Enrichment
A playbook that is used as a container folder for all enrichments of ASM alerts.
-![Cortex ASM - Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png)
+![Cortex ASM - Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png)
#### Cortex ASM - GCP Enrichment
A playbook that given the IP address enriches GCP information relevant to ASM alerts.
-![Cortex ASM - GCP Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_GCP_Enrichment.png)
+![Cortex ASM - GCP Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_GCP_Enrichment.png)
#### Cortex ASM - Jira Notification
A playbook that is used to create Jira tickets directed toward service owners to notify them of their internet exposures.
-![Cortex ASM - Jira Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Jira_Notification.png)
+![Cortex ASM - Jira Notification](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Jira_Notification.png)
#### Cortex ASM - On Prem Enrichment
A playbook that given an IP address, port, and protocol of a service, enriches using on-prem integrations to find the related firewall rule and other related information.
-![Cortex ASM - On Prem Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Enrichment.png)
+![Cortex ASM - On Prem Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Enrichment.png)
#### Cortex ASM - On Prem Remediation
A playbook that adds new block rule(s) to on-prem firewall vendors in order to block internet access for internet exposures.
-![Cortex ASM - On Prem Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Remediation.png)
+![Cortex ASM - On Prem Remediation](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_On_Prem_Remediation.png)
#### Cortex ASM - Prisma Cloud Enrichment
Playbook that given the IP address enriches Prisma Cloud information relevant to ASM alerts.
-![Cortex ASM - Prisma Cloud Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Prisma_Cloud_Enrichment.png)
+![Cortex ASM - Prisma Cloud Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Prisma_Cloud_Enrichment.png)
#### Cortex ASM - Qualys Enrichment
Playbook that given the IP address enriches Qualys information relevant to ASM alerts.
-![Cortex ASM - Qualys Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Qualys_Enrichment.png)
+![Cortex ASM - Qualys Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Qualys_Enrichment.png)
#### Cortex ASM - Rapid7 Enrichment
A playbook that given the IP address enriches Rapid7 information relevant to ASM alerts.
-![Cortex ASM - Rapid7 Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Rapid7_Enrichment.png)
+![Cortex ASM - Rapid7 Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Rapid7_Enrichment.png)
#### Cortex ASM - Remediation Confirmation Scan
A playbook that creates an ASM Remediation Confirmation Scan using an existing service ID, if the scan does not already exist;. It then polls for results of a scan.
-![Cortex ASM - Remediation Confirmation Scan](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Confirmation_Scan.png)
+![Cortex ASM - Remediation Confirmation Scan](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Confirmation_Scan.png)
#### Cortex ASM - Remediation Guidance
A playbook that pulls remediation guidance off of a list based on ASM RuleID to be used in service owner notifications (email or ticketing system).
-![Cortex ASM - Remediation Guidance](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Guidance.png)
+![Cortex ASM - Remediation Guidance](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Guidance.png)
#### Cortex ASM - Remediation Objectives
A playbook that populates the remediation objectives field that is used to display the remediation actions to the end user.
-![Cortex ASM - Remediation Objectives](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Objectives.png)
+![Cortex ASM - Remediation Objectives](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Objectives.png)
#### Cortex ASM - Remediation Path Rules
A playbook that returns "RemediationAction" options based on the return from the Remediation Path Rules API, or defaults to data collection task options from the "Cortex ADM - Decision" sub-playbook.
-![Cortex ASM - Remediation Path Rules](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
+![Cortex ASM - Remediation Path Rules](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation_Path_Rules.png)
#### Cortex ASM - Remediation
A playbook that is used as a container folder for all remediation of ASM alerts.
-![Cortex ASM - Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png)
+![Cortex ASM - Remediation](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Remediation.png)
#### Cortex ASM - Service Ownership
Playbook that identifies and recommends the most likely owners of a given service.
-![Cortex ASM - Remediation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png)
+![Cortex ASM - Remediation](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Service_Ownership.png)
#### Cortex ASM - ServiceNow CMDB Enrichment
A playbook that given the IP address enriches ServiceNow CMDB information relevant to ASM alerts.
-![Cortex ASM - ServiceNow CMDB Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png)
+![Cortex ASM - ServiceNow CMDB Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_CMDB_Enrichment.png)
#### Cortex ASM - ServiceNow ITSM Enrichment
A playbook that given the search terms enriches ServiceNow ITSM service owner information relevant to ASM alerts.
-![Cortex ASM - ServiceNow ITSM Enrichment](https://raw.githubusercontent.com/demisto/content/0fd2fb4a7240673f3a3fcb1dec5339549f0f2fb8/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png)
+![Cortex ASM - ServiceNow ITSM Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_ITSM_Enrichment.png)
#### Cortex ASM - ServiceNow Notification
A playbook that is used to create ServiceNow tickets directed toward service owners to notify them of their internet exposures.
-![Cortex ASM - ServiceNow Notification](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_Notification.png)
+![Cortex ASM - ServiceNow Notification](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_ServiceNow_Notification.png)
#### Cortex ASM - Splunk Enrichment
A playbook that given the IP address enriches Splunk information relevant to ASM alerts.
-![Cortex ASM - Splunk Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Splunk_Enrichment.png)
+![Cortex ASM - Splunk Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Splunk_Enrichment.png)
#### Cortex ASM - Tenable.io Enrichment
A playbook that given the IP address enriches Tenable.io information relevant to ASM alerts.
-![Cortex ASM - Tenable.io Enrichment](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Tenable_io_Enrichment.png)
+![Cortex ASM - Tenable.io Enrichment](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Tenable_io_Enrichment.png)
### Automation Scripts
@@ -282,13 +292,13 @@ A playbook that given the IP address enriches Tenable.io information relevant to
An automation used to generate an ASM alert summary report with important information found via the playbook run.
-![GenerateASMReport](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/GenerateASMReport.png)
+![GenerateASMReport](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/GenerateASMReport.png)
#### InferWhetherServiceIsDev
An automation that identifies whether the service is a "development" server. Development servers have no external users and run no production workflows. These servers might be named "dev", but they might also be named "qa", "pre-production", "user acceptance testing", or use other non-production terms. This automation uses both public data visible to anyone (`active_classifications` as derived by Xpanse ASM) as well as checking internal data for AI-learned indicators of development systems (`asm_tags` as derived from integrations with non-public systems).
-![InferWhetherServiceIsDev](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/InferWhetherServiceIsDev.png)
+![InferWhetherServiceIsDev](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/InferWhetherServiceIsDev.png)
#### RankServiceOwners
@@ -302,7 +312,7 @@ This automation parses a GCP service account email for the project ID, then look
An automation that is used to find a matching remediation path rule based on criteria. If multiple rules match, it will return the most recently created rule. This assumes that the rules passed in are filtered to correlate with the alert's attack surface rule (Xpanse only).
-![RemediationPathRuleEvaluation](https://raw.githubusercontent.com/demisto/content/master/Packs/CortexAttackSurfaceManagement/doc_files/RemediationPathRuleEvaluation.png)
+![RemediationPathRuleEvaluation](https://github.com/demisto/content/raw/234817ee736286eee1b40ece897cb7b2974771dc/Packs/CortexAttackSurfaceManagement/doc_files/RemediationPathRuleEvaluation.png)
### Layouts
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md
new file mode 100644
index 000000000000..b2286886bb32
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_39.md
@@ -0,0 +1,9 @@
+
+#### Playbooks
+
+##### Cortex ASM - Enrichment
+
+- Updated the playbook to add the Cortex ASM - Certificate Enrichment sub-playbook.
+##### New: Cortex ASM - Certificate Enrichment
+
+- New: Playbook to enrich certificate information.
diff --git a/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md
new file mode 100644
index 000000000000..44030f5d1339
--- /dev/null
+++ b/Packs/CortexAttackSurfaceManagement/ReleaseNotes/1_7_40.md
@@ -0,0 +1,3 @@
+## Cortex Attack Surface Management
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png
new file mode 100644
index 000000000000..a0c3cf723135
Binary files /dev/null and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Certificate_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png
index 9bf29f4de812..18f548ac3bc7 100644
Binary files a/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png and b/Packs/CortexAttackSurfaceManagement/doc_files/Cortex_ASM_-_Enrichment.png differ
diff --git a/Packs/CortexAttackSurfaceManagement/pack_metadata.json b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
index 551ada0fea18..7c44c3e6272b 100644
--- a/Packs/CortexAttackSurfaceManagement/pack_metadata.json
+++ b/Packs/CortexAttackSurfaceManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Attack Surface Management",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.7.38",
+ "currentVersion": "1.7.40",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -88,6 +88,10 @@
"MicrosoftGraphUser": {
"mandatory": false,
"display_name": "Microsoft Graph User"
+ },
+ "Venafi": {
+ "mandatory": false,
+ "display_name": "Venafi"
}
},
"marketplaces": [
@@ -110,6 +114,7 @@
"PAN-OS",
"Azure-Enrichment-Remediation",
"Jira",
- "Tenable_io"
+ "Tenable_io",
+ "Venafi"
]
}
\ No newline at end of file
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
index eed82207fb71..a250e4c2eff7 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.py
@@ -368,7 +368,7 @@ def get_tenant_info(self):
)
return reply.get('reply', {})
- def get_multiple_incidents_extra_data(self, incident_id_list=[], fields_to_exclude=True, gte_creation_time_milliseconds=0,
+ def get_multiple_incidents_extra_data(self, exclude_artifacts, incident_id_list=[], gte_creation_time_milliseconds=0,
status=None, starred=None, starred_incidents_fetch_window=None,
page_number=0, limit=100):
"""
@@ -389,7 +389,7 @@ def get_multiple_incidents_extra_data(self, incident_id_list=[], fields_to_exclu
'operator': 'eq',
'value': status
})
- if demisto.command() == 'fetch-incidents' and fields_to_exclude:
+ if exclude_artifacts:
request_data['fields_to_exclude'] = FIELDS_TO_EXCLUDE # type: ignore
if starred and starred_incidents_fetch_window:
@@ -430,6 +430,23 @@ def get_multiple_incidents_extra_data(self, incident_id_list=[], fields_to_exclu
incidents = reply.get('reply')
return incidents.get('incidents', {}) if isinstance(incidents, dict) else incidents # type: ignore
+ def update_alerts_in_xdr_request(self, alerts_ids, severity, status, comment) -> List[Any]:
+ request_data = {"request_data": {
+ "alert_id_list": alerts_ids,
+ }}
+ update_data = assign_params(severity=severity, status=status, comment=comment)
+ request_data['request_data']['update_data'] = update_data
+ response = self._http_request(
+ method='POST',
+ url_suffix='/alerts/update_alerts',
+ json_data=request_data,
+ headers=self.headers,
+ timeout=self.timeout,
+ )
+ if "reply" not in response or "alerts_ids" not in response["reply"]:
+ raise DemistoException(f"Parse Error. Response not in format, can't find reply key. The response {response}.")
+ return response['reply']['alerts_ids']
+
def get_headers(params: dict) -> dict:
api_key = params.get('apikey_creds', {}).get('password', '') or params.get('apikey', '')
@@ -471,10 +488,12 @@ def update_incident_command(client, args):
assigned_user_mail = args.get('assigned_user_mail')
assigned_user_pretty_name = args.get('assigned_user_pretty_name')
status = args.get('status')
+ demisto.debug(f"this_is_the_status {status}")
severity = args.get('manual_severity')
unassign_user = args.get('unassign_user') == 'true'
resolve_comment = args.get('resolve_comment')
add_comment = args.get('add_comment')
+ resolve_alerts = argToBoolean(args.get('resolve_alerts', False))
if assigned_user_pretty_name and not assigned_user_mail:
raise DemistoException('To set a new assigned_user_pretty_name, '
@@ -490,6 +509,10 @@ def update_incident_command(client, args):
resolve_comment=resolve_comment,
add_comment=add_comment,
)
+ is_closed = resolve_comment or (status and argToList(status, '_')[0] == 'RESOLVED')
+ if resolve_alerts and is_closed:
+ args['status'] = args['status'].lower()
+ update_related_alerts(client, args)
return f'Incident {incident_id} has been updated', None, None
@@ -540,9 +563,9 @@ def sort_incident_data(raw_incident):
- network artifacts.
"""
incident = raw_incident.get('incident', {})
- raw_alerts = raw_incident.get('alerts', {}).get('data', None)
- file_artifacts = raw_incident.get('file_artifacts', {}).get('data')
- network_artifacts = raw_incident.get('network_artifacts', {}).get('data')
+ raw_alerts = raw_incident.get('alerts', {}).get('data', [])
+ file_artifacts = raw_incident.get('file_artifacts', {}).get('data', [])
+ network_artifacts = raw_incident.get('network_artifacts', {}).get('data', [])
context_alerts = clear_trailing_whitespace(raw_alerts)
if context_alerts:
for alert in context_alerts:
@@ -559,6 +582,7 @@ def get_incident_extra_data_command(client, args):
global ALERTS_LIMIT_PER_INCIDENTS
incident_id = args.get('incident_id')
alerts_limit = int(args.get('alerts_limit', 1000))
+ exclude_artifacts = argToBoolean(args.get('excluding_artifacts', 'False'))
return_only_updated_incident = argToBoolean(args.get('return_only_updated_incident', 'False'))
if return_only_updated_incident:
last_mirrored_in_time = get_last_mirrored_in_time(args)
@@ -569,7 +593,7 @@ def get_incident_extra_data_command(client, args):
else: # the incident was not modified
return "The incident was not modified in XDR since the last mirror in.", {}, {}
- raw_incident = client.get_multiple_incidents_extra_data(incident_id_list=[incident_id])
+ raw_incident = client.get_multiple_incidents_extra_data(incident_id_list=[incident_id], exclude_artifacts=exclude_artifacts)
if not raw_incident:
raise DemistoException(f'Incident {incident_id} is not found')
if isinstance(raw_incident, list):
@@ -579,9 +603,10 @@ def get_incident_extra_data_command(client, args):
"alert_count:{raw_incident.get("incident", {}).get("alert_count")} >" \
"limit:{ALERTS_LIMIT_PER_INCIDENTS}')
raw_incident = client.get_incident_extra_data(incident_id, alerts_limit)
- demisto.debug(f"in get_incident_extra_data_command {incident_id=} {raw_incident=}")
readable_output = [tableToMarkdown(f'Incident {incident_id}', raw_incident.get('incident'), removeNull=True)]
+
incident = sort_incident_data(raw_incident)
+
if incident_alerts := incident.get('alerts'):
readable_output.append(tableToMarkdown('Alerts', incident_alerts,
headers=[key for key in incident_alerts[0]
@@ -954,7 +979,6 @@ def update_remote_system_command(client, args):
if remote_args.delta:
demisto.debug(f'Got the following delta keys {str(list(remote_args.delta.keys()))} to update'
f'incident {remote_args.remote_incident_id}')
- demisto.debug(f'{remote_args.delta=}')
try:
if remote_args.incident_changed:
demisto.debug(f"update_remote_system_command {incident_id=} {remote_args.incident_changed=}")
@@ -962,8 +986,26 @@ def update_remote_system_command(client, args):
update_args['incident_id'] = remote_args.remote_incident_id
demisto.debug(f'Sending incident with remote ID [{remote_args.remote_incident_id}]\n')
+ demisto.debug(f"Before checking status {update_args=}")
+ current_remote_status = remote_args.data.get('status') if remote_args.data else None
+ is_closed = (update_args.get('close_reason') or update_args.get('closeReason') or update_args.get('closeNotes')
+ or update_args.get('resolve_comment') or update_args.get('closingUserId'))
+ closed_without_status = not update_args.get('close_reason') and not update_args.get('closeReason')
+ remote_is_already_closed = current_remote_status in XDR_RESOLVED_STATUS_TO_XSOAR
+ demisto.debug(f"{remote_is_already_closed=}")
+ if is_closed and closed_without_status and not remote_is_already_closed:
+ update_args['status'] = XSOAR_RESOLVED_STATUS_TO_XDR.get('Other')
+ demisto.debug(f"After checking status {update_args=}")
update_incident_command(client, update_args)
+ close_alerts_in_xdr = argToBoolean(client._params.get("close_alerts_in_xdr", False))
+ # Check all relevant fields for an incident being closed in XSOAR UI
+ demisto.debug(f"Defining whether to close related alerts by: {is_closed=} {close_alerts_in_xdr=}")
+ if is_closed and closed_without_status and remote_is_already_closed:
+ update_args['status'] = current_remote_status
+ if close_alerts_in_xdr and is_closed:
+ update_related_alerts(client, update_args)
+
else:
demisto.debug(f'Skipping updating remote incident fields [{remote_args.remote_incident_id}] '
f'as it is not new nor changed')
@@ -977,9 +1019,26 @@ def update_remote_system_command(client, args):
return remote_args.remote_incident_id
-def fetch_incidents(client, first_fetch_time, integration_instance, last_run: dict = None, max_fetch: int = 10,
- statuses: List = [], starred: Optional[bool] = None, starred_incidents_fetch_window: str = None,
- fields_to_exclude: bool = True):
+def update_related_alerts(client: Client, args: dict):
+ new_status = args.get('status')
+ incident_id = args.get('incident_id')
+ comment = f"Resolved by XSOAR, due to incident {incident_id} that has been resolved."
+ demisto.debug(f"{new_status=}, {comment=}")
+ if not new_status:
+ raise DemistoException(f"Failed to update alerts related to incident {incident_id},"
+ "no status found")
+ incident_extra_data = client.get_incident_extra_data(incident_id)
+ if 'alerts' in incident_extra_data and 'data' in incident_extra_data['alerts']:
+ alerts_array = incident_extra_data['alerts']['data']
+ related_alerts_ids_array = [str(alert['alert_id']) for alert in alerts_array if 'alert_id' in alert]
+ demisto.debug(f"{related_alerts_ids_array=}")
+ args_for_command = {'alert_ids': related_alerts_ids_array, 'status': new_status, 'comment': comment}
+ return_results(update_alerts_in_xdr_command(client, args_for_command))
+
+
+def fetch_incidents(client, first_fetch_time, integration_instance, exclude_artifacts: bool, last_run: dict = None,
+ max_fetch: int = 10, statuses: List = [], starred: Optional[bool] = None,
+ starred_incidents_fetch_window: str = None):
global ALERTS_LIMIT_PER_INCIDENTS
# Get the last fetch time, if exists
last_fetch = last_run.get('time') if isinstance(last_run, dict) else None
@@ -1005,7 +1064,7 @@ def fetch_incidents(client, first_fetch_time, integration_instance, last_run: di
status=status,
limit=max_fetch, starred=starred,
starred_incidents_fetch_window=starred_incidents_fetch_window,
- fields_to_exclude=fields_to_exclude)
+ exclude_artifacts=exclude_artifacts)
raw_incidents.extend(raw_incident_status)
raw_incidents = sorted(raw_incidents, key=lambda inc: inc.get('incident', {}).get('creation_time'))
else:
@@ -1013,7 +1072,7 @@ def fetch_incidents(client, first_fetch_time, integration_instance, last_run: di
gte_creation_time_milliseconds=last_fetch, limit=max_fetch,
starred=starred,
starred_incidents_fetch_window=starred_incidents_fetch_window,
- fields_to_exclude=fields_to_exclude)
+ exclude_artifacts=exclude_artifacts)
# save the last 100 modified incidents to the integration context - for mirroring purposes
client.save_modified_incidents_to_integration_context()
@@ -1176,6 +1235,27 @@ def replace_featured_field_command(client: Client, args: Dict) -> CommandResults
)
+def update_alerts_in_xdr_command(client: Client, args: Dict) -> CommandResults:
+ alerts_list = argToList(args.get('alert_ids'))
+ array_of_all_ids = []
+ severity = args.get('severity')
+ status = args.get('status')
+ comment = args.get('comment')
+ if not severity and not status and not comment:
+ raise DemistoException(
+ f"Can not find a field to update for alerts {alerts_list}, please fill in severity/status/comment.")
+ # API is limited to 100 alerts per request, doing the request in batches of 100.
+ for index in range(0, len(alerts_list), 100):
+ alerts_sublist = alerts_list[index:index + 100]
+ demisto.debug(f'{alerts_sublist=}, {severity=}, {status=}, {comment=}')
+ array_of_sublist_ids = client.update_alerts_in_xdr_request(alerts_sublist, severity, status, comment)
+ array_of_all_ids += array_of_sublist_ids
+ if not array_of_all_ids:
+ raise DemistoException("Could not find alerts to update, please make sure you used valid alert IDs.")
+ return CommandResults(readable_output="Alerts with IDs {} have been updated successfully.".format(",".join(array_of_all_ids))
+ )
+
+
def main(): # pragma: no cover
"""
Executes an integration command
@@ -1192,7 +1272,7 @@ def main(): # pragma: no cover
statuses = params.get('status')
starred = True if params.get('starred') else None
starred_incidents_fetch_window = params.get('starred_incidents_fetch_window', '3 days')
- fields_to_exclude = params.get('exclude_fields', True)
+ exclude_artifacts = argToBoolean(params.get('exclude_fields', True))
try:
timeout = int(params.get('timeout', 120))
@@ -1224,10 +1304,16 @@ def main(): # pragma: no cover
elif command == 'fetch-incidents':
integration_instance = demisto.integrationInstance()
- next_run, incidents = fetch_incidents(client, first_fetch_time, integration_instance,
- demisto.getLastRun().get('next_run'), max_fetch, statuses, starred,
- starred_incidents_fetch_window,
- fields_to_exclude)
+ next_run, incidents = fetch_incidents(client=client,
+ first_fetch_time=first_fetch_time,
+ integration_instance=integration_instance,
+ exclude_artifacts=exclude_artifacts,
+ last_run=demisto.getLastRun().get('next_run'),
+ max_fetch=max_fetch,
+ statuses=statuses,
+ starred=starred,
+ starred_incidents_fetch_window=starred_incidents_fetch_window,
+ )
last_run_obj = demisto.getLastRun()
last_run_obj['next_run'] = next_run
demisto.setLastRun(last_run_obj)
@@ -1601,6 +1687,9 @@ def main(): # pragma: no cover
elif command in ('xdr-set-user-role', 'xdr-remove-user-role'):
return_results(change_user_role_command(client, args))
+ elif command == 'xdr-update-alert':
+ return_results(update_alerts_in_xdr_command(client, args))
+
except Exception as err:
return_error(str(err))
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
index 1ef6236fb4fa..6133ffedc971 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml
@@ -158,13 +158,21 @@ configuration:
section: Collect
advanced: true
required: false
-- additionalinfo: "Whether to fetch only the essential incident's fields and minimize the incident's information."
+- additionalinfo: "Whether to fetch only the essential incident fields, excluding Network Artifacts and File Artifacts, to minimize the incident information."
display: Minimize Incident Information
name: exclude_fields
required: false
type: 8
section: Collect
+ defaultvalue: 'true'
+ advanced: true
+- display: Close all related alerts in XDR
+ name: close_alerts_in_xdr
+ required: false
+ additionalinfo: "Close all related alerts in Cortex XDR once an incident has been closed in Cortex XSOAR."
advanced: true
+ type: 8
+ section: Collect
description: Cortex XDR is the world's first detection and response app that natively integrates network, endpoint, and cloud data to stop sophisticated attacks.
display: Palo Alto Networks Cortex XDR - Investigation and Response
name: Cortex XDR - IR
@@ -293,6 +301,9 @@ script:
- defaultValue: 'False'
description: Return data only if the incident was changed since the last time it was mirrored into Cortex XSOAR. This flag should be used only from within a Cortex XDR incident.
name: return_only_updated_incident
+ - defaultValue: 'False'
+ description: "Whether to exclude Network Artifacts and File Artifacts from the incident data."
+ name: excluding_artifacts
description: Returns additional data for the specified incident, for example, related alerts, file artifacts, network artifacts, and so on.
name: xdr-get-incident-extra-data
outputs:
@@ -631,6 +642,13 @@ script:
- 'true'
- description: Add a comment to the incident.
name: add_comment
+ - auto: PREDEFINED
+ description: Whether to resolve the alerts related to a resolved incident. The incident is considered resolved when the status argument contains a "RESOLVED" value or the "resolve_comment" argument is provided.
+ name: resolve_alerts
+ predefined:
+ - 'true'
+ - 'false'
+ defaultValue: 'false'
description: Updates one or more fields of a specified incident. Missing fields will be ignored. To remove the assignment for an incident, pass a null value in the assignee email argument.
name: xdr-update-incident
- arguments:
@@ -3493,7 +3511,43 @@ script:
isArray: true
name: xdr-remove-user-role
description: Remove one or more users from a role.
- dockerimage: demisto/python3:3.10.14.91134
+ - arguments:
+ - description: A comma-separated list of alert IDs.
+ name: alert_ids
+ required: true
+ isArray: true
+ - auto: PREDEFINED
+ description: Required severity to update alerts to.
+ name: severity
+ required: false
+ predefined:
+ - critical
+ - high
+ - medium
+ - low
+ - auto: PREDEFINED
+ description: New status for updated alerts.
+ name: status
+ required: false
+ predefined:
+ - new
+ - resolved_threat_handled
+ - under_investigation
+ - resolved_security_testing
+ - resolved_auto
+ - resolved_known_issue
+ - resolved_duplicate
+ - resolved_other
+ - resolved_false_positive
+ - resolved_true_positive
+ - description: Comment to append to updated alerts.
+ name: comment
+ required: false
+ description: |-
+ Update one or more alerts with the provided arguments.
+ Required license: Cortex XDR Prevent, Cortex XDR Pro per Endpoint, or Cortex XDR Pro per GB.
+ name: xdr-update-alert
+ dockerimage: demisto/python3:3.10.14.96411
isfetch: true
isfetch:xpanse: false
script: ''
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
index 49bd7fe45ea8..2bc7cccdadee 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR_test.py
@@ -1,5 +1,6 @@
import copy
import json
+from unittest.mock import patch
import pytest
from freezegun import freeze_time
@@ -70,7 +71,7 @@ def test_fetch_incidents(requests_mock, mocker):
modified_raw_incident.get('alerts')[0].get('host_ip').split(',')
mocker.patch("CortexXDRIR.ALERTS_LIMIT_PER_INCIDENTS", new=50)
mocker.patch.object(Client, 'save_modified_incidents_to_integration_context')
- next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance')
+ next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False)
sort_all_list_incident_fields(modified_raw_incident)
assert len(incidents) == 1
assert incidents[0]['name'] == "XDR Incident 1 - desc1"
@@ -102,7 +103,7 @@ def test_fetch_incidents_filtered_by_status(requests_mock, mocker):
mocker.patch.object(Client, 'save_modified_incidents_to_integration_context')
statuses_to_fetch = ['under_investigation', 'new']
- next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', statuses=statuses_to_fetch)
+ next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False, statuses=statuses_to_fetch)
assert len(incidents) == 2
assert incidents[0]['name'] == "XDR Incident 1 - 'Local Analysis Malware' generated by XDR Agent detected on host AAAAAA "\
@@ -141,7 +142,7 @@ def test_fetch_incidents_with_rate_limit_error(requests_mock, mocker):
client = Client(
base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False)
with pytest.raises(Exception) as e:
- next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance')
+ next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False)
assert str(e.value) == 'Rate limit exceeded'
@@ -210,7 +211,8 @@ def test_fetch_only_starred_incidents(self, mocker):
mocker.patch.object(Client, 'get_multiple_incidents_extra_data', return_value=get_incidents_list_response)
client = Client(
base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False)
- next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', last_run_obj.get('next_run'),
+ next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False,
+ last_run=last_run_obj.get('next_run'),
starred=True,
starred_incidents_fetch_window='3 days')
assert len(incidents) == 2
@@ -738,7 +740,7 @@ def test_fetch_incidents_extra_data(requests_mock, mocker):
mocker.patch.object(Client, 'save_modified_incidents_to_integration_context')
mocker.patch.object(Client, 'save_modified_incidents_to_integration_context')
mocker.patch("CortexXDRIR.ALERTS_LIMIT_PER_INCIDENTS", new=2)
- next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance')
+ next_run, incidents = fetch_incidents(client, '3 month', 'MyInstance', exclude_artifacts=False)
assert len(incidents) == 2
assert incidents[0]['name'] == 'XDR Incident 1 - desc1'
assert json.loads(incidents[0]['rawJSON']).get('incident_id') == '1'
@@ -1241,7 +1243,7 @@ def test_get_multiple_incidents_extra_data(self, requests_mock, mocker):
starred=True,
starred_incidents_fetch_window=1575806909185,
incident_id_list=['1', '2'],
- fields_to_exclude=True)
+ exclude_artifacts=True)
assert len(outputs) == len(multiple_extra_data['reply']['incidents'])
assert outputs[0]['alerts']['total_count'] <= alert_limit
assert outputs[1]['alerts']['total_count'] <= alert_limit
@@ -1291,3 +1293,233 @@ def test_sort_all_incident_data_fields_fetch_case_get_multiple_incidents_extra_d
assert incident_data.get('incident_sources') == ['XDR Agent']
assert incident_data.get('status') == 'new'
assert len(incident_data.get('file_artifacts')) == 1
+
+
+def test_update_alerts_in_xdr_command_expected_result(mocker):
+ """
+ Given:
+ - an XDR client
+ - arguments (incident_id)
+ When
+ - Running update_alerts_in_xdr_command
+ Then
+ - Verify update alerts
+ """
+ from CortexXDRIR import update_alerts_in_xdr_command, Client
+ xdrIr_client = Client(base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=10, proxy=False)
+ http_request = mocker.patch.object(xdrIr_client, '_http_request')
+ http_request.return_value = {"reply": {"alerts_ids": ['1', '2', '3']}}
+ args = {"alert_ids": "1,2,3", "severity": "high", "status": "resolved_threat_handled", "comment": "fixed from test"}
+ res = update_alerts_in_xdr_command(xdrIr_client, args)
+ assert res.readable_output == "Alerts with IDs 1,2,3 have been updated successfully."
+
+
+def test_update_alerts_in_xdr_command_fail_to_update(mocker):
+ """
+ Given:
+ - an XDR client
+ - arguments (incident_id)
+ When
+ - Running update_alerts_in_xdr_command
+ Then
+ - Did not find alerts to update - raise an error
+ """
+ from CortexXDRIR import update_alerts_in_xdr_command, Client
+ xdrIr_client = Client(base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=10, proxy=False)
+ http_request = mocker.patch.object(xdrIr_client, '_http_request')
+ http_request.return_value = {"reply": {"alerts_ids": []}}
+ args = {"alert_ids": "1,2,3", "severity": "high", "status": "resolved_threat_handled", "comment": "fixed from test"}
+ with pytest.raises(DemistoException) as e:
+ update_alerts_in_xdr_command(xdrIr_client, args)
+ assert e.value.message == "Could not find alerts to update, please make sure you used valid alert IDs."
+
+
+def test_update_alerts_in_xdr_command_invalid_response_no_reply(mocker):
+ """
+ Given:
+ - an XDR client
+ - arguments (incident_id)
+ When
+ - Running update_alerts_in_xdr_command
+ Then
+ - Verify that if the incident id is not found, it returns an error.
+ """
+ from CortexXDRIR import update_alerts_in_xdr_command, Client
+ xdrIr_client = Client(base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=10, proxy=False)
+ http_request = mocker.patch.object(xdrIr_client, '_http_request')
+ http_request.return_value = {"alerts_ids": ['1', '2', '3']}
+ args = {"alert_ids": "1,2,3", "severity": "high", "status": "resolved_threat_handled", "comment": "fixed from test"}
+ with pytest.raises(DemistoException) as e:
+ update_alerts_in_xdr_command(xdrIr_client, args)
+ assert e.value.message == ("Parse Error. Response not in format, can't find reply key. "
+ "The response {'alerts_ids': ['1', '2', '3']}.")
+
+
+def test_update_alerts_in_xdr_command_invalid_response_no_alerts_ids(mocker):
+ """
+ Given:
+ - an XDR client
+ - arguments (incident_id)
+ When
+ - Running update_alerts_in_xdr_command
+ Then
+ - Verify that if the incident id is not found, it returns an error.
+ """
+ from CortexXDRIR import update_alerts_in_xdr_command, Client
+ xdrIr_client = Client(base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=10, proxy=False)
+ http_request = mocker.patch.object(xdrIr_client, '_http_request')
+ http_request.return_value = {"reply": {'alerts_ids': []}}
+ args = {"alert_ids": "1,2,3", "severity": "high", "status": "resolved_threat_handled", "comment": "fixed from test"}
+ with pytest.raises(DemistoException) as e:
+ update_alerts_in_xdr_command(xdrIr_client, args)
+ assert e.value.message == "Could not find alerts to update, please make sure you used valid alert IDs."
+
+
+@pytest.mark.parametrize('incident_changed, delta',
+ [(True, {'CortexXDRIRstatus': 'resolved', "close_reason": "False Positive"}),
+ (False, {})])
+def test_update_remote_system_command_update_alerts(mocker, incident_changed, delta):
+ """
+ Given:
+ - an XDR client
+ - arguments (incident fields)
+ When
+ - update_remote_system_command which triggers Running update_alerts_in_xdr_command
+ Then
+ - Verify alerts related to incident have been changed when closing the incident
+ """
+ from CortexXDRIR import update_remote_system_command, Client
+ client = Client(
+ base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False, params={'close_alerts_in_xdr': True})
+ data = {'CortexXDRIRstatus': 'resolved', 'status': 'test'}
+ expected_remote_id = 'remote_id'
+ args = {'remoteId': expected_remote_id, 'data': data, 'entries': [], 'incidentChanged': incident_changed,
+ 'delta': delta,
+ 'status': 2,
+ }
+ with patch("CortexXDRIR.update_incident_command") as mock_update_incident_command:
+ get_incident_extra_data_mock = mocker.patch.object(client, 'get_incident_extra_data')
+ get_incident_extra_data_mock.return_value = {'alerts': {'data': [{'alert_id': '123'}]}}
+ mock_update_incident_command.return_value = {}
+ http_request_mock = mocker.patch.object(client, 'update_alerts_in_xdr_request')
+ http_request_mock.return_value = '1,2,3'
+ update_remote_system_command(client, args)
+
+
+def test_update_alerts_in_xdr_request_called_with():
+ """
+ Given:
+ - an XDR client
+ - arguments (incident fields)
+ When
+ - update_alerts_in_xdr_request is called
+ Then
+ - the http request is called with the right args
+ """
+ alerts_ids = '1,2,3'
+ severity = 'High'
+ status = 'resolved'
+ comment = 'i am a test'
+ from CortexXDRIR import Client
+ client = Client(
+ base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False, params={'close_alerts_in_xdr': True})
+ with patch.object(client, '_http_request') as mock_http_request, patch("CortexXDRIR.get_headers") as get_headers_mock:
+ mock_http_request.return_value = {
+ "reply": {
+ "alerts_ids": alerts_ids
+ }
+ }
+ get_headers_mock.return_value = {
+ "x-xdr-timestamp": 123,
+ "x-xdr-nonce": 456,
+ "x-xdr-auth-id": str(678),
+ "Authorization": 123,
+ }
+ client.update_alerts_in_xdr_request(alerts_ids, severity, status, comment)
+ mock_http_request.assert_called_once_with(method='POST',
+ url_suffix='/alerts/update_alerts',
+ json_data={'request_data':
+ {'alert_id_list': '1,2,3',
+ 'update_data':
+ {'severity': 'High', 'status': 'resolved', 'comment': 'i am a test'}
+ }
+ },
+ headers={
+ 'x-xdr-timestamp': 123,
+ 'x-xdr-nonce': 456,
+ 'x-xdr-auth-id': '678',
+ 'Authorization': 123},
+ timeout=120)
+
+
+def test_update_alerts_in_xdr_request_invalid_response():
+ """
+ Given:
+ - an XDR client
+ - arguments (incident fields)
+ When
+ - update_alerts_in_xdr_request is called
+ Then
+ - response is not in format- raise an error
+ """
+ alerts_ids = '1,2,3'
+ severity = 'High'
+ status = 'resolved'
+ comment = 'i am a test'
+ from CortexXDRIR import Client
+ client = Client(
+ base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False, params={'close_alerts_in_xdr': True})
+ with patch.object(client, '_http_request') as mock_http_request, patch("CortexXDRIR.get_headers") as get_headers_mock, \
+ pytest.raises(DemistoException) as e:
+ mock_http_request.return_value = {
+ "replys": {
+ "alerts_ids": alerts_ids
+ }
+ }
+ get_headers_mock.return_value = {
+ "x-xdr-timestamp": 123,
+ "x-xdr-nonce": 456,
+ "x-xdr-auth-id": str(678),
+ "Authorization": 123,
+ }
+ client.update_alerts_in_xdr_request(alerts_ids, severity, status, comment)
+ assert e.value.message == ("Parse Error. Response not in format, can't find reply key. "
+ "The response {'replys': {'alerts_ids': '1,2,3'}}.")
+
+
+def test_update_alerts_in_xdr_command():
+ """
+ Given:
+ - an XDR client
+ - arguments (incident fields)
+ When
+ - test_update_alerts_in_xdr_command is called
+ Then
+ - raises an error since there is no field to update
+ """
+ from CortexXDRIR import Client, update_alerts_in_xdr_command
+ from CommonServerPython import DemistoException
+ args = {'alert_ids': '1'}
+ client = Client(
+ base_url=f'{XDR_URL}/public_api/v1', verify=False, timeout=120, proxy=False, params={'close_alerts_in_xdr': True}
+ )
+ with pytest.raises(DemistoException) as e:
+ update_alerts_in_xdr_command(client, args)
+ assert e.value.message == "Can not find a field to update for alerts ['1'], please fill in severity/status/comment."
+
+
+def test_main(mocker):
+ """
+ Given:
+ - Only the required params in the configuration.
+ When:
+ - Running a command.
+ Then:
+ - Validate that the code executes gracefully.
+ """
+ from CortexXDRIR import main
+ mocker.patch.object(demisto, 'params', return_value={'url': 'test_url'})
+ mocker.patch.object(demisto, 'command', return_value='test-module')
+ mock_client = mocker.patch('CortexXDRIR.Client', autospec=True)
+ mock_client.test_module.return_value = 'ok'
+ main()
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
index c971628f9932..ccc9733595ad 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/README.md
@@ -29,6 +29,7 @@ This integration was integrated and tested with version 2.6.5 of Cortex XDR - IR
| Prevent Only Mode | Whether the XDR tenant Mode is prevent only | False |
| Incident Statuses to Fetch | The statuses of the incidents that will be fetched. If no status is provided then incidents of all the statuses will be fetched. Note: An incident whose status was changed to a filtered status after its creation time will not be fetched. | False |
| Incidents Fetch Interval | | False |
+ | Close all related alerts in XDR | Close all related alerts in Cortex XDR once an incident has been closed in Cortex XSOAR. | False |
4. Click **Test** to validate the URLs, token, and connection.
@@ -3839,3 +3840,33 @@ There is no context output for this command.
>The endpoint alias was changed successfully.
Note: If there is no error in the process, then this is the output even when the specific endpoint does not exist.
+
+### xdr-update-alert
+
+***
+Update one or more alerts with the provided arguments. Alerts are updated in batches of up to 100 per API request. Missing fields are ignored. Required license: Cortex XDR Prevent, Cortex XDR Pro per Endpoint, or Cortex XDR Pro per GB.
+
+#### Base Command
+
+`xdr-update-alert`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alert_ids | Comma-separated list of alert IDs. | Required |
+| severity | The severity to update the alerts to. Possible values are: critical, high, medium, low. | Optional |
+| status | New status for updated alerts. Possible values are: new, resolved_threat_handled, under_investigation, resolved_security_testing, resolved_auto, resolved_known_issue, resolved_duplicate, resolved_other, resolved_false_positive, resolved_true_positive. | Optional |
+| comment | Comment to append to updated alerts. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+#### Command example
+```!xdr-update-alert alert_ids=35326 severity=low```
+
+#### Human Readable Output
+
+>Alerts with IDs 35326 have been updated successfully.
+
+
diff --git a/Packs/CortexXDR/Integrations/CortexXDRIR/command_examples b/Packs/CortexXDR/Integrations/CortexXDRIR/command_examples
index e74cf7608f70..facf4a69e935 100644
--- a/Packs/CortexXDR/Integrations/CortexXDRIR/command_examples
+++ b/Packs/CortexXDR/Integrations/CortexXDRIR/command_examples
@@ -52,4 +52,5 @@
!xdr-list-roles role_names="dummy dummy"
!xdr-list-user-groups group_names=test
!xdr-set-user-role role_name=dummy user_emails=email@company.com
-!xdr-remove-user-role user_emails=email@company.com
\ No newline at end of file
+!xdr-remove-user-role user_emails=email@company.com
+!xdr-update-alert alert_ids=35326 severity=low
\ No newline at end of file
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.yml
index a4a3a16e3179..2355bf59657e 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.yml
@@ -536,6 +536,9 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ nexttasks:
+ '#none#':
+ - "45"
"31":
id: "31"
taskid: f515a7d6-8aef-4727-804b-bc26cee421a3
@@ -1083,7 +1086,7 @@ inputs:
playbookInputQuery:
- key: query
value:
- simple: reputation:Bad and (type:File or type:Domain or type:IP) and expirationStatus:active and -tags:xdr_pushed and -tags:xdr_not_processed
+ simple: reputation:Bad and (type:File or type:Domain or type:IP) and expirationStatus:active and (-tags:xdr_pushed or -tags:xdr_not_processed)
required: true
description: ""
playbookInputQuery:
diff --git a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.yml b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.yml
index 9219cf8e9573..9793148d0d4f 100644
--- a/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.yml
+++ b/Packs/CortexXDR/Playbooks/playbook-Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.yml
@@ -75,10 +75,10 @@ tasks:
isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: ffb7a6fc-6010-4c1f-864f-d3b3a9b09683
+ taskid: 9611b261-2f8b-4f1c-8cf1-81cc7b182753
type: title
task:
- id: ffb7a6fc-6010-4c1f-864f-d3b3a9b09683
+ id: 9611b261-2f8b-4f1c-8cf1-81cc7b182753
version: -1
name: Done
type: title
@@ -91,7 +91,7 @@ tasks:
{
"position": {
"x": 50,
- "y": 895
+ "y": 1070
}
}
note: false
@@ -206,7 +206,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "5"
+ - "10"
scriptarguments:
batch_size:
simple: ${inputs.batch_size}
@@ -242,12 +242,44 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: d6d51ca0-e8cd-4942-82a9-8a5ae377e13e
+ type: regular
+ task:
+ id: d6d51ca0-e8cd-4942-82a9-8a5ae377e13e
+ version: -1
+ name: Close Incident
+ description: commands.local.cmd.close.inv
+ script: Builtin|||closeInvestigation
+ type: regular
+ iscommand: true
+ brand: Builtin
+ nexttasks:
+ '#none#':
+ - "5"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 895
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 910,
+ "height": 1085,
"width": 380,
"x": 50,
"y": 50
@@ -269,6 +301,8 @@ inputs:
playbookInputQuery:
outputs: []
tests:
-- Cortex XDR - IOC - Test
-- Cortex XDR - IOC - Test without fetch
+- Test XDR Playbook general commands
+- Test XDR Playbook
fromversion: 6.10.0
+contentitemexportablefields:
+ contentitemfields: {}
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_37.md b/Packs/CortexXDR/ReleaseNotes/6_1_37.md
new file mode 100644
index 000000000000..bf453de61b87
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_37.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Added functionality to the outgoing mirroring flow that updates the alerts of an incident when the incident is closed.
+- Added the ***xdr-update-alert*** command.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_38.md b/Packs/CortexXDR/ReleaseNotes/6_1_38.md
new file mode 100644
index 000000000000..8d6e09b05cf6
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_38.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Cortex XDR IOCs - Push new IOCs to XDR
+
+- Fixed an issue in the playbook which caused it to not push SHA256 hashes into XDR.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_39.md b/Packs/CortexXDR/ReleaseNotes/6_1_39.md
new file mode 100644
index 000000000000..e9f7283a8374
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_39.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue where incident artifacts were not populated on new incidents when bringing them into XSOAR.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_40.md b/Packs/CortexXDR/ReleaseNotes/6_1_40.md
new file mode 100644
index 000000000000..fdae2a316d3a
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_40.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Cortex XDR IOCs - Push new IOCs to XDR (Main)
+
+- Updated the main playbook to close the incident when it is done.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_41.md b/Packs/CortexXDR/ReleaseNotes/6_1_41.md
new file mode 100644
index 000000000000..1f33597f5e62
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_41.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Improved implementation of the authorization process.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_42.md b/Packs/CortexXDR/ReleaseNotes/6_1_42.md
new file mode 100644
index 000000000000..20215e9513b8
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_42.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue where the *Minimize Incident Information* parameter would fail the integration, when its value was not provided.
diff --git a/Packs/CortexXDR/ReleaseNotes/6_1_43.md b/Packs/CortexXDR/ReleaseNotes/6_1_43.md
new file mode 100644
index 000000000000..9681e9acd416
--- /dev/null
+++ b/Packs/CortexXDR/ReleaseNotes/6_1_43.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Palo Alto Networks Cortex XDR - Investigation and Response
+
+- Fixed an issue in mirroring where updating the incident status in XDR would incorrectly set the status to "resolved_other" if the incident was resolved.
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
diff --git a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Retrieve_File_by_sha256.yml b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Retrieve_File_by_sha256.yml
index 13f4a7592688..96a68a05312c 100644
--- a/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Retrieve_File_by_sha256.yml
+++ b/Packs/CortexXDR/TestPlaybooks/Test_Playbook_-_Cortex_XDR_-_Retrieve_File_by_sha256.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: ce53f9de-f436-41e7-8503-462a2f1d335b
+ taskid: 020f0bf8-c79c-4ea3-820c-06de6cae0845
type: start
task:
- id: ce53f9de-f436-41e7-8503-462a2f1d335b
+ id: 020f0bf8-c79c-4ea3-820c-06de6cae0845
version: -1
name: ""
iscommand: false
@@ -24,7 +24,7 @@ tasks:
{
"position": {
"x": 630,
- "y": -2645
+ "y": -2485
}
}
note: false
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 6eebd48a-4a31-4a46-8f8e-9861faacf644
+ taskid: 9b2e82aa-0559-4552-81ec-ae62920cdfe3
type: regular
task:
- id: 6eebd48a-4a31-4a46-8f8e-9861faacf644
+ id: 9b2e82aa-0559-4552-81ec-ae62920cdfe3
version: -1
name: Delete Context
description: The task deletes all of the context data. Having a clean beginning to a test playbook ensures that a test can be sterile and that unrelated issues can be eliminated.
@@ -59,44 +59,7 @@ tasks:
{
"position": {
"x": 630,
- "y": -2515
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "34":
- id: "34"
- taskid: 77b797ae-9fc2-459c-87fb-7b5103279004
- type: regular
- task:
- id: 77b797ae-9fc2-459c-87fb-7b5103279004
- version: -1
- name: Investigate the incident
- description: Begin investigating the incident.
- scriptName: InvestigateMine
- type: regular
- iscommand: false
- brand: Builtin
- nexttasks:
- '#none#':
- - "255"
- scriptarguments:
- id:
- complex:
- root: CreatedIncidentID
- separatecontext: false
- continueonerror: true
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 630,
- "y": -1705
+ "y": -2355
}
}
note: false
@@ -108,10 +71,10 @@ tasks:
isautoswitchedtoquietmode: false
"47":
id: "47"
- taskid: 4a7878c1-f96b-4590-8ecc-21e5f398a2fb
+ taskid: 55855a22-ef28-4f22-8103-e2989573b8fe
type: regular
task:
- id: 4a7878c1-f96b-4590-8ecc-21e5f398a2fb
+ id: 55855a22-ef28-4f22-8103-e2989573b8fe
version: -1
name: Wait for incident to be created
description: 'This task delays the playbook flow progress for 15 seconds to ensure that the incident has been created before moving on. '
@@ -131,7 +94,7 @@ tasks:
{
"position": {
"x": 630,
- "y": -2190
+ "y": -2030
}
}
note: false
@@ -143,10 +106,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 2e4e3fde-110c-4ce4-8f40-00180e9ff00e
+ taskid: eb61a4e6-23d2-4e52-826f-d54c8ac8d221
type: title
task:
- id: 2e4e3fde-110c-4ce4-8f40-00180e9ff00e
+ id: eb61a4e6-23d2-4e52-826f-d54c8ac8d221
version: -1
name: Start Tests
type: title
@@ -175,10 +138,10 @@ tasks:
isautoswitchedtoquietmode: false
"93":
id: "93"
- taskid: 0139aa5b-acf0-42df-83d4-a5da73d9f4e6
+ taskid: c77b400a-7925-4574-85a6-d099fdf7c607
type: regular
task:
- id: 0139aa5b-acf0-42df-83d4-a5da73d9f4e6
+ id: c77b400a-7925-4574-85a6-d099fdf7c607
version: -1
name: Set playbook for new incident
description: Assign the 'Cortex XDR - Retrieve File by sha256' playbook to the newly created test incident.
@@ -188,7 +151,7 @@ tasks:
brand: Builtin
nexttasks:
'#none#':
- - "34"
+ - "255"
scriptarguments:
incidentId:
complex:
@@ -201,7 +164,7 @@ tasks:
{
"position": {
"x": 630,
- "y": -1870
+ "y": -1710
}
}
note: false
@@ -213,10 +176,10 @@ tasks:
isautoswitchedtoquietmode: false
"154":
id: "154"
- taskid: 4ad68108-f4dd-485e-8e37-e97732468f8e
+ taskid: 7a2b7916-813e-41cb-8762-b2ba32f3a54a
type: regular
task:
- id: 4ad68108-f4dd-485e-8e37-e97732468f8e
+ id: 7a2b7916-813e-41cb-8762-b2ba32f3a54a
version: -1
name: Create New Incident For Testing
description: 'As part of this task, an incident will be created for testing purposes, which includes all the incident fields that are essential to the testing process. '
@@ -229,9 +192,7 @@ tasks:
- "47"
scriptarguments:
deviceid:
- simple: AEEC6A2CC92E46FAB3B6F621722E9916
- filesha256:
- simple: 07dc0e82e198278b8b7179b01dd69f6203fb128b20484edbcdbaa19ec2ee09f0
+ simple: aeec6a2cc92e46fab3b6f621722e9916
name:
simple: Test Incident for Cortex XDR - Retrieve File by sha256
type:
@@ -242,7 +203,7 @@ tasks:
{
"position": {
"x": 630,
- "y": -2350
+ "y": -2190
}
}
note: false
@@ -254,10 +215,10 @@ tasks:
isautoswitchedtoquietmode: false
"186":
id: "186"
- taskid: 1d8ec91a-8cdd-46de-861d-9a7fefb4003f
+ taskid: bb0f6643-331d-40d5-881d-0d08a6892b31
type: regular
task:
- id: 1d8ec91a-8cdd-46de-861d-9a7fefb4003f
+ id: bb0f6643-331d-40d5-881d-0d08a6892b31
version: -1
name: Set 'fileRetrieval.path' To Context
description: For testing purposes, the task establishes the context data key and value for 'fileRetrieval.path'.
@@ -275,14 +236,14 @@ tasks:
key:
simple: fileRetrieval.path
value:
- simple: C:\Program Files (x86)\Google\Update\GoogleUpdate.exe
+ simple: C:\Windows\System32\cmd.exe
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 630,
- "y": -2030
+ "y": -1870
}
}
note: false
@@ -294,10 +255,10 @@ tasks:
isautoswitchedtoquietmode: false
"202":
id: "202"
- taskid: 8fcb70fe-56c8-472a-8624-63495efdd2f5
+ taskid: 41ac3299-52da-4a4e-8282-a59d639ff7bb
type: title
task:
- id: 8fcb70fe-56c8-472a-8624-63495efdd2f5
+ id: 41ac3299-52da-4a4e-8282-a59d639ff7bb
version: -1
name: Done
type: title
@@ -322,10 +283,10 @@ tasks:
isautoswitchedtoquietmode: false
"203":
id: "203"
- taskid: df53a1ac-e083-4838-8b68-bd25c012c290
+ taskid: 7cd40262-ef0e-4168-8c15-b8f3c6702a0c
type: regular
task:
- id: df53a1ac-e083-4838-8b68-bd25c012c290
+ id: 7cd40262-ef0e-4168-8c15-b8f3c6702a0c
version: -1
name: Wait for playbook to end running
description: Wait until the final task (Done) has been completed before starting tests.
@@ -347,7 +308,7 @@ tasks:
max_timeout:
simple: "300"
task_name:
- simple: Done
+ simple: UnZip File
task_states:
simple: Completed
separatecontext: false
@@ -368,10 +329,10 @@ tasks:
isautoswitchedtoquietmode: false
"208":
id: "208"
- taskid: f6c3e3bf-cc8f-4c40-8527-79691ccdf68b
+ taskid: b8f8cac7-6a8b-4b83-8fe9-064cb76e6fd3
type: regular
task:
- id: f6c3e3bf-cc8f-4c40-8527-79691ccdf68b
+ id: b8f8cac7-6a8b-4b83-8fe9-064cb76e6fd3
version: -1
name: Close Incident
description: commands.local.cmd.close.inv
@@ -406,10 +367,10 @@ tasks:
isautoswitchedtoquietmode: false
"209":
id: "209"
- taskid: 64667197-346e-441d-8fce-ae2ec8ddd6cd
+ taskid: 45244763-8b4d-48b0-8ff9-4c7cff1c809f
type: regular
task:
- id: 64667197-346e-441d-8fce-ae2ec8ddd6cd
+ id: 45244763-8b4d-48b0-8ff9-4c7cff1c809f
version: -1
name: Get the final 'PaloAltoNetworksXDR.RetrievedFiles' context
description: Gets a value from the specified incident's context.
@@ -444,10 +405,10 @@ tasks:
isautoswitchedtoquietmode: false
"210":
id: "210"
- taskid: ff8700d6-1226-42d4-88af-2ec85e8aeaab
+ taskid: e399a617-44ca-4fee-87c6-d87915ce9bdb
type: title
task:
- id: ff8700d6-1226-42d4-88af-2ec85e8aeaab
+ id: e399a617-44ca-4fee-87c6-d87915ce9bdb
version: -1
name: Check Context Data
type: title
@@ -477,10 +438,10 @@ tasks:
isautoswitchedtoquietmode: false
"211":
id: "211"
- taskid: f5079029-9735-4864-8bba-9378d7e76095
+ taskid: 2b1accfb-b65d-4303-8eed-4c52d66db33f
type: title
task:
- id: f5079029-9735-4864-8bba-9378d7e76095
+ id: 2b1accfb-b65d-4303-8eed-4c52d66db33f
version: -1
name: '''PaloAltoNetworksXDR.RetrievedFiles'' Context Data'
type: title
@@ -511,10 +472,10 @@ tasks:
isautoswitchedtoquietmode: false
"212":
id: "212"
- taskid: 7faa1f41-3b80-4d95-8e12-d5002336a5f1
+ taskid: 6ac5c5de-b8b8-449f-8af5-8d72ce3f7c11
type: title
task:
- id: 7faa1f41-3b80-4d95-8e12-d5002336a5f1
+ id: 6ac5c5de-b8b8-449f-8af5-8d72ce3f7c11
version: -1
name: '''ExtractedFiles'' Context Data'
type: title
@@ -542,10 +503,10 @@ tasks:
isautoswitchedtoquietmode: false
"213":
id: "213"
- taskid: 032a1e2f-8407-441d-8ccf-b7ce0a7a9d0c
+ taskid: bf187743-ebb2-49c6-81d6-216973d50e0d
type: condition
task:
- id: 032a1e2f-8407-441d-8ccf-b7ce0a7a9d0c
+ id: bf187743-ebb2-49c6-81d6-216973d50e0d
version: -1
name: Verify Action ID
description: Verify that the 'PaloAltoNetworksXDR.RetrievedFiles.action_id’ context key was populated.
@@ -585,10 +546,10 @@ tasks:
isautoswitchedtoquietmode: false
"214":
id: "214"
- taskid: e2cb419f-c071-4744-87b8-0dcde585313d
+ taskid: 52569f77-bb44-4fcb-81b7-c23347b4ea95
type: regular
task:
- id: e2cb419f-c071-4744-87b8-0dcde585313d
+ id: 52569f77-bb44-4fcb-81b7-c23347b4ea95
version: -1
name: Verify Context Error - Action ID
description: Prints an error entry with a given message
@@ -622,10 +583,10 @@ tasks:
isautoswitchedtoquietmode: false
"215":
id: "215"
- taskid: 5338615b-c371-4fa3-877e-f35ba273ddfb
+ taskid: f6d9e38b-146e-45e9-847f-25335f07c24b
type: title
task:
- id: 5338615b-c371-4fa3-877e-f35ba273ddfb
+ id: f6d9e38b-146e-45e9-847f-25335f07c24b
version: -1
name: Done verifying 'PaloAltoNetworksXDR.RetrievedFiles'
type: title
@@ -653,10 +614,10 @@ tasks:
isautoswitchedtoquietmode: false
"216":
id: "216"
- taskid: 5b7cf209-b8fd-45d3-8f8a-2e8d0102d8c4
+ taskid: 94f6111a-31fb-4aba-8dcb-6dfa5ac93e3f
type: condition
task:
- id: 5b7cf209-b8fd-45d3-8f8a-2e8d0102d8c4
+ id: 94f6111a-31fb-4aba-8dcb-6dfa5ac93e3f
version: -1
name: Verify Endpoint ID
description: Verify that the 'PaloAltoNetworksXDR.RetrievedFiles.endpoint_id’ context key was populated.
@@ -700,10 +661,10 @@ tasks:
isautoswitchedtoquietmode: false
"217":
id: "217"
- taskid: b8670579-fd1a-4fc1-8a86-3aad0abad1da
+ taskid: 4faf0e79-4450-4662-82ab-2b7f9e5bb971
type: condition
task:
- id: b8670579-fd1a-4fc1-8a86-3aad0abad1da
+ id: 4faf0e79-4450-4662-82ab-2b7f9e5bb971
version: -1
name: Verify File Link
description: Verify that the 'PaloAltoNetworksXDR.RetrievedFiles.file_link’ context key was populated.
@@ -743,10 +704,10 @@ tasks:
isautoswitchedtoquietmode: false
"218":
id: "218"
- taskid: 8bc14983-536c-4a24-83c3-85aa8dfa14c7
+ taskid: 64e64690-47cd-44f9-8fbc-fac6fb87a416
type: regular
task:
- id: 8bc14983-536c-4a24-83c3-85aa8dfa14c7
+ id: 64e64690-47cd-44f9-8fbc-fac6fb87a416
version: -1
name: Verify Context Error - Endpoint ID
description: Prints an error entry with a given message
@@ -778,10 +739,10 @@ tasks:
isautoswitchedtoquietmode: false
"219":
id: "219"
- taskid: 43033fec-4761-4a24-8d21-dc8e5d548f22
+ taskid: 92cb8f41-63db-4f64-8b5d-132c196b4a51
type: regular
task:
- id: 43033fec-4761-4a24-8d21-dc8e5d548f22
+ id: 92cb8f41-63db-4f64-8b5d-132c196b4a51
version: -1
name: Verify Context Error - File Link
description: Prints an error entry with a given message
@@ -813,10 +774,10 @@ tasks:
isautoswitchedtoquietmode: false
"220":
id: "220"
- taskid: 9b56ab3b-cd65-45f7-8a03-9f39e48921f6
+ taskid: 930e2525-3ee6-41ef-8935-4c2d8d4ee671
type: condition
task:
- id: 9b56ab3b-cd65-45f7-8a03-9f39e48921f6
+ id: 930e2525-3ee6-41ef-8935-4c2d8d4ee671
version: -1
name: Verify Status
description: Verify that the 'PaloAltoNetworksXDR.RetrievedFiles.status’ context key was populated.
@@ -856,10 +817,10 @@ tasks:
isautoswitchedtoquietmode: false
"221":
id: "221"
- taskid: 3ba0f366-6ee0-45e4-8767-fcfc0ae8e3cf
+ taskid: 7f59fc43-f701-4325-868a-06ab184a2108
type: regular
task:
- id: 3ba0f366-6ee0-45e4-8767-fcfc0ae8e3cf
+ id: 7f59fc43-f701-4325-868a-06ab184a2108
version: -1
name: Verify Context Error - Status
description: Prints an error entry with a given message
@@ -891,10 +852,10 @@ tasks:
isautoswitchedtoquietmode: false
"222":
id: "222"
- taskid: 9a5f192e-0afd-4e15-8508-23b50e3bea8d
+ taskid: 46360281-1db4-4a05-833b-c043e16b97ea
type: regular
task:
- id: 9a5f192e-0afd-4e15-8508-23b50e3bea8d
+ id: 46360281-1db4-4a05-833b-c043e16b97ea
version: -1
name: Get the final 'ExtractedFiles' context
description: Gets a value from the specified incident's context.
@@ -929,10 +890,10 @@ tasks:
isautoswitchedtoquietmode: false
"223":
id: "223"
- taskid: b5882dd5-3f6a-41a0-8e4a-8feae35426c4
+ taskid: 3adfdec4-6c2b-4efb-8116-601e38ca6d82
type: condition
task:
- id: b5882dd5-3f6a-41a0-8e4a-8feae35426c4
+ id: 3adfdec4-6c2b-4efb-8116-601e38ca6d82
version: -1
name: Verify Extracted Files
description: 'Verify that the ''ExtractedFiles'' context key has been extracted correctly and contains both the ''manifest.json'' and ''GoogleUpdate.exe'' files. '
@@ -948,25 +909,12 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: ExtractedFiles
iscontext: true
- right:
- value:
- simple: manifest.json
- ignorecase: true
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: ExtractedFiles
- iscontext: true
- right:
- value:
- simple: GoogleUpdate.exe
ignorecase: true
continueonerrortype: ""
view: |-
@@ -985,10 +933,10 @@ tasks:
isautoswitchedtoquietmode: false
"224":
id: "224"
- taskid: d8eef0a4-6c52-40d3-8089-713c83370d66
+ taskid: c92af586-0569-4454-8715-a01d497b80cf
type: regular
task:
- id: d8eef0a4-6c52-40d3-8089-713c83370d66
+ id: c92af586-0569-4454-8715-a01d497b80cf
version: -1
name: Verify Context Error - Extracted Files
description: Prints an error entry with a given message
@@ -1020,10 +968,10 @@ tasks:
isautoswitchedtoquietmode: false
"225":
id: "225"
- taskid: bab61319-43dc-479e-80f9-d12c495dea72
+ taskid: e3fb4dc5-57fe-446b-8a91-4c83a31d229b
type: title
task:
- id: bab61319-43dc-479e-80f9-d12c495dea72
+ id: e3fb4dc5-57fe-446b-8a91-4c83a31d229b
version: -1
name: Done verifying 'ExtractedFiles'
type: title
@@ -1051,10 +999,10 @@ tasks:
isautoswitchedtoquietmode: false
"226":
id: "226"
- taskid: 1971b1bc-dbfc-4343-890e-dc6fe7cb2e7e
+ taskid: d1b7a79b-380f-4a0d-8a8a-32d3ff87bdc2
type: regular
task:
- id: 1971b1bc-dbfc-4343-890e-dc6fe7cb2e7e
+ id: d1b7a79b-380f-4a0d-8a8a-32d3ff87bdc2
version: -1
name: Get the final 'File' context
description: Gets a value from the specified incident's context.
@@ -1089,10 +1037,10 @@ tasks:
isautoswitchedtoquietmode: false
"227":
id: "227"
- taskid: daf2e7a4-5fbd-49ef-8f82-ebb677d8cf49
+ taskid: 30d25830-dfd0-4b6a-8072-e631bb2b2f1d
type: title
task:
- id: daf2e7a4-5fbd-49ef-8f82-ebb677d8cf49
+ id: 30d25830-dfd0-4b6a-8072-e631bb2b2f1d
version: -1
name: '''File'' Context Data'
type: title
@@ -1129,10 +1077,10 @@ tasks:
isautoswitchedtoquietmode: false
"228":
id: "228"
- taskid: 4095e83c-ee55-4c69-809c-16fb8aff4cb7
+ taskid: 78fcc8f0-8e62-4695-80d3-cd6ddbb51d82
type: condition
task:
- id: 4095e83c-ee55-4c69-809c-16fb8aff4cb7
+ id: 78fcc8f0-8e62-4695-80d3-cd6ddbb51d82
version: -1
name: Verify Name
description: Verify that the 'File.Name' context key has been extracted correctly and contains both ZIP and Unzipped file names.
@@ -1148,38 +1096,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: Name
- iscontext: true
- right:
- value:
- simple: manifest.json
- ignorecase: true
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: Name
- iscontext: true
- right:
- value:
- simple: GoogleUpdate.exe
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: Name
iscontext: true
- right:
- value:
- simple: aeec6a2cc92e46fab3b6f621722e9916_1.zip
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1198,10 +1121,10 @@ tasks:
isautoswitchedtoquietmode: false
"229":
id: "229"
- taskid: 1606851c-a915-48d8-879e-ef644da5d63a
+ taskid: d87c4ed8-11fc-452f-8afa-421cd1e2ef31
type: regular
task:
- id: 1606851c-a915-48d8-879e-ef644da5d63a
+ id: d87c4ed8-11fc-452f-8afa-421cd1e2ef31
version: -1
name: Verify Context Error - Name
description: Prints an error entry with a given message
@@ -1234,10 +1157,10 @@ tasks:
isautoswitchedtoquietmode: false
"230":
id: "230"
- taskid: ef68f2ac-98d7-4b6b-8ef8-b36b5ff71b79
+ taskid: 9bef53f1-5217-4ec4-887d-2ecb633f9b7a
type: title
task:
- id: ef68f2ac-98d7-4b6b-8ef8-b36b5ff71b79
+ id: 9bef53f1-5217-4ec4-887d-2ecb633f9b7a
version: -1
name: Done verifying 'File'
type: title
@@ -1265,10 +1188,10 @@ tasks:
isautoswitchedtoquietmode: false
"231":
id: "231"
- taskid: d430d3e9-260b-4150-871e-b016f3d75d51
+ taskid: 0aae456b-24b9-49c1-8430-da495527d61e
type: condition
task:
- id: d430d3e9-260b-4150-871e-b016f3d75d51
+ id: 0aae456b-24b9-49c1-8430-da495527d61e
version: -1
name: Verify Entry ID
description: Verify that the 'File.EntryID' context key has been extracted.
@@ -1291,6 +1214,8 @@ tasks:
root: File
accessor: EntryID
iscontext: true
+ right:
+ value: {}
continueonerrortype: ""
view: |-
{
@@ -1308,10 +1233,10 @@ tasks:
isautoswitchedtoquietmode: false
"232":
id: "232"
- taskid: f573c8c4-da2d-423e-8334-67a072625a71
+ taskid: 963c5ffa-f550-45de-88c0-2614fe3f6220
type: regular
task:
- id: f573c8c4-da2d-423e-8334-67a072625a71
+ id: 963c5ffa-f550-45de-88c0-2614fe3f6220
version: -1
name: Verify Context Error - Entry ID
description: Prints an error entry with a given message
@@ -1343,13 +1268,13 @@ tasks:
isautoswitchedtoquietmode: false
"233":
id: "233"
- taskid: 0f6794c7-153a-4561-8f67-1e1c8275f03e
+ taskid: 184ac37d-2d3f-4ba0-81fe-c4cd9cf4fcfa
type: condition
task:
- id: 0f6794c7-153a-4561-8f67-1e1c8275f03e
+ id: 184ac37d-2d3f-4ba0-81fe-c4cd9cf4fcfa
version: -1
name: Verify Size
- description: Verify that the 'File.SHA512' context key has been extracted correctly and contains both ZIP and Unzipped file sizes.
+ description: Verify that the 'File.Size' context key has been extracted correctly.
type: condition
iscommand: false
brand: ""
@@ -1362,36 +1287,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsString
- left:
- value:
- complex:
- root: File
- accessor: Size
- iscontext: true
- right:
- value:
- simple: "61498"
- - - operator: containsString
- left:
- value:
- complex:
- root: File
- accessor: Size
- iscontext: true
- right:
- value:
- simple: "375"
- - - operator: containsString
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: Size
iscontext: true
- right:
- value:
- simple: "156104"
continueonerrortype: ""
view: |-
{
@@ -1409,10 +1311,10 @@ tasks:
isautoswitchedtoquietmode: false
"234":
id: "234"
- taskid: 6803c2cb-40b7-4cf6-809a-4df8ca4dab6a
+ taskid: 8bda98d3-1360-4996-8e25-b349004d2f1e
type: regular
task:
- id: 6803c2cb-40b7-4cf6-809a-4df8ca4dab6a
+ id: 8bda98d3-1360-4996-8e25-b349004d2f1e
version: -1
name: Verify Context Error - Size
description: Prints an error entry with a given message
@@ -1445,13 +1347,13 @@ tasks:
isautoswitchedtoquietmode: false
"235":
id: "235"
- taskid: e8bd497b-7be3-43a8-88fd-e4b90aca90d6
+ taskid: 050b250c-2075-456f-8b50-fc8f826fe93e
type: condition
task:
- id: e8bd497b-7be3-43a8-88fd-e4b90aca90d6
+ id: 050b250c-2075-456f-8b50-fc8f826fe93e
version: -1
name: Verify MD5
- description: Verify that the 'File.MD5' context key has been extracted correctly and contains both the 'manifest.json' and 'GoogleUpdate.exe' MD5 hashes.
+ description: Verify that the 'File.MD5' context key has been extracted correctly.
type: condition
iscommand: false
brand: ""
@@ -1464,27 +1366,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: MD5
- iscontext: true
- right:
- value:
- simple: 8e48ba5777f034731b64130624dd9947
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: MD5
iscontext: true
- right:
- value:
- simple: 0bca3f16dd527b4150648ec1e36cb22a
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1503,10 +1391,10 @@ tasks:
isautoswitchedtoquietmode: false
"236":
id: "236"
- taskid: 54c9d168-7dd2-4c30-8b45-e40163444ca0
+ taskid: c03492b2-3761-46fc-8968-604da9446e53
type: regular
task:
- id: 54c9d168-7dd2-4c30-8b45-e40163444ca0
+ id: c03492b2-3761-46fc-8968-604da9446e53
version: -1
name: Verify Context Error - MD5
description: Prints an error entry with a given message
@@ -1539,13 +1427,13 @@ tasks:
isautoswitchedtoquietmode: false
"237":
id: "237"
- taskid: 441c199b-c897-4d05-8a7c-8f6e7b650d76
+ taskid: 2f737c65-9418-4bf8-81e4-48ec47b81891
type: condition
task:
- id: 441c199b-c897-4d05-8a7c-8f6e7b650d76
+ id: 2f737c65-9418-4bf8-81e4-48ec47b81891
version: -1
name: Verify SHA1
- description: Verify that the 'File.SHA1' context key has been extracted correctly and contains both the 'manifest.json' and 'GoogleUpdate.exe' SHA1 hashes.
+ description: Verify that the 'File.SHA1' context key has been extracted correctly.
type: condition
iscommand: false
brand: ""
@@ -1558,27 +1446,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: SHA1
- iscontext: true
- right:
- value:
- simple: 9a60695fbbf4d3fb0e0de7e43d0db5a0b92f5e5d
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: SHA1
iscontext: true
- right:
- value:
- simple: 842ae39880c3c0bc501007b42949950c3d3b7ed3
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1597,10 +1471,10 @@ tasks:
isautoswitchedtoquietmode: false
"238":
id: "238"
- taskid: 64c25643-8bbb-4bcf-8220-4a20117e2653
+ taskid: d155d341-5db5-4eaa-88fd-ce81a2c7d920
type: regular
task:
- id: 64c25643-8bbb-4bcf-8220-4a20117e2653
+ id: d155d341-5db5-4eaa-88fd-ce81a2c7d920
version: -1
name: Verify Context Error - SHA1
description: Prints an error entry with a given message
@@ -1633,13 +1507,13 @@ tasks:
isautoswitchedtoquietmode: false
"239":
id: "239"
- taskid: 2010ae09-6b67-4e42-8954-157de9cec522
+ taskid: 7d46c1a3-c10e-4cbf-87f4-f8befb8fcf0a
type: condition
task:
- id: 2010ae09-6b67-4e42-8954-157de9cec522
+ id: 7d46c1a3-c10e-4cbf-87f4-f8befb8fcf0a
version: -1
name: Verify SHA256
- description: 'Verify that the ''File.SHA256'' context key has been extracted correctly and contains both the ''manifest.json'' and ''GoogleUpdate.exe'' SHA256 hashes. '
+ description: 'Verify that the ''File.SHA256'' context key has been extracted correctly.'
type: condition
iscommand: false
brand: ""
@@ -1652,27 +1526,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: SHA256
- iscontext: true
- right:
- value:
- simple: c85883dd5723b7bc384a972a3999684644e0664d86e8c7cad996f91568978294
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: SHA256
iscontext: true
- right:
- value:
- simple: b60e92004d394d0b14a8953a2ba29951c79f2f8a6c94f495e3153dfbbef115b6
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1691,10 +1551,10 @@ tasks:
isautoswitchedtoquietmode: false
"240":
id: "240"
- taskid: a3947698-3c78-40bd-8832-bc9d7fc37a3c
+ taskid: 1c0b320a-ab2a-445c-81f4-5e453016cb42
type: regular
task:
- id: a3947698-3c78-40bd-8832-bc9d7fc37a3c
+ id: 1c0b320a-ab2a-445c-81f4-5e453016cb42
version: -1
name: Verify Context Error - SHA256
description: Prints an error entry with a given message
@@ -1727,13 +1587,13 @@ tasks:
isautoswitchedtoquietmode: false
"241":
id: "241"
- taskid: d0552d71-39fe-4dd7-80b6-6e4fdffb803a
+ taskid: b31b23f7-ed41-4501-8224-b3ad12f5bd75
type: condition
task:
- id: d0552d71-39fe-4dd7-80b6-6e4fdffb803a
+ id: b31b23f7-ed41-4501-8224-b3ad12f5bd75
version: -1
name: Verify SHA512
- description: 'Verify that the ''File.SHA512'' context key has been extracted correctly and contains both the ''manifest.json'' and ''GoogleUpdate.exe'' SHA512 hashes. '
+ description: 'Verify that the ''File.SHA512'' context key has been extracted correctly.'
type: condition
iscommand: false
brand: ""
@@ -1746,27 +1606,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: SHA512
- iscontext: true
- right:
- value:
- simple: 27cd3f16016be1f1d8a62f83d4924ae3d81cecc66b51397849cdf999b276d012a832b30b717c7f9afb799aa21323204296b41378650d140130c7e6abfab42faf
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: SHA512
iscontext: true
- right:
- value:
- simple: 516e1c9313aaf1d49223a3c06677bdbe5e4f9df392c12696a9eeb086634cf60c42a9c330e2d1095f1e6fdd1f16d2a6a13c9d28110155469159f0959897dff164
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1785,10 +1631,10 @@ tasks:
isautoswitchedtoquietmode: false
"242":
id: "242"
- taskid: 785710d2-164a-41c5-838c-556fa2880701
+ taskid: b9a61ffb-4c09-4a51-88d0-f6a22388733e
type: regular
task:
- id: 785710d2-164a-41c5-838c-556fa2880701
+ id: b9a61ffb-4c09-4a51-88d0-f6a22388733e
version: -1
name: Verify Context Error - SHA512
description: Prints an error entry with a given message
@@ -1821,10 +1667,10 @@ tasks:
isautoswitchedtoquietmode: false
"243":
id: "243"
- taskid: e195de38-755d-41ee-83d3-2c7345b5de31
+ taskid: 3ff8befb-c82a-4995-88ed-58b2d57dead7
type: condition
task:
- id: e195de38-755d-41ee-83d3-2c7345b5de31
+ id: 3ff8befb-c82a-4995-88ed-58b2d57dead7
version: -1
name: Verify Extension
description: Verify that the 'File.Extension' context key has been extracted correctly and contains both ZIP and Unzipped file extensions.
@@ -1840,38 +1686,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: Extension
- iscontext: true
- right:
- value:
- simple: json
- ignorecase: true
- - - operator: containsGeneral
- left:
- value:
- complex:
- root: File
- accessor: Extension
- iscontext: true
- right:
- value:
- simple: exe
- ignorecase: true
- - - operator: containsGeneral
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: Extension
iscontext: true
- right:
- value:
- simple: zip
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1890,10 +1711,10 @@ tasks:
isautoswitchedtoquietmode: false
"244":
id: "244"
- taskid: 1d05334f-9882-4948-8580-a81690ece9f1
+ taskid: 30a20efa-e938-4727-84d1-c37eedaf941c
type: regular
task:
- id: 1d05334f-9882-4948-8580-a81690ece9f1
+ id: 30a20efa-e938-4727-84d1-c37eedaf941c
version: -1
name: Verify Context Error - Extension
description: Prints an error entry with a given message
@@ -1926,10 +1747,10 @@ tasks:
isautoswitchedtoquietmode: false
"245":
id: "245"
- taskid: 7523f595-44c7-4188-8294-baf5b52ccb06
+ taskid: 31ac2ecd-05f1-436e-8f05-4d29be7a9708
type: condition
task:
- id: 7523f595-44c7-4188-8294-baf5b52ccb06
+ id: 31ac2ecd-05f1-436e-8f05-4d29be7a9708
version: -1
name: Verify Type
description: Verify that the 'File.Type' context key has been extracted correctly and contains both ZIP and Unzipped file types.
@@ -1945,38 +1766,13 @@ tasks:
conditions:
- label: Verified
condition:
- - - operator: containsString
- left:
- value:
- complex:
- root: File
- accessor: Type
- iscontext: true
- right:
- value:
- simple: ASCII text, with very long lines, with no line terminators
- ignorecase: true
- - - operator: containsString
- left:
- value:
- complex:
- root: File
- accessor: Type
- iscontext: true
- right:
- value:
- simple: PE32 executable (GUI) Intel 80386, for MS Windows
- ignorecase: true
- - - operator: containsString
+ - - operator: isNotEmpty
left:
value:
complex:
root: File
accessor: Type
iscontext: true
- right:
- value:
- simple: Zip archive data, at least v2.0 to extract
ignorecase: true
continueonerrortype: ""
view: |-
@@ -1995,10 +1791,10 @@ tasks:
isautoswitchedtoquietmode: false
"246":
id: "246"
- taskid: 6f0a29f3-5b10-412b-8447-c6026353de81
+ taskid: b11c9ff5-32f2-44d6-8a69-6edfa78d1324
type: regular
task:
- id: 6f0a29f3-5b10-412b-8447-c6026353de81
+ id: b11c9ff5-32f2-44d6-8a69-6edfa78d1324
version: -1
name: Verify Context Error - Type
description: Prints an error entry with a given message
@@ -2031,10 +1827,10 @@ tasks:
isautoswitchedtoquietmode: false
"247":
id: "247"
- taskid: 9f04c615-3dcc-46ff-8cf8-8af3ae9562bf
+ taskid: 20dd1a40-8ce4-4ef5-8638-dbf13d98adc9
type: condition
task:
- id: 9f04c615-3dcc-46ff-8cf8-8af3ae9562bf
+ id: 20dd1a40-8ce4-4ef5-8638-dbf13d98adc9
version: -1
name: Verify Unzipped
description: Verify that the 'File.Unzipped' context key has been extracted correctly.
@@ -2054,10 +1850,17 @@ tasks:
left:
value:
complex:
- root: File
- accessor: Unzipped
+ root: File.Unzipped
+ filters:
+ - - operator: isExists
+ left:
+ value:
+ simple: File.Unzipped
+ iscontext: true
iscontext: true
ignorecase: true
+ right:
+ value: {}
continueonerrortype: ""
view: |-
{
@@ -2075,10 +1878,10 @@ tasks:
isautoswitchedtoquietmode: false
"248":
id: "248"
- taskid: 55aa8d27-e4ab-458e-8e78-8ef9760766dc
+ taskid: 194eca0d-db95-4a49-8df3-1af3db8b28b1
type: regular
task:
- id: 55aa8d27-e4ab-458e-8e78-8ef9760766dc
+ id: 194eca0d-db95-4a49-8df3-1af3db8b28b1
version: -1
name: Verify Context Error - Unzipped
description: Prints an error entry with a given message
@@ -2110,10 +1913,10 @@ tasks:
isautoswitchedtoquietmode: false
"249":
id: "249"
- taskid: 6fa7f817-ed88-4b18-8466-62ee68cc8910
+ taskid: 5c991b59-35fe-47c3-89e5-ffe545a977e4
type: title
task:
- id: 6fa7f817-ed88-4b18-8466-62ee68cc8910
+ id: 5c991b59-35fe-47c3-89e5-ffe545a977e4
version: -1
name: Test 'Print Error'
type: title
@@ -2141,10 +1944,10 @@ tasks:
isautoswitchedtoquietmode: false
"253":
id: "253"
- taskid: 32c45d72-3dba-48df-8c46-6616e12a8bff
+ taskid: f1078863-f1a8-433b-8c63-0cd4935c8cc8
type: title
task:
- id: 32c45d72-3dba-48df-8c46-6616e12a8bff
+ id: f1078863-f1a8-433b-8c63-0cd4935c8cc8
version: -1
name: Done Verifying 'Print Error'
type: title
@@ -2172,10 +1975,10 @@ tasks:
isautoswitchedtoquietmode: false
"254":
id: "254"
- taskid: 825bdb4c-809a-4dd9-82f0-a5692643b91b
+ taskid: 1beacfc3-0810-4f44-8ca1-634becfdb194
type: regular
task:
- id: 825bdb4c-809a-4dd9-82f0-a5692643b91b
+ id: 1beacfc3-0810-4f44-8ca1-634becfdb194
version: -1
name: Verify Print Error Message Was Failed
description: Prints an error entry with a given message
@@ -2207,10 +2010,10 @@ tasks:
isautoswitchedtoquietmode: false
"255":
id: "255"
- taskid: 74dfd773-e388-4d5f-8bc7-c950211fe641
+ taskid: eca4f1a2-8c82-45a4-8b85-135acb6a4666
type: regular
task:
- id: 74dfd773-e388-4d5f-8bc7-c950211fe641
+ id: eca4f1a2-8c82-45a4-8b85-135acb6a4666
version: -1
name: Wait for the files to be retrieved from XDR
description: Ensure that 'Cortex XDR - Retrieve File' is complete before closing the incident and starting tests.
@@ -2230,7 +2033,7 @@ tasks:
interval_between_tries:
simple: "10"
max_timeout:
- simple: "300"
+ simple: "600"
task_name:
simple: Cortex XDR - Retrieve File
task_states:
@@ -2253,10 +2056,10 @@ tasks:
isautoswitchedtoquietmode: false
"260":
id: "260"
- taskid: cbb62e6a-b4cb-45e8-8021-01d4f04a5730
+ taskid: 4f04fd07-54e6-4c4d-834a-c88b568ebc5b
type: regular
task:
- id: cbb62e6a-b4cb-45e8-8021-01d4f04a5730
+ id: 4f04fd07-54e6-4c4d-834a-c88b568ebc5b
version: -1
name: Get Error Entries
description: Get the error(s) associated with a given entry/entries. Use ${lastCompletedTaskEntries} to check the previous task entries. The automation will return an array of the error contents from those entries.
@@ -2288,10 +2091,10 @@ tasks:
isautoswitchedtoquietmode: false
"261":
id: "261"
- taskid: 953ce5ff-4231-4327-80b3-ee31abd8a847
+ taskid: 2f68a3e2-2b70-49e5-8a4d-21fbea92d283
type: condition
task:
- id: 953ce5ff-4231-4327-80b3-ee31abd8a847
+ id: 2f68a3e2-2b70-49e5-8a4d-21fbea92d283
version: -1
name: Verify Error Message
description: Ensure that an error message was printed for the 'Print Error - No path was found' task.
@@ -2334,10 +2137,10 @@ tasks:
isautoswitchedtoquietmode: false
"262":
id: "262"
- taskid: 6ea99bb9-8304-4490-81b0-c4ce89db9e03
+ taskid: 0f0d48ee-cfa5-4dc5-8a7f-d3e69b9b183a
type: playbook
task:
- id: 6ea99bb9-8304-4490-81b0-c4ce89db9e03
+ id: 0f0d48ee-cfa5-4dc5-8a7f-d3e69b9b183a
version: -1
name: Cortex XDR - Retrieve File by sha256
description: |-
@@ -2400,10 +2203,10 @@ view: |-
},
"paper": {
"dimensions": {
- "height": 2640,
+ "height": 2480,
"width": 8390,
"x": -4580,
- "y": -2645
+ "y": -2485
}
}
}
diff --git a/Packs/CortexXDR/TestPlaybooks/Test_XDR_Playbook_general_commands.yml b/Packs/CortexXDR/TestPlaybooks/Test_XDR_Playbook_general_commands.yml
index 89cf1a903d8f..554fde1859a4 100644
--- a/Packs/CortexXDR/TestPlaybooks/Test_XDR_Playbook_general_commands.yml
+++ b/Packs/CortexXDR/TestPlaybooks/Test_XDR_Playbook_general_commands.yml
@@ -5,10 +5,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 4f305e10-332d-474f-8308-8ede620adbb1
+ taskid: 6b6dd065-9258-41ee-8879-0e8eb4195f18
type: start
task:
- id: 4f305e10-332d-474f-8308-8ede620adbb1
+ id: 6b6dd065-9258-41ee-8879-0e8eb4195f18
version: -1
name: ""
iscommand: false
@@ -35,10 +35,10 @@ tasks:
isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: b405f98a-a3aa-45bd-8fde-0875f159781a
+ taskid: 5108558d-b71f-4a60-827e-168013add520
type: regular
task:
- id: b405f98a-a3aa-45bd-8fde-0875f159781a
+ id: 5108558d-b71f-4a60-827e-168013add520
version: -1
name: DeleteContext
description: DeleteContext
@@ -70,10 +70,10 @@ tasks:
isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: 94d74732-7d47-40ad-8915-1ef09d9a8687
+ taskid: 71bf8d33-657b-4b99-83ea-9c0192283a4b
type: regular
task:
- id: 94d74732-7d47-40ad-8915-1ef09d9a8687
+ id: 71bf8d33-657b-4b99-83ea-9c0192283a4b
version: -1
name: xdr-get-incidents
description: xdr-get-incidents
@@ -86,7 +86,7 @@ tasks:
- "3"
scriptarguments:
gte_creation_time:
- simple: "2010-10-10T00:00:00"
+ simple: 2010-10-10T00:00:00
limit:
simple: "3"
sort_by_creation_time:
@@ -109,10 +109,10 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 7848136a-5584-4c62-8b3b-486f1e3ced25
+ taskid: eec405e8-2d75-4afd-8de7-9acaa60595e9
type: condition
task:
- id: 7848136a-5584-4c62-8b3b-486f1e3ced25
+ id: eec405e8-2d75-4afd-8de7-9acaa60595e9
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -163,10 +163,10 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 911f2f05-ecb9-4eee-8692-3707fcce645d
+ taskid: 522421a4-3989-4654-8890-2d239a731154
type: regular
task:
- id: 911f2f05-ecb9-4eee-8692-3707fcce645d
+ id: 522421a4-3989-4654-8890-2d239a731154
version: -1
name: xdr-get-incident-extra-data
description: xdr-get-incident-extra-data
@@ -185,7 +185,7 @@ tasks:
view: |-
{
"position": {
- "x": 265,
+ "x": 275,
"y": 1945
}
}
@@ -198,10 +198,10 @@ tasks:
isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: 50475d49-6ae9-4c28-8027-6880ccfd068b
+ taskid: d66f5998-ec44-48bf-8129-1ae09fc7db90
type: condition
task:
- id: 50475d49-6ae9-4c28-8027-6880ccfd068b
+ id: d66f5998-ec44-48bf-8129-1ae09fc7db90
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -247,10 +247,10 @@ tasks:
isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: adc16957-9f31-43e1-87ae-f1a09bf42574
+ taskid: 62bc7f21-5016-4061-8310-82c8dda337d8
type: regular
task:
- id: adc16957-9f31-43e1-87ae-f1a09bf42574
+ id: 62bc7f21-5016-4061-8310-82c8dda337d8
version: -1
name: xdr-update-incident
description: xdr-update-incident
@@ -290,10 +290,10 @@ tasks:
isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: 3d8e1bcb-3038-48d0-89c5-a66fa6d27e64
+ taskid: 09a7f6c5-ea7c-442f-8638-964f2e955ece
type: regular
task:
- id: 3d8e1bcb-3038-48d0-89c5-a66fa6d27e64
+ id: 09a7f6c5-ea7c-442f-8638-964f2e955ece
version: -1
name: Save incident id
description: Save incident id
@@ -331,10 +331,10 @@ tasks:
isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: abab169d-d8f5-4df4-8843-4fa319360784
+ taskid: f2eccf4e-ab50-4bfc-80f8-154552618493
type: regular
task:
- id: abab169d-d8f5-4df4-8843-4fa319360784
+ id: f2eccf4e-ab50-4bfc-80f8-154552618493
version: -1
name: DeleteContext
description: DeleteContext
@@ -368,10 +368,10 @@ tasks:
isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: 220e4ba2-2658-4065-8ed6-93ac1e0ffb10
+ taskid: 74df4dbb-cd5e-46fe-8770-04b198cb7ddc
type: regular
task:
- id: 220e4ba2-2658-4065-8ed6-93ac1e0ffb10
+ id: 74df4dbb-cd5e-46fe-8770-04b198cb7ddc
version: -1
name: xdr-get-incidents
description: xdr-get-incidents
@@ -405,10 +405,10 @@ tasks:
isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: c00c7a9a-55e6-49e6-8203-a875ed7cd77e
+ taskid: a09c3bf5-c008-417b-8690-615229a1131d
type: condition
task:
- id: c00c7a9a-55e6-49e6-8203-a875ed7cd77e
+ id: a09c3bf5-c008-417b-8690-615229a1131d
version: -1
name: Verify updated values
description: Verify updated values
@@ -417,7 +417,7 @@ tasks:
brand: ""
nexttasks:
"yes":
- - "11"
+ - "87"
separatecontext: false
conditions:
- label: "yes"
@@ -455,10 +455,10 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: 9cfc97cf-9e72-4aa6-8868-888575a3cca7
+ taskid: 23ae9dd6-4d6c-4c77-8444-19c054b27452
type: regular
task:
- id: 9cfc97cf-9e72-4aa6-8868-888575a3cca7
+ id: 23ae9dd6-4d6c-4c77-8444-19c054b27452
version: -1
name: xdr-update-incident
description: xdr-update-incident
@@ -482,7 +482,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 3170
+ "y": 4395
}
}
note: false
@@ -494,10 +494,10 @@ tasks:
isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: 1b68a0fe-560c-42e0-80f9-61f321d396ca
+ taskid: fb7fff95-e0d8-429d-8cf6-7a7ad5e45d0c
type: regular
task:
- id: 1b68a0fe-560c-42e0-80f9-61f321d396ca
+ id: fb7fff95-e0d8-429d-8cf6-7a7ad5e45d0c
version: -1
name: xdr-insert-parsed-alert
description: xdr-insert-parsed-alert
@@ -533,7 +533,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 3345
+ "y": 4570
}
}
note: false
@@ -545,10 +545,10 @@ tasks:
isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: 9e271e37-5f54-4994-84fc-2c2227c7080b
+ taskid: d443eeab-24c6-4f48-8fef-b2991c53fb7c
type: regular
task:
- id: 9e271e37-5f54-4994-84fc-2c2227c7080b
+ id: d443eeab-24c6-4f48-8fef-b2991c53fb7c
version: -1
name: Set 2 CEF alerts to context
description: Set 2 CEF alerts to context
@@ -576,7 +576,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 3520
+ "y": 4745
}
}
note: false
@@ -588,10 +588,10 @@ tasks:
isautoswitchedtoquietmode: false
"14":
id: "14"
- taskid: 89a83b9d-88cf-4af0-8a00-494ace221802
+ taskid: 6b783bde-8fbf-40d8-8e5c-b8139fc2565f
type: regular
task:
- id: 89a83b9d-88cf-4af0-8a00-494ace221802
+ id: 6b783bde-8fbf-40d8-8e5c-b8139fc2565f
version: -1
name: xdr-insert-cef-alerts
description: xdr-insert-cef-alerts
@@ -611,7 +611,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 3695
+ "y": 4920
}
}
note: false
@@ -623,13 +623,16 @@ tasks:
isautoswitchedtoquietmode: false
"15":
id: "15"
- taskid: 60f68df9-f41d-4737-88ff-bfaa46cecad6
+ taskid: 3b287735-4f70-4c5b-8571-49cc1830613d
type: regular
task:
- id: 60f68df9-f41d-4737-88ff-bfaa46cecad6
+ id: 3b287735-4f70-4c5b-8571-49cc1830613d
version: -1
name: Get Endpoint aeec6a2cc92e46fab3b6f621722e9916
- description: Gets a list of endpoints, according to the passed filters. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ description: Gets a list of endpoints, according to the passed filters. Filtering
+ by multiple fields will be concatenated using AND condition (OR is not supported).
+ Maximum result set size is 100. Offset is the zero-based number of endpoint
+ from the start of the result set (start by counting from 0).
script: '|||xdr-get-endpoints'
type: regular
iscommand: true
@@ -646,7 +649,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 3870
+ "y": 5095
}
}
note: false
@@ -658,10 +661,10 @@ tasks:
isautoswitchedtoquietmode: false
"16":
id: "16"
- taskid: 1a2593ca-ab0f-4f1a-8eff-b0cf90b2dd67
+ taskid: 23977136-2167-4018-8ffa-b0f46e91afc5
type: condition
task:
- id: 1a2593ca-ab0f-4f1a-8eff-b0cf90b2dd67
+ id: 23977136-2167-4018-8ffa-b0f46e91afc5
version: -1
name: To isolate ?
description: To isolate
@@ -690,7 +693,7 @@ tasks:
{
"position": {
"x": 265,
- "y": 4045
+ "y": 5270
}
}
note: false
@@ -702,10 +705,10 @@ tasks:
isautoswitchedtoquietmode: false
"17":
id: "17"
- taskid: 53f3e4f3-8ed8-4044-8e5b-a1258f2a919b
+ taskid: ccedb2be-e05d-42da-84d4-e4752e5da885
type: condition
task:
- id: 53f3e4f3-8ed8-4044-8e5b-a1258f2a919b
+ id: ccedb2be-e05d-42da-84d4-e4752e5da885
version: -1
name: To unisolate?
description: To unisolate
@@ -734,7 +737,7 @@ tasks:
{
"position": {
"x": 50,
- "y": 4220
+ "y": 5445
}
}
note: false
@@ -746,10 +749,10 @@ tasks:
isautoswitchedtoquietmode: false
"18":
id: "18"
- taskid: 01230f3a-584c-4678-8b23-2fa1be3b9ef6
+ taskid: 3699ee8d-54d5-40cf-88bb-4b2d6ad2bcf3
type: regular
task:
- id: 01230f3a-584c-4678-8b23-2fa1be3b9ef6
+ id: 3699ee8d-54d5-40cf-88bb-4b2d6ad2bcf3
version: -1
name: xdr-endpoint-unisolate
description: Reverses the isolation of an endpoint.
@@ -769,7 +772,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 4395
+ "y": 5620
}
}
note: false
@@ -781,10 +784,10 @@ tasks:
isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: 56f0fa6e-53cd-4787-8ce6-0bbe2bb12f61
+ taskid: fc2906cf-19ae-42e9-8d27-1dfe4fffdd53
type: regular
task:
- id: 56f0fa6e-53cd-4787-8ce6-0bbe2bb12f61
+ id: fc2906cf-19ae-42e9-8d27-1dfe4fffdd53
version: -1
name: xdr-endpoint-isolate
description: Isolates the specified endpoint.
@@ -804,7 +807,7 @@ tasks:
{
"position": {
"x": 592.5,
- "y": 4395
+ "y": 5620
}
}
note: false
@@ -816,10 +819,10 @@ tasks:
isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 84196c73-6624-4c3e-8f75-d5d66967e06e
+ taskid: bf07f6d6-04c7-4082-86b6-8a942f0f9ece
type: regular
task:
- id: 84196c73-6624-4c3e-8f75-d5d66967e06e
+ id: bf07f6d6-04c7-4082-86b6-8a942f0f9ece
version: -1
name: xdr-get-endpoints
description: xdr-get-endpoints
@@ -839,7 +842,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 4715
+ "y": 5940
}
}
note: false
@@ -851,10 +854,10 @@ tasks:
isautoswitchedtoquietmode: false
"21":
id: "21"
- taskid: 3725a85d-77c7-4ccd-8b0a-44a74297631b
+ taskid: d568f86e-2813-48a4-8398-d91e0a434065
type: condition
task:
- id: 3725a85d-77c7-4ccd-8b0a-44a74297631b
+ id: d568f86e-2813-48a4-8398-d91e0a434065
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -891,7 +894,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 4890
+ "y": 6115
}
}
note: false
@@ -903,10 +906,10 @@ tasks:
isautoswitchedtoquietmode: false
"22":
id: "22"
- taskid: f30d0a24-1dba-40f4-865f-711d9ccb8aa1
+ taskid: 870959ce-0a58-414d-8b36-124d9b43c77d
type: title
task:
- id: f30d0a24-1dba-40f4-865f-711d9ccb8aa1
+ id: 870959ce-0a58-414d-8b36-124d9b43c77d
version: -1
name: Isolation done
description: Isolation done
@@ -922,7 +925,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 4570
+ "y": 5795
}
}
note: false
@@ -934,23 +937,22 @@ tasks:
isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: ea905c62-4b2a-4709-8c3c-51ad06725585
+ taskid: 6e92a149-0f85-4769-874f-593380ee4f64
type: title
task:
- id: ea905c62-4b2a-4709-8c3c-51ad06725585
+ id: 6e92a149-0f85-4769-874f-593380ee4f64
version: -1
name: Success!
type: title
iscommand: false
brand: ""
- description: ''
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 162.5,
- "y": 12240
+ "y": 13465
}
}
note: false
@@ -962,10 +964,10 @@ tasks:
isautoswitchedtoquietmode: false
"30":
id: "30"
- taskid: fa09a9de-c11e-40a9-8a36-4157cc5041e5
+ taskid: f2af6e97-182c-4589-8ad9-6a476f745211
type: condition
task:
- id: fa09a9de-c11e-40a9-8a36-4157cc5041e5
+ id: f2af6e97-182c-4589-8ad9-6a476f745211
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -989,7 +991,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9090
+ "y": 10315
}
}
note: false
@@ -1001,10 +1003,10 @@ tasks:
isautoswitchedtoquietmode: false
"31":
id: "31"
- taskid: 7d9f28b4-2162-4a12-8364-c40d6ca14ff0
+ taskid: 642e2638-de55-4c61-8e6a-0a8bb99bd34f
type: regular
task:
- id: 7d9f28b4-2162-4a12-8364-c40d6ca14ff0
+ id: 642e2638-de55-4c61-8e6a-0a8bb99bd34f
version: -1
name: xdr-get-script-code
description: Gets the code of a specific script in the script library.
@@ -1028,7 +1030,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8915
+ "y": 10140
}
}
note: false
@@ -1040,10 +1042,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: c10af7a8-ffa1-4d96-840a-6ec3272fe035
+ taskid: b9bde45d-be6d-4fa4-86a1-48d5915ef35d
type: condition
task:
- id: c10af7a8-ffa1-4d96-840a-6ec3272fe035
+ id: b9bde45d-be6d-4fa4-86a1-48d5915ef35d
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1067,7 +1069,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8740
+ "y": 9965
}
}
note: false
@@ -1079,10 +1081,10 @@ tasks:
isautoswitchedtoquietmode: false
"33":
id: "33"
- taskid: e5554fc2-56ed-4cee-8861-9cfc570b7ac9
+ taskid: d5897c53-fd2f-4556-8f3a-9b4a6e24db74
type: regular
task:
- id: e5554fc2-56ed-4cee-8861-9cfc570b7ac9
+ id: d5897c53-fd2f-4556-8f3a-9b4a6e24db74
version: -1
name: xdr-get-script-metadata
description: Gets the full definition of a specific script in the scripts library.
@@ -1106,7 +1108,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8565
+ "y": 9790
}
}
note: false
@@ -1118,10 +1120,10 @@ tasks:
isautoswitchedtoquietmode: false
"34":
id: "34"
- taskid: 8766fc54-11c0-499a-8915-3735ad3d32ac
+ taskid: 1bccedaf-bd37-4552-86a3-1a45a962c4d5
type: condition
task:
- id: 8766fc54-11c0-499a-8915-3735ad3d32ac
+ id: 1bccedaf-bd37-4552-86a3-1a45a962c4d5
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1149,7 +1151,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8390
+ "y": 9615
}
}
note: false
@@ -1161,10 +1163,10 @@ tasks:
isautoswitchedtoquietmode: false
"35":
id: "35"
- taskid: 4ead1dd5-1b24-472a-8e88-65bf63fc9d49
+ taskid: 1aad15f5-7175-4676-8ac7-55dce7333298
type: regular
task:
- id: 4ead1dd5-1b24-472a-8e88-65bf63fc9d49
+ id: 1aad15f5-7175-4676-8ac7-55dce7333298
version: -1
name: xdr-get-scripts
description: Gets a list of scripts available in the scripts library.
@@ -1181,7 +1183,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8215
+ "y": 9440
}
}
note: false
@@ -1193,10 +1195,10 @@ tasks:
isautoswitchedtoquietmode: false
"36":
id: "36"
- taskid: 712cfcdc-8941-43ca-84e8-abbfbbf6f908
+ taskid: 17d2a985-6762-46f2-8dd3-572625549c8d
type: condition
task:
- id: 712cfcdc-8941-43ca-84e8-abbfbbf6f908
+ id: 17d2a985-6762-46f2-8dd3-572625549c8d
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1220,7 +1222,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 8040
+ "y": 9265
}
}
note: false
@@ -1232,13 +1234,14 @@ tasks:
isautoswitchedtoquietmode: false
"37":
id: "37"
- taskid: b8eac6df-bd74-46f8-8932-1da6d69ba27f
+ taskid: 4fe38232-6e2c-4444-8ea0-56be9592fc28
type: regular
task:
- id: b8eac6df-bd74-46f8-8932-1da6d69ba27f
+ id: 4fe38232-6e2c-4444-8ea0-56be9592fc28
version: -1
name: xdr-get-endpoint-device-control-violations
- description: Gets a list of device control violations filtered by selected fields. You can retrieve up to 100 violations.
+ description: Gets a list of device control violations filtered by selected fields.
+ You can retrieve up to 100 violations.
script: '|||xdr-get-endpoint-device-control-violations'
type: regular
iscommand: true
@@ -1252,7 +1255,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 7865
+ "y": 9090
}
}
note: false
@@ -1264,10 +1267,10 @@ tasks:
isautoswitchedtoquietmode: false
"38":
id: "38"
- taskid: 867ec5eb-bba0-400b-8a1d-9b245f59366a
+ taskid: d8dc6348-fd6a-45f6-8b1f-7901bccaf3d1
type: condition
task:
- id: 867ec5eb-bba0-400b-8a1d-9b245f59366a
+ id: d8dc6348-fd6a-45f6-8b1f-7901bccaf3d1
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1291,7 +1294,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 7690
+ "y": 8915
}
}
note: false
@@ -1303,10 +1306,10 @@ tasks:
isautoswitchedtoquietmode: false
"39":
id: "39"
- taskid: 570e471c-34a2-4acc-85ec-f69fa9e24c08
+ taskid: baead857-3397-45e8-8805-ee7685bf7e96
type: regular
task:
- id: 570e471c-34a2-4acc-85ec-f69fa9e24c08
+ id: baead857-3397-45e8-8805-ee7685bf7e96
version: -1
name: xdr-get-policy
description: Gets the policy name for a specific endpoint.
@@ -1326,7 +1329,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 7515
+ "y": 8740
}
}
note: false
@@ -1338,10 +1341,10 @@ tasks:
isautoswitchedtoquietmode: false
"41":
id: "41"
- taskid: e93e57c0-9568-4f24-89ac-7048d123faa7
+ taskid: f4613475-beda-4486-8d6a-474d9d8fdd29
type: condition
task:
- id: e93e57c0-9568-4f24-89ac-7048d123faa7
+ id: f4613475-beda-4486-8d6a-474d9d8fdd29
version: -1
name: Check the number of endpoints in context
description: Check the number of endpoints in context
@@ -1372,7 +1375,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 7340
+ "y": 8565
}
}
note: false
@@ -1384,13 +1387,17 @@ tasks:
isautoswitchedtoquietmode: false
"42":
id: "42"
- taskid: 0c3149cb-1676-4dff-803f-43aeef74f1ff
+ taskid: d88f8f39-f06b-47d5-8a58-f73565b0ebe0
type: regular
task:
- id: 0c3149cb-1676-4dff-803f-43aeef74f1ff
+ id: d88f8f39-f06b-47d5-8a58-f73565b0ebe0
version: -1
name: xdr-get-endpoints
- description: Gets a list of endpoints, according to the passed filters. If there are no filters, all endpoints are returned. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ description: Gets a list of endpoints, according to the passed filters. If there
+ are no filters, all endpoints are returned. Filtering by multiple fields will
+ be concatenated using AND condition (OR is not supported). Maximum result
+ set size is 100. Offset is the zero-based number of endpoint from the start
+ of the result set (start by counting from 0).
script: '|||xdr-get-endpoints'
type: regular
iscommand: true
@@ -1404,7 +1411,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 7165
+ "y": 8390
}
}
note: false
@@ -1416,10 +1423,10 @@ tasks:
isautoswitchedtoquietmode: false
"43":
id: "43"
- taskid: 6d94f0c2-bf2a-4537-8e31-c7c763724b30
+ taskid: 4d9a6288-6da6-45be-854c-5749303309c6
type: condition
task:
- id: 6d94f0c2-bf2a-4537-8e31-c7c763724b30
+ id: 4d9a6288-6da6-45be-854c-5749303309c6
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1453,7 +1460,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6990
+ "y": 8215
}
}
note: false
@@ -1465,10 +1472,10 @@ tasks:
isautoswitchedtoquietmode: false
"44":
id: "44"
- taskid: 6befa70f-59de-4115-8228-e0c211cdc2db
+ taskid: 3a555b81-ef0a-401e-805d-905a8076a5ee
type: regular
task:
- id: 6befa70f-59de-4115-8228-e0c211cdc2db
+ id: 3a555b81-ef0a-401e-805d-905a8076a5ee
version: -1
name: xdr-get-audit-agent-reports
description: xdr-get-audit-agent-reports
@@ -1498,7 +1505,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6815
+ "y": 8040
}
}
note: false
@@ -1510,10 +1517,10 @@ tasks:
isautoswitchedtoquietmode: false
"45":
id: "45"
- taskid: 37e494cc-9fa5-4be0-80f8-510b45346781
+ taskid: e3bf8e0c-9fdd-47b9-8a74-d87f39d0b92d
type: condition
task:
- id: 37e494cc-9fa5-4be0-80f8-510b45346781
+ id: e3bf8e0c-9fdd-47b9-8a74-d87f39d0b92d
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1545,7 +1552,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6640
+ "y": 7865
}
}
note: false
@@ -1557,10 +1564,10 @@ tasks:
isautoswitchedtoquietmode: false
"46":
id: "46"
- taskid: c86ed33e-3dc1-4f5d-83bf-4502171ff024
+ taskid: 326020c9-142a-4c74-8785-cabe41c8a9ff
type: regular
task:
- id: c86ed33e-3dc1-4f5d-83bf-4502171ff024
+ id: 326020c9-142a-4c74-8785-cabe41c8a9ff
version: -1
name: xdr-get-audit-management-logs
description: xdr-get-audit-management-logs
@@ -1588,7 +1595,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6465
+ "y": 7690
}
}
note: false
@@ -1600,10 +1607,10 @@ tasks:
isautoswitchedtoquietmode: false
"47":
id: "47"
- taskid: bb077d2d-ce22-43f6-82eb-66c880f3fcba
+ taskid: bf00e823-710c-436f-8a68-1fce0e194758
type: condition
task:
- id: bb077d2d-ce22-43f6-82eb-66c880f3fcba
+ id: bf00e823-710c-436f-8a68-1fce0e194758
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1627,7 +1634,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6290
+ "y": 7515
}
}
note: false
@@ -1639,10 +1646,10 @@ tasks:
isautoswitchedtoquietmode: false
"48":
id: "48"
- taskid: 95b2b5f0-4433-4f90-8efc-8c80cbcfd4f9
+ taskid: 9e77874f-cc77-4e31-8896-804c7b2389fd
type: regular
task:
- id: 95b2b5f0-4433-4f90-8efc-8c80cbcfd4f9
+ id: 9e77874f-cc77-4e31-8896-804c7b2389fd
version: -1
name: xdr-get-distribution-url
description: xdr-get-distribution-url
@@ -1664,7 +1671,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 6115
+ "y": 7340
}
}
note: false
@@ -1676,10 +1683,10 @@ tasks:
isautoswitchedtoquietmode: false
"49":
id: "49"
- taskid: 65261983-1e66-4d93-8c2b-20b041c62d85
+ taskid: 6404abee-7f26-41fd-861a-1391da20325a
type: condition
task:
- id: 65261983-1e66-4d93-8c2b-20b041c62d85
+ id: 6404abee-7f26-41fd-861a-1391da20325a
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1714,7 +1721,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5940
+ "y": 7165
}
}
note: false
@@ -1726,10 +1733,10 @@ tasks:
isautoswitchedtoquietmode: false
"50":
id: "50"
- taskid: 0dd9537d-1f0d-47ab-8a81-8e3e79b76135
+ taskid: 141c24c3-6bc7-4ff0-8b7a-60c513efa489
type: regular
task:
- id: 0dd9537d-1f0d-47ab-8a81-8e3e79b76135
+ id: 141c24c3-6bc7-4ff0-8b7a-60c513efa489
version: -1
name: xdr-get-create-distribution-status
description: xdr-get-create-distribution-status
@@ -1749,7 +1756,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5765
+ "y": 6990
}
}
note: false
@@ -1761,10 +1768,10 @@ tasks:
isautoswitchedtoquietmode: false
"51":
id: "51"
- taskid: 8c311634-3aeb-4410-8557-abdd47311e9c
+ taskid: 968c522a-9b69-4e7f-8cda-db725d0b9b33
type: condition
task:
- id: 8c311634-3aeb-4410-8557-abdd47311e9c
+ id: 968c522a-9b69-4e7f-8cda-db725d0b9b33
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1793,7 +1800,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5590
+ "y": 6815
}
}
note: false
@@ -1805,10 +1812,10 @@ tasks:
isautoswitchedtoquietmode: false
"52":
id: "52"
- taskid: 5348d7a2-d351-4ceb-8fd0-c902ba568c43
+ taskid: 77733b73-69a2-41d6-8820-4fa6c7d7f1ff
type: regular
task:
- id: 5348d7a2-d351-4ceb-8fd0-c902ba568c43
+ id: 77733b73-69a2-41d6-8820-4fa6c7d7f1ff
version: -1
name: xdr-create-distribution
description: xdr-create-distribution
@@ -1840,7 +1847,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5415
+ "y": 6640
}
}
note: false
@@ -1852,10 +1859,10 @@ tasks:
isautoswitchedtoquietmode: false
"53":
id: "53"
- taskid: cb4bef5d-63b1-4eb2-80c2-16921a448e76
+ taskid: 1d017770-3feb-4ebc-8580-0b8f6a86cc1d
type: condition
task:
- id: cb4bef5d-63b1-4eb2-80c2-16921a448e76
+ id: 1d017770-3feb-4ebc-8580-0b8f6a86cc1d
version: -1
name: Verify Outputs
description: Verify Outputs
@@ -1884,7 +1891,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5240
+ "y": 6465
}
}
note: false
@@ -1896,10 +1903,10 @@ tasks:
isautoswitchedtoquietmode: false
"54":
id: "54"
- taskid: c0313e74-2be3-4c32-8dfe-1b4a35c1eae1
+ taskid: 29a4690e-e984-4a8d-825e-befa52744113
type: regular
task:
- id: c0313e74-2be3-4c32-8dfe-1b4a35c1eae1
+ id: 29a4690e-e984-4a8d-825e-befa52744113
version: -1
name: xdr-get-distribution-versions
description: xdr-get-distribution-versions
@@ -1916,7 +1923,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 5065
+ "y": 6290
}
}
note: false
@@ -1928,10 +1935,10 @@ tasks:
isautoswitchedtoquietmode: false
"55":
id: "55"
- taskid: 3faa8121-3caf-479d-8bf7-8f995bafcd4f
+ taskid: f9734f16-c236-43dd-8f58-fa64409095e6
type: regular
task:
- id: 3faa8121-3caf-479d-8bf7-8f995bafcd4f
+ id: f9734f16-c236-43dd-8f58-fa64409095e6
version: -1
name: xdr-get-contributing-event
description: Retrieves contributing events for a specific alert.
@@ -1967,10 +1974,10 @@ tasks:
isautoswitchedtoquietmode: false
"56":
id: "56"
- taskid: 056fac58-c561-4350-8b51-b97e8ec26b4e
+ taskid: 8f09ec9a-589c-4043-8772-c791ff01136b
type: condition
task:
- id: 056fac58-c561-4350-8b51-b97e8ec26b4e
+ id: 8f09ec9a-589c-4043-8772-c791ff01136b
version: -1
name: Verify Outputs
type: condition
@@ -2014,13 +2021,14 @@ tasks:
isautoswitchedtoquietmode: false
"57":
id: "57"
- taskid: 130a85df-ba4c-4ffc-81c6-96e195b060c8
+ taskid: 3eaa72ae-4de8-458a-8276-9a4002305439
type: regular
task:
- id: 130a85df-ba4c-4ffc-81c6-96e195b060c8
+ id: 3eaa72ae-4de8-458a-8276-9a4002305439
version: -1
name: xdr-replace-featured-field
- description: Replace the featured hosts\users\ips\active-directory_groups listed in your environment.
+ description: Replace the featured hosts\users\ips\active-directory_groups listed
+ in your environment.
script: '|||xdr-replace-featured-field'
type: regular
iscommand: true
@@ -2053,10 +2061,10 @@ tasks:
isautoswitchedtoquietmode: false
"58":
id: "58"
- taskid: 1abd739a-5124-482d-8eec-b516420223ac
+ taskid: 5619520f-b792-471c-8489-297ba0ad731c
type: condition
task:
- id: 1abd739a-5124-482d-8eec-b516420223ac
+ id: 5619520f-b792-471c-8489-297ba0ad731c
version: -1
name: Verify Outputs
type: condition
@@ -2106,10 +2114,10 @@ tasks:
isautoswitchedtoquietmode: false
"59":
id: "59"
- taskid: e857b035-996c-458a-8a02-58c72d8f2d46
+ taskid: 733be7d7-2b56-4fc8-8537-e78594161e4c
type: regular
task:
- id: e857b035-996c-458a-8a02-58c72d8f2d46
+ id: 733be7d7-2b56-4fc8-8537-e78594161e4c
version: -1
name: DeleteContext
description: DeleteContext
@@ -2141,13 +2149,17 @@ tasks:
isautoswitchedtoquietmode: false
"60":
id: "60"
- taskid: e3d7421b-ed0b-4673-8f19-62655a30fd90
+ taskid: 799f44a9-2a02-49a5-87d6-f82780418b1d
type: regular
task:
- id: e3d7421b-ed0b-4673-8f19-62655a30fd90
+ id: 799f44a9-2a02-49a5-87d6-f82780418b1d
version: -1
name: Get endpoint
- description: Gets a list of endpoints, according to the passed filters. If there are no filters, all endpoints are returned. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ description: Gets a list of endpoints, according to the passed filters. If there
+ are no filters, all endpoints are returned. Filtering by multiple fields will
+ be concatenated using AND condition (OR is not supported). Maximum result
+ set size is 100. Offset is the zero-based number of endpoint from the start
+ of the result set (start by counting from 0).
script: '|||xdr-get-endpoints'
type: regular
iscommand: true
@@ -2164,7 +2176,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9440
+ "y": 10665
}
}
note: false
@@ -2176,10 +2188,10 @@ tasks:
isautoswitchedtoquietmode: false
"61":
id: "61"
- taskid: 8b23f7d1-60ab-4cb2-8117-83d83c0ef9c6
+ taskid: 7d0e3ccd-5b57-48eb-8809-2052f2a3d21f
type: regular
task:
- id: 8b23f7d1-60ab-4cb2-8117-83d83c0ef9c6
+ id: 7d0e3ccd-5b57-48eb-8809-2052f2a3d21f
version: -1
name: Save original alias name
description: Set a value in context under the key you entered.
@@ -2212,7 +2224,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9615
+ "y": 10840
}
}
note: false
@@ -2224,10 +2236,10 @@ tasks:
isautoswitchedtoquietmode: false
"62":
id: "62"
- taskid: bbf3a8f6-a015-46a8-8047-f86e869f7f34
+ taskid: 60e3aee4-85f7-4a81-806f-b758c90eaad7
type: regular
task:
- id: bbf3a8f6-a015-46a8-8047-f86e869f7f34
+ id: 60e3aee4-85f7-4a81-806f-b758c90eaad7
version: -1
name: Delete context
description: |-
@@ -2252,7 +2264,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9265
+ "y": 10490
}
}
note: false
@@ -2264,13 +2276,15 @@ tasks:
isautoswitchedtoquietmode: false
"63":
id: "63"
- taskid: 678fff69-0790-494a-8fbc-719a342197b4
+ taskid: e14234aa-ad15-4252-8bb7-8ba29881c9ef
type: regular
task:
- id: 678fff69-0790-494a-8fbc-719a342197b4
+ id: e14234aa-ad15-4252-8bb7-8ba29881c9ef
version: -1
name: Change alias name
- description: Gets a list of endpoints according to the passed filters, and changes their alias name. Filtering by multiple fields will be concatenated using the AND condition (OR is not supported).
+ description: Gets a list of endpoints according to the passed filters, and changes
+ their alias name. Filtering by multiple fields will be concatenated using
+ the AND condition (OR is not supported).
script: '|||xdr-endpoint-alias-change'
type: regular
iscommand: true
@@ -2289,7 +2303,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9790
+ "y": 11015
}
}
note: false
@@ -2301,13 +2315,17 @@ tasks:
isautoswitchedtoquietmode: false
"64":
id: "64"
- taskid: 7eebb593-574d-4a26-8f16-705a906bac52
+ taskid: 8e94876f-cac9-4c36-89d6-dab7ac6f0649
type: condition
task:
- id: 7eebb593-574d-4a26-8f16-705a906bac52
+ id: 8e94876f-cac9-4c36-89d6-dab7ac6f0649
version: -1
name: Verify the name was changed
- description: Gets a list of endpoints, according to the passed filters. If there are no filters, all endpoints are returned. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ description: Gets a list of endpoints, according to the passed filters. If there
+ are no filters, all endpoints are returned. Filtering by multiple fields will
+ be concatenated using AND condition (OR is not supported). Maximum result
+ set size is 100. Offset is the zero-based number of endpoint from the start
+ of the result set (start by counting from 0).
type: condition
iscommand: false
brand: ""
@@ -2342,7 +2360,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 10140
+ "y": 11365
}
}
note: false
@@ -2354,13 +2372,17 @@ tasks:
isautoswitchedtoquietmode: false
"65":
id: "65"
- taskid: a5df2c98-bc32-472b-88cc-8104223ebe4e
+ taskid: ecbf63df-2b51-420d-8054-41fec8323dc9
type: regular
task:
- id: a5df2c98-bc32-472b-88cc-8104223ebe4e
+ id: ecbf63df-2b51-420d-8054-41fec8323dc9
version: -1
name: Get endpoint
- description: Gets a list of endpoints, according to the passed filters. If there are no filters, all endpoints are returned. Filtering by multiple fields will be concatenated using AND condition (OR is not supported). Maximum result set size is 100. Offset is the zero-based number of endpoint from the start of the result set (start by counting from 0).
+ description: Gets a list of endpoints, according to the passed filters. If there
+ are no filters, all endpoints are returned. Filtering by multiple fields will
+ be concatenated using AND condition (OR is not supported). Maximum result
+ set size is 100. Offset is the zero-based number of endpoint from the start
+ of the result set (start by counting from 0).
script: '|||xdr-get-endpoints'
type: regular
iscommand: true
@@ -2377,7 +2399,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 9965
+ "y": 11190
}
}
note: false
@@ -2389,13 +2411,15 @@ tasks:
isautoswitchedtoquietmode: false
"66":
id: "66"
- taskid: b8f94113-d51e-47b0-8d10-f562e7918c98
+ taskid: 052eb07d-745c-46d7-8f6f-2d092132d8d6
type: regular
task:
- id: b8f94113-d51e-47b0-8d10-f562e7918c98
+ id: 052eb07d-745c-46d7-8f6f-2d092132d8d6
version: -1
name: Change alias name to the original name
- description: Gets a list of endpoints according to the passed filters, and changes their alias name. Filtering by multiple fields will be concatenated using the AND condition (OR is not supported).
+ description: Gets a list of endpoints according to the passed filters, and changes
+ their alias name. Filtering by multiple fields will be concatenated using
+ the AND condition (OR is not supported).
script: '|||xdr-endpoint-alias-change'
type: regular
iscommand: true
@@ -2417,7 +2441,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 10315
+ "y": 11540
}
}
note: false
@@ -2429,10 +2453,10 @@ tasks:
isautoswitchedtoquietmode: false
"67":
id: "67"
- taskid: aa0ce78b-0d02-4390-8f86-c9366f5f37da
+ taskid: a1e5073e-9476-489a-8050-b89e86410450
type: regular
task:
- id: aa0ce78b-0d02-4390-8f86-c9366f5f37da
+ id: a1e5073e-9476-489a-8050-b89e86410450
version: -1
name: xdr-list-users
description: Retrieve a list of the current users in your environment.
@@ -2449,7 +2473,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 10490
+ "y": 11715
}
}
note: false
@@ -2461,10 +2485,10 @@ tasks:
isautoswitchedtoquietmode: false
"68":
id: "68"
- taskid: ae82db0b-64da-4288-8fd0-d840f5bf0a60
+ taskid: d5736e76-6d47-46c5-83a1-7b6887ea7a72
type: condition
task:
- id: ae82db0b-64da-4288-8fd0-d840f5bf0a60
+ id: d5736e76-6d47-46c5-83a1-7b6887ea7a72
version: -1
name: Verify Outputs
type: condition
@@ -2490,7 +2514,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 10665
+ "y": 11890
}
}
note: false
@@ -2502,13 +2526,15 @@ tasks:
isautoswitchedtoquietmode: false
"72":
id: "72"
- taskid: 93864493-d431-429d-83b4-52a71842849f
+ taskid: 53da4e85-c85a-40e5-8fa3-f0a8bc277f4d
type: regular
task:
- id: 93864493-d431-429d-83b4-52a71842849f
+ id: 53da4e85-c85a-40e5-8fa3-f0a8bc277f4d
version: -1
name: xdr-list-risky-users
- description: Retrieve the risk score of a specific user or list of users with the highest risk score in your environment along with the reason affecting each score.
+ description: Retrieve the risk score of a specific user or list of users with
+ the highest risk score in your environment along with the reason affecting
+ each score.
script: '|||xdr-list-risky-users'
type: regular
iscommand: true
@@ -2525,7 +2551,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 10840
+ "y": 12065
}
}
note: false
@@ -2537,13 +2563,15 @@ tasks:
isautoswitchedtoquietmode: false
"76":
id: "76"
- taskid: 8ca0b1ad-b5cf-43d2-8ad6-a1fa9c0ed0bc
+ taskid: d98bf87e-4928-48fc-8318-728651c6ffd8
type: regular
task:
- id: 8ca0b1ad-b5cf-43d2-8ad6-a1fa9c0ed0bc
+ id: d98bf87e-4928-48fc-8318-728651c6ffd8
version: -1
name: xdr-list-risky-hosts
- description: Retrieve the risk score of a specific host or list of hosts with the highest risk score in your environment along with the reason affecting each score.
+ description: Retrieve the risk score of a specific host or list of hosts with
+ the highest risk score in your environment along with the reason affecting
+ each score.
script: '|||xdr-list-risky-hosts'
type: regular
iscommand: true
@@ -2560,7 +2588,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 11365
+ "y": 12590
}
}
note: false
@@ -2572,20 +2600,20 @@ tasks:
isautoswitchedtoquietmode: false
"77":
id: "77"
- taskid: 69b7e5f8-1d59-4c23-8fe3-a30f16a2142a
+ taskid: f8c2b5d5-ef23-4274-8329-68b7e2a32996
type: condition
task:
- id: 69b7e5f8-1d59-4c23-8fe3-a30f16a2142a
+ id: f8c2b5d5-ef23-4274-8329-68b7e2a32996
version: -1
name: Assert wasnt returned
type: condition
iscommand: false
brand: ""
nexttasks:
- "yes":
- - "78"
'#default#':
- "85"
+ "yes":
+ - "78"
separatecontext: false
conditions:
- label: "yes"
@@ -2600,7 +2628,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 11540
+ "y": 12765
}
}
note: false
@@ -2612,10 +2640,10 @@ tasks:
isautoswitchedtoquietmode: false
"78":
id: "78"
- taskid: 6b64bb47-7185-4c09-8af7-90cb4311e9ad
+ taskid: d25fdfdc-2dd0-4010-8664-ddbd05ad0583
type: regular
task:
- id: 6b64bb47-7185-4c09-8af7-90cb4311e9ad
+ id: d25fdfdc-2dd0-4010-8664-ddbd05ad0583
version: -1
name: xdr-list-roles
description: Retrieve information about one or more roles created in your environment.
@@ -2635,7 +2663,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 11890
+ "y": 13115
}
}
note: false
@@ -2647,10 +2675,10 @@ tasks:
isautoswitchedtoquietmode: false
"79":
id: "79"
- taskid: 21902920-dd58-4134-82f8-ac72847e8e6a
+ taskid: 335c4c51-a8e4-4da7-8955-89f80ae97c36
type: condition
task:
- id: 21902920-dd58-4134-82f8-ac72847e8e6a
+ id: 335c4c51-a8e4-4da7-8955-89f80ae97c36
version: -1
name: Verify Outputs
type: condition
@@ -2678,7 +2706,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 12065
+ "y": 13290
}
}
note: false
@@ -2690,13 +2718,17 @@ tasks:
isautoswitchedtoquietmode: false
"81":
id: "81"
- taskid: 5938152b-19e5-4317-87fe-da2babccee32
+ taskid: 45033af4-c92f-4f1d-861b-84280040abf5
type: regular
task:
- id: 5938152b-19e5-4317-87fe-da2babccee32
+ id: 45033af4-c92f-4f1d-861b-84280040abf5
version: -1
name: get-correlation-alerts
- description: "Returns a list of alerts and their metadata, which you can filter by built-in arguments or use the custom_filter to input a JSON filter object. \nMultiple filter arguments will be concatenated using the AND operator, while arguments that support a comma-separated list of values will use an OR operator between each value."
+ description: "Returns a list of alerts and their metadata, which you can filter
+ by built-in arguments or use the custom_filter to input a JSON filter object.
+ \nMultiple filter arguments will be concatenated using the AND operator, while
+ arguments that support a comma-separated list of values will use an OR operator
+ between each value."
script: '|||xdr-get-alerts'
type: regular
iscommand: true
@@ -2725,10 +2757,10 @@ tasks:
isautoswitchedtoquietmode: false
"82":
id: "82"
- taskid: cb2f555f-4925-43cd-8608-e27a56d43b11
+ taskid: 9045a7cf-4ea2-4457-828b-a4358345f84a
type: condition
task:
- id: cb2f555f-4925-43cd-8608-e27a56d43b11
+ id: 9045a7cf-4ea2-4457-828b-a4358345f84a
version: -1
name: check if have any correlation alerts
type: condition
@@ -2765,10 +2797,10 @@ tasks:
isautoswitchedtoquietmode: false
"83":
id: "83"
- taskid: c77d991d-2e2e-4718-800f-2481b9307ffd
+ taskid: a6e92f85-9aa3-4126-8d28-1210ba409ad0
type: condition
task:
- id: c77d991d-2e2e-4718-800f-2481b9307ffd
+ id: a6e92f85-9aa3-4126-8d28-1210ba409ad0
version: -1
name: Verify outputs if exists
type: condition
@@ -2803,7 +2835,7 @@ tasks:
{
"position": {
"x": 162.5,
- "y": 11015
+ "y": 12240
}
}
note: false
@@ -2815,10 +2847,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 4c3919fa-c0a2-4b90-8db3-305930568e46
+ taskid: 9e005118-4256-4074-8987-aa49188c4c0b
type: condition
task:
- id: 4c3919fa-c0a2-4b90-8db3-305930568e46
+ id: 9e005118-4256-4074-8987-aa49188c4c0b
version: -1
name: Assert wasnt returned
type: condition
@@ -2843,7 +2875,7 @@ tasks:
{
"position": {
"x": 275,
- "y": 11190
+ "y": 12415
}
}
note: false
@@ -2855,10 +2887,10 @@ tasks:
isautoswitchedtoquietmode: false
"85":
id: "85"
- taskid: e2e56c98-7ee6-4bd9-8baf-f7446f9d09b6
+ taskid: b7e1558b-2d3f-4f7f-8a48-08d72579f963
type: condition
task:
- id: e2e56c98-7ee6-4bd9-8baf-f7446f9d09b6
+ id: b7e1558b-2d3f-4f7f-8a48-08d72579f963
version: -1
name: Assert schema
type: condition
@@ -2891,7 +2923,52 @@ tasks:
{
"position": {
"x": 275,
- "y": 11715
+ "y": 12940
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "87":
+ id: "87"
+ taskid: b596e625-5531-41d7-8b6c-cfffd87afc3b
+ type: regular
+ task:
+ id: b596e625-5531-41d7-8b6c-cfffd87afc3b
+ version: -1
+ name: xdr-update-incident-including-alerts
+ description: Updates one or more fields of a specified incident. Missing fields
+ will be ignored. To remove the assignment for an incident, pass a null value
+ in the assignee email argument.
+ script: '|||xdr-update-incident'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "90"
+ scriptarguments:
+ incident_id:
+ simple: ${XDRIncidentID}
+ manual_severity:
+ simple: LOW
+ resolve_alerts:
+ simple: "true"
+ resolve_comment:
+ simple: i am resolved
+ status:
+ simple: RESOLVED_DUPLICATE
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 3170
}
}
note: false
@@ -2901,12 +2978,283 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "88":
+ id: "88"
+ taskid: 55048f19-9e63-4341-8488-7c5582e203ea
+ type: regular
+ task:
+ id: 55048f19-9e63-4341-8488-7c5582e203ea
+ version: -1
+ name: xdr-get-incident-extra-data
+ description: Returns additional data for the specified incident, for example,
+ related alerts, file artifacts, network artifacts, and so on.
+ script: '|||xdr-get-incident-extra-data'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "89"
+ scriptarguments:
+ alerts_limit:
+ simple: "1"
+ incident_id:
+ complex:
+ root: PaloAltoNetworksXDR.Incident
+ accessor: incident_id
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 3705
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "89":
+ id: "89"
+ taskid: 30065881-eaf6-4e58-8706-8e1085561b63
+ type: regular
+ task:
+ id: 30065881-eaf6-4e58-8706-8e1085561b63
+ version: -1
+ name: xdr-update-alert
+ description: |-
+ Update one or more alerts with the provided arguments.
+ Required license: Cortex XDR Prevent, Cortex XDR Pro per Endpoint, or Cortex XDR Pro per GB.
+ script: '|||xdr-update-alert'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "92"
+ scriptarguments:
+ alert_ids:
+ complex:
+ root: PaloAltoNetworksXDR.Incident.alerts
+ accessor: alert_id
+ transformers:
+ - operator: FirstArrayElement
+ comment:
+ simple: i was updated from playbook
+ severity:
+ simple: low
+ status:
+ simple: under_investigation
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 3870
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "90":
+ id: "90"
+ taskid: f78a3ee5-0a55-4cf2-8114-58eefd194359
+ type: regular
+ task:
+ id: f78a3ee5-0a55-4cf2-8114-58eefd194359
+ version: -1
+ name: xdr-get-incident-extra-data
+ description: Returns additional data for the specified incident, for example,
+ related alerts, file artifacts, network artifacts, and so on.
+ script: '|||xdr-get-incident-extra-data'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "91"
+ scriptarguments:
+ incident_id:
+ simple: ${XDRIncidentID}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 3345
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "91":
+ id: "91"
+ taskid: 4a51b72d-c627-4bfd-8be1-f1134b6f2a00
+ type: condition
+ task:
+ id: 4a51b72d-c627-4bfd-8be1-f1134b6f2a00
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "88"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${PaloAltoNetworksXDR.Incident.severity}
+ iscontext: true
+ right:
+ value:
+ simple: low
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${PaloAltoNetworksXDR.Incident.resolve_comment}
+ iscontext: true
+ right:
+ value:
+ simple: i am resolved
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${PaloAltoNetworksXDR.Incident.alerts.[0].resolution_status}
+ iscontext: true
+ right:
+ value:
+ simple: STATUS_050_RESOLVED_DUPLICATE
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 3520
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "92":
+ id: "92"
+ taskid: b1583580-5673-4d1e-8e2a-6440e6d1dfa8
+ type: regular
+ task:
+ id: b1583580-5673-4d1e-8e2a-6440e6d1dfa8
+ version: -1
+ name: xdr-get-incident-extra-data
+ description: Returns additional data for the specified incident, for example,
+ related alerts, file artifacts, network artifacts, and so on.
+ script: '|||xdr-get-incident-extra-data'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "93"
+ scriptarguments:
+ incident_id:
+ complex:
+ root: PaloAltoNetworksXDR.Incident
+ accessor: incident_id
+ transformers:
+ - operator: FirstArrayElement
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 4045
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "93":
+ id: "93"
+ taskid: 4f374a84-d948-40bc-8af5-e212d307ca40
+ type: condition
+ task:
+ id: 4f374a84-d948-40bc-8af5-e212d307ca40
+ version: -1
+ name: Verify Outputs
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "11"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${PaloAltoNetworksXDR.Incident.alerts.[0].severity}
+ iscontext: true
+ right:
+ value:
+ simple: low
+ - - operator: isEqualString
+ left:
+ value:
+ simple: ${PaloAltoNetworksXDR.Incident.alerts.[0].resolution_comment}
+ iscontext: true
+ right:
+ value:
+ simple: i was updated from playbook
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 4220
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 2
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 12255,
+ "height": 13480,
"width": 922.5,
"x": 50,
"y": 50
@@ -2916,4 +3264,4 @@ view: |-
inputs: []
outputs: []
fromversion: 5.0.0
-description: ''
+description: ''
\ No newline at end of file
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.png b/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.png
index 1221f356b03b..722f8df8a9f6 100644
Binary files a/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.png and b/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR.png differ
diff --git a/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.png b/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.png
index 9bd5f0565cc9..6387f255d142 100644
Binary files a/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.png and b/Packs/CortexXDR/doc_files/Cortex_XDR_IOCs_-_Push_new_IOCs_to_XDR_Main.png differ
diff --git a/Packs/CortexXDR/pack_metadata.json b/Packs/CortexXDR/pack_metadata.json
index b64e16550a7d..d437a14ee015 100644
--- a/Packs/CortexXDR/pack_metadata.json
+++ b/Packs/CortexXDR/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex XDR by Palo Alto Networks",
"description": "Automates Cortex XDR incident response, and includes custom Cortex XDR incident views and layouts to aid analyst investigations.",
"support": "xsoar",
- "currentVersion": "6.1.36",
+ "currentVersion": "6.1.43",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CortexXpanse/.secrets-ignore b/Packs/CortexXpanse/.secrets-ignore
index edab5b1fb3b2..6781d45c158f 100644
--- a/Packs/CortexXpanse/.secrets-ignore
+++ b/Packs/CortexXpanse/.secrets-ignore
@@ -3,4 +3,7 @@ inferredCveMatchType
noc@acme.com
cs@acme.com
2600:1900:4000:9664:0:7
-00:11:22:33:44:55
\ No newline at end of file
+00:11:22:33:44:55
+192.168.1.1
+192.168.1.2
+https://test.com
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Classifiers/classifier-Xpanse_-_Incoming_Mapper.json b/Packs/CortexXpanse/Classifiers/classifier-Xpanse_-_Incoming_Mapper.json
index f6026c304883..1cdeeba6f3ca 100644
--- a/Packs/CortexXpanse/Classifiers/classifier-Xpanse_-_Incoming_Mapper.json
+++ b/Packs/CortexXpanse/Classifiers/classifier-Xpanse_-_Incoming_Mapper.json
@@ -6,195 +6,198 @@
"Xpanse Alert": {
"dontMapEventToLabels": false,
"internalMapping": {
- "Xpanse Alert ID": {
- "complex": {
- "filters": [],
- "root": "alert_id",
- "transformers": []
- }
- },
- "Xpanse Asset IDs": {
- "complex": {
- "filters": [],
- "root": "asset_ids",
- "transformers": []
- }
- },
+ "Xpanse Alert ID": {
+ "complex": {
+ "filters": [],
+ "root": "alert_id",
+ "transformers": []
+ }
+ },
+ "Xpanse Asset IDs": {
+ "complex": {
+ "filters": [],
+ "root": "asset_ids",
+ "transformers": []
+ }
+ },
"Xpanse Business Units": {
- "complex": {
- "accessor": "name",
- "filters": [],
- "root": "business_unit_hierarchies.[].[]",
- "transformers": [
- {
- "args": {
- "separator": {
- "value": {
- "simple": ","
- }
- }
- },
- "operator": "join"
- }
- ]
- }
- },
- "Xpanse Country Code": {
- "complex": {
- "filters": [],
- "root": "country_codes",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Xpanse Description": {
- "complex": {
- "filters": [],
- "root": "description",
- "transformers": []
- }
- },
- "Xpanse External ID": {
- "complex": {
- "filters": [],
- "root": "external_id",
- "transformers": []
- }
- },
- "Xpanse Host Name": {
- "complex": {
- "filters": [],
- "root": "domain_names",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Xpanse IP": {
- "complex": {
- "filters": [],
- "root": "ipv4_addresses",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Xpanse Port": {
- "complex": {
- "filters": [],
- "root": "action_remote_port",
- "transformers": []
- }
- },
- "Xpanse Progress Status": {
- "complex": {
- "filters": [],
- "root": "resolution_status",
- "transformers": []
- }
- },
- "Xpanse Protocol": {
- "complex": {
- "filters": [],
- "root": "port_protocol",
- "transformers": []
- }
- },
- "Xpanse Provider": {
- "complex": {
- "filters": [],
- "root": "cloud_providers",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Xpanse Remediation Guidance": {
- "complex": {
- "filters": [],
- "root": "remediation_guidance",
- "transformers": []
- }
- },
- "Xpanse Service ID": {
- "complex": {
- "filters": [],
- "root": "service_ids",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Xpanse Tags": {
- "complex": {
- "filters": [],
- "root": "tags",
- "transformers": [
- {
- "args": {
- "applyIfEmpty": {
- "isContext": true
- },
- "defaultValue": {
- "isContext": true,
- "value": {
- "simple": "original_tags"
- }
- }
- },
- "operator": "SetIfEmpty"
- }
- ]
- }
- },
+ "complex": {
+ "accessor": "name",
+ "filters": [],
+ "root": "business_unit_hierarchies.[].[]",
+ "transformers": [
+ {
+ "args": {
+ "separator": {
+ "value": {
+ "simple": ","
+ }
+ }
+ },
+ "operator": "join"
+ }
+ ]
+ }
+ },
+ "Xpanse Country Code": {
+ "complex": {
+ "filters": [],
+ "root": "country_codes",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Xpanse Description": {
+ "complex": {
+ "filters": [],
+ "root": "description",
+ "transformers": []
+ }
+ },
+ "Xpanse External ID": {
+ "complex": {
+ "filters": [],
+ "root": "external_id",
+ "transformers": []
+ }
+ },
+ "Xpanse Host Name": {
+ "complex": {
+ "filters": [],
+ "root": "domain_names",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Xpanse IP": {
+ "complex": {
+ "filters": [],
+ "root": "ipv4_addresses",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Xpanse Port": {
+ "complex": {
+ "filters": [],
+ "root": "action_remote_port",
+ "transformers": []
+ }
+ },
+ "Xpanse Progress Status": {
+ "complex": {
+ "filters": [],
+ "root": "resolution_status",
+ "transformers": []
+ }
+ },
+ "Xpanse Protocol": {
+ "complex": {
+ "filters": [],
+ "root": "port_protocol",
+ "transformers": []
+ }
+ },
+ "Xpanse Provider": {
+ "complex": {
+ "filters": [],
+ "root": "cloud_providers",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Xpanse Remediation Guidance": {
+ "complex": {
+ "filters": [],
+ "root": "remediation_guidance",
+ "transformers": []
+ }
+ },
+ "Xpanse Service ID": {
+ "complex": {
+ "filters": [],
+ "root": "service_ids",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Xpanse Tags": {
+ "complex": {
+ "filters": [],
+ "root": "tags",
+ "transformers": [
+ {
+ "args": {
+ "applyIfEmpty": {
+ "isContext": true
+ },
+ "defaultValue": {
+ "isContext": true,
+ "value": {
+ "simple": "original_tags"
+ }
+ }
+ },
+ "operator": "SetIfEmpty"
+ }
+ ]
+ }
+ },
"Xpanse Category": {
"simple": "asm_alert_categories"
+ },
+ "Xpanse Attack Surface Rule Name": {
+ "simple": "attack_surface_rule_name"
}
}
- },
+ },
"dbot_classification_incident_type_all": {
"dontMapEventToLabels": false,
"internalMapping": {
- "Description": {
- "simple": "description"
- },
- "Destination IP": {
- "complex": {
- "filters": [],
- "root": "ipv4_addresses",
- "transformers": [
- {
- "operator": "FirstArrayElement"
- }
- ]
- }
- },
- "Protocol": {
- "complex": {
- "filters": [],
- "root": "port_protocol",
- "transformers": []
- }
- },
- "Tags": {
- "complex": {
+ "Description": {
+ "simple": "description"
+ },
+ "Destination IP": {
+ "complex": {
+ "filters": [],
+ "root": "ipv4_addresses",
+ "transformers": [
+ {
+ "operator": "FirstArrayElement"
+ }
+ ]
+ }
+ },
+ "Protocol": {
+ "complex": {
+ "filters": [],
+ "root": "port_protocol",
+ "transformers": []
+ }
+ },
+ "Tags": {
+ "complex": {
"filters": [],
"root": "tags",
"transformers": []
}
- }
- }
+ }
+ }
}
},
"name": "Xpanse - Incoming Mapper",
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Attack_Surface_Rule_Name.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Attack_Surface_Rule_Name.json
new file mode 100644
index 000000000000..381655387a1c
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Attack_Surface_Rule_Name.json
@@ -0,0 +1,31 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpanseattacksurfacerulename",
+ "closeForm": false,
+ "content": true,
+ "description": "Attack Surface Rule Name",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpanseattacksurfacerulename",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Attack Surface Rule Name",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json
new file mode 100644
index 000000000000..4ddfdcae4279
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Certificate_Asset.json
@@ -0,0 +1,164 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpansecertificateasset",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "type",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "type",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "date_added",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "date_added",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "explainers",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "explainers",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "subject",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "subject",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "subject_alerternative_names",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "subject_alerternative_names",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "issuer",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "issuer",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "issuer_email",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "issuer_email",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "expires",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "expires",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "algorithm",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "algorithm",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Certificate Asset Details",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpansecertificateasset",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Certificate Asset",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json
new file mode 100644
index 000000000000..3774ebc5a366
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Cloud_Asset.json
@@ -0,0 +1,125 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpansecloudasset",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "type",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "type",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "date_added",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "date_added",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "explainers",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "explainers",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "externally_detected_providers",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "externally_detected_providers",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "ips",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "ips",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "domain",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "domain",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Cloud Asset Details",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpansecloudasset",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Cloud Asset",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json
new file mode 100644
index 000000000000..822546907b0b
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Domain_Asset.json
@@ -0,0 +1,164 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpansedomainasset",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "type",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "type",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "date_added",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "date_added",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "explainers",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "explainers",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "registrar_name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "registrar_name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "registry_expiration",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "registry_expiration",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "domain_status",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "domain_status",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "registrant_name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "registrant_name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "registrant_org",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "registrant_org",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "registrant_email",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "registrant_email",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Domain Asset Details",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpansedomainasset",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Domain Asset",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json
new file mode 100644
index 000000000000..c54197dc4145
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Responsive_IP_Asset.json
@@ -0,0 +1,138 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpanseresponsiveipasset",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "name",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "type",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "type",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "date_added",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "date_added",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "explainers",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "explainers",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "ip_version",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "ip_version",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "range",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "range",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "asn_number",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "asn_number",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "asn_country",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "asn_country",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Responsive IP Asset Details",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpanseresponsiveipasset",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Responsive IP Asset",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Service_Details.json b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Service_Details.json
new file mode 100644
index 000000000000..daea8a87a957
--- /dev/null
+++ b/Packs/CortexXpanse/IncidentFields/incidentfield-Xpanse_Service_Details.json
@@ -0,0 +1,60 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Xpanse Alert"
+ ],
+ "caseInsensitive": true,
+ "cliName": "xpanseservicedetails",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "field",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "field",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "value",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "value",
+ "orgType": "shortText",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Service Details",
+ "editForm": true,
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xpanseservicedetails",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Xpanse Service Details",
+ "neverSetAsRequired": false,
+ "openEnded": false,
+ "ownerOnly": false,
+ "required": false,
+ "sla": 0,
+ "system": false,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": true,
+ "useAsKpi": false,
+ "version": -1,
+ "fromVersion": "6.8.0"
+}
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
index 923b33cf733e..e3a319fa52dd 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.py
@@ -36,6 +36,7 @@
ALERT_STATUSES = [
"new",
+ "reopened",
"under_investigation",
"resolved_no_risk",
"resolved_risk_accepted",
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
index 0a7b5ffd430a..60a00c441d7f 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse.yml
@@ -63,6 +63,7 @@ configuration:
required: false
options:
- new
+ - reopened
- under_investigation
- resolved_no_longer_observed
- resolved_no_risk
@@ -443,6 +444,7 @@ script:
auto: PREDEFINED
predefined:
- new
+ - reopened
- under_investigation
- resolved_no_longer_observed
- resolved_no_risk
@@ -714,6 +716,7 @@ script:
auto: PREDEFINED
predefined:
- new
+ - reopened
- under_investigation
- resolved
- resolved_contested_asset
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
index ad1e884e94d4..0ba00dfd7474 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/CortexXpanse_test.py
@@ -344,13 +344,17 @@ def test_list_alerts_command(requests_mock):
},
proxy=False)
args = {
- 'limit': '2',
- 'severity': 'high',
+ 'limit': '3',
'sort_by_creation_time': 'asc'
}
response = list_alerts_command(client, args)
+ for alert in response.outputs:
+ if 'status' in alert:
+ status = alert['status']
+ assert status == 'reopened'
+
assert response.outputs == LIST_ALERTS_RESULTS
assert response.outputs_prefix == 'ASM.Alert'
assert response.outputs_key_field == 'alert_id'
@@ -819,7 +823,7 @@ def test_fetch_incidents(requests_mock, mocker):
status=None,
tags=None)
- assert len(incidents) == 2
+ assert len(incidents) == 3
assert incidents[0]['name'] == "Networking Infrastructure"
assert json.loads(incidents[0]['rawJSON']).pop('local_insert_ts')
assert next_run == {'last_fetch': 1659455267908}
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/README.md b/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
index b5136678c8a2..15995be47e6f 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/README.md
@@ -1092,7 +1092,7 @@ Get a list of all your ASM alerts filtered by alert IDs, severity and/or creatio
| alert_id_list | Comma-separated list of alert IDs. | Optional |
| severity | Comma-separated list of alert severities (valid values are low, medium, high, critical, informational). | Optional |
| tags | Comma-separated list of alert tags. These should include the tag prefix, ex. AT:Asset Tag. | Optional |
-| status | Comma-separated list of the alert status. Possible values are: new, under_investigation, resolved_no_longer_observed, resolved_no_risk, resolved_risk_accepted, resolved_contested_asset, resolved_remediated_automatically, resolved. | Optional |
+| status | Comma-separated list of the alert status. Possible values are: new, reopened, under_investigation, resolved_no_longer_observed, resolved_no_risk, resolved_risk_accepted, resolved_contested_asset, resolved_remediated_automatically, resolved. | Optional |
| business_units_list | Comma-separated list business units. | Optional |
| case_id_list | Comma-separated list of case (incident) IDs. | Optional |
| lte_creation_time | A date in the format 2019-12-31T23:59:00. Only incidents that were created on or before the specified date/time will be retrieved. | Optional |
@@ -1985,7 +1985,7 @@ Updates the state of one or more alerts.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| alert_id_list | Comma-separated list of integers of the alert ID. | Optional |
-| status | Updated alert status. Possible values are: new, under_investigation, resolved_no_longer_observed, resolved_no_risk, resolved_risk_accepted, resolved_contested_asset, resolved_remediated_automatically, resolved. | Optional |
+| status | Updated alert status. Possible values are: new, reopened, under_investigation, resolved_no_longer_observed, resolved_no_risk, resolved_risk_accepted, resolved_contested_asset, resolved_remediated_automatically, resolved. | Optional |
| severity | The severity of the alert. Possible values are: low, medium, high, critical. | Optional |
#### Context Output
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples b/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
index 04adfd9f2544..1c9fa1dee9f5 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/command_examples
@@ -4,5 +4,5 @@
!asm-list-external-ip-address-range
!asm-list-asset-internet-exposure name="acme.com" type=certificate has_active_external_services=no
!asm-list-external-service domain=acme.com is_active=yes discovery_type=directly_discovery
-!asm-list-alerts limit=2 severity=high sort_by_creation_time=asc
+!asm-list-alerts limit=2 severity=high sort_by_creation_time=asc status=reopened
!asm-list-external-websites authentication="Form" limit=5
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
index 051630b19f8f..ac7deb480f46 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/expected_results.py
@@ -1116,6 +1116,161 @@
"action": "NOT_AVAILABLE",
"action_pretty": "N/A",
"tags": null
+ },
+ {
+ "external_id": "FAKE-GUID-3",
+ "severity": "high",
+ "matching_status": "MATCHED",
+ "end_match_attempt_ts": null,
+ "local_insert_ts": 1659455246813,
+ "last_modified_ts": 1660240426055,
+ "bioc_indicator": null,
+ "matching_service_rule_id": null,
+ "attempt_counter": null,
+ "bioc_category_enum_key": null,
+ "is_whitelisted": false,
+ "starred": false,
+ "deduplicate_tokens": null,
+ "filter_rule_id": null,
+ "mitre_technique_id_and_name": null,
+ "mitre_tactic_id_and_name": null,
+ "agent_version": null,
+ "agent_ip_addresses_v6": null,
+ "agent_device_domain": null,
+ "agent_fqdn": null,
+ "agent_os_type": "NO_HOST",
+ "agent_os_sub_type": null,
+ "agent_data_collection_status": null,
+ "mac": null,
+ "is_pcap": false,
+ "alert_type": "Unclassified",
+ "resolution_status": "STATUS_230_REOPENED",
+ "resolution_comment": "ASM alert reopened",
+ "dynamic_fields": null,
+ "events": [
+ {
+ "agent_install_type": "NA",
+ "agent_host_boot_time": null,
+ "event_sub_type": null,
+ "module_id": null,
+ "association_strength": null,
+ "dst_association_strength": null,
+ "story_id": null,
+ "event_id": null,
+ "event_type": null,
+ "event_timestamp": 1659452809020,
+ "actor_process_instance_id": null,
+ "actor_process_image_path": null,
+ "actor_process_image_name": null,
+ "actor_process_command_line": null,
+ "actor_process_signature_status": "N/A",
+ "actor_process_signature_vendor": null,
+ "actor_process_image_sha256": null,
+ "actor_process_image_md5": null,
+ "actor_process_causality_id": null,
+ "actor_causality_id": null,
+ "actor_process_os_pid": null,
+ "actor_thread_thread_id": null,
+ "causality_actor_process_image_name": null,
+ "causality_actor_process_command_line": null,
+ "causality_actor_process_image_path": null,
+ "causality_actor_process_signature_vendor": null,
+ "causality_actor_process_signature_status": "N/A",
+ "causality_actor_causality_id": null,
+ "causality_actor_process_execution_time": null,
+ "causality_actor_process_image_md5": null,
+ "causality_actor_process_image_sha256": null,
+ "action_file_path": null,
+ "action_file_name": null,
+ "action_file_md5": null,
+ "action_file_sha256": null,
+ "action_file_macro_sha256": null,
+ "action_registry_data": null,
+ "action_registry_key_name": null,
+ "action_registry_value_name": null,
+ "action_registry_full_key": null,
+ "action_local_ip": null,
+ "action_local_ip_v6": null,
+ "action_local_port": null,
+ "action_remote_ip": null,
+ "action_remote_ip_v6": null,
+ "action_remote_port": 80,
+ "action_external_hostname": null,
+ "action_country": "UNKNOWN",
+ "action_process_instance_id": null,
+ "action_process_causality_id": null,
+ "action_process_image_name": null,
+ "action_process_image_sha256": null,
+ "action_process_image_command_line": null,
+ "action_process_signature_status": "N/A",
+ "action_process_signature_vendor": null,
+ "os_actor_effective_username": null,
+ "os_actor_process_instance_id": null,
+ "os_actor_process_image_path": null,
+ "os_actor_process_image_name": null,
+ "os_actor_process_command_line": null,
+ "os_actor_process_signature_status": "N/A",
+ "os_actor_process_signature_vendor": null,
+ "os_actor_process_image_sha256": null,
+ "os_actor_process_causality_id": null,
+ "os_actor_causality_id": null,
+ "os_actor_process_os_pid": null,
+ "os_actor_thread_thread_id": null,
+ "fw_app_id": null,
+ "fw_interface_from": null,
+ "fw_interface_to": null,
+ "fw_rule": null,
+ "fw_rule_id": null,
+ "fw_device_name": null,
+ "fw_serial_number": null,
+ "fw_url_domain": null,
+ "fw_email_subject": null,
+ "fw_email_sender": null,
+ "fw_email_recipient": null,
+ "fw_app_subcategory": null,
+ "fw_app_category": null,
+ "fw_app_technology": null,
+ "fw_vsys": null,
+ "fw_xff": null,
+ "fw_misc": null,
+ "fw_is_phishing": "N/A",
+ "dst_agent_id": null,
+ "dst_causality_actor_process_execution_time": null,
+ "dns_query_name": null,
+ "dst_action_external_hostname": null,
+ "dst_action_country": null,
+ "dst_action_external_port": null,
+ "contains_featured_host": "NO",
+ "contains_featured_user": "NO",
+ "contains_featured_ip": "NO",
+ "image_name": null,
+ "container_id": null,
+ "cluster_name": null,
+ "referenced_resource": null,
+ "operation_name": null,
+ "identity_sub_type": null,
+ "identity_type": null,
+ "project": null,
+ "cloud_provider": null,
+ "resource_type": null,
+ "resource_sub_type": null,
+ "user_agent": null,
+ "user_name": null
+ }
+ ],
+ "alert_id": "34",
+ "detection_timestamp": 1659452809020,
+ "name": "Networking Infrastructure",
+ "category": null,
+ "endpoint_id": null,
+ "description": "Networking and security infrastructure, such as firewalls and routers, generally should not have their administration panels open to public Internet. Compromise of these devices, often though password guessing or vulnerability exploitation, provides privileged access to an enterprise network.",
+ "host_ip": null,
+ "host_name": null,
+ "mac_addresses": null,
+ "source": "ASM",
+ "action": "NOT_AVAILABLE",
+ "action_pretty": "N/A",
+ "tags": null
}
]
diff --git a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
index cab7bca9122c..bcb7af27ba38 100644
--- a/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
+++ b/Packs/CortexXpanse/Integrations/CortexXpanse/test_data/raw_response.py
@@ -848,7 +848,7 @@
LIST_ALERTS_RESPONSE = {
"reply": {
"total_count": 696,
- "result_count": 2,
+ "result_count": 3,
"alerts": [
{
"external_id": "FAKE-GUID",
@@ -1159,6 +1159,161 @@
"action": "NOT_AVAILABLE",
"action_pretty": "N/A",
"tags": null
+ },
+ {
+ "external_id": "FAKE-GUID-3",
+ "severity": "high",
+ "matching_status": "MATCHED",
+ "end_match_attempt_ts": null,
+ "local_insert_ts": 1659455246813,
+ "last_modified_ts": 1660240426055,
+ "bioc_indicator": null,
+ "matching_service_rule_id": null,
+ "attempt_counter": null,
+ "bioc_category_enum_key": null,
+ "is_whitelisted": false,
+ "starred": false,
+ "deduplicate_tokens": null,
+ "filter_rule_id": null,
+ "mitre_technique_id_and_name": null,
+ "mitre_tactic_id_and_name": null,
+ "agent_version": null,
+ "agent_ip_addresses_v6": null,
+ "agent_device_domain": null,
+ "agent_fqdn": null,
+ "agent_os_type": "NO_HOST",
+ "agent_os_sub_type": null,
+ "agent_data_collection_status": null,
+ "mac": null,
+ "is_pcap": false,
+ "alert_type": "Unclassified",
+ "resolution_status": "STATUS_230_REOPENED",
+ "resolution_comment": "ASM alert reopened",
+ "dynamic_fields": null,
+ "events": [
+ {
+ "agent_install_type": "NA",
+ "agent_host_boot_time": null,
+ "event_sub_type": null,
+ "module_id": null,
+ "association_strength": null,
+ "dst_association_strength": null,
+ "story_id": null,
+ "event_id": null,
+ "event_type": null,
+ "event_timestamp": 1659452809020,
+ "actor_process_instance_id": null,
+ "actor_process_image_path": null,
+ "actor_process_image_name": null,
+ "actor_process_command_line": null,
+ "actor_process_signature_status": "N/A",
+ "actor_process_signature_vendor": null,
+ "actor_process_image_sha256": null,
+ "actor_process_image_md5": null,
+ "actor_process_causality_id": null,
+ "actor_causality_id": null,
+ "actor_process_os_pid": null,
+ "actor_thread_thread_id": null,
+ "causality_actor_process_image_name": null,
+ "causality_actor_process_command_line": null,
+ "causality_actor_process_image_path": null,
+ "causality_actor_process_signature_vendor": null,
+ "causality_actor_process_signature_status": "N/A",
+ "causality_actor_causality_id": null,
+ "causality_actor_process_execution_time": null,
+ "causality_actor_process_image_md5": null,
+ "causality_actor_process_image_sha256": null,
+ "action_file_path": null,
+ "action_file_name": null,
+ "action_file_md5": null,
+ "action_file_sha256": null,
+ "action_file_macro_sha256": null,
+ "action_registry_data": null,
+ "action_registry_key_name": null,
+ "action_registry_value_name": null,
+ "action_registry_full_key": null,
+ "action_local_ip": null,
+ "action_local_ip_v6": null,
+ "action_local_port": null,
+ "action_remote_ip": null,
+ "action_remote_ip_v6": null,
+ "action_remote_port": 80,
+ "action_external_hostname": null,
+ "action_country": "UNKNOWN",
+ "action_process_instance_id": null,
+ "action_process_causality_id": null,
+ "action_process_image_name": null,
+ "action_process_image_sha256": null,
+ "action_process_image_command_line": null,
+ "action_process_signature_status": "N/A",
+ "action_process_signature_vendor": null,
+ "os_actor_effective_username": null,
+ "os_actor_process_instance_id": null,
+ "os_actor_process_image_path": null,
+ "os_actor_process_image_name": null,
+ "os_actor_process_command_line": null,
+ "os_actor_process_signature_status": "N/A",
+ "os_actor_process_signature_vendor": null,
+ "os_actor_process_image_sha256": null,
+ "os_actor_process_causality_id": null,
+ "os_actor_causality_id": null,
+ "os_actor_process_os_pid": null,
+ "os_actor_thread_thread_id": null,
+ "fw_app_id": null,
+ "fw_interface_from": null,
+ "fw_interface_to": null,
+ "fw_rule": null,
+ "fw_rule_id": null,
+ "fw_device_name": null,
+ "fw_serial_number": null,
+ "fw_url_domain": null,
+ "fw_email_subject": null,
+ "fw_email_sender": null,
+ "fw_email_recipient": null,
+ "fw_app_subcategory": null,
+ "fw_app_category": null,
+ "fw_app_technology": null,
+ "fw_vsys": null,
+ "fw_xff": null,
+ "fw_misc": null,
+ "fw_is_phishing": "N/A",
+ "dst_agent_id": null,
+ "dst_causality_actor_process_execution_time": null,
+ "dns_query_name": null,
+ "dst_action_external_hostname": null,
+ "dst_action_country": null,
+ "dst_action_external_port": null,
+ "contains_featured_host": "NO",
+ "contains_featured_user": "NO",
+ "contains_featured_ip": "NO",
+ "image_name": null,
+ "container_id": null,
+ "cluster_name": null,
+ "referenced_resource": null,
+ "operation_name": null,
+ "identity_sub_type": null,
+ "identity_type": null,
+ "project": null,
+ "cloud_provider": null,
+ "resource_type": null,
+ "resource_sub_type": null,
+ "user_agent": null,
+ "user_name": null
+ }
+ ],
+ "alert_id": "34",
+ "detection_timestamp": 1659452809020,
+ "name": "Networking Infrastructure",
+ "category": null,
+ "endpoint_id": null,
+ "description": "Networking and security infrastructure, such as firewalls and routers, generally should not have their administration panels open to public Internet. Compromise of these devices, often though password guessing or vulnerability exploitation, provides privileged access to an enterprise network.",
+ "host_ip": null,
+ "host_name": null,
+ "mac_addresses": null,
+ "source": "ASM",
+ "action": "NOT_AVAILABLE",
+ "action_pretty": "N/A",
+ "tags": null
}
]
}
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py
new file mode 100644
index 000000000000..d8c73f012d09
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.py
@@ -0,0 +1,374 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
+
+import urllib3
+from typing import Any
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+''' CONSTANTS '''
+
+DEFAULT_FEED_TAGS = {'XPANSE'}
+DEFAULT_ASSET_SEARCH_LIMIT = 5000
+V1_URL_SUFFIX = "/public_api/v1"
+
+''' CLIENT CLASS '''
+
+
+class Client(BaseClient):
+ """
+ Client class to interact with the service API
+ """
+
+ def __init__(self, base_url: str, verify: bool, proxy: bool, feed_tags: list[str], tlp_color: str, headers: dict):
+ """
+ Class initialization.
+ """
+ super().__init__(base_url=base_url, verify=verify, proxy=proxy, headers=headers)
+ self.feed_tags = feed_tags
+ self.tlp_color = tlp_color
+ self.verify = verify
+ self.proxy = proxy
+ self.headers = headers
+
+ def list_asset_internet_exposure_request(self, search_params: list[dict] = [], search_from: int = 0,
+ search_to: int = DEFAULT_ASSET_SEARCH_LIMIT,
+ use_paging: bool = True) -> list:
+ """Get a list of all your internet exposure assets using the '/assets/get_assets_internet_exposure/' endpoint.
+
+ Args:
+ search_params (list): list of search parameters to add to the API call body.
+ search_from (int): Starting search index.
+ search_to (int): Ending search index.
+ use_paging (bool): whether to use paging or not (default is True)
+
+ Returns:
+ List: list containing dictionaries of internet exposure assets.
+ """
+ body = {"request_data": {"filters": search_params, "search_to": int(
+ search_to), "search_from": int(search_from), "use_page_token": True}}
+ full_response = []
+ while True:
+ result = self._http_request(
+ method='POST',
+ url_suffix=f'{V1_URL_SUFFIX}/assets/get_assets_internet_exposure/',
+ json_data=body
+ )
+
+ data = result.get('reply', {}).get('assets_internet_exposure')
+ if data:
+ full_response.extend(data)
+ if not use_paging:
+ break
+ pagination = result.get('reply', {}).get("next_page_token")
+ if pagination is None:
+ break
+ body["request_data"]["next_page_token"] = pagination
+
+ return full_response
+
+
+''' HELPER FUNCTIONS '''
+
+
+def create_x509_certificate_grids(string_object: Optional[str]) -> list:
+ """
+ Creates a grid field related to the subject and issuer field of the x509 certificate object.
+
+ Args:
+ string_object (Optional[str]): A str in format of C=ZA, Inc.,ST=Western Cape,L=Cape Town,O=Thawte.
+ Returns:
+ list: The return value. A list of dict [{"title": "C", "data": "ZA"}].
+ """
+ result_grid_list = []
+ if string_object:
+ key_value_pairs = string_object.split(',')
+ for pair in key_value_pairs:
+ result_grid = {}
+ # '=' in pair means we extracted the right entries for k/v
+ if '=' in pair:
+ key, value = pair.split('=', 1)
+ result_grid['title'] = key
+ result_grid['data'] = value
+ result_grid_list.append(result_grid)
+            # If there is no '=', the value itself contained a ',', so append this fragment to the previous entry's data
+ else:
+ result_grid_list[-1]['data'] = (result_grid_list[-1]['data'] + ", " + pair).replace("\\", "")
+ return result_grid_list
+
+
+def map_indicator_fields(raw_indicator: dict[str, Any], asset_type: str) -> dict[str, Any]:
+ """
+ Create indicator field mapping based on asset_type
+
+ Args:
+ raw_indicator (Dict[str, Any]): raw indicator as JSON.
+ asset_type (str): indicator type
+
+ Returns:
+ Dict: dictionary of indicator field mappings.
+ """
+ # name is a required API return parameter
+ description = raw_indicator['name'] + " indicator of asset type " + asset_type + " from Cortex Xpanse"
+ indicator_fields = {"internal": True, "description": description}
+ if asset_type == 'Domain':
+ if domain_details := raw_indicator.get("domain_details"):
+ domain_fields_mapping: dict = {
+ "creationDate": "creationdate",
+ "registryExpiryDate": "expirationdate",
+ }
+
+ for key, mapped_key in domain_fields_mapping.items():
+ if detail_value := domain_details.get(key):
+ indicator_fields[mapped_key] = timestamp_to_datestring(detail_value)
+
+ elif asset_type == 'X509 Certificate' and (cert_details := raw_indicator.get("certificate_details")):
+ cert_fields_mapping: dict = {
+ "signatureAlgorithm": ("signaturealgorithm", None),
+ "serialNumber": ("serialnumber", None),
+ "validNotAfter": ("validitynotafter", timestamp_to_datestring),
+ "validNotBefore": ("validitynotbefore", timestamp_to_datestring),
+ "issuer": ("issuer", create_x509_certificate_grids),
+ "subject": ("subject", create_x509_certificate_grids),
+ }
+
+ for key, (mapped_key, processing_func) in cert_fields_mapping.items():
+ if detail_value := cert_details.get(key):
+ # Apply processing function if one is defined
+ if processing_func:
+ indicator_fields[mapped_key] = processing_func(detail_value)
+ else:
+ indicator_fields[mapped_key] = detail_value
+ return indicator_fields
+
+
+def map_indicator_type(asset_type: str) -> str:
+ """
+ Correlates asset_type to indicator type or returns "None"
+
+ Args:
+ asset_type (str): Xpanse asset type.
+
+ Returns:
+ str: indicator type or "None".
+ """
+ asset_types_mapping = {
+ 'UNASSOCIATED_RESPONSIVE_IP': 'IP',
+ "DOMAIN": 'Domain',
+ "CERTIFICATE": "X509 Certificate",
+ 'CIDR': 'CIDR'
+ }
+ return asset_types_mapping.get(asset_type, "None")
+
+
+def build_asset_indicators(client: Client, raw_indicators: list[dict[str, Any]]) -> list:
+ """
+ Builds indicators JSON data in XSOAR expected format from the raw response.
+
+ Args:
+ client (Client): Xpanse client.
+ raw_indicators (List[Dict[str, Any]]): raw indicators as JSON.
+
+ Returns:
+        List: list of indicators to be sent to XSOAR.
+ """
+ demisto.debug(f'Creating {len(raw_indicators)} asset indicators.')
+ indicators: list = []
+
+ for raw_indicator in raw_indicators:
+ asset_type = raw_indicator.get("asset_type", 'None')
+ indicator_type = map_indicator_type(asset_type)
+
+        # Skip IPv6-responsive assets and assets whose type has no indicator mapping
+ if raw_indicator.get("ipv6s") or indicator_type == 'None':
+ continue
+
+ # name is a required API return parameter
+ name = raw_indicator['name']
+ indicator_type = 'DomainGlob' if '*' in name and indicator_type == 'Domain' else indicator_type
+ fields = map_indicator_fields(raw_indicator, indicator_type)
+
+ # Add TLP color and feed tags if they exist
+ if client.tlp_color:
+ fields['trafficlightprotocol'] = client.tlp_color
+ if client.feed_tags:
+ fields['tags'] = client.feed_tags
+
+ indicator = {
+ 'value': name,
+ 'type': indicator_type,
+ 'fields': fields,
+ 'rawJSON': raw_indicator
+ }
+
+ indicators.append(indicator)
+
+ return indicators
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def test_module(client: Client): # pragma: no cover
+ """
+    Tests API connectivity and authentication.
+
+ Returning 'ok' indicates that the integration works like it is supposed to.
+ Connection to the service is successful.
+ Raises exceptions if something goes wrong.
+
+ :type client: ``Client``
+ :param Client: client to use
+
+ :return: 'ok' if test passed, anything else will fail the test.
+ :rtype: ``str``
+ """
+ client.list_asset_internet_exposure_request(search_to=1, use_paging=False)
+ return_results('ok')
+
+
+def fetch_indicators(client: Client, limit: Optional[int] = None,
+ asset_type: str = 'all') -> tuple | list:
+ """
+ Fetch indicators from Xpanse API and create indicators in XSOAR.
+
+ Args:
+ client (Client): Xpanse client.
+        limit (int): limit the number of indicators to return.
+ asset_type (str): which asset_types to pull from API.
+
+ Returns:
+        List: list of indicators to be sent to XSOAR.
+        List: raw response from API.
+ """
+ asset_list, asset_response = [], []
+ if asset_type == 'all':
+ asset_list = ["CERTIFICATE", "DOMAIN", "UNASSOCIATED_RESPONSIVE_IP"]
+ if 'domain' in asset_type:
+ asset_list.append("DOMAIN")
+ if 'certificate' in asset_type:
+ asset_list.append("CERTIFICATE")
+ if 'ipv4' in asset_type:
+ asset_list.append("UNASSOCIATED_RESPONSIVE_IP")
+ if limit:
+        # The endpoint returns one result fewer than search_to, so request limit + 1 to honor the requested limit.
+ asset_response = client.list_asset_internet_exposure_request(
+ search_params=[{"field": "type", "operator": "in", "value": asset_list}], search_to=limit + 1, use_paging=False)
+ else:
+ asset_response = client.list_asset_internet_exposure_request(
+ search_params=[{"field": "type", "operator": "in", "value": asset_list}])
+
+    asset_indicators = build_asset_indicators(client, asset_response)
+
+    return asset_indicators, asset_response
+
+
+''' MAIN FUNCTION '''
+
+
+def get_indicators(client: Client, args: dict[str, Any]) -> CommandResults:
+ """
+ Get indicators from Xpanse API, mainly for debug.
+
+ Args:
+ client (Client): Xpanse client.
+ args (dict): all command arguments, usually passed from ``demisto.args()``.
+
+ Returns:
+ CommandResults: A ``CommandResults`` object that is then passed to ``return_results``,
+ that contains Xpanse indicators.
+ """
+ hr_list = []
+
+ asset_type = ''
+ if argToBoolean(args.get('ip', 'yes')):
+ asset_type += 'ipv4'
+ if argToBoolean(args.get('domain', 'yes')):
+ asset_type += 'domain'
+ if argToBoolean(args.get('certificate', 'yes')):
+ asset_type += 'certificate'
+
+ limit = arg_to_number(args.get('limit', None))
+
+ if limit and limit <= 0:
+ raise ValueError('Limit must be a positive number.')
+ if limit and limit > DEFAULT_ASSET_SEARCH_LIMIT:
+ raise ValueError('Limit must be less that the API limit of ' + str(DEFAULT_ASSET_SEARCH_LIMIT) + '.')
+ if asset_type == '':
+ raise ValueError('need to specify at least one asset type')
+
+ indicators, raw_res = fetch_indicators(client=client, limit=limit, asset_type=asset_type)
+
+ indicators = indicators[:limit] if isinstance(indicators, list) \
+ else [indicators] if indicators else []
+ for record in indicators:
+ hr = {'Name': record.get('value'), 'Type': record.get('type'), 'Description': record['fields']['description']}
+ hr_list.append(hr)
+ return CommandResults(outputs=hr_list, outputs_prefix='ASM.Indicators', raw_response=raw_res,
+ readable_output=tableToMarkdown("Xpanse indicators", hr_list, headers=['Name', 'Type', 'Description']),
+ outputs_key_field='Name')
+
+
+def main() -> None: # pragma: no cover
+ """
+ main function
+ """
+ params = demisto.params()
+ base_url = params.get('url')
+ verify_certificate = not params.get('insecure', False)
+ proxy = params.get('proxy', False)
+ # Append default tags.
+ feed_tags = list(set(argToList(params.get('feedTags', []))) | DEFAULT_FEED_TAGS)
+ tlp_color = params.get('tlp_color', '')
+ creds = params.get('credentials', {})
+ api = creds.get('password', '')
+ add_sensitive_log_strs(api)
+ auth_id = creds.get('identifier', '')
+ headers = {
+ 'Authorization': f'{api}',
+ 'x-xdr-auth-id': f'{auth_id}',
+ 'Content-Type': 'application/json'
+ }
+ command = demisto.command()
+
+ demisto.info(f'Command being called is {command}')
+ try:
+ client = Client(
+ base_url=base_url,
+ verify=verify_certificate,
+ proxy=proxy,
+ feed_tags=feed_tags,
+ tlp_color=tlp_color,
+ headers=headers
+ )
+
+ if command == 'test-module':
+ test_module(client)
+ elif command == 'fetch-indicators':
+ indicators, _ = fetch_indicators(client)
+ for iter_ in batch(indicators, batch_size=2000):
+ try:
+ demisto.createIndicators(iter_)
+ except Exception:
+ # find problematic indicator
+ for indicator in iter_:
+ try:
+ demisto.createIndicators([indicator])
+ except Exception as err:
+ demisto.debug(f'createIndicators Error: failed to create the following indicator:'
+ f' {indicator}\n {err}')
+ raise
+ elif command == 'xpanse-get-indicators':
+ return_results(get_indicators(client, demisto.args()))
+ else:
+ raise NotImplementedError(f'Command "{command}" is not implemented.')
+
+ except Exception as e:
+ return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
+
+
+''' ENTRY POINT '''
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml
new file mode 100644
index 000000000000..748e74857dda
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse.yml
@@ -0,0 +1,153 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Xpanse Feed
+ version: -1
+sectionOrder:
+- Connect
+- Collect
+configuration:
+- additionalinfo: The web UI with `api-` appended to front (e.g., https://api-xsiam.paloaltonetworks.com). For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis.
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- additionalinfo: For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. Only standard API key type is supported.
+ display: API Key ID
+ displaypassword: API Key
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'true'
+ display: Fetch indicators
+ name: feed
+ type: 8
+ section: Collect
+ required: false
+- additionalinfo: Indicators from this integration instance will be marked with this reputation.
+ section: Collect
+ display: Indicator Reputation
+ name: feedReputation
+ defaultvalue: Good
+ options:
+ - None
+ - Good
+ - Suspicious
+ - Bad
+ type: 18
+ required: false
+- additionalinfo: Reliability of the source providing the intelligence data.
+ defaultvalue: A - Completely reliable
+ display: Source Reliability
+ name: feedReliability
+ options:
+ - A - Completely reliable
+ - B - Usually reliable
+ - C - Fairly reliable
+ - D - Not usually reliable
+ - E - Unreliable
+ - F - Reliability cannot be judged
+ required: true
+ type: 15
+ section: Collect
+- additionalinfo: The Traffic Light Protocol (TLP) designation to apply to indicators fetched from the feed.
+ defaultvalue: CLEAR
+ display: Traffic Light Protocol Color
+ name: tlp_color
+ options:
+ - RED
+ - AMBER+STRICT
+ - AMBER
+ - GREEN
+ - CLEAR
+ type: 15
+ section: Collect
+ required: false
+- defaultvalue: '20160'
+ display: ''
+ name: feedExpirationInterval
+ type: 1
+ required: false
+- additionalinfo: When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system.
+ defaultvalue: 'false'
+ display: Bypass exclusion list
+ name: feedBypassExclusionList
+ type: 8
+ section: Collect
+ advanced: true
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- additionalinfo: Supports CSV values.
+ display: Tags
+ name: feedTags
+ type: 0
+ section: Collect
+ advanced: true
+ required: false
+- name: feedExpirationPolicy
+ defaultvalue: suddenDeath
+ display: ''
+ type: 17
+ options:
+ - never
+ - interval
+ - indicatorType
+ - suddenDeath
+ section: Collect
+ required: false
+- defaultvalue: '1440'
+ name: feedFetchInterval
+ display: Feed Fetch Interval
+ type: 19
+ section: Collect
+ advanced: true
+ required: false
+description: Use this feed to retrieve the discovered IPs/Domains/Certificates from Cortex Xpanse asset database.
+display: 'Xpanse Feed'
+name: Xpanse Feed
+script:
+ commands:
+ - arguments:
+ - description: The maximum number of indicators to return.
+ name: limit
+ required: true
+ - defaultValue: yes
+ description: Retrieve discovered IPs.
+ name: ip
+ - defaultValue: yes
+ description: Retrieve discovered domains.
+ name: domain
+ - defaultValue: yes
+ description: Retrieve discovered certificates.
+ name: certificate
+ description: Retrieves a limited number of indicators.
+ name: xpanse-get-indicators
+ outputs:
+ - contextPath: ASM.Indicators.Name
+ description: The name of the indicator.
+ type: String
+ - contextPath: ASM.Indicators.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ASM.Indicators.Type
+ description: The type of the indicator.
+ type: String
+ feed: true
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.96411
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md
new file mode 100644
index 000000000000..52ba1c5c08cb
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_description.md
@@ -0,0 +1,13 @@
+## Configure Cortex Xpanse
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Xpanse Feed.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Server URL | The web UI with \`api-\` appended to front (e.g., https://api-xsiam.paloaltonetworks.com). For more information, see [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). | True |
+ | API Key ID | See [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). | True |
+ | API Key | See [get-started-with-cortex-xdr-apis](https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis). **Only standard API key type is supported**. | True |
+
+4. Click **Test** to validate the URLs, token, and connection.
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_image.png b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_image.png
new file mode 100644
index 000000000000..249fc6f403d6
Binary files /dev/null and b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_image.png differ
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py
new file mode 100644
index 000000000000..4adef942e144
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/FeedXpanse_test.py
@@ -0,0 +1,209 @@
+"""
+Tests module for Xpanse Feed integration.
+"""
+
+# Client for multiple tests
+from FeedXpanse import Client
+client = Client(
+ base_url='https://test.com', tlp_color="GREEN",
+ verify=True, feed_tags=["test_tag"],
+ headers={
+ "HOST": "test.com",
+ "Authorizatio": "THISISAFAKEKEY",
+ "Content-Type": "application/json"
+ },
+ proxy=False)
+
+
+def test_map_indicator_type():
+ """Tests map_indicator_type helper function.
+
+ Given:
+ - Indicator type input
+ When:
+ - Getting output from map_indicator_type helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import map_indicator_type
+ # Known asset types map to their XSOAR indicator type.
+ assert map_indicator_type('UNASSOCIATED_RESPONSIVE_IP') == 'IP'
+ assert map_indicator_type('DOMAIN') == 'Domain'
+ assert map_indicator_type('CERTIFICATE') == 'X509 Certificate'
+ assert map_indicator_type('CIDR') == 'CIDR'
+ # An unknown type falls back to the string 'None'.
+ assert map_indicator_type('UNKNOWN_TYPE') == 'None'
+ # An empty string is also treated as unknown.
+ assert map_indicator_type('') == 'None'
+ # The mapping is case-sensitive, so lowercase input is unknown.
+ assert map_indicator_type('domain') == 'None'
+
+
+def test_create_x509_certificate_grids():
+ """Tests create_x509_certificate_grids helper function.
+
+ Given:
+ - A certificate issuer/subject string of comma-separated key=value pairs
+ When:
+ - Getting output from create_x509_certificate_grids helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import create_x509_certificate_grids
+ # A valid string is split into one {"title", "data"} grid row per pair.
+ input_str = "C=ZA,ST=Western Cape,L=Cape Town,O=Thawte"
+ expected_output = [
+ {"title": "C", "data": "ZA"},
+ {"title": "ST", "data": "Western Cape"},
+ {"title": "L", "data": "Cape Town"},
+ {"title": "O", "data": "Thawte"}
+ ]
+ assert create_x509_certificate_grids(input_str) == expected_output
+
+ # None input yields an empty grid.
+ assert create_x509_certificate_grids(None) == []
+
+ # An empty string also yields an empty grid.
+ assert create_x509_certificate_grids('') == []
+
+
+def test_map_indicator_fields():
+ """Tests map_indicator_fields helper function.
+
+ Given:
+ - A raw indicator dict and its mapped asset type
+ When:
+ - Getting output from map_indicator_fields helper function
+ Then:
+ - Checks the output of the helper function with the expected output.
+ """
+ from FeedXpanse import map_indicator_fields
+ # Domain asset: creation/expiration epochs are mapped to date fields.
+ # NOTE(review): 1609459200 produces a 1970 timestamp, i.e. the value is
+ # interpreted as epoch milliseconds, not seconds — confirm this is intended.
+ raw_indicator = {
+ "name": "example.com",
+ "domain_details": {
+ "creationDate": 1609459200,
+ "registryExpiryDate": 1609459200,
+ }
+ }
+ asset_type = 'Domain'
+ expected_output = {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "creationdate": '1970-01-19T15:04:19.000Z',
+ "expirationdate": '1970-01-19T15:04:19.000Z'
+ }
+ assert map_indicator_fields(raw_indicator, asset_type) == expected_output
+
+ # X509 certificate asset: signature/serial are copied, validity epochs are
+ # converted to dates, and issuer/subject strings become grid rows.
+ raw_indicator = {
+ "name": "certificate",
+ "certificate_details": {
+ "signatureAlgorithm": "SHA256WithRSAEncryption",
+ "serialNumber": "1234567890",
+ "validNotAfter": 1609459200,
+ "validNotBefore": 1609459200,
+ "issuer": "C=US,ST=California",
+ "subject": "C=US,ST=California",
+ }
+ }
+ asset_type = 'X509 Certificate'
+ expected_output = {
+ "internal": True,
+ "description": "certificate indicator of asset type X509 Certificate from Cortex Xpanse",
+ "signaturealgorithm": "SHA256WithRSAEncryption",
+ "serialnumber": "1234567890",
+ "validitynotafter": "1970-01-19T15:04:19.000Z",
+ "validitynotbefore": "1970-01-19T15:04:19.000Z",
+ "issuer": [{"title": "C", "data": "US"}, {"title": "ST", "data": "California"}],
+ "subject": [{"title": "C", "data": "US"}, {"title": "ST", "data": "California"}]
+ }
+ assert map_indicator_fields(raw_indicator, asset_type) == expected_output
+
+
+def test_build_asset_indicators():
+ """Tests build_asset_indicators helper function.
+
+ Given:
+ - A list of raw asset dicts of mixed types
+ When:
+ - Getting output from build_asset_indicators helper function
+ Then:
+ - Checks the output of the helper function with the expected output:
+ IPv6 domains and unmappable asset types are skipped, wildcard domains
+ become DomainGlob, and the client's TLP color and feed tags are applied.
+ """
+ from FeedXpanse import build_asset_indicators
+ raw_indicators = [
+ {"name": "example.com", "asset_type": "DOMAIN"},
+ {"name": "example.net", "asset_type": "DOMAIN", "ipv6s": ["::1"]},  # This should be skipped
+ {"name": "*.example.org", "asset_type": "DOMAIN"},  # This should become a DomainGlob
+ {"name": "nonexistent", "asset_type": "CLOUD_SERVER"},  # This should be skipped
+ ]
+ expected_output = [
+ {
+ 'value': "example.com",
+ 'type': "Domain",
+ 'fields': {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"]
+ },
+ 'rawJSON': {"name": "example.com", "asset_type": "DOMAIN"}
+ },
+ {
+ 'value': "*.example.org",
+ 'type': "DomainGlob",
+ 'fields': {
+ "internal": True,
+ "description": "*.example.org indicator of asset type DomainGlob from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"]
+ },
+ 'rawJSON': {"name": "*.example.org", "asset_type": "DOMAIN"}
+ }
+ ]
+ assert build_asset_indicators(client, raw_indicators) == expected_output
+
+
+def test_fetch_indicators(mocker):
+ """Tests fetch_indicators command function.
+
+ Given:
+ - A mocked list_asset_internet_exposure_request API response, loaded
+ from the local test_data.raw_response module.
+ When:
+ - Getting output from fetch_indicators command function
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from FeedXpanse import fetch_indicators
+ from test_data.raw_response import EXTERNAL_EXPOSURES_RESPONSE
+ mocker.patch.object(client, 'list_asset_internet_exposure_request', return_value=EXTERNAL_EXPOSURES_RESPONSE)
+ indicators, _ = fetch_indicators(client, limit=1, asset_type='domain')
+ # The first indicator's fields should carry the client's TLP and tags.
+ expected_indicators_fields = {
+ "internal": True,
+ "description": "example.com indicator of asset type Domain from Cortex Xpanse",
+ "trafficlightprotocol": "GREEN",
+ "tags": ["test_tag"],
+ }
+ assert indicators[0]['fields'] == expected_indicators_fields
+
+
+def test_get_indicators(mocker):
+ """Tests get_indicators command function.
+
+ Given:
+ - A mocked list_asset_internet_exposure_request API response, loaded
+ from the local test_data.raw_response module.
+ When:
+ - Getting output from get_indicators command function
+ Then:
+ - Checks the output of the command function with the expected output.
+ """
+ from FeedXpanse import get_indicators
+ from test_data.raw_response import EXTERNAL_EXPOSURES_RESPONSE
+ mocker.patch.object(client, 'list_asset_internet_exposure_request', return_value=EXTERNAL_EXPOSURES_RESPONSE)
+ # Only domains requested; IP and certificate retrieval disabled.
+ args = {"limit": "1", 'domain': "yes", "certificate": "no", "ipv4": "no"}
+ response = get_indicators(client, args)
+ assert response.outputs[0]['Type'] == 'Domain'
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/README.md b/Packs/CortexXpanse/Integrations/FeedXpanse/README.md
new file mode 100644
index 000000000000..ed6a5430e5a2
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/README.md
@@ -0,0 +1,81 @@
+Use this feed to retrieve the discovered IPs/Domains/Certificates from Cortex Xpanse asset database.
+This integration was integrated and tested with version 2.5 of Cortex Xpanse.
+
+## Configure Xpanse Feed on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Xpanse Feed.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Server URL | The web UI with \`api-\` appended to front \(e.g., https://api-xsiam.paloaltonetworks.com\). For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. | True |
+ | API Key ID | For more information, see https://docs.paloaltonetworks.com/cortex/cortex-xdr/cortex-xdr-api/cortex-xdr-api-overview/get-started-with-cortex-xdr-apis. Only standard API key type is supported. | True |
+ | API Key | | True |
+ | Fetch indicators | | False |
+ | Indicator Reputation | Indicators from this integration instance will be marked with this reputation. | False |
+ | Source Reliability | Reliability of the source providing the intelligence data. | True |
+ | Traffic Light Protocol Color | The Traffic Light Protocol \(TLP\) designation to apply to indicators fetched from the feed. | False |
+ | | | False |
+ | Bypass exclusion list | When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system. | False |
+ | Use system proxy settings | | False |
+ | Trust any certificate (not secure) | | False |
+ | Tags | Supports CSV values. | False |
+ | | | False |
+ | Feed Fetch Interval | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### xpanse-get-indicators
+
+***
+Retrieves a limited number of indicators.
+
+#### Base Command
+
+`xpanse-get-indicators`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| limit | The maximum number of indicators to return. | Required |
+| ip | Retrieve discovered IPs. Default is yes. | Optional |
+| domain | Retrieve discovered domains. Default is yes. | Optional |
+| certificate | Retrieve discovered certificates. Default is yes. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| ASM.Indicators.Name | String | The name of the indicator. |
+| ASM.Indicators.Description | String | The description of the indicator. |
+| ASM.Indicators.Type | String | The type of the indicator. |
+
+#### Command example
+```!xpanse-get-indicators limit=1 ip=yes certificate=no domain=no```
+#### Context Example
+```json
+{
+ "ASM": {
+ "Indicators": {
+ "Description": "1.1.1.1 indicator of asset type IP from Cortex Xpanse",
+ "Name": "1.1.1.1",
+ "Type": "IP"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Xpanse indicators
+>|Name|Type|Description|
+>|---|---|---|
+>| 1.1.1.1 | IP | 1.1.1.1 indicator of asset type IP from Cortex Xpanse |
+
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples b/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples
new file mode 100644
index 000000000000..515cdf78bda3
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/command_examples
@@ -0,0 +1 @@
+!xpanse-get-indicators limit=1 ip=yes certificate=no domain=no
\ No newline at end of file
diff --git a/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py b/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py
new file mode 100644
index 000000000000..9273fc414136
--- /dev/null
+++ b/Packs/CortexXpanse/Integrations/FeedXpanse/test_data/raw_response.py
@@ -0,0 +1,28 @@
+# adding so null don't get seen as variable.
+null = None
+false = False
+true = True
+
+EXTERNAL_EXPOSURES_RESPONSE = [
+ {
+ "asm_ids": [
+ "1111-1111-1111-1111"
+ ],
+ "name": "example.com",
+ "asset_type": "DOMAIN",
+ },
+ {
+ "asm_ids": [
+ "2222-2222-2222-2222"
+ ],
+ "name": "192.168.1.1",
+ "asset_type": "UNASSOCIATED_RESPONSIVE_IP",
+ },
+ {
+ "asm_ids": [
+ "3333-3333-3333-3333"
+ ],
+ "name": "192.168.1.2",
+ "asset_type": "UNASSOCIATED_RESPONSIVE_IP",
+ },
+]
diff --git a/Packs/CortexXpanse/Layouts/layoutscontainer-Xpanse_Alert_Layout.json b/Packs/CortexXpanse/Layouts/layoutscontainer-Xpanse_Alert_Layout.json
index 79c128739e28..6f52545d1a8e 100644
--- a/Packs/CortexXpanse/Layouts/layoutscontainer-Xpanse_Alert_Layout.json
+++ b/Packs/CortexXpanse/Layouts/layoutscontainer-Xpanse_Alert_Layout.json
@@ -7,482 +7,484 @@
"type": "summary"
},
{
- "id": "caseinfoid",
- "name": "Incident Info",
- "sections": [
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-fce71720-98b0-11e9-97d7-ed26ef9e46c8",
- "isVisible": true,
- "items": [
- {
- "endCol": 2,
- "fieldId": "type",
- "height": 22,
- "id": "incident-type-field",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "severity",
- "height": 22,
- "id": "incident-severity-field",
- "index": 1,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "owner",
- "height": 22,
- "id": "incident-owner-field",
- "index": 2,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "sourcebrand",
- "height": 22,
- "id": "incident-sourceBrand-field",
- "index": 3,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "sourceinstance",
- "height": 22,
- "id": "incident-sourceInstance-field",
- "index": 4,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "playbookid",
- "height": 22,
- "id": "incident-playbookId-field",
- "index": 5,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "moved": false,
- "name": "Case Details",
- "static": false,
- "w": 1,
- "x": 0,
- "y": 0
- },
- {
- "h": 2,
- "i": "caseinfoid-61263cc0-98b1-11e9-97d7-ed26ef9e46c8",
- "maxW": 3,
- "moved": false,
- "name": "Notes",
- "static": false,
- "type": "notes",
- "w": 1,
- "x": 2,
- "y": 6
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-6aabad20-98b1-11e9-97d7-ed26ef9e46c8",
- "maxW": 3,
- "moved": false,
- "name": "Work Plan",
- "static": false,
- "type": "workplan",
- "w": 1,
- "x": 0,
- "y": 5
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-770ec200-98b1-11e9-97d7-ed26ef9e46c8",
- "isVisible": true,
- "maxW": 3,
- "moved": false,
- "name": "Linked Incidents",
- "static": false,
- "type": "linkedIncidents",
- "w": 1,
- "x": 1,
- "y": 4
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-842632c0-98b1-11e9-97d7-ed26ef9e46c8",
- "maxW": 3,
- "moved": false,
- "name": "Child Incidents",
- "static": false,
- "type": "childInv",
- "w": 1,
- "x": 0,
- "y": 9
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-4a31afa0-98ba-11e9-a519-93a53c759fe0",
- "maxW": 3,
- "moved": false,
- "name": "Evidence",
- "static": false,
- "type": "evidence",
- "w": 1,
- "x": 0,
- "y": 7
- },
- {
- "displayType": "ROW",
- "h": 2,
- "hideName": false,
- "i": "caseinfoid-7717e580-9bed-11e9-9a3f-8b4b2158e260",
- "maxW": 3,
- "moved": false,
- "name": "Team Members",
- "static": false,
- "type": "team",
- "w": 1,
- "x": 2,
- "y": 8
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-7ce69dd0-a07f-11e9-936c-5395a1acf11e",
- "maxW": 3,
- "moved": false,
- "name": "Indicators",
- "query": "",
- "queryType": "input",
- "static": false,
- "type": "indicators",
- "w": 2,
- "x": 1,
- "y": 2
- },
- {
- "displayType": "CARD",
- "h": 2,
- "i": "caseinfoid-ac32f620-a0b0-11e9-b27f-13ae1773d289",
- "items": [
- {
- "endCol": 1,
- "fieldId": "occurred",
- "height": 22,
- "id": "incident-occurred-field",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 1,
- "fieldId": "dbotmodified",
- "height": 22,
- "id": "incident-modified-field",
- "index": 1,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "dbotduedate",
- "height": 22,
- "id": "incident-dueDate-field",
- "index": 2,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "dbotcreated",
- "height": 22,
- "id": "incident-created-field",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 1
- },
- {
- "endCol": 2,
- "fieldId": "dbotclosed",
- "height": 22,
- "id": "incident-closed-field",
- "index": 1,
- "sectionItemType": "field",
- "startCol": 1
- }
- ],
- "maxW": 3,
- "moved": false,
- "name": "Timeline Information",
- "static": false,
- "w": 1,
- "x": 2,
- "y": 4
- },
- {
- "displayType": "ROW",
- "h": 2,
- "i": "caseinfoid-88e6bf70-a0b1-11e9-b27f-13ae1773d289",
- "isVisible": true,
- "items": [
- {
- "endCol": 2,
- "fieldId": "dbotclosed",
- "height": 22,
- "id": "incident-dbotClosed-field",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "closereason",
- "height": 22,
- "id": "incident-closeReason-field",
- "index": 1,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "closenotes",
- "height": 22,
- "id": "incident-closeNotes-field",
- "index": 2,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "moved": false,
- "name": "Closing Information",
- "static": false,
- "w": 1,
- "x": 1,
- "y": 8
- },
- {
- "displayType": "CARD",
- "h": 2,
- "i": "caseinfoid-e54b1770-a0b1-11e9-b27f-13ae1773d289",
- "isVisible": true,
- "items": [
- {
- "endCol": 2,
- "fieldId": "details",
- "height": 22,
- "id": "incident-details-field",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "moved": false,
- "name": "Investigation Data",
- "static": false,
- "w": 1,
- "x": 1,
- "y": 6
- },
- {
- "displayType": "ROW",
- "h": 3,
- "hideName": false,
- "i": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
- "items": [
- {
- "dropEffect": "move",
- "endCol": 2,
- "fieldId": "xpanseprogressstatus",
- "height": 22,
- "id": "5a6b69e0-b447-11ed-9fd0-85eb99e8103b",
- "index": 0,
- "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpansealertid",
- "height": 22,
- "id": "41ea5f70-b447-11ed-9fd0-85eb99e8103b",
- "index": 1,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "dropEffect": "move",
- "endCol": 2,
- "fieldId": "xpanseexternalid",
- "height": 22,
- "id": "50df69d0-b447-11ed-9fd0-85eb99e8103b",
- "index": 2,
- "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "dropEffect": "move",
- "endCol": 2,
- "fieldId": "xpanseassetids",
- "height": 22,
- "id": "10e43460-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 3,
- "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpanseserviceid",
- "height": 22,
- "id": "17f151c0-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 4,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpanseip",
- "height": 22,
- "id": "446375c0-b447-11ed-9fd0-85eb99e8103b",
- "index": 5,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpanseport",
- "height": 22,
- "id": "46dc16e0-b447-11ed-9fd0-85eb99e8103b",
- "index": 6,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpanseprotocol",
- "height": 22,
- "id": "25fa7580-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 7,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpansebusinessunits",
- "height": 22,
- "id": "896d87f0-2675-11ee-9bd7-d3698cbd65c3",
- "index": 8,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpansehostname",
- "height": 22,
- "id": "4a073f20-b447-11ed-9fd0-85eb99e8103b",
- "index": 9,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpansecountrycode",
- "height": 22,
- "id": "2a83a720-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 10,
- "sectionItemType": "field",
- "startCol": 0
- },
- {
- "endCol": 2,
- "fieldId": "xpansetags",
- "height": 22,
- "id": "5c16a8e0-b447-11ed-9fd0-85eb99e8103b",
- "index": 11,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "minH": 1,
- "moved": false,
- "name": "Xpanse Alert Details",
- "static": false,
- "w": 1,
- "x": 0,
- "y": 2
- },
- {
- "displayType": "ROW",
- "h": 2,
- "hideItemTitleOnlyOne": true,
- "hideName": false,
- "i": "caseinfoid-ef2d3920-1458-11ee-b6c6-ab5ba5fed5f6",
- "items": [
- {
- "endCol": 2,
- "fieldId": "xpanseremediationguidance",
- "height": 22,
- "id": "0803b690-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "minH": 1,
- "moved": false,
- "name": "Remediation Guidance",
- "static": false,
- "w": 1,
- "x": 2,
- "y": 0
- },
- {
- "displayType": "ROW",
- "h": 2,
- "hideItemTitleOnlyOne": true,
- "hideName": false,
- "i": "caseinfoid-7b0787c0-1459-11ee-b6c6-ab5ba5fed5f6",
- "items": [
- {
- "endCol": 2,
- "fieldId": "xpansedescription",
- "height": 22,
- "id": "9cfff8d0-1459-11ee-b6c6-ab5ba5fed5f6",
- "index": 0,
- "sectionItemType": "field",
- "startCol": 0
- }
- ],
- "maxW": 3,
- "minH": 1,
- "moved": false,
- "name": "Alert Details",
- "static": false,
- "w": 1,
- "x": 1,
- "y": 0
- }
- ],
- "type": "custom"
- },
+ "id": "caseinfoid",
+ "name": "Incident Info",
+ "sections": [
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-fce71720-98b0-11e9-97d7-ed26ef9e46c8",
+ "isVisible": true,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "type",
+ "height": 22,
+ "id": "incident-type-field",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0,
+ "dropEffect": "move",
+ "listId": "caseinfoid-fce71720-98b0-11e9-97d7-ed26ef9e46c8"
+ },
+ {
+ "endCol": 2,
+ "fieldId": "severity",
+ "height": 22,
+ "id": "incident-severity-field",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "owner",
+ "height": 22,
+ "id": "incident-owner-field",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "sourcebrand",
+ "height": 22,
+ "id": "incident-sourceBrand-field",
+ "index": 3,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "sourceinstance",
+ "height": 22,
+ "id": "incident-sourceInstance-field",
+ "index": 4,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "playbookid",
+ "height": 22,
+ "id": "incident-playbookId-field",
+ "index": 5,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "occurred",
+ "height": 22,
+ "id": "incident-occurred-field",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "dbotcreated",
+ "height": 22,
+ "id": "incident-created-field",
+ "index": 7,
+ "listId": "caseinfoid-fce71720-98b0-11e9-97d7-ed26ef9e46c8",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "dbotmodified",
+ "height": 22,
+ "id": "incident-modified-field",
+ "index": 8,
+ "listId": "caseinfoid-ac32f620-a0b0-11e9-b27f-13ae1773d289",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "dbotclosed",
+ "height": 22,
+ "id": "incident-closed-field",
+ "index": 9,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "closereason",
+ "height": 22,
+ "id": "incident-closeReason-field",
+ "index": 10,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "closenotes",
+ "height": 44,
+ "id": "incident-closeNotes-field",
+ "index": 11,
+ "listId": "caseinfoid-88e6bf70-a0b1-11e9-b27f-13ae1773d289",
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "moved": false,
+ "name": "Case Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 0
+ },
+ {
+ "h": 2,
+ "i": "caseinfoid-61263cc0-98b1-11e9-97d7-ed26ef9e46c8",
+ "maxW": 3,
+ "moved": false,
+ "name": "Notes",
+ "static": false,
+ "type": "notes",
+ "w": 1,
+ "x": 0,
+ "y": 5
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-6aabad20-98b1-11e9-97d7-ed26ef9e46c8",
+ "maxW": 3,
+ "moved": false,
+ "name": "Work Plan",
+ "static": false,
+ "type": "workplan",
+ "w": 1,
+ "x": 2,
+ "y": 0
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-7717e580-9bed-11e9-9a3f-8b4b2158e260",
+ "maxW": 3,
+ "moved": false,
+ "name": "Team Members",
+ "static": false,
+ "type": "team",
+ "w": 1,
+ "x": 0,
+ "y": 7,
+ "hideName": false
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-7ce69dd0-a07f-11e9-936c-5395a1acf11e",
+ "maxW": 3,
+ "moved": false,
+ "name": "Indicators",
+ "static": false,
+ "type": "indicators",
+ "w": 2,
+ "x": 1,
+ "y": 11,
+ "query": "",
+ "queryType": "input"
+ },
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "i": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
+ "maxW": 3,
+ "moved": false,
+ "name": "Xpanse Alert Details",
+ "static": false,
+ "w": 1,
+ "x": 0,
+ "y": 2,
+ "hideName": false,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "xpanseattacksurfacerulename",
+ "height": 22,
+ "id": "031ed8a0-0ed9-11ef-a7ed-df76d588cdce",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "xpanseprogressstatus",
+ "height": 22,
+ "id": "5a6b69e0-b447-11ed-9fd0-85eb99e8103b",
+ "index": 1,
+ "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpansealertid",
+ "height": 22,
+ "id": "41ea5f70-b447-11ed-9fd0-85eb99e8103b",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "xpanseexternalid",
+ "height": 22,
+ "id": "50df69d0-b447-11ed-9fd0-85eb99e8103b",
+ "index": 4,
+ "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "xpanseassetids",
+ "height": 22,
+ "id": "10e43460-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 5,
+ "listId": "caseinfoid-227c69d0-b447-11ed-9fd0-85eb99e8103b",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpanseserviceid",
+ "height": 22,
+ "id": "17f151c0-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 6,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpanseip",
+ "height": 22,
+ "id": "446375c0-b447-11ed-9fd0-85eb99e8103b",
+ "index": 7,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpanseport",
+ "height": 22,
+ "id": "46dc16e0-b447-11ed-9fd0-85eb99e8103b",
+ "index": 8,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpanseprotocol",
+ "height": 22,
+ "id": "25fa7580-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 9,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpansebusinessunits",
+ "height": 22,
+ "id": "896d87f0-2675-11ee-9bd7-d3698cbd65c3",
+ "index": 10,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "expanseprovider",
+ "height": 22,
+ "id": "ff30fca0-0ed8-11ef-a7ed-df76d588cdce",
+ "index": 11,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpansehostname",
+ "height": 22,
+ "id": "4a073f20-b447-11ed-9fd0-85eb99e8103b",
+ "index": 12,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpansecountrycode",
+ "height": 22,
+ "id": "2a83a720-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 13,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "endCol": 2,
+ "fieldId": "xpansetags",
+ "height": 22,
+ "id": "5c16a8e0-b447-11ed-9fd0-85eb99e8103b",
+ "index": 14,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "minH": 1
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "hideName": false,
+ "i": "caseinfoid-ef2d3920-1458-11ee-b6c6-ab5ba5fed5f6",
+ "maxW": 3,
+ "moved": false,
+ "name": "Remediation Guidance",
+ "static": false,
+ "w": 2,
+ "x": 1,
+ "y": 2,
+ "hideItemTitleOnlyOne": true,
+ "items": [
+ {
+ "endCol": 4,
+ "fieldId": "xpanseremediationguidance",
+ "height": 106,
+ "id": "0803b690-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "minH": 1
+ },
+ {
+ "displayType": "ROW",
+ "h": 2,
+ "i": "caseinfoid-7b0787c0-1459-11ee-b6c6-ab5ba5fed5f6",
+ "maxW": 3,
+ "moved": false,
+ "name": "Alert Details",
+ "static": false,
+ "w": 1,
+ "x": 1,
+ "y": 0,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "items": [
+ {
+ "endCol": 2,
+ "fieldId": "xpansedescription",
+ "height": 22,
+ "id": "9cfff8d0-1459-11ee-b6c6-ab5ba5fed5f6",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "minH": 1
+ },
+ {
+ "displayType": "ROW",
+ "h": 3,
+ "i": "caseinfoid-ad139040-0ed8-11ef-a7ed-df76d588cdce",
+ "items": [
+ {
+ "endCol": 4,
+ "fieldId": "xpanseservicedetails",
+ "height": 106,
+ "id": "d1468800-0ed8-11ef-a7ed-df76d588cdce",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0,
+ "dropEffect": "move",
+ "listId": "caseinfoid-ad139040-0ed8-11ef-a7ed-df76d588cdce"
+ }
+ ],
+ "maxW": 3,
+ "moved": false,
+ "name": "Service Details",
+ "static": false,
+ "w": 2,
+ "x": 1,
+ "y": 4,
+ "hideItemTitleOnlyOne": true,
+ "hideName": false,
+ "minH": 1
+ },
+ {
+ "displayType": "ROW",
+ "h": 4,
+ "i": "caseinfoid-b8ee8960-0ed8-11ef-a7ed-df76d588cdce",
+ "items": [
+ {
+ "endCol": 4,
+ "fieldId": "xpanseresponsiveipasset",
+ "height": 106,
+ "id": "1f9b3c80-115e-11ef-ab77-e15edc2e0527",
+ "index": 0,
+ "sectionItemType": "field",
+ "startCol": 0,
+ "dropEffect": "move",
+ "listId": "caseinfoid-b8ee8960-0ed8-11ef-a7ed-df76d588cdce"
+ },
+ {
+ "endCol": 4,
+ "fieldId": "xpansecertificateasset",
+ "height": 106,
+ "id": "19698b00-115e-11ef-ab77-e15edc2e0527",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0,
+ "dropEffect": "move",
+ "listId": "caseinfoid-b8ee8960-0ed8-11ef-a7ed-df76d588cdce"
+ },
+ {
+ "endCol": 4,
+ "fieldId": "xpansedomainasset",
+ "height": 106,
+ "id": "26cf53a0-12ef-11ef-8bc1-834c92adb1be",
+ "index": 2,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
+ {
+ "dropEffect": "move",
+ "endCol": 4,
+ "fieldId": "xpansecloudasset",
+ "height": 106,
+ "id": "2d281520-12ef-11ef-8bc1-834c92adb1be",
+ "index": 3,
+ "listId": "caseinfoid-b8ee8960-0ed8-11ef-a7ed-df76d588cdce",
+ "sectionItemType": "field",
+ "startCol": 0
+ }
+ ],
+ "maxW": 3,
+ "moved": false,
+ "name": "Asset Details",
+ "static": false,
+ "w": 2,
+ "x": 1,
+ "y": 7,
+ "hideItemTitleOnlyOne": false,
+ "hideName": false,
+ "minH": 1
+ }
+ ],
+ "type": "custom"
+ },
{
"id": "warRoom",
"name": "War Room",
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
index 116666b18d42..12a67501c63d 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Handler.yml
@@ -126,11 +126,6 @@ tasks:
nexttasks:
'#none#':
- "4"
- scriptarguments:
- RemoteIP:
- complex:
- root: incident
- accessor: xpanseip
separatecontext: true
continueonerrortype: ""
loop:
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
index 10afae3d70bd..0d98cb9f69f2 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment.yml
@@ -26,7 +26,7 @@ tasks:
{
"position": {
"x": 460,
- "y": -180
+ "y": -400
}
}
note: false
@@ -38,13 +38,13 @@ tasks:
isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: b2ac05c6-d58d-41ca-89af-f53212b9609f
+ taskid: f58f0b25-ae83-4196-8202-f58c135ebf7f
type: condition
task:
- id: b2ac05c6-d58d-41ca-89af-f53212b9609f
+ id: f58f0b25-ae83-4196-8202-f58c135ebf7f
version: -1
- name: Is Cortex Xpanse enabled and IP present?
- description: Determines if the "Cortex Xpanse" integration instance is configured and an IP is present to continue with cloud enrichment.
+ name: Is Cortex Xpanse enabled?
+ description: Determines if the "Cortex Xpanse" integration instance is configured to continue with enrichment.
type: condition
iscommand: false
brand: ""
@@ -52,7 +52,8 @@ tasks:
'#default#':
- "38"
"yes":
- - "88"
+ - "128"
+ - "129"
separatecontext: false
conditions:
- label: "yes"
@@ -82,18 +83,12 @@ tasks:
iscontext: true
right:
value: {}
- - - operator: isNotEmpty
- left:
- value:
- complex:
- root: inputs.RemoteIP
- iscontext: true
continueonerrortype: ""
view: |-
{
"position": {
"x": 460,
- "y": -40
+ "y": -270
}
}
note: false
@@ -133,32 +128,31 @@ tasks:
isautoswitchedtoquietmode: false
"88":
id: "88"
- taskid: 5e18d026-6149-4f5c-858f-c7995a5b3550
+ taskid: 41e5b734-11c8-45d6-83cd-dd04e1f3980b
type: regular
task:
- id: 5e18d026-6149-4f5c-858f-c7995a5b3550
+ id: 41e5b734-11c8-45d6-83cd-dd04e1f3980b
version: -1
name: Search external service information
- description: Get a list of all your external services filtered by the remote IP of the alert.
- script: '|||asm-list-external-service'
+ description: Get service details according to the service ID.
+ script: 'Cortex Xpanse|||asm-get-external-service'
type: regular
iscommand: true
- brand: ""
+ brand: "Cortex Xpanse"
nexttasks:
'#none#':
- "90"
scriptarguments:
- ip_address:
+ service_id:
complex:
- root: inputs.RemoteIP
- is_active:
- simple: "yes"
+ accessor: xpanseserviceid
+ root: incident
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 800,
+ "x": 190,
"y": 170
}
}
@@ -171,33 +165,48 @@ tasks:
isautoswitchedtoquietmode: false
"89":
id: "89"
- taskid: 9fb19614-5316-43d9-8c67-d8c31eed003c
+ taskid: 0a3ff6f7-216d-4055-8169-e9e9c862f72a
type: regular
task:
- id: 9fb19614-5316-43d9-8c67-d8c31eed003c
+ id: 0a3ff6f7-216d-4055-8169-e9e9c862f72a
version: -1
name: Search related Xpanse assets
- description: Get a list of all your internet exposures filtered by IP address, domain, type, and/or if there is an active external service. Maximum result limit is 100 assets.
- script: Cortex Xpanse|||asm-list-asset-internet-exposure
+ description: Get internet exposure asset details according to the asset ID.
+ script: Cortex Xpanse|||asm-get-asset-internet-exposure
type: regular
iscommand: true
brand: Cortex Xpanse
nexttasks:
'#none#':
- - "38"
+ - "97"
scriptarguments:
- has_active_external_services:
- simple: "yes"
- ip_address:
+ asm_id:
complex:
- root: inputs.RemoteIP
+ accessor: xpanseassetids
+ root: incident
+ transformers:
+ - args:
+ chars:
+ value:
+ simple: '[]'
+ operator: StripChars
+ - args:
+ delimiter:
+ value:
+ simple: ','
+ operator: split
+ - args:
+ chars:
+ value:
+ simple: '"'
+ operator: StripChars
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 740,
- "y": 1610
+ "x": 1550,
+ "y": 150
}
}
note: false
@@ -209,21 +218,22 @@ tasks:
isautoswitchedtoquietmode: false
"90":
id: "90"
- taskid: e1b53bce-b5bd-46a0-84e5-69bb5f7192d6
+ taskid: 8758da78-9cc6-4587-8d75-422327f2f2bb
type: condition
task:
- id: e1b53bce-b5bd-46a0-84e5-69bb5f7192d6
+ id: 8758da78-9cc6-4587-8d75-422327f2f2bb
version: -1
- name: Services exist?
+ name: Service exists?
description: Determines if service information was passed back from the last command.
type: condition
iscommand: false
brand: ""
nexttasks:
'#default#':
- - "89"
+ - "38"
"yes":
- - "91"
+ - "112"
+ - "98"
separatecontext: false
conditions:
- label: "yes"
@@ -241,8 +251,8 @@ tasks:
view: |-
{
"position": {
- "x": 800,
- "y": 360
+ "x": 190,
+ "y": 330
}
}
note: false
@@ -252,44 +262,30 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "91":
- id: "91"
- taskid: 9fecdfbf-97b1-4af0-81bf-e819be660a24
- type: regular
+ "97":
+ id: "97"
+ taskid: 60961560-078f-44c7-8336-0418d94977cc
+ type: condition
task:
- id: 9fecdfbf-97b1-4af0-81bf-e819be660a24
+ id: 60961560-078f-44c7-8336-0418d94977cc
version: -1
- name: Set Xpanse service ID
- description: commands.local.cmd.set.incident
- script: Builtin|||setIncident
- type: regular
- iscommand: true
- brand: Builtin
+ name: Asset exists?
+ description: Determines if asset information was passed back from the last command.
+ type: condition
+ iscommand: false
+ brand: ""
nexttasks:
- '#none#':
- - "92"
- scriptarguments:
- xpanseserviceid:
- complex:
- root: ASM.ExternalService
- filters:
- - - operator: isEqualString
- left:
- value:
- simple: ASM.ExternalService.port
- iscontext: true
- right:
- value:
- simple: incident.xpanseport
- iscontext: true
- accessor: service_id
+ '#default#':
+ - "38"
+ "yes":
+ - "113"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 1160,
- "y": 540
+ "x": 1550,
+ "y": 310
}
}
note: false
@@ -299,240 +295,2362 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "92":
- id: "92"
- taskid: b546bba5-63c9-4f60-8481-b8f9f265e70d
- type: condition
- task:
- id: b546bba5-63c9-4f60-8481-b8f9f265e70d
- version: -1
- name: Service ID exists?
- description: Determines if a service ID was passed back from the last command.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "89"
- "yes":
- - "93"
- separatecontext: false
conditions:
- - label: "yes"
- condition:
- - - operator: isNotEmpty
- left:
+ - condition:
+ - - left:
+ iscontext: true
value:
complex:
- root: incident
- accessor: xpanseserviceid
- iscontext: true
+ accessor: AssetInternetExposure
+ root: ASM
+ operator: isNotEmpty
right:
value: {}
+ label: "yes"
+ "98":
continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 1160,
- "y": 720
- }
- }
- note: false
- timertriggers: []
+ id: "98"
ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
isautoswitchedtoquietmode: false
- "93":
- id: "93"
- taskid: 53726b10-a1d0-44fa-8d1f-f9707f246ab5
- type: regular
- task:
- id: 53726b10-a1d0-44fa-8d1f-f9707f246ab5
- version: -1
- name: Get service details
- description: Get service details according to the service ID.
- script: Cortex Xpanse|||asm-get-external-service
- type: regular
- iscommand: true
- brand: Cortex Xpanse
+ isoversize: false
nexttasks:
'#none#':
- - "94"
+ - "99"
+ note: false
+ quietmode: 0
scriptarguments:
- service_id:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: Service Name
+ val2:
complex:
- root: incident
- accessor: xpanseserviceid
+ accessor: service_name
+ root: ASM.ExternalService
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
separatecontext: false
- continueonerrortype: ""
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: bb5c3806-9983-40ce-85d2-e224942d135b
+ iscommand: false
+ name: Set service information (Service Name)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: bb5c3806-9983-40ce-85d2-e224942d135b
+ timertriggers: []
+ type: regular
view: |-
{
"position": {
- "x": 1490,
- "y": 920
+ "x": 190,
+ "y": 510
}
}
- note: false
- timertriggers: []
+ "99":
+ continueonerrortype: ""
+ id: "99"
ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
isautoswitchedtoquietmode: false
- "94":
- id: "94"
- taskid: 4cb5d6e4-dd65-4a5e-836b-531f2447b157
- type: regular
- task:
- id: 4cb5d6e4-dd65-4a5e-836b-531f2447b157
- version: -1
- name: Set Xpanse protocol
- description: commands.local.cmd.set.incident
- script: Builtin|||setIncident
- type: regular
- iscommand: true
- brand: Builtin
+ isoversize: false
nexttasks:
'#none#':
- - "96"
+ - "101"
+ note: false
+ quietmode: 0
scriptarguments:
- xpanseprotocol:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: Is Active
+ val2:
complex:
+ accessor: is_active
root: ASM.ExternalService
- filters:
- - - operator: isEqualString
- left:
- value:
- simple: ASM.ExternalService.service_id
- iscontext: true
- right:
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
value:
- simple: incident.xpanseserviceid
- iscontext: true
- accessor: protocol
+ simple: n/a
+ operator: SetIfEmpty
separatecontext: false
- continueonerrortype: ""
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: fdaef9a9-d071-4b8d-8361-2a77e159353f
+ iscommand: false
+ name: Set service information (Is Active)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: fdaef9a9-d071-4b8d-8361-2a77e159353f
+ timertriggers: []
+ type: regular
view: |-
{
"position": {
- "x": 1490,
- "y": 1090
+ "x": 190,
+ "y": 680
}
}
- note: false
- timertriggers: []
+ "100":
+ continueonerrortype: ""
+ id: "100"
ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
isautoswitchedtoquietmode: false
- "96":
- id: "96"
- taskid: f8bd180a-8e64-4346-8d04-a244aa6da48d
- type: regular
- task:
- id: f8bd180a-8e64-4346-8d04-a244aa6da48d
- version: -1
- name: Set classifications
- description: |-
- Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
- `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
- scriptName: GridFieldSetup
- type: regular
- iscommand: false
- brand: ""
+ isoversize: false
nexttasks:
'#none#':
- - "97"
+ - "107"
+ note: false
+ quietmode: 0
scriptarguments:
gridfield:
- simple: xpanseserviceclassifications
+ simple: xpanseservicedetails
keys:
- simple: activeclassifications
+ simple: field,value
val1:
+ simple: Potential CVEs
+ val2:
complex:
+ accessor: externally_inferred_vulnerability_score
root: ASM.ExternalService
- accessor: active_classifications
transformers:
- - operator: uniq
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
separatecontext: false
- continueonerrortype: ""
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 667637a3-7289-42a3-88f8-38f9085aabe1
+ iscommand: false
+ name: Set service information (Potential CVEs)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 667637a3-7289-42a3-88f8-38f9085aabe1
+ timertriggers: []
+ type: regular
view: |-
{
"position": {
- "x": 1490,
- "y": 1260
+ "x": 190,
+ "y": 1010
}
}
- note: false
- timertriggers: []
+ "101":
+ continueonerrortype: ""
+ id: "101"
ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
isautoswitchedtoquietmode: false
- "97":
- id: "97"
- taskid: 2c67d08f-993b-48a4-89a4-d950268354b0
- type: regular
- task:
- id: 2c67d08f-993b-48a4-89a4-d950268354b0
- version: -1
- name: Set provider
- description: commands.local.cmd.set.incident
- script: Builtin|||setIncident
- type: regular
- iscommand: true
- brand: Builtin
+ isoversize: false
nexttasks:
'#none#':
- - "89"
+ - "100"
+ note: false
+ quietmode: 0
scriptarguments:
- xpanseprovider:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: Active Classifications
+ val2:
complex:
+ accessor: active_classifications
root: ASM.ExternalService
- accessor: externally_detected_providers
transformers:
- - operator: FirstArrayElement
+ - args:
+ separator:
+ value:
+ simple: ', '
+ operator: join
separatecontext: false
- continueonerrortype: ""
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: edcabcaa-be6e-45ec-86a5-add2df3e2734
+ iscommand: false
+ name: Set service information (Active Classifications)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: edcabcaa-be6e-45ec-86a5-add2df3e2734
+ timertriggers: []
+ type: regular
view: |-
{
"position": {
- "x": 1490,
- "y": 1430
+ "x": 190,
+ "y": 845
}
}
+ "102":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: type
+ root: ASM.AssetInternetExposure
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ label: "yes"
+ continueonerrortype: ""
+ id: "102"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "114"
note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is an asset of type certificate.
+ id: e0856db5-4c44-4f19-89ee-84768af91985
+ iscommand: false
+ name: Is there a certificate asset?
+ type: condition
+ version: -1
+ taskid: e0856db5-4c44-4f19-89ee-84768af91985
timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 860,
+ "y": 680
+ }
+ }
+ "103":
+ continueonerrortype: ""
+ id: "103"
ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpansecertificateasset
+ keys:
+ simple: name,type,date_added,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: details.certificateDetails.subject
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val6:
+ complex:
+ accessor: details.certificateDetails.subjectAlternativeNames
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val7:
+ complex:
+ accessor: certificate_issuer
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val8:
+ complex:
+ accessor: details.certificateDetails.issuerEmail
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val9:
+ complex:
+ accessor: details.certificateDetails.validNotAfter
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val10:
+ complex:
+ accessor: certificate_algorithm
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 7ec17610-c685-49ca-8a65-935721a02bcb
+ iscommand: false
+ name: Set asset information (with time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 7ec17610-c685-49ca-8a65-935721a02bcb
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 680,
+ "y": 1050
+ }
+ }
+ "104":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: type
+ root: ASM.AssetInternetExposure
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ label: "yes"
+ continueonerrortype: ""
+ id: "104"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "130"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is an asset of type responsive IP.
+ id: b0768e44-c968-4d69-8110-fd5cdf639a08
+ iscommand: false
+ name: Is there a responsive IP asset?
+ type: condition
+ version: -1
+ taskid: b0768e44-c968-4d69-8110-fd5cdf639a08
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 1550,
+ "y": 680
+ }
+ }
+ "105":
+ continueonerrortype: ""
+ id: "105"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpanseresponsiveipasset
+ keys:
+ simple: name,type,date_added,explainers,ip_version,range,asn_number,asn_country
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: IP_VERSION
+ root: range_info
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val6:
+ complex:
+ accessor: handle
+ root: range_info.DETAILS.networkRecords
+ transformers:
+ - operator: uniq
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val7:
+ complex:
+ accessor: ASN_HANDLES
+ root: range_info
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val8:
+ complex:
+ accessor: ASN_COUNTRIES
+ root: range_info
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 49969165-0261-4433-892f-71ea9f797a60
+ iscommand: false
+ name: Set asset information (with time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 49969165-0261-4433-892f-71ea9f797a60
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1400,
+ "y": 1340
+ }
+ }
+ "107":
+ continueonerrortype: ""
+ id: "107"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "111"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: Confirmed Vulnerable CVEs
+ val2:
+ complex:
+ accessor: confirmed_vulnerable_cve_ids
+ root: ASM.ExternalService
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 8772970e-8dfc-43a2-83d8-96655ad71ec0
+ iscommand: false
+ name: Set service information (Confirmed Vulnerable CVEs)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 8772970e-8dfc-43a2-83d8-96655ad71ec0
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 190,
+ "y": 1170
+ }
+ }
+ "108":
+ continueonerror: true
+ continueonerrortype: ""
+ id: "108"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "109"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: First Observed
+ val2:
+ complex:
+ accessor: first_observed
+ root: ASM.ExternalService
+ transformers:
+ - operator: TimeStampToDate
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 8802dbed-5fb6-4fb5-8857-e06df0702af4
+ iscommand: false
+ name: Set service information (First Observed)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 8802dbed-5fb6-4fb5-8857-e06df0702af4
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 190,
+ "y": 1510
+ }
+ }
+ "109":
+ continueonerror: true
+ continueonerrortype: ""
+ id: "109"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpanseservicedetails
+ keys:
+ simple: field,value
+ val1:
+ simple: Last Observed
+ val2:
+ complex:
+ accessor: last_observed
+ root: ASM.ExternalService
+ transformers:
+ - operator: TimeStampToDate
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 8a838e32-b013-4f73-8d8c-f98c71a36f9e
+ iscommand: false
+ name: Set service information (Last Observed)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 8a838e32-b013-4f73-8d8c-f98c71a36f9e
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 190,
+ "y": 1670
+ }
+ }
+ "110":
+ continueonerrortype: ""
+ id: "110"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "105"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ append:
+ simple: "false"
+ key:
+ simple: range_info
+ value:
+ complex:
+ accessor: details.ip_ranges
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ field:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.name
+ operator: getField
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Set a value in context under the key you entered.
+ id: 66051cf8-3404-4693-8b4c-5a7dae07b230
+ iscommand: false
+ name: Set temp context
+ script: Set
+ type: regular
+ version: -1
+ taskid: 66051cf8-3404-4693-8b4c-5a7dae07b230
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1400,
+ "y": 1180
+ }
+ }
+ "111":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.ExternalService.first_observed
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "111"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "108"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there are time entries to add to the service grid field.
+ id: 064f771f-dff0-4705-8fca-91b07804d0ed
+ iscommand: false
+ name: Are there time entries?
+ type: condition
+ version: -1
+ taskid: 064f771f-dff0-4705-8fca-91b07804d0ed
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 190,
+ "y": 1330
+ }
+ }
+ "112":
+ continueonerrortype: ""
+ id: "112"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "99"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ investigationID:
+ simple: ${incident.investigationId}
+ text:
+ complex:
+ accessor: ' confirmed_vulnerable_cve_ids'
+ root: ASM.ExternalService
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: commands.local.cmd.extract.indicators
+ id: aaeb28a6-25a5-4915-8a3a-5b425709617a
+ iscommand: true
+ name: Extract CVEs
+ script: Builtin|||extractIndicators
+ type: regular
+ version: -1
+ taskid: aaeb28a6-25a5-4915-8a3a-5b425709617a
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": -230,
+ "y": 510
+ }
+ }
+ "113":
+ continueonerrortype: ""
+ id: "113"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "102"
+ - "104"
+ - "118"
+ - "119"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ investigationID:
+ simple: ${incident.investigationId}
+ text:
+ complex:
+ accessor: ' confirmed_vulnerable_cve_ids'
+ root: ASM.ExternalService
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: Builtin
+ description: commands.local.cmd.extract.indicators
+ id: 718b677b-8e2f-4679-81c9-34af5702e041
+ iscommand: true
+ name: Extract CVEs
+ script: Builtin|||extractIndicators
+ type: regular
+ version: -1
+ taskid: 718b677b-8e2f-4679-81c9-34af5702e041
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1550,
+ "y": 480
+ }
+ }
+ "114":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: details.certificateDetails.validNotAfter
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "114"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "120"
+ "yes":
+ - "103"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there are time entries to add to the asset grid field.
+ id: e6a098c6-5cdc-4fdd-810d-b866d2d7da53
+ iscommand: false
+ name: Are there time entries?
+ type: condition
+ version: -1
+ taskid: e6a098c6-5cdc-4fdd-810d-b866d2d7da53
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 860,
+ "y": 845
+ }
+ }
+ "118":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: type
+ root: ASM.AssetInternetExposure
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ label: "yes"
+ continueonerrortype: ""
+ id: "118"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "124"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is an asset of type domain.
+ id: 551aaf77-07d5-4610-8f72-43949dba127f
+ iscommand: false
+ name: Is there a domain asset?
+ type: condition
+ version: -1
+ taskid: 551aaf77-07d5-4610-8f72-43949dba127f
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 690
+ }
+ }
+ "119":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: type
+ root: ASM.AssetInternetExposure
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ label: "yes"
+ continueonerrortype: ""
+ id: "119"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "122"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is an asset of type cloud.
+ id: 92d4b2f3-5493-456a-88a8-7d259da37241
+ iscommand: false
+ name: Is there a cloud asset?
+ type: condition
+ version: -1
+ taskid: 92d4b2f3-5493-456a-88a8-7d259da37241
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 2950,
+ "y": 690
+ }
+ }
+ "120":
+ continueonerrortype: ""
+ id: "120"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpansecertificateasset
+ keys:
+ simple: name,type,date_added,explainers,subject,subject_alerternative_names,issuer,issuer_email,expires,algorithm
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: details.certificateDetails.subject
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val6:
+ complex:
+ accessor: details.certificateDetails.subjectAlternativeNames
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val7:
+ complex:
+ accessor: certificate_issuer
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val8:
+ complex:
+ accessor: details.certificateDetails.issuerEmail
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val9:
+ simple: n/a
+ val10:
+ complex:
+ accessor: certificate_algorithm
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Certificate
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 71564cd0-f13d-4ec0-8e38-7d6006061f45
+ iscommand: false
+ name: Set asset information (without time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 71564cd0-f13d-4ec0-8e38-7d6006061f45
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1090,
+ "y": 1050
+ }
+ }
+ "121":
+ continueonerrortype: ""
+ id: "121"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpansedomainasset
+ keys:
+ simple: name,type,date_added,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: details.domainDetails.registrar.name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val6:
+ complex:
+ accessor: details.domainDetails.registryExpiryDate
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val7:
+ complex:
+ accessor: details.domainDetails.domainStatuses
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val8:
+ complex:
+ accessor: details.domainDetails.registrant.name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val9:
+ complex:
+ accessor: details.domainDetails.registrant.organization
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val10:
+ complex:
+ accessor: details.domainDetails.registrant.emailAddress
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 04d78478-e2e2-4867-85b8-067411f75680
+ iscommand: false
+ name: Set asset information (with time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 04d78478-e2e2-4867-85b8-067411f75680
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 2050,
+ "y": 1060
+ }
+ }
+ "122":
+ continueonerrortype: ""
+ id: "122"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpansecloudasset
+ keys:
+ simple: name,type,date_added,explainers,externally_detected_providers,ips,domain
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: externally_detected_providers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val6:
+ complex:
+ accessor: ips
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val7:
+ complex:
+ accessor: domain
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: CloudIntegration
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 792fb7b0-2162-4b00-8ae1-c6ca8e3ddd9e
+ iscommand: false
+ name: Set asset information (with time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 792fb7b0-2162-4b00-8ae1-c6ca8e3ddd9e
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 2950,
+ "y": 1050
+ }
+ }
+ "124":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: details.domainDetails.domainStatuses
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "124"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "126"
+ "yes":
+ - "121"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there are time entries to add to the asset grid field.
+ id: 4b745d05-e52e-4c0f-8d97-2fcc071f4dec
+ iscommand: false
+ name: Are there time entries?
+ type: condition
+ version: -1
+ taskid: 4b745d05-e52e-4c0f-8d97-2fcc071f4dec
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 885
+ }
+ }
+ "126":
+ continueonerrortype: ""
+ id: "126"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpansedomainasset
+ keys:
+ simple: name,type,date_added,explainers,registrar_name,registry_expiration,domain_status,registrant_name,registrant_org,registrant_email
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ complex:
+ accessor: details.domainDetails.registrar.name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val6:
+ simple: n/a
+ val7:
+ complex:
+ accessor: details.domainDetails.domainStatuses
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val8:
+ complex:
+ accessor: details.domainDetails.registrant.name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val9:
+ complex:
+ accessor: details.domainDetails.registrant.organization
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val10:
+ complex:
+ accessor: details.domainDetails.registrant.emailAddress
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: Domain
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: fa63614a-ca49-4bf3-8051-b862ab3f97b0
+ iscommand: false
+ name: Set asset information (without time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: fa63614a-ca49-4bf3-8051-b862ab3f97b0
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 2440,
+ "y": 1060
+ }
+ }
+ "128":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: xpanseassetids
+ root: incident
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "128"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "89"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is an asset ID to enrich.
+ id: b21b79b5-bb5f-43d8-818a-46be4de44320
+ iscommand: false
+ name: Is Xpanse Asset IDs defined?
+ type: condition
+ version: -1
+ taskid: b21b79b5-bb5f-43d8-818a-46be4de44320
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 1550,
+ "y": -15
+ }
+ }
+ "129":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: xpanseserviceid
+ root: incident
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "129"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "38"
+ "yes":
+ - "88"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there is a service ID to enrich.
+ id: 1e7f5704-542d-48b4-845c-351460a1cc69
+ iscommand: false
+ name: Is Xpanse Service ID defined?
+ type: condition
+ version: -1
+ taskid: 1e7f5704-542d-48b4-845c-351460a1cc69
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 190,
+ "y": -35
+ }
+ }
+ "130":
+ conditions:
+ - condition:
+ - - left:
+ iscontext: true
+ value:
+ complex:
+ accessor: details.ip_ranges
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ field:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.name
+ operator: getField
+ operator: isExists
+ right:
+ value: {}
+ label: "yes"
+ continueonerrortype: ""
+ id: "130"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#default#':
+ - "131"
+ "yes":
+ - "110"
+ note: false
+ quietmode: 0
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: Determines if there are time entries to add to the asset grid field.
+ id: f81c2323-d804-4a83-8972-b59dc47930ae
+ iscommand: false
+ name: Are there ip_range entries?
+ type: condition
+ version: -1
+ taskid: f81c2323-d804-4a83-8972-b59dc47930ae
+ timertriggers: []
+ type: condition
+ view: |-
+ {
+ "position": {
+ "x": 1550,
+ "y": 860
+ }
+ }
+ "131":
+ continueonerrortype: ""
+ id: "131"
+ ignoreworker: false
+ isautoswitchedtoquietmode: false
+ isoversize: false
+ nexttasks:
+ '#none#':
+ - "38"
+ note: false
+ quietmode: 0
+ scriptarguments:
+ gridfield:
+ simple: xpanseresponsiveipasset
+ keys:
+ simple: name,type,date_added,explainers,ip_version,range,asn_number,asn_country
+ val1:
+ complex:
+ accessor: name
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val2:
+ complex:
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure.type
+ transformers:
+ - args:
+ applyIfEmpty: {}
+ defaultValue:
+ value:
+ simple: n/a
+ operator: SetIfEmpty
+ val3:
+ complex:
+ accessor: created
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - operator: TimeStampToDate
+ val4:
+ complex:
+ accessor: explainers
+ filters:
+ - - left:
+ iscontext: true
+ value:
+ simple: ASM.AssetInternetExposure.type
+ operator: isEqualString
+ right:
+ value:
+ simple: ResponsiveIP
+ root: ASM.AssetInternetExposure
+ transformers:
+ - args:
+ separator:
+ value:
+ simple: ','
+ operator: join
+ val5:
+ simple: n/a
+ val6:
+ simple: n/a
+ val7:
+ simple: n/a
+ val8:
+ simple: n/a
+ separatecontext: false
+ skipunavailable: false
+ task:
+ brand: ""
+ description: |-
+ Automation used to more easily populate a grid field. This is necessary when you want to assign certain values as static or if you have context paths that you will assign to different values as well. Example of command:
+ `!GridFieldSetup keys=ip,src val1=${AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress} val2="AWS" gridfiled="gridfield"`
+ id: 4719d617-837d-4aef-81ea-da9214bba227
+ iscommand: false
+ name: Set asset information (with time)
+ script: GridFieldSetup
+ type: regular
+ version: -1
+ taskid: 4719d617-837d-4aef-81ea-da9214bba227
+ timertriggers: []
+ type: regular
+ view: |-
+ {
+ "position": {
+ "x": 1800,
+ "y": 1180
+ }
+ }
view: |-
{
- "linkLabelsPosition": {},
+ "linkLabelsPosition": {
+ "102_38_#default#": 0.17,
+ "104_38_#default#": 0.54,
+ "111_38_#default#": 0.33,
+ "118_38_#default#": 0.72,
+ "119_38_#default#": 0.1,
+ "11_38_#default#": 0.1,
+ "128_38_#default#": 0.2,
+ "129_38_#default#": 0.11,
+ "130_110_yes": 0.54,
+ "130_131_#default#": 0.29,
+ "90_38_#default#": 0.14,
+ "97_38_#default#": 0.14
+ },
"paper": {
"dimensions": {
- "height": 2085,
- "width": 1410,
- "x": 460,
- "y": -180
+ "height": 2305,
+ "width": 3560,
+ "x": -230,
+ "y": -400
}
}
}
-inputs:
-- key: RemoteIP
- value:
- complex:
- root: incident
- accessor: xpanseip
- required: false
- description: IP address of service
- playbookInputQuery:
+inputs: []
outputs: []
tests:
- No tests (auto formatted)
diff --git a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment_README.md b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment_README.md
index dbd432314d56..747eb635f941 100644
--- a/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment_README.md
+++ b/Packs/CortexXpanse/Playbooks/Xpanse_-_Alert_Self-Enrichment_README.md
@@ -1,31 +1,32 @@
-This playbook handles ASM alerts by enriching alert information with Xpanse service and asset details.
+Enrichment on the alert itself using Cortex Xpanse APIs.
## Dependencies
This playbook uses the following sub-playbooks, integrations, and scripts.
+### Sub-playbooks
+
+This playbook does not use any sub-playbooks.
+
### Integrations
Cortex Xpanse
### Scripts
-GridFieldSetup
+* GridFieldSetup
+* Set
### Commands
-* asm-list-external-service
-* asm-list-asset-internet-exposure
+* asm-get-asset-internet-exposure
* asm-get-external-service
+* extractIndicators
## Playbook Inputs
---
-
-| **Name** | **Description** | **Default Value** | **Required** |
-|---------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | --- |
-| Provider | The externally detected provider for the alert. | ${incident.xpanseprovider} | Required |
-| IP | The external IP address associated with the alert. | ${incident.xpanseip} | Required |
+There are no inputs for this playbook.
## Playbook Outputs
@@ -36,4 +37,4 @@ There are no outputs for this playbook.
---
-![Xpanse - Alert Self-Enrichment](../doc_files/Xpanse_-_Alert_Self-Enrichment.png)
\ No newline at end of file
+![Xpanse - Alert Self-Enrichment](../doc_files/Xpanse_-_Alert_Self-Enrichment.png)
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_0_26.md b/Packs/CortexXpanse/ReleaseNotes/1_0_26.md
new file mode 100644
index 000000000000..d135a34a0ace
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_0_26.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Cortex Xpanse
+
+Added the *reopened* alert status value to filter for reopened alerts during integration configuration and in the ***asm-list-alerts*** and ***asm-update-alerts*** commands.
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_1_0.md b/Packs/CortexXpanse/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..4336db194fa7
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_1_0.md
@@ -0,0 +1,32 @@
+
+#### Incident Fields
+
+- New: **Xpanse Domain Asset**
+- New: **Xpanse Attack Surface Rule Name**
+- New: **Xpanse Certificate Asset**
+- New: **Xpanse Responsive IP Asset**
+- New: **Xpanse Service Details**
+- New: **Xpanse Cloud Asset**
+
+#### Layouts
+
+##### Xpanse Alert Layout
+
+Updated the layout to include new fields.
+
+#### Mappers
+
+##### Xpanse - Incoming Mapper
+
+Updated the mapper to include new fields.
+
+#### Playbooks
+
+##### Xpanse - Alert Self-Enrichment
+
+Updated the playbook to enrich CVEs and populate fields displayed in layout.
+
+##### Xpanse - Alert Handler
+
+Updated the playbook to reference the new version of the *Xpanse - Alert Self-Enrichment* playbook (no inputs).
+
diff --git a/Packs/CortexXpanse/ReleaseNotes/1_2_0.md b/Packs/CortexXpanse/ReleaseNotes/1_2_0.md
new file mode 100644
index 000000000000..e25fe00ecbd2
--- /dev/null
+++ b/Packs/CortexXpanse/ReleaseNotes/1_2_0.md
@@ -0,0 +1,5 @@
+#### Integrations
+
+##### Xpanse Feed
+
+Added the *Xpanse Feed* integration to retrieve the discovered IPs/Domains/Certificates from the Cortex Xpanse asset database.
diff --git a/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Self-Enrichment.png b/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Self-Enrichment.png
index a500c78828cb..ef34c82a3065 100644
Binary files a/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Self-Enrichment.png and b/Packs/CortexXpanse/doc_files/Xpanse_-_Alert_Self-Enrichment.png differ
diff --git a/Packs/CortexXpanse/pack_metadata.json b/Packs/CortexXpanse/pack_metadata.json
index 80cf6b4eea76..b416b1350248 100644
--- a/Packs/CortexXpanse/pack_metadata.json
+++ b/Packs/CortexXpanse/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Xpanse",
"description": "Content for working with Attack Surface Management (ASM).",
"support": "xsoar",
- "currentVersion": "1.0.25",
+ "currentVersion": "1.2.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
index ba8a3408d9e7..8aeff904a94b 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
@@ -32,6 +32,7 @@
USE_SSL = not PARAMS.get('insecure', False)
# How many time before the first fetch to retrieve incidents
FETCH_TIME = PARAMS.get('fetch_time', '3 days')
+PROXY = PARAMS.get('proxy', False)
BYTE_CREDS = f'{CLIENT_ID}:{SECRET}'.encode()
# Headers to be sent in requests
HEADERS = {
@@ -45,8 +46,6 @@
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
DETECTION_DATE_FORMAT = IOM_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
DEFAULT_TIMEOUT = 30
-# Remove proxy if not set to true in params
-handle_proxy()
''' KEY DICTIONARY '''
@@ -386,6 +385,7 @@ def http_request(method, url_suffix, params=None, data=None, files=None, headers
data=data,
files=files,
params=params,
+ proxy=PROXY,
resp_type='response',
verify=USE_SSL,
error_handler=error_handler,
@@ -416,6 +416,7 @@ def http_request(method, url_suffix, params=None, data=None, files=None, headers
data=data,
files=files,
params=params,
+ proxy=PROXY,
retries=5,
status_list_to_retry=[429],
resp_type='response',
@@ -2206,6 +2207,8 @@ def get_remote_data_command(args: dict[str, Any]):
"""
remote_args = GetRemoteDataArgs(args)
remote_incident_id = remote_args.remote_incident_id
+ reopen_statuses_list = argToList(demisto.params().get('reopen_statuses', ''))
+ demisto.debug(f'In get_remote_data_command {reopen_statuses_list=}')
mirrored_data = {}
entries: list = []
@@ -2217,20 +2220,20 @@ def get_remote_data_command(args: dict[str, Any]):
mirrored_data, updated_object = get_remote_incident_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update incident {remote_incident_id} with fields: {updated_object}')
- set_xsoar_incident_entries(updated_object, entries, remote_incident_id) # sets in place
+ set_xsoar_incident_entries(updated_object, entries, remote_incident_id, reopen_statuses_list) # sets in place
elif incident_type == IncidentType.DETECTION:
mirrored_data, updated_object = get_remote_detection_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update detection {remote_incident_id} with fields: {updated_object}')
- set_xsoar_detection_entries(updated_object, entries, remote_incident_id) # sets in place
+ set_xsoar_detection_entries(updated_object, entries, remote_incident_id, reopen_statuses_list) # sets in place
elif incident_type == IncidentType.IDP_OR_MOBILE_DETECTION:
mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(remote_incident_id)
if updated_object:
demisto.debug(f'Update {detection_type} detection {remote_incident_id} with fields: {updated_object}')
set_xsoar_idp_or_mobile_detection_entries(
- updated_object, entries, remote_incident_id, detection_type) # sets in place
+ updated_object, entries, remote_incident_id, detection_type, reopen_statuses_list) # sets in place
else:
# this is here as prints can disrupt mirroring
@@ -2312,36 +2315,55 @@ def get_remote_idp_or_mobile_detection_data(remote_incident_id):
"""
mirrored_data_list = get_detection_entities([remote_incident_id]).get('resources', []) # a list with one dict in it
mirrored_data = mirrored_data_list[0]
+ demisto.debug(f'in get_remote_idp_or_mobile_detection_data {mirrored_data=}')
detection_type = ''
+ mirroring_fields = ['status']
updated_object: dict[str, Any] = {}
if 'idp' in mirrored_data['product']:
updated_object = {'incident_type': IDP_DETECTION}
detection_type = 'IDP'
+ mirroring_fields.append('id')
if 'mobile' in mirrored_data['product']:
updated_object = {'incident_type': MOBILE_DETECTION}
detection_type = 'Mobile'
- set_updated_object(updated_object, mirrored_data, ['status'])
+ mirroring_fields.append('mobile_detection_id')
+ set_updated_object(updated_object, mirrored_data, mirroring_fields)
+ demisto.debug(f'in get_remote_idp_or_mobile_detection_data {mirroring_fields=} {updated_object=}')
return mirrored_data, updated_object, detection_type
-def set_xsoar_incident_entries(updated_object: dict[str, Any], entries: list, remote_incident_id: str):
+def set_xsoar_incident_entries(updated_object: dict[str, Any], entries: list, remote_incident_id: str,
+ reopen_statuses_list: list):
+ reopen_statuses_set = {str(status).strip() for status in reopen_statuses_list} \
+ if reopen_statuses_list else set(STATUS_TEXT_TO_NUM.keys()) - {'Closed'}
+ demisto.debug(f'In set_xsoar_incident_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'Closed':
close_in_xsoar(entries, remote_incident_id, 'Incident')
- elif updated_object.get('status') in (set(STATUS_TEXT_TO_NUM.keys()) - {'Closed'}):
+ elif updated_object.get('status', '') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_incident_id, 'Incident')
+ else:
+ demisto.debug(f"In set_xsoar_incident_entries not closing and not reopening since {updated_object.get('status')=} "
+ f"and {reopen_statuses_set=}.")
-def set_xsoar_detection_entries(updated_object: dict[str, Any], entries: list, remote_detection_id: str):
+def set_xsoar_detection_entries(updated_object: dict[str, Any], entries: list, remote_detection_id: str,
+ reopen_statuses_list: list):
+ reopen_statuses_set = {str(status).lower().strip().replace(' ', '_') for status in reopen_statuses_list} \
+ if reopen_statuses_list else (set(DETECTION_STATUS) - {'closed'})
+ demisto.debug(f'In set_xsoar_detection_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'closed':
close_in_xsoar(entries, remote_detection_id, 'Detection')
- elif updated_object.get('status') in (set(DETECTION_STATUS) - {'closed'}):
+ elif updated_object.get('status') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_detection_id, 'Detection')
+ else:
+ demisto.debug(f"In set_xsoar_detection_entries not closing and not reopening since {updated_object.get('status')=} "
+ f"and {reopen_statuses_set=}.")
-def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], entries: list,
- remote_idp_detection_id: str, incident_type_name: str):
+def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], entries: list, remote_idp_detection_id: str,
+ incident_type_name: str, reopen_statuses_list: list):
"""
Send the updated object to the relevant status handler
@@ -2351,15 +2373,23 @@ def set_xsoar_idp_or_mobile_detection_entries(updated_object: dict[str, Any], en
:param entries: The list of entries to add the new entry into.
:type remote_idp_detection_id: ``str``
:param remote_idp_detection_id: the remote idp detection id
+ :type reopen_statuses_list: ``list``
+ :param reopen_statuses_list: the list of statuses that should reopen an incident in XSOAR.
:return: The response.
:rtype ``dict``
"""
+ reopen_statuses_set = {str(status).lower().strip().replace(' ', '_') for status in reopen_statuses_list} \
+ if reopen_statuses_list else (set(IDP_AND_MOBILE_DETECTION_STATUS) - {'closed'})
+ demisto.debug(f'In set_xsoar_idp_or_mobile_detection_entries {reopen_statuses_set=}')
if demisto.params().get('close_incident'):
if updated_object.get('status') == 'closed':
close_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
- elif updated_object.get('status') in (set(IDP_AND_MOBILE_DETECTION_STATUS) - {'closed'}):
+ elif updated_object.get('status') in reopen_statuses_set:
reopen_in_xsoar(entries, remote_idp_detection_id, incident_type_name)
+ else:
+ demisto.debug(f"In set_xsoar_idp_or_mobile_detection_entries not closing and not reopening since "
+ f"{updated_object.get('status')=} and {reopen_statuses_set=}.")
def close_in_xsoar(entries: list, remote_incident_id: str, incident_type_name: str):
@@ -6299,7 +6329,6 @@ def create_gql_client(url_suffix="identity-protection/combined/graphql/v1"):
"Content-Type": "application/json"}
}
transport = RequestsHTTPTransport(**kwargs)
- handle_proxy()
client = Client(
transport=transport,
fetch_schema_from_transport=True,
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
index a18b451c075d..d1001f791318 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
@@ -163,6 +163,21 @@ configuration:
- Indicator of Misconfiguration
- Indicator of Attack
- Mobile Detection
+- defaultvalue: 'New,In progress,True positive,False positive,Reopened,Ignored'
+ display: Reopen Statuses
+ name: reopen_statuses
+ type: 16
+ section: Collect
+ advanced: true
+ required: false
+ additionalinfo: CrowdStrike Falcon statuses that will reopen an incident in XSOAR if closed. You can choose any combination.
+ options:
+ - New
+ - In progress
+ - True positive
+ - False positive
+ - Reopened
+ - Ignored
- defaultvalue: '1'
display: 'Incidents Fetch Interval'
name: incidentFetchInterval
@@ -5017,7 +5032,7 @@ script:
- contextPath: CrowdStrike.IOARules.version_ids
description: The IOA Rule's version ID.
type: String
- dockerimage: demisto/py3-tools:1.0.0.91908
+ dockerimage: demisto/py3-tools:1.0.0.97242
isfetch: true
ismappable: true
isremotesyncin: true
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
index 97e9fb03c6bb..92e4149908ac 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py
@@ -4174,6 +4174,50 @@ def test_get_remote_detection_data(mocker):
'behaviors.display_name': 'SampleTemplateDetection'}
+def test_get_remote_idp_or_mobile_detection_data_idp(mocker):
+ """
+ Given
+ - an idp detection ID on the remote system
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - returns the relevant detection entity from the remote system with the relevant incoming mirroring fields
+ """
+ from CrowdStrikeFalcon import get_remote_idp_or_mobile_detection_data
+ detection_entity = input_data.response_idp_detection.copy()
+ mocker.patch('CrowdStrikeFalcon.get_detection_entities', return_value={'resources': [detection_entity.copy()]})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(input_data.remote_idp_detection_id)
+ detection_entity['severity'] = 2
+ assert mirrored_data == detection_entity
+ assert detection_type == 'IDP'
+ assert updated_object == {'incident_type': 'IDP detection',
+ 'status': 'closed',
+ 'id': 'ind:20879a8064904ecfbb62c118a6a19411:C0BB6ACD-8FDC-4CBA-9CF9-EBF3E28B3E56'}
+
+
+def test_get_remote_idp_or_mobile_detection_data_mobile_detection(mocker):
+ """
+ Given
+ - a mobile detection ID on the remote system
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - returns the relevant detection entity from the remote system with the relevant incoming mirroring fields
+ """
+ from CrowdStrikeFalcon import get_remote_idp_or_mobile_detection_data
+ detection_entity = input_data.response_mobile_detection.copy()
+ mocker.patch('CrowdStrikeFalcon.get_detection_entities', return_value={'resources': [detection_entity.copy()]})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ mirrored_data, updated_object, detection_type = get_remote_idp_or_mobile_detection_data(input_data.remote_mobile_detection_id)
+ detection_entity['severity'] = 90
+ assert mirrored_data == detection_entity
+ assert detection_type == 'Mobile'
+ assert updated_object == {'incident_type': 'MOBILE detection',
+ 'status': 'new',
+ 'mobile_detection_id': '1111111111111111111'}
+
+
@pytest.mark.parametrize('updated_object, entry_content, close_incident', input_data.set_xsoar_incident_entries_args)
def test_set_xsoar_incident_entries(mocker, updated_object, entry_content, close_incident):
"""
@@ -4187,14 +4231,40 @@ def test_set_xsoar_incident_entries(mocker, updated_object, entry_content, close
"""
from CrowdStrikeFalcon import set_xsoar_incident_entries
mocker.patch.object(demisto, 'params', return_value={'close_incident': close_incident})
+ mocker.patch.object(demisto, 'debug', return_value=None)
entries = []
- set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id)
+ reopen_statuses = ['New', 'Reopened', 'In Progress']
+ set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
if entry_content:
assert entry_content in entries[0].get('Contents')
else:
assert entries == []
+@pytest.mark.parametrize('updated_object', input_data.check_reopen_set_xsoar_incident_entries_args)
+def test_set_xsoar_incident_entries_reopen(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on an incident
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_incident_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_incident_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
+ if updated_object.get('status') == 'Reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ else:
+ assert entries == []
+
+
@pytest.mark.parametrize('updated_object, entry_content, close_incident', input_data.set_xsoar_detection_entries_args)
def test_set_xsoar_detection_entries(mocker, updated_object, entry_content, close_incident):
"""
@@ -4209,13 +4279,66 @@ def test_set_xsoar_detection_entries(mocker, updated_object, entry_content, clos
from CrowdStrikeFalcon import set_xsoar_detection_entries
mocker.patch.object(demisto, 'params', return_value={'close_incident': close_incident})
entries = []
- set_xsoar_detection_entries(updated_object, entries, input_data.remote_incident_id)
+ reopen_statuses = ['New', 'In progress', 'True positive', 'False positive', 'Reopened', 'Ignored']
+ set_xsoar_detection_entries(updated_object, entries, input_data.remote_incident_id, reopen_statuses)
if entry_content:
assert entry_content in entries[0].get('Contents')
else:
assert entries == []
+@pytest.mark.parametrize('updated_object', input_data.check_reopen_set_xsoar_detections_entries_args)
+def test_set_xsoar_detection_entries_reopen_check(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_detection_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_detection_entries(updated_object, entries, input_data.remote_detection_id, reopen_statuses)
+ if updated_object.get('status') == 'reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ else:
+ assert entries == []
+
+
+@pytest.mark.parametrize('updated_object', input_data.set_xsoar_idp_or_mobile_detection_entries)
+def test_set_xsoar_idp_or_mobile_detection_entries(mocker, updated_object):
+ """
+ Given
+ - the incident status from the remote system
+ - the close_incident parameter that was set when setting the integration
+ - the reopen statuses set.
+ When
+ - running get_remote_data_command with changes to make on a detection
+ Then
+ - add the relevant entries only if the status is Reopened.
+ """
+ from CrowdStrikeFalcon import set_xsoar_idp_or_mobile_detection_entries
+ mocker.patch.object(demisto, 'params', return_value={'close_incident': True})
+ mocker.patch.object(demisto, 'debug', return_value=None)
+ entries = []
+ reopen_statuses = ['Reopened'] # Add a reopen entry only if the status in CS Falcon is reopened
+ set_xsoar_idp_or_mobile_detection_entries(updated_object, entries, input_data.remote_idp_detection_id, 'IDP', reopen_statuses)
+ if updated_object.get('status') == 'reopened':
+ assert 'dbotIncidentReopen' in entries[0].get('Contents')
+ elif updated_object.get('status') == 'closed':
+ assert 'dbotIncidentClose' in entries[0].get('Contents')
+ assert 'closeReason' in entries[0].get('Contents')
+ assert entries[0].get('Contents', {}).get('closeReason') == 'IDP was closed on CrowdStrike Falcon'
+ else:
+ assert entries == []
+
+
@pytest.mark.parametrize('updated_object, mirrored_data, mirroring_fields, output', input_data.set_updated_object_args)
def test_set_updated_object(updated_object, mirrored_data, mirroring_fields, output):
"""
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
index f1262f412b83..6e7a367817ac 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/README.md
@@ -27,6 +27,7 @@ The CrowdStrike Falcon OAuth 2 API integration (formerly Falcon Firehose API), e
| Close Mirrored XSOAR Incident | When selected, closes the CrowdStrike Falcon incident or detection, which is mirrored in the Cortex XSOAR incident. | False |
| Close Mirrored CrowdStrike Falcon Incident or Detection | When selected, closes the Cortex XSOAR incident, which is mirrored in the CrowdStrike Falcon incident or detection, according to the types that were chosen to be fetched and mirrored. | False |
| Fetch types | Choose what to fetch - incidents, detections, IDP detections. You can choose any combination. | False |
+ | Reopen Statuses | CrowdStrike Falcon statuses that will reopen an incident in XSOAR if closed. You can choose any combination. | False |
| Incidents Fetch Interval | | False |
| Advanced: Time in minutes to look back when fetching incidents and detections | Use this parameter to determine how long backward to look in the search for incidents that were created before the last run time and did not match the query when they were created. | False |
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
index 978e52a9bddd..c41f91ca190d 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/test_data/input_data.py
@@ -193,6 +193,92 @@
"updated_timestamp": "2023-06-27T09:29:52.448779709Z"
}
+response_mobile_detection = {'agent_id': '111...111',
+ 'agent_load_flags': '0',
+ 'agent_version': '2024.04.4060003',
+ 'aggregate_id': '',
+ 'android_sdk_version': '31',
+ 'bootloader_unlocked': '1',
+ 'bootloader_version': 'slider-1.0-7683913',
+ 'cid': '2222...222',
+ 'composite_id': '1111111111111111111111:ind:22222222222222222222222222222222:33333|4444444444444444444',
+ 'computer_name': 'computer_name',
+ 'confidence': 100,
+ 'config_id_base': 'config_id_base',
+ 'config_id_build': 'config_id_build',
+ 'config_id_platform': 'config_id_platform',
+ 'config_version': '0',
+ 'context_timestamp': '2024-05-30T12:26:34.384Z',
+ 'crawled_timestamp': '2024-05-30T13:26:35.874005623Z',
+ 'created_timestamp': '2024-05-30T12:27:35.879609848Z',
+ 'data_domains': ['Endpoint'],
+ 'description': 'Mobile detection description',
+ 'developer_options_enabled': '1',
+ 'display_name': 'DisplayName',
+ 'enrollment_email': 'test@test.com',
+ 'falcon_app_trusted': True,
+ 'falcon_host_link': 'https://falcon.crowdstrike.com/mobile/detections/1111111111111111111111:ind:22222222222222222222222222222222:33333|4444444444444444444?_cid=1111111111111111111111',
+ 'firmware_build_fingerprint': 'firmware_build_fingerprint',
+ 'firmware_build_time': '2021-09-02T12:01:16.000Z',
+ 'firmware_build_type': 'user',
+ 'fma_version_code': 'fma_version_code',
+ 'id': 'ind:22222222222222222222222222222222:33333|4444444444444444444',
+ 'keystore_check_failed': False,
+ 'keystore_inconclusive': False,
+ 'keystore_insecure': False,
+ 'lock_screen_enabled': '0',
+ 'mobile_brand': 'mobile_brand',
+ 'mobile_design': 'mobile_design',
+ 'mobile_detection_id': '1111111111111111111',
+ 'mobile_hardware': 'mobile_hardware',
+ 'mobile_manufacturer': 'mobile_manufacturer',
+ 'mobile_model': 'mobile_model',
+ 'mobile_product': 'mobile_product',
+ 'mobile_serial': 'unknown',
+ 'name': 'name',
+ 'objective': 'Falcon Detection Method',
+ 'os_integrity_intact': '0',
+ 'os_major_version': '12',
+ 'os_minor_version': '0',
+ 'os_version': 'Android 12',
+ 'pattern_id': 'pattern_id',
+ 'platform': 'Android',
+ 'platform_version': 'platform_version',
+ 'playintegrity_compatibility_failed': False,
+ 'playintegrity_insecure_device': True,
+ 'playintegrity_meets_basic_integrity': False,
+ 'playintegrity_meets_device_integrity': False,
+ 'playintegrity_meets_partial_integrity': False,
+ 'playintegrity_meets_strong_integrity': False,
+ 'playintegrity_only_basic_integrity': False,
+ 'playintegrity_timestamp_expired': False,
+ 'poly_id': 'poly_id',
+ 'product': 'mobile',
+ 'radio_version': 'radio_version',
+ 'safetynet_verify_apps_enabled': '1',
+ 'scenario': 'attacker_methodology',
+ 'seconds_to_resolved': 590841,
+ 'seconds_to_triaged': 591762,
+ 'security_patch_level': '2021-10-05',
+ 'selinux_enforcement_policy': '1',
+ 'severity': 90,
+ 'severity_name': 'Critical',
+ 'show_in_ui': True,
+ 'source_products': ['Falcon for Mobile'],
+ 'source_vendors': ['CrowdStrike'],
+ 'status': 'new',
+ 'storage_encrypted': '1',
+ 'supported_arch': '7',
+ 'tactic': 'Insecure security posture',
+ 'tactic_id': 'CSTA0009',
+ 'technique': 'Bad device settings',
+ 'technique_id': 'CST0024',
+ 'timestamp': '2024-05-30T12:26:34.384Z',
+ 'type': 'mobile-android-attestation',
+ 'updated_timestamp': '2024-06-06T08:57:44.904557373Z',
+ 'user_name': 'test@test.com',
+ 'verified_boot_state': 2}
+
context_idp_detection = {
'name': 'IDP Detection ID: 20879a8064904ecfbb62c118a6a19411:ind:20879a8064904ecfbb62c118a6a19411:C0BB6ACD-8FDC-4CBA-9CF9-EBF3E28B3E56',
'occurred': '2023-04-20T11:12:03.089Z', 'last_updated': '2023-06-27T09:29:52.448779709Z',
@@ -201,6 +287,7 @@
remote_incident_id = 'inc:afb5d1512a00480f53e9ad91dc3e4b55:1cf23a95678a421db810e11b5db693bd'
remote_detection_id = 'ldt:15dbb9d8f06b89fe9f61eb46e829d986:528715079668'
remote_idp_detection_id = '20879a8064904e:ind:20879a8064904ecfbb62c118a6a19411:26DF54C9-8803-4F97-AD22-A725EE820EA9'
+remote_mobile_detection_id = '1111111111111111111'
# remote_id, close_incident, incident_status, detection_status, mirrored_object, entries
get_remote_incident = (remote_incident_id,
@@ -311,6 +398,31 @@
incident_no_status,
]
+# updated_object
+incident_new_status = ({'status': 'New'})
+incident_in_progress_status = ({'status': 'In Progress'})
+incident_reopened_status = ({'status': 'Reopened'})
+check_reopen_set_xsoar_incident_entries_args = [incident_new_status, incident_in_progress_status, incident_reopened_status]
+
+# updated_object
+detection_new_status = ({'status': 'new'})
+detection_in_progress_status = ({'status': 'in_progress'})
+detection_reopened_status = ({'status': 'reopened'})
+detection_true_positive_status = ({'status': 'true_positive'})
+detection_false_positive_status = ({'status': 'false_positive'})
+detection_ignored_status = ({'status': 'ignored'})
+check_reopen_set_xsoar_detections_entries_args = [detection_new_status, detection_in_progress_status, detection_reopened_status,
+ detection_true_positive_status, detection_false_positive_status,
+ detection_ignored_status]
+
+# updated_object
+idp_mobile_detection_new_status = ({'status': 'new'})
+idp_mobile_detection_in_progress_status = ({'status': 'in_progress'})
+idp_mobile_detection_reopened_status = ({'status': 'reopened'})
+idp_mobile_detection_closed_status = ({'status': 'closed'})
+set_xsoar_idp_or_mobile_detection_entries = [idp_mobile_detection_new_status, idp_mobile_detection_in_progress_status,
+ idp_mobile_detection_reopened_status, idp_mobile_detection_closed_status]
+
# updated_object, entry_content, close_incident
detection_closes = ({'status': 'closed'},
'dbotIncidentClose',
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md
new file mode 100644
index 000000000000..879575ce1237
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_10.md
@@ -0,0 +1,8 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+- Added a new parameter **Reopen Statuses** to the integration configuration, representing the CrowdStrike Falcon statuses that will reopen an incident in XSOAR if closed.
+- Added the fields **id** and **mobile_detection_id** to the mirroring process of **CrowdStrike Falcon IDP Detection** and **CrowdStrike Falcon Mobile Detection** respectively.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.97242*.
\ No newline at end of file
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_9.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_9.md
new file mode 100644
index 000000000000..a6f473f3e1a7
--- /dev/null
+++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_13_9.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### CrowdStrike Falcon
+
+- Added support for passing the *proxy* argument in generic_http calls.
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.96102*.
diff --git a/Packs/CrowdStrikeFalcon/TestPlaybooks/Test_Playbook_-_CrowdStrike_Falcon_-_Get_Detections_by_Incident.yml b/Packs/CrowdStrikeFalcon/TestPlaybooks/Test_Playbook_-_CrowdStrike_Falcon_-_Get_Detections_by_Incident.yml
index a621dd2ff42e..1f5befb709ea 100644
--- a/Packs/CrowdStrikeFalcon/TestPlaybooks/Test_Playbook_-_CrowdStrike_Falcon_-_Get_Detections_by_Incident.yml
+++ b/Packs/CrowdStrikeFalcon/TestPlaybooks/Test_Playbook_-_CrowdStrike_Falcon_-_Get_Detections_by_Incident.yml
@@ -266,7 +266,7 @@ tasks:
{
"position": {
"x": -400,
- "y": -3330
+ "y": -3320
}
}
note: false
@@ -386,17 +386,14 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "262"
- - "292"
- - "251"
- - "253"
+ - "368"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": -400,
- "y": -2510
+ "x": -650,
+ "y": -2575
}
}
note: false
@@ -3805,6 +3802,83 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "368":
+ id: "368"
+ taskid: 209143a5-bb32-4dcc-8413-f7e954b538ca
+ type: condition
+ task:
+ id: 209143a5-bb32-4dcc-8413-f7e954b538ca
+ version: -1
+ name: CrowdStrike.FoundDetections is False
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "369"
+ "yes":
+ - "251"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isEqualString
+ left:
+ value:
+ complex:
+ root: CrowdStrike
+ accessor: FoundDetections
+ iscontext: true
+ right:
+ value:
+ simple: "False"
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1630,
+ "y": -1410
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "369":
+ id: "369"
+ taskid: 85b11c59-0148-4fbf-8831-a4a91a351035
+ type: title
+ task:
+ id: 85b11c59-0148-4fbf-8831-a4a91a351035
+ version: -1
+ name: No Detections were found
+ type: title
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "253"
+ - "262"
+ - "292"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -1560,
+ "y": -1180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {
@@ -3850,8 +3924,8 @@ view: |-
"paper": {
"dimensions": {
"height": 4310,
- "width": 6470,
- "x": -400,
+ "width": 7700,
+ "x": -1630,
"y": -3955
}
}
diff --git a/Packs/CrowdStrikeFalcon/pack_metadata.json b/Packs/CrowdStrikeFalcon/pack_metadata.json
index 3f8b2d0f274e..1f0860f20760 100644
--- a/Packs/CrowdStrikeFalcon/pack_metadata.json
+++ b/Packs/CrowdStrikeFalcon/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon",
"description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.",
"support": "xsoar",
- "currentVersion": "1.13.8",
+ "currentVersion": "1.13.10",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/CyCognito/ReleaseNotes/1_0_22.md b/Packs/CyCognito/ReleaseNotes/1_0_22.md
new file mode 100644
index 000000000000..c967d590ae6b
--- /dev/null
+++ b/Packs/CyCognito/ReleaseNotes/1_0_22.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### CyCognitoGetEndpoints
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/CyCognito/Scripts/CyCognitoGetEndpoints/CyCognitoGetEndpoints.yml b/Packs/CyCognito/Scripts/CyCognitoGetEndpoints/CyCognitoGetEndpoints.yml
index 66a41a11beb3..b671496c9aca 100644
--- a/Packs/CyCognito/Scripts/CyCognitoGetEndpoints/CyCognitoGetEndpoints.yml
+++ b/Packs/CyCognito/Scripts/CyCognitoGetEndpoints/CyCognitoGetEndpoints.yml
@@ -11,7 +11,7 @@ tags:
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.2.0
tests:
diff --git a/Packs/CyCognito/pack_metadata.json b/Packs/CyCognito/pack_metadata.json
index b7a61337bfb5..fde6ca8ddc1c 100644
--- a/Packs/CyCognito/pack_metadata.json
+++ b/Packs/CyCognito/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyCognito",
"description": "Fetches the issues associated with a particular asset from the CyCognito platform.",
"support": "partner",
- "currentVersion": "1.0.21",
+ "currentVersion": "1.0.22",
"author": "CyCognito",
"url": "",
"email": "support@cycognito.com",
diff --git a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
index 1c485aabfe14..eb36d6e400b9 100644
--- a/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
+++ b/Packs/CyberArkEPM/Integrations/CyberArkEPMEventCollector/CyberArkEPMEventCollector.py
@@ -97,7 +97,7 @@ def get_policy_audits(self, set_id: str, from_date: str = '', limit: int = MAX_L
url_suffix = f'Sets/{set_id}/policyaudits/search?nextCursor={next_cursor}&limit={min(limit, MAX_LIMIT)}'
filter_params = f'arrivalTime GE {from_date}'
if self.policy_audits_event_type:
- filter_params += f'{filter_params} AND eventType IN {",".join(self.policy_audits_event_type)}'
+ filter_params += f' AND eventType IN {",".join(self.policy_audits_event_type)}'
data = assign_params(
filter=filter_params,
)
@@ -107,7 +107,7 @@ def get_events(self, set_id: str, from_date: str = '', limit: int = MAX_LIMIT, n
url_suffix = f'Sets/{set_id}/Events/Search?nextCursor={next_cursor}&limit={min(limit, MAX_LIMIT)}'
filter_params = f'arrivalTime GE {from_date}'
if self.raw_events_event_type:
- filter_params += f'{filter_params} AND eventType IN {",".join(self.raw_events_event_type)}'
+ filter_params += f' AND eventType IN {",".join(self.raw_events_event_type)}'
data = assign_params(
filter=filter_params,
)
diff --git a/Packs/CyberArkEPM/ReleaseNotes/1_0_1.md b/Packs/CyberArkEPM/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..ae56bbf9fdb4
--- /dev/null
+++ b/Packs/CyberArkEPM/ReleaseNotes/1_0_1.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### CyberArk EPM Event Collector
+
+Fixed an issue where the ***fetch-incidents*** command returned an internal server error.
diff --git a/Packs/CyberArkEPM/pack_metadata.json b/Packs/CyberArkEPM/pack_metadata.json
index 8a4410cd73c8..8f3b28dd26c6 100644
--- a/Packs/CyberArkEPM/pack_metadata.json
+++ b/Packs/CyberArkEPM/pack_metadata.json
@@ -2,11 +2,13 @@
"name": "CyberArk EPM",
"description": "Endpoint Privilege Manager helps remove local admin rights while improving user experience and optimizing IT operations.",
"support": "xsoar",
- "currentVersion": "1.0.0",
+ "currentVersion": "1.0.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
- "categories": ["Analytics & SIEM"],
+ "categories": [
+ "Analytics & SIEM"
+ ],
"tags": [],
"useCases": [],
"keywords": [],
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
index b5d4f2bf1def..9be1544361ce 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.py
@@ -13,6 +13,7 @@
from dateutil import parser
from typing import *
+
# Disable insecure warnings
urllib3.disable_warnings()
@@ -86,46 +87,45 @@ def build_indicators(self, args: Dict[str, Any], data: list):
indicator_obj = {
"service": "Cyble Feed"
}
- for eachtype in FeedIndicatorType.list_all_supported_indicators():
- if eachtype.lower() in args.get('collection').lower(): # type: ignore
- indicator_obj['type'] = eachtype
- break
multi_data = True
try:
- data = self.get_recursively(eachres['indicators'][0]['observable'], 'value')
- if not data:
- data = self.get_recursively(eachres['indicators'][0]['observable'], 'address_value')
+ data_r = self.get_recursively(eachres['indicators'][0]['observable'], 'value')
+ if not data_r:
+ data_r = self.get_recursively(eachres['indicators'][0]['observable'], 'address_value')
except Exception:
try:
- data = self.get_recursively(eachres['observables']['observables'][0], 'value')
+ data_r = self.get_recursively(eachres['observables']['observables'][0], 'value')
except Exception:
demisto.debug(f'Found indicator without observable field: {eachres}')
continue
+ if not data_r:
+ continue
+
if multi_data:
ind_val = {}
- for eachindicator in data:
+ for eachindicator in data_r:
typeval = auto_detect_indicator_type(eachindicator)
indicator_obj['type'] = typeval
if typeval:
ind_val[typeval] = eachindicator
- if len(data) == 1:
- indicator_obj['value'] = str(data[0])
+ if len(data_r) == 1:
+ indicator_obj['value'] = str(data_r[0])
elif indicator_obj['type'] in list(ind_val.keys()):
indicator_obj['value'] = str(ind_val[indicator_obj['type']])
elif len(ind_val) != 0:
indicator_obj['type'] = list(ind_val.keys())[0]
indicator_obj['value'] = ind_val[list(ind_val.keys())[0]]
- #
+
if eachres.get('indicators'):
- for eachindicator in eachres.get('indicators'):
- indicator_obj['title'] = eachindicator.get('title')
- indicator_obj['time'] = eachindicator.get('timestamp')
+ ind_content = eachres.get('indicators')
else:
- for eachindicator in eachres.get('ttps').get('ttps'):
- indicator_obj['title'] = eachindicator.get('title')
- indicator_obj['time'] = eachindicator.get('timestamp')
+ ind_content = eachres.get('ttps').get('ttps')
+
+ for eachindicator in ind_content:
+ indicator_obj['title'] = eachindicator.get('title')
+ indicator_obj['time'] = eachindicator.get('timestamp')
indicator_obj['rawJSON'] = eachres
indicators.append(indicator_obj)
@@ -155,6 +155,10 @@ def get_taxii(self, args: Dict[str, Any], is_first_fetch: bool = False):
count = 0
try:
+
+ if 'begin' not in args or 'end' not in args:
+ raise ValueError("Last fetch time retrieval failed.")
+
for data in self.fetch(args.get('begin'), args.get('end'), args.get('collection')):
skip = False
response = self.parse_to_json(data)
@@ -164,7 +168,7 @@ def get_taxii(self, args: Dict[str, Any], is_first_fetch: bool = False):
elif response.get('ttps') or False:
content = response.get('ttps').get('ttps')
else:
- raise ValueError("Last fetch time retrieval failed.")
+ continue
for eachone in content:
if eachone.get('confidence'):
@@ -286,7 +290,6 @@ def fetch_indicators(client: Client):
'''
args = {}
last_run = demisto.getLastRun()
- is_first_fetch = None
if isinstance(last_run, dict):
last_fetch_time = last_run.get('lastRun_{}'.format(client.collection_name), None)
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
index 0c28ed4f7e60..a59e1b6f49bb 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel.yml
@@ -136,7 +136,7 @@ script:
- contextPath: CybleIntel.collection.names
description: Available collection names for the feed service.
description: Get the data feed collection names for the taxii feed.
- dockerimage: demisto/taxii-server:1.0.0.87636
+ dockerimage: demisto/taxii-server:1.0.0.96806
subtype: python3
feed: true
fromversion: 6.2.0
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
index 7d2a973bd080..5a4a2a8085cb 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/CybleThreatIntel_test.py
@@ -5,6 +5,7 @@
input_value = json.load(open("test_data/input.json", "r"))
params = input_value['params']
args = input_value['args']
+args2 = input_value['args2']
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S+00:00"
@@ -74,7 +75,7 @@ def test_get_taxii_invalid(mocker, capfd):
mocker.patch.object(client, 'fetch', return_value=[mock_response_1])
with capfd.disabled():
try:
- val, time = Client.get_taxii(client, args)
+ val, time = Client.get_taxii(client, args2)
except Exception as e:
error_val = e.args[0]
diff --git a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
index 54c35943c8ab..00bc231fd569 100644
--- a/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
+++ b/Packs/CybleThreatIntel/Integrations/CybleThreatIntel/test_data/input.json
@@ -12,11 +12,16 @@
"initial_interval": 2,
"limit": 5
},
- "args": {
+ "args": {
"limit": 5,
"begin": "2022-06-11 00:00:00",
"end": "2022-06-13 00:00:00",
"collection": "phishing_url1",
"override_limit": "True"
+ },
+ "args2": {
+ "limit": 5,
+ "collection": "phishing_url1",
+ "override_limit": "True"
}
}
\ No newline at end of file
diff --git a/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md b/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md
new file mode 100644
index 000000000000..cb85f776836e
--- /dev/null
+++ b/Packs/CybleThreatIntel/ReleaseNotes/2_0_23.md
@@ -0,0 +1,4 @@
+#### Integrations
+##### Cyble Threat Intel
+- Fixed an issue where the **fetch-indicators** command failed when encountering improperly structured indicators
+- Updated the Docker image to: *demisto/taxii-server:1.0.0.96806*.
\ No newline at end of file
diff --git a/Packs/CybleThreatIntel/pack_metadata.json b/Packs/CybleThreatIntel/pack_metadata.json
index d9f87469412c..31501c6564e9 100644
--- a/Packs/CybleThreatIntel/pack_metadata.json
+++ b/Packs/CybleThreatIntel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cyble Threat Intel",
"description": "Cyble Threat Intelligence for Vision Users. Must have access to Vision Taxii feed to access the threat intelligence.",
"support": "partner",
- "currentVersion": "2.0.22",
+ "currentVersion": "2.0.23",
"author": "Cyble Infosec",
"url": "https://cyble.com",
"email": "",
diff --git a/Packs/Cymulate/Integrations/Cymulate_v2/README.md b/Packs/Cymulate/Integrations/Cymulate_v2/README.md
index bcaffcb34a0b..af466bb573b1 100644
--- a/Packs/Cymulate/Integrations/Cymulate_v2/README.md
+++ b/Packs/Cymulate/Integrations/Cymulate_v2/README.md
@@ -1,5 +1,8 @@
Multi-Vector Cyber Attack, Breach and Attack Simulation.
This integration was integrated and tested with API version 1 of cymulate
+
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure cymulate_v2 on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/Cymulate/pack_metadata.json b/Packs/Cymulate/pack_metadata.json
index 90a04f80b76a..7b61c88ae7a1 100644
--- a/Packs/Cymulate/pack_metadata.json
+++ b/Packs/Cymulate/pack_metadata.json
@@ -18,5 +18,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "cymulate_v2"
}
\ No newline at end of file
diff --git a/Packs/CyrenInboxSecurity/ReleaseNotes/1_1_12.md b/Packs/CyrenInboxSecurity/ReleaseNotes/1_1_12.md
new file mode 100644
index 000000000000..802daccce099
--- /dev/null
+++ b/Packs/CyrenInboxSecurity/ReleaseNotes/1_1_12.md
@@ -0,0 +1,15 @@
+
+#### Scripts
+
+##### Cyren-Find-Similar-Incidents
+
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+##### Cyren-Show-Threat-Indicators
+
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+##### Cyren-Find-Similar-Incidents
+
+
+##### Cyren-Show-Threat-Indicators
diff --git a/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.py b/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.py
index 139d4d2a2265..d6e593144535 100644
--- a/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.py
+++ b/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.py
@@ -11,8 +11,7 @@
def get_incidents_by_case(case_id, incident_id):
# find incidents that are not closed and do not have the same incident id.
- query = 'cyrencaseid="%s" and -cyrenincidentid="%s"'\
- % (case_id, incident_id)
+ query = f'cyrencaseid="{case_id}" and -cyrenincidentid="{incident_id}"'
get_incidents_argument =\
{'query': query,
@@ -41,7 +40,7 @@ def parse_time(date_time_str):
return date_time_str
time = parse_time(incident[time_field])
- return {'id': "[%s](#/Details/%s)" % (incident['id'], incident['id']),
+ return {'id': "[{}](#/Details/{})".format(incident['id'], incident['id']),
'raw_id': incident['id'],
'cyren_incident_id': incident['CustomFields']['cyrenincidentid'],
'name': incident['name'],
@@ -87,18 +86,11 @@ def main():
if len(similar_incidents or []) > 0:
similar_incidents_rows =\
- list(
- map(
- lambda x: incident_to_record(x, TIME_FIELD),
- similar_incidents
- )
- )
+ [incident_to_record(x, TIME_FIELD) for x in similar_incidents]
similar_incidents_rows =\
- list(
- sorted(
- similar_incidents_rows,
- key=lambda x: (x['time'], x['id'])
- )
+ sorted(
+ similar_incidents_rows,
+ key=lambda x: (x['time'], x['id'])
)
similar_incident_csv = ""
diff --git a/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.yml b/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.yml
index 508b39ffbcc7..4ee9ecf2cd2f 100644
--- a/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.yml
+++ b/Packs/CyrenInboxSecurity/Scripts/CyrenFindSimilarIncidents/CyrenFindSimilarIncidents.yml
@@ -1,5 +1,5 @@
args:
-- description: the case id of incidents to return
+- description: the case id of incidents to return.
name: case_id
comment: |-
Finds similar incidents by Cyren Case ID
@@ -10,7 +10,7 @@ comment: |-
commonfields:
id: Cyren-Find-Similar-Incidents
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: Cyren-Find-Similar-Incidents
outputs:
@@ -27,10 +27,10 @@ outputs:
description: Similar incident name.
type: string
- contextPath: cyrenSimilarIncidentList
- description: an array if similar incidents
+ description: an array of similar incidents.
type: Unknown
- contextPath: cyrenSimilarIncidentCsv
- description: comma separated raw ids
+ description: comma separated raw ids.
type: Unknown
script: ''
scripttarget: 0
diff --git a/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.py b/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.py
index b13885c55b0b..a1aa7adf97e6 100644
--- a/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.py
+++ b/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.py
@@ -27,6 +27,7 @@ def stringify_indicators(threat_indicators):
# other indicators
if threat_indicators.get("type") is not None:
return tableToMarkdown("", threat_indicators, ["type", "value"], pretty_title) + '\n\n'
+ return None
def pretty_title(s):
diff --git a/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.yml b/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.yml
index 9eff479a01aa..be1c897fb54b 100644
--- a/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.yml
+++ b/Packs/CyrenInboxSecurity/Scripts/CyrenShowThreatIndicators/CyrenShowThreatIndicators.yml
@@ -7,7 +7,7 @@ comment: |-
commonfields:
id: Cyren-Show-Threat-Indicators
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: Cyren-Show-Threat-Indicators
script: ''
diff --git a/Packs/CyrenInboxSecurity/pack_metadata.json b/Packs/CyrenInboxSecurity/pack_metadata.json
index a387fa56226d..db2f72369234 100644
--- a/Packs/CyrenInboxSecurity/pack_metadata.json
+++ b/Packs/CyrenInboxSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cyren Inbox Security",
"description": "Cyren Inbox Security protects Office 365 mailboxes from evasive phishing, business email compromise, and fraud.",
"support": "partner",
- "currentVersion": "1.1.11",
+ "currentVersion": "1.1.12",
"author": "Cyren",
"url": "https://www.cyren.com/products/cyren-inbox-security",
"email": "paltoalto-cortex-xsoar@cyren.com",
diff --git a/Packs/Darktrace/Integrations/DarktraceEventCollector/README.md b/Packs/Darktrace/Integrations/DarktraceEventCollector/README.md
index 1d21c3cb35ff..c4437576cc95 100644
--- a/Packs/Darktrace/Integrations/DarktraceEventCollector/README.md
+++ b/Packs/Darktrace/Integrations/DarktraceEventCollector/README.md
@@ -1,5 +1,8 @@
Use this integration to fetch a list of model breaches, filtered by the specified parameters. This is important for organizations that wish to integrate Darktrace programmatically into their SOC environment.
The integration was integrated and tested with version v5.2 API of Darktrace.
+
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Darktrace Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
diff --git a/Packs/Darktrace/pack_metadata.json b/Packs/Darktrace/pack_metadata.json
index 0635f12a65c2..5b34c9b87b9c 100644
--- a/Packs/Darktrace/pack_metadata.json
+++ b/Packs/Darktrace/pack_metadata.json
@@ -28,5 +28,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Darktrace Event Collector"
}
\ No newline at end of file
diff --git a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.py b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.py
index 059e4bc699de..6bb03a6b868d 100644
--- a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.py
+++ b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.py
@@ -42,8 +42,7 @@ def http_request(method, url_suffix, json=None):
except ValueError:
return None
except Exception as e:
- return_error(message='Error occurred on API call: %s. Error is: %s'
- % (base_url + api_suffix + url_suffix, str(e)))
+ return_error(message=f'Error occurred on API call: {base_url + api_suffix + url_suffix}. Error is: {str(e)}')
def get_specific_device():
@@ -116,7 +115,7 @@ def add_hash_to_blacklist():
policy_id = demisto.args().get('policy_id')
file_hash = demisto.args().get('file_hash')
comment = demisto.args().get('comment') or ""
- http_request('POST', '/policies/%s/blacklist/hashes/%s' % (str(policy_id), file_hash), json={"comment": comment})
+ http_request('POST', f'/policies/{str(policy_id)}/blacklist/hashes/{file_hash}', json={"comment": comment})
demisto.results('ok')
@@ -127,7 +126,7 @@ def add_hash_to_whitelist():
policy_id = demisto.args().get('policy_id')
file_hash = demisto.args().get('file_hash')
comment = demisto.args().get('comment') or ""
- http_request('POST', '/policies/%s/whitelist/hashes/%s' % (str(policy_id), file_hash), json={"comment": comment})
+ http_request('POST', f'/policies/{str(policy_id)}/whitelist/hashes/{file_hash}', json={"comment": comment})
demisto.results('ok')
@@ -137,7 +136,7 @@ def remove_hash_from_blacklist():
"""
policy_id = demisto.args().get('policy_id')
file_hash = demisto.args().get('file_hash')
- http_request('DELETE', '/policies/%s/blacklist/hashes/%s' % (str(policy_id), file_hash))
+ http_request('DELETE', f'/policies/{str(policy_id)}/blacklist/hashes/{file_hash}')
demisto.results('ok')
@@ -147,7 +146,7 @@ def remove_hash_from_whitelist():
"""
policy_id = demisto.args().get('policy_id')
file_hash = demisto.args().get('file_hash')
- http_request('DELETE', '/policies/%s/whitelist/hashes/%s' % (str(policy_id), file_hash))
+ http_request('DELETE', f'/policies/{str(policy_id)}/whitelist/hashes/{file_hash}')
demisto.results('ok')
diff --git a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.yml b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.yml
index e00c90e7ecb1..00b347d0d965 100644
--- a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.yml
+++ b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct.yml
@@ -35,283 +35,285 @@ script:
arguments:
- name: device_id
required: true
- description: The device ID
+ description: The device ID.
outputs:
- contextPath: DeepInstinct.devices.ID
- description: Device ID
+ description: Device ID.
type: number
- contextPath: DeepInstinct.devices.os
- description: Device OS
+ description: Device OS.
type: string
- contextPath: DeepInstinct.devices.osv
- description: Device OS version
+ description: Device OS version.
type: string
- contextPath: DeepInstinct.devices.ip_address
- description: Device IP address
+ description: Device IP address.
type: string
- contextPath: DeepInstinct.devices.mac_address
- description: Device mac address
+ description: Device mac address.
type: string
- contextPath: DeepInstinct.devices.hostname
- description: Device hostname
+ description: Device hostname.
type: string
- contextPath: DeepInstinct.devices.domain
- description: Device domain
+ description: Device domain.
type: string
- contextPath: DeepInstinct.devices.scanned_files
- description: Num of device scanned files
+ description: Num of device scanned files.
type: number
- contextPath: DeepInstinct.devices.tag
- description: Device tag
+ description: Device tag.
type: string
- contextPath: DeepInstinct.devices.connectivity_status
- description: Device connectivity status
+ description: Device connectivity status.
type: string
- contextPath: DeepInstinct.devices.deployment_status
- description: Device deployment status
+ description: Device deployment status.
type: string
- contextPath: DeepInstinct.devices.last_registration
- description: Device last registration datetime
+ description: Device last registration datetime.
type: string
- contextPath: DeepInstinct.devices.last_contact
- description: Device last contact datetime
+ description: Device last contact datetime.
type: string
- contextPath: DeepInstinct.devices.distinguished_name
- description: Device distinguished name
+ description: Device distinguished name.
type: string
- contextPath: DeepInstinct.devices.group_name
- description: Device group name
+ description: Device group name.
type: string
- contextPath: DeepInstinct.devices.group_id
- description: Device group ID
+ description: Device group ID.
type: number
- contextPath: DeepInstinct.devices.policy_name
- description: Device policy name
+ description: Device policy name.
type: string
- contextPath: DeepInstinct.devices.policy_id
- description: Device policy ID
+ description: Device policy ID.
type: number
- contextPath: DeepInstinct.devices.log_status
- description: Device log status
+ description: Device log status.
type: string
- contextPath: DeepInstinct.devices.agent_version
- description: Device agent version
+ description: Device agent version.
type: string
- contextPath: DeepInstinct.devices.brain_version
- description: Device brain version
+ description: Device brain version.
type: string
- contextPath: DeepInstinct.devices.msp_name
- description: Device msp name
+ description: Device msp name.
type: string
- contextPath: DeepInstinct.devices.msp_id
- description: Device msp ID
+ description: Device msp ID.
type: number
- contextPath: DeepInstinct.devices.tenant_name
- description: Device tenant name
+ description: Device tenant name.
type: string
- contextPath: DeepInstinct.devices.tenant_id
- description: Device tenant ID
+ description: Device tenant ID.
type: number
- description: get specific device by ID
+ description: get specific device by ID.
- name: deepinstinct-get-events
arguments:
- name: first_event_id
- description: First event id to get as max events in response can be 50
+ description: First event id to get as max events in response can be 50.
defaultValue: "0"
outputs:
- contextPath: DeepInstinct.Events.events.ID
- description: event ID
+ description: event ID.
type: number
- contextPath: DeepInstinct.Events.events.device_id
- description: event device ID
+ description: event device ID.
type: number
- contextPath: DeepInstinct.Events.events.file_hash
- description: event file hash
+ description: event file hash.
type: string
- contextPath: DeepInstinct.Events.events.file_type
- description: event file type
+ description: event file type.
type: string
- contextPath: DeepInstinct.Events.events.file_archive_hash
- description: event file archive hash
+ description: event file archive hash.
type: string
- contextPath: DeepInstinct.Events.events.path
- description: event file path
+ description: event file path.
- contextPath: DeepInstinct.Events.events.file_size
- description: event file size
+ description: event file size.
type: number
- contextPath: DeepInstinct.Events.events.threat_severity
- description: event threat severity
+ description: event threat severity.
type: string
- contextPath: DeepInstinct.Events.events.deep_classification
- description: Deep Instinct classification
+ description: Deep Instinct classification.
type: string
- contextPath: DeepInstinct.Events.events.file_status
- description: event file status
+ description: event file status.
type: string
- contextPath: sandbox_statusDeepInstinct.Events.events.
- description: event sandbox status
+ description: event sandbox status.
type: string
- contextPath: DeepInstinct.Events.events.model
- description: event model
+ description: event model.
type: string
- contextPath: DeepInstinct.Events.events.type
- description: event type
+ description: event type.
type: string
- contextPath: DeepInstinct.Events.events.trigger
- description: event trigger
+ description: event trigger.
type: string
- contextPath: DeepInstinct.Events.events.action
- description: event action
+ description: event action.
type: string
- contextPath: DeepInstinct.Events.events.tenant_id
- description: event tenant id
+ description: event tenant id.
type: number
- contextPath: DeepInstinct.Events.events.msp_id
- description: event msp id
+ description: event msp id.
type: number
- contextPath: DeepInstinct.Events.events.status
- description: event status
+ description: event status.
type: string
- contextPath: DeepInstinct.Events.events.close_trigger
- description: event close trigger
+ description: event close trigger.
type: string
- contextPath: DeepInstinct.Events.events.reoccurrence_count
- description: event reoccurrence_count
+ description: event reoccurrence_count.
type: number
- description: Get all events. Max events in response can be 50, use first_event_id parameter to define first event id to get
+ description: Get all events. Max events in response can be 50, use first_event_id parameter to define first event id to get.
- name: deepinstinct-get-all-groups
arguments: []
outputs:
- contextPath: DeepInstinct.Groups.ID
- description: group id
+ description: group id.
type: number
- contextPath: DeepInstinct.Groups.os
- description: group operation system
+ description: group operation system.
type: string
- contextPath: DeepInstinct.Groups.name
- description: group name
+ description: group name.
type: string
- contextPath: DeepInstinct.Groups.policy_id
- description: group policy ID
+ description: group policy ID.
type: number
- contextPath: DeepInstinct.Groups.is_default_group
- description: True if group is a default group, false otherwise
+ description: True if group is a default group, false otherwise.
type: boolean
- contextPath: DeepInstinct.Groups.msp_name
- description: msp name
+ description: msp name.
type: string
- contextPath: DeepInstinct.Groups.msp_id
- description: msp ID
+ description: msp ID.
type: number
- description: get all groups
+ description: get all groups.
- name: deepinstinct-get-all-policies
arguments: []
outputs:
- contextPath: DeepInstinct.Policies.ID
- description: policy ID
+ description: policy ID.
type: number
- contextPath: DeepInstinct.Policies.name
- description: policy name
+ description: policy name.
type: string
- contextPath: DeepInstinct.Policies.os
- description: policy operating system
+ description: policy operating system.
type: string
- contextPath: DeepInstinct.Policies.is_default_policy
- description: True if policy is a default policy, False otherwise
+ description: True if policy is a default policy, False otherwise.
type: boolean
- contextPath: DeepInstinct.Policies.msp_id
- description: msp ID
+ description: msp ID.
type: number
- contextPath: DeepInstinct.Policies.msp_name
- description: msp name
+ description: msp name.
type: string
- description: get all policies
+ description: get all policies.
- name: deepinstinct-add-hash-to-blacklist
arguments:
- name: policy_id
required: true
- description: policy ID
+ description: policy ID.
- name: file_hash
required: true
- description: file hash
+ description: file hash.
- name: comment
- description: Optional, add comment to hash field
+ description: Optional, add comment to hash field.
defaultValue: ''
- description: add file hash to block list
+ description: add file hash to block list.
- name: deepinstinct-add-hash-to-whitelist
arguments:
- name: policy_id
required: true
- description: policy ID
+ description: policy ID.
- name: file_hash
required: true
- description: file hash
+ description: file hash.
- name: comment
- description: Optional, add comment to hash field
+ description: Optional, add comment to hash field.
defaultValue: ''
- description: add file hash to allow list
+ description: add file hash to allow list.
- name: deepinstinct-remove-hash-from-blacklist
arguments:
- name: policy_id
required: true
- description: policy ID
+ description: policy ID.
- name: file_hash
required: true
- description: file hash
- description: remove file hash from block list
+ description: file hash.
+ description: remove file hash from block list.
- name: deepinstinct-remove-hash-from-whitelist
arguments:
- name: policy_id
required: true
- description: policy ID
+ description: policy ID.
- name: file_hash
required: true
- description: file hash
- description: remove file hash from allow list
+ description: file hash.
+ description: remove file hash from allow list.
- name: deepinstinct-add-devices-to-group
arguments:
- name: group_id
required: true
- description: group ID
+ description: group ID.
- name: device_ids
required: true
- description: comma separated devices ids
+ description: comma separated device ids.
isArray: true
- description: add multiple devices to group
+ description: add multiple devices to group.
- name: deepinstinct-remove-devices-from-group
arguments:
- name: group_id
required: true
- description: group ID to remove from
+ description: group ID to remove from.
- name: device_ids
required: true
- description: comma separeted list of device ids to remove
+ description: comma separated list of device ids to remove.
isArray: true
- description: remove list of devices from group
+ description: remove list of devices from group.
- name: deepinstinct-delete-files-remotely
arguments:
- name: event_ids
required: true
- description: comma separeted list of event ids
+ description: comma separated list of event ids.
isArray: true
- description: delete multiple files remotely
+ description: delete multiple files remotely.
- name: deepinstinct-terminate-processes
arguments:
- name: event_ids
required: true
- description: comma separeted list of event ids
+ description: comma separated list of event ids.
isArray: true
- description: terminate list of processes
+ description: terminate list of processes.
- name: deepinstinct-close-events
arguments:
- name: event_ids
required: true
- description: comma separeted list of event ids
+ description: comma separated list of event ids.
isArray: true
- description: close list of events
- dockerimage: demisto/python3:3.10.12.63474
+ description: close list of events.
+ dockerimage: demisto/python3:3.10.14.95956
isfetch: true
script: '-'
type: python
subtype: python3
fromversion: 5.0.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct_test.py b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct_test.py
index 8a548751fdc7..a0ac0c309037 100644
--- a/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct_test.py
+++ b/Packs/DeepInstinct/Integrations/DeepInstinct/DeepInstinct_test.py
@@ -170,7 +170,7 @@
def test_get_device_command(requests_mock, mocker):
mocker.patch.object(demisto, 'params', return_value=params)
mocker.patch.object(demisto, 'args', return_value={'device_id': mock_device['id']})
- requests_mock.get("{0}/api/v1/devices/{1}".format(params['base_url'], mock_device['id']), json=mock_device)
+ requests_mock.get("{}/api/v1/devices/{}".format(params['base_url'], mock_device['id']), json=mock_device)
mocker.patch.object(demisto, 'results')
DeepInstinct.get_specific_device()
result = demisto.results.call_args[0][0]
@@ -179,7 +179,7 @@ def test_get_device_command(requests_mock, mocker):
def test_get_all_groups(requests_mock, mocker):
mocker.patch.object(demisto, 'params', return_value=params)
- requests_mock.get("{0}/api/v1/groups".format(params['base_url']), json=mock_groups)
+ requests_mock.get("{}/api/v1/groups".format(params['base_url']), json=mock_groups)
mocker.patch.object(demisto, 'results')
DeepInstinct.get_all_groups()
result = demisto.results.call_args[0][0]
@@ -188,7 +188,7 @@ def test_get_all_groups(requests_mock, mocker):
def test_get_all_policies(requests_mock, mocker):
mocker.patch.object(demisto, 'params', return_value=params)
- requests_mock.get("{0}/api/v1/policies".format(params['base_url']), json=mock_policies)
+ requests_mock.get("{}/api/v1/policies".format(params['base_url']), json=mock_policies)
mocker.patch.object(demisto, 'results')
DeepInstinct.get_all_policies()
result = demisto.results.call_args[0][0]
@@ -198,7 +198,7 @@ def test_get_all_policies(requests_mock, mocker):
def test_get_events(requests_mock, mocker):
mocker.patch.object(demisto, 'params', return_value=params)
mocker.patch.object(demisto, 'args', return_value={'first_event_id': 0})
- requests_mock.get("{0}/api/v1/events/?after_id=0".format(params['base_url']), json=mock_events)
+ requests_mock.get("{}/api/v1/events/?after_id=0".format(params['base_url']), json=mock_events)
mocker.patch.object(demisto, 'results')
DeepInstinct.get_events()
result = demisto.results.call_args[0][0]
@@ -209,8 +209,8 @@ def test_fetch_incidents(requests_mock, mocker):
mocker.patch.object(demisto, 'params', return_value=params)
mocker.patch.object(demisto, 'args', return_value={'first_fetch_id': 0})
mocker.patch.object(demisto, 'getLastRun', return_value={'last_id': 0})
- requests_mock.get("{0}/api/v1/events/?after_id=0".format(params['base_url']), json=mock_events)
- requests_mock.get("{0}/api/v1/events/?after_id=2".format(params['base_url']), json={})
+ requests_mock.get("{}/api/v1/events/?after_id=0".format(params['base_url']), json=mock_events)
+ requests_mock.get("{}/api/v1/events/?after_id=2".format(params['base_url']), json={})
mocker.patch.object(demisto, "incidents")
DeepInstinct.fetch_incidents()
incidents = demisto.incidents.call_args[0][0]
diff --git a/Packs/DeepInstinct/Integrations/DeepInstinct3x/README.md b/Packs/DeepInstinct/Integrations/DeepInstinct3x/README.md
index 605fcf66640b..6a9d75e5b496 100644
--- a/Packs/DeepInstinct/Integrations/DeepInstinct3x/README.md
+++ b/Packs/DeepInstinct/Integrations/DeepInstinct3x/README.md
@@ -1,6 +1,8 @@
Deep Instinct is a prevention-first approach to stopping ransomware and other malware using the world's first purpose-built, deep learning cybersecurity framework.
This integration was integrated and tested with version 3.3.x of DeepInstinct v3
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure DeepInstinct v3 on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/DeepInstinct/ReleaseNotes/1_1_18.md b/Packs/DeepInstinct/ReleaseNotes/1_1_18.md
new file mode 100644
index 000000000000..7be0cd29ad80
--- /dev/null
+++ b/Packs/DeepInstinct/ReleaseNotes/1_1_18.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Deep Instinct
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/DeepInstinct/pack_metadata.json b/Packs/DeepInstinct/pack_metadata.json
index 37817465940f..263fe190f462 100644
--- a/Packs/DeepInstinct/pack_metadata.json
+++ b/Packs/DeepInstinct/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DeepInstinct",
"description": "At Deep Instinct, we prevent what others can't find.",
"support": "partner",
- "currentVersion": "1.1.17",
+ "currentVersion": "1.1.18",
"author": "Deep Instinct",
"url": "",
"email": "support@deepinstinct.com",
@@ -18,5 +18,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "DeepInstinct v3"
}
\ No newline at end of file
diff --git a/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md b/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md
new file mode 100644
index 000000000000..76646814e98c
--- /dev/null
+++ b/Packs/DefaultPlaybook/ReleaseNotes/2_0_11.md
@@ -0,0 +1,3 @@
+## Default
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DefaultPlaybook/pack_metadata.json b/Packs/DefaultPlaybook/pack_metadata.json
index 1cddfa74b745..cce5cb18d078 100644
--- a/Packs/DefaultPlaybook/pack_metadata.json
+++ b/Packs/DefaultPlaybook/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Default",
"description": "Got a unique incident? This Content Pack helps you automate the core steps of enrichment and severity calculation for any kind of incident.",
"support": "xsoar",
- "currentVersion": "2.0.10",
+ "currentVersion": "2.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
index b9c68440588c..f23b9411481f 100644
--- a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
+++ b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.js
@@ -39,51 +39,86 @@ switch (command) {
case 'demisto-lock-get':
var lockTimeout = args.timeout || params.timeout || 600;
- var incidentID = incidents[0].id
- var lockInfo = 'Locked by incident #' + incidentID + '.';
- lockInfo += (args.info) ? ' Additional info: ' + args.info :'';
+ var lockInfo = 'Locked by incident #' + incidents[0].id + '.';
+ lockInfo += (args.info) ? ' Additional info: ' + args.info : '';
- var guid = guid();
+ var guid = args.guid || guid();
var time = 0;
var lock, version;
- var attempt = 1;
- do{
- logDebug('Task guid: ' + guid + ', Incident:' + incidentID + ' | Trying to acquire lock lockName: ' + lockName + ', attempt number: ' + attempt);
+
+ if (isDemistoVersionGE('8.0.0')) { // XSOAR 8 lock implementation with polling.
+ logDebug('Running on XSOAR version 8');
+
+ // check if the process already holds the lock
[lock, version] = getLock();
- if (typeof version === 'object') {
+ if (typeof version === "object") {
version = JSON.stringify(version)
}
- logDebug('Task guid: ' + guid + ', Incident:' + incidentID + ' | Current lock is: ' + JSON.stringify(lock) + ', version: ' + version);
+ logDebug('Task guid: ' + guid + ' | Current lock is: ' + JSON.stringify(lock) + ', version: ' + version);
if (lock.guid === guid) {
- break;
+ var md = '### Demisto Locking Mechanism\n';
+ md += 'Lock acquired successfully\n';
+ md += 'GUID: ' + guid;
+ logDebug(md)
+ return { ContentsFormat: formats.markdown, Type: entryTypes.note, Contents: md };
}
- if (!lock.guid) {
- try {
- setLock(guid, lockInfo, version);
- } catch(err) {
- logDebug('Task guid: ' + guid + ', Incident:' + incidentID + ' | Failed setting lock: ' + err.message);
+ else {
+ // attempt to acquire the lock
+ if (!lock.guid) {
+ logDebug("Attempting to acquire lock")
+ try {
+ setLock(guid, lockInfo, version);
+ } catch (err) {
+ logDebug(err.message);
+ }
+ }
+ var timeout_err_msg = 'Timeout waiting for lock\n';
+ timeout_err_msg += 'Lock name: ' + lockName + '\n';
+ timeout_err_msg += 'Lock info: ' + lock.info + '\n';
+ logDebug(timeout_err_msg)
+ return {
+ Type: entryTypes.note,
+ Contents: 'Lock was not acquired, Polling.',
+ PollingCommand: 'demisto-lock-get',
+ NextRun: '30',
+ PollingArgs: { name: lockName, info: args.info, timeout: args.timeout, guid: guid, timeout_err_msg: timeout_err_msg },
+ Timeout: String(lockTimeout)
}
}
- attempt++;
- wait(1);
- } while (time++ < lockTimeout) ;
+ } else { // XSOAR 6 lock implementation without polling.
+ logDebug('Running on XSOAR version 6');
+ do {
+ [lock, version] = getLock();
+ if (lock.guid === guid) {
+ break;
+ }
+ if (!lock.guid) {
+ try {
+ setLock(guid, lockInfo, version);
+ } catch (err) {
+ logDebug(err.message)
+ }
+ }
+ wait(1);
+ } while (time++ < lockTimeout);
- [lock, version] = getLock();
+ [lock, version] = getLock();
- if (lock.guid === guid) {
- var md = '### Demisto Locking Mechanism\n';
- md += 'Lock acquired successfully\n';
- md += 'GUID: ' + guid;
- return { ContentsFormat: formats.markdown, Type: entryTypes.note, Contents: md } ;
- } else {
- var md = 'Timeout waiting for lock\n';
- md += 'Lock name: ' + lockName + '\n';
- md += 'Lock info: ' + lock.info + '\n';
- return { ContentsFormat: formats.text, Type: entryTypes.error, Contents: md };
+ if (lock.guid === guid) {
+ var md = '### Demisto Locking Mechanism\n';
+ md += 'Lock acquired successfully\n';
+ md += 'GUID: ' + guid;
+ return { ContentsFormat: formats.markdown, Type: entryTypes.note, Contents: md };
+ } else {
+ var md = 'Timeout waiting for lock\n';
+ md += 'Lock name: ' + lockName + '\n';
+ md += 'Lock info: ' + lock.info + '\n';
+ return { ContentsFormat: formats.text, Type: entryTypes.error, Contents: md };
+ }
+ break;
}
- break;
case 'demisto-lock-release':
logDebug('Releasing lock lockName: ' + lockName);
diff --git a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.yml b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.yml
index df250e12191f..e1e2dec43973 100644
--- a/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.yml
+++ b/Packs/DemistoLocking/Integrations/DemistoLock/DemistoLock.yml
@@ -29,6 +29,7 @@ script:
type: javascript
commands:
- name: demisto-lock-get
+ polling: true
arguments:
- name: name
default: true
@@ -37,6 +38,12 @@ script:
description: Additional information to provide for the lock instance.
- name: timeout
description: Timeout (seconds) for wait on lock to be freed.
+ - name: guid
+ description: Unique identifier value of the lock.
+ hidden: true
+ - name: timeout_err_msg
+ description: Polling Timeout error message.
+ hidden: true
description: Gets a specific lock. If the lock doesn't exist, it creates one. If the lock is already in use, the command waits until the lock is released or until timeout is reached. If timeout is reached and the lock hasn't been released, the command fails to get the lock.
- name: demisto-lock-release
arguments:
@@ -55,4 +62,5 @@ script:
description: Release all locks.
marketplaces:
- xsoar
+- marketplacev2
fromversion: 5.0.0
diff --git a/Packs/DemistoLocking/ReleaseNotes/1_1_0.md b/Packs/DemistoLocking/ReleaseNotes/1_1_0.md
new file mode 100644
index 000000000000..18833835efef
--- /dev/null
+++ b/Packs/DemistoLocking/ReleaseNotes/1_1_0.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Demisto Lock
+
+- Updated the `demisto-lock-get` command to run as a scheduled command on XSOAR version 8.0 or higher.
diff --git a/Packs/DemistoLocking/ReleaseNotes/1_1_1.md b/Packs/DemistoLocking/ReleaseNotes/1_1_1.md
new file mode 100644
index 000000000000..ff25b5e075c6
--- /dev/null
+++ b/Packs/DemistoLocking/ReleaseNotes/1_1_1.md
@@ -0,0 +1,3 @@
+## Cortex Lock
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DemistoLocking/TestPlaybooks/playbook-DemistoLockingTest.yml b/Packs/DemistoLocking/TestPlaybooks/playbook-DemistoLockingTest.yml
index 2b0902df7bf3..d6d919e9bc20 100644
--- a/Packs/DemistoLocking/TestPlaybooks/playbook-DemistoLockingTest.yml
+++ b/Packs/DemistoLocking/TestPlaybooks/playbook-DemistoLockingTest.yml
@@ -1,15 +1,16 @@
id: DemistoLockTest
-version: -1
+version: 5
+vcShouldKeepItemLegacyProdMachine: false
name: DemistoLockTest
description: Test playbook for Demisto Locking integration
starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 2ada7137-85ee-41db-8a6d-2aa92c6d6ea2
+ taskid: 1e488f23-17ae-4697-8ec5-2caf8cffc6cd
type: start
task:
- id: 2ada7137-85ee-41db-8a6d-2aa92c6d6ea2
+ id: 1e488f23-17ae-4697-8ec5-2caf8cffc6cd
version: -1
name: ""
iscommand: false
@@ -17,21 +18,28 @@ tasks:
nexttasks:
'#none#':
- "12"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 50
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: ecfd392c-0e2e-4eb6-8d40-3dc9cb7b323a
+ taskid: 652133ff-d95f-4a4a-86f6-d1e465a05dae
type: regular
task:
- id: ecfd392c-0e2e-4eb6-8d40-3dc9cb7b323a
+ id: 652133ff-d95f-4a4a-86f6-d1e465a05dae
version: -1
name: lock-info
description: Show information on a locks
@@ -42,26 +50,32 @@ tasks:
nexttasks:
'#none#':
- "14"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 545
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: 8ed60626-10d4-4c78-8efa-b1bba0c6f287
+ taskid: 2cbaf424-3361-4362-80bb-689834c28267
type: regular
task:
- id: 8ed60626-10d4-4c78-8efa-b1bba0c6f287
+ id: 2cbaf424-3361-4362-80bb-689834c28267
version: -1
name: lock-get
- description: Get lock. If the lock is already in use - will wait until it is released or until timeout is reached
+ description: Get lock. If the lock is already in use - will wait until it is
+ released or until timeout is reached
script: Demisto Lock|||demisto-lock-get
type: regular
iscommand: true
@@ -70,28 +84,32 @@ tasks:
'#none#':
- "4"
- "3"
- scriptarguments:
- info: {}
- name: {}
- timeout: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 865
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 3406d919-6735-4067-8b3e-3f9b6ab65521
+ taskid: 61ac5ca5-5600-4629-8033-174fe9020f6a
type: regular
task:
- id: 3406d919-6735-4067-8b3e-3f9b6ab65521
+ id: 61ac5ca5-5600-4629-8033-174fe9020f6a
version: -1
name: lock-get
- description: Get lock. If the lock is already in use - will wait until it is released or until timeout is reached
+ description: Get lock. If the lock is already in use - will wait until it is
+ released or until timeout is reached
script: Demisto Lock|||demisto-lock-get
type: regular
iscommand: true
@@ -99,25 +117,28 @@ tasks:
nexttasks:
'#none#':
- "7"
- scriptarguments:
- info: {}
- name: {}
- timeout: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 490,
+ "x": 592.5,
"y": 1040
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 759476d7-cb8a-487f-8b3e-55c296af5b77
+ taskid: 3e212460-d3b5-467a-8118-7f3768147b43
type: regular
task:
- id: 759476d7-cb8a-487f-8b3e-55c296af5b77
+ id: 3e212460-d3b5-467a-8118-7f3768147b43
version: -1
name: Sleep
scriptName: Sleep
@@ -130,21 +151,28 @@ tasks:
scriptarguments:
seconds:
simple: "10"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 60,
+ "x": 162.5,
"y": 1215
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: 28f82819-7d1e-45aa-876f-65d04e600bbb
+ taskid: 26f2bb30-a754-4ad9-814a-7ff38d20a62a
type: regular
task:
- id: 28f82819-7d1e-45aa-876f-65d04e600bbb
+ id: 26f2bb30-a754-4ad9-814a-7ff38d20a62a
version: -1
name: Set context
scriptName: Set
@@ -155,26 +183,32 @@ tasks:
'#none#':
- "6"
scriptarguments:
- append: {}
key:
simple: key
value:
simple: ok
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 60,
+ "x": 162.5,
"y": 1390
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: a44ff79d-8f4b-4419-8151-c69fecf9a2a2
+ taskid: eb4ee1ed-cf90-4091-87a0-dd1998eb2da8
type: regular
task:
- id: a44ff79d-8f4b-4419-8151-c69fecf9a2a2
+ id: eb4ee1ed-cf90-4091-87a0-dd1998eb2da8
version: -1
name: lock-release
description: Release a lock
@@ -185,23 +219,28 @@ tasks:
nexttasks:
'#none#':
- "13"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 60,
+ "x": 162.5,
"y": 1565
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: 0f8a0578-b570-479c-8ce5-19bb3137b81e
+ taskid: 68bada69-ce19-4c03-81b9-66b16fde7d49
type: regular
task:
- id: 0f8a0578-b570-479c-8ce5-19bb3137b81e
+ id: 68bada69-ce19-4c03-81b9-66b16fde7d49
version: -1
name: Print
scriptName: Print
@@ -214,21 +253,28 @@ tasks:
scriptarguments:
value:
simple: ${key}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 490,
+ "x": 592.5,
"y": 1215
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: d6b5f540-caf1-49d8-868b-1d965c93325f
+ taskid: a862e1f0-dfaa-4024-8ab3-36d8bdbfe3a9
type: regular
task:
- id: d6b5f540-caf1-49d8-868b-1d965c93325f
+ id: a862e1f0-dfaa-4024-8ab3-36d8bdbfe3a9
version: -1
name: lock-release
description: Release a lock
@@ -239,23 +285,28 @@ tasks:
nexttasks:
'#none#':
- "13"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 490,
+ "x": 592.5,
"y": 1565
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: 3b0a046c-9de8-4383-8b72-78c1132657a0
+ taskid: 0fc95f73-2940-478b-8f1c-45182a6631ed
type: regular
task:
- id: 3b0a046c-9de8-4383-8b72-78c1132657a0
+ id: 0fc95f73-2940-478b-8f1c-45182a6631ed
version: -1
name: lock-info
description: Show information on a locks
@@ -266,23 +317,28 @@ tasks:
nexttasks:
'#none#':
- "8"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 490,
+ "x": 592.5,
"y": 1390
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: 446d979d-1ecd-4174-86f3-44fa34194b8a
+ taskid: 22ad3680-27a4-48ff-8ec2-5620e5c3d01e
type: regular
task:
- id: 446d979d-1ecd-4174-86f3-44fa34194b8a
+ id: 22ad3680-27a4-48ff-8ec2-5620e5c3d01e
version: -1
name: closeInvestigation
description: Close the current investigation
@@ -290,25 +346,28 @@ tasks:
type: regular
iscommand: true
brand: Builtin
- scriptarguments:
- closeNotes: {}
- closeReason: {}
- id: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
- "y": 4130
+ "x": 377.5,
+ "y": 3955
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: c8d03d09-d738-45a1-88e4-99212d04e917
+ taskid: 17aa6a6b-f97b-4b25-8f8c-ff94adedb6b7
type: regular
task:
- id: c8d03d09-d738-45a1-88e4-99212d04e917
+ id: 17aa6a6b-f97b-4b25-8f8c-ff94adedb6b7
version: -1
name: lock-release-all
description: Release all locks
@@ -319,21 +378,28 @@ tasks:
nexttasks:
'#none#':
- "1"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 370
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: b74d92fe-fd60-4749-8b80-d6bb631597a7
+ taskid: d10fd77e-a274-45c4-8217-9a6afb4724ed
type: regular
task:
- id: b74d92fe-fd60-4749-8b80-d6bb631597a7
+ id: d10fd77e-a274-45c4-8217-9a6afb4724ed
version: -1
name: Delete Context
scriptName: DeleteContext
@@ -346,22 +412,28 @@ tasks:
scriptarguments:
all:
simple: "yes"
- key: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 195
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: ea352eb7-0ca1-40df-8835-fdda45ae3545
+ taskid: 5b66a81b-9ce4-4f8e-8fff-88e349f2b7ac
type: title
task:
- id: ea352eb7-0ca1-40df-8835-fdda45ae3545
+ id: 5b66a81b-9ce4-4f8e-8fff-88e349f2b7ac
version: -1
name: Check timeout
type: title
@@ -370,21 +442,28 @@ tasks:
nexttasks:
'#none#':
- "15"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 1740
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"14":
id: "14"
- taskid: aabb15ad-37fe-4c6e-8c31-957429629ba7
+ taskid: 7f6a5c73-42b8-45b5-82d7-833c7d8ee420
type: title
task:
- id: aabb15ad-37fe-4c6e-8c31-957429629ba7
+ id: 7f6a5c73-42b8-45b5-82d7-833c7d8ee420
version: -1
name: Check successful lock
type: title
@@ -393,21 +472,28 @@ tasks:
nexttasks:
'#none#':
- "2"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 720
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"15":
id: "15"
- taskid: 2a6b27bc-01ad-4195-8f62-4b6ac7427c1c
+ taskid: bf5b17a1-76dc-4643-8ccb-dcd46ea41e35
type: regular
task:
- id: 2a6b27bc-01ad-4195-8f62-4b6ac7427c1c
+ id: bf5b17a1-76dc-4643-8ccb-dcd46ea41e35
version: -1
name: get-lock
script: Demisto Lock|||demisto-lock-get
@@ -419,25 +505,30 @@ tasks:
- "16"
- "17"
scriptarguments:
- info: {}
name:
simple: timeout
- timeout: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 1885
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"16":
id: "16"
- taskid: f2b9a15c-e894-4d15-8aef-f157611d52f9
+ taskid: 15f49725-9f27-4472-8c94-fc182d01ffcb
type: regular
task:
- id: f2b9a15c-e894-4d15-8aef-f157611d52f9
+ id: 15f49725-9f27-4472-8c94-fc182d01ffcb
version: -1
name: get-lock
script: Demisto Lock|||demisto-lock-get
@@ -445,30 +536,38 @@ tasks:
iscommand: true
brand: Demisto Lock
nexttasks:
+ '#error#':
+ - "20"
'#none#':
- "18"
scriptarguments:
- info: {}
name:
simple: timeout
timeout:
simple: "10"
- reputationcalc: 0
- continueonerror: true
separatecontext: false
+ continueonerror: true
+ continueonerrortype: errorPath
view: |-
{
"position": {
- "x": 490,
+ "x": 592.5,
"y": 2060
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"17":
id: "17"
- taskid: 723e443d-96aa-4a5b-8d99-ce16ab225bd9
+ taskid: 62d99bb8-7273-4361-8d76-7c39675b69e6
type: regular
task:
- id: 723e443d-96aa-4a5b-8d99-ce16ab225bd9
+ id: 62d99bb8-7273-4361-8d76-7c39675b69e6
version: -1
name: Sleep
scriptName: Sleep
@@ -481,21 +580,28 @@ tasks:
scriptarguments:
seconds:
simple: "20"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 70,
- "y": 2060
+ "x": 50,
+ "y": 2235
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"18":
id: "18"
- taskid: 02c44b17-94bf-42b6-8be2-fbb9907eaf09
+ taskid: c0b5a468-c37d-4d05-8337-10f7a7ff57c8
type: condition
task:
- id: 02c44b17-94bf-42b6-8be2-fbb9907eaf09
+ id: c0b5a468-c37d-4d05-8337-10f7a7ff57c8
version: -1
name: Timeout?
scriptName: isError
@@ -508,21 +614,28 @@ tasks:
scriptarguments:
entryId:
simple: ${lastCompletedTaskEntries}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 490,
+ "x": 480,
"y": 2235
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: c986a6ef-68ec-4fbc-8b04-8389da3b704e
+ taskid: 0856eef1-a548-4bce-8c0f-78dc5c2decd1
type: regular
task:
- id: c986a6ef-68ec-4fbc-8b04-8389da3b704e
+ id: 0856eef1-a548-4bce-8c0f-78dc5c2decd1
version: -1
name: lock-release-all
script: Demisto Lock|||demisto-lock-release-all
@@ -532,21 +645,28 @@ tasks:
nexttasks:
'#none#':
- "28"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 265,
"y": 2410
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"20":
id: "20"
- taskid: 297a89b3-04da-4bc6-870e-986dc8c0551c
+ taskid: d0c36655-2116-4bb7-8028-40610305c26e
type: title
task:
- id: 297a89b3-04da-4bc6-870e-986dc8c0551c
+ id: d0c36655-2116-4bb7-8028-40610305c26e
version: -1
name: Check parallel
type: title
@@ -556,24 +676,32 @@ tasks:
'#none#':
- "21"
- "22"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 377.5,
"y": 2760
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"21":
id: "21"
- taskid: 2162752d-a4c6-43a8-8c81-61230cb89eb0
+ taskid: 499eee07-3190-43bf-8704-ca800c2e10f5
type: regular
task:
- id: 2162752d-a4c6-43a8-8c81-61230cb89eb0
+ id: 499eee07-3190-43bf-8704-ca800c2e10f5
version: -1
name: lock-get
- description: Get lock. If the lock is already in use - will wait until it is released or until timeout is reached
+ description: Get lock. If the lock is already in use - will wait until it is
+ released or until timeout is reached
script: Demisto Lock|||demisto-lock-get
type: regular
iscommand: true
@@ -581,28 +709,32 @@ tasks:
nexttasks:
'#none#':
- "29"
- scriptarguments:
- info: {}
- name: {}
- timeout: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 50,
+ "x": 152.5,
"y": 2905
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"22":
id: "22"
- taskid: 7941f44f-9cdd-453b-8138-9443541f3f6e
+ taskid: de885218-aea6-4d0d-805a-ed0bb1edf7eb
type: regular
task:
- id: 7941f44f-9cdd-453b-8138-9443541f3f6e
+ id: de885218-aea6-4d0d-805a-ed0bb1edf7eb
version: -1
name: lock-get
- description: Get lock. If the lock is already in use - will wait until it is released or until timeout is reached
+ description: Get lock. If the lock is already in use - will wait until it is
+ released or until timeout is reached
script: Demisto Lock|||demisto-lock-get
type: regular
iscommand: true
@@ -610,25 +742,28 @@ tasks:
nexttasks:
'#none#':
- "30"
- scriptarguments:
- info: {}
- name: {}
- timeout: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 500,
+ "x": 602.5,
"y": 2905
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"23":
id: "23"
- taskid: 15020654-8161-46c8-8293-fb3e56ccdaff
+ taskid: b41744bb-1c05-4e63-804b-a3266c4ff353
type: regular
task:
- id: 15020654-8161-46c8-8293-fb3e56ccdaff
+ id: b41744bb-1c05-4e63-804b-a3266c4ff353
version: -1
name: Sleep
description: Sleep for X seconds
@@ -642,21 +777,28 @@ tasks:
scriptarguments:
seconds:
simple: "5"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 500,
+ "x": 602.5,
"y": 3255
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: 8a4d0165-2b8b-43b9-8341-857a11568791
+ taskid: 369814fd-56a3-4350-8577-e6cf70691e77
type: regular
task:
- id: 8a4d0165-2b8b-43b9-8341-857a11568791
+ id: 369814fd-56a3-4350-8577-e6cf70691e77
version: -1
name: Sleep
description: Sleep for X seconds
@@ -670,21 +812,28 @@ tasks:
scriptarguments:
seconds:
simple: "5"
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 50,
+ "x": 152.5,
"y": 3255
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"25":
id: "25"
- taskid: 8415366e-3cae-4319-8577-0e02f5f11db1
+ taskid: 22893d66-4f7d-49e2-8e7b-f9cd2de8722c
type: regular
task:
- id: 8415366e-3cae-4319-8577-0e02f5f11db1
+ id: 22893d66-4f7d-49e2-8e7b-f9cd2de8722c
version: -1
name: lock-release
description: Release a lock
@@ -695,23 +844,28 @@ tasks:
nexttasks:
'#none#':
- "10"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 50,
+ "x": 152.5,
"y": 3780
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"26":
id: "26"
- taskid: de2fbc8e-2e0d-49d4-815d-76aa0a51dc7c
+ taskid: 4158c53c-88be-4e4b-8802-a67147affd9a
type: regular
task:
- id: de2fbc8e-2e0d-49d4-815d-76aa0a51dc7c
+ id: 4158c53c-88be-4e4b-8802-a67147affd9a
version: -1
name: lock-release
description: Release a lock
@@ -722,23 +876,28 @@ tasks:
nexttasks:
'#none#':
- "10"
- scriptarguments:
- name: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 500,
+ "x": 602.5,
"y": 3780
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"28":
id: "28"
- taskid: c06cc601-de90-4b30-81f3-0656b253f3cb
+ taskid: f5c12d0c-5647-4000-8716-d7b249baa2fe
type: regular
task:
- id: c06cc601-de90-4b30-81f3-0656b253f3cb
+ id: f5c12d0c-5647-4000-8716-d7b249baa2fe
version: -1
name: Delete Context
scriptName: DeleteContext
@@ -751,22 +910,28 @@ tasks:
scriptarguments:
all:
simple: "yes"
- key: {}
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 275,
+ "x": 265,
"y": 2585
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"29":
id: "29"
- taskid: 598e44ca-36d3-401c-8512-3f45689367b5
+ taskid: da21ce6d-469b-4fa2-8e16-394cfe6ae6d0
type: regular
task:
- id: 598e44ca-36d3-401c-8512-3f45689367b5
+ id: da21ce6d-469b-4fa2-8e16-394cfe6ae6d0
version: -1
name: Set 1
scriptName: Set
@@ -777,26 +942,32 @@ tasks:
'#none#':
- "24"
scriptarguments:
- append: {}
key:
simple: key1
value:
simple: key1
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 50,
+ "x": 152.5,
"y": 3080
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"30":
id: "30"
- taskid: 98ea0dae-4745-42b2-8c0c-071c0a522168
+ taskid: 4be7c571-c3cd-42ac-856d-9323104469c1
type: regular
task:
- id: 98ea0dae-4745-42b2-8c0c-071c0a522168
+ id: 4be7c571-c3cd-42ac-856d-9323104469c1
version: -1
name: Set 2
scriptName: Set
@@ -807,26 +978,32 @@ tasks:
'#none#':
- "23"
scriptarguments:
- append: {}
key:
simple: key2
value:
simple: key2
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 500,
+ "x": 602.5,
"y": 3080
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"31":
id: "31"
- taskid: 64203cfe-f10e-4bc6-87ed-a878737ee55b
+ taskid: 1faa5b1e-ad5f-469d-8c2f-b6c2220ea088
type: condition
task:
- id: 64203cfe-f10e-4bc6-87ed-a878737ee55b
+ id: 1faa5b1e-ad5f-469d-8c2f-b6c2220ea088
version: -1
name: Check if key 1 is set
type: condition
@@ -836,29 +1013,36 @@ tasks:
'#default#':
- "26"
- "33"
- reputationcalc: 0
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: general.isExists
+ - - operator: isExists
left:
value:
simple: key1
iscontext: true
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 500,
+ "x": 602.5,
"y": 3430
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 28860329-efab-4186-856a-c1ac8c168f51
+ taskid: 68484e6d-5029-41c4-8158-6af332283896
type: condition
task:
- id: 28860329-efab-4186-856a-c1ac8c168f51
+ id: 68484e6d-5029-41c4-8158-6af332283896
version: -1
name: Check if key 2 is set
type: condition
@@ -868,29 +1052,36 @@ tasks:
'#default#':
- "25"
- "34"
- reputationcalc: 0
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: general.isExists
+ - - operator: isExists
left:
value:
simple: key2
iscontext: true
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 50,
+ "x": 152.5,
"y": 3430
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"33":
id: "33"
- taskid: 3c6743af-47a9-4067-876b-fecd43f5c6dd
+ taskid: 3f2ebc1a-c739-4959-88fd-467f12a4242d
type: regular
task:
- id: 3c6743af-47a9-4067-876b-fecd43f5c6dd
+ id: 3f2ebc1a-c739-4959-88fd-467f12a4242d
version: -1
name: Delete key 2
scriptName: DeleteContext
@@ -901,24 +1092,30 @@ tasks:
'#none#':
- "26"
scriptarguments:
- all: {}
key:
simple: key2
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 612.5,
+ "x": 715,
"y": 3605
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"34":
id: "34"
- taskid: 25fae19c-3e3e-4b48-86f8-b8e38d89264a
+ taskid: f7eb738d-84d1-4f9c-899a-22839b282d2a
type: regular
task:
- id: 25fae19c-3e3e-4b48-86f8-b8e38d89264a
+ id: f7eb738d-84d1-4f9c-899a-22839b282d2a
version: -1
name: Delete key 1
scriptName: DeleteContext
@@ -929,25 +1126,33 @@ tasks:
'#none#':
- "25"
scriptarguments:
- all: {}
key:
simple: key1
- reputationcalc: 0
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
+ "x": 265,
"y": 3605
}
}
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
- "linkLabelsPosition": {},
+ "linkLabelsPosition": {
+ "16_20_#error#": 0.69
+ },
"paper": {
"dimensions": {
- "height": 4175,
- "width": 942.5,
+ "height": 4000,
+ "width": 1045,
"x": 50,
"y": 50
}
@@ -955,6 +1160,8 @@ view: |-
}
inputs: []
outputs: []
+dirtyInputs: true
fromversion: 5.0.0
marketplaces:
- xsoar
+ - marketplacev2
diff --git a/Packs/DemistoLocking/pack_metadata.json b/Packs/DemistoLocking/pack_metadata.json
index a00eabd42b06..0354cc0ca7b0 100644
--- a/Packs/DemistoLocking/pack_metadata.json
+++ b/Packs/DemistoLocking/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Lock",
"description": "Locking mechanism that prevents concurrent execution of different tasks",
"support": "xsoar",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.1.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md
new file mode 100644
index 000000000000..56b8f0d70667
--- /dev/null
+++ b/Packs/DemistoRESTAPI/ReleaseNotes/1_3_56.md
@@ -0,0 +1,3 @@
+## Cortex REST API
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/DemistoRESTAPI/pack_metadata.json b/Packs/DemistoRESTAPI/pack_metadata.json
index c7ea29f9cf43..5ac6f2766673 100644
--- a/Packs/DemistoRESTAPI/pack_metadata.json
+++ b/Packs/DemistoRESTAPI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex REST API",
"description": "Use Demisto REST APIs",
"support": "xsoar",
- "currentVersion": "1.3.55",
+ "currentVersion": "1.3.56",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/README.md b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/README.md
index 7a793258b6a5..b7bf3741cc3c 100644
--- a/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/README.md
+++ b/Packs/DigitalGuardian/Integrations/DigitalGuardianARCEventCollector/README.md
@@ -1,6 +1,8 @@
This is the Digital Guardian ARC event collector integration for XSIAM.
This integration was integrated and tested with version 3.10.0 of DigitalGuardianARCEventCollector
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Known Limitations
The integration fetch interval should be set to a minimum of "1 hour". If set to less, a quota error might be received.
diff --git a/Packs/DigitalGuardian/pack_metadata.json b/Packs/DigitalGuardian/pack_metadata.json
index ff96b8993724..7744841d42e8 100644
--- a/Packs/DigitalGuardian/pack_metadata.json
+++ b/Packs/DigitalGuardian/pack_metadata.json
@@ -20,5 +20,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "DigitalGuardianARCEventCollector"
}
\ No newline at end of file
diff --git a/Packs/DigitalShadows/Integrations/ReliaQuestGreyMatterDRPEventCollector/README.md b/Packs/DigitalShadows/Integrations/ReliaQuestGreyMatterDRPEventCollector/README.md
index 862a42abaeab..a21016f3c6e1 100644
--- a/Packs/DigitalShadows/Integrations/ReliaQuestGreyMatterDRPEventCollector/README.md
+++ b/Packs/DigitalShadows/Integrations/ReliaQuestGreyMatterDRPEventCollector/README.md
@@ -2,6 +2,8 @@ ReliaQuest GreyMatter DRP Event Collector minimizes digital risk by identifying
This integration fetches event items which can be either incident/alerts, for more information refer [here](https://portal-digitalshadows.com/learn/searchlight-api/key-words/triage)
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure ReliaQuest GreyMatter DRP Event Collector On XSIAM
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
diff --git a/Packs/DigitalShadows/pack_metadata.json b/Packs/DigitalShadows/pack_metadata.json
index d619a4226b03..7f97c2587e16 100644
--- a/Packs/DigitalShadows/pack_metadata.json
+++ b/Packs/DigitalShadows/pack_metadata.json
@@ -73,5 +73,6 @@
"DS",
"Digital Shadows",
"SearchLight"
- ]
+ ],
+ "defaultDataSource": "ReliaQuest GreyMatter DRP Event Collector"
}
\ No newline at end of file
diff --git a/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.py b/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.py
index 47fb624badf8..5da7ce5405a9 100644
--- a/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.py
+++ b/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.py
@@ -9,7 +9,7 @@
import struct
import urllib3
from datetime import datetime, timedelta, timezone
-from typing import List, Dict, Any
+from typing import Any
# disable insecure warnings
urllib3.disable_warnings()
@@ -66,7 +66,6 @@ def get_base_url():
class EndOfTime(Exception):
''' Raised when functions timeout '''
- pass
def function_timeout(signum, frame):
@@ -225,7 +224,7 @@ def fetch_incidents():
try:
new_start_time = datetime.utcnow() # may be used to update new start_time if no incidents found.
new_start_time_str = new_start_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
- incidents: List[Dict[str, Any]] = []
+ incidents: list[dict[str, Any]] = []
last_run = demisto.getLastRun()
# Check if last_run exists and has a start_time to continue:
@@ -705,10 +704,7 @@ def scan_policy_exists(policy_selected):
resp = requests.get(policy_url, headers=API_AUTH_HEADER, verify=VERIFY_SSL)
resp.raise_for_status()
data = json.loads(resp.text)
- for policy in data:
- if policy_selected == policy.get('name', ""):
- return True
- return False
+ return any(policy_selected == policy.get('name', '') for policy in data)
except Exception as err:
return_error("Error: FrontlineVM scan_policy_exists failed " + str(err))
@@ -788,7 +784,7 @@ def test_module():
def main():
''' Integration main method '''
- LOG('command is %s' % (demisto.command(), ))
+ LOG(f'command is {demisto.command()}')
try:
if demisto.command() == 'test-module':
test_module()
diff --git a/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.yml b/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.yml
index 88d27fe50f4f..9fa9130c42d1 100644
--- a/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.yml
+++ b/Packs/Digital_Defense_FrontlineVM/Integrations/Digital_Defense_FrontlineVM/Digital_Defense_FrontlineVM.yml
@@ -164,7 +164,7 @@ script:
- contextPath: FrontlineVM.Scan.IP
description: The IP address of the scan (can be a single IP address or a range of IP addresses).
description: Performs a scan on the specified asset.
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
isfetch: true
subtype: python3
fromversion: 5.0.0
diff --git a/Packs/Digital_Defense_FrontlineVM/ReleaseNotes/1_1_10.md b/Packs/Digital_Defense_FrontlineVM/ReleaseNotes/1_1_10.md
new file mode 100644
index 000000000000..414fbb7ff246
--- /dev/null
+++ b/Packs/Digital_Defense_FrontlineVM/ReleaseNotes/1_1_10.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Digital Defense FrontlineVM
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Digital_Defense_FrontlineVM/pack_metadata.json b/Packs/Digital_Defense_FrontlineVM/pack_metadata.json
index aa3a03da6247..035a68f81ee3 100644
--- a/Packs/Digital_Defense_FrontlineVM/pack_metadata.json
+++ b/Packs/Digital_Defense_FrontlineVM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Digital Defense Frontline VM",
"description": "Use the Digital Defense Frontline VM to identify and evaluate the security and business risks of network devices and applications deployed as premise, cloud, or hybrid network-based implementations.",
"support": "partner",
- "currentVersion": "1.1.9",
+ "currentVersion": "1.1.10",
"author": "Digital Defense",
"url": "https://www.digitaldefense.com/",
"email": "support@digitaldefense.com",
diff --git a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
index eed495558930..fe366e9099a5 100644
--- a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
+++ b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.py
@@ -325,7 +325,7 @@ def main():
raw_response=events,
)
return_results(command_results)
- if argToBoolean(demisto_params.get('push_events', 'false')):
+ if argToBoolean(demisto_params.get('should_push_events', 'false')):
demisto.debug(f'Sending {len(events)} events to XSIAM')
send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT)
else:
diff --git a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
index 6f2994f696d9..9673407a42f4 100644
--- a/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
+++ b/Packs/DuoAdminApi/Integrations/DuoEventCollector/DuoEventCollector.yml
@@ -59,7 +59,7 @@ script:
required: true
description: Manual command to fetch events and display them.
name: duo-get-events
- dockerimage: demisto/vendors-sdk:1.0.0.87491
+ dockerimage: demisto/vendors-sdk:1.0.0.96124
isfetchevents: true
subtype: python3
marketplaces:
diff --git a/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md b/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md
new file mode 100644
index 000000000000..932ecb97e70a
--- /dev/null
+++ b/Packs/DuoAdminApi/ReleaseNotes/4_0_19.md
@@ -0,0 +1,5 @@
+
+#### Integrations
+##### Duo Event Collector
+- Fixed an issue where the *should_push_events* argument was not passed correctly to the **duo-get-events** command.
+- Updated the Docker image to: *demisto/vendors-sdk:1.0.0.96124*.
\ No newline at end of file
diff --git a/Packs/DuoAdminApi/pack_metadata.json b/Packs/DuoAdminApi/pack_metadata.json
index ef16ff50bf05..9916b1b8e293 100644
--- a/Packs/DuoAdminApi/pack_metadata.json
+++ b/Packs/DuoAdminApi/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DUO Admin",
"description": "DUO for admins.\nMust have access to the admin api in order to use this",
"support": "xsoar",
- "currentVersion": "4.0.18",
+ "currentVersion": "4.0.19",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/EDL/Integrations/EDL/EDL.py b/Packs/EDL/Integrations/EDL/EDL.py
index bdbcf4ab2f7b..1b1f15a17230 100644
--- a/Packs/EDL/Integrations/EDL/EDL.py
+++ b/Packs/EDL/Integrations/EDL/EDL.py
@@ -1,5 +1,7 @@
import os
from datetime import datetime
+from pathlib import Path
+
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
@@ -8,7 +10,7 @@
from base64 import b64decode
from flask import Flask, Response, request, send_file
from netaddr import IPSet, IPNetwork
-from typing import IO, Tuple
+from typing import IO
from collections.abc import Iterable, Callable
from math import ceil
from enum import Enum
@@ -299,7 +301,7 @@ def create_new_edl(request_args: RequestArguments) -> tuple[str, int, dict]:
)
demisto.debug(f"Creating a new EDL file in {request_args.out_format} format")
formatted_indicators = ''
- new_log_stats = dict()
+ new_log_stats = {}
if request_args.out_format == FORMAT_TEXT:
if request_args.drop_invalids or request_args.collapse_ips != "Don't Collapse":
# Because there may be illegal indicators or they may turn into cider, the limit is increased
@@ -731,31 +733,33 @@ def store_log_data(request_args: RequestArguments, created: datetime, log_stats:
created (datetime): The time the log was created. This will be added to the header.
log_stats (dict): A statistics dict for the indicator modifications (e.g. {'Added': 5, 'Dropped': 3, 'Modified': 2}
"""
- added_count = log_stats.get(IndicatorAction.ADDED.value, 0)
- dropped_count = log_stats.get(IndicatorAction.DROPPED.value, 0)
- modified_count = log_stats.get(IndicatorAction.MODIFIED.value, 0)
+ log_file_wip = Path(EDL_FULL_LOG_PATH_WIP)
+ if log_file_wip.exists():
+ added_count = log_stats.get(IndicatorAction.ADDED.value, 0)
+ dropped_count = log_stats.get(IndicatorAction.DROPPED.value, 0)
+ modified_count = log_stats.get(IndicatorAction.MODIFIED.value, 0)
- total_count = added_count + dropped_count + modified_count
+ total_count = added_count + dropped_count + modified_count
- header = f"# Created new EDL at {created.isoformat()}\n\n" \
- f"## Configuration Arguments: {request_args.to_context_json()}\n\n" \
- f"## EDL stats: {total_count} indicators in total, {modified_count} modified, {dropped_count} dropped, " \
- f"{added_count} added.\n" \
- f"\nAction | Indicator | Raw Indicator | Reason"
+ header = f"# Created new EDL at {created.isoformat()}\n\n" \
+ f"## Configuration Arguments: {request_args.to_context_json()}\n\n" \
+ f"## EDL stats: {total_count} indicators in total, {modified_count} modified, {dropped_count} dropped, " \
+ f"{added_count} added.\n" \
+ f"\nAction | Indicator | Raw Indicator | Reason"
- with open(EDL_FULL_LOG_PATH, 'w+') as new_full_log_file, open(EDL_FULL_LOG_PATH_WIP, 'r') as log_file_data:
- # Finalize the current log: write the headers and the WIP log to full_log_path
- new_full_log_file.write(header)
- for log_line in log_file_data:
- new_full_log_file.write(log_line)
+ with open(EDL_FULL_LOG_PATH, 'w+') as new_full_log_file, log_file_wip.open('r') as log_file_data:
+ # Finalize the current log: write the headers and the WIP log to full_log_path
+ new_full_log_file.write(header)
+ for log_line in log_file_data:
+ new_full_log_file.write(log_line)
- with open(EDL_FULL_LOG_PATH_WIP, 'w+') as log_file_data:
- # Empty WIP log file after finalization.
- log_file_data.seek(0)
+ with open(EDL_FULL_LOG_PATH_WIP, 'w+') as log_file_data:
+ # Empty WIP log file after finalization.
+ log_file_data.seek(0)
@debug_function
-def create_text_out_format(iocs: IO, request_args: RequestArguments) -> Tuple[Union[IO, IO[str]], dict]:
+def create_text_out_format(iocs: IO, request_args: RequestArguments) -> tuple[Union[IO, IO[str]], dict]:
"""
Create a list in new file of formatted_indicators, and log the modifications.
* IP / CIDR:
@@ -774,7 +778,7 @@ def create_text_out_format(iocs: IO, request_args: RequestArguments) -> Tuple[Un
ipv6_formatted_indicators = set()
iocs.seek(0)
formatted_indicators = tempfile.TemporaryFile(mode='w+t')
- log_stats: dict = dict()
+ log_stats: dict = {}
new_line = '' # For the first time he will not add a new line
for str_ioc in iocs:
ioc = json.loads(str_ioc.rstrip())
@@ -1065,7 +1069,7 @@ def get_edl_log_file() -> str:
if os.path.getsize(EDL_FULL_LOG_PATH) > MAX_DISPLAY_LOG_FILE_SIZE:
return LARGE_LOG_DISPLAY_MSG
- with open(EDL_FULL_LOG_PATH, 'r') as log_file:
+ with open(EDL_FULL_LOG_PATH) as log_file:
log_file.seek(0)
edl_data_log = log_file.read()
log_file.seek(0)
diff --git a/Packs/EDL/Integrations/EDL/EDL.yml b/Packs/EDL/Integrations/EDL/EDL.yml
index 00500ed5539b..aa03ce2d50ab 100644
--- a/Packs/EDL/Integrations/EDL/EDL.yml
+++ b/Packs/EDL/Integrations/EDL/EDL.yml
@@ -432,7 +432,7 @@ script:
- 'False'
- 'True'
description: Updates values stored in the List (only available On-Demand).
- dockerimage: demisto/flask-nginx:1.0.0.92897
+ dockerimage: demisto/flask-nginx:1.0.0.96292
longRunning: true
longRunningPort: true
script: '-'
diff --git a/Packs/EDL/Integrations/EDL/EDL_test.py b/Packs/EDL/Integrations/EDL/EDL_test.py
index b4788454bb13..e7dcf876e9c2 100644
--- a/Packs/EDL/Integrations/EDL/EDL_test.py
+++ b/Packs/EDL/Integrations/EDL/EDL_test.py
@@ -1182,3 +1182,30 @@ def test_route_edl_log_too_big(mocker):
downloaded_expected_path = f"{edl.LOGS_ZIP_FILE_PREFIX}_{datetime.now().strftime('%Y%m%d-%H%M%S')}.zip"
assert os.path.exists(downloaded_expected_path)
os.remove(downloaded_expected_path)
+
+
+@pytest.mark.parametrize(argnames='wip_exist', argvalues=[True, False])
+def test_store_log_data(mocker, wip_exist):
+ """
+ Given:
+ - previous log file exist/missing.
+ When:
+ - call to store_log_data.
+ Then:
+ - ensure the full log file is created only if the previous log file exists
+ """
+ import EDL as edl
+ from pathlib import Path
+ from datetime import datetime
+ tmp_dir = mkdtemp()
+ wip_log_file = Path(tmp_dir) / 'wip_log_file'
+ full_log_file = Path(tmp_dir) / 'full_log_file'
+
+ if wip_exist:
+ wip_log_file.write_text('')
+ mocker.patch.object(edl, 'EDL_FULL_LOG_PATH_WIP', new=wip_log_file.absolute())
+
+ mocker.patch.object(edl, 'EDL_FULL_LOG_PATH', new=full_log_file.absolute())
+ request_args = edl.RequestArguments()
+ edl.store_log_data(request_args, datetime.now(), {})
+ assert Path(edl.EDL_FULL_LOG_PATH).exists() == wip_exist
diff --git a/Packs/EDL/ReleaseNotes/3_3_1.md b/Packs/EDL/ReleaseNotes/3_3_1.md
new file mode 100644
index 000000000000..68accef551cc
--- /dev/null
+++ b/Packs/EDL/ReleaseNotes/3_3_1.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Generic Export Indicators Service
+
+- Fixed an issue where the integration stopped working due to an internal server error (500).
+- Updated the Docker image to: *demisto/flask-nginx:1.0.0.96292*.
diff --git a/Packs/EDL/ReleaseNotes/3_3_2.md b/Packs/EDL/ReleaseNotes/3_3_2.md
new file mode 100644
index 000000000000..c7f30b22f05b
--- /dev/null
+++ b/Packs/EDL/ReleaseNotes/3_3_2.md
@@ -0,0 +1,3 @@
+## Generic Export Indicators Service
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/EDL/pack_metadata.json b/Packs/EDL/pack_metadata.json
index 8b5c11a2d7c7..5d049b1f4fcf 100644
--- a/Packs/EDL/pack_metadata.json
+++ b/Packs/EDL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Generic Export Indicators Service",
"description": "Use this pack to generate a list based on your Threat Intel Library, and export it to ANY other product in your network, such as your firewall, agent or SIEM. This pack is built for ongoing distribution of indicators from XSOAR to other products in the network, by creating an endpoint with a list of indicators that can be pulled by external vendors.",
"support": "xsoar",
- "currentVersion": "3.3.0",
+ "currentVersion": "3.3.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/EclecticIQ/ReleaseNotes/3_0_1.json b/Packs/EclecticIQ/ReleaseNotes/3_0_1.json
new file mode 100644
index 000000000000..f3c3db76b060
--- /dev/null
+++ b/Packs/EclecticIQ/ReleaseNotes/3_0_1.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) EclecticIQ Intelligence Center v3 will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/EclecticIQ/ReleaseNotes/3_0_1.md b/Packs/EclecticIQ/ReleaseNotes/3_0_1.md
new file mode 100644
index 000000000000..061ddb269a54
--- /dev/null
+++ b/Packs/EclecticIQ/ReleaseNotes/3_0_1.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### EclecticIQ Intelligence Center v3
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now EclecticIQ Intelligence Center v3 will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/EclecticIQ/pack_metadata.json b/Packs/EclecticIQ/pack_metadata.json
index 49772f9e2107..f0a61e3c3b2d 100644
--- a/Packs/EclecticIQ/pack_metadata.json
+++ b/Packs/EclecticIQ/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "EclecticIQ Platform",
"description": "Threat Intelligence Platform that connects and interprets intelligence data from open sources, commercial suppliers and industry partnerships.",
"support": "partner",
- "currentVersion": "3.0.0",
+ "currentVersion": "3.0.1",
"author": "EclecticIQ",
"url": "https://support.eclecticiq.com",
"email": "support@eclecticiq.com",
diff --git a/Packs/EmailHippo/Integrations/EmailHippo/test_data/get_email_output.json b/Packs/EmailHippo/Integrations/EmailHippo/test_data/get_email_output.json
index 59fa0fbe6a36..da5520fad61b 100644
--- a/Packs/EmailHippo/Integrations/EmailHippo/test_data/get_email_output.json
+++ b/Packs/EmailHippo/Integrations/EmailHippo/test_data/get_email_output.json
@@ -80,9 +80,9 @@
}
},
"expected_context": {
- "Email(val.Address && val.Address == obj.Address)": [
+ "Account(val.Email.Address && val.Email.Address == obj.Email.Address)": [
{
- "Address": "test@example.com",
+ "Email": {"Address": "test@example.com"},
"Domain": "example.com"
}
],
diff --git a/Packs/EmailHippo/ReleaseNotes/1_0_6.json b/Packs/EmailHippo/ReleaseNotes/1_0_6.json
new file mode 100644
index 000000000000..e1f589b8e7bd
--- /dev/null
+++ b/Packs/EmailHippo/ReleaseNotes/1_0_6.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) Email Hippo will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/EmailHippo/ReleaseNotes/1_0_6.md b/Packs/EmailHippo/ReleaseNotes/1_0_6.md
new file mode 100644
index 000000000000..ab978614789e
--- /dev/null
+++ b/Packs/EmailHippo/ReleaseNotes/1_0_6.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Email Hippo
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now Email Hippo will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/EmailHippo/pack_metadata.json b/Packs/EmailHippo/pack_metadata.json
index a4b90c264d48..573dae3844d0 100644
--- a/Packs/EmailHippo/pack_metadata.json
+++ b/Packs/EmailHippo/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Email Hippo",
"description": "Use this tool to verify email sources as fake emails that were used as part of phishing attacks.",
"support": "xsoar",
- "currentVersion": "1.0.5",
+ "currentVersion": "1.0.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ExabeamDataLake/.pack-ignore b/Packs/ExabeamDataLake/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ExabeamDataLake/.secrets-ignore b/Packs/ExabeamDataLake/.secrets-ignore
new file mode 100644
index 000000000000..b7d1664d67bf
--- /dev/null
+++ b/Packs/ExabeamDataLake/.secrets-ignore
@@ -0,0 +1 @@
+http://example.com
\ No newline at end of file
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.py b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.py
new file mode 100644
index 000000000000..498c06cfde33
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.py
@@ -0,0 +1,306 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+import urllib3
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+""" CONSTANTS """
+
+ISO_8601_FORMAT = "%Y-%m-%d"
+
+
+""" CLIENT CLASS """
+
+
+class Client(BaseClient):
+ """
+ Client to use in the Exabeam DataLake integration. Overrides BaseClient
+ """
+
+ def __init__(self, base_url: str, username: str, password: str, verify: bool,
+ proxy: bool):
+ super().__init__(base_url=f'{base_url}', verify=verify, proxy=proxy, timeout=20)
+ self.username = username
+ self.password = password
+
+ self._login()
+
+ def _login(self):
+ """
+ Logs in to the Exabeam API using the provided username and password.
+ This function must be called before any other API calls.
+ """
+ data = {"username": self.username, "password": self.password}
+ self._http_request(
+ "POST",
+ full_url=f"{self._base_url}/api/auth/login",
+ headers={'Accept': 'application/json', 'Csrf-Token': 'nocheck'},
+ data=data,
+ )
+
+ def _logout(self) -> None:
+ """
+ The _logout method initiates a logout request, utilizing a GET HTTP request to the specified endpoint for
+ user session termination.
+ """
+ self._http_request('GET', full_url=f"{self._base_url}/api/auth/logout")
+
+ def test_module_request(self):
+ """
+ Performs basic get request to check if the server is reachable.
+ """
+ self._http_request('GET', full_url=f'{self._base_url}/api/auth/check', resp_type='text')
+
+ def query_datalake_request(self, args: dict, from_param: int, size_param: int, cluster_name: str,
+ dates_in_format: list) -> dict:
+ """
+ Queries the Exabeam Data Lake API with the provided search query and returns the response.
+ """
+ search_query = {
+ "sortBy": [
+ {"field": "@timestamp", "order": "desc", "unmappedType": "date"}
+ ],
+ "query": args.get("query", "*"),
+ "from": from_param,
+ "size": size_param,
+ "clusterWithIndices": [
+ {
+ "clusterName": cluster_name,
+ "indices": dates_in_format,
+ }
+ ]
+ }
+ return self._http_request(
+ "POST",
+ full_url=f"{self._base_url}/dl/api/es/search",
+ data=json.dumps(search_query),
+ headers={'Content-Type': 'application/json', 'Csrf-Token': 'nocheck'},
+ )
+
+
+""" HELPER FUNCTIONS """
+
+
+def _parse_entry(entry: dict) -> dict:
+ """
+ Parse a single entry from the API response to a dictionary.
+
+ Args:
+ entry: The entry from the API response.
+
+ Returns:
+ dict: The parsed entry dictionary.
+ """
+ source: dict = entry.get("_source", {})
+ return {
+ "Id": entry.get("_id"),
+ "Vendor": source.get("Vendor"),
+ "Created_at": source.get("@timestamp"),
+ "Product": source.get("Product"),
+ "Message": source.get("message")
+ }
+
+
+def dates_in_range(start_time: Any, end_time: Any) -> list[str]:
+ """
+ Generate a list of dates within a specified range.
+
+ Args:
+ start_time: The start date of the range in the format "YYYY-MM-DD".
+ end_time: The end date of the range in the format "YYYY-MM-DD".
+
+ Raises:
+ DemistoException: If the start time is not before the end time, or if the difference between start time and end time is
+ greater than 10 days.
+
+ Returns:
+ list: A list of dates within the specified range, formatted as strings in the format "YYYY.MM.DD".
+ """
+ start_time = datetime.strptime(start_time, "%Y-%m-%d")
+ end_time = datetime.strptime(end_time, "%Y-%m-%d")
+
+ if start_time >= end_time:
+ raise DemistoException("Start time must be before end time.")
+
+ if (end_time - start_time).days > 10:
+ raise DemistoException("Difference between start time and end time must be less than or equal to 10 days.")
+
+ dates = []
+ current_date = start_time
+ while current_date <= end_time:
+ dates.append(current_date.strftime("%Y.%m.%d"))
+ current_date += timedelta(days=1)
+
+ return dates
+
+
+def get_date(time: str, arg_name: str):
+ """
+ Get the date from a given time string.
+
+ Args:
+ time (str): The time string to extract the date from.
+
+ Returns:
+ str: The date extracted from the time string formatted in ISO 8601 format (YYYY-MM-DD),
+ or None if the time string is invalid.
+ """
+ date_time = arg_to_datetime(arg=time, arg_name=arg_name, required=True)
+ if not date_time:
+ raise DemistoException(f"There was an issue parsing the {arg_name} provided.")
+ date = date_time.strftime(ISO_8601_FORMAT)
+ return date
+
+
+def get_limit(args: dict, arg_name: str) -> int:
+ """
+ Get the limit value specified in the arguments.
+
+ Args:
+ args: A dictionary containing the 'limit' argument.
+
+ Returns:
+ int: The limit value if specified and less than or equal to 3000; otherwise, returns 3000 as the maximum limit.
+ If the 'limit' argument is not present in the dictionary or is None, returns 50 as the default limit.
+ """
+ if limit := args.get(arg_name):
+ return min(int(limit), 3000)
+
+ return 50
+
+
+def calculate_page_parameters(args: dict) -> tuple[int, int]:
+ """
+ Calculate the page parameters for pagination.
+
+ Args:
+ args: A dictionary containing the arguments passed to the function.
+
+ Raises:
+ DemistoException: If invalid combinations of arguments are provided. You can only provide 'limit'
+ alone or 'page' and 'page_size' together.
+
+ Returns:
+ tuple: A tuple containing two integers representing the 'from' and 'size' parameters for pagination.
+ 'from' is the index of the first item to retrieve, and 'size' is the number of items to retrieve.
+ """
+ page_arg = args.get('page')
+ page_size_arg = args.get('page_size')
+ limit_arg = args.get('limit')
+
+ if (limit_arg and (page_arg or page_size_arg)) or ((not (page_arg and page_size_arg)) and (page_arg or page_size_arg)):
+ raise DemistoException("You can only provide 'limit' alone or 'page' and 'page_size' together.")
+
+ if page_arg and page_size_arg:
+ page = arg_to_number(args.get('page', '1'))
+ page_size = get_limit(args, "page_size")
+ if page == 0 or page_size == 0:
+ raise DemistoException("Both 'page' and 'page_size' must be greater than 0.")
+ if page and page_size:
+ if page < 0 or page_size < 0:
+ raise DemistoException("Both 'page' and 'page_size' must be greater than 0.")
+ from_param = page * page_size - page_size
+ size_param = page_size
+ else:
+ from_param = 0
+ size_param = get_limit(args, "limit")
+
+ return from_param, size_param
+
+
+""" COMMAND FUNCTIONS """
+
+
+def query_data_lake_command(client: Client, args: dict, cluster_name: str) -> CommandResults:
+ """
+ Query the datalake command and return the results in a formatted table.
+
+ Args:
+ client: The client object for interacting with the API.
+ args: The arguments passed to the command.
+
+ Returns:
+ CommandResults: The command results object containing outputs and readable output.
+ """
+ from_param, size_param = calculate_page_parameters(args)
+
+ start_time = get_date(args.get("start_time", "7 days ago"), "start_time")
+ end_time = get_date(args.get("end_time", "today"), "end_time")
+ dates = dates_in_range(start_time, end_time)
+ dates_in_format = ["exabeam-" + date for date in dates]
+
+ response = client.query_datalake_request(args, from_param, size_param, cluster_name, dates_in_format).get("responses", [{}])
+
+ data_response = response[0].get("hits", {}).get("hits", [])
+
+ human_readable = [_parse_entry(entry) for entry in data_response]
+
+ return CommandResults(
+ outputs_prefix="ExabeamDataLake.Event",
+ outputs=data_response,
+ readable_output=tableToMarkdown(name="Logs", t=human_readable, headers=[
+ "Id", "Vendor", "Product", "Created_at", "Message"])
+ )
+
+
+def test_module(client: Client): # pragma: no cover
+ """test function
+
+ Args:
+ client: Client
+
+ Returns:
+ ok if successful
+ """
+ client.test_module_request()
+ return 'ok'
+
+
+""" MAIN FUNCTION """
+
+
+def main() -> None: # pragma: no cover
+ params = demisto.params()
+ args = demisto.args()
+ command = demisto.command()
+
+ credentials: dict = params.get('credentials', {})
+ username = credentials.get('identifier', '')
+ password = credentials.get('password', '')
+ base_url: str = params.get('url', '')
+ verify_certificate = not params.get('insecure', False)
+ proxy = params.get('proxy', False)
+ cluster_name = params.get('cluster_name', 'local')
+ client = None
+
+ try:
+ client = Client(
+ base_url.rstrip('/'),
+ username=username,
+ password=password,
+ verify=verify_certificate,
+ proxy=proxy
+ )
+
+ demisto.debug(f"Command being called is {command}")
+
+ if command == "test-module":
+ return_results(test_module(client))
+ elif command == "exabeam-data-lake-search":
+ return_results(query_data_lake_command(client, args, cluster_name))
+ else:
+ raise NotImplementedError(f"Command {command} is not supported")
+
+ except Exception as e:
+ demisto.info(str(e))
+ return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
+
+ finally:
+ if client:
+ client._logout()
+
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+ main()
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.yml b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.yml
new file mode 100644
index 000000000000..f0b65a0f2736
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake.yml
@@ -0,0 +1,76 @@
+category: Analytics & SIEM
+commonfields:
+ id: Exabeam Data Lake
+ version: -1
+configuration:
+- defaultvalue: https://example.com/
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: User Name
+ name: credentials
+ required: true
+ section: Connect
+ type: 9
+ displaypassword: Password
+- defaultvalue: local
+ display: Cluster Name
+ name: cluster_name
+ type: 0
+ required: true
+ additionalinfo: The default value is usually 'local', suitable for standard setups. For custom cluster deployments, consult the Exabeam Support Team.
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+description: Exabeam Data Lake provides a searchable log management system. Data Lake is used for log collection, storage, processing, and presentation.
+display: Exabeam Data Lake
+name: Exabeam Data Lake
+script:
+ commands:
+ - arguments:
+ - defaultValue: "*"
+ description: The search query string to filter the events by. Examples can be found in the syntax documentation section of the integration description.
+ name: query
+ - defaultValue: "7 days ago"
+ description: The starting date for the search range. The search range should be at least one day long and can extend up to a maximum of 10 days.
+ name: start_time
+ - defaultValue: "today"
+ description: The ending date for the search range. This defines the end of the search range, which should be within one to ten days after the start_time.
+ name: end_time
+ - description: The maximal number of results to return. Maximum value is 3000.
+ name: limit
+ - description: The page number for pagination.
+ name: page
+ - description: The maximal number of results to return per page. Maximum value is 3000.
+ name: page_size
+ description: Get events from Exabeam Data Lake.
+ name: exabeam-data-lake-search
+ outputs:
+ - contextPath: ExabeamDataLake.Event._id
+ description: The event ID.
+ type: str
+ - contextPath: ExabeamDataLake.Event._source.Vendor
+ description: Vendor of the event.
+ type: str
+ - contextPath: ExabeamDataLake.Event._source.Product
+ description: Product of the event.
+ type: str
+ - contextPath: ExabeamDataLake.Event._source.@timestamp
+ description: The time of the event.
+ type: str
+ - contextPath: ExabeamDataLake.Event._source.message
+ description: The message of the event.
+ type: str
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.95956
+fromversion: 6.10.0
+tests:
+- ExabeamDataLake-test
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_description.md b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_description.md
new file mode 100644
index 000000000000..a1448c2c276f
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_description.md
@@ -0,0 +1,9 @@
+### Authentication Method
+ - **Basic Authentication** - Provide the Exabeam username and password in the corresponding parameters in the configuration.
+
+#### Required Permissions
+`run_es_search`
+
+For the syntax used in Exabeam, refer to the [Exabeam syntax documentation](https://docs.exabeam.com/en/data-lake/i40/data-lake-search-quick-reference-guide/169290-how-to-run-query-searches-in-exabeam-data-lake.html)
+
+For additional information, refer to [Exabeam Administration Guide](https://docs.exabeam.com/en/data-lake/i40/data-lake-administration-guide/131779-exabeam-data-lake-architecture-overview.html)
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_image.png b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_image.png
new file mode 100644
index 000000000000..1be1cbaa0121
Binary files /dev/null and b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_image.png differ
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_test.py b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_test.py
new file mode 100644
index 000000000000..1507f2c52a24
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/ExabeamDataLake_test.py
@@ -0,0 +1,311 @@
+import json
+import pytest
+from CommonServerPython import DemistoException
+from ExabeamDataLake import Client, query_data_lake_command, get_limit, get_date, dates_in_range, calculate_page_parameters, \
+ _parse_entry
+
+
class MockClient(Client):
    """Test double for ``Client`` that skips all real initialization."""

    def __init__(self, base_url: str, username: str, password: str, verify: bool, proxy: bool):
        # Intentionally does NOT call super().__init__, so no session is opened
        # and no login request is sent during tests.
        pass

    def query_data_lake_command(self) -> None:
        # Inert stub; tests patch Client.query_datalake_request directly instead.
        return
+
+
def test_query_data_lake_command(mocker):
    """
    GIVEN:
        a mocked Client whose data lake query returns two hits,

    WHEN:
        'query_data_lake_command' function is called with the provided arguments,

    THEN:
        it should query the data lake, return log entries, and format them into readable output.
    """
    args = {
        'page': 1,
        'page_size': 50,
        'start_time': '2024-05-01T00:00:00',
        'end_time': '2024-05-08T00:00:00',
        'query': '*'
    }
    # Mimics the Elasticsearch-style envelope: responses[0].hits.hits holds the events.
    mock_response = {
        "responses": [
            {
                "hits": {
                    "hits": [
                        {"_id": "FIRST_ID", "_source": {"@timestamp": "2024-05-01T12:00:00",
                                                        "message": "example message 1"}},
                        {"_id": "SECOND_ID", "_source": {"@timestamp": "2024-05-02T12:00:00",
                                                         "message": "example message 2", "only_hr": "nothing"}}
                    ]
                }
            }
        ]
    }

    mocker.patch.object(Client, "query_datalake_request", return_value=mock_response)

    client = MockClient("", "", "", False, False)

    response = query_data_lake_command(client, args, cluster_name="local")

    result = response.to_context().get('EntryContext', {}).get('ExabeamDataLake.Event', [])

    # Raw hits (including the extra 'only_hr' field) must be preserved in context output.
    assert {'_id': 'FIRST_ID', '_source': {'@timestamp': '2024-05-01T12:00:00', 'message': 'example message 1'}} in result
    assert {'_id': 'SECOND_ID', '_source': {'@timestamp': '2024-05-02T12:00:00', 'message': 'example message 2',
                                            'only_hr': 'nothing'}} in result
    # The markdown table exposes only the fixed columns; 'only_hr' must not leak into it.
    expected_result = (
        "### Logs\n"
        "|Id|Vendor|Product|Created_at|Message|\n"
        "|---|---|---|---|---|\n"
        "| FIRST_ID | | | 2024-05-01T12:00:00 | example message 1 |\n"
        "| SECOND_ID | | | 2024-05-02T12:00:00 | example message 2 |\n"
    )
    assert expected_result in response.readable_output
+
+
def test_query_data_lake_command_no_response(mocker):
    """
    GIVEN:
        a mocked Client whose data lake query returns an empty response,
    WHEN:
        'query_data_lake_command' function is called with the provided arguments,
    THEN:
        it should return a readable output indicating no results found.
    """
    mocker.patch.object(Client, "query_datalake_request", return_value={})

    command_args = {
        'query': '*',
        'start_time': '2024-05-01T00:00:00',
        'end_time': '2024-05-08T00:00:00',
        'page': 1,
        'page_size': 50,
    }

    result = query_data_lake_command(MockClient("", "", "", False, False), command_args, "local")

    assert result.readable_output == '### Logs\n**No entries.**\n'
+
+
def test_get_date():
    """
    GIVEN:
        a datetime string in 'YYYY.MM.DDTHH:MM:SS' form,

    WHEN:
        'get_date' function is called with the provided time string,

    THEN:
        it should return the date part of the provided time string in the 'YYYY-MM-DD' format.
    """
    # The previous version wrapped `mocker.patch(...)` in a `with` block, but
    # pytest-mock applies the patch immediately and returns the mock (not a
    # context manager); the patch target also never affected `get_date`, so
    # the real `arg_to_datetime` was exercised all along. Call it directly.
    time = '2024.05.01T14:00:00'
    expected_result = '2024-05-01'

    result = get_date(time, "start_time")

    assert result == expected_result
+
+
@pytest.mark.parametrize('start_time_str, end_time_str, expected_output', [
    (
        "2024-05-01",
        "2024-05-10",
        ['2024.05.01', '2024.05.02', '2024.05.03', '2024.05.04', '2024.05.05',
         '2024.05.06', '2024.05.07', '2024.05.08', '2024.05.09', '2024.05.10'],
    ),
    (
        "2024-05-01",
        "2024-05-05",
        ['2024.05.01', '2024.05.02', '2024.05.03', '2024.05.04', '2024.05.05'],
    ),
])
def test_dates_in_range_valid(start_time_str, end_time_str, expected_output):
    """
    GIVEN:
        a valid start/end pair no more than ten days apart,

    WHEN:
        'dates_in_range' is called with those boundaries,

    THEN:
        every day in the inclusive range is returned in 'YYYY.MM.DD' format.
    """
    assert dates_in_range(start_time_str, end_time_str) == expected_output
+
+
@pytest.mark.parametrize('start_time_str, end_time_str, expected_output', [
    (
        "2024-05-10",
        "2024-05-01",
        "Start time must be before end time",
    ),
    (
        "2024-05-01",
        "2024-05-15",
        "Difference between start time and end time must be less than or equal to 10 days",
    ),
])
def test_dates_in_range_invalid(start_time_str, end_time_str, expected_output):
    """
    GIVEN:
        a reversed range, or one spanning more than ten days,

    WHEN:
        'dates_in_range' is called with those boundaries,

    THEN:
        a DemistoException carrying the matching error message is raised.
    """
    with pytest.raises(DemistoException, match=expected_output):
        dates_in_range(start_time_str, end_time_str)
+
+
@pytest.mark.parametrize('args, from_param_expected, size_param_expected', [
    ({'page': '1', 'page_size': '50', 'limit': None}, 0, 50),
    ({'page': None, 'page_size': None, 'limit': '100'}, 0, 100),
])
def test_calculate_page_parameters_valid(args, from_param_expected, size_param_expected):
    """
    GIVEN:
        either 'page' + 'page_size' or 'limit' alone in the command arguments,

    WHEN:
        'calculate_page_parameters' is called with those arguments,

    THEN:
        the derived 'from' offset and 'size' window match the expected pair.
    """
    actual_from, actual_size = calculate_page_parameters(args)
    assert (actual_from, actual_size) == (from_param_expected, size_param_expected)
+
+
@pytest.mark.parametrize('args', [
    ({'page': '1', 'page_size': None, 'limit': '100'}),
    ({'page': '1', 'page_size': '25', 'limit': '100'}),
    ({'page': None, 'page_size': '25', 'limit': None})
])
def test_calculate_page_parameters_invalid(args):
    """
    GIVEN:
        args representing input arguments with invalid combinations of 'page', 'page_size', and 'limit',

    WHEN:
        'calculate_page_parameters' function is called with the provided arguments,

    THEN:
        it should raise a DemistoException with the expected error message.
    """
    # The 'mocker' fixture was previously requested but never used; dropped so the
    # test no longer depends on pytest-mock.
    with pytest.raises(DemistoException, match="You can only provide 'limit' alone or 'page' and 'page_size' together."):
        calculate_page_parameters(args)
+
+
def test_parse_entry():
    """
    GIVEN:
        a raw hit dictionary with '_id' and a '_source' holding 'Vendor',
        '@timestamp', 'Product', and 'message',

    WHEN:
        '_parse_entry' is called on it,

    THEN:
        the fields are surfaced under the renamed human-readable keys.
    """
    raw_entry = {
        "_id": "12345",
        "_source": {
            "Vendor": "VendorName",
            "@timestamp": "2024-05-09T12:00:00Z",
            "Product": "ProductA",
            "message": "Some message here",
        },
    }

    expected_fields = {
        "Id": "12345",
        "Vendor": "VendorName",
        "Created_at": "2024-05-09T12:00:00Z",
        "Product": "ProductA",
        "Message": "Some message here",
    }

    parsed_entry = _parse_entry(raw_entry)
    for field, expected_value in expected_fields.items():
        assert parsed_entry[field] == expected_value
+
+
def test_query_datalake_request(mocker):
    """
    GIVEN:
        a mocked '_login' method and '_http_request' method of the Client class,
        a base URL, username, password, headers, proxy, and search query,

    WHEN:
        'query_datalake_request' method of the Client class is called with the provided search query,

    THEN:
        it should send a POST request to the data lake API with the search query,
        using the correct base URL and headers including 'Csrf-Token' and 'Content-Type'.
    """
    # _login is patched so constructing the real Client performs no network I/O.
    mock_login = mocker.patch('ExabeamDataLake.Client._login')
    mock_http_request = mocker.patch('ExabeamDataLake.Client._http_request')

    base_url = "http://example.com"
    username = "user123"
    password = "password123"
    proxy = False
    args = {"query": "*"}
    from_param = 0
    size_param = 10
    cluster_name = "example_cluster"
    dates_in_format = ["index1", "index2"]

    instance = Client(base_url=base_url, username=username, password=password,
                      verify=False, proxy=proxy)

    # Expected JSON body: pagination via from/size, newest-first sort, and the
    # per-day indices scoped to the given cluster.
    expected_search_query = {
        "sortBy": [{"field": "@timestamp", "order": "desc", "unmappedType": "date"}],
        "query": "*",
        "from": 0,
        "size": 10,
        "clusterWithIndices": [{"clusterName": "example_cluster", "indices": ["index1", "index2"]}]
    }

    instance.query_datalake_request(args, from_param, size_param, cluster_name, dates_in_format)

    mock_http_request.assert_called_once_with(
        "POST",
        full_url="http://example.com/dl/api/es/search",
        data=json.dumps(expected_search_query),
        headers={'Content-Type': 'application/json', 'Csrf-Token': 'nocheck'}
    )
    mock_login.assert_called_once()
+
+
@pytest.mark.parametrize('args, arg_name, expected_output', [
    ({}, 'limit', 50),
    ({'limit': None}, 'limit', 50),
    ({'limit': 1000}, 'limit', 1000),
    ({'limit': 5000}, 'limit', 3000),
])
def test_get_limit(args, arg_name, expected_output):
    """
    GIVEN:
        a dictionary that may omit 'limit', set it to None, or set it to a value
        below or above the 3000 cap.

    WHEN:
        'get_limit' function is called with the provided dictionary.

    THEN:
        a missing/None limit falls back to the default of 50; values above 3000
        are clamped to 3000; anything in between is returned as-is.
    """
    result = get_limit(args, arg_name)
    assert result == expected_output
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/README.md b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/README.md
new file mode 100644
index 000000000000..2b1e35f6d0a7
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/README.md
@@ -0,0 +1,160 @@
+Exabeam Data Lake provides a searchable log management system.
+Data Lake is used for log collection, storage, processing, and presentation.
+This integration was integrated and tested with version LMS-i40.3 of Exabeam Data Lake.
+
+## Configure Exabeam Data Lake on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Exabeam Data Lake.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Server URL | | True |
+ | User Name | | True |
+ | Password | | True |
+ | Cluster Name | The default value is usually 'local', suitable for standard setups. For custom cluster deployments, consult Exabeam Support Team. | True |
+ | Trust any certificate (not secure) | | |
+ | Use system proxy settings | | |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### exabeam-data-lake-search
+
+***
+Get events from Exabeam Data Lake.
+
+#### Base Command
+
+`exabeam-data-lake-search`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| query | The search query string to filter the events by. Examples can be found in the syntax documentation section of the integration description. | Required |
+| start_time | The starting date for the search range. The search range should be at least one day long and can extend up to a maximum of 10 days. | Required |
+| end_time | The ending date for the search range. This defines the end of the search range, which should be within one to ten days after the start_time. | Required |
+| limit | The maximal number of results to return. Maximum value is 3000. | Optional |
+| page | The page number for pagination. | Optional |
+| page_size | The maximal number of results to return per page. Maximum value is 3000. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| ExabeamDataLake.Event._id | str | The event ID. |
+| ExabeamDataLake.Event._source.Vendor | str | Vendor of the event. |
+| ExabeamDataLake.Event._source.Product | str | Product of the event. |
+| ExabeamDataLake.Event._source.@timestamp | str | The time of the event. |
+| ExabeamDataLake.Event._source.message | str | The message of the event. |
+
+#### Command example
+```!exabeam-data-lake-search query="risk_score:3" start_time="2024.02.27" end_time="2024.02.28" limit=2```
+#### Context Example
+```json
+{
+ "ExabeamDataLake": {
+ "Event": [
+ {
+ "_id": "some_id",
+ "_index": "exabeam-2024.02.28",
+ "_routing": "SfA86vqw",
+ "_score": null,
+ "_source": {
+ "@timestamp": "2024-02-28T16:15:50.614Z",
+ "@version": "1",
+ "Product": "Exabeam AA",
+ "Vendor": "Exabeam",
+ "data_type": "exabeam-security-alert",
+ "exa_activity_type": [
+ "alert/security",
+ "alert"
+ ],
+ "exa_adjustedEventTime": "2024-02-28T16:15:29.000Z",
+ "exa_category": "Exabeam Alerts",
+ "exa_device_type": [
+ "security"
+ ],
+ "exa_rawEventTime": "2024-02-28T16:15:29.000Z",
+ "indexTime": "2024-02-28T16:15:51.626Z",
+ "is_ransomware_src_ip": false,
+ "is_threat_src_ip": false,
+ "is_tor_src_ip": false,
+ "log_type": "dlp-alert",
+ "message": "<86>1 2024-02-28T16:15:50.609Z exabeam-analytics-master Exabeam - - - timestamp=\"2024-02-28T16:15:29.192Z\" score=\"3\" user=\"ghardin\" event_time=\"2024-02-28 14:35:35\" event_type=\"dlp-alert\" domain=\"kenergy\" time=\"1709130935833\" source=\"ObserveIT\" vendor=\"ObserveIT\" lockout_id=\"NA\" session_id=\"ghardin-20240228143533\" session_order=\"2\" account=\"ghardin\" getvalue('zone_info', src)=\"new york office\" alert_name=\" rule violation\" local_asset=\"lt-ghardin-888\" alert_type=\"DATA EXFILTRATION\" os=\"Win\" rule_name=\"Abnormal DLP alert name for user\" rule_description=\"Exabeam noted that this alert name has been triggered for this user in the past yet it is still considered abnormal activity. This activity may be an early indication of compromise of a user by malware or other malicious actors.\" rule_reason=\"Abnormal DLP alert with name rule violation for user\" ",
+ "port": 41590,
+ "risk_score": "3",
+ "rule_description": "Exabeam noted that this alert name has been triggered for this user in the past yet it is still considered abnormal activity. This activity may be an early indication of compromise of a user by malware or other malicious actors.",
+ "rule_name": "Abnormal DLP alert name for user",
+ "score": "3",
+ "session_id": "ghardin-20240228143533",
+ "time": "2024-02-28T16:15:29.000Z",
+ "user": "ghardin"
+ },
+ "_type": "logs",
+ "sort": [
+ 1709136950614
+ ]
+ },
+ {
+ "_id": "another_id",
+ "_index": "exabeam-2024.02.27",
+ "_routing": "XUXxevyv",
+ "_score": null,
+ "_source": {
+ "@timestamp": "2024-02-27T16:21:45.721Z",
+ "@version": "1",
+ "Product": "Exabeam AA",
+ "Vendor": "Exabeam",
+ "data_type": "exabeam-security-alert",
+ "event_code": "4768",
+ "exa_activity_type": [
+ "alert/security",
+ "alert"
+ ],
+ "exa_adjustedEventTime": "2024-02-24T16:16:29.000Z",
+ "exa_category": "Exabeam Alerts",
+ "exa_device_type": [
+ "security"
+ ],
+ "exa_rawEventTime": "2024-02-24T16:16:29.000Z",
+ "host": "exabeamdemodc1",
+ "indexTime": "2024-02-27T16:23:56.271Z",
+ "is_ransomware_dest_ip": false,
+ "is_threat_dest_ip": false,
+ "is_tor_dest_ip": false,
+ "log_type": "kerberos-logon",
+ "message": "<86>1 2024-02-27T16:21:45.539Z exabeam-analytics-master Exabeam - - - timestamp=\"2024-02-24T16:16:29.975Z\" id=\"ghardin-20240224140716\" score=\"3\" user=\"ghardin\" event_time=\"2024-02-24 14:34:42\" event_type=\"kerberos-logon\" host=\"exabeamdemodc1\" domain=\"ktenergy\" time=\"1708785282052\" source=\"DC\" lockout_id=\"NA\" session_id=\"ghardin-20240224140716\" session_order=\"4\" account=\"ghardin\" ticket_options_encryption=\"0x40810010:0x12\" nonmachine_user=\"ghardin\" event_code=\"4768\" ticket_encryption_type=\"0x12\" ticket_options=\"0x40810010\" rule_name=\"IT presence without badge access\" rule_description=\"This user is logged on to the company network but did not use their badge to access a physical location. It is unusual to have IT access without badge access.\" rule_reason=\"IT presence without badge access\" ",
+ "port": 56920,
+ "risk_score": "3",
+ "rule_description": "This user is logged on to the company network but did not use their badge to access a physical location. It is unusual to have IT access without badge access.",
+ "rule_name": "IT presence without badge access",
+ "score": "3",
+ "session_id": "ghardin-20240224140716",
+ "time": "2024-02-24T16:16:29.000Z",
+ "user": "ghardin"
+ },
+ "_type": "logs",
+ "sort": [
+ 1709050905721
+ ]
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Logs
+>|Created_at|Id|Message|Product|Vendor|
+>|---|---|---|---|---|
+>| 2024-02-28T16:15:50.614Z | some_id | <86>1 2024-02-28T16:15:50.609Z exabeam-analytics-master Exabeam - - - timestamp="2024-02-28T16:15:29.192Z" id="ghardin-20240228143533" score="3" user="ghardin" event_time="2024-02-28 14:35:35" event_type="dlp-alert" domain="kenergy" time="1709130935833" source="ObserveIT" vendor="ObserveIT" lockout_id="NA" session_id="ghardin-20240228143533" session_order="2" account="ghardin" getvalue('zone_info', src)="new york office" alert_name=" rule violation" local_asset="lt-ghardin-888" alert_type="DATA EXFILTRATION" os="Win" rule_name="Abnormal DLP alert name for user" rule_description="Exabeam noted that this alert name has been triggered for this user in the past yet it is still considered abnormal activity. This activity may be an early indication of compromise of a user by malware or other malicious actors." rule_reason="Abnormal DLP alert with name rule violation for user" | Exabeam AA | Exabeam |
+>| 2024-02-27T16:21:45.721Z | another_id | <86>1 2024-02-27T16:21:45.539Z exabeam-analytics-master Exabeam - - - timestamp="2024-02-24T16:16:29.975Z" id="ghardin-20240224140716" score="3" user="ghardin" event_time="2024-02-24 14:34:42" event_type="kerberos-logon" host="exabeamdemodc1" domain="ktenergy" time="1708785282052" source="DC" lockout_id="NA" session_id="ghardin-20240224140716" session_order="4" account="ghardin" ticket_options_encryption="0x40810010:0x12" nonmachine_user="ghardin" event_code="4768" ticket_encryption_type="0x12" ticket_options="0x40810010" rule_name="IT presence without badge access" rule_description="This user is logged on to the company network but did not use their badge to access a physical location. It is unusual to have IT access without badge access." rule_reason="IT presence without badge access" | Exabeam AA | Exabeam |
+
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/command_examples.txt b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/command_examples.txt
new file mode 100644
index 000000000000..2587529ea4f3
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/command_examples.txt
@@ -0,0 +1 @@
+!exabeam-data-lake-search query="risk_score:3" start_time="2024.02.27" end_time="2024.02.28" limit=2
\ No newline at end of file
diff --git a/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/test_data/response.json b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/test_data/response.json
new file mode 100644
index 000000000000..32a967033339
--- /dev/null
+++ b/Packs/ExabeamDataLake/Integrations/ExabeamDataLake/test_data/response.json
@@ -0,0 +1,225 @@
+{
+ "responses": [
+ {
+ "took": 10,
+ "timed_out": false,
+ "_shards": {
+ "total": 1,
+ "successful": 1,
+ "skipped": 0,
+ "failed": 0
+ },
+ "hits": {
+ "total": 159608,
+ "max_score": null,
+ "hits": [
+ {
+ "_index": "exabeam-2023",
+ "_type": "logs",
+ "_id": "test_id_1",
+ "_score": null,
+ "_routing": "test_routing_1",
+ "_source": {
+ "exa_parser_name": "test_parser_1",
+ "forwarder": "test_forwarder_1",
+ "app_protocol": "test",
+ "is_threat_dest_ip": false,
+ "@timestamp": "2022-06-12T23:59:57.458Z",
+ "exa_rsc": {
+ "hostname": "localhost",
+ "timezone": "00",
+ "time_off": 10800,
+ "timestamp": "2021-09-12T23:59:58.459Z"
+ },
+ "exa_adjustedEventTime": "2021-09-12T23:55:05.000Z",
+ "dest_translated_ip": "0.0.0.0",
+ "exa_activity_type": [
+ "network-traffic"
+ ],
+ "host": "local",
+ "exa-message-size": 1006,
+ "is_tor_dest_ip": false,
+ "exa_outcome": [
+ "success"
+ ],
+ "src_port": 0,
+ "event_name": "Accept",
+ "outcome": "Accept",
+ "rule": "test rule",
+ "src_translated_ip": "test_src_translated_ip_1",
+ "direction": "inbound",
+ "src_ip": "test_src_ip_1",
+ "rule_id": "test_rule_id_1",
+ "is_ransomware_dest_ip": false,
+ "dest_ip": "test_dest_ip_1",
+ "indexTime": "2022-06-12T23:59:59.298Z",
+ "interface_name": "test_interface_name_1",
+ "Vendor": "test_vendor_1",
+ "origin_ip": "test_origin_ip_1",
+ "data_type": "network",
+ "product_name": "VPN",
+ "dest_port": 0,
+ "dest_translated_port": "0",
+ "exa_rawEventTime": "2022-06-12T23:55:05.000Z",
+ "message": "<134>1 2022-06-12T23:55:05Z",
+ "time": "2022-06-12T23:55:05.000Z",
+ "src_translated_port": "0",
+ "Product": "test_product_1",
+ "@version": "1",
+ "exa_category": "Network",
+ "exa_device_type": [
+ "network",
+ "network"
+ ],
+ "is_threat_src_ip": false,
+ "action": "Accept",
+ "protocol": "00",
+ "is_ransomware_src_ip": false,
+ "is_tor_src_ip": false
+ },
+ "sort": [
+ 1689206397458
+ ]
+ },
+ {
+ "_index": "exabeam-2023",
+ "_type": "logs",
+ "_id": "test_id_2",
+ "_score": null,
+ "_routing": "test_routing_2",
+ "_source": {
+ "exa_parser_name": "test_parser_2",
+ "forwarder": "test_forwarder_2",
+ "app_protocol": "test",
+ "is_threat_dest_ip": false,
+ "@timestamp": "2022-06-12T23:59:57.458Z",
+ "exa_rsc": {
+ "hostname": "localhost",
+ "timezone": "+03",
+ "time_off": 10800,
+ "timestamp": "2022-06-12T23:59:58.459Z"
+ },
+ "exa_adjustedEventTime": "2022-06-12T23:55:05.000Z",
+ "dest_translated_ip": "0.0.0.0",
+ "exa_activity_type": [
+ "network"
+ ],
+ "host": "local",
+ "exa-message-size": 1006,
+ "is_tor_dest_ip": false,
+ "exa_outcome": [
+ "success"
+ ],
+ "src_port": 0,
+ "event_name": "Accept",
+ "outcome": "Accept",
+ "rule": "test rule",
+ "src_translated_ip": "test_src_translated_ip_2",
+ "direction": "inbound",
+ "src_ip": "test_src_ip_2",
+ "rule_id": "test_rule_id_2",
+ "is_ransomware_dest_ip": false,
+ "dest_ip": "test_dest_ip_2",
+ "indexTime": "2022-06-12T23:59:59.298Z",
+ "interface_name": "test_interface_name_2",
+ "Vendor": "test_vendor_2",
+ "origin_ip": "test_origin_ip_2",
+ "data_type": "network-connection",
+ "product_name": "VPN",
+ "dest_port": 0,
+ "dest_translated_port": "0",
+ "exa_rawEventTime": "2022-06-12T23:55:05.000Z",
+ "message": "<134>1 2022-06-12T23:55:05Z",
+ "time": "2022-06-12T23:55:05.000Z",
+ "src_translated_port": "0",
+ "Product": "test_product_2",
+ "@version": "1",
+ "exa_category": "Network",
+ "exa_device_type": [
+ "network",
+ "network"
+ ],
+ "is_threat_src_ip": false,
+ "action": "Accept",
+ "protocol": "00",
+ "is_ransomware_src_ip": false,
+ "is_tor_src_ip": false
+ },
+ "sort": [
+ 1689206397458
+ ]
+ },
+ {
+ "_index": "exabeam-2023",
+ "_type": "logs",
+ "_id": "test_id_3",
+ "_score": null,
+ "_routing": "test_routing_1",
+ "_source": {
+ "exa_parser_name": "test_parser_3",
+ "forwarder": "test_forwarder_3",
+ "app_protocol": "test",
+ "is_threat_dest_ip": false,
+ "@timestamp": "2022-06-12T23:59:57.458Z",
+ "exa_rsc": {
+ "hostname": "localhost",
+ "timezone": "+03",
+ "time_off": 10800,
+ "timestamp": "2022-06-12T23:59:58.459Z"
+ },
+ "exa_adjustedEventTime": "2022-06-12T23:55:05.000Z",
+ "dest_translated_ip": "0.0.0.0",
+ "exa_activity_type": [
+ "network-traffic"
+ ],
+ "host": "local",
+ "exa-message-size": 1006,
+ "is_tor_dest_ip": false,
+ "exa_outcome": [
+ "success"
+ ],
+ "src_port": 0,
+ "event_name": "Accept",
+ "outcome": "Accept",
+ "rule": "test rule",
+ "src_translated_ip": "test_src_translated_ip_3",
+ "direction": "inbound",
+ "src_ip": "test_src_ip_3",
+ "rule_id": "test_rule_id_3",
+ "is_ransomware_dest_ip": false,
+ "dest_ip": "test_dest_ip_3",
+ "indexTime": "2022-06-12T23:59:59.298Z",
+ "interface_name": "test_interface_name_3",
+ "Vendor": "test_vendor_3",
+ "origin_ip": "test_origin_ip_3",
+ "data_type": "network-connection",
+ "product_name": "VPN",
+ "dest_port": 0,
+ "dest_translated_port": "0",
+ "exa_rawEventTime": "2022-06-12T23:55:05.000Z",
+ "message": "<134>1 2022-06-12T23:55:05Z",
+ "time": "2022-06-12T23:55:05.000Z",
+ "src_translated_port": "0",
+ "Product": "test_product_3",
+ "@version": "1",
+ "exa_category": "Network",
+ "exa_device_type": [
+ "network",
+ "network"
+ ],
+ "is_threat_src_ip": false,
+ "action": "Accept",
+ "protocol": "00",
+ "is_ransomware_src_ip": false,
+ "is_tor_src_ip": false
+ },
+ "sort": [
+ 1689206397458
+ ]
+ }
+ ]
+ },
+ "status": 200
+ }
+ ]
+}
diff --git a/Packs/ExabeamDataLake/README.md b/Packs/ExabeamDataLake/README.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ExabeamDataLake/TestPlaybooks/ExabeamDataLake-test.yml b/Packs/ExabeamDataLake/TestPlaybooks/ExabeamDataLake-test.yml
new file mode 100644
index 000000000000..0ef845916319
--- /dev/null
+++ b/Packs/ExabeamDataLake/TestPlaybooks/ExabeamDataLake-test.yml
@@ -0,0 +1,88 @@
+id: ExabeamDataLake-test
+version: -1
+vcShouldKeepItemLegacyProdMachine: false
+name: ExabeamDataLake-test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: c6af0dc9-3028-4a11-856e-ebd4b862bbf3
+ type: start
+ task:
+ id: c6af0dc9-3028-4a11-856e-ebd4b862bbf3
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 1f926fc2-1d31-41ae-8267-f878f006f8cb
+ type: regular
+ task:
+ id: 1f926fc2-1d31-41ae-8267-f878f006f8cb
+ version: -1
+ name: exabeam-data-lake-search
+ description: Get events from Exabeam Data Lake.
+ script: '|||exabeam-data-lake-search'
+ type: regular
+ iscommand: true
+ brand: ""
+ scriptarguments:
+ end_time:
+ simple: 15-05-2024
      query:
        simple: Product:Exabeam DL AND Vendor:Exabeam AND session_id:rkoch-20240514132437
          AND _id:lms.kafka.topic_10_4050665_82063a0a4d1b
+ start_time:
+ simple: 14-05-2024
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 290
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 335,
+ "width": 380,
+ "x": 450,
+ "y": 50
+ }
+ }
+ }
+inputs: []
+outputs: []
+quiet: true
+fromversion: 6.10.0
\ No newline at end of file
diff --git a/Packs/ExabeamDataLake/pack_metadata.json b/Packs/ExabeamDataLake/pack_metadata.json
new file mode 100644
index 000000000000..fa5092a71efb
--- /dev/null
+++ b/Packs/ExabeamDataLake/pack_metadata.json
@@ -0,0 +1,19 @@
+{
+ "name": "ExabeamDataLake",
+ "description": "Exabeam Data Lake provides a highly scalable, cost-effective, and searchable log management system. Data Lake is used for log collection, storage, processing, and presentation.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "Analytics & SIEM"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/ExabeamSecurityOperationsPlatform/.pack-ignore b/Packs/ExabeamSecurityOperationsPlatform/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ExabeamSecurityOperationsPlatform/.secrets-ignore b/Packs/ExabeamSecurityOperationsPlatform/.secrets-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.py b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.py
new file mode 100644
index 000000000000..63589068ae90
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.py
@@ -0,0 +1,313 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+from CommonServerUserPython import * # noqa
+
+import urllib3
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+
+''' CONSTANTS '''
+
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+
+
+''' CLIENT CLASS '''
+
+
+class Client(BaseClient):
+    """
+    Exabeam Client: A Python wrapper for interacting with the Exabeam API.
+
+    Authenticates on construction (OAuth2 client-credentials flow) and stores
+    the resulting bearer token for subsequent search requests.
+    """
+
+    def __init__(self, base_url: str, client_id: str, client_secret: str, verify: bool,
+                 proxy: bool):
+        """
+        Args:
+            base_url: Base URL of the Exabeam API.
+            client_id: OAuth2 client ID used to obtain an access token.
+            client_secret: OAuth2 client secret paired with client_id.
+            verify: Whether to verify the server's TLS certificate.
+            proxy: Whether to use the system proxy settings.
+        """
+        super().__init__(base_url=f'{base_url}', verify=verify, proxy=proxy, timeout=20)
+        self.client_id = client_id
+        self.client_secret = client_secret
+        # Populated by _login(); None until a token has been obtained.
+        self.access_token = None
+
+        self._login()
+
+    def _login(self):
+        """
+        Logs in to the Exabeam API using the provided client_id and client_secret,
+        storing the returned bearer token on self.access_token.
+        This function must be called before any other API calls.
+        Note: the session is automatically closed in BaseClient's __del__
+        """
+        data = {"client_id": self.client_id, "client_secret": self.client_secret, "grant_type": "client_credentials"}
+
+        response = self._http_request(
+            "POST",
+            full_url=f"{self._base_url}/auth/v1/token",
+            data=data,
+        )
+        self.access_token = response.get('access_token')
+
+    def search_request(self, data_dict: dict) -> dict:
+        """
+        Sends a POST request to the /search/v2/events endpoint with the given
+        search parameters, authenticated via the stored bearer token.
+
+        Args:
+            data_dict: Search request body (filter, fields, limit, startTime, endTime, groupBy).
+
+        Returns:
+            dict: The parsed JSON response from the API.
+        """
+        data = json.dumps(data_dict)
+        full_url = f"{self._base_url}/search/v2/events"
+        response = self._http_request(
+            "POST",
+            full_url=full_url,
+            data=data,
+            headers={"Authorization": f"Bearer {self.access_token}", "Content-Type": "application/json"}
+        )
+        return response
+
+
+''' HELPER FUNCTIONS '''
+
+
+def get_date(time: str, arg_name: str) -> str:
+    """
+    Parse a time expression into a formatted timestamp string.
+
+    Args:
+        time (str): The time expression to parse (e.g. "7 days ago" or an ISO date).
+        arg_name (str): The argument name, used in the error message on failure.
+
+    Returns:
+        str: The parsed time formatted as '%Y-%m-%dT%H:%M:%SZ' (DATE_FORMAT).
+
+    Raises:
+        DemistoException: If the time expression cannot be parsed.
+    """
+    date_time = arg_to_datetime(arg=time, arg_name=arg_name, required=True)
+    if not date_time:
+        raise DemistoException(f"There was an issue parsing the {arg_name} provided.")
+    date = date_time.strftime(DATE_FORMAT)
+    return date
+
+
+def transform_string(input_str: str) -> str:
+    """
+    Transform a "key:value" pair into Lucene-ready syntax.
+
+    Args:
+        input_str (str): The input string to be transformed. It must be in the format "key:value".
+
+    Returns:
+        str: The transformed string where the value part is converted to lowercase if it's "true" or "false",
+        otherwise it's enclosed in double quotes.
+
+    Raises:
+        ValueError: If input_str contains no colon ("not enough values to unpack");
+            this is the error surfaced to the user via error_fixes() in main().
+
+    Examples:
+        transform_string("status:true") -> 'status:true'
+        transform_string("message:Hello World") -> 'message:"Hello World"'
+    """
+    key, value = input_str.split(':', 1)
+    if value.lower() in ['true', 'false']:
+        return f'{key}:{value.lower()}'
+    else:
+        return f'{key}:"{value}"'
+
+
+def process_string(input_str: str) -> str:
+    """
+    Process the input string by splitting it based on logical operators and transforming each part.
+
+    Scans the string left to right; whenever one of the logical operators
+    ('AND', 'OR', 'NOT', 'TO') appears in the window scanned so far, the text
+    before the operator is transformed with transform_string() and the operator
+    itself is kept verbatim. The remainder after the last operator is
+    transformed as well.
+
+    NOTE(review): the operator check is a plain substring test, so an uppercase
+    operator embedded inside a value (e.g. "TOKEN" contains "TO") would be
+    treated as a separator -- confirm queries never contain such values.
+
+    Args:
+        input_str: The input string to be processed. It may contain logical operators such as 'AND', 'OR', 'NOT', 'TO'.
+
+    Returns:
+        str: The processed string where each part is transformed using the transform_string function.
+    """
+    logical_operators = ['AND', 'OR', 'NOT', 'TO']
+    transformed_parts = []
+    start_index = 0
+
+    for end_index in range(len(input_str)):
+        if any(op in input_str[start_index:end_index] for op in logical_operators):
+            part = input_str[start_index:end_index].strip()
+            operator = next(op for op in logical_operators if op in part)
+            part = part.replace(operator, "").strip()
+            transformed_parts.append(transform_string(part))
+            transformed_parts.append(operator)
+            start_index = end_index + 1
+
+    if start_index < len(input_str):
+        remaining_part = input_str[start_index:].strip()
+        if remaining_part:
+            transformed_parts.append(transform_string(remaining_part))
+
+    return ' '.join(transformed_parts)
+
+
+def _parse_entry(entry: dict):
+    """
+    Parse a single entry from the API response to a dictionary.
+
+    Maps the API's camelCase keys to the human-readable column titles used in
+    the war-room table.
+
+    Args:
+        entry: The entry from the API response.
+    Returns:
+        dict or None: The parsed entry with empty elements removed, or None if
+            nothing remains after removal.
+    """
+    parsed = {
+        "Id": entry.get("id"),
+        "Raw Log Ids": entry.get("rawLogIds"),
+        "Tier": entry.get("tier"),
+        "Is Parsed": entry.get("parsed"),
+        "Raw Logs": entry.get("rawLogs"),
+        "Time": entry.get("time")
+    }
+    final = remove_empty_elements(parsed)
+    return final if final else None
+
+
+def _parse_group_by(entry: dict, titles: list):
+    """
+    Parses a single entry from the API response into a dictionary based on provided titles.
+
+    Args:
+        entry (dict): The entry from the API response.
+        titles (list): A list of keys to extract from the entry.
+
+    Returns:
+        dict or None: The parsed entry dictionary with non-empty elements, or None if all elements are empty.
+    """
+    parsed = {}
+    for title in titles:
+        parsed.update({title: entry.get(title)})
+    # Drop keys whose values are empty/None; an all-empty entry becomes None.
+    final = remove_empty_elements(parsed)
+    return final if final else None
+
+
+def get_limit(args: dict) -> int:
+    """
+    Get the limit value specified in the arguments.
+
+    Args:
+        args: A dictionary containing the 'limit' argument.
+
+    Returns:
+        int: The limit value if specified and less than or equal to 3000; otherwise, returns 3000 as the maximum limit.
+        If the 'limit' argument is not present in the dictionary or is None, returns 50 as the default limit.
+    """
+    if limit := args.get('limit'):
+        # Cap at the API's documented maximum of 3000 results.
+        return min(int(limit), 3000)
+
+    return 50
+
+
+def error_fixes(error: str) -> str:
+    """
+    Map a known raw error message to a user-facing recommendation.
+
+    Args:
+        error: The stringified exception raised during command execution.
+
+    Returns:
+        str: A recommendation for known errors ('not enough values to unpack'
+            is raised by transform_string when a query part lacks a colon),
+            or an empty string when no recommendation applies.
+    """
+    new_error = ""
+    if 'not enough values to unpack' in error:
+        new_error = ("Recommendation:\nValidate the query argument "
+                     "against the syntax documentation in the integration description.")
+
+    return new_error
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def search_command(client: Client, args: dict) -> CommandResults:
+    """
+    Search for logs using the Exabeam client with the provided arguments.
+
+    Args:
+        client: An instance of the Exabeam client used to make the search request.
+        args: A dictionary containing search query parameters and options
+            (query, fields, limit, start_time, end_time, group_by).
+
+    Returns:
+        CommandResults: A CommandResults object containing the search results in both structured and human-readable formats.
+
+    Raises:
+        DemistoException: If start_time is after end_time, or if the API
+            response contains an "errors" entry.
+    """
+    start_time = get_date(args.get('start_time', '7 days ago'), "start_time")
+    end_time = get_date(args.get('end_time', 'today'), "end_time")
+    # Both values share DATE_FORMAT (ISO-8601), so lexicographic string
+    # comparison matches chronological order.
+    if start_time > end_time:
+        raise DemistoException("Start time must be before end time.")
+
+    kwargs = {
+        'filter': process_string(args.get('query', '')),
+        'fields': argToList(args.get('fields', '*')),
+        'limit': get_limit(args),
+        'startTime': start_time,
+        'endTime': end_time,
+    }
+    group_by = args.get('group_by')
+    if group_by:
+        group_list = argToList(group_by)
+        # When grouping, the requested fields are the group-by fields themselves.
+        kwargs.update({'groupBy': group_list, 'fields': group_list})
+
+    response = client.search_request(kwargs)
+
+    if error := response.get("errors", {}):
+        raise DemistoException(error.get("message"))
+
+    data_response = response.get("rows", {})
+
+    human_readable = []
+    for entry in data_response:
+        if group_by:
+            if parsed_entry := _parse_group_by(entry, group_list):
+                human_readable.append(parsed_entry)
+        elif parsed_entry := _parse_entry(entry):
+            human_readable.append(parsed_entry)
+
+    return CommandResults(
+        outputs_prefix="ExabeamPlatform.Event",
+        outputs=data_response,
+        readable_output=tableToMarkdown(name="Logs", t=human_readable),
+    )
+
+
+def test_module(client: Client) -> str:  # pragma: no cover
+    """Connectivity test for the integration's Test button.
+
+    Args:
+        client: Client
+
+    Returns:
+        'ok' if successful.
+        Reaching this point means Client.__init__ completed _login() without
+        raising, so a non-empty access token implies authentication succeeded.
+
+    Raises:
+        DemistoException: If no access token was obtained during login.
+    """
+    if client.access_token:
+        return 'ok'
+    else:
+        raise DemistoException('Access Token Generation Failure.')
+
+
+''' MAIN FUNCTION '''
+
+
+def main() -> None:
+    """Entry point: read integration params/args, build the client, and dispatch the command."""
+    params = demisto.params()
+    args = demisto.args()
+    command = demisto.command()
+
+    # Type 9 (credentials) parameter: identifier holds the client ID,
+    # password holds the client secret.
+    credentials = params.get('credentials', {})
+    client_id = credentials.get('identifier')
+    client_secret = credentials.get('password')
+    base_url = params.get('url', '')
+    verify_certificate = not params.get('insecure', False)
+    proxy = params.get('proxy', False)
+
+    try:
+        client = Client(
+            base_url.rstrip('/'),
+            verify=verify_certificate,
+            client_id=client_id,
+            client_secret=client_secret,
+            proxy=proxy)
+
+        demisto.debug(f'Command being called is {demisto.command()}')
+
+        if command == 'test-module':
+            return_results(test_module(client))
+        elif command == 'exabeam-platform-event-search':
+            return_results(search_command(client, args))
+        else:
+            raise NotImplementedError(f"Command {command} is not supported")
+
+    except Exception as e:
+        # Attach a recommendation for known query-syntax errors.
+        recommend = error_fixes(str(e))
+        demisto.info(str(e))
+        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}\n{recommend}')
+
+
+''' ENTRY POINT '''
+
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.yml b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.yml
new file mode 100644
index 000000000000..380da69c019a
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform.yml
@@ -0,0 +1,77 @@
+category: Analytics & SIEM
+commonfields:
+ id: ExabeamSecOpsPlatform
+ version: -1
+configuration:
+- defaultvalue: https://example.com/
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: Client ID
+ name: credentials
+ required: true
+ section: Connect
+ type: 9
+ displaypassword: Client Secret
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+description: Exabeam Security Operations Platform offers a centralized and scalable platform for log management.
+display: Exabeam Security Operations Platform
+name: ExabeamSecOpsPlatform
+script:
+ commands:
+ - arguments:
+ - defaultValue: "7 days ago"
+ description: The starting date for the search range.
+ name: start_time
+ - defaultValue: "today"
+ description: The ending date for the search range.
+ name: end_time
+ - defaultValue: ""
+ description: Query, using Lucene syntax, filters log data for precise analysis.
+ name: query
+ required: false
+ - description: Comma-separated list of fields to be returned from the search.
+ name: fields
+ required: false
+ isArray: true
+ - description: Comma-separated list of fields by which to group the results.
+ name: group_by
+ isArray: true
+ required: false
+ - description: The maximal number of results to return. Maximum value is 3000.
+ name: limit
+ required: false
+ description: Get events from Exabeam Security Operations Platform.
+ name: exabeam-platform-event-search
+ outputs:
+ - contextPath: ExabeamPlatform.Event.id
+ description: The unique identifier associated with the event.
+ type: String
+ - contextPath: ExabeamPlatform.Event.rawLogIds
+ description: The raw log identifiers associated with the event.
+ type: String
+ - contextPath: ExabeamPlatform.Event.tier
+ description: The tier associated with the event.
+ type: String
+ - contextPath: ExabeamPlatform.Event.parsed
+ description: Whether the event has been parsed.
+ type: String
+ - contextPath: ExabeamPlatform.Event.rawLogs
+ description: The raw logs associated with the event.
+ type: String
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/python3:3.10.14.95956
+fromversion: 6.10.0
+tests:
+- ExabeamSecurityOperationsPlatform-test
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_description.md b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_description.md
new file mode 100644
index 000000000000..4cfc8a371ae3
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_description.md
@@ -0,0 +1,8 @@
+### Authentication Method
+ - **OAuth2.0** - Provide the ID and Secret key in the integration configuration.
+
+
+For assistance accessing the Exabeam API, refer to [Exabeam API Base URLs documentation](https://developers.exabeam.com/exabeam/docs/exabeam-api-base-urls)
+
+Find API key instructions here: [API Keys Documentation](https://developers.exabeam.com/exabeam/docs/api-keys)
+
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_image.png b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_image.png
new file mode 100644
index 000000000000..1be1cbaa0121
Binary files /dev/null and b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_image.png differ
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_test.py b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_test.py
new file mode 100644
index 000000000000..11b930111028
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/ExabeamSecOpsPlatform_test.py
@@ -0,0 +1,273 @@
+import json
+import pytest
+from CommonServerPython import DemistoException, CommandResults
+from ExabeamSecOpsPlatform import Client, search_command, get_limit, get_date, transform_string, process_string, _parse_group_by
+
+
+class MockClient(Client):
+    """Test double for Client that skips authentication entirely."""
+
+    def __init__(self, base_url: str, username: str, password: str, verify: bool, proxy: bool):
+        # Intentionally does not call super().__init__, so no HTTP login occurs.
+        pass
+
+    def search_command(self) -> None:
+        return
+
+
+def test_search_command_success(mocker):
+    """
+    GIVEN:
+        A mocked Exabeam client and valid search query arguments.
+
+    WHEN:
+        'search_command' function is called with the provided arguments.
+
+    THEN:
+        It should search for logs using the Exabeam client and return a CommandResults object containing
+        the search results in both structured and human-readable formats.
+    """
+    # Mock the response from the client's search_request method.
+    # The first row carries every table column; the second row checks that
+    # missing columns render as empty cells in the markdown table.
+    mock_response = {
+        "rows": [
+            {
+                "id": "123",
+                "rawLogIds": "1",
+                "tier": "Tier",
+                "parsed": "false",
+                "rawLogs": "fictive",
+                "time": "2024-01-30T11:20:07.000000+00:00",
+                "message": "Log message 1",
+                "activity": "trigger",
+                "platform": "blackberry protect",
+                "vendor": "BlackBerry"
+            },
+            {
+                "id": "456",
+                "time": "2024-01-30T11:21:06.976000+00:00",
+                "message": "Log message 2",
+                "activity": "trigger",
+                "platform": "blackberry protect",
+                "vendor": "BlackBerry"
+            }
+        ]
+    }
+
+    client = MockClient("", "", "", False, False)
+
+    mocker.patch.object(client, "search_request", return_value=mock_response)
+
+    # Define test arguments
+    args = {
+        'query': '',
+        'fields': 'message',
+        'limit': '50',
+        'start_time': '2024-05-01T00:00:00',
+        'end_time': '2024-05-08T00:00:00'
+    }
+
+    # Call the search_command function
+    response = search_command(client, args)
+
+    assert isinstance(response, CommandResults)
+    assert response.outputs_prefix == "ExabeamPlatform.Event"
+    # Raw rows are passed through unchanged as the context output.
+    assert response.outputs == mock_response["rows"]
+    expected_readable_output = (
+        "### Logs\n"
+        "|Id|Is Parsed|Raw Log Ids|Raw Logs|Tier|Time|\n"
+        "|---|---|---|---|---|---|\n"
+        "| 123 | false | 1 | fictive | Tier | 2024-01-30T11:20:07.000000+00:00 |\n"
+        "| 456 | | | | | 2024-01-30T11:21:06.976000+00:00 |\n"
+    )
+    assert expected_readable_output in response.readable_output
+
+
+def test_search_command_failure(mocker):
+    """
+    GIVEN:
+        A mocked Exabeam client whose response contains an "errors" entry.
+
+    WHEN:
+        'search_command' function is called with the provided arguments.
+
+    THEN:
+        It should raise a DemistoException carrying the API error message.
+    """
+    # Mocking the client to simulate a response with errors
+    client = MockClient("", "", "", False, False)
+    mocker.patch.object(client, "search_request", return_value={"errors": {"message": "Error occurred"}})
+
+    args = {
+        'query': '',
+        'fields': 'message',
+        'limit': '50',
+        'start_time': '2024-05-01T00:00:00',
+        'end_time': '2024-05-08T00:00:00'
+    }
+
+    with pytest.raises(DemistoException, match="Error occurred"):
+        search_command(client, args)
+
+
+def test_get_date(mocker):
+    """
+    GIVEN:
+        a time string and a patched CommonServerPython.arg_to_datetime,
+
+    WHEN:
+        'get_date' function is called with the provided time string,
+
+    THEN:
+        it should return the provided time formatted as '%Y-%m-%dT%H:%M:%SZ'.
+    """
+    time = '2024.05.01T14:00:00'
+    expected_result = '2024-05-01T14:00:00Z'
+
+    # NOTE(review): mocker.patch is not meant to be used as a context manager,
+    # and the patched path is not the name get_date actually resolves, so the
+    # real arg_to_datetime performs the parsing here -- consider removing the
+    # `with` wrapper.
+    with mocker.patch("CommonServerPython.arg_to_datetime", return_value=time):
+        result = get_date(time, "start_time")
+
+    assert result == expected_result
+
+
+@pytest.mark.parametrize('input_str, expected_output', [
+    (  # multi-word value gets quoted
+        "key:Some Value",
+        'key:"Some Value"'
+    ),
+    (  # boolean value is lowercased, not quoted
+        "key:TrUe",
+        "key:true"
+    ),
+    (
+        "key:false",
+        "key:false"
+    )
+])
+def test_transform_string(input_str, expected_output):
+    """
+    GIVEN:
+        An input string to be transformed.
+    WHEN:
+        The 'transform_string' function is called with the input string.
+    THEN:
+        It should transform the input string according to the specified rules.
+    """
+    assert transform_string(input_str) == expected_output
+
+
+@pytest.mark.parametrize('input_str, expected_output', [
+    (  # boolean values with every supported operator pass through unquoted
+        "key1:true AND key2:false OR key3:true TO key4:false",
+        'key1:true AND key2:false OR key3:true TO key4:false'
+    ),
+    (  # single pair, no operators
+        "key1:true",
+        'key1:true'
+    ),
+    (  # empty query yields an empty filter
+        "",
+        ''
+    ),
+    (  # non-boolean values get quoted while operators are preserved
+        "key1:true AND key2:some value OR key3:another value",
+        'key1:true AND key2:"some value" OR key3:"another value"'
+    )
+])
+def test_process_string(input_str, expected_output):
+    """
+    GIVEN:
+        An input string to be processed.
+    WHEN:
+        The 'process_string' function is called with the input string.
+    THEN:
+        It should correctly process the input string, splitting it based on logical operators and transforming each part using
+        the 'transform_string' function.
+    """
+    assert process_string(input_str) == expected_output
+
+
+def test_search_request(mocker):
+    """
+    GIVEN:
+        A dictionary containing data to be sent in the request.
+        A mocked '_http_request' method of the Client class.
+        A base URL and an access token.
+    WHEN:
+        The 'search_request' method of the Client class is called with the data dictionary.
+    THEN:
+        It should send a POST request to the specified URL with the provided data and headers.
+    """
+    # Patch _login so constructing the Client does not hit the network.
+    mocker.patch('ExabeamSecOpsPlatform.Client._login')
+    mock_http_request = mocker.patch('ExabeamSecOpsPlatform.Client._http_request')
+    base_url = "https://example-api.com"
+    client_id = "your_client_id"
+    client_secret = "your_client_secret"
+
+    instance = Client(base_url=base_url, client_id=client_id, client_secret=client_secret,
+                      verify=False, proxy=False)
+    instance.access_token = "dummy_token"
+    data_dict = {"key": "value"}
+    expected_url = "https://example-api.com/search/v2/events"
+    expected_headers = {
+        "Authorization": "Bearer dummy_token",
+        "Content-Type": "application/json"
+    }
+    mocked_response = {"response_key": "response_value"}
+    mock_http_request.return_value = mocked_response
+    result = instance.search_request(data_dict)
+
+    mock_http_request.assert_called_once_with(
+        "POST",
+        full_url=expected_url,
+        data=json.dumps(data_dict),
+        headers=expected_headers
+    )
+    assert result == mocked_response
+
+
+@pytest.mark.parametrize('args, expected_output', [
+    ({}, 50),            # missing -> default
+    ({'limit': None}, 50),   # explicit None -> default
+    ({'limit': 1000}, 1000),  # within range -> unchanged
+    ({'limit': 5000}, 3000)   # above cap -> clamped to 3000
+])
+def test_get_limit(args, expected_output):
+    """
+    GIVEN:
+        a dictionary containing the 'limit' argument with various values.
+
+    WHEN:
+        'get_limit' function is called with the provided dictionary.
+
+    THEN:
+        it should return the limit value if specified and less than or equal to 3000;
+        otherwise, it should return 3000 as the maximum limit.
+        If the 'limit' argument is not present in the dictionary or is None, it should return 50 as the default limit.
+    """
+    assert get_limit(args) == expected_output
+
+
+def test_parse_group_by():
+    """
+    GIVEN:
+        an entry dictionary containing information about an item with various attributes.
+
+    WHEN:
+        '_parse_group_by' function is called with the provided entry dictionary and a list of titles.
+
+    THEN:
+        it should return a parsed dictionary with non-empty elements based on the provided titles;
+        empty elements should be removed.
+    """
+    entry = {
+        'Id': '123',
+        'Vendor': 'Vendor X',
+        'Product': '',  # empty value: dropped by remove_empty_elements
+        'Created_at': '2024-05-26T12:00:00',
+        'Message': 'This is a message.'
+    }
+    titles = ['Id', 'Vendor', 'Created_at', 'Message']
+    expected_result = {
+        'Id': '123',
+        'Vendor': 'Vendor X',
+        'Created_at': '2024-05-26T12:00:00',
+        'Message': 'This is a message.'
+    }
+    assert _parse_group_by(entry, titles) == expected_result
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/README.md b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/README.md
new file mode 100644
index 000000000000..3dbd391158dd
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/README.md
@@ -0,0 +1,108 @@
+Exabeam Security Operations Platform offers a centralized and scalable platform for log management.
+This integration was integrated and tested with version v1.0 of ExabeamSecOpsPlatform.
+
+## Configure Exabeam Security Operations Platform on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Exabeam Security Operations Platform.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Required** |
+ | --- | --- |
+ | Server URL | True |
+ | Client ID | True |
+ | Client Secret | True |
+ | Trust any certificate (not secure) | False |
+ | Use system proxy settings | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### exabeam-platform-event-search
+
+***
+Get events from Exabeam Security Operations Platform.
+
+#### Base Command
+
+`exabeam-platform-event-search`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| start_time | The starting date for the search range. Default is "7 days ago". | Optional |
+| end_time | The ending date for the search range. Default is "today". | Optional |
+| query | Query, using Lucene syntax, filters log data for precise analysis. | Optional |
+| fields | Comma-separated list of fields to be returned from the search. | Optional |
+| group_by | Comma-separated list of fields by which to group the results. | Optional |
+| limit | The maximum number of results to return. Maximum value is 3000. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| ExabeamPlatform.Event.id | String | The unique identifier associated with the event. |
+| ExabeamPlatform.Event.rawLogIds | String | The raw log identifiers associated with the event. |
+| ExabeamPlatform.Event.tier | String | The tier associated with the event. |
+| ExabeamPlatform.Event.parsed | String | Whether the event has been parsed. |
+| ExabeamPlatform.Event.rawLogs | String | The raw logs associated with the event. |
+
+#### Command example
+```!exabeam-platform-event-search end_time="today" start_time="7 days ago" limit=2```
+#### Context Example
+```json
+{
+ "ExabeamPlatform": {
+ "Event": [
+ {
+ "approxLogTime": 1715694190909000,
+ "collector_timestamp": 1715694190909000,
+ "customFieldsJSON": "{}",
+ "id": "fake",
+ "ingest_time": 1715694222815000,
+ "metadataFieldsJSON": "{\"m_collector_id\":\"aae1627e-8637-4597-9f43-e49a703a6151\",\"m_collector_name\":\"exa-cribl-logs-sm_exa_ws\",\"m_collector_type\":\"cribl-logs\"}",
+ "parsed": false,
+ "rawLogIds": [
+ "log-fic"
+ ],
+ "rawLogs": [
+ "ANY rawLog"
+ ],
+ "raw_log_size": 9,
+ "tier": "Tier 4"
+ },
+ {
+ "approxLogTime": 1715694915916000,
+ "collector_timestamp": 1715694915916000,
+ "customFieldsJSON": "{}",
+ "id": "fictive-id",
+ "ingest_time": 1715694946775000,
+ "metadataFieldsJSON": "{\"m_collector_id\":\"aae1627e-8637-4597-9f43-e49a703a6151\",\"m_collector_name\":\"exa-cribl-logs-sm_exa_ws\",\"m_collector_type\":\"cribl-logs\"}",
+ "parsed": false,
+ "rawLogIds": [
+ "rawLogId"
+ ],
+ "rawLogs": [
+ "CONNECT hotmail"
+ ],
+ "raw_log_size": 59,
+ "tier": "Tier 4"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Logs
+>|Id|Is Parsed|Raw Log Ids|Raw Logs|Tier|
+>|---|---|---|---|---|
+>| fake | false | log-fic | ANY rawLog | Tier 4 |
+>| fictive-id | false | rawLogId | CONNECT hotmail | Tier 4 |
+
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/command_examples b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/command_examples
new file mode 100644
index 000000000000..238f3552e076
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/command_examples
@@ -0,0 +1 @@
+!exabeam-platform-event-search end_time="today" start_time="7 days ago" limit=2
\ No newline at end of file
diff --git a/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/test_data/baseintegration-dummy.json b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/test_data/baseintegration-dummy.json
new file mode 100644
index 000000000000..37fa47b18cd0
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/Integrations/ExabeamSecOpsPlatform/test_data/baseintegration-dummy.json
@@ -0,0 +1,3 @@
+{
+ "dummy": "this is a dummy response"
+}
\ No newline at end of file
diff --git a/Packs/ExabeamSecurityOperationsPlatform/README.md b/Packs/ExabeamSecurityOperationsPlatform/README.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/ExabeamSecurityOperationsPlatform/TestPlaybooks/ExabeamSecurityOperationsPlatform-test.yml b/Packs/ExabeamSecurityOperationsPlatform/TestPlaybooks/ExabeamSecurityOperationsPlatform-test.yml
new file mode 100644
index 000000000000..717ff9f35757
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/TestPlaybooks/ExabeamSecurityOperationsPlatform-test.yml
@@ -0,0 +1,85 @@
+id: ExabeamSecurityOperationsPlatform-test
+version: -1
+vcShouldKeepItemLegacyProdMachine: false
+name: ExabeamSecurityOperationsPlatform-test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 9d0b4cdf-ef28-4058-8f4b-94000ee7db23
+ type: start
+ task:
+ id: 9d0b4cdf-ef28-4058-8f4b-94000ee7db23
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: c378fe50-d7c3-4110-8633-a209d575f171
+ type: regular
+ task:
+ id: c378fe50-d7c3-4110-8633-a209d575f171
+ version: -1
+ name: exabeam-platform-event-search
+ description: Get events from Exabeam Security Operations Platform.
+ script: ExabeamSecOpsPlatform|||exabeam-platform-event-search
+ type: regular
+ iscommand: true
+ brand: ExabeamSecOpsPlatform
+ scriptarguments:
+ end_time:
+ simple: 05.20.2024
+ start_time:
+ simple: 05.01.2024
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 260
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 305,
+ "width": 380,
+ "x": 450,
+ "y": 50
+ }
+ }
+ }
+inputs: []
+outputs: []
+quiet: true
+fromversion: 6.10.0
\ No newline at end of file
diff --git a/Packs/ExabeamSecurityOperationsPlatform/pack_metadata.json b/Packs/ExabeamSecurityOperationsPlatform/pack_metadata.json
new file mode 100644
index 000000000000..dbb4bdec384c
--- /dev/null
+++ b/Packs/ExabeamSecurityOperationsPlatform/pack_metadata.json
@@ -0,0 +1,19 @@
+{
+ "name": "Exabeam Security Operations Platform",
+    "description": "Exabeam Security Operations Platform offers a centralized and scalable platform for log management.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "Analytics & SIEM"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/ExtraHop/ReleaseNotes/2_2_6.md b/Packs/ExtraHop/ReleaseNotes/2_2_6.md
new file mode 100644
index 000000000000..cfd2be0ed2c1
--- /dev/null
+++ b/Packs/ExtraHop/ReleaseNotes/2_2_6.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### ExtraHopTrackIncidents
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/ExtraHop/Scripts/ExtraHopTrackIncidents/ExtraHopTrackIncidents.yml b/Packs/ExtraHop/Scripts/ExtraHopTrackIncidents/ExtraHopTrackIncidents.yml
index ad2653466b66..df94c0b95e7f 100644
--- a/Packs/ExtraHop/Scripts/ExtraHopTrackIncidents/ExtraHopTrackIncidents.yml
+++ b/Packs/ExtraHop/Scripts/ExtraHopTrackIncidents/ExtraHopTrackIncidents.yml
@@ -15,7 +15,7 @@ scripttarget: 0
dependson:
must:
- 'ExtraHop v2|||extrahop-ticket-track'
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
tests:
- ExtraHop_v2-Test
diff --git a/Packs/ExtraHop/pack_metadata.json b/Packs/ExtraHop/pack_metadata.json
index ee469fb66c67..944e911d62e6 100644
--- a/Packs/ExtraHop/pack_metadata.json
+++ b/Packs/ExtraHop/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ExtraHop Reveal(x)",
"description": "Network detection and response. Complete visibility of network communications at enterprise scale, real-time threat detections backed by machine learning, and guided investigation workflows that simplify response.",
"support": "partner",
- "currentVersion": "2.2.5",
+ "currentVersion": "2.2.6",
"author": "ExtraHop",
"url": "https://customer.extrahop.com/s/",
"email": "",
diff --git a/Packs/F5BigIPAWAF/.pack-ignore b/Packs/F5BigIPAWAF/.pack-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/F5BigIPAWAF/.secrets-ignore b/Packs/F5BigIPAWAF/.secrets-ignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.xif b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.xif
new file mode 100644
index 000000000000..8776da34048b
--- /dev/null
+++ b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.xif
@@ -0,0 +1,216 @@
+[RULE: f5_waf_generic_fields]
+alter
+ get_log_level = arrayindex(regextract(_raw_log, "\<\d+\>[^\>]+\s+\S+\s+(\S+)\s+\S+\[\d+\]\:"), 0),
+ get_description = arrayindex(regextract(_raw_log, "\<\d+\>[^\>]+\s+\S+\s+\S+\s+\S+\[\d+\]\:\s+(.*)"), 0)
+| alter
+ get_event_id = arrayindex(regextract(get_description, "^\s*(\d+):\d+:"), 0)
+| alter
+ xdm.source.host.hostname = arrayindex(regextract(_raw_log, "\<\d+\>[^\>]+\s+(\S+)\s+\S+\s+\S+\[\d+\]\:"), 0),
+ xdm.event.log_level = if(get_log_level = "notice", XDM_CONST.LOG_LEVEL_NOTICE, get_log_level = "warning", XDM_CONST.LOG_LEVEL_WARNING, get_log_level = "err", XDM_CONST.LOG_LEVEL_ERROR, get_log_level = "info", XDM_CONST.LOG_LEVEL_INFORMATIONAL, get_log_level = "emerg", XDM_CONST.LOG_LEVEL_EMERGENCY, get_log_level = "emerg", XDM_CONST.LOG_LEVEL_EMERGENCY, get_log_level = "debug", XDM_CONST.LOG_LEVEL_DEBUG, get_log_level ~= "ale", XDM_CONST.LOG_LEVEL_ALERT, get_log_level ~= "crit", XDM_CONST.LOG_LEVEL_CRITICAL),
+ xdm.source.process.name = arrayindex(regextract(_raw_log, "\<\d+\>[^\>]+\s+\S+\s+\S+\s+(\S+)\[\d+\]\:"), 0),
+ xdm.source.process.pid = to_integer(arrayindex(regextract(_raw_log, "\<\d+\>[^\>]+\s+\S+\s+\S+\s+\S+\[(\d+)\]\:"), 0)),
+ xdm.event.description = get_description,
+ xdm.event.id = get_event_id;
+
+
+
+[MODEL: dataset="f5_waf_raw"]
+call f5_waf_generic_fields
+// Event ID 01420002
+| filter _raw_log ~= "01420002"
+| alter
+ get_01420002_user = arrayindex(regextract(_raw_log, "user=([^\=]+)\s+\S+="), 0),
+ get_01420002_folder = arrayindex(regextract(_raw_log, "folder=([^\=]+)\s+\S+="), 0),
+ get_01420002_module = arrayindex(regextract(_raw_log, "module=\(([^\)]+)\)\#\s+\S+="), 0),
+ get_01420002_status = arrayindex(regextract(_raw_log, "status=\s*\[([^\]]+)\]"), 0),
+ get_01420002_cmd_data = arrayindex(regextract(_raw_log, "cmd_data=\s*([^\;]+)"), 0)
+| alter
+ xdm.source.user.username = get_01420002_user,
+ xdm.source.process.executable.directory = get_01420002_folder,
+ xdm.observer.type = get_01420002_module,
+ xdm.source.process.command_line = get_01420002_cmd_data,
+ xdm.event.outcome = if(get_01420002_status = "Command OK", XDM_CONST.OUTCOME_SUCCESS, get_01420002_status = null, null, to_string(get_01420002_status));
+
+
+call f5_waf_generic_fields
+// Event ID 01260009
+| filter _raw_log ~= "01260009"
+| alter
+ get_01260009_source_ip = arrayindex(regextract(_raw_log, "01260009\:\d+\:\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):\d+\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+"), 0),
+ get_01260009_source_port = to_integer(arrayindex(regextract(_raw_log, "01260009\:\d+\:\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:(\d+)\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+"), 0)),
+ get_01260009_target_ip = arrayindex(regextract(_raw_log, "01260009\:\d+\:\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+\s+\-\>\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):\d+"), 0),
+ get_01260009_target_port = to_integer(arrayindex(regextract(_raw_log, "01260009\:\d+\:\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:(\d+)"), 0)),
+ get_01260009_alert_des = arrayindex(regextract(_raw_log, "01260009\:\d+\:.*Connection\s+error:\s+(.*)"), 0)
+| alter
+ xdm.source.ipv4 = get_01260009_source_ip,
+ xdm.source.port = get_01260009_source_port,
+ xdm.target.ipv4 = get_01260009_target_ip,
+ xdm.target.port = get_01260009_target_port,
+ xdm.alert.description = get_01260009_alert_des;
+
+call f5_waf_generic_fields
+// Event ID 0107142f
+| filter _raw_log ~= "0107142f"
+| alter
+ get_0107142f_source_ip = arrayindex(regextract(_raw_log, "0107142f\:\d+\:.*\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"), 0)
+| alter
+ xdm.source.ipv4 = get_0107142f_source_ip;
+
+call f5_waf_generic_fields
+// Event ID 0107143c
+| filter _raw_log ~= "0107143c"
+| alter
+ get_0107143c_source_ip = arrayindex(regextract(_raw_log, "0107143c:\d+\:.*\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"), 0)
+| alter
+ xdm.source.ipv4 = get_0107143c_source_ip;
+
+call f5_waf_generic_fields
+// Event ID 01230140
+| filter _raw_log ~= "01230140"
+| alter
+ get_01230140_source_ip = arrayindex(regextract(_raw_log, "01230140\:\d+\:.*from\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\d+\s+to\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\,\s+.*"), 0),
+ get_01230140_source_port = to_integer(arrayindex(regextract(_raw_log, "01230140\:\d+\:.*from\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\d+)\s+to\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\,\s+.*"), 0)),
+ get_01230140_target_ip = arrayindex(regextract(_raw_log, "01230140\:\d+\:.*from\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\s+to\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\d+\,\s+.*"), 0),
+ get_01230140_target_port = to_integer(arrayindex(regextract(_raw_log, "01230140\:\d+\:.*from\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\s+to\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\d+)\,\s+.*"), 0)),
+ get_01230140_alert_des = arrayindex(regextract(_raw_log, "01230140\:\d+\:.*from\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\s+to\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\,\s+(.*)"), 0)
+| alter
+ xdm.source.ipv4 = get_01230140_source_ip,
+ xdm.source.port = get_01230140_source_port,
+ xdm.target.ipv4 = get_01230140_target_ip,
+ xdm.target.port = get_01230140_target_port,
+ xdm.alert.description = get_01230140_alert_des;
+
+call f5_waf_generic_fields
+// Event ID 01260013
+| filter _raw_log ~= "01260013"
+| alter
+ get_01260013_source_ip = arrayindex(regextract(_raw_log, "01260013\:\d+\:.*\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\d+\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+"), 0),
+ get_01260013_source_port = to_integer(arrayindex(regextract(_raw_log, "01260013\:\d+\:.*\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\d+)\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+"), 0)),
+ get_01260013_target_ip = arrayindex(regextract(_raw_log, "01260013\:\d+\:.*\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\s+\-\>\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\d+"), 0),
+ get_01260013_target_port = to_integer(arrayindex(regextract(_raw_log, "01260013\:\d+\:.*\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+\s+\-\>\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\d+)"), 0))
+| alter
+ xdm.source.ipv4 = get_01260013_source_ip,
+ xdm.source.port = get_01260013_source_port,
+ xdm.target.ipv4 = get_01260013_target_ip,
+ xdm.target.port = get_01260013_target_port;
+
+call f5_waf_generic_fields
+// Event ID 01071681, 01071682
+| filter _raw_log ~= "01071681|01071682"
+| alter
+ get_01071681_01071682_virtualHost = arrayindex(regextract(_raw_log, "SNMP_TRAP\:\s+Virtual\s+(\S+)\s+has\s+become\s+\S+"), 0)
+| alter
+ xdm.source.agent.identifier = get_01071681_01071682_virtualHost;
+
+call f5_waf_generic_fields
+// Event ID 014f0005
+| filter _raw_log ~= "014f0005"
+| alter
+ get_014f0005_user = arrayindex(regextract(_raw_log, "014f0005\:\d+\:.*\s+user=(\S+)"), 0),
+ get_014f0005_action = arrayindex(regextract(_raw_log, "014f0005\:\d+\:.*\suser=\S+\s+action=\"+([^\"]+)"), 0),
+ get_014f0005_status = arrayindex(regextract(_raw_log, "014f0005\:\d+\:.*\suser=\S+\s+action=\"+[^\"]+\"+\s+status=\"+([^\"]+)"), 0)
+| alter
+ xdm.source.user.username = get_014f0005_user,
+ xdm.event.operation_sub_type = get_014f0005_action,
+ xdm.event.outcome_reason = get_014f0005_status;
+
+call f5_waf_generic_fields
+// Event ID 01071432
+| filter _raw_log ~= "01071432"
+| alter
+ get_01071432_target_ip = arrayindex(regextract(_raw_log, "01071432\:\d+\:.*\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s+port\s+\d+"), 0),
+ get_01071432_target_port = to_integer(arrayindex(regextract(_raw_log, "01071432\:\d+\:.*\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+port\s+(\d+)"), 0))
+| alter
+ xdm.target.ipv4 = get_01071432_target_ip,
+ xdm.target.port = get_01071432_target_port;
+
+call f5_waf_generic_fields
+// Event ID 01260006
+| filter _raw_log ~= "01260006"
+| alter
+ get_01260006_alert_des = arrayindex(regextract(_raw_log, "01260006\:\d+\:\s+Peer\s+cert\s+verify\s+error\:\s+(.*)"), 0),
+ get_01260006_cert_O = arrayindex(regextract(_raw_log, "01260006\:\d+\:\s+Peer\s+cert\s+verify\s+error\:\s+.*\/O\=([^\/]+)"), 0),
+ get_01260006_cert_CN = arrayindex(regextract(_raw_log, "01260006\:\d+\:\s+Peer\s+cert\s+verify\s+error\:\s+.*\/CN\=([^\/]+)\)"), 0)
+| alter
+ xdm.alert.description = get_01260006_alert_des,
+ xdm.network.tls.server_certificate.issuer = get_01260006_cert_O,
+ xdm.network.tls.server_certificate.subject = get_01260006_cert_CN;
+
+call f5_waf_generic_fields
+// ssl_req
+| filter _raw_log ~= "\[ssl_req\]"
+| alter
+ ssl_req_target_ip = arrayindex(regextract(_raw_log, "\[ssl_req\]\[[^\]]+\]\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"), 0),
+ ssl_req_tls_version = arrayindex(regextract(_raw_log, "\[ssl_req\]\[[^\]]+\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+(\S+)\s\S+\s"), 0),
+ ssl_req_tls_alg = arrayindex(regextract(_raw_log, "\[ssl_req\]\[[^\]]+\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s(\S+)\s"), 0),
+ ssl_req_path = arrayindex(regextract(_raw_log, "\[ssl_req\]\[[^\]]+\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s\S+\s+\"+([^\"]+)"), 0),
+ ssl_req_port = to_integer(arrayindex(regextract(_raw_log, "\[ssl_req\]\[[^\]]+\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s\S+\s+\"+[^\"]+\"+\s+(\d+)"), 0))
+| alter
+ xdm.target.ipv4 = ssl_req_target_ip,
+ xdm.network.tls.client_certificate.version = ssl_req_tls_version,
+ xdm.network.tls.client_certificate.algorithm = ssl_req_tls_alg,
+ xdm.target.file.path = ssl_req_path,
+ xdm.target.port = ssl_req_port;
+
+call f5_waf_generic_fields
+// ssl_acc
+| filter _raw_log ~= "\[ssl_acc\]"
+| alter
+ ssl_acc_target_ip = arrayindex(regextract(_raw_log, "\[ssl_acc\]\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"), 0),
+ ssl_acc_user = arrayindex(regextract(_raw_log, "\[ssl_acc\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s+(\S+)"), 0),
+ ssl_acc_path = arrayindex(regextract(_raw_log, "\[ssl_acc\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s+\S+\s+[^\]]+\]\s+\"+([^\"]+)"), 0),
+ ssl_acc_port = to_integer(arrayindex(regextract(_raw_log, "\[ssl_acc\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s+\S+\s+[^\]]+\]\s+\"+[^\"]+\"+\s+\d+\s+(\d+)"), 0)),
+ ssl_acc_response_code = arrayindex(regextract(_raw_log, "\[ssl_acc\]\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+\S+\s+\S+\s+[^\]]+\]\s+\"+[^\"]+\"+\s+(\d+)"), 0)
+| alter
+ xdm.target.ipv4 = ssl_acc_target_ip,
+ xdm.target.user.username = ssl_acc_user,
+ xdm.target.file.path = ssl_acc_path,
+ xdm.target.port = ssl_acc_port,
+ xdm.network.http.response_code = if(ssl_acc_response_code = "100", HTTP_RSP_CODE_CONTINUE, ssl_acc_response_code = "101", HTTP_RSP_CODE_SWITCHING_PROTOCOLS, ssl_acc_response_code = "102", HTTP_RSP_CODE_PROCESSING, ssl_acc_response_code = "103", HTTP_RSP_CODE_EARLY_HINTS, ssl_acc_response_code = "200", HTTP_RSP_CODE_OK, ssl_acc_response_code = "201", HTTP_RSP_CODE_CREATED, ssl_acc_response_code = "202", HTTP_RSP_CODE_ACCEPTED, ssl_acc_response_code = "203", HTTP_RSP_CODE_NON__AUTHORITATIVE_INFORMATION, ssl_acc_response_code = "204", HTTP_RSP_CODE_NO_CONTENT, ssl_acc_response_code = "205", HTTP_RSP_CODE_RESET_CONTENT, ssl_acc_response_code = "206", HTTP_RSP_CODE_PARTIAL_CONTENT, ssl_acc_response_code = "207", HTTP_RSP_CODE_MULTI__STATUS, ssl_acc_response_code = "208", HTTP_RSP_CODE_ALREADY_REPORTED, ssl_acc_response_code = "226", HTTP_RSP_CODE_IM_USED, ssl_acc_response_code = "300", HTTP_RSP_CODE_MULTIPLE_CHOICES, ssl_acc_response_code = "301", HTTP_RSP_CODE_MOVED_PERMANENTLY, ssl_acc_response_code = "302", HTTP_RSP_CODE_FOUND, ssl_acc_response_code = "303", HTTP_RSP_CODE_SEE_OTHER, ssl_acc_response_code = "304", HTTP_RSP_CODE_NOT_MODIFIED, ssl_acc_response_code = "305", HTTP_RSP_CODE_USE_PROXY, ssl_acc_response_code = "307", HTTP_RSP_CODE_TEMPORARY_REDIRECT, ssl_acc_response_code = "308", HTTP_RSP_CODE_PERMANENT_REDIRECT, ssl_acc_response_code = "400", HTTP_RSP_CODE_BAD_REQUEST, ssl_acc_response_code = "401", HTTP_RSP_CODE_UNAUTHORIZED, ssl_acc_response_code = "402", HTTP_RSP_CODE_PAYMENT_REQUIRED, ssl_acc_response_code = "403", HTTP_RSP_CODE_FORBIDDEN, ssl_acc_response_code = "404", HTTP_RSP_CODE_NOT_FOUND, ssl_acc_response_code = "405", HTTP_RSP_CODE_METHOD_NOT_ALLOWED, ssl_acc_response_code = "406", HTTP_RSP_CODE_NOT_ACCEPTABLE, ssl_acc_response_code = "407", HTTP_RSP_CODE_PROXY_AUTHENTICATION_REQUIRED, ssl_acc_response_code = "408", HTTP_RSP_CODE_REQUEST_TIMEOUT, ssl_acc_response_code = "409", HTTP_RSP_CODE_CONFLICT, ssl_acc_response_code = "410", 
HTTP_RSP_CODE_GONE, ssl_acc_response_code = "411", HTTP_RSP_CODE_LENGTH_REQUIRED, ssl_acc_response_code = "412", HTTP_RSP_CODE_PRECONDITION_FAILED, ssl_acc_response_code = "413", HTTP_RSP_CODE_CONTENT_TOO_LARGE, ssl_acc_response_code = "414", HTTP_RSP_CODE_URI_TOO_LONG, ssl_acc_response_code = "415", HTTP_RSP_CODE_UNSUPPORTED_MEDIA_TYPE, ssl_acc_response_code = "416", HTTP_RSP_CODE_RANGE_NOT_SATISFIABLE, ssl_acc_response_code = "417", HTTP_RSP_CODE_EXPECTATION_FAILED, ssl_acc_response_code = "421", HTTP_RSP_CODE_MISDIRECTED_REQUEST, ssl_acc_response_code = "422", HTTP_RSP_CODE_UNPROCESSABLE_CONTENT, ssl_acc_response_code = "423", HTTP_RSP_CODE_LOCKED, ssl_acc_response_code = "424", HTTP_RSP_CODE_FAILED_DEPENDENCY, ssl_acc_response_code = "425", HTTP_RSP_CODE_TOO_EARLY, ssl_acc_response_code = "426", HTTP_RSP_CODE_UPGRADE_REQUIRED, ssl_acc_response_code = "428", HTTP_RSP_CODE_PRECONDITION_REQUIRED, ssl_acc_response_code = "429", HTTP_RSP_CODE_TOO_MANY_REQUESTS, ssl_acc_response_code = "431", HTTP_RSP_CODE_REQUEST_HEADER_FIELDS_TOO_LARGE, ssl_acc_response_code = "451", HTTP_RSP_CODE_UNAVAILABLE_FOR_LEGAL_REASONS, ssl_acc_response_code = "500", HTTP_RSP_CODE_INTERNAL_SERVER_ERROR, ssl_acc_response_code = "501", HTTP_RSP_CODE_NOT_IMPLEMENTED, ssl_acc_response_code = "502", HTTP_RSP_CODE_BAD_GATEWAY, ssl_acc_response_code = "503", HTTP_RSP_CODE_SERVICE_UNAVAILABLE, ssl_acc_response_code = "504", HTTP_RSP_CODE_GATEWAY_TIMEOUT, ssl_acc_response_code = "505", HTTP_RSP_CODE_HTTP_VERSION_NOT_SUPPORTED, ssl_acc_response_code = "506", HTTP_RSP_CODE_VARIANT_ALSO_NEGOTIATES, ssl_acc_response_code = "507", HTTP_RSP_CODE_INSUFFICIENT_STORAGE, ssl_acc_response_code = "508", HTTP_RSP_CODE_LOOP_DETECTED, ssl_acc_response_code = "511", HTTP_RSP_CODE_NETWORK_AUTHENTICATION_REQUIRED, ssl_acc_response_code = null, null, to_string(ssl_acc_response_code));
+
+call f5_waf_generic_fields
+// Rule
+| filter _raw_log ~= "Rule"
+| alter
+ // General Rule info
+ rule_name = arrayindex(regextract(_raw_log, "Rule\s+(\S+)\s+\<\S+\>:"), 0),
+ rule_operation = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<(\S+)\>:"), 0),
+ // CLIENTSSL_HANDSHAKE
+ rule_clientSSLhandshake_source_ip = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<\S+\>\:\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\S+\:\S+"), 0),
+ rule_clientSSLhandshake_tls_alg = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<\S+\>\:\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\S+)\:\S+"), 0),
+ rule_clientSSLhandshake_tls_version = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<\S+\>\:\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\S+\:(\S+)"), 0),
+ // HTTP_REQUEST
+ rule_httpRequest_target_host = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<\S+\>\:.*Requested\s+Host\s+=\s+(\S+)"), 0),
+ rule_httpRequest_uri = arrayindex(regextract(_raw_log, "Rule\s+\S+\s+\<\S+\>\:.*Request\s+URI\s+\=\s+(\S+)"), 0)
+| alter
+ rule_httpRequest_target_host_ip = if(rule_httpRequest_target_host ~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+", arrayindex(regextract(rule_httpRequest_target_host, "(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\:\d+"), 0), null),
+ rule_httpRequest_target_host_port = if(rule_httpRequest_target_host ~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+", to_integer(arrayindex(regextract(rule_httpRequest_target_host, "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:(\d+)"), 0)), null),
+ rule_httpRequest_target_host_name = if(rule_httpRequest_target_host !~= "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d+", rule_httpRequest_target_host, null)
+| alter
+ xdm.network.rule = rule_name,
+ xdm.event.operation_sub_type = rule_operation,
+ xdm.source.ipv4 = rule_clientSSLhandshake_source_ip,
+ xdm.network.tls.client_certificate.algorithm = rule_clientSSLhandshake_tls_alg,
+ xdm.network.tls.client_certificate.version = rule_clientSSLhandshake_tls_version,
+ xdm.target.ipv4 = rule_httpRequest_target_host_ip,
+ xdm.target.port = rule_httpRequest_target_host_port,
+ xdm.target.url = rule_httpRequest_uri,
+ xdm.target.host.hostname = rule_httpRequest_target_host_name;
+
+
+call f5_waf_generic_fields
+// General OR Not mentioned Event IDs Logs
+| filter _raw_log !~= "Rule|\[ssl_acc\]|\[ssl_req\]|01260006|01071432|014f0005|01071681|01071682|01260013|01230140|0107143c|0107142f|01260009|01420002"
+| alter
+ get_description_session = arrayindex(regextract(_raw_log, "Start\S+\s+Session\s+(\d+)\s+of\s+user\s\S+\."), 0),
+ get_description_user_session = arrayindex(regextract(_raw_log, "Start\S+\s+Session\s+\d+\s+of\s+user\s(\S+)\."), 0),
+ get_description_user_cmd = arrayindex(regextract(_raw_log, "\((\S+)\)\sCMD\s\([^\)]+"), 0),
+ get_description_cmd_command = arrayindex(regextract(_raw_log, "\(\S+\)\sCMD\s\(([^\)]+)"), 0),
+ get_description_error_source_ip = arrayindex(regextract(_raw_log, "Did\s+not\s+receive\s+identification\s+string\s+from\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s+port\s+\d+"), 0),
+ get_description_error_source_port = to_integer(arrayindex(regextract(_raw_log, "Did\s+not\s+receive\s+identification\s+string\s+from\s+\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+port\s+(\d+)"), 0))
+| alter
+ xdm.session_context_id = get_description_session,
+ xdm.source.user.username = coalesce(get_description_user_session, get_description_user_cmd),
+ xdm.source.process.command_line = get_description_cmd_command,
+ xdm.source.ipv4 = get_description_error_source_ip,
+ xdm.source.port = get_description_error_source_port;
\ No newline at end of file
diff --git a/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.yml b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.yml
new file mode 100644
index 000000000000..05776ad2bc1b
--- /dev/null
+++ b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF.yml
@@ -0,0 +1,6 @@
+fromversion: 8.4.0
+id: F5_BIG-IP_Advanced_WAF_ModelingRule
+name: F5 BIG-IP Advanced WAF Modeling Rule
+rules: ''
+schema: ''
+tags: ''
\ No newline at end of file
diff --git a/Packs/cisco-meraki/ModelingRules/CiscoMerakiModelingRules/CiscoMerakiModelingRules_schema.json b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF_schema.json
similarity index 71%
rename from Packs/cisco-meraki/ModelingRules/CiscoMerakiModelingRules/CiscoMerakiModelingRules_schema.json
rename to Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF_schema.json
index 4707da9cf953..74b8eda6059b 100644
--- a/Packs/cisco-meraki/ModelingRules/CiscoMerakiModelingRules/CiscoMerakiModelingRules_schema.json
+++ b/Packs/F5BigIPAWAF/ModelingRules/F5BigIPAWAF/F5BigIPAWAF_schema.json
@@ -1,8 +1,8 @@
{
- "Cisco_Meraki_raw": {
+ "f5_waf_raw": {
"_raw_log": {
"type": "string",
"is_array": false
}
- }
-}
+ }
+ }
\ No newline at end of file
diff --git a/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.xif b/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.xif
new file mode 100644
index 000000000000..0baf9b18d906
--- /dev/null
+++ b/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.xif
@@ -0,0 +1,7 @@
+[INGEST:vendor="f5", product="waf", target_dataset="f5_waf_raw", no_hit=keep]
+filter _raw_log ~= "\<\d+\>\s*\d{4}\-\d{2}\-\d{2}T\d{2}\:\d{2}\:\d{2}[+-]\d{2}\:\d{2}"
+| alter
+ tmp_timestamp = arrayindex(regextract(_raw_log,"\d{4}\-\d{2}\-\d{2}T\d{2}\:\d{2}\:\d{2}[+-]\d{2}\:\d{2}"),0)
+| alter
+ _time = parse_timestamp("%Y-%m-%dT%H:%M:%S%Ez", tmp_timestamp)
+| fields -tmp_timestamp;
\ No newline at end of file
diff --git a/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.yml b/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.yml
new file mode 100644
index 000000000000..99852b379067
--- /dev/null
+++ b/Packs/F5BigIPAWAF/ParsingRules/F5BigIPAWAF/F5BigIPAWAF.yml
@@ -0,0 +1,6 @@
+name: F5 BIG-IP Advanced WAF Parsing Rule
+id: F5_BIG-IP_Advanced_WAF_ParsingRule
+fromversion: 8.4.0
+tags: []
+rules: ''
+samples: ''
\ No newline at end of file
diff --git a/Packs/F5BigIPAWAF/README.md b/Packs/F5BigIPAWAF/README.md
new file mode 100644
index 000000000000..70a7167047d9
--- /dev/null
+++ b/Packs/F5BigIPAWAF/README.md
@@ -0,0 +1,77 @@
+# F5 BIG-IP Advanced WAF
+
+This pack includes Cortex XSIAM content.
+
+<~XSIAM>
+
+## Configuration on Server Side
+You need to configure BIG-IP AWAF to forward Syslog messages.
+In order to do so, create a logging profile and set remote logging to the relevant server.
+
+* The product documentation is available [here](https://techdocs.f5.com/kb/en-us/products/big-ip_asm/manuals/product/asm-implementations-11-6-0/14.html).
+
+### Creating a logging profile
+1. On the Main tab, click **Security** → **Event Logs** → **Logging Profiles**.
+2. Click **Create**.
+3. In the *Profile Name* field, type a unique name for the profile.
+4. Select the **Application Security** checkbox.
+5. On the *Application Security* tab, for *Configuration*, select **Advanced**.
+6. Select the **Remote Storage** checkbox.
+7. Click **Finished**.
+
+### Setting Remote Logging
+1. Connect to the BIG-IP web UI and log in with administrative rights.
+2. Navigate to **Security** → **Event Logs** → **Logging Profiles**.
+3. Click the name of the logging profile for which you want to set up remote logging.
+4. Select the **Remote Storage** checkbox.
+5. From the *Remote Storage Type* list, select **Remote**.
+6. For the *Protocol* setting, select **TCP**.
+7. For *Server Addresses*, type the IP Address, Port Number (default is 514), and click **Add**.
+8. Click **Finished**.
+
+### Supported Timestamp Ingestion
+Timestamp ingestion is supported for the format: **%Y-%m-%dT%H:%M:%S%Ez** (yyyy-mm-ddTHH:MM:SS+ZZ:ZZ).
+In order to configure the required timestamp for syslog messages, follow these instructions:
+
+* The product documentation is available [here](https://my.f5.com/manage/s/article/K02138840).
+
+1. Log in to the BIG-IP command line.
+2. Use a Linux editor to edit the syslog-ng configuration, in this case using *nano* editor.
+```bash
+ nano /etc/syslog-ng/syslog-ng.conf
+```
+3. Add **ts_format(iso);** at the last line inside **options** section.
+```bash
+ options {
+ dir_perm(0755);
+ perm(0600);
+ chain_hostnames(no);
+ keep_hostname(yes);
+ stats_freq(0);
+ log_fifo_size(2048);
+ ts_format(iso); --> !!!
+ };
+```
+4. Restart syslog-ng service.
+```bash
+ bigstart restart syslog-ng
+```
+
+## Collect Events from Vendor
+In order to use the collector, use the [Broker VM](#broker-vm) option.
+
+### Broker VM
+To create or configure the Broker VM, use the information described [here](https://docs-cortex.paloaltonetworks.com/r/Cortex-XDR/Cortex-XDR-Pro-Administrator-Guide/Configure-the-Broker-VM).
+
+You can configure the specific vendor and product for this instance.
+
+1. Navigate to **Settings** → **Configuration** → **Data Broker** → **Broker VMs**.
+2. Go to the *Apps* column under the *Brokers* tab and add the *Syslog Collector* app for the relevant broker instance. If the app already exists, hover over it and click **Configure**.
+3. Click **Add New** for adding a new syslog data source.
+4. When configuring the new syslog data source, set the following values:
+ | Parameter | Value
+ | :--- | :---
+ | `Vendor` | Enter **f5**.
+ | `Product` | Enter **waf**.
+
+</~XSIAM>
\ No newline at end of file
diff --git a/Packs/F5BigIPAWAF/pack_metadata.json b/Packs/F5BigIPAWAF/pack_metadata.json
new file mode 100644
index 000000000000..c4ef444b1b08
--- /dev/null
+++ b/Packs/F5BigIPAWAF/pack_metadata.json
@@ -0,0 +1,18 @@
+{
+ "name": "F5 BIG-IP Advanced WAF",
+ "description": "Used for protecting applications with behavioral analytics, layer 7 DoS mitigation, application-layer encryption of sensetive data, threat intelligence services and API security.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "Analytics & SIEM"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "marketplaces": [
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/FeedCyCognito/ReleaseNotes/1_0_21.md b/Packs/FeedCyCognito/ReleaseNotes/1_0_21.md
new file mode 100644
index 000000000000..943b22850fdd
--- /dev/null
+++ b/Packs/FeedCyCognito/ReleaseNotes/1_0_21.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### FeedCyCognitoGetAssetEndpoint
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/FeedCyCognito/Scripts/FeedCyCognitoGetAssetEndpoint/FeedCyCognitoGetAssetEndpoint.yml b/Packs/FeedCyCognito/Scripts/FeedCyCognitoGetAssetEndpoint/FeedCyCognitoGetAssetEndpoint.yml
index 4f90b86a906a..1d00e253a91f 100644
--- a/Packs/FeedCyCognito/Scripts/FeedCyCognitoGetAssetEndpoint/FeedCyCognitoGetAssetEndpoint.yml
+++ b/Packs/FeedCyCognito/Scripts/FeedCyCognitoGetAssetEndpoint/FeedCyCognitoGetAssetEndpoint.yml
@@ -11,7 +11,7 @@ enabled: true
script: ''
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.8.36650
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.2.0
tests:
diff --git a/Packs/FeedCyCognito/pack_metadata.json b/Packs/FeedCyCognito/pack_metadata.json
index 155af49b9001..e90cb5643ba0 100644
--- a/Packs/FeedCyCognito/pack_metadata.json
+++ b/Packs/FeedCyCognito/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "CyCognito Feed",
"description": "Provides a feed integration to retrieve the discovered assets.",
"support": "partner",
- "currentVersion": "1.0.20",
+ "currentVersion": "1.0.21",
"author": "CyCognito",
"url": "",
"email": "support@cycognito.com",
diff --git a/Packs/FeedDHS/ReleaseNotes/2_0_39.md b/Packs/FeedDHS/ReleaseNotes/2_0_39.md
new file mode 100644
index 000000000000..120be9a42463
--- /dev/null
+++ b/Packs/FeedDHS/ReleaseNotes/2_0_39.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### DHS Feed v2
+
+Fixed an issue in the **TAXII2ApiModule** related to the *TAXII2 server* integration.
diff --git a/Packs/FeedDHS/pack_metadata.json b/Packs/FeedDHS/pack_metadata.json
index 230fc423f7e9..71a97e28d91e 100644
--- a/Packs/FeedDHS/pack_metadata.json
+++ b/Packs/FeedDHS/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "DHS Feed",
"description": "Provides cyber threat indicators from the Cybersecurity and Infrastructure Security Agency’s (CISA’s) free Automated Indicator Sharing (AIS) by the Department of Homeland Security (DHS).",
"support": "xsoar",
- "currentVersion": "2.0.38",
+ "currentVersion": "2.0.39",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.py b/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.py
index 56c98fc38959..81b6edbcf2fb 100644
--- a/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.py
+++ b/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.py
@@ -63,6 +63,7 @@
"sha-1": FeedIndicatorType.File,
"sha-256": FeedIndicatorType.File,
"file:hashes": FeedIndicatorType.File,
+ "vulnerability": FeedIndicatorType.CVE,
"attack-pattern": ThreatIntel.ObjectsNames.ATTACK_PATTERN,
"malware": ThreatIntel.ObjectsNames.MALWARE,
"tool": ThreatIntel.ObjectsNames.TOOL,
@@ -176,6 +177,7 @@ class STIX2Parser:
"windows-registry-key",
"relationship",
"extension-definition",
+ "vulnerability",
]
def __init__(self):
@@ -246,14 +248,20 @@ def parse_indicator(self, indicator_obj: dict[str, Any]) -> list[dict[str, Any]]
indicators.extend(
self.get_indicators_from_indicator_groups(
- indicator_groups, indicator_obj, STIX_2_TYPES_TO_CORTEX_TYPES, field_map,
+ indicator_groups,
+ indicator_obj,
+ STIX_2_TYPES_TO_CORTEX_TYPES,
+ field_map,
)
)
cidr_groups = self.extract_indicator_groups_from_pattern(trimmed_pattern, self.cidr_regexes)
indicators.extend(
self.get_indicators_from_indicator_groups(
- cidr_groups, indicator_obj, STIX_2_TYPES_TO_CORTEX_CIDR_TYPES, field_map,
+ cidr_groups,
+ indicator_obj,
+ STIX_2_TYPES_TO_CORTEX_CIDR_TYPES,
+ field_map,
)
)
self.change_ip_to_cidr(indicators)
@@ -540,6 +548,7 @@ def parse_intrusion_set(intrusion_set_obj: dict[str, Any]) -> list[dict[str, Any
"primary_motivation": intrusion_set_obj.get("primary_motivation", ""),
"secondary_motivations": intrusion_set_obj.get("secondary_motivations", []),
"publications": publications,
+ "tags": list(set(intrusion_set_obj.get("labels", []))),
}
intrusion_set["customFields"] = fields
return [intrusion_set]
@@ -651,6 +660,31 @@ def parse_sco_windows_registry_key_indicator(registry_key_obj: dict[str, Any]) -
)
return registry_key_indicator
+ @staticmethod
+ def parse_vulnerability(vulnerability_obj: dict[str, Any]) -> list[dict[str, Any]]:
+ """
+ Parses vulnerability indicator type to cortex format.
+
+ Args:
+ vulnerability_obj (dict): indicator as an observable object of vulnerability type.
+ """
+ vulnerability = {
+ "value": vulnerability_obj.get("name"),
+ "indicator_type": FeedIndicatorType.CVE,
+ "rawJSON": vulnerability_obj,
+ }
+ fields = {
+ "stixid": vulnerability_obj.get("id"),
+ "firstseenbysource": vulnerability_obj.get("created"),
+ "modified": vulnerability_obj.get("modified"),
+ "description": vulnerability_obj.get("description", ""),
+ "external_references": vulnerability_obj.get("external_references", []),
+ "tags": list(set(vulnerability_obj.get("labels", []))),
+ }
+
+ vulnerability["customFields"] = fields
+ return [vulnerability]
+
def parse_relationships(self, relationships_lst: list[dict[str, Any]]) -> dict[str, Any]:
"""Parse the Relationships objects retrieved from the feed.
@@ -691,7 +725,7 @@ def parse_relationships(self, relationships_lst: list[dict[str, Any]]) -> dict[s
for ref in b_object["rawJSON"].get("external_references", [])
if ref.get("source_name") == "mitre-attack"
)
- a_object["customFields"]["tags"].append(mitre_id)
+ a_object["customFields"].setdefault("tags", []).append(mitre_id)
mapping_fields = {
"lastseenbysource": relationships_object.get("modified"),
@@ -746,6 +780,7 @@ def load_stix_objects_from_envelope(self, envelopes: dict[str, Any]):
"mutex": self.parse_sco_mutex_indicator,
"user-account": self.parse_sco_account_indicator,
"windows-registry-key": self.parse_sco_windows_registry_key_indicator,
+ "vulnerability": self.parse_vulnerability,
}
indicators = self.parse_dict_envelope(envelopes, parse_stix_2_objects)
return indicators
@@ -935,7 +970,7 @@ def get_indicators_command(
indicators = client.fetch_indicators_from_stream(
params["feedly_stream_id"], newer_than=time.time() - 24 * 3600, limit=int(args.get("limit", "10"))
)
- demisto.createIndicators(indicators)
+ demisto.createIndicators(indicators) # type: ignore
return CommandResults(readable_output=f"Created {len(indicators)} indicators.")
@@ -949,7 +984,7 @@ def fetch_indicators_command(client: Client, params: dict[str, str], context: di
Indicators.
"""
return client.fetch_indicators_from_stream(
- params["feedly_stream_id"], newer_than=float(context.get("last_successful_run", time.time() - 7 * 24 * 3600)),
+ params["feedly_stream_id"], newer_than=float(context.get("last_successful_run", time.time() - 7 * 24 * 3600))
)
@@ -979,7 +1014,7 @@ def main(): # pragma: no cover
now = time.time()
indicators = fetch_indicators_command(client, params, demisto.getLastRun())
for indicators_batch in batch(indicators, batch_size=2000):
- demisto.createIndicators(indicators_batch)
+ demisto.createIndicators(indicators_batch) # type: ignore
demisto.setLastRun({"last_successful_run": str(now)})
else:
diff --git a/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.yml b/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.yml
index ab8cac63bf3d..8b4b6a214bd6 100644
--- a/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.yml
+++ b/Packs/FeedFeedly/Integrations/FeedFeedly/FeedFeedly.yml
@@ -117,7 +117,7 @@ script:
description: Gets indicators from the feed.
execution: false
name: feedly-get-indicators
- dockerimage: demisto/python3:3.10.13.84405
+ dockerimage: demisto/python3:3.10.14.94490
feed: true
isfetch: false
longRunning: false
diff --git a/Packs/FeedFeedly/ReleaseNotes/1_0_3.md b/Packs/FeedFeedly/ReleaseNotes/1_0_3.md
new file mode 100644
index 000000000000..bea2b373a523
--- /dev/null
+++ b/Packs/FeedFeedly/ReleaseNotes/1_0_3.md
@@ -0,0 +1,5 @@
+#### Integrations
+##### Feedly Feed
+- Updated the Docker image to: *demisto/python3:3.10.14.94490*.
+- Fixed an issue with the labels of the intrusion sets.
+- Added support for vulnerabilities.
diff --git a/Packs/FeedFeedly/pack_metadata.json b/Packs/FeedFeedly/pack_metadata.json
index d66fb1d2bb78..7e4205acc8c2 100644
--- a/Packs/FeedFeedly/pack_metadata.json
+++ b/Packs/FeedFeedly/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Feedly",
"description": "Import Articles from Feedly with enriched IOCs",
"support": "partner",
- "currentVersion": "1.0.2",
+ "currentVersion": "1.0.3",
"author": "Feedly",
"url": "https://feedly.com/i/landing/threatIntelligence",
"email": "support@feedly.com",
diff --git a/Packs/FeedGitHub/.pack-ignore b/Packs/FeedGitHub/.pack-ignore
new file mode 100644
index 000000000000..34ade11dcf15
--- /dev/null
+++ b/Packs/FeedGitHub/.pack-ignore
@@ -0,0 +1,2 @@
+[file:FeedGitHub.yml]
+ignore=IN122
\ No newline at end of file
diff --git a/Packs/FeedGitHub/.secrets-ignore b/Packs/FeedGitHub/.secrets-ignore
new file mode 100644
index 000000000000..e8427de249c6
--- /dev/null
+++ b/Packs/FeedGitHub/.secrets-ignore
@@ -0,0 +1,29 @@
+195.123.227.186
+134.209.37.102
+117.141.112.155
+23.129.64.217
+45.142.213.11
+157.245.250.190
+144.91.106.47
+141.98.81.208
+51.81.53.159
+104.168.173.252
+173.212.206.89
+67.207.94.201
+89.163.242.76
+51.75.71.205
+140.224.183.58
+161.35.22.86
+45.143.220.246
+11.22.33.444
+11.22.33.4441
+22.33.44.555
+12.34.56.789
+123.234.111.134
+11.11.22.234
+99.99.99.99
+111.111.111.111
+watercourse71@gateway2d.dhl.com
+11.22.33.444est.test.test
+4101535@gmail.com
+http://www.test.test.test
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.py b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.py
new file mode 100644
index 000000000000..255d0b5fb5cd
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.py
@@ -0,0 +1,604 @@
+import demistomock as demisto
+from CommonServerPython import *
+from TAXII2ApiModule import *
+import plyara
+import plyara.utils
+import tldextract
+
CONTEXT_PREFIX = "GITHUB"  # Prefix for context outputs (e.g. GITHUB.Indicators).
RAW_RESPONSE = []  # Refreshed by get_content_files_from_repo; surfaced as raw_response in command results.
+
+
class Client(BaseClient):
    """HTTP client for the GitHub REST API, scoped to a single {owner}/{repo} repository."""

    def __init__(self, base_url: str, verify: bool, proxy: bool, owner: str, repo: str, headers: dict):
        # Every request made through this client is relative to /repos/{owner}/{repo}.
        base_url = urljoin(base_url, f"/repos/{owner}/{repo}")
        super().__init__(base_url=base_url, verify=verify, proxy=proxy, headers=headers)

    def get_commits_between_dates(self, since, until) -> list:
        """
        List the SHAs of commits whose dates fall between `since` and `until`.

        Args:
            since: Start date string parseable by dateparser (e.g. "90 days ago").
            until: End date string parseable by dateparser (e.g. "now").

        Returns:
            list: Commit SHAs in the API's order (newest first). When either
            date fails to parse, the full commit list is returned unfiltered.
        """
        parsed_since = dateparser.parse(since)
        parsed_until = dateparser.parse(until)
        if not (parsed_since and parsed_until):
            # Unparseable dates: fall back to listing all commits.
            response = self._http_request("GET", full_url=f"{self._base_url}/commits", resp_type="response")
        else:
            params = {
                "since": parsed_since.isoformat(),
                "until": parsed_until.isoformat(),
            }
            response = self._http_request("GET", full_url=f"{self._base_url}/commits", params=params, resp_type="response")
        demisto.debug(f"The base get_base_head_commits_sha() raw response: {response}")
        return [commit.get("sha") for commit in self._extract_commits(response)]

    def _extract_commits(self, response) -> list:
        """
        Collect all commit objects from a (possibly paginated) /commits response.

        Follows the `Link: rel="next"` headers until the last page. The previous
        implementation re-appended the current page on every loop iteration,
        which duplicated commits whenever three or more pages existed; each
        page is now appended exactly once.
        """
        all_commits = list(response.json())
        while "next" in response.links:
            response = self._http_request("GET", full_url=response.links["next"]["url"], resp_type="response")
            all_commits.extend(response.json())
            demisto.debug(f"There are many commits currently bringing them all..., currently exist:{response}")
        return all_commits

    def get_files_between_commits(self, base: str, head: str, include_base_commit: bool) -> tuple[list[dict[str, str]], str]:  # pragma: no cover # noqa: E501
        """
        Retrieves the list of files changed between two commits and the SHA of the base commit.

        This function compares two commits in a repository to determine the files that have changed between them.
        Depending on the `include_base_commit` flag, it adjusts the comparison to include the base commit or not.
        If the comparison fails due to a "Not Found" error, the function handles this specific case by fetching
        the indicators including the first commit in the repository.

        :type base: ``str``
        :param base: The SHA of the base commit.

        :type head: ``str``
        :param head: The SHA of the head commit.

        :type include_base_commit: ``bool``
        :param include_base_commit: Flag to indicate if the base commit should be included in the comparison.

        :return: A tuple containing a list of files changed between the commits and the SHA of the base commit.
        :rtype: ``tuple[list, str]``

        :raises Exception: If an error occurs during the HTTP request.
        """
        url_suffix = f"/compare/{base}...{head}" if not include_base_commit else f"/compare/{base}^...{head}"
        try:
            response = self._http_request("GET", url_suffix)
        except Exception as e:
            if "Not Found" in str(e):
                demisto.debug("in get_files_between_commits func: Case: fetch indicators including the first commit in the repo")
                # "{base}^" has no parent for the repo's first commit: compare without
                # the parent and merge in the base commit's own files instead.
                response = self._http_request("GET", f"/compare/{base}...{head}")
                response["files"] += self._http_request("GET", f"/commits/{base}")["files"]
            else:
                demisto.error(f"in get_files_between_commits func error message: {e}")
                raise
        demisto.debug(f"The full response from 'get base...head' :{response}")
        if len(response["commits"]) == 0:
            base_sha = response["base_commit"]["sha"]
        else:
            base_sha = response["commits"][-1].get("sha")
        return response["files"], base_sha
+
+
def filter_out_files_by_status(commits_files: list, statuses=("added", "modified")) -> list:
    """
    Keep the raw URLs of commit files whose status is in `statuses`.

    Args:
        commits_files (list): Dictionaries describing files of a commit
            (GitHub compare/commit API shape; keys include "status" and "raw_url").
        statuses (tuple): File statuses to keep. Defaults to ("added", "modified").

    Returns:
        list: The "raw_url" values of the matching files.
            (The original annotated the accumulator as list[dict], but the
            appended values are the raw-URL strings.)
    """
    return [file.get("raw_url") for file in commits_files if file.get("status") in statuses]
+
+
def get_content_files_from_repo(client: Client, relevant_files: list[str], params: dict):
    """
    Download the content of every relevant file whose extension is configured.

    Args:
        client (Client): Client used to issue the HTTP requests.
        relevant_files (list): Raw URLs of candidate files.
        params (dict): Integration parameters ("feedType", "extensions_to_fetch").

    Returns:
        list: One {url: content} dict per fetched file. Also refreshes the
        module-level RAW_RESPONSE with the raw file contents.
    """
    global RAW_RESPONSE
    resp_type = "json" if params.get("feedType") == "STIX" else "text"
    extensions_to_fetch = argToList(params.get("extensions_to_fetch") or [])
    wanted_urls = [url for url in relevant_files if any(url.endswith(ext) for ext in extensions_to_fetch)]
    raw_data_files = []
    for url in wanted_urls:
        raw_data_files.append({url: client._http_request("GET", full_url=url, resp_type=resp_type)})
    demisto.debug(f"list of all files raw_data :{raw_data_files}")
    RAW_RESPONSE = [list(file.values()) for file in raw_data_files]
    return raw_data_files
+
+
def get_commits_files(client: Client, base_commit, head_commit, is_first_fetch: bool) -> tuple[list, str]:
    """
    Return the relevant (added/modified) file URLs between two commits plus the repo head SHA.

    Args:
        client (Client): Client used to query the repository.
        base_commit: SHA of the base commit.
        head_commit: SHA (or branch name) of the head commit.
        is_first_fetch (bool): Whether this is the first fetch (include the base commit).

    Returns:
        tuple: (relevant file URLs, SHA of the current repository head). Falls
        back to ([], base_commit) when the comparison yields nothing (IndexError).
    """
    try:
        changed_files, head_sha = client.get_files_between_commits(base_commit, head_commit, is_first_fetch)
    except IndexError:
        return [], base_commit
    return filter_out_files_by_status(changed_files), head_sha
+
+
def parse_and_map_yara_content(content_item: dict[str, str]) -> list:
    """
    Parse YARA rules from a single file's text and map them to XSOAR indicators.

    Args:
        content_item (dict): Single-entry mapping of {file_url: file_text}, where
            the text contains one or more YARA rules.

    Returns:
        list: Indicator dicts (value, type "YARA Rule", fields, rawJSON, ...) for
        every rule that parsed successfully; unparseable rules are logged and skipped.
    """
    text_content = list(content_item.values())[0]
    file_path = list(content_item.keys())[0]
    parsed_rules = []
    parser = plyara.Plyara()
    raw_rules = parser.parse_string(text_content)
    current_time = datetime.now().isoformat()
    for parsed_rule in raw_rules:
        try:
            # plyara returns metadata as a list of single-key dicts; flatten it.
            metadata = {key: value for d in parsed_rule["metadata"] for key, value in d.items()}
            value_ = parsed_rule["rule_name"]
            type_ = "YARA Rule"
            mapper = {
                "value": value_,
                "description": metadata.get("description", ""),
                "author": metadata.get("author", ""),
                "rulereference": metadata.get("reference", ""),
                "sourcetimestamp": metadata.get("date", ""),
                "ruleid": metadata.get("id", ""),
                "rulestrings": make_grid_layout(parsed_rule.get("strings", {})),
                "condition": " ".join(parsed_rule["condition_terms"]),
                "references": file_path,
                "rawrule": f"```\n {plyara.utils.rebuild_yara_rule(parsed_rule)} \n```",
            }
            indicator_obj = {
                "value": value_,
                "type": type_,
                "service": "github",
                "fields": mapper,
                "score": Common.DBotScore.NONE,
                "firstseenbysource": current_time,
                "rawJSON": {"value": value_, "type": type_},
            }
            parsed_rules.append(indicator_obj)
        except Exception as e:
            # Typo fix: was "Rull:".
            demisto.error(f"Rule: {parsed_rule} cannot be processed. Error Message: {e}")
            continue
    return parsed_rules
+
+
def make_grid_layout(list_dict):
    """Convert plyara string entries into the grid-field layout used by YARA rule indicators."""
    grid_rows = []
    for entry in list_dict:
        grid_rows.append(
            {
                "index": entry.get("name"),
                "string": entry.get("value"),
                "type": entry.get("type"),
                "modifiers": entry.get("modifiers"),
            }
        )
    return grid_rows
+
+
def get_yara_indicators(content: list[dict]):
    """
    Parse every content item's YARA rules into XSOAR indicator dicts.

    Args:
        content (list): List of {file_url: file_text} dicts containing YARA rules.

    Returns:
        list: All parsed/mapped YARA rule indicators across the content items.
    """
    indicators: list = []
    for content_item in content:
        indicators.extend(parse_and_map_yara_content(content_item))
    return indicators
+
+
def detect_domain_type(domain: str):
    """
    Classify a domain-looking value as Domain or DomainGlob via tldextract.

    Args:
        domain (str): The candidate domain value.

    Returns:
        Optional[FeedIndicatorType]: DomainGlob when the value contains "*",
        Domain when it has a recognized suffix, otherwise None (including on
        any tldextract failure, which is only logged).
    """
    try:
        # cache_dir=False / suffix_list_urls=None: use the bundled suffix list,
        # no network access and no on-disk cache.
        extractor = tldextract.TLDExtract(cache_dir=False, suffix_list_urls=None)  # type: ignore
        if extractor(domain).suffix:
            return FeedIndicatorType.DomainGlob if "*" in domain else FeedIndicatorType.Domain
    except Exception:
        demisto.debug(f"tldextract failed to detect indicator type. indicator value: {domain}")
    return None
+
+
# IPv4 with an optional ":port". The named groups "ipv4" and "port" are consumed
# by extract_text_indicators through regex_with_groups ("(?P(" without a group
# name is invalid regex and would raise re.error on first use; names restored).
ipv4Regex = (
    r"(?P<ipv4>(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))[:]?(?P<port>\d+)?"  # noqa: E501
)
# IPv4 network in CIDR notation (prefix length /0 - /32).
ipv4cidrRegex = r"([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))"
# Full / compressed / IPv4-mapped / zone-indexed IPv6 addresses.
ipv6Regex = r"(?:(?:[0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:(?:(:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))"  # noqa: E501
# IPv6 network in CIDR notation (prefix length /0 - /128).
ipv6cidrRegex = r"s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:)))(%.+)?s*(\/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8]))"  # noqa: E501
+
# (regex, indicator type) pairs matched as-is; the whole match (group 0) becomes
# the indicator value. emailRegex / cveRegex / md5Regex / sha*Regex come from
# CommonServerPython.
regex_indicators = [
    (ipv4cidrRegex, FeedIndicatorType.CIDR),
    (ipv6Regex, FeedIndicatorType.IPv6),
    (ipv6cidrRegex, FeedIndicatorType.IPv6CIDR),
    (emailRegex, FeedIndicatorType.Email),
    (re.compile(cveRegex, re.M), FeedIndicatorType.CVE),
    (md5Regex, FeedIndicatorType.File),
    (sha1Regex, FeedIndicatorType.File),
    (sha256Regex, FeedIndicatorType.File),
    (sha512Regex, FeedIndicatorType.File),
]

# (regex, type-or-callable, named group) triples: the named capture group holds the
# indicator value; for domains the type is resolved per match via detect_domain_type.
regex_with_groups = [
    (ipv4Regex, FeedIndicatorType.IP, "ipv4"),
    (urlRegex, FeedIndicatorType.URL, "url_with_path"),
    (domainRegex, detect_domain_type, "fqdn"),
]
+
+
def extract_text_indicators(content: dict[str, str], params):
    """
    Extract IOC indicators from one file's text using the module-level regexes.

    Args:
        content (dict): Single-entry mapping of {file_url: file_text}.
        params (dict): Integration parameters (used for owner/repo tagging).

    Returns:
        list: Indicator dicts in XSOAR format (see arrange_iocs_indicator_to_xsoar).
    """
    text_content = list(content.values())[0]
    file_path = list(content.keys())[0]
    # Refang ("[.]" -> ".", "[@]" -> "@") so defanged IOCs match the regexes.
    text_content = text_content.replace("[.]", ".").replace("[@]", "@")
    indicators = []
    # Note: re.finditer always returns a (possibly empty) iterator, so the former
    # "if matches:" truthiness checks were no-ops and have been removed.
    for regex, type_ in regex_indicators:
        indicators += [{"value": match.group(0), "type": type_} for match in re.finditer(regex, text_content)]  # type: ignore
    for regex, type_, group_name in regex_with_groups:
        for match in re.finditer(regex, text_content):  # type: ignore
            if regex in (ipv4Regex, urlRegex):
                indicators.append({"value": match.group(group_name), "type": type_})
            elif regex == domainRegex:
                # For domains, type_ is the detect_domain_type callable.
                regex_type = type_(match.group(group_name)) if callable(type_) else type_
                if regex_type:
                    indicators.append({"value": match.group(group_name), "type": regex_type})
    return arrange_iocs_indicator_to_xsoar(file_path, indicators, params)
+
+
def arrange_iocs_indicator_to_xsoar(file_path: str, parsed_indicators: list, params: dict):
    """
    Wrap parsed {value, type} pairs in the XSOAR indicator structure.

    Args:
        file_path (str): URL of the file the indicators came from (stored as a reference).
        parsed_indicators (list): Dicts with "value" and "type" keys.
        params (dict): Integration parameters; "owner" and "repo" become tags.

    Returns:
        list: XSOAR-shaped indicator dicts with service/fields/rawJSON populated.
    """
    owner = params.get("owner", "")
    repo = params.get("repo", "")
    first_seen = datetime.now().isoformat()
    arranged = []
    for parsed in parsed_indicators:
        value = parsed.get("value")
        indicator_type = parsed.get("type")
        arranged.append(
            {
                "value": value,
                "type": indicator_type,
                "service": "github",
                "fields": {
                    "references": file_path,
                    "tags": {"owner": owner, "repo": repo},
                    "firstseenbysource": first_seen,
                },
                "rawJSON": {"value": value, "type": indicator_type},
            }
        )
    return arranged
+
+
def get_stix_indicators(repo_files_content):
    """Parse STIX envelopes/bundles from the fetched repository files into XSOAR indicators."""
    stix_parser = STIX2XSOARParser({})
    envelope_generator = create_stix_generator(repo_files_content)
    return stix_parser.load_stix_objects_from_envelope(envelope_generator)  # type: ignore
+
+
def identify_json_structure(json_data) -> Any:
    """
    Identifies the structure of JSON data based on its content.

    Args:
        json_data: The JSON data (dict or list) to classify.

    Returns:
        Union[str, Dict[str, Any], None]: "Bundle", "Envelope", a dict of the
        form {"objects": json_data}, or None when the structure is unrecognized.
    """
    if isinstance(json_data, dict):
        if json_data.get("bundle"):
            return "Bundle"
        if json_data.get("objects"):
            return "Envelope"
        if json_data.get("type") and json_data.get("id"):
            return "Envelope"
    # Empty-list guard: the original indexed json_data[0] unconditionally,
    # which raised IndexError for [].
    if isinstance(json_data, list) and json_data and all([json_data[0].get("type"), json_data[0].get("id")]):
        return {"objects": json_data}
    return None
+
+
def filtering_stix_files(content_files: list) -> list:
    """
    Keep only the items that look like STIX content.

    Args:
        content_files (list): Parsed JSON payloads (each iterable of candidates).

    Returns:
        list: Envelope/Bundle dicts as-is, plus bare object lists wrapped as
        {"objects": [...]} by identify_json_structure.
    """
    stix_files: list = []
    for file_content in content_files:
        for candidate in file_content:
            structure = identify_json_structure(candidate)
            if structure in ("Envelope", "Bundle"):
                stix_files.append(candidate)
            elif isinstance(structure, dict):
                stix_files.append(structure)
    return stix_files
+
+
def create_stix_generator(content_files: list[dict]):
    """
    Create a generator over the STIX files contained in the fetched content.

    Filters the raw {url: content} entries down to STIX-shaped payloads and
    wraps them in a generator that yields one file/object at a time.

    Args:
        content_files (list): List of single-entry {file_url: parsed_json} dicts.

    Returns:
        Generator: Yields each STIX file from the filtered list.
    """
    raw_contents = [list(content_file.values())[0] for content_file in content_files]
    return get_stix_files_generator(filtering_stix_files(raw_contents))
+
+
def get_stix_files_generator(json_files):
    """Yield each pre-filtered STIX file/object one at a time."""
    for json_file in json_files:
        yield json_file
+
+
def test_module(client: Client, params) -> str:
    """
    Validate the integration parameters and connectivity to the repository.

    Args:
        client: Client object.
        params: demisto.params().

    Returns:
        str: "ok" on success, otherwise a human-readable error description.
    """
    try:
        # Only validates that the date string parses; the value itself is unused here.
        dateparser.parse(params.get("fetch_since"))
    except Exception as e:
        return f"error in 'First fetch time' parameter: {e}"
    try:
        client._http_request("GET", full_url=client._base_url)
    except Exception as e:
        # Typo fixes: was "The error massage".
        if "Not Found" in str(e):
            return f"Not Found error please check the 'Owner / Repo' names The error message:{e}"
        elif "Bad credentials" in str(e):
            return f"Bad credentials error please check the API Token The error message:{e}"
        return f"{e}"
    return "ok"
+
+
def fetch_indicators(
    client: Client,
    last_commit_fetch,
    params,
    tlp_color: Optional[str] = None,
    feed_tags: Optional[List] = None,
    limit: int = -1,
) -> List[Dict]:
    """
    Fetches indicators from a GitHub repository using the provided client.

    Args:
        client (Client): The GitHub client used to fetch indicators.
        last_commit_fetch: The last commit fetched from the repository (falsy on first fetch).
        params (dict): Integration parameters ("fetch_since", "branch_head", ...).
        tlp_color (Optional[str]): The Traffic Light Protocol (TLP) color to assign to the indicators.
        feed_tags (Optional[List]): Tags to associate with the fetched indicators.
            (Changed from a mutable `[]` default; behavior is unchanged.)
        limit (int): The maximum number of indicators to fetch. Default is -1 (fetch all).

    Returns:
        List[Dict]: A list of dictionaries representing the fetched indicators.
        Also persists the last processed commit SHA via demisto.setLastRun.
    """
    feed_tags = feed_tags or []
    demisto.debug(f"Before fetch command last commit sha run: {last_commit_fetch}")
    since = params.get("fetch_since", "90 days ago")
    until = "now"
    is_first_fetch = not last_commit_fetch
    # On the first fetch, start from the oldest commit in the configured window.
    base_commit_sha = last_commit_fetch or client.get_commits_between_dates(since, until)[-1]
    head_commit = params.get("branch_head", "")
    iterator, last_commit_info = get_indicators(client, params, base_commit_sha, head_commit, is_first_fetch)
    indicators = []
    if limit > 0:
        iterator = iterator[:limit]

    for item in iterator:
        if feed_tags:
            item["fields"]["tags"] = feed_tags
        if tlp_color:
            item["fields"]["trafficlightprotocol"] = tlp_color
        indicators.append(item)
    demisto.debug(f"After fetch command last run: {last_commit_info}")
    if last_commit_info:
        demisto.setLastRun({"last_commit": last_commit_info})
    return indicators
+
+
def get_indicators(client: Client, params, base_commit_sha, head_commit, is_first_fetch: bool = True):
    """
    Collect indicators from files changed between two commits, parsed per feed type.

    Args:
        client (Client): Client used to query the repository.
        params (dict): Integration parameters ("feedType", "extensions_to_fetch", ...).
        base_commit_sha: SHA of the base commit of the range.
        head_commit: SHA (or branch name) of the head commit.
        is_first_fetch (bool): Whether the base commit itself should be included.

    Returns:
        tuple: (list of indicator dicts, SHA of the last processed commit).

    Raises:
        ValueError: When the fetched data cannot be parsed as indicators.
    """
    relevant_files, last_commit_info = get_commits_files(client, base_commit_sha, head_commit, is_first_fetch)
    feed_type = params.get("feedType", "")
    repo_files_content = get_content_files_from_repo(client, relevant_files, params)
    # Initialized up front so an unrecognized feed type yields an empty result
    # instead of an UnboundLocalError at the debug line below.
    indicators: list = []
    try:
        if feed_type == "YARA":
            indicators = get_yara_indicators(repo_files_content)

        elif feed_type == "STIX":
            indicators = get_stix_indicators(repo_files_content)

        elif feed_type == "IOCs":
            for file in repo_files_content:
                indicators += extract_text_indicators(file, params)

    except Exception as err:
        demisto.error(str(err))
        # Typo fix: was "Error massage".
        raise ValueError(f"Could not parse returned data as indicator. \n\nError message: {err}")
    demisto.debug(f"fetching {len(indicators)} indicators")
    return indicators, last_commit_info
+
+
def get_indicators_command(client: Client, params: dict, args: dict = {}) -> CommandResults:
    """Wrapper for retrieving indicators from the feed to the war-room.

    Args:
        client: Client object with request.
        params: demisto.params().
        args: demisto.args() — supports "since", "until" and "limit".
            (Note: the default dict is never mutated, so the shared default is safe here.)

    Returns:
        CommandResults: Human-readable table plus GITHUB.Indicators context output.

    Raises:
        ValueError: If the limit is not positive or indicator retrieval fails.
    """
    limit = arg_to_number(args.get("limit"))
    indicators: list = []
    try:
        if limit and limit <= 0:
            raise ValueError("Limit must be a positive number.")
        since = args.get("since", "7 days ago")
        until = args.get("until", "now")
        all_commits = client.get_commits_between_dates(since, until)
        if not all_commits:
            indicators = []
            human_readable = "#### No commits were found in the given time range"
            demisto.debug("No commits were found in the given time range")
        else:
            # Commits are returned newest-first: the last entry is the range base,
            # the first entry is the head.
            base_commit_sha = all_commits[-1]
            head_commit_sha = all_commits[0]
            indicators, _ = get_indicators(client, params, base_commit_sha, head_commit_sha)
            hr_indicators = []
            if limit and limit > 0:
                indicators = indicators[:limit]
            for indicator in indicators:
                hr_indicators.append(
                    {
                        "Value": indicator.get("value"),
                        "Type": indicator.get("type"),
                    }
                )

            human_readable = tableToMarkdown(
                "Indicators from GitHubFeed:", hr_indicators, headers=["Type", "Value"], removeNull=True
            )
            if not indicators:
                human_readable = "#### There are no indicators in the given timeframe"
        demisto.debug(f"human_readable for request indicators is: {human_readable}")
        demisto.debug(f"indicators: {indicators}")
        return CommandResults(
            outputs_prefix=CONTEXT_PREFIX + ".Indicators",
            outputs_key_field="githubfeed",
            raw_response=RAW_RESPONSE,
            outputs=indicators,
            readable_output=human_readable,
        )

    except Exception as err:
        demisto.error(str(err))
        # Typo fix: was "Error massage".
        raise ValueError(f"get_indicators_command return with error. \n\nError message: {err}")
+
+
def fetch_indicators_command(client: Client, params: Dict[str, str], args) -> List[Dict]:
    """Wrapper for fetching indicators from the feed to the Indicators tab.

    Args:
        client: Client object with request.
        params: demisto.params().
        args: demisto.args() (unused; kept for the dispatch signature).

    Returns:
        Indicators.
    """
    tags = argToList(params.get("feedTags", ""))
    tlp = params.get("tlp_color")
    max_indicators = int(params.get("limit", -1))
    previous_commit = demisto.getLastRun().get("last_commit")
    return fetch_indicators(
        client,
        previous_commit,
        params,
        tlp_color=tlp,
        feed_tags=tags,
        limit=max_indicators,
    )
+
+
def main():  # pragma: no cover
    """Read the integration configuration and dispatch the invoked command."""
    params = demisto.params()
    args = demisto.args()
    command = demisto.command()

    demisto.debug(f"Command being called is: {command}")
    base_url = str(params.get("url"))
    verify_certificate = not params.get("insecure", False)
    proxy = params.get("proxy", False)
    owner = params.get("owner", "")
    repo = params.get("repo", "")
    api_token = (params.get("api_token") or {}).get("password", "")
    # Authenticated requests get a Bearer header; anonymous ones only set Accept.
    headers = {"Accept": "application/vnd.github+json"}
    if api_token:
        headers["Authorization"] = f"Bearer {api_token}"

    try:
        client = Client(
            base_url=base_url,
            verify=verify_certificate,
            proxy=proxy,
            owner=owner,
            repo=repo,
            headers=headers,
        )

        if command == "test-module":
            return_results(test_module(client, params))

        elif command == "github-get-indicators":
            return_results(get_indicators_command(client, params, args))

        elif command == "fetch-indicators":
            fetched = fetch_indicators_command(client, params, args)
            for indicators_batch in batch(fetched, batch_size=2000):
                demisto.createIndicators(indicators_batch)

        else:
            raise NotImplementedError(f"Command {command} is not implemented.")

    except Exception as e:
        demisto.error(traceback.format_exc())
        return_error(f"Failed to execute {command} command.\nError:\n{str(e)}")


if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml
new file mode 100644
index 000000000000..d954494d8fe9
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml
@@ -0,0 +1,170 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Github Feed
+ version: -1
+configuration:
+- defaultvalue: 'true'
+ display: Fetch indicators
+ name: feed
+ required: false
+ type: 8
+- defaultvalue: https://api.github.com
+ display: Base URL
+ name: url
+ type: 0
+ additionalinfo: The URL to the GitHub API.
+ section: Connect
+ required: true
+- displaypassword: API Token
+ name: api_token
+ type: 9
+ hiddenusername: true
+ section: Connect
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Owner
+ name: owner
+ type: 0
+ additionalinfo: Username of the repository owner
+ section: Connect
+ required: true
+- defaultvalue: ""
+ display: Repository / Path to fetch
+ name: repo
+ type: 0
+ additionalinfo: The name of the repository.
+ section: Connect
+ required: true
+- defaultvalue: ""
+ display: Feed type
+ name: feedType
+ options:
+ - YARA
+ - STIX
+ - IOCs
+ type: 15
+ additionalinfo: |
+ Predefined list of indicator types:
+ - YARA: Parses YARA rules from the feed.
+ - STIX: Parses STIX data from the feed.
+ - IOCs: Parses Indicators of Compromise (IOCs) using regex patterns.
+ section: Collect
+ required: true
+- display: Branch name
+ name: branch_head
+ type: 0
+ required: true
+ defaultvalue: main
+ additionalinfo: The name of the main branch to which to compare.
+ section: Collect
+ advanced: true
+- display: Files extensions to fetch
+ name: extensions_to_fetch
+ type: 16
+ required: true
+ defaultvalue: txt,yar,json
+ options:
+ - txt
+ - yar
+ - json
+ additionalinfo: The extension of the file names to target.
+ section: Collect
+ advanced: true
+- additionalinfo: Reliability of the source providing the intelligence data.
+ defaultvalue: F - Reliability cannot be judged
+ display: Source Reliability
+ name: feedReliability
+ options:
+ - A - Completely reliable
+ - B - Usually reliable
+ - C - Fairly reliable
+ - D - Not usually reliable
+ - E - Unreliable
+ - F - Reliability cannot be judged
+ required: true
+ type: 15
+- additionalinfo: The Traffic Light Protocol (TLP) designation to apply to indicators fetched from the feed.
+ display: Traffic Light Protocol Color
+ name: tlp_color
+ options:
+ - RED
+ - AMBER
+ - GREEN
+ - WHITE
+ required: false
+ type: 15
+- display: First fetch time
+ additionalinfo: First commit date of first published indicators to bring. e.g., "1 min ago","2 weeks ago","3 months ago".
+ name: fetch_since
+ type: 0
+ defaultvalue: '90 days ago'
+ required: false
+- display: Feed Fetch Interval
+ name: feedFetchInterval
+ type: 19
+ defaultvalue: '240'
+ required: false
+- additionalinfo: When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system.
+ defaultvalue: 'true'
+ display: Bypass exclusion list
+ name: feedBypassExclusionList
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: ''
+ name: feedExpirationPolicy
+ type: 17
+ required: false
+ options:
+ - never
+ - interval
+ - indicatorType
+ - suddenDeath
+- display: ''
+ name: feedExpirationInterval
+ type: 1
+ required: false
+- display: Tags
+ name: feedTags
+ type: 0
+ additionalinfo: Supports CSV values.
+ required: false
+display: Github Feed
+name: Github Feed
+script:
+ commands:
+ - arguments:
+ - defaultValue: '7 days'
+ description: 'The start date from which to fetch indicators. Accepts date strings like "7 days ago", "2 weeks ago", etc.'
+ name: since
+ - description: 'The end date until which to fetch indicators. Accepts date strings like "now", "2023-05-19", etc.'
+ name: until
+ - defaultValue: '50'
+ description: The maximum number of results to return.
+ name: limit
+ description: Gets indicators from the feed within a specified date range and up to a maximum limit.
+ name: github-get-indicators
+ dockerimage: demisto/taxii2:1.0.0.96747
+ feed: true
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ subtype: python3
+ type: python
+fromversion: 6.8.0
+description: Fetches indicators such as YARA rules, STIX objects, and IOCs from GitHub repositories.
+marketplaces:
+- xsoar
+- marketplacev2
+tests:
+- No tests (auto formatted)
+
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_description.md b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_description.md
new file mode 100644
index 000000000000..2619ad08da8c
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_description.md
@@ -0,0 +1,24 @@
+# Feed GitHub
+
+The integration supports the following workflow:
+
+- Publication of indicators from a specific repo.
+
+ To access the requested repository, you need to provide
+ the owner(username), repo(repository) name, and API key as parameters for the
+ integration.
+
+#### It is recommended to use an API token in order to avoid being rate limited.
+
+To generate your personal access token, visit:[GitHub](https://github.com/settings/tokens)
+
+
+- `Owner:` The owner of the repository (see the example)
+- `Repository:` The name of the repository (see the example)
+- `Feed type:` The type of indicators to publish
+- `Branch name:` The required head branch
+- `Files extensions to fetch:` A list of file extensions from which only the indicators will be extracted
+
+#### Owner, Repo sources example
+
+![exa photo](https://github.com/demisto/content/blob/9835-GitHub-Feed/Packs/FeedGithub/Integrations/FeedGithub/owner-repo-example.png?raw=true)
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_image.png b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_image.png
new file mode 100644
index 000000000000..0319061e822d
Binary files /dev/null and b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_image.png differ
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_test.py b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_test.py
new file mode 100644
index 000000000000..dd9a540653bb
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub_test.py
@@ -0,0 +1,418 @@
+import pytest
+
+
+import json
+from freezegun import freeze_time
+import demistomock as demisto
+
+
def util_load_json(path):
    """Load the JSON file at *path* and return its deserialized content."""
    with open(path, encoding="utf-8") as json_file:
        return json.load(json_file)
+
+
def util_load_txt(path):
    """Read the UTF-8 text file at *path* and return its full contents."""
    with open(path, encoding="utf-8") as text_file:
        contents = text_file.read()
    return contents
+
+
def mock_client():
    """
    Build a FeedGitHub.Client with dummy connection settings for use in tests.
    """
    from FeedGitHub import Client

    client_kwargs = {
        "base_url": "example.com",
        "verify": False,
        "proxy": False,
        "owner": "",
        "repo": "",
        "headers": {},
    }
    return Client(**client_kwargs)
+
+
def test_get_content_files_from_repo(mocker):
    """
    Given:
        - A list of relevant files to fetch content from.
        - Parameters specifying the feed type and extensions to fetch.
        - A mock response for the content files from the repository.
    When:
        - Calling get_content_files_from_repo to fetch the content of the relevant files.
    Then:
        - Returns the content of the relevant files matching the expected results.
    """
    from FeedGitHub import get_content_files_from_repo

    client = mock_client()
    feed_params = {"feedType": "IOCs", "extensions_to_fetch": ["txt"]}
    files_to_fetch = util_load_json("test_data/relevant-files.json")
    mocked_response = util_load_json("test_data/content_files_from_repo.json")
    mocker.patch.object(client, "_http_request", return_value=mocked_response)

    fetched_content = get_content_files_from_repo(client, files_to_fetch, feed_params)

    expected_content = util_load_json("test_data/get_content-files-from-repo-result.json")
    assert fetched_content == expected_content
+
+
def test_get_commit_files(mocker):
    """
    Given:
        - A base commit SHA, a head commit SHA, and a flag indicating if it is the first fetch.
        - A mock response for the list of all commit files between the base and head commits.
    When:
        - Calling get_commits_files to retrieve the relevant files and the current repo head SHA.
    Then:
        - Returns the list of relevant files matching the expected results.
    """
    from FeedGitHub import get_commits_files

    client = mock_client()
    base_sha = "ad3e0503765479e9ee09bac5dee726eb918b9ebd"
    head_sha = "9a611449423b9992c126c20e47c5de4f58fc1c0e"
    is_first_fetch = True
    commit_files_response = util_load_json("test_data/all-commit-files-res.json")
    repo_head_sha = "ad3e0503765479e9ee09bac5dee726eb918b9ebd"
    mocker.patch.object(
        client,
        "get_files_between_commits",
        return_value=(commit_files_response, repo_head_sha),
    )

    relevant_files, _ = get_commits_files(client, base_sha, head_sha, is_first_fetch)

    assert relevant_files == util_load_json("test_data/relevant-files.json")
+
+
def test_filter_out_files_by_status():
    """
    Given:
        - A list of dictionaries representing commit files, each containing a status and a raw_url.
    When:
        - Filtering out files by their status using the filter_out_files_by_status function.
    Then:
        - Returns a list of URLs for files that are added or modified only.
    """
    from FeedGitHub import filter_out_files_by_status

    raw_commit_files = [
        {"status": "added", "raw_url": "http://example.com/file1"},
        {"status": "modified", "raw_url": "http://example.com/file2"},
        {"status": "removed", "raw_url": "http://example.com/file3"},
        {"status": "renamed", "raw_url": "http://example.com/file4"},
        {"status": "added", "raw_url": "http://example.com/file5"},
    ]
    # Only "added" and "modified" entries should survive the filter.
    expected_urls = [
        "http://example.com/file1",
        "http://example.com/file2",
        "http://example.com/file5",
    ]

    filtered_urls = filter_out_files_by_status(raw_commit_files)

    assert filtered_urls == expected_urls, f"Expected {expected_urls}, but got {filtered_urls}"
+
+
@freeze_time("2024-05-12T15:30:49.330015")
def test_parse_and_map_yara_content(mocker):
    """
    Given:
        - YARA rule files as input from different sources.
          rule-1 = classic yara rule
          rule-2 = broken yara rule
          rule-3 = yara rule with curly brackets inside the rule strings field
          test-split-yara-1 = several different rules in a single file
    When:
        - Parsing and mapping YARA content using the parse_and_map_yara_content function.
    Then:
        - Returns the parsed YARA rules in JSON format matching the expected results.
    """
    from FeedGitHub import parse_and_map_yara_content

    mocker.patch.object(demisto, "error")

    # Each case maps a raw .yar fixture to the JSON file holding its expected parse.
    cases = [
        ("test_data/yara-rule-1.yar", "test_data/yara-rule-1-res.json"),
        ("test_data/yara-rule-2.yar", "test_data/yara-rule-2-res.json"),
        ("test_data/yara-rule-3.yar", "test_data/yara-rule-3-res.json"),
        ("test_data/test-split-yara-1.yar", "test_data/list-parsed-rules-res.json"),
    ]
    for rule_path, expected_path in cases:
        parsed = parse_and_map_yara_content({"example.com": util_load_txt(rule_path)})
        assert parsed == util_load_json(expected_path)
+
+
@freeze_time("2024-05-12T15:30:49.330015")
def test_extract_text_indicators():
    """
    Given:
        - A dictionary containing file paths and their respective contents with IOC indicators.
        - Parameters specifying the repository owner and name.
    When:
        - Calling extract_text_indicators to extract IOC indicators from the file contents.
    Then:
        - Returns the extracted IOC indicators matching the expected results.
    """
    from FeedGitHub import extract_text_indicators

    file_contents = {"example.com": util_load_txt("test_data/test-ioc-indicators.txt")}
    repo_params = {"owner": "example.owner", "repo": "example.repo"}

    extracted = extract_text_indicators(file_contents, repo_params)

    assert extracted == util_load_json("test_data/iocs-res.json")
+
+
def test_get_stix_indicators():
    """
    Given:
        - Output of the STIX feed API.
    When:
        - Calling the 'get_stix_indicators' method.
    Then:
        - Returns a list of the STIX indicators parsed from "STIX2XSOARParser client".
    """
    from FeedGitHub import get_stix_indicators

    stix_objects = util_load_json("test_data/taxii_test.json")
    parsed = get_stix_indicators(stix_objects)
    assert parsed == util_load_json("test_data/taxii_test_res.json")
+
+
def test_negative_limit(mocker):
    """
    Given:
        - A negative limit.
    When:
        - Calling get_indicators.
    Then:
        - Ensure ValueError is raised with the right message.
    """
    mocker.patch.object(demisto, "error")
    from FeedGitHub import get_indicators_command

    client = mock_client()

    with pytest.raises(ValueError) as exc_info:
        get_indicators_command(client, {}, {"limit": "-1"})
    # The message below must match the integration's error text verbatim
    # (including the "massage" typo that exists in the code under test).
    assert (
        exc_info.value.args[0]
        == "get_indicators_command return with error. \n\nError massage: Limit must be a positive number."
    )
+
+
def test_fetch_indicators(mocker):
    """
    Given:
        - A mock client and parameters specifying the fetch time frame.
        - Mocked responses for base and head commit SHAs, and indicators.
    When:
        - Calling fetch_indicators to retrieve indicators from the GitHub feed.
    Then:
        - Returns the list of indicators matching the expected results.
    """
    import FeedGitHub

    client = mock_client()
    mocker.patch.object(demisto, "debug")
    mocker.patch.object(demisto, "setLastRun")
    mocker.patch.object(
        client,
        "get_commits_between_dates",
        return_value="046a799ebe004e1bff686d6b774387b3bdb3d1ce",
    )
    mocker.patch.object(
        FeedGitHub,
        "get_indicators",
        return_value=(
            util_load_json("test_data/iterator-test.json"),
            "9a611449423b9992c126c20e47c5de4f58fc1c0e",
        ),
    )

    fetched = FeedGitHub.fetch_indicators(client, None, {"fetch_since": "15 days ago"})

    assert fetched == util_load_json("test_data/fetch-indicators-res.json")
+
+
@freeze_time("2024-05-20T11:05:36.984413")
def test_get_indicators_command(mocker):
    """
    Given:
        - A mock client and parameters to retrieve indicators from the GitHub feed.
        - Mocked responses for base and head commit SHAs, and indicators.
    When:
        - Calling get_indicators_command to retrieve and format indicators.
    Then:
        - Returns the human-readable output matching the expected results.
    """
    import FeedGitHub
    from CommonServerPython import tableToMarkdown

    client = mock_client()
    mocker.patch.object(demisto, "debug")
    mocker.patch.object(demisto, "error")
    commit_shas = [
        "9a611449423b9992c126c20e47c5de4f58fc1c0e",
        "aabaf42225cb4d18e338bc5c8c934f25be814704",
        "046a799ebe004e1bff686d6b774387b3bdb3d1ce",
    ]
    mocker.patch.object(client, "get_commits_between_dates", return_value=commit_shas)
    mocker.patch.object(
        FeedGitHub,
        "get_indicators",
        return_value=(util_load_json("test_data/iterator-test.json"), None),
    )

    command_result = FeedGitHub.get_indicators_command(client, params={}, args={"limit": ""})

    expected_table = tableToMarkdown(
        "Indicators from GitHubFeed:",
        util_load_json("test_data/hr-indicators.json"),
        headers=["Type", "Value"],
        removeNull=True,
    )
    assert command_result.readable_output == expected_table
+
+
def test_extract_commits(mocker):
    """
    Given:
        - A mock response object with commit data.
        - Mocked responses for paginated commit data.
    When:
        - Calling _extract_commits to retrieve and aggregate commit information.
    Then:
        - Returns a list of all commits from the paginated responses.
    """
    client = mock_client()
    mocker.patch.object(demisto, "debug")

    # Single-page response: no "next" link, so pagination stops immediately.
    single_page = mocker.MagicMock()
    single_page.links = {}
    single_page.json.return_value = [{"sha": "commit1"}, {"sha": "commit2"}]

    # Two-page response: the first page links to a second, final page.
    first_page = mocker.MagicMock()
    second_page = mocker.MagicMock()
    first_page.links = {"next": {"url": "http://example.com/page2"}}
    first_page.json.return_value = [{"sha": "commit1"}, {"sha": "commit2"}]
    second_page.links = {}
    second_page.json.return_value = [{"sha": "commit3"}, {"sha": "commit4"}]

    # Only the paginated walk should consume this single queued response.
    mocker.patch.object(client, "_http_request", side_effect=[second_page])

    assert client._extract_commits(single_page) == [{"sha": "commit1"}, {"sha": "commit2"}]

    assert client._extract_commits(first_page) == [
        {"sha": "commit1"},
        {"sha": "commit2"},
        {"sha": "commit3"},
        {"sha": "commit4"},
    ]
+
+
@freeze_time("2024-05-20T11:05:36.984413")
def test_arrange_iocs_indicator_to_xsoar():
    """
    Given:
        - A file path, a list of parsed indicators, and additional parameters.
    When:
        - Calling arrange_iocs_indicator_to_xsoar to format the indicators.
    Then:
        - Returns a list of formatted indicators with expected fields and values.
    """
    from FeedGitHub import arrange_iocs_indicator_to_xsoar

    file_path = "test_file.txt"
    parsed_indicators = [
        {"value": "example.com", "type": "Domain"},
        {"value": "123.456.789.0", "type": "IP"},
    ]
    params = {"owner": "example_owner", "repo": "example_repo"}

    # Every formatted indicator carries the same reference/tag/timestamp fields;
    # the timestamp is pinned by the freeze_time decorator above.
    common_fields = {
        "references": "test_file.txt",
        "tags": {"owner": "example_owner", "repo": "example_repo"},
        "firstseenbysource": "2024-05-20T11:05:36.984413",
    }
    expected_result = [
        {
            "value": indicator["value"],
            "type": indicator["type"],
            "service": "github",
            "fields": common_fields,
            "rawJSON": {"value": indicator["value"], "type": indicator["type"]},
        }
        for indicator in parsed_indicators
    ]

    result = arrange_iocs_indicator_to_xsoar(file_path, parsed_indicators, params)

    assert result == expected_result
+
+
def test_identify_json_structure():
    """
    Given:
        - A dictionary containing JSON data with different structures.
    When:
        - Calling identify_json_structure to identify the structure.
    Then:
        - Returns the identified structure based on the provided JSON data.
    """
    from FeedGitHub import identify_json_structure

    bundle_payload = {"bundle": {"type": "bundle", "id": "bundle--12345678-1234-5678-1234-567812345678"}}
    assert identify_json_structure(bundle_payload) == "Bundle"

    envelope_payload = {"objects": [{"type": "indicator", "id": "indicator--12345678-1234-5678-1234-567812345678"}]}
    assert identify_json_structure(envelope_payload) == "Envelope"

    bare_indicator = {"type": "indicator", "id": "indicator--12345678-1234-5678-1234-567812345678"}
    assert identify_json_structure(bare_indicator) == "Envelope"

    object_list = [{"type": "indicator", "id": "indicator--12345678-1234-5678-1234-567812345678"}]
    assert identify_json_structure(object_list) == {"objects": object_list}

    unknown_payload = {"unknown_key": "unknown_value"}
    assert identify_json_structure(unknown_payload) is None
+
+
def test_filtering_stix_files():
    """
    Given:
        - A list of content files containing both STIX and non-STIX entries.
    When:
        - Calling filtering_stix_files on the list.
    Then:
        - Returns the flattened file contents.
          NOTE(review): the expected output also keeps the non-STIX entry, so this
          test asserts pass-through behavior for unrecognized formats — confirm
          against the intent of filtering_stix_files.
    """
    from FeedGitHub import filtering_stix_files

    stix_indicator = {"type": "indicator", "id": "indicator--12345678-1234-5678-1234-567812345678"}
    stix_bundle = {"bundle": {"type": "bundle", "id": "bundle--12345678-1234-5678-1234-567812345678"}}
    non_stix_entry = {"type": "non-stix", "id": "non-stix--12345678-1234-5678-1234-567812345678"}
    content_files = [[stix_indicator], [stix_bundle], [non_stix_entry]]

    assert filtering_stix_files(content_files) == [stix_indicator, stix_bundle, non_stix_entry]
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/README.md b/Packs/FeedGitHub/Integrations/FeedGitHub/README.md
new file mode 100644
index 000000000000..32ab857b905d
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/README.md
@@ -0,0 +1,54 @@
+This is the Feed GitHub integration for getting started with your feed integration.
+This integration was integrated and tested with version 1.0.0 of Github Feed.
+
+## Configure Github Feed on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Github Feed.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Fetch indicators | | False |
+ | Base URL | The URL to the GitHub API. | True |
+ | API Token | | False |
+ | Trust any certificate (not secure) | | False |
+ | Owner | Username of the repository owner | True |
+ | Repository / Path to fetch | The name of the repository | True |
+ | Feed type | Predefined list of indicator types: - YARA: Parses YARA rules from the feed. - STIX: Parses STIX data from the feed. - IOCs: Parses Indicators of Compromise \(IOCs\) using regex patterns. | True |
+ | Branch name | The name of the main branch to which to compare. | True |
+ | Files extensions to fetch | The extension for the file names to target. | True |
+ | Source Reliability | Reliability of the source providing the intelligence data. | True |
+ | Traffic Light Protocol Color | The Traffic Light Protocol \(TLP\) designation to apply to indicators fetched from the feed. | False |
+ | First fetch time | First commit date of first published indicators to bring. e.g., "1 min ago","2 weeks ago","3 months ago". | False |
+ | Feed Fetch Interval | | False |
+ | Bypass exclusion list | When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system. | False |
+ | Use system proxy settings | | False |
+ | Tags | Insert as a comma-separated list. | False |
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### github-get-indicators
+
+***
+Gets indicators from the feed within a specified date range and up to a maximum limit.
+
+#### Base Command
+
+`github-get-indicators`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| since | The start date from which to fetch indicators. Accepts date strings like "7 days ago", "2 weeks ago", etc. Default is 7 days. | Optional |
+| until | The end date until which to fetch indicators. Accepts date strings like "now", "2023-05-19", etc. | Optional |
+| limit | The maximum number of results to return. Default is 50. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/command_examples b/Packs/FeedGitHub/Integrations/FeedGitHub/command_examples
new file mode 100644
index 000000000000..31d9901d847f
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/command_examples
@@ -0,0 +1 @@
+!github-get-indicators since="30 days ago" until="now" limit=50
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/all-commit-files-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/all-commit-files-res.json
new file mode 100644
index 000000000000..9ab9ede68050
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/all-commit-files-res.json
@@ -0,0 +1 @@
+[{"sha": "463cedd3734f00fe9c43d49f6b5e5b22ce8ce931", "filename": "space_invaders-main/aaa.json", "status": "added", "additions": 36, "deletions": 0, "changes": 36, "blob_url": "https://github.com/aaron1535/space_invaders/blob/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Faaa.json", "raw_url": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Faaa.json", "contents_url": "https://api.github.com/repos/aaron1535/space_invaders/contents/space_invaders-main%2Faaa.json?ref=9a611449423b9992c126c20e47c5de4f58fc1c0e", "patch": "@@ -0,0 +1,36 @@\n+[\n+ {\n+ \"objects\": [\n+ {\n+ \"hashes\": {\n+ \"MD5\": \"e086aa137fa19f67d27b39d0eca18610\",\n+ \"SHA-1\": \"409629a08b9b3f3be610b8832cc28822f964410f\",\n+ \"SHA-256\": \"f1412386aa8db2579aff2636cb9511cacc5fd9880ecab60c048508fbe26ee4d9\",\n+ \"SHA-512\": \"502b6772333c9f8ba18bf4b9cb47c81d1b931660e7a7a51d7b42ccf9179ea34e4d852466e1101249dbc3da32b121bf83427b0237dce1870e5e1a5b04db524cfb\"\n+ },\n+ \"type\": \"file\"\n+ },\n+ {\n+ \"key\": \"Computer\\\\HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\7-Zip\",\n+ \"type\": \"windows-registry-key\",\n+ \"values\": [\n+ {\n+ \"data\": \"C:\\\\7-Zip\\\\\",\n+ \"data_type\": \"REG_SZ\",\n+ \"name\": \"Path\"\n+ },\n+ {\n+ \"data\": \"C:\\\\7-Zip\\\\\",\n+ \"data_type\": \"REG_SZ\",\n+ \"name\": \"Path64\"\n+ }\n+ ]\n+ },\n+ {\n+ \"number\": \"15169\",\n+ \"name\": \"Google\",\n+ \"type\": \"autonomous-system\"\n+ }\n+ ]\n+ }\n+]"}, {"sha": "8b82a649186b55f8e905b7850f5dbd5b6f67f2e0", "filename": "space_invaders-main/iocs-test.txt", "status": "added", "additions": 32, "deletions": 0, "changes": 32, "blob_url": "https://github.com/aaron1535/space_invaders/blob/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "raw_url": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "contents_url": 
"https://api.github.com/repos/aaron1535/space_invaders/contents/space_invaders-main%2Fiocs-test.txt?ref=9a611449423b9992c126c20e47c5de4f58fc1c0e", "patch": "@@ -0,0 +1,32 @@\n+2023-02-08 (WEDNESDAY) - COBALT STRIKE FROM ICEDID (BOKBOT) INFECTION\n+\n+REFERENCE:\n+\n+- https://twitter.com/Unit42_Intel/status/1623707361184477185\n+\n+NOTES:\n+\n+- IcedID infection generated using a OneNote file reported earlier today by @k3dg3 at:\n+ -- https://twitter.com/k3dg3/status/1623333951069646857\n+\n+ICEDID TRAFFIC:\n+\n+- 80.66.88[.]143 port 80 - ehonlionetodo[.]com - GET /\n+- 94.232.46[.]221 port 443 - noosaerty[.]com - HTTPS traffic\n+- 37.252.6[.]77 port 443 - palasedelareforma[.]com - HTTPS traffic\n+\n+COBALT STRIKE TRAFFIC:\n+\n+- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /36.ps1\n+- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /b360802.dll\n+- 79.132.128[.]191 port 443 - thefirstupd[.]com - HTTPS traffic\n+\n+COBALT STRIKE STAGER:\n+\n+- SHA256 hash: 9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd\n+- File size: 754,688 bytes\n+- File location: hxxp://167.172.154[.]189/b360802.dll\n+- File location: C:\\Windows\\tasks\\si.dll\n+- File description: 64-bit DLL stager for Cobalt Strike\n+- Run method: rundll32.exe [filename],ApendMenu \n+- Sample: https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/"}, {"sha": "da15f7bf70d786eef39aa7d06b4de270ff42ad6d", "filename": "space_invaders-main/stix-test.json", "status": "added", "additions": 527, "deletions": 0, "changes": 527, "blob_url": "https://github.com/aaron1535/space_invaders/blob/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fstix-test.json", "raw_url": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fstix-test.json", "contents_url": 
"https://api.github.com/repos/aaron1535/space_invaders/contents/space_invaders-main%2Fstix-test.json?ref=9a611449423b9992c126c20e47c5de4f58fc1c0e", "patch": "@@ -0,0 +1,527 @@\n+[\n+ {\n+ \"objects\": [\n+ {\n+ \"id\": \"marking-definition--f88d31f6-486f-44da-b317-01333bde0b82\",\n+ \"created\": \"2017-01-20T00:00:00.000Z\",\n+ \"definition_type\": \"tlp\",\n+ \"definition\": {\n+ \"tlp\": \"amber\"\n+ },\n+ \"type\": \"marking-definition\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\",\n+ \"created\": \"2017-01-20T00:00:00.000Z\",\n+ \"definition_type\": \"tlp\",\n+ \"definition\": {\n+ \"tlp\": \"green\"\n+ },\n+ \"type\": \"marking-definition\",\n+ \"spec_version\": \"2.1\"\n+ }\n+ ],\n+ \"more\": \"false\"\n+ },\n+ {\n+ \"objects\": [\n+ {\n+ \"hashes\": {\n+ \"MD5\": \"e086aa137fa19f67d27b39d0eca18610\",\n+ \"SHA-1\": \"409629a08b9b3f3be610b8832cc28822f964410f\",\n+ \"SHA-256\": \"f1412386aa8db2579aff2636cb9511cacc5fd9880ecab60c048508fbe26ee4d9\",\n+ \"SHA-512\": \"502b6772333c9f8ba18bf4b9cb47c81d1b931660e7a7a51d7b42ccf9179ea34e4d852466e1101249dbc3da32b121bf83427b0237dce1870e5e1a5b04db524cfb\"\n+ },\n+ \"type\": \"file\"\n+ },\n+ {\n+ \"key\": \"Computer\\\\HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\7-Zip\",\n+ \"type\": \"windows-registry-key\",\n+ \"values\": [\n+ {\n+ \"data\": \"C:\\\\7-Zip\\\\\",\n+ \"data_type\": \"REG_SZ\",\n+ \"name\": \"Path\"\n+ },\n+ {\n+ \"data\": \"C:\\\\7-Zip\\\\\",\n+ \"data_type\": \"REG_SZ\",\n+ \"name\": \"Path64\"\n+ }\n+ ]\n+ },\n+ {\n+ \"number\": \"15169\",\n+ \"name\": \"Google\",\n+ \"type\": \"autonomous-system\"\n+ }\n+ ]\n+ },\n+ {\n+ \"objects\": [\n+ {\n+ \"id\": \"indicator--86fee2b1-807d-423d-9d0e-1117bab576ce\",\n+ \"pattern\": \"[ipv4-addr:value = '195.123.227.186' AND ipv4-addr:value = '1.1.1.1']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:33.126Z\",\n+ \"modified\": 
\"2020-06-10T01:14:33.126Z\",\n+ \"name\": \"bot_ip: 195.123.227.186\",\n+ \"description\": \"TS ID: 55694549840; iType: bot_ip; Date First: 2020-06-05T08:42:19.170Z; State: active; Org: Layer6 Networks; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.779852Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--891207b3-bff4-4bc2-8c12-7fd2321c9f38\",\n+ \"pattern\": \"[ipv4-addr:value = '134.209.37.102' OR ipv4-addr:value = '2.2.2.2']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:52.501Z\",\n+ \"modified\": \"2020-06-10T01:14:52.501Z\",\n+ \"name\": \"bot_ip: 134.209.37.102\",\n+ \"description\": \"TS ID: 55682983162; iType: bot_ip; Date First: 2020-06-02T07:26:06.274Z; State: active; Org: Covidien Lp; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.722754Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--8c726d5f-cb6b-45dc-8c2b-2be8596043cf\",\n+ \"pattern\": \"[ipv4-addr:value = '117.141.112.155' FOLLOWEDBY ipv4-addr:value = '3.3.3.3']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:54.684Z\",\n+ \"modified\": \"2020-06-10T01:14:54.684Z\",\n+ \"name\": \"bot_ip: 117.141.112.155\",\n+ \"description\": \"TS ID: 55694549819; iType: bot_ip; Date First: 
2020-06-05T08:42:17.907Z; State: active; Org: China Mobile Guangdong; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.775627Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--8e19a19c-cd66-4278-8bfb-c05c64977d12\",\n+ \"pattern\": \"[ipv4-addr:value = '23.129.64.217']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:19.858Z\",\n+ \"modified\": \"2020-06-10T01:14:19.858Z\",\n+ \"name\": \"bot_ip: 23.129.64.217\",\n+ \"description\": \"TS ID: 55682983514; iType: bot_ip; Date First: 2020-06-02T07:26:46.206Z; State: active; Org: Emerald Onion; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.731573Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--90a4f95d-1e35-4f47-b303-5651c93457f4\",\n+ \"pattern\": \"[ipv4-addr:value = '45.142.213.11']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:10.753Z\",\n+ \"modified\": \"2020-06-10T01:14:10.753Z\",\n+ \"name\": \"bot_ip: 45.142.213.11\",\n+ \"description\": \"TS ID: 55694549856; iType: bot_ip; Date First: 2020-06-05T08:45:37.178Z; State: active; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.808281Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": 
[\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--94f109aa-3ef2-4a8c-a847-dfb4c64f4f29\",\n+ \"pattern\": \"[ipv4-addr:value = '157.245.250.190']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:15.950Z\",\n+ \"modified\": \"2020-06-10T01:14:15.950Z\",\n+ \"name\": \"bot_ip: 157.245.250.190\",\n+ \"description\": \"TS ID: 55697907923; iType: bot_ip; Date First: 2020-06-06T09:32:01.051Z; State: active; Org: Datalogic ADC; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.818576Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--96d1737a-5565-49ac-8a91-52c2c7b38903\",\n+ \"pattern\": \"[ipv4-addr:value = '144.91.106.47']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:15:00.764Z\",\n+ \"modified\": \"2020-06-10T01:15:00.764Z\",\n+ \"name\": \"bot_ip: 144.91.106.47\",\n+ \"description\": \"TS ID: 55694549829; iType: bot_ip; Date First: 2020-06-05T08:44:22.790Z; State: active; Org: Mills College; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.791474Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ 
{\n+ \"id\": \"indicator--9c98d81b-b4a5-4b8d-8fd6-4b9beec0f1be\",\n+ \"pattern\": \"[ipv4-addr:value = '141.98.81.208']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:39.995Z\",\n+ \"modified\": \"2020-06-10T01:14:39.995Z\",\n+ \"name\": \"bot_ip: 141.98.81.208\",\n+ \"description\": \"TS ID: 55691320102; iType: bot_ip; Date First: 2020-06-04T10:33:13.398Z; State: active; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.766866Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--9cbf82af-8a54-478a-af76-b88a73a33d37\",\n+ \"pattern\": \"[ipv4-addr:value = '51.81.53.159']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:15:01.999Z\",\n+ \"modified\": \"2020-06-10T01:15:01.999Z\",\n+ \"name\": \"bot_ip: 51.81.53.159\",\n+ \"description\": \"TS ID: 55694549861; iType: bot_ip; Date First: 2020-06-05T08:42:44.478Z; State: active; Org: OVH SAS; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.781286Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--9ee9aecd-89e6-4dd6-9a24-4c610b33ebbb\",\n+ \"pattern\": \"[ipv4-addr:value = '104.168.173.252']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:58.530Z\",\n+ \"modified\": 
\"2020-06-10T01:14:58.530Z\",\n+ \"name\": \"bot_ip: 104.168.173.252\",\n+ \"description\": \"TS ID: 55691320097; iType: bot_ip; Date First: 2020-06-04T10:32:46.612Z; State: active; Org: Hostwinds LLC.; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.753603Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--9febf107-dd82-4727-bcb7-199291ec474c\",\n+ \"pattern\": \"[ipv4-addr:value = '173.212.206.89']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:34.822Z\",\n+ \"modified\": \"2020-06-10T01:14:34.822Z\",\n+ \"name\": \"bot_ip: 173.212.206.89\",\n+ \"description\": \"TS ID: 55697907953; iType: bot_ip; Date First: 2020-06-06T09:31:54.190Z; State: active; Org: Contabo GmbH; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.814015Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--a25904c8-0270-4d57-add5-64f5ed1485b5\",\n+ \"pattern\": \"[ipv4-addr:value = '67.207.94.201']\",\n+ \"confidence\": 15,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:29.751Z\",\n+ \"modified\": \"2020-06-10T01:14:29.751Z\",\n+ \"name\": \"bot_ip: 67.207.94.201\",\n+ \"description\": \"TS ID: 55697908164; iType: bot_ip; Date First: 2020-06-06T09:32:30.450Z; State: active; Org: Digital Ocean; Source: Emerging Threats - 
Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.837493Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--a5a1408d-ff8b-41b2-8c57-6678aa0c8688\",\n+ \"pattern\": \"[ipv4-addr:value = '89.163.242.76']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:35.839Z\",\n+ \"modified\": \"2020-06-10T01:14:35.839Z\",\n+ \"name\": \"bot_ip: 89.163.242.76\",\n+ \"description\": \"TS ID: 55694549874; iType: bot_ip; Date First: 2020-06-05T08:45:20.346Z; State: active; Org: myLoc managed IT AG; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.800264Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--a8cc5b11-3bbb-4fb2-970c-31a6f58e1374\",\n+ \"pattern\": \"[ipv4-addr:value = '51.75.71.205']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:41.919Z\",\n+ \"modified\": \"2020-06-10T01:14:41.919Z\",\n+ \"name\": \"bot_ip: 51.75.71.205\",\n+ \"description\": \"TS ID: 55686993979; iType: bot_ip; Date First: 2020-06-03T07:29:11.148Z; State: active; Org: OVH SAS; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.73608Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ 
\"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--a8ee1e5f-8c08-4135-878c-4973179cbac5\",\n+ \"pattern\": \"[ipv4-addr:value = '140.224.183.58']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:11.651Z\",\n+ \"modified\": \"2020-06-10T01:14:11.651Z\",\n+ \"name\": \"bot_ip: 140.224.183.58\",\n+ \"description\": \"TS ID: 55694549823; iType: bot_ip; Date First: 2020-06-05T08:45:24.055Z; State: active; Org: China Telecom FUJIAN NETWORK; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.801661Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"indicator--aa4ec99f-3c54-4e60-ab47-83ff78d76570\",\n+ \"pattern\": \"[ipv4-addr:value = '161.35.22.86']\",\n+ \"confidence\": 85,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:14:49.620Z\",\n+ \"modified\": \"2020-06-10T01:14:49.620Z\",\n+ \"name\": \"bot_ip: 161.35.22.86\",\n+ \"description\": \"TS ID: 55697907934; iType: bot_ip; Date First: 2020-06-06T09:32:22.615Z; State: active; Org: Racal-Redac; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.831549Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": 
\"indicator--ac4a9ca5-9f6e-4072-b568-46dbb03a3ace\",\n+ \"pattern\": \"[ipv4-addr:value = '45.143.220.246']\",\n+ \"confidence\": 50,\n+ \"lang\": \"en\",\n+ \"type\": \"indicator\",\n+ \"created\": \"2020-06-10T01:15:10.905Z\",\n+ \"modified\": \"2020-06-10T01:15:10.905Z\",\n+ \"name\": \"bot_ip: 45.143.220.246\",\n+ \"description\": \"TS ID: 55691320117; iType: bot_ip; Date First: 2020-06-04T10:32:46.584Z; State: active; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668\",\n+ \"valid_from\": \"2020-06-10T01:00:33.752185Z\",\n+ \"pattern_type\": \"stix\",\n+ \"object_marking_refs\": [\n+ \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\"\n+ ],\n+ \"labels\": [\n+ \"low\"\n+ ],\n+ \"indicator_types\": [\n+ \"anomalous-activity\"\n+ ],\n+ \"pattern_version\": \"2.1\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"marking-definition--f88d31f6-486f-44da-b317-01333bde0b82\",\n+ \"created\": \"2017-01-20T00:00:00.000Z\",\n+ \"definition_type\": \"tlp\",\n+ \"definition\": {\n+ \"tlp\": \"amber\"\n+ },\n+ \"type\": \"marking-definition\",\n+ \"spec_version\": \"2.1\"\n+ },\n+ {\n+ \"id\": \"marking-definition--34098fce-860f-48ae-8e50-ebd3cc5e41da\",\n+ \"created\": \"2017-01-20T00:00:00.000Z\",\n+ \"definition_type\": \"tlp\",\n+ \"definition\": {\n+ \"tlp\": \"green\"\n+ },\n+ \"type\": \"marking-definition\",\n+ \"spec_version\": \"2.1\"\n+ }\n+ ],\n+ \"more\": \"false\"\n+ },\n+ [\n+ {\n+ \"type\": \"intrusion-set\",\n+ \"id\": \"intrusion-set--9120e5a7-d7b5-437c-a9d9-2fdca0c44df8\",\n+ \"created\": \"2024-03-08T08:00:00Z\",\n+ \"modified\": \"2024-03-08T08:00:00Z\",\n+ \"name\": \"APT28\",\n+ \"description\": \"APT28 is a sophisticated threat actor group believed to be associated with the Russian government.\",\n+ \"aliases\": [\n+ \"Fancy Bear\",\n+ \"Sofacy\"\n+ ]\n+ },\n+ {\n+ \"type\": \"campaign\",\n+ \"id\": \"campaign--c0a1d715-55f0-4f76-afcb-f304fe58b4b9\",\n+ \"created\": \"2024-03-07T08:00:00Z\",\n+ \"modified\": 
\"2024-03-07T08:00:00Z\",\n+ \"name\": \"Emotet Campaign\",\n+ \"description\": \"A series of coordinated cyber attacks using the Emotet malware.\",\n+ \"aliases\": [\n+ \"Emotet Botnet\"\n+ ]\n+ },\n+ {\n+ \"type\": \"tool\",\n+ \"id\": \"tool--b5f2ed7e-2a80-4d0e-9c79-6e032c4986f4\",\n+ \"created\": \"2024-03-06T08:00:00Z\",\n+ \"modified\": \"2024-03-06T08:00:00Z\",\n+ \"name\": \"Cobalt Strike\",\n+ \"description\": \"Cobalt Strike is a commercial, full-featured penetration testing tool developed by Raphael Mudge.\"\n+ }\n+ ]\n+]"}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/build_iterator_results.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/build_iterator_results.json
new file mode 100644
index 000000000000..82b20e69efd6
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/build_iterator_results.json
@@ -0,0 +1,26 @@
+[
+ {
+ "value": "https://url1.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ },
+ {
+ "value": "https://url2.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ },
+ {
+ "value": "https://url3.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ },
+ {
+ "value": "https://url4.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ },
+ {
+ "value": "https://url5.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }]
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/content_files_from_repo.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/content_files_from_repo.json
new file mode 100644
index 000000000000..9004f670e20f
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/content_files_from_repo.json
@@ -0,0 +1 @@
+[{"https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt": "2023-02-08 (WEDNESDAY) - COBALT STRIKE FROM ICEDID (BOKBOT) INFECTION\n\nREFERENCE:\n\n- https://twitter.com/Unit42_Intel/status/1623707361184477185\n\nNOTES:\n\n- IcedID infection generated using a OneNote file reported earlier today by @k3dg3 at:\n -- https://twitter.com/k3dg3/status/1623333951069646857\n\nICEDID TRAFFIC:\n\n- 80.66.88[.]143 port 80 - ehonlionetodo[.]com - GET /\n- 94.232.46[.]221 port 443 - noosaerty[.]com - HTTPS traffic\n- 37.252.6[.]77 port 443 - palasedelareforma[.]com - HTTPS traffic\n\nCOBALT STRIKE TRAFFIC:\n\n- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /36.ps1\n- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /b360802.dll\n- 79.132.128[.]191 port 443 - thefirstupd[.]com - HTTPS traffic\n\nCOBALT STRIKE STAGER:\n\n- SHA256 hash: 9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd\n- File size: 754,688 bytes\n- File location: hxxp://167.172.154[.]189/b360802.dll\n- File location: C:\\Windows\\tasks\\si.dll\n- File description: 64-bit DLL stager for Cobalt Strike\n- Run method: rundll32.exe [filename],ApendMenu \n- Sample: https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/\n"}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/extract_commit_response.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/extract_commit_response.json
new file mode 100644
index 000000000000..09bbf82031d5
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/extract_commit_response.json
@@ -0,0 +1,1108 @@
+[
+ {
+ "sha": "9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "node_id": "C_kwDOJjJHSNoAKDlhNjExNDQ5NDIzYjk5OTJjMTI2YzIwZTQ3YzVkZTRmNThmYzFjMGU",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-07T18:07:51Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-07T18:07:51Z"
+ },
+ "message": "Create iocs-test.txt",
+ "tree": {
+ "sha": "d7275113a66c2e2eb810830ec0dd21fd62446e95",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/d7275113a66c2e2eb810830ec0dd21fd62446e95"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmOm33CRC1aQ7uu5UhlAAAJD8QAA/MiB1cl5jO3xeRa7cJJ5bp\nkPGCX+p+p9CwLCUpHH6v6wl3XwKmm8xDFJE3aIClQpxCmJdOOD932Jh32iWwOOjq\n4YVllo8BEMWOVSkuXv+qHE+tiTrNua6hPF0QgiEsf7Dx4Ol/wXGU1/SzPX3Hc+wh\nwRDdU0qN2EacGn0Pi3m5LPcMrNsTZAGFGjlUjTIlpGsEgU9FZWhZoNTI4HGhDLRy\nnjCz2jfvhEt3OYK/XNfWzbbavTiuIrBHII3JU4kxwjlmiDL2mWg3CiPKntWh4tvV\nsvmpsGQ93+vN5iUVNN+MNOT3GS3wZm7vGGjGI6UmmwHH+aK2zrCYdzu9VubHpElp\nmhPsNnP+fhV7uN91hBdiiZJGSN4x8tW/vKb5lmjaubaYRWghTI1nSOg9VrGaw0SE\nLBus6srqjZhs6KiwCcIHf2CmFm6dnWJuKcWg6tJi2uHCipXcqZjDc7Sw8N94NjkW\nJqFYA5qpUjr1p160vJf2+Sqd+Z539kiOgUPRGlxlIbAPaoYsSelXmyxtD2yTzleZ\nacP49/yW7k3/cC6VVvC2n293S1fXO8+JQ26CwV6XGawlK7HlMSURFXb1nyjeNM4r\nH9yeeePOUxjHvQUgfOU8ZhdoFJNhS1GEFcFYyo75PG1liCRKIFyIQlziSIKOI0h9\nnC+CyNffZWlqOx0422XW\n=KMtF\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree d7275113a66c2e2eb810830ec0dd21fd62446e95\nparent aabaf42225cb4d18e338bc5c8c934f25be814704\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1715105271 +0300\ncommitter GitHub 1715105271 +0300\n\nCreate iocs-test.txt"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/9a611449423b9992c126c20e47c5de4f58fc1c0e/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/aabaf42225cb4d18e338bc5c8c934f25be814704"
+ }
+ ]
+ },
+ {
+ "sha": "aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "node_id": "C_kwDOJjJHSNoAKGFhYmFmNDIyMjVjYjRkMThlMzM4YmM1YzhjOTM0ZjI1YmU4MTQ3MDQ",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-06T11:23:55Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-06T11:23:55Z"
+ },
+ "message": "Create aaa.json",
+ "tree": {
+ "sha": "44396a02a17dac11c6cf6bc4ecd7962230460786",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/44396a02a17dac11c6cf6bc4ecd7962230460786"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmOL3LCRC1aQ7uu5UhlAAA/ygQAKQhiDcxzbqFK8yeVv9Zywth\nSZRArGlhkxEEd+XMj71uGPHGn+WSAw5t8cDw++SqmuWItCq/IU68i9eJRVIzSeWw\nUP2uLkyA3W/jhk77wGAd5ID4j10rLzhXNrjp6iiQi5cQoVxkt6DIr+knwqIgcbgk\nnBLqCZ8YWAN/roFYkGg0j30SBZgD+59Hb7Lji7OR6S7lvc6oaIZPxGxmYWFvJTJP\noGr5Xgr262sREKXWoEjG1U3EX0falJ9H1rxKkuZSy/32BmK668zUDmdm9euGBsCj\nMkeMLE6XVAlneju/3JONaBa8tAFxm8FzGf+5ndeUC/7qkP/3L0GMQ/mkiFe8ZVRj\nybxYAOM920f3xzPUxLh4rC7N7rsxPfj9RiM6QDbCU0Jpp6IA0cEAUgeWSs4sCykr\n/1fEp0fOldIZLHbqdSKfFyomV3i5hBo4X9fwj2XLc1zrOA8Gpe4SDIVE1E35eGCS\nHV6njUUlbpJJpwNn1CbuEqDzBJSe5lBSysWtIhjEO8iT+KCSgJ3yQPN/6gQD79ab\n0ghGBxWkVnbsJYTPPFccM6OkZ5ZZwRPnWL7mrU+hDpzW9bfEwM1yq67ENjCPJjYt\n9kdrzFgZKRjdJutv/S5EYluqBjI8KkvTXtsRcNj+VVn58o7pVv8YsPoyqYudLdG9\nt35UsBKW3LmYY68RFNg6\n=kVSt\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 44396a02a17dac11c6cf6bc4ecd7962230460786\nparent 046a799ebe004e1bff686d6b774387b3bdb3d1ce\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1714994635 +0300\ncommitter GitHub 1714994635 +0300\n\nCreate aaa.json"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/aabaf42225cb4d18e338bc5c8c934f25be814704",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/aabaf42225cb4d18e338bc5c8c934f25be814704/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/046a799ebe004e1bff686d6b774387b3bdb3d1ce"
+ }
+ ]
+ },
+ {
+ "sha": "046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "node_id": "C_kwDOJjJHSNoAKDA0NmE3OTllYmUwMDRlMWJmZjY4NmQ2Yjc3NDM4N2IzYmRiM2QxY2U",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-06T08:26:55Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-06T08:26:55Z"
+ },
+ "message": "Update stix-test.json",
+ "tree": {
+ "sha": "8312efb609dfdf26f3c9a49b457e0d9265ca978f",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/8312efb609dfdf26f3c9a49b457e0d9265ca978f"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmOJRPCRC1aQ7uu5UhlAAAS1YQACr6dbvEENs/wIFp4uZGKN+k\nHjM3H0fie3YbakgXLwFgtLOC5TrtK/Z2CoHodiATmPIbe82H3bJCXbuFng0EW4+F\n2YGcKCfdfVYyfXVGMR9uSenMKvF33zZPyMa+M4wKWNwhvyLpMpHU3U//9xdc77cs\n+LVv+HneF+v3cqr2GgxonKm3dxbKv4Fnq3KED0Ir+I7nnCJinjkSRc1llbU/ZDV3\nw+ewGeo4OX8zeAi24orH0q4I1oqvyWZH8nIEMbkD39VDdLnmIdXPKQWS+bFdNZxp\nslnsDh3pjxARNHnZJKdsHLdkf7ve9FX04up/11NGeKcmFb9Ui1upngbbJNCBMZd8\nlFrn9S+lbWRPqIZYYcjPlR4jOPUaUgxaCBOCkekl4KZEwRyFa2rJ5WI04OdJeRgD\njLCi+gqWpJaON+SuHaV7UVpgXnnupmLes+kIqwPNH8EIKzwT+WyQCBrn74w3YOh5\nRVAwr0f5VbkDVH9Jq9AqLgFyJnfrv3efJ/Ffzyh+j32txMNTUON9S4OXD+dqBy0w\n5MHoerKav5XX81cAjifm06LWtjYSF2n+1I/fAVDZpJ/9hOuXAZnQt4JpF56oOSm7\nc6GaJJK8h7nBTOd0/U2JBGtTIudUQOTTMZxdPGhzATVVrxD34HANnis4f6aaZS3y\nFA2e52a2cKfzQ5hx99Hk\n=dTlt\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 8312efb609dfdf26f3c9a49b457e0d9265ca978f\nparent c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1714984015 +0300\ncommitter GitHub 1714984015 +0300\n\nUpdate stix-test.json"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/046a799ebe004e1bff686d6b774387b3bdb3d1ce",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/046a799ebe004e1bff686d6b774387b3bdb3d1ce/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3"
+ }
+ ]
+ },
+ {
+ "sha": "c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "node_id": "C_kwDOJjJHSNoAKGM0YzU2ODNkY2VhM2IwMzliMWVhZjcxYzIxYjhiZDYxNzhjYTAzZDM",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-02T09:40:18Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-02T09:40:18Z"
+ },
+ "message": "Update stix-test.json",
+ "tree": {
+ "sha": "8312efb609dfdf26f3c9a49b457e0d9265ca978f",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/8312efb609dfdf26f3c9a49b457e0d9265ca978f"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmM1+CCRC1aQ7uu5UhlAAAp2sQAJDVejgD2pgOEb4QF/eJIl6O\np52kqsrP/DExB7ljhrwSx0vpO8Mm2kJ87cRa7PZ5wjg9ityCgqo86ZpgA4TACTwD\ne22OYW7a9nDzP6ZOtEROCSVe7BgXkIODxEhwxVQrVaMVWZyazHY6VR7EUI/T3pMY\n9wigiI4xuoM6qvKftrZMWW96l5bUEjbb7ojqqnaygYQNImDLIQzF2+/sXTYWOtBD\nQC/tTAXySF6WC5Hv5PJLk4ZCw8gNBNKgSqhW1FeU9Hz2tE22RXeUUYt4UhU5L2gX\nkJVeBFaEtSgwtecu/H6gQKg3cjKIpsFlWs6e+gXL4A2vDCpd23Ni9+jIyRP8O4lF\nOAEbwP1m6ctnUH+ZO/+NLaFjftMaiel6P6gF+Hd0OwP3J/RFlfDO28prSQHzxxGh\nW30lurMigE5kZ5ntXyZip8UKPk9aW60xMLtWVG2XPx08hzdslM3+EHv4xX2sD6rp\nBTOuRIZSSfQbxyu0M76OAzBxHoy4E+cOXUecKwHrOGmNWXfI3+IX4dr5FclFgu7+\n9PdZHafcj5VryjPFMdsd8n5WuSW+tSKNbD+faERSRHIfd4DWf7GWBhg3Q9/stJW7\nIb11tUwkGR9/a4pirKcdx2uAZ+GjC8bO/8kPgvqP2L9Q8KJmaBGc68E5OeD9gNph\n9PioW1N7fSonM/ud7wve\n=M9HI\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 8312efb609dfdf26f3c9a49b457e0d9265ca978f\nparent 5f2697fddfc57edc92d11d12e7225617e7d1553a\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1714642818 +0300\ncommitter GitHub 1714642818 +0300\n\nUpdate stix-test.json"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c4c5683dcea3b039b1eaf71c21b8bd6178ca03d3/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/5f2697fddfc57edc92d11d12e7225617e7d1553a"
+ }
+ ]
+ },
+ {
+ "sha": "5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "node_id": "C_kwDOJjJHSNoAKDVmMjY5N2ZkZGZjNTdlZGM5MmQxMWQxMmU3MjI1NjE3ZTdkMTU1M2E",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-02T09:38:22Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-02T09:38:22Z"
+ },
+ "message": "Update stix-test.json",
+ "tree": {
+ "sha": "eeecadae0c1c4cb8072e4528e8745767b438558f",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/eeecadae0c1c4cb8072e4528e8745767b438558f"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmM18OCRC1aQ7uu5UhlAAA3rAQAKxd0gf2lazAZNgCeafJ/EDb\nTkCy5Z/soBAIM9o0eTXkZaQLo7O9irUOWhICZ3ZbkGQfuaFMklzApWb7v55dbOk5\nAyakI51b482iN1RFsZxAJDF4CeerbP8S+JsIgL8Q29QvlkTFGEyTqwlS2bGuBdCl\njTjsx3hq1vqOezFf3KRvqkq/UWSqpkJMP6YLMGhEB6TsjhPnk6/ADBH4a6jok2WO\nGGT9XsQvt/Hjd0hiRSQHVTARpNYG5NkxMVcF6/4EB1IjpAG0GwYFfBzd2PSxFbRA\nDGJrp6D/vua3rCLtuYFdDQP68YBTHuLSgWVHSUr59ZnxY+YN6nrvDKGaPuL8oEHS\nyU3nJW8J3cvXODlXppjqsmWSU7OwM8DRQfK/VJq7oWj0jfMq2gQ+ObGXNjyBbiVx\n3oYLKdjpdHotvxkxXqV97ZOmT5Is1mQCM5iTZOaj0l9htFr7652A5QS7wsEUNGBc\n4vikDHryFr0msHGCkCLPgGXxWyq8Q3vq+I1iqSIBGlv2cmsxIeM+jpdJkrqumMfN\npMgNsxskgHER+PTTPqiFIQ9jjbNVSiFsyHwOCvKlG3mUEla6dqR4KGBjMdJCB3MM\nleHaR0VZ+iiZybquZB9dkAeC56mI34Yehr1QTcak+ifSlFkR73HIgkqiKSuAHZwS\n7Sc4FchymsOQvG2yCQJ5\n=qMUj\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree eeecadae0c1c4cb8072e4528e8745767b438558f\nparent ad3e0503765479e9ee09bac5dee726eb918b9ebd\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1714642702 +0300\ncommitter GitHub 1714642702 +0300\n\nUpdate stix-test.json"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/5f2697fddfc57edc92d11d12e7225617e7d1553a",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/5f2697fddfc57edc92d11d12e7225617e7d1553a/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/ad3e0503765479e9ee09bac5dee726eb918b9ebd"
+ }
+ ]
+ },
+ {
+ "sha": "ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "node_id": "C_kwDOJjJHSNoAKGFkM2UwNTAzNzY1NDc5ZTllZTA5YmFjNWRlZTcyNmViOTE4YjllYmQ",
+ "commit": {
+ "author": {
+ "name": "azonenfeld",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-05-02T07:10:31Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-05-02T07:10:31Z"
+ },
+ "message": "Create stix-test.json",
+ "tree": {
+ "sha": "ada47cb8ecc792c33fcb993333fe0f79b11783c1",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/ada47cb8ecc792c33fcb993333fe0f79b11783c1"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmMzxnCRC1aQ7uu5UhlAAAmyMQAAsfAiZJKKAEBSs6ThB5ogt1\nSwPr3YGokln/sp92E3Fczhk6YojdUH1N5WJbGxz2ZmFLfoIlrR3Fgx7xShCbfe4K\nz2pivj5vbWBKPrjJpK1d3X82LEedei28eYwUwc5wA0gLiqKo3obdDS+yJ1Ykbu7K\ntBDaxC6xbs5iE37Jg4ga7LwHjY2PQvnh+917MgcbdmirWYOS8Ty8VF439QaxJtO0\naBrO9A0ZqA5soZBj0p3wdKoy7OhAnb4HudAF3NIbq2SSJnJmKSjxoXdr8wO2jrNg\nxq9Sy8LNr6WE3i+/mMsARLWGFUIhL31MB6cjddYKtgvid8HXG5Weuw19PRFPUsr6\ng2eHdR4IKkODZIxoWgLWMpVOG3Zax3CcudjXCtZtVvY39/HXVUIYRC/BRWYwZru3\n/YKyC6EpF2CzcpBp5Bm67Bdk2qiXSI2NaVJALRN8fgscudPqeFLay3KFX4ATwuu6\nNpWQDnPPlrxA/PweTs7Rd56ocXU3JY49fVf79T1Xds83THdVUbd4ONPLey6w2EYm\nKAs9OP2mnFudhvGIx7ZVBI55SjjhlwS3oVoXJcdfN0RyUbmse4xQB+06rnr61erm\nZ9k4gc6Huitl9lGcPn19V0iJvxt5McApLpVzC+dGAWzltnTcl1WvI4pEB/453mE1\nv3ayosvfoJQLEuG75Y9m\n=7lsj\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree ada47cb8ecc792c33fcb993333fe0f79b11783c1\nparent ccc015419dc660cd161714127b16de536ea3f476\nauthor azonenfeld <117573492+aaron1535@users.noreply.github.com> 1714633831 +0300\ncommitter GitHub 1714633831 +0300\n\nCreate stix-test.json"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/ad3e0503765479e9ee09bac5dee726eb918b9ebd",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ad3e0503765479e9ee09bac5dee726eb918b9ebd/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "ccc015419dc660cd161714127b16de536ea3f476",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ccc015419dc660cd161714127b16de536ea3f476",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/ccc015419dc660cd161714127b16de536ea3f476"
+ }
+ ]
+ },
+ {
+ "sha": "ccc015419dc660cd161714127b16de536ea3f476",
+ "node_id": "C_kwDOJjJHSNoAKGNjYzAxNTQxOWRjNjYwY2QxNjE3MTQxMjdiMTZkZTUzNmVhM2Y0NzY",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-04-03T07:57:37Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-04-03T07:57:37Z"
+ },
+ "message": "Create indicators.txt",
+ "tree": {
+ "sha": "049e837038bc91bc27bcec3a1db96369d7eea38b",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/049e837038bc91bc27bcec3a1db96369d7eea38b"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/ccc015419dc660cd161714127b16de536ea3f476",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmDQvxCRC1aQ7uu5UhlAAA55cQAKdf1ViAeyr8xewAIupIolF1\n5LwMYryArC9D6OBl+due6MV1aNdDafFNGfdikyxYw/hR5dmpy9+LWeOTKeHCQJcb\nm4v32EiQwnzZW/vJfQUqiRAufese4otGkkQF7FfwLyevF4PBKQgr0gnb2O8vzZuv\nI1l6VAFOvckJ3AxP+G/jLBHcb2M5x7uqV7L6c1fPsZYmNkPi60KLpp7VF+Jq1NkQ\npoV6KmTa7QQBp0TSf16+9kfkPkTSdcXnqw/dwsjuX7I6i1T6W5vNguMPFdEt9St9\nHGfJGxP99JWg8QiyNbygNw2jXqHL2q+mmImVVx+zKZX30jM7gPtfoSSnNpTgVZjw\niRhI2mv9ND3tss7AvI2H5pAZt+32x/vTjmNqVmQ9YoVfjvhWQ/2R4JZqGCYKppRm\nU40ZZbVmu8LZ2YT76Tti7n6WZMadS851KtH/I3HefWHQ26t0nlUJrsDyy/5gVQhr\nqFxpnzheB/ae/ZPea3SY34n69cWSc5tYgQlyrqTjrgXeFx1iv5fu5UcNVRLVc0X5\ns/T2qPCk6awAdhEMOlIm9ib23g3bqteADgW8cavcddSwIzx3JSZguZc43K1rY5pk\n1f+TDVHXulnalklLz2Gr+qp8Ay1Awr6YxrgEbI5pZfw5xYc3LmWXtKRS20pt7BmI\nYOKkqZS9BrDLKU7YEpep\n=M+6O\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 049e837038bc91bc27bcec3a1db96369d7eea38b\nparent 570b35bf066d6ee6a89a35f573664ff3877e2151\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1712131057 +0300\ncommitter GitHub 1712131057 +0300\n\nCreate indicators.txt"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ccc015419dc660cd161714127b16de536ea3f476",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/ccc015419dc660cd161714127b16de536ea3f476",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/ccc015419dc660cd161714127b16de536ea3f476/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/570b35bf066d6ee6a89a35f573664ff3877e2151"
+ }
+ ]
+ },
+ {
+ "sha": "570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "node_id": "C_kwDOJjJHSNoAKDU3MGIzNWJmMDY2ZDZlZTZhODlhMzVmNTczNjY0ZmYzODc3ZTIxNTE",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-04-03T07:37:09Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-04-03T07:37:09Z"
+ },
+ "message": "Create iocsTest.txt",
+ "tree": {
+ "sha": "06ac8e9929e0ad8eafeb4b70d3361dccc3b32f6b",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/06ac8e9929e0ad8eafeb4b70d3361dccc3b32f6b"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmDQclCRC1aQ7uu5UhlAAApasQABesgL6kdo2l5K1zJUs7MPiL\nOCDD18UE3M3TYaHnfF3EZZ2nIUZUMKaWQEpq3slqapS1KnHNoJ7qs4QjZiB2Ytoj\nuA9j+EpVAsBTR0KeobdVY/oRqO9gVKuNfNxmoWptt/s63FE2OyBtkQsZ6EYHk1iR\n6J/gtGZLnkPjncFWe0Yy+wi+CyJym+C3cJJOT8LVy6IMxCXTRa0zQUwRDBNLlMGx\nm4LFiIHJ3tfdEkbbm5lgpodGa21loxLvFf037caPGg/M3Udxs5olFr6TJESxrE9q\ndis8ONY8QcNvpnOTddn1ahCUYy78fTnrKdGpR2T1j1w9cgb6ZYZ2egICLxPRKJh7\n9OCQXTxx3ahqJhRLnQIfE0dujmH3QSbo6UhYzhwPivWi7qWXLwGAP5XV0i6TF8e9\n0HmBqXLLBVEehe2QKx84dasxnQYZEp1ZIJIywBtcGepU2SWSfYQU7TVUq55Gf4i2\neZ54eycH2puK+10yGZDKpoZhbn/QpAfZPMNNO7mYy5AbwaoD/Mcy1XB0rIUvlUCA\nVK+vwFhDwCGxpQHD2z+FPfoHg0xll14V/rOs4XFdkTdJjzSYTqXGn7Xvo01UwlRX\nQFNmbJtxlR1wh0tTxBCkvJeSK9d+adnw42zYUvV0gafO3i+z8Amv4xmNB7QLK7bV\npIHzCZzgmkbxQcDmgf0i\n=nNZu\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 06ac8e9929e0ad8eafeb4b70d3361dccc3b32f6b\nparent 7efd7bb581b62ca5c25e391100fd2bef1bd53502\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1712129829 +0300\ncommitter GitHub 1712129829 +0300\n\nCreate iocsTest.txt"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/570b35bf066d6ee6a89a35f573664ff3877e2151",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/570b35bf066d6ee6a89a35f573664ff3877e2151/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/7efd7bb581b62ca5c25e391100fd2bef1bd53502"
+ }
+ ]
+ },
+ {
+ "sha": "7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "node_id": "C_kwDOJjJHSNoAKDdlZmQ3YmI1ODFiNjJjYTVjMjVlMzkxMTAwZmQyYmVmMWJkNTM1MDI",
+ "commit": {
+ "author": {
+ "name": "Aaron",
+ "email": "4101535@gmail.com",
+ "date": "2024-04-01T14:20:46Z"
+ },
+ "committer": {
+ "name": "Aaron",
+ "email": "4101535@gmail.com",
+ "date": "2024-04-01T14:20:46Z"
+ },
+ "message": "delete",
+ "tree": {
+ "sha": "1ce30d2115b78f2fbeb497834d556520559882ee",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/1ce30d2115b78f2fbeb497834d556520559882ee"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "comment_count": 0,
+ "verification": {
+ "verified": false,
+ "reason": "unsigned",
+ "signature": null,
+ "payload": null
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/7efd7bb581b62ca5c25e391100fd2bef1bd53502",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/7efd7bb581b62ca5c25e391100fd2bef1bd53502/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc"
+ }
+ ]
+ },
+ {
+ "sha": "858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "node_id": "C_kwDOJjJHSNoAKDg1OGFiMzI1ZGRiYmYyZGQ4M2E1ZDA2NWNmMzY3N2Q2Y2E2ZGJmZGM",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-04-01T13:19:14Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-04-01T13:19:14Z"
+ },
+ "message": "Create aaa.txt",
+ "tree": {
+ "sha": "3c89992513128445d0e9a7713ebb5e376ad1022f",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/3c89992513128445d0e9a7713ebb5e376ad1022f"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmCrRSCRC1aQ7uu5UhlAAAAuEQAJDs0FDy4U5y8XpByYb4UCSg\n85TI00Pc/sNcascQDwDAnddRDhs6dqTSRyBaUEsqgj9xgrMuC3lnvCQFRwyhtfmp\nt97QuzWoK28RGRSSZAuxHxD8XbprZLNEUICd846j4Uc9E7ACjlZzvfFZMrlUHKO9\ntH+3o45OZ2Y7/mXRu5w9rBKpQYyu7Baw5d0C4v952FoYJrrLqfS/jBMZ5dHG0jUb\nSQ1Pk2Ms6Gn2GqA5B8w8csRxhCh0Qr1qb6F5ZV6Je5cGW5wRr1VT5odEXtSJlCSx\nSSSobjbf2y3g6EdVccn34EJvkcfaDCYnh25/78rqPm8dKCES/sugBEkGtIFtkpoO\nbKmy9bzNDzZyTPfb0gx1/+If40X9lr+yOkCTVPIVRx8Q+267yB0GdndyajgruRwV\n/Emf185wk0x8NUtTY0gNTq4liOBN7F02gRE/c4oIp6izccG2QnnK8aUS9+1ZhZnf\n5UYnAvcj6ctHWkmD8m63GAMbWMN4G9GxCVZdFG6X/KNVR4ALG0+gfU5rxgiz/7p2\nzw/ijC8mSYhaVvUe2QL15BoBY6t5yBkE6YfQ/+oTb/fh0fJQR7C9YdDbWSn3GRSO\nyVvQReI0keMOQIQZIAfbyDYgLq8zWAT9OpyUdnXfmrtoUuFpOlzZxA7OrTytqrhH\n6ZwpUEYKShBXAG/A1dI6\n=VdzY\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 3c89992513128445d0e9a7713ebb5e376ad1022f\nparent c9c6d6d2e1e6337cd1c308dede19997868fb43f7\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1711977554 +0300\ncommitter GitHub 1711977554 +0300\n\nCreate aaa.txt"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/858ab325ddbbf2dd83a5d065cf3677d6ca6dbfdc/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/c9c6d6d2e1e6337cd1c308dede19997868fb43f7"
+ }
+ ]
+ },
+ {
+ "sha": "c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "node_id": "C_kwDOJjJHSNoAKGM5YzZkNmQyZTFlNjMzN2NkMWMzMDhkZWRlMTk5OTc4NjhmYjQzZjc",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-04-01T12:23:35Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-04-01T12:23:35Z"
+ },
+ "message": "Add files via upload",
+ "tree": {
+ "sha": "06b3a1cd92bb617b8bf4b12a3c4bb530c5deb676",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/06b3a1cd92bb617b8bf4b12a3c4bb530c5deb676"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmCqdHCRC1aQ7uu5UhlAAAM5EQACHKpqCWgvvWSTBBD/sIgGFR\nnTIK7MkAOa+hhmOFlMmI6VLhdWCdjRSc67uRPf3ezCSLPW0ilLgy81tlTX/tA1bK\n7SA8weJ6YmDu6AN84Aqm/RIyEKw9TjjjSNDgY9DOpZMa5xJsvYabET2dTYo24Pl9\nTJEbC/5qs1ULu6vBHpe4PbS70Z5CReXav8Hp1lpgKpgO/3Yi457b8D/4QOxuuOsz\nCZwDD2gm5heioEsmAl6O/oq2HEZDY1GV5x4ZtvJyaQ3ykFZhNaTZS/tx6+ejq6oF\nKMZm1JbIXhdjysWXtXhQIHxrZBw/kkw1qQIQHXcK1cdI0stDILg2pyLvUxuxDwyE\nNyqgQPubpKnTQnd22ktwqP9D4/lw7Xc5JMhwT6ugFvq31JaOfdKPxwFZEzzOslp3\njNEfD/O3rJMbSXCz4xR1zu+is2Es20nGS055LKJ+EKPmCdLw+YmB+wkYY0D5UFg5\nvyvvSrMCcFOzAAFHi/XVoG4Lh+bZxqCW4BqnVM044C983Cwk6h3JDBKiJlEmIeOp\nEaGz6T6nRojWaLvQ9RkBCA47gpIGNxMHPEzLufe9yL5N7mVZtl/79rzv6mmKyOEL\nzt5W767yuOM7zUDiK7x36H74bH1YmbFKMe2Yb4jm4GQaSUPOC8pMsGUF0kz3+w63\ni4b1T8qwsdBVbA6PoJUb\n=FhbV\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 06b3a1cd92bb617b8bf4b12a3c4bb530c5deb676\nparent b6659a7bca36355b41615986629d0ed42ade1d33\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1711974215 +0300\ncommitter GitHub 1711974215 +0300\n\nAdd files via upload"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/c9c6d6d2e1e6337cd1c308dede19997868fb43f7",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/c9c6d6d2e1e6337cd1c308dede19997868fb43f7/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "b6659a7bca36355b41615986629d0ed42ade1d33",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/b6659a7bca36355b41615986629d0ed42ade1d33",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/b6659a7bca36355b41615986629d0ed42ade1d33"
+ }
+ ]
+ },
+ {
+ "sha": "b6659a7bca36355b41615986629d0ed42ade1d33",
+ "node_id": "C_kwDOJjJHSNoAKGI2NjU5YTdiY2EzNjM1NWI0MTYxNTk4NjYyOWQwZWQ0MmFkZTFkMzM",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-04-01T07:37:44Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-04-01T07:37:44Z"
+ },
+ "message": "Add files via upload",
+ "tree": {
+ "sha": "54f59cf28a7f237b726c2feed060664b348cfed7",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/54f59cf28a7f237b726c2feed060664b348cfed7"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/b6659a7bca36355b41615986629d0ed42ade1d33",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmCmRICRC1aQ7uu5UhlAAAcfQQAHls0VvGgi4fboY4QvMDbWGX\nJgN22Ct9brfWrYk8XyqpEYL2E2/FCriGlkJT4xiKjmE8bMUzcg/HaixxpkDylrH0\njt5pgi8pRk6Z0d+wKhxkJ3rzz1gzc7LWhfPhXxb5Ahx2n8vTHJjkHBrdIvfwKZdT\nsetMcccIEik2IDaCSL/x81Y0xMZndHC1oqKVMU4ms5hMjHpL9rhXqHyiF3FbRD5d\nrhu0RH88wqk7StiRcmLqhoAzmD/hOR2uucEu43C6wg85jReN0dxNy4ypjzrQfb9L\n0Yz35A/IfKerk4uIXlVzyN/AERDpbbQfA/X2X364KEfCWAu3JFi9FQdXvSLiiegn\nwx3uB1YXqvRHLCNVG0xnEqMyHpzmJHS/RYSEezjyI8V4p/J1D2pw/YK9xjSS7CVC\n96nnWvf2ICiAAydSWWknFDEtIr8M5OBgTKw14CM6/1F7QuNITtF/VuoCt9+suODj\nvzH4OdlXHo6lq6fHnNyaRcVDL8SDlWFWPnHL5Ig7KoNM0tV9FfRKNYRFJLvJ8FBn\n2ooPawc7LEalp2yXG7oEbZ0WQhg5PZBkJqpYRaAq9IkkzVudoIqtXCL9UZHRHKQM\niLPs/d0mXCD7h+hcwTm0rGvBVzOOwW5VHMn/RXPiNt125Ck2qeBHK1a1I0W0c0b/\ns/8OpiGWMrsOaat9Ucj8\n=nGdw\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 54f59cf28a7f237b726c2feed060664b348cfed7\nparent 8c2ab6df0ca4ff33236816d19fd41535b8731c66\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1711957064 +0300\ncommitter GitHub 1711957064 +0300\n\nAdd files via upload"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/b6659a7bca36355b41615986629d0ed42ade1d33",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/b6659a7bca36355b41615986629d0ed42ade1d33",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/b6659a7bca36355b41615986629d0ed42ade1d33/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/8c2ab6df0ca4ff33236816d19fd41535b8731c66"
+ }
+ ]
+ },
+ {
+ "sha": "8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "node_id": "C_kwDOJjJHSNoAKDhjMmFiNmRmMGNhNGZmMzMyMzY4MTZkMTlmZDQxNTM1Yjg3MzFjNjY",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-03-26T08:32:47Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-03-26T08:32:47Z"
+ },
+ "message": "Add files via upload",
+ "tree": {
+ "sha": "97a526816f9fa8956cfbd2d004b0c9fe168ea243",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/97a526816f9fa8956cfbd2d004b0c9fe168ea243"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJmAogvCRC1aQ7uu5UhlAAAI88QAA54qcpZtQwuLV6MfK+WwjXy\nD0e4aN/uXo6VeCa1brOpeT1UezNeohCJ5SgnvG3KRttrdwveiRbABMdVyw4z65ap\nF+fluO3H1cB69RSQtWaJEB3Dm/ssCLsSmmFbjR1IRDa38v44WMIgZlElJZC5aXgb\nQK2fhklOT4Dul3OukWSb2gbvpPxW+Li2RwrMBjDMxyqOPz0yetryU81Vh7xU4U17\np2MEUqRub7kC7EDPrQ4LudjaL/GIlkNQb6WI631bD+xUp2+GuFbmXHQqDtikLdAI\nNgX4aza95M4fm/c+r4rFPllGza328lEuzaESgURbtFL1NNAZMBphBQpOkZ+aQnwE\naMPNvfcWvUfh2DVPf9GzeB8iilmxq5YGNn3BwC7lYHzNQ/aa5OauxVLLDwShFCMs\nFFMSzKECd+ZjG9GqRZnRyj3aaPRooMg/K+T7PNf3qqTKg5iyxCrewHZEBbp5toZq\nFXfZY7Jl4nig1TlvBXoayakSyGFe55T+Huij2YN0YuTvXStcE4BEUvhCSyamtQP2\nyO2MWaX+L5vvNHLzL3mpo92mpjSw1a84oGxEPi1HgJRzFtJLTzcK8NgsitSkua+X\nboQ/G5B7PmkwNOizYZFDGwh7mw+dWcf1qXoBT8xWyI8rCOB8X1mbwPLJG9e7g8r8\new9iHls1WlmMQ0fHenJE\n=oDKI\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree 97a526816f9fa8956cfbd2d004b0c9fe168ea243\nparent 9a07b09b8e7a14e583e27e07e4fd6098a03e28b4\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1711441967 +0200\ncommitter GitHub 1711441967 +0200\n\nAdd files via upload"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/8c2ab6df0ca4ff33236816d19fd41535b8731c66",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/8c2ab6df0ca4ff33236816d19fd41535b8731c66/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4"
+ }
+ ]
+ },
+ {
+ "sha": "9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "node_id": "C_kwDOJjJHSNoAKDlhMDdiMDliOGU3YTE0ZTU4M2UyN2UwN2U0ZmQ2MDk4YTAzZTI4YjQ",
+ "commit": {
+ "author": {
+ "name": "aaron1535",
+ "email": "117573492+aaron1535@users.noreply.github.com",
+ "date": "2024-03-19T11:25:31Z"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "date": "2024-03-19T11:25:31Z"
+ },
+ "message": "yara test",
+ "tree": {
+ "sha": "aa38ab5a1490c85cd40b9b6d9ca601efaaac7433",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/trees/aa38ab5a1490c85cd40b9b6d9ca601efaaac7433"
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/git/commits/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "comment_count": 0,
+ "verification": {
+ "verified": true,
+ "reason": "valid",
+ "signature": "-----BEGIN PGP SIGNATURE-----\n\nwsFcBAABCAAQBQJl+XYrCRC1aQ7uu5UhlAAAnY8QAG36qOWv0rSUE4PomSnM/+KO\nRn/63h++Hd08B6yAQls/OPUzRxOCYN/XrGWePuDKELHD8h6IeHdgExVJbtYk+dL8\naI+kA7HUqSvv1M5v7uQqYrKqnDlUToDQRweFuxmGkVyHUDUepBeliI+i9GJiMJ0e\nVM8JjtgX2TuNnY0ivf/0sKGpbTd7+BCSFmu1u7D/Pxo/73Wmit1NdklbuwpQyW52\ngpWRL80HPveXzl3dJNl9foBsNA0Gbc1RNydVIswVoEfMVb/RPsQXYY7QpWlw7R36\nKAN/tJklZngTIWWnCg/aaNONBlxvc7vBChEF7SgeHjQpVnY0d4KU36EHW/LZjroj\n1jojkDdOEXUVBV5CXCl2PDXHHB8txTQe5RqqsFh9gpchB/vTALOtzqpdycEC8vm2\n5ALDepqw0WptKCvRj+PuPkeGoOkBzoPneMDltqGJdA9jVHhyFbivqETtFnlDTu0X\nNjTRdRqKh70NHJzD1Y0pZr+jOj0bamY/RDsAFReybUA9KYQ8GwbyecnLs057pPOF\nd9F00yUmsYEudJb+wfehjW2nxNlQq6wHHXle7xUUi3XeEMLY6XTeZqfp4ml4e3CD\nxNz6EGiDjZdxTTLLVsOJzctL7Zk83JipnNTU2T2WL0O1AngBLmOo7zutauBWIHbr\nsmhAjNpxA4SVZgva0sxu\n=LCzj\n-----END PGP SIGNATURE-----\n",
+ "payload": "tree aa38ab5a1490c85cd40b9b6d9ca601efaaac7433\nparent a96d4cfd3cd80e128112c64691172452e5c55865\nauthor aaron1535 <117573492+aaron1535@users.noreply.github.com> 1710847531 +0200\ncommitter GitHub 1710847531 +0200\n\nyara test"
+ }
+ },
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4",
+ "comments_url": "https://api.github.com/repos/aaron1535/space_invaders/commits/9a07b09b8e7a14e583e27e07e4fd6098a03e28b4/comments",
+ "author": {
+ "login": "aaron1535",
+ "id": 117573492,
+ "node_id": "U_kgDOBwIHdA",
+ "avatar_url": "https://avatars.githubusercontent.com/u/117573492?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/aaron1535",
+ "html_url": "https://github.com/aaron1535",
+ "followers_url": "https://api.github.com/users/aaron1535/followers",
+ "following_url": "https://api.github.com/users/aaron1535/following{/other_user}",
+ "gists_url": "https://api.github.com/users/aaron1535/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/aaron1535/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/aaron1535/subscriptions",
+ "organizations_url": "https://api.github.com/users/aaron1535/orgs",
+ "repos_url": "https://api.github.com/users/aaron1535/repos",
+ "events_url": "https://api.github.com/users/aaron1535/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/aaron1535/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "committer": {
+ "login": "web-flow",
+ "id": 19864447,
+ "node_id": "MDQ6VXNlcjE5ODY0NDQ3",
+ "avatar_url": "https://avatars.githubusercontent.com/u/19864447?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-flow",
+ "html_url": "https://github.com/web-flow",
+ "followers_url": "https://api.github.com/users/web-flow/followers",
+ "following_url": "https://api.github.com/users/web-flow/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-flow/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-flow/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-flow/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-flow/orgs",
+ "repos_url": "https://api.github.com/users/web-flow/repos",
+ "events_url": "https://api.github.com/users/web-flow/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-flow/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "parents": [
+ {
+ "sha": "a96d4cfd3cd80e128112c64691172452e5c55865",
+ "url": "https://api.github.com/repos/aaron1535/space_invaders/commits/a96d4cfd3cd80e128112c64691172452e5c55865",
+ "html_url": "https://github.com/aaron1535/space_invaders/commit/a96d4cfd3cd80e128112c64691172452e5c55865"
+ }
+ ]
+ }
+]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/fetch-indicators-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/fetch-indicators-res.json
new file mode 100644
index 000000000000..a7f1f32a40aa
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/fetch-indicators-res.json
@@ -0,0 +1 @@
+[{"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File"}}, {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File"}}, {"value": "11.22.33.444", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "11.22.33.444", "type": "IP"}}, {"value": "22.33.44.555", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "22.33.44.555", "type": "IP"}}, {"value": "12.34.56.789", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, 
"firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "12.34.56.789", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "11.11.22.234", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "11.11.22.234", 
"type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "https://twitter.com/Unit42_Intel/status/1623707361184477185", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://twitter.com/Unit42_Intel/status/1623707361184477185", "type": "URL"}}, {"value": "https://twitter.com/k3dg3/status/1623333951069646857", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://twitter.com/k3dg3/status/1623333951069646857", "type": "URL"}}, {"value": "hxxp://123.234.111.134/b360802.dll", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "hxxp://123.234.111.134/b360802.dll", "type": "URL"}}, {"value": "https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/", "type": "URL", "service": "github", "fields": {"references": 
"https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/", "type": "URL"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "ehonlionetodo.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "ehonlionetodo.com", "type": "Domain"}}, {"value": "noosaerty.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "noosaerty.com", "type": "Domain"}}, {"value": "palasedelareforma.com", "type": "Domain", "service": "github", 
"fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "palasedelareforma.com", "type": "Domain"}}, {"value": "36.ps", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "36.ps", "type": "Domain"}}, {"value": "thefirstupd.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "thefirstupd.com", "type": "Domain"}}, {"value": "bazaar.abuse.ch", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "bazaar.abuse.ch", "type": "Domain"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/files-list.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/files-list.json
new file mode 100644
index 000000000000..a2006b86feb1
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/files-list.json
@@ -0,0 +1,26 @@
+[
+ {
+ "sha": "463cedd3734f00fe9c43d49f6b5e5b22ce8ce931",
+ "filename": "space_invaders-main/aaa.json",
+ "status": "added",
+ "additions": 36,
+ "deletions": 0,
+ "changes": 36,
+ "blob_url": "https://github.com/aaron1535/space_invaders/blob/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Faaa.json",
+ "raw_url": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Faaa.json",
+ "contents_url": "https://api.github.com/repos/aaron1535/space_invaders/contents/space_invaders-main%2Faaa.json?ref=9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "patch": "@@ -0,0 +1,36 @@\\n+[\\n+ {\\n+ "
+ },
+ {
+ "sha": "8b82a649186b55f8e905b7850f5dbd5b6f67f2e0",
+ "filename": "space_invaders-main/iocs-test.txt",
+ "status": "added",
+ "additions": 32,
+ "deletions": 0,
+ "changes": 32,
+ "blob_url": "https://github.com/aaron1535/space_invaders/blob/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt",
+ "raw_url": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt",
+ "contents_url": "https://api.github.com/repos/aaron1535/space_invaders/contents/space_invaders-main%2Fiocs-test.txt?ref=9a611449423b9992c126c20e47c5de4f58fc1c0e",
+ "patch": "@@ -0,0 +1,32 @@\\n+2023-02-08 (WEDNESDAY) - COBALT STRIKE FROM ICEDID (BOKBOT) INFECTION\\n+\\n+REFERENCE:\\n+\\n+- https://twitter.com/Unit42_Intel/status/1623707361184477185\\n+\\n+NOTES:\\n+\\n+- IcedID infection generated using a OneNote file reported earlier today by @k3dg3 at:\\n+ -- https://twitter.com/k3dg3/status/1623333951069646857\\n+\\n+ICEDID TRAFFIC:\\n+\\n+- 80.66.88[.]143 port 80 - ehonlionetodo[.]com - GET /\\n+- 94.232.46[.]221 port 443 - noosaerty[.]com - HTTPS traffic\\n+- 37.252.6[.]77 port 443 - palasedelareforma[.]com - HTTPS traffic\\n+\\n+COBALT STRIKE TRAFFIC:\\n+\\n+- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /36.ps1\\n+- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /b360802.dll\\n+- 79.132.128[.]191 port 443 - thefirstupd[.]com - HTTPS traffic\\n+\\n+COBALT STRIKE STAGER:\\n+\\n+- SHA256 hash: 9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd\\n+- File size: 754,688 bytes\\n+- File location: hxxp://167.172.154[.]189/b360802.dll\\n+- File location: C:\\\\Windows\\\\tasks\\\\si.dll\\n+- File description: 64-bit DLL stager for Cobalt Strike\\n+- Run method: rundll32.exe [filename],ApendMenu \\n+- Sample: https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/"
+ }
+]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_content-files-from-repo-result.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_content-files-from-repo-result.json
new file mode 100644
index 000000000000..c00a433b5fc6
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_content-files-from-repo-result.json
@@ -0,0 +1 @@
+[{"https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt": [{"https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt": "2023-02-08 (WEDNESDAY) - COBALT STRIKE FROM ICEDID (BOKBOT) INFECTION\n\nREFERENCE:\n\n- https://twitter.com/Unit42_Intel/status/1623707361184477185\n\nNOTES:\n\n- IcedID infection generated using a OneNote file reported earlier today by @k3dg3 at:\n -- https://twitter.com/k3dg3/status/1623333951069646857\n\nICEDID TRAFFIC:\n\n- 80.66.88[.]143 port 80 - ehonlionetodo[.]com - GET /\n- 94.232.46[.]221 port 443 - noosaerty[.]com - HTTPS traffic\n- 37.252.6[.]77 port 443 - palasedelareforma[.]com - HTTPS traffic\n\nCOBALT STRIKE TRAFFIC:\n\n- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /36.ps1\n- 167.172.154[.]189 port 80 - 167.172.154[.]189 - GET /b360802.dll\n- 79.132.128[.]191 port 443 - thefirstupd[.]com - HTTPS traffic\n\nCOBALT STRIKE STAGER:\n\n- SHA256 hash: 9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd\n- File size: 754,688 bytes\n- File location: hxxp://167.172.154[.]189/b360802.dll\n- File location: C:\\Windows\\tasks\\si.dll\n- File description: 64-bit DLL stager for Cobalt Strike\n- Run method: rundll32.exe [filename],ApendMenu \n- Sample: https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/\n"}]}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_indicators_command_results.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_indicators_command_results.json
new file mode 100644
index 000000000000..8d991a397e87
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/get_indicators_command_results.json
@@ -0,0 +1,66 @@
+[
+ {
+ "value": "https://url1.com",
+ "type": "URL",
+ "service": "HelloWorld",
+ "fields": {
+ "trafficlightprotocol": "RED"
+ },
+ "rawJSON": {
+ "value": "https://url1.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }
+ },
+ {
+ "value": "https://url2.com",
+ "type": "URL",
+ "service": "HelloWorld",
+ "fields": {
+ "trafficlightprotocol": "RED"
+ },
+ "rawJSON": {
+ "value": "https://url2.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }
+ },
+ {
+ "value": "https://url3.com",
+ "type": "URL",
+ "service": "HelloWorld",
+ "fields": {
+ "trafficlightprotocol": "RED"
+ },
+ "rawJSON": {
+ "value": "https://url3.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }
+ },
+ {
+ "value": "https://url4.com",
+ "type": "URL",
+ "service": "HelloWorld",
+ "fields": {
+ "trafficlightprotocol": "RED"
+ },
+ "rawJSON": {
+ "value": "https://url4.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }
+ },
+ {
+ "value": "https://url5.com",
+ "type": "URL",
+ "service": "HelloWorld",
+ "fields": {
+ "trafficlightprotocol": "RED"
+ },
+ "rawJSON": {
+ "value": "https://url5.com",
+ "type": "URL",
+ "FeedURL": "https://openphish.com/feed.txt"
+ }
+ }]
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/hr-indicators.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/hr-indicators.json
new file mode 100644
index 000000000000..f5d946b68f4d
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/hr-indicators.json
@@ -0,0 +1 @@
+[{"Value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "Type": "File"}, {"Value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "Type": "File"}, {"Value": "11.22.33.444", "Type": "IP"}, {"Value": "22.33.44.555", "Type": "IP"}, {"Value": "12.34.56.789", "Type": "IP"}, {"Value": "123.234.111.134", "Type": "IP"}, {"Value": "123.234.111.134", "Type": "IP"}, {"Value": "123.234.111.134", "Type": "IP"}, {"Value": "123.234.111.134", "Type": "IP"}, {"Value": "11.11.22.234", "Type": "IP"}, {"Value": "123.234.111.134", "Type": "IP"}, {"Value": "https://twitter.com/Unit42_Intel/status/1623707361184477185", "Type": "URL"}, {"Value": "https://twitter.com/k3dg3/status/1623333951069646857", "Type": "URL"}, {"Value": "hxxp://123.234.111.134/b360802.dll", "Type": "URL"}, {"Value": "https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/", "Type": "URL"}, {"Value": "twitter.com", "Type": "Domain"}, {"Value": "twitter.com", "Type": "Domain"}, {"Value": "ehonlionetodo.com", "Type": "Domain"}, {"Value": "noosaerty.com", "Type": "Domain"}, {"Value": "palasedelareforma.com", "Type": "Domain"}, {"Value": "36.ps", "Type": "Domain"}, {"Value": "thefirstupd.com", "Type": "Domain"}, {"Value": "bazaar.abuse.ch", "Type": "Domain"}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iocs-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iocs-res.json
new file mode 100644
index 000000000000..8eb50a220d0e
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iocs-res.json
@@ -0,0 +1 @@
+[{"value": "example@example.com", "type": "Email", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example@example.com", "type": "Email"}}, {"value": "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", "type": "File", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", "type": "File"}}, {"value": "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", "type": "File", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", "type": "File"}}, {"value": "https://twitter.com/LoremIpsum/status/1234567890123456789", "type": "URL", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "https://twitter.com/LoremIpsum/status/1234567890123456789", "type": "URL"}}, {"value": "hxxps://example.com/64HTTPS.dll", "type": "URL", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "hxxps://example.com/64HTTPS.dll", "type": "URL"}}, {"value": "exampledw.com/64HTTPS.dll", "type": "URL", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "exampledw.com/64HTTPS.dll", "type": "URL"}}, 
{"value": "https://twitter.com/LoremIpsum/status/09876543210987654321", "type": "URL", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "https://twitter.com/LoremIpsum/status/09876543210987654321", "type": "URL"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "example.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.com", "type": "Domain"}}, {"value": "example.ru", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.ru", "type": "Domain"}}, {"value": "example.ru", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.ru", "type": "Domain"}}, {"value": "example.ru", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.ru", "type": "Domain"}}, {"value": "example.su", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.su", "type": "Domain"}}, {"value": 
"example.su", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.su", "type": "Domain"}}, {"value": "64HTTPS.zip", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "64HTTPS.zip", "type": "Domain"}}, {"value": "exampledw.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "exampledw.com", "type": "Domain"}}, {"value": "exampledw.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "exampledw.com", "type": "Domain"}}, {"value": "ipsumupdate.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "ipsumupdate.com", "type": "Domain"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "LOREMIPSUM.esa4.example-out.iphmx.com", "type": "Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "LOREMIPSUM.esa4.example-out.iphmx.com", "type": "Domain"}}, {"value": "example.com", "type": 
"Domain", "service": "github", "fields": {"references": "example.com", "tags": {"owner": "example.owner", "repo": "example.repo"}, "firstseenbysource": "2024-05-12T15:30:49.330015"}, "rawJSON": {"value": "example.com", "type": "Domain"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iterator-test.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iterator-test.json
new file mode 100644
index 000000000000..a7f1f32a40aa
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/iterator-test.json
@@ -0,0 +1 @@
+[{"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File"}}, {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd", "type": "File"}}, {"value": "11.22.33.444", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "11.22.33.444", "type": "IP"}}, {"value": "22.33.44.555", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "22.33.44.555", "type": "IP"}}, {"value": "12.34.56.789", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, 
"firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "12.34.56.789", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "11.11.22.234", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "11.11.22.234", 
"type": "IP"}}, {"value": "123.234.111.134", "type": "IP", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "123.234.111.134", "type": "IP"}}, {"value": "https://twitter.com/Unit42_Intel/status/1623707361184477185", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://twitter.com/Unit42_Intel/status/1623707361184477185", "type": "URL"}}, {"value": "https://twitter.com/k3dg3/status/1623333951069646857", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://twitter.com/k3dg3/status/1623333951069646857", "type": "URL"}}, {"value": "hxxp://123.234.111.134/b360802.dll", "type": "URL", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "hxxp://123.234.111.134/b360802.dll", "type": "URL"}}, {"value": "https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/", "type": "URL", "service": "github", "fields": {"references": 
"https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "https://bazaar.abuse.ch/sample/9e68ac920bae102ccf1829ae8b8c212cc3046dd82114966c74e740df68b76fcd/", "type": "URL"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "twitter.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "twitter.com", "type": "Domain"}}, {"value": "ehonlionetodo.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "ehonlionetodo.com", "type": "Domain"}}, {"value": "noosaerty.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "noosaerty.com", "type": "Domain"}}, {"value": "palasedelareforma.com", "type": "Domain", "service": "github", 
"fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "palasedelareforma.com", "type": "Domain"}}, {"value": "36.ps", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "36.ps", "type": "Domain"}}, {"value": "thefirstupd.com", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "thefirstupd.com", "type": "Domain"}}, {"value": "bazaar.abuse.ch", "type": "Domain", "service": "github", "fields": {"references": "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "tags": {"owner": "aaron1535", "repo": "space_invaders"}, "firstseenbysource": "2024-05-20T11:05:36.984413"}, "rawJSON": {"value": "bazaar.abuse.ch", "type": "Domain"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/list-parsed-rules-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/list-parsed-rules-res.json
new file mode 100644
index 000000000000..73fc0f150ca8
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/list-parsed-rules-res.json
@@ -0,0 +1 @@
+[{"value": "Lorem_Malware_Mar18_Rule1", "type": "YARA Rule", "service": "github", "fields": {"value": "Lorem_Malware_Mar18_Rule1", "description": "Detects malware from Lorem Ipsum report", "author": "Lorem Ipsum (Example Systems)", "rulereference": "https://example.com", "sourcetimestamp": "2018-03-10", "ruleid": "12345678-1234-1234-1234-1234567890ab", "rulestrings": [{"index": "$s1", "string": "\\\\Release\\\\LoremCli.pdb", "type": "text", "modifiers": ["ascii"]}, {"index": "$s2", "string": "%snewcmd.exe", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s3", "string": "Run cmd error %d", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s4", "string": "%s~loremtmp%08x.ini", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s5", "string": "run file failed", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s6", "string": "Cmd timeout %d", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s7", "string": "2 %s %d 0 %d", "type": "text", "modifiers": ["fullword", "ascii"]}], "condition": "uint16 ( 0 ) == 0x5a4d and filesize < 200KB and 2 of them", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule Lorem_Malware_Mar18_Rule1\n{\n\tmeta:\n\t\tdescription = \"Detects malware from Lorem Ipsum report\"\n\t\tlicense = \"Detection Rule License 1.1 https://example.com/license\"\n\t\tauthor = \"Lorem Ipsum (Example Systems)\"\n\t\treference = \"https://example.com\"\n\t\tdate = \"2018-03-10\"\n\t\thash1 = \"1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef\"\n\t\tid = \"12345678-1234-1234-1234-1234567890ab\"\n\n\tstrings:\n\t\t$s1 = \"\\\\Release\\\\LoremCli.pdb\" ascii\n\t\t$s2 = \"%snewcmd.exe\" fullword ascii\n\t\t$s3 = \"Run cmd error %d\" fullword ascii\n\t\t$s4 = \"%s~loremtmp%08x.ini\" fullword ascii\n\t\t$s5 = \"run file failed\" fullword ascii\n\t\t$s6 = \"Cmd timeout %d\" fullword ascii\n\t\t$s7 = \"2 %s %d 0 %d\" fullword 
ascii\n\n\tcondition:\n\t\tuint16(0)==0x5a4d and filesize <200KB and 2 of them\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "Lorem_Malware_Mar18_Rule1", "type": "YARA Rule"}}, {"value": "Lorem_Malware_Mar18_Rule2", "type": "YARA Rule", "service": "github", "fields": {"value": "Lorem_Malware_Mar18_Rule2", "description": "Detects malware from Lorem Ipsum report", "author": "Lorem Ipsum (Example Systems)", "rulereference": "https://example.com", "sourcetimestamp": "2018-03-10", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$x1", "string": "del c:\\\\windows\\\\temp\\\\r.exe /f /q", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$x2", "string": "%s\\\\r.exe", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s1", "string": "rights.dll", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s2", "string": "\\\"%s\\\">>\\\"%s\\\"\\\\s.txt", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s3", "string": "Nwsapagent", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s4", "string": "%s\\\\r.bat", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s5", "string": "%s\\\\s.txt", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s6", "string": "runexe", "type": "text", "modifiers": ["fullword", "ascii"]}], "condition": "uint16 ( 0 ) == 0x5a4d and filesize < 200KB and ( ( pe.exports ( \"RunInstallA\" ) and pe.exports ( \"RunUninstallA\" ) ) or 1 of ( $x* ) or 2 of them )", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule Lorem_Malware_Mar18_Rule2\n{\n\tmeta:\n\t\tdescription = \"Detects malware from Lorem Ipsum report\"\n\t\tlicense = \"Detection Rule License 1.1 https://example.com/license\"\n\t\tauthor = \"Lorem Ipsum (Example Systems)\"\n\t\treference = \"https://example.com\"\n\t\tdate = \"2018-03-10\"\n\t\thash1 = 
\"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$x1 = \"del c:\\\\windows\\\\temp\\\\r.exe /f /q\" fullword ascii\n\t\t$x2 = \"%s\\\\r.exe\" fullword ascii\n\t\t$s1 = \"rights.dll\" fullword ascii\n\t\t$s2 = \"\\\"%s\\\">>\\\"%s\\\"\\\\s.txt\" fullword ascii\n\t\t$s3 = \"Nwsapagent\" fullword ascii\n\t\t$s4 = \"%s\\\\r.bat\" fullword ascii\n\t\t$s5 = \"%s\\\\s.txt\" fullword ascii\n\t\t$s6 = \"runexe\" fullword ascii\n\n\tcondition:\n\t\tuint16(0)==0x5a4d and filesize <200KB and ((pe.exports(\"RunInstallA\") and pe.exports(\"RunUninstallA\")) or 1 of ($x*) or 2 of them )\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "Lorem_Malware_Mar18_Rule2", "type": "YARA Rule"}}, {"value": "Lorem_Malware_Mar18_Rule3", "type": "YARA Rule", "service": "github", "fields": {"value": "Lorem_Malware_Mar18_Rule3", "description": "Detects malware from Lorem Ipsum report", "author": "Lorem Ipsum (Example Systems)", "rulereference": "https://example.com", "sourcetimestamp": "2018-03-10", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$x1", "string": "AAAAKQAASCMAABi+AABnhEBj8vep7VRoAEPRWLweGc0/eiDrXGajJXRxbXsTXAcZAABK4QAAPWwAACzWAAByrg==", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$x2", "string": "AAAAKQAASCMAABi+AABnhKv3kXJJousn5YzkjGF46eE3G8ZGse4B9uoqJo8Q2oF0AABK4QAAPWwAACzWAAByrg==", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$a1", "string": "http://%s/content.html?id=%s", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$a2", "string": "http://%s/main.php?ssid=%s", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$a3", "string": "http://%s/webmail.php?id=%s", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$a9", "string": "http://%s/error.html?tab=%s", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s1", 
"string": "%s\\\\~tmp.txt", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s2", "string": "%s /C %s >>\\\"%s\\\" 2>&1", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s3", "string": "DisableFirstRunCustomize", "type": "text", "modifiers": ["fullword", "ascii"]}], "condition": "uint16 ( 0 ) == 0x5a4d and filesize < 200KB and ( 1 of ( $x* ) or 2 of them )", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule Lorem_Malware_Mar18_Rule3\n{\n\tmeta:\n\t\tdescription = \"Detects malware from Lorem Ipsum report\"\n\t\tlicense = \"Detection Rule License 1.1 https://example.com/license\"\n\t\tauthor = \"Lorem Ipsum (Example Systems)\"\n\t\treference = \"https://example.com\"\n\t\tdate = \"2018-03-10\"\n\t\thash1 = \"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$x1 = \"AAAAKQAASCMAABi+AABnhEBj8vep7VRoAEPRWLweGc0/eiDrXGajJXRxbXsTXAcZAABK4QAAPWwAACzWAAByrg==\" fullword ascii\n\t\t$x2 = \"AAAAKQAASCMAABi+AABnhKv3kXJJousn5YzkjGF46eE3G8ZGse4B9uoqJo8Q2oF0AABK4QAAPWwAACzWAAByrg==\" fullword ascii\n\t\t$a1 = \"http://%s/content.html?id=%s\" fullword ascii\n\t\t$a2 = \"http://%s/main.php?ssid=%s\" fullword ascii\n\t\t$a3 = \"http://%s/webmail.php?id=%s\" fullword ascii\n\t\t$a9 = \"http://%s/error.html?tab=%s\" fullword ascii\n\t\t$s1 = \"%s\\\\~tmp.txt\" fullword ascii\n\t\t$s2 = \"%s /C %s >>\\\"%s\\\" 2>&1\" fullword ascii\n\t\t$s3 = \"DisableFirstRunCustomize\" fullword ascii\n\n\tcondition:\n\t\tuint16(0)==0x5a4d and filesize <200KB and (1 of ($x*) or 2 of them )\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "Lorem_Malware_Mar18_Rule3", "type": "YARA Rule"}}, {"value": "Lorem_Malware_Mar18_Rule4", "type": "YARA Rule", "service": "github", "fields": {"value": "Lorem_Malware_Mar18_Rule4", "description": "Detects malware from Lorem Ipsum report", "author": "Lorem Ipsum (Example Systems)", 
"rulereference": "https://example.com", "sourcetimestamp": "2018-03-10", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$s1", "string": "\\\\Release\\\\LoremTool.pdb", "type": "text", "modifiers": ["ascii"]}, {"index": "$s2", "string": "LoremTool.exe", "type": "text", "modifiers": ["fullword", "wide"]}, {"index": "$s3", "string": "Microsoft.Lorem.WebServices.Data", "type": "text", "modifiers": ["fullword", "ascii"]}, {"index": "$s4", "string": "tmp.dat", "type": "text", "modifiers": ["fullword", "wide"]}, {"index": "$s6", "string": "/v or /t is null", "type": "text", "modifiers": ["fullword", "wide"]}], "condition": "uint16 ( 0 ) == 0x5a4d and filesize < 40KB and all of them", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule Lorem_Malware_Mar18_Rule4\n{\n\tmeta:\n\t\tdescription = \"Detects malware from Lorem Ipsum report\"\n\t\tlicense = \"Detection Rule License 1.1 https://example.com/license\"\n\t\tauthor = \"Lorem Ipsum (Example Systems)\"\n\t\treference = \"https://example.com\"\n\t\tdate = \"2018-03-10\"\n\t\thash1 = \"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$s1 = \"\\\\Release\\\\LoremTool.pdb\" ascii\n\t\t$s2 = \"LoremTool.exe\" fullword wide\n\t\t$s3 = \"Microsoft.Lorem.WebServices.Data\" fullword ascii\n\t\t$s4 = \"tmp.dat\" fullword wide\n\t\t$s6 = \"/v or /t is null\" fullword wide\n\n\tcondition:\n\t\tuint16(0)==0x5a4d and filesize <40KB and all of them\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "Lorem_Malware_Mar18_Rule4", "type": "YARA Rule"}}, {"value": "clean_lorem_patchedcmd", "type": "YARA Rule", "service": "github", "fields": {"value": "clean_lorem_patchedcmd", "description": "This is a patched CMD. 
This is the CMD that LoremCli uses.", "author": "Lorem Ipsum", "rulereference": "", "sourcetimestamp": "", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$", "string": "disableCMD", "type": "text", "modifiers": ["wide"]}, {"index": "$", "string": "%WINDOWS_COPYRIGHT%", "type": "text", "modifiers": ["wide"]}, {"index": "$", "string": "Cmd.Exe", "type": "text", "modifiers": ["wide"]}, {"index": "$", "string": "Windows Command Processor", "type": "text", "modifiers": ["wide"]}], "condition": "uint16 ( 0 ) == 0x5A4D and all of them", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule clean_lorem_patchedcmd\n{\n\tmeta:\n\t\tauthor = \"Lorem Ipsum\"\n\t\tdescription = \"This is a patched CMD. This is the CMD that LoremCli uses.\"\n\t\tsha256 = \"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$ = \"disableCMD\" wide\n\t\t$ = \"%WINDOWS_COPYRIGHT%\" wide\n\t\t$ = \"Cmd.Exe\" wide\n\t\t$ = \"Windows Command Processor\" wide\n\n\tcondition:\n\t\tuint16(0)==0x5A4D and all of them\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "clean_lorem_patchedcmd", "type": "YARA Rule"}}, {"value": "malware_lorem_royalcli_1", "type": "YARA Rule", "service": "github", "fields": {"value": "malware_lorem_royalcli_1", "description": "Generic strings found in the Lorem CLI tool", "author": "Lorem Ipsum", "rulereference": "", "sourcetimestamp": "", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$", "string": "%s~loremtmp%08x.tmp", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "%s /c %s>%s", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "%snewcmd.exe", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "%shkcmd.exe", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "%s~loremtmp%08x.ini", "type": 
"text", "modifiers": ["fullword"]}, {"index": "$", "string": "myRObject", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "myWObject", "type": "text", "modifiers": ["fullword"]}, {"index": "$", "string": "2 %s %d 0 %d\\x0D\\x0A", "type": "text", "modifiers": null}, {"index": "$", "string": "2 %s %d 1 %d\\x0D\\x0A", "type": "text", "modifiers": null}, {"index": "$", "string": "%s file not exist", "type": "text", "modifiers": ["fullword"]}], "condition": "uint16 ( 0 ) == 0x5A4D and 5 of them", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule malware_lorem_royalcli_1\n{\n\tmeta:\n\t\tdescription = \"Generic strings found in the Lorem CLI tool\"\n\t\tauthor = \"Lorem Ipsum\"\n\t\tsha256 = \"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$ = \"%s~loremtmp%08x.tmp\" fullword\n\t\t$ = \"%s /c %s>%s\" fullword\n\t\t$ = \"%snewcmd.exe\" fullword\n\t\t$ = \"%shkcmd.exe\" fullword\n\t\t$ = \"%s~loremtmp%08x.ini\" fullword\n\t\t$ = \"myRObject\" fullword\n\t\t$ = \"myWObject\" fullword\n\t\t$ = \"2 %s %d 0 %d\\x0D\\x0A\"\n\t\t$ = \"2 %s %d 1 %d\\x0D\\x0A\"\n\t\t$ = \"%s file not exist\" fullword\n\n\tcondition:\n\t\tuint16(0)==0x5A4D and 5 of them\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "malware_lorem_royalcli_1", "type": "YARA Rule"}}, {"value": "malware_lorem_royalcli_2", "type": "YARA Rule", "service": "github", "fields": {"value": "malware_lorem_royalcli_2", "description": "Lorem RoyalCli backdoor", "author": "Lorem Ipsum", "rulereference": "", "sourcetimestamp": "", "ruleid": "abcdef12-3456-7890-abcd-ef1234567890", "rulestrings": [{"index": "$string1", "string": "%shkcmd.exe", "type": "text", "modifiers": ["fullword"]}, {"index": "$string2", "string": "myRObject", "type": "text", "modifiers": ["fullword"]}, {"index": "$string3", "string": "%snewcmd.exe", "type": "text", "modifiers": 
["fullword"]}, {"index": "$string4", "string": "%s~loremtmp%08x.tmp", "type": "text", "modifiers": ["fullword"]}, {"index": "$string6", "string": "myWObject", "type": "text", "modifiers": ["fullword"]}], "condition": "uint16 ( 0 ) == 0x5A4D and 2 of them", "references": "example.com", "rawrule": "```\n import \"pe\"\n\nrule malware_lorem_royalcli_2\n{\n\tmeta:\n\t\tauthor = \"Lorem Ipsum\"\n\t\tdescription = \"Lorem RoyalCli backdoor\"\n\t\tid = \"abcdef12-3456-7890-abcd-ef1234567890\"\n\n\tstrings:\n\t\t$string1 = \"%shkcmd.exe\" fullword\n\t\t$string2 = \"myRObject\" fullword\n\t\t$string3 = \"%snewcmd.exe\" fullword\n\t\t$string4 = \"%s~loremtmp%08x.tmp\" fullword\n\t\t$string6 = \"myWObject\" fullword\n\n\tcondition:\n\t\tuint16(0)==0x5A4D and 2 of them\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "malware_lorem_royalcli_2", "type": "YARA Rule"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/owner-repo-example.png b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/owner-repo-example.png
new file mode 100644
index 000000000000..d46990e9cb43
Binary files /dev/null and b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/owner-repo-example.png differ
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/relevant-files.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/relevant-files.json
new file mode 100644
index 000000000000..93f11b67041f
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/relevant-files.json
@@ -0,0 +1 @@
+["https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Faaa.json", "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fiocs-test.txt", "https://github.com/aaron1535/space_invaders/raw/9a611449423b9992c126c20e47c5de4f58fc1c0e/space_invaders-main%2Fstix-test.json"]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/split-critical-yara-rule-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/split-critical-yara-rule-res.json
new file mode 100644
index 000000000000..df0727a2e9b6
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/split-critical-yara-rule-res.json
@@ -0,0 +1 @@
+["rule Agent_BTZ_Proxy_DLL_2 {\n meta:\n description = \"Detects Agent-BTZ Proxy DLL - activeds.dll\"\n author = \"Lorem Ipsum (Nextron Systems)\"\n reference = \"http://www.example.com/new-variants-of-agent-btz-comrat-found/\"\n date = \"2017-08-07\"\n hash1 = \"73db4295c5b29958c5d93c20be9482c1efffc89fc4e5c8ba59ac9425a4657a88\"\n hash2 = \"380b0353ba8cd33da8c5e5b95e3e032e83193019e73c71875b58ec1ed389bdac\"\n hash3 = \"f27e9bba6a2635731845b4334b807c0e4f57d3b790cecdc77d8fef50629f51a2\"\n id = \"2777443d-6f63-5948-855a-e064a6e0310f\"\n strings:\n $s1 = { 38 21 38 2C 38 37 38 42 38 4D 38 58 38 63 38 6E\n 38 79 38 84 38 8F 38 9A 38 A5 38 B0 38 BB 38 C6\n 38 D1 38 DC 38 E7 38 F2 38 FD 38 08 39 13 39 1E\n 39 29 39 34 39 3F 39 4A 39 55 39 60 39 6B 39 76\n 39 81 39 8C 39 97 39 A2 39 AD 39 B8 39 C3 39 CE\n 39 D9 39 E4 39 EF 39 FA 39 05 3A 10 3A 1B 3A 26\n 3A 31 3A 3C 3A 47 3A 52 3A 5D 3A 68 3A 73 3A 7E\n 3A 89 3A 94 3A 9F 3A AA 3A B5 3A C0 3A CB 3A D6\n 3A E1 3A EC 3A F7 3A }\n $s2 = \"activeds.dll\" ascii fullword\n condition:\n uint16(0) == 0x5a4d and filesize < 200KB and all of them and pe.imphash() == \"09b7c73fbe5529e6de7137e3e8268b7b\"\n}"]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test.json
new file mode 100644
index 000000000000..f7f7f3c70b16
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test.json
@@ -0,0 +1,401 @@
+[
+ {
+ "objects": [
+ {
+ "id": "indicator--abcd1234-5678-90ef-ghij-klmnopqrstuv",
+ "pattern": "[ipv4-addr:value = '8.8.8.8' AND ipv4-addr:value = '8.8.4.4']",
+ "confidence": 10,
+ "lang": "fr",
+ "type": "alert",
+ "created": "2021-01-01T12:00:00.000Z",
+ "modified": "2021-01-02T12:00:00.000Z",
+ "name": "example_name: disrupted_data",
+ "description": "TS ID: 123456789; iType: example_type; Date First: 2021-01-01T12:00:00.000Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-01-01T12:00:00.000Z",
+ "pattern_type": "snort",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "malicious-activity"
+ ],
+ "pattern_version": "1.0",
+ "spec_version": "2.0"
+ },
+ {
+ "id": "indicator--678912b1-5a2e-4dc1-9b3f-7fe9102c3d48",
+ "pattern": "[ipv4-addr:value = '99.99.99.99' OR ipv4-addr:value = '3.3.3.3']",
+ "confidence": 70,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-11-10T12:14:52.501Z",
+ "modified": "2021-11-10T12:14:52.501Z",
+ "name": "example_ip: 99.99.99.99",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-11-02T07:26:06.274Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-11-10T12:00:33.722754Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "malicious-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--abcdefgh-ijkl-mnop-qrst-uvwxyz123456",
+ "pattern": "[ipv4-addr:value = '111.222.333.444' FOLLOWEDBY ipv4-addr:value = '5.5.5.5']",
+ "confidence": 25,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:54.684Z",
+ "modified": "2021-12-20T01:14:54.684Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:17.907Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.775627Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "malicious-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--lmnopqrs-tuvw-xyz1-2345-6789abcdef01",
+ "pattern": "[ipv4-addr:value = '1.1.1.1']",
+ "confidence": 25,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:19.858Z",
+ "modified": "2021-12-20T01:14:19.858Z",
+ "name": "example_ip: 1.1.1.1",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:46.206Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.731573Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--abcdef12-3456-789a-bcde-f123456789ab",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 25,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:10.753Z",
+ "modified": "2021-12-20T01:14:10.753Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:17.178Z; State: inactive; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.808281Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--ghijklmn-opqr-stuv-wxyz-1234567890ab",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 25,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:15.950Z",
+ "modified": "2021-12-20T01:14:15.950Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:01.051Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.818576Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "low"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--abcdefghijklm-1234-5678-abcd-ef1234567890",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:15:00.764Z",
+ "modified": "2021-12-20T01:15:00.764Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:22.790Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.791474Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--nopqrstuvwxyz-5678-9abcd-ef12-34567890abcd",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:39.995Z",
+ "modified": "2021-12-20T01:14:39.995Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:13.398Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.766866Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "low"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--qrstuvwx-yz12-3456-789a-bcdef123456",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 90,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:15:01.999Z",
+ "modified": "2021-12-20T01:15:01.999Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:32.478Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.781286Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--abcdefghijklm-1234-5678-abcd-ef1234567890",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:15:00.764Z",
+ "modified": "2021-12-20T01:15:00.764Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:22.790Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.791474Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--nopqrstuvwxyz-5678-9abcd-ef12-34567890abcd",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:14:39.995Z",
+ "modified": "2021-12-20T01:14:39.995Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:13.398Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.766866Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "low"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--qrstuvwx-yz12-3456-789a-bcdef123456",
+ "pattern": "[ipv4-addr:value = '111.222.333.444']",
+ "confidence": 90,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-12-20T01:15:01.999Z",
+ "modified": "2021-12-20T01:15:01.999Z",
+ "name": "example_ip: 111.222.333.444",
+ "description": "TS ID: 12345678910; iType: example_type; Date First: 2021-12-02T07:26:32.478Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-12-20T01:00:33.781286Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--1234abcd-5678-efgh-ijkl-mnopqrstuv"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--abcdef12-3456-7890-abcdef123456",
+ "pattern": "[ipv4-addr:value = '111.111.111.111']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-01-01T01:01:01.000Z",
+ "modified": "2021-01-01T01:01:01.000Z",
+ "name": "example_ip: 111.111.111.111",
+ "description": "TS ID: 1234567890; iType: example_type; Date First: 2021-01-01T01:01:01.000Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-01-01T01:01:01.000Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--abcdef12-3456-7890-abcdef123456"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--bcdef123-4567-890a-bcde-f1234567890",
+ "pattern": "[ipv4-addr:value = '111.111.111.111']",
+ "confidence": 75,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-01-01T01:01:01.000Z",
+ "modified": "2021-01-01T01:01:01.000Z",
+ "name": "example_ip: 111.111.111.111",
+ "description": "TS ID: 1234567890; iType: example_type; Date First: 2021-01-01T01:01:01.000Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-01-01T01:01:01.000Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--abcdef12-3456-7890-abcdef123456"
+ ],
+ "labels": [
+ "low"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--cdef1234-5678-90ab-cdef-1234567890a",
+ "pattern": "[ipv4-addr:value = '111.111.111.111']",
+ "confidence": 90,
+ "lang": "fr",
+ "type": "indicator",
+ "created": "2021-01-01T01:01:01.000Z",
+ "modified": "2021-01-01T01:01:01.000Z",
+ "name": "example_ip: 111.111.111.111",
+ "description": "TS ID: 1234567890; iType: example_type; Date First: 2021-01-01T01:01:01.000Z; State: inactive; Org: Example Org; Source: Example Source; MoreDetail: example detail",
+ "valid_from": "2021-01-01T01:01:01.000Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--abcdef12-3456-7890-abcdef123456"
+ ],
+ "labels": [
+ "medium"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "3.0",
+ "spec_version": "2.2"
+ },
+ {
+ "id": "indicator--ac4a9ca5-9f6e-4072-b568-46dbb03a3ace",
+ "pattern": "[ipv4-addr:value = '45.143.220.246']",
+ "confidence": 50,
+ "lang": "en",
+ "type": "indicator",
+ "created": "2020-06-10T01:15:10.905Z",
+ "modified": "2020-06-10T01:15:10.905Z",
+ "name": "bot_ip: 45.143.220.246",
+ "description": "TS ID: 55691320117; iType: bot_ip; Date First: 2020-06-04T10:32:46.584Z; State: active; Source: Emerging Threats - Compromised; MoreDetail: imported by user 668",
+ "valid_from": "2020-06-10T01:00:33.752185Z",
+ "pattern_type": "stix",
+ "object_marking_refs": [
+ "marking-definition--abcdef12-3456-7890-abcdef123456"
+ ],
+ "labels": [
+ "high"
+ ],
+ "indicator_types": [
+ "anomalous-activity"
+ ],
+ "pattern_version": "2.1",
+ "spec_version": "2.1"
+ },
+ {
+ "id": "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82",
+ "created": "2017-01-20T00:00:00.000Z",
+ "definition_type": "tlp",
+ "definition": {
+ "tlp": "red"
+ },
+ "type": "marking-definition",
+ "spec_version": "2.1"
+ }
+ ],
+ "more": false
+ }
+]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test_res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test_res.json
new file mode 100644
index 000000000000..0637a088a01e
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/taxii_test_res.json
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-ioc-indicators.txt b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-ioc-indicators.txt
new file mode 100644
index 000000000000..466d82e54330
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-ioc-indicators.txt
@@ -0,0 +1,62 @@
+2023-07-12 (Wednesday): Lorum Ipsum Infection with Dolor Sit Amet
+
+Reference:
+- https://twitter.com/LoremIpsum/status/1234567890123456789
+
+Associated Malware:
+1. File 1:
+ - SHA256 Hash: abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890
+ - File Size: 123,456 bytes
+ - File Type: PE32 executable (DLL) (GUI) Intel 80386 (stripped to external PDB), for MS Windows
+ - Description: 32-bit Windows DLL for Lorem Ipsum, Botnet 1234, build 567890
+ - Run Method: regsvr32.exe [filename]
+
+2. File 2:
+ - SHA256 Hash: 1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef
+ - File Size: 789,012 bytes
+ - File Type: PE32+ executable (DLL) (GUI) x86-64 (stripped to external PDB), for MS Windows
+ - File Location: hxxps://example[.]com/64HTTPS.dll
+ - Saved Location: C:\\Windows\\Tasks\\dolorsit.dll
+ - Description: 64-bit Windows DLL for Dolor Sit stager
+ - Run Method: start-process rundll32.exe -ArgumentList '/s c:\\windows\\tasks\\dolorsit.dll,recurring'
+
+Traffic from Infected Windows Host:
+
+Lorem Ipsum C2 Traffic:
+- 123.456.789[.]101 port 80 - example[.]ru - GET /uploaded/[long base64 string with backslashes and underscores].pct
+- 123.456.789[.]101 port 80 - example[.]ru - POST /uploaded/[long base64 string with backslashes and underscores].dib
+- 123.456.789[.]101 port 80 - example[.]ru - GET /uploaded/[long base64 string with backslashes and underscores].pmg
+- 123.456.789[.]101 port 80 - example[.]su - GET /uploaded/[long base64 string with backslashes and underscores].pmg
+- 123.456.789[.]101 port 80 - example[.]su - POST /uploaded/[long base64 string with backslashes and underscores].dib
+
+Lorem Ipsum Modules (Encrypted Data Binaries):
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file1.rar
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file2.rar
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file3.rar
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file4.rar
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file5.rar
+- 234.567.890[.]123 port 80 - 234.567.890[.]123 - GET /file6.rar
+
+Traffic Caused by File Module:
+- 345.678.901[.]234 port 9955 - TCP traffic
+
+Encrypted Data Binary for Dolor Sit Stager:
+- 456.789.012[.]345 port 80 - 456.789.012[.]345 - GET /01/64HTTPS.zip
+
+DLL for Dolor Sit Stager:
+- 567.890.123[.]456 port 443 - exampledw[.]com - GET /exampledw.com/64HTTPS.dll
+
+Dolor Sit C2:
+- 678.901.234[.]567 port 443 - ipsumupdate[.]com - HTTPS traffic, TLSv1.2, Let's Encrypt certificate, not valid before 2023-07-03
+
+---
+
+2020-09-21 (Monday): Infection from Ipsum Malspam
+
+Reference:
+- https://twitter.com/LoremIpsum/status/09876543210987654321
+
+Email Header Data:
+- Received: from [123.456.789.101] (unknown [123.456.789.101]) by [removed]; Mon, 21 Sep 2020 14:25:24 +0200 (CEST)
+- Received: from [234.567.890.123] (helo=LOREMIPSUM.esa4.example-out.iphmx.com) by [removed] (envelope-from example@example.com) [removed]; Mon, 21 Sep 2020 13:25:24 +0100
+- Date: Mon, 21 Sep 2020 13:25:24 +0100
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-1.yar b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-1.yar
new file mode 100644
index 000000000000..5515d914e0b4
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-1.yar
@@ -0,0 +1,167 @@
+/*
+ Yara Rule Set
+ Author: Lorem Ipsum
+ Date: 2018-03-10
+ Identifier: Lorem Ipsum Report
+ Reference: https://example.com
+*/
+
+/* Rule Set ----------------------------------------------------------------- */
+
+import "pe"
+
+rule Lorem_Malware_Mar18_Rule1 {
+ meta:
+ description = "Detects malware from Lorem Ipsum report"
+ license = "Detection Rule License 1.1 https://example.com/license"
+ author = "Lorem Ipsum (Example Systems)"
+ reference = "https://example.com"
+ date = "2018-03-10"
+ hash1 = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
+ id = "12345678-1234-1234-1234-1234567890ab"
+ strings:
+ $s1 = "\\Release\\LoremCli.pdb" ascii
+ $s2 = "%snewcmd.exe" fullword ascii
+ $s3 = "Run cmd error %d" fullword ascii
+ $s4 = "%s~loremtmp%08x.ini" fullword ascii
+ $s5 = "run file failed" fullword ascii
+ $s6 = "Cmd timeout %d" fullword ascii
+ $s7 = "2 %s %d 0 %d" fullword ascii
+ condition:
+ uint16(0) == 0x5a4d and filesize < 200KB and 2 of them
+}
+
+rule Lorem_Malware_Mar18_Rule2 {
+ meta:
+ description = "Detects malware from Lorem Ipsum report"
+ license = "Detection Rule License 1.1 https://example.com/license"
+ author = "Lorem Ipsum (Example Systems)"
+ reference = "https://example.com"
+ date = "2018-03-10"
+ hash1 = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $x1 = "del c:\\windows\\temp\\r.exe /f /q" fullword ascii
+ $x2 = "%s\\r.exe" fullword ascii
+
+ $s1 = "rights.dll" fullword ascii
+ $s2 = "\"%s\">>\"%s\"\\s.txt" fullword ascii
+ $s3 = "Nwsapagent" fullword ascii
+ $s4 = "%s\\r.bat" fullword ascii
+ $s5 = "%s\\s.txt" fullword ascii
+ $s6 = "runexe" fullword ascii
+ condition:
+ uint16(0) == 0x5a4d and filesize < 200KB and (
+ ( pe.exports("RunInstallA") and pe.exports("RunUninstallA") ) or
+ 1 of ($x*) or
+ 2 of them
+ )
+}
+
+rule Lorem_Malware_Mar18_Rule3 {
+ meta:
+ description = "Detects malware from Lorem Ipsum report"
+ license = "Detection Rule License 1.1 https://example.com/license"
+ author = "Lorem Ipsum (Example Systems)"
+ reference = "https://example.com"
+ date = "2018-03-10"
+ hash1 = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $x1 = "AAAAKQAASCMAABi+AABnhEBj8vep7VRoAEPRWLweGc0/eiDrXGajJXRxbXsTXAcZAABK4QAAPWwAACzWAAByrg==" fullword ascii
+ $x2 = "AAAAKQAASCMAABi+AABnhKv3kXJJousn5YzkjGF46eE3G8ZGse4B9uoqJo8Q2oF0AABK4QAAPWwAACzWAAByrg==" fullword ascii
+
+ $a1 = "http://%s/content.html?id=%s" fullword ascii
+ $a2 = "http://%s/main.php?ssid=%s" fullword ascii
+ $a3 = "http://%s/webmail.php?id=%s" fullword ascii
+ $a9 = "http://%s/error.html?tab=%s" fullword ascii
+
+ $s1 = "%s\\~tmp.txt" fullword ascii
+ $s2 = "%s /C %s >>\"%s\" 2>&1" fullword ascii
+ $s3 = "DisableFirstRunCustomize" fullword ascii
+ condition:
+ uint16(0) == 0x5a4d and filesize < 200KB and (
+ 1 of ($x*) or
+ 2 of them
+ )
+}
+
+rule Lorem_Malware_Mar18_Rule4 {
+ meta:
+ description = "Detects malware from Lorem Ipsum report"
+ license = "Detection Rule License 1.1 https://example.com/license"
+ author = "Lorem Ipsum (Example Systems)"
+ reference = "https://example.com"
+ date = "2018-03-10"
+ hash1 = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $s1 = "\\Release\\LoremTool.pdb" ascii
+ $s2 = "LoremTool.exe" fullword wide
+ $s3 = "Microsoft.Lorem.WebServices.Data" fullword ascii
+ $s4 = "tmp.dat" fullword wide
+ $s6 = "/v or /t is null" fullword wide
+ condition:
+ uint16(0) == 0x5a4d and filesize < 40KB and all of them
+}
+
+/*
+ Identifier: Lorem = Ipsum = Dolor
+ Author: Lorem Ipsum Group
+ Revised by Lorem Ipsum for performance reasons
+ see https://example.com
+ > some rules were untightened
+ Date: 2018-03-09
+ Reference: https://example.com
+*/
+
+rule clean_lorem_patchedcmd {
+ meta:
+ author = "Lorem Ipsum"
+ description = "This is a patched CMD. This is the CMD that LoremCli uses."
+ sha256 = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $ = "disableCMD" wide
+ $ = "%WINDOWS_COPYRIGHT%" wide
+ $ = "Cmd.Exe" wide
+ $ = "Windows Command Processor" wide
+ condition:
+ uint16(0) == 0x5A4D and all of them
+}
+
+rule malware_lorem_royalcli_1 {
+ meta:
+ description = "Generic strings found in the Lorem CLI tool"
+ author = "Lorem Ipsum"
+ sha256 = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $ = "%s~loremtmp%08x.tmp" fullword
+ $ = "%s /c %s>%s" fullword
+ $ = "%snewcmd.exe" fullword
+ $ = "%shkcmd.exe" fullword
+ $ = "%s~loremtmp%08x.ini" fullword
+ $ = "myRObject" fullword
+ $ = "myWObject" fullword
+ $ = "2 %s %d 0 %d\x0D\x0A"
+ $ = "2 %s %d 1 %d\x0D\x0A"
+ $ = "%s file not exist" fullword
+ condition:
+ uint16(0) == 0x5A4D and 5 of them
+}
+
+rule malware_lorem_royalcli_2 {
+ meta:
+ author = "Lorem Ipsum"
+ description = "Lorem RoyalCli backdoor"
+ id = "abcdef12-3456-7890-abcd-ef1234567890"
+ strings:
+ $string1 = "%shkcmd.exe" fullword
+ $string2 = "myRObject" fullword
+ $string3 = "%snewcmd.exe" fullword
+ $string4 = "%s~loremtmp%08x.tmp" fullword
+ $string6 = "myWObject" fullword
+ condition:
+ uint16(0) == 0x5A4D and 2 of them
+}
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-critical-rule.yar b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-critical-rule.yar
new file mode 100644
index 000000000000..ae304e419fe9
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test-split-yara-critical-rule.yar
@@ -0,0 +1,24 @@
+rule Agent_BTZ_Proxy_DLL_2 {
+ meta:
+ description = "Detects Agent-BTZ Proxy DLL - activeds.dll"
+ author = "Lorem Ipsum (Nextron Systems)"
+ reference = "http://www.example.com/new-variants-of-agent-btz-comrat-found/"
+ date = "2017-08-07"
+ hash1 = "73db4295c5b29958c5d93c20be9482c1efffc89fc4e5c8ba59ac9425a4657a88"
+ hash2 = "380b0353ba8cd33da8c5e5b95e3e032e83193019e73c71875b58ec1ed389bdac"
+ hash3 = "f27e9bba6a2635731845b4334b807c0e4f57d3b790cecdc77d8fef50629f51a2"
+ id = "2777443d-6f63-5948-855a-e064a6e0310f"
+ strings:
+ $s1 = { 38 21 38 2C 38 37 38 42 38 4D 38 58 38 63 38 6E
+ 38 79 38 84 38 8F 38 9A 38 A5 38 B0 38 BB 38 C6
+ 38 D1 38 DC 38 E7 38 F2 38 FD 38 08 39 13 39 1E
+ 39 29 39 34 39 3F 39 4A 39 55 39 60 39 6B 39 76
+ 39 81 39 8C 39 97 39 A2 39 AD 39 B8 39 C3 39 CE
+ 39 D9 39 E4 39 EF 39 FA 39 05 3A 10 3A 1B 3A 26
+ 3A 31 3A 3C 3A 47 3A 52 3A 5D 3A 68 3A 73 3A 7E
+ 3A 89 3A 94 3A 9F 3A AA 3A B5 3A C0 3A CB 3A D6
+ 3A E1 3A EC 3A F7 3A }
+ $s2 = "activeds.dll" ascii fullword
+ condition:
+ uint16(0) == 0x5a4d and filesize < 200KB and all of them and pe.imphash() == "09b7c73fbe5529e6de7137e3e8268b7b"
+}
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test_yara_parser_res.txt b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test_yara_parser_res.txt
new file mode 100644
index 000000000000..0e9695e6ad8e
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/test_yara_parser_res.txt
@@ -0,0 +1 @@
+[{"value": "MAL_Ransomware_GermanWiper", "type": "YARA Rule", "service": "github", "fields": {"value": "MAL_Ransomware_GermanWiper", "description": "Detects RansomWare GermanWiper in Memory or in unpacked state", "author": "Frank Boldewin (@r3c0nst), modified by Florian Roth", "rulereference": "https://twitter.com/r3c0nst/status/1158326526766657538", "sourcetimestamp": "2019-08-05", "id": "e7587691-f69a-53e7-bab2-875179fbfa19", "rulestrings": [{"index": "$x_Mutex1", "string": "HSDFSD-HFSD-3241-91E7-ASDGSDGHH", "type": "text", "modifiers": ["ascii"]}, {"index": "$x_Mutex2", "string": "cFgxTERNWEVhM2V", "type": "text", "modifiers": ["ascii"]}, {"index": "$PurgeCode", "string": "{ 6a 00 8b 47 08 50 6a 00 6a 01 e8 ?? ?? ?? ??\n 50 e8 ?? ?? ?? ?? 8b f0 8b d7 8b c3 e8 }", "type": "byte", "modifiers": ""}, {"index": "$ProcessKill1", "string": "sqbcoreservice.exe", "type": "text", "modifiers": ["ascii"]}, {"index": "$ProcessKill2", "string": "isqlplussvc.exe", "type": "text", "modifiers": ["ascii"]}, {"index": "$KillShadowCopies", "string": "vssadmin.exe delete shadows", "type": "text", "modifiers": ["ascii"]}, {"index": "$Domain1", "string": "cdnjs.cloudflare.com", "type": "text", "modifiers": ["ascii"]}, {"index": "$Domain2", "string": "expandingdelegation.top", "type": "text", "modifiers": ["ascii"]}, {"index": "$RansomNote", "string": "Entschluesselungs_Anleitung.html", "type": "text", "modifiers": ["ascii"]}], "condition": "uint16 ( 0 ) == 0x5A4D and filesize < 1000KB and ( 1 of ( $x* ) or 3 of them )", "references": "example.com", "raw rule": "``` \n rule MAL_Ransomware_GermanWiper {\n meta:\n description = \"Detects RansomWare GermanWiper in Memory or in unpacked state\"\n author = \"Frank Boldewin (@r3c0nst), modified by Florian Roth\"\n reference = \"https://twitter.com/r3c0nst/status/1158326526766657538\"\n date = \"2019-08-05\"\n hash_packed = \"41364427dee49bf544dcff61a6899b3b7e59852435e4107931e294079a42de7c\"\n hash_unpacked = 
\"708967cad421bb2396017bdd10a42e6799da27e29264f4b5fb095c0e3503e447\"\n\n id = \"e7587691-f69a-53e7-bab2-875179fbfa19\"\n strings:\n $x_Mutex1 = \"HSDFSD-HFSD-3241-91E7-ASDGSDGHH\" ascii\n $x_Mutex2 = \"cFgxTERNWEVhM2V\" ascii\n\n // code patterns for process kills\n $PurgeCode = { 6a 00 8b 47 08 50 6a 00 6a 01 e8 ?? ?? ?? ??\n 50 e8 ?? ?? ?? ?? 8b f0 8b d7 8b c3 e8 }\n $ProcessKill1 = \"sqbcoreservice.exe\" ascii\n $ProcessKill2 = \"isqlplussvc.exe\" ascii\n $KillShadowCopies = \"vssadmin.exe delete shadows\" ascii\n $Domain1 = \"cdnjs.cloudflare.com\" ascii\n $Domain2 = \"expandingdelegation.top\" ascii\n $RansomNote = \"Entschluesselungs_Anleitung.html\" ascii\n condition:\n uint16(0) == 0x5A4D and filesize < 1000KB and\n ( 1 of ($x*) or 3 of them )\n} \n ```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "MAL_Ransomware_GermanWiper", "type": "YARA Rule"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1-res.json
new file mode 100644
index 000000000000..7dd6e858bf1c
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1-res.json
@@ -0,0 +1 @@
+[{"value": "MAL_Ransomware_GermanWiper", "type": "YARA Rule", "service": "github", "fields": {"value": "MAL_Ransomware_GermanWiper", "description": "example of yara rule for testing", "author": "author example of yara rule for testing", "rulereference": "https://twitter.com/example", "sourcetimestamp": "2019-08-05", "ruleid": "e7587691-f69a-53e7-bab2-875179fbfa19", "rulestrings": [{"index": "$x_Mutex1", "string": "HSDFSD-HFSD-3241-91E7-ASDGSDGHH", "type": "text", "modifiers": ["ascii"]}, {"index": "$x_Mutex2", "string": "cFgxTERNWEVhM2V", "type": "text", "modifiers": ["ascii"]}, {"index": "$PurgeCode", "string": "{ 6a 00 8b 47 08 50 6a 00 6a 01 e8 ?? ?? ?? ??\n 50 e8 ?? ?? ?? ?? 8b f0 8b d7 8b c3 e8 }", "type": "byte", "modifiers": null}, {"index": "$ProcessKill1", "string": "sqbcoreservice.exe", "type": "text", "modifiers": ["ascii"]}, {"index": "$ProcessKill2", "string": "isqlplussvc.exe", "type": "text", "modifiers": ["ascii"]}, {"index": "$KillShadowCopies", "string": "vssadmin.exe delete shadows", "type": "text", "modifiers": ["ascii"]}, {"index": "$Domain1", "string": "cdnjs.cloudflare.com", "type": "text", "modifiers": ["ascii"]}, {"index": "$Domain2", "string": "expandingdelegation.top", "type": "text", "modifiers": ["ascii"]}, {"index": "$RansomNote", "string": "Entschluesselungs_Anleitung.html", "type": "text", "modifiers": ["ascii"]}], "condition": "uint16 ( 0 ) == 0x5A4D and filesize < 1000KB and ( 1 of ( $x* ) or 3 of them )", "references": "example.com", "rawrule": "```\n rule MAL_Ransomware_GermanWiper\n{\n\tmeta:\n\t\tdescription = \"example of yara rule for testing\"\n\t\tauthor = \"author example of yara rule for testing\"\n\t\treference = \"https://twitter.com/example\"\n\t\tdate = \"2019-08-05\"\n\t\thash_packed = \"41364427dee49bf544dcff61a6899b3b7e59852435e4107931e294079a42de7c\"\n\t\thash_unpacked = \"708967cad421bb2396017bdd10a42e6799da27e29264f4b5fb095c0e3503e447\"\n\t\tid = 
\"e7587691-f69a-53e7-bab2-875179fbfa19\"\n\n\tstrings:\n\t\t$x_Mutex1 = \"HSDFSD-HFSD-3241-91E7-ASDGSDGHH\" ascii\n\t\t$x_Mutex2 = \"cFgxTERNWEVhM2V\" ascii\n\t\t$PurgeCode = { 6a 00 8b 47 08 50 6a 00 6a 01 e8 ?? ?? ?? ??\n 50 e8 ?? ?? ?? ?? 8b f0 8b d7 8b c3 e8 }\n\t\t$ProcessKill1 = \"sqbcoreservice.exe\" ascii\n\t\t$ProcessKill2 = \"isqlplussvc.exe\" ascii\n\t\t$KillShadowCopies = \"vssadmin.exe delete shadows\" ascii\n\t\t$Domain1 = \"cdnjs.cloudflare.com\" ascii\n\t\t$Domain2 = \"expandingdelegation.top\" ascii\n\t\t$RansomNote = \"Entschluesselungs_Anleitung.html\" ascii\n\n\tcondition:\n\t\tuint16(0)==0x5A4D and filesize <1000KB and (1 of ($x*) or 3 of them )\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "MAL_Ransomware_GermanWiper", "type": "YARA Rule"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1.yar b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1.yar
new file mode 100644
index 000000000000..3fa85740d37b
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-1.yar
@@ -0,0 +1,27 @@
+rule MAL_Ransomware_GermanWiper {
+ meta:
+ description = "example of yara rule for testing"
+ author = "author example of yara rule for testing"
+ reference = "https://twitter.com/example"
+ date = "2019-08-05"
+ hash_packed = "41364427dee49bf544dcff61a6899b3b7e59852435e4107931e294079a42de7c"
+ hash_unpacked = "708967cad421bb2396017bdd10a42e6799da27e29264f4b5fb095c0e3503e447"
+
+ id = "e7587691-f69a-53e7-bab2-875179fbfa19"
+ strings:
+ $x_Mutex1 = "HSDFSD-HFSD-3241-91E7-ASDGSDGHH" ascii
+ $x_Mutex2 = "cFgxTERNWEVhM2V" ascii
+
+ // code patterns for process kills
+ $PurgeCode = { 6a 00 8b 47 08 50 6a 00 6a 01 e8 ?? ?? ?? ??
+ 50 e8 ?? ?? ?? ?? 8b f0 8b d7 8b c3 e8 }
+ $ProcessKill1 = "sqbcoreservice.exe" ascii
+ $ProcessKill2 = "isqlplussvc.exe" ascii
+ $KillShadowCopies = "vssadmin.exe delete shadows" ascii
+ $Domain1 = "cdnjs.cloudflare.com" ascii
+ $Domain2 = "expandingdelegation.top" ascii
+ $RansomNote = "Entschluesselungs_Anleitung.html" ascii
+ condition:
+ uint16(0) == 0x5A4D and filesize < 1000KB and
+ ( 1 of ($x*) or 3 of them )
+}
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2-res.json
new file mode 100644
index 000000000000..0637a088a01e
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2-res.json
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2.yar b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2.yar
new file mode 100644
index 000000000000..34a2a592a110
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-2.yar
@@ -0,0 +1,18 @@
+rule Lorem_Ipsum {
+ meta:
+ description = "broken yara rule test"
+ license = "https://creativecommons.org/licenses/by-nc/4.0/"
+ author = "Lorem ipsum"
+ reference = "https://www."
+ date = "2022-02-07"
+ hash1 = "fc5a58bf0fce9cb96f35ee76842ff17816fe302e3164bc7c6a5ef46f6eff67ed"
+ id = "039e5d41-eadb-5c53-82cd-20ffd4105326"
+ strings:
+ $lznt1_compressed_pe_header_small = { FC B9 00 4D 5A 90 } // This is the lznt1 compressed PE header
+
+ $lznt1_compressed_pe_header_large_1 = { FC B9 00 4D 5A 90 00 03 00 00 00 82 04 00 30 FF FF 00 }
+ $lznt1_compressed_pe_header_large_2 = { 00 b8 00 38 0d 01 00 40 04 38 19 00 10 01 00 00 }
+ $lznt1_compressed_pe_header_large_3 = { 00 0e 1f ba 0e 00 b4 09 cd 00 21 b8 01 4c cd 21 }
+ $lznt1_compressed_pe_header_large_4 = { 54 68 00 69 73 20 70 72 6f 67 72 00 61 6d 20 63 }
+ $lznt1_compressed_pe_header_large_5 = { 61 6e 6e 6f 00 74 20 62 65 20 72 75 6e 00 20 69 }
+ $lznt1_compressed_pe_header_large_6 = { 6e 20 44 4f 53 20 00 6d 6f 64 65 2e 0d 0d 0a 02 }
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3-res.json b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3-res.json
new file mode 100644
index 000000000000..ab6739287be5
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3-res.json
@@ -0,0 +1 @@
+[{"value": "Agent_BTZ_Proxy_DLL_2", "type": "YARA Rule", "service": "github", "fields": {"value": "Agent_BTZ_Proxy_DLL_2", "description": "Lorem ipsum", "author": "edge case", "rulereference": "http://www.test.test.test", "sourcetimestamp": "2017-08-07", "ruleid": "2777443d-6f63-5948-855a-e064a6e0310f", "rulestrings": [{"index": "$s1", "string": "{ 38 21 38 2C 38 37 38 42 38 4D 38 58 38 63 38 6E\n 38 79 38 84 38 8F 38 9A 38 A5 38 B0 38 BB 38 C6\n 38 D1 38 DC 38 E7 38 F2 38 FD 38 08 39 13 39 1E\n 39 29 39 34 39 3F 39 4A 39 55 39 60 39 6B 39 76\n 39 81 39 8C 39 97 39 A2 39 AD 39 B8 39 C3 39 CE\n 39 D9 39 E4 39 EF 39 FA 39 05 3A 10 3A 1B 3A 26\n 3A 31 3A 3C 3A 47 3A 52 3A 5D 3A 68 3A 73 3A 7E\n 3A 89 3A 94 3A 9F 3A AA 3A B5 3A C0 3A CB 3A D6\n 3A E1 3A EC 3A F7 3A }", "type": "byte", "modifiers": null}, {"index": "$s2", "string": "activeds.dll", "type": "text", "modifiers": ["ascii", "fullword"]}], "condition": "uint16 ( 0 ) == 0x5a4d and filesize < 200KB and all of them and pe.imphash ( ) == \"09b7c73fbe5529e6de7137e3e8268b7b\"", "references": "example.com", "rawrule": "```\n rule Agent_BTZ_Proxy_DLL_2\n{\n\tmeta:\n\t\tdescription = \"Lorem ipsum\"\n\t\tauthor = \"edge case\"\n\t\treference = \"http://www.test.test.test\"\n\t\tdate = \"2017-08-07\"\n\t\thash1 = \"73db4295c5b29958c5d93c20be9482c1efffc89fc4e5c8ba59ac9425a4657a88\"\n\t\thash2 = \"380b0353ba8cd33da8c5e5b95e3e032e83193019e73c71875b58ec1ed389bdac\"\n\t\thash3 = \"f27e9bba6a2635731845b4334b807c0e4f57d3b790cecdc77d8fef50629f51a2\"\n\t\tid = \"2777443d-6f63-5948-855a-e064a6e0310f\"\n\n\tstrings:\n\t\t$s1 = { 38 21 38 2C 38 37 38 42 38 4D 38 58 38 63 38 6E\n 38 79 38 84 38 8F 38 9A 38 A5 38 B0 38 BB 38 C6\n 38 D1 38 DC 38 E7 38 F2 38 FD 38 08 39 13 39 1E\n 39 29 39 34 39 3F 39 4A 39 55 39 60 39 6B 39 76\n 39 81 39 8C 39 97 39 A2 39 AD 39 B8 39 C3 39 CE\n 39 D9 39 E4 39 EF 39 FA 39 05 3A 10 3A 1B 3A 26\n 3A 31 3A 3C 3A 47 3A 52 3A 5D 3A 68 3A 73 3A 7E\n 3A 89 3A 94 3A 9F 3A AA 3A B5 3A C0 3A CB 3A D6\n 3A E1 
3A EC 3A F7 3A }\n\t\t$s2 = \"activeds.dll\" ascii fullword\n\n\tcondition:\n\t\tuint16(0)==0x5a4d and filesize <200KB and all of them and pe.imphash()==\"09b7c73fbe5529e6de7137e3e8268b7b\"\n}\n \n```"}, "score": 0, "firstseenbysource": "2024-05-12T15:30:49.330015", "rawJSON": {"value": "Agent_BTZ_Proxy_DLL_2", "type": "YARA Rule"}}]
\ No newline at end of file
diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3.yar b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3.yar
new file mode 100644
index 000000000000..51f46784fdf1
--- /dev/null
+++ b/Packs/FeedGitHub/Integrations/FeedGitHub/test_data/yara-rule-3.yar
@@ -0,0 +1,24 @@
+rule Agent_BTZ_Proxy_DLL_2 {
+ meta:
+ description = "Lorem ipsum"
+ author = "edge case"
+ reference = "http://www.test.test.test"
+ date = "2017-08-07"
+ hash1 = "73db4295c5b29958c5d93c20be9482c1efffc89fc4e5c8ba59ac9425a4657a88"
+ hash2 = "380b0353ba8cd33da8c5e5b95e3e032e83193019e73c71875b58ec1ed389bdac"
+ hash3 = "f27e9bba6a2635731845b4334b807c0e4f57d3b790cecdc77d8fef50629f51a2"
+ id = "2777443d-6f63-5948-855a-e064a6e0310f"
+ strings:
+ $s1 = { 38 21 38 2C 38 37 38 42 38 4D 38 58 38 63 38 6E
+ 38 79 38 84 38 8F 38 9A 38 A5 38 B0 38 BB 38 C6
+ 38 D1 38 DC 38 E7 38 F2 38 FD 38 08 39 13 39 1E
+ 39 29 39 34 39 3F 39 4A 39 55 39 60 39 6B 39 76
+ 39 81 39 8C 39 97 39 A2 39 AD 39 B8 39 C3 39 CE
+ 39 D9 39 E4 39 EF 39 FA 39 05 3A 10 3A 1B 3A 26
+ 3A 31 3A 3C 3A 47 3A 52 3A 5D 3A 68 3A 73 3A 7E
+ 3A 89 3A 94 3A 9F 3A AA 3A B5 3A C0 3A CB 3A D6
+ 3A E1 3A EC 3A F7 3A }
+ $s2 = "activeds.dll" ascii fullword
+ condition:
+ uint16(0) == 0x5a4d and filesize < 200KB and all of them and pe.imphash() == "09b7c73fbe5529e6de7137e3e8268b7b"
+}
\ No newline at end of file
diff --git a/Packs/FeedGitHub/README.md b/Packs/FeedGitHub/README.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/FeedGitHub/pack_metadata.json b/Packs/FeedGitHub/pack_metadata.json
new file mode 100644
index 000000000000..c2232210d668
--- /dev/null
+++ b/Packs/FeedGitHub/pack_metadata.json
@@ -0,0 +1,25 @@
+{
+ "name": "GitHub Feed",
+ "description": "A feed to ingest indicators of compromise from GitHub repositories. The feed supports general extraction of IOCs, extracting from STIX data format and parsing of YARA Rules out of the box.",
+ "support": "xsoar",
+ "currentVersion": "1.0.0",
+ "author": "Cortex XSOAR",
+ "url": "https://www.paloaltonetworks.com/cortex",
+ "email": "",
+ "categories": [
+ "Data Enrichment & Threat Intelligence"
+ ],
+ "tags": [
+ "Free Feed",
+ "Plug & Fetch",
+ "Generic Feed"
+ ],
+ "useCases": [],
+ "keywords": [
+ "YARA"
+ ],
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.py b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.py
index e3eae65eb1d3..4e12ecc7b74a 100644
--- a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.py
+++ b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.py
@@ -272,51 +272,29 @@ def map_fields_by_type(indicator_type: str, indicator_json: dict):
'mitreid': mitre_id,
'tags': tags,
'tlp': tlp,
+ 'description': indicator_json['description'],
}
mapping_by_type = {
"Attack Pattern": {
'killchainphases': kill_chain_phases,
- 'description': indicator_json.get('description'),
'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
},
"Intrusion Set": {
- 'description': indicator_json.get('description'),
+ 'aliases': indicator_json.get('aliases')
+ },
+ "Threat Actor": {
'aliases': indicator_json.get('aliases')
},
"Malware": {
'aliases': indicator_json.get('x_mitre_aliases'),
- 'description': indicator_json.get('description'),
'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
-
},
"Tool": {
'aliases': indicator_json.get('x_mitre_aliases'),
- 'description': indicator_json.get('description'),
- 'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
- },
- "Course of Action": {
- 'description': indicator_json.get('description')
- },
-
- "STIX Attack Pattern": {
- 'stixkillchainphases': kill_chain_phases,
- 'stixdescription': indicator_json.get('description'),
- 'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
- },
- "STIX Malware": {
- 'stixaliases': indicator_json.get('x_mitre_aliases'),
- 'stixdescription': indicator_json.get('description'),
- 'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
-
- },
- "STIX Tool": {
- 'stixaliases': indicator_json.get('x_mitre_aliases'),
- 'stixdescription': indicator_json.get('description'),
'operatingsystemrefs': indicator_json.get('x_mitre_platforms')
},
"Campaign": {
- 'description': indicator_json.get('description'),
'aliases': indicator_json.get('aliases')
}
}
@@ -671,6 +649,8 @@ def main():
create_relationships = argToBoolean(params.get('create_relationships'))
command = demisto.command()
demisto.info(f'Command being called is {command}')
+ if params.get('switch_intrusion_set_to_threat_actor', ''):
+ MITRE_TYPE_TO_DEMISTO_TYPE['intrusion-set'] = ThreatIntel.ObjectsNames.THREAT_ACTOR
try:
client = Client(url, proxies, verify_certificate, tags, tlp_color)
diff --git a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.yml b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.yml
index 77852fca4d71..8ac07345d4b0 100644
--- a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.yml
+++ b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2.yml
@@ -61,12 +61,26 @@ configuration:
name: feedFetchInterval
type: 19
required: false
+- additionalinfo: Supports CSV values.
+ display: Tags
+ name: feedTags
+ type: 0
+ required: false
+- display: Create relationships
+ name: create_relationships
+ type: 8
+ required: false
+ defaultvalue: 'true'
+- defaultvalue: 'false'
+ display: Save intrusion sets as threat actor indicator types
+ name: switch_intrusion_set_to_threat_actor
+ type: 8
- additionalinfo: When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system.
- defaultvalue: 'false'
display: Bypass exclusion list
name: feedBypassExclusionList
type: 8
required: false
+ defaultvalue: 'false'
- display: Use system proxy settings
name: proxy
type: 8
@@ -76,16 +90,6 @@ configuration:
name: insecure
type: 8
required: false
-- additionalinfo: Supports CSV values.
- display: Tags
- name: feedTags
- type: 0
- required: false
-- defaultvalue: 'true'
- display: Create relationships
- name: create_relationships
- type: 8
- required: false
description: Use the MITRE ATT&CK® feed to fetch MITRE’s Adversarial Tactics, Techniques, and Common Knowledge (ATT&CK®) content. MITRE ATT&CK is a globally-accessible knowledge base of adversary tactics and techniques based on real-world observations. The ATT&CK knowledge base is used as a foundation for the development of specific threat models and methodologies in the private sector, in government, and in the cybersecurity product and service community.
display: MITRE ATT&CK
name: MITRE ATT&CK v2
@@ -166,7 +170,7 @@ script:
- contextPath: MITREATTACK.value
description: MITRE ATTACK Attack Pattern value.
type: String
- dockerimage: demisto/taxii2:1.0.0.90938
+ dockerimage: demisto/taxii2:1.0.0.96248
feed: true
runonce: false
script: '-'
diff --git a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2_test.py b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2_test.py
index d3be7e5aef67..807c8ae4a88e 100644
--- a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2_test.py
+++ b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/FeedMitreAttackv2_test.py
@@ -3,7 +3,7 @@
from stix2 import TAXIICollectionSource, parse
import demistomock as demisto # noqa: F401
from test_data.mitre_test_data import ATTACK_PATTERN, COURSE_OF_ACTION, INTRUSION_SET, MALWARE, TOOL, ID_TO_NAME, \
- RELATION, STIX_TOOL, STIX_MALWARE, STIX_ATTACK_PATTERN, MALWARE_LIST_WITHOUT_PREFIX, MALWARE_LIST_WITH_PREFIX, \
+ RELATION, MALWARE_LIST_WITHOUT_PREFIX, MALWARE_LIST_WITH_PREFIX, \
INDICATORS_LIST, NEW_INDICATORS_LIST, MITRE_ID_TO_MITRE_NAME, OLD_ID_TO_NAME, NEW_ID_TO_NAME, RELATIONSHIP_ENTITY, \
CAMPAIGN, ATTACK_PATTERNS
@@ -118,9 +118,6 @@ def test_is_indicator_deprecated_or_revoked(indicator, expected_result):
('Intrusion Set', INTRUSION_SET.get('response'), INTRUSION_SET.get('map_result')),
('Malware', MALWARE.get('response'), MALWARE.get('map_result')),
('Tool', TOOL.get('response'), TOOL.get('map_result')),
- ('STIX Tool', STIX_TOOL.get('response'), STIX_TOOL.get('map_result')),
- ('STIX Malware', STIX_MALWARE.get('response'), STIX_MALWARE.get('map_result')),
- ('STIX Attack Pattern', STIX_ATTACK_PATTERN.get('response'), STIX_ATTACK_PATTERN.get('map_result')),
('Campaign', CAMPAIGN.get('response'), CAMPAIGN.get('map_result')),
])
def test_map_fields_by_type(indicator_type, indicator_json, expected_result):
diff --git a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/test_data/mitre_test_data.py b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/test_data/mitre_test_data.py
index 9007fdd07fd0..fb2055d0fb99 100644
--- a/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/test_data/mitre_test_data.py
+++ b/Packs/FeedMitreAttackv2/Integrations/FeedMitreAttackv2/test_data/mitre_test_data.py
@@ -161,9 +161,9 @@
'map_result': {
'stixid': 'attack-pattern--01a5a209-b94c-450b-b7f9-946497d91055',
'firstseenbysource': '2017-05-31T21:30:44.329Z',
- 'stixkillchainphases': ['Defense Evasion', 'Privilege Escalation'],
+ 'killchainphases': ['Defense Evasion', 'Privilege Escalation'],
'modified': "2020-05-13T22:50:51.258Z",
- 'stixdescription': "Adversaries may abuse Windows Management Instrumentation (WMI) to achieve execution.",
+ 'description': "Adversaries may abuse Windows Management Instrumentation (WMI) to achieve execution.",
'operatingsystemrefs': ['Windows'],
'mitreid': 'T1047',
'publications': [{'link': "https://en.wikipedia.org/wiki/Server_Message_Block",
diff --git a/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_37.md b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_37.md
new file mode 100644
index 000000000000..c254fedc9065
--- /dev/null
+++ b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_37.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### MITRE ATT&CK
+
+Fixed an issue in the **TAXII2ApiModule** related to *TAXII2 server* integration.
diff --git a/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_38.md b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_38.md
new file mode 100644
index 000000000000..8e303a6128cf
--- /dev/null
+++ b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_38.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### MITRE ATT&CK
+- Updated the Docker image to: *demisto/taxii2:1.0.0.96248*.
+- Added a parameter to allow the user to convert MITRE intrusion-sets into Cortex XSOAR threat-actors.
diff --git a/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_39.md b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_39.md
new file mode 100644
index 000000000000..ec25692a8be2
--- /dev/null
+++ b/Packs/FeedMitreAttackv2/ReleaseNotes/1_1_39.md
@@ -0,0 +1,3 @@
+## MITRE ATT&CK
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/FeedMitreAttackv2/pack_metadata.json b/Packs/FeedMitreAttackv2/pack_metadata.json
index e4fe2b465e21..21b82428ab8b 100644
--- a/Packs/FeedMitreAttackv2/pack_metadata.json
+++ b/Packs/FeedMitreAttackv2/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "MITRE ATT&CK",
"description": "Fetches indicators from MITRE ATT&CK.",
"support": "xsoar",
- "currentVersion": "1.1.36",
+ "currentVersion": "1.1.39",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2.py b/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2.py
index 705ad621d766..08ffe062d56b 100644
--- a/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2.py
+++ b/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2.py
@@ -71,6 +71,7 @@ def build_param_string(self, params: dict) -> str:
param_string: str = '&'.join([f'{key}={value}' for key, value in params.items()])
param_string = param_string.replace('noRejected=None', 'noRejected')
+ param_string = param_string.replace('hasKev=True', 'hasKev')
for value in self.cvssv3severity:
param_string += f'&cvssV3Severity={value}'
@@ -350,7 +351,7 @@ def retrieve_cves(client, start_date: Any, end_date: Any, publish_date: bool):
param['lastModEndDate'] = end_date.strftime(DATE_FORMAT)
if client.has_kev:
- url_suffix += '&hasKev'
+ param['hasKev'] = True
if client.keyword_search:
param['keywordSearch'] = client.keyword_search
diff --git a/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2_test.py b/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2_test.py
index 63d26ee785b7..17aaa9f3dd3d 100644
--- a/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2_test.py
+++ b/Packs/FeedNVDv2/Integrations/FeedNVDv2/FeedNVDv2_test.py
@@ -131,6 +131,7 @@ def test_parse_cpe(cpe, expected_output, expected_relationships):
[
({"param1": "value1", "noRejected": "None"}, "param1=value1&noRejected&cvssV3Severity=LOW&cvssV3Severity=MEDIUM"),
({"noRejected": "None"}, "noRejected&cvssV3Severity=LOW&cvssV3Severity=MEDIUM"),
+ ({"hasKev": "True"}, "hasKev&cvssV3Severity=LOW&cvssV3Severity=MEDIUM"),
],
)
def test_build_param_string(input_params, expected_param_string):
diff --git a/Packs/FeedNVDv2/ReleaseNotes/1_0_2.md b/Packs/FeedNVDv2/ReleaseNotes/1_0_2.md
new file mode 100644
index 000000000000..c7a10160bac5
--- /dev/null
+++ b/Packs/FeedNVDv2/ReleaseNotes/1_0_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### National Vulnerability Database Feed v2
+
+Fixed an issue that caused the integration to timeout when the *kev* parameter was selected by the user.
diff --git a/Packs/FeedNVDv2/pack_metadata.json b/Packs/FeedNVDv2/pack_metadata.json
index 9ea0a263e8e8..6a2787d3b18d 100644
--- a/Packs/FeedNVDv2/pack_metadata.json
+++ b/Packs/FeedNVDv2/pack_metadata.json
@@ -1,7 +1,7 @@
{
"name": "NVD Feed 2.0",
"support": "xsoar",
- "currentVersion": "1.0.1",
+ "currentVersion": "1.0.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.py b/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.py
index 2a1add332e92..00598c464ce3 100644
--- a/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.py
+++ b/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.py
@@ -1,6 +1,6 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from typing import List, Optional, Tuple, Any
+from typing import Any
import urllib3
from pycti import OpenCTIApiClient
@@ -38,7 +38,7 @@
}
-def build_indicator_list(indicator_list: List[str]) -> List[str]:
+def build_indicator_list(indicator_list: list[str]) -> list[str]:
"""Builds an indicator list for the query
Args:
indicator_list: List of XSOAR indicators types to return..
@@ -63,9 +63,9 @@ def reset_last_run():
return CommandResults(readable_output='Fetch history deleted successfully')
-def get_indicators(client: OpenCTIApiClient, indicator_types: List[str], score: List[str] = None,
- limit: Optional[int] = 500, last_run_id: Optional[str] = None,
- tlp_color: Optional[str] = None, tags: List[str] = None) -> Tuple[str, list]:
+def get_indicators(client: OpenCTIApiClient, indicator_types: list[str], score: list[str] = None,
+ limit: int | None = 500, last_run_id: str | None = None,
+ tlp_color: str | None = None, tags: list[str] = None) -> tuple[str, list]:
""" Retrieving indicators from the API
Args:
diff --git a/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.yml b/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.yml
index 857317debe6e..823d3006411b 100644
--- a/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.yml
+++ b/Packs/FeedOpenCTI/Integrations/FeedOpenCTI_v4/FeedOpenCTI_v4.yml
@@ -155,7 +155,7 @@ script:
name: score_end
description: Gets indicators from the feed.
name: opencti-get-indicators
- dockerimage: demisto/vendors-sdk:1.0.0.86440
+ dockerimage: demisto/vendors-sdk:1.0.0.92984
feed: true
runonce: false
script: '-'
@@ -164,3 +164,4 @@ script:
tests:
- OpenCTI Feed Test
fromversion: 5.5.0
+autoUpdateDockerImage : false
diff --git a/Packs/FeedOpenCTI/ReleaseNotes/2_1_2.md b/Packs/FeedOpenCTI/ReleaseNotes/2_1_2.md
new file mode 100644
index 000000000000..a5f1f37043ea
--- /dev/null
+++ b/Packs/FeedOpenCTI/ReleaseNotes/2_1_2.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### OpenCTI Feed 4.X
+
+- Updated the Docker image to: *demisto/vendors-sdk:1.0.0.92984*.
diff --git a/Packs/FeedOpenCTI/pack_metadata.json b/Packs/FeedOpenCTI/pack_metadata.json
index 505c10e8dda7..957b10057138 100644
--- a/Packs/FeedOpenCTI/pack_metadata.json
+++ b/Packs/FeedOpenCTI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "OpenCTI Feed",
"description": "Ingest indicators from the OpenCTI feed.",
"support": "xsoar",
- "currentVersion": "2.1.1",
+ "currentVersion": "2.1.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.py b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.py
index 21277174d67e..b88035de253d 100644
--- a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.py
+++ b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.py
@@ -22,7 +22,7 @@ class Client(BaseClient):
"indicatorTypes={indicator_types}&tagFormat=dict"
def __init__(self, base_url, auth, headers, verify):
- super(Client, self).__init__(base_url=base_url, auth=auth, headers=headers, verify=verify)
+ super().__init__(base_url=base_url, auth=auth, headers=headers, verify=verify)
def query_indicators(self, hours, indicator_types, timeout, retries):
endpoint = self.RANSOMWARE_INDICATORS_ENDPOINT.format(
diff --git a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.yml b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.yml
index e3dbe7ee83e8..90a855ff8fd6 100644
--- a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.yml
+++ b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/Integrations/ReversingLabsRansomwareAndRelatedToolsFeed/ReversingLabsRansomwareAndRelatedToolsFeed.yml
@@ -122,7 +122,7 @@ script:
defaultValue: 1
description: Defines how many hours back in time should the indicators be fetched from. Value should be between 1 and 4. Recommended value is 1.
description: Gets indicators from the feed.
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
feed: true
subtype: python3
fromversion: 6.0.0
diff --git a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/ReleaseNotes/1_0_3.md b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/ReleaseNotes/1_0_3.md
new file mode 100644
index 000000000000..efbbb83c92ae
--- /dev/null
+++ b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/ReleaseNotes/1_0_3.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### ReversingLabs Ransomware and Related Tools Feed
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/pack_metadata.json b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/pack_metadata.json
index f3473741d5b0..b11697711560 100644
--- a/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/pack_metadata.json
+++ b/Packs/FeedReversingLabsRansomwareAndRelatedToolsApp/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ReversingLabs Ransomware and Related Tools Feed",
"description": "A timely and curated threat intel list containing recent indicators extracted from ransomware and the tools used to deploy ransomware which are suitable for threat hunting or deployment to security controls.",
"support": "partner",
- "currentVersion": "1.0.2",
+ "currentVersion": "1.0.3",
"author": "ReversingLabs",
"url": "https://www.reversinglabs.com",
"email": "support@reversinglabs.com",
@@ -16,7 +16,7 @@
"ivukovicRL"
],
"tags": [
- "Threat Intelligence Management"
+ "Threat Intelligence Management"
],
"useCases": [],
"keywords": [
diff --git a/Packs/FeedTAXII/ReleaseNotes/1_2_14.md b/Packs/FeedTAXII/ReleaseNotes/1_2_14.md
new file mode 100644
index 000000000000..8c6d00fb46fd
--- /dev/null
+++ b/Packs/FeedTAXII/ReleaseNotes/1_2_14.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### TAXII 2 Feed
+
+Fixed an issue in the **TAXII2ApiModule** related to *TAXII2 server* integration.
diff --git a/Packs/FeedTAXII/pack_metadata.json b/Packs/FeedTAXII/pack_metadata.json
index 45632402870b..3c96140812cc 100644
--- a/Packs/FeedTAXII/pack_metadata.json
+++ b/Packs/FeedTAXII/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "TAXII Feed",
"description": "Ingest indicator feeds from TAXII 1 and TAXII 2 servers.",
"support": "xsoar",
- "currentVersion": "1.2.13",
+ "currentVersion": "1.2.14",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.py b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.py
index 521afd09385c..799d8a82fef8 100644
--- a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.py
+++ b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.py
@@ -34,7 +34,7 @@
'CIDR': FeedIndicatorType.CIDR,
'EmailAddress': FeedIndicatorType.Email,
'File': FeedIndicatorType.File,
- 'Host': FeedIndicatorType.Host,
+ 'Host': FeedIndicatorType.Domain,
'Mutex': FeedIndicatorType.MUTEX,
'Registry Key': FeedIndicatorType.Registry,
'URL': FeedIndicatorType.URL,
@@ -77,14 +77,16 @@
'threatAssessRating': 'verdict',
'description': 'description',
'summary': 'name',
+ 'md5': 'md5',
+ 'sha1': 'sha1',
'sha256': 'sha256'},
- 'Host': {'dateAdded': 'firstseenbysource',
- 'lastModified': 'updateddate',
- 'threatAssessRating': 'verdict',
- 'threatAssessConfidence': 'confidence',
- 'description': 'description',
- 'summary': 'name',
- 'hostname': 'hostname'},
+ 'Domain': {'dateAdded': 'firstseenbysource',
+ 'lastModified': 'updateddate',
+ 'threatAssessRating': 'verdict',
+ 'threatAssessConfidence': 'confidence',
+ 'description': 'description',
+ 'summary': 'name',
+ 'hostName': 'domainname'},
'Mutex': {'dateAdded': 'firstseenbysource',
'threatAssessRating': 'verdict',
'description': 'description',
@@ -244,6 +246,21 @@ def create_rk_grid_field(indicator: dict):
return key_value
+def get_indicator_value(indicator: dict, indicator_type: str) -> str:
+ """Getting the indicator value according to the indicator type
+ Args:
+ indicator (dict): The data of the indicator
+ indicator_type (str): The type of the indicator
+ Returns:
+ str: The indicator value
+ """
+ if indicator_type == 'File':
+ indicator_value = indicator.get('sha256') or indicator.get('sha1') or indicator.get('md5') or ''
+ else:
+ indicator_value = indicator.get('summary') or indicator.get('name', '')
+ return indicator_value
+
+
def parse_indicator(indicator: Dict[str, str]) -> Dict[str, Any]:
""" Parsing indicator by indicators demisto convention.
Args:
@@ -251,8 +268,8 @@ def parse_indicator(indicator: Dict[str, str]) -> Dict[str, Any]:
Returns:
dict: Parsed indicator.
"""
- indicator_type = INDICATOR_MAPPING_NAMES.get(indicator.get('type', ''))
- indicator_value = indicator.get('summary') or indicator.get('name')
+ indicator_type = INDICATOR_MAPPING_NAMES.get(indicator.get('type', ''), '')
+ indicator_value = get_indicator_value(indicator, indicator_type)
fields = create_indicator_fields(indicator, indicator_type)
relationships = create_indicator_relationships(fields, indicator_type, indicator_value) # type: ignore
indicator_obj = {
diff --git a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.yml b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.yml
index dafc994daa67..0e4c837e9bb3 100644
--- a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.yml
+++ b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect.yml
@@ -236,7 +236,7 @@ script:
name: tc-get-indicators
- description: Gets available indicators owners.
name: tc-get-owners
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.14.95956
feed: true
runonce: false
script: '-'
diff --git a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect_test.py b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect_test.py
index 7ad6c4b23132..dc56e4b482ae 100644
--- a/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect_test.py
+++ b/Packs/FeedThreatConnect/Integrations/FeedThreatConnect/FeedThreatConnect_test.py
@@ -1,8 +1,11 @@
+from pytest_mock import MockerFixture
+from CommonServerPython import *
+
import json
import demistomock as demisto
import pytest
from FeedThreatConnect import create_or_query, parse_indicator, set_tql_query, create_types_query, should_send_request, \
- build_url_with_query_params, set_fields_query, get_updated_last_run, create_indicator_fields
+ build_url_with_query_params, set_fields_query, get_updated_last_run, create_indicator_fields, get_indicator_value
def load_json_file(path):
@@ -184,3 +187,55 @@ def test_create_indicator_fields_registry_key():
assert 'Key Value' in result
assert 'name' in result.get('Key Value')[0]
assert result.get('Key Value')[0].get('name') == 'key name'
+
+
+def test_get_indicator_value_for_file():
+ """
+ Given:
+ An indicator dictionary with file hashes.
+ When:
+ The indicator type is 'File'.
+ Then:
+ It should return the sha256 hash if present, else sha1, else md5.
+ """
+ indicator = {
+ 'sha256': 'sha256_hash',
+ 'sha1': 'sha1_hash',
+ 'md5': 'md5_hash'
+ }
+ indicator_type = FeedIndicatorType.File
+ indicator_value = get_indicator_value(indicator, indicator_type)
+ assert indicator_value == 'sha256_hash'
+
+ # Test when sha256 is not present
+ del indicator['sha256']
+ indicator_value = get_indicator_value(indicator, indicator_type)
+ assert indicator_value == 'sha1_hash'
+
+ # Test when sha256 and sha1 are not present
+ del indicator['sha1']
+ indicator_value = get_indicator_value(indicator, indicator_type)
+ assert indicator_value == 'md5_hash'
+
+
+def test_get_indicator_value_for_non_file(mocker: MockerFixture):
+ """
+ Given:
+ An indicator dictionary without file hashes.
+ When:
+ The indicator type is not 'File'.
+ Then:
+ It should return the summary if present, else name.
+ """
+ indicator = {
+ 'summary': 'indicator_summary',
+ 'name': 'indicator_name'
+ }
+ indicator_type = 'IP'
+ indicator_value = get_indicator_value(indicator, indicator_type)
+ assert indicator_value == 'indicator_summary'
+
+ # Test when summary is not present
+ mocker.patch.dict(indicator, {'summary': None})
+ indicator_value = get_indicator_value(indicator, indicator_type)
+ assert indicator_value == 'indicator_name'
diff --git a/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.json b/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.json
new file mode 100644
index 000000000000..fc6c9c2b6548
--- /dev/null
+++ b/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Changed indicators of type 'Host' from ThreatConnect to be of type 'Domain' in Cortex XSOAR."
+}
\ No newline at end of file
diff --git a/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.md b/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.md
new file mode 100644
index 000000000000..8a4d5c05b48f
--- /dev/null
+++ b/Packs/FeedThreatConnect/ReleaseNotes/2_1_22.md
@@ -0,0 +1,8 @@
+
+#### Integrations
+
+##### ThreatConnect Feed
+
+- Updated the indicators of type 'Host' from ThreatConnect to be of type 'Domain' in Cortex XSOAR.
+- Updated the indicators of type 'File' to include the 'md5' and 'sha1' fields.
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/FeedThreatConnect/pack_metadata.json b/Packs/FeedThreatConnect/pack_metadata.json
index 556ab7ddf95a..d963c3145a50 100644
--- a/Packs/FeedThreatConnect/pack_metadata.json
+++ b/Packs/FeedThreatConnect/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ThreatConnect Feed",
"description": "ThreatConnect indicators feed for Cortex XSOAR TIM.",
"support": "xsoar",
- "currentVersion": "2.1.21",
+ "currentVersion": "2.1.22",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FeedUnit42v2/ReleaseNotes/1_0_54.md b/Packs/FeedUnit42v2/ReleaseNotes/1_0_54.md
new file mode 100644
index 000000000000..72858f4157f4
--- /dev/null
+++ b/Packs/FeedUnit42v2/ReleaseNotes/1_0_54.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Unit 42 ATOMs Feed
+
+Fixed an issue in the **TAXII2ApiModule** related to *TAXII2 server* integration.
diff --git a/Packs/FeedUnit42v2/ReleaseNotes/1_0_55.md b/Packs/FeedUnit42v2/ReleaseNotes/1_0_55.md
new file mode 100644
index 000000000000..4c950f25c3dc
--- /dev/null
+++ b/Packs/FeedUnit42v2/ReleaseNotes/1_0_55.md
@@ -0,0 +1,3 @@
+## Unit 42 ATOMs Feed
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/FeedUnit42v2/pack_metadata.json b/Packs/FeedUnit42v2/pack_metadata.json
index 58d9cb15c2f2..5be57848ca63 100644
--- a/Packs/FeedUnit42v2/pack_metadata.json
+++ b/Packs/FeedUnit42v2/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Unit 42 ATOMs Feed",
"description": "Unit 42 feed of published IOCs which contains malicious indicators.",
"support": "xsoar",
- "currentVersion": "1.0.53",
+ "currentVersion": "1.0.55",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FiltersAndTransformers/.pack-ignore b/Packs/FiltersAndTransformers/.pack-ignore
index 102c9525de56..1e3d87f12154 100644
--- a/Packs/FiltersAndTransformers/.pack-ignore
+++ b/Packs/FiltersAndTransformers/.pack-ignore
@@ -5,6 +5,7 @@ ignore=RM112
commonscripts
urlencode
substring
+matchexact
[file:StripChar.yml]
ignore=BA124
diff --git a/Packs/FiltersAndTransformers/.secrets-ignore b/Packs/FiltersAndTransformers/.secrets-ignore
index 29358d0b6fc4..07ac18698ae6 100644
--- a/Packs/FiltersAndTransformers/.secrets-ignore
+++ b/Packs/FiltersAndTransformers/.secrets-ignore
@@ -6,4 +6,5 @@ https://www.
2001:0db8:85a3:0000:0000:8a2e:0370:7334
11.1.1.1
fe80::a00:27ff:fe4e:66a1
-double.tld@test.co.jp
\ No newline at end of file
+double.tld@test.co.jp
+http://another.url.com
\ No newline at end of file
diff --git a/Packs/FiltersAndTransformers/ReleaseNotes/1_2_71.md b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_71.md
new file mode 100644
index 000000000000..5bb7cf40780c
--- /dev/null
+++ b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_71.md
@@ -0,0 +1,6 @@
+
+#### Scripts
+
+##### TimeComponents
+
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/FiltersAndTransformers/ReleaseNotes/1_2_72.md b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_72.md
new file mode 100644
index 000000000000..98f9753ebf8c
--- /dev/null
+++ b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_72.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### New: RemoveMatches
+
+- New: Removes items from the given list of values if they match any of the patterns in the provided `filters`.
+<~XSOAR> (Available from Cortex XSOAR 6.10.0).~XSOAR>
diff --git a/Packs/FiltersAndTransformers/ReleaseNotes/1_2_73.md b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_73.md
new file mode 100644
index 000000000000..2f77708c8c3c
--- /dev/null
+++ b/Packs/FiltersAndTransformers/ReleaseNotes/1_2_73.md
@@ -0,0 +1,3 @@
+## Filters And Transformers
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/FiltersAndTransformers/Scripts/RemoveMatches/README.md b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/README.md
new file mode 100644
index 000000000000..02073061437a
--- /dev/null
+++ b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/README.md
@@ -0,0 +1,63 @@
+Removes items from the given list of values if they match any of the patterns in the provided `filters`.
+If the match_exact argument is 'yes', direct string compare is used, otherwise the comparison is done using regex.
+
+### Example:
+
+---
+
+##### value (Get):
+
+```json
+[
+ "https://domain1.com/some/url",
+ "http://another.url.com",
+ "domain2.com/faq",
+ "domain3.com/login",
+ "sub.domain3.com/login"
+]
+```
+
+##### filters:
+
+```text
+^.*domain1\.com/.*\n
+^.*domain2\.com/.*\n
+^sub\.domain3\.com/.*
+```
+
+##### Result:
+
+```json
+[
+ "http://another.url.com",
+ "domain3.com/login"
+]
+```
+
+## Script Data
+
+---
+
+| **Name** | **Description** |
+| --- | --- |
+| Script Type | python3 |
+| Tags | transformer, entirelist, general |
+| Cortex XSOAR Version | 6.10.0 |
+
+## Inputs
+
+---
+
+| **Argument Name** | **Description** |
+| --- | --- |
+| value | The value on which the transformer is applied. |
+| ignore_case | Whether to ignore the case of the item for which you are searching. Default is "Yes". |
+| match_exact | Whether to match the exact item in the list, or look for any string that contains it. Default is "No". |
+| delimiter | A string used to delimit fields. For example, a new line "\n" should match the list separator configuration. |
+| filters | A list of patterns to remove from the value. This can be a single string or a list of patterns, separated by the pattern defined in the delimiter argument. Unless match_exact is yes, regex pattern is supported. |
+
+## Outputs
+
+---
+There are no outputs for this script.
+
diff --git a/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.py b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.py
new file mode 100644
index 000000000000..deda654cec50
--- /dev/null
+++ b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.py
@@ -0,0 +1,73 @@
+from CommonServerPython import * # noqa: F401
+
+import re
+
+
def filter_items(values: list, filter_list: list, ignore_case: bool, match_exact: bool) -> list:
    """Return the values that do not match any entry in filter_list.

    If an item matches an entry in filter_list, then it is not returned.

    Args:
        values (list): The values on which to apply the transformer.
        filter_list (list): The patterns to filter out of the values.
        ignore_case (bool): If True, compare/match case-insensitively.
        match_exact (bool): If True, filter out only values exactly equal to a
            filter entry; otherwise treat each filter entry as a regex pattern.

    Returns:
        list: The values not matching any of the patterns in the given list.
    """
    if match_exact:
        if ignore_case:
            # Case-insensitive exact match: compare lowercased, whitespace-stripped entries.
            lowered_filters = {list_item.lower().strip() for list_item in filter_list}
            return [value for value in values if value.lower() not in lowered_filters]
        # Case-sensitive exact match: compare against the raw filter entries.
        return [value for value in values if value not in filter_list]

    regex_flags = re.IGNORECASE if ignore_case else 0
    # Strip trailing/leading whitespace and drop empty patterns so that a stray
    # delimiter in the filters argument cannot match (and remove) everything.
    patterns = [pattern for pattern in (item.strip() for item in filter_list) if pattern]
    return [value for value in values
            if not any(re.search(pattern, value, regex_flags) for pattern in patterns)]
+
+
''' MAIN FUNCTION '''


def main():  # pragma: no cover
    """Parse transformer args, filter the values, and return the surviving items."""
    try:
        args = demisto.args()
        ignore_case = argToBoolean(args.get('ignore_case', 'True'))
        match_exact = argToBoolean(args.get('match_exact', 'False'))
        values = argToList(args.get('value'))
        delimiter = args.get('delimiter', '\n')
        # Renamed from `list` to avoid shadowing the builtin.
        filters_arg: str = args.get('filters', '')
        if not filters_arg:
            # No filters supplied - nothing to remove, return the values unchanged.
            filtered_items = values
        else:
            # NOTE(review): the delimiter is interpreted as a regex pattern by
            # re.split (so the default '\n' splits on newlines) - confirm this
            # is intended for delimiters containing regex metacharacters.
            filters = re.split(delimiter, filters_arg)
            filtered_items = filter_items(values=values,
                                          filter_list=filters,
                                          ignore_case=ignore_case,
                                          match_exact=match_exact)
        return_results(filtered_items)
    except Exception as ex:
        # Fixed the script name in the error message (was 'FilterByListTransformer').
        return_error(f'Failed to execute RemoveMatches. Error: {str(ex)}')


''' ENTRY POINT '''


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
diff --git a/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.yml b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.yml
new file mode 100644
index 000000000000..02526993a873
--- /dev/null
+++ b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches.yml
@@ -0,0 +1,48 @@
+args:
+- default: true
+ description: The value on which the transformer is applied.
+ isArray: true
+ name: value
+- auto: PREDEFINED
+ defaultValue: "Yes"
+ description: Whether to ignore the case of the item for which you are searching. Default is "Yes".
+ name: ignore_case
+ predefined:
+ - "yes"
+ - "no"
+- auto: PREDEFINED
+ defaultValue: "No"
+ description: Whether to match the exact item in the list, or look for any string that contains it. Default is "No".
+ name: match_exact
+ predefined:
+ - "yes"
+ - "no"
+- defaultValue: \n
+ description: A string used to delimit fields. For example, a new line "\n" should match the list separator configuration.
+ name: delimiter
+- description: A list of patterns to remove from the value. This can be a single string or a list of patterns, separated by the pattern defined in the delimiter argument. Unless match_exact is yes, regex pattern is supported.
+ required: true
+ name: filters
+comment: |-
+ Removes items from the given list of values if they match any of the patterns in the provided `filters`.
+ If the match_exact argument is 'yes', direct string compare is used, otherwise the comparison is done using regex.
+commonfields:
+ id: RemoveMatches
+ version: -1
+dockerimage: demisto/python3:3.10.14.92207
+enabled: true
+engineinfo: {}
+name: RemoveMatches
+runas: DBotWeakRole
+runonce: false
+script: ''
+scripttarget: 0
+subtype: python3
+tags:
+- transformer
+- entirelist
+- general
+type: python
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches_test.py b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches_test.py
new file mode 100644
index 000000000000..88f784cd1a4f
--- /dev/null
+++ b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/RemoveMatches_test.py
@@ -0,0 +1,12 @@
+from RemoveMatches import filter_items
+import pytest
+
+
@pytest.mark.parametrize(
    "filter_list, values, ignore_case, match_exact, output",
    [
        # Exact match, case-insensitive: ValueA and ValueB are both removed.
        (['ValueA', 'ValueB'], ['ValueA', 'ValueB', 'ValueC'], True, True, ['ValueC']),
        # Exact match, case-sensitive: 'valueA' does not match 'ValueA'.
        (['valueA', 'ValueB'], ['ValueA', 'ValueB', 'ValueC'], False, True, ['ValueA', 'ValueC']),
        # Regex match, case-insensitive: the alternation removes A and B.
        (['Value(A|B)'], ['ValueA', 'ValueB', 'ValueC'], True, False, ['ValueC']),
        # Regex match, case-sensitive: the lowercase pattern matches nothing.
        (['value(A|B)'], ['ValueA', 'ValueB', 'ValueC'], False, False, ['ValueA', 'ValueB', 'ValueC']),
    ],
)
def test_filter_items(filter_list: list[str], values: list, ignore_case: bool, match_exact: bool, output: list):
    """Verify filter_items honors the ignore_case and match_exact flags."""
    filtered = filter_items(values, filter_list=filter_list, ignore_case=ignore_case, match_exact=match_exact)
    assert filtered == output
diff --git a/Packs/FiltersAndTransformers/Scripts/RemoveMatches/test_data/listContent.json b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/test_data/listContent.json
new file mode 100644
index 000000000000..ffb56030c89a
--- /dev/null
+++ b/Packs/FiltersAndTransformers/Scripts/RemoveMatches/test_data/listContent.json
@@ -0,0 +1,11 @@
+[
+ {
+ "ModuleName": "InnerServicesModule",
+ "Brand": "Builtin",
+ "Category": "Builtin",
+ "ID": "",
+ "Version": 0,
+ "Type": 1,
+ "Contents": "ValueA\r\nValueC"
+ }
+]
\ No newline at end of file
diff --git a/Packs/FiltersAndTransformers/Scripts/TimeComponents/README.md b/Packs/FiltersAndTransformers/Scripts/TimeComponents/README.md
index 8c3a2aeb89be..762d830a9170 100644
--- a/Packs/FiltersAndTransformers/Scripts/TimeComponents/README.md
+++ b/Packs/FiltersAndTransformers/Scripts/TimeComponents/README.md
@@ -52,7 +52,7 @@ Returns a dictionary with the following components.
| **Argument Name** | **Description** |
| --- | --- |
-| value | Input date or time in a format that is supported by the dateparser.parse\(\) function as outlined here- https://dateparser.readthedocs.io/en/latest/\#popular-formats. For example: '2020-01-01' or '1999/02/03 12:01:59'. \(Default is the current time\). Assume given time is in UTC if time zone is not detected. |
+| value | Input date or time in a format that is supported by the dateparser.parse\(\) function as outlined here- https://dateparser.readthedocs.io/en/latest/#popular-formats. For example: '2020-01-01' or '1999/02/03 12:01:59'. \(Default is the current time\). Assume given time is in UTC if time zone is not detected. |
| time_zone | The time zone \(e.g. -0400, \+09:00\) or time string to extract a time zone |
| key | The name of a key to choose which time component to return |
diff --git a/Packs/FiltersAndTransformers/Scripts/TimeComponents/TimeComponents.yml b/Packs/FiltersAndTransformers/Scripts/TimeComponents/TimeComponents.yml
index 2eda2d35e846..f77815760c55 100644
--- a/Packs/FiltersAndTransformers/Scripts/TimeComponents/TimeComponents.yml
+++ b/Packs/FiltersAndTransformers/Scripts/TimeComponents/TimeComponents.yml
@@ -97,7 +97,7 @@ args:
description: The name of a key to choose which time component to return.
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.5.0
tests:
diff --git a/Packs/FiltersAndTransformers/pack_metadata.json b/Packs/FiltersAndTransformers/pack_metadata.json
index 1a30f5a06a0a..ff424e7044ed 100644
--- a/Packs/FiltersAndTransformers/pack_metadata.json
+++ b/Packs/FiltersAndTransformers/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Filters And Transformers",
"description": "Frequently used filters and transformers pack.",
"support": "xsoar",
- "currentVersion": "1.2.70",
+ "currentVersion": "1.2.73",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.py b/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.py
index b969be415dd9..139ea126cd33 100644
--- a/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.py
+++ b/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.py
@@ -1,6 +1,6 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
import dateparser
import urllib3
@@ -27,19 +27,19 @@ class Client(BaseClient):
For this HelloWorld implementation, no special attributes defined
"""
- def get_file_reputation(self, file: str) -> Dict[str, Any]:
+ def get_file_reputation(self, file: str) -> dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/hashes/{file}'
)
- def get_health(self) -> Dict[str, Any]:
+ def get_health(self) -> dict[str, Any]:
return self._http_request(
method='GET',
url_suffix='/health'
)
- def submit_file(self, files: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]:
+ def submit_file(self, files: dict[str, Any], data: dict[str, Any]) -> dict[str, Any]:
return self._http_request(
method='POST',
url_suffix='/files',
@@ -47,7 +47,7 @@ def submit_file(self, files: Dict[str, Any], data: Dict[str, Any]) -> Dict[str,
data=data
)
- def submit_urls(self, data: Dict[str, Any]) -> Dict[str, Any]:
+ def submit_urls(self, data: dict[str, Any]) -> dict[str, Any]:
return self._http_request(
method='POST',
url_suffix='/urls',
@@ -55,7 +55,7 @@ def submit_urls(self, data: Dict[str, Any]) -> Dict[str, Any]:
data=None
)
- def get_report_url(self, report_id: str, expiration: int) -> Dict[str, Any]:
+ def get_report_url(self, report_id: str, expiration: int) -> dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/presigned-url/{report_id}',
@@ -64,7 +64,7 @@ def get_report_url(self, report_id: str, expiration: int) -> Dict[str, Any]:
}
)
- def report_status(self, report_id: str, extended: str) -> Dict[str, Any]:
+ def report_status(self, report_id: str, extended: str) -> dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/reports/{report_id}',
@@ -73,7 +73,7 @@ def report_status(self, report_id: str, extended: str) -> Dict[str, Any]:
}
)
- def report_artifact(self, report_id: str, artifact_type: str) -> Dict[str, Any]:
+ def report_artifact(self, report_id: str, artifact_type: str) -> dict[str, Any]:
return self._http_request(
method='GET',
url_suffix=f'/artifacts/{report_id}',
@@ -99,7 +99,7 @@ def convert_to_demisto_severity(severity: str) -> int:
}[severity]
-def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
+def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> int | None:
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
@@ -113,7 +113,7 @@ def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> Optional[int]
raise ValueError(f'Invalid number: "{arg_name}"')
-def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
+def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> int | None:
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
@@ -132,7 +132,7 @@ def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optiona
raise ValueError(f'Invalid date: {arg_name}')
return int(date.timestamp())
- if isinstance(arg, (int, float)):
+ if isinstance(arg, int | float):
# Convert to int if the input is a float
return int(arg)
raise ValueError(f'Invalid date: "{arg_name}"')
@@ -162,7 +162,7 @@ def test_module(client: Client) -> str:
return 'ok'
-def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, Any]:
+def get_hashes_command(client: Client, args: dict[str, Any]) -> tuple[str, dict, Any]:
hashes = argToList(args.get('md5_hashes'))
if len(hashes) == 0:
@@ -173,9 +173,9 @@ def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict,
continue
raise Exception('Invalid hash. Only MD5 is supported.')
- dbot_score_list: List[Dict[str, Any]] = []
- file_standard_list: List[Dict[str, Any]] = []
- file_data_list: List[Dict[str, Any]] = []
+ dbot_score_list: list[dict[str, Any]] = []
+ file_standard_list: list[dict[str, Any]] = []
+ file_data_list: list[dict[str, Any]] = []
for hash in hashes:
file_data = client.get_file_reputation(hash)
@@ -183,7 +183,7 @@ def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict,
del file_data['md5']
# demisto.results(file_data)
engines = file_data.get('engine_results', {})
- for key in engines.keys():
+ for key in engines:
if engines[key].get('sha256'):
file_data['SHA256'] = engines[key].get('sha256')
del engines[key]['sha256']
@@ -193,7 +193,7 @@ def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict,
score = 3 # bad
else:
score = 0 # unknown
- for key in engines.keys():
+ for key in engines:
verdict = engines[key].get('verdict', 'not_found')
if verdict != "not_found" and verdict != "malicious":
score = 1 # good
@@ -250,7 +250,7 @@ def get_hashes_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict,
)
-def generate_report_url(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
+def generate_report_url(client: Client, args: dict[str, Any]) -> tuple[str, dict, dict]:
report_id = str(args.get('report_id'))
expiration = arg_to_int(arg=args.get('expiration'), arg_name='expiration', required=True)
if expiration:
@@ -271,7 +271,7 @@ def generate_report_url(client: Client, args: Dict[str, Any]) -> Tuple[str, dict
)
-def submit_file_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
+def submit_file_command(client: Client, args: dict[str, Any]) -> tuple[str, dict, dict]:
entry_id = demisto.args().get('entryID')
file_entry = demisto.getFilePath(entry_id) # .get('path')
file_name = file_entry['name']
@@ -308,13 +308,13 @@ def submit_file_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict
)
-def submit_urls_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, dict]:
+def submit_urls_command(client: Client, args: dict[str, Any]) -> tuple[str, dict, dict]:
urls = argToList(args.get('urls'))
if len(urls) == 0:
raise ValueError('hash(es) not specified')
# Format the URLs into a string list, which the API understands
- formatted_urls = "[" + ",".join(list(map(lambda url: url.replace(url, f'"{url}"'), urls))) + "]"
+ formatted_urls = "[" + ",".join([url.replace(url, f'"{url}"') for url in urls]) + "]"
data = {'urls': formatted_urls}
scan = client.submit_urls(data=data)
@@ -338,7 +338,7 @@ def submit_urls_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict
)
-def get_reports_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict, Any]:
+def get_reports_command(client: Client, args: dict[str, Any]) -> tuple[str, dict, Any]:
report_id_list = argToList(args.get('report_ids', []))
extended = args.get('extended_report', "False")
screenshot = args.get('get_screenshot', "false")
@@ -346,7 +346,7 @@ def get_reports_command(client: Client, args: Dict[str, Any]) -> Tuple[str, dict
if len(report_id_list) == 0:
raise ValueError('report_id(s) not specified')
- report_list: List[Dict[str, Any]] = []
+ report_list: list[dict[str, Any]] = []
for report_id in report_id_list:
report = client.report_status(report_id=report_id, extended=extended)
if screenshot.lower() == "true":
diff --git a/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.yml b/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.yml
index a38202b678f9..ebac11fec08f 100644
--- a/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.yml
+++ b/Packs/FireEye-Detection-on-Demand/Integrations/FireEye-Detection-on-Demand/FireEye-Detection-on-Demand.yml
@@ -27,7 +27,7 @@ configuration:
script:
script: ''
type: python
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
runonce: false
subtype: python3
commands:
@@ -50,46 +50,46 @@ script:
- contextPath: DBotScore.Vendor
description: The vendor used to calculate the score.
- contextPath: File.Malicious.Vendor
- description: "N/A"
+ description: "N/A."
- contextPath: File.MD5
- description: The MD5 hash of the file
+ description: The MD5 hash of the file.
- contextPath: FireEyeDoD.engine_results.cache_lookup.sha256
- description: The sha256 value of the file
+ description: The sha256 value of the file.
type: String
- contextPath: FireEyeDoD.engine_results.cache_lookup.signature_name
- description: The name of the virus signature
+ description: The name of the virus signature.
type: String
- contextPath: FireEyeDoD.engine_results.cache_lookup.is_malicious
- description: True/False if the file is malicious
+ description: True/False if the file is malicious.
type: Number
- contextPath: FireEyeDoD.engine_results.cache_lookup.verdict
- description: The overall verdict of all analysis engines
+ description: The overall verdict of all analysis engines.
type: String
- contextPath: FireEyeDoD.engine_results.cache_lookup.file_extension
- description: The extension of the file
+ description: The extension of the file.
type: String
- contextPath: FireEyeDoD.engine_results.cache_lookup.weight
- description: How important this engine result is to determining malicious activity
+ description: How important this engine result is to determining malicious activity.
type: Number
- contextPath: FireEyeDoD.engine_results.dynamic_analysis.verdict
- description: This particular engine's verdict on whether or not the file is malicious
+ description: This particular engine's verdict on whether or not the file is malicious.
type: String
- contextPath: FireEyeDoD.engine_results.av_lookup.verdict
- description: This particular engine's verdict on whether or not the file is malicious
+ description: This particular engine's verdict on whether or not the file is malicious.
type: String
- contextPath: FireEyeDoD.engine_results.avs_lookup.verdict
- description: This particular engine's verdict on whether or not the file is malicious
+ description: This particular engine's verdict on whether or not the file is malicious.
type: String
- contextPath: FireEyeDoD.engine_results.dti_lookup.verdict
- description: This particular engine's verdict on whether or not the file is malicious
+ description: This particular engine's verdict on whether or not the file is malicious.
type: String
- contextPath: FireEyeDoD.md5
- description: The MD5 hash of the file
+ description: The MD5 hash of the file.
type: String
- contextPath: FireEyeDoD.is_malicious
- description: True/False if the file is malicious
+ description: True/False if the file is malicious.
type: Number
- description: Queries FireEye Detection on Demand reports for the provided md5 hashes
+ description: Queries FireEye Detection on Demand reports for the provided md5 hashes.
- name: fireeye-dod-submit-file
arguments:
- name: entryID
@@ -106,43 +106,43 @@ script:
- "True"
- "False"
defaultValue: "False"
- description: Extract screenshot of screen activity during dynamic analysis if true, which later can be downloaded with artifacts api
+ description: Extract screenshot of screen activity during dynamic analysis if true, which later can be downloaded with artifacts api.
- name: video
auto: PREDEFINED
predefined:
- "True"
- "False"
defaultValue: "False"
- description: Extract video activity during dynamic analysis if true, which later can be downloaded with artifacts api
+ description: Extract video activity during dynamic analysis if true, which later can be downloaded with artifacts api.
- name: fileExtraction
auto: PREDEFINED
predefined:
- "True"
- "False"
defaultValue: "False"
- description: Extract dropped files from vm during dynamic analysis if true, which later can be downloaded with artifacts api
+ description: Extract dropped files from vm during dynamic analysis if true, which later can be downloaded with artifacts api.
- name: memoryDump
auto: PREDEFINED
predefined:
- "True"
- "False"
defaultValue: "False"
- description: Extract memory dump files from vm during dynamic analysis if true, which later can be downloaded with artifacts api
+ description: Extract memory dump files from vm during dynamic analysis if true, which later can be downloaded with artifacts api.
- name: pcap
auto: PREDEFINED
predefined:
- "True"
- "False"
defaultValue: "False"
- description: Extract pcap files from vm during dynamic analysis if true, which later can be downloaded with artifacts api
+ description: Extract pcap files from vm during dynamic analysis if true, which later can be downloaded with artifacts api.
outputs:
- contextPath: FireEyeDoD.Scan.report_id
- description: The report ID can be used to query the status and results of the file submission
+ description: The report ID can be used to query the status and results of the file submission.
- contextPath: FireEyeDoD.Scan.status
- description: The current status of the file submission
+ description: The current status of the file submission.
- contextPath: FireEyeDoD.Scan.filename
- description: The name of the file that was submitted
- description: Submits file to FireEye Detection on Demand for analysis
+ description: The name of the file that was submitted.
+ description: Submits file to FireEye Detection on Demand for analysis.
- name: fireeye-dod-submit-urls
arguments:
- name: urls
@@ -152,10 +152,10 @@ script:
description: A comma separated list of URLs to scan. Maximum of 10 per request.
outputs:
- contextPath: FireEyeDoD.Scan.report_id
- description: The ID of the report
+ description: The ID of the report.
- contextPath: FireEyeDoD.Scan.status
description: The status of the file submission. Will be "DONE" when all engines are finished.
- description: Submits URLs to FireEye Detection on Demand for analysis
+ description: Submits URLs to FireEye Detection on Demand for analysis.
- name: fireeye-dod-get-reports
arguments:
- name: report_ids
@@ -163,7 +163,7 @@ script:
required: true
isArray: true
- name: extended_report
- description: If True, additional information will be returned
+ description: If True, additional information will be returned.
auto: PREDEFINED
predefined:
- "True"
@@ -174,62 +174,62 @@ script:
predefined:
- "True"
- "False"
- description: Whether or not to get screenshot artifacts from the report
+ description: Whether or not to get screenshot artifacts from the report.
defaultValue: "False"
- name: get_artifact
auto: PREDEFINED
predefined:
- all
- screenshot
- description: Which report artifacts to retrieve (if any)
+ description: Which report artifacts to retrieve (if any).
outputs:
- contextPath: FireEyeDoD.Scan.report_id
- description: The ID of the report
+ description: The ID of the report.
type: String
- contextPath: FireEyeDoD.Scan.overall_status
- description: The overall status of all of the engines
+ description: The overall status of all of the engines.
type: String
- contextPath: FireEyeDoD.Scan.is_malicious
- description: True/False if the file is malicious
+ description: True/False if the file is malicious.
type: Number
- contextPath: FireEyeDoD.Scan.started_at
- description: The UTC time the scan was started
+ description: The UTC time the scan was started.
type: Date
- contextPath: FireEyeDoD.Scan.completed_at
- description: The UTC time the scan was completed
+ description: The UTC time the scan was completed.
type: Date
- contextPath: FireEyeDoD.Scan.duration
description: How long, in seconds, the scan took to complete.
type: Number
- contextPath: FireEyeDoD.Scan.file_name
- description: The name of the submitted file
+ description: The name of the submitted file.
type: String
- contextPath: FireEyeDoD.Scan.file_size
- description: The size of the file in bytes
+ description: The size of the file in bytes.
type: Number
- contextPath: FireEyeDoD.Scan.file_extension
description: The extension of the submitted file. If a URL was submitted, this will be empty.
type: String
- contextPath: FireEyeDoD.Scan.md5
- description: The MD5 hash of the submitted file
+ description: The MD5 hash of the submitted file.
type: String
- contextPath: FireEyeDoD.Scan.sha256
- description: The sha256 hash of the submitted file
+ description: The sha256 hash of the submitted file.
type: String
- contextPath: FireEyeDoD.Scan.signature_name
- description: List of signatures extracted by all engines
+ description: List of signatures extracted by all engines.
type: String
- description: Retrieves one or more reports of file scans
+ description: Retrieves one or more reports of file scans.
- name: fireeye-dod-get-report-url
arguments:
- name: report_id
- description: The ID of the report to fetch
+ description: The ID of the report to fetch.
required: true
- name: expiration
default: true
description: Expiration (in hours) for browser viewable report pre-signed URL link. Default value is 72 hours. Minimum is 1 hour, and maximum is 8760 hours (365 days).
defaultValue: "72"
- description: Generates a pre-signed URL for a report
+ description: Generates a pre-signed URL for a report.
fromversion: 6.0.0
tests:
- No tests (auto formatted)
diff --git a/Packs/FireEye-Detection-on-Demand/ReleaseNotes/1_0_5.md b/Packs/FireEye-Detection-on-Demand/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..28cd41de2a8b
--- /dev/null
+++ b/Packs/FireEye-Detection-on-Demand/ReleaseNotes/1_0_5.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### FireEye Detection on Demand
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/FireEye-Detection-on-Demand/pack_metadata.json b/Packs/FireEye-Detection-on-Demand/pack_metadata.json
index 083dd070a641..549b3b244352 100644
--- a/Packs/FireEye-Detection-on-Demand/pack_metadata.json
+++ b/Packs/FireEye-Detection-on-Demand/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "FireEye Detection on Demand",
"description": "Detonate files, hashes, and URLs using FireEye Detection on Demand",
"support": "partner",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "FireEye Inc.",
"githubUser": [
"mckibbenc",
diff --git a/Packs/FireEyeETP/Integrations/FireEyeETPEventCollector/README.md b/Packs/FireEyeETP/Integrations/FireEyeETPEventCollector/README.md
index c9561323588a..0f10b13977d2 100644
--- a/Packs/FireEyeETP/Integrations/FireEyeETPEventCollector/README.md
+++ b/Packs/FireEyeETP/Integrations/FireEyeETPEventCollector/README.md
@@ -1,5 +1,7 @@
Use this integration to fetch email security incidents from FireEye ETP as XSIAM events.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure FireEye ETP Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automation & Feed Integrations**.
diff --git a/Packs/FireEyeETP/pack_metadata.json b/Packs/FireEyeETP/pack_metadata.json
index eb9fc8775242..8f768c9f006f 100644
--- a/Packs/FireEyeETP/pack_metadata.json
+++ b/Packs/FireEyeETP/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "FireEye ETP Event Collector"
}
\ No newline at end of file
diff --git a/Packs/FireEyeHX/Integrations/FireEyeHXEventCollector/README.md b/Packs/FireEyeHX/Integrations/FireEyeHXEventCollector/README.md
index 3e4075063017..901b31382813 100644
--- a/Packs/FireEyeHX/Integrations/FireEyeHXEventCollector/README.md
+++ b/Packs/FireEyeHX/Integrations/FireEyeHXEventCollector/README.md
@@ -1,5 +1,7 @@
Palo Alto Networks FireEye HX Event Collector integration for XSIAM.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure FireEye HX Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automation & Feed Integrations**.
diff --git a/Packs/FireEyeHX/README.md b/Packs/FireEyeHX/README.md
index b1c796ef7818..e9b6143b948e 100644
--- a/Packs/FireEyeHX/README.md
+++ b/Packs/FireEyeHX/README.md
@@ -1,3 +1,4 @@
+<~XSIAM>
# FireEye HX
This pack includes Cortex XSIAM content.
@@ -28,4 +29,5 @@ You can configure the specific vendor and product for this instance.
- product as hx_audit
- format as Auto-Detect
- protocol as UDP
-
\ No newline at end of file
+
+</~XSIAM>
\ No newline at end of file
diff --git a/Packs/FireEyeHX/pack_metadata.json b/Packs/FireEyeHX/pack_metadata.json
index dc262a702cff..64931b5542d8 100644
--- a/Packs/FireEyeHX/pack_metadata.json
+++ b/Packs/FireEyeHX/pack_metadata.json
@@ -19,5 +19,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "FireEye HX Event Collector"
}
\ No newline at end of file
diff --git a/Packs/FortiSIEM/Integrations/FortiSIEMV2/README.md b/Packs/FortiSIEM/Integrations/FortiSIEMV2/README.md
index da976af5c3ad..a962b47ce606 100644
--- a/Packs/FortiSIEM/Integrations/FortiSIEMV2/README.md
+++ b/Packs/FortiSIEM/Integrations/FortiSIEMV2/README.md
@@ -1,6 +1,8 @@
Use FortiSIEM v2 to fetch and update incidents, search events and manage FortiSIEM watchlists.
This integration was integrated and tested with FortiSIEMV2 version 6.3.2.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
Changes have been made that might affect your existing content.
If you are upgrading from a previous of this integration, see [Breaking Changes](#breaking-changes-from-the-previous-version-of-this-integration-fortisiem-v2).
diff --git a/Packs/FortiSIEM/pack_metadata.json b/Packs/FortiSIEM/pack_metadata.json
index b08b16e32f98..fb0d7a71c02c 100644
--- a/Packs/FortiSIEM/pack_metadata.json
+++ b/Packs/FortiSIEM/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "FortiSIEMV2"
}
\ No newline at end of file
diff --git a/Packs/GCP-Enrichment-Remediation/ReleaseNotes/1_1_19.md b/Packs/GCP-Enrichment-Remediation/ReleaseNotes/1_1_19.md
new file mode 100644
index 000000000000..db344f5cb368
--- /dev/null
+++ b/Packs/GCP-Enrichment-Remediation/ReleaseNotes/1_1_19.md
@@ -0,0 +1,3 @@
+## GCP Enrichment and Remediation
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/GCP-Enrichment-Remediation/pack_metadata.json b/Packs/GCP-Enrichment-Remediation/pack_metadata.json
index 77741555c613..9bcf7eeca42e 100644
--- a/Packs/GCP-Enrichment-Remediation/pack_metadata.json
+++ b/Packs/GCP-Enrichment-Remediation/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "GCP Enrichment and Remediation",
"description": "Playbooks using multiple GCP content packs for enrichment and remediation purposes",
"support": "xsoar",
- "currentVersion": "1.1.18",
+ "currentVersion": "1.1.19",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/GCP-IAM/ReleaseNotes/1_0_25.md b/Packs/GCP-IAM/ReleaseNotes/1_0_25.md
new file mode 100644
index 000000000000..80582f3cc02a
--- /dev/null
+++ b/Packs/GCP-IAM/ReleaseNotes/1_0_25.md
@@ -0,0 +1,3 @@
+## GCP IAM
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/GCP-IAM/pack_metadata.json b/Packs/GCP-IAM/pack_metadata.json
index 22f97bcbb37b..2ff989a7b7b6 100644
--- a/Packs/GCP-IAM/pack_metadata.json
+++ b/Packs/GCP-IAM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "GCP IAM",
"description": "Manage identity and access control for Google Cloud Platform resources.",
"support": "xsoar",
- "currentVersion": "1.0.24",
+ "currentVersion": "1.0.25",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Gatewatcher-AionIQ/Integrations/GCenter/GCenter_test.py b/Packs/Gatewatcher-AionIQ/Integrations/GCenter/GCenter_test.py
index 65f9ce0af964..1778b49a276e 100644
--- a/Packs/Gatewatcher-AionIQ/Integrations/GCenter/GCenter_test.py
+++ b/Packs/Gatewatcher-AionIQ/Integrations/GCenter/GCenter_test.py
@@ -205,7 +205,7 @@ def test_gw_get_alert(client, requests_mock, uid, prefix_mapping, raw_alerts_rea
gw_get_alert(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_get_malcore_list_entry(client, requests_mock, ltype, prefix_mapping, get_malcore_list):
args = {
"type": ltype
@@ -228,7 +228,7 @@ def test_gw_get_malcore_list_entry(client, requests_mock, ltype, prefix_mapping,
gw_get_malcore_list_entry(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_add_malcore_list_entry(client, requests_mock, ltype, prefix_mapping, add_malcore_list):
args = {
"type": ltype,
@@ -254,7 +254,7 @@ def test_gw_add_malcore_list_entry(client, requests_mock, ltype, prefix_mapping,
gw_add_malcore_list_entry(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_del_malcore_list_entry(client, requests_mock, ltype, prefix_mapping):
args = {
"type": ltype,
@@ -277,7 +277,7 @@ def test_gw_del_malcore_list_entry(client, requests_mock, ltype, prefix_mapping)
gw_del_malcore_list_entry(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_get_dga_list_entry(client, requests_mock, ltype, prefix_mapping, get_dga_list):
args = {
"type": ltype
@@ -300,7 +300,7 @@ def test_gw_get_dga_list_entry(client, requests_mock, ltype, prefix_mapping, get
gw_get_dga_list_entry(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_add_dga_list_entry(client, requests_mock, ltype, prefix_mapping, add_dga_list):
args = {
"type": ltype,
@@ -325,7 +325,7 @@ def test_gw_add_dga_list_entry(client, requests_mock, ltype, prefix_mapping, add
gw_add_dga_list_entry(client, args)
-@pytest.mark.parametrize("ltype", ["white", "black"])
+@pytest.mark.parametrize("ltype", ["white", "black"], ids=["test-white-list", "test-black-list"])
def test_gw_del_dga_list_entry(client, requests_mock, ltype, prefix_mapping):
args = {
"type": ltype,
diff --git a/Packs/Gem/Integrations/Gem/Gem.py b/Packs/Gem/Integrations/Gem/Gem.py
index 08e29e8e50e1..5f3a3589b28f 100644
--- a/Packs/Gem/Integrations/Gem/Gem.py
+++ b/Packs/Gem/Integrations/Gem/Gem.py
@@ -573,7 +573,7 @@ def init_client(params: dict) -> GemClient:
"""
return GemClient(
base_url=params['api_endpoint'],
- verify=True,
+ verify=not params.get('insecure', False),
proxy=params.get('proxy', False),
client_id=demisto.getParam('credentials')['identifier'] if demisto.getParam('credentials') else "",
client_secret=demisto.getParam('credentials')['password'] if demisto.getParam('credentials') else ""
diff --git a/Packs/Gem/Integrations/Gem/Gem.yml b/Packs/Gem/Integrations/Gem/Gem.yml
index b17bb6767a5e..ed51a16b1cd0 100644
--- a/Packs/Gem/Integrations/Gem/Gem.yml
+++ b/Packs/Gem/Integrations/Gem/Gem.yml
@@ -41,6 +41,11 @@ configuration:
type: 8
section: Connect
required: false
+ - display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ section: Connect
+ required: false
- display: Fetch incidents
name: isFetch
type: 8
@@ -1048,7 +1053,7 @@ script:
script: "-"
type: python
subtype: python3
- dockerimage: demisto/auth-utils:1.0.0.89650 # TODO: This docker image was selected since it contains the "jwt" package. Double check if it's the correct one.
+ dockerimage: demisto/auth-utils:1.0.0.96804 # TODO: This docker image was selected since it contains the "jwt" package. Double check if it's the correct one.
fromversion: 6.12.0
tests:
- No tests
diff --git a/Packs/Gem/Integrations/Gem/README.md b/Packs/Gem/Integrations/Gem/README.md
index 73cc6ae21c2d..b043f5e7a0e0 100644
--- a/Packs/Gem/Integrations/Gem/README.md
+++ b/Packs/Gem/Integrations/Gem/README.md
@@ -14,11 +14,13 @@ Use Gem alerts as a trigger for Cortex XSOAR’s custom playbooks, to automate r
| Service Account Secret | The Service Account Secret to use for connection | True |
| First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days) | | False |
| Use system proxy settings | | False |
+ | Trust any certificate (not secure) | | False |
| Fetch incidents | | False |
| Maximum number of alerts per fetch | | False |
4. Click **Test** to validate the URLs, token, and connection.
+
## Commands
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
@@ -588,4 +590,4 @@ Add a timeline event to a threat.
#### Context Output
-There is no context output for this command.
+There is no context output for this command.
\ No newline at end of file
diff --git a/Packs/Gem/ReleaseNotes/1_0_1.md b/Packs/Gem/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..3c44370e479c
--- /dev/null
+++ b/Packs/Gem/ReleaseNotes/1_0_1.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Gem
+
+- Added the "Trust any certificate" parameter to the integration configuration.
+- Updated the Docker image to: *demisto/auth-utils:1.0.0.96804*.
diff --git a/Packs/Gem/pack_metadata.json b/Packs/Gem/pack_metadata.json
index bb01e80b437c..d971ede580fe 100644
--- a/Packs/Gem/pack_metadata.json
+++ b/Packs/Gem/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Gem",
"description": "Integrate with Gem to use alerts as a trigger for Cortex XSOAR’s custom playbooks, and automate response to specific TTPs and scenarios.",
"support": "partner",
- "currentVersion": "1.0.0",
+ "currentVersion": "1.0.1",
"author": "Gem Security",
"url": "https://gem.security/",
"email": "support@gem.security",
diff --git a/Packs/Genians/Integrations/Genians/Genians.py b/Packs/Genians/Integrations/Genians/Genians.py
index b81965606b39..aeb09dac4e05 100755
--- a/Packs/Genians/Integrations/Genians/Genians.py
+++ b/Packs/Genians/Integrations/Genians/Genians.py
@@ -46,7 +46,7 @@ def http_request(method, url, body=None):
verify=USE_SSL,
)
if result.status_code < 200 or result.status_code >= 300:
- raise Exception("Error in Genian NAC Integration API Call. Code: {0}".format(str(result.status_code)))
+ raise Exception(f"Error in Genian NAC Integration API Call. Code: {str(result.status_code)}")
json_result = result.json()
@@ -98,7 +98,7 @@ def assign_ip_tag_command():
nodeid = result[0]["nl_nodeid"]
if not nodeid:
- demisto.results("IP not found. [{0}] is not exist in your network".format(IP))
+ demisto.results(f"IP not found. [{IP}] is not exist in your network")
else:
result2 = assign_ip_tag(nodeid)
@@ -109,7 +109,7 @@ def assign_ip_tag_command():
break
if tag_check == TAG_NAME:
- hr = "IP : [{0}], [{1}] Tag assign success.".format(IP, TAG_NAME)
+ hr = f"IP : [{IP}], [{TAG_NAME}] Tag assign success."
assign_tag = {
"nodeId": nodeid,
"Name": TAG_NAME
@@ -125,7 +125,7 @@ def assign_ip_tag_command():
}
})
else:
- raise Exception("IP : [{0}], [{1}] Tag assign fail.".format(IP, TAG_NAME))
+ raise Exception(f"IP : [{IP}], [{TAG_NAME}] Tag assign fail.")
def unassign_ip_tag(nodeid: str, data):
@@ -141,7 +141,7 @@ def unassign_ip_tag_command():
nodeid = result[0]["nl_nodeid"]
if not nodeid:
- demisto.results("IP not found. [{0}] is not exist in your network".format(IP))
+ demisto.results(f"IP not found. [{IP}] is not exist in your network")
else:
result2 = get_tag_list()
@@ -156,7 +156,7 @@ def unassign_ip_tag_command():
data = "[\"" + str(tag_check) + "\"]"
result3 = unassign_ip_tag(nodeid, data)
if str(result3) == "[]":
- hr = "IP : [{0}], [{1}] Tag unassign success.".format(IP, TAG_NAME)
+ hr = f"IP : [{IP}], [{TAG_NAME}] Tag unassign success."
unassign_tag = {
"nodeId": nodeid,
"Name": TAG_NAME
@@ -172,18 +172,18 @@ def unassign_ip_tag_command():
}
})
else:
- raise Exception("IP : [{0}], [{1}] Tag unassign fail.".format(IP, TAG_NAME))
+ raise Exception(f"IP : [{IP}], [{TAG_NAME}] Tag unassign fail.")
else:
- demisto.results("[{0}] Tag not found.".format(TAG_NAME))
+ demisto.results(f"[{TAG_NAME}] Tag not found.")
else:
- demisto.results("[{0}] Tag not found.".format(TAG_NAME))
+ demisto.results(f"[{TAG_NAME}] Tag not found.")
def main():
"""Main execution block"""
try:
- LOG("Command being called is {0}".format(demisto.command()))
+ LOG(f"Command being called is {demisto.command()}")
if demisto.command() == "test-module":
get_ip_nodeid('8.8.8.8')
@@ -193,7 +193,7 @@ def main():
elif demisto.command() == 'genians-unassign-ip-tag':
unassign_ip_tag_command()
else:
- raise NotImplementedError("Command {} was not implemented.".format(demisto.command()))
+ raise NotImplementedError(f"Command {demisto.command()} was not implemented.")
except Exception as e:
return_error(str(e))
diff --git a/Packs/Genians/Integrations/Genians/Genians.yml b/Packs/Genians/Integrations/Genians/Genians.yml
index 7eb51cde0ce8..3378e0b20f91 100755
--- a/Packs/Genians/Integrations/Genians/Genians.yml
+++ b/Packs/Genians/Integrations/Genians/Genians.yml
@@ -37,29 +37,29 @@ script:
arguments:
- name: ip
required: true
- description: Threat IP Address (e.g. 192.168.100.87)
+ description: Threat IP Address (e.g. 192.168.100.87).
outputs:
- contextPath: genians.tag.nodeId
- description: nodeid of IP
+ description: nodeid of IP.
type: string
- contextPath: genians.tag.Name
- description: Tag name
+ description: Tag name.
type: string
description: Assigns a tag to the Node specified.
- name: genians-unassign-ip-tag
arguments:
- name: ip
required: true
- description: IP Address (e.g. 192.168.100.87)
+ description: IP Address (e.g. 192.168.100.87).
outputs:
- contextPath: genians.tag.nodeId
- description: nodeid of IP
+ description: nodeid of IP.
type: string
- contextPath: genians.tag.Name
- description: Tag name
+ description: Tag name.
type: string
description: Removes the tag(s) from the Node specified.
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/Genians/ReleaseNotes/1_0_10.md b/Packs/Genians/ReleaseNotes/1_0_10.md
new file mode 100644
index 000000000000..5502b2010062
--- /dev/null
+++ b/Packs/Genians/ReleaseNotes/1_0_10.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Genians
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Genians/pack_metadata.json b/Packs/Genians/pack_metadata.json
index a23c5bad2e12..3bfa3ea4c8f9 100644
--- a/Packs/Genians/pack_metadata.json
+++ b/Packs/Genians/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Genians",
"description": "Use the Genian NAC integration to block IP addresses using the assign tag.",
"support": "partner",
- "currentVersion": "1.0.9",
+ "currentVersion": "1.0.10",
"author": "Genians",
"url": "https://www.genians.com/resources-overview/",
"email": "integration@genians.com",
diff --git a/Packs/GitHub/Integrations/GitHubEventCollector/README.md b/Packs/GitHub/Integrations/GitHubEventCollector/README.md
index c93d2f6dc56b..39f0beb08cbd 100644
--- a/Packs/GitHub/Integrations/GitHubEventCollector/README.md
+++ b/Packs/GitHub/Integrations/GitHubEventCollector/README.md
@@ -1,6 +1,8 @@
Github logs event collector integration for Cortex XSIAM.
This integration was integrated and tested with Github REST API V3
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Github Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/GitHub/pack_metadata.json b/Packs/GitHub/pack_metadata.json
index 7603b71b0b66..361d2fb3e847 100644
--- a/Packs/GitHub/pack_metadata.json
+++ b/Packs/GitHub/pack_metadata.json
@@ -22,5 +22,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Github Event Collector"
}
\ No newline at end of file
diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py
index efb6b4523342..d71910f11d37 100644
--- a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py
+++ b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py
@@ -6,6 +6,7 @@
from datetime import datetime, timedelta, timezone
from email.utils import parsedate_to_datetime, format_datetime
import httplib2
+from httplib2 import socks
import sys
from html.parser import HTMLParser
from html.entities import name2codepoint
@@ -24,9 +25,9 @@
from googleapiclient.discovery_cache.base import Cache
import itertools as it
import urllib.parse
-from typing import List, Optional, Tuple
import secrets
import hashlib
+from googleapiclient.errors import HttpError
''' GLOBAL VARS '''
params = demisto.params()
@@ -49,10 +50,67 @@
'Accept': 'application/json',
}
-
+EXECUTION_METRICS = ExecutionMetrics()
''' HELPER FUNCTIONS '''
+def execute_gmail_action(service, action: str, action_kwargs: dict) -> dict:
+ """Executes a specified action on the Gmail API, while collecting execution metrics.
+
+ This function dynamically executes an action such as sending an email, retrieving an email,
+ getting attachments, or listing emails based on the specified action and its arguments.
+
+ Args:
+ service: The Gmail API service instance.
+ action (str): The action to perform. Supported actions are "send", "get",
+ "get_attachments", and "list".
+ action_kwargs (dict): The keyword arguments required for the specified action.
+
+ Returns:
+ dict: The result from the Gmail API call.
+
+ Raises:
+ HttpError: If an error occurs from the Gmail API request.
+ Exception: If a general error occurs during the function execution.
+
+ Note:
+ This function updates execution metrics counters based on the outcome of the API call.
+ """
+ try:
+ match action:
+ case "send":
+ result = service.users().messages().send(**action_kwargs).execute()
+ case "get":
+ result = service.users().messages().get(**action_kwargs).execute()
+ case "get_attachments":
+ result = service.users().messages().attachments().get(**action_kwargs).execute()
+ case "list":
+ result = service.users().messages().list(**action_kwargs).execute()
+ case _:
+ raise ValueError(f"Unsupported action: {action}")
+
+ except HttpError as error:
+ if error.status_code == 429:
+ EXECUTION_METRICS.quota_error += 1
+ elif error.reason == 'Unauthorized':
+ EXECUTION_METRICS.auth_error += 1
+ else:
+ EXECUTION_METRICS.general_error += 1
+ raise
+ except Exception:
+ EXECUTION_METRICS.general_error += 1
+ raise
+ EXECUTION_METRICS.success += 1
+ return result
+
+
+def return_metrics():
+ if EXECUTION_METRICS.metrics is not None and ExecutionMetrics.is_supported():
+ return_results(EXECUTION_METRICS.metrics)
+ else:
+ demisto.debug("Not returning metrics. Either metrics are not supported in this XSOAR version, or none were collected")
+
+
# See: https://github.com/googleapis/google-api-python-client/issues/325#issuecomment-274349841
class MemoryCache(Cache):
_CACHE: dict = {}
@@ -67,7 +125,7 @@ def set(self, url, content):
class TextExtractHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
- self._texts = [] # type: list
+ self._texts: list = []
self._ignore = False
def handle_starttag(self, tag, attrs):
@@ -127,7 +185,7 @@ def get_http_client_with_proxy(self):
https_proxy = 'https://' + https_proxy
parsed_proxy = urllib.parse.urlparse(https_proxy)
proxy_info = httplib2.ProxyInfo( # disable-secrets-detection
- proxy_type=httplib2.socks.PROXY_TYPE_HTTP, # disable-secrets-detection
+ proxy_type=socks.PROXY_TYPE_HTTP, # disable-secrets-detection
proxy_host=parsed_proxy.hostname,
proxy_port=parsed_proxy.port,
proxy_user=parsed_proxy.username,
@@ -181,17 +239,16 @@ def get_refresh_token(self, integration_context):
f'{resp.status} {resp.reason} {content}')
resp_json = json.loads(content)
if not resp_json.get('refresh_token'):
- raise ValueError('Error obtaining refresh token. Missing refresh token in response: {}'.format(content))
+ raise ValueError(f'Error obtaining refresh token. Missing refresh token in response: {content}')
return resp_json.get('refresh_token')
def get_access_token(self):
integration_context = demisto.getIntegrationContext() or {}
access_token = integration_context.get('access_token')
valid_until = integration_context.get('valid_until')
- if access_token and valid_until and integration_context.get('code') == AUTH_CODE:
- if self.epoch_seconds() < valid_until:
- demisto.debug('Using access token from integration context')
- return access_token
+ if access_token and valid_until and integration_context.get('code') == AUTH_CODE and self.epoch_seconds() < valid_until:
+ demisto.debug('Using access token from integration context')
+ return access_token
refresh_token = self.get_refresh_token(integration_context)
demisto.debug(f"Going to obtain access token for client id: {CLIENT_ID}")
body = {
@@ -207,12 +264,10 @@ def get_access_token(self):
if resp.status not in {200, 201}:
msg = 'Error obtaining access token. Try checking the credentials you entered.'
try:
- demisto.info('Authentication failure from server: {} {} {}'.format(
- resp.status, resp.reason, content))
-
- msg += ' Server message: {}'.format(content)
+ demisto.info(f'Authentication failure from server: {resp.status} {resp.reason} {content}')
+ msg += f' Server message: {content}'
except Exception as ex:
- demisto.error('Failed parsing error response - Exception: {}'.format(ex))
+ demisto.error(f'Failed parsing error response - Exception: {ex}')
raise Exception(msg)
parsed_response = json.loads(content)
@@ -231,10 +286,10 @@ def get_access_token(self):
demisto.debug(f"Done obtaining access token for client id: {CLIENT_ID}. Expires in: {expires_in}")
return access_token
- def parse_mail_parts(self, parts):
- body = u''
- html = u''
- attachments = [] # type: list
+ def parse_mail_parts(self, parts: list[dict]):
+ body = ''
+ html = ''
+ attachments: list = []
for part in parts:
if 'multipart' in part['mimeType'] and part.get('parts'):
part_body, part_html, part_attachments = self.parse_mail_parts(
@@ -268,7 +323,7 @@ def get_attachments(self, user_id, _id):
service = self.get_service(
'gmail',
'v1')
- result = service.users().messages().get(**mail_args).execute()
+ result = execute_gmail_action(service, "get", mail_args)
result = self.get_email_context(result, user_id)[0]
command_args = {
@@ -278,14 +333,14 @@ def get_attachments(self, user_id, _id):
files = []
for attachment in result['Attachments']:
command_args['id'] = attachment['ID']
- result = service.users().messages().attachments().get(**command_args).execute()
+ result = execute_gmail_action(service, "get_attachments", command_args)
file_data = base64.urlsafe_b64decode(result['data'].encode('ascii'))
files.append((attachment['Name'], file_data))
return files
@staticmethod
- def get_date_from_email_header(header: str) -> Optional[datetime]:
+ def get_date_from_email_header(header: str) -> datetime | None:
"""Parse an email header such as Date or Received. The format is either just the date
or name value pairs followed by ; and the date specification. For example:
by 2002:a17:90a:77cb:0:0:0:0 with SMTP id e11csp4670216pjs; Mon, 21 Dec 2020 12:11:57 -0800 (PST)
@@ -310,7 +365,7 @@ def get_date_from_email_header(header: str) -> Optional[datetime]:
return None
@staticmethod
- def get_occurred_date(email_data: dict) -> Tuple[datetime, bool]:
+ def get_occurred_date(email_data: dict) -> tuple[datetime, bool]:
"""Get the occurred date of an email. The date gmail uses is actually the X-Received or the top Received
dates in the header. If fails finding these dates will fall back to internal date.
@@ -324,7 +379,7 @@ def get_occurred_date(email_data: dict) -> Tuple[datetime, bool]:
if not headers or not isinstance(headers, list):
demisto.error(f"couldn't get headers for msg (shouldn't happen): {email_data}")
else:
- # use x-received or recvived. We want to use x-received first and fallback to received.
+ # use x-received or received. We want to use x-received first and fallback to received.
for name in ['x-received', 'received', ]:
header = next(filter(lambda ht: ht.get('name', '').lower() == name, headers), None)
if header:
@@ -337,7 +392,7 @@ def get_occurred_date(email_data: dict) -> Tuple[datetime, bool]:
internalDate = email_data.get('internalDate')
demisto.info(f"couldn't extract occurred date from headers trying internalDate: {internalDate}")
if internalDate and internalDate != '0':
- # intenalDate timestamp has 13 digits, but epoch-timestamp counts the seconds since Jan 1st 1970
+ # internalDate timestamp has 13 digits, but epoch-timestamp counts the seconds since Jan 1st 1970
# (which is currently less than 13 digits) thus a need to cut the timestamp down to size.
timestamp_len = len(str(int(time.time())))
if len(str(internalDate)) > timestamp_len:
@@ -347,23 +402,23 @@ def get_occurred_date(email_data: dict) -> Tuple[datetime, bool]:
demisto.info("Failed finding date from internal or headers. Using 'datetime.now()'")
return datetime.now(tz=timezone.utc), False
- def get_email_context(self, email_data, mailbox) -> Tuple[dict, dict, dict, datetime, bool]:
+ def get_email_context(self, email_data, mailbox) -> tuple[dict, dict, dict, datetime, bool]:
"""Get the email context from email data
Args:
- email_data (dics): the email data received from the gmail api
+ email_data (dict): the email data received from the gmail api
mailbox (str): mail box name
Returns:
- (context_gmail, headers, context_email, received_date, is_valid_recieved): note that if received date is not
- resolved properly is_valid_recieved will be false
+ (context_gmail, headers, context_email, received_date, is_valid_received): note that if received date is not
+ resolved properly is_valid_received will be false
"""
occurred, occurred_is_valid = Client.get_occurred_date(email_data)
context_headers = email_data.get('payload', {}).get('headers', [])
context_headers = [{'Name': v['name'], 'Value': v['value']}
for v in context_headers]
- headers = dict([(h['Name'].lower(), h['Value']) for h in context_headers])
+ headers = {h['Name'].lower(): h['Value'] for h in context_headers}
body = demisto.get(email_data, 'payload.body.data')
body = body.encode('ascii') if body is not None else ''
parsed_body = base64.urlsafe_b64decode(body)
@@ -479,7 +534,7 @@ def parse_date_isoformat_server(dt: str) -> datetime:
"""
return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
- def mail_to_incident(self, msg, service, user_key) -> Tuple[dict, datetime, bool]:
+ def mail_to_incident(self, msg, service, user_key) -> tuple[dict, datetime, bool]:
"""Parse an email message
Args:
@@ -488,13 +543,13 @@ def mail_to_incident(self, msg, service, user_key) -> Tuple[dict, datetime, bool
user_key
Raises:
- Exception: when problem getting attachements
+ Exception: when problem getting attachments
Returns:
Tuple[dict, datetime, bool]: incident object, occurred datetime, boolean indicating if date is valid or not
"""
parsed_msg, headers, _, occurred, occurred_is_valid = self.get_email_context(msg, user_key)
- # conver occurred to gmt and then isoformat + Z
+ # convert occurred to gmt and then isoformat + Z
occurred_str = Client.get_date_isoformat_server(occurred)
file_names = []
command_args = {
@@ -504,7 +559,7 @@ def mail_to_incident(self, msg, service, user_key) -> Tuple[dict, datetime, bool
for attachment in parsed_msg['Attachments']:
command_args['id'] = attachment['ID']
- result = service.users().messages().attachments().get(**command_args).execute()
+ result = execute_gmail_action(service, "get_attachments", command_args)
file_data = base64.urlsafe_b64decode(result['data'].encode('ascii'))
# save the attachment
@@ -588,9 +643,9 @@ def search(self, user_id, subject='', _from='', to='', before='', after='', file
'in': _in,
'has': 'attachment' if has_attachments else ''
}
- q = ' '.join('%s:%s ' % (name, value, )
+ q = ' '.join(f'{name}:{value} '
for name, value in query_values.items() if value != '')
- q = ('%s %s' % (q, query, )).strip()
+ q = (f'{q} {query}').strip()
command_args = {
'userId': user_id,
@@ -602,7 +657,7 @@ def search(self, user_id, subject='', _from='', to='', before='', after='', file
'includeSpamTrash': include_spam_trash,
}
service = self.get_service('gmail', 'v1')
- result = service.users().messages().list(**command_args).execute()
+ result = execute_gmail_action(service, "list", command_args)
return [self.get_mail(user_id, mail['id'], 'full') for mail in result.get('messages', [])], q
@@ -614,9 +669,7 @@ def get_mail(self, user_id, _id, _format):
}
service = self.get_service('gmail', 'v1')
- result = service.users().messages().get(**command_args).execute()
-
- return result
+ return execute_gmail_action(service, "get", command_args)
'''MAIL SENDER FUNCTIONS'''
@@ -644,7 +697,7 @@ def template_params(self, paramsStr):
params = json.loads(paramsStr)
except ValueError as e:
- return_error('Unable to parse templateParams: {}'.format(str(e)))
+ return_error(f'Unable to parse templateParams: {str(e)}')
# Build a simple key/value
for p in params:
@@ -746,6 +799,7 @@ def collect_inline_attachments(self, attach_cids):
})
return inline_attachment
+ return None
def collect_manual_attachments(self):
attachments = []
@@ -960,7 +1014,12 @@ def send_email_request(self, email_from: str, body: dict) -> dict:
Returns:
dict: the email send response.
"""
- return self.get_service('gmail', 'v1').users().messages().send(userId=email_from, body=body).execute()
+ command_args = {
+ "userId": email_from,
+ "body": body
+ }
+ service = self.get_service('gmail', 'v1')
+ return execute_gmail_action(service, "send", command_args)
def generate_auth_link(self):
"""Generate an auth2 link.
@@ -1006,7 +1065,7 @@ def test_module(client):
demisto.results('Test is not supported. Please use the following command: !gmail-auth-test.')
-def mail_command(client, args, email_from, send_as, subject_prefix='', in_reply_to=None, references=None):
+def mail_command(client: Client, args: dict, email_from, send_as, subject_prefix='', in_reply_to=None, references=None):
email_to = args.get('to')
body = args.get('body')
subject = f"{subject_prefix}{args.get('subject')}"
@@ -1045,13 +1104,13 @@ def mail_command(client, args, email_from, send_as, subject_prefix='', in_reply_
return send_mail_result
-def send_mail_command(client):
+def send_mail_command(client: Client):
args = demisto.args()
return mail_command(client, args, EMAIL, SEND_AS or EMAIL)
-def reply_mail_command(client):
+def reply_mail_command(client: Client):
args = demisto.args()
email_from = args.get('from')
send_as = args.get('send_as')
@@ -1061,7 +1120,7 @@ def reply_mail_command(client):
return mail_command(client, args, email_from, send_as, 'Re: ', in_reply_to, references)
-def get_attachments_command(client):
+def get_attachments_command(client: Client):
args = demisto.args()
_id = args.get('message-id')
@@ -1080,8 +1139,8 @@ def fetch_incidents(client: Client):
demisto.debug(f'last run: {last_run}')
last_fetch = last_run.get('gmt_time')
next_last_fetch = last_run.get('next_gmt_time')
- page_token = last_run.get('page_token') or None
- ignore_ids: List[str] = last_run.get('ignore_ids') or []
+ page_token = last_run.get('page_token')
+ ignore_ids: list[str] = last_run.get('ignore_ids') or []
ignore_list_used = last_run.get('ignore_list_used') or False # can we reset the ignore list if we haven't used it
# handle first time fetch - gets current GMT time -1 day
if not last_fetch:
@@ -1101,22 +1160,30 @@ def fetch_incidents(client: Client):
max_results = MAX_FETCH
if MAX_FETCH > 200:
max_results = 200
- LOG(f'GMAIL: fetch parameters: user: {user_key} query={query}'
- f' fetch time: {last_fetch} page_token: {page_token} max results: {max_results}')
- result = service.users().messages().list(
- userId=user_key, maxResults=max_results, pageToken=page_token, q=query).execute()
+ demisto.debug(f'GMAIL: fetch parameters: user: {user_key} {query=}'
+ f' fetch time: {last_fetch} page_token: {page_token} max results: {max_results}')
+ list_command_args = {
+ "userId": user_key,
+ "maxResults": max_results,
+ "pageToken": page_token,
+ "q": query
+ }
+ result = execute_gmail_action(service, "list", list_command_args)
incidents = []
# so far, so good
- LOG(f'GMAIL: possible new incidents are {result}')
+ demisto.debug(f'GMAIL: possible new incidents are {result}')
for msg in result.get('messages', []):
msg_id = msg['id']
if msg_id in ignore_ids:
demisto.info(f'Ignoring msg id: {msg_id} as it is in the ignore list')
ignore_list_used = True
continue
- msg_result = service.users().messages().get(
- id=msg_id, userId=user_key).execute()
+ command_kwargs = {
+ 'userId': user_key,
+ 'id': msg_id
+ }
+ msg_result = execute_gmail_action(service, "get", command_kwargs)
incident, occurred, is_valid_date = client.mail_to_incident(msg_result, service, user_key)
if not is_valid_date: # if we can't trust the date store the msg id in the ignore list
demisto.info(f'appending to ignore list msg id: {msg_id}. name: {incident.get("name")}')
@@ -1133,7 +1200,7 @@ def fetch_incidents(client: Client):
demisto.info(
f'skipped incident with lower date: {occurred} than fetch: {last_fetch} name: {incident.get("name")}')
- demisto.info('extract {} incidents'.format(len(incidents)))
+ demisto.info(f'extracted {len(incidents)} incidents')
next_page_token = result.get('nextPageToken', '')
if next_page_token:
# we still have more results
@@ -1143,7 +1210,7 @@ def fetch_incidents(client: Client):
demisto.debug(f'will use new last fetch date (no next page token): {next_last_fetch}')
# if we are not in a tokenized search and we didn't use the ignore ids we can reset it
if (not page_token) and (not ignore_list_used) and (len(ignore_ids) > 0):
- demisto.info(f'reseting igonre list of len: {len(ignore_ids)}')
+ demisto.info(f'resetting ignore list of len: {len(ignore_ids)}')
ignore_ids = []
last_fetch = next_last_fetch
demisto.setLastRun({
@@ -1209,9 +1276,10 @@ def main(): # pragma: no cover
sys.exit(0)
if command in commands:
demisto.results(commands[command](client))
- # Log exceptions
except Exception as e:
return_error(f'An error occurred: {e}', error=e)
+ finally:
+ return_metrics()
# python2 uses __builtin__ python3 uses builtins
diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_image.png b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_image.png
index 51551a6069ec..f6d3527f8257 100644
Binary files a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_image.png and b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_image.png differ
diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py
index 94cfb541fb7c..7abc815bb860 100644
--- a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py
+++ b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py
@@ -1,8 +1,9 @@
import json
import pytest
+from pytest_mock import MockerFixture
import demistomock as demisto
-from GmailSingleUser import Client, send_mail_command, MIMEMultipart
+from GmailSingleUser import Client, send_mail_command, MIMEMultipart, execute_gmail_action
from email.utils import parsedate_to_datetime
import base64
@@ -14,6 +15,86 @@ def gmail_client(mocker):
return client
+class MockAttachments:
+ def get(self, **kwargs):
+ return MockExecute()
+
+
+class MockExecute:
+ def execute(self):
+ return {'attachmentId': '67890', 'size': 1024, 'data': 'mock_data'}
+
+
+class MockSend:
+ def execute(self):
+ return {'id': '12345'}
+
+
+class MockGet:
+ def execute(self):
+ return {'id': '12345', 'snippet': 'Test email content'}
+
+
+class MockList:
+ def execute(self):
+ return {'messages': [{'id': '12345'}, {'id': '67890'}]}
+
+
+class MockMessages:
+ def send(self, **kwargs):
+ return MockSend()
+
+ def get(self, **kwargs):
+ return MockGet()
+
+ def attachments(self, **kwargs):
+ return MockAttachments()
+
+ def list(self, **kwargs):
+ return MockList()
+
+
+class MockUsers:
+ def messages(self):
+ return MockMessages()
+
+
+class MockService:
+ def users(self):
+ return MockUsers()
+
+
+@pytest.fixture
+def mock_service():
+ return MockService()
+
+
+def test_execute_gmail_action_send(mock_service):
+ result = execute_gmail_action(mock_service, "send", {})
+ assert result == {'id': '12345'}
+
+
+def test_execute_gmail_action_get(mock_service):
+ result = execute_gmail_action(mock_service, "get", {})
+ assert result == {'id': '12345', 'snippet': 'Test email content'}
+
+
+def test_execute_gmail_action_get_attachments(mock_service):
+ result = execute_gmail_action(mock_service, "get_attachments", {})
+ assert result == {'attachmentId': '67890', 'size': 1024, 'data': 'mock_data'}
+
+
+def test_execute_gmail_action_list(mock_service):
+ result = execute_gmail_action(mock_service, "list", {})
+ assert result == {'messages': [{'id': '12345'}, {'id': '67890'}]}
+
+
+def test_execute_gmail_action_unsupported(mock_service):
+ action_kwargs = {}
+ with pytest.raises(ValueError, match="Unsupported action: unsupported_action"):
+ execute_gmail_action(mock_service, "unsupported_action", action_kwargs)
+
+
MOCK_MAIL_NO_LABELS = {
'internalDate': '1572251535000',
'historyId': '249781',
@@ -26,7 +107,7 @@ def gmail_client(mocker):
{
'name': 'Received',
'value': 'from 1041831412594 named unknown by gmailapi.google.com with '
- u'HTTPREST; Mon, 28 Oct 2019 04:32:15 -0400'
+ 'HTTPREST; Mon, 28 Oct 2019 04:32:15 -0400'
}, {
'name': 'Content-Type',
'value': 'mixed; boundary="===============4922146810840031257=="'
@@ -103,7 +184,7 @@ def gmail_client(mocker):
{
'Name': 'Received',
'Value': 'from 1041831412594 named '
- u'unknown by gmailapi.google.com with HTTPREST; Mon, 28 Oct 2019 04:32:15 -0400'
+ 'unknown by gmailapi.google.com with HTTPREST; Mon, 28 Oct 2019 04:32:15 -0400'
}, {
'Name': 'Content-Type',
'Value': 'mixed; boundary="===============4922146810840031257=="'
@@ -291,7 +372,7 @@ def test_send_mail_with_reference(gmail_client: Client, mocker):
)
-def test_send_mail_MIMEMultipart_constructor(mocker):
+def test_send_mail_MIMEMultipart_constructor(mocker: MockerFixture):
"""
Given:
- Client object
@@ -304,13 +385,19 @@ def test_send_mail_MIMEMultipart_constructor(mocker):
import GmailSingleUser
gmail_single_user_client = Client()
+ # Mock the chain of calls: service.users().messages().send().execute()
+ mock_execute = mocker.Mock(return_value={'id': 'mock_message_id'})
+ mock_send = mocker.Mock(return_value=mock_execute)
+ mock_messages = mocker.Mock(send=mocker.Mock(return_value=mock_send))
+ mock_users = mocker.Mock(messages=mocker.Mock(return_value=mock_messages))
+ mock_service = mocker.Mock(users=mocker.Mock(return_value=mock_users))
+ # Patch the service object in the Client class to use the mocked service
+ mocker.patch.object(GmailSingleUser.Client, 'get_service', new=mock_service)
# Replace MIMEMultipart with the mock object
mocker_obj = mocker.patch.object(
GmailSingleUser, "MIMEMultipart", return_value=MIMEMultipart()
)
- mocker = mocker.patch.object(
- gmail_single_user_client, "send_email_request", return_value=True
- )
+
gmail_single_user_client.send_mail(
emailto="test@gmail.com",
emailfrom="test@gmail.com",
@@ -331,6 +418,7 @@ def test_send_mail_MIMEMultipart_constructor(mocker):
additional_headers=[],
templateParams=None,
)
+
mocker_obj.assert_called_once()
assert mocker_obj.call_args.args == ()
diff --git a/Packs/GmailSingleUser/ReleaseNotes/1_4_0.md b/Packs/GmailSingleUser/ReleaseNotes/1_4_0.md
new file mode 100644
index 000000000000..6f73ab1effd1
--- /dev/null
+++ b/Packs/GmailSingleUser/ReleaseNotes/1_4_0.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### Gmail Single User
+
+Added support for API Execution Metric reporting.
+
+#### Widgets
+
+##### New: API Call Results For Gmail Single User
+
+(Available from Cortex XSOAR 6.10.0)
diff --git a/Packs/GmailSingleUser/Widgets/Widget-API_Call_Results_for_Gmail_Single_User.json b/Packs/GmailSingleUser/Widgets/Widget-API_Call_Results_for_Gmail_Single_User.json
new file mode 100644
index 000000000000..8b4018fd8d44
--- /dev/null
+++ b/Packs/GmailSingleUser/Widgets/Widget-API_Call_Results_for_Gmail_Single_User.json
@@ -0,0 +1,95 @@
+{
+ "dataType": "metrics",
+ "params": {
+ "customGroupBy": [
+ null,
+ {
+ "ConnectionError": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "ConnectionError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "ConnectionError"
+ },
+ "General Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "GeneralError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "General Error"
+ },
+ "Quota Error": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "QuotaError",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Quota Error"
+ },
+ "Success": {
+ "conditions": [
+ [
+ {
+ "field": "apiResponseType",
+ "operator": "isEqualCaseString",
+ "right": "Successful",
+ "type": "string"
+ }
+ ]
+ ],
+ "name": "Success"
+ }
+ }
+ ],
+ "groupBy": [
+ "modified(h)",
+ "apiResponseType"
+ ],
+ "keys": [
+ "sum|totalAPICalls"
+ ],
+ "timeFrame": "hours",
+ "valuesFormat": "abbreviated",
+ "xAxisLabel": "Time",
+ "yAxisLabel": "Request Counts"
+ },
+ "query": "type:integration and name:\"Gmail Single User\"",
+ "modified": "2024-05-21T10:23:37.500573773Z",
+ "name": "API Call Results For Gmail Single User",
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "by": "",
+ "byTo": "hours",
+ "byFrom": "hours",
+ "toValue": 0,
+ "fromValue": 3,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "isPredefined": true,
+ "version": -1,
+ "id": "eff19897-4635-44c4-889c-f106373be355",
+ "widgetType": "line",
+ "fromVersion": "6.10.0",
+ "description": ""
+}
\ No newline at end of file
diff --git a/Packs/GmailSingleUser/pack_metadata.json b/Packs/GmailSingleUser/pack_metadata.json
index e48cabafafcf..e20748966793 100644
--- a/Packs/GmailSingleUser/pack_metadata.json
+++ b/Packs/GmailSingleUser/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Gmail Single User",
"description": "Gmail API using OAuth 2.0.",
"support": "xsoar",
- "currentVersion": "1.3.13",
+ "currentVersion": "1.4.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/GoogleChronicleBackstory/.pack-ignore b/Packs/GoogleChronicleBackstory/.pack-ignore
index 155572892f7f..cdde59889bcc 100644
--- a/Packs/GoogleChronicleBackstory/.pack-ignore
+++ b/Packs/GoogleChronicleBackstory/.pack-ignore
@@ -64,6 +64,9 @@ ignore=PB114
[file:GoogleChronicleBackstory_image.png]
ignore=IM111
+[file:GoogleChronicleBackstoryStreamingAPI_image.png]
+ignore=IM111
+
[file:classifier-Chronicle.json]
ignore=BA101
@@ -164,4 +167,5 @@ googleapis
APPDATA
txt
retries
-Registerserver
\ No newline at end of file
+Registerserver
+gcb
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Classifiers/classifier-mapper-incoming-Chronicle.json b/Packs/GoogleChronicleBackstory/Classifiers/classifier-mapper-incoming-Chronicle.json
index 7566a463dd87..06aea10433da 100644
--- a/Packs/GoogleChronicleBackstory/Classifiers/classifier-mapper-incoming-Chronicle.json
+++ b/Packs/GoogleChronicleBackstory/Classifiers/classifier-mapper-incoming-Chronicle.json
@@ -118,6 +118,22 @@
"transformers": []
},
"simple": ""
+ },
+ "Description": {
+ "complex": {
+ "accessor": "description",
+ "filters": [],
+ "root": "detection",
+ "transformers": []
+ }
+ },
+ "Detection URL": {
+ "complex": {
+ "accessor": "urlBackToProduct",
+ "filters": [],
+ "root": "detection",
+ "transformers": []
+ }
}
}
},
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.py b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.py
index 571acc6e736c..0ea3d0c174ce 100644
--- a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.py
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.py
@@ -188,8 +188,6 @@ def __init__(self, params: dict[str, Any], proxy, disable_ssl):
service_account_credential = json.loads(encoded_service_account, strict=False)
# Create a credential using the Google Developer Service Account Credential and Chronicle API scope.
credentials = service_account.Credentials.from_service_account_info(service_account_credential, scopes=SCOPES)
- # Build an HTTP client which can make authorized OAuth requests.
- self.http_client = auth_requests.AuthorizedSession(credentials)
proxies = {}
if proxy:
@@ -199,6 +197,11 @@ def __init__(self, params: dict[str, Any], proxy, disable_ssl):
https_proxy = proxies['https']
if not https_proxy.startswith('https') and not https_proxy.startswith('http'):
proxies['https'] = 'https://' + https_proxy
+ else:
+ skip_proxy()
+
+ # Build an HTTP client which can make authorized OAuth requests.
+ self.http_client = auth_requests.AuthorizedSession(credentials)
self.proxy_info = proxies
self.disable_ssl = disable_ssl
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.yml b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.yml
index 9cad284a987a..5283a15ca073 100644
--- a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.yml
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstory/GoogleChronicleBackstory.yml
@@ -6698,7 +6698,7 @@ script:
- contextPath: GoogleChronicleBackstory.Events.securityResult.urlBackToProduct
description: URL to direct you to the source product console for this security event.
type: String
- dockerimage: demisto/googleapi-python3:1.0.0.89487
+ dockerimage: demisto/googleapi-python3:1.0.0.97032
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.py b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.py
new file mode 100644
index 000000000000..716163c70b0b
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.py
@@ -0,0 +1,934 @@
+"""Main file for GoogleChronicleBackstory Integration."""
+from CommonServerPython import *
+
+from typing import Any, Mapping, Tuple, Iterator
+
+from google.oauth2 import service_account
+from google.auth.transport import requests as auth_requests
+from datetime import datetime
+
+''' CONSTANTS '''
+
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+SCOPES = ['https://www.googleapis.com/auth/chronicle-backstory']
+MAX_CONSECUTIVE_FAILURES = 7
+
+BACKSTORY_API_V2_URL = 'https://{}backstory.googleapis.com/v2'
+
+ENDPOINTS = {
+ # Stream detections endpoint.
+ 'STREAM_DETECTIONS_ENDPOINT': '/detect/rules:streamDetectionAlerts',
+}
+
+TIMEOUT = 300
+MAX_DETECTION_STREAM_BATCH_SIZE = 100
+MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS = '7 days'
+MAX_DELTA_TIME_STRINGS = ['7 day', '168 hour', '1 week']
+IDEAL_SLEEP_TIME_BETWEEN_BATCHES = 30
+IDEAL_BATCH_SIZE = 200
+DEFAULT_FIRST_FETCH = "now"
+
+REGIONS = {
+ "General": "",
+ "Europe": "europe-",
+ "Asia": "asia-southeast1-",
+ "Europe-west2": "europe-west2-"
+}
+
+SEVERITY_MAP = {
+ 'unspecified': 0,
+ 'informational': 0.5,
+ 'low': 1,
+ 'medium': 2,
+ 'high': 3
+}
+
+MESSAGES = {
+ "INVALID_DELTA_TIME_FOR_STREAMING_DETECTIONS": "First fetch time should not be greater than 7 days or 168 hours (in relative manner compared to current time).", # noqa: E501
+ "FUTURE_DATE": "First fetch time should not be in the future.",
+ "INVALID_JSON_RESPONSE": 'Invalid response received from Chronicle API. Response not in JSON format.',
+ "INVALID_REGION": 'Invalid response from Chronicle API. Check the provided "Other Region" parameter.',
+ "CONSECUTIVELY_FAILED": 'Exiting retry loop. Consecutive retries have failed {} times.',
+ "PERMISSION_DENIED": 'Permission denied.',
+ "INVALID_ARGUMENTS": "Connection refused due to invalid arguments"
+}
+
+CHRONICLE_STREAM_DETECTIONS = '[CHRONICLE STREAM DETECTIONS]'
+SKIPPING_CURRENT_DETECTION = f'{CHRONICLE_STREAM_DETECTIONS} Skipping insertion of current detection since it already exists.'
+
+''' CLIENT CLASS '''
+
+
+class Client:
+ """
+ Client to use in integration to fetch data from Chronicle Backstory.
+
+    requires service_account_credentials : a JSON-formatted string that acts as an access token
+ """
+
+ def __init__(self, params: dict[str, Any], proxy, disable_ssl):
+ """
+ Initialize HTTP Client.
+
+ :param params: parameter returned from demisto.params()
+ :param proxy: whether to use environment proxy
+ :param disable_ssl: whether to disable ssl
+ """
+ encoded_service_account = str(params.get('credentials', {}).get('password', ''))
+ service_account_credential = json.loads(encoded_service_account, strict=False)
+ # Create a credential using the Google Developer Service Account Credential and Chronicle API scope.
+ self.credentials = service_account.Credentials.from_service_account_info(service_account_credential,
+ scopes=SCOPES)
+ self.proxy = proxy
+ self.disable_ssl = disable_ssl
+ region = params.get('region', '')
+ other_region = params.get('other_region', '').strip()
+ if region:
+ if other_region and other_region[-1] != '-':
+ other_region = f'{other_region}-'
+ self.region = REGIONS[region] if region.lower() != 'other' else other_region
+ else:
+ self.region = REGIONS['General']
+ self.build_http_client()
+
+ def build_http_client(self):
+ """
+ Build an HTTP client which can make authorized OAuth requests.
+ """
+ proxies = {}
+ if self.proxy:
+ proxies = handle_proxy()
+ if not proxies.get('https', True):
+ raise DemistoException('https proxy value is empty. Check XSOAR server configuration' + str(proxies))
+ https_proxy = proxies['https']
+ if not https_proxy.startswith('https') and not https_proxy.startswith('http'):
+ proxies['https'] = 'https://' + https_proxy
+ else:
+ skip_proxy()
+ self.http_client = auth_requests.AuthorizedSession(self.credentials)
+ self.proxy_info = proxies
+
+
+''' HELPER FUNCTIONS '''
+
+
+def validate_response(client: Client, url, method='GET', body=None):
+ """
+ Get response from Chronicle Search API and validate it.
+
+ :param client: object of client class
+ :type client: object of client class
+
+ :param url: url
+ :type url: str
+
+ :param method: HTTP request method
+ :type method: str
+
+ :param body: data to pass with the request
+ :type body: str
+
+ :return: response
+ """
+ demisto.info(f'{CHRONICLE_STREAM_DETECTIONS}: Request URL: {url.format(client.region)}')
+ raw_response = client.http_client.request(url=url.format(client.region), method=method, data=body,
+ proxies=client.proxy_info, verify=not client.disable_ssl)
+
+ if 500 <= raw_response.status_code <= 599:
+ raise ValueError(
+ 'Internal server error occurred. Failed to execute request.\n'
+ f'Message: {parse_error_message(raw_response.text, client.region)}')
+ if raw_response.status_code == 429:
+ raise ValueError(
+ 'API rate limit exceeded. Failed to execute request.\n'
+ f'Message: {parse_error_message(raw_response.text, client.region)}')
+ if raw_response.status_code == 400 or raw_response.status_code == 404:
+ raise ValueError(
+ f'Status code: {raw_response.status_code}\n'
+ f'Error: {parse_error_message(raw_response.text, client.region)}')
+ if raw_response.status_code != 200:
+ raise ValueError(
+ f'Status code: {raw_response.status_code}\n'
+ f'Error: {parse_error_message(raw_response.text, client.region)}')
+ if not raw_response.text:
+ raise ValueError('Technical Error while making API call to Chronicle. '
+ f'Empty response received with the status code: {raw_response.status_code}.')
+ try:
+ response = remove_empty_elements(raw_response.json())
+ return response
+ except json.decoder.JSONDecodeError:
+ raise ValueError(MESSAGES['INVALID_JSON_RESPONSE'])
+
+
+def validate_configuration_parameters(param: dict[str, Any], command: str) -> tuple[datetime | None]:
+ """
+ Check whether entered configuration parameters are valid or not.
+
+ :type param: dict
+ :param param: Dictionary of demisto configuration parameter.
+
+ :type command: str
+ :param command: Name of the command being called.
+
+    :return: Tuple containing the first fetch datetime.
+    :rtype: Tuple[datetime]
+ """
+ # get configuration parameters
+ service_account_json = param.get('credentials', {}).get('password', '')
+ first_fetch = param.get('first_fetch', '').strip().lower() or DEFAULT_FIRST_FETCH
+
+ try:
+ # validate service_account_credential configuration parameter
+ json.loads(service_account_json, strict=False)
+
+ # validate first_fetch parameter
+ first_fetch_datetime = arg_to_datetime(first_fetch, 'First fetch time')
+ if not first_fetch_datetime.tzinfo: # type: ignore
+ first_fetch_datetime = first_fetch_datetime.astimezone(timezone.utc) # type: ignore
+ if any(ts in first_fetch.lower() for ts in MAX_DELTA_TIME_STRINGS): # type: ignore
+ first_fetch_datetime += timedelta(minutes=1) # type: ignore
+ integration_context: dict = get_integration_context()
+ continuation_time = integration_context.get('continuation_time')
+ raise_exception_for_date_difference = False
+ date_difference_greater_than_expected = first_fetch_datetime < arg_to_datetime( # type: ignore
+ MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS).astimezone(timezone.utc) # type: ignore
+ if command == 'test-module' or not continuation_time: # type: ignore
+ if first_fetch_datetime > arg_to_datetime(DEFAULT_FIRST_FETCH).astimezone(timezone.utc): # type: ignore
+ raise ValueError(MESSAGES['FUTURE_DATE'])
+ raise_exception_for_date_difference = date_difference_greater_than_expected
+ if raise_exception_for_date_difference:
+ raise ValueError(MESSAGES['INVALID_DELTA_TIME_FOR_STREAMING_DETECTIONS'])
+ return (first_fetch_datetime,)
+
+ except json.decoder.JSONDecodeError:
+ raise ValueError('User\'s Service Account JSON has invalid format.')
+
+
+def parse_error_message(error: str, region: str):
+ """
+ Extract error message from error object.
+
+ :type error: str
+ :param error: Error string response to be parsed.
+ :type region: str
+ :param region: Region value based on the location of the chronicle backstory instance.
+
+ :return: Error message.
+ :rtype: str
+ """
+ try:
+ json_error = json.loads(error)
+ if isinstance(json_error, list):
+ json_error = json_error[0]
+ except json.decoder.JSONDecodeError:
+ if region not in REGIONS.values() and '404' in error:
+ error_message = MESSAGES['INVALID_REGION']
+ else:
+ error_message = MESSAGES['INVALID_JSON_RESPONSE']
+ demisto.debug(f'{CHRONICLE_STREAM_DETECTIONS} {error_message} Response - {error}')
+ return error_message
+
+ if json_error.get('error', {}).get('code') == 403:
+ return 'Permission denied'
+ return json_error.get('error', {}).get('message', '')
+
+
+def generic_sleep_function(sleep_duration: int, ingestion: bool = False, error_statement: str = ""):
+ """
+ Log and sleep for the specified duration.
+
+ :type sleep_duration: int
+ :param sleep_duration: Duration (in seconds) for which the function will sleep.
+
+ :type ingestion: bool
+ :param ingestion: Indicates that the sleep is called between the ingestion process.
+
+ :type error_statement: str
+ :param error_statement: Error statement to be logged.
+
+ :rtype: None
+ """
+ sleeping_statement = "Sleeping for {} seconds before {}."
+ if ingestion:
+ sleeping_statement = sleeping_statement.format(sleep_duration, "ingesting next set of incidents")
+ else:
+ sleeping_statement = sleeping_statement.format(sleep_duration, "retrying")
+ if error_statement:
+ sleeping_statement = f"{sleeping_statement}\n{error_statement}"
+ demisto.updateModuleHealth(sleeping_statement)
+ demisto.debug(f"{CHRONICLE_STREAM_DETECTIONS} {sleeping_statement}")
+ time.sleep(sleep_duration)
+
+
+def deduplicate_detections(detection_context: list[dict[str, Any]],
+ detection_identifiers: list[dict[str, Any]]):
+ """
+ De-duplicates the fetched detections and creates a list of unique detections to be created.
+
+ :type detection_context: list[dict[str, Any]]
+ :param detection_context: Raw response of the detections fetched.
+    :type detection_identifiers: list[dict[str, Any]]
+    :param detection_identifiers: List of dictionaries containing id and ruleVersion of detections.
+
+    :rtype: list
+    :return: Returns unique detections that should be created.
+ """
+ unique_detections = []
+ for detection in detection_context:
+ current_detection_identifier = {'id': detection.get('id', ''),
+ 'ruleVersion': detection.get('detection', [])[0].get('ruleVersion', '')}
+ if detection_identifiers and current_detection_identifier in detection_identifiers:
+ demisto.info(f"{SKIPPING_CURRENT_DETECTION} Detection: {current_detection_identifier}")
+ continue
+ unique_detections.append(detection)
+ detection_identifiers.append(current_detection_identifier)
+ return unique_detections
+
+
+def deduplicate_curatedrule_detections(detection_context: list[dict[str, Any]],
+ detection_identifiers: list[dict[str, Any]]):
+ """
+ De-duplicates the fetched curated rule detections and creates a list of unique detections to be created.
+
+    :type detection_context: list[dict[str, Any]]
+ :param detection_context: Raw response of the detections fetched.
+    :type detection_identifiers: list[dict[str, Any]]
+    :param detection_identifiers: List of dictionaries containing id of detections.
+
+    :rtype: list
+    :return: Returns unique detections that should be created.
+ """
+ unique_detections = []
+ for detection in detection_context:
+ current_detection_identifier = {'id': detection.get('id', '')}
+ if detection_identifiers and current_detection_identifier in detection_identifiers:
+ demisto.info(f"{SKIPPING_CURRENT_DETECTION} Curated Detection: {current_detection_identifier}")
+ continue
+ detection_identifiers.append(current_detection_identifier)
+ unique_detections.append(detection)
+ return unique_detections
+
+
+def convert_events_to_actionable_incidents(events: list) -> list:
+ """
+ Convert event to incident.
+
+    :type events: list
+    :param events: List of events.
+
+    :rtype: list
+    :return: Returns a list of incidents to be created.
+ """
+ incidents = []
+ for event in events:
+ event["IncidentType"] = "DetectionAlert"
+ incident = {
+ 'name': event['detection'][0]['ruleName'],
+ 'details': json.dumps(event),
+ 'rawJSON': json.dumps(event),
+ }
+ incidents.append(incident)
+
+ return incidents
+
+
+def convert_curatedrule_events_to_actionable_incidents(events: list) -> list:
+ """
+ Convert event from Curated Rule detection to incident.
+
+ :type events: List
+ :param events: List of events.
+
+    :rtype: List
+    :return: Returns a list of incidents to be created.
+ """
+ incidents = []
+ for event in events:
+ event["IncidentType"] = "CuratedRuleDetectionAlert"
+ incident = {
+ 'name': event['detection'][0]['ruleName'],
+ 'occurred': event.get('detectionTime'),
+ 'details': json.dumps(event),
+ 'rawJSON': json.dumps(event),
+ 'severity': SEVERITY_MAP.get(str(event['detection'][0].get('severity')).lower(), 0),
+ }
+ incidents.append(incident)
+
+ return incidents
+
+
+def get_event_list_for_detections_context(result_events: Dict[str, Any]) -> List[Dict[str, Any]]:
+ """
+ Convert events response related to the specified detection into list of events for command's context.
+
+ :param result_events: Dictionary containing list of events
+ :type result_events: Dict[str, Any]
+
+ :return: returns list of the events related to the specified detection
+ :rtype: List[Dict[str,Any]]
+ """
+ events = []
+ if result_events:
+ for event in result_events.get('references', []):
+ events.append(event.get('event', {}))
+ return events
+
+
+def get_asset_identifier_details(asset_identifier):
+ """
+ Return asset identifier detail such as hostname, ip, mac.
+
+ :param asset_identifier: A dictionary that have asset information
+ :type asset_identifier: dict
+
+ :return: asset identifier name
+ :rtype: str
+ """
+ if asset_identifier.get('hostname', ''):
+ return asset_identifier.get('hostname', '')
+ if asset_identifier.get('ip', []):
+ return '\n'.join(asset_identifier.get('ip', []))
+ if asset_identifier.get('mac', []):
+ return '\n'.join(asset_identifier.get('mac', []))
+
+
+def get_events_context_for_detections(result_events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    """
+    Convert events in response into Context data for events associated with a detection.
+
+    :param result_events: List of Dictionary containing list of events
+    :type result_events: List[Dict[str, Any]]
+
+    :return: list of events to populate in the context
+    :rtype: List[Dict[str, Any]]
+    """
+    events_ec = []
+    for collection_element in result_events:
+        reference = []
+        events = get_event_list_for_detections_context(collection_element)
+        for event in events:
+            event_dict = {}
+            # Flatten the 'metadata' sub-dict so its fields appear at the top level
+            # of the context entry (metadata is removed from the event itself).
+            if 'metadata' in event.keys():
+                event_dict.update(event.pop('metadata'))
+            principal_asset_identifier = get_asset_identifier_details(event.get('principal', {}))
+            target_asset_identifier = get_asset_identifier_details(event.get('target', {}))
+            if principal_asset_identifier:
+                event_dict.update({'principalAssetIdentifier': principal_asset_identifier})
+            if target_asset_identifier:
+                event_dict.update({'targetAssetIdentifier': target_asset_identifier})
+            event_dict.update(event)
+            reference.append(event_dict)
+        collection_element_dict = {'references': reference, 'label': collection_element.get('label', '')}
+        events_ec.append(collection_element_dict)
+
+    return events_ec
+
+
+def get_events_context_for_curatedrule_detections(result_events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    """
+    Convert events in response into Context data for events associated with a curated rule detection.
+
+    :param result_events: List of Dictionary containing list of events
+    :type result_events: List[Dict[str, Any]]
+
+    :return: list of events to populate in the context
+    :rtype: List[Dict[str, Any]]
+    """
+    events_ec = []
+    for collection_element in result_events:
+        reference = []
+        events = get_event_list_for_detections_context(collection_element)
+        for event in events:
+            event_dict = {}
+            # Flatten the 'metadata' sub-dict so its fields appear at the top level
+            # of the context entry (metadata is removed from the event itself).
+            if 'metadata' in event.keys():
+                event_dict.update(event.pop('metadata'))
+            principal_asset_identifier = get_asset_identifier_details(event.get('principal', {}))
+            target_asset_identifier = get_asset_identifier_details(event.get('target', {}))
+            # Aggregate severities from every securityResult into one comma-joined string.
+            if event.get('securityResult'):
+                severity = []
+                for security_result in event.get('securityResult', []):
+                    if isinstance(security_result, dict) and 'severity' in security_result:
+                        severity.append(security_result.get('severity'))
+                if severity:
+                    event_dict.update({'eventSeverity': ','.join(severity)})  # type: ignore
+            if principal_asset_identifier:
+                event_dict.update({'principalAssetIdentifier': principal_asset_identifier})
+            if target_asset_identifier:
+                event_dict.update({'targetAssetIdentifier': target_asset_identifier})
+            event_dict.update(event)
+            reference.append(event_dict)
+        collection_element_dict = {'references': reference, 'label': collection_element.get('label', '')}
+        events_ec.append(collection_element_dict)
+
+    return events_ec
+
+
+def add_detections_in_incident_list(detections: List, detection_incidents: List) -> None:
+    """
+    Add found detections into the incident list.
+
+    Events attached to each detection are converted into context format in
+    place before the detections are appended to ``detection_incidents``.
+
+    :type detections: list
+    :param detections: list of detection
+    :type detection_incidents: list
+    :param detection_incidents: list of incidents (mutated in place)
+
+    :rtype: None
+    """
+    # A non-empty list is truthy; the explicit len() > 0 check was redundant.
+    if detections:
+        for detection in detections:
+            events_ec = get_events_context_for_detections(detection.get('collectionElements', []))
+            detection['collectionElements'] = events_ec
+        detection_incidents.extend(detections)
+
+
+def add_curatedrule_detections_in_incident_list(curatedrule_detections: List,
+                                                curatedrule_detection_to_process: List) -> None:
+    """
+    Add found curated rule detections into the incident list.
+
+    Events attached to each detection are converted into context format in
+    place before the detections are appended to ``curatedrule_detection_to_process``.
+
+    :type curatedrule_detections: List
+    :param curatedrule_detections: List of curated detection.
+    :type curatedrule_detection_to_process: List
+    :param curatedrule_detection_to_process: List of incidents (mutated in place).
+
+    :rtype: None
+    """
+    # A non-empty list is truthy; the explicit len() > 0 check was redundant.
+    if curatedrule_detections:
+        for detection in curatedrule_detections:
+            events_ec = get_events_context_for_curatedrule_detections(detection.get('collectionElements', []))
+            detection['collectionElements'] = events_ec
+        curatedrule_detection_to_process.extend(curatedrule_detections)
+
+
+def parse_stream(response: requests.Response) -> Iterator[Mapping[str, Any]]:
+    """Parses a stream response containing one detection batch.
+
+    The requests library provides utilities for iterating over the HTTP stream
+    response, so we do not have to worry about chunked transfer encoding. The
+    response is a stream of bytes that represent a JSON array.
+    Each top-level element of the JSON array is a detection batch. The array is
+    "never ending"; the server can send a batch at any time, thus
+    adding to the JSON array.
+
+    Args:
+        response: The response object returned from post().
+
+    Yields:
+        Dictionary representations of each detection batch that was sent over the stream.
+    """
+    try:
+        if response.encoding is None:
+            # Default to UTF-8 so iter_lines can decode the byte stream.
+            response.encoding = "utf-8"
+
+        # Batches are delimited by CRLF; blank keep-alive lines are skipped.
+        for line in response.iter_lines(decode_unicode=True, delimiter="\r\n"):
+            if not line:
+                continue
+            # Trim all characters before first opening brace, and after last closing
+            # brace. Example:
+            #   Input:  " {'key1': 'value1'},  "
+            #   Output: "{'key1': 'value1'}"
+            json_string = "{" + line.split("{", 1)[1].rsplit("}", 1)[0] + "}"
+            yield json.loads(json_string)
+
+    except Exception as e:  # pylint: disable=broad-except
+        # Chronicle's servers will generally send a {"error": ...} dict over the
+        # stream to indicate retryable failures (e.g. due to periodic internal
+        # server maintenance), which will not cause this except block to fire.
+        yield {
+            "error": {
+                "code": 503,
+                "status": "UNAVAILABLE",
+                "message": "Exception caught while reading stream response. This "
+                           "python client is catching all errors and is returning "
+                           "error code 503 as a catch-all. The original error "
+                           f"message is as follows: {repr(e)}",
+            }
+        }
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def test_module(client_obj: Client, params: dict[str, Any]) -> str:
+    """
+    Perform test connectivity by validating a valid http response.
+
+    :type client_obj: Client
+    :param client_obj: client object which is used to get response from api
+
+    :type params: Dict[str, Any]
+    :param params: it contains the configuration parameters
+
+    :return: Raises ValueError if any error occurred during connection else returns 'ok'.
+    :rtype: str
+    """
+    demisto.debug(f'{CHRONICLE_STREAM_DETECTIONS} Running Test having Proxy {params.get("proxy")}')
+
+    # A single-detection request in test mode is enough to validate connectivity.
+    response_code, disconnection_reason, _ = stream_detection_alerts(
+        client_obj, {'detectionBatchSize': 1}, {}, True)
+    if response_code == 200 and not disconnection_reason:
+        return 'ok'
+
+    demisto.debug(f'{CHRONICLE_STREAM_DETECTIONS} Test Connection failed.\nMessage: {disconnection_reason}')
+    if 500 <= response_code <= 599:
+        return f'Internal server error occurred.\nMessage: {disconnection_reason}'
+    if response_code == 429:
+        return f'API rate limit exceeded.\nMessage: {disconnection_reason}'
+
+    error_message = disconnection_reason
+    if response_code in [400, 404, 403]:
+        if response_code == 400:
+            error_message = f'{MESSAGES["INVALID_ARGUMENTS"]}.'
+        elif response_code == 404:
+            if client_obj.region not in REGIONS.values():
+                error_message = MESSAGES['INVALID_REGION']
+            else:
+                # NOTE(review): this returns the raw disconnection reason without the
+                # 'Status code:' prefix applied below — confirm it is intentional for
+                # 404 responses on recognized regions.
+                return error_message
+        elif response_code == 403:
+            error_message = MESSAGES['PERMISSION_DENIED']
+        return f'Status code: {response_code}\nError: {error_message}'
+
+    return disconnection_reason
+
+
+def fetch_samples() -> list:
+    """
+    Extract sample events stored in the integration context and return them as incidents.
+
+    The long-running execution stores up to a handful of sample incidents under the
+    'sample_events' key of the integration context; this command simply deserializes
+    and returns them (an empty list when none were stored).
+
+    :return: list of sample incidents.
+    :rtype: list
+    """
+    # The original block carried two stacked docstrings, the first of which wrongly
+    # documented a None return; they are merged into the single accurate one above.
+    integration_context = get_integration_context()
+    sample_events = json.loads(integration_context.get('sample_events', '[]'))
+    return sample_events
+
+
+def stream_detection_alerts(
+        client: Client,
+        req_data: dict[str, Any],
+        integration_context: dict[str, Any],
+        test_mode: bool = False
+) -> Tuple[int, str, str]:
+    """Makes one call to stream_detection_alerts, and runs until disconnection.
+
+    Each call to stream_detection_alerts streams all detection alerts found after
+    req_data["continuationTime"].
+
+    Initial connections should omit continuationTime from the connection request;
+    in this case, the server will default the continuation time to the time of
+    the connection.
+
+    The server sends a stream of bytes, which is interpreted as a list of python
+    dictionaries; each dictionary represents one "detection batch."
+
+    - A detection batch might have the key "error";
+      if it does, you should retry connecting with exponential backoff, which
+      this function implements.
+    - A detection batch might have the key "heartbeat";
+      if it does, this is a "heartbeat detection batch", meant as a
+      keep-alive message from the server, which your client can ignore.
+    - If none of the above apply:
+      - The detection batch is a "non-heartbeat detection batch".
+        It will have a key, "continuationTime." This
+        continuation time should be provided when reconnecting to
+        stream_detection_alerts to continue receiving alerts from where the
+        last connection left off; the most recent continuation time (which
+        will be the maximum continuation time so far) should be provided.
+      - The detection batch may optionally have a key, "detections",
+        containing detection alerts from Rules Engine. The key will be
+        omitted if no new detection alerts were found.
+
+    Example heartbeat detection batch:
+        {
+            "heartbeat": true,
+        }
+
+    Example detection batch without detections list:
+        {
+            "continuationTime": "2019-08-01T21:59:17.081331Z"
+        }
+
+    Example detection batch with detections list:
+        {
+            "continuationTime": "2019-05-29T05:00:04.123073Z",
+            "detections": [
+                {contents of detection 1},
+                {contents of detection 2}
+            ]
+        }
+
+    Args:
+        client: Client object containing the authorized session for HTTP requests.
+        req_data: Dictionary containing connection request parameters (either empty,
+            or contains the keys, "continuationTime" and "detectionBatchSize").
+        integration_context: Dictionary containing the current context of the integration.
+        test_mode: Whether we are in test mode or not.
+
+    Returns:
+        Tuple containing (HTTP response status code from connection attempt,
+        disconnection reason, continuation time string received in most recent
+        non-heartbeat detection batch or empty string if no such non-heartbeat
+        detection batch was received).
+    """
+    url = f"{BACKSTORY_API_V2_URL}{ENDPOINTS['STREAM_DETECTIONS_ENDPOINT']}"
+
+    response_code = 0
+    disconnection_reason = ""
+    continuation_time = ""
+
+    # Heartbeats are sent by the server, approximately every 15s. Even if
+    # no new detections are being produced, the server sends empty
+    # batches.
+    # We impose a client-side timeout of 300s (5 mins) between messages from the
+    # server. We expect the server to send messages much more frequently due
+    # to the heartbeats though; this timeout should never be hit, and serves
+    # as a safety measure.
+    # If no messages are received after this timeout, the client cancels
+    # connection (then retries).
+    with client.http_client.post(url=url.format(client.region), stream=True, data=req_data, timeout=TIMEOUT,
+                                 proxies=client.proxy_info, verify=not client.disable_ssl) as response:
+        # Expected server response is a continuous stream of
+        # bytes that represent a never-ending JSON array. The parsing
+        # is handed by parse_stream. See docstring above for
+        # formats of detections and detection batches.
+        #
+        # Example stream of bytes:
+        # [
+        #   {detection batch 1},
+        #   # Some delay before server sends next batch...
+        #   {detection batch 2},
+        #   # Some delay before server sends next batch(es)...
+        #   # The ']' never arrives, because we hold the connection
+        #   # open until the connection breaks.
+        demisto.info(f"{CHRONICLE_STREAM_DETECTIONS} Initiated connection to detection alerts stream with request: {req_data}")
+        # Report module health only once per connection, on the first received batch.
+        demisto_health_needs_to_update = True
+        response_code = response.status_code
+        if response.status_code != 200:
+            disconnection_reason = f"Connection refused with status={response.status_code}, error={response.text}"
+        else:
+            # Loop over each detection batch that is streamed. The following
+            # loop will block, and an iteration only runs when the server
+            # sends a detection batch.
+            for batch in parse_stream(response):
+                if "error" in batch:
+                    error_dump = json.dumps(batch["error"], indent="\t")
+                    disconnection_reason = f"Connection closed with error: {error_dump}"
+                    break
+                if demisto_health_needs_to_update:
+                    demisto.updateModuleHealth('')
+                    demisto_health_needs_to_update = False
+                if test_mode:
+                    # In test mode a single successful batch validates connectivity.
+                    break
+                if "heartbeat" in batch:
+                    demisto.info(f"{CHRONICLE_STREAM_DETECTIONS} Got empty heartbeat (confirms connection/keepalive).")
+                    continue
+
+                # When we reach this line, we have successfully received
+                # a non-heartbeat detection batch.
+                continuation_time = batch["continuationTime"]
+                if "detections" not in batch:
+                    demisto.info(f"{CHRONICLE_STREAM_DETECTIONS} Got a new continuationTime={continuation_time}, no detections.")
+                    integration_context.update({'continuation_time': continuation_time})
+                    set_integration_context(integration_context)
+                    demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+                    continue
+                else:
+                    demisto.info(f"{CHRONICLE_STREAM_DETECTIONS} Got detection batch with continuationTime={continuation_time}.")
+
+                # Process the batch.
+                detections = batch["detections"]
+                demisto.debug(f"{CHRONICLE_STREAM_DETECTIONS} No. of detections fetched: {len(detections)}.")
+                if not detections:
+                    integration_context.update({'continuation_time': continuation_time})
+                    set_integration_context(integration_context)
+                    demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+                    continue
+                user_rule_detections = []
+                chronicle_rule_detections = []
+                detection_identifiers = integration_context.get('detection_identifiers', [])
+                curatedrule_detection_identifiers = integration_context.get('curatedrule_detection_identifiers', [])
+
+                # Split detections by origin: user-created rules vs Chronicle (curated) rules.
+                for raw_detection in detections:
+                    raw_detection_type = str(raw_detection.get('type', ''))
+                    if raw_detection_type.upper() == 'RULE_DETECTION':
+                        user_rule_detections.append(raw_detection)
+                    elif raw_detection_type.upper() == 'GCTI_FINDING':
+                        chronicle_rule_detections.append(raw_detection)
+
+                user_rule_detections = deduplicate_detections(user_rule_detections, detection_identifiers)
+                chronicle_rule_detections = deduplicate_curatedrule_detections(
+                    chronicle_rule_detections, curatedrule_detection_identifiers)
+                detection_to_process: list[dict] = []
+                add_detections_in_incident_list(user_rule_detections, detection_to_process)
+                detection_incidents: list[dict] = convert_events_to_actionable_incidents(detection_to_process)
+                curatedrule_detection_to_process: list[dict] = []
+                add_curatedrule_detections_in_incident_list(chronicle_rule_detections, curatedrule_detection_to_process)
+                curatedrule_incidents: list[dict] = convert_curatedrule_events_to_actionable_incidents(
+                    curatedrule_detection_to_process)
+                # Keep up to 5 incidents of each type as samples for the fetch-incidents command.
+                sample_events = detection_incidents[:5]
+                sample_events.extend(curatedrule_incidents[:5])
+                if sample_events:
+                    integration_context.update({'sample_events': json.dumps(sample_events)})
+                    set_integration_context(integration_context)
+                    demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+                incidents = detection_incidents
+                incidents.extend(curatedrule_incidents)
+                integration_context.update({'continuation_time': continuation_time})
+                if not incidents:
+                    set_integration_context(integration_context)
+                    demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+                    continue
+                # Ingest incidents in chunks of IDEAL_BATCH_SIZE, pausing between full batches.
+                total_ingested_incidents = 0
+                length_of_incidents = len(incidents)
+                while total_ingested_incidents < len(incidents):
+                    current_batch = IDEAL_BATCH_SIZE if (
+                        total_ingested_incidents + IDEAL_BATCH_SIZE <= length_of_incidents) else (
+                        length_of_incidents - total_ingested_incidents)
+                    demisto.debug(f"{CHRONICLE_STREAM_DETECTIONS} No. of detections being ingested: {current_batch}.")
+                    demisto.createIncidents(incidents[total_ingested_incidents: total_ingested_incidents + current_batch])
+                    total_ingested_incidents = total_ingested_incidents + current_batch
+                    if current_batch == IDEAL_BATCH_SIZE:
+                        generic_sleep_function(IDEAL_SLEEP_TIME_BETWEEN_BATCHES, ingestion=True)
+
+                integration_context.update({
+                    'detection_identifiers': detection_identifiers,
+                    'curatedrule_detection_identifiers': curatedrule_detection_identifiers,
+                })
+                set_integration_context(integration_context)
+                demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+
+    return response_code, disconnection_reason, continuation_time
+
+
+def stream_detection_alerts_in_retry_loop(client: Client, initial_continuation_time: datetime, test_mode: bool = False):
+    """Calls stream_detection_alerts and manages state for reconnection.
+
+    Args:
+
+        client: Client object, used to make an authorized session for HTTP requests.
+        initial_continuation_time: A continuation time to be used in the initial stream_detection_alerts
+            connection (default = server will set this to the time of connection). Subsequent stream_detection_alerts
+            connections will use continuation times from past connections.
+        test_mode: Whether we are in test mode or not.
+
+    Raises:
+        RuntimeError: Hit retry limit after multiple consecutive failures
+            without success.
+
+    """
+    integration_context: dict = get_integration_context()
+    initial_continuation_time_str = initial_continuation_time.astimezone(timezone.utc).strftime(DATE_FORMAT)
+    continuation_time = integration_context.get('continuation_time', initial_continuation_time_str)
+
+    # Our retry loop uses exponential backoff with a retry limit.
+    # For simplicity, we retry for all types of errors.
+    consecutive_failures = 0
+    disconnection_reason = ""
+    # Pre-initialize the status code so the error handlers below never reference
+    # it before the first connection attempt completes (avoids a NameError if a
+    # RuntimeError is raised before stream_detection_alerts returns).
+    response_code = 0
+    while True:
+        try:
+            if consecutive_failures > MAX_CONSECUTIVE_FAILURES:
+                raise RuntimeError(MESSAGES['CONSECUTIVELY_FAILED'].format(consecutive_failures))
+
+            if consecutive_failures:
+                # Exponential backoff between retries.
+                sleep_duration = 2 ** consecutive_failures
+                generic_sleep_function(sleep_duration, error_statement=disconnection_reason)
+
+            req_data = {} if not continuation_time else {"continuationTime": continuation_time}
+            req_data.update({'detectionBatchSize': MAX_DETECTION_STREAM_BATCH_SIZE})
+
+            # Connections may last hours. Make a new authorized session every retry loop
+            # to avoid session expiration.
+            client.build_http_client()
+
+            # This function runs until disconnection.
+            response_code, disconnection_reason, most_recent_continuation_time = stream_detection_alerts(
+                client, req_data, integration_context)
+
+            if most_recent_continuation_time:
+                consecutive_failures = 0
+                disconnection_reason = ""
+                continuation_time = most_recent_continuation_time
+                integration_context.update({'continuation_time': most_recent_continuation_time or continuation_time})
+                set_integration_context(integration_context)
+                demisto.debug(f'Updated integration context checkpoint with continuationTime={continuation_time}.')
+                if test_mode:
+                    return integration_context
+            else:
+                disconnection_reason = disconnection_reason if disconnection_reason else "Connection unexpectedly closed."
+
+                # Do not retry if the disconnection was due to invalid arguments.
+                # We assume a disconnection was due to invalid arguments if the connection
+                # was refused with HTTP status code 400.
+                if response_code == 400:
+                    raise RuntimeError(disconnection_reason.replace(
+                        'Connection refused', MESSAGES['INVALID_ARGUMENTS'], 1))
+                elif 400 < response_code < 500 and response_code != 429:
+                    raise RuntimeError(disconnection_reason)
+
+                consecutive_failures += 1
+                # Do not update continuation_time because the connection immediately
+                # failed without receiving any non-heartbeat detection batches.
+                # Retry with the same connection request as before.
+        except RuntimeError as runtime_error:
+            demisto.error(str(runtime_error))
+            if response_code == 400 and initial_continuation_time_str != continuation_time:
+                # The continuation time coming from integration context is older than 7 days. Update it to a 7 days.
+                new_continuation_time = arg_to_datetime(MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS).astimezone(  # type: ignore
+                    timezone.utc) + timedelta(minutes=1)
+                new_continuation_time_str = new_continuation_time.strftime(DATE_FORMAT)
+                demisto.updateModuleHealth('Got the continuation time from the integration context which is '
+                                           f'older than {MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS}.\n'
+                                           f'Changing the continuation time to {new_continuation_time_str}.')
+                continuation_time = new_continuation_time_str
+            elif consecutive_failures <= MAX_CONSECUTIVE_FAILURES:
+                generic_sleep_function(IDEAL_SLEEP_TIME_BETWEEN_BATCHES, error_statement=str(runtime_error))
+            else:
+                demisto.updateModuleHealth(str(runtime_error))
+            consecutive_failures = 0
+            disconnection_reason = ""
+            if test_mode:
+                raise runtime_error
+        except Exception as exception:
+            demisto.error(str(exception))
+            generic_sleep_function(IDEAL_SLEEP_TIME_BETWEEN_BATCHES, error_statement=str(exception))
+            consecutive_failures = 0
+            disconnection_reason = ""
+            if test_mode:
+                raise exception
+
+
+def main():
+    """PARSE AND VALIDATE INTEGRATION PARAMS."""
+    # Fetch the configuration parameters once instead of calling demisto.params()
+    # repeatedly (each call returns the same dict).
+    params = demisto.params()
+    proxy = params.get('proxy')
+    disable_ssl = params.get('insecure', False)
+    command = demisto.command()
+
+    try:
+        (first_fetch_timestamp,) = validate_configuration_parameters(params, command)
+
+        # Initializing client Object
+        client_obj = Client(params, proxy, disable_ssl)
+
+        # trigger command based on input
+        if command == 'test-module':
+            return_results(test_module(client_obj, demisto.args()))
+        elif command == 'long-running-execution':
+            stream_detection_alerts_in_retry_loop(client_obj, first_fetch_timestamp)  # type: ignore
+        elif command == 'fetch-incidents':
+            demisto.incidents(fetch_samples())
+
+    except Exception as e:
+        demisto.updateModuleHealth(str(e))
+        return_error(f'Failed to execute {command} command.\nError: {str(e)}')
+
+
+# initial flow of execution
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+    main()
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.yml b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.yml
new file mode 100644
index 000000000000..f6d51016ba61
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI.yml
@@ -0,0 +1,87 @@
+category: Analytics & SIEM
+sectionOrder:
+- Connect
+- Collect
+commonfields:
+ id: Google Chronicle Backstory Streaming API
+ version: -1
+configuration:
+- displaypassword: User's Service Account JSON
+ name: credentials
+ hiddenusername: true
+ required: true
+ type: 9
+ section: Connect
+- additionalinfo: Select the region based on the location of the chronicle backstory instance. If the region is not listed in the dropdown, choose the "Other" option and specify the region in the "Other Region" text field.
+ defaultvalue: General
+ display: Region
+ name: region
+ options:
+ - General
+ - Europe
+ - Asia
+ - Europe-west2
+ - Other
+ type: 15
+ section: Connect
+- additionalinfo: Specify the region based on the location of the chronicle backstory instance. Only applicable if the "Other" option is selected in the Region dropdown.
+ display: Other Region
+ hidden: false
+ name: other_region
+ required: false
+ type: 0
+ section: Connect
+- display: Incident type
+ name: incidentType
+ type: 13
+ section: Connect
+ required: false
+- additionalinfo: |-
+ The date or relative timestamp from where to start fetching detections. Default will be the current time.
+
+ Note: The API is designed to retrieve data for the past 7 days only. Requests for data beyond that timeframe will result in errors.
+
+ Supported formats: N minutes, N hours, N days, N weeks, yyyy-mm-dd, yyyy-mm-ddTHH:MM:SSZ
+
+ For example: 10 minutes, 5 hours, 6 days, 1 week, 2024-12-31, 01 Mar 2024, 01 Feb 2024 04:45:33, 2024-04-17T14:05:44Z
+ defaultvalue: now
+ display: First fetch time
+ name: first_fetch
+ type: 0
+ section: Collect
+ required: false
+- defaultvalue: 'true'
+ display: Long running instance
+ hidden: true
+ name: longRunning
+ type: 8
+ section: Connect
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+description: Use the Google Chronicle Backstory Streaming API integration to ingest detections created by both user-created rules and Chronicle Rules as XSOAR incidents.
+display: Chronicle Streaming API
+name: Google Chronicle Backstory Streaming API
+script:
+ dockerimage: demisto/googleapi-python3:1.0.0.97032
+ longRunning: true
+ isFetchSamples: true
+ runonce: false
+ script: '-'
+ subtype: python3
+ type: python
+fromversion: 6.10.0
+tests:
+- No tests (auto formatted)
+defaultmapperin: 'Chronicle-mapper'
+defaultclassifier: 'Chronicle'
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_dark.svg b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_dark.svg
new file mode 100644
index 000000000000..685a1748da46
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_dark.svg
@@ -0,0 +1,22 @@
+
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_description.md b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_description.md
new file mode 100644
index 000000000000..593eade28ca9
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_description.md
@@ -0,0 +1,20 @@
+### Configure an API account on Google Chronicle
+
+Your Customer Experience Engineer (CEE) will provide you with a [Google Developer Service Account Credential](https://developers.google.com/identity/protocols/OAuth2#serviceaccount) to enable the Google API client to communicate with the Backstory API.
+
+### Instance Configuration
+
+* Provide the "**Service Account JSON**".
+* Select the "**Region**" based on the location of the chronicle backstory instance.
+* Provide the date or relative timestamp from where to start fetching detections.
+ * Note: The API is designed to retrieve data for the [past 7 days only](https://cloud.google.com/chronicle/docs/reference/detection-engine-api#body_parameters_4). Requests for data beyond that timeframe will result in errors.
+
+### Generic Notes
+
+* This integration would only ingest the **detections** created by both **user-created rules** and **Chronicle Rules**.
+* Also, it only ingests the detections created by rules whose **alerting status** was **enabled** at the time of detection.
+* Enable alerting using the **Chronicle UI** by setting the **Alerting** option to **enabled**.
+ * For **user-created rules**, use the Rules Dashboard to enable each rule's alerting status.
+ * For **Chronicle Rules**, enable alerting status of the Rule Set to get detections created by corresponding rules.
+* You are limited to a maximum of 10 simultaneous streaming integration instances for the particular Service Account Credential (your instance will receive a **429 error** if you attempt to create more).
+* For more, please check out the [Google Chronicle reference doc](https://cloud.google.com/chronicle/docs/reference/detection-engine-api#streamdetectionalerts).
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_image.png b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_image.png
new file mode 100644
index 000000000000..dff3216e432f
Binary files /dev/null and b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_image.png differ
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_light.svg b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_light.svg
new file mode 100644
index 000000000000..6c22dbc3e2cb
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_light.svg
@@ -0,0 +1,22 @@
+
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_test.py b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_test.py
new file mode 100644
index 000000000000..c81c4032bb9b
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/GoogleChronicleBackstoryStreamingAPI_test.py
@@ -0,0 +1,367 @@
+"""Test File for GoogleChronicleBackstory Integration."""
+import json
+import os
+import time
+
+import pytest
+from unittest import mock
+
+from CommonServerPython import arg_to_datetime
+import demistomock as demisto
+
+from GoogleChronicleBackstoryStreamingAPI import DATE_FORMAT, MAX_CONSECUTIVE_FAILURES, MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS, \
+ fetch_samples, service_account, auth_requests, validate_configuration_parameters, stream_detection_alerts_in_retry_loop, \
+ validate_response, test_module as main_test_module, timezone, timedelta, MESSAGES, Client, parse_error_message
+
+
# Minimal, valid set of integration parameters shared by the tests below.
# 'credentials.password' carries the Service Account JSON blob; '{}' is an
# empty-but-parseable JSON object used as a stand-in.
GENERIC_INTEGRATION_PARAMS = {
    'credentials': {
        'password': '{}',
    },
    'first_fetch': '1 days'
}
+
+
class MockResponse:
    """Minimal stand-in for an AuthorizedSession/requests response.

    The callables are plain lambdas stored as class attributes, so tests use
    the class object itself (not an instance); each accepts keyword arguments
    only and ignores them.
    """

    status_code = 200
    json = lambda **_: {}  # noqa: E731
    text = "{}"
    request = lambda **_: ""  # noqa: E731
    post = lambda **_: ""  # noqa: E731
+
+
class StreamResponse:
    """Context-manager stub whose ``__enter__`` yields a pre-attached response.

    Tests assign ``StreamResponse.mock_response`` before use; entering the
    context simply hands that object back.
    """

    def __init__(self, **_):
        """Accept and discard arbitrary keyword arguments."""

    def __enter__(self):
        """Return the response object attached to the class."""
        return self.mock_response

    def __exit__(self, *_):
        """Perform no cleanup; exceptions propagate to the caller."""
+
+
def util_load_json(path):
    """Read the file at *path* and return its JSON content as Python objects."""
    with open(path, encoding='utf-8') as file_handle:
        return json.load(file_handle)
+
+
@pytest.fixture
def special_mock_client():
    """Fixture for the http client with no original client class response."""
    client_stub = mock.Mock()
    client_stub.region = "General"
    return client_stub
+
+
@pytest.fixture()
def mock_client(mocker):
    """Fixture for the http client."""
    # Bypass real Google auth: both credential parsing and the authorized
    # session are patched, so building Client performs no network I/O.
    credentials = {"type": "service_account"}
    mocker.patch.object(service_account.Credentials, 'from_service_account_info', return_value=credentials)
    mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=MockResponse)
    client = Client(params=GENERIC_INTEGRATION_PARAMS, proxy=False, disable_ssl=True)
    return client
+
+
def test_validate_configuration_parameters(capfd):
    """Test case scenario for validating the configuration parameters."""
    params = dict(GENERIC_INTEGRATION_PARAMS)
    # Close output capture so validation logging cannot interfere with pytest.
    capfd.close()
    validate_configuration_parameters(params, 'test-module')
+
+
@pytest.mark.parametrize('first_fetch', ['invalid', '8 days'])
def test_validate_configuration_parameters_with_invalid_first_fetch(capfd, first_fetch):
    """Test case scenario for validating the configuration parameters with invalid first fetch."""
    params = dict(GENERIC_INTEGRATION_PARAMS)
    params['first_fetch'] = first_fetch
    capfd.close()
    # Both an unparseable value and one beyond the supported window must fail.
    with pytest.raises(ValueError):
        validate_configuration_parameters(params, 'test-module')
+
+
def test_validate_configuration_parameters_with_invalid_credentials():
    """Test case scenario for validating the configuration parameters with invalid credentials."""
    params = dict(GENERIC_INTEGRATION_PARAMS)
    # 'invalid' is not parseable as a Service Account JSON blob.
    params['credentials'] = {'password': 'invalid'}
    with pytest.raises(ValueError):
        validate_configuration_parameters(params, 'test-module')
+
+
def test_parse_error_message_with_invalid_json(capfd):
    """Test case scenario for parsing error message with invalid json."""
    capfd.close()
    parsed = parse_error_message('invalid json', 'General')
    assert parsed == MESSAGES['INVALID_JSON_RESPONSE']
+
+
def test_parse_error_message_with_invalid_region(capfd):
    """Test case scenario for parsing error message with invalid region."""
    capfd.close()
    parsed = parse_error_message('service unavailable 404', 'invalid region')
    assert parsed == MESSAGES['INVALID_REGION']
+
+
def test_validate_response(mocker, capfd):
    """
    Test case scenario for successful execution of validate_response.

    Given:
        - mocked client
    When:
        - Calling `validate_response` function.
    Then:
        - Returns an ok message
    """
    credentials = {"type": "service_account"}
    mocker.patch.object(service_account.Credentials, 'from_service_account_info', return_value=credentials)
    mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=MockResponse)
    # Use the "other"/custom-region parameters to also exercise that Client branch.
    integration_params = GENERIC_INTEGRATION_PARAMS.copy()
    integration_params['region'] = 'other'
    integration_params['other_region'] = 'new-region'
    client = Client(params=integration_params, proxy=False, disable_ssl=True)

    # MockResponse.json() yields {}, so a successful validation returns {}.
    mocker.patch.object(client.http_client, 'request', return_value=MockResponse)
    capfd.close()
    assert validate_response(client, '') == {}
+
+
@mock.patch('demistomock.error')
@pytest.mark.parametrize('args', [{"status_code": 429, "message": 'API rate limit'},
                                  {"status_code": 300, "message": 'Status code: 300'},
                                  {"status_code": 500, "message": 'Internal server error'},
                                  {"status_code": 400, "message": 'Status code: 400'},
                                  {"status_code": 403,
                                   "text": '{"error": {"code": 403}}', "message": 'Permission denied'},
                                  {"text": "", "message": 'Technical Error'},
                                  {"text": "*", "message": MESSAGES['INVALID_JSON_RESPONSE']}])
def test_429_or_500_error_for_validate_response(mock_error, special_mock_client, capfd, args):
    """
    Test behavior for 429 and 500 error codes for validate_response.

    NOTE(review): despite the name, the parametrization also covers 300, 400,
    403 and empty/malformed response bodies.
    """
    mock_error.return_value = {}

    # Local response stub; deliberately shadows the module-level MockResponse
    # because this test needs an instance whose json() parses its `text`.
    class MockResponse:
        status_code = 200
        text = '[{"error": {}}]'

        def json(self):
            return json.loads(self.text)

    mock_response = MockResponse()
    if 'status_code' in args:
        mock_response.status_code = args.get('status_code')
    if 'text' in args:
        mock_response.text = args.get('text')

    # side_effect with a single item: a second HTTP call would raise StopIteration.
    special_mock_client.http_client.request.side_effect = [mock_response]
    capfd.close()
    with pytest.raises(ValueError) as value_error:
        validate_response(special_mock_client, '')

    # The mapped error message must surface, after exactly one request.
    assert args.get('message') in str(value_error.value)
    assert special_mock_client.http_client.request.call_count == 1
+
+
def test_test_module(mocker, mock_client, capfd):
    """
    Test case scenario for successful execution of test_module.

    Given:
        - mocked client
    When:
        - Calling `test_module` function.
    Then:
        - Assert that 'ok' is returned for a clean detection stream.
    """
    mock_response = MockResponse()

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections.txt'), 'r') as f:
        # iter_lines is a lazy lambda over `f`; keep everything that consumes
        # the stream inside this `with` block so the handle is still open.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        # Neutralize back-off sleeps so the test runs instantly.
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        mock_client.http_client = mock_response
        capfd.close()
        assert main_test_module(mock_client, {}) == 'ok'
+
+
def test_test_module_for_error(mocker, mock_client, capfd):
    """
    Test case scenario for unsuccessful execution of test_module.

    Given:
        - mocked client
    When:
        - Calling `test_module` function.
    Then:
        - Assert that the connection-closed error message is returned.
    """
    mock_response = MockResponse()

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections_error_2.txt'), 'r') as f:
        # Lazy lambda over `f`: stream consumption stays inside the `with`.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        mock_client.http_client = mock_response
        capfd.close()
        assert main_test_module(mock_client, {}) == 'Connection closed with error: "error"'
        # Reset the shared class-level attribute so later tests are unaffected.
        mock_response.post = None
+
+
def test_fetch_samples(mocker):
    """
    Test case scenario for successful execution of fetch_samples.

    Given:
        - mocked client
    When:
        - Calling `fetch_samples` function.
    Then:
        - Returns list of incidents stored in context.
    """
    stored_context = {'sample_events': '[{}]'}
    mocker.patch.object(demisto, 'getIntegrationContext', return_value=stored_context)
    sample_incidents = fetch_samples()
    assert sample_incidents == [{}]
+
+
def test_stream_detection_alerts_in_retry_loop(mocker, mock_client, capfd):
    """
    Test case scenario for successful execution of stream_detection_alerts_in_retry_loop.

    Given:
        - mocked client
    When:
        - Calling `stream_detection_alerts_in_retry_loop` function.
    Then:
        - Assert for the continuation time and incidents.
    """
    mock_response = MockResponse()

    # Expected integration context after consuming the mocked detection stream.
    stream_detection_outputs: dict = util_load_json(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                                 'test_data/steam_detection_outputs.json'))

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections.txt'), 'r') as f:
        # iter_lines is a lazy lambda over `f`; stream consumption must happen
        # inside this `with` block while the handle is still open.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=mock_response)
        # Neutralize back-off sleeps so retries are instant.
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        capfd.close()
        assert stream_detection_alerts_in_retry_loop(
            mock_client, arg_to_datetime('now'), test_mode=True) == stream_detection_outputs
+
+
def test_stream_detection_alerts_in_retry_loop_with_error(mocker, mock_client, capfd):
    """
    Test case scenario for execution of stream_detection_alerts_in_retry_loop when error response comes.

    Given:
        - mocked client
    When:
        - Calling `stream_detection_alerts_in_retry_loop` function.
    Then:
        - Assert exception value.
    """
    mock_response = MockResponse()

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections_error.txt'), 'r') as f:
        # Lazy lambda over `f`: stream consumption stays inside the `with`.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=mock_response)
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        capfd.close()
        # After MAX_CONSECUTIVE_FAILURES + 1 failed attempts the loop gives up
        # with a RuntimeError carrying the consecutive-failure message.
        with pytest.raises(RuntimeError) as exc_info:
            stream_detection_alerts_in_retry_loop(mock_client, arg_to_datetime('now'), test_mode=True)

        assert str(exc_info.value) == MESSAGES['CONSECUTIVELY_FAILED'].format(MAX_CONSECUTIVE_FAILURES + 1)
+
+
def test_stream_detection_alerts_in_retry_loop_with_empty_response(mocker, mock_client, capfd):
    """
    Test case scenario for execution of stream_detection_alerts_in_retry_loop when empty response comes.

    Given:
        - mocked client
    When:
        - Calling `stream_detection_alerts_in_retry_loop` function.
    Then:
        - Assert that the missing 'continuationTime' key surfaces as an exception.
    """
    mock_response = MockResponse()

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections_empty.txt'), 'r') as f:
        # Lazy lambda over `f`: stream consumption stays inside the `with`.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=mock_response)
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        capfd.close()
        # An empty stream never delivers 'continuationTime', so the loop ends
        # with a KeyError for that key.
        with pytest.raises(Exception) as exc_info:
            stream_detection_alerts_in_retry_loop(mock_client, arg_to_datetime('now'), test_mode=True)
        assert str(exc_info.value) == str(KeyError('continuationTime'))
+
+
def test_stream_detection_alerts_in_retry_loop_with_400(mocker, mock_client, capfd):
    """
    Test case scenario for execution of stream_detection_alerts_in_retry_loop when 400 status code comes.

    Given:
        - mocked client
    When:
        - Calling `stream_detection_alerts_in_retry_loop` function.
    Then:
        - Assert exception value.
    """
    mock_response = MockResponse()
    mock_response.status_code = 400

    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
              'test_data/stream_detections_error.txt'), 'r') as f:
        # Lazy lambda over `f`: stream consumption stays inside the `with`.
        mock_response.iter_lines = lambda **_: f.readlines()

        stream_response = StreamResponse
        stream_response.mock_response = mock_response
        mock_response.post = StreamResponse
        mock_response.encoding = None
        mocker.patch.object(auth_requests, 'AuthorizedSession', return_value=mock_response)
        mocker.patch.object(time, 'sleep', return_value=lambda **_: None)
        # Stored continuation time is one minute beyond the maximum allowed
        # streaming window; presumably this exercises the clamp/fallback path
        # before the mocked 400 is reported — TODO confirm against integration code.
        new_continuation_time = arg_to_datetime(MAX_DELTA_TIME_FOR_STREAMING_DETECTIONS).astimezone(
            timezone.utc) + timedelta(minutes=1)  # type: ignore
        new_continuation_time_str = new_continuation_time.strftime(DATE_FORMAT)
        integration_context = {'continuation_time': new_continuation_time_str}
        mocker.patch.object(demisto, 'getIntegrationContext', return_value=integration_context)
        capfd.close()
        with pytest.raises(RuntimeError) as exc_info:
            stream_detection_alerts_in_retry_loop(mock_client, arg_to_datetime('now'), test_mode=True)

        assert str(exc_info.value) == MESSAGES['INVALID_ARGUMENTS'] + ' with status=400, error={}'
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/README.md b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/README.md
new file mode 100644
index 000000000000..53209545c8eb
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/README.md
@@ -0,0 +1,48 @@
+## Overview
+---
+
+Use the Google Chronicle Backstory Streaming API integration to ingest detections created by both user-created rules and Chronicle Rules as XSOAR incidents.
+This integration was integrated and tested with version 2 of Google Chronicle Backstory Streaming API (Detection Engine API).
+
+#### Troubleshoot
+
+**Note:** The streaming mechanism will do up to 7 internal retries with a gap of 2, 4, 8, 16, 32, 64, and 128 seconds (exponentially) between the retries.
+
+##### Problem #1
+Duplication of rule detection incidents when fetched from Chronicle.
+
+##### Solution #1
+
+- To avoid duplication of incidents with duplicate detection ids and to drop them, XSOAR provides inbuilt features of Pre-process rules.
+- End users must configure this setting in the XSOAR platform independently, as it is not included in the integration pack.
+- Pre-processing rules enable users to perform certain actions on incidents as they are ingested into XSOAR.
+- Using these rules, users can filter incoming incidents and take specific actions, such as dropping all incidents or dropping and updating them based on certain conditions.
+- Please refer to [Pre-Process rules](https://xsoar.pan.dev/docs/incidents/incident-pre-processing#:~:text=Creating%20Rules&text=Navigate%20to%20Settings%20%3E%20Integrations%20%3E%20Pre,viewing%20the%20list%20of%20rules) for more information.
+
+## Configure Chronicle Streaming API on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Chronicle Streaming API.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | User's Service Account JSON | Your Customer Experience Engineer (CEE) will provide you with a [Google Developer Service Account Credential](https://developers.google.com/identity/protocols/OAuth2#serviceaccount) to enable the Google API client to communicate with the Backstory API. | True |
+ | Region | Select the region based on the location of the chronicle backstory instance. If the region is not listed in the dropdown, choose the "Other" option and specify the region in the "Other Region" text field. | False |
+ | Other Region | Specify the region based on the location of the chronicle backstory instance. Only applicable if the "Other" option is selected in the Region dropdown. | False |
+ | Incident type | | False |
+ | First fetch time | The date or relative timestamp from where to start fetching detections. Default will be the current time.<br>Note: The API is designed to retrieve data for the past 7 days only. Requests for data beyond that timeframe will result in errors.<br>Supported formats: N minutes, N hours, N days, N weeks, yyyy-mm-dd, yyyy-mm-ddTHH:MM:SSZ<br>For example: 10 minutes, 5 hours, 6 days, 1 week, 2024-12-31, 01 Mar 2024, 01 Feb 2024 04:45:33, 2024-04-17T14:05:44Z | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Generic Notes
+
+- This integration would only ingest the **detections** created by both **user-created rules** and **Chronicle Rules**.
+- Also, it only ingests the detections created by rules whose **alerting status** was **enabled** at the time of detection.
+- Enable alerting using the **Chronicle UI** by setting the **Alerting** option to **enabled**.
+ - For **user-created rules**, use the Rules Dashboard to enable each rule's alerting status.
+ - For **Chronicle Rules**, enable alerting status of the Rule Set to get detections created by corresponding rules.
+- You are limited to a maximum of 10 simultaneous streaming integration instances for the particular Service Account Credential (your instance will receive a **429 error** if you attempt to create more).
+- For more, please check out the [Google Chronicle reference doc](https://cloud.google.com/chronicle/docs/reference/detection-engine-api#streamdetectionalerts).
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/steam_detection_outputs.json b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/steam_detection_outputs.json
new file mode 100644
index 000000000000..a2f5b7213a71
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/steam_detection_outputs.json
@@ -0,0 +1,15 @@
+{
+ "sample_events": "[{\"name\": \"SampleRule\", \"details\": \"{\\\"type\\\": \\\"RULE_DETECTION\\\", \\\"detection\\\": [{\\\"ruleName\\\": \\\"SampleRule\\\", \\\"urlBackToProduct\\\": \\\"https://dummy-chronicle/ruleDetections?ruleId=ru_e6abfcb5-1b85-41b0-b64c-695b3250436f&selectedList=RuleDetectionsViewTimeline&selectedDetectionId=de_e6abfcb5-1b85-41b0-b64c-695b32504361&selectedTimestamp=2020-12-21T03:54:00Z\\\", \\\"ruleId\\\": \\\"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f\\\", \\\"ruleVersion\\\": \\\"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f@v_1602631093_146879000\\\", \\\"alertState\\\": \\\"ALERTING\\\", \\\"ruleType\\\": \\\"MULTI_EVENT\\\", \\\"detectionFields\\\": [{\\\"key\\\": \\\"client_ip\\\", \\\"value\\\": \\\"10.0.XX.XX\\\"}]}], \\\"createdTime\\\": \\\"2020-12-21T03:12:50.128428Z\\\", \\\"id\\\": \\\"de_e6abfcb5-1b85-41b0-b64c-695b32504361\\\", \\\"timeWindow\\\": {\\\"startTime\\\": \\\"2020-12-21T02:54:00Z\\\", \\\"endTime\\\": \\\"2020-12-21T03:54:00Z\\\"}, \\\"collectionElements\\\": [{\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2020-12-21T02:58:06.804Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}], \\\"answers\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1, \\\"ttl\\\": 11111, \\\"data\\\": \\\"10.0.XX.XX\\\"}], \\\"response\\\": true}}}, {\\\"eventTimestamp\\\": \\\"2020-12-21T02:56:58.802Z\\\", 
\\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}]}}}], \\\"label\\\": \\\"event\\\"}, {\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2020-12-21T02:58:06.804Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}], \\\"answers\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1, \\\"ttl\\\": 11111, \\\"data\\\": \\\"10.0.XX.XX\\\"}], \\\"response\\\": true}}}, {\\\"eventTimestamp\\\": \\\"2020-12-21T02:56:58.802Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", 
\\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}]}}}], \\\"label\\\": \\\"connections\\\"}], \\\"detectionTime\\\": \\\"2020-12-21T03:54:00Z\\\", \\\"IncidentType\\\": \\\"DetectionAlert\\\"}\", \"rawJSON\": \"{\\\"type\\\": \\\"RULE_DETECTION\\\", \\\"detection\\\": [{\\\"ruleName\\\": \\\"SampleRule\\\", \\\"urlBackToProduct\\\": \\\"https://dummy-chronicle/ruleDetections?ruleId=ru_e6abfcb5-1b85-41b0-b64c-695b3250436f&selectedList=RuleDetectionsViewTimeline&selectedDetectionId=de_e6abfcb5-1b85-41b0-b64c-695b32504361&selectedTimestamp=2020-12-21T03:54:00Z\\\", \\\"ruleId\\\": \\\"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f\\\", \\\"ruleVersion\\\": \\\"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f@v_1602631093_146879000\\\", \\\"alertState\\\": \\\"ALERTING\\\", \\\"ruleType\\\": \\\"MULTI_EVENT\\\", \\\"detectionFields\\\": [{\\\"key\\\": \\\"client_ip\\\", \\\"value\\\": \\\"10.0.XX.XX\\\"}]}], \\\"createdTime\\\": \\\"2020-12-21T03:12:50.128428Z\\\", \\\"id\\\": \\\"de_e6abfcb5-1b85-41b0-b64c-695b32504361\\\", \\\"timeWindow\\\": {\\\"startTime\\\": \\\"2020-12-21T02:54:00Z\\\", \\\"endTime\\\": \\\"2020-12-21T03:54:00Z\\\"}, \\\"collectionElements\\\": [{\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2020-12-21T02:58:06.804Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, 
\\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}], \\\"answers\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1, \\\"ttl\\\": 11111, \\\"data\\\": \\\"10.0.XX.XX\\\"}], \\\"response\\\": true}}}, {\\\"eventTimestamp\\\": \\\"2020-12-21T02:56:58.802Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}]}}}], \\\"label\\\": \\\"event\\\"}, {\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2020-12-21T02:58:06.804Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}], \\\"answers\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1, 
\\\"ttl\\\": 11111, \\\"data\\\": \\\"10.0.XX.XX\\\"}], \\\"response\\\": true}}}, {\\\"eventTimestamp\\\": \\\"2020-12-21T02:56:58.802Z\\\", \\\"eventType\\\": \\\"NETWORK_DNS\\\", \\\"productName\\\": \\\"ExtraHop\\\", \\\"ingestedTimestamp\\\": \\\"2020-12-21T03:02:46.559472Z\\\", \\\"principalAssetIdentifier\\\": \\\"ray-xxx-laptop\\\", \\\"targetAssetIdentifier\\\": \\\"10.0.XX.XX\\\", \\\"principal\\\": {\\\"hostname\\\": \\\"ray-xxx-laptop\\\", \\\"ip\\\": [\\\"10.0.XX.XX\\\"], \\\"mac\\\": [\\\"88:a6:XX:XX:XX:XX\\\"]}, \\\"target\\\": {\\\"ip\\\": [\\\"10.0.XX.XX\\\"]}, \\\"securityResult\\\": [{\\\"action\\\": [\\\"UNKNOWN_ACTION\\\"]}], \\\"network\\\": {\\\"applicationProtocol\\\": \\\"DNS\\\", \\\"dns\\\": {\\\"questions\\\": [{\\\"name\\\": \\\"is5-ssl.mzstatic.com\\\", \\\"type\\\": 1}]}}}], \\\"label\\\": \\\"connections\\\"}], \\\"detectionTime\\\": \\\"2020-12-21T03:54:00Z\\\", \\\"IncidentType\\\": \\\"DetectionAlert\\\"}\"}, {\"name\": \"GCP Secret Manager Mass Deletion\", \"occurred\": \"2023-06-14T17:28:00Z\", \"details\": \"{\\\"type\\\": \\\"GCTI_FINDING\\\", \\\"detection\\\": [{\\\"ruleName\\\": \\\"GCP Secret Manager Mass Deletion\\\", \\\"summary\\\": \\\"Rule Detection\\\", \\\"description\\\": \\\"Identifies mass deletion of secrets in GCP Secret Manager.\\\", \\\"severity\\\": \\\"LOW\\\", \\\"urlBackToProduct\\\": \\\"https://dummy-chronicle/ruleDetections?ruleId=ur_ttp_GCP__MassSecretDeletion&selectedList=RuleDetectionsViewTimeline&ruleSource=ruleSet&selectedDetectionId=de_50fd0957-0959-6410-0000-c6f8400006b1&selectedTimestamp=2023-06-14T17:28:00Z\\\", \\\"ruleId\\\": \\\"ur_ttp_GCP__MassSecretDeletion\\\", \\\"alertState\\\": \\\"ALERTING\\\", \\\"ruleType\\\": \\\"MULTI_EVENT\\\", \\\"detectionFields\\\": [{\\\"key\\\": \\\"resource\\\", \\\"value\\\": \\\"secretmanager.googleapis.com\\\"}, {\\\"key\\\": \\\"principaluser\\\", \\\"value\\\": \\\"secret@google.com\\\", \\\"source\\\": \\\"udm.principal.user.email_addresses\\\"}], 
\\\"ruleLabels\\\": [{\\\"key\\\": \\\"rule_name\\\", \\\"value\\\": \\\"GCP Secret Manager Mass Deletion\\\"}, {\\\"key\\\": \\\"false_positives\\\", \\\"value\\\": \\\"This may be common behavior in dev, testing, or deprecated projects.\\\"}], \\\"outcomes\\\": [{\\\"key\\\": \\\"risk_score\\\", \\\"value\\\": \\\"35\\\"}, {\\\"key\\\": \\\"resource_name\\\", \\\"value\\\": \\\"gsm_secret_1, gsm_secret_10\\\", \\\"source\\\": \\\"udm.target.resource.name\\\"}, {\\\"key\\\": \\\"ip\\\", \\\"value\\\": \\\"0.0.0.1\\\", \\\"source\\\": \\\"udm.principal.ip\\\"}], \\\"ruleSet\\\": \\\"9d7537ae-0ae2-0000-b5e2-507c00008ae9\\\", \\\"ruleSetDisplayName\\\": \\\"Service Disruption\\\", \\\"riskScore\\\": 35}], \\\"createdTime\\\": \\\"2023-06-14T18:38:30.569526Z\\\", \\\"lastUpdatedTime\\\": \\\"2023-06-14T18:38:30.569526Z\\\", \\\"id\\\": \\\"de_50fd0957-0959-0000-d556-c6f8000016b1\\\", \\\"timeWindow\\\": {\\\"startTime\\\": \\\"2023-06-14T17:18:00Z\\\", \\\"endTime\\\": \\\"2023-06-14T17:28:00Z\\\"}, \\\"collectionElements\\\": [{\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2023-06-14T17:27:39.239875241Z\\\", \\\"collectedTimestamp\\\": \\\"2023-06-14T17:27:42.956025244Z\\\", \\\"eventType\\\": \\\"RESOURCE_DELETION\\\", \\\"vendorName\\\": \\\"Google Cloud Platform\\\", \\\"productName\\\": \\\"Google Cloud Platform\\\", \\\"productEventType\\\": \\\"google.cloud.secretmanager.v1.SecretManagerService.DeleteSecret\\\", \\\"urlBackToProduct\\\": \\\"url_0000\\\", \\\"ingestedTimestamp\\\": \\\"2023-06-14T17:27:44.382729Z\\\", \\\"id\\\": \\\"000000000000000000000001\\\", \\\"logType\\\": \\\"GCP_CLOUD_AUDIT\\\", \\\"eventSeverity\\\": \\\"INFORMATIONAL\\\", \\\"principalAssetIdentifier\\\": \\\"0.0.0.1\\\", \\\"principal\\\": {\\\"user\\\": {\\\"emailAddresses\\\": [\\\"secret-migration@test-is-00001.iam.gserviceaccount.com\\\"], \\\"productObjectId\\\": \\\"000000000000000000000001\\\", \\\"attribute\\\": {\\\"roles\\\": [{\\\"name\\\": 
\\\"roles/secretmanager.admin\\\", \\\"type\\\": \\\"SERVICE_ACCOUNT\\\"}], \\\"permissions\\\": [{\\\"name\\\": \\\"secretmanager.secrets.delete\\\", \\\"type\\\": \\\"ADMIN_WRITE\\\"}]}}, \\\"ip\\\": [\\\"0.0.0.1\\\"], \\\"location\\\": {\\\"state\\\": \\\"State\\\", \\\"countryOrRegion\\\": \\\"Country\\\", \\\"regionLatitude\\\": 10, \\\"regionLongitude\\\": 10, \\\"regionCoordinates\\\": {\\\"latitude\\\": 10, \\\"longitude\\\": 10}}, \\\"resource\\\": {\\\"attribute\\\": {\\\"cloud\\\": {\\\"project\\\": {\\\"name\\\": \\\"projects/0000000/secrets/gsm_secret_1\\\", \\\"resourceSubtype\\\": \\\"secretmanager.googleapis.com/Secret\\\"}}, \\\"labels\\\": [{\\\"key\\\": \\\"request_type\\\", \\\"value\\\": \\\"type.googleapis.com/google.cloud.secretmanager.v1.DeleteSecretRequest\\\"}]}}, \\\"labels\\\": [{\\\"key\\\": \\\"request_attributes_time\\\", \\\"value\\\": \\\"2023-06-14T17:27:39.245079752Z\\\"}], \\\"ipGeoArtifact\\\": [{\\\"ip\\\": \\\"0.0.0.1\\\", \\\"location\\\": {\\\"state\\\": \\\"Gujarat\\\", \\\"countryOrRegion\\\": \\\"India\\\", \\\"regionLatitude\\\": 10, \\\"regionLongitude\\\": 10, \\\"regionCoordinates\\\": {\\\"latitude\\\": 10, \\\"longitude\\\": 10}}, \\\"network\\\": {\\\"asn\\\": \\\"00001\\\", \\\"dnsDomain\\\": \\\"broad_band.in\\\", \\\"carrierName\\\": \\\"broad band ltd.\\\", \\\"organizationName\\\": \\\"broad band services limited\\\"}}]}, \\\"target\\\": {\\\"application\\\": \\\"secretmanager.googleapis.com\\\", \\\"resource\\\": {\\\"name\\\": \\\"gsm_secret_1\\\", \\\"attribute\\\": {\\\"labels\\\": [{\\\"key\\\": \\\"request_name\\\", \\\"value\\\": \\\"projects/test-is-00001/secrets/gsm_secret_1\\\"}]}}, \\\"cloud\\\": {\\\"environment\\\": \\\"GOOGLE_CLOUD_PLATFORM\\\", \\\"project\\\": {\\\"name\\\": \\\"test-is-00001\\\"}}}, \\\"securityResult\\\": [{\\\"categoryDetails\\\": [\\\"projects/test-is-00001/logs/cloudaudit.googleapis.com\\\"], \\\"action\\\": [\\\"ALLOW\\\"], \\\"severity\\\": \\\"INFORMATIONAL\\\", 
\\\"detectionFields\\\": [{\\\"key\\\": \\\"resource_name\\\", \\\"value\\\": \\\"projects/0000001/secrets/gsm_secret_1\\\"}, {\\\"key\\\": \\\"key_id\\\", \\\"value\\\": \\\"000000000000000000000001\\\"}]}], \\\"network\\\": {\\\"http\\\": {\\\"userAgent\\\": \\\"grpc-python-asyncio/1.51.3 grpc-c/29.0.0 (windows; chttp2),gzip(gfe)\\\"}}}], \\\"label\\\": \\\"e\\\"}], \\\"detectionTime\\\": \\\"2023-06-14T17:28:00Z\\\", \\\"tags\\\": [\\\"TA0040\\\", \\\"T1485\\\"], \\\"IncidentType\\\": \\\"CuratedRuleDetectionAlert\\\"}\", \"rawJSON\": \"{\\\"type\\\": \\\"GCTI_FINDING\\\", \\\"detection\\\": [{\\\"ruleName\\\": \\\"GCP Secret Manager Mass Deletion\\\", \\\"summary\\\": \\\"Rule Detection\\\", \\\"description\\\": \\\"Identifies mass deletion of secrets in GCP Secret Manager.\\\", \\\"severity\\\": \\\"LOW\\\", \\\"urlBackToProduct\\\": \\\"https://dummy-chronicle/ruleDetections?ruleId=ur_ttp_GCP__MassSecretDeletion&selectedList=RuleDetectionsViewTimeline&ruleSource=ruleSet&selectedDetectionId=de_50fd0957-0959-6410-0000-c6f8400006b1&selectedTimestamp=2023-06-14T17:28:00Z\\\", \\\"ruleId\\\": \\\"ur_ttp_GCP__MassSecretDeletion\\\", \\\"alertState\\\": \\\"ALERTING\\\", \\\"ruleType\\\": \\\"MULTI_EVENT\\\", \\\"detectionFields\\\": [{\\\"key\\\": \\\"resource\\\", \\\"value\\\": \\\"secretmanager.googleapis.com\\\"}, {\\\"key\\\": \\\"principaluser\\\", \\\"value\\\": \\\"secret@google.com\\\", \\\"source\\\": \\\"udm.principal.user.email_addresses\\\"}], \\\"ruleLabels\\\": [{\\\"key\\\": \\\"rule_name\\\", \\\"value\\\": \\\"GCP Secret Manager Mass Deletion\\\"}, {\\\"key\\\": \\\"false_positives\\\", \\\"value\\\": \\\"This may be common behavior in dev, testing, or deprecated projects.\\\"}], \\\"outcomes\\\": [{\\\"key\\\": \\\"risk_score\\\", \\\"value\\\": \\\"35\\\"}, {\\\"key\\\": \\\"resource_name\\\", \\\"value\\\": \\\"gsm_secret_1, gsm_secret_10\\\", \\\"source\\\": \\\"udm.target.resource.name\\\"}, {\\\"key\\\": \\\"ip\\\", \\\"value\\\": 
\\\"0.0.0.1\\\", \\\"source\\\": \\\"udm.principal.ip\\\"}], \\\"ruleSet\\\": \\\"9d7537ae-0ae2-0000-b5e2-507c00008ae9\\\", \\\"ruleSetDisplayName\\\": \\\"Service Disruption\\\", \\\"riskScore\\\": 35}], \\\"createdTime\\\": \\\"2023-06-14T18:38:30.569526Z\\\", \\\"lastUpdatedTime\\\": \\\"2023-06-14T18:38:30.569526Z\\\", \\\"id\\\": \\\"de_50fd0957-0959-0000-d556-c6f8000016b1\\\", \\\"timeWindow\\\": {\\\"startTime\\\": \\\"2023-06-14T17:18:00Z\\\", \\\"endTime\\\": \\\"2023-06-14T17:28:00Z\\\"}, \\\"collectionElements\\\": [{\\\"references\\\": [{\\\"eventTimestamp\\\": \\\"2023-06-14T17:27:39.239875241Z\\\", \\\"collectedTimestamp\\\": \\\"2023-06-14T17:27:42.956025244Z\\\", \\\"eventType\\\": \\\"RESOURCE_DELETION\\\", \\\"vendorName\\\": \\\"Google Cloud Platform\\\", \\\"productName\\\": \\\"Google Cloud Platform\\\", \\\"productEventType\\\": \\\"google.cloud.secretmanager.v1.SecretManagerService.DeleteSecret\\\", \\\"urlBackToProduct\\\": \\\"url_0000\\\", \\\"ingestedTimestamp\\\": \\\"2023-06-14T17:27:44.382729Z\\\", \\\"id\\\": \\\"000000000000000000000001\\\", \\\"logType\\\": \\\"GCP_CLOUD_AUDIT\\\", \\\"eventSeverity\\\": \\\"INFORMATIONAL\\\", \\\"principalAssetIdentifier\\\": \\\"0.0.0.1\\\", \\\"principal\\\": {\\\"user\\\": {\\\"emailAddresses\\\": [\\\"secret-migration@test-is-00001.iam.gserviceaccount.com\\\"], \\\"productObjectId\\\": \\\"000000000000000000000001\\\", \\\"attribute\\\": {\\\"roles\\\": [{\\\"name\\\": \\\"roles/secretmanager.admin\\\", \\\"type\\\": \\\"SERVICE_ACCOUNT\\\"}], \\\"permissions\\\": [{\\\"name\\\": \\\"secretmanager.secrets.delete\\\", \\\"type\\\": \\\"ADMIN_WRITE\\\"}]}}, \\\"ip\\\": [\\\"0.0.0.1\\\"], \\\"location\\\": {\\\"state\\\": \\\"State\\\", \\\"countryOrRegion\\\": \\\"Country\\\", \\\"regionLatitude\\\": 10, \\\"regionLongitude\\\": 10, \\\"regionCoordinates\\\": {\\\"latitude\\\": 10, \\\"longitude\\\": 10}}, \\\"resource\\\": {\\\"attribute\\\": {\\\"cloud\\\": {\\\"project\\\": {\\\"name\\\": 
\\\"projects/0000000/secrets/gsm_secret_1\\\", \\\"resourceSubtype\\\": \\\"secretmanager.googleapis.com/Secret\\\"}}, \\\"labels\\\": [{\\\"key\\\": \\\"request_type\\\", \\\"value\\\": \\\"type.googleapis.com/google.cloud.secretmanager.v1.DeleteSecretRequest\\\"}]}}, \\\"labels\\\": [{\\\"key\\\": \\\"request_attributes_time\\\", \\\"value\\\": \\\"2023-06-14T17:27:39.245079752Z\\\"}], \\\"ipGeoArtifact\\\": [{\\\"ip\\\": \\\"0.0.0.1\\\", \\\"location\\\": {\\\"state\\\": \\\"Gujarat\\\", \\\"countryOrRegion\\\": \\\"India\\\", \\\"regionLatitude\\\": 10, \\\"regionLongitude\\\": 10, \\\"regionCoordinates\\\": {\\\"latitude\\\": 10, \\\"longitude\\\": 10}}, \\\"network\\\": {\\\"asn\\\": \\\"00001\\\", \\\"dnsDomain\\\": \\\"broad_band.in\\\", \\\"carrierName\\\": \\\"broad band ltd.\\\", \\\"organizationName\\\": \\\"broad band services limited\\\"}}]}, \\\"target\\\": {\\\"application\\\": \\\"secretmanager.googleapis.com\\\", \\\"resource\\\": {\\\"name\\\": \\\"gsm_secret_1\\\", \\\"attribute\\\": {\\\"labels\\\": [{\\\"key\\\": \\\"request_name\\\", \\\"value\\\": \\\"projects/test-is-00001/secrets/gsm_secret_1\\\"}]}}, \\\"cloud\\\": {\\\"environment\\\": \\\"GOOGLE_CLOUD_PLATFORM\\\", \\\"project\\\": {\\\"name\\\": \\\"test-is-00001\\\"}}}, \\\"securityResult\\\": [{\\\"categoryDetails\\\": [\\\"projects/test-is-00001/logs/cloudaudit.googleapis.com\\\"], \\\"action\\\": [\\\"ALLOW\\\"], \\\"severity\\\": \\\"INFORMATIONAL\\\", \\\"detectionFields\\\": [{\\\"key\\\": \\\"resource_name\\\", \\\"value\\\": \\\"projects/0000001/secrets/gsm_secret_1\\\"}, {\\\"key\\\": \\\"key_id\\\", \\\"value\\\": \\\"000000000000000000000001\\\"}]}], \\\"network\\\": {\\\"http\\\": {\\\"userAgent\\\": \\\"grpc-python-asyncio/1.51.3 grpc-c/29.0.0 (windows; chttp2),gzip(gfe)\\\"}}}], \\\"label\\\": \\\"e\\\"}], \\\"detectionTime\\\": \\\"2023-06-14T17:28:00Z\\\", \\\"tags\\\": [\\\"TA0040\\\", \\\"T1485\\\"], \\\"IncidentType\\\": \\\"CuratedRuleDetectionAlert\\\"}\", 
\"severity\": 1}]",
+ "detection_identifiers": [
+ {
+ "id": "de_e6abfcb5-1b85-41b0-b64c-695b32504361",
+ "ruleVersion": "ru_e6abfcb5-1b85-41b0-b64c-695b3250436f@v_1602631093_146879000"
+ }
+ ],
+ "curatedrule_detection_identifiers": [
+ {
+ "id": "de_50fd0957-0959-0000-d556-c6f8000016b1"
+ }
+ ],
+ "continuation_time": "2024-03-21T09:44:04.877670709Z"
+}
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections.txt b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections.txt
new file mode 100644
index 000000000000..1c488fa20f17
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections.txt
@@ -0,0 +1,8 @@
+[{"continuationTime": "2024-03-21T05:31:06Z","heartbeat": true},
+{"continuationTime":"2024-03-21T06:19:59.094785596Z","detections":[{"type":"RULE_DETECTION","detection":[{"ruleName":"SampleRule","urlBackToProduct":"https://dummy-chronicle/ruleDetections?ruleId=ru_e6abfcb5-1b85-41b0-b64c-695b3250436f&selectedList=RuleDetectionsViewTimeline&selectedDetectionId=de_e6abfcb5-1b85-41b0-b64c-695b32504361&selectedTimestamp=2020-12-21T03:54:00Z","ruleId":"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f","ruleVersion":"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f@v_1602631093_146879000","alertState":"ALERTING","ruleType":"MULTI_EVENT","detectionFields":[{"key":"client_ip","value":"10.0.XX.XX"}]}],"createdTime":"2020-12-21T03:12:50.128428Z","id":"de_e6abfcb5-1b85-41b0-b64c-695b32504361","timeWindow":{"startTime":"2020-12-21T02:54:00Z","endTime":"2020-12-21T03:54:00Z"},"collectionElements":[{"references":[{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:58:06.804Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}],"answers":[{"name":"is5-ssl.mzstatic.com","type":1,"ttl":11111,"data":"10.0.XX.XX"}],"response":true}}}},{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:56:58.802Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}]}}}}],"label":"event"},{"references":[{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:58:06.804Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T0
3:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}],"answers":[{"name":"is5-ssl.mzstatic.com","type":1,"ttl":11111,"data":"10.0.XX.XX"}],"response":true}}}},{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:56:58.802Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}]}}}}],"label":"connections"}],"detectionTime":"2020-12-21T03:54:00Z"},{"type":"GCTI_FINDING","detection":[{"ruleName":"GCP Secret Manager Mass Deletion","summary":"Rule Detection","description":"Identifies mass deletion of secrets in GCP Secret Manager.","severity":"LOW","urlBackToProduct":"https://dummy-chronicle/ruleDetections?ruleId=ur_ttp_GCP__MassSecretDeletion&selectedList=RuleDetectionsViewTimeline&ruleSource=ruleSet&selectedDetectionId=de_50fd0957-0959-6410-0000-c6f8400006b1&selectedTimestamp=2023-06-14T17:28:00Z","ruleId":"ur_ttp_GCP__MassSecretDeletion","alertState":"ALERTING","ruleType":"MULTI_EVENT","detectionFields":[{"key":"resource","value":"secretmanager.googleapis.com"},{"key":"principaluser","value":"secret@google.com","source":"udm.principal.user.email_addresses"}],"ruleLabels":[{"key":"rule_name","value":"GCP Secret Manager Mass Deletion"},{"key":"false_positives","value":"This may be common behavior in dev, testing, or deprecated projects."}],"outcomes":[{"key":"risk_score","value":"35"},{"key":"resource_name","value":"gsm_secret_1, 
gsm_secret_10","source":"udm.target.resource.name"},{"key":"ip","value":"0.0.0.1","source":"udm.principal.ip"}],"ruleSet":"9d7537ae-0ae2-0000-b5e2-507c00008ae9","ruleSetDisplayName":"Service Disruption","riskScore":35}],"createdTime":"2023-06-14T18:38:30.569526Z","lastUpdatedTime":"2023-06-14T18:38:30.569526Z","id":"de_50fd0957-0959-0000-d556-c6f8000016b1","timeWindow":{"startTime":"2023-06-14T17:18:00Z","endTime":"2023-06-14T17:28:00Z"},"collectionElements":[{"references":[{"event":{"metadata":{"eventTimestamp":"2023-06-14T17:27:39.239875241Z","collectedTimestamp":"2023-06-14T17:27:42.956025244Z","eventType":"RESOURCE_DELETION","vendorName":"Google Cloud Platform","productName":"Google Cloud Platform","productEventType":"google.cloud.secretmanager.v1.SecretManagerService.DeleteSecret","urlBackToProduct":"url_0000","ingestedTimestamp":"2023-06-14T17:27:44.382729Z","id":"000000000000000000000001","logType":"GCP_CLOUD_AUDIT"},"principal":{"user":{"emailAddresses":["secret-migration@test-is-00001.iam.gserviceaccount.com"],"productObjectId":"000000000000000000000001","attribute":{"roles":[{"name":"roles/secretmanager.admin","type":"SERVICE_ACCOUNT"}],"permissions":[{"name":"secretmanager.secrets.delete","type":"ADMIN_WRITE"}]}},"ip":["0.0.0.1"],"location":{"state":"State","countryOrRegion":"Country","regionLatitude":10,"regionLongitude":10,"regionCoordinates":{"latitude":10,"longitude":10}},"resource":{"attribute":{"cloud":{"project":{"name":"projects/0000000/secrets/gsm_secret_1","resourceSubtype":"secretmanager.googleapis.com/Secret"}},"labels":[{"key":"request_type","value":"type.googleapis.com/google.cloud.secretmanager.v1.DeleteSecretRequest"}]}},"labels":[{"key":"request_attributes_time","value":"2023-06-14T17:27:39.245079752Z"}],"ipGeoArtifact":[{"ip":"0.0.0.1","location":{"state":"Gujarat","countryOrRegion":"India","regionLatitude":10,"regionLongitude":10,"regionCoordinates":{"latitude":10,"longitude":10}},"network":{"asn":"00001","dnsDomain":"broad_band.in","ca
rrierName":"broad band ltd.","organizationName":"broad band services limited"}}]},"target":{"application":"secretmanager.googleapis.com","resource":{"name":"gsm_secret_1","attribute":{"labels":[{"key":"request_name","value":"projects/test-is-00001/secrets/gsm_secret_1"}]}},"cloud":{"environment":"GOOGLE_CLOUD_PLATFORM","project":{"name":"test-is-00001"}}},"securityResult":[{"categoryDetails":["projects/test-is-00001/logs/cloudaudit.googleapis.com"],"action":["ALLOW"],"severity":"INFORMATIONAL","detectionFields":[{"key":"resource_name","value":"projects/0000001/secrets/gsm_secret_1"},{"key":"key_id","value":"000000000000000000000001"}]}],"network":{"http":{"userAgent":"grpc-python-asyncio/1.51.3 grpc-c/29.0.0 (windows; chttp2),gzip(gfe)"}}}}],"label":"e"}],"detectionTime":"2023-06-14T17:28:00Z","tags":["TA0040","T1485"]}]}
+{"heartbeat": true}
+{"continuationTime":"2024-03-21T06:19:59.094785596Z","detections":[]}
+{"continuationTime": "2024-03-21T09:43:04.877670709Z"}
+{"continuationTime":"2024-03-21T09:44:04.877670709Z","detections":[{"type":"RULE_DETECTION","detection":[{"ruleName":"SampleRule","urlBackToProduct":"https://dummy-chronicle/ruleDetections?ruleId=ru_e6abfcb5-1b85-41b0-b64c-695b3250436f&selectedList=RuleDetectionsViewTimeline&selectedDetectionId=de_e6abfcb5-1b85-41b0-b64c-695b32504361&selectedTimestamp=2020-12-21T03:54:00Z","ruleId":"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f","ruleVersion":"ru_e6abfcb5-1b85-41b0-b64c-695b3250436f@v_1602631093_146879000","alertState":"ALERTING","ruleType":"MULTI_EVENT","detectionFields":[{"key":"client_ip","value":"10.0.XX.XX"}]}],"createdTime":"2020-12-21T03:12:50.128428Z","id":"de_e6abfcb5-1b85-41b0-b64c-695b32504361","timeWindow":{"startTime":"2020-12-21T02:54:00Z","endTime":"2020-12-21T03:54:00Z"},"collectionElements":[{"references":[{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:58:06.804Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}],"answers":[{"name":"is5-ssl.mzstatic.com","type":1,"ttl":11111,"data":"10.0.XX.XX"}],"response":true}}}},{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:56:58.802Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}]}}}}],"label":"event"},{"references":[{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:58:06.804Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T0
3:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}],"answers":[{"name":"is5-ssl.mzstatic.com","type":1,"ttl":11111,"data":"10.0.XX.XX"}],"response":true}}}},{"event":{"metadata":{"eventTimestamp":"2020-12-21T02:56:58.802Z","eventType":"NETWORK_DNS","productName":"ExtraHop","ingestedTimestamp":"2020-12-21T03:02:46.559472Z"},"principal":{"hostname":"ray-xxx-laptop","ip":["10.0.XX.XX"],"mac":["88:a6:XX:XX:XX:XX"]},"target":{"ip":["10.0.XX.XX"]},"securityResult":[{"action":["UNKNOWN_ACTION"]}],"network":{"applicationProtocol":"DNS","dns":{"questions":[{"name":"is5-ssl.mzstatic.com","type":1}]}}}}],"label":"connections"}],"detectionTime":"2020-12-21T03:54:00Z"},{"type":"GCTI_FINDING","detection":[{"ruleName":"GCP Secret Manager Mass Deletion","summary":"Rule Detection","description":"Identifies mass deletion of secrets in GCP Secret Manager.","severity":"LOW","urlBackToProduct":"https://dummy-chronicle/ruleDetections?ruleId=ur_ttp_GCP__MassSecretDeletion&selectedList=RuleDetectionsViewTimeline&ruleSource=ruleSet&selectedDetectionId=de_50fd0957-0959-6410-0000-c6f8400006b1&selectedTimestamp=2023-06-14T17:28:00Z","ruleId":"ur_ttp_GCP__MassSecretDeletion","alertState":"ALERTING","ruleType":"MULTI_EVENT","detectionFields":[{"key":"resource","value":"secretmanager.googleapis.com"},{"key":"principaluser","value":"secret@google.com","source":"udm.principal.user.email_addresses"}],"ruleLabels":[{"key":"rule_name","value":"GCP Secret Manager Mass Deletion"},{"key":"false_positives","value":"This may be common behavior in dev, testing, or deprecated projects."}],"outcomes":[{"key":"risk_score","value":"35"},{"key":"resource_name","value":"gsm_secret_1, 
gsm_secret_10","source":"udm.target.resource.name"},{"key":"ip","value":"0.0.0.1","source":"udm.principal.ip"}],"ruleSet":"9d7537ae-0ae2-0000-b5e2-507c00008ae9","ruleSetDisplayName":"Service Disruption","riskScore":35}],"createdTime":"2023-06-14T18:38:30.569526Z","lastUpdatedTime":"2023-06-14T18:38:30.569526Z","id":"de_50fd0957-0959-0000-d556-c6f8000016b1","timeWindow":{"startTime":"2023-06-14T17:18:00Z","endTime":"2023-06-14T17:28:00Z"},"collectionElements":[{"references":[{"event":{"metadata":{"eventTimestamp":"2023-06-14T17:27:39.239875241Z","collectedTimestamp":"2023-06-14T17:27:42.956025244Z","eventType":"RESOURCE_DELETION","vendorName":"Google Cloud Platform","productName":"Google Cloud Platform","productEventType":"google.cloud.secretmanager.v1.SecretManagerService.DeleteSecret","urlBackToProduct":"url_0000","ingestedTimestamp":"2023-06-14T17:27:44.382729Z","id":"000000000000000000000001","logType":"GCP_CLOUD_AUDIT"},"principal":{"user":{"emailAddresses":["secret-migration@test-is-00001.iam.gserviceaccount.com"],"productObjectId":"000000000000000000000001","attribute":{"roles":[{"name":"roles/secretmanager.admin","type":"SERVICE_ACCOUNT"}],"permissions":[{"name":"secretmanager.secrets.delete","type":"ADMIN_WRITE"}]}},"ip":["0.0.0.1"],"location":{"state":"State","countryOrRegion":"Country","regionLatitude":10,"regionLongitude":10,"regionCoordinates":{"latitude":10,"longitude":10}},"resource":{"attribute":{"cloud":{"project":{"name":"projects/0000000/secrets/gsm_secret_1","resourceSubtype":"secretmanager.googleapis.com/Secret"}},"labels":[{"key":"request_type","value":"type.googleapis.com/google.cloud.secretmanager.v1.DeleteSecretRequest"}]}},"labels":[{"key":"request_attributes_time","value":"2023-06-14T17:27:39.245079752Z"}],"ipGeoArtifact":[{"ip":"0.0.0.1","location":{"state":"Gujarat","countryOrRegion":"India","regionLatitude":10,"regionLongitude":10,"regionCoordinates":{"latitude":10,"longitude":10}},"network":{"asn":"00001","dnsDomain":"broad_band.in","ca
rrierName":"broad band ltd.","organizationName":"broad band services limited"}}]},"target":{"application":"secretmanager.googleapis.com","resource":{"name":"gsm_secret_1","attribute":{"labels":[{"key":"request_name","value":"projects/test-is-00001/secrets/gsm_secret_1"}]}},"cloud":{"environment":"GOOGLE_CLOUD_PLATFORM","project":{"name":"test-is-00001"}}},"securityResult":[{"categoryDetails":["projects/test-is-00001/logs/cloudaudit.googleapis.com"],"action":["ALLOW"],"severity":"INFORMATIONAL","detectionFields":[{"key":"resource_name","value":"projects/0000001/secrets/gsm_secret_1"},{"key":"key_id","value":"000000000000000000000001"}]}],"network":{"http":{"userAgent":"grpc-python-asyncio/1.51.3 grpc-c/29.0.0 (windows; chttp2),gzip(gfe)"}}}}],"label":"e"}],"detectionTime":"2023-06-14T17:28:00Z","tags":["TA0040","T1485"]}]}
+{"heartbeat": true}
+'
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_empty.txt b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_empty.txt
new file mode 100644
index 000000000000..11a24a1213f3
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_empty.txt
@@ -0,0 +1,3 @@
+[{"continuationTime": "2024-03-21T05:31:06Z","heartbeat": true},
+{},
+{"continuationTime": "2024-03-21T07:31:06Z","heartbeat": true},
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error.txt b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error.txt
new file mode 100644
index 000000000000..1a5388796ee4
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error.txt
@@ -0,0 +1,3 @@
+[{"continuationTime": "2024-03-21T05:31:06Z","heartbeat": true},
+{"continuationTime":"2024-03-21T06:19:59.094785596Z", "error": "error"},
+{"continuationTime": "2024-03-21T07:31:06Z","heartbeat": true},
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error_2.txt b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error_2.txt
new file mode 100644
index 000000000000..5e1afb69a6c3
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/Integrations/GoogleChronicleBackstoryStreamingAPI/test_data/stream_detections_error_2.txt
@@ -0,0 +1,2 @@
+[{"continuationTime":"2024-03-21T06:19:59.094785596Z", "error": "error"},
+{"continuationTime": "2024-03-21T07:31:06Z","heartbeat": true},
\ No newline at end of file
diff --git a/Packs/GoogleChronicleBackstory/Layouts/layoutscontainer-Chronicle_Rule_Detection.json b/Packs/GoogleChronicleBackstory/Layouts/layoutscontainer-Chronicle_Rule_Detection.json
index 649dc96e7fe1..010ceaa78952 100644
--- a/Packs/GoogleChronicleBackstory/Layouts/layoutscontainer-Chronicle_Rule_Detection.json
+++ b/Packs/GoogleChronicleBackstory/Layouts/layoutscontainer-Chronicle_Rule_Detection.json
@@ -26,12 +26,21 @@
"sectionItemType": "field",
"startCol": 0
},
+ {
+ "endCol": 2,
+ "fieldId": "description",
+ "height": 22,
+ "id": "7ee26b40-0f67-11ee-bbaa-0371f85c2d92",
+ "index": 1,
+ "sectionItemType": "field",
+ "startCol": 0
+ },
{
"endCol": 2,
"fieldId": "severity",
"height": 22,
"id": "incident-severity-field",
- "index": 1,
+ "index": 2,
"sectionItemType": "field",
"startCol": 0
},
@@ -40,7 +49,7 @@
"fieldId": "owner",
"height": 22,
"id": "incident-owner-field",
- "index": 2,
+ "index": 3,
"sectionItemType": "field",
"startCol": 0
},
@@ -49,7 +58,7 @@
"fieldId": "dbotsource",
"height": 22,
"id": "incident-source-field",
- "index": 3,
+ "index": 4,
"sectionItemType": "field",
"startCol": 0
},
@@ -58,7 +67,7 @@
"fieldId": "sourcebrand",
"height": 22,
"id": "incident-sourceBrand-field",
- "index": 4,
+ "index": 5,
"sectionItemType": "field",
"startCol": 0
},
@@ -67,7 +76,7 @@
"fieldId": "sourceinstance",
"height": 22,
"id": "incident-sourceInstance-field",
- "index": 5,
+ "index": 6,
"sectionItemType": "field",
"startCol": 0
},
@@ -76,7 +85,7 @@
"fieldId": "playbookid",
"height": 22,
"id": "incident-playbookId-field",
- "index": 6,
+ "index": 7,
"sectionItemType": "field",
"startCol": 0
}
@@ -387,6 +396,17 @@
"sectionItemType": "field",
"startCol": 0
},
+ {
+ "dropEffect": "move",
+ "endCol": 2,
+ "fieldId": "detectionurl",
+ "height": 53,
+ "id": "6f51e430-0f67-11ee-bbaa-0371f85c2d92",
+ "index": 4,
+ "listId": "caseinfoid-04493090-1504-11eb-9b4e-a35aa939990c",
+ "sectionItemType": "field",
+ "startCol": 0
+ },
{
"dropEffect": "move",
"endCol": 2,
@@ -933,6 +953,10 @@
"fieldId": "incident_type",
"isVisible": true
},
+ {
+ "fieldId": "incident_description",
+ "isVisible": true
+ },
{
"fieldId": "incident_severity",
"isVisible": true
@@ -980,6 +1004,10 @@
"fieldId": "incident_chronicledetectionid",
"isVisible": true
},
+ {
+ "fieldId": "incident_detectionurl",
+ "isVisible": true
+ },
{
"fieldId": "incident_chronicledetectiontype",
"isVisible": true
diff --git a/Packs/GoogleChronicleBackstory/ReleaseNotes/3_1_5.md b/Packs/GoogleChronicleBackstory/ReleaseNotes/3_1_5.md
new file mode 100644
index 000000000000..f173102d24cb
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/ReleaseNotes/3_1_5.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### ListDeviceEvents
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/GoogleChronicleBackstory/ReleaseNotes/4_0_0.md b/Packs/GoogleChronicleBackstory/ReleaseNotes/4_0_0.md
new file mode 100644
index 000000000000..91546c1d58db
--- /dev/null
+++ b/Packs/GoogleChronicleBackstory/ReleaseNotes/4_0_0.md
@@ -0,0 +1,27 @@
+
+#### Integrations
+
+##### New: Chronicle Streaming API
+
+- New: Use the Google Chronicle Backstory Streaming API integration to ingest detections created by both user-created rules and Chronicle Rules as XSOAR incidents. (Available from Cortex XSOAR 6.10.0).
+
+##### Chronicle
+
+- Fixed an issue with the proxy settings. The proxy communication will only be used when the proxy checkbox is enabled.
+- Updated the Docker image to: *demisto/googleapi-python3:1.0.0.97032*.
+
+#### Layouts
+
+##### Chronicle Rule Detection Incident
+
+- Updated the layout to add the following incident fields.
+ - **Detection URL**
+ - **Description**
+
+#### Mappers
+
+##### Chronicle - Incoming Mapper
+
+- Updated the mapper to add the following incident fields in **Chronicle Rule Detection**.
+ - **Detection URL**
+ - **Description**
diff --git a/Packs/GoogleChronicleBackstory/Scripts/ListDeviceEventsScript/ListDeviceEventsScript.yml b/Packs/GoogleChronicleBackstory/Scripts/ListDeviceEventsScript/ListDeviceEventsScript.yml
index b7e7e6e75125..e9726e8e23e5 100644
--- a/Packs/GoogleChronicleBackstory/Scripts/ListDeviceEventsScript/ListDeviceEventsScript.yml
+++ b/Packs/GoogleChronicleBackstory/Scripts/ListDeviceEventsScript/ListDeviceEventsScript.yml
@@ -1099,7 +1099,7 @@ tags:
- enhancement
timeout: '0'
type: python
-dockerimage: demisto/python3:3.10.9.40422
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
tests:
- No tests (auto formatted)
diff --git a/Packs/GoogleChronicleBackstory/pack_metadata.json b/Packs/GoogleChronicleBackstory/pack_metadata.json
index 4079dd5f1b0f..06c56d753c8d 100644
--- a/Packs/GoogleChronicleBackstory/pack_metadata.json
+++ b/Packs/GoogleChronicleBackstory/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Chronicle",
"description": "Retrieve Chronicle detections, impacted assets, IOC matches, and 3P alerts to enrich your XSOAR workflows.",
"support": "partner",
- "currentVersion": "3.1.4",
+ "currentVersion": "4.0.0",
"certification": "certified",
"author": "Chronicle",
"url": "https://go.chronicle.security/contact",
diff --git a/Packs/GoogleCloudCompute/ReleaseNotes/1_1_10.md b/Packs/GoogleCloudCompute/ReleaseNotes/1_1_10.md
new file mode 100644
index 000000000000..cbd6d34dafce
--- /dev/null
+++ b/Packs/GoogleCloudCompute/ReleaseNotes/1_1_10.md
@@ -0,0 +1,3 @@
+## Google Cloud Compute
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/GoogleCloudCompute/pack_metadata.json b/Packs/GoogleCloudCompute/pack_metadata.json
index 7e639da8818b..1a750e83475a 100644
--- a/Packs/GoogleCloudCompute/pack_metadata.json
+++ b/Packs/GoogleCloudCompute/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Google Cloud Compute",
"description": "Google Compute Engine delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing.",
"support": "xsoar",
- "currentVersion": "1.1.9",
+ "currentVersion": "1.1.10",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/GoogleKeyManagementService/ReleaseNotes/1_0_25.md b/Packs/GoogleKeyManagementService/ReleaseNotes/1_0_25.md
new file mode 100644
index 000000000000..ca6eefc60861
--- /dev/null
+++ b/Packs/GoogleKeyManagementService/ReleaseNotes/1_0_25.md
@@ -0,0 +1,3 @@
+## Google Key Management Service
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/GoogleKeyManagementService/pack_metadata.json b/Packs/GoogleKeyManagementService/pack_metadata.json
index a84b0d8b2f71..b0c4571d41e6 100644
--- a/Packs/GoogleKeyManagementService/pack_metadata.json
+++ b/Packs/GoogleKeyManagementService/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Google Key Management Service",
"description": "Use the Google Key Management Service API for CryptoKey management and encrypt/decrypt functionality.",
"support": "xsoar",
- "currentVersion": "1.0.24",
+ "currentVersion": "1.0.25",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/GoogleThreatIntelligence/.pack-ignore b/Packs/GoogleThreatIntelligence/.pack-ignore
new file mode 100644
index 000000000000..c5400c2e29e0
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/.pack-ignore
@@ -0,0 +1,7 @@
+[file:GoogleThreatIntelligence_image.png]
+ignore=IM111
+
+[known_words]
+virustotal
+livehunt
+retrohunt
diff --git a/Packs/GoogleThreatIntelligence/.secrets-ignore b/Packs/GoogleThreatIntelligence/.secrets-ignore
new file mode 100644
index 000000000000..a0cac85e612b
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/.secrets-ignore
@@ -0,0 +1,3 @@
+https://us-cert.cisa.gov
+https://fake
+https://vt_is_awesome.com
diff --git a/Packs/GoogleThreatIntelligence/Author_image.png b/Packs/GoogleThreatIntelligence/Author_image.png
new file mode 100644
index 000000000000..ff86122389c0
Binary files /dev/null and b/Packs/GoogleThreatIntelligence/Author_image.png differ
diff --git a/Packs/GoogleThreatIntelligence/CONTRIBUTORS.json b/Packs/GoogleThreatIntelligence/CONTRIBUTORS.json
new file mode 100644
index 000000000000..611c3a3900da
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/CONTRIBUTORS.json
@@ -0,0 +1,3 @@
+[
+ "Google Threat Intelligence Team"
+]
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.py b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.py
new file mode 100644
index 000000000000..267bb1460af4
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.py
@@ -0,0 +1,2712 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+"""
+An integration module for the Google Threat Intelligence API.
+API Documentation:
+ https://gtidocs.virustotal.com/reference
+"""
+from collections import defaultdict
+from typing import cast
+
+from dateparser import parse
+import ipaddress
+
+INTEGRATION_NAME = 'GoogleThreatIntelligence'
+COMMAND_PREFIX = 'gti'  # prefix used in this integration's command names
+INTEGRATION_ENTRY_CONTEXT = INTEGRATION_NAME  # root key for entry-context outputs
+
+INDICATOR_TYPE = {  # API object type -> XSOAR FeedIndicatorType
+    'ip': FeedIndicatorType.IP,
+    'ip_address': FeedIndicatorType.IP,
+    'domain': FeedIndicatorType.Domain,
+    'file': FeedIndicatorType.File,
+    'url': FeedIndicatorType.URL,
+}
+
+SEVERITY_LEVELS = {  # GTI threat_severity enum value -> display name
+    'SEVERITY_UNKNOWN': 'UNKNOWN',
+    'SEVERITY_LOW': 'LOW',
+    'SEVERITY_MEDIUM': 'MEDIUM',
+    'SEVERITY_HIGH': 'HIGH',
+}
+
+VERDICTS = {  # GTI threat_verdict enum value -> display name
+    'VERDICT_UNKNOWN': 'UNKNOWN',
+    'VERDICT_UNDETECTED': 'UNDETECTED',
+    'VERDICT_SUSPICIOUS': 'SUSPICIOUS',
+    'VERDICT_MALICIOUS': 'MALICIOUS',
+}
+
+
+"""RELATIONSHIP TYPE"""
+RELATIONSHIP_TYPE = {  # per indicator type: API relationship name -> XSOAR EntityRelationship kind
+    'file': {
+        'carbonblack_children': EntityRelationship.Relationships.CREATES,
+        'carbonblack_parents': EntityRelationship.Relationships.CREATED_BY,
+        'compressed_parents': EntityRelationship.Relationships.BUNDLED_IN,
+        'contacted_domains': EntityRelationship.Relationships.COMMUNICATES_WITH,
+        'contacted_ips': EntityRelationship.Relationships.COMMUNICATES_WITH,
+        'contacted_urls': EntityRelationship.Relationships.COMMUNICATES_WITH,
+        'dropped_files': EntityRelationship.Relationships.DROPPED_BY,
+        'email_attachments': EntityRelationship.Relationships.ATTACHES,
+        'email_parents': EntityRelationship.Relationships.ATTACHMENT_OF,
+        'embedded_domains': EntityRelationship.Relationships.EMBEDDED_IN,
+        'embedded_ips': EntityRelationship.Relationships.EMBEDDED_IN,
+        'embedded_urls': EntityRelationship.Relationships.EMBEDDED_IN,
+        'execution_parents': EntityRelationship.Relationships.EXECUTED_BY,
+        'itw_domains': EntityRelationship.Relationships.DOWNLOADS_FROM,
+        'itw_ips': EntityRelationship.Relationships.DOWNLOADS_FROM,
+        'overlay_children': EntityRelationship.Relationships.BUNDLES,
+        'overlay_parents': EntityRelationship.Relationships.BUNDLED_IN,
+        'pcap_children': EntityRelationship.Relationships.BUNDLES,
+        'pcap_parents': EntityRelationship.Relationships.BUNDLED_IN,
+        'pe_resource_children': EntityRelationship.Relationships.EXECUTED,
+        'pe_resource_parents': EntityRelationship.Relationships.EXECUTED_BY,
+        'similar_files': EntityRelationship.Relationships.SIMILAR_TO,
+    },
+    'domain': {
+        'cname_records': EntityRelationship.Relationships.IS_ALSO,
+        'caa_records': EntityRelationship.Relationships.RELATED_TO,
+        'communicating_files': EntityRelationship.Relationships.DROPS,
+        'downloaded_files': EntityRelationship.Relationships.DROPS,
+        'immediate_parent': EntityRelationship.Relationships.SUB_DOMAIN_OF,
+        'mx_records': EntityRelationship.Relationships.RELATED_TO,
+        'ns_records': EntityRelationship.Relationships.DROPS,
+        'parent': EntityRelationship.Relationships.SUB_DOMAIN_OF,
+        'referrer_files': EntityRelationship.Relationships.RELATED_TO,
+        'resolutions': EntityRelationship.Relationships.RESOLVED_FROM,
+        'siblings': EntityRelationship.Relationships.SUPRA_DOMAIN_OF,
+        'soa_records': EntityRelationship.Relationships.IS_ALSO,
+        'subdomains': EntityRelationship.Relationships.SUPRA_DOMAIN_OF,
+        'urls': EntityRelationship.Relationships.HOSTS,
+    },
+    'ip': {
+        'communicating_files': EntityRelationship.Relationships.COMMUNICATES_WITH,
+        'downloaded_files': EntityRelationship.Relationships.DROPS,
+        'referrer_files': EntityRelationship.Relationships.RELATED_TO,
+        'resolutions': EntityRelationship.Relationships.RESOLVES_TO,
+        'urls': EntityRelationship.Relationships.RELATED_TO,
+    },
+    'url': {
+        'contacted_domains': EntityRelationship.Relationships.RELATED_TO,
+        'contacted_ips': EntityRelationship.Relationships.RELATED_TO,
+        'downloaded_files': EntityRelationship.Relationships.DROPS,
+        'last_serving_ip_address': EntityRelationship.Relationships.RESOLVED_FROM,
+        'network_location': EntityRelationship.Relationships.RESOLVED_FROM,
+        'redirecting_urls': EntityRelationship.Relationships.DUPLICATE_OF,
+        'redirects_to': EntityRelationship.Relationships.DUPLICATE_OF,
+        'referrer_files': EntityRelationship.Relationships.EMBEDDED_IN,
+        'referrer_urls': EntityRelationship.Relationships.RELATED_TO,
+    },
+}
+
+
+class Client(BaseClient):
+    """Thin HTTP client for the Google Threat Intelligence (VirusTotal v3) API."""
+    reliability: DBotScoreReliability  # source reliability used when building DBotScore entries
+
+    def __init__(self, params: dict):
+        self.reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(params['feedReliability'])
+
+        super().__init__(
+            'https://www.virustotal.com/api/v3/',
+            verify=not argToBoolean(params.get('insecure')),  # 'insecure' checkbox disables TLS verification
+            proxy=argToBoolean(params.get('proxy')),
+            headers={
+                'x-apikey': params['credentials']['password'],  # API key is stored in the credentials' password field
+                'x-tool': 'CortexGTI',
+            }
+        )
+
+    # region Reputation calls
+
+    def ip(self, ip: str, relationships: str = '') -> dict:
+        """Fetch an IP address report, optionally with related objects.
+        See Also:
+            https://gtidocs.virustotal.com/reference/ip-info
+        """
+        return self._http_request(
+            'GET',
+            f'ip_addresses/{ip}?relationships={relationships}', ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    def file(self, file: str, relationships: str = '') -> dict:
+        """Fetch a file (hash) report, optionally with related objects.
+        See Also:
+            https://gtidocs.virustotal.com/reference/file-info
+        """
+        return self._http_request(
+            'GET',
+            f'files/{file}?relationships={relationships}', ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    # Not a reputation call: fetches the report of a privately scanned file
+    def private_file(self, file: str) -> dict:
+        """Fetch a privately scanned file report by hash.
+        See Also:
+            https://gtidocs.virustotal.com/reference/private-files-info
+        """
+        return self._http_request(
+            'GET',
+            f'private/files/{file}', ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    def url(self, url: str, relationships: str = '') -> dict:
+        """Fetch a URL report, optionally with related objects.
+        See Also:
+            https://gtidocs.virustotal.com/reference/url-info
+        """
+        return self._http_request(
+            'GET',
+            f'urls/{encode_url_to_base64(url)}?relationships={relationships}',
+            ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    def domain(self, domain: str, relationships: str = '') -> dict:
+        """Fetch a domain report, optionally with related objects.
+        See Also:
+            https://gtidocs.virustotal.com/reference/domain-info
+        """
+        return self._http_request(
+            'GET',
+            f'domains/{domain}?relationships={relationships}',
+            ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    # endregion
+
+    # region Comments call
+    def delete_comment(self, id_: str):
+        """Delete a comment by its ID.
+        See Also:
+            https://gtidocs.virustotal.com/reference/comment-id-delete
+        """
+        self._http_request(
+            'DELETE',
+            f'comments/{id_}',
+            resp_type='response'  # DELETE returns no JSON body
+        )
+
+    def get_ip_comments(self, ip: str, limit: int) -> dict:
+        """List comments on an IP address, up to the given limit.
+        See Also:
+            https://gtidocs.virustotal.com/reference/ip-comments-get
+        """
+
+        return self._http_request(
+            'GET',
+            f'ip_addresses/{ip}/comments',
+            params={'limit': limit}
+        )
+
+    def get_url_comments(self, url: str, limit: int) -> dict:
+        """List comments on a URL, up to the given limit.
+        See Also:
+            https://gtidocs.virustotal.com/reference/urls-comments-get
+
+        """
+        return self._http_request(
+            'GET',
+            f'urls/{encode_url_to_base64(url)}/comments',
+            params={'limit': limit}
+        )
+
+    def get_hash_comments(self, file_hash: str, limit: int) -> dict:
+        """List comments on a file hash, up to the given limit.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-comments-get
+        """
+        return self._http_request(
+            'GET',
+            f'files/{file_hash}/comments',
+            params={'limit': limit}
+        )
+
+    def get_domain_comments(self, domain: str, limit: int) -> dict:
+        """List comments on a domain, up to the given limit.
+        See Also:
+            https://gtidocs.virustotal.com/reference/domains-comments-get
+        """
+        return self._http_request(
+            'GET',
+            f'domains/{domain}/comments',
+            params={'limit': limit}
+        )
+
+    def get_comment_by_id(self, comment_id: str) -> dict:
+        """Fetch a single comment by its ID.
+        See Also:
+            https://gtidocs.virustotal.com/reference/get-comment
+        """
+        return self._http_request(
+            'GET',
+            f'comments/{comment_id}'
+        )
+
+    def add_comment(self, suffix: str, comment: str) -> dict:
+        """Post a comment to the given endpoint suffix.
+
+        Args:
+            suffix: endpoint suffix the comment is posted to
+            comment: comment text to publish
+
+        Returns:
+            The JSON response of the request.
+        """
+        return self._http_request(
+            'POST',
+            suffix,
+            json_data={'data': {'type': 'comment', 'attributes': {'text': comment}}}
+        )
+
+    def add_comment_to_ip(self, ip: str, comment: str) -> dict:
+        """Post a comment on an IP address.
+        See Also:
+            https://gtidocs.virustotal.com/reference/ip-comments-post
+        """
+        return self.add_comment(f'ip_addresses/{ip}/comments', comment)
+
+    def add_comment_to_url(self, url: str, comment: str) -> dict:
+        """Post a comment on a URL.
+        See Also:
+            https://gtidocs.virustotal.com/reference/urls-comments-post
+        """
+        return self.add_comment(f'urls/{encode_url_to_base64(url)}/comments', comment)
+
+    def add_comment_to_domain(self, domain: str, comment: str) -> dict:
+        """Post a comment on a domain.
+        See Also:
+            https://gtidocs.virustotal.com/reference/domains-comments-post
+        """
+        return self.add_comment(f'domains/{domain}/comments', comment)
+
+    def add_comment_to_file(self, resource: str, comment: str) -> dict:
+        """Post a comment on a file hash.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-comments-post
+        """
+        return self.add_comment(f'files/{resource}/comments', comment)
+
+    # endregion
+
+    # region Scan calls
+    def file_rescan(self, file_hash: str) -> dict:
+        """Request re-analysis of an already-submitted file hash.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-analyse
+        """
+        return self._http_request(
+            'POST',
+            f'/files/{file_hash}/analyse'
+        )
+
+    def file_scan(self, file_path: str, /, upload_url: Optional[str]) -> dict:
+        """Upload a file for analysis; large files go through a dedicated upload URL.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-analyse
+        """
+        response: requests.Response
+        with open(file_path, 'rb') as file:
+            if upload_url or os.stat(file_path).st_size / (1024 * 1024) >= 32:  # >= 32 MB requires a special upload URL
+                if not upload_url:
+                    raw_response = self.get_upload_url()
+                    upload_url = raw_response['data']
+                response = self._http_request(
+                    'POST',
+                    full_url=upload_url,
+                    files={'file': file},
+                    resp_type='response'
+                )
+            else:
+                response = self._http_request(
+                    'POST',
+                    url_suffix='/files',
+                    files={'file': file},
+                    resp_type='response'
+                )
+        demisto.debug(
+            f'scan_file response:\n'
+            f'{str(response.status_code)=}, {str(response.headers)=}, {str(response.content)}'
+        )
+        return response.json()
+
+    def private_file_scan(self, file_path: str) -> dict:
+        """Upload a file for private analysis; large files go through a dedicated upload URL.
+        See Also:
+            https://gtidocs.virustotal.com/reference/post_files-1
+        """
+        response: requests.Response
+        with open(file_path, 'rb') as file:
+            if os.stat(file_path).st_size / (1024 * 1024) >= 32:  # >= 32 MB requires a special upload URL
+                raw_response = self.get_private_upload_url()
+                upload_url = raw_response['data']
+                response = self._http_request(
+                    'POST',
+                    full_url=upload_url,
+                    files={'file': file},
+                    resp_type='response'
+                )
+            else:
+                response = self._http_request(
+                    'POST',
+                    url_suffix='/private/files',
+                    files={'file': file},
+                    resp_type='response'
+                )
+        demisto.debug(
+            f'scan_file response:\n'
+            f'{str(response.status_code)=}, {str(response.headers)=}, {str(response.content)}'
+        )
+        return response.json()
+
+    def get_upload_url(self) -> dict:
+        """Get a one-time URL for uploading large files.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-upload-url
+        """
+        return self._http_request(
+            'GET',
+            'files/upload_url'
+        )
+
+    def get_private_upload_url(self) -> dict:
+        """Get a one-time URL for uploading large private files.
+        See Also:
+            https://gtidocs.virustotal.com/reference/private-files-upload-url
+        """
+        return self._http_request(
+            'GET',
+            'private/files/upload_url'
+        )
+
+    def url_scan(self, url: str) -> dict:
+        """Submit a URL for analysis.
+        See Also:
+            https://gtidocs.virustotal.com/reference/urls-analyse
+        """
+        return self._http_request(
+            'POST',
+            'urls',
+            data={'url': url}
+        )
+
+    # endregion
+
+    def file_sandbox_report(self, file_hash: str, limit: int) -> dict:
+        """Fetch sandbox behaviour reports of a file hash, up to the given limit.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-relationships
+        """
+        return self._http_request(
+            'GET',
+            f'files/{file_hash}/behaviours',
+            params={'limit': limit},
+            ok_codes=(404, 429, 200)  # 404/429 returned to caller, not raised
+        )
+
+    def passive_dns_data(self, id: dict, limit: int) -> dict:
+        """Fetch passive DNS resolutions for an IP or a domain.
+        See Also:
+            https://gtidocs.virustotal.com/reference/ip-relationships
+        """
+        return self._http_request(
+            'GET',
+            f'{"ip_addresses" if id["type"] == "ip" else "domains"}/{id["value"]}/resolutions',  # id: {'type': 'ip'|'domain', 'value': <indicator>}
+            params={'limit': limit}
+        )
+
+    def search(self, query: str, limit: int) -> dict:
+        """Run an intelligence search query, returning up to `limit` results.
+        See Also:
+            https://gtidocs.virustotal.com/reference/intelligence-search
+        """
+        return self._http_request(
+            'GET',
+            'search',
+            params={'query': query, 'limit': limit}
+        )
+
+    def get_analysis(self, analysis_id: str) -> dict:
+        """Fetch an analysis object by its ID.
+        See Also:
+            https://gtidocs.virustotal.com/reference/analysis
+        """
+        return self._http_request(
+            'GET',
+            f'/analyses/{analysis_id}'
+        )
+
+    def get_private_analysis(self, analysis_id: str) -> dict:
+        """Fetch a private analysis object by its ID.
+        See Also:
+            https://gtidocs.virustotal.com/reference/private-analysis
+        """
+        return self._http_request(
+            'GET',
+            f'private/analyses/{analysis_id}'
+        )
+
+    def get_private_file_from_analysis(self, analysis_id: str) -> dict:
+        """Fetch the analyzed item with its threat severity and verdict attributes.
+        See Also:
+            https://gtidocs.virustotal.com/reference/analysesidrelationship
+        """
+        return self._http_request(
+            'GET',
+            f'private/analyses/{analysis_id}/item?attributes=threat_severity,threat_verdict'
+        )
+
+    def get_file_sigma_analysis(self, file_hash: str) -> dict:
+        """Fetch the sigma analysis of a file hash.
+        See Also:
+            https://gtidocs.virustotal.com/reference/files-relationships
+        """
+        return self._http_request(
+            'GET',
+            f'files/{file_hash}/sigma_analysis',
+        )
+
+
+class ScoreCalculator:
+ """
+ Calculating DBotScore of files, ip, etc.
+ """
+ DEFAULT_SUSPICIOUS_THRESHOLD = 5
+ DEFAULT_RELATIONSHIP_SUSPICIOUS_THRESHOLD = 2
+ GTI_MALICIOUS_VERDICT = 'VERDICT_MALICIOUS'
+ GTI_SUSPICIOUS_VERDICT = 'VERDICT_SUSPICIOUS'
+
+ logs: List[str]
+
+ # General
+ trusted_vendors_threshold: int
+ trusted_vendors: List[str]
+ gti_malicious: bool
+ gti_suspicious: bool
+
+ # IP
+ ip_threshold: dict[str, int]
+
+ # URL
+ url_threshold: dict[str, int]
+
+ # Domain
+ domain_threshold: dict[str, int]
+ domain_popularity_ranking: int
+
+ # File
+ file_threshold: dict[str, int]
+ sigma_ids_threshold: int
+ crowdsourced_yara_rules_enabled: bool
+ crowdsourced_yara_rules_threshold: int
+
+ def __init__(self, params: dict):
+ self.trusted_vendors = argToList(params['preferredVendors'])
+ self.trusted_vendors_threshold = arg_to_number_must_int(
+ params['preferredVendorsThreshold'],
+ arg_name='Preferred Vendor Threshold',
+ required=True
+ )
+ self.file_threshold = {
+ 'malicious': arg_to_number_must_int(
+ params['fileThreshold'],
+ arg_name='File Malicious Threshold',
+ required=True),
+ 'suspicious': arg_to_number_must_int(
+ params['fileSuspiciousThreshold'] or self.DEFAULT_SUSPICIOUS_THRESHOLD,
+ arg_name='File Suspicious Threshold',
+ required=True)
+ }
+ self.ip_threshold = {
+ 'malicious': arg_to_number_must_int(
+ params['ipThreshold'],
+ arg_name='IP Malicious Threshold',
+ required=True),
+ 'suspicious': arg_to_number_must_int(
+ params['ipSuspiciousThreshold'] or self.DEFAULT_SUSPICIOUS_THRESHOLD,
+ arg_name='IP Suspicious Threshold',
+ required=True)
+ }
+ self.url_threshold = {
+ 'malicious': arg_to_number_must_int(
+ params['urlThreshold'],
+ arg_name='URL Malicious Threshold',
+ required=True),
+ 'suspicious': arg_to_number_must_int(
+ params['urlSuspiciousThreshold'] or self.DEFAULT_SUSPICIOUS_THRESHOLD,
+ arg_name='URL Suspicious Threshold',
+ required=True)
+ }
+ self.domain_threshold = {
+ 'malicious': arg_to_number_must_int(
+ params['domainThreshold'],
+ arg_name='Domain Malicious Threshold',
+ required=True),
+ 'suspicious': arg_to_number_must_int(
+ params['domainSuspiciousThreshold'] or self.DEFAULT_SUSPICIOUS_THRESHOLD,
+ arg_name='Domain Suspicious Threshold',
+ required=True)
+ }
+ self.crowdsourced_yara_rules_enabled = argToBoolean(params['crowdsourced_yara_rules_enabled'])
+ self.crowdsourced_yara_rules_threshold = arg_to_number_must_int(params['yaraRulesThreshold'])
+ self.sigma_ids_threshold = arg_to_number_must_int(
+ params['SigmaIDSThreshold'],
+ arg_name='Sigma and Intrusion Detection Rules Threshold',
+ required=True
+ )
+ self.domain_popularity_ranking = arg_to_number_must_int(
+ params['domain_popularity_ranking'],
+ arg_name='Domain Popularity Ranking Threshold',
+ required=True
+ )
+ self.gti_malicious = argToBoolean(params.get('gti_malicious', False))
+ self.gti_suspicious = argToBoolean(params.get('gti_suspicious', False))
+ self.logs = []
+
+ def get_logs(self) -> str:
+ """Returns the log string
+ """
+ return '\n'.join(self.logs)
+
+ def _is_by_threshold(self, analysis_stats: dict, threshold: int, suspicious: bool = False) -> bool:
+ """Determines whatever the indicator malicious/suspicious by threshold.
+ if number of malicious (+ suspicious) >= threshold -> Malicious (Suspicious)
+
+ Args:
+ analysis_stats: the analysis stats from the response.
+ threshold: the threshold of the indicator type.
+ suspicious: whether suspicious is also added.
+
+ Returns:
+ Whatever the indicator is malicious/suspicious by threshold.
+ """
+ total = analysis_stats.get('malicious', 0)
+ if suspicious:
+ total += analysis_stats.get('suspicious', 0)
+ verdict = 'suspicious' if suspicious else 'malicious'
+ self.logs.append(f'{total} vendors found {verdict}.\n'
+ f'The {verdict} threshold is {threshold}.')
+ if total >= threshold:
+ self.logs.append(f'Found as {verdict}: {total} >= {threshold}.')
+ return True
+ self.logs.append(f'Not found {verdict} by threshold: {total} < {threshold}.')
+ return False
+
+ def is_suspicious_by_threshold(self, analysis_stats: dict, threshold: int) -> bool:
+ """Determines whatever the indicator suspicious by threshold.
+ if number of malicious + suspicious >= threshold -> Suspicious
+
+ Args:
+ analysis_stats: the analysis stats from the response
+ threshold: the threshold of the indicator type.
+
+ Returns:
+ Whatever the indicator is suspicious by threshold.
+ """
+ return self._is_by_threshold(analysis_stats, threshold, suspicious=True)
+
+ def is_good_by_popularity_ranks(self, popularity_ranks: dict) -> Optional[bool]:
+ """Analyzing popularity ranks.
+ if popularity ranks exist and average rank is < threshold -> Good
+ Args:
+ popularity_ranks: the popularity ranks object from response
+
+ Returns:
+ Whatever the indicator is good or not by popularity rank.
+ """
+ if popularity_ranks:
+ self.logs.append('Found popularity ranks. Analyzing.')
+ average = sum(rank.get('rank', 0) for rank in popularity_ranks.values()) / len(popularity_ranks)
+ self.logs.append(
+ f'The average of the ranks is {average} and the threshold is {self.domain_popularity_ranking}')
+ if average <= self.domain_popularity_ranking:
+ self.logs.append('Indicator is good by popularity ranks.')
+ return True
+ else:
+ self.logs.append('Indicator might not be good by it\'s popularity ranks.')
+ return False
+ self.logs.append('Could not determine rank by popularity, No popularity ranks data.')
+ return None
+
+ def is_suspicious_by_rules(self, file_response: dict) -> bool:
+ """Check if indicator is suspicious by rules analysis.
+
+ crowdsourced_yara_results >= yara_rules_threshold ||
+ sigma_analysis_stats.high + critical >= sigma_id_threshold ||
+ crowdsourced_ids_stats.high + critical >= sigma_id_threshold -> suspicious
+
+ Args:
+ file_response: the file response
+
+ Returns:
+ Whatever the file is suspicious by rules analysis.
+ """
+ data = file_response.get('data', {})
+ if self.crowdsourced_yara_rules_enabled:
+ self.logs.append('Crowdsourced Yara Rules analyzing enabled.')
+ if (total_yara_rules := len(
+ data.get('crowdsourced_yara_results', []))) >= self.crowdsourced_yara_rules_threshold:
+ self.logs.append(
+ 'Found malicious by finding more Crowdsourced Yara Rules than threshold. \n'
+ f'{total_yara_rules} >= {self.crowdsourced_yara_rules_threshold}')
+ return True
+ if sigma_rules := data.get('sigma_analysis_stats'):
+ self.logs.append('Found sigma rules, analyzing.')
+ sigma_high, sigma_critical = sigma_rules.get('high', 0), sigma_rules.get('critical', 0)
+ if (sigma_high + sigma_critical) >= self.sigma_ids_threshold:
+ self.logs.append(
+ f'Found malicious, {sigma_high + sigma_critical} >= {self.sigma_ids_threshold}. ')
+ return True
+ else:
+ self.logs.append('Not found malicious by sigma. ')
+ else:
+ self.logs.append('Not found sigma analysis. Skipping. ')
+ if crowdsourced_ids_stats := data.get('crowdsourced_ids_stats'):
+ self.logs.append('Found crowdsourced IDS analysis, analyzing. ')
+ ids_high, ids_critical = crowdsourced_ids_stats.get('high'), crowdsourced_ids_stats.get('critical')
+ if (ids_high + ids_critical) >= self.sigma_ids_threshold:
+ self.logs.append(
+ f'Found malicious, {(ids_high + ids_critical) >= self.sigma_ids_threshold}.')
+ return True
+ else:
+ self.logs.append('Not found malicious by sigma.')
+ else:
+ self.logs.append('Not found crowdsourced IDS analysis. Skipping.')
+ else:
+ self.logs.append('Crowdsourced Yara Rules analyzing is not enabled. Skipping.')
+ return False
+
+ def is_preferred_vendors_pass_malicious(self, analysis_results: dict) -> bool:
+ """Is the indicator counts as malicious by predefined malicious vendors.
+ trusted_vendors.malicious >= trusted_vendors_threshold -> Malicious
+ The function takes only the latest 20 results.
+
+ Args:
+ analysis_results: The results of the analysis.
+
+ Returns:
+ Whatever the indicator is malicious or not by preferred vendors.
+ """
+ recent = {key: analysis_results[key] for key in list(analysis_results.keys())[:20]}
+ preferred_vendor_scores = {
+ vendor: recent[vendor] for vendor in self.trusted_vendors if vendor in recent
+ }
+ malicious_trusted_vendors = [
+ item for item in preferred_vendor_scores.values() if item.get('category') == 'malicious'
+ ]
+ if len(malicious_trusted_vendors) >= self.trusted_vendors_threshold:
+ self.logs.append(
+ f'{len(malicious_trusted_vendors)} trusted vendors found the hash malicious. \n'
+ f'The trusted vendors threshold is {self.trusted_vendors_threshold}. \n'
+ f'Malicious check: {(len(malicious_trusted_vendors) >= self.trusted_vendors_threshold)=}. '
+ )
+ return True
+ else:
+ self.logs.append(
+ f'Those preferred vendors found the hash malicious: {malicious_trusted_vendors}. '
+ f'They do not pass the threshold {self.trusted_vendors_threshold}. '
+ )
+ return False
+
+ def is_malicious_by_threshold(self, analysis_stats: dict, threshold: int) -> bool:
+ """Determines whatever the indicator malicious by threshold.
+ if number of malicious >= threshold -> Malicious
+
+ Args:
+ analysis_stats: the analysis stats from the response
+ threshold: the threshold of the indicator type.
+
+ Returns:
+ Whatever the indicator is malicious by threshold.
+ """
+ return self._is_by_threshold(analysis_stats, threshold)
+
+ def score_by_threshold(self, analysis_stats: dict, threshold: dict[str, int]) -> int:
+ """Determines the DBOTSCORE of the indicator by threshold only.
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ if self.is_malicious_by_threshold(analysis_stats, threshold['malicious']):
+ return Common.DBotScore.BAD
+ if self.is_suspicious_by_threshold(analysis_stats, threshold['suspicious']):
+ return Common.DBotScore.SUSPICIOUS
+ return Common.DBotScore.GOOD
+
+ def score_by_results_and_stats(self, indicator: str, raw_response: dict, threshold: dict[str, int]) -> int:
+ """Determines indicator score by popularity preferred vendors and threshold.
+
+ Args:
+ indicator: The indicator we analyzing.
+ raw_response: The raw response from API.
+ threshold: Threshold of the indicator.
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ self.logs.append(f'Basic analyzing of "{indicator}"')
+ data = raw_response.get('data', {})
+ attributes = data.get('attributes', {})
+ popularity_ranks = attributes.get('popularity_ranks')
+ last_analysis_results = attributes.get('last_analysis_results')
+ last_analysis_stats = attributes.get('last_analysis_stats')
+ if self.is_good_by_popularity_ranks(popularity_ranks):
+ return Common.DBotScore.GOOD
+ if self.is_preferred_vendors_pass_malicious(last_analysis_results):
+ return Common.DBotScore.BAD
+ return self.score_by_threshold(last_analysis_stats, threshold)
+
+ def is_malicious_by_gti(self, gti_assessment: dict) -> bool:
+ """Determines if an IoC is malicious according to its GTI assessment."""
+ if self.gti_malicious:
+ return gti_assessment.get('verdict', {}).get('value') == self.GTI_MALICIOUS_VERDICT
+ return False
+
+ def is_suspicious_by_gti(self, gti_assessment: dict) -> bool:
+ """Determines if an IoC is suspicious according to its GTI assessment."""
+ if self.gti_suspicious:
+ return gti_assessment.get('verdict', {}).get('value') == self.GTI_SUSPICIOUS_VERDICT
+ return False
+
+ def file_score(self, given_hash: str, raw_response: dict) -> int:
+ """Analyzing file score.
+ The next parameters are analyzed:
+ Preferred vendors
+ Score by threshold
+ Score by rules analysis (YARA, IDS and Sigma, if presents)
+
+ Args:
+ given_hash: The hash we're analyzing
+ raw_response: The response from the API
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ self.logs.append(f'Analysing file hash {given_hash}.')
+ data = raw_response.get('data', {})
+ attributes = data.get('attributes', {})
+ analysis_results = attributes.get('last_analysis_results', {})
+ analysis_stats = attributes.get('last_analysis_stats', {})
+
+ # GTI assessment
+ if self.is_malicious_by_gti(attributes.get('gti_assessment', {})):
+ return Common.DBotScore.BAD
+
+ # Trusted vendors
+ if self.is_preferred_vendors_pass_malicious(analysis_results):
+ return Common.DBotScore.BAD
+
+ score = self.score_by_threshold(analysis_stats, self.file_threshold)
+ if score == Common.DBotScore.BAD:
+ return Common.DBotScore.BAD
+
+ suspicious_by_rules = self.is_suspicious_by_rules(raw_response)
+ if score == Common.DBotScore.SUSPICIOUS and suspicious_by_rules:
+ self.logs.append(
+ f'Hash: "{given_hash}" was found malicious as the '
+ 'hash is suspicious both by threshold and rules analysis.')
+ return Common.DBotScore.BAD
+ elif suspicious_by_rules:
+ self.logs.append(
+ f'Hash: "{given_hash}" was found suspicious by rules analysis.')
+ return Common.DBotScore.SUSPICIOUS
+ elif score == Common.DBotScore.SUSPICIOUS:
+ self.logs.append(
+ f'Hash: "{given_hash}" was found suspicious by passing the threshold analysis.')
+ return Common.DBotScore.SUSPICIOUS
+ elif self.is_suspicious_by_gti(attributes.get('gti_assessment', {})):
+ self.logs.append(
+ f'Hash: "{given_hash}" was found suspicious by gti assessment.')
+ return Common.DBotScore.SUSPICIOUS
+ self.logs.append(f'Hash: "{given_hash}" was found good.')
+ return Common.DBotScore.GOOD # Nothing caught
+
+ def ip_score(self, indicator: str, raw_response: dict) -> int:
+ """Determines indicator score by popularity preferred vendors and threshold.
+
+ Args:
+ indicator: The indicator we analyzing.
+ raw_response: The response from the API
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ data = raw_response.get('data', {})
+ attributes = data.get('attributes', {})
+
+ # GTI assessment
+ if self.is_malicious_by_gti(attributes.get('gti_assessment', {})):
+ return Common.DBotScore.BAD
+
+ score = self.score_by_results_and_stats(indicator, raw_response, self.ip_threshold)
+ if score == Common.DBotScore.GOOD and self.is_suspicious_by_gti(attributes.get('gti_assessment', {})):
+ score = Common.DBotScore.SUSPICIOUS
+
+ return score
+
+ def url_score(self, indicator: str, raw_response: dict) -> int:
+ """Determines indicator score by popularity preferred vendors and threshold.
+
+ Args:
+ indicator: The indicator we analyzing.
+ raw_response: The raw response from API.
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ data = raw_response.get('data', {})
+ attributes = data.get('attributes', {})
+
+ # GTI assessment
+ if self.is_malicious_by_gti(attributes.get('gti_assessment', {})):
+ return Common.DBotScore.BAD
+
+ score = self.score_by_results_and_stats(indicator, raw_response, self.url_threshold)
+ if score == Common.DBotScore.GOOD and self.is_suspicious_by_gti(attributes.get('gti_assessment', {})):
+ score = Common.DBotScore.SUSPICIOUS
+
+ return score
+
+ def domain_score(self, indicator: str, raw_response: dict) -> int:
+ """Determines indicator score by popularity preferred vendors and threshold.
+
+ Args:
+ indicator: The indicator we analyzing.
+ raw_response: The raw response from API.
+
+ Returns:
+ DBotScore of the indicator. Can by Common.DBotScore.BAD, Common.DBotScore.SUSPICIOUS or
+ Common.DBotScore.GOOD
+ """
+ data = raw_response.get('data', {})
+ attributes = data.get('attributes', {})
+
+ if self.is_malicious_by_gti(attributes.get('gti_assessment', {})):
+ return Common.DBotScore.BAD
+
+ score = self.score_by_results_and_stats(indicator, raw_response, self.domain_threshold)
+ if score == Common.DBotScore.GOOD and self.is_suspicious_by_gti(attributes.get('gti_assessment', {})):
+ score = Common.DBotScore.SUSPICIOUS
+
+ return score
+ # endregion
+
+
+# region Helper functions
+
+
def create_relationships(entity_a: str, entity_a_type: str, relationships_response: dict, reliability):
    """Build EntityRelationship objects from the API relationships section.

    Args:
        entity_a: Source of the relationship.
        entity_a_type: Type of the source of the relationship.
        relationships_response: The relationship section of the API response.
        reliability: The reliability of the source.

    Returns:
        A list of EntityRelationship objects.
    """
    relationships: List[EntityRelationship] = []
    # Relationship-name mapping depends only on the source type; hoist it.
    name_mapping = RELATIONSHIP_TYPE.get(entity_a_type.lower(), {})
    for relationship_type, relationship_type_raw in relationships_response.items():
        raw_data = relationship_type_raw.get('data', [])
        if not raw_data:
            continue
        # The API may return a single object instead of a list; normalize.
        if isinstance(raw_data, dict):
            raw_data = [raw_data]
        for relation in raw_data:
            name = name_mapping.get(relationship_type)
            entity_b = relation.get('id', '')
            entity_b_type = INDICATOR_TYPE.get(relation.get('type', '').lower())
            if not (entity_b and entity_b_type and name):
                demisto.info(
                    f"WARNING: Relationships will not be created to entity A {entity_a} with relationship name {name}")
                continue
            # URL relationships carry the actual URL in context_attributes.
            if entity_b_type == FeedIndicatorType.URL:
                entity_b = dict_safe_get(relation, ['context_attributes', 'url'])
            relationships.append(
                EntityRelationship(entity_a=entity_a, entity_a_type=entity_a_type, name=name,
                                   entity_b=entity_b, entity_b_type=entity_b_type, source_reliability=reliability,
                                   brand=INTEGRATION_NAME))
    return relationships
+
+
def arg_to_number_must_int(arg: Any, arg_name: Optional[str] = None, required: bool = False) -> int:
    """Wrapper of arg_to_number that guarantees an int return.

    Exists to satisfy mypy (arg_to_number is typed Optional[int]).

    Args:
        arg: The raw argument value.
        arg_name: Name of the argument, used in the error message.
        required: Whether the argument must be present.

    Returns:
        The parsed integer.

    Raises:
        AssertionError: If the value could not be parsed into an int.
    """
    arg_num = arg_to_number(arg, arg_name, required)
    # A bare `assert` is stripped under `python -O`, which would silently
    # return None and break the int contract; raise explicitly instead
    # (same exception type callers could already observe).
    if not isinstance(arg_num, int):
        raise AssertionError(f'Argument "{arg_name or arg}" is not a valid integer.')
    return arg_num
+
+
def epoch_to_timestamp(epoch: Union[int, str]) -> Optional[str]:
    """Converts an epoch timestamp to a 'YYYY-MM-DD HH:MM:SSZ' UTC string.

    Args:
        epoch: Time to convert (seconds since the Unix epoch).

    Returns:
        A formatted string if succeeded. If not, returns None.
    """
    from datetime import timezone  # local import keeps module imports untouched

    try:
        # datetime.utcfromtimestamp is deprecated since Python 3.12; an aware
        # UTC datetime yields the identical formatted string.
        return datetime.fromtimestamp(int(epoch), tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ")
    except (TypeError, OSError, ValueError):
        return None
+
+
def decrease_data_size(data: Union[dict, list]) -> Union[dict, list]:
    """Minifying data size.

    Args:
        data: the data object (or list of objects) from the raw response.

    Returns:
        the same data without:
            data['attributes']['last_analysis_results']
            data['attributes']['pe_info']
            data['attributes']['crowdsourced_ids_results']
            data['attributes']['autostart_locations']
            data['attributes']['sandbox_verdicts']
            data['attributes']['sigma_analysis_summary']
    """
    attributes_to_remove = [
        'last_analysis_results',
        'pe_info',
        'crowdsourced_ids_results',
        'autostart_locations',
        'sandbox_verdicts',
        'sigma_analysis_summary',
    ]
    if isinstance(data, list):
        return [decrease_data_size(item) for item in data]
    attributes = data.get('attributes')
    # Guard: error/partial responses may carry no 'attributes' section;
    # the previous data['attributes'] access raised KeyError on those.
    if isinstance(attributes, dict):
        for attribute in attributes_to_remove:
            attributes.pop(attribute, None)
    return data
+
+
def _get_error_result(client: Client, ioc_id: str, ioc_type: str, message: str) -> CommandResults:
    """Build a NONE-score CommandResults for an IoC that could not be enriched.

    Args:
        client: The client (provides the source reliability).
        ioc_id: The indicator value.
        ioc_type: One of 'file', 'domain', 'ip' or 'url'.
        message: Suffix describing why the enrichment failed.

    Returns:
        CommandResults carrying the indicator with a NONE DBot score.
    """
    dbot_type = ioc_type.upper()
    assert dbot_type in ('FILE', 'DOMAIN', 'IP', 'URL')
    # Common class names: 'IP'/'URL' stay upper-case, others are capitalized.
    common_type = dbot_type.capitalize() if dbot_type in ('FILE', 'DOMAIN') else dbot_type
    desc = f'{common_type} "{ioc_id}" {message}'
    dbot = Common.DBotScore(
        ioc_id,
        getattr(DBotScoreType, dbot_type),
        INTEGRATION_NAME,
        Common.DBotScore.NONE,
        desc,
        client.reliability,
    )
    options: dict[str, Common.DBotScore | str] = {'dbot_score': dbot}
    indicator_key = get_hash_type(ioc_id) if dbot_type == 'FILE' else dbot_type.lower()
    options[indicator_key] = ioc_id
    indicator = getattr(Common, common_type)(**options)
    return CommandResults(indicator=indicator, readable_output=desc)
+
+
def build_unknown_output(client: Client, ioc_id: str, ioc_type: str) -> CommandResults:
    """Build a NONE-score result for an IoC not found in GoogleThreatIntelligence."""
    return _get_error_result(client, ioc_id, ioc_type, 'was not found in GoogleThreatIntelligence.')


def build_quota_exceeded_output(client: Client, ioc_id: str, ioc_type: str) -> CommandResults:
    """Build a NONE-score result for an IoC skipped because the API quota was exceeded."""
    return _get_error_result(client, ioc_id, ioc_type, 'was not enriched. Quota was exceeded.')


def build_error_output(client: Client, ioc_id: str, ioc_type: str) -> CommandResults:
    """Build a NONE-score result for an IoC that failed processing."""
    return _get_error_result(client, ioc_id, ioc_type, 'could not be processed.')
+
+
def build_unknown_file_output(client: Client, file: str) -> CommandResults:
    """File flavor of build_unknown_output."""
    return build_unknown_output(client, file, 'file')


def build_quota_exceeded_file_output(client: Client, file: str) -> CommandResults:
    """File flavor of build_quota_exceeded_output."""
    return build_quota_exceeded_output(client, file, 'file')


def build_error_file_output(client: Client, file: str) -> CommandResults:
    """File flavor of build_error_output."""
    return build_error_output(client, file, 'file')


def build_unknown_domain_output(client: Client, domain: str) -> CommandResults:
    """Domain flavor of build_unknown_output."""
    return build_unknown_output(client, domain, 'domain')


def build_quota_exceeded_domain_output(client: Client, domain: str) -> CommandResults:
    """Domain flavor of build_quota_exceeded_output."""
    return build_quota_exceeded_output(client, domain, 'domain')


def build_error_domain_output(client: Client, domain: str) -> CommandResults:
    """Domain flavor of build_error_output."""
    return build_error_output(client, domain, 'domain')


def build_unknown_url_output(client: Client, url: str) -> CommandResults:
    """URL flavor of build_unknown_output."""
    return build_unknown_output(client, url, 'url')


def build_quota_exceeded_url_output(client: Client, url: str) -> CommandResults:
    """URL flavor of build_quota_exceeded_output."""
    return build_quota_exceeded_output(client, url, 'url')


def build_error_url_output(client: Client, url: str) -> CommandResults:
    """URL flavor of build_error_output."""
    return build_error_output(client, url, 'url')


def build_unknown_ip_output(client: Client, ip: str) -> CommandResults:
    """IP flavor of build_unknown_output."""
    return build_unknown_output(client, ip, 'ip')


def build_quota_exceeded_ip_output(client: Client, ip: str) -> CommandResults:
    """IP flavor of build_quota_exceeded_output."""
    return build_quota_exceeded_output(client, ip, 'ip')


def build_error_ip_output(client: Client, ip: str) -> CommandResults:
    """IP flavor of build_error_output."""
    return build_error_output(client, ip, 'ip')


def build_skipped_enrichment_ip_output(client: Client, ip: str) -> CommandResults:
    """Build a NONE-score result for a private IP skipped by configuration."""
    return _get_error_result(client, ip, 'ip',
                             'was not enriched. Reputation lookups have been disabled for private IP addresses.')
+
+
def _get_domain_indicator(client: Client, score_calculator: ScoreCalculator, domain: str, raw_response: dict):
    """Build a Common.Domain indicator: WHOIS fields, detections, DBot score and relationships."""
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})
    whois = get_whois(attributes.get('whois', ''))

    relationships = create_relationships(
        entity_a=domain,
        entity_a_type=FeedIndicatorType.Domain,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    score = score_calculator.domain_score(domain, raw_response)
    logs = score_calculator.get_logs()
    demisto.debug(logs)

    dbot_score = Common.DBotScore(
        domain,
        DBotScoreType.DOMAIN,
        INTEGRATION_NAME,
        score=score,
        malicious_description=logs,
        reliability=client.reliability
    )
    return Common.Domain(
        domain=domain,
        name_servers=whois['Name Server'],
        creation_date=whois['Creation Date'],
        updated_date=whois['Updated Date'],
        expiration_date=whois['Registry Expiry Date'],
        admin_name=whois['Admin Organization'],
        admin_email=whois['Admin Email'],
        admin_country=whois['Admin Country'],
        registrant_email=whois['Registrant Email'],
        registrant_country=whois['Registrant Country'],
        registrar_name=whois['Registrar'],
        registrar_abuse_email=whois['Registrar Abuse Contact Email'],
        registrar_abuse_phone=whois['Registrar Abuse Contact Phone'],
        detection_engines=sum(stats.values()),
        positive_detections=stats.get('malicious', 0),
        dbot_score=dbot_score,
        relationships=relationships
    )
+
+
def _get_url_indicator(client: Client, score_calculator: ScoreCalculator, url: str, raw_response: dict):
    """Build a Common.URL indicator: categories, detections, DBot score and relationships."""
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})

    relationships = create_relationships(
        entity_a=url,
        entity_a_type=FeedIndicatorType.URL,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    score = score_calculator.url_score(url, raw_response)
    logs = score_calculator.get_logs()
    demisto.debug(logs)

    dbot_score = Common.DBotScore(
        url,
        DBotScoreType.URL,
        INTEGRATION_NAME,
        score=score,
        reliability=client.reliability,
        malicious_description=logs
    )
    return Common.URL(
        url,
        category=attributes.get('categories'),
        detection_engines=sum(stats.values()),
        positive_detections=stats.get('malicious', 0),
        relationships=relationships,
        dbot_score=dbot_score
    )
+
+
def _get_ip_indicator(client: Client, score_calculator: ScoreCalculator, ip: str, raw_response: dict):
    """Build a Common.IP indicator: ASN/geo data, detections, DBot score and relationships."""
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})

    relationships = create_relationships(
        entity_a=ip,
        entity_a_type=FeedIndicatorType.IP,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    score = score_calculator.ip_score(ip, raw_response)
    logs = score_calculator.get_logs()
    demisto.debug(logs)

    dbot_score = Common.DBotScore(
        ip,
        DBotScoreType.IP,
        INTEGRATION_NAME,
        score=score,
        malicious_description=logs,
        reliability=client.reliability
    )
    return Common.IP(
        ip,
        asn=attributes.get('asn'),
        geo_country=attributes.get('country'),
        detection_engines=sum(stats.values()),
        positive_engines=stats.get('malicious', 0),
        as_owner=attributes.get('as_owner'),
        relationships=relationships,
        dbot_score=dbot_score
    )
+
+
def _get_file_indicator(client: Client, score_calculator: ScoreCalculator, file_hash: str, raw_response: dict):
    """Build a Common.File indicator: hashes, exiftool metadata, signature, DBot score and relationships."""
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    exiftool = attributes.get('exiftool', {})
    signature_info = attributes.get('signature_info', {})

    score = score_calculator.file_score(file_hash, raw_response)
    logs = score_calculator.get_logs()
    demisto.debug(logs)

    relationships = create_relationships(
        entity_a=file_hash,
        entity_a_type=FeedIndicatorType.File,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    dbot_score = Common.DBotScore(
        file_hash,
        DBotScoreType.FILE,
        integration_name=INTEGRATION_NAME,
        score=score,
        malicious_description=logs,
        reliability=client.reliability
    )
    signature = Common.FileSignature(
        authentihash=attributes.get('authentihash'),
        copyright=signature_info.get('copyright'),
        file_version=signature_info.get('file version'),
        description=signature_info.get('description'),
        internal_name=signature_info.get('internal name'),
        original_name=signature_info.get('original name')
    )
    return Common.File(
        dbot_score=dbot_score,
        name=exiftool.get('OriginalFileName'),
        size=attributes.get('size'),
        sha1=attributes.get('sha1'),
        sha256=attributes.get('sha256'),
        file_type=exiftool.get('MIMEType'),
        md5=attributes.get('md5'),
        ssdeep=attributes.get('ssdeep'),
        extension=exiftool.get('FileTypeExtension'),
        company=exiftool.get('CompanyName'),
        product_name=exiftool.get('ProductName'),
        tags=attributes.get('tags'),
        signature=signature,
        relationships=relationships
    )
+
+
def build_domain_output(
        client: Client,
        score_calculator: ScoreCalculator,
        domain: str,
        raw_response: dict,
        extended_data: bool
) -> CommandResults:
    """Build the CommandResults for the domain reputation command.

    Args:
        client: API client (provides the source reliability).
        score_calculator: Calculator used to derive the DBot score.
        domain: The domain that was looked up.
        raw_response: The raw API response.
        extended_data: When False, bulky attributes are stripped from outputs.

    Returns:
        CommandResults with indicator, readable table, context and relationships.
    """
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})
    whois = get_whois(attributes.get('whois', ''))
    gti_assessment = attributes.get('gti_assessment', {})

    relationships_list = create_relationships(
        entity_a=domain,
        entity_a_type=FeedIndicatorType.Domain,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    domain_indicator = _get_domain_indicator(client, score_calculator, domain, raw_response)

    if not extended_data:
        data = decrease_data_size(data)

    table = {
        **data,
        **attributes,
        **whois,
        'last_modified': epoch_to_timestamp(attributes.get('last_modification_date')),
        'positives': f"{stats.get('malicious', 0)}/{sum(stats.values())}",
        'gti_threat_score': gti_assessment.get('threat_score', {}).get('value'),
        'gti_severity': gti_assessment.get('severity', {}).get('value'),
        'gti_verdict': gti_assessment.get('verdict', {}).get('value'),
    }
    return CommandResults(
        outputs_prefix=f'{INTEGRATION_ENTRY_CONTEXT}.Domain',
        outputs_key_field='id',
        indicator=domain_indicator,
        readable_output=tableToMarkdown(
            f'Domain data of {domain}',
            table,
            headers=[
                'id',
                'Registrant Country',
                'Registrar',
                'last_modified',
                'reputation',
                'positives',
                'gti_threat_score',
                'gti_severity',
                'gti_verdict',
            ],
            removeNull=True,
            headerTransform=string_to_table_header
        ),
        outputs=data,
        raw_response=raw_response,
        relationships=relationships_list
    )
+
+
def build_url_output(
        client: Client,
        score_calculator: ScoreCalculator,
        url: str,
        raw_response: dict,
        extended_data: bool
) -> CommandResults:
    """Build the CommandResults for the URL reputation command.

    Args:
        client: API client (provides the source reliability).
        score_calculator: Calculator used to derive the DBot score.
        url: The URL that was looked up.
        raw_response: The raw API response.
        extended_data: When False, bulky attributes are stripped from outputs.

    Returns:
        CommandResults with indicator, readable table, context and relationships.
    """
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})
    gti_assessment = attributes.get('gti_assessment', {})

    relationships_list = create_relationships(
        entity_a=url,
        entity_a_type=FeedIndicatorType.URL,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    url_indicator = _get_url_indicator(client, score_calculator, url, raw_response)

    if not extended_data:
        data = decrease_data_size(data)

    table = {
        **data,
        **attributes,
        'url': url,
        'last_modified': epoch_to_timestamp(attributes.get('last_modification_date')),
        'positives': f"{stats.get('malicious', 0)}/{sum(stats.values())}",
        'gti_threat_score': gti_assessment.get('threat_score', {}).get('value'),
        'gti_severity': gti_assessment.get('severity', {}).get('value'),
        'gti_verdict': gti_assessment.get('verdict', {}).get('value'),
    }
    return CommandResults(
        outputs_prefix=f'{INTEGRATION_ENTRY_CONTEXT}.URL',
        outputs_key_field='id',
        indicator=url_indicator,
        readable_output=tableToMarkdown(
            f'URL data of "{url}"',
            table,
            headers=[
                'url',
                'title',
                'has_content',
                'last_http_response_content_sha256',
                'last_modified',
                'reputation',
                'positives',
                'gti_threat_score',
                'gti_severity',
                'gti_verdict',
            ],
            removeNull=True,
            headerTransform=string_to_table_header
        ),
        outputs=data,
        raw_response=raw_response,
        relationships=relationships_list
    )
+
+
def build_ip_output(
        client: Client,
        score_calculator: ScoreCalculator,
        ip: str,
        raw_response: dict,
        extended_data: bool
) -> CommandResults:
    """Build the CommandResults for the IP reputation command.

    Args:
        client: API client (provides the source reliability).
        score_calculator: Calculator used to derive the DBot score.
        ip: The IP address that was looked up.
        raw_response: The raw API response.
        extended_data: When False, bulky attributes are stripped from outputs.

    Returns:
        CommandResults with indicator, readable table, context and relationships.
    """
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})

    last_analysis_stats = attributes.get('last_analysis_stats', {})
    positive_engines = last_analysis_stats.get('malicious', 0)
    detection_engines = sum(last_analysis_stats.values())

    relationships_response = data.get('relationships', {})
    relationships_list = create_relationships(
        entity_a=ip,
        entity_a_type=FeedIndicatorType.IP,
        relationships_response=relationships_response,
        reliability=client.reliability
    )

    ip_indicator = _get_ip_indicator(client, score_calculator, ip, raw_response)

    if not extended_data:
        data = decrease_data_size(data)

    return CommandResults(
        outputs_prefix=f'{INTEGRATION_ENTRY_CONTEXT}.IP',
        outputs_key_field='id',
        indicator=ip_indicator,
        readable_output=tableToMarkdown(
            f'IP reputation of {ip}:',
            {
                **data,
                **attributes,
                'last_modified': epoch_to_timestamp(attributes.get('last_modification_date')),
                'positives': f'{positive_engines}/{detection_engines}',
                'gti_threat_score': attributes.get('gti_assessment', {}).get('threat_score', {}).get('value'),
                'gti_severity': attributes.get('gti_assessment', {}).get('severity', {}).get('value'),
                'gti_verdict': attributes.get('gti_assessment', {}).get('verdict', {}).get('value'),
            },
            headers=[
                'id',
                'network',
                'country',
                'as_owner',
                'last_modified',
                'reputation',
                'positives',
                'gti_threat_score',
                'gti_severity',
                'gti_verdict',
            ],
            # Consistency: the domain/URL/file builders all pass removeNull=True;
            # IP was the only one missing it.
            removeNull=True,
            headerTransform=string_to_table_header
        ),
        outputs=data,
        raw_response=raw_response,
        relationships=relationships_list
    )
+
+
def build_file_output(
        client: Client,
        score_calculator: ScoreCalculator,
        file_hash: str,
        raw_response: dict,
        extended_data: bool
) -> CommandResults:
    """Build the CommandResults for the file reputation command.

    Args:
        client: API client (provides the source reliability).
        score_calculator: Calculator used to derive the DBot score.
        file_hash: The file hash that was looked up.
        raw_response: The raw API response.
        extended_data: When False, bulky attributes are stripped from outputs.

    Returns:
        CommandResults with indicator, readable table, context and relationships.
    """
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = attributes.get('last_analysis_stats', {})
    gti_assessment = attributes.get('gti_assessment', {})

    relationships_list = create_relationships(
        entity_a=file_hash,
        entity_a_type=FeedIndicatorType.File,
        relationships_response=data.get('relationships', {}),
        reliability=client.reliability
    )

    file_indicator = _get_file_indicator(client, score_calculator, file_hash, raw_response)

    if not extended_data:
        data = decrease_data_size(data)

    table = {
        **data,
        **attributes,
        'positives': f"{stats.get('malicious', 0)}/{sum(stats.values())}",
        'creation_date': epoch_to_timestamp(attributes.get('creation_date')),
        'last_modified': epoch_to_timestamp(attributes.get('last_modification_date', 0)),
        'gti_threat_score': gti_assessment.get('threat_score', {}).get('value'),
        'gti_severity': gti_assessment.get('severity', {}).get('value'),
        'gti_verdict': gti_assessment.get('verdict', {}).get('value'),
    }
    return CommandResults(
        outputs_prefix=f'{INTEGRATION_ENTRY_CONTEXT}.File',
        outputs_key_field='id',
        indicator=file_indicator,
        readable_output=tableToMarkdown(
            f'Results of file hash {file_hash}',
            table,
            headers=[
                'sha1',
                'sha256',
                'md5',
                'meaningful_name',
                'type_extension',
                'creation_date',
                'last_modified',
                'reputation',
                'positives',
                'gti_threat_score',
                'gti_severity',
                'gti_verdict',
            ],
            removeNull=True,
            headerTransform=string_to_table_header
        ),
        outputs=data,
        raw_response=raw_response,
        relationships=relationships_list
    )
+
+
def build_private_file_output(file_hash: str, raw_response: dict) -> CommandResults:
    """Build the CommandResults for a private file hash lookup.

    Args:
        file_hash: The file hash that was looked up.
        raw_response: The raw API response.

    Returns:
        CommandResults with threat severity/verdict summary and context outputs.
    """
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    threat_severity = attributes.get('threat_severity', {})
    severity_level = threat_severity.get('threat_severity_level', '')
    severity_data = threat_severity.get('threat_severity_data', {})
    verdict = attributes.get('threat_verdict', '')

    table = {
        **attributes,
        # Map raw enum values to their human-readable forms, falling back to raw.
        'threat_severity_level': SEVERITY_LEVELS.get(severity_level, severity_level),
        'popular_threat_category': severity_data.get('popular_threat_category', ''),
        'threat_verdict': VERDICTS.get(verdict, verdict),
    }
    return CommandResults(
        outputs_prefix=f'{INTEGRATION_ENTRY_CONTEXT}.File',
        outputs_key_field='id',
        readable_output=tableToMarkdown(
            f'Results of file hash {file_hash}',
            table,
            headers=[
                'sha1',
                'sha256',
                'md5',
                'meaningful_name',
                'type_extension',
                'threat_severity_level',
                'popular_threat_category',
                'threat_verdict',
            ],
            removeNull=True,
            headerTransform=string_to_table_header
        ),
        outputs=data,
        raw_response=raw_response,
    )
+
+
def get_whois(whois_string: str) -> defaultdict:
    """Parse a raw WHOIS string into a mapping.

    Repeated keys are collected into a list; looking up a missing key yields
    None (the mapping is a defaultdict).

    Args:
        whois_string: whois text from the domain API call.

    Returns:
        A parsed whois mapping.

    Examples:
        >>> get_whois('key1:value\\nkey2:value2')
        defaultdict({'key1': 'value', 'key2': 'value2'})
    """
    parsed: defaultdict = defaultdict(lambda: None)
    for line in whois_string.splitlines():
        key, sep, value = line.partition(':')
        if not sep:
            # No colon on this line — not a key/value pair.
            demisto.debug(f'Could not unpack Whois string: {line}. Skipping')
            continue
        key = key.strip()
        value = value.strip()
        if key in parsed:
            # Promote a repeated key's value to a list and accumulate.
            if not isinstance(parsed[key], list):
                parsed[key] = [parsed[key]]
            parsed[key].append(value)
        else:
            parsed[key] = value
    return parsed
+
+
def get_file_context(entry_id: str) -> dict:
    """Fetch the File object for *entry_id* from the incident context.

    Args:
        entry_id: The entry ID of the file.

    Returns:
        File object containing Name, Hashes and more information;
        an empty dict when nothing matched.
    """
    context = demisto.dt(demisto.context(), f'File(val.EntryID === "{entry_id}")')
    if not context:
        return {}
    return context[0] if isinstance(context, list) else context
+
+
def raise_if_ip_not_valid(ip: str):
    """Raise a ValueError when *ip* is not a valid IPv4/IPv6 address.

    Args:
        ip: ip address

    Raises:
        ValueError: If IP is not valid

    Examples:
        >>> raise_if_ip_not_valid('not ip at all')
        Traceback (most recent call last):
        ...
        ValueError: IP "not ip at all" is not valid
        >>> raise_if_ip_not_valid('8.8.8.8')
    """
    if is_ip_valid(ip, accept_v6_ips=True):
        return
    raise ValueError(f'IP "{ip}" is not valid')
+
+
def raise_if_hash_not_valid(file_hash: str):
    """Raise a ValueError when *file_hash* is not SHA-256, SHA-1 or MD5.

    Args:
        file_hash: file hash

    Raises:
        ValueError: if hash is not of type SHA-256, SHA-1 or MD5

    Examples:
        >>> raise_if_hash_not_valid('not a hash')
        Traceback (most recent call last):
        ...
        ValueError: Hash "not a hash" is not of type SHA-256, SHA-1 or MD5
        >>> raise_if_hash_not_valid('7e641f6b9706d860baf09fe418b6cc87')
    """
    if get_hash_type(file_hash) in ('sha256', 'sha1', 'md5'):
        return
    raise ValueError(f'Hash "{file_hash}" is not of type SHA-256, SHA-1 or MD5')
+
+
def encode_url_to_base64(url: str) -> str:
    """Encode a string as URL-safe base64 with the '=' padding removed.

    Args:
        url: A string to encode (typically, but not necessarily, a URL).

    Returns:
        Base64 encoded string with no padding.

    Examples:
        >>> encode_url_to_base64('https://example.com')
        'aHR0cHM6Ly9leGFtcGxlLmNvbQ'
    """
    raw_bytes = url.encode()
    encoded = base64.urlsafe_b64encode(raw_bytes).decode()
    return encoded.strip('=')
+
+# endregion
+
+# region Reputation commands
+
+
def ip_command(client: Client, score_calculator: ScoreCalculator, args: dict,
               relationships: str, disable_private_ip_lookup: bool) -> List[CommandResults]:
    """Enrich one or more IP addresses (the `ip` reputation command).

    1 API Call for regular

    Args:
        client: API client.
        score_calculator: Calculator used to derive the DBot score.
        args: Command arguments; `ip` (comma-separated list) plus optional
            `override_private_lookup` and `extended_data` booleans.
        relationships: Comma-separated relationship names to fetch.
        disable_private_ip_lookup: When True, private IPs are skipped unless
            `override_private_lookup` is set.

    Returns:
        One CommandResults per IP, plus execution-metrics results when supported.

    Raises:
        ValueError: If any input IP is invalid (raised before any lookup and
            not caught by the per-IP error handling below).
    """
    ips = argToList(args['ip'])
    results: List[CommandResults] = []
    execution_metrics = ExecutionMetrics()
    override_private_lookup = argToBoolean(args.get('override_private_lookup', False))

    for ip in ips:
        raise_if_ip_not_valid(ip)
        if disable_private_ip_lookup and ipaddress.ip_address(ip).is_private and not override_private_lookup:
            # NOTE(review): a skipped private IP is counted as a success
            # metric — confirm this is intended.
            results.append(build_skipped_enrichment_ip_output(client, ip))
            execution_metrics.success += 1
            continue
        try:
            raw_response = client.ip(ip, relationships)
            # Quota errors are reported both as a metric and a NONE-score result.
            if raw_response.get('error', {}).get('code') == "QuotaExceededError":
                execution_metrics.quota_error += 1
                results.append(build_quota_exceeded_ip_output(client, ip))
                continue
            if raw_response.get('error', {}).get('code') == 'NotFoundError':
                results.append(build_unknown_ip_output(client, ip))
                continue
        except Exception as exc:
            # If anything happens, just keep going
            demisto.debug(f'Could not process IP: "{ip}"\n {str(exc)}')
            execution_metrics.general_error += 1
            results.append(build_error_ip_output(client, ip))
            continue
        # NOTE(review): unlike file_command, the output is built OUTSIDE the
        # try block — a failure while building aborts the whole command.
        execution_metrics.success += 1
        results.append(
            build_ip_output(client, score_calculator, ip, raw_response, argToBoolean(args.get('extended_data', False))))
    if execution_metrics.is_supported():
        _metric_results = execution_metrics.metrics
        metric_results = cast(CommandResults, _metric_results)
        results.append(metric_results)
    return results
+
+
def file_command(client: Client, score_calculator: ScoreCalculator, args: dict, relationships: str) -> List[CommandResults]:
    """Enrich one or more file hashes (the `file` reputation command).

    1 API Call

    Args:
        client: API client.
        score_calculator: Calculator used to derive the DBot score.
        args: Command arguments; `file` (comma-separated hashes) plus optional
            `extended_data` boolean.
        relationships: Comma-separated relationship names to fetch.

    Returns:
        One CommandResults per hash, plus execution-metrics results when supported.

    Raises:
        ValueError: If any hash is not SHA-256, SHA-1 or MD5 (raised before
            any lookup and not caught by the per-file error handling below).
    """
    files = argToList(args['file'])
    extended_data = argToBoolean(args.get('extended_data', False))
    results: List[CommandResults] = []
    execution_metrics = ExecutionMetrics()

    for file in files:
        raise_if_hash_not_valid(file)
        try:
            raw_response = client.file(file, relationships)
            # Quota errors are reported both as a metric and a NONE-score result.
            if raw_response.get('error', {}).get('code') == 'QuotaExceededError':
                execution_metrics.quota_error += 1
                results.append(build_quota_exceeded_file_output(client, file))
                continue
            if raw_response.get('error', {}).get('code') == 'NotFoundError':
                results.append(build_unknown_file_output(client, file))
                continue
            # Output building happens inside the try: a parsing failure is
            # downgraded to a per-file error result instead of aborting.
            results.append(build_file_output(client, score_calculator, file, raw_response, extended_data))
            execution_metrics.success += 1
        except Exception as exc:
            # If anything happens, just keep going
            demisto.debug(f'Could not process file: "{file}"\n {str(exc)}')
            execution_metrics.general_error += 1
            results.append(build_error_file_output(client, file))
            continue
    if execution_metrics.is_supported():
        _metric_results = execution_metrics.metrics
        metric_results = cast(CommandResults, _metric_results)
        results.append(metric_results)
    return results
+
+
def private_file_command(client: Client, args: dict) -> List[CommandResults]:
    """Fetch private-scanning information for one or more file hashes (1 API call per hash)."""
    execution_metrics = ExecutionMetrics()
    outputs: List[CommandResults] = []

    for file_hash in argToList(args['file']):
        raise_if_hash_not_valid(file_hash)
        try:
            raw_response = client.private_file(file_hash)
            error_code = raw_response.get('error', {}).get('code')
            if error_code == 'QuotaExceededError':
                execution_metrics.quota_error += 1
                outputs.append(build_quota_exceeded_file_output(client, file_hash))
                continue
            if error_code == 'NotFoundError':
                outputs.append(build_unknown_file_output(client, file_hash))
                continue
            outputs.append(build_private_file_output(file_hash, raw_response))
            execution_metrics.success += 1
        except Exception as exc:
            # Keep processing the remaining hashes on any failure.
            demisto.debug(f'Could not process file: "{file_hash}"\n {str(exc)}')
            execution_metrics.general_error += 1
            outputs.append(build_error_file_output(client, file_hash))
    if execution_metrics.is_supported():
        outputs.append(cast(CommandResults, execution_metrics.metrics))
    return outputs
+
+
def url_command(client: Client, score_calculator: ScoreCalculator, args: dict, relationships: str) -> List[CommandResults]:
    """Enrich one or more URLs (1 API call per URL)."""
    extended = argToBoolean(args.get('extended_data', False))
    execution_metrics = ExecutionMetrics()
    outputs: List[CommandResults] = []

    for url in argToList(args['url']):
        try:
            raw_response = client.url(url, relationships)
            error_code = raw_response.get('error', {}).get('code')
            if error_code == 'QuotaExceededError':
                execution_metrics.quota_error += 1
                outputs.append(build_quota_exceeded_url_output(client, url))
                continue
            if error_code == 'NotFoundError':
                outputs.append(build_unknown_url_output(client, url))
                continue
        except Exception as exc:
            # Keep processing the remaining URLs on any failure.
            demisto.debug(f'Could not process URL: "{url}".\n {str(exc)}')
            execution_metrics.general_error += 1
            outputs.append(build_error_url_output(client, url))
            continue
        execution_metrics.success += 1
        outputs.append(build_url_output(client, score_calculator, url, raw_response, extended))
    if execution_metrics.is_supported():
        outputs.append(cast(CommandResults, execution_metrics.metrics))
    return outputs
+
+
def domain_command(client: Client, score_calculator: ScoreCalculator, args: dict, relationships: str) -> List[CommandResults]:
    """Enrich one or more domains (1 API call per domain)."""
    extended = argToBoolean(args.get('extended_data', False))
    execution_metrics = ExecutionMetrics()
    outputs: List[CommandResults] = []

    for domain in argToList(args['domain']):
        try:
            raw_response = client.domain(domain, relationships)
            error_code = raw_response.get('error', {}).get('code')
            if error_code == 'QuotaExceededError':
                execution_metrics.quota_error += 1
                outputs.append(build_quota_exceeded_domain_output(client, domain))
                continue
            if error_code == 'NotFoundError':
                outputs.append(build_unknown_domain_output(client, domain))
                continue
        except Exception as exc:
            # Keep processing the remaining domains on any failure.
            demisto.debug(f'Could not process domain: "{domain}"\n {str(exc)}')
            execution_metrics.general_error += 1
            outputs.append(build_error_domain_output(client, domain))
            continue
        execution_metrics.success += 1
        outputs.append(build_domain_output(client, score_calculator, domain, raw_response, extended))
    if execution_metrics.is_supported():
        outputs.append(cast(CommandResults, execution_metrics.metrics))

    return outputs
+
+
+# endregion
+
+# region Scan commands
def file_rescan_command(client: Client, args: dict) -> CommandResults:
    """Resubmit an already-known file hash for analysis (1 API call)."""
    file_hash = args['file']
    raise_if_hash_not_valid(file_hash)
    raw_response = client.file_rescan(file_hash)
    submission = raw_response['data']
    submission['hash'] = file_hash
    readable = tableToMarkdown(
        f'File "{file_hash}" resubmitted.',
        submission,
        removeNull=True,
        headerTransform=underscoreToCamelCase
    )
    outputs = {
        f'{INTEGRATION_ENTRY_CONTEXT}.Submission(val.id && val.id === obj.id)': submission,
        'vtScanID': submission.get('id')  # BC preservation
    }
    return CommandResults(
        readable_output=readable,
        outputs=outputs,
        raw_response=raw_response
    )
+
+
def get_working_id(id_: str, entry_id: str) -> str:
    """Validate that an analysis ID returned by file-scan is usable downstream.

    The API sometimes returns a bare number instead of a proper analysis ID
    (a known Google Threat Intelligence API bug). Such IDs cannot be used by
    follow-up commands, so this fails loudly rather than propagating them.

    Args:
        id_: ID given from the API.
        entry_id: The entry ID connected to the file.

    Returns:
        The same ID, once confirmed usable.

    Raises:
        DemistoException: If the ID is numeric-only.
    """
    looks_numeric = isinstance(id_, int) or (isinstance(id_, str) and id_.isnumeric())
    if looks_numeric:
        demisto.debug(f'Got an integer id from file-scan. {id_=}, {entry_id=}\n')
        raise DemistoException(
            f'Got an int {id_=} as analysis report. This is a bug in Google Threat Intelligence API.\n'
            f'While Google Threat Intelligence team is fixing the problem, try to resend the file.'
        )
    return id_
+
+
def file_scan(client: Client, args: dict) -> List[CommandResults]:
    """Submit files for public scanning (1 API call). Thin wrapper over upload_file."""
    return upload_file(client, args, private=False)
+
+
def private_file_scan(client: Client, args: dict) -> List[CommandResults]:
    """Submit files for private scanning (1 API call). Thin wrapper over upload_file."""
    return upload_file(client, args, private=True)
+
+
def upload_file(client: Client, args: dict, private: bool = False) -> List[CommandResults]:
    """Upload one or more War Room files for scanning (1 API call per file).

    Args:
        client: API client used to submit the files.
        args: Command arguments; must contain 'entryID' (one or a list) and
            may contain 'uploadURL' (only valid with a single entry ID).
        private: When True, submit through the private-scanning endpoint.

    Returns:
        One CommandResults per successfully submitted file. Per-file failures
        are reported as error war-room entries instead of raising, so the
        remaining uploads still go through.

    Raises:
        DemistoException: If multiple entry IDs are combined with an upload URL.
    """
    entry_ids = argToList(args['entryID'])
    upload_url = args.get('uploadURL')
    if len(entry_ids) > 1 and upload_url:
        raise DemistoException('You can supply only one entry ID with an upload URL.')
    results = []
    for entry_id in entry_ids:
        try:
            file_obj = demisto.getFilePath(entry_id)
            file_path = file_obj['path']
            if private:
                raw_response = client.private_file_scan(file_path)
            else:
                raw_response = client.file_scan(file_path, upload_url)
            data = raw_response.get('data', {})
            # add current file as identifiers
            data.update(get_file_context(entry_id))
            id_ = data.get('id')
            demisto.debug(f'Result from vt-scan-file {entry_id=} {id_=} {data.get("type")=}')
            # Reject numeric-only IDs (known API bug) before exposing them in context.
            id_ = get_working_id(id_, entry_id)
            data['id'] = id_
            context = {
                f'{INTEGRATION_ENTRY_CONTEXT}.Submission(val.id && val.id === obj.id)': data,
                'vtScanID': id_  # BC preservation
            }
            results.append(CommandResults(
                readable_output=tableToMarkdown(
                    f'The file has been submitted "{file_obj["name"]}"',
                    data,
                    headers=['id', 'EntryID', 'MD5', 'SHA1', 'SHA256'],
                    removeNull=True
                ),
                outputs=context,
                raw_response=raw_response
            ))
        except Exception as exc:
            # A failing file becomes an error entry; the loop continues.
            err = f'Could not process {entry_id=}.\n{str(exc)}'
            demisto.debug(err)
            demisto.results({
                'Type': entryTypes['error'],
                'ContentsFormat': formats['text'],
                'Contents': err
            })
    return results
+
+
def file_scan_and_get_analysis(client: Client, args: dict):
    """Calls to file-scan and gti-analysis-get.

    Polling command: the first run (no 'id' arg) submits the file and
    schedules this same command again carrying the returned scan ID; later
    runs poll the analysis and keep re-scheduling until its status is
    'completed'. The scheduled args must round-trip exactly, as they become
    this function's own `args` on the next invocation.
    """
    interval = int(args.get('interval_in_seconds', 60))
    extended = argToBoolean(args.get('extended_data', False))

    if not args.get('id'):
        command_results = file_scan(client, args)
        command_result = command_results[0]
        outputs = command_result.outputs
        if not isinstance(outputs, dict):
            raise DemistoException('outputs is expected to be a dict')
        # First round: attach a scheduled re-run that carries the new scan ID.
        scheduled_command = ScheduledCommand(
            command=f'{COMMAND_PREFIX}-file-scan-and-analysis-get',
            next_run_in_seconds=interval,
            args={
                'entryID': args.get('entryID'),
                'id': outputs.get('vtScanID'),
                'interval_in_seconds': interval,
                'extended_data': extended,
            },
            timeout_in_seconds=6000,
        )
        command_result.scheduled_command = scheduled_command
        return command_result

    command_result = get_analysis_command(client, args)
    outputs = command_result.outputs
    if not isinstance(outputs, dict):
        raise DemistoException('outputs is expected to be a dict')
    if outputs.get('data', {}).get('attributes', {}).get('status') == 'completed':
        return command_result
    # Not done yet: schedule another poll with the same analysis ID.
    scheduled_command = ScheduledCommand(
        command=f'{COMMAND_PREFIX}-file-scan-and-analysis-get',
        next_run_in_seconds=interval,
        args={
            'entryID': args.get('entryID'),
            'id': outputs.get('id'),
            'interval_in_seconds': interval,
            'extended_data': extended,
        },
        timeout_in_seconds=6000,
    )
    return CommandResults(scheduled_command=scheduled_command)
+
+
def private_file_scan_and_get_analysis(client: Client, args: dict):
    """Calls to gti-privatescanning-file-scan and gti-privatescanning-analysis-get.

    Polling command: the first run (no 'id' arg) submits the file privately
    and schedules this same command again carrying the returned scan ID;
    later runs poll the private analysis and keep re-scheduling until its
    status is 'completed'. The scheduled args must round-trip exactly, as
    they become this function's own `args` on the next invocation.
    """
    interval = int(args.get('interval_in_seconds', 60))
    extended = argToBoolean(args.get('extended_data', False))

    if not args.get('id'):
        command_results = private_file_scan(client, args)
        command_result = command_results[0]
        outputs = command_result.outputs
        if not isinstance(outputs, dict):
            raise DemistoException('outputs is expected to be a dict')
        # First round: attach a scheduled re-run that carries the new scan ID.
        scheduled_command = ScheduledCommand(
            command=f'{COMMAND_PREFIX}-private-file-scan-and-analysis-get',
            next_run_in_seconds=interval,
            args={
                'entryID': args.get('entryID'),
                'id': outputs.get('vtScanID'),
                'interval_in_seconds': interval,
                'extended_data': extended,
            },
            timeout_in_seconds=6000,
        )
        command_result.scheduled_command = scheduled_command
        return command_result

    command_result = private_get_analysis_command(client, args)
    outputs = command_result.outputs
    if not isinstance(outputs, dict):
        raise DemistoException('outputs is expected to be a dict')
    if outputs.get('data', {}).get('attributes', {}).get('status') == 'completed':
        return command_result
    # Not done yet: schedule another poll with the same analysis ID.
    scheduled_command = ScheduledCommand(
        command=f'{COMMAND_PREFIX}-private-file-scan-and-analysis-get',
        next_run_in_seconds=interval,
        args={
            'entryID': args.get('entryID'),
            'id': outputs.get('id'),
            'interval_in_seconds': interval,
            'extended_data': extended,
        },
        timeout_in_seconds=6000,
    )
    return CommandResults(scheduled_command=scheduled_command)
+
+
def url_scan_and_get_analysis(client: Client, args: dict):
    """Calls to url-scan and gti-analysis-get.

    Polling command: the first run (no 'id' arg) submits the URL and
    schedules this same command again carrying the returned scan ID; later
    runs poll the analysis and keep re-scheduling until its status is
    'completed'. The scheduled args must round-trip exactly, as they become
    this function's own `args` on the next invocation.
    """
    interval = int(args.get('interval_in_seconds', 60))
    extended = argToBoolean(args.get('extended_data', False))

    if not args.get('id'):
        command_result = scan_url_command(client, args)
        outputs = command_result.outputs
        if not isinstance(outputs, dict):
            raise DemistoException('outputs is expected to be a dict')
        # First round: attach a scheduled re-run that carries the new scan ID.
        scheduled_command = ScheduledCommand(
            command=f'{COMMAND_PREFIX}-url-scan-and-analysis-get',
            next_run_in_seconds=interval,
            args={
                'url': args.get('url'),
                'id': outputs.get('vtScanID'),
                'interval_in_seconds': interval,
                'extended_data': extended,
            },
            timeout_in_seconds=6000,
        )
        command_result.scheduled_command = scheduled_command
        return command_result

    command_result = get_analysis_command(client, args)
    outputs = command_result.outputs
    if not isinstance(outputs, dict):
        raise DemistoException('outputs is expected to be a dict')
    if outputs.get('data', {}).get('attributes', {}).get('status') == 'completed':
        return command_result
    # Not done yet: schedule another poll with the same analysis ID.
    scheduled_command = ScheduledCommand(
        command=f'{COMMAND_PREFIX}-url-scan-and-analysis-get',
        next_run_in_seconds=interval,
        args={
            'url': args.get('url'),
            'id': outputs.get('id'),
            'interval_in_seconds': interval,
            'extended_data': extended,
        },
        timeout_in_seconds=6000,
    )
    return CommandResults(scheduled_command=scheduled_command)
+
+
def get_upload_url(client: Client) -> CommandResults:
    """Acquire a fresh upload URL for large-file submissions (1 API call)."""
    raw_response = client.get_upload_url()
    upload_url = raw_response['data']
    readable = tableToMarkdown(
        'New upload url acquired!',
        {'Upload url': upload_url}
    )
    outputs = {
        f'{INTEGRATION_ENTRY_CONTEXT}.FileUploadURL': upload_url,
        'vtUploadURL': upload_url  # BC preservation
    }
    return CommandResults(
        readable_output=readable,
        outputs=outputs,
        raw_response=raw_response
    )
+
+
def scan_url_command(client: Client, args: dict) -> CommandResults:
    """Submit a URL for scanning (1 API call).

    Invalid URLs (typically an unknown TLD) are reported in the readable
    output instead of raising; any other API error propagates.

    Args:
        client: API client.
        args: Must contain 'url'.

    Returns:
        CommandResults with the submission data (or the API error message).
    """
    url = args['url']
    raw_response: Dict[str, Any] = {}
    data: Dict[str, Any] = {}
    context: Dict[str, Any] = {}
    headers = ['id', 'url']

    try:
        raw_response = client.url_scan(url)
        data = raw_response['data']

        data['url'] = url
        context = {
            f'{INTEGRATION_ENTRY_CONTEXT}.Submission(val.id && val.id === obj.id)': data,
            'vtScanID': data.get('id')  # BC preservation
        }
    except DemistoException as e:
        # Bug fix: e.res can be None (e.g. connection-level failures); the
        # previous code raised AttributeError here and masked the real error.
        if e.res is None:
            raise
        error = e.res.json().get('error') or {}

        # Invalid url, probably due to an unknown TLD
        if error.get('code') == 'InvalidArgumentError':
            data = {'url': url, 'id': '', 'error': error.get('message')}
            headers.append('error')
        else:
            raise e

    return CommandResults(
        readable_output=tableToMarkdown(
            'New url submission:',
            data,
            headers=headers
        ),
        outputs=context,
        raw_response=raw_response
    )
+
+
+# endregion
+
+# region Comments commands
def get_comments_command(client: Client, args: dict) -> CommandResults:
    """Fetch comments for a file, URL, domain or IP (1 API call).

    BC Break - No NotBefore argument; a 'limit' argument was added instead.
    Comments newer than the optional 'before' date are filtered out locally.
    """
    limit = arg_to_number_must_int(
        args.get('limit'),
        arg_name='limit',
        required=True
    )
    resource = args['resource']
    if before := args.get('before'):
        before = parse(before)
        assert before is not None, f'Could not parse the before date "{before}"'
        # Comment dates are compared as naive datetimes below; drop tzinfo.
        before = before.replace(tzinfo=None)

    resource_type = args.get('resource_type')
    if not resource_type:
        # No explicit type given: a valid hash means a file, otherwise assume URL.
        try:
            raise_if_hash_not_valid(resource)
            resource_type = 'file'
        except ValueError:
            resource_type = 'url'
    resource_type = resource_type.lower()
    # Route to the comments endpoint matching the resource type.
    if resource_type == 'ip':
        raise_if_ip_not_valid(resource)
        raw_response = client.get_ip_comments(resource, limit)
    elif resource_type == 'url':
        raw_response = client.get_url_comments(resource, limit)
    elif resource_type in ('hash', 'file'):
        raise_if_hash_not_valid(resource)
        raw_response = client.get_hash_comments(resource, limit)
    elif resource_type == 'domain':
        raw_response = client.get_domain_comments(resource, limit)
    else:
        raise DemistoException(f'Could not find resource type of "{resource_type}"')

    data = raw_response.get('data', {})
    context = {
        'indicator': resource,
        'comments': data
    }
    comments = []
    for comment in data:
        attributes = comment.get('attributes', {})
        votes = attributes.get('votes', {})

        if date := parse(str(attributes.get('date'))):
            date = date.replace(tzinfo=None)

        # Skip comments posted after the requested 'before' cutoff.
        if date and before and date > before:
            continue
        comments.append({
            'Date': epoch_to_timestamp(attributes.get('date')),
            'Text': attributes.get('text'),
            'Positive Votes': votes.get('positive'),
            'Abuse Votes': votes.get('abuse'),
            'Negative Votes': votes.get('negative')
        })

    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Comments',
        'id',
        readable_output=tableToMarkdown(
            f'Google Threat Intelligence comments of {resource_type}: "{resource}"',
            comments,
            headers=['Date', 'Text', 'Positive Votes', 'Abuse Votes', 'Negative Votes']
        ),
        outputs=context,
        raw_response=raw_response
    )
+
+
def add_comments_command(client: Client, args: dict) -> CommandResults:
    """Add a comment to a file, URL, domain or IP (1 API call)."""
    resource = args['resource']
    comment = args['comment']
    resource_type = args.get('resource_type')
    if not resource_type:
        # No explicit type given: a valid hash means a file, otherwise assume URL.
        try:
            raise_if_hash_not_valid(resource)
            resource_type = 'file'
        except ValueError:
            resource_type = 'url'
    resource_type = resource_type.lower()
    if resource_type == 'ip':
        raise_if_ip_not_valid(resource)
    elif resource_type == 'file':
        raise_if_hash_not_valid(resource)
    add_comment = {
        'ip': client.add_comment_to_ip,
        'url': client.add_comment_to_url,
        'domain': client.add_comment_to_domain,
        'file': client.add_comment_to_file,
    }.get(resource_type)
    if add_comment is None:
        raise DemistoException(f'Could not find resource type of "{resource_type}"')
    raw_response = add_comment(resource, comment)
    data = raw_response['data']
    attributes = data.get('attributes', {})
    votes = attributes.get('votes', {})
    comment_table = {
        'Date': epoch_to_timestamp(attributes.get('date')),
        'Text': attributes.get('text'),
        'Positive Votes': votes.get('positive'),
        'Abuse Votes': votes.get('abuse'),
        'Negative Votes': votes.get('negative')
    }
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Comments.comments',
        'id',
        readable_output=tableToMarkdown(
            'Comment has been added!',
            comment_table,
            headers=['Date', 'Text', 'Positive Votes', 'Abuse Votes', 'Negative Votes']
        ),
        outputs=data,
        raw_response=raw_response
    )
+
+
def get_comments_by_id_command(client: Client, args: dict) -> CommandResults:
    """Fetch a single comment by its ID (1 API call)."""
    comment_id = args['id']
    raw_response = client.get_comment_by_id(comment_id)
    data = raw_response['data']
    attributes = data.get('attributes', {})
    votes = attributes.get('votes', {})
    comment_table = {
        'Date': epoch_to_timestamp(attributes.get('date')),
        'Text': attributes.get('text'),
        'Positive Votes': votes.get('positive'),
        'Abuse Votes': votes.get('abuse'),
        'Negative Votes': votes.get('negative')
    }
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Comments.comments',
        'id',
        readable_output=tableToMarkdown(
            f'Comment of ID {comment_id}',
            comment_table,
            headers=['Date', 'Text', 'Positive Votes', 'Abuse Votes', 'Negative Votes']
        ),
        outputs=data,
        raw_response=raw_response
    )
+
+
+# endregion
+
def file_sandbox_report_command(client: Client, args: dict) -> List[CommandResults]:
    """Fetch sandbox behaviour reports for a file hash (1 API call).

    Args:
        client: API client.
        args: Must contain 'file' (a valid hash) and 'limit'.

    Returns:
        A list with the report (or unknown/quota output), plus execution
        metrics when the platform supports them.
    """
    execution_metrics = ExecutionMetrics()
    results: List[CommandResults] = []
    file_hash = args['file']
    limit = arg_to_number(
        args['limit'],
        'limit',
        required=True
    )
    assert isinstance(limit, int)  # mypy fix
    raise_if_hash_not_valid(file_hash)
    raw_response = client.file_sandbox_report(file_hash, limit)
    if 'data' in raw_response:
        data = raw_response['data']
        # Bug fix: a successful response was previously counted as a quota
        # error, skewing the API execution metrics.
        execution_metrics.success += 1
        results.append(
            CommandResults(
                f'{INTEGRATION_ENTRY_CONTEXT}.SandboxReport',
                'id',
                readable_output=tableToMarkdown(
                    f'Sandbox Reports for file hash: {file_hash}',
                    [
                        {
                            'id': item['id'],
                            **item['attributes'],
                            'link': item['links']['self']
                        } for item in data
                    ],
                    headers=['analysis_date', 'last_modification_date', 'sandbox_name', 'link'],
                    removeNull=True,
                    headerTransform=underscoreToCamelCase
                ),
                outputs=data,
                raw_response=raw_response
            )
        )
    elif raw_response.get('error', {}).get('code') == 'NotFoundError':
        results.append(build_unknown_file_output(client, file_hash))
    else:
        # NOTE(review): any other error shape is treated as quota exhaustion —
        # confirm against the API's error catalogue.
        execution_metrics.quota_error += 1
        results.append(build_quota_exceeded_file_output(client, file_hash))

    if execution_metrics.is_supported():
        results.append(cast(CommandResults, execution_metrics.metrics))

    return results
+
+
def passive_dns_data(client: Client, args: dict) -> CommandResults:
    """Fetch passive DNS records for an IP or domain (1 API call)."""
    # `indicator` holds the resolved {'value', 'type'} pair (renamed from the
    # builtin-shadowing `id`).
    if 'ip' in args:
        indicator = {'value': args['ip'], 'type': 'ip'}
        raise_if_ip_not_valid(indicator['value'])
    elif 'domain' in args:
        indicator = {'value': args['domain'], 'type': 'domain'}
    elif 'id' in args:
        value = args['id']
        indicator = {'value': value, 'type': 'ip' if is_ip_valid(value) else 'domain'}
    else:
        return CommandResults(readable_output='No IP address or domain was given.')

    limit = arg_to_number_must_int(
        args['limit'],
        arg_name='limit',
        required=True
    )

    try:
        raw_response = client.passive_dns_data(indicator, limit)
    except Exception:
        return CommandResults(
            readable_output=f'{"IP" if indicator["type"] == "ip" else "Domain"} {indicator["value"]} was not found.')

    records = raw_response['data']
    readable = tableToMarkdown(
        f'Passive DNS data for {"IP" if indicator["type"] == "ip" else "domain"} {indicator["value"]}',
        [{'id': item['id'], **item['attributes']} for item in records],
        headers=['id', 'date', 'host_name', 'ip_address', 'resolver'],
        removeNull=True,
        headerTransform=underscoreToCamelCase
    )
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.PassiveDNS',
        'id',
        readable_output=readable,
        outputs=records,
        raw_response=raw_response
    )
+
+
def search_command(client: Client, args: dict) -> CommandResults:
    """Run an intelligence search query (1 API call)."""
    query = args['query']
    limit = arg_to_number_must_int(args.get('limit'), 'limit', required=True)
    raw_response = client.search(query, limit)
    data = raw_response.get('data', [])
    if not argToBoolean(args.get('extended_data', False)):
        # Trim heavy fields unless the caller asked for the full payload.
        data = decrease_data_size(data)
    readable = tableToMarkdown(
        f'Search result of query {query}',
        [item.get('attributes') for item in data],
        removeNull=True,
        headerTransform=underscoreToCamelCase
    )
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.SearchResults',
        'id',
        readable_output=readable,
        outputs=data,
        raw_response=raw_response
    )
+
+
def get_analysis_command(client: Client, args: dict) -> CommandResults:
    """Fetch an analysis report by its ID (1 API call)."""
    analysis_id = args['id']
    raw_response = client.get_analysis(analysis_id)
    data = raw_response.get('data', {})
    if not argToBoolean(args.get('extended_data', False)):
        # Trim heavy fields unless the caller asked for the full payload.
        data = decrease_data_size(data)
    readable = tableToMarkdown(
        'Analysis results:',
        {**data.get('attributes', {}), 'id': analysis_id},
        headers=['id', 'stats', 'status'],
        headerTransform=underscoreToCamelCase
    )
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Analysis',
        'id',
        readable_output=readable,
        outputs={**raw_response, 'id': analysis_id},
        raw_response=raw_response
    )
+
+
def private_get_analysis_command(client: Client, args: dict) -> CommandResults:
    """Fetch a private analysis; once completed, enrich with the file verdict (1-2 API calls)."""
    analysis_id = args['id']
    raw_response = client.get_private_analysis(analysis_id)
    data = raw_response.get('data', {})
    attributes = data.get('attributes', {})
    stats = dict.fromkeys(('threat_severity_level', 'popular_threat_category', 'threat_verdict'), '')
    if attributes.get('status', '') == 'completed':
        # Verdict details live on the analysed file object, which takes a second call.
        file_response = client.get_private_file_from_analysis(analysis_id)
        file_attributes = file_response.get('data', {}).get('attributes', {})
        threat_severity = file_attributes.get('threat_severity', {})
        severity_level = threat_severity.get('threat_severity_level', '')
        stats['threat_severity_level'] = SEVERITY_LEVELS.get(severity_level, severity_level)
        stats['popular_threat_category'] = (
            threat_severity.get('threat_severity_data', {}).get('popular_threat_category', ''))
        verdict = file_attributes.get('threat_verdict', '')
        stats['threat_verdict'] = VERDICTS.get(verdict, verdict)
    attributes.update(stats)
    readable = tableToMarkdown(
        'Analysis results:',
        {**attributes, 'id': analysis_id},
        headers=['id', 'threat_severity_level', 'popular_threat_category', 'threat_verdict', 'status'],
        removeNull=True,
        headerTransform=string_to_table_header
    )
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Analysis',
        'id',
        readable_output=readable,
        outputs={**raw_response, 'id': analysis_id},
        raw_response=raw_response
    )
+
+
def check_module(client: Client) -> str:
    """Integration health check (1 API call).

    Any authenticated request works; fetching one comment on a well-known IP
    is the cheapest.
    """
    client.get_ip_comments('8.8.8.8', 1)
    return 'ok'
+
+
def delete_comment(client: Client, args: dict) -> CommandResults:
    """Delete a comment by its ID (1 API call)."""
    comment_id = args['id']
    client.delete_comment(comment_id)
    return CommandResults(readable_output=f'Comment {comment_id} has been deleted!')
+
+
def file_sigma_analysis_command(client: Client, args: dict) -> CommandResults:
    """Retrieve the last Sigma analysis for a file: summary stats or full rule matches."""
    file_hash = args['file']
    only_stats = argToBoolean(args.get('only_stats', False))
    raw_response = client.file(file_hash)
    data = raw_response['data']
    attributes = data['attributes']

    if 'sigma_analysis_stats' not in attributes or 'sigma_analysis_results' not in attributes:
        return CommandResults(readable_output=f'No Sigma analyses for file {file_hash} were found.')

    if only_stats:
        stats = attributes['sigma_analysis_stats']
        readable = tableToMarkdown(
            f'Summary of the last Sigma analysis for file {file_hash}:',
            {**stats, '**TOTAL**': sum(stats.values())},
            headers=['critical', 'high', 'medium', 'low', '**TOTAL**'],
            removeNull=True,
            headerTransform=underscoreToCamelCase
        )
        raw = stats
    else:
        matches = attributes['sigma_analysis_results']
        readable = tableToMarkdown(
            f'Matched rules for file {file_hash} in the last Sigma analysis:',
            matches,
            headers=[
                'rule_level', 'rule_description', 'rule_source',
                'rule_title', 'rule_id', 'rule_author', 'match_context'
            ],
            removeNull=True,
            headerTransform=underscoreToCamelCase
        )
        raw = matches
    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.SigmaAnalysis',
        'id',
        readable_output=readable,
        outputs=data,
        raw_response=raw,
    )
+
+
def get_assessment_command(client: Client, score_calculator: ScoreCalculator, args: dict) -> CommandResults:
    """Get Google Threat Intelligence assessment for a given resource (1 API call)."""
    resource = args['resource']
    resource_type = args.get('resource_type', 'file').lower()

    # Per-type handlers: (validator-or-None, fetch, quota-exceeded builder,
    # unknown builder, indicator builder).
    handlers = {
        'file': (raise_if_hash_not_valid, client.file,
                 build_quota_exceeded_file_output, build_unknown_file_output, _get_file_indicator),
        'hash': (raise_if_hash_not_valid, client.file,
                 build_quota_exceeded_file_output, build_unknown_file_output, _get_file_indicator),
        'ip': (raise_if_ip_not_valid, client.ip,
               build_quota_exceeded_ip_output, build_unknown_ip_output, _get_ip_indicator),
        'url': (None, client.url,
                build_quota_exceeded_url_output, build_unknown_url_output, _get_url_indicator),
        'domain': (None, client.domain,
                   build_quota_exceeded_domain_output, build_unknown_domain_output, _get_domain_indicator),
    }
    if resource_type not in handlers:
        raise DemistoException(f'Could not find resource type of "{resource_type}"')
    validate, fetch, build_quota_exceeded, build_unknown, build_indicator = handlers[resource_type]

    if validate:
        validate(resource)
    raw_response = fetch(resource)
    error_code = raw_response.get('error', {}).get('code')
    if error_code == 'QuotaExceededError':
        return build_quota_exceeded(client, resource)
    if error_code == 'NotFoundError':
        return build_unknown(client, resource)
    indicator = build_indicator(client, score_calculator, resource, raw_response)

    data = raw_response.get('data', {})
    data.pop('relationships', None)
    gti_assessment = data.get('attributes', {}).get('gti_assessment', {})

    # Keep only the assessment part of the attributes to reduce context size.
    if data:
        if gti_assessment:
            data['attributes'] = {'gti_assessment': gti_assessment}
        else:
            data.pop('attributes', None)

    return CommandResults(
        f'{INTEGRATION_ENTRY_CONTEXT}.Assessment',
        'id',
        indicator=indicator,
        readable_output=tableToMarkdown(
            f'Google Threat Intelligence assessment of {resource_type}: "{resource}"',
            {
                'threat_score': gti_assessment.get('threat_score', {}).get('value'),
                'severity': gti_assessment.get('severity', {}).get('value'),
                'verdict': gti_assessment.get('verdict', {}).get('value'),
            },
            headers=[
                'threat_score',
                'severity',
                'verdict',
            ],
            headerTransform=string_to_table_header,
        ),
        outputs=data,
        raw_response=raw_response,
    )
+
+
def arg_to_relationships(arg):
    """Turn a relationships argument into the comma-separated form the API expects."""
    joined = ','.join(argToList(arg))
    # Strip the '* ' bullet prefix and replace remaining spaces with underscores.
    return joined.replace('* ', '').replace(' ', '_')
+
+
def main(params: dict, args: dict, command: str):
    """Route the invoked command to its implementation and return the results.

    Args:
        params: Integration instance parameters.
        args: Arguments of the current command.
        command: Name of the command being executed. Must match one of the
            exact strings below (several are prefixed with COMMAND_PREFIX).

    Raises:
        NotImplementedError: If the command name is not recognized.
    """
    results: Union[CommandResults, str, List[CommandResults]]
    handle_proxy()
    client = Client(params)
    score_calculator = ScoreCalculator(params)

    # Relationship params arrive as multi-select lists; normalize them once here.
    ip_relationships = arg_to_relationships(params.get('ip_relationships'))
    url_relationships = arg_to_relationships(params.get('url_relationships'))
    domain_relationships = arg_to_relationships(params.get('domain_relationships'))
    file_relationships = arg_to_relationships(params.get('file_relationships'))

    disable_private_ip_lookup = argToBoolean(params.get('disable_private_ip_lookup', False))

    demisto.debug(f'Command called {command}')
    if command == 'test-module':
        results = check_module(client)
    elif command == 'file':
        results = file_command(client, score_calculator, args, file_relationships)
    elif command == 'ip':
        results = ip_command(client, score_calculator, args, ip_relationships, disable_private_ip_lookup)
    elif command == 'url':
        results = url_command(client, score_calculator, args, url_relationships)
    elif command == 'domain':
        results = domain_command(client, score_calculator, args, domain_relationships)
    elif command == f'{COMMAND_PREFIX}-file-sandbox-report':
        results = file_sandbox_report_command(client, args)
    elif command == f'{COMMAND_PREFIX}-passive-dns-data':
        results = passive_dns_data(client, args)
    elif command == f'{COMMAND_PREFIX}-comments-get':
        results = get_comments_command(client, args)
    elif command == f'{COMMAND_PREFIX}-comments-add':
        results = add_comments_command(client, args)
    elif command == f'{COMMAND_PREFIX}-comments-get-by-id':
        results = get_comments_by_id_command(client, args)
    elif command == f'{COMMAND_PREFIX}-comments-delete':
        results = delete_comment(client, args)
    elif command == 'url-scan':
        results = scan_url_command(client, args)
    elif command == 'file-scan':
        results = file_scan(client, args)
    elif command == 'file-rescan':
        results = file_rescan_command(client, args)
    elif command == f'{COMMAND_PREFIX}-file-scan-upload-url':
        results = get_upload_url(client)
    elif command == f'{COMMAND_PREFIX}-search':
        results = search_command(client, args)
    elif command == f'{COMMAND_PREFIX}-analysis-get':
        results = get_analysis_command(client, args)
    elif command == f'{COMMAND_PREFIX}-file-sigma-analysis':
        results = file_sigma_analysis_command(client, args)
    elif command == f'{COMMAND_PREFIX}-privatescanning-file':
        results = private_file_command(client, args)
    elif command == f'{COMMAND_PREFIX}-privatescanning-file-scan':
        results = private_file_scan(client, args)
    elif command == f'{COMMAND_PREFIX}-privatescanning-analysis-get':
        results = private_get_analysis_command(client, args)
    elif command == f'{COMMAND_PREFIX}-assessment-get':
        results = get_assessment_command(client, score_calculator, args)
    elif command == f'{COMMAND_PREFIX}-file-scan-and-analysis-get':
        results = file_scan_and_get_analysis(client, args)
    elif command == f'{COMMAND_PREFIX}-private-file-scan-and-analysis-get':
        results = private_file_scan_and_get_analysis(client, args)
    elif command == f'{COMMAND_PREFIX}-url-scan-and-analysis-get':
        results = url_scan_and_get_analysis(client, args)
    else:
        raise NotImplementedError(f'Command {command} not implemented')
    return_results(results)
+
+
# XSOAR executes integration code with __name__ set to 'builtins' (py3) or
# '__builtin__' (py2); '__main__' covers direct local execution.
if __name__ in ('builtins', '__builtin__', '__main__'):
    try:
        main(demisto.params(), demisto.args(), demisto.command())
    except Exception as exception:
        # Surface any unhandled error to the war room instead of a raw traceback.
        return_error(exception)
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.yml b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.yml
new file mode 100644
index 000000000000..3ac15d0643cb
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence.yml
@@ -0,0 +1,1909 @@
+commonfields:
+ id: GoogleThreatIntelligence
+ version: -1
+name: GoogleThreatIntelligence
+display: Google Threat Intelligence
+fromversion: 6.10.0
+category: Data Enrichment & Threat Intelligence
+description: Analyzes suspicious hashes, URLs, domains, and IP addresses.
+configuration:
+- display: API Key (leave empty. Fill in the API key in the password field.)
+ displaypassword: API Key
+ name: credentials
+ type: 9
+ required: true
+ hiddenusername: true
+- additionalinfo: Reliability of the source providing the intelligence data
+ defaultvalue: C - Fairly reliable
+ display: Source Reliability
+ name: feedReliability
+ type: 15
+ options:
+ - A - Completely reliable
+ - B - Usually reliable
+ - C - Fairly reliable
+ - D - Not usually reliable
+ - E - Unreliable
+ - F - Reliability cannot be judged
+ required: false
+- display: GTI Malicious Verdict. Check Google Threat Intelligence verdict to consider the file malicious.
+ name: gti_malicious
+ defaultvalue: 'false'
+ type: 8
+ required: false
+- display: GTI Suspicious Verdict. Check Google Threat Intelligence verdict to consider the file suspicious.
+ name: gti_suspicious
+ defaultvalue: 'false'
+ type: 8
+ required: false
+- display: File Malicious Threshold. Minimum number of positive results from VT scanners to consider the file malicious.
+ name: fileThreshold
+ defaultvalue: '10'
+ type: 0
+ required: false
+- display: File Suspicious Threshold. Minimum number of positive and suspicious results from VT scanners to consider the file suspicious.
+ name: fileSuspiciousThreshold
+ defaultvalue: '5'
+ type: 0
+ required: false
+- display: IP Malicious Threshold. Minimum number of positive results from VT scanners to consider the IP malicious.
+ name: ipThreshold
+ defaultvalue: '10'
+ type: 0
+ required: false
+- display: IP Suspicious Threshold. Minimum number of positive and suspicious results from VT scanners to consider the IP suspicious.
+ name: ipSuspiciousThreshold
+ defaultvalue: '5'
+ type: 0
+ required: false
+- display: Disable reputation lookups for private IP addresses
+ name: disable_private_ip_lookup
+ defaultvalue: 'false'
+ type: 8
+ required: false
+ additionalinfo: To reduce the number of lookups made to the VT API, this option can be selected to gracefully skip enrichment of any IP addresses allocated for private networks.
+- display: 'URL Malicious Threshold. Minimum number of positive results from VT scanners to consider the URL malicious.'
+ name: urlThreshold
+ defaultvalue: '10'
+ type: 0
+ required: false
+- display: 'URL Suspicious Threshold. Minimum number of positive and suspicious results from VT scanners to consider the URL suspicious.'
+ name: urlSuspiciousThreshold
+ defaultvalue: '5'
+ type: 0
+ required: false
+- display: Domain Malicious Threshold. Minimum number of positive results from VT scanners to consider the Domain malicious.
+ name: domainThreshold
+ defaultvalue: '10'
+ type: 0
+ required: false
+- display: Domain Suspicious Threshold. Minimum number of positive and suspicious results from VT scanners to consider the Domain suspicious.
+ name: domainSuspiciousThreshold
+ defaultvalue: '5'
+ type: 0
+ required: false
+- display: 'Preferred Vendors List. CSV list of vendors who are considered more trustworthy.'
+ name: preferredVendors
+ defaultvalue: ''
+ type: 12
+ required: false
+- display: 'Preferred Vendor Threshold. The minimum number of highly trusted vendors required to consider a domain, IP address, URL, or file as malicious. '
+ name: preferredVendorsThreshold
+ defaultvalue: '5'
+ type: 0
+ required: false
+- display: 'Enable score analyzing by Crowdsourced Yara Rules, Sigma, and IDS'
+ name: crowdsourced_yara_rules_enabled
+ type: 8
+ defaultvalue: 'true'
+ required: false
+- display: Crowdsourced Yara Rules Threshold
+ name: yaraRulesThreshold
+ type: 0
+ defaultvalue: '1'
+ required: false
+- display: Sigma and Intrusion Detection Rules Threshold
+ name: SigmaIDSThreshold
+ type: 0
+ defaultvalue: '5'
+ required: false
+- display: 'Domain Popularity Ranking Threshold'
+ name: domain_popularity_ranking
+ type: 0
+ defaultvalue: '10000'
+ required: false
+- display: IP Relationships
+ name: ip_relationships
+ type: 16
+ required: false
+ additionalinfo: Select the list of relationships to retrieve from the API.
+ options:
+ - 'communicating files'
+ - 'downloaded files'
+ - referrer files
+ - 'urls'
+ defaultvalue: 'communicating files,downloaded files,referrer files,urls'
+- additionalinfo: Select the list of relationships to retrieve from the API.
+ display: Domain Relationships
+ name: domain_relationships
+ options:
+ - 'cname records'
+ - 'caa records'
+ - 'communicating files'
+ - 'downloaded files'
+ - immediate parent
+ - 'mx records'
+ - 'ns records'
+ - 'parent'
+ - 'referrer files'
+ - 'siblings'
+ - 'soa records'
+ - subdomains
+ - 'urls'
+ type: 16
+ defaultvalue: 'cname records,caa records,communicating files,downloaded files,immediate parent,mx records,ns records,parent,referrer files,siblings,soa records,subdomains,urls'
+ required: false
+- additionalinfo: Select the list of relationships to retrieve from the API.
+ display: URL Relationships
+ name: url_relationships
+ options:
+ - 'communicating files'
+ - 'contacted domains'
+ - 'contacted ips'
+ - 'downloaded files'
+ - last serving ip address
+ - network location
+ - 'redirecting urls'
+ - 'redirects to'
+ - 'referrer files'
+ - 'referrer urls'
+ type: 16
+ defaultvalue: 'communicating files,contacted domains,contacted ips,downloaded files,last serving ip address,network location,redirecting urls,redirects to,referrer files,referrer urls'
+ required: false
+- display: File Relationships
+ name: file_relationships
+ type: 16
+ required: false
+ additionalinfo: Select the list of relationships to retrieve from the API.
+ options:
+ - 'carbonblack children'
+ - 'carbonblack parents'
+ - 'compressed parents'
+ - contacted domains
+ - contacted ips
+ - contacted urls
+ - dropped files
+ - 'email attachments'
+ - 'email parents'
+ - 'embedded domains'
+ - 'embedded ips'
+ - 'embedded urls'
+ - execution parents
+ - 'itw domains'
+ - 'itw ips'
+ - 'overlay children'
+ - 'overlay parents'
+ - 'pcap children'
+ - 'pcap parents'
+ - pe resource children
+ - pe resource parents
+ - 'similar files'
+ defaultvalue: 'carbonblack children,carbonblack parents,compressed parents,contacted domains,contacted ips,contacted urls,dropped files,email attachments,email parents,embedded domains,embedded ips,embedded urls,execution parents,itw domains,itw ips,overlay children,overlay parents,pcap children,pcap parents,pe resource children,pe resource parents,similar files'
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python3
+ commands:
+ - name: file
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: Hash of the file to query. Supports MD5, SHA1, and SHA256.
+ isArray: true
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: File.MD5
+ description: Bad MD5 hash.
+ - contextPath: File.SHA1
+ description: Bad SHA1 hash.
+ - contextPath: File.SHA256
+ description: Bad SHA256 hash.
+ - contextPath: File.Relationships.EntityA
+ description: The source of the relationship.
+ type: string
+ - contextPath: File.Relationships.EntityB
+ description: The destination of the relationship.
+ type: string
+ - contextPath: File.Relationships.Relationship
+ description: The name of the relationship.
+ type: string
+ - contextPath: File.Relationships.EntityAType
+ description: The type of the source of the relationship.
+ type: string
+ - contextPath: File.Relationships.EntityBType
+ description: The type of the destination of the relationship.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ - contextPath: File.Malicious.Detections
+ description: For malicious files, the total number of detections.
+ - contextPath: File.Malicious.TotalEngines
+ description: For malicious files, the total number of engines that checked the file hash.
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Reliability
+ description: Reliability of the source providing the intelligence data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_description
+ description: Description of the type of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.tlsh
+ description: The locality-sensitive hashing.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.MIMEType
+ description: MIME type of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.names
+ description: Names of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.javascript_info.tags
+ description: Tags of the JavaScript.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.FileType
+ description: The file type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.WordCount
+ description: Total number of words in the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.LineCount
+ description: Total number of lines in the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.info
+ description: Number of IDS that marked the file as "info".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.high
+ description: Number of IDS that marked the file as "high".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.medium
+ description: Number of IDS that marked the file as "medium".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.low
+ description: Number of IDS that marked the file as "low".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.critical
+ description: Number of Sigma analysis that marked the file as "critical".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.high
+ description: Number of Sigma analysis that marked the file as "high".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.medium
+ description: Number of Sigma analysis that marked the file as "medium".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.low
+ description: Number of Sigma analysis that marked the file as "low".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.MIMEEncoding
+ description: The MIME encoding.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.FileTypeExtension
+ description: The file type extension.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.Newlines
+ description: Number of newlines signs.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.trid.file_type
+ description: The TrID file type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.trid.probability
+ description: The TrID probability.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.description
+ description: Description of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.source
+ description: Source of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.author
+ description: Author of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_name
+ description: Rule set name of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.rule_name
+ description: Name of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_id
+ description: ID of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.names
+ description: Name of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_modification_date
+ description: The last modification date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_tag
+ description: Tag of the type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.total_votes.harmless
+ description: Total number of harmless votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.total_votes.malicious
+ description: Total number of malicious votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.size
+ description: Size of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.popular_threat_classification.suggested_threat_label
+ description: Suggested threat label.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.popular_threat_classification.popular_threat_name
+ description: The popular threat name.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.times_submitted
+ description: Number of times the file was submitted.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_submission_date
+ description: Last submission date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.downloadable
+ description: Whether the file is downloadable.
+ type: Boolean
+ - contextPath: GoogleThreatIntelligence.File.attributes.sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_extension
+ description: Extension of the type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.tags
+ description: File tags.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_date
+ description: Last analysis date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.unique_sources
+ description: Unique sources.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.first_submission_date
+ description: First submission date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.ssdeep
+ description: SSDeep hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.md5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.sha1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.magic
+ description: Identification of file by the magic number.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.harmless
+ description: The number of engines that found the indicator to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.type-unsupported
+ description: The number of engines that found the indicator to be of type unsupported.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.suspicious
+ description: The number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.confirmed-timeout
+ description: The number of engines that confirmed the timeout of the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.failure
+ description: The number of failed analysis engines.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.malicious
+ description: The number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_stats.undetected
+ description: The number of engines that could not detect the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.meaningful_name
+ description: Meaningful name of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.reputation
+ description: The reputation of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.gti_assessment.threat_score.value
+ description: GTI threat score of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.gti_assessment.severity.value
+ description: GTI severity of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.gti_assessment.verdict.value
+ description: GTI verdict of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.type
+ description: Type of the indicator (file).
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.id
+ description: Type ID of the indicator.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.links.self
+ description: Link to the response.
+ type: String
+ description: Checks the file reputation of the specified hash.
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ default: true
+ description: IP address to check.
+ isArray: true
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: override_private_lookup
+ description: When set to "true", enrichment of private IP addresses will be conducted even if it has been disabled at the integration level.
+ defaultValue: false
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: IP.Address
+ description: Bad IP address.
+ - contextPath: IP.ASN
+ description: Bad IP ASN.
+ - contextPath: IP.Geo.Country
+ description: Bad IP country.
+ - contextPath: IP.Relationships.EntityA
+ description: The source of the relationship.
+ type: string
+ - contextPath: IP.Relationships.EntityB
+ description: The destination of the relationship.
+ type: string
+ - contextPath: IP.Relationships.Relationship
+ description: The name of the relationship.
+ type: string
+ - contextPath: IP.Relationships.EntityAType
+ description: The type of the source of the relationship.
+ type: string
+ - contextPath: IP.Relationships.EntityBType
+ description: The type of the destination of the relationship.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision.
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason that the vendor made the decision.
+ - contextPath: IP.ASOwner
+ description: The autonomous system owner of the IP.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Reliability
+ description: Reliability of the source providing the intelligence data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.regional_internet_registry
+ description: Regional internet registry (RIR).
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.jarm
+ description: JARM data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.network
+ description: Network data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.country
+ description: The country where the IP is located.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.as_owner
+ description: IP owner.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_analysis_stats.harmless
+ description: The number of engines that found the domain to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_analysis_stats.malicious
+ description: The number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_analysis_stats.suspicious
+ description: The number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_analysis_stats.undetected
+ description: The number of engines that could not detect the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_analysis_stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.asn
+ description: ASN data.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.whois_date
+ description: Date of the last update of the whois record.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.reputation
+ description: IP reputation.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.last_modification_date
+ description: Last modification date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.total_votes.harmless
+ description: Total number of harmless votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.total_votes.malicious
+ description: Total number of malicious votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.continent
+ description: The continent where the IP is located.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.whois
+ description: whois data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.gti_assessment.threat_score.value
+ description: GTI threat score of the IP address.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.IP.attributes.gti_assessment.severity.value
+ description: GTI severity of the IP address.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.attributes.gti_assessment.verdict.value
+ description: GTI verdict of the IP address.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.type
+ description: Indicator IP type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.IP.id
+ description: ID of the IP.
+ type: String
+ description: Checks the reputation of an IP address.
+ - name: url
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: URL to check.
+ isArray: true
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: URL.Data
+ description: Bad URLs found.
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ - contextPath: URL.Relationships.EntityA
+ description: The source of the relationship.
+ type: string
+ - contextPath: URL.Relationships.EntityB
+ description: The destination of the relationship.
+ type: string
+ - contextPath: URL.Relationships.Relationship
+ description: The name of the relationship.
+ type: string
+ - contextPath: URL.Relationships.EntityAType
+ description: The type of the source of the relationship.
+ type: string
+ - contextPath: URL.Relationships.EntityBType
+ description: The type of the destination of the relationship.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Reliability
+ description: Reliability of the source providing the intelligence data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.favicon.raw_md5
+ description: The MD5 hash of the URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.favicon.dhash
+ description: Difference hash.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_modification_date
+ description: Last modification date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.times_submitted
+ description: The number of times the URL has been submitted.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.total_votes.harmless
+ description: Total number of harmless votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.total_votes.malicious
+ description: Total number of malicious votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.threat_names
+ description: Name of the threats found.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_submission_date
+ description: The last submission date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_content_length
+ description: The last HTTPS response length.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_headers.date
+ description: The last response header date.
+ type: Date
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_headers.x-sinkhole
+ description: DNS sinkhole from last response.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_headers.content-length
+ description: The content length of the last response.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_headers.content-type
+ description: The content type of the last response.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.reputation
+ description: Reputation of the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_date
+ description: The date of the last analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.has_content
+ description: Whether the URL has content in it.
+ type: Boolean
+ - contextPath: GoogleThreatIntelligence.URL.attributes.first_submission_date
+ description: The first submission date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_content_sha256
+ description: The SHA-256 hash of the content of the last response.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_http_response_code
+ description: Last response status code.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_final_url
+ description: Last final URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.url
+ description: The URL itself.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.title
+ description: Title of the page.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_stats.harmless
+ description: The number of engines that found the domain to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_stats.malicious
+ description: The number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_stats.suspicious
+ description: The number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_stats.undetected
+ description: The number of engines that could not detect the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.last_analysis_stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.outgoing_links
+ description: Outgoing links of the URL page.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.gti_assessment.threat_score.value
+ description: GTI threat score of the URL.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.URL.attributes.gti_assessment.severity.value
+ description: GTI severity of the URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.attributes.gti_assessment.verdict.value
+ description: GTI verdict of the URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.type
+ description: Type of the indicator (url).
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.id
+ description: ID of the indicator.
+ type: String
+ - contextPath: GoogleThreatIntelligence.URL.links.self
+ description: Link to the response.
+ type: String
+ description: Checks the reputation of a URL.
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ default: true
+ description: Domain name to check.
+ isArray: true
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: Domain.Name
+ description: Bad domain found.
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason that the vendor made the decision.
+ - contextPath: Domain.Relationships.EntityA
+ description: The source of the relationship.
+ type: string
+ - contextPath: Domain.Relationships.EntityB
+ description: The destination of the relationship.
+ type: string
+ - contextPath: Domain.Relationships.Relationship
+ description: The name of the relationship.
+ type: string
+ - contextPath: Domain.Relationships.EntityAType
+ description: The type of the source of the relationship.
+ type: string
+ - contextPath: Domain.Relationships.EntityBType
+ description: The type of the destination of the relationship.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Reliability
+ description: Reliability of the source providing the intelligence data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_dns_records.type
+ description: The type of the last DNS records.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_dns_records.value
+ description: The value of the last DNS records.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_dns_records.ttl
+ description: The time to live (TTL) of the last DNS records.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.jarm
+ description: JARM data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.whois
+ description: whois data.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_dns_records_date
+ description: The last DNS records date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.harmless
+ description: The number of engines that found the domain to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.malicious
+ description: The number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.suspicious
+ description: The number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.undetected
+ description: The number of engines that could not detect the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.favicon.raw_md5
+ description: MD5 hash of the domain.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.favicon.dhash
+ description: Difference hash.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.reputation
+ description: Reputation of the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.registrar
+ description: Registrar information.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_update_date
+ description: Last updated date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.last_modification_date
+ description: Last modification date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.creation_date
+ description: Creation date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.total_votes.harmless
+ description: Total number of harmless votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.total_votes.malicious
+ description: Total number of malicious votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.gti_assessment.threat_score.value
+ description: GTI threat score of the domain.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.gti_assessment.severity.value
+ description: GTI severity of the domain.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.attributes.gti_assessment.verdict.value
+ description: GTI verdict of the domain.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.type
+ description: Type of indicator (domain).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.id
+ description: ID of the domain.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Domain.links.self
+ description: Link to the domain investigation.
+ type: String
+ description: Checks the reputation of a domain.
+ - name: file-scan
+ arguments:
+ - name: entryID
+ required: true
+ default: true
+ description: The file entry ID to submit.
+ isArray: true
+ - name: uploadURL
+ description: Special upload URL for files larger than 32 MB. Can be acquired from the gti-file-scan-upload-url command.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Submission.type
+ description: The submission type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.id
+ description: The ID of the submission.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.EntryID
+ description: The entry ID of the file detonated.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Extension
+ description: File extension.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Info
+ description: File info.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Name
+ description: Name of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA256
+ description: SHA-256 of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA512
+ description: SHA-512 of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SSDeep
+ description: SSDeep of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Size
+ description: Size of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Type
+ description: The type of the submission (analysis).
+ type: String
+ description: Submits a file for scanning. Use the gti-analysis-get command to get the scan results.
+ - name: file-rescan
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: Hash of the file to rescan. Supports MD5, SHA1, and SHA256.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Submission.Type
+ description: The type of the submission (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.id
+ description: The ID of the submission.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.hash
+ description: The indicator sent to rescan.
+ type: String
+ description: Rescans an already submitted file. This avoids having to upload the file again. Use the gti-analysis-get command to get the scan results.
+ - name: url-scan
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: The URL to scan.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Submission.Type
+ description: The type of the submission (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.id
+ description: The ID of the submission.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.hash
+ description: The indicator sent to rescan.
+ type: String
+ description: Scans a specified URL. Use the gti-analysis-get command to get the scan results.
+ - name: gti-comments-add
+ arguments:
+ - name: resource
+ required: true
+ description: The file hash (MD5, SHA1, or SHA256), Domain, URL or IP on which you're commenting. If not supplied, will try to determine if it's a hash or a url.
+ - name: resource_type
+ description: The type of the resource on which you're commenting.
+ auto: PREDEFINED
+ predefined:
+ - ip
+ - url
+ - domain
+ - hash
+ - name: comment
+ required: true
+ description: 'The actual review that you can tag by using the "#" twitter-like syntax, for example, #disinfection #zbot, and reference users using the "@" syntax, for example, @GoogleThreatIntelligenceTeam.'
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.date
+ description: The date of the comment in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.text
+ description: The text of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.positive
+ description: Number of positive votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse
+ description: Number of abuse votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.negative
+ description: Number of negative votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.html
+ description: The HTML content.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.type
+ description: The type of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.id
+ description: ID of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.links.self
+ description: Link to the request.
+ type: String
+ description: Adds comments to files and URLs.
+ - name: gti-file-scan-upload-url
+ outputs:
+ - contextPath: GoogleThreatIntelligence.FileUploadURL
+ description: The special upload URL for large files.
+ description: Get a special URL for files larger than 32 MB.
+ - name: gti-comments-delete
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Comment ID.
+ description: Delete a comment.
+ - name: gti-comments-get
+ arguments:
+ - name: resource
+ required: true
+ description: The file hash (MD5, SHA1, or SHA256), Domain, URL or IP on which you're commenting. If not supplied, will try to determine if it's a hash or a url.
+ - name: resource_type
+ description: The type of the resource on which you're commenting. If not supplied, will determine if it's a url or a file.
+ auto: PREDEFINED
+ predefined:
+ - ip
+ - url
+ - domain
+ - file
+ - hash
+ - name: limit
+ description: Maximum comments to fetch.
+ defaultValue: 10
+ - name: before
+ description: Fetch only comments before the given time.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Comments.id
+ description: ID that contains the comment (the given hash, domain, url, or ip).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.date
+ description: The date of the comment in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.text
+ description: The text of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.positive
+ description: Number of positive votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse
+ description: Number of abuse votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.negative
+ description: Number of negative votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.html
+ description: The HTML content.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.type
+ description: The type of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.id
+ description: ID of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.links.self
+ description: Link to the request.
+ type: String
+ description: Retrieves comments for a given resource.
+ - name: gti-assessment-get
+ arguments:
+ - name: resource
+ required: true
+ description: The file hash (MD5, SHA1, or SHA256), Domain, URL or IP.
+ - name: resource_type
+ description: The type of the resource. If not supplied, will determine it's a file.
+ auto: PREDEFINED
+ predefined:
+ - ip
+ - url
+ - domain
+ - file
+ - hash
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Assessment.id
+ description: ID that contains the assessment (the given hash, domain, url, or ip).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Assessment.attributes.gti_assessment.threat_score.value
+ description: The threat score of the assessment.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Assessment.attributes.gti_assessment.severity.value
+ description: The severity of the assessment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Assessment.attributes.gti_assessment.verdict.value
+ description: The verdict of the assessment.
+ type: String
+ description: Retrieves GTI assessment for a given resource.
+ - name: gti-comments-get-by-id
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: The comment's ID. Can be retrieved using the gti-comments-get command.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Comments.comments.id
+ description: ID of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.date
+ description: The date of the comment in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.text
+ description: The text of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.positive
+ description: Number of positive votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse
+ description: Number of abuse votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.votes.negative
+ description: Number of negative votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Comments.comments.attributes.html
+ description: The HTML content.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.type
+ description: The type of the comment.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Comments.comments.links.self
+ description: Link to the request.
+ type: String
+ description: Retrieves a comment by comment ID.
+ - name: gti-search
+ description: Search for an indicator in Google Threat Intelligence.
+ arguments:
+ - name: query
+ required: true
+ default: true
+ description: 'This endpoint searches any of the following: A file hash, URL, domain, IP address, tag comments.'
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: limit
+ description: Maximum number of results to fetch.
+ defaultValue: 10
+ outputs:
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.harmless
+ description: Number of engines that found the indicator to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.malicious
+ description: Number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.suspicious
+ description: Number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.undetected
+ description: Number of engines that could not detect the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.timeout
+ description: Number of engines that timed out.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.reputation
+ description: The indicator's reputation.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.last_modification_date
+ description: The last modification date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.total_votes.harmless
+ description: Total number of harmless votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.attributes.total_votes.malicious
+ description: Total number of malicious votes.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SearchResults.type
+ description: The type of the indicator (ip, domain, url, file).
+ type: String
+ - contextPath: GoogleThreatIntelligence.SearchResults.id
+ description: ID of the indicator.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SearchResults.links.self
+ description: Link to the response.
+ type: String
+ - name: gti-file-sandbox-report
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: Hash of the file to query. Supports MD5, SHA1, and SHA256.
+ - name: limit
+ description: Maximum number of results to fetch.
+ defaultValue: 10
+ outputs:
+ - contextPath: SandboxReport.attributes.analysis_date
+ description: The date of the analysis in epoch format.
+ type: Number
+ - contextPath: SandboxReport.attributes.behash
+ description: Behash of the attribute.
+ type: String
+ - contextPath: SandboxReport.attributes.command_executions
+ description: The commands that were executed.
+ type: String
+ - contextPath: SandboxReport.attributes.dns_lookups.hostname
+ description: Host names found in the lookup.
+ type: String
+ - contextPath: SandboxReport.attributes.dns_lookups.resolved_ips
+ description: The IPs that were resolved.
+ type: String
+ - contextPath: SandboxReport.attributes.files_attribute_changed
+ description: The file attributes that were changed.
+ type: String
+ - contextPath: SandboxReport.attributes.has_html_report
+ description: Whether there is an HTML report.
+ type: Boolean
+ - contextPath: SandboxReport.attributes.has_pcap
+ description: Whether the IP has a PCAP file.
+ type: Boolean
+ - contextPath: SandboxReport.attributes.http_conversations.request_method
+ description: The request method of the HTTP conversation.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Cache-Control
+ description: The cache-control method of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Connection
+ description: The connection of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Content-Length
+ description: The Content-Length of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Content-Type
+ description: The Content-Type of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Pragma
+ description: The pragma of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Server
+ description: The server of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_headers.Status-Line
+ description: The Status-Line of the response header.
+ type: String
+ - contextPath: SandboxReport.attributes.http_conversations.response_status_code
+ description: The response status code.
+ type: Number
+ - contextPath: SandboxReport.attributes.http_conversations.url
+ description: The conversation URL.
+ type: String
+ - contextPath: SandboxReport.attributes.last_modification_date
+ description: Last modified date in epoch format.
+ type: Number
+ - contextPath: SandboxReport.attributes.modules_loaded
+ description: Loaded modules.
+ type: String
+ - contextPath: SandboxReport.attributes.mutexes_created
+ description: The mutexes that were created.
+ type: String
+ - contextPath: SandboxReport.attributes.mutexes_opened
+ description: The mutexes that were opened.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_created
+ description: The processes that were created.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_tree.name
+ description: The name of the process tree.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_tree.process_id
+ description: The ID of the process.
+ type: String
+ - contextPath: SandboxReport.attributes.registry_keys_deleted
+ description: Deleted registry keys.
+ type: String
+ - contextPath: SandboxReport.attributes.registry_keys_set.key
+ description: Key of the registry key.
+ type: String
+ - contextPath: SandboxReport.attributes.registry_keys_set.value
+ description: Value of the registry key.
+ type: String
+ - contextPath: SandboxReport.attributes.sandbox_name
+ description: The name of the sandbox.
+ type: String
+ - contextPath: SandboxReport.attributes.services_started
+ description: The services that were started.
+ type: String
+ - contextPath: SandboxReport.attributes.verdicts
+ description: The verdicts.
+ type: String
+ - contextPath: SandboxReport.id
+ description: The IP analyzed.
+ type: String
+ - contextPath: SandboxReport.links.self
+ description: Link to the response.
+ type: String
+ - contextPath: SandboxReport.attributes.files_dropped.path
+ description: Path of the file dropped.
+ type: String
+ - contextPath: SandboxReport.attributes.files_dropped.sha256
+ description: SHA-256 hash of the dropped files.
+ type: String
+ - contextPath: SandboxReport.attributes.files_opened
+ description: The files that were opened.
+ type: String
+ - contextPath: SandboxReport.attributes.files_written
+ description: The files that were written.
+ type: String
+ - contextPath: SandboxReport.attributes.ip_traffic.destination_ip
+ description: Destination IP in the traffic.
+ type: String
+ - contextPath: SandboxReport.attributes.ip_traffic.destination_port
+ description: Destination port in the traffic.
+ type: Number
+ - contextPath: SandboxReport.attributes.ip_traffic.transport_layer_protocol
+ description: Transport layer protocol in the traffic.
+ type: String
+ - contextPath: SandboxReport.attributes.registry_keys_opened
+ description: The registry keys that were opened.
+ type: String
+ - contextPath: SandboxReport.attributes.tags
+ description: The tags of the DNS data.
+ type: String
+ - contextPath: SandboxReport.attributes.files_copied.destination
+ description: Destination of the files copied.
+ type: String
+ - contextPath: SandboxReport.attributes.files_copied.source
+ description: Source of the files copied.
+ type: String
+ - contextPath: SandboxReport.attributes.permissions_requested
+ description: The permissions that were requested.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_injected
+ description: The processes that were injected.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_terminated
+ description: The processes that were terminated.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_tree.children.name
+ description: The name of the children of the process.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_tree.children.process_id
+ description: The ID of the children of the process.
+ type: String
+ - contextPath: SandboxReport.attributes.services_opened
+ description: The services that were opened.
+ type: String
+ - contextPath: SandboxReport.attributes.text_highlighted
+ description: The text that was highlighted.
+ type: String
+ - contextPath: SandboxReport.attributes.calls_highlighted
+ description: The calls that were highlighted.
+ type: String
+ - contextPath: SandboxReport.attributes.processes_tree.children.time_offset
+ description: The time offset of the children in the process.
+ type: Number
+ - contextPath: SandboxReport.links.self
+ description: The link to the response.
+ type: String
+ - contextPath: SandboxReport.meta.count
+ description: The number of objects that were found in the attributes.
+ type: Number
+ description: Retrieves a behavioral relationship of the given file hash.
+ - name: gti-passive-dns-data
+ description: Returns passive DNS records by indicator.
+ arguments:
+ - name: id
+ default: true
+ description: IP or domain for which to get its DNS data.
+ - name: ip
+ description: IP for which to get its DNS data.
+ - name: domain
+ description: Domain for which to get its DNS data.
+ - name: limit
+ description: Maximum number of results to fetch.
+ defaultValue: 10
+ outputs:
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.attributes.date
+ description: Date of the DNS analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.attributes.host_name
+ description: The DNS host name.
+ type: String
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.attributes.ip_address
+ description: The DNS IP address.
+ type: String
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.attributes.resolver
+ description: The name of the resolver.
+ type: String
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.id
+ description: The ID of the resolution.
+ type: String
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.links.self
+ description: The link to the resolution.
+ type: String
+ - contextPath: GoogleThreatIntelligence.PassiveDNS.type
+ description: The type of the resolution.
+ type: String
+ - name: gti-analysis-get
+ description: Scan and get the analysis of a file submitted to GoogleThreatIntelligence.
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: ID of the analysis (from file-scan, file-rescan, or url-scan).
+ - name: extended_data
+ description: Whether to return extended data (last_analysis_results).
+ defaultValue: false
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.date
+ description: Date of the analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.harmless
+ description: Number of engines that found the indicator to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.malicious
+ description: Number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.suspicious
+ description: Number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.undetected
+ description: Number of engines that found the indicator to be undetected.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.status
+ description: Status of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.id
+ description: ID of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.type
+ description: Type of object (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha256
+ description: SHA-256 hash of the file (if it is a file).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha1
+ description: SHA-1 hash of the file (if it is a file).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.md5
+ description: MD5 hash of the file (if it is a file).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.name
+ description: Name of the file (if it is a file).
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.size
+ description: Size of the file (if it is a file).
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.url_info.id
+ description: ID of the url (if it is a URL).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.url_info.url
+ description: The URL (if it is a URL).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.id
+ description: The analysis ID.
+ type: String
+ - name: gti-file-sigma-analysis
+ description: Result of the last Sigma analysis in markdown format.
+ arguments:
+ - name: file
+ default: true
+ required: true
+ description: File hash (md5, sha1, sha256).
+ - name: only_stats
+ description: Print only Sigma analysis summary stats.
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ outputs:
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.last_modification_date
+ description: Date of the last update in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.analysis_date
+ description: Date of the last update in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.match_context
+ description: Matched strings from the log file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_author
+ description: Rule authors separated by commas.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_description
+ description: Brief summary about what the rule detects.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_id
+ description: Rule ID in GoogleThreatIntelligence's database.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_level
+ description: Rule severity. Can be "low", "medium", "high" or "critical".
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_source
+ description: Ruleset where the rule belongs.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_title
+ description: Rule title.
+ type: String
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.critical
+ description: Number of matched rules having a "critical" severity.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.high
+ description: Number of matched rules having a "high" severity.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.low
+ description: Number of matched rules having a "low" severity.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.medium
+ description: Number of matched rules having a "medium" severity.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.source_severity_stats
+ description: Same as severity_stats but grouping stats by ruleset. Keys are ruleset names as string and values are stats in a dictionary.
+ type: unknown
+ - contextPath: GoogleThreatIntelligence.SigmaAnalysis.data.id
+ description: ID of the analysis.
+ type: String
+ - name: gti-privatescanning-file
+ description: Checks the file reputation of the specified private hash.
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: Hash of the file to query. Supports MD5, SHA1, and SHA256.
+ isArray: true
+ outputs:
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_description
+ description: Description of the type of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.tlsh
+ description: The locality-sensitive hashing.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.MIMEType
+ description: MIME type of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.names
+ description: Names of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.javascript_info.tags
+ description: Tags of the JavaScript.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.FileType
+ description: The file type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.WordCount
+ description: Total number of words in the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.LineCount
+ description: Total number of lines in file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.MIMEEncoding
+ description: The MIME encoding.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.FileTypeExtension
+ description: The file type extension.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.exiftool.Newlines
+ description: Number of newlines signs.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.info
+ description: Number of IDS that marked the file as "info".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.high
+ description: Number of IDS that marked the file as "high".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.medium
+ description: Number of IDS that marked the file as "medium".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.low
+ description: Number of IDS that marked the file as "low".
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.trid.file_type
+ description: The TrID file type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.trid.probability
+ description: The TrID probability.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.description
+ description: Description of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.source
+ description: Source of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.author
+ description: Author of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_name
+ description: Rule set name of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.rule_name
+ description: Name of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_id
+ description: ID of the YARA rule.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.names
+ description: Names of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_tag
+ description: Tag of the type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.size
+ description: Size of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.type_extension
+ description: Extension of the type.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.tags
+ description: File tags.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.last_analysis_date
+ description: Last analysis date in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.File.attributes.ssdeep
+ description: SSDeep hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.md5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.sha1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.magic
+ description: Identification of file by the magic number.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.meaningful_name
+ description: Meaningful name of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.threat_severity.threat_severity_level
+ description: Threat severity level of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.threat_severity.threat_severity_data.popular_threat_category
+ description: Popular threat category of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.attributes.threat_verdict
+ description: Threat verdict of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.type
+ description: Type of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.id
+ description: ID of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.File.links.self
+ description: Link to the response.
+ type: String
+ - name: gti-privatescanning-file-scan
+ description: Submits a file for private scanning. Use the gti-privatescanning-analysis-get command to get the scan results.
+ arguments:
+ - name: entryID
+ required: true
+ default: true
+ description: The file entry ID to submit.
+ isArray: true
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Submission.type
+ description: The type of the submission (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.id
+ description: The ID of the submission.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.EntryID
+ description: The entry ID of the file detonated.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Extension
+ description: File extension.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Info
+ description: File info.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Name
+ description: Name of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA1
+ description: SHA-1 of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA256
+ description: SHA-256 of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SHA512
+ description: SHA-512 of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.SSDeep
+ description: SSDeep of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Size
+ description: Size of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Submission.Type
+ description: Type of the file.
+ type: String
+ - name: gti-privatescanning-analysis-get
+ description: Get analysis of a private file submitted to GoogleThreatIntelligence.
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: ID of the analysis.
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.date
+ description: Date of the analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.status
+ description: Status of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.threat_severity_level
+ description: Threat severity level of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.popular_threat_category
+ description: Popular threat category of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.threat_verdict
+ description: Threat verdict of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.id
+ description: ID of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.type
+ description: Type of object (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.md5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.size
+ description: Size of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.id
+ description: The analysis ID.
+ type: String
+ - name: gti-file-scan-and-analysis-get
+ description: Scan and get the analysis of a file submitted to GoogleThreatIntelligence.
+ polling: true
+ arguments:
+ - name: entryID
+ required: true
+ default: true
+ description: The file entry ID to submit.
+ - name: uploadURL
+ description: Special upload URL for files larger than 32 MB. Can be acquired from the gti-file-scan-upload-url command.
+ - name: id
+ description: This is an internal argument used for the polling process, not to be used by the user.
+ - name: extended_data
+ description: Whether to return extended data.
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: interval_in_seconds
+ description: Interval in seconds between each poll.
+ defaultValue: '60'
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.date
+ description: Date of the analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.harmless
+ description: Number of engines that found the indicator to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.malicious
+ description: Number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.suspicious
+ description: Number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.undetected
+ description: Number of engines that found the indicator to be undetected.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.status
+ description: Status of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.id
+ description: ID of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.type
+ description: Type of object (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.md5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.name
+ description: Name of the file.
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.size
+ description: Size of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.id
+ description: The analysis ID.
+ type: String
+ - name: gti-private-file-scan-and-analysis-get
+ description: Scan and get the analysis of a private file submitted to GoogleThreatIntelligence.
+ polling: true
+ arguments:
+ - name: entryID
+ required: true
+ default: true
+ description: The file entry ID to submit.
+ - name: id
+ description: This is an internal argument used for the polling process, not to be used by the user.
+ - name: extended_data
+ description: Whether to return extended data.
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: interval_in_seconds
+ description: Interval in seconds between each poll.
+ defaultValue: '60'
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.date
+ description: Date of the analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.status
+ description: Status of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.threat_severity_level
+ description: Threat severity level of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.popular_threat_category
+ description: Popular threat category of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.threat_verdict
+ description: Threat verdict of the private file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.id
+ description: ID of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.type
+ description: Type of object (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.sha1
+ description: SHA-1 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.md5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.file_info.size
+ description: Size of the file.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.id
+ description: The analysis ID.
+ type: String
+ - name: gti-url-scan-and-analysis-get
+ description: Scan and get the analysis of a URL submitted to GoogleThreatIntelligence.
+ polling: true
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: The URL to scan.
+ - name: id
+ description: This is an internal argument used for the polling process, not to be used by the user.
+ - name: extended_data
+ description: Whether to return extended data.
+ defaultValue: false
+ auto: PREDEFINED
+ predefined:
+ - 'true'
+ - 'false'
+ - name: interval_in_seconds
+ description: Interval in seconds between each poll.
+ defaultValue: '60'
+ outputs:
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.date
+ description: Date of the analysis in epoch format.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.harmless
+ description: Number of engines that found the indicator to be harmless.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.malicious
+ description: Number of engines that found the indicator to be malicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.suspicious
+ description: Number of engines that found the indicator to be suspicious.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.timeout
+ description: The number of engines that timed out for the indicator.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.stats.undetected
+ description: Number of engines that found the indicator to be undetected.
+ type: Number
+ - contextPath: GoogleThreatIntelligence.Analysis.data.attributes.status
+ description: Status of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.id
+ description: ID of the analysis.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.data.type
+ description: Type of object (analysis).
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.url_info.id
+ description: ID of the URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.meta.url_info.url
+ description: The URL.
+ type: String
+ - contextPath: GoogleThreatIntelligence.Analysis.id
+ description: The analysis ID.
+ type: String
+ dockerimage: demisto/python3:3.10.14.95956
+tests:
+- GoogleThreatIntelligence-test
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_description.md b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_description.md
new file mode 100644
index 000000000000..98cb855f7e0b
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_description.md
@@ -0,0 +1,3 @@
+### Authorization:
+Your API key can be found in your Google Threat Intelligence account user menu, clicking on your avatar.
+Your API key carries all your privileges, so keep it secure and don't share it with anyone.
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_image.png b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_image.png
new file mode 100644
index 000000000000..ff86122389c0
Binary files /dev/null and b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_image.png differ
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_test.py b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_test.py
new file mode 100644
index 000000000000..67f0635a9291
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/GoogleThreatIntelligence_test.py
@@ -0,0 +1,1244 @@
+import json
+
+import pytest
+from GoogleThreatIntelligence import (
+ ScoreCalculator,
+ encode_url_to_base64,
+ epoch_to_timestamp,
+ get_working_id,
+ raise_if_hash_not_valid,
+ raise_if_ip_not_valid,
+ create_relationships,
+ get_whois
+)
+
+from CommonServerPython import argToList, DemistoException
+import demistomock as demisto
+
+INTEGRATION_NAME = 'GoogleThreatIntelligence'
+
+
+@pytest.fixture(autouse=True)
+def handle_calling_context(mocker):
+ mocker.patch.object(demisto, 'callingContext', {'context': {'IntegrationBrand': INTEGRATION_NAME}})
+
+
+class TestScoreCalculator:
+ """Tests the ScoreCalculator class"""
+ score_calculator: ScoreCalculator
+
+ @classmethod
+ def setup_class(cls):
+ cls.score_calculator = ScoreCalculator(
+ {
+ 'preferredVendors': 'vt1, v2, vt3',
+ 'preferredVendorsThreshold': 2,
+ 'fileThreshold': 1,
+ 'ipThreshold': 1,
+ 'urlThreshold': 1,
+ 'domainThreshold': 1,
+ 'fileSuspiciousThreshold': 0,
+ 'ipSuspiciousThreshold': 0,
+ 'urlSuspiciousThreshold': 0,
+ 'domainSuspiciousThreshold': 0,
+ 'crowdsourced_yara_rules_enabled': True,
+ 'yaraRulesThreshold': 1,
+ 'SigmaIDSThreshold': 1,
+ 'domain_popularity_ranking': 1,
+ 'relationship_threshold': 1,
+ 'relationship_suspicious_threshold': 0,
+ 'gti_malicious': True,
+ 'gti_suspicious': True,
+ }
+ )
+
+ def test_there_are_logs(self):
+ with open('./test_data/file.json') as f:
+ self.score_calculator.file_score('given hash', json.load(f))
+ assert self.score_calculator.logs
+ self.score_calculator.logs = []
+
+ @pytest.mark.parametrize('malicious, suspicious, threshold, result', [
+ (0, 5, 5, True),
+ (10, 0, 5, True),
+ (0, 0, 2, False)
+ ])
+ def test_is_suspicious_by_threshold(self, malicious: int, suspicious: int, threshold: int, result: bool):
+ analysis_results = {
+ 'malicious': malicious,
+ 'suspicious': suspicious
+ }
+ assert self.score_calculator.is_suspicious_by_threshold(analysis_results, threshold) is result
+
+ @pytest.mark.parametrize('malicious, threshold, result', [
+ (5, 5, True),
+ (10, 5, True),
+ (0, 2, False)
+ ])
+ def test_is_malicious_by_threshold(self, malicious: int, threshold: int, result: bool):
+ analysis_results = {
+ 'malicious': malicious
+ }
+ assert self.score_calculator.is_malicious_by_threshold(analysis_results, threshold) is result
+
+ @pytest.mark.parametrize('ranks, result', [
+ ({'vendor1': {'rank': 10000}}, False),
+ ({'vendor1': {'rank': 3000}, 'vendor2': {'rank': 7000}}, True),
+ ({'vendor1': {'rank': 0}}, True),
+ ({'vendor1': {'rank': 300}, 'vendor2': {'rank': 300}}, True),
+ ({}, None)
+ ])
+ def test_is_good_by_popularity_ranks(self, ranks: dict[str, dict], result: bool):
+ self.score_calculator.domain_popularity_ranking = 5000
+ assert self.score_calculator.is_good_by_popularity_ranks(ranks) is result
+
+ @pytest.mark.parametrize('yara_rules_found, result', [
+ (1, False),
+ (3, True),
+ (2, True)
+ ])
+ def test_is_suspicious_by_rules_yara(self, yara_rules_found: int, result: bool):
+ # enable indicators process and set to 2
+ self.score_calculator.crowdsourced_yara_rules_enabled = True
+ self.score_calculator.crowdsourced_yara_rules_threshold = 2
+ # process
+ response = {'data': {
+ 'crowdsourced_yara_results': [1] * yara_rules_found
+ }}
+ assert self.score_calculator.is_suspicious_by_rules(response) is result
+
+ @pytest.mark.parametrize('high, critical, result', [
+ (2, 0, True),
+ (0, 2, True),
+ (1, 1, True),
+ (0, 0, False),
+ ])
+ def test_is_suspicious_by_rules_sigma(self, high: int, critical: int, result: bool):
+ # enable indicators process and set to 2
+ self.score_calculator.crowdsourced_yara_rules_enabled = True
+ self.score_calculator.sigma_ids_threshold = 2
+ response = {'data': {'sigma_analysis_stats': {'high': high, 'critical': critical}}}
+ # process
+ assert self.score_calculator.is_suspicious_by_rules(response) is result
+
+ @pytest.mark.parametrize('threshold', (1, 2))
+ def test_is_preferred_vendors_pass_malicious(self, threshold: int):
+ # setup
+ self.score_calculator.trusted_vendors_threshold = threshold
+ self.score_calculator.trusted_vendors = ['v1', 'v2']
+ # process
+ analysis_results = {'v1': {'category': 'malicious'}, 'v2': {'category': 'malicious'}}
+ assert self.score_calculator.is_preferred_vendors_pass_malicious(analysis_results)
+
+ def test_is_preferred_vendors_pass_malicious_below_threshold(self):
+ # setup
+ self.score_calculator.trusted_vendors_threshold = 3
+ self.score_calculator.trusted_vendors = ['v1', 'v2']
+ # process
+ analysis_results = {'v1': {'category': 'malicious'}, 'v2': {'category': 'malicious'}}
+ assert not self.score_calculator.is_preferred_vendors_pass_malicious(analysis_results)
+
+ def test_is_malicious_by_gti(self):
+ assert self.score_calculator.is_malicious_by_gti({'verdict': {'value': 'VERDICT_MALICIOUS'}}) is True
+ assert self.score_calculator.is_malicious_by_gti({'verdict': {'value': 'VERDICT_SUSPICIOUS'}}) is False
+ assert self.score_calculator.is_malicious_by_gti({}) is False
+ self.score_calculator.gti_malicious = False
+ assert self.score_calculator.is_malicious_by_gti({'verdict': {'value': 'VERDICT_MALICIOUS'}}) is False
+
+ def test_is_suspicious_by_gti(self):
+ assert self.score_calculator.is_suspicious_by_gti({'verdict': {'value': 'VERDICT_MALICIOUS'}}) is False
+ assert self.score_calculator.is_suspicious_by_gti({'verdict': {'value': 'VERDICT_SUSPICIOUS'}}) is True
+ assert self.score_calculator.is_suspicious_by_gti({}) is False
+ self.score_calculator.gti_suspicious = False
+ assert self.score_calculator.is_suspicious_by_gti({'verdict': {'value': 'VERDICT_SUSPICIOUS'}}) is False
+
+
+class TestHelpers:
+ def test_encode_url_to_base64(self):
+ assert encode_url_to_base64('https://example.com') == 'aHR0cHM6Ly9leGFtcGxlLmNvbQ'
+
+ def test_raise_if_hash_not_valid_valid_input(self):
+ raise_if_hash_not_valid('7e641f6b9706d860baf09fe418b6cc87')
+
+ def test_raise_if_hash_not_valid_invalid_input(self):
+ with pytest.raises(ValueError, match='not of type'):
+ raise_if_hash_not_valid('not a valid hash')
+
+ def test_raise_if_ip_not_valid_valid_input(self):
+ raise_if_ip_not_valid('8.8.8.8')
+
+ def test_raise_if_ip_not_valid_invalid_input(self):
+ with pytest.raises(ValueError, match='is not valid'):
+ raise_if_ip_not_valid('not ip at all')
+
+ @pytest.mark.parametrize('epoch_time, output', [
+ (0, '1970-01-01 00:00:00Z'),
+ (999113584, '2001-08-29 19:33:04Z'),
+ ('a string', None)
+ ])
+ def test_epoch_to_timestamp(self, epoch_time: int, output: str):
+ assert epoch_to_timestamp(epoch_time) == output
+
+ def test_get_working_id(self):
+ assert get_working_id('314huoh432ou', '') == '314huoh432ou'
+
+ def test_get_working_id_no_entry(self):
+ with pytest.raises(DemistoException):
+ assert get_working_id('1451', '')
+
+
+def test_create_relationships():
+ """
+ Given:
+ - The IP response from the API.
+
+ When:
+ - create relationships function.
+
+ Then:
+ - Validate that the relationships were created as expected.
+ """
+ expected_name = ['communicates-with', 'communicates-with', 'related-to', 'related-to']
+ with open('./test_data/relationships.json') as f:
+ relationships = create_relationships(entity_a='Test', entity_a_type='IP', relationships_response=json.load(f),
+ reliability='B - Usually reliable')
+ relation_entry = [relation.to_entry() for relation in relationships]
+
+ for relation, expected_relation_name in zip(relation_entry, expected_name):
+ assert relation.get('name') == expected_relation_name
+ assert relation.get('entityA') == 'Test'
+ assert relation.get('entityBType') == 'File'
+
+
+def test_get_whois_unexpected_value():
+ """
+ Given:
+ - Whois string.
+
+ When:
+ - Whois string returned is a reserved Whois string returned by GoogleThreatIntelligence services.
+
+ Then:
+ - Validate empty dict is returned
+ """
+ assert get_whois('g. [Organization] Reserved Domain Name\nl. [Organization Type] Reserved Domain Name') == {}
+
+
+def util_load_json(path):
+ with open(path, encoding='utf-8') as f:
+ return json.loads(f.read())
+
+
+DEFAULT_PARAMS = {
+ 'credentials': {'password': 'somepassword'},
+ 'domain_relationships': '* cname records',
+ 'ip_relationships': '* cname records',
+ 'url_relationships': '* cname records',
+ 'preferredVendors': 'vt1, v2, vt3',
+ 'preferredVendorsThreshold': 2,
+ 'fileThreshold': 1,
+ 'ipThreshold': 1,
+ 'urlThreshold': 1,
+ 'domainThreshold': 1,
+ 'fileSuspiciousThreshold': 0,
+ 'ipSuspiciousThreshold': 0,
+ 'urlSuspiciousThreshold': 0,
+ 'domainSuspiciousThreshold': 0,
+ 'crowdsourced_yara_rules_enabled': True,
+ 'yaraRulesThreshold': 1,
+ 'SigmaIDSThreshold': 1,
+ 'domain_popularity_ranking': 1,
+ 'relationship_threshold': 1,
+ 'relationship_suspicious_threshold': 0,
+ 'feedReliability': 'A - Completely reliable',
+ 'insecure': 'false',
+ 'proxy': 'false',
+ 'gti_malicious': True,
+ 'gti_suspicious': True,
+}
+
+
+def test_file_command(mocker, requests_mock):
+ """
+ Given:
+ - A valid file hash
+
+ When:
+ - Running the !file command
+
+ Then:
+ - Validate the command results are valid and contains metric data
+ """
+ from GoogleThreatIntelligence import file_command, ScoreCalculator, Client
+ import CommonServerPython
+
+ file_hash = '0000000000000000000000000000000000000000000000000000000000000000'
+ mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+ mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+ params = demisto.params()
+ mocked_score_calculator = ScoreCalculator(params=params)
+ file_relationships = (','.join(argToList(params.get('file_relationships')))).replace('* ', '').replace(' ', '_')
+ client = Client(params=params)
+
+ mock_response = util_load_json('test_data/file.json')
+ requests_mock.get(f'https://www.virustotal.com/api/v3/files/{file_hash}?relationships={file_relationships}',
+ json=mock_response)
+
+ for extended_data in [True, False]:
+ file_hash = '0000000000000000000000000000000000000000000000000000000000000000'
+ mocker.patch.object(demisto, 'args', return_value={'file': file_hash, 'extended_data': extended_data})
+
+ if extended_data:
+ expected_results = util_load_json('test_data/file_extended_results.json')
+ else:
+ expected_results = util_load_json('test_data/file_results.json')
+
+ # Run command and collect result array
+ results = file_command(
+ client=client, score_calculator=mocked_score_calculator,
+ args=demisto.args(), relationships=file_relationships)
+
+ assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+ assert results[0].execution_metrics is None
+ assert results[0].outputs == expected_results
+ assert results[0].indicator.dbot_score.score == 3
+
+
+def test_not_found_file_command(mocker, requests_mock):
+ """
+ Given:
+ - A not found file hash
+
+ When:
+ - Running the !file command
+
+ Then:
+ - Display "Not found" message to user
+ """
+ from GoogleThreatIntelligence import file_command, ScoreCalculator, Client
+ import CommonServerPython
+ # Setup Mocks
+ file_hash = '0000000000000000000000000000000000000000000000000000000000000000'
+ mocker.patch.object(demisto, 'args', return_value={'file': file_hash, 'extended_data': 'false'})
+ mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+ mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+ # Assign arguments
+ params = demisto.params()
+ mocked_score_calculator = ScoreCalculator(params=params)
+ file_relationships = (','.join(argToList(params.get('file_relationships')))).replace('* ', '').replace(' ', '_')
+ client = Client(params=params)
+
+ mock_response = {'error': {'code': 'NotFoundError'}}
+ requests_mock.get(f'https://www.virustotal.com/api/v3/files/{file_hash}?relationships={file_relationships}',
+ json=mock_response)
+
+ results = file_command(
+ client=client, score_calculator=mocked_score_calculator,
+ args=demisto.args(), relationships=file_relationships)
+
+ assert results[0].execution_metrics is None
+ assert results[0].readable_output == f'File "{file_hash}" was not found in GoogleThreatIntelligence.'
+ assert results[0].indicator.dbot_score.score == 0
+
+
+def test_domain_command(mocker, requests_mock):
+ """
+ Given:
+ - A valid Testing domain (testing.com)
+
+ When:
+ - Running the !domain command
+
+ Then:
+ - Validate the command results are valid and contains metric data
+ """
+ from GoogleThreatIntelligence import domain_command, ScoreCalculator, Client
+ import CommonServerPython
+ # Setup Mocks
+ mocker.patch.object(demisto, 'args', return_value={'domain': 'testing.com', 'extended_data': 'false'})
+ mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+ mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+ # Assign arguments
+ params = demisto.params()
+ mocked_score_calculator = ScoreCalculator(params=params)
+ domain_relationships = (','.join(argToList(params.get('domain_relationships')))).replace('* ', '').replace(' ', '_')
+ client = Client(params=params)
+
+ # Load assertions and mocked request data
+ mock_response = util_load_json('test_data/domain.json')
+ expected_results = util_load_json('test_data/domain_results.json')
+ requests_mock.get(f'https://www.virustotal.com/api/v3/domains/testing.com?relationships={domain_relationships}',
+ json=mock_response)
+
+ # Run command and collect result array
+ results = domain_command(
+ client=client, score_calculator=mocked_score_calculator,
+ args=demisto.args(), relationships=domain_relationships)
+
+ assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+ assert results[0].execution_metrics is None
+ assert results[0].outputs == expected_results
+ assert results[0].indicator.dbot_score.score == 3
+
+
+def test_not_found_domain_command(mocker, requests_mock):
+ """
+ Given:
+ - A not found domain (testing.com)
+
+ When:
+ - Running the !domain command
+
+ Then:
+ - Display "Not found" message to user
+ """
+ from GoogleThreatIntelligence import domain_command, ScoreCalculator, Client
+ import CommonServerPython
+ # Setup Mocks
+ mocker.patch.object(demisto, 'args', return_value={'domain': 'testing.com', 'extended_data': 'false'})
+ mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+ mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+ # Assign arguments
+ params = demisto.params()
+ mocked_score_calculator = ScoreCalculator(params=params)
+ domain_relationships = (','.join(argToList(params.get('domain_relationships')))).replace('* ', '').replace(' ', '_')
+ client = Client(params=params)
+
+ mock_response = {'error': {'code': 'NotFoundError'}}
+ requests_mock.get(f'https://www.virustotal.com/api/v3/domains/testing.com?relationships={domain_relationships}',
+ json=mock_response)
+
+ results = domain_command(
+ client=client, score_calculator=mocked_score_calculator,
+ args=demisto.args(), relationships=domain_relationships)
+
+ assert results[0].execution_metrics is None
+ assert results[0].readable_output == 'Domain "testing.com" was not found in GoogleThreatIntelligence.'
+ assert results[0].indicator.dbot_score.score == 0
+
+
+def test_ip_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid (and private) ip (192.168.0.1)
+
+    When:
+    - Running the !ip command with private-IP lookups enabled
+
+    Then:
+    - Validate the command results are valid and contains metric data
+    """
+    from GoogleThreatIntelligence import ip_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'ip': '192.168.0.1', 'extended_data': 'false'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    # Force the server-version check to pass so metrics results are produced
+    # regardless of the test environment — presumably gated on version; TODO confirm.
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    # Normalize the configured relationship list ("* some name" -> "some_name")
+    # into the comma-separated form the API expects.
+    ip_relationships = (','.join(argToList(params.get('ip_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # Load assertions and mocked request data
+    mock_response = util_load_json('test_data/ip.json')
+    expected_results = util_load_json('test_data/ip_results.json')
+    requests_mock.get(f'https://www.virustotal.com/api/v3/ip_addresses/192.168.0.1?relationships={ip_relationships}',
+                      json=mock_response)
+
+    # Run command and collect result array
+    results = ip_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=ip_relationships,
+        disable_private_ip_lookup=False)
+
+    # results[0] is the indicator result, results[1] carries the API metrics.
+    assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+    assert results[0].execution_metrics is None
+    assert results[0].outputs == expected_results
+    assert results[0].indicator.dbot_score.score == 3
+
+
+def test_ip_command_private_ip_lookup(mocker):
+    """
+    Given:
+    - A valid (and private) ip (192.168.0.1) with private-IP reputation lookups disabled
+
+    When:
+    - Running the !ip command
+
+    Then:
+    - Display "Reputation lookups disabled" message to user and do not enrich the IP
+    """
+    from GoogleThreatIntelligence import ip_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'ip': '192.168.0.1', 'extended_data': 'false'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    ip_relationships = (','.join(argToList(params.get('ip_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # Run command but disabling private IP enrichment.
+    # No requests_mock registration here — the command is expected to skip the API call.
+    results = ip_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=ip_relationships,
+        disable_private_ip_lookup=True)
+
+    assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+    assert results[0].execution_metrics is None
+    assert results[0].readable_output == ('IP "192.168.0.1" was not enriched. '
+                                          'Reputation lookups have been disabled for private IP addresses.')
+    assert results[0].indicator.dbot_score.score == 0
+
+
+def test_ip_command_override_private_lookup(mocker, requests_mock):
+    """
+    Given:
+    - A valid (and private) ip (192.168.0.1), private-IP lookups disabled,
+      but the override_private_lookup argument set to 'true'
+
+    When:
+    - Running the !ip command
+
+    Then:
+    - The IP is enriched anyway and the command results contain metric data
+    """
+    from GoogleThreatIntelligence import ip_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'ip': '192.168.0.1', 'extended_data': 'false',
+                                                       'override_private_lookup': 'true'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    ip_relationships = (','.join(argToList(params.get('ip_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # Load assertions and mocked request data
+    mock_response = util_load_json('test_data/ip.json')
+    expected_results = util_load_json('test_data/ip_results.json')
+    requests_mock.get(f'https://www.virustotal.com/api/v3/ip_addresses/192.168.0.1?relationships={ip_relationships}',
+                      json=mock_response)
+
+    # Run command but enabling private IP enrichment after disabling it
+    results = ip_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=ip_relationships,
+        disable_private_ip_lookup=True)
+
+    assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+    assert results[0].execution_metrics is None
+    assert results[0].outputs == expected_results
+    assert results[0].indicator.dbot_score.score == 3
+
+
+def test_not_found_ip_command(mocker, requests_mock):
+    """
+    Given:
+    - An ip (192.168.0.1) that is not found in GoogleThreatIntelligence
+
+    When:
+    - Running the !ip command
+
+    Then:
+    - Display "Not found" message to user with an Unknown (0) DBot score
+    """
+    from GoogleThreatIntelligence import ip_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'ip': '192.168.0.1', 'extended_data': 'false'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    ip_relationships = (','.join(argToList(params.get('ip_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # The API signals a missing IoC with a NotFoundError error body.
+    mock_response = {'error': {'code': 'NotFoundError'}}
+    requests_mock.get(f'https://www.virustotal.com/api/v3/ip_addresses/192.168.0.1?relationships={ip_relationships}',
+                      json=mock_response)
+
+    results = ip_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=ip_relationships,
+        disable_private_ip_lookup=False)
+
+    assert results[0].execution_metrics is None
+    assert results[0].readable_output == 'IP "192.168.0.1" was not found in GoogleThreatIntelligence.'
+    assert results[0].indicator.dbot_score.score == 0
+
+
+def test_url_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid testing url (https://vt_is_awesome.com/uts)
+
+    When:
+    - Running the !url command
+
+    Then:
+    - Validate the command results are valid and contains metric data
+    """
+    from GoogleThreatIntelligence import url_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'url': 'https://vt_is_awesome.com/uts', 'extended_data': 'false'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    testing_url = 'https://vt_is_awesome.com/uts'
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    url_relationships = (','.join(argToList(params.get('url_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # Load assertions and mocked request data.
+    # The API addresses URLs by their base64-encoded form.
+    mock_response = util_load_json('test_data/url.json')
+    expected_results = util_load_json('test_data/url_results.json')
+    requests_mock.get(f'https://www.virustotal.com/api/v3/urls/{encode_url_to_base64(testing_url)}'
+                      f'?relationships={url_relationships}', json=mock_response)
+
+    # Run command and collect result array
+    results = url_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=url_relationships)
+
+    assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+    assert results[0].execution_metrics is None
+    assert results[0].outputs == expected_results
+    assert results[0].indicator.dbot_score.score == 3
+
+
+def test_not_found_url_command(mocker, requests_mock):
+    """
+    Given:
+    - A url (https://vt_is_awesome.com/uts) that is not found in GoogleThreatIntelligence
+
+    When:
+    - Running the !url command
+
+    Then:
+    - Display "Not found" message to user with an Unknown (0) DBot score
+    """
+    from GoogleThreatIntelligence import url_command, ScoreCalculator, Client
+    import CommonServerPython
+    # Setup Mocks
+    mocker.patch.object(demisto, 'args', return_value={'url': 'https://vt_is_awesome.com/uts', 'extended_data': 'false'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    testing_url = 'https://vt_is_awesome.com/uts'
+    params = demisto.params()
+    mocked_score_calculator = ScoreCalculator(params=params)
+    url_relationships = (','.join(argToList(params.get('url_relationships')))).replace('* ', '').replace(' ', '_')
+    client = Client(params=params)
+
+    # The API signals a missing IoC with a NotFoundError error body.
+    mock_response = {'error': {'code': 'NotFoundError'}}
+    requests_mock.get(f'https://www.virustotal.com/api/v3/urls/{encode_url_to_base64(testing_url)}'
+                      f'?relationships={url_relationships}', json=mock_response)
+
+    results = url_command(
+        client=client, score_calculator=mocked_score_calculator,
+        args=demisto.args(), relationships=url_relationships)
+
+    assert results[0].execution_metrics is None
+    assert results[0].readable_output == f'URL "{testing_url}" was not found in GoogleThreatIntelligence.'
+    assert results[0].indicator.dbot_score.score == 0
+
+
+def test_private_file_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid Testing private file
+
+    When:
+    - Running the !vt-privatescanning-file command
+
+    Then:
+    - Validate the command results are valid and contains metric data
+    """
+    from GoogleThreatIntelligence import private_file_command, Client
+    import CommonServerPython
+    # Setup Mocks
+    # 64-character placeholder standing in for a SHA-256 digest.
+    sha256 = 'Example_sha256_with_64_characters_000000000000000000000000000000'
+    mocker.patch.object(demisto, 'args', return_value={'file': sha256})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    client = Client(params=params)
+
+    # Load assertions and mocked request data
+    mock_response = util_load_json('test_data/private_file.json')
+    expected_results = util_load_json('test_data/private_file_results.json')
+    requests_mock.get(f'https://www.virustotal.com/api/v3/private/files/{sha256}',
+                      json=mock_response)
+
+    # Run command and collect result array
+    results = private_file_command(client=client, args=demisto.args())
+
+    assert results[1].execution_metrics == [{'APICallsCount': 1, 'Type': 'Successful'}]
+    assert results[0].execution_metrics is None
+    assert results[0].outputs == expected_results
+
+
+def test_not_found_private_file_command(mocker, requests_mock):
+    """
+    Given:
+    - A private file hash that is not found in GoogleThreatIntelligence
+
+    When:
+    - Running the !vt-privatescanning-file command
+
+    Then:
+    - Display "Not found" message to user with an Unknown (0) DBot score
+    """
+    from GoogleThreatIntelligence import private_file_command, Client
+    import CommonServerPython
+    # Setup Mocks
+    sha256 = 'Example_sha256_with_64_characters_000000000000000000000000000000'
+    mocker.patch.object(demisto, 'args', return_value={'file': sha256})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    client = Client(params=params)
+
+    # The API signals a missing IoC with a NotFoundError error body.
+    mock_response = {'error': {'code': 'NotFoundError'}}
+    requests_mock.get(f'https://www.virustotal.com/api/v3/private/files/{sha256}',
+                      json=mock_response)
+
+    results = private_file_command(client=client, args=demisto.args())
+
+    assert results[0].execution_metrics is None
+    assert results[0].readable_output == f'File "{sha256}" was not found in GoogleThreatIntelligence.'
+    assert results[0].indicator.dbot_score.score == 0
+
+
+def test_not_found_file_sandbox_report_command(mocker, requests_mock):
+    """
+    Given:
+    - A file hash with no sandbox behaviour report in GoogleThreatIntelligence
+
+    When:
+    - Running the !vt-file-sandbox-report command
+
+    Then:
+    - Display "Not found" message to user
+    """
+    from GoogleThreatIntelligence import file_sandbox_report_command, Client
+    import CommonServerPython
+    # Setup Mocks
+    sha256 = 'Example_sha256_with_64_characters_000000000000000000000000000000'
+    mocker.patch.object(demisto, 'args', return_value={'file': sha256, 'limit': '10'})
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    # Assign arguments
+    params = demisto.params()
+    client = Client(params=params)
+
+    # The API signals a missing IoC with a NotFoundError error body.
+    mock_response = {'error': {'code': 'NotFoundError'}}
+    requests_mock.get(f'https://www.virustotal.com/api/v3/files/{sha256}/behaviours',
+                      json=mock_response)
+
+    results = file_sandbox_report_command(client=client, args=demisto.args())
+
+    assert results[0].execution_metrics is None
+    assert results[0].readable_output == f'File "{sha256}" was not found in GoogleThreatIntelligence.'
+
+
+def test_gti_assessment_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid or not found IoC of each supported type (file, ip, domain, url)
+
+    When:
+    - Running the !gti-assessment-get command
+
+    Then:
+    - Validate the command results are valid for both the found and not-found cases
+    """
+    from GoogleThreatIntelligence import get_assessment_command, encode_url_to_base64, ScoreCalculator, Client
+    import CommonServerPython
+
+    # Exercise every supported resource type against its API endpoint.
+    for resource, resource_type, endpoint in [
+        ('0000000000000000000000000000000000000000000000000000000000000000', 'file', 'files'),
+        ('8.8.8.8', 'ip', 'ip_addresses'),
+        ('www.example.com', 'domain', 'domains'),
+        ('https://www.example.com', 'url', 'urls'),
+    ]:
+        # The not-found message upper-cases 'url'/'ip' and capitalizes the rest
+        # (e.g. 'URL', 'IP', 'File', 'Domain').
+        error_resource_type = resource_type.upper() if resource_type in ['url', 'ip'] else resource_type.capitalize()
+        # Each type is checked twice: once found (fixture data), once not found.
+        for mock_response, expected_results in [
+            (
+                util_load_json(f'test_data/{resource_type}.json'),
+                util_load_json(f'test_data/{resource_type}_assessment_results.json')
+            ),
+            (
+                {'error': {'code': 'NotFoundError'}},
+                f'{error_resource_type} "{resource}" was not found in GoogleThreatIntelligence.'
+            )
+        ]:
+            mocker.patch.object(demisto, 'args', return_value={'resource': resource, 'resource_type': resource_type})
+            mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+            mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+            # Assign arguments
+            params = demisto.params()
+            mocked_score_calculator = ScoreCalculator(params=params)
+            client = Client(params=params)
+
+            # Load assertions and mocked request data
+            endpoint_resource = encode_url_to_base64(resource) if resource_type == 'url' else resource
+            requests_mock.get(f'https://www.virustotal.com/api/v3/{endpoint}/{endpoint_resource}?relationships=',
+                              json=mock_response)
+
+            # Run command and collect result array
+            results = get_assessment_command(client=client, score_calculator=mocked_score_calculator, args=demisto.args())
+
+            assert results.execution_metrics is None
+            if 'error' in mock_response:
+                assert results.readable_output == expected_results
+                assert results.indicator.dbot_score.score == 0
+            else:
+                assert results.outputs == expected_results
+                assert results.indicator.dbot_score.score == 3
+
+
+def test_gti_comments_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid IoC of each supported type (file, ip, domain, url)
+
+    When:
+    - Running the !gti-comments-get command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import get_comments_command, encode_url_to_base64, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # A single comment as returned by the comments endpoint.
+    mock_response = {
+        'data': [{
+            'attributes': {
+                'date': 0,
+                'text': 'Hello',
+                'votes': {
+                    'positive': 10,
+                    'negative': 5,
+                    'abuse': 1,
+                }
+            }
+        }]
+    }
+
+    # Same mocked response is reused for every resource type/endpoint.
+    for resource, resource_type, endpoint in [
+        ('0000000000000000000000000000000000000000000000000000000000000000', 'file', 'files'),
+        ('8.8.8.8', 'ip', 'ip_addresses'),
+        ('www.example.com', 'domain', 'domains'),
+        ('https://www.example.com', 'url', 'urls'),
+    ]:
+        mocker.patch.object(demisto, 'args', return_value={
+            'resource': resource,
+            'resource_type': resource_type,
+            'limit': 10,
+        })
+
+        endpoint_resource = encode_url_to_base64(resource) if resource_type == 'url' else resource
+        requests_mock.get(f'https://www.virustotal.com/api/v3/{endpoint}/{endpoint_resource}/comments',
+                          json=mock_response)
+
+        results = get_comments_command(client=client, args=demisto.args())
+
+        assert results.execution_metrics is None
+        assert results.outputs == {'indicator': resource, 'comments': mock_response['data']}
+
+
+def test_gti_add_comments_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid IoC of each supported type (file, ip, domain, url) and a comment
+
+    When:
+    - Running the !gti-comments-add command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import add_comments_command, encode_url_to_base64, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # The comment object the API echoes back after a successful POST.
+    mock_response = {
+        'data': {
+            'attributes': {
+                'date': 0,
+                'text': 'Hello',
+                'votes': {
+                    'positive': 10,
+                    'negative': 5,
+                    'abuse': 1,
+                }
+            }
+        }
+    }
+
+    for resource, resource_type, endpoint in [
+        ('0000000000000000000000000000000000000000000000000000000000000000', 'file', 'files'),
+        ('8.8.8.8', 'ip', 'ip_addresses'),
+        ('www.example.com', 'domain', 'domains'),
+        ('https://www.example.com', 'url', 'urls'),
+    ]:
+        mocker.patch.object(demisto, 'args', return_value={
+            'resource': resource,
+            'resource_type': resource_type,
+            'comment': 'Hello',
+        })
+
+        endpoint_resource = encode_url_to_base64(resource) if resource_type == 'url' else resource
+        requests_mock.post(f'https://www.virustotal.com/api/v3/{endpoint}/{endpoint_resource}/comments',
+                           json=mock_response)
+
+        results = add_comments_command(client=client, args=demisto.args())
+
+        assert results.execution_metrics is None
+        assert results.outputs == mock_response['data']
+
+
+def test_gti_comments_by_id_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid comment ID
+
+    When:
+    - Running the !gti-comments-get-by-id command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import get_comments_by_id_command, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # The single comment returned by the comments-by-id endpoint.
+    mock_response = {
+        'data': {
+            'attributes': {
+                'date': 0,
+                'text': 'Hello',
+                'votes': {
+                    'positive': 10,
+                    'negative': 5,
+                    'abuse': 1,
+                }
+            }
+        }
+    }
+
+    mocker.patch.object(demisto, 'args', return_value={'id': 'random_id'})
+    requests_mock.get('https://www.virustotal.com/api/v3/comments/random_id',
+                      json=mock_response)
+
+    results = get_comments_by_id_command(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    assert results.outputs == mock_response['data']
+
+
+def test_gti_passive_dns(mocker, requests_mock):
+    """
+    Given:
+    - A valid IP address (8.8.8.8)
+
+    When:
+    - Running the !gti-passive-dns-data command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import passive_dns_data, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # Fixture pair: raw API response and the expected command outputs.
+    mock_response = util_load_json('test_data/passive_dns_ip.json')
+    expected_response = util_load_json('test_data/passive_dns_ip_results.json')
+
+    mocker.patch.object(demisto, 'args', return_value={'id': '8.8.8.8', 'limit': 10})
+    requests_mock.get('https://www.virustotal.com/api/v3/ip_addresses/8.8.8.8/resolutions?limit=10',
+                      json=mock_response)
+
+    results = passive_dns_data(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    assert results.outputs == expected_response
+
+
+def test_gti_analysis_get(mocker, requests_mock):
+    """
+    Given:
+    - A valid analysis ID
+
+    When:
+    - Running the !gti-analysis-get command
+
+    Then:
+    - Validate the command results are valid and echo the analysis ID
+    """
+    from GoogleThreatIntelligence import get_analysis_command, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # Minimal completed-analysis payload.
+    mock_response = {
+        'data': {
+            'attributes': {
+                'stats': {
+                    'threat_severity_level': '',
+                    'popular_threat_category': '',
+                    'threat_verdict': '',
+                },
+                'status': 'completed',
+            }
+        }
+    }
+
+    mocker.patch.object(demisto, 'args', return_value={'id': 'random_id'})
+    requests_mock.get('https://www.virustotal.com/api/v3/analyses/random_id',
+                      json=mock_response)
+
+    results = get_analysis_command(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    # The command's outputs are the raw response plus the requested id.
+    assert results.outputs == {'id': 'random_id', **mock_response}
+
+
+def test_gti_private_analysis_get(mocker, requests_mock):
+    """
+    Given:
+    - A valid private analysis ID (analysis still pending)
+
+    When:
+    - Running the !gti-privatescanning-analysis-get command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import private_get_analysis_command, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    mock_response = {
+        'data': {
+            'attributes': {
+                'stats': {
+                    'threat_severity_level': '',
+                    'popular_threat_category': '',
+                    'threat_verdict': '',
+                },
+                'status': 'pending',
+            }
+        }
+    }
+    # NOTE(review): .copy() is shallow — expected_response shares the nested
+    # 'data'/'attributes' dicts with mock_response, so the update() below also
+    # mutates mock_response. The assertions still hold, but this is fragile.
+    expected_response = mock_response.copy()
+    expected_response['id'] = 'random_id'
+    expected_response['data']['attributes'].update({
+        'threat_severity_level': '',
+        'popular_threat_category': '',
+        'threat_verdict': '',
+    })
+
+    mocker.patch.object(demisto, 'args', return_value={'id': 'random_id'})
+    requests_mock.get('https://www.virustotal.com/api/v3/private/analyses/random_id',
+                      json=mock_response)
+
+    results = private_get_analysis_command(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    assert results.outputs == expected_response
+
+
+def test_url_scan_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid URL
+
+    When:
+    - Running the !url-scan command
+
+    Then:
+    - Validate the command results are valid and expose the scan ID
+    """
+    from GoogleThreatIntelligence import scan_url_command, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    url = 'https://www.example.com'
+    # Submission receipt returned by POST /urls.
+    mock_response = {
+        'data': {
+            'id': 'random_id',
+            'url': url,
+        }
+    }
+
+    mocker.patch.object(demisto, 'args', return_value={'url': url})
+    requests_mock.post('https://www.virustotal.com/api/v3/urls',
+                       json=mock_response)
+
+    results = scan_url_command(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    # Outputs include both the context-path keyed submission and the legacy vtScanID key.
+    assert results.outputs == {
+        'GoogleThreatIntelligence.Submission(val.id && val.id === obj.id)': mock_response['data'],
+        'vtScanID': 'random_id',
+    }
+
+
+def test_file_sigma_analysis_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid file hash
+
+    When:
+    - Running the !gti-file-sigma-analysis command with and without only_stats
+
+    Then:
+    - Validate the command results are valid and contains metric data
+    """
+    from GoogleThreatIntelligence import file_sigma_analysis_command, Client
+    import CommonServerPython
+
+    file_hash = '0000000000000000000000000000000000000000000000000000000000000000'
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+
+    params = demisto.params()
+    client = Client(params=params)
+
+    # One mock registration serves both only_stats variants below.
+    mock_response = util_load_json('test_data/file.json')
+    expected_results = util_load_json('test_data/file_extended_results.json')
+    requests_mock.get(f'https://www.virustotal.com/api/v3/files/{file_hash}?relationships=',
+                      json=mock_response)
+
+    for only_stats in [True, False]:
+        mocker.patch.object(demisto, 'args', return_value={'file': file_hash, 'only_stats': only_stats})
+
+        results = file_sigma_analysis_command(client=client, args=demisto.args())
+
+        assert results.execution_metrics is None
+        assert results.outputs == expected_results
+
+
+def test_search_command(mocker, requests_mock):
+    """
+    Given:
+    - A valid query
+
+    When:
+    - Running the !gti-search command
+
+    Then:
+    - Validate the command results are valid
+    """
+    from GoogleThreatIntelligence import search_command, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    # Single-hit search response.
+    mock_response = {
+        'data': [{
+            'id': 'random_id',
+            'attributes': {},
+        }]
+    }
+
+    mocker.patch.object(demisto, 'args', return_value={'query': 'random', 'limit': 2})
+    requests_mock.get('https://www.virustotal.com/api/v3/search?query=random&limit=2',
+                      json=mock_response)
+
+    results = search_command(client=client, args=demisto.args())
+
+    assert results.execution_metrics is None
+    assert results.outputs == mock_response['data']
+
+
+def test_get_upload_url(mocker, requests_mock):
+    """
+    Given:
+    - A configured client (command takes no arguments)
+
+    When:
+    - Running the !gti-file-scan-upload-url command
+
+    Then:
+    - Validate the command results are valid and expose the upload URL
+    """
+    from GoogleThreatIntelligence import get_upload_url, Client
+    import CommonServerPython
+
+    mocker.patch.object(demisto, 'params', return_value=DEFAULT_PARAMS)
+    mocker.patch.object(CommonServerPython, 'is_demisto_version_ge', return_value=True)
+    params = demisto.params()
+    client = Client(params=params)
+
+    mock_response = {
+        'data': 'https://www.upload_url.com',
+    }
+
+    requests_mock.get('https://www.virustotal.com/api/v3/files/upload_url',
+                      json=mock_response)
+
+    results = get_upload_url(client=client)
+
+    assert results.execution_metrics is None
+    # Outputs include both the context-path key and the legacy vtUploadURL key.
+    assert results.outputs == {
+        'GoogleThreatIntelligence.FileUploadURL': 'https://www.upload_url.com',
+        'vtUploadURL': 'https://www.upload_url.com',
+    }
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/README.md b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/README.md
new file mode 100644
index 000000000000..56c995b93e6a
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/README.md
@@ -0,0 +1,2543 @@
+# Google Threat Intelligence
+
+This integration analyzes suspicious hashes, URLs, domains, and IP addresses.
+
+## Configure Google Threat Intelligence on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+1. Search for Google Threat Intelligence.
+1. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | API Key | See [Acquiring your API key](#acquiring-your-api-key) | True |
+ | Use system proxy settings | | False |
+ | Trust any certificate (not secure) | | False |
+ | Source Reliability | Reliability of the source providing the intelligence data | |
+ | GTI Malicious Verdict. Check Google Threat Intelligence verdict to consider the file malicious. | | False |
+ | GTI Suspicious Verdict. Check Google Threat Intelligence verdict to consider the file suspicious. | | False |
+ | File Malicious Threshold. Minimum number of positive results from GoogleThreatIntelligence scanners to consider the file malicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | File Suspicious Threshold. Minimum number of positive and suspicious results from GoogleThreatIntelligence scanners to consider the file suspicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | IP Malicious Threshold. Minimum number of positive results from GoogleThreatIntelligence scanners to consider the IP malicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | IP Suspicious Threshold. Minimum number of positive and suspicious results from GoogleThreatIntelligence scanners to consider the IP suspicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Disable reputation lookups for private IP addresses | To reduce the number of lookups made to the GoogleThreatIntelligence API, this option can be selected to gracefully skip enrichment of any IP addresses allocated for private networks. | False |
+ | URL Malicious Threshold. Minimum number of positive results from GoogleThreatIntelligence scanners to consider the URL malicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | URL Suspicious Threshold. Minimum number of positive and suspicious results from GoogleThreatIntelligence scanners to consider the URL suspicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Domain Malicious Threshold. Minimum number of positive results from GoogleThreatIntelligence scanners to consider the domain malicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Domain Suspicious Threshold. Minimum number of positive and suspicious results from GoogleThreatIntelligence scanners to consider the domain suspicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Preferred Vendors List. CSV list of vendors who are considered more trustworthy. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Preferred Vendor Threshold. The minimum number of highly trusted vendors required to consider a domain, IP address, URL, or file as malicious. | See [Indicator Thresholds](#indicator-thresholds). | False |
+ | Enable score analyzing by Crowdsourced Yara Rules, Sigma, and IDS | See [Rules Threshold](#rules-threshold). | False |
+ | Crowdsourced Yara Rules Threshold | See [Rules Threshold](#rules-threshold). | False |
+ | Sigma and Intrusion Detection Rules Threshold | See [Rules Threshold](#rules-threshold). | False |
+ | Domain Popularity Ranking Threshold | See [Rules Threshold](#rules-threshold). | False |
+
+1. Click **Test** to validate the URLs, token, and connection.
+
+### Acquiring your API key
+
+Your API key can be found in your GoogleThreatIntelligence account user menu, clicking on your avatar:
+![How to get api key in GoogleThreatIntelligence](https://files.readme.io/ddeb298-Screen_Shot_2019-10-17_at_3.17.04_PM.png)
+Your API key carries all your privileges, so keep it secure and don't share it with anyone.
+
+## DBot Score / Reputation scores
+
+The following information describes DBot Score which is new for this version.
+
+### Indicator Thresholds
+
+Configure the default threshold for each indicator type in the instance settings.
+You can also specify the threshold as an argument when running relevant commands.
+
+- Indicators with positive results from preferred vendors equal to or higher than the threshold will be considered malicious.
+- Indicators with positive results equal to or higher than the malicious threshold will be considered malicious.
+- Indicators with positive results equal to or higher than the suspicious threshold value will be considered suspicious.
+- Domain popularity ranks: GoogleThreatIntelligence returns a popularity rank for each vendor. The integration calculates the average rank and compares it to the threshold.
+
+### Rules Threshold
+
+If the YARA rules analysis threshold is enabled:
+
+- Indicators with positive results, the number of found YARA rules results, Sigma analysis, or IDS equal to or higher than the threshold, will be considered suspicious.
+- If both the basic analysis and the rules analysis are suspicious, the indicator will be considered malicious.
+If the indicator was found to be suspicious only by the rules thresholds, the indicator will be considered suspicious.
+- Domain popularity ranks: GoogleThreatIntelligence returns a popularity rank for each vendor. The integration calculates the average rank and compares it to the threshold.
+
+The DbotScore calculation process can be seen on the "description" field in any malicious/suspicious DBot score.
+You can also find these calculations for all of the indicators in the debug log.
+
+Example of a GoogleThreatIntelligence DBot score log:
+
+```log
+Basic analyzing of ""
+Found popularity ranks. Analyzing.
+The average of the ranks is 809009.0 and the threshold is 10000
+Indicator is good by popularity ranks.
+Analyzing by get_domain_communicating_files
+Found safe by relationship files. total_malicious=0 >= 3
+Analyzing by get_url_downloaded_files
+Found safe by relationship files. total_malicious=0 >= 3
+Analyzing by get_url_referrer_files
+Found safe by relationship files. total_malicious=0 >= 3
+```
+
+### Reputation commands (ip, url, domain, and file)
+
+- Removed output paths: Due to changes in GoogleThreatIntelligence, the following output paths are no longer supported:
+ - *IP.GoogleThreatIntelligence*
+ - *Domain.GoogleThreatIntelligence*
+ - *URL.GoogleThreatIntelligence*
+ - *File.GoogleThreatIntelligence*
+
+ Instead, you can use the following output paths that return concrete indicator reputations.
+ - *GoogleThreatIntelligence.IP*
+ - *GoogleThreatIntelligence.Domain*
+ - *GoogleThreatIntelligence.File*
+ - *GoogleThreatIntelligence.URL*
+
+- The following commands will no longer analyze the file/url sent to it, but will get the information stored in GoogleThreatIntelligence.
+ - *GoogleThreatIntelligence.Domain*
+ - *GoogleThreatIntelligence.IP*
+
+ To analyze (detonate) the indicator, you can use the following playbooks:
+ - **Detonate File - GoogleThreatIntelligence**
+ - **Detonate URL - GoogleThreatIntelligence**
+- Each reputation command will use at least 1 API call. For advanced reputation commands, use the *Premium API* flag.
+- For each reputation command there is a new *extended_data* argument. When set to "true", the results returned by the commands will contain
+ additional information as *last_analysis_results* which contains the service name and its specific analysis.
+- Reputation commands can return relationships of the indicator.
+ The relationships that are supported are defined as part of the instance configuration.
+ For more information regarding URL relationships, see:
+ For more information regarding IP relationships, see:
+ For more information regarding Domain relationships, see:
+ For more information regarding File relationships, see:
+
+- Starting with XSOAR version 6.9.0, you may monitor API usage via the *GoogleThreatIntelligence Execution Metrics* dashboard.
+### Comments
+
+In GoogleThreatIntelligence you can now add comments to all indicator types (IP, Domain, File and URL) so each command now has the *resource_type* argument.
+If supplied, the command will use the resource type to add a comment. If not, the command will determine if the given input is a hash or a URL.
+This argument is available in the following commands:
+
+- ***gti-comments-get***
+- ***gti-comments-add***
+
+### gti-comments-get
+
+- Added the *resource_type* argument. If not supplied, will try to determine if the *resource* argument is a hash or a URL.
+- Added the *limit* argument. Gets the latest comments within the given limit.
+- New output path: *GoogleThreatIntelligence.Comments*.
+
+### Detonation (scan) Commands
+
+Removed the *gtiLink* output from all commands as it is no longer returned by the API.
+To easily use the scan commands we suggest using the following playbooks:
+
+- **Detonate File - GoogleThreatIntelligence**
+- **Detonate URL - GoogleThreatIntelligence**
+
+Use the ***gti-analysis-get*** command to get the report from the scans.
+
+### file
+
+***
+Checks the file reputation of the specified hash.
+
+#### Base Command
+
+`file`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file | Hash of the file to query. Supports MD5, SHA1, and SHA256. | Required |
+| extended_data | Whether to return extended data (last_analysis_results). Possible values are: true, false. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| File.MD5 | String | Bad MD5 hash. |
+| File.SHA1 | String | Bad SHA1 hash. |
+| File.SHA256 | String | Bad SHA256 hash. |
+| File.Relationships.EntityA | String | The source of the relationship. |
+| File.Relationships.EntityB | String | The destination of the relationship. |
+| File.Relationships.Relationship | String | The name of the relationship. |
+| File.Relationships.EntityAType | String | The type of the source of the relationship. |
+| File.Relationships.EntityBType | String | The type of the destination of the relationship. |
+| File.Malicious.Vendor | String | For malicious files, the vendor that made the decision. |
+| File.Malicious.Detections | Number | For malicious files, the total number of detections. |
+| File.Malicious.TotalEngines | Number | For malicious files, the total number of engines that checked the file hash. |
+| DBotScore.Indicator | String | The indicator that was tested. |
+| DBotScore.Type | String | The indicator type. |
+| DBotScore.Vendor | unknown | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| GoogleThreatIntelligence.File.attributes.type_description | String | Description of the type of the file. |
+| GoogleThreatIntelligence.File.attributes.tlsh | String | The locality-sensitive hashing. |
+| GoogleThreatIntelligence.File.attributes.exiftool.MIMEType | String | MIME type of the file. |
+| GoogleThreatIntelligence.File.attributes.names | String | Names of the file. |
+| GoogleThreatIntelligence.File.attributes.javascript_info.tags | String | Tags of the JavaScript. |
+| GoogleThreatIntelligence.File.attributes.exiftool.FileType | String | The file type. |
+| GoogleThreatIntelligence.File.attributes.exiftool.WordCount | String | Total number of words in the file. |
+| GoogleThreatIntelligence.File.attributes.exiftool.LineCount | String | Total number of lines in file. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.info | Number | Number of IDS that marked the file as "info". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.high | Number | Number of IDS that marked the file as "high". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.medium | Number | Number of IDS that marked the file as "medium". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.low | Number | Number of IDS that marked the file as "low". |
+| GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.critical | Number | Number of Sigma analysis that marked the file as "critical". |
+| GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.high | Number | Number of Sigma analysis that marked the file as "high". |
+| GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.medium | Number | Number of Sigma analysis that marked the file as "medium". |
+| GoogleThreatIntelligence.File.attributes.sigma_analysis_stats.low | Number | Number of Sigma analysis that marked the file as "low". |
+| GoogleThreatIntelligence.File.attributes.exiftool.MIMEEncoding | String | The MIME encoding. |
+| GoogleThreatIntelligence.File.attributes.exiftool.FileTypeExtension | String | The file type extension. |
+| GoogleThreatIntelligence.File.attributes.exiftool.Newlines | String | Number of newlines signs. |
+| GoogleThreatIntelligence.File.attributes.trid.file_type | String | The TrID file type. |
+| GoogleThreatIntelligence.File.attributes.trid.probability | Number | The TrID probability. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.description | String | Description of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.source | String | Source of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.author | String | Author of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_name | String | Rule set name of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.rule_name | String | Name of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_id | String | ID of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.names | String | Name of the file. |
+| GoogleThreatIntelligence.File.attributes.last_modification_date | Number | The last modification date in epoch format. |
+| GoogleThreatIntelligence.File.attributes.type_tag | String | Tag of the type. |
+| GoogleThreatIntelligence.File.attributes.total_votes.harmless | Number | Total number of harmless votes. |
+| GoogleThreatIntelligence.File.attributes.total_votes.malicious | Number | Total number of malicious votes. |
+| GoogleThreatIntelligence.File.attributes.size | Number | Size of the file. |
+| GoogleThreatIntelligence.File.attributes.popular_threat_classification.suggested_threat_label | String | Suggested threat label. |
+| GoogleThreatIntelligence.File.attributes.popular_threat_classification.popular_threat_name | Number | The popular threat name. |
+| GoogleThreatIntelligence.File.attributes.times_submitted | Number | Number of times the file was submitted. |
+| GoogleThreatIntelligence.File.attributes.last_submission_date | Number | Last submission date in epoch format. |
+| GoogleThreatIntelligence.File.attributes.downloadable | Boolean | Whether the file is downloadable. |
+| GoogleThreatIntelligence.File.attributes.sha256 | String | SHA-256 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.type_extension | String | Extension of the type. |
+| GoogleThreatIntelligence.File.attributes.tags | String | File tags. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_date | Number | Last analysis date in epoch format. |
+| GoogleThreatIntelligence.File.attributes.unique_sources | Number | Unique sources. |
+| GoogleThreatIntelligence.File.attributes.first_submission_date | Number | First submission date in epoch format. |
+| GoogleThreatIntelligence.File.attributes.ssdeep | String | SSDeep hash of the file. |
+| GoogleThreatIntelligence.File.attributes.md5 | String | MD5 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.sha1 | String | SHA-1 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.magic | String | Identification of file by the magic number. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.harmless | Number | The number of engines that found the indicator to be harmless. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.type-unsupported | Number | The number of engines that found the indicator to be of type unsupported. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.suspicious | Number | The number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.confirmed-timeout | Number | The number of engines that confirmed the timeout of the indicator. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.timeout | Number | The number of engines that timed out for the indicator. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.failure | Number | The number of failed analysis engines. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.malicious | Number | The number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_stats.undetected | Number | The number of engines that could not detect the indicator. |
+| GoogleThreatIntelligence.File.attributes.meaningful_name | String | Meaningful name of the file. |
+| GoogleThreatIntelligence.File.attributes.reputation | Number | The reputation of the file. |
+| GoogleThreatIntelligence.File.type | String | Type of the indicator \(file\). |
+| GoogleThreatIntelligence.File.id | String | Type ID of the indicator. |
+| GoogleThreatIntelligence.File.links.self | String | Link to the response. |
+| GoogleThreatIntelligence.File.attributes.gti_assessment.verdict.value | String | GTI verdict of the file. |
+| GoogleThreatIntelligence.File.attributes.gti_assessment.severity.value | String | GTI severity of the file. |
+| GoogleThreatIntelligence.File.attributes.gti_assessment.threat_score.value | Number | GTI threat score of the file. |
+
+#### Command Example
+
+```!file file=0000000000000000000000000000000000000000000000000000000000000000```
+
+#### Context Example
+
+```json
+{
+ "DBotScore": {
+ "Indicator": "0000000000000000000000000000000000000000000000000000000000000000",
+ "Reliability": "A - Completely reliable",
+ "Score": 2,
+ "Type": "file",
+ "Vendor": "GoogleThreatIntelligence"
+ },
+ "File": {
+ "Extension": "txt",
+ "MD5": "00000000000000000000000000000000",
+ "SHA1": "0000000000000000000000000000000000000000",
+ "SHA256": "0000000000000000000000000000000000000000000000000000000000000000",
+ "SSDeep": "3:AIO9AJraNvsgzsVqSwHqiUZ:AeJuOgzskwZ",
+ "Size": 103,
+ "Tags": [
+ "text"
+ ],
+ "Type": "text/plain"
+ },
+ "GoogleThreatIntelligence": {
+ "File": {
+ "attributes": {
+ "capabilities_tags": [],
+ "crowdsourced_yara_results": [
+ {
+ "author": "Marc Rivero | McAfee ATR Team",
+ "description": "Rule to detect the EICAR pattern",
+ "rule_name": "malw_eicar",
+ "ruleset_id": "0019ab4291",
+ "ruleset_name": "MALW_Eicar",
+ "source": "https://github.com/advanced-threat-research/Yara-Rules"
+ }
+ ],
+ "downloadable": true,
+ "exiftool": {
+ "FileType": "TXT",
+ "FileTypeExtension": "txt",
+ "LineCount": "1",
+ "MIMEEncoding": "us-ascii",
+ "MIMEType": "text/plain",
+ "Newlines": "(none)",
+ "WordCount": "7"
+ },
+ "first_submission_date": 1613356237,
+ "last_analysis_date": 1617088893,
+ "last_analysis_stats": {
+ "confirmed-timeout": 0,
+ "failure": 0,
+ "harmless": 0,
+ "malicious": 7,
+ "suspicious": 0,
+ "timeout": 1,
+ "type-unsupported": 16,
+ "undetected": 50
+ },
+ "last_modification_date": 1617088964,
+ "last_submission_date": 1613356237,
+ "magic": "ASCII text, with no line terminators",
+ "md5": "00000000000000000000000000000000",
+ "meaningful_name": "brokencert.exe",
+ "names": [
+ "brokencert.exe"
+ ],
+ "popular_threat_classification": {
+ "popular_threat_name": [
+ [
+ "eicar",
+ 7
+ ]
+ ],
+ "suggested_threat_label": "eicar/test"
+ },
+ "reputation": 0,
+ "sha1": "0000000000000000000000000000000000000000",
+ "sha256": "0000000000000000000000000000000000000000000000000000000000000000",
+ "size": 103,
+ "ssdeep": "3:AIO9AJraNvsgzsVqSwHqiUZ:AeJuOgzskwZ",
+ "tags": [
+ "text"
+ ],
+ "times_submitted": 1,
+ "tlsh": "T1AEB01208274FFB1ED10738340431F8F14428434D1CD4697414911174887614512D8354",
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "type_description": "Text",
+ "type_extension": "txt",
+ "type_tag": "text",
+ "unique_sources": 1
+ },
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000"
+ },
+ "type": "file"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Results of file hash 0000000000000000000000000000000000000000000000000000000000000000
+>
+>|Sha1|Sha256|Md5|MeaningfulName|TypeExtension|Last Modified|Reputation|Positives|
+>|---|---|---|---|---|---|---|---|
+>| 0000000000000000000000000000000000000000 | 0000000000000000000000000000000000000000000000000000000000000000 | 00000000000000000000000000000000 | brokencert.exe | txt | 2021-03-30 07:22:44Z | 0 | 7/74 |
+
+### url-scan
+
+- New output path: *GoogleThreatIntelligence.Submission*
+- Preserved output: *gtiScanID*
+- Removed output path: *gtiLink* - The V3 API no longer returns a link to the GUI.
+
+### gti-file-scan-upload-url
+
+- New output path: *GoogleThreatIntelligence.FileUploadURL*
+- Preserved output: *gtiUploadURL*
+
+## New Commands
+
+- ***gti-search***
+- ***gti-ip-passive-dns-data***
+- ***gti-file-sandbox-report***
+- ***gti-comments-get-by-id***
+- ***gti-analysis-get***
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### ip
+
+***
+Checks the reputation of an IP address.
+
+#### Base Command
+
+`ip`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ip | IP address to check. | Required |
+| extended_data | Whether to return extended data (last_analysis_results). Possible values are: true, false. | Optional |
+| override_private_lookup | When set to "true", enrichment of private IP addresses will be conducted even if it has been disabled at the integration level. Possible values are: true, false. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| IP.Address | unknown | Bad IP address. |
+| IP.ASN | unknown | Bad IP ASN. |
+| IP.Geo.Country | unknown | Bad IP country. |
+| IP.Relationships.EntityA | string | The source of the relationship. |
+| IP.Relationships.EntityB | string | The destination of the relationship. |
+| IP.Relationships.Relationship | string | The name of the relationship. |
+| IP.Relationships.EntityAType | string | The type of the source of the relationship. |
+| IP.Relationships.EntityBType | string | The type of the destination of the relationship. |
+| IP.Malicious.Vendor | unknown | For malicious IPs, the vendor that made the decision. |
+| IP.Malicious.Description | unknown | For malicious IPs, the reason that the vendor made the decision. |
+| IP.ASOwner | String | The autonomous system owner of the IP. |
+| DBotScore.Indicator | unknown | The indicator that was tested. |
+| DBotScore.Type | unknown | The indicator type. |
+| DBotScore.Vendor | unknown | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| GoogleThreatIntelligence.IP.attributes.regional_internet_registry | String | Regional internet registry \(RIR\). |
+| GoogleThreatIntelligence.IP.attributes.jarm | String | JARM data. |
+| GoogleThreatIntelligence.IP.attributes.network | String | Network data. |
+| GoogleThreatIntelligence.IP.attributes.country | String | The country where the IP is located. |
+| GoogleThreatIntelligence.IP.attributes.as_owner | String | IP owner. |
+| GoogleThreatIntelligence.IP.attributes.last_analysis_stats.harmless | Number | The number of engines that found the domain to be harmless. |
+| GoogleThreatIntelligence.IP.attributes.last_analysis_stats.malicious | Number | The number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.IP.attributes.last_analysis_stats.suspicious | Number | The number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.IP.attributes.last_analysis_stats.undetected | Number | The number of engines that could not detect the indicator. |
+| GoogleThreatIntelligence.IP.attributes.last_analysis_stats.timeout | Number | The number of engines that timed out for the indicator. |
+| GoogleThreatIntelligence.IP.attributes.asn | Number | ASN data. |
+| GoogleThreatIntelligence.IP.attributes.whois_date | Number | Date of the last update of the whois record. |
+| GoogleThreatIntelligence.IP.attributes.reputation | Number | IP reputation. |
+| GoogleThreatIntelligence.IP.attributes.last_modification_date | Number | Last modification date in epoch format. |
+| GoogleThreatIntelligence.IP.attributes.total_votes.harmless | Number | Total number of harmless votes. |
+| GoogleThreatIntelligence.IP.attributes.total_votes.malicious | Number | Total number of malicious votes. |
+| GoogleThreatIntelligence.IP.attributes.continent | String | The continent where the IP is located. |
+| GoogleThreatIntelligence.IP.attributes.whois | String | whois data. |
+| GoogleThreatIntelligence.IP.type | String | Indicator IP type. |
+| GoogleThreatIntelligence.IP.id | String | ID of the IP. |
+| GoogleThreatIntelligence.IP.attributes.gti_assessment.verdict.value | String | GTI verdict of the IP address. |
+| GoogleThreatIntelligence.IP.attributes.gti_assessment.severity.value | String | GTI severity of the IP address. |
+| GoogleThreatIntelligence.IP.attributes.gti_assessment.threat_score.value | Number | GTI threat score of the IP address. |
+
+#### Command example
+```!ip ip=1.1.1.1```
+#### Context Example
+```json
+{
+ "DBotScore": {
+ "Indicator": "1.1.1.1",
+ "Reliability": "C - Fairly reliable",
+ "Score": 1,
+ "Type": "ip",
+ "Vendor": "GoogleThreatIntelligence"
+ },
+ "IP": {
+ "ASN": 13335,
+ "ASOwner": "CLOUDFLARENET",
+ "Address": "1.1.1.1",
+ "DetectionEngines": 94,
+ "PositiveDetections": 4,
+ "Relationships": [
+ {
+ "EntityA": "1.1.1.1",
+ "EntityAType": "IP",
+ "EntityB": "00000cd773f456da710fa334507f8303e87ee228a0c42e365b0250a9a267e734",
+ "EntityBType": "File",
+ "Relationship": "communicates-with"
+ },
+ {
+ "EntityA": "1.1.1.1",
+ "EntityAType": "IP",
+ "EntityB": "0000703e66fe64992425a5a6231671c08a6c3382a28d0efacc7efd3fb289a143",
+ "EntityBType": "File",
+ "Relationship": "communicates-with"
+ }
+ ]
+ },
+ "GoogleThreatIntelligence": {
+ "IP": {
+ "attributes": {
+ "as_owner": "CLOUDFLARENET",
+ "asn": 13335,
+ "jarm": "27d3ed3ed0003ed1dc42d43d00041d6183ff1bfae51ebd88d70384363d525c",
+ "last_analysis_stats": {
+ "harmless": 80,
+ "malicious": 4,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 10
+ },
+ "last_https_certificate": {
+ "cert_signature": {
+ "signature": "3064023024c2cf6cbdf6aed1c9d51f4a742e3c3dd1c03edcd71bd394715bfea5861626820122d30a6efc98b5d2e2b9e5076977960230457b6f82a67db662c33185d5b5355d4f4c8488ac1a003d0c8440dcb0a7ca1c1327151e37f946c3aed9fdf9b9238b7f2a",
+ "signature_algorithm": "1.2.840.10045.4.3.3"
+ },
+ "extensions": {
+ "**exten**": "0481f200f00076002979bef09e393921f056739f63a577e5be577d9c600af8f9",
+ "CA": true,
+ "authority_key_identifier": {
+ "keyid": "0abc0829178ca5396d7a0ece33c72eb3edfbc37a"
+ },
+ "ca_information_access": {
+ "CA Issuers": "http://cacerts.example.com/exampleTLSHybridECCSHA3842020CA1.crt",
+ "OCSP": "http://ocsp.example.com"
+ },
+ "certificate_policies": [
+ "**policy**"
+ ],
+ "crl_distribution_points": [
+ "http://crl3.example.com/exampleTLSHybridECCSHA3842020CA1.crl",
+ "http://crl4.example.com/exampleTLSHybridECCSHA3842020CA1.crl"
+ ],
+ "extended_key_usage": [
+ "serverAuth",
+ "clientAuth"
+ ],
+ "key_usage": [
+ "ff"
+ ],
+ "subject_alternative_name": [
+ "cloudflare-dns.com",
+ "*.cloudflare-dns.com",
+ "one.one.one.one",
+ "\u0001\u0001\u0001\u0001",
+ "\u0001\u0001",
+ "\\xa2\\x9f$\\x01",
+ "\\xa2\\x9f.\\x01",
+ "&\u0006GG\u0011\u0011",
+ "&\u0006GG\u0010\u0001",
+ "GGd",
+ "GGd"
+ ],
+ "subject_key_identifier": "19451b2318f874da2214cb466be213b360158240",
+ "tags": []
+ },
+ "issuer": {
+ "C": "US",
+ "CN": "example TLS Hybrid ECC SHA384 2020 CA1",
+ "O": "example Inc"
+ },
+ "public_key": {
+ "algorithm": "EC",
+ "ec": {
+ "oid": "secp256r1",
+ "pub": "0417ad1fe835af70d38d9c9e64fd471e5b970c0ad110a826321136664d1299c3e131bbf5216373dda5c1c1a0f06da4c45ee1c2dbdaf90d34801af7b9e03af2d574"
+ }
+ },
+ "serial_number": "5076f66d11b692256ccacd546ffec53",
+ "signature_algorithm": "1.2.840.10045.4.3.3",
+ "size": 1418,
+ "subject": {
+ "C": "US",
+ "CN": "cloudflare-dns.com",
+ "L": "San Francisco",
+ "O": "Cloudflare, Inc.",
+ "ST": "California"
+ },
+ "tags": [],
+ "thumbprint": "f1b38143b992645497cf452f8c1ac84249794282",
+ "thumbprint_sha256": "fb444eb8e68437bae06232b9f5091bccff62a768ca09e92eb5c9c2cf9d17c426",
+ "validity": {
+ "not_after": "2022-10-25 23:59:59",
+ "not_before": "2021-10-25 00:00:00"
+ },
+ "version": "V3"
+ },
+ "last_https_certificate_date": 1617041198,
+ "last_modification_date": 1617083545,
+ "network": "1.1.1.0/24",
+ "reputation": 134,
+ "tags": [],
+ "total_votes": {
+ "harmless": 63,
+ "malicious": 8
+ },
+ "whois": "**whois string**",
+ "whois_date": 1631599972
+ },
+ "id": "1.1.1.1",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1"
+ },
+ "relationships": {
+ "communicating_files": {
+ "data": [
+ {
+ "id": "00000cd773f456da710fa334507f8303e87ee228a0c42e365b0250a9a267e734",
+ "type": "file"
+ },
+ {
+ "id": "0000703e66fe64992425a5a6231671c08a6c3382a28d0efacc7efd3fb289a143",
+ "type": "file"
+ }
+ ],
+ "links": {
+ "next": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/communicating_files?cursor=eyJsaW1pdCI6IDIwLCAib2Zmc2V0IjogMjB9&limit=20",
+ "related": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/communicating_files",
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/communicating_files?limit=20"
+ },
+ "meta": {
+ "cursor": "eyJsaW1pdCI6IDIwLCAib2Zmc2V0IjogMjB9"
+ }
+ }
+ },
+ "type": "ip_address"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### IP reputation of 1.1.1.1
+>
+>|Id|Network|Country|AsOwner|LastModified| Reputation |Positives|
+>|---|---|---|---|---|---|---|
+>| 1.1.1.1 | 1.1.1.0/24 | | CLOUDFLARENET | 2022-08-29 15:15:41Z | 134 | 4/94 |
+
+### url
+
+***
+Checks the reputation of a URL.
+
+#### Base Command
+
+`url`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| url | URL to check. | Required |
+| extended_data | Whether to return extended data (last_analysis_results). Possible values are: true, false. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+|-----------------------------------------------------------------------------------| --- |-------------------------------------------------------------------|
+| URL.Data | unknown | Bad URLs found. |
+| URL.Relationships.EntityA | String | The source of the relationship. |
+| URL.Relationships.EntityB | String | The destination of the relationship. |
+| URL.Relationships.Relationship | String | The name of the relationship. |
+| URL.Relationships.EntityAType | String | The type of the source of the relationship. |
+| URL.Relationships.EntityBType | String | The type of the destination of the relationship. |
+| URL.Malicious.Vendor | unknown | For malicious URLs, the vendor that made the decision. |
+| URL.Malicious.Description | unknown | For malicious URLs, the reason that the vendor made the decision. |
+| DBotScore.Indicator | unknown | The indicator that was tested. |
+| DBotScore.Type | unknown | The indicator type. |
+| DBotScore.Vendor | unknown | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| GoogleThreatIntelligence.URL.attributes.favicon.raw_md5 | String | The MD5 hash of the URL. |
+| GoogleThreatIntelligence.URL.attributes.favicon.dhash | String | Difference hash. |
+| GoogleThreatIntelligence.URL.attributes.last_modification_date | Number | Last modification date in epoch format. |
+| GoogleThreatIntelligence.URL.attributes.times_submitted | Number | The number of times the URL has been submitted. |
+| GoogleThreatIntelligence.URL.attributes.total_votes.harmless | Number | Total number of harmless votes. |
+| GoogleThreatIntelligence.URL.attributes.total_votes.malicious | Number | Total number of malicious votes. |
+| GoogleThreatIntelligence.URL.attributes.threat_names | String | Name of the threats found. |
+| GoogleThreatIntelligence.URL.attributes.last_submission_date | Number | The last submission date in epoch format. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_content_length | Number | The last HTTPS response length. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_headers.date | Date | The last response header date. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_headers.x-sinkhole | String | DNS sinkhole from last response. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_headers.content-length | String | The content length of the last response. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_headers.content-type | String | The content type of the last response. |
+| GoogleThreatIntelligence.URL.attributes.reputation | Number | Reputation of the indicator. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_date | Number | The date of the last analysis in epoch format. |
+| GoogleThreatIntelligence.URL.attributes.has_content | Boolean | Whether the URL has content in it. |
+| GoogleThreatIntelligence.URL.attributes.first_submission_date | Number | The first submission date in epoch format. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_content_sha256 | String | The SHA-256 hash of the content of the last response. |
+| GoogleThreatIntelligence.URL.attributes.last_http_response_code | Number | Last response status code. |
+| GoogleThreatIntelligence.URL.attributes.last_final_url | String | Last final URL. |
+| GoogleThreatIntelligence.URL.attributes.url | String | The URL itself. |
+| GoogleThreatIntelligence.URL.attributes.title | String | Title of the page. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_stats.harmless | Number | The number of engines that found the domain to be harmless. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_stats.malicious | Number | The number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_stats.suspicious | Number | The number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_stats.undetected | Number | The number of engines that could not detect the indicator. |
+| GoogleThreatIntelligence.URL.attributes.last_analysis_stats.timeout | Number | The number of engines that timed out for the indicator. |
+| GoogleThreatIntelligence.URL.attributes.outgoing_links | String | Outgoing links of the URL page. |
+| GoogleThreatIntelligence.URL.type | String | Type of the indicator \(url\). |
+| GoogleThreatIntelligence.URL.id | String | ID of the indicator. |
+| GoogleThreatIntelligence.URL.links.self | String | Link to the response. |
+| GoogleThreatIntelligence.URL.attributes.gti_assessment.verdict.value | String | GTI verdict of the URL. |
+| GoogleThreatIntelligence.URL.attributes.gti_assessment.severity.value | String | GTI severity of the URL. |
+| GoogleThreatIntelligence.URL.attributes.gti_assessment.threat_score.value | Number | GTI threat score of the URL. |
+
+#### Command Example
+
+```!url url=https://example.com```
+
+#### Context Example
+
+```json
+{
+ "DBotScore": {
+ "Indicator": "https://example.com",
+ "Reliability": "A - Completely reliable",
+ "Score": 2,
+ "Type": "url",
+ "Vendor": "GoogleThreatIntelligence"
+ },
+ "URL": {
+ "Category": {
+ "Dr.Web": "known infection source",
+ "Forcepoint ThreatSeeker": "information technology",
+ "alphaMountain.ai": "Malicious",
+ "sophos": "malware callhome, command and control"
+ },
+ "Data": "https://example.com",
+ "DetectionEngines": 86,
+ "PositiveDetections": 8
+ },
+ "GoogleThreatIntelligence": {
+ "URL": {
+ "attributes": {
+ "categories": {
+ "Dr.Web": "known infection source"
+ },
+ "first_submission_date": 1554509044,
+ "has_content": false,
+ "html_meta": {},
+ "last_analysis_date": 1615900309,
+ "last_analysis_stats": {
+ "harmless": 71,
+ "malicious": 8,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 7
+ },
+ "last_final_url": "https://example.com/dashboard/",
+ "last_http_response_code": 200,
+ "last_http_response_content_length": 1671,
+ "last_http_response_content_sha256": "f2ddbc5b5468c2cd9c28ae820420d32c4f53d088e4a1cc31f661230e4893104a",
+ "last_http_response_headers": {
+ "content-length": "1671",
+ "content-type": "text/html; charset=utf-8",
+ "date": "Tue, 16 Mar 2021 13:16:50 GMT",
+ "x-sinkhole": "Malware"
+ },
+ "last_modification_date": 1615900620,
+ "last_submission_date": 1615900309,
+ "outgoing_links": [
+ "http://www.example.com",
+ "http://www.example.com"
+ ],
+ "reputation": 0,
+ "tags": [],
+ "targeted_brand": {},
+ "threat_names": [
+ "C2/Generic-A"
+ ],
+ "times_submitted": 5,
+ "title": "Welcome page",
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "trackers": {},
+ "url": "https://example.com/"
+ },
+ "id": "84eb1485254266e093683024b3bd172abde615fc6a37498707ca912964a108a9",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/urls/84eb1485254266e093683024b3bd172abde615fc6a37498707ca912964a108a9"
+ },
+ "type": "url"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### URL data of "https://example.com"
+>
+>|Url|Title|LastModified|HasContent|LastHttpResponseContentSha256|Positives|Reputation|
+>|---|---|---|---|---|---|---|
+>| | Welcome page | 2021-03-16 13:17:00Z | false | f2ddbc5b5468c2cd9c28ae820420d32c4f53d088e4a1cc31f661230e4893104a | 8/86 | 0 |
+
+### domain
+
+***
+Checks the reputation of a domain.
+
+#### Base Command
+
+`domain`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain | Domain name to check. | Required |
+| extended_data | Whether to return extended data (last_analysis_results). Possible values are: true, false. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+|------------------------------------------------------------------------------| --- |----------------------------------------------------------------------|
+| Domain.Name | unknown | Bad domain found. |
+| Domain.Relationships.EntityA | String | The source of the relationship. |
+| Domain.Relationships.EntityB | String | The destination of the relationship. |
+| Domain.Relationships.Relationship | String | The name of the relationship. |
+| Domain.Relationships.EntityAType | String | The type of the source of the relationship. |
+| Domain.Relationships.EntityBType | String | The type of the destination of the relationship. |
+| Domain.Malicious.Vendor | unknown | For malicious domains, the vendor that made the decision. |
+| Domain.Malicious.Description | unknown | For malicious domains, the reason that the vendor made the decision. |
+| DBotScore.Indicator | unknown | The indicator that was tested. |
+| DBotScore.Type | unknown | The indicator type. |
+| DBotScore.Vendor | unknown | The vendor used to calculate the score. |
+| DBotScore.Score | Number | The actual score. |
+| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. |
+| GoogleThreatIntelligence.Domain.attributes.last_dns_records.type | String | The type of the last DNS records. |
+| GoogleThreatIntelligence.Domain.attributes.last_dns_records.value | String | The value of the last DNS records. |
+| GoogleThreatIntelligence.Domain.attributes.last_dns_records.ttl | Number | The time to live \(TTL\) of the last DNS records. |
+| GoogleThreatIntelligence.Domain.attributes.jarm | String | JARM data. |
+| GoogleThreatIntelligence.Domain.attributes.whois | String | WHOIS data. |
+| GoogleThreatIntelligence.Domain.attributes.last_dns_records_date | Number | The last DNS records date in epoch format. |
+| GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.harmless | Number | The number of engines that found the domain to be harmless. |
+| GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.malicious | Number | The number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.suspicious | Number | The number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.undetected | Number | The number of engines that could not detect the indicator. |
+| GoogleThreatIntelligence.Domain.attributes.last_analysis_stats.timeout | Number | The number of engines that timed out for the indicator. |
+| GoogleThreatIntelligence.Domain.attributes.favicon.raw_md5 | String | MD5 hash of the domain. |
+| GoogleThreatIntelligence.Domain.attributes.favicon.dhash | String | Difference hash. |
+| GoogleThreatIntelligence.Domain.attributes.reputation | Number | Reputation of the indicator. |
+| GoogleThreatIntelligence.Domain.attributes.registrar | String | Registrar information. |
+| GoogleThreatIntelligence.Domain.attributes.last_update_date | Number | Last updated date in epoch format. |
+| GoogleThreatIntelligence.Domain.attributes.last_modification_date | Number | Last modification date in epoch format. |
+| GoogleThreatIntelligence.Domain.attributes.creation_date | Number | Creation date in epoch format. |
+| GoogleThreatIntelligence.Domain.attributes.total_votes.harmless | Number | Total number of harmless votes. |
+| GoogleThreatIntelligence.Domain.attributes.total_votes.malicious | Number | Total number of malicious votes. |
+| GoogleThreatIntelligence.Domain.type | String | Type of indicator \(domain\). |
+| GoogleThreatIntelligence.Domain.id | String | ID of the domain. |
+| GoogleThreatIntelligence.Domain.links.self | String | Link to the domain investigation. |
+| GoogleThreatIntelligence.Domain.attributes.gti_assessment.verdict.value | String | GTI verdict of the domain. |
+| GoogleThreatIntelligence.Domain.attributes.gti_assessment.severity.value | String | GTI severity of the domain. |
+| GoogleThreatIntelligence.Domain.attributes.gti_assessment.threat_score.value | Number | GTI threat score of the domain. |
+
+#### Command Example
+
+```!domain domain=example.com```
+
+#### Context Example
+
+```json
+{
+ "DBotScore": {
+ "Indicator": "example.com",
+ "Reliability": "A - Completely reliable",
+ "Score": 2,
+ "Type": "domain",
+ "Vendor": "GoogleThreatIntelligence"
+ },
+ "Domain": {
+ "Admin": {
+ "Country": " PA",
+ "Email": " [REDACTED]@whoisguard.com",
+ "Name": " WhoisGuard, Inc.",
+ "Phone": null
+ },
+ "CreationDate": [
+ " 2017-01-21T16:26:19.0Z"
+ ],
+ "ExpirationDate": " 2018-01-21T23:59:59.0Z",
+ "Name": "example.com",
+ "NameServers": [
+ " PDNS1.REGISTRAR-SERVERS.COM"
+ ],
+ "Registrant": {
+ "Country": " PA",
+ "Email": " [REDACTED]@whoisguard.com",
+ "Name": null,
+ "Phone": null
+ },
+ "Registrar": {
+ "AbuseEmail": " abuse@namecheap.com",
+ "AbusePhone": " +1.6613102107",
+ "Name": [
+ " Namecheap",
+ " NAMECHEAP INC"
+ ]
+ },
+ "UpdatedDate": [
+ "2017-03-06T21:52:39.0Z"
+ ],
+ "WHOIS": {
+ "Admin": {
+ "Country": " PA",
+ "Email": " [REDACTED]@whoisguard.com",
+ "Name": " WhoisGuard, Inc.",
+ "Phone": null
+ },
+ "CreationDate": [
+ "2017-01-21T16:26:19.0Z"
+ ],
+ "ExpirationDate": " 2018-01-21T23:59:59.0Z",
+ "NameServers": [
+ " PDNS1.REGISTRAR-SERVERS.COM"
+ ],
+ "Registrant": {
+ "Country": " PA",
+ "Email": " [REDACTED]@whoisguard.com",
+ "Name": null,
+ "Phone": null
+ },
+ "Registrar": {
+ "AbuseEmail": " abuse@namecheap.com",
+ "AbusePhone": " +1.6613102107",
+ "Name": [
+ " Namecheap",
+ " NAMECHEAP INC"
+ ]
+ },
+ "UpdatedDate": [
+ " 2017-03-06T21:52:39.0Z"
+ ]
+ }
+ },
+ "GoogleThreatIntelligence": {
+ "Domain": {
+ "attributes": {
+ "categories": {
+ "Dr.Web": "known infection source",
+ "Forcepoint ThreatSeeker": "information technology",
+ "alphaMountain.ai": "Malicious",
+ "sophos": "malware callhome, command and control"
+ },
+ "creation_date": 1485015979,
+ "favicon": {
+ "dhash": "f4cca89496a0ccb2",
+ "raw_md5": "6eb4a43cb64c97f76562af703893c8fd"
+ },
+ "jarm": "29d21b20d29d29d21c41d21b21b41d494e0df9532e75299f15ba73156cee38",
+ "last_analysis_stats": {
+ "harmless": 66,
+ "malicious": 8,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 8
+ },
+ "last_dns_records": [
+ {
+ "ttl": 3599,
+ "type": "A",
+ "value": "value"
+ }
+ ],
+ "last_dns_records_date": 1615900633,
+ "last_modification_date": 1615900633,
+ "last_update_date": 1488837159,
+ "popularity_ranks": {},
+ "registrar": "Namecheap",
+ "reputation": 0,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "whois": "**whoisstring**"
+ },
+ "id": "example.com",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/example.com"
+ },
+ "type": "domain"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Domain data of example.com
+>
+>|Id|Registrant Country|LastModified|LastAnalysisStats|
+>|---|---|---|---|
+>| example.com | PA | 2021-03-16 13:17:13Z | harmless: 66 malicious: 8 suspicious: 0 undetected: 8 timeout: 0 |
+
+### url-scan
+
+***
+Scans a specified URL. Use the gti-analysis-get command to get the scan results.
+
+#### Base Command
+
+`url-scan`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| url | The URL to scan. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Submission.Type | String | The type of the submission \(analysis\). |
+| GoogleThreatIntelligence.Submission.id | String | The ID of the submission. |
+| GoogleThreatIntelligence.Submission.hash | String | The indicator sent to rescan. |
+
+#### Command Example
+
+```!url-scan url=https://example.com```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Submission": {
+ "id": "u-0f115db062b7c0dd030b16878c99dea5c354b49dc37b38eb8846179c7783e9d7-1617088890",
+ "type": "analysis",
+ "url": "https://example.com"
+ }
+ },
+ "gtiScanID": "u-0f115db062b7c0dd030b16878c99dea5c354b49dc37b38eb8846179c7783e9d7-1617088890"
+}
+```
+
+#### Human Readable Output
+
+>### New url submission
+>
+>|id|url|
+>|---|---|
+>| u-0f115db062b7c0dd030b16878c99dea5c354b49dc37b38eb8846179c7783e9d7-1617088890 | |
+
+### gti-comments-add
+
+***
+Adds comments to files and URLs.
+
+#### Base Command
+
+`gti-comments-add`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| resource | The file hash (MD5, SHA1, or SHA256), domain, URL, or IP address on which you're commenting. If not supplied, will try to determine if it's a hash or a URL. | Required |
+| resource_type | The type of the resource on which you're commenting. Possible values are: ip, url, domain, hash. | Optional |
+| comment | The actual review that you can tag by using the "#" twitter-like syntax, for example, #disinfection #zbot, and reference users using the "@" syntax, for example, @GoogleThreatIntelligenceTeam. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Comments.comments.attributes.date | Number | The date of the comment in epoch format. |
+| GoogleThreatIntelligence.Comments.comments.attributes.text | String | The text of the comment. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.positive | Number | Number of positive votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse | Number | Number of abuse votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.negative | Number | Number of negative votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.html | String | The HTML content. |
+| GoogleThreatIntelligence.Comments.comments.type | String | The type of the comment. |
+| GoogleThreatIntelligence.Comments.comments.id | String | ID of the comment. |
+| GoogleThreatIntelligence.Comments.comments.links.self | String | Link to the request. |
+
+#### Command Example
+
+```!gti-comments-add resource=paloaltonetworks.com resource_type=domain comment="this is a comment"```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Comments": {
+ "comments": {
+ "attributes": {
+ "date": 1617088894,
+ "html": "this is a comment",
+ "tags": [],
+ "text": "this is a comment",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "d-paloaltonetworks.com-e757b16b",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/d-paloaltonetworks.com-e757b16b"
+ },
+ "type": "comment"
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Comment has been added
+>
+>|Date|Text|Positive Votes|Abuse Votes|Negative Votes|
+>|---|---|---|---|---|
+>| 2021-03-30 07:21:34Z | this is a comment | 0 | 0 | 0 |
+
+### gti-file-scan-upload-url
+
+***
+Premium API. Get a special URL for files larger than 32 MB.
+
+#### Base Command
+
+`gti-file-scan-upload-url`
+
+#### Input
+
+There are no input arguments for this command.
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.FileUploadURL | unknown | The special upload URL for large files. |
+
+#### Command Example
+
+```!gti-file-scan-upload-url```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "FileUploadURL": "https://www.virustotal.com/_ah/upload/**upload-hash**"
+ },
+ "gtiUploadURL": "https://www.virustotal.com/_ah/upload/**upload-hash**"
+}
+```
+
+#### Human Readable Output
+
+>### New upload url acquired
+>
+>|Upload url|
+>|---|
+>| |
+
+### gti-comments-delete
+
+***
+Delete a comment.
+
+#### Base Command
+
+`gti-comments-delete`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | Comment ID. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+```!gti-comments-delete id=d-paloaltonetworks.com-7886a33c```
+
+#### Human Readable Output
+
+Comment d-paloaltonetworks.com-7886a33c has been deleted!
+
+### gti-comments-get
+
+***
+Retrieves comments for a given resource.
+
+#### Base Command
+
+`gti-comments-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| resource | The file hash (MD5, SHA1, or SHA256), domain, URL, or IP address on which you're commenting. If not supplied, will try to determine if it's a hash or a URL. | Required |
+| resource_type | The type of the resource on which you're commenting. If not supplied, will determine if it's a url or a file. Possible values are: ip, url, domain, file. | Optional |
+| limit | Maximum comments to fetch. Default is 10. | Optional |
+| before | Fetch only comments before the given time. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Comments.id | String | ID that contains the comment \(the given hash, domain, url, or ip\). |
+| GoogleThreatIntelligence.Comments.comments.attributes.date | Number | The date of the comment in epoch format. |
+| GoogleThreatIntelligence.Comments.comments.attributes.text | String | The text of the comment. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.positive | Number | Number of positive votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse | Number | Number of abuse votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.negative | Number | Number of negative votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.html | String | The HTML content. |
+| GoogleThreatIntelligence.Comments.comments.type | String | The type of the comment. |
+| GoogleThreatIntelligence.Comments.comments.id | String | ID of the comment. |
+| GoogleThreatIntelligence.Comments.comments.links.self | String | Link to the request. |
+
+#### Command Example
+
+```!gti-comments-get resource=https://paloaltonetworks.com```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Comments": {
+ "comments": [
+ {
+ "attributes": {
+ "date": 1616325673,
+ "html": "another comment",
+ "tags": [],
+ "text": "another comment",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-fe2d6a9e",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-fe2d6a9e"
+ },
+ "type": "comment"
+ },
+ {
+ "attributes": {
+ "date": 1616325673,
+ "html": "another comment",
+ "tags": [],
+ "text": "another comment",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-d63782a9",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-d63782a9"
+ },
+ "type": "comment"
+ },
+ {
+ "attributes": {
+ "date": 1616313101,
+ "html": "a new comment",
+ "tags": [],
+ "text": "a new comment",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-97a331a3",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-97a331a3"
+ },
+ "type": "comment"
+ },
+ {
+ "attributes": {
+ "date": 1616313067,
+ "html": "a comment",
+ "tags": [],
+ "text": "a comment",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-ae0de9fc",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/u-c5fad1f7084153e328563fbacdb07a9ad6428dc3f0a88e756266efb7c0553d9d-ae0de9fc"
+ },
+ "type": "comment"
+ }
+ ],
+ "indicator": "https://paloaltonetworks.com"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### GoogleThreatIntelligence comments of url: "https://paloaltonetworks.com"
+>
+>|Date|Text|Positive Votes|Abuse Votes|Negative Votes|
+>|---|---|---|---|---|
+>| 2021-03-21 11:21:13Z | another comment | 0 | 0 | 0 |
+>| 2021-03-21 11:21:13Z | another comment | 0 | 0 | 0 |
+>| 2021-03-21 07:51:41Z | a new comment | 0 | 0 | 0 |
+>| 2021-03-21 07:51:07Z | a comment | 0 | 0 | 0 |
+
+### gti-comments-get-by-id
+
+***
+Retrieves a comment by comment ID.
+
+#### Base Command
+
+`gti-comments-get-by-id`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | The comment's ID. Can be retrieved using the gti-comments-get command. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Comments.comments.id | String | ID of the comment. |
+| GoogleThreatIntelligence.Comments.comments.attributes.date | Number | The date of the comment in epoch format. |
+| GoogleThreatIntelligence.Comments.comments.attributes.text | String | The text of the comment. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.positive | Number | Number of positive votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.abuse | Number | Number of abuse votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.votes.negative | Number | Number of negative votes. |
+| GoogleThreatIntelligence.Comments.comments.attributes.html | String | The HTML content. |
+| GoogleThreatIntelligence.Comments.comments.type | String | The type of the comment. |
+| GoogleThreatIntelligence.Comments.comments.links.self | String | Link to the request. |
+
+#### Command Example
+
+```!gti-comments-get-by-id id=d-paloaltonetworks.com-64591897```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Comments": {
+ "comments": {
+ "attributes": {
+ "date": 1615195751,
+ "html": "a new comment!",
+ "tags": [],
+ "text": "a new comment!",
+ "votes": {
+ "abuse": 0,
+ "negative": 0,
+ "positive": 0
+ }
+ },
+ "id": "d-paloaltonetworks.com-64591897",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/comments/d-paloaltonetworks.com-64591897"
+ },
+ "type": "comment"
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Comment of ID d-paloaltonetworks.com-64591897
+>
+>|Date|Text|Positive Votes|Abuse Votes|Negative Votes|
+>|---|---|---|---|---|
+>| 2021-03-08 09:29:11Z | a new comment! | 0 | 0 | 0 |
+
+### gti-search
+
+***
+Search for an indicator in GoogleThreatIntelligence.
+
+#### Base Command
+
+`gti-search`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| query | This endpoint searches any of the following: A file hash, URL, domain, IP address, tag comments. | Required |
+| extended_data | Whether to return extended data (last_analysis_results). Possible values are: true, false. | Optional |
+| limit | Maximum number of results to fetch. Default is 10. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.harmless | Number | Number of engines that found the indicator to be harmless. |
+| GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.malicious | Number | Number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.suspicious | Number | Number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.undetected | Number | Number of engines that could not detect the indicator. |
+| GoogleThreatIntelligence.SearchResults.attributes.last_analysis_stats.timeout | Number | Number of engines that timed out. |
+| GoogleThreatIntelligence.SearchResults.attributes.reputation | Number | The indicator's reputation. |
+| GoogleThreatIntelligence.SearchResults.attributes.last_modification_date | Number | The last modification date in epoch format. |
+| GoogleThreatIntelligence.SearchResults.attributes.total_votes.harmless | Number | Total number of harmless votes. |
+| GoogleThreatIntelligence.SearchResults.attributes.total_votes.malicious | Number | Total number of malicious votes. |
+| GoogleThreatIntelligence.SearchResults.type | String | The type of the indicator \(ip, domain, url, file\). |
+| GoogleThreatIntelligence.SearchResults.id | String | ID of the indicator. |
+| GoogleThreatIntelligence.SearchResults.links.self | String | Link to the response. |
+
+#### Command Example
+
+```!gti-search query=paloaltonetworks.com```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "SearchResults": {
+ "attributes": {
+ "categories": {
+ "BitDefender": "marketing",
+ "Forcepoint ThreatSeeker": "information technology",
+ "alphaMountain.ai": "Business/Economy, Information Technology",
+ "sophos": "information technology"
+ },
+ "creation_date": 1108953730,
+ "favicon": {
+ "dhash": "02e9ecb69ac869a8",
+ "raw_md5": "920c3c89139c32d356fa4b8b61616f37"
+ },
+ "jarm": "29d3fd00029d29d00042d43d00041d598ac0c1012db967bb1ad0ff2491b3ae",
+ "last_analysis_stats": {
+ "harmless": 75,
+ "malicious": 0,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 7
+ },
+ "last_dns_records": [
+ {
+ "ttl": 14399,
+ "type": "TXT",
+ "value": "atlassian-domain-verification=WeW32v7AwYQEviMzlNjYyXNMUngcnmIMtNZKJ69TuQUoda5T6DFFV/A6rRvOzwvs"
+ }
+],
+ "last_dns_records_date": 1616986415,
+ "last_https_certificate": {
+ "cert_signature": {
+ "signature": "signature",
+ "signature_algorithm": "sha256RSA"
+ },
+ "extensions": {
+ "**exten**": "0482016a0168007600a4b90990b418581487bb13a2cc67700a3c359804f91bdf",
+ "CA": true,
+ "authority_key_identifier": {
+ "keyid": "40c2bd278ecc348330a233d7fb6cb3f0b42c80ce"
+ },
+ "ca_information_access": {
+ "CA Issuers": "http://certificates.example.com/repository/gdig2.crt",
+ "OCSP": "http://ocsp.example.com/"
+ },
+ "certificate_policies": [
+ "**policy**"
+ ],
+ "crl_distribution_points": [
+ "http://example.com/gdig2s1-1677.crl"
+ ],
+ "extended_key_usage": [
+ "serverAuth",
+ "clientAuth"
+ ],
+ "key_usage": [
+ "ff"
+ ],
+ "subject_alternative_name": [
+ "www.paloaltonetworks.com"
+ ],
+ "subject_key_identifier": "ed89d4b918aab2968bd1dfde421a179c51445be0",
+ "tags": []
+ },
+ "issuer": {
+ "C": "US",
+ "CN": "Go Daddy Secure Certificate Authority - G2",
+ "L": "Scottsdale",
+ "O": "example.com, Inc.",
+ "OU": "http://certs.example.com/repository/",
+ "ST": "Arizona"
+ },
+ "public_key": {
+ "algorithm": "RSA",
+ "rsa": {
+ "exponent": "010001",
+ "key_size": 2048,
+ "modulus": "modulus"
+ }
+ },
+ "serial_number": "f5fa379466d9884a",
+ "signature_algorithm": "sha256RSA",
+ "size": 1963,
+ "subject": {
+ "CN": "www.paloaltonetworks.com",
+ "OU": "Domain Control Validated"
+ },
+ "tags": [],
+ "thumbprint": "0296c20e3a4a607b8d9e2af86155cde04594535e",
+ "thumbprint_sha256": "17bb7bda507abc602bdf1b160d7f51edaccac39fd34f8dab1e793c3612cfc8c2",
+ "validity": {
+ "not_after": "2022-01-27 16:52:24",
+ "not_before": "2020-01-27 16:52:24"
+ },
+ "version": "V3"
+ },
+ "last_https_certificate_date": 1616986415,
+ "last_modification_date": 1617084294,
+ "last_update_date": 1594825871,
+ "popularity_ranks": {
+ "Alexa": {
+ "rank": 32577,
+ "timestamp": 1617032161
+ }
+ },
+ "registrar": "MarkMonitor Inc.",
+ "reputation": 0,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "whois": "whois string",
+ "whois_date": 1615321176
+ },
+ "id": "paloaltonetworks.com",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/paloaltonetworks.com"
+ },
+ "type": "domain"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Search result of query paloaltonetworks.com
+>
+>|Categories|CreationDate|LastAnalysisStats|
+>|---|---|---|
+>| Forcepoint ThreatSeeker: information technology sophos: information technology BitDefender: marketing alphaMountain.ai: Business/Economy, Information Technology | 1108953730 | harmless: 75 malicious: 0 suspicious: 0 undetected: 7 timeout: 0 |
+
+### gti-file-sandbox-report
+
+***
+Retrieves a behavioral relationship of the given file hash.
+
+#### Base Command
+
+`gti-file-sandbox-report`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file | Hash of the file to query. Supports MD5, SHA1, and SHA256. | Required |
+| limit | Maximum number of results to fetch. Default is 10. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| SandboxReport.attributes.analysis_date | Number | The date of the analysis in epoch format. |
+| SandboxReport.attributes.behash | String | Behash of the attribute. |
+| SandboxReport.attributes.command_executions | String | The commands that were executed. |
+| SandboxReport.attributes.dns_lookups.hostname | String | Host names found in the lookup. |
+| SandboxReport.attributes.dns_lookups.resolved_ips | String | The IPs that were resolved. |
+| SandboxReport.attributes.files_attribute_changed | String | The file attributes that were changed. |
+| SandboxReport.attributes.has_html_report | Boolean | Whether there is an HTML report. |
+| SandboxReport.attributes.has_pcap | Boolean | Whether the IP has a PCAP file. |
+| SandboxReport.attributes.http_conversations.request_method | String | The request method of the HTTP conversation. |
+| SandboxReport.attributes.http_conversations.response_headers.Cache-Control | String | The cache-control method of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Connection | String | The connection of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Content-Length | String | The Content-Length of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Content-Type | String | The Content-Type of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Pragma | String | The pragma of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Server | String | The server of the response header. |
+| SandboxReport.attributes.http_conversations.response_headers.Status-Line | String | The Status-Line of the response header. |
+| SandboxReport.attributes.http_conversations.response_status_code | Number | The response status code. |
+| SandboxReport.attributes.http_conversations.url | String | The conversation URL. |
+| SandboxReport.attributes.last_modification_date | Number | Last modified data in epoch format. |
+| SandboxReport.attributes.modules_loaded | String | Loaded modules. |
+| SandboxReport.attributes.mutexes_created | String | The mutexes that were created. |
+| SandboxReport.attributes.mutexes_opened | String | The mutexes that were opened. |
+| SandboxReport.attributes.processes_created | String | The processes that were created. |
+| SandboxReport.attributes.processes_tree.name | String | The name of the process tree. |
+| SandboxReport.attributes.processes_tree.process_id | String | The ID of the process. |
+| SandboxReport.attributes.registry_keys_deleted | String | Deleted registry keys. |
+| SandboxReport.attributes.registry_keys_set.key | String | Key of the registry key. |
+| SandboxReport.attributes.registry_keys_set.value | String | Value of the registry key. |
+| SandboxReport.attributes.sandbox_name | String | The name of the sandbox. |
+| SandboxReport.attributes.services_started | String | The services that were started. |
+| SandboxReport.attributes.verdicts | String | The verdicts. |
+| SandboxReport.id | String | The ID of the sandbox report. |
+| SandboxReport.links.self | String | Link to the response. |
+| SandboxReport.attributes.files_dropped.path | String | Path of the file dropped. |
+| SandboxReport.attributes.files_dropped.sha256 | String | SHA-256 hash of the dropped files. |
+| SandboxReport.attributes.files_opened | String | The files that were opened. |
+| SandboxReport.attributes.files_written | String | The files that were written. |
+| SandboxReport.attributes.ip_traffic.destination_ip | String | Destination IP in the traffic. |
+| SandboxReport.attributes.ip_traffic.destination_port | Number | Destination port in the traffic. |
+| SandboxReport.attributes.ip_traffic.transport_layer_protocol | String | Transport layer protocol in the traffic. |
+| SandboxReport.attributes.registry_keys_opened | String | The registry keys that were opened. |
+| SandboxReport.attributes.tags | String | The tags of the sandbox report. |
+| SandboxReport.attributes.files_copied.destination | String | Destination of the files copied. |
+| SandboxReport.attributes.files_copied.source | String | Source of the files copied. |
+| SandboxReport.attributes.permissions_requested | String | The permissions that were requested. |
+| SandboxReport.attributes.processes_injected | String | The processes that were injected. |
+| SandboxReport.attributes.processes_terminated | String | The processes that were terminated. |
+| SandboxReport.attributes.processes_tree.children.name | String | The name of the children of the process. |
+| SandboxReport.attributes.processes_tree.children.process_id | String | The ID of the children of the process. |
+| SandboxReport.attributes.services_opened | String | The services that were opened. |
+| SandboxReport.attributes.text_highlighted | String | The text that was highlighted. |
+| SandboxReport.attributes.calls_highlighted | String | The calls that were highlighted. |
+| SandboxReport.attributes.processes_tree.children.time_offset | Number | The time offset of the children in the process. |
+| SandboxReport.links.self | String | The link to the response. |
+| SandboxReport.meta.count | Number | The number of objects that were found in the attributes. |
+
+#### Command Example
+
+```!gti-file-sandbox-report file=2b294b3499d1cce794badffc959b7618```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "SandboxReport": [
+ {
+ "attributes": {
+ "analysis_date": 1558429832,
+ "behash": "079386becc949a2aafdcd2c6042cf0a9",
+ "command_executions": [
+ "C:\\DOCUME~1\\Miller\\LOCALS~1\\Temp\\Win32.AgentTesla.exe"
+ ],
+ "dns_lookups": [
+ {
+ "hostname": "checkip.dyndns.org",
+ "resolved_ips": [
+ "**ip**"
+ ]
+ },
+ {
+ "hostname": "checkip.dyndns.org",
+ "resolved_ips": [
+ "**ip**"
+ ]
+ }
+ ],
+ "files_attribute_changed": [
+ "C:\\Documents and Settings\\Miller\\Local Settings\\Temp\\xws\\xws.exe"
+ ],
+ "has_html_report": false,
+ "has_pcap": false,
+ "http_conversations": [
+ {
+ "request_method": "GET",
+ "response_headers": {
+ "Cache-Control": "no-cache",
+ "Connection": "close",
+ "Content-Length": "107",
+ "Content-Type": "text/html",
+ "Pragma": "no-cache",
+ "Server": "DynDNS-CheckIP/1.0.1",
+ "Status-Line": "HTTP/1.1 200"
+ },
+ "response_status_code": 200,
+ "url": "http://checkip.dyndns.org/"
+ },
+ {
+ "request_method": "GET",
+ "response_headers": {
+ "Cache-Control": "no-cache",
+ "Connection": "close",
+ "Content-Length": "105",
+ "Content-Type": "text/html",
+ "Pragma": "no-cache",
+ "Server": "DynDNS-CheckIP/1.0.1",
+ "Status-Line": "HTTP/1.1 200"
+ },
+ "response_status_code": 200,
+ "url": "http://checkip.dyndns.org/"
+ }
+ ],
+ "last_modification_date": 1588377117,
+ "modules_loaded": [
+ "c:\\windows\\system32\\imm32.dll"
+ ],
+ "mutexes_created": [
+ "CTF.Compart.MutexDefaultS-1-5-21-1229272821-1563985344-1801674531-1003"
+ ],
+ "mutexes_opened": [
+ "ShimCacheMutex"
+ ],
+ "processes_created": [
+ "C:\\DOCUME~1\\Miller\\LOCALS~1\\Temp\\Win32.AgentTesla.exe"
+ ],
+ "processes_tree": [
+ {
+ "name": "C:\\DOCUME~1\\Miller\\LOCALS~1\\Temp\\Win32.AgentTesla.exe",
+ "process_id": "272"
+ }
+ ],
+ "registry_keys_deleted": [
+ "HKU\\S-1-5-21-3712457824-2419000099-45725732-1005\\SOFTWARE\\CLASSES\\MSCFILE\\SHELL\\OPEN\\COMMAND"
+ ],
+ "registry_keys_set": [
+ {
+ "key": "HKU\\S-1-5-21-1229272821-1563985344-1801674531-1003\\SOFTWARE\\MICROSOFT\\WINDOWS\\CURRENTVERSION\\RUN",
+ "value": "xws"
+ }
+ ],
+ "sandbox_name": "Lastline",
+ "services_started": [
+ "RASMAN",
+ "WinHttpAutoProxySvc"
+ ],
+ "verdicts": [
+ "MALWARE",
+ "TROJAN"
+ ]
+ },
+ "id": "699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_Lastline",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/file_behaviours/699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_Lastline"
+ },
+ "type": "file_behaviour"
+ },
+ {
+ "attributes": {
+ "analysis_date": 1561405459,
+ "files_dropped": [
+ {
+ "path": "\\Users\\Petra\\AppData\\Local\\Temp\\xws\\xws.exe",
+ "sha256": "699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3"
+ }
+ ],
+ "files_opened": [
+ "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\config\\machine.config"
+ ],
+ "files_written": [
+ "C:\\Users\\\\AppData\\Local\\Temp\\xws\\xws.exe"
+ ],
+ "has_html_report": false,
+ "has_pcap": false,
+ "ip_traffic": [
+ {
+ "destination_ip": "**ip**",
+ "destination_port": 80,
+ "transport_layer_protocol": "TCP"
+ }
+ ],
+ "last_modification_date": 1563272815,
+ "processes_tree": [
+ {
+ "name": "1526312897-2b294b349.pe32",
+ "process_id": "2624"
+ }
+ ],
+ "registry_keys_opened": [
+ "\\REGISTRY\\MACHINE\\SOFTWARE\\Microsoft\\OLE"
+ ],
+ "registry_keys_set": [
+ {
+ "key": "\\REGISTRY\\USER\\S-1-5-21-1119815420-2032815650-2779196966-1000\\Software\\Microsoft\\Windows\\CurrentVersion\\Run",
+ "value": "xws"
+ }
+ ],
+ "sandbox_name": "SNDBOX",
+ "tags": [
+ "PERSISTENCE"
+ ]
+ },
+ "id": "699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_SNDBOX",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/file_behaviours/699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_SNDBOX"
+ },
+ "type": "file_behaviour"
+ },
+ {
+ "attributes": {
+ "analysis_date": 1601545446,
+ "behash": "7617055bb3994dea99c19877fd7ec55a",
+ "command_executions": [
+ "\"C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\EB93A6\\996E.exe\"",
+ "Shutdown -r -t 5"
+ ],
+ "dns_lookups": [
+ {
+ "hostname": "checkip.dyndns.org"
+ }
+ ],
+ "files_copied": [
+ {
+ "destination": "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\xws\\xws.exe ",
+ "source": "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\EB93A6\\996E.exe "
+ }
+ ],
+ "files_opened": [
+ "C:\\WINDOWS\\system32\\winime32.dll"
+ ],
+ "files_written": [
+ "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\xws\\xws.exe",
+ "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\Ktx.exe"
+ ],
+ "has_html_report": true,
+ "has_pcap": false,
+ "last_modification_date": 1601545448,
+ "modules_loaded": [
+ "ADVAPI32.dll"
+ ],
+ "mutexes_created": [
+ "CTF.LBES.MutexDefaultS-1-5-21-1482476501-1645522239-1417001333-500"
+ ],
+ "mutexes_opened": [
+ "ShimCacheMutex"
+ ],
+ "permissions_requested": [
+ "SE_DEBUG_PRIVILEGE"
+ ],
+ "processes_created": [
+ "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\EB93A6\\996E.exe"
+ ],
+ "processes_injected": [
+ "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\EB93A6\\996E.exe"
+ ],
+ "processes_terminated": [
+ "C:\\Documents and Settings\\Administrator\\Local Settings\\Temp\\EB93A6\\996E.exe"
+ ],
+ "processes_tree": [
+ {
+ "children": [
+ {
+ "children": [
+ {
+ "name": "shutdown.exe",
+ "process_id": "2336"
+ }
+ ],
+ "name": "****.exe",
+ "process_id": "1024"
+ }
+ ],
+ "name": "****.exe",
+ "process_id": "628"
+ }
+ ],
+ "registry_keys_opened": [
+ "\\Registry\\Machine\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Image File Execution Options\\996E.exe"
+ ],
+ "registry_keys_set": [
+ {
+ "key": "HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run\\xws",
+ "value": "C:\\Users\\\\AppData\\Local\\Temp\\xws\\xws.exe"
+ }
+ ],
+ "sandbox_name": "GoogleThreatIntelligence Jujubox",
+ "tags": [
+ "DIRECT_CPU_CLOCK_ACCESS"
+ ],
+ "text_highlighted": [
+ "C:\\Windows\\system32\\cmd.exe"
+ ]
+ },
+ "id": "699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_GoogleThreatIntelligence Jujubox",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/file_behaviours/699ec052ecc898bdbdafea0027c4ab44c3d01ae011c17745dd2b7fbddaa077f3_GoogleThreatIntelligence Jujubox"
+ },
+ "type": "file_behaviour"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Sandbox Reports for file hash: 2b294b3499d1cce794badffc959b7618
+>
+>|AnalysisDate|LastModificationDate|SandboxName|Link|
+>|---|---|---|---|
+>| 1558429832 | 1588377117 | Lastline | |
+>| 1561405459 | 1563272815 | SNDBOX | |
+>| 1601545446 | 1601545448 | Tencent HABO | HABO |
+>| 1592373137 | 1592373137 | GoogleThreatIntelligence Jujubox | Jujubox |
+
+### gti-passive-dns-data
+
+***
+Returns passive DNS records by indicator.
+
+#### Base Command
+
+`gti-passive-dns-data`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | IP or domain for which to get its DNS data. | Optional |
+| ip | IP for which to get its DNS data. | Optional |
+| domain | Domain for which to get its DNS data. | Optional |
+| limit | Maximum number of results to fetch. Default is 10. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.PassiveDNS.attributes.date | Number | Date of the DNS analysis in epoch format. |
+| GoogleThreatIntelligence.PassiveDNS.attributes.host_name | String | The DNS host name. |
+| GoogleThreatIntelligence.PassiveDNS.attributes.ip_address | String | The DNS IP address. |
+| GoogleThreatIntelligence.PassiveDNS.attributes.resolver | String | The name of the resolver. |
+| GoogleThreatIntelligence.PassiveDNS.id | String | The ID of the resolution. |
+| GoogleThreatIntelligence.PassiveDNS.links.self | String | The link to the resolution. |
+| GoogleThreatIntelligence.PassiveDNS.type | String | The type of the resolution. |
+
+#### Command Example
+
+```!gti-passive-dns-data ip=1.1.1.1```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "PassiveDNS": [
+ {
+ "attributes": {
+ "date": 1617085962,
+ "host_name": "muhaha.xyz",
+ "ip_address": "1.1.1.1",
+ "resolver": "GoogleThreatIntelligence"
+ },
+ "id": "1.1.1.1muhaha.xyz",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/resolutions/1.1.1.1muhaha.xyz"
+ },
+ "type": "resolution"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Passive DNS data for IP 1.1.1.1
+>
+>|Id|Date|HostName|IpAddress|Resolver|
+>|---|---|---|---|---|
+>| 1.1.1.1muhaha.xyz | 1617085962 | muhaha.xyz | 1.1.1.1 | GoogleThreatIntelligence |
+
+### gti-analysis-get
+
+***
+Retrieves the analysis results of a previously submitted file or URL.
+
+#### Base Command
+
+`gti-analysis-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | ID of the analysis (from file-scan, file-rescan, or url-scan). | Required |
+| extended_data | Whether to return extended data (last_analysis_results). | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Analysis.data.attributes.date | Number | Date of the analysis in epoch format. |
+| GoogleThreatIntelligence.Analysis.data.attributes.stats.harmless | Number | Number of engines that found the indicator to be harmless. |
+| GoogleThreatIntelligence.Analysis.data.attributes.stats.malicious | Number | Number of engines that found the indicator to be malicious. |
+| GoogleThreatIntelligence.Analysis.data.attributes.stats.suspicious | Number | Number of engines that found the indicator to be suspicious. |
+| GoogleThreatIntelligence.Analysis.data.attributes.stats.timeout | Number | The number of engines that timed out for the indicator. |
+| GoogleThreatIntelligence.Analysis.data.attributes.stats.undetected | Number | Number of engines that found the indicator to be undetected. |
+| GoogleThreatIntelligence.Analysis.data.attributes.status | String | Status of the analysis. |
+| GoogleThreatIntelligence.Analysis.data.id | String | ID of the analysis. |
+| GoogleThreatIntelligence.Analysis.data.type | String | Type of object \(analysis\). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.sha256 | String | SHA-256 hash of the file \(if it is a file\). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.sha1 | String | SHA-1 hash of the file \(if it is a file\). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.md5 | String | MD5 hash of the file \(if it is a file\). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.name | unknown | Name of the file \(if it is a file\). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.size | String | Size of the file \(if it is a file\). |
+| GoogleThreatIntelligence.Analysis.meta.url_info.id | String | ID of the URL \(if it is a URL\). |
+| GoogleThreatIntelligence.Analysis.meta.url_info.url | String | The URL \(if it is a URL\). |
+| GoogleThreatIntelligence.Analysis.id | String | The analysis ID. |
+
+#### Command Example
+
+```!gti-analysis-get id=u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Analysis": {
+ "data": {
+ "attributes": {
+ "date": 1613980758,
+ "results": {
+ "ADMINUSLabs": {
+ "category": "harmless",
+ "engine_name": "ADMINUSLabs",
+ "method": "blacklist",
+ "result": "clean"
+ }
+ },
+ "stats": {
+ "harmless": 69,
+ "malicious": 7,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 7
+ },
+ "status": "completed"
+ },
+ "id": "u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/analyses/u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758"
+ },
+ "type": "analysis"
+ },
+ "id": "u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758",
+ "meta": {
+ "url_info": {
+ "id": "20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853"
+ }
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Analysis results
+>
+>|Id|Stats|Status|
+>|---|---|---|
+>| u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758 | harmless: 69 malicious: 7 suspicious: 0 undetected: 7 timeout: 0 | completed |
+
+### gti-file-sigma-analysis
+
+***
+Retrieves result of the last Sigma analysis.
+
+#### Base Command
+
+`gti-file-sigma-analysis`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file | File hash (md5, sha1, sha256). | Required |
+| only_stats | Print only Sigma analysis summary stats. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.last_modification_date | Number | Date of the last update in epoch format. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.analysis_date | Number | Date of the analysis in epoch format. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.match_context | String | Matched strings from the log file. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_author | String | Rule authors separated by commas. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_description | String | Brief summary about what the rule detects. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_id | String | Rule ID in GoogleThreatIntelligence's database. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_level | String | Rule severity. Can be "low", "medium", "high" or "critical". |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_source | String | Ruleset where the rule belongs. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.rule_matches.rule_title | String | Rule title. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.critical | Number | Number of matched rules having a "critical" severity. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.high | Number | Number of matched rules having a "high" severity. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.low | Number | Number of matched rules having a "low" severity. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.severity_stats.medium | Number | Number of matched rules having a "medium" severity. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.attributes.stats.source_severity_stats | unknown | Same as severity_stats but grouping stats by ruleset. Keys are ruleset names as string and values are stats in a dictionary. |
+| GoogleThreatIntelligence.SigmaAnalysis.data.id | String | ID of the analysis. |
+
+#### Command Example
+
+```!gti-file-sigma-analysis file=f912398cb3542ab704fe917af4a60d4feee21ac577535b10453170f10c6fd6de```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "SigmaAnalysis": {
+ "meta": {
+ "count": 1
+ },
+ "data": {
+ "attributes": {
+ "last_modification_date": 1650970667,
+ "analysis_date": 1650968852,
+ "rule_matches": [
+ {
+ "match_context": "$EventID: '1117'",
+ "rule_level": "high",
+ "rule_description": "Detects all actions taken by Windows Defender malware detection engines",
+ "rule_source": "Sigma Integrated Rule Set (GitHub)",
+ "rule_title": "Windows Defender Threat Detected",
+ "rule_id": "cf90b923dcb2c8192e6651425886607684aac6680bf25b20c39ae3f8743aebf1",
+ "rule_author": "Ján Trenčanský"
+ },
+ {
+ "match_context": "$EventID: '2002'",
+ "rule_level": "low",
+ "rule_description": "Setting have been change in Windows Firewall",
+ "rule_source": "Sigma Integrated Rule Set (GitHub)",
+ "rule_title": "Setting Change in Windows Firewall with Advanced Security",
+ "rule_id": "693c36f61ac022fd66354b440464f490058c22b984ba1bef05ca246aba210ed1",
+ "rule_author": "frack113"
+ }
+ ],
+ "source_severity_stats": {
+ "Sigma Integrated Rule Set (GitHub)": {
+ "high": 1,
+ "medium": 0,
+ "critical": 0,
+ "low": 1
+ }
+ },
+ "severity_stats": {
+ "high": 1,
+ "medium": 0,
+ "critical": 0,
+ "low": 1
+ }
+ },
+ "type": "sigma_analysis",
+ "id": "f912398cb3542ab704fe917af4a60d4feee21ac577535b10453170f10c6fd6de",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/sigma_analyses/f912398cb3542ab704fe917af4a60d4feee21ac577535b10453170f10c6fd6de"
+ }
+ },
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/f912398cb3542ab704fe917af4a60d4feee21ac577535b10453170f10c6fd6de/sigma_analysis"
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Last Sigma analysis results
+>
+>|MatchContext|RuleLevel|RuleDescription|RuleSource|RuleTitle|RuleId|RuleAuthor|
+>|---|---|---|---|---|---|---|
+>| $EventID: '1117' | high | Detects all actions taken by Windows Defender malware detection engines | Sigma Integrated Rule Set (GitHub) | Windows Defender Threat Detected | 693c36f61ac022fd66354b440464f490058c22b984ba1bef05ca246aba210ed1 | Ján Trenčanský |
+
+
+### gti-privatescanning-file
+
+***
+Checks the file reputation of the specified private hash.
+
+See files through the eyes of GoogleThreatIntelligence without uploading them to the main threat corpus, keeping them entirely private. Static, dynamic, network and similarity analysis included, as well as automated threat intel enrichment, but NOT multi-antivirus analysis.
+
+#### Base Command
+
+`gti-privatescanning-file`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| file | File hash (md5, sha1, sha256). | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.File.attributes.type_description | String | Description of the type of the file. |
+| GoogleThreatIntelligence.File.attributes.tlsh | String | The locality-sensitive hashing. |
+| GoogleThreatIntelligence.File.attributes.exiftool.MIMEType | String | MIME type of the file. |
+| GoogleThreatIntelligence.File.attributes.names | String | Names of the file. |
+| GoogleThreatIntelligence.File.attributes.javascript_info.tags | String | Tags of the JavaScript. |
+| GoogleThreatIntelligence.File.attributes.exiftool.FileType | String | The file type. |
+| GoogleThreatIntelligence.File.attributes.exiftool.WordCount | Number | Total number of words in the file. |
+| GoogleThreatIntelligence.File.attributes.exiftool.LineCount | Number | Total number of lines in file. |
+| GoogleThreatIntelligence.File.attributes.exiftool.MIMEEncoding | String | The MIME encoding. |
+| GoogleThreatIntelligence.File.attributes.exiftool.FileTypeExtension | String | The file type extension. |
+| GoogleThreatIntelligence.File.attributes.exiftool.Newlines | Number | Number of newlines signs. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.info | Number | Number of IDS that marked the file as "info". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.high | Number | Number of IDS that marked the file as "high". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.medium | Number | Number of IDS that marked the file as "medium". |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_ids_stats.low | Number | Number of IDS that marked the file as "low". |
+| GoogleThreatIntelligence.File.attributes.trid.file_type | String | The TrID file type. |
+| GoogleThreatIntelligence.File.attributes.trid.probability | Number | The TrID probability. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.description | String | Description of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.source | String | Source of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.author | String | Author of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_name | String | Rule set name of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.rule_name | String | Name of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.crowdsourced_yara_results.ruleset_id | String | ID of the YARA rule. |
+| GoogleThreatIntelligence.File.attributes.names | String | Name of the file. |
+| GoogleThreatIntelligence.File.attributes.type_tag | String | Tag of the type. |
+| GoogleThreatIntelligence.File.attributes.size | Number | Size of the file. |
+| GoogleThreatIntelligence.File.attributes.sha256 | String | SHA-256 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.type_extension | String | Extension of the type. |
+| GoogleThreatIntelligence.File.attributes.tags | String | File tags. |
+| GoogleThreatIntelligence.File.attributes.last_analysis_date | Number | Last analysis date in epoch format. |
+| GoogleThreatIntelligence.File.attributes.ssdeep | String | SSDeep hash of the file. |
+| GoogleThreatIntelligence.File.attributes.md5 | String | MD5 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.sha1 | String | SHA-1 hash of the file. |
+| GoogleThreatIntelligence.File.attributes.magic | String | Identification of file by the magic number. |
+| GoogleThreatIntelligence.File.attributes.meaningful_name | String | Meaningful name of the file. |
+| GoogleThreatIntelligence.File.attributes.threat_severity.threat_severity_level | String | Threat severity level of the file. |
+| GoogleThreatIntelligence.File.attributes.threat_severity.threat_severity_data.popular_threat_category | String | Popular threat category of the file. |
+| GoogleThreatIntelligence.File.attributes.threat_verdict | String | Threat verdict of the file. |
+| GoogleThreatIntelligence.File.type | String | Type of the file. |
+| GoogleThreatIntelligence.File.id | String | ID of the file. |
+| GoogleThreatIntelligence.File.links.self | String | Link to the response. |
+
+#### Command Example
+
+```!gti-privatescanning-file file=example-file-hash```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "File": {
+ "attributes": {
+ "type_description": "ELF",
+ "tlsh": "Example tlsh",
+ "vhash": "Example vhash",
+ "exiftool": {
+ "MIMEType": "application/octet-stream",
+ "CPUByteOrder": "Little endian",
+ "ObjectFileType": "Executable file",
+ "CPUArchitecture": "32 bit",
+ "CPUType": "i386",
+ "FileType": "ELF executable"
+ },
+ "trid": [
+ {
+ "file_type": "ELF Executable and Linkable format (Linux)",
+ "probability": 55
+ },
+ {
+ "file_type": "ELF Executable and Linkable format (generic)",
+ "probability": 45
+ }
+ ],
+ "crowdsourced_yara_results": [
+ {
+ "description": "Detects a suspicious ELF binary with UPX compression",
+ "source": "https://www.example.com",
+ "author": "Author X",
+ "ruleset_name": "gen_elf_file_anomalies",
+ "rule_name": "SUSP_ELF_LNX_UPX_Compressed_File",
+ "ruleset_id": "0224a54ba7"
+ }
+ ],
+ "threat_severity": {
+ "threat_severity_level": "SEVERITY_HIGH",
+ "threat_severity_data": {
+ "has_dropped_files_with_detections": true,
+ "type_tag": "elf",
+ "has_execution_parents_with_detections": true,
+ "can_be_detonated": true,
+ "popular_threat_category": "trojan"
+ },
+ "last_analysis_date": "1681045097",
+ "version": 1
+ },
+ "names": [
+ "private",
+ "/usr/lib/sample.so",
+ "private_sample.bin"
+ ],
+ "owner": "virustotal",
+ "type_tag": "elf",
+ "elf_info": {
+ "header": {
+ "hdr_version": "1 (current)",
+ "type": "EXEC (Executable file)",
+ "obj_version": "0x1",
+ "data": "2's complement, little endian",
+ "machine": "Intel 80386",
+ "num_section_headers": 0,
+ "os_abi": "UNIX - Linux",
+ "abi_version": 0,
+ "entrypoint": 4633,
+ "num_prog_headers": 2,
+ "class": "ELF32"
+ },
+ "packers": [
+ "upx"
+ ],
+ "segment_list": [
+ {
+ "segment_type": "LOAD"
+ }
+ ]
+ },
+ "size": 255510,
+ "type_extension": "so",
+ "threat_verdict": "VERDICT_MALICIOUS",
+ "detectiteasy": {
+ "filetype": "ELF32",
+ "values": [
+ {
+ "info": "EXEC 386-32",
+ "version": "3.05",
+ "type": "Packer",
+ "name": "UPX"
+ }
+ ]
+ },
+ "crowdsourced_ids_stats": {
+ "high": 0,
+ "info": 0,
+ "medium": 1,
+ "low": 1
+ },
+ "type_tags": [
+ "executable",
+ "linux",
+ "elf"
+ ],
+ "sandbox_verdicts": {
+ "Zenbox Linux": {
+ "category": "malicious",
+ "confidence": 81,
+ "sandbox_name": "Zenbox Linux",
+ "malware_classification": [
+ "MALWARE",
+ "TROJAN",
+ "EVADER"
+ ],
+ "malware_names": [
+ "MalwareName"
+ ]
+ }
+ },
+ "sha256": "Example_sha256",
+ "tags": [
+ "elf",
+ "upx"
+ ],
+ "crowdsourced_ids_results": [
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "medium",
+ "rule_msg": "Known Compromised or Hostile Host Traffic",
+ "rule_raw": "alert ip [8.8.8.8] any -> $HOME_NET any"
+ },
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "low",
+ "rule_msg": "Poor Reputation IP",
+ "rule_raw": "alert ip [1.1.1.1] any -> $HOME_NET any)"
+ }
+ ],
+ "last_analysis_date": 1681386314,
+ "ssdeep": "Example ssdeep",
+ "packers": {
+ "Gandelf": "upx"
+ },
+ "md5": "Example_md5",
+ "sha1": "Example_sha1",
+ "magic": "ELF 32-bit LSB executable, Intel 80386, version 1 (GNU/Linux), statically linked, stripped",
+ "meaningful_name": "private"
+ },
+ "type": "private_file",
+ "id": "Example_sha256",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/private/files/Example_sha256"
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Results of file hash Example_sha256
+>
+>|Sha1|Sha256|Md5|Meaningful Name|Threat Severity Level|Popular Threat Category|Threat Verdict|
+>|---|---|---|---|---|---|---|
+>| Example_sha1 | Example_sha256 | Example_md5 | private | HIGH | trojan | MALICIOUS |
+
+
+### gti-privatescanning-file-scan
+
+***
+Submits a file for private scanning. Use the gti-privatescanning-analysis-get command to get the scan results.
+
+#### Base Command
+
+`gti-privatescanning-file-scan`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| entryID | The file entry ID to submit. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Submission.type | String | The type of the submission (analysis). |
+| GoogleThreatIntelligence.Submission.id | String | The ID of the submission. |
+| GoogleThreatIntelligence.Submission.EntryID | String | The entry ID of the file detonated. |
+| GoogleThreatIntelligence.Submission.Extension | String | File extension. |
+| GoogleThreatIntelligence.Submission.Info | String | File info. |
+| GoogleThreatIntelligence.Submission.MD5 | String | MD5 hash of the file. |
+| GoogleThreatIntelligence.Submission.Name | String | Name of the file. |
+| GoogleThreatIntelligence.Submission.SHA1 | String | SHA-1 of the file. |
+| GoogleThreatIntelligence.Submission.SHA256 | String | SHA-256 of the file. |
+| GoogleThreatIntelligence.Submission.SHA512 | String | SHA-512 of the file. |
+| GoogleThreatIntelligence.Submission.SSDeep | String | SSDeep of the file. |
+| GoogleThreatIntelligence.Submission.Size | String | Size of the file. |
+| GoogleThreatIntelligence.Submission.Type | String | Type of the file. |
+
+#### Command Example
+
+```!gti-privatescanning-file-scan entryID=example-entry-id```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Submission": {
+ "type": "private_analysis",
+ "id": "example-analysis-id",
+ "EntryID": "example-entry-id",
+ "Extension": "txt",
+ "Info": "ASCII text, with no line terminators",
+ "MD5": "Example_md5",
+ "Name": "Testing.txt",
+ "SHA1": "Example_sha1",
+ "SHA256": "Example_sha256",
+ "SHA512": "Example_sha512",
+ "SSDeep": "Example ssdeep",
+ "Size": "71 bytes",
+ "Type": "text/plain; charset=utf-8"
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### The file has been submitted "Testing.txt"
+>
+>|id|EntryID|MD5|SHA1|SHA256|
+>|---|---|---|---|---|
+>| example-analysis-id | example-entry-id | Example_md5 | Example_sha1 | Example_sha256 |
+
+
+### gti-privatescanning-analysis-get
+
+***
+Get analysis of a private file submitted to GoogleThreatIntelligence.
+
+#### Base Command
+
+`gti-privatescanning-analysis-get`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| id | ID of the analysis. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| GoogleThreatIntelligence.Analysis.data.attributes.date | Number | Date of the analysis in epoch format. |
+| GoogleThreatIntelligence.Analysis.data.attributes.status | String | Status of the analysis. |
+| GoogleThreatIntelligence.Analysis.data.attributes.threat_severity_level | String | Threat severity level of the private file. |
+| GoogleThreatIntelligence.Analysis.data.attributes.popular_threat_category | String | Popular threat category of the private file. |
+| GoogleThreatIntelligence.Analysis.data.attributes.threat_verdict | String | Threat verdict of the private file. |
+| GoogleThreatIntelligence.Analysis.data.id | String | ID of the analysis. |
+| GoogleThreatIntelligence.Analysis.data.type | String | Type of object (analysis). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.sha256 | String | SHA-256 hash of the file (if it is a file). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.sha1 | String | SHA-1 hash of the file (if it is a file). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.md5 | String | MD5 hash of the file (if it is a file). |
+| GoogleThreatIntelligence.Analysis.meta.file_info.size | Number | Size of the file (if it is a file). |
+| GoogleThreatIntelligence.Analysis.id | String | The analysis ID. |
+
+#### Command Example
+
+```!gti-privatescanning-analysis-get id=example-analysis-id```
+
+#### Context Example
+
+```json
+{
+ "GoogleThreatIntelligence": {
+ "Analysis": {
+ "id": "example-analysis-id",
+ "meta": {
+ "file_info": {
+ "sha256": "Example_sha256",
+ "sha1": "Example_sha1",
+ "md5": "Example_md5",
+ "size": 48
+ }
+ },
+ "data": {
+ "attributes": {
+ "date": 1681461324,
+ "status": "completed",
+ "threat_severity_level": "SEVERITY_HIGH",
+ "popular_threat_category": "trojan",
+ "threat_verdict": "VERDICT_MALICIOUS"
+ },
+ "type": "private_analysis",
+ "id": "example-analysis-id"
+ }
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Analysis results
+>
+>|Id|Threat Severity Level|Popular Threat Category|Threat Verdict|Status|
+>|---|---|---|---|---|
+>| example-analysis-id | HIGH | trojan | MALICIOUS | completed |
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/command_examples.txt b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/command_examples.txt
new file mode 100644
index 000000000000..2a9eadcd05bb
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/command_examples.txt
@@ -0,0 +1,16 @@
+!domain domain=example.com
+!url url=https://example.com
+!ip ip=1.1.1.1
+!file file=6bcae8ceb7f8b3a503c321085d59d7441c2ae87220f7e7170fec91098d99bb7e
+!gti-analysis-get id=u-20694f234fbac92b1dcc16f424aa1c85e9dd7af75b360745df6484dcae410853-1613980758
+!gti-file-sandbox-report file=2b294b3499d1cce794badffc959b7618
+!url-scan url=https://example.com
+!file-scan entryID=VyoASWK4aRCWLS8T3Jc7EL@2c18b8c3-8f96-458e-8849-39fc741e78fa
+!file-rescan file=6bcae8ceb7f8b3a503c321085d59d7441c2ae87220f7e7170fec91098d99bb7e
+!gti-comments-add resource=paloaltonetworks.com resource_type=domain comment="this is a comment"
+!gti-comments-get resource=https://paloaltonetworks.com
+!gti-comments-get-by-id id=d-paloaltonetworks.com-64591897
+!gti-search query=paloaltonetworks.com
+!gti-passive-dns-data ip=1.1.1.1
+!gti-file-scan-upload-url
+!gti-assessment-get resource=6bcae8ceb7f8b3a503c321085d59d7441c2ae87220f7e7170fec91098d99bb7e
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/comminicating_files.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/comminicating_files.json
new file mode 100644
index 000000000000..5e6648aba153
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/comminicating_files.json
@@ -0,0 +1,193 @@
+{
+ "data": [
+ {
+ "attributes": {
+ "authentihash": ":hash:",
+ "creation_date": 1488326400,
+ "downloadable": true,
+ "exiftool": {
+ "CharacterSet": "Windows, Latin1",
+ "CodeSize": "7248896",
+ "CompanyName": "Bitcoin",
+ "EntryPoint": "0x14d0",
+ "FileDescription": "bitcoind (Bitcoin node with a JSON-RPC server)",
+ "FileFlagsMask": "0x0000",
+ "FileOS": "Windows NT 32-bit",
+ "FileSubtype": "0",
+ "FileType": "Win64 EXE",
+ "FileTypeExtension": "exe",
+ "FileVersion": ":version:",
+ "FileVersionNumber": ":version:",
+ "ImageFileCharacteristics": "Executable, No line numbers, No symbols, Large address aware, No debug",
+ "ImageVersion": "0.0",
+ "InitializedDataSize": "8889344",
+ "InternalName": "bitcoind",
+ "LanguageCode": "English (U.S.)",
+ "LegalCopyright": "",
+ "LegalTrademarks1": "",
+ "LinkerVersion": "2.23",
+ "MIMEType": "application/octet-stream",
+ "MachineType": "AMD AMD64",
+ "OSVersion": "4.0",
+ "ObjectFileType": "Executable application",
+ "OriginalFileName": "bitcoind.exe",
+ "PEType": "PE32+",
+ "ProductName": "bitcoind",
+ "ProductVersion": ":version:",
+ "ProductVersionNumber": ":version:",
+ "Subsystem": "Windows command line",
+ "SubsystemVersion": "5.2",
+ "TimeStamp": "2017:03:01 00:00:00+00:00",
+ "UninitializedDataSize": "366592"
+ },
+ "first_submission_date": 1492547503,
+ "last_analysis_date": 1612764864,
+ "last_analysis_results": {
+ "ALYac": {
+ "category": "undetected",
+ "engine_name": "ALYac",
+ "engine_update": "20210208",
+ "engine_version": ":version:",
+ "method": "blacklist",
+ "result": null
+ },
+ "APEX": {
+ "category": "undetected",
+ "engine_name": "APEX",
+ "engine_update": "20210207",
+ "engine_version": "6.129",
+ "method": "blacklist",
+ "result": null
+ },
+ "AVG": {
+ "category": "malicious",
+ "engine_name": "AVG",
+ "engine_update": "20210208",
+ "engine_version": "21.1.5827.0",
+ "method": "blacklist",
+ "result": "FileRepMalware [PUP]"
+ }
+ }
+ },
+ "last_analysis_stats": {
+ "confirmed-timeout": 0,
+ "failure": 0,
+ "harmless": 0,
+ "malicious": 6,
+ "suspicious": 0,
+ "timeout": 0,
+ "type-unsupported": 5,
+ "undetected": 64
+ },
+ "last_modification_date": 1612855184,
+ "last_submission_date": 1492547503,
+ "magic": "PE32+ executable for MS Windows (console) Mono/.Net assembly",
+ "md5": ":md5:",
+ "meaningful_name": "bitcoind.exe",
+ "names": [
+ "bitcoind",
+ "bitcoind.exe",
+ ":md5:.virus"
+ ],
+ "pe_info": {
+ "entry_point": 5328,
+ "exports": [
+ "secp256k1_nonce_function_default",
+ "secp256k1_nonce_function_rfc6979"
+ ],
+ "imphash": ":imphash:",
+ "import_list": [
+ {
+ "imported_functions": [
+ "GetDeviceCaps",
+ "DeleteDC",
+ "GetBitmapBits",
+ "BitBlt",
+ "SelectObject",
+ "GetObjectW",
+ "CreateDCW",
+ "CreateCompatibleDC",
+ "DeleteObject",
+ "CreateCompatibleBitmap"
+ ],
+ "library_name": "GDI32.dll"
+ }
+ ],
+ "machine_type": 34404,
+ "resource_details": [
+ {
+ "chi2": 83899.2734375,
+ "entropy": 3.453192949295044,
+ "filetype": "Data",
+ "lang": "ENGLISH US",
+ "sha256": ":sha256:",
+ "type": "RT_VERSION"
+ }
+ ],
+ "resource_langs": {
+ "ENGLISH US": 1
+ },
+ "resource_types": {
+ "RT_VERSION": 1
+ },
+ "sections": [
+ {
+ "chi2": 82273200.0,
+ "entropy": 5.97,
+ "flags": "rx",
+ "md5": ":md5:",
+ "name": ".text",
+ "raw_size": 7248896,
+ "virtual_address": 4096,
+ "virtual_size": 7248680
+ }
+ ],
+ "timestamp": 1488326400
+ },
+ "reputation": 0,
+ "sandbox_verdicts": {
+ "C2AE": {
+ "category": "undetected",
+ "malware_classification": [
+ "UNKNOWN_VERDICT"
+ ],
+ "sandbox_name": "C2AE"
+ }
+ },
+ "sha1": ":sha1:",
+ "sha256": ":sha256:",
+ "signature_info": {
+ "copyright": "2009-2017 The Bitcoin Core Developers",
+ "description": "bitcoind (Bitcoin node with a JSON-RPC server)",
+ "file version": ":version:",
+ "internal name": "bitcoind",
+ "original name": "bitcoind.exe",
+ "product": "bitcoind"
+ },
+ "size": 8890368,
+ "ssdeep": ":ssdeep:",
+ "tags": [
+ "64bits",
+ "peexe",
+ "assembly"
+ ],
+ "times_submitted": 1,
+ "tlsh": ":tlsh:",
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "trid": [
+ {
+ "file_type": "Microsoft Visual C++ compiled executable (generic)",
+ "probability": 41.1
+ }
+ ],
+ "type_description": "Win32 EXE",
+ "type_extension": "exe",
+ "type_tag": "peexe",
+ "unique_sources": 1,
+ "vhash": ":vhash:"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain.json
new file mode 100644
index 000000000000..02f418072ffe
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain.json
@@ -0,0 +1,121 @@
+{
+ "data": {
+ "relationships": {
+ "siblings": {
+ "data": [
+ {
+ "type": "domain",
+ "id": "test1.pw"
+ },
+ {
+ "type": "domain",
+ "id": "test2.pw"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/relationships/siblings?limit=20",
+ "related": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/siblings"
+ }
+ },
+ "subdomains": {
+ "data": [
+ {
+ "type": "domain",
+ "id": "test3.pw"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/relationships/subdomains?limit=20",
+ "related": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/subdomains"
+ }
+ }
+ },
+ "attributes": {
+ "categories": {
+ "Comodo Valkyrie Verdict": "media sharing",
+ "Dr.Web": "known infection source",
+ "Forcepoint ThreatSeeker": "information technology",
+ "sophos": "malware callhome, command and control"
+ },
+ "creation_date": 1485015979,
+ "favicon": {
+ "dhash": "f4cca89496a0ccb2",
+ "raw_md5": "6eb4a43cb64c97f76562af703893c8fd"
+ },
+ "jarm": "29d21b20d29d29d21c41d21b21b41d494e0df9532e75299f15ba73156cee38",
+ "last_analysis_results": {
+ "ADMINUSLabs": {
+ "category": "harmless",
+ "engine_name": "ADMINUSLabs",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "AICC (MONITORAPP)": {
+ "category": "harmless",
+ "engine_name": "AICC (MONITORAPP)",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "AegisLab WebGuard": {
+ "category": "harmless",
+ "engine_name": "AegisLab WebGuard",
+ "method": "blacklist",
+ "result": "clean"
+ }
+ },
+ "last_analysis_stats": {
+ "harmless": 83,
+ "malicious": 8,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 7
+ },
+ "last_dns_records": [
+ {
+ "ttl": 3599,
+ "type": "A",
+ "value": ":ip:"
+ }
+ ],
+ "last_dns_records_date": 1612828461,
+ "last_modification_date": 1612828461,
+ "last_update_date": 1488837159,
+ "popularity_ranks": {},
+ "registrar": "Namecheap",
+ "reputation": 0,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "whois": "Domain Name: www.example.com\nRegistry Domain ID: D42904076-CNIC\nRegistrar WHOIS Server: whois.example.com\nUpdated Date: 2017-03-06T21:52:39.0Z\nCreation Date: 2017-01-21T16:26:19.0Z\nRegistry Expiry Date: 2018-01-21T23:59:59.0Z\nRegistrar: Namecheap\nRegistrar IANA ID: 1068\nDomain Status: serverHold https://icann.org/epp#serverHold\nDomain Status: clientTransferProhibited https://icann.org/epp#clientTransferProhibited\nRegistry Registrant ID: C113380656-CNIC\nRegistrant Country: PA\nRegistrant Email: [REDACTED]@whoisguard.com\nRegistry Admin ID: C113380651-CNIC\nAdmin Organization: WhoisGuard, Inc.\nAdmin City: Panama\nAdmin State/Province: Panama\nAdmin Country: PA\nAdmin Email: [REDACTED]@example.com\nRegistry Tech ID: :tech-id:\nTech Organization: WhoisGuard, Inc.\nTech City: Panama\nTech State/Province: Panama\nTech Country: PA\nTech Email: [REDACTED]@whoisguard.com\nName Server: PDNS1.EXAMPLE.COM\nName Server: PDNS2.EXAMPLE.COM\nDNSSEC: unsigned\nRegistry Billing ID: C113380652-CNIC\nBilling Organization: WhoisGuard, Inc.\nBilling City: Panama\nBilling State/Province: Panama\nBilling Country: PA\nBilling Email: [REDACTED]@whoisguard.com\nRegistrar Abuse Contact Email: abuse@example.com\nRegistrar Abuse Contact Phone: +1.6613102107\nDomain name: :domain_id:\nRegistrar URL: http://www.example.com\nUpdated Date: 2017-01-21T16:26:23.00Z\nCreation Date: 2017-01-21T16:26:19.00Z\nRegistrar Registration Expiration Date: 2018-01-21T23:59:59.00Z\nRegistrar: NAMECHEAP INC\nDomain Status: serverTransferProhibited https://icann.org/epp#serverTransferProhibited\nDomain Status: addPeriod https://icann.org/epp#addPeriod\nRegistry Registrant ID: :id:\nRegistry Admin ID: :id:\nRegistry Tech ID: :id:\nName Server: pdns1.EXAMPLE.COM\nName Server: pdns2.EXAMPLE.COM",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "www.example.com",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/www.example.com"
+ },
+ "type": "domain"
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_assessment_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_assessment_results.json
new file mode 100644
index 000000000000..6faf1d2e0ec6
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_assessment_results.json
@@ -0,0 +1,31 @@
+{
+ "id": "www.example.com",
+ "type": "domain",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/www.example.com"
+ },
+ "attributes": {
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_results.json
new file mode 100644
index 000000000000..7b5ee4d886ac
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/domain_results.json
@@ -0,0 +1,92 @@
+{
+ "relationships": {
+ "siblings": {
+ "data": [{
+ "type": "domain",
+ "id": "test1.pw"
+ }, {
+ "type": "domain",
+ "id": "test2.pw"
+ }],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/relationships/siblings?limit=20",
+ "related": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/siblings"
+ }
+ },
+ "subdomains": {
+ "data": [{
+ "type": "domain",
+ "id": "test3.pw"
+ }],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/relationships/subdomains?limit=20",
+ "related": "https://www.virustotal.com/api/v3/domains/down.mykings.pw/subdomains"
+ }
+ }
+ },
+ "attributes": {
+ "categories": {
+ "Comodo Valkyrie Verdict": "media sharing",
+ "Dr.Web": "known infection source",
+ "Forcepoint ThreatSeeker": "information technology",
+ "sophos": "malware callhome, command and control"
+ },
+ "creation_date": 1485015979,
+ "favicon": {
+ "dhash": "f4cca89496a0ccb2",
+ "raw_md5": "6eb4a43cb64c97f76562af703893c8fd"
+ },
+ "jarm": "29d21b20d29d29d21c41d21b21b41d494e0df9532e75299f15ba73156cee38",
+ "last_analysis_stats": {
+ "harmless": 83,
+ "malicious": 8,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 7
+ },
+ "last_dns_records": [{
+ "ttl": 3599,
+ "type": "A",
+ "value": ":ip:"
+ }],
+ "last_dns_records_date": 1612828461,
+ "last_modification_date": 1612828461,
+ "last_update_date": 1488837159,
+ "popularity_ranks": {},
+ "registrar": "Namecheap",
+ "reputation": 0,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "whois": "Domain Name: www.example.com\nRegistry Domain ID: D42904076-CNIC\nRegistrar WHOIS Server: whois.example.com\nUpdated Date: 2017-03-06T21:52:39.0Z\nCreation Date: 2017-01-21T16:26:19.0Z\nRegistry Expiry Date: 2018-01-21T23:59:59.0Z\nRegistrar: Namecheap\nRegistrar IANA ID: 1068\nDomain Status: serverHold https://icann.org/epp#serverHold\nDomain Status: clientTransferProhibited https://icann.org/epp#clientTransferProhibited\nRegistry Registrant ID: C113380656-CNIC\nRegistrant Country: PA\nRegistrant Email: [REDACTED]@whoisguard.com\nRegistry Admin ID: C113380651-CNIC\nAdmin Organization: WhoisGuard, Inc.\nAdmin City: Panama\nAdmin State/Province: Panama\nAdmin Country: PA\nAdmin Email: [REDACTED]@example.com\nRegistry Tech ID: :tech-id:\nTech Organization: WhoisGuard, Inc.\nTech City: Panama\nTech State/Province: Panama\nTech Country: PA\nTech Email: [REDACTED]@whoisguard.com\nName Server: PDNS1.EXAMPLE.COM\nName Server: PDNS2.EXAMPLE.COM\nDNSSEC: unsigned\nRegistry Billing ID: C113380652-CNIC\nBilling Organization: WhoisGuard, Inc.\nBilling City: Panama\nBilling State/Province: Panama\nBilling Country: PA\nBilling Email: [REDACTED]@whoisguard.com\nRegistrar Abuse Contact Email: abuse@example.com\nRegistrar Abuse Contact Phone: +1.6613102107\nDomain name: :domain_id:\nRegistrar URL: http://www.example.com\nUpdated Date: 2017-01-21T16:26:23.00Z\nCreation Date: 2017-01-21T16:26:19.00Z\nRegistrar Registration Expiration Date: 2018-01-21T23:59:59.00Z\nRegistrar: NAMECHEAP INC\nDomain Status: serverTransferProhibited https://icann.org/epp#serverTransferProhibited\nDomain Status: addPeriod https://icann.org/epp#addPeriod\nRegistry Registrant ID: :id:\nRegistry Admin ID: :id:\nRegistry Tech ID: :id:\nName Server: pdns1.EXAMPLE.COM\nName Server: pdns2.EXAMPLE.COM",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "www.example.com",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/domains/www.example.com"
+ },
+ "type": "domain"
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file.json
new file mode 100644
index 000000000000..826faaca88aa
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file.json
@@ -0,0 +1,151 @@
+{
+ "data": {
+ "type": "file",
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000"
+ },
+ "relationships": {
+ "contacted_domains": {
+ "data": [
+ {
+ "type": "domain",
+ "id": "test.org"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/relationships/contacted_domains?limit=20",
+ "related": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/contacted_domains"
+ }
+ }
+ },
+ "attributes": {
+ "first_seen_itw_date": 1075654056,
+ "first_submission_date": 1170892383,
+ "last_analysis_date": 1502355193,
+ "last_analysis_results": {
+ "AVG": {
+ "category": "undetected",
+ "engine_name": "AVG",
+ "engine_update": "20170810",
+ "engine_version": "8.0.1489.320",
+ "method": "blacklist",
+ "result": null
+ }
+ },
+ "last_analysis_stats": {
+ "harmless": 0,
+ "malicious": 0,
+ "suspicious": 0,
+ "timeout": 0,
+ "type-unsupported": 8,
+ "undetected": 59
+ },
+ "last_submission_date": 1502355193,
+ "magic": "data",
+ "md5": ":md5:",
+ "names": [
+ "zipnew.dat",
+ "327916-1502345099.zip",
+ "ac3plug.zip",
+ "IMG_6937.zip",
+ "DOC952.zip",
+ "20170801486960.zip"
+ ],
+ "nsrl_info": {
+ "filenames": [
+ "WINDOWS DIALUP.ZIP",
+ "kemsetup.ZIP",
+ "Data_Linux.zip",
+ "2003.zip",
+ "_6A271FB199E041FC82F4D282E68B01D6"
+ ],
+ "products": [
+ "Master Hacker Internet Terrorism (Core Publishing Inc.)",
+ "Read Rabbits Math Ages 6-9 (Smart Saver)",
+ "Neverwinter Nights Gold (Atari)",
+ "Limited Edition Print Workshop 2004 (ValuSoft)",
+ "Crysis (Electronic Arts Inc.)"
+ ]
+ },
+ "reputation": -889,
+ "sha1": ":sha1:",
+ "sha256": "0000000000000000000000000000000000000000000000000000000000000000",
+ "size": 22,
+ "ssdeep": ":ssdeep:",
+ "tags": [
+ "software-collection",
+ "nsrl",
+ "attachment",
+ "trusted",
+ "via-tor"
+ ],
+ "times_submitted": 26471,
+ "total_votes": {
+ "harmless": 639,
+ "malicious": 958
+ },
+ "trid": [
+ {
+ "file_type": "ZIP compressed archive (empty)",
+ "probability": 100
+ }
+ ],
+ "trusted_verdict": {
+ "filename": "somefile.zip",
+ "link": "example.com/somefile.zip",
+ "organization": "Google",
+ "verdict": "goodware"
+ },
+ "type_description": "unknown",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ },
+ "sigma_analysis_stats": {
+ "high": 0,
+ "medium": 1,
+ "critical": 0,
+ "low": 1
+ },
+ "sigma_analysis_results": [
+ {
+ "match_context": [],
+ "rule_level": "medium",
+ "rule_description": "Description 1",
+ "rule_source": "Source 1",
+ "rule_title": "Title 1",
+ "rule_id": "random_id_1",
+ "rule_author": "Author 1"
+ },
+ {
+ "match_context": [],
+ "rule_level": "low",
+ "rule_description": "Description 2",
+ "rule_source": "Source 2",
+ "rule_title": "Title 2",
+ "rule_id": "random_id_2",
+ "rule_author": "Author 2"
+ }
+ ]
+ }
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_assessment_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_assessment_results.json
new file mode 100644
index 000000000000..d5364d8bcdb3
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_assessment_results.json
@@ -0,0 +1,31 @@
+{
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "type": "file",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000"
+ },
+ "attributes": {
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_extended_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_extended_results.json
new file mode 100644
index 000000000000..b0e4a2194f5f
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_extended_results.json
@@ -0,0 +1,149 @@
+{
+ "type": "file",
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000"
+ },
+ "relationships": {
+ "contacted_domains": {
+ "data": [
+ {
+ "type": "domain",
+ "id": "test.org"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/relationships/contacted_domains?limit=20",
+ "related": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/contacted_domains"
+ }
+ }
+ },
+ "attributes": {
+ "first_seen_itw_date": 1075654056,
+ "first_submission_date": 1170892383,
+ "last_analysis_date": 1502355193,
+ "last_analysis_results": {
+ "AVG": {
+ "category": "undetected",
+ "engine_name": "AVG",
+ "engine_update": "20170810",
+ "engine_version": "8.0.1489.320",
+ "method": "blacklist",
+ "result": null
+ }
+ },
+ "last_analysis_stats": {
+ "harmless": 0,
+ "malicious": 0,
+ "suspicious": 0,
+ "timeout": 0,
+ "type-unsupported": 8,
+ "undetected": 59
+ },
+ "last_submission_date": 1502355193,
+ "magic": "data",
+ "md5": ":md5:",
+ "names": [
+ "zipnew.dat",
+ "327916-1502345099.zip",
+ "ac3plug.zip",
+ "IMG_6937.zip",
+ "DOC952.zip",
+ "20170801486960.zip"
+ ],
+ "nsrl_info": {
+ "filenames": [
+ "WINDOWS DIALUP.ZIP",
+ "kemsetup.ZIP",
+ "Data_Linux.zip",
+ "2003.zip",
+ "_6A271FB199E041FC82F4D282E68B01D6"
+ ],
+ "products": [
+ "Master Hacker Internet Terrorism (Core Publishing Inc.)",
+ "Read Rabbits Math Ages 6-9 (Smart Saver)",
+ "Neverwinter Nights Gold (Atari)",
+ "Limited Edition Print Workshop 2004 (ValuSoft)",
+ "Crysis (Electronic Arts Inc.)"
+ ]
+ },
+ "reputation": -889,
+ "sha1": ":sha1:",
+ "sha256": "0000000000000000000000000000000000000000000000000000000000000000",
+ "size": 22,
+ "ssdeep": ":ssdeep:",
+ "tags": [
+ "software-collection",
+ "nsrl",
+ "attachment",
+ "trusted",
+ "via-tor"
+ ],
+ "times_submitted": 26471,
+ "total_votes": {
+ "harmless": 639,
+ "malicious": 958
+ },
+ "trid": [
+ {
+ "file_type": "ZIP compressed archive (empty)",
+ "probability": 100
+ }
+ ],
+ "trusted_verdict": {
+ "filename": "somefile.zip",
+ "link": "example.com/somefile.zip",
+ "organization": "Google",
+ "verdict": "goodware"
+ },
+ "type_description": "unknown",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ },
+ "sigma_analysis_stats": {
+ "high": 0,
+ "medium": 1,
+ "critical": 0,
+ "low": 1
+ },
+ "sigma_analysis_results": [
+ {
+ "match_context": [],
+ "rule_level": "medium",
+ "rule_description": "Description 1",
+ "rule_source": "Source 1",
+ "rule_title": "Title 1",
+ "rule_id": "random_id_1",
+ "rule_author": "Author 1"
+ },
+ {
+ "match_context": [],
+ "rule_level": "low",
+ "rule_description": "Description 2",
+ "rule_source": "Source 2",
+ "rule_title": "Title 2",
+ "rule_id": "random_id_2",
+ "rule_author": "Author 2"
+ }
+ ]
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_results.json
new file mode 100644
index 000000000000..1f2f43e067ab
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/file_results.json
@@ -0,0 +1,139 @@
+{
+ "type": "file",
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000"
+ },
+ "relationships": {
+ "contacted_domains": {
+ "data": [
+ {
+ "type": "domain",
+ "id": "test.org"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/relationships/contacted_domains?limit=20",
+ "related": "https://www.virustotal.com/api/v3/files/0000000000000000000000000000000000000000000000000000000000000000/contacted_domains"
+ }
+ }
+ },
+ "attributes": {
+ "first_seen_itw_date": 1075654056,
+ "first_submission_date": 1170892383,
+ "last_analysis_date": 1502355193,
+ "last_analysis_stats": {
+ "harmless": 0,
+ "malicious": 0,
+ "suspicious": 0,
+ "timeout": 0,
+ "type-unsupported": 8,
+ "undetected": 59
+ },
+ "last_submission_date": 1502355193,
+ "magic": "data",
+ "md5": ":md5:",
+ "names": [
+ "zipnew.dat",
+ "327916-1502345099.zip",
+ "ac3plug.zip",
+ "IMG_6937.zip",
+ "DOC952.zip",
+ "20170801486960.zip"
+ ],
+ "nsrl_info": {
+ "filenames": [
+ "WINDOWS DIALUP.ZIP",
+ "kemsetup.ZIP",
+ "Data_Linux.zip",
+ "2003.zip",
+ "_6A271FB199E041FC82F4D282E68B01D6"
+ ],
+ "products": [
+ "Master Hacker Internet Terrorism (Core Publishing Inc.)",
+ "Read Rabbits Math Ages 6-9 (Smart Saver)",
+ "Neverwinter Nights Gold (Atari)",
+ "Limited Edition Print Workshop 2004 (ValuSoft)",
+ "Crysis (Electronic Arts Inc.)"
+ ]
+ },
+ "reputation": -889,
+ "sha1": ":sha1:",
+ "sha256": "0000000000000000000000000000000000000000000000000000000000000000",
+ "size": 22,
+ "ssdeep": ":ssdeep:",
+ "tags": [
+ "software-collection",
+ "nsrl",
+ "attachment",
+ "trusted",
+ "via-tor"
+ ],
+ "times_submitted": 26471,
+ "total_votes": {
+ "harmless": 639,
+ "malicious": 958
+ },
+ "trid": [
+ {
+ "file_type": "ZIP compressed archive (empty)",
+ "probability": 100
+ }
+ ],
+ "trusted_verdict": {
+ "filename": "somefile.zip",
+ "link": "example.com/somefile.zip",
+ "organization": "Google",
+ "verdict": "goodware"
+ },
+ "type_description": "unknown",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ },
+ "sigma_analysis_stats": {
+ "high": 0,
+ "medium": 1,
+ "critical": 0,
+ "low": 1
+ },
+ "sigma_analysis_results": [
+ {
+ "match_context": [],
+ "rule_level": "medium",
+ "rule_description": "Description 1",
+ "rule_source": "Source 1",
+ "rule_title": "Title 1",
+ "rule_id": "random_id_1",
+ "rule_author": "Author 1"
+ },
+ {
+ "match_context": [],
+ "rule_level": "low",
+ "rule_description": "Description 2",
+ "rule_source": "Source 2",
+ "rule_title": "Title 2",
+ "rule_id": "random_id_2",
+ "rule_author": "Author 2"
+ }
+ ]
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip.json
new file mode 100644
index 000000000000..3c5d344b917f
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip.json
@@ -0,0 +1,76 @@
+{
+ "data": {
+ "attributes": {
+ "as_owner": "EMERALD-ONION",
+ "asn": 396507,
+ "continent": "NA",
+ "country": "US",
+ "jarm": ":jarm:",
+ "last_analysis_results": {
+ "ADMINUSLabs": {
+ "category": "harmless",
+ "engine_name": "ADMINUSLabs",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "GreenSnow": {
+ "category": "malicious",
+ "engine_name": "GreenSnow",
+ "method": "blacklist",
+ "result": "malicious"
+ },
+ "Snort IP sample list": {
+ "category": "suspicious",
+ "engine_name": "Snort IP sample list",
+ "method": "blacklist",
+ "result": "suspicious"
+ }
+ },
+ "last_analysis_stats": {
+ "harmless": 72,
+ "malicious": 5,
+ "suspicious": 2,
+ "timeout": 0,
+ "undetected": 8
+ },
+ "last_modification_date": 1613300914,
+ "network": ":cidr:",
+ "regional_internet_registry": "ARIN",
+ "reputation": -4,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 1
+ },
+ "whois": "NetRange: :range:\nCIDR: :cidr:\nNetName: ENCRYPTED-TRANSIT-IPV4\nNetHandle: :net-handle:\nParent: :parent:\nNetType: Direct Allocation\nOriginAS: :oas:\nOrganization: :org name: (:org-id:)\nRegDate: 2017-07-19\nUpdated: 2017-07-19\nComment: :comment:\nRef: :ref:\nOrgName: :org name:\nOrgId: :org-id:\nAddress: 815 1st Ave # 331\nCity: Seattle\nStateProv: WA\nPostalCode: :code:\nCountry: US\nRegDate: 2017-06-20\nUpdated: 2018-11-15\nRef: :ref:\nOrgTechHandle: :handle:\nOrgTechName: Technical Support\nOrgTechPhone: :phone: \nOrgTechEmail: tech@example.com\nOrgTechRef: :ref:/:handle:\nOrgAbuseHandle: :abuse-handle:\nOrgAbuseName: Abuse Management\nOrgAbusePhone: :phone: \nOrgAbuseEmail: abuse@example.com\nOrgAbuseRef: :ref:/:abuse-handle:\nOrgNOCHandle: NETWO8737-ARIN\nOrgNOCName: Network Operations\nOrgNOCPhone: :phone: \nOrgNOCEmail: noc@example.com\nOrgNOCRef: :ref::ref\n",
+ "whois_date": 1611870274,
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "8.8.8.8",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/8.8.8.8"
+ },
+ "type": "ip_address"
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_assessment_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_assessment_results.json
new file mode 100644
index 000000000000..aced60a242ae
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_assessment_results.json
@@ -0,0 +1,31 @@
+{
+ "id": "8.8.8.8",
+ "type": "ip_address",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/8.8.8.8"
+ },
+ "attributes": {
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_results.json
new file mode 100644
index 000000000000..53a6889772b4
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/ip_results.json
@@ -0,0 +1,54 @@
+{
+ "attributes": {
+ "as_owner": "EMERALD-ONION",
+ "asn": 396507,
+ "continent": "NA",
+ "country": "US",
+ "jarm": ":jarm:",
+ "last_analysis_stats": {
+ "harmless": 72,
+ "malicious": 5,
+ "suspicious": 2,
+ "timeout": 0,
+ "undetected": 8
+ },
+ "last_modification_date": 1613300914,
+ "network": ":cidr:",
+ "regional_internet_registry": "ARIN",
+ "reputation": -4,
+ "tags": [],
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 1
+ },
+ "whois": "NetRange: :range:\nCIDR: :cidr:\nNetName: ENCRYPTED-TRANSIT-IPV4\nNetHandle: :net-handle:\nParent: :parent:\nNetType: Direct Allocation\nOriginAS: :oas:\nOrganization: :org name: (:org-id:)\nRegDate: 2017-07-19\nUpdated: 2017-07-19\nComment: :comment:\nRef: :ref:\nOrgName: :org name:\nOrgId: :org-id:\nAddress: 815 1st Ave # 331\nCity: Seattle\nStateProv: WA\nPostalCode: :code:\nCountry: US\nRegDate: 2017-06-20\nUpdated: 2018-11-15\nRef: :ref:\nOrgTechHandle: :handle:\nOrgTechName: Technical Support\nOrgTechPhone: :phone: \nOrgTechEmail: tech@example.com\nOrgTechRef: :ref:/:handle:\nOrgAbuseHandle: :abuse-handle:\nOrgAbuseName: Abuse Management\nOrgAbusePhone: :phone: \nOrgAbuseEmail: abuse@example.com\nOrgAbuseRef: :ref:/:abuse-handle:\nOrgNOCHandle: NETWO8737-ARIN\nOrgNOCName: Network Operations\nOrgNOCPhone: :phone: \nOrgNOCEmail: noc@example.com\nOrgNOCRef: :ref::ref\n",
+ "whois_date": 1611870274,
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "8.8.8.8",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/8.8.8.8"
+ },
+ "type": "ip_address"
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip.json
new file mode 100644
index 000000000000..2c19a1007382
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip.json
@@ -0,0 +1,61 @@
+{
+ "data": [
+ {
+ "id": "8.8.8.8franttini.com.ua",
+ "type": "resolution",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/resolutions/8.8.8.8franttini.com.ua"
+ },
+ "attributes": {
+ "ip_address": "8.8.8.8",
+ "resolver": "VirusTotal",
+ "date": 1715236628,
+ "host_name": "franttini.com.ua",
+ "host_name_last_analysis_stats": {
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 26,
+ "harmless": 64,
+ "timeout": 0
+ },
+ "ip_address_last_analysis_stats": {
+ "malicious": 1,
+ "suspicious": 0,
+ "undetected": 23,
+ "harmless": 66,
+ "timeout": 0
+ }
+ }
+ },
+ {
+ "id": "8.8.8.8pxl.pl",
+ "type": "resolution",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/resolutions/8.8.8.8pxl.pl"
+ },
+ "attributes": {
+ "ip_address": "8.8.8.8",
+ "resolver": "VirusTotal",
+ "date": 1715231875,
+ "host_name": "pxl.pl",
+ "host_name_last_analysis_stats": {
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 53,
+ "harmless": 37,
+ "timeout": 0
+ },
+ "ip_address_last_analysis_stats": {
+ "malicious": 1,
+ "suspicious": 0,
+ "undetected": 23,
+ "harmless": 66,
+ "timeout": 0
+ }
+ }
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/8.8.8.8/resolutions"
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip_results.json
new file mode 100644
index 000000000000..0b15a0c5e950
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/passive_dns_ip_results.json
@@ -0,0 +1,56 @@
+[
+ {
+ "id": "8.8.8.8franttini.com.ua",
+ "type": "resolution",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/resolutions/8.8.8.8franttini.com.ua"
+ },
+ "attributes": {
+ "ip_address": "8.8.8.8",
+ "resolver": "VirusTotal",
+ "date": 1715236628,
+ "host_name": "franttini.com.ua",
+ "host_name_last_analysis_stats": {
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 26,
+ "harmless": 64,
+ "timeout": 0
+ },
+ "ip_address_last_analysis_stats": {
+ "malicious": 1,
+ "suspicious": 0,
+ "undetected": 23,
+ "harmless": 66,
+ "timeout": 0
+ }
+ }
+ },
+ {
+ "id": "8.8.8.8pxl.pl",
+ "type": "resolution",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/resolutions/8.8.8.8pxl.pl"
+ },
+ "attributes": {
+ "ip_address": "8.8.8.8",
+ "resolver": "VirusTotal",
+ "date": 1715231875,
+ "host_name": "pxl.pl",
+ "host_name_last_analysis_stats": {
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 53,
+ "harmless": 37,
+ "timeout": 0
+ },
+ "ip_address_last_analysis_stats": {
+ "malicious": 1,
+ "suspicious": 0,
+ "undetected": 23,
+ "harmless": 66,
+ "timeout": 0
+ }
+ }
+ }
+]
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file.json
new file mode 100644
index 000000000000..d35516bfddd1
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file.json
@@ -0,0 +1,152 @@
+{
+ "data": {
+ "attributes": {
+ "type_description": "ELF",
+ "tlsh": "Example tlsh",
+ "vhash": "Example vhash",
+ "exiftool": {
+ "MIMEType": "application/octet-stream",
+ "CPUByteOrder": "Little endian",
+ "ObjectFileType": "Executable file",
+ "CPUArchitecture": "32 bit",
+ "CPUType": "i386",
+ "FileType": "ELF executable"
+ },
+ "trid": [
+ {
+ "file_type": "ELF Executable and Linkable format (Linux)",
+ "probability": 55
+ },
+ {
+ "file_type": "ELF Executable and Linkable format (generic)",
+ "probability": 45
+ }
+ ],
+ "crowdsourced_yara_results": [
+ {
+ "description": "Detects a suspicious ELF binary with UPX compression",
+ "source": "https://www.example.com",
+ "author": "Author X",
+ "ruleset_name": "gen_elf_file_anomalies",
+ "rule_name": "SUSP_ELF_LNX_UPX_Compressed_File",
+ "ruleset_id": "0224a54ba7"
+ }
+ ],
+ "threat_severity": {
+ "threat_severity_level": "SEVERITY_HIGH",
+ "threat_severity_data": {
+ "has_dropped_files_with_detections": true,
+ "type_tag": "elf",
+ "has_execution_parents_with_detections": true,
+ "can_be_detonated": true,
+ "popular_threat_category": "trojan"
+ },
+ "last_analysis_date": "1681045097",
+ "version": 1
+ },
+ "names": [
+ "private",
+ "/usr/lib/sample.so",
+ "private_sample.bin"
+ ],
+ "owner": "virustotal",
+ "type_tag": "elf",
+ "elf_info": {
+ "header": {
+ "hdr_version": "1 (current)",
+ "type": "EXEC (Executable file)",
+ "obj_version": "0x1",
+ "data": "2's complement, little endian",
+ "machine": "Intel 80386",
+ "num_section_headers": 0,
+ "os_abi": "UNIX - Linux",
+ "abi_version": 0,
+ "entrypoint": 4633,
+ "num_prog_headers": 2,
+ "class": "ELF32"
+ },
+ "packers": [
+ "upx"
+ ],
+ "segment_list": [
+ {
+ "segment_type": "LOAD"
+ }
+ ]
+ },
+ "size": 255510,
+ "type_extension": "so",
+ "threat_verdict": "VERDICT_MALICIOUS",
+ "detectiteasy": {
+ "filetype": "ELF32",
+ "values": [
+ {
+ "info": "EXEC 386-32",
+ "version": "3.05",
+ "type": "Packer",
+ "name": "UPX"
+ }
+ ]
+ },
+ "crowdsourced_ids_stats": {
+ "high": 0,
+ "info": 0,
+ "medium": 1,
+ "low": 1
+ },
+ "type_tags": [
+ "executable",
+ "linux",
+ "elf"
+ ],
+ "sandbox_verdicts": {
+ "Zenbox Linux": {
+ "category": "malicious",
+ "confidence": 81,
+ "sandbox_name": "Zenbox Linux",
+ "malware_classification": [
+ "MALWARE",
+ "TROJAN",
+ "EVADER"
+ ],
+ "malware_names": [
+ "MalwareName"
+ ]
+ }
+ },
+ "sha256": "Example_sha256_with_64_characters_000000000000000000000000000000",
+ "tags": [
+ "elf",
+ "upx"
+ ],
+ "crowdsourced_ids_results": [
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "medium",
+ "rule_msg": "Known Compromised or Hostile Host Traffic",
+ "rule_raw": "alert ip [8.8.8.8] any -> $HOME_NET any"
+ },
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "low",
+ "rule_msg": "Poor Reputation IP",
+ "rule_raw": "alert ip [1.1.1.1] any -> $HOME_NET any)"
+ }
+ ],
+ "last_analysis_date": 1681386314,
+ "ssdeep": "Example ssdeep",
+ "packers": {
+ "Gandelf": "upx"
+ },
+ "md5": "Example_md5_00000000000000000000",
+ "sha1": "Example_sha1_000000000000000000000000000",
+ "magic": "ELF 32-bit LSB executable, Intel 80386, version 1 (GNU/Linux), statically linked, stripped",
+ "meaningful_name": "private"
+ },
+ "type": "private_file",
+ "id": "Example_sha256_with_64_characters_000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/private/files/Example_sha256_with_64_characters_000000000000000000000000000000"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file_results.json
new file mode 100644
index 000000000000..f08f90873dd9
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/private_file_results.json
@@ -0,0 +1,150 @@
+{
+ "attributes": {
+ "type_description": "ELF",
+ "tlsh": "Example tlsh",
+ "vhash": "Example vhash",
+ "exiftool": {
+ "MIMEType": "application/octet-stream",
+ "CPUByteOrder": "Little endian",
+ "ObjectFileType": "Executable file",
+ "CPUArchitecture": "32 bit",
+ "CPUType": "i386",
+ "FileType": "ELF executable"
+ },
+ "trid": [
+ {
+ "file_type": "ELF Executable and Linkable format (Linux)",
+ "probability": 55
+ },
+ {
+ "file_type": "ELF Executable and Linkable format (generic)",
+ "probability": 45
+ }
+ ],
+ "crowdsourced_yara_results": [
+ {
+ "description": "Detects a suspicious ELF binary with UPX compression",
+ "source": "https://www.example.com",
+ "author": "Author X",
+ "ruleset_name": "gen_elf_file_anomalies",
+ "rule_name": "SUSP_ELF_LNX_UPX_Compressed_File",
+ "ruleset_id": "0224a54ba7"
+ }
+ ],
+ "threat_severity": {
+ "threat_severity_level": "SEVERITY_HIGH",
+ "threat_severity_data": {
+ "has_dropped_files_with_detections": true,
+ "type_tag": "elf",
+ "has_execution_parents_with_detections": true,
+ "can_be_detonated": true,
+ "popular_threat_category": "trojan"
+ },
+ "last_analysis_date": "1681045097",
+ "version": 1
+ },
+ "names": [
+ "private",
+ "/usr/lib/sample.so",
+ "private_sample.bin"
+ ],
+ "owner": "virustotal",
+ "type_tag": "elf",
+ "elf_info": {
+ "header": {
+ "hdr_version": "1 (current)",
+ "type": "EXEC (Executable file)",
+ "obj_version": "0x1",
+ "data": "2's complement, little endian",
+ "machine": "Intel 80386",
+ "num_section_headers": 0,
+ "os_abi": "UNIX - Linux",
+ "abi_version": 0,
+ "entrypoint": 4633,
+ "num_prog_headers": 2,
+ "class": "ELF32"
+ },
+ "packers": [
+ "upx"
+ ],
+ "segment_list": [
+ {
+ "segment_type": "LOAD"
+ }
+ ]
+ },
+ "size": 255510,
+ "type_extension": "so",
+ "threat_verdict": "VERDICT_MALICIOUS",
+ "detectiteasy": {
+ "filetype": "ELF32",
+ "values": [
+ {
+ "info": "EXEC 386-32",
+ "version": "3.05",
+ "type": "Packer",
+ "name": "UPX"
+ }
+ ]
+ },
+ "crowdsourced_ids_stats": {
+ "high": 0,
+ "info": 0,
+ "medium": 1,
+ "low": 1
+ },
+ "type_tags": [
+ "executable",
+ "linux",
+ "elf"
+ ],
+ "sandbox_verdicts": {
+ "Zenbox Linux": {
+ "category": "malicious",
+ "confidence": 81,
+ "sandbox_name": "Zenbox Linux",
+ "malware_classification": [
+ "MALWARE",
+ "TROJAN",
+ "EVADER"
+ ],
+ "malware_names": [
+ "MalwareName"
+ ]
+ }
+ },
+ "sha256": "Example_sha256_with_64_characters_000000000000000000000000000000",
+ "tags": [
+ "elf",
+ "upx"
+ ],
+ "crowdsourced_ids_results": [
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "medium",
+ "rule_msg": "Known Compromised or Hostile Host Traffic",
+ "rule_raw": "alert ip [8.8.8.8] any -> $HOME_NET any"
+ },
+ {
+ "rule_category": "Misc Attack",
+ "alert_severity": "low",
+ "rule_msg": "Poor Reputation IP",
+ "rule_raw": "alert ip [1.1.1.1] any -> $HOME_NET any)"
+ }
+ ],
+ "last_analysis_date": 1681386314,
+ "ssdeep": "Example ssdeep",
+ "packers": {
+ "Gandelf": "upx"
+ },
+ "md5": "Example_md5_00000000000000000000",
+ "sha1": "Example_sha1_000000000000000000000000000",
+ "magic": "ELF 32-bit LSB executable, Intel 80386, version 1 (GNU/Linux), statically linked, stripped",
+ "meaningful_name": "private"
+ },
+ "type": "private_file",
+ "id": "Example_sha256_with_64_characters_000000000000000000000000000000",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/private/files/Example_sha256_with_64_characters_000000000000000000000000000000"
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/relationships.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/relationships.json
new file mode 100644
index 000000000000..2244848eea4b
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/relationships.json
@@ -0,0 +1,42 @@
+{
+ "communicating_files": {
+ "meta": {
+ "cursor": "STIwCi4="
+ },
+ "data": [
+ {
+ "type": "file",
+ "id": "1471f53e5391f8a06171ffe21dec0d6d27b0201e28e66455030609b7a7ba955e"
+ },
+ {
+ "type": "file",
+ "id": "fedfc48d750b959d11004f56ba49815a011295824e02684aada549505378468a"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/communicating_files?limit=20",
+ "related": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/communicating_files",
+ "next": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/communicating_files?cursor=STIwCi4%3D&limit=20"
+ }
+ },
+ "referrer_files": {
+ "meta": {
+ "cursor": "STIwCi4="
+ },
+ "data": [
+ {
+ "type": "file",
+ "id": "5d4c6801a5d1c9e4d3f8317242723e17eefc7fbdfcf1b0a99fbc5b92b4b83631"
+ },
+ {
+ "type": "file",
+ "id": "e0d60d71b9ceaf6fb672cd78668e02eb5dcfd0c1d46137ed363f42f024ef6e83"
+ }
+ ],
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/referrer_files?limit=20",
+ "related": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/referrer_files",
+ "next": "https://www.virustotal.com/api/v3/ip_addresses/1.1.1.1/relationships/referrer_files?cursor=STIwCi4%3D&limit=20"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url.json
new file mode 100644
index 000000000000..99b89d8f477d
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url.json
@@ -0,0 +1,663 @@
+{
+ "data": {
+ "attributes": {
+ "categories": {
+ "Forcepoint ThreatSeeker": "newly registered websites",
+ "Sophos": "illegal phishing, phishing and fraud",
+ "Webroot": "Phishing and Other Frauds"
+ },
+ "first_submission_date": 1603275371,
+ "last_analysis_date": 1635971126,
+ "last_analysis_results": {
+ "0xSI_f33d": {
+ "category": "undetected",
+ "engine_name": "0xSI_f33d",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "ADMINUSLabs": {
+ "category": "harmless",
+ "engine_name": "ADMINUSLabs",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "AICC (MONITORAPP)": {
+ "category": "harmless",
+ "engine_name": "AICC (MONITORAPP)",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Abusix": {
+ "category": "harmless",
+ "engine_name": "Abusix",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Acronis": {
+ "category": "harmless",
+ "engine_name": "Acronis",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "AlienVault": {
+ "category": "harmless",
+ "engine_name": "AlienVault",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Antiy-AVL": {
+ "category": "harmless",
+ "engine_name": "Antiy-AVL",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Armis": {
+ "category": "harmless",
+ "engine_name": "Armis",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Artists Against 419": {
+ "category": "harmless",
+ "engine_name": "Artists Against 419",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "AutoShun": {
+ "category": "undetected",
+ "engine_name": "AutoShun",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "Avira": {
+ "category": "harmless",
+ "engine_name": "Avira",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "BADWARE.INFO": {
+ "category": "harmless",
+ "engine_name": "BADWARE.INFO",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Baidu-International": {
+ "category": "harmless",
+ "engine_name": "Baidu-International",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Bfore.Ai PreCrime": {
+ "category": "harmless",
+ "engine_name": "Bfore.Ai PreCrime",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "BitDefender": {
+ "category": "malicious",
+ "engine_name": "BitDefender",
+ "method": "blacklist",
+ "result": "malware"
+ },
+ "BlockList": {
+ "category": "harmless",
+ "engine_name": "BlockList",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Blueliv": {
+ "category": "harmless",
+ "engine_name": "Blueliv",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "CINS Army": {
+ "category": "harmless",
+ "engine_name": "CINS Army",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "CMC Threat Intelligence": {
+ "category": "harmless",
+ "engine_name": "CMC Threat Intelligence",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "CRDF": {
+ "category": "harmless",
+ "engine_name": "CRDF",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Certego": {
+ "category": "harmless",
+ "engine_name": "Certego",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Chong Lua Dao": {
+ "category": "harmless",
+ "engine_name": "Chong Lua Dao",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Comodo Valkyrie Verdict": {
+ "category": "harmless",
+ "engine_name": "Comodo Valkyrie Verdict",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "CyRadar": {
+ "category": "harmless",
+ "engine_name": "CyRadar",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Cyan": {
+ "category": "undetected",
+ "engine_name": "Cyan",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "CyberCrime": {
+ "category": "harmless",
+ "engine_name": "CyberCrime",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Cyren": {
+ "category": "harmless",
+ "engine_name": "Cyren",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "DNS8": {
+ "category": "harmless",
+ "engine_name": "DNS8",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Dr.Web": {
+ "category": "harmless",
+ "engine_name": "Dr.Web",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "ESET": {
+ "category": "malicious",
+ "engine_name": "ESET",
+ "method": "blacklist",
+ "result": "malware"
+ },
+ "EmergingThreats": {
+ "category": "harmless",
+ "engine_name": "EmergingThreats",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Emsisoft": {
+ "category": "harmless",
+ "engine_name": "Emsisoft",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "EonScope": {
+ "category": "harmless",
+ "engine_name": "EonScope",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Feodo Tracker": {
+ "category": "harmless",
+ "engine_name": "Feodo Tracker",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Forcepoint ThreatSeeker": {
+ "category": "harmless",
+ "engine_name": "Forcepoint ThreatSeeker",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Fortinet": {
+ "category": "malicious",
+ "engine_name": "Fortinet",
+ "method": "blacklist",
+ "result": "phishing"
+ },
+ "FraudScore": {
+ "category": "harmless",
+ "engine_name": "FraudScore",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "G-Data": {
+ "category": "harmless",
+ "engine_name": "G-Data",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Google Safebrowsing": {
+ "category": "malicious",
+ "engine_name": "Google Safebrowsing",
+ "method": "blacklist",
+ "result": "phishing"
+ },
+ "GreenSnow": {
+ "category": "harmless",
+ "engine_name": "GreenSnow",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Heimdal Security": {
+ "category": "harmless",
+ "engine_name": "Heimdal Security",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Hoplite Industries": {
+ "category": "harmless",
+ "engine_name": "Hoplite Industries",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "IPsum": {
+ "category": "harmless",
+ "engine_name": "IPsum",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "K7AntiVirus": {
+ "category": "harmless",
+ "engine_name": "K7AntiVirus",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Kaspersky": {
+ "category": "malicious",
+ "engine_name": "Kaspersky",
+ "method": "blacklist",
+ "result": "phishing"
+ },
+ "Lionic": {
+ "category": "harmless",
+ "engine_name": "Lionic",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Lumu": {
+ "category": "undetected",
+ "engine_name": "Lumu",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "MalBeacon": {
+ "category": "harmless",
+ "engine_name": "MalBeacon",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "MalSilo": {
+ "category": "harmless",
+ "engine_name": "MalSilo",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "MalwareDomainList": {
+ "category": "harmless",
+ "engine_name": "MalwareDomainList",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "MalwarePatrol": {
+ "category": "harmless",
+ "engine_name": "MalwarePatrol",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Malwared": {
+ "category": "harmless",
+ "engine_name": "Malwared",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Netcraft": {
+ "category": "undetected",
+ "engine_name": "Netcraft",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "NotMining": {
+ "category": "undetected",
+ "engine_name": "NotMining",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "Nucleon": {
+ "category": "harmless",
+ "engine_name": "Nucleon",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "OpenPhish": {
+ "category": "harmless",
+ "engine_name": "OpenPhish",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "PREBYTES": {
+ "category": "harmless",
+ "engine_name": "PREBYTES",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "PhishLabs": {
+ "category": "harmless",
+ "engine_name": "PhishLabs",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Phishing Database": {
+ "category": "harmless",
+ "engine_name": "Phishing Database",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Phishtank": {
+ "category": "harmless",
+ "engine_name": "Phishtank",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Quick Heal": {
+ "category": "harmless",
+ "engine_name": "Quick Heal",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Quttera": {
+ "category": "harmless",
+ "engine_name": "Quttera",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Rising": {
+ "category": "harmless",
+ "engine_name": "Rising",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "SCUMWARE.org": {
+ "category": "harmless",
+ "engine_name": "SCUMWARE.org",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "SafeToOpen": {
+ "category": "undetected",
+ "engine_name": "SafeToOpen",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "Sangfor": {
+ "category": "harmless",
+ "engine_name": "Sangfor",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Scantitan": {
+ "category": "harmless",
+ "engine_name": "Scantitan",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "SecureBrain": {
+ "category": "harmless",
+ "engine_name": "SecureBrain",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Snort IP sample list": {
+ "category": "harmless",
+ "engine_name": "Snort IP sample list",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Sophos": {
+ "category": "malicious",
+ "engine_name": "Sophos",
+ "method": "blacklist",
+ "result": "phishing"
+ },
+ "Spam404": {
+ "category": "harmless",
+ "engine_name": "Spam404",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Spamhaus": {
+ "category": "harmless",
+ "engine_name": "Spamhaus",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "StopBadware": {
+ "category": "undetected",
+ "engine_name": "StopBadware",
+ "method": "blacklist",
+ "result": "unrated"
+ },
+ "StopForumSpam": {
+ "category": "harmless",
+ "engine_name": "StopForumSpam",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Sucuri SiteCheck": {
+ "category": "harmless",
+ "engine_name": "Sucuri SiteCheck",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Tencent": {
+ "category": "harmless",
+ "engine_name": "Tencent",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "ThreatHive": {
+ "category": "harmless",
+ "engine_name": "ThreatHive",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Threatsourcing": {
+ "category": "harmless",
+ "engine_name": "Threatsourcing",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Trustwave": {
+ "category": "harmless",
+ "engine_name": "Trustwave",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "URLhaus": {
+ "category": "harmless",
+ "engine_name": "URLhaus",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "VX Vault": {
+ "category": "harmless",
+ "engine_name": "VX Vault",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Virusdie External Site Scan": {
+ "category": "harmless",
+ "engine_name": "Virusdie External Site Scan",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Web Security Guard": {
+ "category": "harmless",
+ "engine_name": "Web Security Guard",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "Webroot": {
+ "category": "malicious",
+ "engine_name": "Webroot",
+ "method": "blacklist",
+ "result": "malicious"
+ },
+ "Yandex Safebrowsing": {
+ "category": "harmless",
+ "engine_name": "Yandex Safebrowsing",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "ZeroCERT": {
+ "category": "harmless",
+ "engine_name": "ZeroCERT",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "alphaMountain.ai": {
+ "category": "harmless",
+ "engine_name": "alphaMountain.ai",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "benkow.cc": {
+ "category": "harmless",
+ "engine_name": "benkow.cc",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "desenmascara.me": {
+ "category": "harmless",
+ "engine_name": "desenmascara.me",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "malwares.com URL checker": {
+ "category": "harmless",
+ "engine_name": "malwares.com URL checker",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "securolytics": {
+ "category": "harmless",
+ "engine_name": "securolytics",
+ "method": "blacklist",
+ "result": "clean"
+ },
+ "zvelo": {
+ "category": "harmless",
+ "engine_name": "zvelo",
+ "method": "blacklist",
+ "result": "clean"
+ }
+ },
+ "last_analysis_stats": {
+ "harmless": 77,
+ "malicious": 7,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 8
+ },
+ "last_final_url": "http://www.example.com",
+ "last_http_response_code": 200,
+ "last_http_response_content_length": 4372,
+ "last_http_response_content_sha256": "67c1bc796bffe9169be3ef87dd8af4e8776d04484812a5bfdc7c5c9042aed1ca",
+ "last_http_response_headers": {
+ "accept-ch": "viewport-width, dpr, device-memory, rtt, downlink, ect, ua, ua-full-version, ua-platform, ua-platform-version, ua-arch, ua-model, ua-mobile",
+ "accept-ch-lifetime": "30",
+ "connection": "keep-alive",
+ "content-type": "text/html; charset=UTF-8",
+ "date": "Wed, 03 Nov 2021 20:25:28 GMT",
+ "server": "nginx",
+ "transfer-encoding": "chunked",
+ "vary": "Accept-Encoding",
+ "x-redirect": "skenzo"
+ },
+ "last_modification_date": 1635971440,
+ "last_submission_date": 1635971126,
+ "reputation": 0,
+ "tags": [],
+ "threat_names": ["Mal/HTMLGen-A"],
+ "times_submitted": 18,
+ "title": "something_definitely_unsecure.com",
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "trackers": {
+ "Amazon Cloudfront": [{
+ "id": "d1lxhc4jvstzrp",
+ "timestamp": 1635971126,
+ "url": "//something_definitely_unsecure.js"
+ }]
+ },
+ "url": "http://www.example.com",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20"
+ },
+ "relationships": {
+ "last_serving_ip_address": {
+ "data": {
+ "id": "1.1.1.1",
+ "type": "ip_address"
+ },
+ "links": {
+ "related": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/last_serving_ip_address",
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/relationships/last_serving_ip_address"
+ },
+ "meta": {
+ "count": 1
+ }
+ },
+ "network_location": {
+ "data": {
+ "id": "something_definitely_unsecure.com",
+ "type": "domain"
+ },
+ "links": {
+ "related": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/network_location",
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/relationships/network_location"
+ },
+ "meta": {
+ "count": 1
+ }
+ }
+ },
+ "type": "url"
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_assessment_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_assessment_results.json
new file mode 100644
index 000000000000..5f7296651ec8
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_assessment_results.json
@@ -0,0 +1,31 @@
+{
+ "id": "aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20",
+ "type": "url",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20"
+ },
+ "attributes": {
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ }
+}
diff --git a/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_results.json b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_results.json
new file mode 100644
index 000000000000..b2578db732af
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/Integrations/GoogleThreatIntelligence/test_data/url_results.json
@@ -0,0 +1,107 @@
+{
+ "attributes": {
+ "categories": {
+ "Forcepoint ThreatSeeker": "newly registered websites",
+ "Sophos": "illegal phishing, phishing and fraud",
+ "Webroot": "Phishing and Other Frauds"
+ },
+ "first_submission_date": 1603275371,
+ "last_analysis_date": 1635971126,
+ "last_analysis_stats": {
+ "harmless": 77,
+ "malicious": 7,
+ "suspicious": 0,
+ "timeout": 0,
+ "undetected": 8
+ },
+ "last_final_url": "http://www.example.com",
+ "last_http_response_code": 200,
+ "last_http_response_content_length": 4372,
+ "last_http_response_content_sha256": "67c1bc796bffe9169be3ef87dd8af4e8776d04484812a5bfdc7c5c9042aed1ca",
+ "last_http_response_headers": {
+ "accept-ch": "viewport-width, dpr, device-memory, rtt, downlink, ect, ua, ua-full-version, ua-platform, ua-platform-version, ua-arch, ua-model, ua-mobile",
+ "accept-ch-lifetime": "30",
+ "connection": "keep-alive",
+ "content-type": "text/html; charset=UTF-8",
+ "date": "Wed, 03 Nov 2021 20:25:28 GMT",
+ "server": "nginx",
+ "transfer-encoding": "chunked",
+ "vary": "Accept-Encoding",
+ "x-redirect": "skenzo"
+ },
+ "last_modification_date": 1635971440,
+ "last_submission_date": 1635971126,
+ "reputation": 0,
+ "tags": [],
+ "threat_names": ["Mal/HTMLGen-A"],
+ "times_submitted": 18,
+ "title": "something_definitely_unsecure.com",
+ "total_votes": {
+ "harmless": 0,
+ "malicious": 0
+ },
+ "trackers": {
+ "Amazon Cloudfront": [{
+ "id": "d1lxhc4jvstzrp",
+ "timestamp": 1635971126,
+ "url": "//something_definitely_unsecure.js"
+ }]
+ },
+ "url": "http://www.example.com",
+ "gti_assessment": {
+ "threat_score": {
+ "value": 100
+ },
+ "severity": {
+ "value": "SEVERITY_HIGH"
+ },
+ "contributing_factors": {
+ "mandiant_association_report": true,
+ "mandiant_association_actor": true,
+ "gavs_detections": 3,
+ "normalised_categories": [
+ "trojan"
+ ],
+ "mandiant_association_malware": true,
+ "mandiant_confidence_score": 100
+ },
+ "verdict": {
+ "value": "VERDICT_MALICIOUS"
+ },
+ "description": "This indicator is malicious (high severity) with high impact."
+ }
+ },
+ "id": "aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20",
+ "links": {
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20"
+ },
+ "relationships": {
+ "last_serving_ip_address": {
+ "data": {
+ "id": "1.1.1.1",
+ "type": "ip_address"
+ },
+ "links": {
+ "related": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/last_serving_ip_address",
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/relationships/last_serving_ip_address"
+ },
+ "meta": {
+ "count": 1
+ }
+ },
+ "network_location": {
+ "data": {
+ "id": "something_definitely_unsecure.com",
+ "type": "domain"
+ },
+ "links": {
+ "related": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/network_location",
+ "self": "https://www.virustotal.com/api/v3/urls/aHR0cHM6Ly93d3cuZXhhbXBsZS5jb20/relationships/network_location"
+ },
+ "meta": {
+ "count": 1
+ }
+ }
+ },
+ "type": "url"
+}
diff --git a/Packs/GoogleThreatIntelligence/README.md b/Packs/GoogleThreatIntelligence/README.md
new file mode 100644
index 000000000000..420b0e864813
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/README.md
@@ -0,0 +1,72 @@
+**What**. Google Threat Intelligence provides unparalleled visibility into the global threat landscape. We offer deep insights from Mandiant’s leading incident response and threat research team, and combine them with our massive user and device footprint and VirusTotal’s broad crowdsourced malware database.
+
+**Why**. Security teams are often confronted with an unknown file/URL/domain/IP address and asked to make sense of an attack. Without further context, it is virtually impossible to determine attribution, build effective defenses against other strains of the attack, or understand the impact of a given threat in your organization. Through API and web-based interaction with Google Threat Intelligence, security analysts can rapidly build a picture of an incident and then use those insights to neutralize other attacks.
+
+**Outcome**. Faster, more confident, more accurate and more cost-effective security operations.
+
+**Where**. On-premise, in the cloud, in your hosting, in your corporate network, everywhere.
+
+**What we solve for leaders**.
+
+Security team challenges | Solving with Google Threat Intelligence + XSOAR
+------------------------ | ------------------------
+**Alert fatigue + quality & speed of IR handling.** PANW survey data shows that SOC analysts are only able to handle 14% of alerts generated by security tools. | **Eradicate analyst burnout through automation.** Automate false positive discarding and alert prioritization, optimize SOC resources. Malicious+Benign info.
+**Lack of context & missed threats.** Reliance on reactive threat feeds. Only information about internal systems and users, no in-the-wild contextual details. | **Improved and early detection.** Track threats going forward with [YARA](https://virustotal.github.io/yara/). Crowdsourced threat reputation for files/hashes, domains, IPs and URLs coming from over 90 security vendors.
+**Finding and maintaining security talent.** There is a shortage of qualified security candidates; recruiting + retaining these is an endemic challenge. | **Juniors operating as advanced threat hunters.** Automate repetitive tasks with playbooks, elevate SOC Level 1 effectiveness. Faster, more confident and more accurate decisions. Greater productivity.
+**Budget constraints.** Cybersecurity isn’t top of mind at many organizations when budget line items are getting funded. Difficult to prove ROI. | **Condense & lower costs + Increase toolset ROI.** One-stop-shop for everything threat intelligence related (domains, IPs, URLs, files). Take your SIEM, IDS, EDR, Firewall, etc. to the next level.
+
+**Use cases.**
+
+- **_Automatic security telemetry enrichment._** Event contextualization. Alert prioritization. False positive discarding. True positive confirmation. Automated hunting.
+- **_Incident response & forensic analysis._** Blast radius identification. Generation of remediative IoCs. Context expansion beyond your internal network.
+- **_Threat Intelligence & advanced hunting._** Unknown threat discovery. Campaign & adversary monitoring. Preventative IoCs.
+- **_Brand & corporate infrastructure monitoring._** Phishing campaign tracking. Brand impersonation alerts. Attack surface compromise identification.
+- **_Vulnerability prioritization._** Smart risk-driven patching. Vulnerability weaponization monitoring. Vulnerability landscape exploration.
+- **_Red teaming & ethical hacking._** Automatic reconnaissance/passive fingerprinting operations. Breach & attack simulation. Security stack validation.
+
+**Example questions we answer.**
+
+- Is a given {file, hash, domain, IP, URL} malicious according to the security industry? How widely known and detected is it?
+- Has a given IP address been part of a given threat campaign? What domains have resolved historically to such IP (passive DNS)? Who owns the IP? etc.
+- Is a given domain part of a threat’s network infrastructure? Are there any other domains registered by the same threat actor? What types of threats are connected with the given domain? How does the industry categorize the domain (CnC, exploit, phishing, ...)? etc.
+- Is a given URL part of a phishing attack? Does it deliver malware? Does the server side setup exhibit any commonalities that allow me to pivot to other setups operated by the same attacker?
+- Are there any IoCs that can be used to block or hunt for a given malware file or variants of its family/campaign? What does the file do when executed in a sandbox? etc.
+- Is some fake/malicious mobile application making use of my logo/brand? What are the latest set of newly registered domains that seem to be typosquatting my site? Are there any potential phishing websites making use of my site’s title/favicon?
+- Is a vulnerability (CVE) that appeared in my environment being currently leveraged by malware? How popular is it?
+
+**Technical capabilities**
+- Threat reputation for {files, hashes, domains, IPs, URLs} coming from [over 90 security vendors](https://support.virustotal.com/hc/en-us/articles/115002146809-Contributors) (antivirus solutions, nextgen EDRs, domain blocklists, network perimeter solutions, etc.).
+- Multi-angular detection for files via crowdsourced {[YARA](https://virustotal.github.io/yara/), [SIGMA](https://blog.virustotal.com/2021/05/context-is-king-part-i-crowdsourced.html), [IDS](https://support.virustotal.com/hc/en-us/articles/360018436078-Crowdsourced-IDS-Rules)} rules.
+- Allowlist (benign) information through the [aggregation of goodware indicators and provenance details](https://blog.virustotal.com/2021/08/introducing-known-distributors.html).
+- Dynamic analysis for files through detonation in [multiple home-grown and 3rd-party partner](https://blog.virustotal.com/search/label/multisandbox) sandbox solutions.
+- Extended file context and metadata through static analysis tools such as sigcheck’s authenticode signature extractor, MS Office macro VBA dissectors, Didier Stevens’ PDF tools, etc.
+- Community comments and assessments coming from over 2M monthly users of the free [www.virustotal.com](https://www.virustotal.com/gui/) public site.
+- Threat graph schema tying together the files, domains, IPs and URLs in the dataset through relationships such as downloaded files, communicating files, passive DNS resolutions, etc.
+- Passive DNS information listing historical domains seen behind a given IP address and detailing all infrastructure changes for a given domain.
+- Whois lookup information for domains and IP addresses, including pivoting based on Whois properties such as registrant details (if available).
+- Historical SSL certificate information for domains and IPs.
+- Vulnerability intelligence by tagging files with CVEs that they might be exploiting and allowing searches and alerts based on CVEs.
+- Custom threat intelligence feeds (ransomware, APTs, first stage delivery vectors, OS X malware, IoT, etc.) by filtering Google Threat Intelligence real-time file flux with [VT HUNTING Livehunt](https://www.virustotal.com/gui/hunting-overview) YARA rules.
+- Operational and strategic intelligence through crowdsourcing of OSINT sources digging into threat campaigns and threat actors.
+- Advanced faceted/elastic searches over the {file, domain, IP, URL} corpus to identify IoCs that match certain criteria, e.g. list all MS Office documents that when opened launch a powershell script and end up exhibiting network communication.
+- Download any file in the Google Threat Intelligence corpus and reroute it to other analysis systems you own.
+
+
+**Popular tasks**
+- Enrich (context + reputation) IoCs (domains, IPs, URLs, attachments) found in suspicious emails entering your organization, escalate to the pertinent SOC function.
+- Scan suspicious files seen in your organization and get a second opinion that complements your corporate security stack.
+- Automatically discard false positive alerts recorded in your organization’s SIEM, sparing SOC resources.
+- Automatically confirm true positive alerts recorded in your organization’s SIEM.
+- Rank and prioritize SOC alerts based on severity and threat categories (trojan > adware > PUA).
+- Append an additional layer of context to your alert/incident tickets so that SOC analysts can perform faster and more confident decision making.
+- Feed your network perimeter defenses (Firewall, IDS, web proxy, etc.) with additional IoCs related to an incident or tracked via YARA rules.
+- Create custom IoC feeds (ransomware, APTs, IoT, etc.) with VT HUNTING Livehunt (YARA) and automatically match them against your security logs/SIEM/etc.
+- Cover blindspots in your EDR by feeding it lists of highly relevant and undetected threats identified through the use of YARA in Google Threat Intelligence.
+- Derive scores based on malicious observations and relationships for IPs transacting with your business.
+- Assign a severity score to issues identified in a vulnerability scan of your networks.
+
+
+**Additional information**
+- [Contact the Google Threat Intelligence team](https://www.virustotal.com/gui/contact-us)
+- [Google Threat Intelligence website](https://www.virustotal.com/)
+- [Google Threat Intelligence API developer reference guide](https://gtidocs.virustotal.com/reference)
diff --git a/Packs/GoogleThreatIntelligence/TestPlaybooks/testplaybook-GoogleThreatIntelligence.yml b/Packs/GoogleThreatIntelligence/TestPlaybooks/testplaybook-GoogleThreatIntelligence.yml
new file mode 100644
index 000000000000..ef9d8f89e922
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/TestPlaybooks/testplaybook-GoogleThreatIntelligence.yml
@@ -0,0 +1,940 @@
+id: GoogleThreatIntelligence-test
+version: -1
+name: GoogleThreatIntelligence-test
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 4b4964af-06c0-4329-81f3-0d1655eb0ee0
+ type: start
+ task:
+ id: 4b4964af-06c0-4329-81f3-0d1655eb0ee0
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "13"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1125,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "1":
+ id: "1"
+ taskid: 14ee5e52-15a3-4500-8f91-6a788552531b
+ type: title
+ task:
+ id: 14ee5e52-15a3-4500-8f91-6a788552531b
+ version: -1
+ name: Reputation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "3"
+ - "4"
+ - "5"
+ - "6"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1125,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "2":
+ id: "2"
+ taskid: f9d13e5f-af66-478f-8c96-e48a97626e5f
+ type: title
+ task:
+ id: f9d13e5f-af66-478f-8c96-e48a97626e5f
+ version: -1
+ name: Comments
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "7"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "3":
+ id: "3"
+ taskid: e2c32aaf-af6a-47ca-80fc-3bfdc5163da3
+ type: regular
+ task:
+ id: e2c32aaf-af6a-47ca-80fc-3bfdc5163da3
+ version: -1
+ name: URL
+ description: Checks the reputation of a URL.
+ script: GoogleThreatIntelligence|||url
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "14"
+ scriptarguments:
+ extended_data: {}
+ url:
+ complex:
+ root: inputs.url
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "4":
+ id: "4"
+ taskid: 4a412f55-bdc1-40a1-8653-5d6ab9d02a10
+ type: regular
+ task:
+ id: 4a412f55-bdc1-40a1-8653-5d6ab9d02a10
+ version: -1
+ name: File
+ description: Checks the file reputation of the specified hash.
+ script: GoogleThreatIntelligence|||file
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "16"
+ scriptarguments:
+ extended_data: {}
+ file:
+ complex:
+ root: inputs.hash
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 910,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "5":
+ id: "5"
+ taskid: 7fdf69bf-33b7-402d-8f3a-6e2e854d61cf
+ type: regular
+ task:
+ id: 7fdf69bf-33b7-402d-8f3a-6e2e854d61cf
+ version: -1
+ name: Domain
+ description: Checks the reputation of a domain.
+ script: GoogleThreatIntelligence|||domain
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "17"
+ scriptarguments:
+ domain:
+ complex:
+ root: inputs.domain
+ extended_data: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1340,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "6":
+ id: "6"
+ taskid: f963395d-5d10-4ed3-8b7b-92babfd237d2
+ type: regular
+ task:
+ id: f963395d-5d10-4ed3-8b7b-92babfd237d2
+ version: -1
+ name: IP
+ description: Checks the reputation of an IP address.
+ script: GoogleThreatIntelligence|||ip
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "18"
+ scriptarguments:
+ extended_data: {}
+ ip:
+ complex:
+ root: inputs.ip
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1770,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "7":
+ id: "7"
+ taskid: cbe8a307-8eca-4316-85c6-ed942fd91658
+ type: regular
+ task:
+ id: cbe8a307-8eca-4316-85c6-ed942fd91658
+ version: -1
+ name: Create a comment
+ description: Adds comments to files and URLs.
+ script: GoogleThreatIntelligence|||gti-comments-add
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ comment:
+ simple: A new comment
+ resource:
+ complex:
+ root: inputs.url
+ resource_type: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "8":
+ id: "8"
+ taskid: 7236c243-6cfe-4659-8f5d-05127c585663
+ type: regular
+ task:
+ id: 7236c243-6cfe-4659-8f5d-05127c585663
+ version: -1
+ name: Get comment by ID
+ description: Retrieves a comment by comment ID.
+ script: GoogleThreatIntelligence|||gti-comments-get-by-id
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ id:
+ complex:
+ root: GoogleThreatIntelligence.Comments.comments
+ accessor: id
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "10":
+ id: "10"
+ taskid: bf72437f-74b6-41ed-8e39-1039d95989e0
+ type: regular
+ task:
+ id: bf72437f-74b6-41ed-8e39-1039d95989e0
+ version: -1
+ name: Delete comment
+ description: Delete a comment.
+ script: GoogleThreatIntelligence|||gti-comments-delete
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ id:
+ complex:
+ root: GoogleThreatIntelligence.Comments.comments
+ accessor: id
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 865
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "11":
+ id: "11"
+ taskid: 1e6027cd-1bc6-4104-83c9-5f4ba1be4892
+ type: regular
+ task:
+ id: 1e6027cd-1bc6-4104-83c9-5f4ba1be4892
+ version: -1
+ name: Get comments
+ description: Retrieves comments for a given resource.
+ script: GoogleThreatIntelligence|||gti-comments-get
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ before: {}
+ limit: {}
+ resource:
+ complex:
+ root: inputs.url
+ resource_type: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1040
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "12":
+ id: "12"
+ taskid: edeed62c-7cd8-4ba1-87d4-803195d0b5e3
+ type: title
+ task:
+ id: edeed62c-7cd8-4ba1-87d4-803195d0b5e3
+ version: -1
+ name: Done comments
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1215
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "13":
+ id: "13"
+ taskid: 522dba60-f43a-4bba-80ad-fadcd45f57f0
+ type: regular
+ task:
+ id: 522dba60-f43a-4bba-80ad-fadcd45f57f0
+ version: -1
+ name: DeleteContext
+ description: Delete field from context
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "19"
+ - "2"
+ - "1"
+ scriptarguments:
+ all:
+ simple: "yes"
+ index: {}
+ key: {}
+ keysToKeep: {}
+ subplaybook: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1125,
+ "y": 195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "14":
+ id: "14"
+ taskid: b48c5828-8301-4b0d-88bd-5b89fcb3007a
+ type: condition
+ task:
+ id: b48c5828-8301-4b0d-88bd-5b89fcb3007a
+ version: -1
+ name: Validate score
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "15"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsGeneral
+ left:
+ value:
+ complex:
+ root: DBotScore
+ accessor: Indicator
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.url
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "15":
+ id: "15"
+ taskid: 052f6983-5d3c-4952-8711-4d25ab56141a
+ type: title
+ task:
+ id: 052f6983-5d3c-4952-8711-4d25ab56141a
+ version: -1
+ name: Done reputation
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 1125,
+ "y": 880
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "16":
+ id: "16"
+ taskid: 739644c6-b039-4a5a-8683-efe6e32b77ae
+ type: condition
+ task:
+ id: 739644c6-b039-4a5a-8683-efe6e32b77ae
+ version: -1
+ name: Validate score
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "15"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsGeneral
+ left:
+ value:
+ complex:
+ root: DBotScore
+ accessor: Indicator
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.hash
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 910,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "17":
+ id: "17"
+ taskid: a4000949-c514-40c6-8437-cdb5c51193d2
+ type: condition
+ task:
+ id: a4000949-c514-40c6-8437-cdb5c51193d2
+ version: -1
+ name: Validate score
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "15"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsGeneral
+ left:
+ value:
+ complex:
+ root: DBotScore
+ accessor: Indicator
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.domain
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 1340,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "18":
+ id: "18"
+ taskid: 3e39aed7-7511-4f91-8466-e2fb72726c32
+ type: condition
+ task:
+ id: 3e39aed7-7511-4f91-8466-e2fb72726c32
+ version: -1
+ name: Validate score
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "15"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsGeneral
+ left:
+ value:
+ complex:
+ root: DBotScore
+ accessor: Indicator
+ iscontext: true
+ right:
+ value:
+ complex:
+ root: inputs.ip
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 1770,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "19":
+ id: "19"
+ taskid: 4437af0c-6e8e-45d4-852d-df1567a06b0f
+ type: title
+ task:
+ id: 4437af0c-6e8e-45d4-852d-df1567a06b0f
+ version: -1
+ name: Other commands
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "20"
+ - "22"
+ - "24"
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 2630,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "20":
+ id: "20"
+ taskid: 6ce4f04c-d2ae-4587-8484-775e6c729d6a
+ type: regular
+ task:
+ id: 6ce4f04c-d2ae-4587-8484-775e6c729d6a
+ version: -1
+ name: Passive DNS Data
+ description: Returns passive DNS records by indicator.
+ script: GoogleThreatIntelligence|||gti-passive-dns-data
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "21"
+ scriptarguments:
+ ip:
+ complex:
+ root: inputs.ip
+ limit: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 2200,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "21":
+ id: "21"
+ taskid: f7085816-ac0b-4aa2-80cc-40133280c040
+ type: condition
+ task:
+ id: f7085816-ac0b-4aa2-80cc-40133280c040
+ version: -1
+ name: Verify Context
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "26"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ complex:
+ root: GoogleThreatIntelligence.PassiveDNS.attributes.ip_address
+ filters:
+ - - operator: containsGeneral
+ left:
+ value:
+ simple: GoogleThreatIntelligence.PassiveDNS.attributes.ip_address
+ iscontext: true
+ right:
+ value:
+ simple: inputs.ip
+ iscontext: true
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 2200,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "22":
+ id: "22"
+ taskid: 0bde68c3-ea6f-44e3-8d41-6a056ef7488f
+ type: regular
+ task:
+ id: 0bde68c3-ea6f-44e3-8d41-6a056ef7488f
+ version: -1
+ name: Sandbox report
+ description: Retrieves a behavioral relationship of the given file hash.
+ script: GoogleThreatIntelligence|||gti-file-sandbox-report
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "23"
+ scriptarguments:
+ file:
+ complex:
+ root: inputs.hash
+ limit: {}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 3060,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "23":
+ id: "23"
+ taskid: 66c7e01c-b6d6-4157-8887-43853c47247c
+ type: condition
+ task:
+ id: 66c7e01c-b6d6-4157-8887-43853c47247c
+ version: -1
+ name: Verify Context
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "26"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ complex:
+ root: GoogleThreatIntelligence
+ accessor: SandboxReport
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 3060,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "24":
+ id: "24"
+ taskid: 35d2d8e6-d679-4335-88a0-076d076f4b6b
+ type: regular
+ task:
+ id: 35d2d8e6-d679-4335-88a0-076d076f4b6b
+ version: -1
+ name: GTI Search
+ description: Search for an indicator in Google Threat Intelligence.
+ script: GoogleThreatIntelligence|||gti-search
+ type: regular
+ iscommand: true
+ brand: GoogleThreatIntelligence
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ extended_data: {}
+ limit: {}
+ query:
+ simple: paloaltonetworks.com
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 2630,
+ "y": 515
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "25":
+ id: "25"
+ taskid: 4f18dde1-b929-4a74-8d80-c17e82121e4a
+ type: condition
+ task:
+ id: 4f18dde1-b929-4a74-8d80-c17e82121e4a
+ version: -1
+ name: Verify Context
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "26"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isExists
+ left:
+ value:
+ complex:
+ root: GoogleThreatIntelligence.SearchResults
+ accessor: id
+ iscontext: true
+ view: |-
+ {
+ "position": {
+ "x": 2630,
+ "y": 690
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ "26":
+ id: "26"
+ taskid: 565a1fa5-2987-4a80-83b7-911a42450b9f
+ type: title
+ task:
+ id: 565a1fa5-2987-4a80-83b7-911a42450b9f
+ version: -1
+ name: Done others
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 2630,
+ "y": 880
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 1230,
+ "width": 3390,
+ "x": 50,
+ "y": 50
+ }
+ }
+ }
+inputs:
+- key: url
+ value:
+ simple: https://paloaltonetworks.com
+ required: true
+ description: URL to check its reputation
+ playbookInputQuery:
+- key: ip
+ value:
+ simple: 8.8.8.8
+ required: true
+ description: IP to check its reputation
+ playbookInputQuery:
+- key: domain
+ value:
+ simple: paloaltonetworks.com
+ required: true
+ description: Domain to check its reputation
+ playbookInputQuery:
+- key: hash
+ value:
+ simple: 2b294b3499d1cce794badffc959b7618
+ required: false
+  description: Hash to check its reputation
+ playbookInputQuery:
+outputs: []
+fromversion: 6.10.0
+description: Test playbook of GoogleThreatIntelligence.
diff --git a/Packs/GoogleThreatIntelligence/pack_metadata.json b/Packs/GoogleThreatIntelligence/pack_metadata.json
new file mode 100644
index 000000000000..ad6fbdcabe65
--- /dev/null
+++ b/Packs/GoogleThreatIntelligence/pack_metadata.json
@@ -0,0 +1,21 @@
+{
+ "name": "GoogleThreatIntelligence",
+ "description": "Analyze suspicious hashes, URLs, domains and IP addresses",
+ "support": "partner",
+ "currentVersion": "1.0.0",
+ "author": "Google Threat Intelligence",
+ "url": "https://www.virustotal.com",
+ "email": "contact@virustotal.com",
+ "created": "2024-04-16T00:00:00Z",
+ "categories": [
+ "Data Enrichment & Threat Intelligence"
+ ],
+ "tags": [],
+ "useCases": [],
+ "keywords": [],
+ "dependencies": {},
+ "marketplaces": [
+ "xsoar",
+ "marketplacev2"
+ ]
+}
\ No newline at end of file
diff --git a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.py b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.py
index babbb071895e..9582ff855a1e 100644
--- a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.py
+++ b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.py
@@ -3,7 +3,7 @@
''' IMPORTS '''
import traceback
-from typing import Any, Dict, List, Optional, Tuple, cast
+from typing import Any, cast
import dateparser
import urllib3
@@ -22,7 +22,7 @@
class Client(BaseClient):
def fetch_command_result(self, url_suffix, params, post_url):
- incidents: List = list()
+ incidents: list = []
try:
if post_url is None:
method = 'GET'
@@ -52,7 +52,7 @@ def validate_api_key(self):
''' HELPER FUNCTIONS '''
-def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
+def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> int | None:
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
@@ -66,7 +66,7 @@ def arg_to_int(arg: Any, arg_name: str, required: bool = False) -> Optional[int]
raise ValueError(f'Invalid number: "{arg_name}"')
-def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optional[int]:
+def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> int | None:
if arg is None:
if required is True:
@@ -81,7 +81,7 @@ def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optiona
raise ValueError(f'Invalid date: {arg_name}')
return int(date.timestamp())
- if isinstance(arg, (int, float)):
+ if isinstance(arg, int | float):
return int(arg)
raise ValueError(f'Invalid date: "{arg_name}"')
@@ -90,7 +90,7 @@ def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> Optiona
def fetch_record_command(client: Client, url_suffix, prefix, key, params, post_url=None):
- incidents: List = list()
+ incidents: list = []
r = client.fetch_command_result(url_suffix, params, post_url)
incidents.extend(r)
results = CommandResults(
@@ -111,9 +111,9 @@ def fetch_post_records(client: Client, url_suffix, prefix, key, params, post_url
return_results(results)
-def fetch_incidents(client: Client, max_results: int, last_run: Dict[str, int],
- first_fetch_time: Optional[int]
- ) -> Tuple[Dict[str, int], List[dict]]:
+def fetch_incidents(client: Client, max_results: int, last_run: dict[str, int],
+ first_fetch_time: int | None
+ ) -> tuple[dict[str, int], list[dict]]:
last_fetch = last_run.get('last_fetch', None)
case_status = 'OPEN'
url_access_time = datetime.now().timestamp()
@@ -127,7 +127,7 @@ def fetch_incidents(client: Client, max_results: int, last_run: Dict[str, int],
last_fetch = int(last_fetch)
startDate = (datetime.fromtimestamp(cast(int, last_fetch) + 1).strftime(API_DATE_FORMAT))
- incidents: List[Dict[str, Any]] = []
+ incidents: list[dict[str, Any]] = []
page = 1
isContinue = True
diff --git a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.yml b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.yml
index 2e6022153622..563c9cc2458d 100644
--- a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.yml
+++ b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA.yml
@@ -51,664 +51,664 @@ script:
arguments:
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Users.firstName
- description: First Name
+ description: First Name.
type: string
- contextPath: Gra.Users.middleName
- description: Middle Name
+ description: Middle Name.
type: string
- contextPath: Gra.Users.lastName
- description: Last Name
+ description: Last Name.
type: string
- contextPath: Gra.Users.employeeId
- description: Employee Id
+ description: Employee Id.
type: string
- contextPath: Gra.Users.riskScore
- description: Risk Score
+ description: Risk Score.
type: string
- contextPath: Gra.Users.userRisk
- description: User Risk
+ description: User Risk.
type: number
- contextPath: Gra.Users.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Users.email
- description: Email
+ description: Email.
type: string
- contextPath: Gra.Users.phone
- description: Phone
+ description: Phone.
type: string
- contextPath: Gra.Users.location
- description: Location
+ description: Location.
type: string
- contextPath: Gra.Users.manager
- description: Manager
+ description: Manager.
type: string
- contextPath: Gra.Users.title
- description: Title
+ description: Title.
type: string
- contextPath: Gra.Users.joiningDate
- description: Joining Date
+ description: Joining Date.
type: date
- contextPath: Gra.Users.exitDate
- description: Exit Date
+ description: Exit Date.
type: date
description: Retrieve list of all users (identities).
- name: gra-fetch-accounts
arguments:
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Accounts.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Accounts.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve list of all accounts.
- name: gra-fetch-active-resource-accounts
arguments:
- name: resource_name
required: true
- description: 'Resource Name '
+ description: 'Resource Name.'
defaultValue: Windows Security
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Active.Resource.Accounts.id
- description: ID
+ description: ID.
type: number
- contextPath: Gra.Active.Resource.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Active.Resource.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Active.Resource.Accounts.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Active.Resource.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Active.Resource.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Active.Resource.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Active.Resource.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Active.Resource.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Active.Resource.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Active.Resource.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Active.Resource.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Active.Resource.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve list of all active accounts for specified resource.
- name: gra-fetch-user-accounts
arguments:
- name: employee_id
required: true
- description: Employee ID
+ description: Employee ID.
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.User.Accounts.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.User.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.User.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.User.Accounts.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.User.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.User.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.User.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.User.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.User.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.User.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.User.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.User.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.User.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve list of all active accounts and details for specified user identity.
- name: gra-fetch-resource-highrisk-accounts
arguments:
- name: resource_name
required: true
- description: Resource Name
+ description: Resource Name.
defaultValue: Windows Security
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Resource.Highrisk.Accounts.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Resource.Highrisk.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Resource.Highrisk.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Resource.Highrisk.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Resource.Highrisk.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve high risk accounts for specified resource.
- name: gra-fetch-hpa
arguments:
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Hpa.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Hpa.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Hpa.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Hpa.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Hpa.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Hpa.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Hpa.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Hpa.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Hpa.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Hpa.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Hpa.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Hpa.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Hpa.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve list of all high risk privileged accounts.
- name: gra-fetch-resource-hpa
arguments:
- name: resource_name
required: true
- description: Resource Name
+ description: Resource Name.
defaultValue: Windows Security
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Resource.Hpa.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Resource.Hpa.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Resource.Hpa.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Resource.Hpa.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Resource.Hpa.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Resource.Hpa.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Resource.Hpa.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Resource.Hpa.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Resource.Hpa.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Resource.Hpa.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Resource.Hpa.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Resource.Hpa.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Resource.Hpa.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve all high privileged accounts for specified resource.
- name: gra-fetch-orphan-accounts
arguments:
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Orphan.Accounts.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Orphan.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Orphan.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Orphan.Accounts.created_on
- description: Created_on
+ description: Created On.
type: date
- contextPath: Gra.Orphan.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Orphan.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Orphan.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Orphan.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Orphan.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Orphan.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Orphan.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Orphan.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Orphan.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve list of all orphan / rogue accounts.
- name: gra-fetch-resource-orphan-accounts
arguments:
- name: resource_name
required: true
- description: Resource Name
+ description: Resource Name.
defaultValue: Windows Security
- name: page
required: true
default: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Resource.Orphan.Accounts.id
- description: Id
+ description: Id.
type: number
- contextPath: Gra.Resource.Orphan.Accounts.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Resource.Orphan.Accounts.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.domain
- description: Domain
+ description: Domain.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Resource.Orphan.Accounts.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Resource.Orphan.Accounts.updated_on
- description: Updated On
+ description: Updated On.
type: date
description: Retrieve all orphan / rogue accounts for specified resource.
- name: gra-user-activities
arguments:
- name: employee_id
required: true
- description: EMployee ID
+ description: Employee ID.
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.User.Activity.employee_id
- description: Employee Id
+ description: Employee Id.
type: string
- contextPath: Gra.User.Activity.account_name
- description: Account Name
+ description: Account Name.
type: string
- contextPath: Gra.User.Activity.resource_name
- description: Resource Name
+ description: Resource Name.
type: string
- contextPath: Gra.User.Activity.event_desc
- description: Event Desc
+ description: Event Desc.
type: string
- contextPath: Gra.User.Activity.event_date
- description: Event Date
+ description: Event Date.
type: date
- contextPath: Gra.User.Activity.risk_score
- description: Risk Score
+ description: Risk Score.
type: number
description: Retrieve activity for specified user.
- name: gra-fetch-users-details
arguments:
- name: employee_id
required: true
- description: Employee ID
+ description: Employee ID.
outputs:
- contextPath: Gra.User.firstName
- description: First Name
+ description: First Name.
type: string
- contextPath: Gra.User.middleName
- description: Middle Name
+ description: Middle Name.
type: string
- contextPath: Gra.User.lastName
- description: Last Name
+ description: Last Name.
type: string
- contextPath: Gra.User.employeeId
- description: Employee Id
+ description: Employee Id.
type: string
- contextPath: Gra.User.riskScore
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.User.userRisk
- description: User Risk
+ description: User Risk.
type: number
- contextPath: Gra.User.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.User.email
- description: Email
+ description: Email.
type: string
- contextPath: Gra.User.phone
- description: Phone
+ description: Phone.
type: string
- contextPath: Gra.User.location
- description: Location
+ description: Location.
type: string
- contextPath: Gra.User.manager
- description: Manager
+ description: Manager.
type: string
- contextPath: Gra.User.title
- description: Title
+ description: Title.
type: string
- contextPath: Gra.User.joiningDate
- description: Joining Date
+ description: Joining Date.
type: date
- contextPath: Gra.User.exitDate
- description: Exit Date
+ description: Exit Date.
type: date
- contextPath: Gra.User.profilePicturePath
- description: Profile Picture Path
+ description: Profile Picture Path.
type: string
description: Retrieve details for specified user.
- name: gra-highRisk-users
arguments:
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
default: true
- description: params
+ description: Per page record count.
defaultValue: Per page record count
outputs:
- contextPath: Gra.Highrisk.Users.firstName
- description: First Name
+ description: First Name.
type: string
- contextPath: Gra.Highrisk.Users.middleName
- description: Middle Name
+ description: Middle Name.
type: string
- contextPath: Gra.Highrisk.Users.lastName
- description: Last Name
+ description: Last Name.
type: string
- contextPath: Gra.Highrisk.Users.employeeId
- description: Employee Id
+ description: Employee Id.
type: string
- contextPath: Gra.Highrisk.Users.riskScore
- description: Risk Score
+ description: Risk Score.
type: number
- contextPath: Gra.Highrisk.Users.userRisk
- description: User Risk
+ description: User Risk.
type: string
- contextPath: Gra.Highrisk.Users.department
- description: Department
+ description: Department.
type: string
- contextPath: Gra.Highrisk.Users.email
- description: Email
+ description: Email.
type: string
- contextPath: Gra.Highrisk.Users.phone
- description: Phone
+ description: Phone.
type: string
- contextPath: Gra.Highrisk.Users.location
- description: Location
+ description: Location.
type: string
- contextPath: Gra.Highrisk.Users.manager
- description: Manager
+ description: Manager.
type: string
- contextPath: Gra.Highrisk.Users.title
- description: Title
+ description: Title.
type: string
- contextPath: Gra.Highrisk.Users.joiningDate
- description: Joining Date
+ description: Joining Date.
type: date
- contextPath: Gra.Highrisk.Users.exitDate
- description: Exit Date
+ description: Exit Date.
type: date
- contextPath: Gra.Highrisk.Users.updated_on
- description: Updated On
+ description: Updated On.
type: date
- contextPath: Gra.Highrisk.Users.created_on
- description: Created On
+ description: Created On.
type: date
- contextPath: Gra.Highrisk.Users.resource
- description: Resource
+ description: Resource.
type: string
- contextPath: Gra.Highrisk.Users.name
- description: Name
+ description: Name.
type: string
- contextPath: Gra.Highrisk.Users.id
- description: Id
+ description: Id.
type: string
- contextPath: Gra.Highrisk.Users.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Highrisk.Users.description
- description: Description
+ description: Description.
type: string
- contextPath: Gra.Highrisk.Users.is_reassigned
- description: Is Reassigned
+ description: Is Reassigned.
type: string
- contextPath: Gra.Highrisk.Users.high_risk
- description: High Risk
+ description: High Risk.
type: string
- contextPath: Gra.Highrisk.Users.is_orphan
- description: Is Orphan
+ description: Is Orphan.
type: string
- contextPath: Gra.Highrisk.Users.domain
- description: Domain
+ description: Domain.
type: string
description: Retrieve list of all high risk users.
- name: gra-cases
@@ -722,67 +722,67 @@ script:
- RISK ACCEPTED
- REOPENED
- ALL
- description: Status
+ description: Status.
defaultValue: OPEN
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.Cases.entityId
- description: EntityId
+ description: EntityId.
type: number
- contextPath: Gra.Cases.entityTypeId
- description: Entity Type Id
+ description: Entity Type Id.
type: number
- contextPath: Gra.Cases.entity
- description: Entity
+ description: Entity.
type: string
- contextPath: Gra.Cases.caseId
- description: Case Id
+ description: Case Id.
type: number
- contextPath: Gra.Cases.openDate
- description: Open Date
+ description: Open Date.
type: date
- contextPath: Gra.Cases.ownerId
- description: Owner Id
+ description: Owner Id.
type: number
- contextPath: Gra.Cases.ownerType
- description: Owner Type
+ description: Owner Type.
type: string
- contextPath: Gra.Cases.ownerName
- description: Owner Name
+ description: Owner Name.
type: string
- contextPath: Gra.Cases.riskDate
- description: Risk Date
+ description: Risk Date.
type: date
- contextPath: Gra.Cases.status
- description: satus
+ description: Status.
type: string
- contextPath: Gra.Cases.anomalies
- description: Anomalies
+ description: Anomalies.
type: string
description: Retrieve list of all cases for specified status.
- name: gra-user-anomalies
arguments:
- name: employee_id
required: true
- description: employee Id
+ description: Employee Id.
- name: page
required: true
- description: Page No
+ description: Page No.
defaultValue: "1"
- name: max
required: true
- description: Per page record count
+ description: Per page record count.
defaultValue: "25"
outputs:
- contextPath: Gra.User.Anomalies.anomaly_name
- description: Anomaly Name
+ description: Anomaly Name.
type: string
description: Retrieve list of anomalies for specified user.
- name: gra-case-action
@@ -795,11 +795,11 @@ script:
- closeCase
- modelReviewCase
- riskManageCase
- description: Action
+ description: Action.
defaultValue: closeCase
- name: caseId
required: true
- description: Case Id
+ description: Case Id.
- name: subOption
required: true
auto: PREDEFINED
@@ -808,16 +808,16 @@ script:
- Correct Detection
- Tuning Required
- Others
- description: Sub Option
+ description: Sub Option.
defaultValue: True Incident
- name: caseComment
required: true
- description: Case Comment
+ description: Case Comment.
- name: riskAcceptDate
- description: Risk Accept Date in ‘yyyy-MM-dd’ format. (applicable only in case of closing a case as Risk Managed)
+ description: Risk Accept Date in ‘yyyy-MM-dd’ format. (applicable only in case of closing a case as Risk Managed).
outputs:
- contextPath: Gra.Case.Action.Message
- description: Message
+ description: Message.
type: string
description: Close a case and update the anomaly status as Closed / Risk Managed / Model Reviewed.
- name: gra-case-action-anomaly
@@ -830,14 +830,14 @@ script:
- closeCaseAnomaly
- modelReviewCaseAnomaly
- riskAcceptCaseAnomaly
- description: Action
+ description: Action.
defaultValue: closeCaseAnomaly
- name: caseId
required: true
- description: Case ID
+ description: Case ID.
- name: anomalyNames
required: true
- description: Anomaly Names
+ description: Anomaly Names.
- name: subOption
required: true
auto: PREDEFINED
@@ -846,139 +846,139 @@ script:
- Correct Detection
- Tuning Required
- Others
- description: Sub Option
+ description: Sub Option.
defaultValue: True Incident
- name: caseComment
required: true
- description: Case Comment
+ description: Case Comment.
- name: riskAcceptDate
- description: Risk Accept Date in ‘yyyy-MM-dd’ format. (applicable only in case of closing anomalies as Risk Accepted)
+ description: "Risk Accept Date in ‘yyyy-MM-dd’ format. (applicable only in case of closing anomalies as Risk Accepted)."
outputs:
- contextPath: Gra.Case.Action.Anomaly.Message
- description: Message
+ description: Message.
type: string
- contextPath: Gra.Case.Action.Anomaly.anomalyName
- description: Anomaly Name
+ description: Anomaly Name.
type: string
description: Close an anomaly or anomalies within a case and update the anomaly status as Closed / Risk Managed / Model Reviewed.
- name: gra-investigate-anomaly-summary
arguments:
- name: modelName
required: true
- description: Model Name
+ description: Model Name.
- name: fromDate
- description: From Date ( yyyy-MM-dd )
+ description: From Date ( yyyy-MM-dd ).
- name: toDate
- description: To Date ( yyyy-MM-dd )
+ description: To Date ( yyyy-MM-dd ).
outputs:
- contextPath: Gra.Investigate.Anomaly.Summary.analyticalFeatures
- description: Analytical Features
+ description: Analytical Features.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.entityCount
- description: Entity Count
+ description: Entity Count.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.resourceCount
- description: Resource Count
+ description: Resource Count.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.records
- description: Records
+ description: Records.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.anomalyBaseline
- description: Anomaly Baseline
+ description: Anomaly Baseline.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.anomalyLastCatch
- description: Anomaly Last Catch
+ description: Anomaly Last Catch.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.executionDays
- description: Execution Days
+ description: Execution Days.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.chainDetails
- description: Chain Details
+ description: Chain Details.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.resourceName
- description: Resource Name
+ description: Resource Name.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.type
- description: Type
+ description: Type.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.value
- description: Value
+ description: Value.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.anomalousActivity
- description: Anomalous Activity
+ description: Anomalous Activity.
type: number
- contextPath: Gra.Investigate.Anomaly.Summary.anomalyName
- description: Anomaly Name
+ description: Anomaly Name.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.classifier
- description: Classifier
+ description: Classifier.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.anomalyFirstCatch
- description: Anomaly First Catch
+ description: Anomaly First Catch.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.anomalyDescription
- description: Anomaly Description
+ description: Anomaly Description.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.similarTemplateAnomalies
- description: Similar Template Anomalies
+ description: Similar Template Anomalies.
type: string
- contextPath: Gra.Investigate.Anomaly.Summary.entitiesFlagged
- description: Entities Flagged
+ description: Entities Flagged.
type: number
description: Retrieve detailed anomaly summary of specified anomaly name.
- name: gra-analytical-features-entity-value
arguments:
- name: entityValue
required: true
- description: Entity Value
+ description: Entity Value.
- name: modelName
required: true
- description: Model Name
+ description: Model Name.
- name: fromDate
required: true
- description: From Date ( yyyy-MM-dd )
+ description: From Date ( yyyy-MM-dd ).
- name: toDate
required: true
- description: To Date ( yyyy-MM-dd )
+ description: To Date ( yyyy-MM-dd ).
- name: entityTypeId
- description: Entity Type Id
+ description: Entity Type Id.
outputs:
- contextPath: Gra.Analytical.Features.Entity.Value.analyticalFeatures
- description: Analytical Features
+ description: Analytical Features.
type: string
- contextPath: Gra.Analytical.Features.Entity.Value.analyticalFeatureValues
- description: Analytical Feature Values
+ description: Analytical Feature Values.
type: string
description: Retrieve analytical features for specified entity value, model name and dates.
- name: gra-cases-anomaly
arguments:
- name: caseId
- description: GRA Case Id
+ description: GRA Case Id.
required: true
outputs:
- contextPath: Gra.Cases.anomalies.anomalyName
- description: Cases Anomaly name
+ description: Cases Anomaly name.
type: String
- contextPath: Gra.Cases.anomalies.riskAcceptedDate
- description: Risk accepted date of anomaly
+ description: Risk accepted date of anomaly.
type: date
- contextPath: Gra.Cases.anomalies.resourceName
- description: Resource Name
+ description: Resource Name.
type: String
- contextPath: Gra.Cases.anomalies.riskScore
- description: Risk score for anomaly
+ description: Risk score for anomaly.
type: String
- contextPath: Gra.Cases.anomalies.assignee
- description: Assignee name
+ description: Assignee name.
type: String
- contextPath: Gra.Cases.anomalies.assigneeType
- description: Assignee type (User/Role)
+ description: Assignee type (User/Role).
type: String
- contextPath: Gra.Cases.anomalies.status
- description: Current status of anomaly
+ description: Current status of anomaly.
type: String
description: Retrieve anomalies for specified case id.
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
isfetch: true
subtype: python3
fromversion: 5.0.0
diff --git a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA_test.py b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA_test.py
index 371f7f9c0e14..35dc6995b43c 100644
--- a/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA_test.py
+++ b/Packs/Gurucul/Integrations/GuruculGRA/GuruculGRA_test.py
@@ -1,9 +1,8 @@
import json
-import io
def util_load_json(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
diff --git a/Packs/Gurucul/ReleaseNotes/2_0_4.md b/Packs/Gurucul/ReleaseNotes/2_0_4.md
new file mode 100644
index 000000000000..6f169ed4de41
--- /dev/null
+++ b/Packs/Gurucul/ReleaseNotes/2_0_4.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Gurucul-GRA
+
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+#### Scripts
+
+##### GRAAnomaliesDisplay
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.py b/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.py
index ba8bba50899c..4b4eb6b33646 100644
--- a/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.py
+++ b/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.py
@@ -36,9 +36,8 @@ def getAnomaliesByCaseId():
anomaliesChangedCount += 1
break
- if anomaliesChangedCount == 0:
- if len(oldAnomalies) != len(updatedAnomalies):
- anomaliesChangedCount = len(updatedAnomalies) - len(oldAnomalies)
+ if anomaliesChangedCount == 0 and len(oldAnomalies) != len(updatedAnomalies):
+ anomaliesChangedCount = len(updatedAnomalies) - len(oldAnomalies)
if anomaliesChangedCount != 0:
execute_command("setIncident", {"id": incident['id'], "gracaseanomalydetails": updatedAnomalies})
diff --git a/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.yml b/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.yml
index 4693e214a1d3..2c7b3bb9d0e0 100644
--- a/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.yml
+++ b/Packs/Gurucul/Scripts/GRAAnomaliesDisplay/GRAAnomaliesDisplay.yml
@@ -6,12 +6,12 @@ script: '-'
type: python
tags:
- dynamic-section
-comment: Retrieve anomalies for specified case id from GRA and update in XSOAR
+comment: Retrieve anomalies for specified case id from GRA and update in XSOAR.
enabled: true
scripttarget: 0
subtype: python3
timeout: 900ns
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/Gurucul/pack_metadata.json b/Packs/Gurucul/pack_metadata.json
index d9eb3f9a40db..b4ffb01598e4 100644
--- a/Packs/Gurucul/pack_metadata.json
+++ b/Packs/Gurucul/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Gurucul Risk Analytics",
"description": "Gurucul Risk Analytics (GRA) is a Unified Security and Risk Analytics platform.",
"support": "partner",
- "currentVersion": "2.0.3",
+ "currentVersion": "2.0.4",
"author": "Gurucul",
"url": "https://www.gurucul.com",
"email": "support@gurucul.com",
diff --git a/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass.xif b/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass.xif
index dd4598efbce7..1f810a988eb7 100644
--- a/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass.xif
+++ b/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass.xif
@@ -1,4 +1,4 @@
-[MODEL: dataset=aruba_clearpass_raw]
+[MODEL: dataset=aruba_networks_clearpass_raw]
filter cat ~= "[s|S]ession"
| alter
dst_ip_v4 = if(dst !~= ":", dst, null),
diff --git a/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass_schema.json b/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass_schema.json
index 97a40fbab78d..2bbc1647a66e 100644
--- a/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass_schema.json
+++ b/Packs/HPEArubaClearPass/ModelingRules/HPEArubaClearPass/HPEArubaClearPass_schema.json
@@ -1,6 +1,6 @@
{
- "aruba_clearpass_raw": {
+ "aruba_networks_clearpass_raw": {
"rt": {
"type": "int",
"is_array": false
diff --git a/Packs/HPEArubaClearPass/ParsingRules/HPEArubaClearPass/HPEArubaClearPass.xif b/Packs/HPEArubaClearPass/ParsingRules/HPEArubaClearPass/HPEArubaClearPass.xif
index f528c3f20c74..5cc51fced0a1 100644
--- a/Packs/HPEArubaClearPass/ParsingRules/HPEArubaClearPass/HPEArubaClearPass.xif
+++ b/Packs/HPEArubaClearPass/ParsingRules/HPEArubaClearPass/HPEArubaClearPass.xif
@@ -1,3 +1,3 @@
-[INGEST:vendor="aruba", product="clearpass", target_dataset="aruba_clearpass_raw", no_hit = keep]
+[INGEST:vendor="aruba_networks", product="clearpass", target_dataset="aruba_networks_clearpass_raw", no_hit = keep]
filter to_string(rt) ~= "\d{13}"
| alter _time = to_timestamp(to_integer(rt), "MILLIS");
\ No newline at end of file
diff --git a/Packs/HPEArubaClearPass/ReleaseNotes/1_0_27.md b/Packs/HPEArubaClearPass/ReleaseNotes/1_0_27.md
new file mode 100644
index 000000000000..5a99473235d2
--- /dev/null
+++ b/Packs/HPEArubaClearPass/ReleaseNotes/1_0_27.md
@@ -0,0 +1,12 @@
+
+#### Modeling Rules
+
+##### HPE Aruba ClearPass Modeling Rule
+
+Updated the Modeling Rule dataset name to aruba_networks_clearpass_raw.
+
+#### Parsing Rules
+
+##### HPE Aruba ClearPass Parsing Rule
+
+Updated the Parsing Rule dataset name to aruba_networks_clearpass_raw.
diff --git a/Packs/HPEArubaClearPass/pack_metadata.json b/Packs/HPEArubaClearPass/pack_metadata.json
index e64c760664d8..9fa5afd93e85 100644
--- a/Packs/HPEArubaClearPass/pack_metadata.json
+++ b/Packs/HPEArubaClearPass/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "HPE Aruba Clearpass",
"description": "Aruba ClearPass Policy Manager provides role and device-based network access control for employees, contractors, and guests across any multivendor wired, wireless and VPN infrastructure.",
"support": "xsoar",
- "currentVersion": "1.0.26",
+ "currentVersion": "1.0.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml b/Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml
index 673c03e39eea..45797170f636 100644
--- a/Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml
+++ b/Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml
@@ -290,7 +290,7 @@ script:
- contextPath: IP.Relationships.EntityBType
description: The type of the destination of the relationship.
type: string
- dockerimage: demisto/python3:3.10.13.89009
+ dockerimage: demisto/python3:3.10.14.96411
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/HelloWorld/Integrations/HelloWorldEventCollector/README.md b/Packs/HelloWorld/Integrations/HelloWorldEventCollector/README.md
index 72e95a3136e0..283d04ad5328 100644
--- a/Packs/HelloWorld/Integrations/HelloWorldEventCollector/README.md
+++ b/Packs/HelloWorld/Integrations/HelloWorldEventCollector/README.md
@@ -1,5 +1,7 @@
This is the Hello World event collector integration for XSIAM.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure HelloWorld Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/HelloWorld/ReleaseNotes/3_0_10.md b/Packs/HelloWorld/ReleaseNotes/3_0_10.md
new file mode 100644
index 000000000000..60f234d8efd5
--- /dev/null
+++ b/Packs/HelloWorld/ReleaseNotes/3_0_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### HelloWorld
+
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
diff --git a/Packs/HelloWorld/ReleaseNotes/3_0_11.md b/Packs/HelloWorld/ReleaseNotes/3_0_11.md
new file mode 100644
index 000000000000..711a20b61f7c
--- /dev/null
+++ b/Packs/HelloWorld/ReleaseNotes/3_0_11.md
@@ -0,0 +1,3 @@
+## HelloWorld
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/HelloWorld/pack_metadata.json b/Packs/HelloWorld/pack_metadata.json
index 4c27d10c878d..ac87dc25fb95 100644
--- a/Packs/HelloWorld/pack_metadata.json
+++ b/Packs/HelloWorld/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "HelloWorld",
"description": "This is the Hello World integration for getting started.",
"support": "community",
- "currentVersion": "3.0.9",
+ "currentVersion": "3.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -29,5 +29,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "HelloWorldEventCollector"
}
\ No newline at end of file
diff --git a/Packs/HuaweiNetworkDevices/README.md b/Packs/HuaweiNetworkDevices/README.md
index 2eee436bfedc..f64de629cfca 100644
--- a/Packs/HuaweiNetworkDevices/README.md
+++ b/Packs/HuaweiNetworkDevices/README.md
@@ -17,11 +17,11 @@ This section describes the configuration that needs to be done on a Huawei S Ser
3. Type the following command to enable the information center:
```bash
info-center enable
- ```
+ ```
4. Type the following command to send informational level log messages to the default channel:
-```bash
- info-center source default channel loghost log level informational debug state off trap state off
-```
+ ```bash
+ info-center source default channel loghost log level informational debug state off trap state off
+ ```
5. **Optional:** To verify your Huawei S Series Switch/AR Series Router source configuration, type the command:
```bash
display channel loghost
diff --git a/Packs/IPQualityScore/ReleaseNotes/1_0_10.json b/Packs/IPQualityScore/ReleaseNotes/1_0_10.json
new file mode 100644
index 000000000000..cd1e9358ed36
--- /dev/null
+++ b/Packs/IPQualityScore/ReleaseNotes/1_0_10.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) IPQualityScore will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/IPQualityScore/ReleaseNotes/1_0_10.md b/Packs/IPQualityScore/ReleaseNotes/1_0_10.md
new file mode 100644
index 000000000000..7bb0763a6dcf
--- /dev/null
+++ b/Packs/IPQualityScore/ReleaseNotes/1_0_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### IPQualityScore
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now IPQualityScore will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/IPQualityScore/pack_metadata.json b/Packs/IPQualityScore/pack_metadata.json
index 46f9952e5dd7..abd738abb7a5 100644
--- a/Packs/IPQualityScore/pack_metadata.json
+++ b/Packs/IPQualityScore/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "IPQualityScore (IPQS) Threat Risk Scoring",
"description": "Detect threats with real-time risk scoring by IPQS. Playbook analyzes IP addresses, email addresses, and domains or URLs for high risk behavior.",
"support": "partner",
- "currentVersion": "1.0.9",
+ "currentVersion": "1.0.10",
"author": "IPQualityScore",
"url": "https://www.ipqualityscore.com",
"email": "support@ipqualityscore.com",
diff --git a/Packs/Identity/ReleaseNotes/1_0_5.md b/Packs/Identity/ReleaseNotes/1_0_5.md
new file mode 100644
index 000000000000..2d34c46be268
--- /dev/null
+++ b/Packs/Identity/ReleaseNotes/1_0_5.md
@@ -0,0 +1,3 @@
+## Identity
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Identity/pack_metadata.json b/Packs/Identity/pack_metadata.json
index 95acc69e188f..18558a9e3f70 100644
--- a/Packs/Identity/pack_metadata.json
+++ b/Packs/Identity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Identity",
"description": "Base pack for any packs using identity fields.",
"support": "xsoar",
- "currentVersion": "1.0.4",
+ "currentVersion": "1.0.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ImageOCR/ReleaseNotes/1_1_32.md b/Packs/ImageOCR/ReleaseNotes/1_1_32.md
new file mode 100644
index 000000000000..6e3808cf24c1
--- /dev/null
+++ b/Packs/ImageOCR/ReleaseNotes/1_1_32.md
@@ -0,0 +1,3 @@
+## Image OCR
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/ImageOCR/pack_metadata.json b/Packs/ImageOCR/pack_metadata.json
index fee8daa55c0d..795f89867801 100644
--- a/Packs/ImageOCR/pack_metadata.json
+++ b/Packs/ImageOCR/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Image OCR",
"description": "Extracts text from images.",
"support": "xsoar",
- "currentVersion": "1.1.31",
+ "currentVersion": "1.1.32",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Incapsula/Integrations/Incapsula/Incapsula.js b/Packs/Incapsula/Integrations/Incapsula/Incapsula.js
index c46b0b0c412e..d9c966bff116 100644
--- a/Packs/Incapsula/Integrations/Incapsula/Incapsula.js
+++ b/Packs/Incapsula/Integrations/Incapsula/Incapsula.js
@@ -9,7 +9,7 @@ var sendRequest = function(url, body, queryName) {
{
Method: 'POST',
Body: body,
- Headers: {'content-type': ['application/x-www-form-urlencoded']},
+ Headers: {'content-type': ['application/json'], 'x-API-Id': [apiid], 'x-API-Key': [apikey]},
},
true,
proxy
@@ -147,8 +147,7 @@ var urlDict = {
'incap-test-alert-attack-monitoring-start': '/api/v1/infra-protect/test-alerts/monitoring/attack-start'
}
-args['api_id'] = apiid;
-args['api_key'] = apikey;
+
switch (command) {
case 'test-module':
var res = sendRequest(base + urlDict['incap-get-texts'], encodeToURLQuery(args).substr(1), 'test');
diff --git a/Packs/Incapsula/Integrations/Incapsula/Incapsula.yml b/Packs/Incapsula/Integrations/Incapsula/Incapsula.yml
index e19220f4ae04..4d0e3b5c460d 100644
--- a/Packs/Incapsula/Integrations/Incapsula/Incapsula.yml
+++ b/Packs/Incapsula/Integrations/Incapsula/Incapsula.yml
@@ -4,7 +4,7 @@ commonfields:
name: Incapsula
display: Imperva Incapsula
category: Network Security
-description: Uses incapsula to manage sites and IPs
+description: Uses Incapsula to manage sites and IPs.
configuration:
- display: API ID
name: apiid
@@ -50,9 +50,9 @@ script:
- name: account_name
description: Account name.
- name: log_level
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default"
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default".
- name: logs_account_id
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters.
description: Use this operation to add a new account that should be managed by the account of the API client (the parent account). The new account will be configured according to the preferences set for the parent account by Incapsula. Depending on these preferences, an activation e-mail will be sent to the specified e-mail address. The user responds to the activation e-mail, selects a password, and can then log directly into the Incapsula console. The same e-mail address can also be used to send system notifications to the account. The new account is identified by a numeric value as provided by Incapsula in the response in the field account_id.
- name: incap-list-managed-accounts
arguments:
@@ -64,9 +64,9 @@ script:
Default: 50
- Maximum: 100'
+ Maximum: 100.'
- name: page_num
- description: "The page to return starting from 0. Default: 0"
+ description: "The page to return starting from 0. Default: 0."
description: Use this operation to get the list of accounts that are managed by account of the API client (the parent account).
- name: incap-add-subaccount
arguments:
@@ -81,9 +81,9 @@ script:
- name: ref_id
description: Customer specific identifier for this operation.
- name: log_level
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default"
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default".
- name: logs_account_id
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters.
description: Use this operation to add a new sub account to be managed by the account of the API client (the parent account).
- name: incap-list-subaccounts
arguments:
@@ -95,9 +95,9 @@ script:
Default: 50
- Maximum: 100'
+ Maximum: 100.'
- name: page_num
- description: "The page to return starting from 0. Default: 0"
+ description: "The page to return starting from 0. Default: 0."
description: Use this operation to get a list of sub accounts that are managed by the account of the API client (the parent account).
- name: incap-get-account-status
arguments:
@@ -121,7 +121,7 @@ script:
- "support_all_tls_versions"
description: 'Name of the configuration parameter to set.
- Possible values: name | email | plan_id | error_page_template | support_all_tls_versions'
+ Possible values: name | email | plan_id | error_page_template | support_all_tls_versions.'
- name: value
required: true
description: 'According to the configuration paramater used.
@@ -176,7 +176,7 @@ script:
predefined:
- "true"
- "false"
- description: 'Save this configuration if the test connection was successful. Default value: false'
+ description: 'Save this configuration if the test connection was successful. Default value: false.'
description: Use this operation to check that a connection can be created with your Amazon S3 bucket.
- name: incap-test-account-sftp-connection
arguments:
@@ -201,7 +201,7 @@ script:
predefined:
- "true"
- "false"
- description: 'Save this configuration if the test connection was successful. Default value: false'
+ description: 'Save this configuration if the test connection was successful. Default value: false.'
description: Use this operation to check that a connection can be created with your SFTP storage.
- name: incap-set-account-s3-log-storage
arguments:
@@ -252,7 +252,7 @@ script:
description: Numeric identifier of the account to operate on. If not specified, operation will be performed on the account identified by the authentication parameters.
description: 'Tokens are used instead of user/password based authentication to log in to the Incapsula management console. Use this operation to generate a token for an account. The token is valid for 15 minutes.
- In order to use the token, the user must use the following link: https://my.incapsula.com/?token={generated_token}'
+ In order to use the token, the user must use the following link: https://my.incapsula.com/?token={generated_token}.'
- name: incap-delete-managed-account
arguments:
- name: account_id
@@ -284,7 +284,7 @@ script:
Default: 50.
- Maximum: 100'
+ Maximum: 100.'
- name: page_num
description: 'The page to return starting from 0. Default: 0.'
description: Use this operation to get audit events for an account.
@@ -318,56 +318,56 @@ script:
default: true
description: 'The domain name of the site. For example: www.example.com, hello.example.com, example.com'
- name: account_id
- description: Numeric identifier of the account to operate on. If not specified, operation will be performed on the account identified by the authentication parameters
+ description: Numeric identifier of the account to operate on. If not specified, operation will be performed on the account identified by the authentication parameters.
- name: ref_id
- description: Customer specific identifier for this operation
+ description: Customer specific identifier for this operation.
- name: send_site_setup_emails
- description: If this value is "false", end users will not get emails about the add site process such as "DNS instructions" and "SSL setup"
+ description: If this value is "false", end users will not get emails about the add site process such as "DNS instructions" and "SSL setup".
- name: site_ip
- description: Manually set the web server IP/cname. This option is only available for specific accounts. Please contact support for more details
+ description: Manually set the web server IP/cname. This option is only available for specific accounts. Please contact support for more details.
- name: force_ssl
- description: If this value is "true", manually set the site to support SSL. This option is only available for sites with manually configured IP/cname and for specific accounts. Please contact support for more details
+ description: If this value is "true", manually set the site to support SSL. This option is only available for sites with manually configured IP/cname and for specific accounts. Please contact support for more details.
- name: log_level
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default"
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default".
- name: logs_account_id
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters
- description: Add a new site to an account. If the site already exists, its status is returned
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Numeric identifier of the account that purchased the logs integration SKU and which collects the logs. If not specified, operation will be performed on the account identified by the authentication parameters.
+ description: Add a new site to an account. If the site already exists, its status is returned.
- name: incap-get-site-status
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: tests
- description: 'List of tests to run on site before returning its status. A comma separated list of one of: domain_validation, services, dns'
- description: Use this operation to get the status of a site
+ description: 'List of tests to run on site before returning its status. A comma separated list of one of: domain_validation, services, dns.'
+ description: Use this operation to get the status of a site.
- name: incap-get-domain-approver-email
arguments:
- name: domain
description: 'The domain name of the site. For example: www.example.com, hello.example.com, example.com'
- description: Use this operation to get the list of email addresses that can be used when adding an SSL site
+ description: Use this operation to get the list of email addresses that can be used when adding an SSL site.
- name: incap-modify-site-configuration
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: param
required: true
- description: Name of configuration parameter to set
+ description: Name of configuration parameter to set.
- name: value
required: true
- description: According to the param value
+ description: According to the param value.
description: Use this operation to change one of the basic configuration settings of the site. To watch param table, visit https://my.incapsula.com/api/docs/v1/sites#modifySiteConfig
- name: incap-modify-site-log-level
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: log_level
- description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default"
- description: Use this operation to change the site log configuration
+ description: Available only for Enterprise Plan customers that purchased the Logs Integration SKU. Sets the log reporting level for the site. Options are “full”, “security”, “none” and "default".
+ description: Use this operation to change the site log configuration.
- name: incap-modify-site-tls-support
arguments:
- name: site_id
@@ -380,122 +380,122 @@ script:
predefined:
- "true"
- "false"
- description: 'Support all TLS versions. Default value: false'
+ description: 'Support all TLS versions. Default value: false.'
description: Use this operation to support all TLS versions for the site for connectivity between clients (visitors) and the Incapsula service. To remain PCI-compliant, do not enable this option.
- name: incap-modify-site-scurity-config
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: rule_id
required: true
- description: ID of the security rule to change. For possible values see the security section in the Get Site Status API call
+ description: ID of the security rule to change. For possible values see the security section in the Get Site Status API call.
- name: block_bad_bots
- description: 'Whether or not to block bad bots. One of: true, false'
+ description: 'Whether or not to block bad bots. One of: true, false.'
- name: challenge_suspected_bots
- description: 'Whether or not to send a challenge to clients that are suspected to be bad bots (CAPTCHA for example). One of: true, false'
+ description: 'Whether or not to send a challenge to clients that are suspected to be bad bots (CAPTCHA for example). One of: true, false.'
- name: activation_mode
- description: 'One of the following: off (security measures are disabled even if site is under a DDoS attack), auto (security measures will be activated automatically when the system suspects site is under a DDoS attack), on (security measures are enabled even if site is not under a DDoS attack). The syntax is as follows: api.threats.ddos.activation_mode.( e.g. for "off", use "api.threats.ddos.activation_mode.off" )'
+ description: 'One of the following: off (security measures are disabled even if site is under a DDoS attack), auto (security measures will be activated automatically when the system suspects site is under a DDoS attack), on (security measures are enabled even if site is not under a DDoS attack). The syntax is as follows: api.threats.ddos.activation_mode.( e.g. for "off", use "api.threats.ddos.activation_mode.off" ).'
- name: security_rule_action
- description: 'The action that should be taken when a threat is detected, for example: api.threats.action.block_ip. Different actions are allowed per different threats, e.g. backdoors may only be quarantined, ignored or trigger an alert. For possible values see below'
+ description: 'The action that should be taken when a threat is detected, for example: api.threats.action.block_ip. Different actions are allowed per different threats, e.g. backdoors may only be quarantined, ignored or trigger an alert. For possible values see below.'
- name: quarantined_urls
- description: A comma seperated list of encoded URLs to be kept in quarantine
+ description: A comma separated list of encoded URLs to be kept in quarantine.
- name: ddos_traffic_threshold
- description: Consider site to be under DDoS if the request rate is above this threshold. The valid values are 10, 20, 50, 100, 200, 500, 750, 1000, 2000, 3000, 4000, 5000
- description: Use this operation to change the security configuration of a site
+ description: Consider site to be under DDoS if the request rate is above this threshold. The valid values are 10, 20, 50, 100, 200, 500, 750, 1000, 2000, 3000, 4000, 5000.
+ description: Use this operation to change the security configuration of a site.
- name: incap-modify-site-acl-config
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: rule_id
required: true
description: "The id of the acl, e.g api.acl.blacklisted_ips. One of: api.acl.blacklisted_countries, api.acl.blacklisted_urls, api.acl.blacklisted_ips, api.acl.whitelisted_ips"
- name: urls
- description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs
+ description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs.
- name: url_patterns
- description: 'A comma seperated list of url patterns, one of: contains | equals | prefix | suffix | not_equals | not_contain | not_prefix | not_suffix. The patterns should be in accordance with the matching urls sent by the urls parameter'
+ description: 'A comma separated list of url patterns, one of: contains | equals | prefix | suffix | not_equals | not_contain | not_prefix | not_suffix. The patterns should be in accordance with the matching urls sent by the urls parameter.'
- name: countries
- description: "A comma seperated list of country codes"
+ description: "A comma separated list of country codes."
- name: continents
- description: "A comma seperated list of continent codes"
+ description: "A comma separated list of continent codes."
- name: ips
- description: 'A comma seperated list of IPs or IP ranges, e.g: 192.168.1.1, 192.168.1.1-192.168.1.100 or 192.168.1.1/24'
- description: Use this operation to change the ACL configuration of a site. To modify the configuration for a specific ACL rule, its values are required, as documented below. To delete an entire ACL list, send an empty string as the list values
+ description: 'A comma separated list of IPs or IP ranges, e.g: 192.168.1.1, 192.168.1.1-192.168.1.100 or 192.168.1.1/24.'
+ description: Use this operation to change the ACL configuration of a site. To modify the configuration for a specific ACL rule, its values are required, as documented below. To delete an entire ACL list, send an empty string as the list values.
- name: incap-modify-site-wl-config
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: rule_id
required: true
- description: The id of the rule (either a security or an acl rule), e.g api.acl.blacklisted_ips. See other examples below
+ description: The id of the rule (either a security or an acl rule), e.g. api.acl.blacklisted_ips. See other examples below.
- name: whitelist_id
- description: The id (an integer) of the allow list to be set. This field is optional - in case no id is supplied, a new allow list will be created
+ description: The id (an integer) of the allow list to be set. This field is optional - in case no id is supplied, a new allow list will be created.
- name: delete_whitelist
- description: An optional boolean parameter, in case it is set to "true" and a allow list id is sent, then the allow list will be deleted
+ description: An optional boolean parameter, in case it is set to "true" and an allow list id is sent, then the allow list will be deleted.
- name: urls
- description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs
+ description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs.
- name: countries
- description: A comma seperated list of country codes
+ description: A comma separated list of country codes.
- name: continents
- description: A comma seperated list of continent codes
+ description: A comma separated list of continent codes.
- name: ips
- description: 'A comma seperated list of IPs or IP ranges, e.g: 192.168.1.1, 192.168.1.1-192.168.1.100 or 192.168.1.1/24'
+ description: 'A comma separated list of IPs or IP ranges, e.g: 192.168.1.1, 192.168.1.1-192.168.1.100 or 192.168.1.1/24.'
- name: client_app_types
- description: A comma seperated list of client application types
+ description: A comma separated list of client application types.
- name: client_apps
- description: A comma seperated list of client application ids
+ description: A comma separated list of client application ids.
- name: parameters
- description: A comma seperated list of encoded user agents
+ description: A comma separated list of encoded user agents.
- name: user_agents
- description: A comma seperated list of encoded user agents
- description: Use this operation to set allow lists to security rules or ACLs. To update an existing allow list, send its ID in the id parameter. If the id parameter does not exist a new allow list will be created
+ description: A comma separated list of encoded user agents.
+ description: Use this operation to set allow lists to security rules or ACLs. To update an existing allow list, send its ID in the id parameter. If the id parameter does not exist a new allow list will be created.
- name: incap-delete-site
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
- description: Delete the site
+ description: Numeric identifier of the site to operate on.
+ description: Delete the site.
- name: incap-list-sites
arguments:
- name: account_id
- description: Numeric identifier of the account to operate on. If not specified, operation will be performed on the account identified by the authentication parameters
+ description: Numeric identifier of the account to operate on. If not specified, operation will be performed on the account identified by the authentication parameters.
- name: page_size
- description: The number of objects to return in the response. Defaults to 50
+ description: The number of objects to return in the response. Defaults to 50.
- name: page_num
- description: The page to return starting from 0. Default to 0
- description: List sites for an account
+ description: The page to return starting from 0. Default to 0.
+ description: List sites for an account.
- name: incap-get-site-report
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: report
required: true
- description: 'The report to get, one of: pci-compliance'
+ description: 'The report to get, one of: pci-compliance.'
- name: format
required: true
- description: 'The format to get the report in, one of: pdf | html'
+ description: 'The format to get the report in, one of: pdf | html.'
- name: time_range
required: true
- description: Time range to fetch data for. See the introduction of the API documentation for a detailed description
+ description: Time range to fetch data for. See the introduction of the API documentation for a detailed description.
- name: start
- description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
+ description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
- name: end
- description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
- description: Use this operation to get a report for a site. Reports are sent using Base64 encoding
+ description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
+ description: Use this operation to get a report for a site. Reports are sent using Base64 encoding.
- name: incap-get-site-html-injection-rules
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
description: Use this operation to list all the HTML Injection rules.
- name: incap-add-site-html-injection-rule
arguments:
@@ -518,7 +518,7 @@ script:
- "suffix"
- "not_prefix"
- "not_suffix"
- description: 'The url pattern. One of: contains | not_contains | equals | not_equals | prefix | suffix | not_prefix | not_suffix'
+ description: 'The url pattern. One of: contains | not_contains | equals | not_equals | prefix | suffix | not_prefix | not_suffix.'
- name: location
required: true
auto: PREDEFINED
@@ -550,7 +550,7 @@ script:
- "suffix"
- "not_prefix"
- "not_suffix"
- description: 'The url pattern. One of: contains | not_contains | equals | not_equals | prefix | suffix | not_prefix | not_suffix'
+ description: 'The url pattern. One of: contains | not_contains | equals | not_equals | prefix | suffix | not_prefix | not_suffix.'
- name: location
required: true
auto: PREDEFINED
@@ -563,7 +563,7 @@ script:
predefined:
- "true"
- "false"
- description: 'Whether or not to delete existing HTML content. Possible values: true/false'
+ description: 'Whether or not to delete existing HTML content. Possible values: true/false.'
description: Use this operation to removes an existing HTML injection rule. To confirm the removal, set the parameter delete_content to true.
- name: incap-create-new-csr
arguments:
@@ -583,28 +583,28 @@ script:
description: The state/region where your organization is located. This should not be abbreviated.
- name: city
description: The city where your organization is located.
- description: Use this operation to create a certificate signing request (CSR) for your site
+ description: Use this operation to create a certificate signing request (CSR) for your site.
- name: incap-upload-certificate
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate o
+ description: Numeric identifier of the site to operate on.
- name: certificate
required: true
- description: The new certificate
+ description: The new certificate.
- name: private_key
- description: The private key of the certificate in base64 format. Optional in case of PFX certificate file format
+ description: The private key of the certificate in base64 format. Optional in case of PFX certificate file format.
- name: passphrase
- description: The passphrase used to protect your SSL certificate
- description: 'Use this operation to upload custom certificate for your site. The following SSL certificate file formats are supported: PFX, PEM, CER'
+ description: The passphrase used to protect your SSL certificate.
+ description: 'Use this operation to upload custom certificate for your site. The following SSL certificate file formats are supported: PFX, PEM, CER.'
- name: incap-remove-custom-integration
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
- description: Use this operation to remove custom certificate
+ description: Numeric identifier of the site to operate on.
+ description: Use this operation to remove custom certificate.
- name: incap-move-site
arguments:
- name: site_id
@@ -670,78 +670,78 @@ script:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: purge_pattern
- description: 'The pattern of the resource to be purged from the cache. For example: (1) Resource_name - resources that contain Resource_name will be purged, (2) ^Resource_name - resources that start with Resource_name will be purged, and (3) Resource_name$ - resources that end with Resource_name will be purged'
- description: ' Use this operation to purge all cached content on our proxy servers for a specific site. Our Proxy servers keep cached content of your sites in order to accelerate page load times for your users. When you want this cached content to be refreshed (for example, after making adjustments in your site) you can use this API call. In order to purge the entire cached content for this site just use the API call with no parameters. If you want to purge a specific resource add the resource name as parameter'
+ description: 'The pattern of the resource to be purged from the cache. For example: (1) Resource_name - resources that contain Resource_name will be purged, (2) ^Resource_name - resources that start with Resource_name will be purged, and (3) Resource_name$ - resources that end with Resource_name will be purged.'
+ description: 'Use this operation to purge all cached content on our proxy servers for a specific site. Our Proxy servers keep cached content of your sites in order to accelerate page load times for your users. When you want this cached content to be refreshed (for example, after making adjustments in your site) you can use this API call. In order to purge the entire cached content for this site just use the API call with no parameters. If you want to purge a specific resource add the resource name as parameter.'
- name: incap-modify-cache-mode
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: cache_mode
required: true
- description: 'disable | static_only | static_and_dynamic | aggressive : default Static_Only'
+ description: 'disable | static_only | static_and_dynamic | aggressive : default Static_Only.'
- name: dynamic_cache_duration
- description: 'Profile dynamic pages and cache duration, pass number followed by "_" and one of: hr | min | sec | days | weeks: default: 5_min'
+ description: 'Profile dynamic pages and cache duration, pass number followed by "_" and one of: hr | min | sec | days | weeks: default: 5_min.'
- name: aggressive_cache_duration
- description: 'Cache resource duration, pass number followed by "_" and one of: hr | min | sec | days | weeks: default: 1_hr'
- description: Use this operation to edit basic site caching settings
+ description: 'Cache resource duration, pass number followed by "_" and one of: hr | min | sec | days | weeks: default: 1_hr.'
+ description: Use this operation to edit basic site caching settings.
- name: incap-purge-resources
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: resource_url
- description: Comma seperated list of URLs where the resource is located
+ description: Comma separated list of URLs where the resource is located.
- name: resource_pattern
- description: 'Comma seperated list of pattern, one of: contains | equals | prefix | suffix | not_equals | not_contains | not_prefix | not_suffix'
+ description: 'Comma separated list of patterns, one of: contains | equals | prefix | suffix | not_equals | not_contains | not_prefix | not_suffix.'
- name: should_purge_all_site_resources
- description: Should purge all cached resources on site
- description: Use this operation to purge site resources
+ description: Should purge all cached resources on site.
+ description: Use this operation to purge site resources.
- name: incap-modify-caching-rules
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: always_cache_resource_url
- description: "Comma seperated list of always cache resources url"
+ description: "Comma separated list of always cache resources url."
- name: always_cache_resource_pattern
- description: 'Comma seperated list of always cache resources pattern, one of: contains | equals | prefix | suffix | not_equals | not_contains | not_prefix | not_suffix'
+ description: 'Comma separated list of always cache resources pattern, one of: contains | equals | prefix | suffix | not_equals | not_contains | not_prefix | not_suffix.'
- name: always_cache_resource_duration
- description: 'Duration that resources will be in cache, pass number followed by "_" and one of: hr | min | sec | days | weeks. Either provide a comma seperated list of duration expressions, matching the number of always cache rules, or a single duration expression to be used for all always cache rules'
+ description: 'Duration that resources will be in cache, pass number followed by "_" and one of: hr | min | sec | days | weeks. Either provide a comma separated list of duration expressions, matching the number of always cache rules, or a single duration expression to be used for all always cache rules.'
- name: never_cache_resource_url
- description: Comma seperated list of never cache resources url
+ description: Comma separated list of never cache resources url.
- name: never_cahce_resource_pattern
- description: Comma seperated list of cached headers seperated with comma
+ description: Comma separated list of cached headers.
- name: clear_always_cache_rules
- description: An optional boolean parameter, in case it is set to "true", the site's always cache rules will be cleared
+ description: An optional boolean parameter, in case it is set to "true", the site's always cache rules will be cleared.
- name: clear_never_cache_rules
- description: An optional boolean parameter, in case it is set to "true", the site's never cache rules will be cleared
+ description: An optional boolean parameter, in case it is set to "true", the site's never cache rules will be cleared.
- name: clear_cache_headers_rules
- description: An optional boolean parameter, in case it is set to "true", the site's cache headers rules will be cleared
- description: Use this operation to set-up advanced caching rules
+ description: An optional boolean parameter, in case it is set to "true", the site's cache headers rules will be cleared.
+ description: Use this operation to set-up advanced caching rules.
- name: incap-set-advanced-caching-settings
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: param
- description: Name of configuration parameter to set
+ description: Name of configuration parameter to set.
- name: value
- description: According to the param value
+ description: According to the param value.
description: Use this operation to modify advanced caching settings. For more information, https://my.incapsula.com/api/docs/v1/sites#modifyAdvancedCachingSettings
- name: incap-purge-hostname-from-cache
arguments:
- name: host_name
required: true
default: true
- description: The hostname to purge from cache
- description: Use this operation to purge the hostname from the cache. This API is for customers who use the same CNAME provided by Incapsula for multiple hostnames and would like to change the CNAME for a particular hostname. Purging the hostname is required for the CNAME change to take effect
+ description: The hostname to purge from cache.
+ description: Use this operation to purge the hostname from the cache. This API is for customers who use the same CNAME provided by Incapsula for multiple hostnames and would like to change the CNAME for a particular hostname. Purging the hostname is required for the CNAME change to take effect.
- name: incap-site-get-xray-link
arguments:
- name: site_id
@@ -800,9 +800,9 @@ script:
- "308"
description: Redirect rule's response code.
- name: protocol
- description: Protocol
+ description: Protocol.
- name: add_missing
- description: Add cookie or header if it doesn't exist (Rewrite cookie rule only)
+ description: Add cookie or header if it doesn't exist (Rewrite cookie rule only).
- name: from
description: 'The pattern to rewrite.
@@ -832,7 +832,7 @@ script:
- "LB_LEAST_OPEN_CONNECTIONS"
- "LB_SOURCE_IP_HASH"
- "RANDOM"
- description: Data center load balancing algorithm
+ description: Data center load balancing algorithm.
description: Use this operation to add a rule (Delivery Rules or IncapRules).
- name: incap-edit-site-rule
arguments:
@@ -874,9 +874,9 @@ script:
- "308"
description: Redirect rule's response code.
- name: protocol
- description: Protocol
+ description: Protocol.
- name: add_missing
- description: Add cookie or header if it doesn't exist (Rewrite cookie rule only)
+ description: Add cookie or header if it doesn't exist (Rewrite cookie rule only).
- name: from
description: 'The pattern to rewrite.
@@ -906,7 +906,7 @@ script:
- "LB_LEAST_OPEN_CONNECTIONS"
- "LB_SOURCE_IP_HASH"
- "RANDOM"
- description: Data center load balancing algorithm
+ description: Data center load balancing algorithm.
description: Use this operation to edit an existing rule (Delivery Rules or IncapRules).
- name: incap-enable-site-rule
arguments:
@@ -961,7 +961,7 @@ script:
Default is 50.
- Maximum: 100'
+ Maximum: 100.'
- name: page_num
description: The page to return starting from 0. Default is 0.
description: Use this operation to list rules (Delivery Rules and IncapRules) for a given site.
@@ -994,19 +994,19 @@ script:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: name
required: true
description: The new data center's name.
- name: server_address
required: true
- description: "The server's address. Possible values: IP, CNAME"
+ description: "The server's address. Possible values: IP, CNAME."
- name: is_enabled
auto: PREDEFINED
predefined:
- "true"
- "false"
- description: Enables the data center
+ description: Enables the data center.
- name: is_content
auto: PREDEFINED
predefined:
@@ -1027,7 +1027,7 @@ script:
predefined:
- "true"
- "false"
- description: Enables the data center
+ description: Enables the data center.
- name: is_standby
auto: PREDEFINED
predefined:
@@ -1063,7 +1063,7 @@ script:
description: The data center's ID.
- name: server_address
required: true
- description: "The server's address. Possible values: IP, CNAME"
+ description: "The server's address. Possible values: IP, CNAME."
- name: name
description: The new data center's name.
- name: is_standby
@@ -1081,7 +1081,7 @@ script:
description: Server ID.
- name: server_address
required: true
- description: "The server's address. Possible values: IP, CNAME"
+ description: "The server's address. Possible values: IP, CNAME."
- name: name
description: The new data center's name.
- name: is_enabled
@@ -1108,70 +1108,70 @@ script:
- name: incap-get-statistics
arguments:
- name: account_id
- description: Numeric identifier of the account to fetch data for. If not specified, data will be fetched for all site of the account identified by the authentication parameters
+ description: Numeric identifier of the account to fetch data for. If not specified, data will be fetched for all site of the account identified by the authentication parameters.
- name: time_range
required: true
default: true
description: Time range to fetch data for. See https://my.incapsula.com/api/docs/v1#timeRange
- name: start
- description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
+ description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
- name: end
- description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
+ description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
- name: site_id
description: 'Numeric identifier of the site to fetch data for. Multiple sites can be specified in a comma separated list. For example: 123,124,125.'
- name: stats
required: true
description: Statistics to fetch, see options at https://my.incapsula.com/api/docs/v1/data#getStats
- name: granularity
- description: Time interval in milliseconds between data points for time series stats. Default is 86400000 (1 day) for a range of less than 30 days and 259200000 (3 days) for a range of less than 90 days
- description: Use this operation to get site statistics for one or more sites. This operation may return multiple statistics, as specified in the stats parameter
+ description: Time interval in milliseconds between data points for time series stats. Default is 86400000 (1 day) for a range of less than 30 days and 259200000 (3 days) for a range of less than 90 days.
+ description: Use this operation to get site statistics for one or more sites. This operation may return multiple statistics, as specified in the stats parameter.
- name: incap-get-visits
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: time_range
- description: Time range to fetch data for. Default is last_7_days
+ description: Time range to fetch data for. Default is last_7_days.
- name: start
- description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
+ description: Start date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
- name: end
- description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description
+ description: End date in milliseconds since 1970. See the introduction of the API documentation for a detailed description.
- name: page_size
- description: The number of objects to return in the response. Defaults to 10
+ description: The number of objects to return in the response. Defaults to 10.
- name: page_num
- description: 'Filter the sessions that were handled according to the security-related specifications. Multiple values are supported, e.g.: "api.threats.action.block_ip, api.threats.sql_injection"'
+ description: 'The page to return starting from 0. Default: 0.'
- name: country
- description: "Filter the sessions coming from the specified country"
+ description: "Filter the sessions coming from the specified country."
- name: ip
- description: "Filter the sessions coming from the specified IP"
+ description: "Filter the sessions coming from the specified IP."
- name: visit_id
- description: Comma separated list of visit IDs to load
+ description: Comma separated list of visit IDs to load.
- name: list_live_visits
- description: 'Whether or not to list visits that did not end and that may still be updated. One of: true | false. Default: true'
+ description: 'Whether or not to list visits that did not end and that may still be updated. One of: true | false. Default: true.'
- name: security
- description: 'Filter the sessions that were handled according to the security-related specifications. Multiple values are supported, e.g.: "api.threats.action.block_ip, api.threats.sql_injection"'
- description: Use this operation to get a log of recent visits to a website. The visits are fetched in reverse chronological order, starting with the most recent visit. Not all visits are recorded - only visits with abnormal activity are recorded e.g. violation of security rules, visits from black-listed IPs/Countries, etc. A visit may still be updated even after it was retrieved. To avoid retrieving such visits and to retrieve only visits that will no longer be updated use the list_live_visits parameter
+ description: 'Filter the sessions that were handled according to the security-related specifications. Multiple values are supported, e.g.: "api.threats.action.block_ip, api.threats.sql_injection".'
+ description: Use this operation to get a log of recent visits to a website. The visits are fetched in reverse chronological order, starting with the most recent visit. Not all visits are recorded - only visits with abnormal activity are recorded e.g. violation of security rules, visits from black-listed IPs/Countries, etc. A visit may still be updated even after it was retrieved. To avoid retrieving such visits and to retrieve only visits that will no longer be updated use the list_live_visits parameter.
- name: incap-upload-public-key
arguments:
- name: config_id
required: true
default: true
- description: The Logs Collector configuration identifier
+ description: The Logs Collector configuration identifier.
- name: public_key
required: true
- description: "The public key file(2048bit) in base64 format (without password protection)"
- description: Organizations that purchased the Security Logs Integration SKU can download security events created for their account and archive or push those events into their SIEM solution
+ description: "The public key file(2048bit) in base64 format (without password protection)."
+ description: Organizations that purchased the Security Logs Integration SKU can download security events created for their account and archive or push those events into their SIEM solution.
- name: incap-change-logs-collector-configuration
arguments:
- name: config_id
required: true
default: true
- description: The Logs Collector configuration identifier
+ description: The Logs Collector configuration identifier.
- name: logs_config_new_status
required: true
- description: The new configuration status of the Logs Collector. Values can be ACTIVE or SUSPENDED
- description: Available only for Enterprise Plan customers that purchased the Security Logs Integration SKU. Use this operation to change the status of the Logs Collector configuration
+ description: The new configuration status of the Logs Collector. Values can be ACTIVE or SUSPENDED.
+ description: Available only for Enterprise Plan customers that purchased the Security Logs Integration SKU. Use this operation to change the status of the Logs Collector configuration.
- name: incap-get-infra-protection-statistics
arguments:
- name: account_id
@@ -1196,13 +1196,13 @@ script:
description: Use this operation to get Infrastructure Protection statistics for an account or IP range.
outputs:
- contextPath: Imperva.EventStats.stats.payload.ipPrefix
- description: IP prefix
+ description: IP prefix.
type: string
- contextPath: Imperva.EventStats.stats.payload.ipPrefixType
- description: IP prefix type
+ description: IP prefix type.
type: string
- contextPath: Imperva.EventStats.stats.payload.traffic
- description: Traffic state, such as blocked or passed
+ description: Traffic state, such as blocked or passed.
- name: incap-get-infra-protection-events
arguments:
- name: account_id
@@ -1216,9 +1216,9 @@ script:
Default: 50
- Maximum: 100'
+ Maximum: 100.'
- name: page_num
- description: 'The page to return starting from 0. Default: 0'
+ description: 'The page to return starting from 0. Default: 0.'
- name: start
description: The start date in milliseconds, since 1970. For a detailed description, see https://docs.incapsula.com/Content/API/api.htm
- name: end
@@ -1230,119 +1230,119 @@ script:
- name: account_id
required: true
default: true
- description: Numeric identifier of the account to operate on
+ description: Numeric identifier of the account to operate on.
- name: email
required: true
- description: 'E-mail address, for example: "joe@example.com"'
+ description: 'E-mail address, for example: "joe@example.com".'
- name: name
- description: 'Example: John Smith'
+ description: 'Example: John Smith.'
- name: phone
- description: 'Phone number, country code - number, for example: "1-8662507659"'
+ description: 'Phone number, country code - number, for example: "1-8662507659".'
- name: is_email_verified
- description: Whether or not to skip E-Mail address verificaion
+      description: Whether or not to skip E-Mail address verification.
- name: is_phone_verified
- description: Whether or not to skip phone address verificaion
+      description: Whether or not to skip phone number verification.
- name: should_send_activation_email
- description: Whether or not to send activation E-Mail to user
- description: Use this operation to add Login Protect user for site
+ description: Whether or not to send activation E-Mail to user.
+ description: Use this operation to add Login Protect user for site.
- name: incap-edit-login-protect
arguments:
- name: account_id
required: true
default: true
- description: Numeric identifier of the account to operate on
+ description: Numeric identifier of the account to operate on.
- name: email
required: true
- description: 'E-mail address, for example: "joe@example.com"'
+ description: 'E-mail address, for example: "joe@example.com".'
- name: name
- description: 'Example: John Smith'
+ description: 'Example: John Smith.'
- name: phone
- description: 'Phone number, country code - number, for example: "1-8662507659"'
+ description: 'Phone number, country code - number, for example: "1-8662507659".'
- name: is_email_verified
- description: Whether or not to skip E-Mail address verificaion
+      description: Whether or not to skip E-Mail address verification.
- name: is_phone_verified
- description: Whether or not to skip phone address verificaion
+      description: Whether or not to skip phone number verification.
- name: should_send_activation_email
- description: "Whether or not to send activation E-Mail to user"
- description: Use this operation to edit Login Protect user's settings
+ description: "Whether or not to send activation E-Mail to user."
+ description: Use this operation to edit Login Protect user's settings.
- name: incap-get-login-protect
arguments:
- name: account_id
required: true
default: true
- description: Numeric identifier of the account to operate on
- description: Use this operation to get the account's login protect user list
+ description: Numeric identifier of the account to operate on.
+ description: Use this operation to get the account's login protect user list.
- name: incap-remove-login-protect
arguments:
- name: account_id
required: true
default: true
- description: Numeric identifier of the account to operate on
+ description: Numeric identifier of the account to operate on.
- name: email
required: true
- description: 'E-mail address, for example: "joe@example.com"'
- description: Use this operation to remove login protect user from account's user list
+ description: 'E-mail address, for example: "joe@example.com".'
+ description: Use this operation to remove login protect user from account's user list.
- name: incap-send-sms-to-user
arguments:
- name: account_id
required: true
default: true
- description: Numeric identifier of the account to operate on
+ description: Numeric identifier of the account to operate on.
- name: email
- description: 'E-mail address, for example: "joe@example.com"'
+ description: 'E-mail address, for example: "joe@example.com".'
- name: sms_text
- description: Text that will be sent in SMS
- description: Use this operation to send SMS to login protect user
+ description: Text that will be sent in SMS.
+ description: Use this operation to send SMS to login protect user.
- name: incap-modify-login-protect
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: enabled
- description: Pass true to enable login protect on site, and false to disable it. Default is true
+ description: Pass true to enable login protect on site, and false to disable it. Default is true.
- name: specific_users_list
- description: Comma seperated E-Mail list to set login protect users for the site, if the list is empty all users will be allowed to access the site using Login Protect
+      description: Comma separated E-Mail list to set login protect users for the site. If the list is empty, all users will be allowed to access the site using Login Protect.
- name: send_lp_notifications
- description: Pass true to send notification on successful login using login protect. Default is false
+ description: Pass true to send notification on successful login using login protect. Default is false.
- name: allow_all_users
- description: Pass true to allow all login protect users to access the site. If you choose to allow only spesific list of users to access the site using Login Protect set this to false, and add the list to specific_user_list. Default value is true
+      description: Pass true to allow all login protect users to access the site. If you choose to allow only a specific list of users to access the site using Login Protect, set this to false, and add the list to specific_user_list. Default value is true.
- name: authentication_methods
- description: Comma seperated list of allowed authentication methods sms | email | ga
+      description: 'Comma separated list of allowed authentication methods: sms | email | ga.'
- name: urls
- description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs
+ description: A comma separated list of resource paths. For example, /home and /admin/index.html are resource paths, however http://www.example.com/home is not. Each URL should be encoded separately using percent encoding as specified by RFC 3986 (http://tools.ietf.org/html/rfc3986#section-2.1). An empty URL list will remove all URLs.
- name: url_patterns
- description: 'A comma seperated list of url patterns, one of: contains | equals | prefix | suffix | not_equals | not_contain | not_prefix | not_suffix. The patterns should be in accordance with the matching urls sent by the urls parameter'
- description: Use this operation to change Login Protect settings for site
+      description: 'A comma separated list of url patterns, one of: contains | equals | prefix | suffix | not_equals | not_contain | not_prefix | not_suffix. The patterns should be in accordance with the matching urls sent by the urls parameter.'
+ description: Use this operation to change Login Protect settings for site.
- name: incap-configure-app
arguments:
- name: site_id
required: true
default: true
- description: Numeric identifier of the site to operate on
+ description: Numeric identifier of the site to operate on.
- name: protected_app
- description: Protect admin areas of joomla | wordpress | phpBB
- description: Use this operation to configure Login Protect on wordpress | joomla | phpbb admin areas
+ description: Protect admin areas of joomla | wordpress | phpBB.
+ description: Use this operation to configure Login Protect on wordpress | joomla | phpbb admin areas.
# Integration
- name: incap-get-ip-ranges
arguments: []
- description: Use this operation to get the updated list of Incapsula IP ranges
+ description: Use this operation to get the updated list of Incapsula IP ranges.
- name: incap-get-texts
arguments: []
- description: Use this operation to retrieve a list of all text messages that may be part of API responses
+ description: Use this operation to retrieve a list of all text messages that may be part of API responses.
- name: incap-get-geo-info
arguments: []
- description: Use this operation to retrieve a list of all the countries and continents codes
+ description: Use this operation to retrieve a list of all the countries and continents codes.
- name: incap-get-app-info
arguments: []
- description: Use this operation to retrieve a list of all the client applications
+ description: Use this operation to retrieve a list of all the client applications.
# Infrastructure Protection Test Alerts
- arguments:
- description: The customer's IP range.
name: ip_range
required: true
- auto: PREDEFINED
- description: 'One of the following: BGP, NETFLOW, PROTECTED_IP'
+ description: 'One of the following: BGP, NETFLOW, PROTECTED_IP.'
name: range_type
predefined:
- BGP
@@ -1356,7 +1356,7 @@ script:
name: end
required: true
- auto: PREDEFINED
- description: 'One of the following: SRC_IP, DST_IP, SRC_PORT_PROTOCOL, DST_PORT_PROTOCOL'
+ description: 'One of the following: SRC_IP, DST_IP, SRC_PORT_PROTOCOL, DST_PORT_PROTOCOL.'
name: data_type
predefined:
- SRC_IP
@@ -1365,27 +1365,27 @@ script:
- DST_PORT_PROTOCOL
required: true
- auto: PREDEFINED
- description: 'One of the following: BW, PPS'
+ description: 'One of the following: BW, PPS.'
name: metric_type
predefined:
- BW
- PPS
required: true
- auto: PREDEFINED
- description: 'One of the following: BLOCK, PASS'
+ description: 'One of the following: BLOCK, PASS.'
name: mitigation_type
predefined:
- BLOCK
- PASS
required: true
- auto: PREDEFINED
- description: 'One of the following: PEAK, AVERAGE'
+ description: 'One of the following: PEAK, AVERAGE.'
name: aggregation_type
predefined:
- PEAK
- AVERAGE
required: true
- description: Use this operation to view the highest peak values and highest average values for a protected IP range during a selected time period
+ description: Use this operation to view the highest peak values and highest average values for a protected IP range during a selected time period.
name: incap-get-infra-protection-top-items-table
system: true
tests:
diff --git a/Packs/Incapsula/ReleaseNotes/1_1_6.md b/Packs/Incapsula/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..5f0411ac561a
--- /dev/null
+++ b/Packs/Incapsula/ReleaseNotes/1_1_6.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Imperva Incapsula
+
+Fixed an issue where the Test button failed on authentication.
diff --git a/Packs/Incapsula/pack_metadata.json b/Packs/Incapsula/pack_metadata.json
index fcf4eb057719..db3f4094006a 100644
--- a/Packs/Incapsula/pack_metadata.json
+++ b/Packs/Incapsula/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Imperva Incapsula",
"description": "Uses Incapsula to manage sites and IPs",
"support": "xsoar",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Indeni/Integrations/Indeni/Indeni.py b/Packs/Indeni/Integrations/Indeni/Indeni.py
index 7a05941d27fd..6b415a21215a 100644
--- a/Packs/Indeni/Integrations/Indeni/Indeni.py
+++ b/Packs/Indeni/Integrations/Indeni/Indeni.py
@@ -1,7 +1,6 @@
from CommonServerPython import *
''' IMPORTS '''
-from typing import List
import json
import requests
import urllib3
@@ -105,7 +104,7 @@ def get_alert_detail_request(alert_id, base_url):
def get_all_active_issues(per_page, sort_by, base_url):
- issues: List[dict] = []
+ issues: list[dict] = []
# The service endpoint to request from
endpoint_url = 'issues'
# Dictionary of params for the request
@@ -209,7 +208,7 @@ def get_alert_info(base_url):
if 'alert_blocks' in human_format:
n = human_format['alert_blocks']
if isinstance(n, list):
- bodies: List[str] = []
+ bodies: list[str] = []
for a in n:
body = a.get('body', None)
if body:
@@ -224,7 +223,7 @@ def get_alert_info(base_url):
'ContentsFormat': formats['json'],
'Contents': alert_response,
'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown('Alert ID {}'.format(alert_id), human_format, removeNull=True),
+ 'HumanReadable': tableToMarkdown(f'Alert ID {alert_id}', human_format, removeNull=True),
'EntryContext': ec
})
diff --git a/Packs/Indeni/Integrations/Indeni/Indeni.yml b/Packs/Indeni/Integrations/Indeni/Indeni.yml
index 431ff010751c..a63fae49b119 100644
--- a/Packs/Indeni/Integrations/Indeni/Indeni.yml
+++ b/Packs/Indeni/Integrations/Indeni/Indeni.yml
@@ -55,106 +55,106 @@ name: Indeni
script:
commands:
- arguments:
- - description: device id string
+ - description: device id string.
name: device_id
required: true
- description: get the device information
+ description: get the device information.
name: indeni-get-device-info
outputs:
- contextPath: Indeni.DeviceInfo.DeviceId
- description: device id string
+ description: device id string.
type: string
- contextPath: Indeni.DeviceInfo.DeviceIP
- description: device ip string
+ description: device ip string.
type: string
- contextPath: Indeni.DeviceInfo.DeviceName
- description: device hostname
+ description: device hostname.
type: string
- contextPath: Indeni.DeviceInfo.DeviceModel
- description: device model
+ description: device model.
type: string
- contextPath: Indeni.DeviceInfo.OSVersion
- description: device OS version
+ description: device OS version.
type: string
- contextPath: Indeni.DeviceInfo.CriticalAlertStats
- description: '# of critical alerts on the device'
+ description: '# of critical alerts on the device.'
type: number
- contextPath: Indeni.DeviceInfo.ErrorAlertStats
- description: '# of error alerts on the device'
+ description: '# of error alerts on the device.'
type: number
- contextPath: Indeni.DeviceInfo.WarnAlertStats
- description: '# of warn alerts on the device'
+ description: '# of warn alerts on the device.'
type: number
- contextPath: Indeni.DeviceInfo.InfoAlertStats
- description: '# of info alerts on the device'
+ description: '# of info alerts on the device.'
type: number
- arguments:
- - description: the id of the alert
+ - description: the id of the alert.
name: alert_id
required: true
- description: Get detailed alert info
+ description: Get detailed alert info.
name: indeni-get-alert-info
outputs:
- contextPath: Indeni.AlertInfo.AlertId
- description: id of the alert
+ description: id of the alert.
type: string
- contextPath: Indeni.AlertInfo.Headline
- description: headline of the alert
+ description: headline of the alert.
type: string
- contextPath: Indeni.AlertInfo.DeviceId
- description: device id
+ description: device id.
type: string
- contextPath: Indeni.AlertInfo.AlertType
- description: the alert type unique identifier
+ description: the alert type unique identifier.
type: string
- arguments:
- - description: identifier for alert type
+ - description: identifier for alert type.
name: alert_type_identifier
required: true
- description: Get summary of given alert type for all devices
+ description: Get summary of given alert type for all devices.
name: indeni-get-alert-summary
outputs:
- contextPath: Indeni.AffectedDevices.AlertType
- description: Alert type that's affecting the devices
+ description: Alert type that's affecting the devices.
type: String
- contextPath: Indeni.AffectedDevices.Device.DeviceName
- description: Name of the affected device
+ description: Name of the affected device.
type: String
- contextPath: Indeni.AffectedDevices.Device.DeviceId
- description: Id of the affected device
+ description: Id of the affected device.
type: String
- arguments:
- - description: the id of the alert
+ - description: the id of the alert.
name: alert_id
required: true
- - description: the content of the note
+ - description: the content of the note.
name: note
required: true
- description: Post a note to a given issue id
+ description: Post a note to a given issue id.
name: indeni-post-note
- arguments:
- - description: the alert id of the issue
+ - description: the alert id of the issue.
name: alert_id
required: true
- description: Archive an issue for the given alert id
+ description: Archive an issue for the given alert id.
name: indeni-archive-issue
- arguments:
- - description: the alert id of the issue
+ - description: the alert id of the issue.
name: alert_id
required: true
- description: Unarchive an existing issue
+ description: Unarchive an existing issue.
name: indeni-unarchive-issue
- arguments:
- - description: The id of the alert
+ - description: The id of the alert.
name: alert_id
required: true
- description: Get the notes from issue
+ description: Get the notes from issue.
name: indeni-get-notes
outputs:
- contextPath: Indeni.AlertInfo.Note
- description: Notes for the given issue
+ description: Notes for the given issue.
type: Unknown
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
isfetch: true
longRunning: true
runonce: false
@@ -164,3 +164,5 @@ script:
fromversion: 5.0.0
defaultclassifier: Indeni
defaultmapperin: Indeni-mapper
+tests:
+- No tests (auto formatted)
diff --git a/Packs/Indeni/ReleaseNotes/1_0_13.md b/Packs/Indeni/ReleaseNotes/1_0_13.md
new file mode 100644
index 000000000000..2b23a30dfab4
--- /dev/null
+++ b/Packs/Indeni/ReleaseNotes/1_0_13.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Indeni
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Indeni/pack_metadata.json b/Packs/Indeni/pack_metadata.json
index de76e0f4fbc3..fe996011a3b3 100644
--- a/Packs/Indeni/pack_metadata.json
+++ b/Packs/Indeni/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Indeni",
"description": "Indeni is a turn-key automated monitoring providing visibility for security infrastructure. Indeni's production-ready Knowledge is curated from vetted, community-sourced experience, to deliver automation of tedious tasks with integration with your existing processes.\ndisplay: Indeni",
"support": "partner",
- "currentVersion": "1.0.12",
+ "currentVersion": "1.0.13",
"author": "Indeni",
"url": "",
"email": "support@indeni.com",
diff --git a/Packs/Infoblox/ParsingRules/InfobloxParsingRules/InfobloxParsingRules.xif b/Packs/Infoblox/ParsingRules/InfobloxParsingRules/InfobloxParsingRules.xif
index b59f3053a6f7..8c3a61cd7c82 100644
--- a/Packs/Infoblox/ParsingRules/InfobloxParsingRules/InfobloxParsingRules.xif
+++ b/Packs/Infoblox/ParsingRules/InfobloxParsingRules/InfobloxParsingRules.xif
@@ -39,7 +39,7 @@ filter // Exclude DNS Queries & Responses events timestamp format
| alter // Converts the previous year string representation to datetime format
tmp_previous_year_timestamp_datetime = if(tmp_previous_year_timestamp_string != null, parse_timestamp("%Y %b %e %H:%M:%S", tmp_previous_year_timestamp_string), null)
| alter // Set to the relevant timestamp
- _time = coalesce(tmp_previous_year_timestamp_datetime, tmp_current_year_timestamp_datetime, _insert_time)
+ _time = coalesce(tmp_previous_year_timestamp_datetime, tmp_current_year_timestamp_datetime)
| fields - tmp*; // Remove all temporary util fields
/*
diff --git a/Packs/Infoblox/ReleaseNotes/1_1_2.md b/Packs/Infoblox/ReleaseNotes/1_1_2.md
new file mode 100644
index 000000000000..e28096e16a8b
--- /dev/null
+++ b/Packs/Infoblox/ReleaseNotes/1_1_2.md
@@ -0,0 +1,6 @@
+
+#### Parsing Rules
+
+##### Infoblox Parsing Rule
+
+Updated the Parsing Rule logic, removing _insert_time from the rule.
diff --git a/Packs/Infoblox/pack_metadata.json b/Packs/Infoblox/pack_metadata.json
index 050775ef0ccf..81f12a0f628e 100644
--- a/Packs/Infoblox/pack_metadata.json
+++ b/Packs/Infoblox/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Infoblox NIOS",
"description": "Infoblox is a comprehensive solution that consolidates DNS, DHCP, and IP address management into a single platform. It is designed to simplify network management by automating these critical functions and providing a centralized console for managing them.",
"support": "xsoar",
- "currentVersion": "1.1.1",
+ "currentVersion": "1.1.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Ironscales/ReleaseNotes/1_1_12.md b/Packs/Ironscales/ReleaseNotes/1_1_12.md
new file mode 100644
index 000000000000..0d7d1456e279
--- /dev/null
+++ b/Packs/Ironscales/ReleaseNotes/1_1_12.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### IronscalesEmailFieldTrigger
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Ironscales/Scripts/IronscalesEmailFieldTrigger/IronscalesEmailFieldTrigger.yml b/Packs/Ironscales/Scripts/IronscalesEmailFieldTrigger/IronscalesEmailFieldTrigger.yml
index 0392a2abf310..c1e474599887 100644
--- a/Packs/Ironscales/Scripts/IronscalesEmailFieldTrigger/IronscalesEmailFieldTrigger.yml
+++ b/Packs/Ironscales/Scripts/IronscalesEmailFieldTrigger/IronscalesEmailFieldTrigger.yml
@@ -6,10 +6,12 @@ script: ""
type: python
tags:
- field-change-triggered
-comment: Automatically changes email field when choosing classification
+comment: Automatically changes email field when choosing classification.
fromversion: 6.0.0
enabled: true
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
+tests:
+- No tests (auto formatted)
diff --git a/Packs/Ironscales/pack_metadata.json b/Packs/Ironscales/pack_metadata.json
index a54e29a22197..865b48a703d2 100644
--- a/Packs/Ironscales/pack_metadata.json
+++ b/Packs/Ironscales/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Ironscales",
"description": "IRONSCALES is a self-learning email security platform, automatically responding to malicious emails.",
"support": "partner",
- "currentVersion": "1.1.11",
+ "currentVersion": "1.1.12",
"author": "Ironscales",
"url": "",
"email": "support@ironscales.com",
@@ -20,4 +20,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/Jira/Integrations/JiraEventCollector/README.md b/Packs/Jira/Integrations/JiraEventCollector/README.md
index 29bb48dccd5f..65f525eef167 100644
--- a/Packs/Jira/Integrations/JiraEventCollector/README.md
+++ b/Packs/Jira/Integrations/JiraEventCollector/README.md
@@ -1,5 +1,7 @@
This integration was integrated and tested with version 3 of Jira Event Collector rest API.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Jira Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
diff --git a/Packs/Jira/ReleaseNotes/3_1_16.md b/Packs/Jira/ReleaseNotes/3_1_16.md
new file mode 100644
index 000000000000..3d6e69f5a00d
--- /dev/null
+++ b/Packs/Jira/ReleaseNotes/3_1_16.md
@@ -0,0 +1,3 @@
+## Atlassian Jira
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Jira/pack_metadata.json b/Packs/Jira/pack_metadata.json
index b01b3eea7775..fd9cf10909b5 100644
--- a/Packs/Jira/pack_metadata.json
+++ b/Packs/Jira/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Atlassian Jira",
"description": "Use the Jira integration to manage issues and create Cortex XSOAR incidents from Jira projects.",
"support": "xsoar",
- "currentVersion": "3.1.15",
+ "currentVersion": "3.1.16",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -20,5 +20,6 @@
"xsoar",
"marketplacev2",
"xpanse"
- ]
+ ],
+ "defaultDataSource": "Jira Event Collector"
}
\ No newline at end of file
diff --git a/Packs/JoeSecurity/Integrations/JoeSecurityV2/JoeSecurityV2.yml b/Packs/JoeSecurity/Integrations/JoeSecurityV2/JoeSecurityV2.yml
index b881d3ba2b9a..ee5dc0349cd3 100644
--- a/Packs/JoeSecurity/Integrations/JoeSecurityV2/JoeSecurityV2.yml
+++ b/Packs/JoeSecurity/Integrations/JoeSecurityV2/JoeSecurityV2.yml
@@ -625,6 +625,7 @@ script:
arguments:
- name: submission_ids
description: A comma-separated list of submission IDs.
+ required: true
- name: full_display
description: When set to true, indicators information, including their DBot Scores, will be displayed.
defaultValue: "true"
diff --git a/Packs/JoeSecurity/Integrations/JoeSecurityV2/README.md b/Packs/JoeSecurity/Integrations/JoeSecurityV2/README.md
index ac61fa65d6d4..44a2a7a76a99 100644
--- a/Packs/JoeSecurity/Integrations/JoeSecurityV2/README.md
+++ b/Packs/JoeSecurity/Integrations/JoeSecurityV2/README.md
@@ -719,7 +719,7 @@ Retrieve the submission info.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
-| submission_ids | A comma-separated list of submission IDs. | Optional |
+| submission_ids | A comma-separated list of submission IDs. | Required |
| full_display | When set to true, indicators information, including their DBot Scores, will be displayed. Possible values are: true, false. Default is true. | Optional |
diff --git a/Packs/JoeSecurity/ReleaseNotes/1_1_21.json b/Packs/JoeSecurity/ReleaseNotes/1_1_21.json
new file mode 100644
index 000000000000..f6043d94b5d1
--- /dev/null
+++ b/Packs/JoeSecurity/ReleaseNotes/1_1_21.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "The **joe-submission-info** command now requires the *submission_ids* argument."
+}
\ No newline at end of file
diff --git a/Packs/JoeSecurity/ReleaseNotes/1_1_21.md b/Packs/JoeSecurity/ReleaseNotes/1_1_21.md
new file mode 100644
index 000000000000..4f872c789d77
--- /dev/null
+++ b/Packs/JoeSecurity/ReleaseNotes/1_1_21.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Joe Security v2
+
+- **Breaking Change**: The *submission_ids* argument is now required when using the command **joe-submission-info**.
+
diff --git a/Packs/JoeSecurity/pack_metadata.json b/Packs/JoeSecurity/pack_metadata.json
index bd4fb0b40299..9284d987e220 100644
--- a/Packs/JoeSecurity/pack_metadata.json
+++ b/Packs/JoeSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Joe Security",
"description": "Sandbox Cloud",
"support": "xsoar",
- "currentVersion": "1.1.20",
+ "currentVersion": "1.1.21",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/KELARaDark/ReleaseNotes/1_1_6.md b/Packs/KELARaDark/ReleaseNotes/1_1_6.md
new file mode 100644
index 000000000000..814e21ae69f1
--- /dev/null
+++ b/Packs/KELARaDark/ReleaseNotes/1_1_6.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### MapRaDarkIncidentDetails
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/KELARaDark/Scripts/MapRaDarkIncidentDetails/MapRaDarkIncidentDetails.yml b/Packs/KELARaDark/Scripts/MapRaDarkIncidentDetails/MapRaDarkIncidentDetails.yml
index f553c3f60aa6..4a1169ce3020 100644
--- a/Packs/KELARaDark/Scripts/MapRaDarkIncidentDetails/MapRaDarkIncidentDetails.yml
+++ b/Packs/KELARaDark/Scripts/MapRaDarkIncidentDetails/MapRaDarkIncidentDetails.yml
@@ -5,7 +5,7 @@ comment: Map details to an RaDark incident.
commonfields:
id: MapRaDarkIncidentDetails
version: -1
-dockerimage: demisto/python3:3.10.12.63474
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: MapRaDarkIncidentDetails
outputs:
diff --git a/Packs/KELARaDark/pack_metadata.json b/Packs/KELARaDark/pack_metadata.json
index f3938131a57b..64230530a6f3 100644
--- a/Packs/KELARaDark/pack_metadata.json
+++ b/Packs/KELARaDark/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "KELA RaDark",
"description": "This content pack enables you to fetch incidents and manage your RaDark monitor from Cortex XSOAR.",
"support": "partner",
- "currentVersion": "1.1.5",
+ "currentVersion": "1.1.6",
"author": "KELA",
"url": "ke-la.com",
"email": "support@ke-la.com",
diff --git a/Packs/LinuxEventsCollection/ParsingRules/LinuxEventsCollectionParsingRules/LinuxEventsCollectionParsingRules.xif b/Packs/LinuxEventsCollection/ParsingRules/LinuxEventsCollectionParsingRules/LinuxEventsCollectionParsingRules.xif
index b6c2bf489e31..2c0d2ff513e9 100644
--- a/Packs/LinuxEventsCollection/ParsingRules/LinuxEventsCollectionParsingRules/LinuxEventsCollectionParsingRules.xif
+++ b/Packs/LinuxEventsCollection/ParsingRules/LinuxEventsCollectionParsingRules/LinuxEventsCollectionParsingRules.xif
@@ -1,5 +1,6 @@
[INGEST:vendor="linux", product="linux", target_dataset="linux_linux_raw", no_hit=keep]
-filter _raw_log ~= "\w{3}\s+\d{1,2}\s\d{2}:\d{2}:\d{2}"
+// Filter to apply the parsing rule only on logs ingested via syslog and not xdrc.
+filter _raw_log ~= "\w{3}\s+\d{1,2}\s\d{2}:\d{2}:\d{2}" and _collector_type != "XDR Collector"
| alter
// Get the current year and timestamp.
tmp_get_current_year = arrayindex(regextract(to_string(_insert_time), "\d{4}"), 0),
diff --git a/Packs/LinuxEventsCollection/ReleaseNotes/1_0_9.md b/Packs/LinuxEventsCollection/ReleaseNotes/1_0_9.md
new file mode 100644
index 000000000000..f81a3aa37ce1
--- /dev/null
+++ b/Packs/LinuxEventsCollection/ReleaseNotes/1_0_9.md
@@ -0,0 +1,3 @@
+#### Parsing Rules
+##### Linux Events Collection Parsing Rule
+Updated the parsing rule to refer only to logs ingested via Broker VM (syslog) and not via XDRC.
diff --git a/Packs/LinuxEventsCollection/pack_metadata.json b/Packs/LinuxEventsCollection/pack_metadata.json
index daec191a9f87..47868b5096ef 100644
--- a/Packs/LinuxEventsCollection/pack_metadata.json
+++ b/Packs/LinuxEventsCollection/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Linux Events Collection",
"description": "Linux is an operating system for servers, desktops, cloud, and IoTs",
"support": "xsoar",
- "currentVersion": "1.0.8",
+ "currentVersion": "1.0.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MISP/ReleaseNotes/2_1_43.json b/Packs/MISP/ReleaseNotes/2_1_43.json
new file mode 100644
index 000000000000..124ac75460c5
--- /dev/null
+++ b/Packs/MISP/ReleaseNotes/2_1_43.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) MISP will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/MISP/ReleaseNotes/2_1_43.md b/Packs/MISP/ReleaseNotes/2_1_43.md
new file mode 100644
index 000000000000..57928fad0c49
--- /dev/null
+++ b/Packs/MISP/ReleaseNotes/2_1_43.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### MISP v3
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now MISP will correctly input email addresses into context under `Account.Email` and not under `Email`.
\ No newline at end of file
diff --git a/Packs/MISP/TestPlaybooks/MISP_V3_Test.yml b/Packs/MISP/TestPlaybooks/MISP_V3_Test.yml
index 628e1a935a27..0dac3de18741 100644
--- a/Packs/MISP/TestPlaybooks/MISP_V3_Test.yml
+++ b/Packs/MISP/TestPlaybooks/MISP_V3_Test.yml
@@ -362,7 +362,7 @@ tasks:
- - operator: isEqualString
left:
value:
- simple: Email.Address
+ simple: Account.Email.Address
iscontext: true
right:
value:
diff --git a/Packs/MISP/pack_metadata.json b/Packs/MISP/pack_metadata.json
index 589637e36da5..3cce685e4101 100644
--- a/Packs/MISP/pack_metadata.json
+++ b/Packs/MISP/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "MISP",
"description": "Malware information and threat sharing platform.",
"support": "xsoar",
- "currentVersion": "2.1.42",
+ "currentVersion": "2.1.43",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.py b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.py
index 5a8ea899a79d..54a31f7c0969 100644
--- a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.py
+++ b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.py
@@ -5,7 +5,7 @@
import email
from datetime import timezone
from typing import Any
-
+from email.message import Message
from dateparser import parse
from mailparser import parse_from_bytes, parse_from_string
from imap_tools import OR
@@ -31,8 +31,12 @@ def __init__(self, message_bytes: bytes, include_raw_body: bool, save_file: bool
'\nWill replace backslash and try to parse again')
message_bytes = self.handle_message_slashes(message_bytes)
email_object = parse_from_bytes(message_bytes)
- except Exception:
- email_object = parse_from_string(message_bytes.decode('ISO-8859-1'))
+ except TypeError as e:
+ demisto.info(f'Failed parsing mail from bytes: [{e}]\n{traceback.format_exc()}.'
+ '\nWill try to parse from string')
+ message_string = message_bytes.decode('ISO-8859-1')
+ email_object = parse_from_string(message_string)
+
eml_attachments = self.get_eml_attachments(message_bytes)
self.id = id_
self.to = [mail_addresses for _, mail_addresses in email_object.to]
@@ -56,23 +60,39 @@ def __init__(self, message_bytes: bytes, include_raw_body: bool, save_file: bool
@staticmethod
def get_eml_attachments(message_bytes: bytes) -> list:
+
+ def get_attachment_payload(part: Message) -> bytes:
+ """Returns the payload of the email attachment as bytes object"""
+ payload = part.get_payload(decode=False)
+ if isinstance(payload, list) and isinstance(payload[0], Message):
+ payload = payload[0].as_bytes()
+ elif isinstance(payload, str):
+ payload = payload.encode('utf-8')
+ else:
+ raise DemistoException(f'Could not parse the email attachment: {part.get_filename()}')
+
+ return payload
+
eml_attachments = []
msg = email.message_from_bytes(message_bytes)
+
if msg:
for part in msg.walk():
if part.get_content_maintype() == "multipart" or part.get("Content-Disposition") is None:
continue
+
filename = part.get_filename()
if filename and filename.endswith('.eml'):
eml_attachments.append({
"filename": filename,
- "payload": part.get_payload(decode=False)[0].as_bytes(),
+ "payload": get_attachment_payload(part),
"binary": False,
"mail_content_type": part.get_content_subtype(),
"content-id": part.get('content-id'),
"content-disposition": part.get('content-disposition'),
"charset": part.get_content_charset(),
"content_transfer_encoding": part.get_content_charset()})
+
return eml_attachments
@staticmethod
@@ -284,7 +304,8 @@ def fetch_incidents(client: IMAPClient,
save_file=save_file,
uid_to_fetch_from=uid_to_fetch_from # type: ignore[arg-type]
)
- incidents = []
+ incidents: list = []
+ demisto.debug(f'fetched {len(incidents)} incidents')
for mail in mails_fetched:
incidents.append(mail.convert_to_incident())
uid_to_fetch_from = max(uid_to_fetch_from, mail.id)
@@ -353,6 +374,7 @@ def fetch_mails(client: IMAPClient,
demisto.debug(f'Messages to fetch: {messages_uids}')
for mail_id, message_data in client.fetch(messages_uids, 'RFC822').items():
+ demisto.debug(f"Starting to parse the mail with {mail_id=}")
message_bytes = message_data.get(b'RFC822')
# For cases the message_bytes is returned as a string. If failed, will try to use the message_bytes returned.
try:
@@ -361,16 +383,18 @@ def fetch_mails(client: IMAPClient,
demisto.debug(f"{mail_id=}: Converting to bytest failed. {message_data=}. Error: {e}")
if not message_bytes:
- demisto.debug(f"{mail_id=}: {message_bytes=}, skipping")
+ demisto.debug(f"{mail_id=}: Skipping because did not managed to convert to bytes")
continue
try:
+ demisto.debug("Creating email object")
email_message_object = Email(message_bytes, include_raw_body, save_file, mail_id)
demisto.debug(f"{mail_id=}: Created email object.")
except Exception as e:
demisto.debug(f"{mail_id=}: Failed creating Email object, skipping. {message_data=}. Error: {e}")
continue
+ demisto.debug(f"{mail_id=}: Created email object successfully.")
# Add mails if the current email UID is higher than the previous incident UID
if int(email_message_object.id) > int(uid_to_fetch_from):
fetched_email_objects.append(email_message_object)
diff --git a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.yml b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.yml
index ca0a451cce96..ed73110d8878 100644
--- a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.yml
+++ b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2.yml
@@ -175,7 +175,7 @@ script:
required: true
description: Fetches an email by message ID and returns the information in an eml file format.
name: mail-listener-get-email-as-eml
- dockerimage: demisto/py3-tools:1.0.0.91603
+ dockerimage: demisto/py3-tools:1.0.0.95440
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2_test.py b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2_test.py
index fc99bfa89964..3f2be7ef0be7 100644
--- a/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2_test.py
+++ b/Packs/MailListener/Integrations/MailListenerV2/MailListenerV2_test.py
@@ -325,6 +325,15 @@ def test_get_eml_attachments():
res = Email.get_eml_attachments(msg.as_bytes())
assert res[0]['filename'] == 'Test with an image.eml'
+ # Test an email with EML attachment with an EML attachment
+ with open(
+ 'test_data/eml_test_with_attachment_with_eml_attachment.eml', "rb") as f:
+ msg = email.message_from_bytes(f.read())
+ res = Email.get_eml_attachments(msg.as_bytes())
+
+ assert res[0]['filename'] == 'Fwd: MOIS DE MARSè.eml'
+ assert isinstance(res[0]['payload'], bytes)
+
@pytest.mark.parametrize('cert_and_key', [
# - cert and key are in the integration instance parameters
diff --git a/Packs/MailListener/Integrations/MailListenerV2/test_data/eml_test_with_attachment_with_eml_attachment.eml b/Packs/MailListener/Integrations/MailListenerV2/test_data/eml_test_with_attachment_with_eml_attachment.eml
new file mode 100644
index 000000000000..cd2534d6b664
--- /dev/null
+++ b/Packs/MailListener/Integrations/MailListenerV2/test_data/eml_test_with_attachment_with_eml_attachment.eml
@@ -0,0 +1,159 @@
+From: test test
+Content-Type: multipart/mixed;
+ boundary="Apple-Mail=_5CF4B951-A4B6-4D08-ACB8-B345F0D07AEC"
+Mime-Version: 1.0 (Mac OS X Mail 16.0 \(1.1.1.1\))
+Subject: =?utf-8?Q?Fwd=3A_MOIS_DE_MARS=C3=A8?=
+X-Universally-Unique-Identifier: 1C2A58C1-6EDF-417D-B9E1-140F5E403132
+Message-Id: <8DBD1C79-1206-4FD6-9460-D4990C27965A@test.com>
+Date: Mon, 20 May 2024 14:35:28 +0300
+To: test@test.com
+
+
+--Apple-Mail=_5CF4B951-A4B6-4D08-ACB8-B345F0D07AEC
+Content-Disposition: attachment;
+ filename*=utf-8''Fwd%3A%20MOIS%20DE%20MARS%C3%A8.eml
+Content-Type: message/rfc822;
+ name="=?utf-8?Q?Fwd=3A_MOIS_DE_MARS=C3=A8=2Eeml?="
+Content-Transfer-Encoding: 7bit
+
+From: test test
+Content-Type: multipart/mixed;
+ boundary="Apple-Mail=_8B215924-5958-4A9C-82D4-49730ED44B22"
+Mime-Version: 1.0 (Mac OS X Mail 16.0 \(1.1.1.1\))
+Subject: =?utf-8?Q?Fwd=3A_MOIS_DE_MARS=C3=A8?=
+X-Universally-Unique-Identifier: 1C2A58C1-6EDF-417D-B9E1-140F5E403132
+Message-Id:
+Date: Mon, 20 May 2024 14:35:15 +0300
+To: =?utf-8?B?CnRlc3QuVMOoc3Q=?=@test.com
+
+
+
+--Apple-Mail=_8B215924-5958-4A9C-82D4-49730ED44B22
+Content-Disposition: attachment;
+ filename="test.eml"
+Content-Type: message/rfc822;
+ name="test.eml"
+Content-Transfer-Encoding: 7bit
+
+From: test test
+Content-Type: multipart/mixed;
+ boundary="Apple-Mail=_EA9D3B93-041C-40FE-BC46-8E675A270322"
+Mime-Version: 1.0 (Mac OS X Mail 16.0 \(1.1.1.1\))
+Subject: test
+X-Universally-Unique-Identifier: 1C2A58C1-6EDF-417D-B9E1-140F5E403132
+Message-Id:
+Date: Mon, 20 May 2024 08:45:15 +0300
+To: =?utf-8?Q?test=2ET=C3=A8st?=@test.com
+
+
+--Apple-Mail=_EA9D3B93-041C-40FE-BC46-8E675A270322
+Content-Disposition: inline;
+ filename*=utf-8''RA%CC%83%C2%A8glement%20de%20Mars.pdf
+Content-Type: application/pdf;
+ x-unix-mode=0644;
+ name="=?utf-8?Q?RA=CC=83=C2=A8glement_de_Mars=2Epdf?="
+Content-Transfer-Encoding: base64
+
+JVBERi0xLjUKJeLjz9MKNyAwIG9iago8PAovVHlwZSAvRm9udERlc2NyaXB0b3IKL0ZvbnROYW1l
+IC9UaW1lcyMyME5ldyMyMFJvbWFuCi9GbGFncyAzMgovSXRhbGljQW5nbGUgMAovQXNjZW50IDg5
+MQovRGVzY2VudCAtMjE2Ci9DYXBIZWlnaHQgNjkzCi9BdmdXaWR0aCA0MDEKL01heFdpZHRoIDI2
+MTQKL0ZvbnRXZWlnaHQgNDAwCi9YSGVpZ2h0IDI1MAovTGVhZGluZyA0MgovU3RlbVYgNDAKL0Zv
+bnRCQm94IFstNTY4IC0yMTYgMjA0NiA2OTNdCj4+CmVuZG9iago4IDAgb2JqClsyNTAgMCAwIDAg
+MCAwIDAgMTgwIDAgMCAwIDAgMjUwIDAgMjUwIDI3OCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAw
+IDAgMCAwIDAgMCA2NjcgMCAwIDAgMCAwIDAgMCAwIDAgNjExIDg4OSA3MjIgMCAwIDAgMCAwIDAg
+MCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgNDQ0IDUwMCA0NDQgNTAwIDQ0NCAzMzMgNTAwIDUwMCAy
+NzggMjc4IDAgMjc4IDc3OCA1MDAgNTAwIDUwMCAwIDMzMyAzODkgMjc4IDUwMCA1MDAgMCAwIDUw
+MCA0NDQgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAw
+IDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAg
+MCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAw
+IDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDQ0NCAwIDAgMCAwIDAgMCAwIDQ0
+NCA0NDQgNDQ0XQplbmRvYmoKNiAwIG9iago8PAovVHlwZSAvRm9udAovU3VidHlwZSAvVHJ1ZVR5
+cGUKL05hbWUgL0YxCi9CYXNlRm9udCAvVGltZXMjMjBOZXcjMjBSb21hbgovRW5jb2RpbmcgL1dp
+bkFuc2lFbmNvZGluZwovRm9udERlc2NyaXB0b3IgNyAwIFIKL0ZpcnN0Q2hhciAzMgovTGFzdENo
+YXIgMjM0Ci9XaWR0aHMgOCAwIFIKPj4KZW5kb2JqCjkgMCBvYmoKPDwKL1R5cGUgL0V4dEdTdGF0
+ZQovQk0gL05vcm1hbAovY2EgMQo+PgplbmRvYmoKMTAgMCBvYmoKPDwKL1R5cGUgL0V4dEdTdGF0
+ZQovQk0gL05vcm1hbAovQ0EgMQo+PgplbmRvYmoKMTEgMCBvYmoKPDwKL0ZpbHRlciAvRmxhdGVE
+ZWNvZGUKL0xlbmd0aCAxNjA0Cj4+CnN0cmVhbQp4nMVbSW/bRhS+C9B/mJvFoh7PvgBBDs6GFnHR
+xUAPRg+yZLsKbFKlrQDpr+0x7q/oezMULbWxx0pnRB9oLSTfvnzvUeToR/LixdHJq+9eE3b0flpf
+kclle/j25+rlS3L8+hX5YzxilOGfc1YTRrTXVBvilKBMkfZiPPr1G1KPR8en49HRW044p1qQ08vx
+iMPZjHBiGXVCEasd5Yqc3sB5736x5OoWb80lvAqfuI1P3o1HZ5PjivNJU3E2qfHwobLx3QoPLR6+
+rX4jp9+PR2+A9k/jUQZeOWdUb/HKkD0WOSLb9Mibk1eEbGiQF9Kg0ZQZ4EpaanfnShS2q1BUicjV
+2pwbVryt1GRaHcrJ6jK7tZynniU5INnpegFOkqR7gVK3LfxTk1Wb31UFQx/du/BcCOqfJksO4SV8
+OTubzC/yMyAltQPILQ1VJkW3bu6qQzFpC4htGBViALkN2Nul6C4xH4c4b+/wWCA1O0mNHkB+Z6hI
+OPxZLFHN6jY7ecEsdUny+cUWWAllim4bkhzm+OD2TV1AAUpSPkC8C6Wp4ym68+DzJcQ2hvqk/guI
+bTzVSXVfo7UPpiHU76AxC25QBx/In/kk09QmLZFfFZI5KhMZZ7PUrfJzwD21A+Q8CTVeJzXefkar
+X2Hmu46Wh7c3G96QnS3jqB4gF0rLd3KEAiHgBOVDOIJT1Ccd4WMofqUaHwVhqJMNSH7ZFXwkkw12
+8PzmCI+fKsG7rj8/MxIQ6QA6UADykk3vTR/9NbYCqwvIAUEvND9HHeo0cOEaC3+BoxNgI1pihscF
+MkPm6KbhU3K+6NMUid67ioYMZ7YkjBfu+/QWv4zt7SJ2PHxNgGx7P94dlRBuFD5c9a0xVspF7JPI
+/HN/H9IGSrPKxFsuw2XdiXgxmcf74usgz8Pbm56xRa/0IhFot1S+w/hBlh0/GOiT3OOu+UMli7Tl
+Po5kEtQLjB7sc6T+WAqNcGhIIBHsXWwcOjCTohtCpr4Mwrc3GBQlAAmHDhko7l8HFk7lKbrLaZkC
+hDDYDiG184hDn6RLgIjE1ATN1/U0PwsQdD7BQhEoxhg1PkUXG+6/Hzrv7EwIN4jdheRJu59NmhkI
+HUridD2IiHUzrg2yMwVAwA7hCRb6r4QnkI4IU2CuQjBEOE1lMgUVkN85yhJOiACMOWFKZQHhDYLA
+/cvuPeVJnd+E1tKGCCgAPwXDCczeZZdCUpVMPn8VoOuoGSDOJSQ9kZT3Q8hwBVZKUnnqBsj1Uouk
+ugvPVww42hAGB4I8WeLrBsOahDrXhNLWzpu6vl8Dxvw9rsKh+wDwRinAmC5F9zwO3bu6b2LJf1hF
+FJw6SIk7uK+dOtQIyXq8v4xNyrx9QPNBroBfIpRZA/8ez8HFD6PW8Payv25DC2RjHfexp1/HM/LP
+BzBNC7mlnR0GBKrsgEBD/TRPDwgOfr+/Xdz1g5U/C8BmCCeXYqUAeOIa4yhBdzktk0Q4VLNBpAY/
+TEudv2vg0uGmfP/yKo6DuQTdh5HmsoGkE7NCfrQucHu4fxUgMJEpuvM+s8Y0iRPXbmdpNofYeFr+
+7ZWQfhDdCDg1rZv84SCURdC6f3k1wznZk3QRLCvNQzs5K4CTDXuGykss7wV1OkW32xl0fdN1AbQu
+qRzC8M5QltD6f6YkyyJAillc1+5dA5IBwWSKwTVUsyr1sIYUOCrYv+hS46jgORWgf04hwogScNIa
+6geIAGk9PreRUsLBYmtJsrGbtGFjogqwtu7GoUHw/FHWsCI3ca9p4zOF1/DiPpRnG8p0jUv2u2A8
+ExjuOhkTd6n5wQ2HcgLK3GR8B3Cjyz4Srk2YnOzKlSkMuZTHacpjRj5u6rrDvNmNxWLr/zQDXSts
+wN149KplbPm6EOh/c1DAm5TDh0o2GdzBbrawNwmPBXRXrlxhb0IQ+4Q3LcCXTIFlj4fQMknyBVb8
+Ovy6IEG3z5TBmTtkE7PidcVdB2nWObMIoOFaBV/eYHQHr/FlfVnBhV8RYTzTr6C+6MkKn+Lri59z
+nUnhxXrALeOE+/3/bkm26UNDrlL0M7jyv4gaKlmS6O1FhUPKxQx/JpNZbPyJjDB7FpvjMDRJdIbN
+StfpuHX5cRC+anIeY9lBHLvwH7+4z8wkcMf8FpOJIPkH9oHT+AplbmRzdHJlYW0KZW5kb2JqCjUg
+MCBvYmoKPDwKL1R5cGUgL1BhZ2UKL01lZGlhQm94IFswIDAgNTk1LjU2IDg0Mi4wNF0KL1Jlc291
+cmNlcyA8PAovRm9udCA8PAovRjEgNiAwIFIKPj4KL0V4dEdTdGF0ZSA8PAovR1M3IDkgMCBSCi9H
+UzggMTAgMCBSCj4+Ci9Qcm9jU2V0IFsvUERGIC9UZXh0IC9JbWFnZUIgL0ltYWdlQyAvSW1hZ2VJ
+XQo+PgovQ29udGVudHMgMTEgMCBSCi9Hcm91cCA8PAovVHlwZSAvR3JvdXAKL1MgL1RyYW5zcGFy
+ZW5jeQovQ1MgL0RldmljZVJHQgo+PgovVGFicyAvUwovU3RydWN0UGFyZW50cyAwCi9QYXJlbnQg
+MiAwIFIKPj4KZW5kb2JqCjEyIDAgb2JqCjw8Ci9TIC9QCi9UeXBlIC9TdHJ1Y3RFbGVtCi9LIFsw
+XQovUCAxMyAwIFIKL1BnIDUgMCBSCj4+CmVuZG9iagoxNCAwIG9iago8PAovUyAvUAovVHlwZSAv
+U3RydWN0RWxlbQovSyBbMV0KL1AgMTMgMCBSCi9QZyA1IDAgUgo+PgplbmRvYmoKMTUgMCBvYmoK
+PDwKL1MgL1AKL1R5cGUgL1N0cnVjdEVsZW0KL0sgWzJdCi9QIDEzIDAgUgovUGcgNSAwIFIKPj4K
+ZW5kb2JqCjE2IDAgb2JqCjw8Ci9TIC9QCi9UeXBlIC9TdHJ1Y3RFbGVtCi9LIFszXQovUCAxMyAw
+IFIKL1BnIDUgMCBSCj4+CmVuZG9iagoxNyAwIG9iago8PAovUyAvUAovVHlwZSAvU3RydWN0RWxl
+bQovSyBbNF0KL1AgMTMgMCBSCi9QZyA1IDAgUgo+PgplbmRvYmoKMTggMCBvYmoKPDwKL1MgL1AK
+L1R5cGUgL1N0cnVjdEVsZW0KL0sgWzVdCi9QIDEzIDAgUgovUGcgNSAwIFIKPj4KZW5kb2JqCjE5
+IDAgb2JqCjw8Ci9TIC9QCi9UeXBlIC9TdHJ1Y3RFbGVtCi9LIFs2XQovUCAxMyAwIFIKL1BnIDUg
+MCBSCj4+CmVuZG9iagoyMCAwIG9iago8PAovUyAvUAovVHlwZSAvU3RydWN0RWxlbQovSyBbN10K
+L1AgMTMgMCBSCi9QZyA1IDAgUgo+PgplbmRvYmoKMjEgMCBvYmoKPDwKL1MgL1AKL1R5cGUgL1N0
+cnVjdEVsZW0KL0sgWzhdCi9QIDEzIDAgUgovUGcgNSAwIFIKPj4KZW5kb2JqCjIyIDAgb2JqCjw8
+Ci9TIC9QCi9UeXBlIC9TdHJ1Y3RFbGVtCi9LIFs5XQovUCAxMyAwIFIKL1BnIDUgMCBSCj4+CmVu
+ZG9iagoyMyAwIG9iago8PAovUyAvUAovVHlwZSAvU3RydWN0RWxlbQovSyBbMTBdCi9QIDEzIDAg
+UgovUGcgNSAwIFIKPj4KZW5kb2JqCjEzIDAgb2JqCjw8Ci9TIC9QYXJ0Ci9UeXBlIC9TdHJ1Y3RF
+bGVtCi9LIFsxMiAwIFIgMTQgMCBSIDE1IDAgUiAxNiAwIFIgMTcgMCBSIDE4IDAgUiAxOSAwIFIg
+MjAgMCBSIDIxIDAgUiAyMiAwIFIgMjMgMCBSXQovUCAzIDAgUgo+PgplbmRvYmoKMjQgMCBvYmoK
+PDwKL051bXMgWzAgWzEyIDAgUiAxNCAwIFIgMTUgMCBSIDE2IDAgUiAxNyAwIFIgMTggMCBSIDE5
+IDAgUiAyMCAwIFIgMjEgMCBSIDIyIDAgUiAyMyAwIFJdXQo+PgplbmRvYmoKNCAwIG9iago8PAov
+Rm9vdG5vdGUgL05vdGUKL0VuZG5vdGUgL05vdGUKL1RleHRib3ggL1NlY3QKL0hlYWRlciAvU2Vj
+dAovRm9vdGVyIC9TZWN0Ci9JbmxpbmVTaGFwZSAvU2VjdAovQW5ub3RhdGlvbiAvU2VjdAovQXJ0
+aWZhY3QgL1NlY3QKL1dvcmtib29rIC9Eb2N1bWVudAovV29ya3NoZWV0IC9QYXJ0Ci9NYWNyb3No
+ZWV0IC9QYXJ0Ci9DaGFydHNoZWV0IC9QYXJ0Ci9EaWFsb2dzaGVldCAvUGFydAovU2xpZGUgL1Bh
+cnQKL0NoYXJ0IC9TZWN0Ci9EaWFncmFtIC9GaWd1cmUKPj4KZW5kb2JqCjMgMCBvYmoKPDwKL1R5
+cGUgL1N0cnVjdFRyZWVSb290Ci9Sb2xlTWFwIDQgMCBSCi9LIFsxMyAwIFJdCi9QYXJlbnRUcmVl
+IDI0IDAgUgovUGFyZW50VHJlZU5leHRLZXkgMQo+PgplbmRvYmoKMiAwIG9iago8PAovVHlwZSAv
+UGFnZXMKL0tpZHMgWzUgMCBSXQovQ291bnQgMQo+PgplbmRvYmoKMSAwIG9iago8PAovVHlwZSAv
+Q2F0YWxvZwovUGFnZXMgMiAwIFIKL0xhbmcgKGVuLVVTKQovU3RydWN0VHJlZVJvb3QgMyAwIFIK
+L01hcmtJbmZvIDw8Ci9NYXJrZWQgdHJ1ZQo+Pgo+PgplbmRvYmoKMjUgMCBvYmoKPDwKL0F1dGhv
+ciAoQkVSTkFSRCkKL0NyZWF0b3IgPEZFRkYwMDREMDA2OTAwNjMwMDcyMDA2RjAwNzMwMDZGMDA2
+NjAwNzQwMEFFMDAyMDAwNTcwMDZGMDA3MjAwNjQwMDIwMDAzMjAwMzAwMDMxMDAzNj4KL0NyZWF0
+aW9uRGF0ZSAoRDoyMDI0MDMwNDEzMDAyNSswMCcwMCcpCi9Qcm9kdWNlciAod3d3Lmlsb3ZlcGRm
+LmNvbSkKL01vZERhdGUgKEQ6MjAyNDAzMDQxMzAwMjhaKQo+PgplbmRvYmoKeHJlZgowIDI2CjAw
+MDAwMDAwMDAgNjU1MzUgZg0KMDAwMDAwNDU0MiAwMDAwMCBuDQowMDAwMDA0NDg1IDAwMDAwIG4N
+CjAwMDAwMDQzNzUgMDAwMDAgbg0KMDAwMDAwNDA5MiAwMDAwMCBuDQowMDAwMDAyNzM0IDAwMDAw
+IG4NCjAwMDAwMDA3NTkgMDAwMDAgbg0KMDAwMDAwMDAxNSAwMDAwMCBuDQowMDAwMDAwMjY2IDAw
+MDAwIG4NCjAwMDAwMDA5NDMgMDAwMDAgbg0KMDAwMDAwMDk5OSAwMDAwMCBuDQowMDAwMDAxMDU2
+IDAwMDAwIG4NCjAwMDAwMDMwMzcgMDAwMDAgbg0KMDAwMDAwMzg0MSAwMDAwMCBuDQowMDAwMDAz
+MTEwIDAwMDAwIG4NCjAwMDAwMDMxODMgMDAwMDAgbg0KMDAwMDAwMzI1NiAwMDAwMCBuDQowMDAw
+MDAzMzI5IDAwMDAwIG4NCjAwMDAwMDM0MDIgMDAwMDAgbg0KMDAwMDAwMzQ3NSAwMDAwMCBuDQow
+MDAwMDAzNTQ4IDAwMDAwIG4NCjAwMDAwMDM2MjEgMDAwMDAgbg0KMDAwMDAwMzY5NCAwMDAwMCBu
+DQowMDAwMDAzNzY3IDAwMDAwIG4NCjAwMDAwMDM5ODEgMDAwMDAgbg0KMDAwMDAwNDY1NiAwMDAw
+MCBuDQp0cmFpbGVyCjw8Ci9TaXplIDI2Ci9Sb290IDEgMCBSCi9JbmZvIDI1IDAgUgovSUQgWzxC
+QzA0NTAzOERDNEU0QjcwRjQzMThBM0JDODMxRkM1OT4gPEMzMzRCOTBEQUE1QzRFNjEyQzQyQTA0
+OUM2MTFCRjQ3Pl0KPj4Kc3RhcnR4cmVmCjQ4OTAKJSVFT0YK
+--Apple-Mail=_EA9D3B93-041C-40FE-BC46-8E675A270322--
+
+--Apple-Mail=_8B215924-5958-4A9C-82D4-49730ED44B22--
+
+--Apple-Mail=_5CF4B951-A4B6-4D08-ACB8-B345F0D07AEC--
diff --git a/Packs/MailListener/ReleaseNotes/1_0_54.md b/Packs/MailListener/ReleaseNotes/1_0_54.md
new file mode 100644
index 000000000000..94f4efcf7333
--- /dev/null
+++ b/Packs/MailListener/ReleaseNotes/1_0_54.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Mail Listener v2
+
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.95440*.
+- Fixed an issue where an email attachment was parsed incorrectly when encoded in an unexpected format.
diff --git a/Packs/MailListener/pack_metadata.json b/Packs/MailListener/pack_metadata.json
index 8b3be4985c5e..f5295143e8f7 100644
--- a/Packs/MailListener/pack_metadata.json
+++ b/Packs/MailListener/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Mail Listener",
"description": "Listen to a mailbox, enable incident triggering via e-mail",
"support": "xsoar",
- "currentVersion": "1.0.53",
+ "currentVersion": "1.0.54",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Malware/ReleaseNotes/1_4_13.md b/Packs/Malware/ReleaseNotes/1_4_13.md
new file mode 100644
index 000000000000..7282491c7070
--- /dev/null
+++ b/Packs/Malware/ReleaseNotes/1_4_13.md
@@ -0,0 +1,3 @@
+## Malware Core
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Malware/pack_metadata.json b/Packs/Malware/pack_metadata.json
index bb5df57be142..5779c5408193 100644
--- a/Packs/Malware/pack_metadata.json
+++ b/Packs/Malware/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Malware Core",
"description": "Supporting pack for the Malware Investigation & Response pack.",
"support": "xsoar",
- "currentVersion": "1.4.12",
+ "currentVersion": "1.4.13",
"serverMinVersion": "6.0.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
diff --git a/Packs/Mattermost/.secrets-ignore b/Packs/Mattermost/.secrets-ignore
index e69de29bb2d1..c2b9de583ce1 100644
--- a/Packs/Mattermost/.secrets-ignore
+++ b/Packs/Mattermost/.secrets-ignore
@@ -0,0 +1,7 @@
+user@example.com
+test@example.com
+1.1.1.1
+8.8.8.8
+https://example.com
+https://jira-dc.paloaltonetworks.com
+test@test.com
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.py b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.py
new file mode 100644
index 000000000000..ab6990ba0ad3
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.py
@@ -0,0 +1,1704 @@
+import demistomock as demisto # noqa: F401
+from CommonServerPython import * # noqa: F401
+
+
+from CommonServerUserPython import * # noqa
+import asyncio
+import concurrent
+import aiohttp
+import urllib3
+from typing import Any
+from urllib.parse import urlparse
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+
+''' CONSTANTS '''
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR
+DEFAULT_PAGE_NUMBER = 0
+DEFAULT_PAGE_SIZE = 50
+DEFAULT_LIMIT = 50
+PLAYGROUND_INVESTIGATION_TYPE = 9
+SECRET_TOKEN: str
+BASE_URL: str
+PROXY = False
+VERIFY = False
+SSL_CONTEXT: Optional[ssl.SSLContext]
+PROXIES = {}
+PROXY_URL: str
+DEMISTO_URL: str
+WEBSOCKET_URL: str
+MAX_SAMPLES = 10
+INCIDENT_TYPE: str
+ALLOW_INCIDENTS: bool
+PORT: int
+MIRRORING_ENABLED: bool
+LONG_RUNNING: bool
+CACHED_INTEGRATION_CONTEXT: dict
+VERIFY_CERT: bool
+CACHE_EXPIRY: float
+MESSAGE_FOOTER = '\n**From Mattermost**'
+MIRROR_TYPE = 'mirrorEntry'
+OBJECTS_TO_KEYS = {
+ 'mirrors': 'investigation_id',
+ 'messages': 'entitlement',
+}
+DEFAULT_OPTIONS: Dict[str, Any] = {
+ "timeout": 100,
+ "request_timeout": None,
+ "mfa_token": None,
+ "auth": None,
+ "keepalive": False,
+ "keepalive_delay": 5,
+ "websocket_kw_args": {},
+ "debug": False,
+ "http2": False,
+}
+GUID_REGEX = r'(\{){0,1}[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}(\}){0,1}'
+ENTITLEMENT_REGEX = fr'{GUID_REGEX}@(({GUID_REGEX})|(?:[\d_]+))_*(\|\S+)?\b'
+''' CLIENT CLASS '''
+
+
+class WebSocketClient: # pragma: no cover
+ def __init__(
+ self,
+ base_url: str,
+ token: str,
+ verify: bool,
+ proxy: bool
+ ):
+ self.base_url = base_url
+ self.token = token
+ self.alive = False
+ self.last_msg = 0.0
+ self.verify = verify
+ self.proxy = proxy
+ self.options = DEFAULT_OPTIONS.copy()
+
+ async def connect(self, event_handler):
+ """
+ Connect to the websocket and authenticate it.
+ When the authentication has finished, start the loop listening for messages,
+ sending a ping to the server to keep the connection alive.
+
+ :param event_handler: Every websocket event will be passed there. Takes one argument.
+ :type event_handler: Function(message)
+ :return:
+ """
+ if 'https://' in self.base_url:
+ uri = self.base_url.replace("https://", "wss://", 1)
+ else:
+ uri = self.base_url.replace("http://", "ws://", 1)
+ uri += '/api/v4/websocket'
+ url = self.base_url + '/api/v4/websocket'
+ demisto.debug(f'MM: The uri for the websocket is {uri}, the url is {url}')
+
+ self.alive = True
+
+ while True:
+ try:
+ async with aiohttp.ClientSession() as session:
+ async with session.ws_connect(
+ uri,
+ ssl=SSL_CONTEXT,
+ proxy=PROXY_URL,
+ ) as websocket:
+ demisto.debug('MM: starting to authenticate')
+ await self.authenticate(websocket, event_handler)
+ while self.alive:
+ try:
+ await self.start_loop(websocket, event_handler)
+ except aiohttp.ClientError:
+ break
+ if (not self.options["keepalive"]) or (not self.alive):
+ break
+ except Exception as e:
+ demisto.info(f"MM: Failed to establish websocket connection: {type(e)} thrown - {str(e)}")
+ await asyncio.sleep(float("inf"))
+
+ async def start_loop(self, websocket, event_handler):
+ """
+        Listen for websocket events, sending heartbeats on a timer.
+        If we don't, the web server would close the idle connection,
+        forcing us to reconnect.
+ """
+ demisto.debug("MM: Starting websocket loop")
+ keep_alive = asyncio.ensure_future(self.heartbeat(websocket))
+ demisto.debug("MM: Waiting for messages on websocket")
+ while self.alive:
+ message = await websocket.receive_str()
+ self.last_msg = time.time()
+ demisto.debug(f"MM: {message=}")
+ await event_handler(self, message)
+ demisto.debug("MM: Cancelling heartbeat task")
+ keep_alive.cancel()
+ try:
+ await keep_alive
+ except asyncio.CancelledError:
+ pass
+
+ async def heartbeat(self, websocket):
+ """
+        Pongs the server if we did not get a message within the timeframe.
+ """
+ timeout: float = self.options["timeout"]
+ while True:
+ since_last_msg: float = time.time() - self.last_msg
+ next_timeout: float = timeout - since_last_msg if since_last_msg <= timeout else timeout
+ await asyncio.sleep(next_timeout)
+ if time.time() - self.last_msg >= timeout:
+ await websocket.pong()
+ self.last_msg = time.time()
+
+ def disconnect(self):
+ """Sets `self.alive` to False so the loop in `self.start_loop` will finish."""
+ demisto.debug("Disconnecting websocket")
+ self.alive = False
+
+ async def authenticate(self, websocket, event_handler):
+ """
+        Sends an authentication challenge over a websocket.
+ """
+ demisto.debug("MM: Authenticating websocket")
+ json_data = json.dumps({"seq": 1, "action": "authentication_challenge", "data": {"token": self.token}})
+ await websocket.send_str(json_data)
+ while True:
+ message = await websocket.receive_str()
+ status = json.loads(message)
+ demisto.debug(f"MM: The status is: {status}")
+ await event_handler(self, message)
+ if ("event" in status and status["event"] == "hello") and ("seq" in status and status["seq"] == 0):
+ demisto.debug("MM: Websocket authentification OK")
+ return True
+ demisto.error("MM: Websocket authentification failed")
+
+
+class HTTPClient(BaseClient):
+ """Client class to interact with the MatterMost API
+ """
+
+ def __init__(
+ self,
+ base_url: str,
+ headers: dict,
+ personal_access_token: str,
+ bot_access_token: str,
+ team_name: str,
+ notification_channel: str | None = None,
+ verify: bool = True,
+ proxy: bool = False,
+ ):
+ super().__init__(base_url, verify, proxy, headers=headers)
+ self.bot_access_token = bot_access_token
+ self.personal_access_token = personal_access_token
+ self.team_name = team_name
+ self.notification_channel = notification_channel
+
+ def get_team_request(self, team_name: str) -> dict[str, str]:
+ """Gets a team details based on its name"""
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/teams/name/{team_name}')
+
+ return response
+
+ def list_channel_request(self, team_id: str, params: dict, get_private: bool = False) -> list[dict[str, Any]]:
+ """lists channels in a specific team"""
+ if get_private:
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/teams/{team_id}/channels/private', params=params)
+ else:
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/teams/{team_id}/channels', params=params)
+
+ return response
+
+ def create_channel_request(self, params: dict) -> dict[str, str]:
+ """Creates a channel"""
+ response = self._http_request(method='POST', url_suffix='/api/v4/channels', json_data=params)
+
+ return response
+
+ def get_channel_by_name_and_team_name_request(self, team_name: str, channel_name: str) -> dict[str, Any]:
+ """Gets a channel based on name and team name"""
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/teams/name/{team_name}/channels/name/{channel_name}')
+
+ return response
+
+ def add_channel_member_request(self, channel_id: str, data: dict) -> dict[str, str]:
+ """Adds a channel member"""
+ response = self._http_request(method='POST', url_suffix=f'/api/v4/channels/{channel_id}/members', json_data=data)
+
+ return response
+
+ def remove_channel_member_request(self, channel_id: str, user_id: str) -> dict[str, str]:
+ """Removes a channel member"""
+ response = self._http_request(method='DELETE', url_suffix=f'/api/v4/channels/{channel_id}/members/{user_id}')
+
+ return response
+
+ def list_users_request(self, params: dict) -> list[dict[str, Any]]:
+ """lists users"""
+ response = self._http_request(method='GET', url_suffix='/api/v4/users', params=params)
+
+ return response
+
+ def close_channel_request(self, channel_id: str) -> list[dict[str, Any]]:
+ """Closes a channel"""
+ response = self._http_request(method='DELETE', url_suffix=f'/api/v4/channels/{channel_id}')
+
+ return response
+
+ def send_file_request(self, file_info: dict, params: dict) -> dict[str, Any]:
+ "Sends a file"
+ files = {'file': (file_info['name'], open(file_info['path'], 'rb'))}
+
+ response = self._http_request(
+ method='POST',
+ url_suffix='/api/v4/files',
+ files=files,
+ params=params,
+ json_data={'channel_id': params.get('channel_id')}
+ )
+ return response
+
+ def create_post_with_file_request(self, data: dict) -> list[dict[str, Any]]:
+ """Creates a post with a file request"""
+ response = self._http_request(method='POST', url_suffix='/api/v4/posts', json_data=data)
+
+ return response
+
+ def update_channel_request(self, channel_id: str, params: dict) -> list[dict[str, Any]]:
+ """Updates a channel"""
+ response = self._http_request(method='PUT', url_suffix=f'/api/v4/channels/{channel_id}', json_data=params)
+
+ return response
+
+ def get_user_request(self, user_id: str = '', bot_user: bool = False) -> dict[str, Any]:
+ """Gets a user"""
+ if not user_id:
+ user_id = 'me'
+ if bot_user:
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/users/{user_id}',
+ headers={'authorization': f'Bearer {self.bot_access_token}'})
+ else:
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/users/{user_id}')
+
+ return response
+
+ def send_notification_request(self, channel_id: str, message: str, file_ids: list[str] = [], root_id: str = '', props: dict = {}) -> dict[str, Any]: # noqa: E501
+ "Sends a notification"
+ data = {"channel_id": channel_id,
+ "message": message,
+ "props": props,
+ "root_id": root_id,
+ "file_ids": file_ids,
+ }
+ remove_nulls_from_dictionary(data)
+ response = self._http_request(method='POST', url_suffix='/api/v4/posts', json_data=data,
+ headers={'authorization': f'Bearer {self.bot_access_token}'})
+
+ return response
+
+ def update_post_request(self, message: str, root_id: str) -> dict[str, Any]: # noqa: E501
+ "Sends a notification"
+ data = {
+ "message": message,
+ "id": root_id,
+ }
+ demisto.debug(f"MM: {data=}")
+ remove_nulls_from_dictionary(data)
+ response = self._http_request(method='PUT', url_suffix=f'/api/v4/posts/{root_id}', json_data=data,
+ headers={'authorization': f'Bearer {self.bot_access_token}'})
+
+ demisto.debug(f"MM: response fom update message. {response=}")
+ return response
+
+ def get_user_by_email_request(self, user_email: str) -> dict[str, Any]:
+ "Gets a user by email"
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/users/email/{user_email}')
+
+ return response
+
+ def get_user_by_username_request(self, username: str) -> dict[str, Any]:
+ "Gets a user by username"
+ response = self._http_request(method='GET', url_suffix=f'/api/v4/users/username/{username}')
+
+ return response
+
+ def create_direct_channel_request(self, user_id: str, bot_id: str) -> dict[str, Any]:
+ "creates a direct channel"
+ response = self._http_request(method='POST', url_suffix='/api/v4/channels/direct', json_data=[bot_id, user_id])
+
+ return response
+
+
+CLIENT: HTTPClient
+
+''' HELPER FUNCTIONS '''
+
+
+def get_war_room_url(url: str, incident_id: str = '') -> str:
+ # a workaround until this bug is resolved: https://jira-dc.paloaltonetworks.com/browse/CRTX-107526
+ if is_xsiam():
+ if not incident_id:
+ incident_id = demisto.callingContext.get('context', {}).get('Inv', {}).get('id')
+ incident_url = urlparse(url)
+ war_room_url = f"{incident_url.scheme}://{incident_url.netloc}/incidents"
+ # executed from the incident War Room
+ if incident_id and incident_id.startswith('INCIDENT-'):
+ war_room_url += f"/war_room?caseId={incident_id.split('-')[-1]}"
+ # executed from the alert War Room
+ else:
+ war_room_url += f"/alerts_and_insights?caseId={incident_id}&action:openAlertDetails={incident_id}-warRoom"
+
+ return war_room_url
+
+ return url
+
+
+def next_expiry_time() -> float:
+ """
+ Returns:
+ A float representation of a new expiry time with an offset of 5 seconds
+ """
+ return (datetime.now(timezone.utc) + timedelta(seconds=5)).timestamp()
+
+
+def get_current_utc_time() -> datetime:
+ """
+ Returns:
+ The current UTC time.
+ """
+ return datetime.utcnow()
+
+
+async def check_and_handle_entitlement(answer_text: str, root_id: str, user_name: str) -> str: # pragma: no cover
+ """
+ Handles an entitlement message (a reply to a question)
+ Args:
+ Returns:
+ If the message contains entitlement, return a reply.
+ """
+ integration_context = fetch_context(force_refresh=True)
+ messages = integration_context.get('messages', [])
+ reply = ''
+ if not messages:
+ return reply
+ messages = json.loads(messages)
+ demisto.debug(f"MM: messages with entitlements. {messages=}")
+ message_filter = list(filter(lambda q: q.get('root_id') == root_id, messages))
+ if message_filter:
+ demisto.debug("MM: Found correct message")
+ message = message_filter[0]
+ entitlement = message.get('entitlement')
+ reply = message.get('reply')
+ guid, incident_id, task_id = extract_entitlement(entitlement)
+ demisto.handleEntitlementForUser(incident_id, guid, user_name, answer_text, task_id)
+ message['remove'] = True
+ set_to_integration_context_with_retries({'messages': messages}, OBJECTS_TO_KEYS)
+ return reply
+
+
def run_long_running():  # pragma: no cover
    """
    Starts the long running thread.

    Runs the async listener to completion and then makes a best-effort attempt
    to stop and close any event loop that is still running.
    """
    try:
        asyncio.run(start_listening())
    except Exception as e:
        demisto.error(f"MM: The Loop has failed to run {str(e)}")
    finally:
        try:
            # asyncio.get_running_loop() raises RuntimeError when no loop is
            # running - the normal case once asyncio.run() has returned - so it
            # must live inside the try block; otherwise the RuntimeError would
            # propagate out of this finally clause.
            loop = asyncio.get_running_loop()
            loop.stop()
            loop.close()
        except Exception as e_:
            demisto.error(f'MM: Failed to gracefully close the loop - {e_}')
+
+
async def start_listening():  # pragma: no cover
    """
    Starts the listener flow: runs the blocking polling loop in a worker thread
    and awaits the MatterMost websocket loop in the current task.
    """
    try:
        demisto.debug('MM: Starting to listen')
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        # NOTE(review): run_in_executor is invoked on a freshly created loop that
        # this coroutine never runs; it appears to rely on the loop driving this
        # coroutine instead - confirm the executor task is actually scheduled.
        loop.run_in_executor(executor, long_running_loop)
        await mattermost_loop()
    except Exception as e:
        demisto.error(f"An error has occurred while gathering the loop tasks. {e}")
+
+
async def mattermost_loop():  # pragma: no cover
    """
    Maintains the websocket connection to MatterMost.

    Reconnects in an endless loop, doubling the wait after each failed attempt
    (exponential backoff) and resetting it to one second after a successful
    connect. The client is always disconnected before a retry.
    """
    try:
        exception_await_seconds = 1
        while True:
            ws_client = WebSocketClient(BASE_URL, SECRET_TOKEN, VERIFY, PROXY)

            try:
                demisto.debug('MM: Trying to connect')
                await ws_client.connect(event_handler)
                # After successful connection, we reset the backoff time.
                exception_await_seconds = 1
                # Park this task forever; events are delivered via event_handler.
                await asyncio.sleep(float("inf"))
            except Exception as e:
                demisto.debug(f"MM: Exception in long running loop, waiting {exception_await_seconds} - {e}")
                await asyncio.sleep(exception_await_seconds)
                exception_await_seconds *= 2
            finally:
                try:
                    # NOTE(review): disconnect() is not awaited - confirm it is a
                    # synchronous API on this websocket client.
                    ws_client.disconnect()
                except Exception as e:
                    demisto.debug(f"MM: Failed to close client. - {e}")
    except Exception as e:
        demisto.error(f"MM: An error has occurred while trying to create the socket client. {e}")
+
+
def long_running_loop():  # pragma: no cover
    """
    Polls forever for unanswered entitlement questions and reports module health.

    The polling interval is 30 seconds when mirroring is enabled, 60 otherwise.
    """
    global MIRRORING_ENABLED
    tts = 30 if MIRRORING_ENABLED else 60  # seconds to sleep between polls
    while True:
        error = ''
        try:
            check_for_unanswered_messages()
            time.sleep(tts)
        except requests.exceptions.ConnectionError as e:
            error = f'Could not connect to the MatterMost endpoint: {str(e)}'
        except Exception as e:
            error = f'An error occurred: {e}'
        finally:
            # Clear the health status first; it is overwritten below on error.
            demisto.updateModuleHealth('')
            if error:
                demisto.error(error)
                demisto.updateModuleHealth(error)
+
+
def check_for_unanswered_messages():
    """
    Answers every expired entitlement question with its configured default
    response and flags it for removal from the integration context.
    """
    demisto.debug('MM: checking for unanswered messages')
    integration_context = fetch_context()
    messages = integration_context.get('messages')
    if messages:
        messages = json.loads(messages)
        now = datetime.utcnow()
        updated_messages = []

        for message in messages:
            if message.get('expiry'):
                # Check if the question expired - if it did, answer it with the default response
                # and remove it
                expiry = datetime.strptime(message['expiry'], DATE_FORMAT)
                if expiry < now:
                    demisto.debug(f"MM: message expired: {message}, answering it with the default response")
                    answer_question(message.get('default_response'), message, email='')
                    message['remove'] = True
                    updated_messages.append(message)
                    continue
            updated_messages.append(message)

        if updated_messages:
            # NOTE(review): `messages` (mutated in place) is persisted rather than
            # `updated_messages`; both lists hold the same dict objects so the
            # behavior matches, but the second list looks redundant - confirm.
            set_to_integration_context_with_retries({'messages': messages}, OBJECTS_TO_KEYS)
+
+
async def event_handler(client: WebSocketClient, req: str):
    """Dispatches raw websocket events arriving from MatterMost."""
    demisto.debug(f"MM: Got events: {req} - with type {type(req)}")
    payload = json.loads(req)

    if 'error' in payload:
        error_details = payload.get('error', {})
        await handle_listen_error(
            f"MatterMost API has thrown an error. Code: {error_details.get('id')}, "
            f"Message: {error_details.get('message')}.")
        return

    event_type = payload.get('event')
    # hello and authentication acknowledgements are handled elsewhere
    if event_type == 'hello' or payload.get('seq_reply') == 1:
        return

    if event_type == 'posted':
        await handle_posts(payload)
+
+
def is_bot_message(payload: dict) -> bool:
    """
    Determines if the message received was created by a bot or not.
    :param payload: dict: The payload sent with the message
    :return: bool: True indicates the message was from a Bot, False indicates it was from an individual
    """
    global CLIENT
    from_bot = payload.get('props', {}).get('from_bot', '')
    bot_id = get_user_id_from_token(CLIENT, bot_user=True)
    post = json.loads(payload.get("data", {}).get("post"))

    # Either the poster's ID matches our bot's ID, or the post is flagged as bot-authored.
    posted_by_our_bot = bool(bot_id) and bot_id == post.get('user_id', '')
    return posted_by_our_bot or bool(from_bot)
+
+
def is_dm(payload: dict):
    """
    Checks whether the incoming post was sent in a direct-message channel (type 'D').
    :param payload: dict: The websocket event payload to inspect.
    :return: bool: True if the channel is a DM, False otherwise.
    """
    return payload.get('data', {}).get('channel_type') == 'D'
+
+
def is_thread(post: dict):
    """
    Checks whether a post is a reply inside a thread (has a non-empty root ID).
    :param post: dict: The post to check.
    :return: bool: True if the post belongs to a thread, False otherwise.
    """
    thread_root = post.get('root_id', '')
    return thread_root != ''
+
+
def get_user_id_from_token(client: HTTPClient, bot_user: bool = False) -> str:
    """
    Resolves the ID of the user associated with the configured access token.
    :param client: The client to query with.
    :param bot_user: Whether to use the bot token instead of the personal one.
    :return: str: The ID of the token's user, or '' when absent.
    """
    return client.get_user_request(bot_user=bot_user).get('id', '')
+
+
def get_user_id_by_email(client: HTTPClient, email: str) -> str:
    """
    Gets a user ID from the email
    :param client: The client to query with.
    :param email: str: The email of the user
    :return: str: The id of the user, or '' when absent.
    """
    user_details = client.get_user_by_email_request(email)
    return user_details.get('id', '')
+
+
def get_user_id_by_username(client: HTTPClient, username: str) -> str:
    """
    Gets a user ID from the username
    :param client: The client to query with.
    :param username: str: The username of the user
    :return: str: The id of the user
    """
    result = client.get_user_by_username_request(username)
    return result.get('id', '')
+
+
def get_username_by_email(client: HTTPClient, email: str) -> str:
    """
    Gets a username from the email
    :param client: The client to query with.
    :param email: str: The email of the user
    :return: str: The username of the user, or '' when absent.
    """
    return client.get_user_by_email_request(email).get('username', '')
+
+
def get_username_by_id(client: HTTPClient, user_id: str) -> str:
    """
    Gets a username from the user ID
    :param client: The client to query with.
    :param user_id: str: The ID of the user
    :return: str: The username of the user
    """
    result = client.get_user_request(user_id)
    return result.get('username', '')
+
+
def fetch_context(force_refresh: bool = False) -> dict:
    """
    Fetches the integration instance context from the server if the CACHE_EXPIRY is smaller than the current epoch time
    In the event that the cache is not expired, we return a cached copy of the context which has been stored in memory.
    We can force the retrieval of the updated context by setting the force_refresh flag to True.
    :param force_refresh: bool: Indicates if the context should be refreshed regardless of the expiry time.
    :return: dict: Either a cached copy of the integration context, or the context itself.
    """
    global CACHED_INTEGRATION_CONTEXT, CACHE_EXPIRY
    now = int(datetime.now(timezone.utc).timestamp())
    if (now >= CACHE_EXPIRY) or force_refresh:
        demisto.debug(f'Cached context has expired or forced refresh. forced refresh value is {force_refresh}. '
                      'Fetching new context')
        # Refresh the in-memory copy and push the expiry window forward.
        CACHE_EXPIRY = next_expiry_time()
        CACHED_INTEGRATION_CONTEXT = get_integration_context()

    return CACHED_INTEGRATION_CONTEXT
+
+
def get_channel_id_from_context(channel_name: str = '', investigation_id=None):
    """
    Retrieves a MatterMost channel ID based on the provided criteria.

    :param channel_name: The name of the channel to get the ID for.
    :param investigation_id: The Demisto investigation ID to search for a mirrored channel.

    :return: The requested channel ID or None if not found.
    """
    if not (channel_name or investigation_id):
        return None
    mirrors = json.loads(fetch_context().get('mirrors', '[]'))

    # An investigation ID takes precedence over the channel name.
    if investigation_id:
        matched = next((mirror for mirror in mirrors if mirror["investigation_id"] == investigation_id), None)
    else:
        matched = next((mirror for mirror in mirrors if mirror["channel_name"] == channel_name), None)
    return matched.get('channel_id') if matched else None
+
+
def get_channel_id_to_send_notif(client: HTTPClient, to: str, channel_name: str | None, investigation_id: str) -> str:
    """
    Gets a channel ID for the correct channel to send the notification to
    :param client: The client to query with.
    :param to: A username or email to open a direct channel with (takes precedence).
    :param channel_name: The channel to notify when no direct recipient is given.
    :param investigation_id: The investigation ID used to look up a mirrored channel.
    :return: str: The channel id of the channel
    """
    channel_id = ''
    if to:
        # create a new channel and send the message there
        if re.match(emailRegex, to):
            to = get_user_id_by_email(client, to)
        else:
            to = get_user_id_by_username(client, to)

        bot_id = get_user_id_from_token(client, bot_user=True)
        channel_object = client.create_direct_channel_request(to, bot_id)
        channel_id = channel_object.get('id', '')
        demisto.debug(f'MM: Created a new direct channel to: {to} with channel_id: {channel_id}')

    elif channel_name:  # if channel name provided and the channel was mirrored
        channel_id = get_channel_id_from_context(channel_name, investigation_id)

        if not channel_id:
            # Not mirrored - fall back to resolving the channel via the API.
            try:
                channel_details = client.get_channel_by_name_and_team_name_request(client.team_name, channel_name)
                channel_id = channel_details.get('id', '')
            except Exception as e:
                raise DemistoException(f"Did not find channel with name {channel_name}. Error: {e}")

    return channel_id
+
+
def save_entitlement(entitlement, message_id, reply, expiry, default_response, to_id):
    """
    Saves an entitlement question in the integration context.

    Args:
        entitlement: The entitlement
        message_id: The message_id
        reply: The reply to send to the user.
        expiry: The question expiration date.
        default_response: The response to send if the question times out.
        to_id: the user id the message was sent to
    """
    integration_context = get_integration_context()
    stored = integration_context.get('messages', [])
    # The context stores messages as a JSON string; an absent key yields a fresh list.
    messages = json.loads(integration_context['messages']) if stored else stored
    messages.append({
        'root_id': message_id,
        'entitlement': entitlement,
        'reply': reply,
        'expiry': expiry,
        'sent': datetime.strftime(datetime.utcnow(), DATE_FORMAT),
        'default_response': default_response,
        'to_id': to_id,
    })
    set_to_integration_context_with_retries({'messages': messages}, OBJECTS_TO_KEYS)
+
+
def extract_entitlement(entitlement: str) -> tuple[str, str, str]:
    """
    Extracts entitlement components from an entitlement string
    Args:
        entitlement: The entitlement itself

    Returns:
        A (guid, incident_id, task_id) tuple; task_id is '' when absent.
    """
    parts = entitlement.split('@')
    if len(parts) < 2:
        raise DemistoException("Entitlement cannot be parsed")
    guid = parts[0]
    id_and_task = parts[1].split('|')
    incident_id = id_and_task[0]
    task_id = id_and_task[1] if len(id_and_task) > 1 else ''
    return guid, incident_id, task_id
+
+
def answer_question(text: str, message: dict, email: str = ''):
    """Answers a question from MattermostAskUser

    Args:
        text: The answer text to record.
        message: The stored question (holds 'entitlement' and 'root_id').
        email: The email of the answering user, if known.

    Returns:
        The incident ID the entitlement belongs to.
    """
    global CLIENT
    entitlement = message.get('entitlement', '')
    root_id = message.get('root_id', '')
    guid, incident_id, task_id = extract_entitlement(entitlement)
    try:
        demisto.handleEntitlementForUser(incident_id, guid, email, text, task_id)
        process_entitlement_reply(text, root_id)
        demisto.debug(f"MM: Handled question for {incident_id=}, {task_id=} with {text=}")
    except Exception as e:
        demisto.error(f'Failed handling entitlement {entitlement}: {str(e)}')
    # Flag the question for removal from the context whether handling succeeded or not.
    message['remove'] = True
    return incident_id
+
+
async def send_notification_async(client: HTTPClient, channel_id, message, root_id=''):
    """Async wrapper around ``send_notification_request``: posts ``message`` to
    ``channel_id``, as a thread reply when ``root_id`` is given."""
    client.send_notification_request(channel_id, message, root_id=root_id)
+
+
async def update_post_async(client: HTTPClient, message, root_id):
    """Async wrapper around ``update_post_request``: replaces the content of the
    post identified by ``root_id`` with ``message``."""
    client.update_post_request(message, root_id)
+
+
def process_entitlement_reply(  # pragma: no cover
    entitlement_reply: str,
    root_id: str = '',
    user_name: str | None = None,
    answer_text: str | None = None,
):
    """
    Triggered when an entitlement reply is found, this function will update the original message with the reply message.
    :param entitlement_reply: str: The text to update the asking question with.
    :param root_id: str: The ID of the original question post to update.
    :param user_name: str: name of the user who answered the entitlement
    :param answer_text: str: The text attached to the button, used for string replacement.
    :return: None
    """
    global CLIENT
    # str.replace is a no-op when the placeholder is absent.
    resolved_reply = entitlement_reply.replace('{user}', str(user_name))
    if answer_text:
        resolved_reply = resolved_reply.replace('{response}', str(answer_text))
    demisto.debug(f'MM: process entitlement reply with {resolved_reply} for {root_id}')
    CLIENT.update_post_request(resolved_reply, root_id)
+
+
async def handle_text_received_from_mm(investigation_id: str, text: str, operator_email: str, operator_name: str):
    """
    Mirrors a message received from MatterMost into the investigation's war room.

    Args:
        investigation_id: The mirrored investigation ID
        text: The received text
        operator_email: The sender email
        operator_name: The sender name
    """
    if not text:
        return
    demisto.addEntry(
        id=investigation_id,
        entry=text,
        username=operator_name,
        email=operator_email,
        footer=MESSAGE_FOOTER,
    )
+
+
async def handle_posts(payload):
    """
    handle posts from the Mattermost that have been identified as possible mirrored messages
    If we find one, we will update the mirror object and send
    the message to the corresponding investigation's war room as an entry.
    :param payload: str: The request payload from mattermost
    :return: None
    """
    global CLIENT
    post = json.loads(payload.get("data", {}).get("post"))
    message = post.get('message', {})
    channel_id = post.get("channel_id")
    user_id = post.get('user_id')
    if not channel_id:
        return

    if is_bot_message(payload):
        demisto.debug("MM: Got a bot message. Will not mirror.")
        return

    # If a thread, we will check if it is a reply to a MattermostAsk task.
    if is_thread(post):
        demisto.debug(f"MM: Got a thread message. {payload=}")
        username = get_username_by_id(CLIENT, user_id)
        answer_text = post.get('message', '')
        root_id = post.get('root_id', '')
        entitlement_reply = await check_and_handle_entitlement(answer_text, root_id, username)
        demisto.debug(f"MM: {entitlement_reply=}")
        if entitlement_reply:
            process_entitlement_reply(entitlement_reply, root_id, username, answer_text)

        reset_listener_health()
        return

    # Check if the message is being sent directly to our bot.
    if is_dm(payload):
        demisto.debug(f"MM: Got a dm message. {payload=}")
        await handle_dm(user_id, message, channel_id, CLIENT)
        reset_listener_health()
        return

    # Otherwise only handle the post if its channel is a registered mirror.
    integration_context = fetch_context()
    if not integration_context or 'mirrors' not in integration_context:
        return
    mirrors = json.loads(integration_context['mirrors'])
    mirror_filter = list(filter(lambda m: m['channel_id'] == channel_id, mirrors))

    if not mirror_filter:
        return
    for mirror in mirror_filter:
        if mirror['mirror_direction'] == 'FromDemisto' or mirror['mirror_type'] == 'none':
            return
        if not mirror['mirrored']:
            # In case the investigation is not mirrored yet
            mirror = mirrors.pop(mirrors.index(mirror))
            if mirror['mirror_direction'] and mirror['mirror_type']:
                investigation_id = mirror['investigation_id']
                mirror_type = mirror['mirror_type']
                auto_close = mirror['auto_close']
                direction = mirror['mirror_direction']
                # Ask the server to start mirroring now that traffic has arrived.
                demisto.mirrorInvestigation(investigation_id,
                                            f'{mirror_type}:{direction}', auto_close)
                mirror['mirrored'] = True
                mirrors.append(mirror)
                set_to_integration_context_with_retries({'mirrors': mirrors},
                                                        OBJECTS_TO_KEYS)

    user_details = CLIENT.get_user_request(user_id)
    operator_name = user_details.get('username', '')
    operator_email = user_details.get('email', '')
    investigation_id = mirror['investigation_id']
    await handle_text_received_from_mm(investigation_id, message, operator_email, operator_name)
+
+
async def handle_listen_error(error: str):
    """
    Logs an error and updates the module health accordingly.

    Args:
        error: The error string.
    """
    demisto.error(error)
    demisto.updateModuleHealth(error)
+
+
async def handle_dm(user_id: str, text: str, channel_id: str, client: HTTPClient):
    """
    Handles a direct message sent to the bot

    Args:
        user_id: The ID of the user who sent the message
        text: The message text
        channel_id: The direct channel to reply on
        client: The MatterMost client

    Returns:
        Text to return to the user
    """
    message: str = text.lower()
    user_details = client.get_user_request(user_id)
    user_name = user_details.get('username', '')
    user_email = user_details.get('email', '')
    # A message that mentions "incident" plus create/open/new is treated as an
    # incident-creation request; everything else is forwarded as a DM command.
    if message.find('incident') != -1 and (message.find('create') != -1
                                           or message.find('open') != -1
                                           or message.find('new') != -1):

        demisto_user = demisto.findUser(email=user_email) if user_email else demisto.findUser(username=user_name)

        if not demisto_user and not ALLOW_INCIDENTS:
            data = 'You are not allowed to create incidents.'
        else:
            try:
                data = await translate_create(text, user_name, user_email, demisto_user)
            except Exception as e:
                data = f'Failed creating incidents: {str(e)}'
    else:
        try:
            data = demisto.directMessage(text, user_name, user_email, ALLOW_INCIDENTS)
        except Exception as e:
            data = str(e)

    if not data:
        data = 'Sorry, I could not perform the selected operation.'

    await send_notification_async(client, channel_id, data)
+
+
async def translate_create(message: str, user_name: str, user_email: str, demisto_user: dict) -> str:  # pragma: no cover
    """
    Processes an incident creation message
    Args:
        message: The creation message
        user_name: The name of the requesting user in MatterMost
        user_email: The email of the requesting user in MatterMost
        demisto_user: The demisto user associated with the request (if exists)

    Returns:
        Creation result
    """
    json_pattern = r'(?<=json=).*'
    name_pattern = r'(?<=name=).*'
    type_pattern = r'(?<=type=).*'
    # Flatten the command string so the lookbehind patterns match across lines.
    message = message.replace("\n", '').replace('`', '')
    json_match = re.search(json_pattern, message)
    created_incident = None
    data = ''
    user_demisto_id = ''
    request_fields = {'ReporterEmail': user_email, 'Message': message}
    incidents = []
    if demisto_user:
        user_demisto_id = demisto_user.get('id', '')

    if json_match:
        # json= is exclusive: it carries the full incident definition.
        if re.search(name_pattern, message) or re.search(type_pattern, message):
            data = 'No other properties other than json should be specified.'
        else:
            incidents_json = json_match.group()
            # Normalize "smart quotes" inserted by chat clients before parsing.
            incidents = json.loads(incidents_json.replace('“', '"').replace('”', '"'))
            if not isinstance(incidents, list):
                incidents = [incidents]
            add_req_data_to_incidents(incidents, request_fields)
            created_incident = await create_incidents(incidents, user_name, user_email, user_demisto_id)

            if not created_incident:
                data = 'Failed creating incidents.'
    else:
        name_match = re.search(name_pattern, message)
        if not name_match:
            data = 'Please specify arguments in the following manner: name= type=[type] or json=.'
        else:
            # The name value ends where the optional "type=" argument begins, and vice versa.
            incident_name = re.sub('type=.*', '', name_match.group()).strip()
            incident_type = ''

            type_match = re.search(type_pattern, message)
            if type_match:
                incident_type = re.sub('name=.*', '', type_match.group()).strip()

            incident = {'name': incident_name}

            incident_type = incident_type or INCIDENT_TYPE
            if incident_type:
                incident['type'] = incident_type
            incidents = add_req_data_to_incidents([incident], request_fields)
            created_incident = await create_incidents([incident], user_name, user_email, user_demisto_id)
            if not created_incident:
                data = 'Failed creating incidents.'

    if created_incident:
        demisto.debug(f'Created {len(incidents)} incidents')
        update_integration_context_samples(incidents)
        # Only the first created incident is reported back to the user.
        if isinstance(created_incident, list):
            created_incident = created_incident[0]
        server_links = demisto.demistoUrls()
        server_link = server_links.get('server')
        incident_name = created_incident['name']
        incident_id = created_incident['id']
        incident_url = get_war_room_url(f'{server_link}#/WarRoom/{incident_id}', incident_id)
        data = f'Successfully created incident {incident_name}.\n View it on: {incident_url}'

    return data
+
+
def add_req_data_to_incidents(incidents: list, request_fields: dict) -> list:  # pragma: no cover
    """
    Adds the request_fields as a rawJSON to every created incident for further information on the incident
    """
    serialized_request = json.dumps(request_fields)
    for incident in incidents:
        incident['rawJSON'] = serialized_request
    return incidents
+
+
async def create_incidents(incidents: list, user_name: str, user_email: str, user_demisto_id: str = ''):
    """
    Creates incidents according to a provided JSON object
    Args:
        incidents: The incidents JSON
        user_name: The name of the user in MatterMost
        user_email: The email of the user in MatterMost
        user_demisto_id: The id of demisto user associated with the request (if exists)

    Returns:
        The creation result
    """

    for incident in incidents:
        # Add relevant labels to context
        labels = incident.get('labels', [])
        keys = [label.get('type') for label in labels]
        if 'Reporter' not in keys:
            labels.append({'type': 'Reporter', 'value': user_name})
        if 'ReporterEmail' not in keys:
            labels.append({'type': 'ReporterEmail', 'value': user_email})
        if 'Source' not in keys:
            # NOTE(review): the 'Slack' source value looks copied from the Slack
            # integration - confirm whether it should read 'Mattermost'.
            labels.append({'type': 'Source', 'value': 'Slack'})
        incident['labels'] = labels

    # Create on behalf of the mapped demisto user when one exists.
    data = demisto.createIncidents(incidents, userID=user_demisto_id) if user_demisto_id else demisto.createIncidents(
        incidents)

    return data
+
+
def update_integration_context_samples(incidents: list, max_samples: int = MAX_SAMPLES):  # pragma: no cover
    """
    Prepends the newly created incidents to the stored samples, keeping at most
    `max_samples` entries (the oldest entries are dropped).
    Args:
        incidents (list): The list of the newly created incidents.
        max_samples (int): Max samples size.
    """
    ctx = get_integration_context()
    combined_samples: List[Dict] = incidents + ctx.get('samples', [])
    ctx['samples'] = combined_samples[:max_samples]
    set_integration_context(ctx)
+
+
def reset_listener_health():
    """Reports a healthy listener state to both the module health and the server log."""
    health_message = "MatterMost V2 - Event handled successfully."
    demisto.updateModuleHealth(health_message)
    demisto.info(health_message)
+
+
def find_mirror_by_investigation() -> dict:
    """
    Finds a mirrored channel by the mirrored investigation

    Returns:
        The mirror object, or an empty dict when there is no match.
    """
    investigation = demisto.investigation()
    if not investigation:
        return {}
    integration_context = get_integration_context()
    if not integration_context.get('mirrors'):
        return {}
    current_investigation_id = investigation.get('id')
    for mirror in json.loads(integration_context['mirrors']):
        if mirror['investigation_id'] == current_investigation_id:
            return mirror
    return {}
+
+
+''' COMMAND FUNCTIONS '''
+
+
def test_module(client: HTTPClient) -> str:  # pragma: no cover
    """Tests connectivity with the client.

    Validates, in order: the Personal Access Token, the Bot Access Token and -
    when a default team and notification channel are configured - that the
    channel exists, by sending a test message there.
    """
    try:
        client.get_user_request(user_id='me', bot_user=False)  # Validating the Personal Access Token
    except Exception as e:
        demisto.debug(str(e))
        if 'Invalid or expired session, please login again.' in str(e):
            raise DemistoException('Invalid or expired session. Make sure the Personal Access Token is configured properly.')
        else:
            raise e

    try:
        client.get_user_request(user_id='me', bot_user=True)  # Validating the Bot Access Token
    except Exception as e:
        demisto.debug(str(e))
        if 'Invalid or expired session, please login again.' in str(e):
            raise DemistoException('Invalid or expired session. Make sure the Bot Access Token is configured properly.')
        else:
            raise e

    try:
        if client.notification_channel and client.team_name:
            # Validating the default team and channel exists
            channel_details = client.get_channel_by_name_and_team_name_request(client.team_name, client.notification_channel)
            client.send_notification_request(channel_details.get('id', ''), 'Hi there! This is a test message from XSOAR.')

    except Exception as e:
        demisto.debug(str(e))
        if 'Unable to find the existing team' in str(e):
            raise DemistoException('Could not find the team, make sure it is valid and/or exists.')
        elif 'Channel does not exist' in str(e):
            raise DemistoException('Channel does not exist.')
        else:
            raise e

    return 'ok'
+
+
def get_team_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Retrieves details for a single team (defaults to the configured team)."""
    team_name = args.get('team_name', client.team_name)
    team_details = client.get_team_request(team_name)

    readable = tableToMarkdown('Team details:', team_details, headers=['name', 'display_name', 'type', 'id'])
    return CommandResults(
        outputs_prefix='Mattermost.Team',
        outputs_key_field='name',
        outputs=team_details,
        readable_output=readable,
    )
+
+
def list_channels_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Lists a team's public channels, optionally including private ones, with paging support."""
    team_name = args.get('team', client.team_name)
    include_private_channels = argToBoolean(args.get('include_private_channels', False))
    page = arg_to_number(args.get('page', DEFAULT_PAGE_NUMBER))
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE))
    limit = args.get('limit', '')
    if limit:
        # An explicit limit overrides paging and becomes the page size.
        page, page_size = DEFAULT_PAGE_NUMBER, limit

    team_details = client.get_team_request(team_name)
    team_id = team_details.get('id', '')

    params = {'page': page, 'per_page': page_size}
    channel_details = client.list_channel_request(team_id, params)
    if include_private_channels:
        channel_details.extend(client.list_channel_request(team_id, params, get_private=True))

    readable = tableToMarkdown('Channels:', channel_details, headers=['name', 'display_name', 'type', 'id'])
    return CommandResults(
        outputs_prefix='Mattermost.Channel',
        outputs_key_field='name',
        outputs=channel_details,
        readable_output=readable,
    )
+
+
def create_channel_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Creates a channel in the given team (public when type=public, otherwise private).

    Falls back to fetching an existing channel with the same name; raises when
    the name is blocked (most likely by an archived channel).
    """
    team_name = args.get('team', client.team_name)
    channel_name = args.get('name', '')
    channel_display_name = args.get('display_name')
    channel_type = 'O' if args.get('type') == 'public' else 'P'
    purpose = args.get('purpose', '')
    header = args.get('header', '')

    team_details = client.get_team_request(team_name)

    params = {'team_id': team_details.get('id', ''),
              'name': channel_name,
              'display_name': channel_display_name,
              'type': channel_type,
              'purpose': purpose,
              'header': header}

    remove_nulls_from_dictionary(params)

    try:
        channel_details = client.create_channel_request(params)
        hr = f'The channel {channel_display_name} was created successfully, with channel ID: {channel_details.get("id")}'
    except Exception as e:
        if 'A channel with that name already exists' in str(e):
            try:
                channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)
                hr = f"Channel {channel_display_name} already exists."
            except Exception as sec_e:
                if 'Channel does not exist.' in str(sec_e):
                    # The name is taken yet the channel cannot be fetched - an
                    # archived channel is most likely holding the name.
                    # (Fixed: the original message was missing a space between
                    # the two string-literal fragments, producing "existin".)
                    hr = 'Could not create a new channel. An archived channel with the same name may exist ' \
                         'in the provided team. Please choose a different name.'
                    raise DemistoException(hr)
                else:
                    raise sec_e
        else:
            raise e

    return CommandResults(
        outputs_prefix='Mattermost.Channel',
        outputs_key_field='id',
        outputs=channel_details,
        readable_output=hr
    )
+
+
def add_channel_member_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Adds a user to a channel in the given team."""
    team_name = args.get('team', client.team_name)
    channel_name = args.get('channel', '')
    user_id = args.get('user_id', '')

    channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)
    client.add_channel_member_request(channel_details.get('id', ''), {'user_id': user_id})

    # Resolve the username for a friendlier readable output.
    user_details = client.get_user_request(user_id)
    display_name = user_details.get("username", user_id)

    hr = f'The member {display_name} was added to the channel successfully, with channel ID: {channel_details.get("id")}'
    return CommandResults(readable_output=hr)
+
+
def remove_channel_member_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Removes a user from a channel in the given team."""
    team_name = args.get('team', client.team_name)
    channel_name = args.get('channel', '')
    user_id = args.get('user_id', '')

    channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)
    client.remove_channel_member_request(channel_details.get('id', ''), user_id)

    # Resolve the username for a friendlier readable output.
    user_details = client.get_user_request(user_id)
    display_name = user_details.get("username", user_id)

    hr = f'The member {display_name} was removed from the channel successfully.'
    return CommandResults(readable_output=hr)
+
+
def close_channel_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Archives (closes) a channel and tears down any mirrors registered on it.

    When run from a mirrored investigation without a channel argument, the
    mirror entries for the channel are flagged for removal and server-side
    mirroring is stopped.
    """
    team_name = args.get('team_name', client.team_name)
    channel_name = args.get('channel', '')

    channel_details = {}
    channel_id = ''
    if channel_name:
        try:
            channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)
        except Exception as e:
            if 'Channel does not exist.' in str(e):
                hr = f'The channel {channel_name} was not found. It may have been already deleted, or not in the team provided.'
                return CommandResults(readable_output=hr)
            else:
                raise e

    try:
        # NOTE(review): when no channel argument is given, channel_id is still ''
        # at this point (the mirror lookup happens below) - confirm the intended
        # behavior of closing with an empty ID in that case.
        client.close_channel_request(channel_details.get('id', '') or channel_id)
        hr = f'The channel {channel_name} was deleted successfully.'  # fixed typo: "was delete"
    except Exception as e:
        if 'Channel does not exist.' in str(e):
            hr = f'The channel {channel_name} was already deleted.'
        else:
            raise e

    mirror = find_mirror_by_investigation()
    integration_context = get_integration_context()
    if mirror:
        demisto.debug('MM: Found mirrored channel to close.')
        channel_id = mirror.get('channel_id', '')
        mirrors = json.loads(integration_context['mirrors'])
        # Check for mirrors on the archived channel. The loop variable is named
        # distinctly so it does not shadow the outer `mirror`.
        channel_mirrors = list(filter(lambda m: channel_id == m['channel_id'], mirrors))
        for channel_mirror in channel_mirrors:
            channel_mirror['remove'] = True
            demisto.mirrorInvestigation(channel_mirror['investigation_id'],
                                        f'none:{channel_mirror["mirror_direction"]}',
                                        channel_mirror['auto_close'])

        set_to_integration_context_with_retries({'mirrors': mirrors}, OBJECTS_TO_KEYS)

    return CommandResults(
        readable_output=hr
    )
+
+
def list_users_command(client: HTTPClient, args: dict[str, Any]) -> CommandResults:
    """Lists users, optionally scoped to a team and/or a channel, with paging support."""
    team_name = args.get('team_name', '')
    channel_name = args.get('channel', '')
    page = arg_to_number(args.get('page', DEFAULT_PAGE_NUMBER))
    page_size = arg_to_number(args.get('page_size', DEFAULT_PAGE_SIZE))
    limit = arg_to_number(args.get('limit', ''))
    if limit:
        # An explicit limit overrides paging and becomes the page size.
        page, page_size = DEFAULT_PAGE_NUMBER, limit

    team_id = ''
    if team_name:
        team_id = client.get_team_request(team_name).get('id', '')

    channel_id = ''
    if channel_name:
        if not team_name:
            raise DemistoException("Must provide a team name if a channel name was provided.")
        channel_id = client.get_channel_by_name_and_team_name_request(team_name, channel_name).get('id', '')
        team_id = ''  # The search in Mattermost is done with an OR operator

    params = {'page': page, 'per_page': page_size, 'in_team': team_id, 'in_channel': channel_id}
    remove_nulls_from_dictionary(params)

    users = client.list_users_request(params)

    readable = tableToMarkdown('Users:', users, headers=['username', 'email', 'role', 'id'])
    return CommandResults(
        outputs_prefix='Mattermost.User',
        outputs_key_field='id',
        outputs=users,
        readable_output=readable,
    )
+
+
def send_file_command(client: HTTPClient, args) -> CommandResults:
    """Sends a war-room file either to a named channel or to a user via a
    direct channel (exactly one of `to` / `channel` must be given)."""
    channel_name = args.get('channel', '')
    team_name = args.get('team_name', client.team_name)
    message = args.get('message')
    entry_id = args.get('entry_id') or args.get('file')
    to = args.get('to', '')

    demisto.debug(f'{to=}, {channel_name=}')

    if to and channel_name:
        raise DemistoException("Cannot use both to and channel_name arguments")

    if not to and not channel_name:
        # Fixed grammar of the original message ("an to or channel_name").
        raise DemistoException("You must provide either the to or the channel_name argument")

    if to:
        # create a new direct channel and send the file there
        if re.match(emailRegex, to):
            to = get_user_id_by_email(client, to)
        else:
            to = get_user_id_by_username(client, to)

        bot_id = get_user_id_from_token(client, bot_user=True)
        channel_details = client.create_direct_channel_request(to, bot_id)
        demisto.debug(f'MM: Created a new direct channel to: {to} with channel_id: {channel_details.get("id")}')
    else:
        channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)

    file_info = demisto.getFilePath(entry_id)
    params = {'channel_id': channel_details.get('id'),
              'filename': file_info['name']}

    upload_response = client.send_file_request(file_info, params)
    demisto.debug('MM: Uploaded the file successfully to mattermost')

    data = {'channel_id': channel_details.get('id'),
            'message': message,
            'file_ids': [upload_response.get('file_infos', [])[0].get('id', '')]}  # always uploading a single file
    # Drop the optional message when it was not provided. The original code
    # cleaned `params` here, which had already been sent, leaving a null
    # `message` in the post payload.
    remove_nulls_from_dictionary(data)

    client.create_post_with_file_request(data)

    return CommandResults(
        readable_output=f'file {file_info["name"]} was successfully sent to channel {channel_name}'
    )
+
+
def mirror_investigation(client: HTTPClient, **args) -> CommandResults:
    """
    Updates the integration context with a new or existing mirror, and tells the
    server to start mirroring the current investigation to a Mattermost channel.

    Args (via **args):
        type: mirror type ('all'/'chat'/'none'). Default 'all'.
        direction: 'Both', 'FromDemisto' or 'ToDemisto'. Default 'Both'.
        channel: channel name to mirror into; defaults to 'incident-<id>'.
        team_name: team of the channel; defaults to the configured team.
        mirrorTo: 'group' (private channel) or 'channel' (public). Default 'group'.
        autoclose: close the channel when the investigation closes. Default true.
        kickAdmin: remove the admin from a newly created channel. Default false.

    Raises:
        DemistoException: when mirroring or long-running is disabled, or when
            invoked from the playground.
    """
    if not MIRRORING_ENABLED:
        raise DemistoException("Couldn't mirror investigation, Mirroring is disabled")
    if not LONG_RUNNING:
        raise DemistoException('Mirroring is enabled, however long running is disabled. For mirrors to work correctly,'
                               ' long running must be enabled.')
    mirror_type = args.get('type', 'all')
    direction = args.get('direction', 'Both')
    channel_name = args.get('channel', '')
    team_name = args.get('team_name', client.team_name)
    mirror_to = args.get('mirrorTo', 'group')

    autoclose = argToBoolean(args.get('autoclose', True))
    send_first_message = False
    kick_admin = argToBoolean(args.get('kickAdmin', False))

    investigation = demisto.investigation()
    investigation_id = str(investigation.get('id'))
    if investigation.get('type') == PLAYGROUND_INVESTIGATION_TYPE:
        raise DemistoException('This action cannot be performed in the playground.')

    integration_context = get_integration_context()
    if not integration_context or not integration_context.get('mirrors', []):
        mirrors: list = []
        current_mirror = []
    else:
        # 'mirrors' is stored as a JSON-encoded string in the integration context.
        mirrors = json.loads(integration_context['mirrors'])
        current_mirror = list(filter(lambda m: m['investigation_id'] == investigation_id, mirrors))

    demisto.debug(f'MM: {mirrors=}')
    demisto.debug(f'MM: {current_mirror=}')
    # get admin user id from token
    admin_user_id = get_user_id_from_token(client)

    channel_filter: list = []
    channel_id = ''
    if channel_name:
        # check if channel already exists
        channel_filter = list(filter(lambda m: m['channel_name'] == channel_name, mirrors))

    if not current_mirror:
        # No mirror exists for this investigation yet - find or create the channel.
        channel_name = channel_name or f'incident-{investigation_id}'
        if not channel_filter:
            channel_details: dict = {}
            try:
                channel_details = client.get_channel_by_name_and_team_name_request(team_name, channel_name)
                send_first_message = False
            except Exception as e:
                if '404' in str(e):
                    # create new channel
                    demisto.debug(f'MM: Creating a new channel for mirroring with name: {channel_name}')
                    channel_type = 'public' if mirror_to == 'channel' else 'private'
                    # Intentionally shadows the **args dict to reuse create_channel_command.
                    args = {'team_name': team_name, 'name': channel_name.lower(),
                            'display_name': channel_name, 'type': channel_type}
                    result = create_channel_command(client=client, args=args)
                    channel_details = result.outputs  # type: ignore
                    send_first_message = True
                else:
                    raise e

            channel_id = channel_details.get('id', '')
            channel_team_id = channel_details.get('team_id')
        else:
            # Another investigation already mirrors to this channel - reuse it.
            mirrored_channel = channel_filter[0]
            channel_team_id = mirrored_channel['channel_team_id']
            channel_id = mirrored_channel['channel_id']
            channel_name = mirrored_channel['channel_name']

        mirror = {
            'channel_team_id': channel_team_id,
            'channel_id': channel_id,
            'channel_name': channel_name,
            'investigation_id': investigation.get('id'),
            'mirror_type': mirror_type,
            'mirror_direction': direction,
            'auto_close': bool(autoclose),
            'mirrored': True
        }
    else:
        # A mirror exists - update it in place with any newly supplied settings.
        mirror = mirrors.pop(mirrors.index(current_mirror[0]))
        channel_id = mirror['channel_id']
        if mirror_type:
            mirror['mirror_type'] = mirror_type
        if autoclose:
            mirror['auto_close'] = autoclose
        if direction:
            mirror['mirror_direction'] = direction
        if channel_name:
            # update channel name if needed
            demisto.debug(f'MM: Updating channel name to {channel_name}')
            params = {'name': channel_name, 'display_name': channel_name, 'id': channel_id}
            client.update_channel_request(channel_id=channel_id, params=params)
            mirror['channel_name'] = channel_name
        channel_name = mirror['channel_name']
        mirror['mirrored'] = True
    demisto.mirrorInvestigation(investigation_id, f'{mirror_type}:{direction}', autoclose)

    mirrors.append(mirror)
    set_to_integration_context_with_retries({'mirrors': mirrors}, OBJECTS_TO_KEYS)

    if send_first_message:
        server_links = demisto.demistoUrls()
        server_link = server_links.get('server')
        incident_url = get_war_room_url(f'{server_link}#/WarRoom/{investigation_id}', investigation_id)
        message_to_send = (f'This channel was created to mirror incident {investigation_id}.'
                           f' \n View it on: {incident_url}')

        client.send_notification_request(channel_id, message_to_send)
    if kick_admin:
        # Best effort - a failure to kick should not fail the mirroring itself.
        try:
            client.remove_channel_member_request(channel_id, admin_user_id)
        except Exception as e:
            demisto.debug(f'Could not kick admin from channel. Error: {e}')

    return CommandResults(
        readable_output=f'Investigation mirrored successfully with mirror type {mirror_type},\n channel name: {channel_name}'
    )
+
+
def send_notification(client: HTTPClient, **args):
    """
    Sends notification for a MatterMost channel

    Args (via **args):
        to: username or email of a user to DM. Mutually exclusive with 'channel'.
        entry: an entry ID appended to the War Room link.
        channel: target channel name; falls back to client.notification_channel.
        message: the text to send. When 'mattermost_ask' is true, this is a
            JSON envelope carrying entitlement/expiry/default_response/reply/message.
        ignoreAddURL: when true, skip appending the server/War Room link.
        mattermost_ask: marks the message as a MattermostAsk question.
        messageType / originalMessage / entryObject: supplied by the server
            for mirroring flows.

    Returns:
        CommandResults with the sent message ID, or a plain string when the
        message is skipped (unsupported type / already mirrored).
    """
    demisto.debug(f'MM: {args=}')
    to = args.get('to', '')
    entry = args.get('entry')
    channel_name = args.get('channel', '')
    message_to_send = args.get("message", "")
    ignore_add_url = argToBoolean(args.get('ignoreAddURL', False))
    mattermost_ask = argToBoolean(args.get('mattermost_ask', False))
    entitlement = ''

    if mattermost_ask:
        # For MattermostAsk, 'message' is a JSON envelope; unpack its fields.
        parsed_message = json.loads(args.get("message", ''))
        entitlement = parsed_message.get('entitlement', '')
        expiry = parsed_message.get('expiry', '')
        default_response = parsed_message.get('default_response', '')
        reply = parsed_message.get('reply', '')
        message_to_send = parsed_message.get('message', '')

    message_type = args.get('messageType', '')  # From server
    original_message = args.get('originalMessage', '')  # From server
    entry_object = args.get('entryObject')  # From server
    investigation_id = ''
    poll: dict = {}  # NOTE(review): never populated here, so props are always sent empty - confirm intended

    if (to and channel_name):
        raise DemistoException("Cannot use both to and channel_name arguments")

    channel_name = channel_name or client.notification_channel

    if entry_object:
        investigation_id = entry_object.get('investigationId')  # From server, available from demisto v6.1 and above

    if message_type and message_type != MIRROR_TYPE:
        return (f"Message type is not in permitted options. Received: {message_type}")

    if message_type == MIRROR_TYPE and original_message.find(MESSAGE_FOOTER) != -1:
        # return so there will not be a loop of messages
        return ("Message already mirrored")

    if not ignore_add_url:
        # Append a link back to the War Room (or the server home for playground entries).
        investigation = demisto.investigation()
        server_links = demisto.demistoUrls()
        if investigation:
            if investigation.get('type') != PLAYGROUND_INVESTIGATION_TYPE:
                link = server_links.get('warRoom')
                if link:
                    link = get_war_room_url(link)
                    if entry:
                        link += '/' + entry
                message_to_send += f'\nView it on: {link}'
            else:
                link = server_links.get('server', '')
                if link:
                    message_to_send += f'\nView it on: {link}#/home'
    channel_id = get_channel_id_to_send_notif(client, to, channel_name, investigation_id)

    raw_data = client.send_notification_request(channel_id, message_to_send, props=poll)
    message_id = raw_data.get("id")
    demisto.debug(f'MM: Got replay from post: {raw_data}')
    if entitlement:
        # entitlement is only non-empty in the mattermost_ask branch above, where
        # reply/expiry/default_response were also assigned - so they are bound here.
        demisto.debug(f'MM: Found entitlement, saving message to context: {entitlement}')
        save_entitlement(entitlement, message_id, reply, expiry, default_response, to if to else channel_id)
    return CommandResults(
        readable_output=f'Message sent to MatterMost successfully. Message ID is: {message_id}'
    )
+
+
+''' MAIN FUNCTION '''
+
+
def handle_global_parameters(params: dict):  # pragma: no cover
    """Populate the module-level configuration globals from the instance params.

    Raises:
        DemistoException: when mirroring is enabled but long-running mode or
            the bot access token is missing.
    """
    global SECRET_TOKEN, LONG_RUNNING, MIRRORING_ENABLED, CACHE_EXPIRY, CACHED_INTEGRATION_CONTEXT, DEMISTO_URL
    global BASE_URL, PROXY, SSL_CONTEXT, VERIFY_CERT, PROXIES, ALLOW_INCIDENTS, INCIDENT_TYPE, PROXY_URL
    global WEBSOCKET_URL, PORT

    url = params.get('url', '')
    bot_access_token = params.get('bot_access_token', {}).get('password')

    LONG_RUNNING = params.get('longRunning', False)
    MIRRORING_ENABLED = params.get('mirroring', False)
    SECRET_TOKEN = params.get('personal_access_token', {}).get('password')
    BASE_URL = url
    PROXY = params.get('proxy', False)
    DEMISTO_URL = demisto.demistoUrls().get('server', '')
    PROXIES, _ = handle_proxy_for_long_running()
    PROXY_URL = PROXIES.get('http', '')  # aiohttp only supports http proxy
    ALLOW_INCIDENTS = params.get('allow_incidents', True)
    INCIDENT_TYPE = params.get('incidentType', 'Unclassified')

    VERIFY_CERT = not params.get('insecure', False)
    if VERIFY_CERT:
        SSL_CONTEXT = None  # rely on the default SSL verification
    else:
        # "Trust any certificate" mode: disable hostname and certificate checks.
        SSL_CONTEXT = ssl.create_default_context()
        SSL_CONTEXT.check_hostname = False
        SSL_CONTEXT.verify_mode = ssl.CERT_NONE

    # Derive the websocket endpoint from the HTTP(S) server URL.
    http_scheme, ws_scheme = ('https://', 'wss://') if 'https://' in url else ('http://', 'ws://')
    WEBSOCKET_URL = url.replace(http_scheme, ws_scheme, 1) + '/api/v4/websocket'

    # Pull the initial cached context and set its expiry.
    CACHE_EXPIRY = next_expiry_time()
    CACHED_INTEGRATION_CONTEXT = get_integration_context()

    if MIRRORING_ENABLED and (not LONG_RUNNING or not bot_access_token):
        raise DemistoException("""Mirroring is enabled, however long running is disabled
or the necessary bot authentication parameters are missing.
For mirrors to work correctly, long running must be enabled and you must provide all
the mattermost-bot following parameters:
Bot Access Token""")
+
+
def main():  # pragma: no cover
    """Entry point: builds the HTTP client and dispatches the invoked command."""
    params = demisto.params()
    args = demisto.args()

    handle_global_parameters(params)

    command = demisto.command()
    try:
        global CLIENT

        personal_access_token = params.get('personal_access_token', {}).get('password')
        client = HTTPClient(
            base_url=params.get('url', ''),
            headers={'Authorization': f'Bearer {personal_access_token}'},
            verify=not params.get('insecure', False),
            proxy=params.get('proxy', False),
            bot_access_token=params.get('bot_access_token', {}).get('password'),
            personal_access_token=personal_access_token,
            team_name=params.get('team_name', ''),
            notification_channel=params.get('notification_channel'),
        )
        CLIENT = client
        demisto.debug(f'Command being called is {command}')

        # Commands that receive the raw args dict.
        commands_with_args = {
            'mattermost-get-team': get_team_command,
            'mattermost-list-channels': list_channels_command,
            'mattermost-create-channel': create_channel_command,
            'mattermost-add-channel-member': add_channel_member_command,
            'mattermost-remove-channel-member': remove_channel_member_command,
            'mattermost-list-users': list_users_command,
            'mattermost-close-channel': close_channel_command,
            'close-channel': close_channel_command,
            'mattermost-send-file': send_file_command,
        }
        # Commands that receive args expanded as keyword arguments.
        commands_with_kwargs = {
            'mirror-investigation': mirror_investigation,
            'mattermost-mirror-investigation': mirror_investigation,
            'send-notification': send_notification,
        }

        if command == 'test-module':
            return_results(test_module(client))
        elif command == 'long-running-execution':
            run_long_running()
        elif command in commands_with_kwargs:
            return_results(commands_with_kwargs[command](client, **args))
        elif command in commands_with_args:
            return_results(commands_with_args[command](client, args))
        else:
            raise DemistoException('Unrecognized command: ' + demisto.command())

    except Exception as e:
        # For any other integration command exception, return an error
        return_error(f'Failed to execute {command} command. Error: {str(e)}.')
+
+
+''' ENTRY POINT '''
+
+
# XSOAR executes integration code via exec(), so __name__ may be '__builtin__'
# (Python 2) or 'builtins' (Python 3) rather than '__main__'.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
diff --git a/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.yml b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.yml
new file mode 100644
index 000000000000..d087dfe4144e
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2.yml
@@ -0,0 +1,510 @@
+sectionOrder:
+- Connect
+- Collect
+category: Messaging and Conferencing
+commonfields:
+ id: MattermostV2
+ version: -1
+configuration:
+- display: Server URL
+ name: url
+ required: true
+ type: 0
+ section: Connect
+- displaypassword: Bot Access Token
+ additionalinfo: The Bot Access Token to use for connection.
+ name: bot_access_token
+ required: true
+ hiddenusername: true
+ type: 9
+ section: Connect
+- displaypassword: Personal Access Token
+ additionalinfo: The Personal Access Token to use for connection.
+ name: personal_access_token
+ required: true
+ hiddenusername: true
+ type: 9
+ section: Connect
+- display: Team Name
+ name: team_name
+ required: true
+ type: 0
+ section: Connect
+- display: Default Notifications Channel
+ name: notification_channel
+ advanced: true
+ required: false
+ type: 0
+ section: Connect - Advanced
+- defaultvalue: 'true'
+ display: Enable Incident Mirroring
+ name: mirroring
+ type: 8
+ section: Connect - Advanced
+ required: false
+- display: Allow external users to create incidents via DM.
+ name: allow_incidents
+ type: 8
+ section: Collect
+ advanced: true
+ required: false
+- defaultvalue: 'true'
+ display: Long running instance. Required for investigation mirroring and direct messages.
+ name: longRunning
+ type: 8
+ section: Connect
+ advanced: true
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ type: 8
+ required: false
+ section: Connect - Advanced
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+ section: Connect - Advanced
+description: Mattermost is an open-source, self-hostable online chat service with file sharing, search, and integrations. It is designed as an internal chat for organizations and companies.
+display: Mattermost v2
+name: MattermostV2
+script:
+ commands:
+ - arguments:
+ - description: The name of the team to retrieve.
+ name: team_name
+ required: true
+ description: Gets a team's details.
+ name: mattermost-get-team
+ outputs:
+ - contextPath: Mattermost.Team.id
+ description: The ID of the team.
+ type: String
+ - contextPath: Mattermost.Team.create_at
+ description: When was the team created.
+ type: Unknown
+ - contextPath: Mattermost.Team.update_at
+ description: When was the team updated.
+ type: Unknown
+ - contextPath: Mattermost.Team.delete_at
+ description: When was the team deleted.
+ type: Unknown
+ - contextPath: Mattermost.Team.display_name
+ description: The display name of the team.
+ type: String
+ - contextPath: Mattermost.Team.name
+ description: The name of the team.
+ type: String
+ - contextPath: Mattermost.Team.description
+ description: The description of the team.
+ type: String
+ - contextPath: Mattermost.Team.email
+ description: The email of the team.
+ type: String
+ - contextPath: Mattermost.Team.type
+ description: The type of the team.
+ type: String
+ - contextPath: Mattermost.Team.company_name
+ description: The company name of the team.
+ type: String
+ - contextPath: Mattermost.Team.allowed_domains
+ description: The allowed domains of the team.
+ type: String
+ - contextPath: Mattermost.Team.invite_id
+ description: The invite ID of the team.
+ type: String
+ - contextPath: Mattermost.Team.allow_open_invite
+ description: Does the team allow open invites.
+ type: Unknown
+ - contextPath: Mattermost.Team.scheme_id
+ description: The scheme ID of the team.
+ type: String
+ - contextPath: Mattermost.Team.policy_id
+ description: The policy ID of the team.
+ type: String
+ - arguments:
+ - description: The name of the team to list channels from. Default is the team name from the integration configuration.
+ name: team
+ required: false
+ - description: Whether to include private channels. Default is false.
+ name: include_private_channels
+ required: false
+ predefined:
+ - 'true'
+ - 'false'
+ auto: PREDEFINED
+ - description: The page number to retrieve. Default value is 0.
+ name: page
+ required: false
+ - description: The size of the page to retrieve. Default value is 50.
+ name: page_size
+ required: false
+ - description: How many results to retrieve. Will override the page and page_size arguments if given.
+ name: limit
+ required: false
+ description: Lists channels.
+ name: mattermost-list-channels
+ outputs:
+ - contextPath: Mattermost.Channel.id
+ description: The ID of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.create_at
+ description: When was the channel created.
+ type: Unknown
+ - contextPath: Mattermost.Channel.update_at
+ description: When was the channel updated.
+ type: Unknown
+ - contextPath: Mattermost.Channel.delete_at
+ description: When was the channel deleted.
+ type: Unknown
+ - contextPath: Mattermost.Channel.display_name
+ description: The display name of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.name
+ description: The name of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.description
+ description: The description of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.header
+ description: The header of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.purpose
+ description: The purpose of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.last_post_at
+ description: When was the last post to the channel made.
+ type: Unknown
+ - contextPath: Mattermost.Channel.total_msg_count
+ description: The total message count of the channel.
+ type: Unknown
+ - contextPath: Mattermost.Channel.extra_update_at
+ description: When was the channel updated.
+ type: Unknown
+ - contextPath: Mattermost.Channel.creator_id
+ description: The creator ID of the channel.
+ type: String
+ - arguments:
+ - description: The display name of the channel to create.
+ name: display_name
+ required: true
+ - description: The name of the channel to create.
+ name: name
+ required: true
+ - description: The type of the channel to create.
+ name: type
+ defaultValue: public
+ predefined:
+ - public
+ - private
+ auto: PREDEFINED
+ required: false
+ - description: The purpose of the channel to create.
+ name: purpose
+ required: false
+ - description: The header of the channel to create.
+ name: header
+ required: false
+ - description: The team name of the channel to create. Default is the team name from the integration configuration.
+ name: team
+ required: false
+ description: Creates a channel.
+ name: mattermost-create-channel
+ outputs:
+ - contextPath: Mattermost.Channel.id
+ description: The ID of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.create_at
+ description: When was the channel created.
+ type: Unknown
+ - contextPath: Mattermost.Channel.update_at
+ description: When was the channel updated.
+ type: Unknown
+ - contextPath: Mattermost.Channel.delete_at
+ description: When was the channel deleted.
+ type: Unknown
+ - contextPath: Mattermost.Channel.display_name
+ description: The display name of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.name
+ description: The name of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.description
+ description: The description of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.header
+ description: The header of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.purpose
+ description: The purpose of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.last_post_at
+ description: When was the last post to the channel made.
+ type: Unknown
+ - contextPath: Mattermost.Channel.total_msg_count
+ description: The total message count of the channel.
+ type: Unknown
+ - contextPath: Mattermost.Channel.extra_update_at
+ description: When was the channel updated.
+ type: Unknown
+ - contextPath: Mattermost.Channel.creator_id
+ description: The creator ID of the channel.
+ type: String
+ - contextPath: Mattermost.Channel.scheme_id
+ description: The scheme ID of the channel.
+ type: String
+ - arguments:
+ - description: The team name of the channel to add the user to. Default is the team name from the integration configuration.
+ name: team
+ required: false
+ - description: The name of the channel to add the user to.
+ name: channel
+ required: true
+ - description: The ID of the user to add. Use the command 'mattermost-list-users' to fetch the user ID.
+ name: user_id
+ required: true
+ description: Adds a channel member.
+ name: mattermost-add-channel-member
+ outputs: []
+ - arguments:
+ - description: The team name of the channel to add the user to. Default is the team name from the integration configuration.
+ name: team
+ required: false
+ - description: The channel name of the channel to remove the user from.
+ name: channel
+ required: true
+ - description: The ID of the user to remove. Use the command 'mattermost-list-users' to fetch the user ID.
+ name: user_id
+ required: true
+ description: Removes a channel member.
+ name: mattermost-remove-channel-member
+ outputs: []
+ - arguments:
+ - description: The name of the team to filter users by.
+ name: team_name
+ required: false
+ - description: The name of the channel to filter users by. If mentioned, a team name must be mentioned as well.
+ name: channel
+ required: false
+ - description: The page number to retrieve. Should be provided with the page_size argument. Default value is 0.
+ name: page
+ required: false
+ - description: The size of the page to retrieve. Should be provided with the page argument. Default value is 50.
+ name: page_size
+ required: false
+ - description: How many results to retrieve. If provided, overrides the page and page_size arguments.
+ name: limit
+ required: false
+ description: Lists users.
+ name: mattermost-list-users
+ outputs:
+ - contextPath: Mattermost.User.id
+ description: The ID of the user.
+ type: String
+ - contextPath: Mattermost.User.create_at
+ description: When was the user created.
+ type: Unknown
+ - contextPath: Mattermost.User.update_at
+ description: When was the user updated.
+ type: Unknown
+ - contextPath: Mattermost.User.delete_at
+ description: When was the user deleted.
+ type: Unknown
+ - contextPath: Mattermost.User.username
+ description: The username of the user.
+ type: String
+ - contextPath: Mattermost.User.auth_data
+ description: The authorization data of the user.
+ type: String
+ - contextPath: Mattermost.User.auth_service
+ description: The authorization service of the user.
+ type: String
+ - contextPath: Mattermost.User.email
+ description: The email of the user.
+ type: String
+ - contextPath: Mattermost.User.nickname
+ description: The nickname of the user.
+ type: String
+ - contextPath: Mattermost.User.first_name
+ description: The first name of the user.
+ type: Unknown
+ - contextPath: Mattermost.User.last_name
+ description: The last name of the user.
+ type: Unknown
+ - contextPath: Mattermost.User.position
+ description: The position of the user.
+ type: Unknown
+ - contextPath: Mattermost.User.roles
+ description: The roles of the user.
+ type: String
+ - contextPath: Mattermost.User.locale
+ description: The locale of the user.
+ type: String
+ - contextPath: Mattermost.User.timezone
+ description: The timezone of the user.
+ type: Unknown
+ - arguments:
+ - description: The team name of the channel to send the file to. Default is the Team Name in the integration configuration.
+ name: team_name
+ required: false
+ - description: The channel name of the channel to send the file to. Cannot be combined with the to argument.
+ name: channel
+ required: false
+ - description: The message to send to the channel along with the file.
+ name: message
+ required: true
+ - description: The entry ID of the file.
+ name: entry_id
+ required: true
+ - description: The username or email of the user to send the file to.
+ name: to
+ required: false
+ description: Sends a file.
+ name: mattermost-send-file
+ outputs: []
+ - arguments:
+ - description: The message to send.
+ name: message
+ required: true
+ - description: The channel name to send the notification to. Default value is the channel configuration parameter.
+ name: channel
+ required: false
+ - description: An entry ID to send as a link.
+ name: entry
+ required: false
+ - description: The username or email of the user to send the file to.
+ name: to
+ required: false
+ - description: Whether to avoid adding the War Room link to the message. Default value is false.
+ name: ignoreAddURL
+ required: false
+ predefined:
+ - 'true'
+ - 'false'
+ auto: PREDEFINED
+ - name: mattermost_ask
+ description: The message as a JSON for asking questions to the user. Default value is false.
+ type: boolean
+ required: false
+ predefined:
+ - 'true'
+ - 'false'
+ auto: PREDEFINED
+ description: Send a message using a chatbot app.
+ name: send-notification
+ outputs: []
+ - arguments:
+ - description: The team name of the channel to close. Default value is the team name from the integration configuration.
+ name: team_name
+ required: false
+ - description: The channel name of the channel to close. If not provided, the mirrored investigation channel is archived (if the channel exists).
+ name: channel
+ required: false
+ description: Closes a channel.
+ name: mattermost-close-channel
+ outputs: []
+ - arguments:
+ - description: The team name of the channel to delete. Default value is the team name from the integration configuration.
+ name: team_name
+ required: false
+ - description: The channel name of the channel to close. If not provided, the mirrored investigation channel is archived (if the channel exists).
+ name: channel
+ required: false
+ description: Closes a mirrored Mattermost channel.
+ name: close-channel
+ outputs: []
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all
+ description: The mirroring type. Can be "all", which mirrors everything, "chat", which mirrors only chats (not commands), or "none", which stops all mirroring.
+ name: type
+ predefined:
+ - all
+ - chat
+ - none
+ - auto: PREDEFINED
+ defaultValue: 'true'
+ description: Whether the channel is auto-closed when an investigation is closed.
+ name: autoclose
+ predefined:
+ - 'true'
+ - 'false'
+ - auto: PREDEFINED
+ defaultValue: Both
+ description: The mirroring direction.
+ name: direction
+ predefined:
+ - Both
+ - FromDemisto
+ - ToDemisto
+ - description: The name of the channel. The default is "incident-".
+ name: channel
+ - auto: PREDEFINED
+ defaultValue: 'false'
+ description: Whether to remove the admin from the newly created channel. Default value is false.
+ name: kickAdmin
+ predefined:
+ - 'true'
+ - 'false'
+ - auto: PREDEFINED
+ defaultValue: 'group'
+ description: Mirrors the investigation to a group (private channel) or a public channel. Default is group.
+ name: mirrorTo
+ predefined:
+ - 'group'
+ - 'channel'
+ description: Mirrors the investigation between Mattermost and the Cortex XSOAR War Room.
+ name: mirror-investigation
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all
+ description: The mirroring type. Can be "all", which mirrors everything, "chat", which mirrors only chats (not commands), or "none", which stops all mirroring.
+ name: type
+ predefined:
+ - all
+ - chat
+ - none
+ - auto: PREDEFINED
+ defaultValue: 'true'
+ description: Whether the channel is auto-closed when an investigation is closed.
+ name: autoclose
+ predefined:
+ - 'true'
+ - 'false'
+ - auto: PREDEFINED
+ defaultValue: Both
+ description: The mirroring direction.
+ name: direction
+ predefined:
+ - Both
+ - FromDemisto
+ - ToDemisto
+ - description: The name of the channel. The default is "incident-".
+ name: channel
+ - auto: PREDEFINED
+ defaultValue: 'false'
+ description: Whether to remove the admin from the newly created channel. Default value is false.
+ name: kickAdmin
+ predefined:
+ - 'true'
+ - 'false'
+ - auto: PREDEFINED
+ defaultValue: 'group'
+ description: Mirrors the investigation to a group (private channel) or a public channel.
+ name: mirrorTo
+ predefined:
+ - 'group'
+ - 'channel'
+ description: Mirrors the investigation between Mattermost and the Cortex XSOAR War Room.
+ name: mattermost-mirror-investigation
+ longRunning: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/netutils:1.0.0.96365
+fromversion: 6.10.0
+tests:
+- MattermostV2 - testplaybook
diff --git a/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_description.md b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_description.md
new file mode 100644
index 000000000000..3c5410540637
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_description.md
@@ -0,0 +1,14 @@
+## To use the Mattermost integration:
+
+1. Create a new bot to access Mattermost on behalf of a user, as described in the [instructions](https://developers.mattermost.com/integrate/reference/bot-accounts/):
+
+* Go to **System Console** > **Integrations** > **Bot Accounts**.
+* Set Enable Bot Account Creation to **true**.
+* Once set, the system administrator can create bot accounts for integrations using the **Integrations** > **Bot Accounts** link in the description provided.
+
+2. Under **Manage Members**, make it a System Admin.
+3. Create a Personal Access Token for the new account [(detailed instructions)](https://developers.mattermost.com/integrate/reference/personal-access-token/).
+
+### For sending messages using the ChatBot app and mirroring
+
+To enable a direct communication with Mattermost for mirroring and sending messages by a Mattermost chatbot, make sure both the *Long running instance* and *Enable Incident Mirroring* parameters are checked.
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_image.png b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_image.png
new file mode 100644
index 000000000000..0b2d5d529d93
Binary files /dev/null and b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_image.png differ
diff --git a/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_test.py b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_test.py
new file mode 100644
index 000000000000..30ce383ee3b3
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/MattermostV2_test.py
@@ -0,0 +1,578 @@
+import json
+from MattermostV2 import (get_team_command, list_channels_command, create_channel_command, add_channel_member_command,
+ remove_channel_member_command, list_users_command, close_channel_command, send_file_command,
+ get_channel_id_to_send_notif, event_handler, handle_text_received_from_mm, get_channel_id_from_context,
+ extract_entitlement, answer_question, handle_posts, create_incidents, get_war_room_url,
+ mirror_investigation, send_notification)
+import pytest
+import demistomock as demisto
+from unittest.mock import patch
+from freezegun import freeze_time
+
+
+def util_load_json(path):
+ with open(path, encoding="utf-8") as f:
+ return json.loads(f.read())
+
+
+def http_mock(method: str, url_suffix: str = "", full_url: str = "", params: dict = {},
+ data: dict = {}, files: dict = {}, json_data: dict = {}, headers: dict = {}):
+
+ if 'bot_access_token' in headers.get('Authorization', ''):
+ if url_suffix == '/api/v4/users/me':
+ return util_load_json('test_data/get_bot_response.json')
+ if url_suffix == '/api/v4/posts':
+ return util_load_json("test_data/create_post_response.json")
+
+ if url_suffix == "/api/v4/teams/name/team_name":
+ return util_load_json("test_data/get_team_response.json")
+ elif url_suffix == '/api/v4/teams/team_id/channels' or url_suffix == '/api/v4/teams/team_id/channels/private':
+ return util_load_json("test_data/list_channels_response.json")
+ elif url_suffix == '/api/v4/channels':
+ return util_load_json("test_data/create_channel_response.json")
+ elif url_suffix == '/api/v4/users':
+ return util_load_json("test_data/list_users_response.json")
+ elif url_suffix == '/api/v4/files':
+ return util_load_json("test_data/send_file_response.json")
+ elif (url_suffix == '/api/v4/users/email/user_email' or url_suffix == '/api/v4/users/username/username'
+ or url_suffix == '/api/v4/users/me' or url_suffix == '/api/v4/users/user_id'):
+ return util_load_json("test_data/list_users_response.json")[0]
+ elif url_suffix == '/api/v4/channels/direct':
+ channel = util_load_json("test_data/create_channel_response.json")
+ channel["type"] = 'D'
+ return channel
+ else:
+ return {}
+
+
+@pytest.fixture(autouse=True)
+def ws_client(mocker):
+ from MattermostV2 import WebSocketClient
+
+ return WebSocketClient(
+ base_url='mock url',
+ verify=True,
+ proxy=False,
+ token='personal_access_token',
+ )
+
+
+@pytest.fixture(autouse=True)
+def http_client(mocker):
+ from MattermostV2 import HTTPClient
+
+ headers = {"Authorization": "Token mock"}
+ http_client = HTTPClient(
+ base_url='mock url',
+ headers=headers,
+ verify=True,
+ proxy=False,
+ bot_access_token='bot_access_token',
+ personal_access_token='personal_access_token',
+ team_name='team_name',
+ notification_channel='notification_channel',
+ )
+ mocker.patch.object(http_client, "_http_request", side_effect=http_mock)
+ return http_client
+
+
+def test_get_team_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running get_team_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name'}
+ results = get_team_command(http_client, args)
+ assert results.outputs.get('name', '') == 'team_name'
+
+
+def test_list_channels_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running list_channels_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'include_private_channels': True}
+ results = list_channels_command(http_client, args)
+ assert results.outputs[0].get('name') == 'name'
+ assert len(results.outputs) == 2
+
+
+def test_create_channel_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running create_channel_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'name': 'channel_name',
+ 'display_name': 'display_name',
+ 'type': 'Public',
+ 'purpose': 'purpose',
+ 'header': 'header', }
+ results = create_channel_command(http_client, args)
+ assert results.outputs.get('name') == 'name'
+
+
+def test_add_channel_member_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running add_channel_member_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'channel_name': 'channel_name',
+ 'user_id': 'user_id', }
+ results = add_channel_member_command(http_client, args)
+ assert 'The member username was added to the channel successfully' in results.readable_output
+
+
+def test_remove_channel_member_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running remove_channel_member_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'channel_name': 'channel_name',
+ 'user_id': 'user_id', }
+ results = remove_channel_member_command(http_client, args)
+ assert 'The member username was removed from the channel successfully.' in results.readable_output
+
+
+def test_list_users_command(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running list_users_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'channel_id': 'channel_id', }
+ results = list_users_command(http_client, args)
+ assert results.outputs[0].get('first_name') == 'first_name'
+
+
+def test_close_channel_command_no_mirror(http_client):
+ """
+ Given: A mock MatterMost client.
+ When: Running close_channel_command with a team name.
+ Then: Ensure we get the result.
+ """
+ args = {'team_name': 'team_name',
+ 'channel': 'channel_name', }
+ results = close_channel_command(http_client, args)
+ assert 'The channel channel_name was delete successfully.' in results.readable_output
+
+
+def test_close_channel_command_mirror(http_client, mocker):
+ """
+ Given: A mock MatterMost client.
+ When: Running close_channel_command with a team name.
+ Then: Ensure we get the result, and was called only once with the first mirror
+ """
+ args = {'team_name': 'team_name',
+ 'channel': 'channel_name', }
+
+ import MattermostV2
+ MattermostV2.CACHE_EXPIRY = False
+ MattermostV2.CACHED_INTEGRATION_CONTEXT = ''
+ mock_integration_context = {
+ 'mirrors': json.dumps([
+ {'channel_name': 'Channel1', 'team_id': 'team_id', 'channel_id': 'channel_id', 'mirrored': False,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'toDemisto', 'auto_close': True, 'mirror_type': 'all'},
+ {'channel_name': 'Channel2', 'team_id': 'team_id', 'channel_id': 'channel_id_different_channel', 'mirrored': True,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'both', 'auto_close': True, 'mirror_type': 'chat'},
+ ])
+ }
+ mocker.patch('MattermostV2.get_integration_context', return_value=mock_integration_context)
+ mocker.patch.object(demisto, 'investigation', return_value={'id': 'Incident123'})
+ mocker.patch.object(demisto, 'mirrorInvestigation')
+ results = close_channel_command(http_client, args)
+
+ demisto.mirrorInvestigation.assert_called_once_with('Incident123', 'none:toDemisto', True)
+ assert 'The channel channel_name was delete successfully.' in results.readable_output
+
+
+def test_send_file_command(http_client, mocker):
+ """
+ Given: A mock MatterMost client.
+ When: Running send_file_command with a team name.
+ Then: Ensure we get the result.
+ """
+ expected_file_info = {
+ 'name': 'test_file.txt',
+ 'path': '/path/to/test_file.txt'
+ }
+ mocker.patch('MattermostV2.demisto.getFilePath', return_value=expected_file_info)
+ mocker.patch.object(http_client, 'send_file_request', return_value=util_load_json("test_data/send_file_response.json"))
+
+ args = {'team_name': 'team_name',
+ 'channel': 'channel_name', }
+ send_file_command(http_client, args)
+
+
+def test_get_channel_id_to_send_notif(http_client, mocker):
+ """
+ Given: A mock MatterMost client.
+ When: Running get_channel_id_to_send_notif.
+ Then: Ensure we get the result.
+ """
+ results = get_channel_id_to_send_notif(http_client, 'username', 'channel_name', 'investigation_id')
+ assert results == 'id'
+
+
+def test_get_channel_id_from_context(mocker):
+ """
+ Given: A mock MatterMost client.
+ When: Running get_channel_id_from_context.
+ Then: Ensure we get the result.
+ """
+ import MattermostV2
+ MattermostV2.CACHE_EXPIRY = False
+ MattermostV2.CACHED_INTEGRATION_CONTEXT = ''
+ mock_integration_context = {
+ 'mirrors': json.dumps([
+ {'channel_name': 'Channel1', 'team_id': 'team_id', 'channel_id': 'ID1',
+ 'investigation_id': 'Incident123', 'mirror_direction': 'both', 'auto_close': True},
+ {'channel_name': 'Channel2', 'team_id': 'team_id', 'channel_id': 'ID2',
+ 'investigation_id': 'Incident123', 'mirror_direction': 'both', 'auto_close': True},
+ ])
+ }
+ mocker.patch('MattermostV2.get_integration_context', return_value=mock_integration_context)
+ results = get_channel_id_from_context('Channel1', 'Incident123')
+ assert results
+
+
+def test_save_entitlement():
+ """
+ Given:
+ - arguments.
+ When:
+ - Calling the save_entitlement function.
+ Then:
+ - Validate that the mocked functions were called with the expected arguments
+ """
+ entitlement = "Test Entitlement"
+ message_id = "123"
+ reply = "Test Reply"
+ expiry = "2023-09-09"
+ default_response = "Default Response"
+ to_id = "user@example.com"
+ OBJECTS_TO_KEYS = {
+ 'mirrors': 'investigation_id',
+ 'messages': 'entitlement',
+ }
+
+ with patch('MattermostV2.get_integration_context') as mock_get_integration_context, \
+ patch('MattermostV2.set_to_integration_context_with_retries') as mock_set_integration_context:
+
+ mock_get_integration_context.return_value = {'messages': []}
+ fixed_timestamp = '2023-09-09T20:08:50Z'
+
+ with freeze_time(fixed_timestamp):
+ from MattermostV2 import save_entitlement
+ save_entitlement(entitlement, message_id, reply, expiry, default_response, to_id)
+
+ expected_data = {
+ 'messages': [
+ {
+ 'root_id': message_id,
+ 'entitlement': entitlement,
+ 'reply': reply,
+ 'expiry': expiry,
+ 'sent': fixed_timestamp,
+ 'default_response': default_response,
+ 'to_id': to_id
+ }
+ ]
+ }
+
+ mock_get_integration_context.assert_called_once_with()
+ mock_set_integration_context.assert_called_once_with(expected_data, OBJECTS_TO_KEYS)
+
+
+@pytest.mark.parametrize("entitlement, expected_result", [
+ ("guid123@incident456|task789", ("guid123", "incident456", "task789")), # Scenario 1: Full entitlement
+ ("guid123@incident456", ("guid123", "incident456", "")), # Scenario 2: No task ID
+ ("guid123@", ("guid123", "", "")), # Scenario 3: No incident ID or task ID
+])
+def test_extract_entitlement(entitlement, expected_result):
+ """
+ Test the extract_entitlement function.
+ Given:
+ - Input entitlement string.
+ When:
+ - Calling the extract_entitlement function with the given input entitlement.
+ Then:
+ - Validate that the function correctly extracts the entitlement components: guid, incident_id, and task_id.
+ """
+ result = extract_entitlement(entitlement)
+
+ assert result == expected_result
+
+
+def test_mirror_investigation_create_new_channel(http_client, mocker):
+ """
+ Given a mock client and relevant arguments,
+ When calling the mirror_investigation function to create a new channel,
+ Then validate that the function returns the expected CommandResults.
+ """
+ import MattermostV2
+ MattermostV2.MIRRORING_ENABLED = True
+ MattermostV2.LONG_RUNNING = True
+ MattermostV2.SYNC_CONTEXT = True
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'mock_server_url'})
+
+ # Test data
+ args = {
+ 'type': 'all',
+ 'direction': 'Both',
+ 'channelName': 'mirror-channel',
+ 'autoclose': True,
+ }
+ mock_integration_context = {
+ 'mirrors': json.dumps([
+ {'channel_name': 'Channel1', 'team_id': 'team_id', 'channel_id': 'channel_id', 'mirrored': False,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'toDemisto', 'auto_close': True, 'mirror_type': 'all'},
+ {'channel_name': 'Channel2', 'team_id': 'team_id', 'channel_id': 'channel_id', 'mirrored': True,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'both', 'auto_close': True, 'mirror_type': 'chat'},
+ ])
+ }
+ mocker.patch('MattermostV2.get_integration_context', return_value=mock_integration_context)
+ mocker.patch.object(demisto, 'mirrorInvestigation')
+ # Call the function
+ result = mirror_investigation(http_client, **args)
+
+ # Assert the result
+
+ demisto.mirrorInvestigation.assert_called_once_with('1', 'all:Both', True)
+ assert 'Investigation mirrored successfully' in result.readable_output
+
+
+def test_send_notification_command(http_client, mocker):
+ """
+ Given -
+ client
+ When -
+ send message to channel
+ Then -
+ Validate that
+ """
+ mocker.patch.object(http_client, "send_notification_request", return_value={'id': 'message_id'})
+ result = send_notification(http_client,
+ user_id='user1',
+ message='Hello',
+ to='channel1',
+ )
+
+ assert result.readable_output == 'Message sent to MatterMost successfully. Message ID is: message_id'
+
+
+######### async tests #########
+
+
+@pytest.mark.asyncio
+async def test_handle_posts_regular_post(http_client, mocker):
+ """
+ Given:
+ - Post payload.
+ When:
+ - Calling the handle_posts function.
+ Then:
+ - Validate that the mirror investigation func was called. only once, as one of the mirrors was already mirrored.
+ """
+ import MattermostV2
+ payload = util_load_json("test_data/posted_data_user.json")
+ mock_integration_context = {
+ 'mirrors': json.dumps([
+ {'channel_name': 'Channel1', 'team_id': 'team_id', 'channel_id': 'channel_id', 'mirrored': False,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'toDemisto', 'auto_close': True, 'mirror_type': 'all'},
+ {'channel_name': 'Channel2', 'team_id': 'team_id', 'channel_id': 'channel_id', 'mirrored': True,
+ 'investigation_id': 'Incident123', 'mirror_direction': 'both', 'auto_close': True, 'mirror_type': 'chat'},
+ ])
+ }
+ MattermostV2.CLIENT = http_client
+ MattermostV2.CACHE_EXPIRY = False
+ mocker.patch('MattermostV2.get_integration_context', return_value=mock_integration_context)
+ mocker.patch('MattermostV2.handle_text_received_from_mm', return_value=None)
+ mocker.patch.object(demisto, 'mirrorInvestigation')
+ await handle_posts(payload)
+ demisto.mirrorInvestigation.assert_called_once_with('Incident123', 'all:toDemisto', True)
+
+
+@pytest.mark.asyncio
+async def test_handle_text(mocker):
+ """
+ Given:
+ - arguments.
+ When:
+ - Calling the handle_text_received_from_mm function.
+ Then:
+ - Validate that the `demisto.addEntry` method was called with the expected arguments
+ """
+
+ investigation_id = "123"
+ text = "Hello, this is a test message"
+ operator_email = "test@example.com"
+ operator_name = "Test User"
+ MESSAGE_FOOTER = '\n**From Mattermost**'
+
+ with patch('MattermostV2.demisto') as mock_demisto:
+ await handle_text_received_from_mm(investigation_id, text, operator_email, operator_name)
+ mock_demisto.addEntry.assert_called_once_with(
+ id=investigation_id,
+ entry=text,
+ username=operator_name,
+ email=operator_email,
+ footer=MESSAGE_FOOTER
+ )
+
+
+@pytest.mark.asyncio
+async def test_event_handler_error(ws_client, mocker):
+ """
+ Given:
+ - Error post payload.
+ When:
+ - Calling the handle_posts function.
+ Then:
+ - Validate that the demisto.error func was called.
+ """
+ error_payload = """{"status": "FAIL",
+ "seq_reply": 2,
+ "error": {"id": "some.error.id.here", "message": "Some error message here"
+ }
+ }"""
+ error_mock = mocker.patch.object(demisto, 'error')
+ mocker.patch.object(demisto, 'updateModuleHealth')
+
+ await event_handler(ws_client, error_payload)
+
+ assert error_mock.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_event_handler_bot_message(http_client, mocker):
+ """
+ Given:
+ - Bot post payload.
+ When:
+ - Calling the handle_posts function.
+ Then:
+ - Validate that the demisto.debug func was called.
+ """
+ import MattermostV2
+ MattermostV2.CLIENT = http_client
+ bot_payload = util_load_json("test_data/posted_data_bot.json")
+ mocker.patch.object(demisto, 'updateModuleHealth')
+ mocker.patch.object(demisto, 'debug')
+
+ await handle_posts(bot_payload)
+ demisto.debug.assert_called_once_with(
+ "MM: Got a bot message. Will not mirror."
+ )
+
+
+@pytest.mark.asyncio
+async def test_event_handler_direct_message(http_client, mocker):
+ """
+ Given:
+ - dm post payload.
+ When:
+ - Calling the handle_posts function.
+ Then:
+ - Validate that the demisto.debug func was called.
+ """
+ import MattermostV2
+ MattermostV2.CLIENT = http_client
+ MattermostV2.ALLOW_INCIDENTS = True
+
+ payload = util_load_json("test_data/posted_data_user.json")
+ payload["data"]["channel_type"] = "D"
+ mocker.patch.object(demisto, 'updateModuleHealth')
+ mocker.patch.object(demisto, 'directMessage', return_value={})
+
+ await handle_posts(payload)
+ demisto.directMessage.assert_called_once_with(
+ "message", "", "", True
+ )
+
+
+def test_answer_question(http_client, mocker):
+ """
+ Test the answer_question function.
+ Given:
+ - A mocked question dictionary.
+ When:
+ - Calling the answer_question function with the mocked question.
+ Then:
+ - Validate that the function correctly handles the entitlement and returns the incident_id.
+ """
+ import MattermostV2
+ MattermostV2.CLIENT = http_client
+ mock_question = {
+ 'entitlement': 'guid123@incident456|task789',
+ 'to_id': '123'
+ }
+
+ mocker.patch('MattermostV2.process_entitlement_reply')
+
+ result = answer_question("Answer123", mock_question, "user@example.com")
+ assert result == 'incident456'
+
+
+@pytest.mark.asyncio
+async def test_create_incidents(mocker):
+ """
+ Given:
+ - Incidents
+ When:
+ - Calling the create_incidents function.
+ Then:
+ - Validate that the demisto.createIncidents func was called.
+ """
+
+ mocker.patch.object(demisto, 'createIncidents', return_value='nice')
+
+ incidents = [{"name": "xyz", "details": "1.1.1.1,8.8.8.8"}]
+
+ incidents_with_labels = [{'name': 'xyz', 'details': '1.1.1.1,8.8.8.8',
+ 'labels': [{'type': 'Reporter', 'value': 'spengler'},
+ {'type': 'ReporterEmail', 'value': 'test@test.com'},
+ {'type': 'Source', 'value': 'Slack'}]}]
+
+ data = await create_incidents(incidents, 'spengler', 'test@test.com', 'demisto_user')
+
+ incident_arg = demisto.createIncidents.call_args[0][0]
+ user_arg = demisto.createIncidents.call_args[1]['userID']
+
+ assert incident_arg == incidents_with_labels
+ assert user_arg == 'demisto_user'
+ assert data == 'nice'
+
+
+class TestGetWarRoomURL:
+
+ def test_get_war_room_url_with_xsiam_from_incident_war_room(self, mocker):
+ url = "https://example.com/WarRoom/INCIDENT-2930"
+ expected_war_room_url = "https://example.com/incidents/war_room?caseId=2930"
+ mocker.patch('MattermostV2.is_xsiam', return_value=True)
+ mocker.patch.dict(demisto.callingContext, {'context': {'Inv': {'id': 'INCIDENT-2930'}}})
+
+ assert get_war_room_url(url) == expected_war_room_url
+
+ def test_get_war_room_url_without_xsiam_from_incident_war_room(self, mocker):
+ url = "https://example.com/WarRoom/INCIDENT-2930"
+ mocker.patch('MattermostV2.is_xsiam', return_value=False)
+ expected_war_room_url = "https://example.com/WarRoom/INCIDENT-2930"
+ assert get_war_room_url(url) == expected_war_room_url
+
+ def test_get_war_room_url_with_xsiam_from_alert_war_room(self, mocker):
+ url = "https://example.com/WarRoom/ALERT-1234"
+ mocker.patch('MattermostV2.is_xsiam', return_value=True)
+ mocker.patch.dict(demisto.callingContext, {'context': {'Inv': {'id': '1234'}}})
+ expected_war_room_url = \
+ "https://example.com/incidents/alerts_and_insights?caseId=1234&action:openAlertDetails=1234-warRoom"
+ assert get_war_room_url(url) == expected_war_room_url
diff --git a/Packs/Mattermost/Integrations/MattermostV2/README.md b/Packs/Mattermost/Integrations/MattermostV2/README.md
new file mode 100644
index 000000000000..d63d2b0b3cbd
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/README.md
@@ -0,0 +1,636 @@
+Mattermost is an open-source, self-hostable online chat service with file sharing, search, and integrations. It is designed as an internal chat for organizations and companies.
+
+Some changes have been made that might affect your existing content.
+If you are upgrading from a previous version of this integration, see [Breaking Changes](#breaking-changes-from-the-previous-version-of-this-integration---mattermost-v2).
+
+## Configure Mattermost v2 on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Mattermost v2.
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Description** | **Required** |
+ | --- | --- | --- |
+ | Server URL | | True |
+ | Bot Access Token | The Bot Access Token to use for connection. | True |
+ | Personal Access Token | The Personal Access Token to use for connection. | True |
+ | Team Name | | True |
+ | Default Notifications Channel | | False |
+ | Enable Incident Mirroring | | False |
+ | Allow external users to create incidents via DM. | | False |
+ | Long running instance. Required for investigation mirroring and direct messages. | | False |
+ | Trust any certificate (not secure) | | False |
+ | Use system proxy settings | | False |
+
+4. Click **Test** to validate the URLs, token, and connection.
+
+## Commands
+
+You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+### mattermost-get-team
+
+***
+Gets a team's details.
+
+#### Required Permissions
+
+Must be authenticated and have the view_team permission.
+
+#### Base Command
+
+`mattermost-get-team`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The name of the team to retrieve. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Mattermost.Team.id | String | The ID of the team. |
+| Mattermost.Team.create_at | Unknown | When was the team created. |
+| Mattermost.Team.update_at | Unknown | When was the team updated. |
+| Mattermost.Team.delete_at | Unknown | When was the team deleted. |
+| Mattermost.Team.display_name | String | The display name of the team. |
+| Mattermost.Team.name | String | The name of the team. |
+| Mattermost.Team.description | String | The description of the team. |
+| Mattermost.Team.email | String | The email of the team. |
+| Mattermost.Team.type | String | The type of the team. |
+| Mattermost.Team.company_name | String | The company name of the team. |
+| Mattermost.Team.allowed_domains | String | The allowed domains of the team. |
+| Mattermost.Team.invite_id | String | The allowed domains of the team. |
+| Mattermost.Team.allow_open_invite | Unknown | Does the team allow open invites. |
+| Mattermost.Team.scheme_id | String | The scheme ID of the team. |
+| Mattermost.Team.policy_id | String | The policy ID of the team. |
+
+#### Command example
+```!mattermost-get-team team_name=panw```
+#### Context Example
+```json
+{
+ "Mattermost": {
+ "Team": {
+ "allow_open_invite": false,
+ "allowed_domains": "",
+ "cloud_limits_archived": false,
+ "company_name": "",
+ "create_at": 1696486762638,
+ "delete_at": 0,
+ "description": "",
+ "display_name": "PANW",
+ "email": "rrapoport@paloaltonetworks.com",
+ "group_constrained": false,
+ "id": "6ie46zmi4fdqiqqe7p5gfki9hr",
+ "invite_id": "ocoh4fcrw7dzxgfu5bdtqpy7cr",
+ "name": "panw",
+ "policy_id": null,
+ "scheme_id": "",
+ "type": "O",
+ "update_at": 1696486762638
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Team details:
+>|allow_open_invite|allowed_domains|cloud_limits_archived|company_name|create_at|delete_at|description|display_name|email|group_constrained|id|invite_id|name|policy_id|scheme_id|type|update_at|
+>|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
+>| false | | false | | 1696486762638 | 0 | | PANW | email | false | id | id | panw | | | O | 1696486762638 |
+
+
+### mattermost-list-channels
+
+***
+Lists channels.
+
+#### Required Permissions
+
+manage_system
+
+#### Base Command
+
+`mattermost-list-channels`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team | The name of the team to list channels from. Default is the team name from the integration configuration. | Optional |
+| include_private_channels | Whether to include private channels. Default is false. Possible values are: true, false. | Optional |
+| page | The page number to retrieve. Default value is 0. | Optional |
+| page_size | The size of the page to retrieve. Default value is 50. | Optional |
+| limit | How many results to retrieve. Will override the page and page_size arguments if given. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Mattermost.Channel.id | String | The ID of the channel. |
+| Mattermost.Channel.create_at | Unknown | When was the channel created. |
+| Mattermost.Channel.update_at | Unknown | When was the channel updated. |
+| Mattermost.Channel.delete_at | Unknown | When was the channel deleted. |
+| Mattermost.Channel.display_name | String | The display name of the channel. |
+| Mattermost.Channel.name | String | The name of the channel. |
+| Mattermost.Channel.description | String | The description of the channel. |
+| Mattermost.Channel.header | String | The header of the channel. |
+| Mattermost.Channel.purpose | String | The purpose of the channel. |
+| Mattermost.Channel.last_post_at | Unknown | When was the last post to the channel made. |
+| Mattermost.Channel.total_msg_count | Unknown | The total message count of the channel. |
+| Mattermost.Channel.extra_update_at | Unknown | When was the channel updated. |
+| Mattermost.Channel.creator_id | String | The creator ID of the channel. |
+
+#### Command example
+```!mattermost-list-channels limit=2 include_private_channels=true```
+#### Context Example
+```json
+{
+ "Mattermost": {
+ "Channel": [
+ {
+ "create_at": 1697024204532,
+ "creator_id": "creator_id",
+ "delete_at": 0,
+ "display_name": "Name",
+ "extra_update_at": 0,
+ "group_constrained": null,
+ "header": "",
+ "id": "id",
+ "last_post_at": 1712503619042,
+ "last_root_post_at": 1712503619042,
+ "name": "name",
+ "policy_id": null,
+ "props": null,
+ "purpose": "",
+ "scheme_id": null,
+ "shared": null,
+ "team_id": "team_id",
+ "total_msg_count": 58,
+ "total_msg_count_root": 56,
+ "type": "O",
+ "update_at": 1697024204532
+ },
+ {
+ "create_at": 1696486762650,
+ "creator_id": "",
+ "delete_at": 0,
+ "display_name": "Off-Topic",
+ "extra_update_at": 0,
+ "group_constrained": null,
+ "header": "",
+ "id": "id",
+ "last_post_at": 1712501916866,
+ "last_root_post_at": 1712501916866,
+ "name": "off-topic",
+ "policy_id": null,
+ "props": null,
+ "purpose": "",
+ "scheme_id": null,
+ "shared": null,
+ "team_id": "team_id",
+ "total_msg_count": 4,
+ "total_msg_count_root": 4,
+ "type": "O",
+ "update_at": 1696486762650
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Channels:
+>|name|display_name|type|id|
+>|---|---|---|---|
+>| name | Display_Name | O | id |
+>| off-topic | Off-Topic | O | id |
+
+
+### mattermost-create-channel
+
+***
+Creates a channel.
+
+#### Required Permissions
+
+If creating a public channel, create_public_channel permission is required. If creating a private channel, create_private_channel permission is required.
+
+#### Base Command
+
+`mattermost-create-channel`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| display_name | The display name of the channel to create. | Required |
+| name | The name of the channel to create. | Required |
+| type | The type of the channel to create. Possible values are: public, private. Default is public. | Optional |
+| purpose | The purpose of the channel to create. | Optional |
+| header | The header of the channel to create. | Optional |
+| team | The team name of the channel to create. Default is the team name from the integration configuration. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Mattermost.Channel.id | String | The ID of the channel. |
+| Mattermost.Channel.create_at | Unknown | When was the channel created. |
+| Mattermost.Channel.update_at | Unknown | When was the channel updated. |
+| Mattermost.Channel.delete_at | Unknown | When was the channel deleted. |
+| Mattermost.Channel.display_name | String | The display name of the channel. |
+| Mattermost.Channel.name | String | The name of the channel. |
+| Mattermost.Channel.description | String | The description of the channel. |
+| Mattermost.Channel.header | String | The header of the channel. |
+| Mattermost.Channel.purpose | String | The purpose of the channel. |
+| Mattermost.Channel.last_post_at | Unknown | When was the last post to the channel made. |
+| Mattermost.Channel.total_msg_count | Unknown | The total message count of the channel. |
+| Mattermost.Channel.extra_update_at | Unknown | When was the channel updated. |
+| Mattermost.Channel.creator_id | String | The creator ID of the channel. |
+| Mattermost.Channel.scheme_id | String | The scheme ID of the channel. |
+
+#### Command example
+```!mattermost-create-channel display_name=channel_name name=channel_name type=Private```
+#### Context Example
+```json
+{
+ "Mattermost": {
+ "Channel": {
+ "create_at": 1712649608411,
+ "creator_id": "creator_id",
+ "delete_at": 0,
+ "display_name": "channel_name",
+ "extra_update_at": 0,
+ "group_constrained": null,
+ "header": "",
+ "id": "id",
+ "last_post_at": 1712649608426,
+ "last_root_post_at": 1712649608426,
+ "name": "channel_name",
+ "policy_id": null,
+ "props": null,
+ "purpose": "",
+ "scheme_id": null,
+ "shared": null,
+ "team_id": "team_id",
+ "total_msg_count": 0,
+ "total_msg_count_root": 0,
+ "type": "P",
+ "update_at": 1712649608411
+ }
+ }
+}
+```
+
+#### Human Readable Output
+
+>Channel channel_name was created successfully.
+
+### mattermost-add-channel-member
+
+***
+Adds a channel member.
+
+#### Required Permissions
+
+No permissions required.
+
+#### Base Command
+
+`mattermost-add-channel-member`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team | The team name of the channel to add the user to. Default is the team name from the integration configuration. | Optional |
+| channel | The name of the channel to add the user to. | Required |
+| user_id | The ID of the user to add. Use the command 'mattermost-list-users' to fetch the user ID. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+### mattermost-remove-channel-member
+
+***
+Removes a channel member.
+
+#### Required Permissions
+
+manage_public_channel_members permission if the channel is public. manage_private_channel_members permission if the channel is private.
+
+#### Base Command
+
+`mattermost-remove-channel-member`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team | The team name of the channel to add the user to. Default is the team name from the integration configuration. | Optional |
+| channel | The channel name of the channel to remove the user from. | Required |
+| user_id | The ID of the user to remove. Use the command 'mattermost-list-users' to fetch the user ID. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+### mattermost-list-users
+
+***
+Lists users.
+
+#### Required Permissions
+
+Requires an active session and (if specified) membership to the channel or team being selected from.
+
+#### Base Command
+
+`mattermost-list-users`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The name of the team to filter users by. | Optional |
+| channel | The name of the channel to filter users by. If mentioned, a team name must be mentioned as well. | Optional |
+| page | The page number to retrieve. Should be provided with the page_size argument. Default value is 0. | Optional |
+| page_size | The size of the page to retrieve. Should be provided with the page argument. Default value is 50. | Optional |
+| limit | How many results to retrieve. If provided, overrides the page and page_size arguments. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Mattermost.User.id | String | The ID of the user. |
+| Mattermost.User.create_at | Unknown | When was the user created. |
+| Mattermost.User.update_at | Unknown | When was the user updated. |
+| Mattermost.User.delete_at | Unknown | When was the user deleted. |
+| Mattermost.User.username | String | The username of the user. |
+| Mattermost.User.auth_data | String | The authorization data of the user. |
+| Mattermost.User.auth_service | String | The authorization service of the user. |
+| Mattermost.User.email | String | The email of the user. |
+| Mattermost.User.nickname | String | The nickname of the user. |
+| Mattermost.User.first_name | Unknown | The first name of the user. |
+| Mattermost.User.last_name | Unknown | The last name of the user. |
+| Mattermost.User.position | Unknown | The position of the user. |
+| Mattermost.User.roles | String | The roles of the channel. |
+| Mattermost.User.locale | String | The locale of the channel. |
+| Mattermost.User.timezone | Unknown | The timezone of the user. |
+
+#### Command example
+```!mattermost-list-users limit=2 team_name=panw```
+#### Context Example
+```json
+{
+ "Mattermost": {
+ "User": [
+ {
+ "auth_data": "",
+ "auth_service": "",
+ "create_at": 1696486752272,
+ "delete_at": 0,
+ "disable_welcome_email": false,
+ "email": "email",
+ "first_name": "",
+ "id": "id",
+ "last_name": "",
+ "locale": "en",
+ "nickname": "",
+ "position": "",
+ "roles": "system_admin system_user",
+ "timezone": {
+ "automaticTimezone": "Asia/Jerusalem",
+ "manualTimezone": "",
+ "useAutomaticTimezone": "true"
+ },
+ "update_at": 1696486762658,
+ "username": "admin"
+ },
+ {
+ "auth_data": "",
+ "auth_service": "",
+ "create_at": 1696500307646,
+ "delete_at": 0,
+ "disable_welcome_email": false,
+ "email": "email",
+ "first_name": "",
+ "id": "id",
+ "last_name": "",
+ "locale": "en",
+ "nickname": "",
+ "position": "",
+ "roles": "system_user system_admin",
+ "timezone": {
+ "automaticTimezone": "Asia/Jerusalem",
+ "manualTimezone": "",
+ "useAutomaticTimezone": "true"
+ },
+ "update_at": 1697354262697,
+ "username": "username"
+ }
+ ]
+ }
+}
+```
+
+#### Human Readable Output
+
+>### Users:
+>|username|email|role|id|
+>|---|---|---|---|
+>| admin | admin@admin.com | | 8a6t7whumbdbxrawretujh6rre |
+>| dev | admin@ddev.com | | o9hpcwz73fdwxe9adue8jxo16o |
+
+
+### mattermost-send-file
+
+***
+Sends a file.
+
+#### Required Permissions
+
+Must have upload_file permission.
+
+#### Base Command
+
+`mattermost-send-file`
+
+#### Command example
+```!mattermost-send-file message=check entry_id=85@109 channel=test```
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The team name of the channel to send the file to. Default is the team name from the integration configuration. | Optional |
+| channel | The channel name of the channel to send the file to. Cannot be combined with the to argument. | Optional |
+| message | The message to send to the channel along with the file. | Required |
+| entry_id | The entry ID of the file. | Required |
+| to | The username or email of the user to send the file to. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Human Readable Output
+
+file test.txt was successfully sent to channel test
+
+### send-notification
+
+***
+Send a message using a chatbot app.
+
+#### Required Permissions
+
+Must have create_post permission for the channel the post is being created in.
+
+#### Base Command
+
+`send-notification`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| message | The message to send. | Required |
+| channel | The channel name to send the notification to. Default value is the channel configuration parameter. | Optional |
+| entry | An entry ID to send as a link. | Optional |
+| to | The username or email of the user to send the file to. | Optional |
+| ignoreAddURL | Adds the War Room link to the message. Possible values are: true, false. | Optional |
+| mattermost_ask | The message, in JSON format, for asking questions to the user. Default value is false. Possible values are: true, false. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### mattermost-close-channel
+
+***
+Closes a channel.
+
+#### Required Permissions
+
+delete_public_channel permission if the channel is public. delete_private_channel permission if the channel is private, or has manage_system permission.
+
+#### Base Command
+
+`mattermost-close-channel`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The team name of the channel to close. Default value is the team name from the integration configuration. | Optional |
+| channel | The channel name of the channel to close. If not provided, the mirrored investigation channel is archived (if the channel exists). | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### close-channel
+
+***
+Closes a mirrored Mattermost channel. If not provided, the mirrored investigation channel is archived (if the channel exists).
+
+#### Required Permissions
+
+delete_public_channel permission if the channel is public. delete_private_channel permission if the channel is private, or has manage_system permission.
+
+#### Base Command
+
+`close-channel`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The team name of the channel to delete. Default value is the team name from the integration configuration. | Optional |
+| channel | The channel name of the channel to close. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### mirror-investigation
+
+***
+Mirrors the investigation between Mattermost and the Cortex XSOAR War Room.
+
+#### Required Permissions
+
+No permissions required.
+
+#### Base Command
+
+`mirror-investigation`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| type | The mirroring type. Can be "all", which mirrors everything, "chat", which mirrors only chats (not commands), or "none", which stops all mirroring. Possible values are: all, chat, none. Default is all. | Optional |
+| autoclose | Whether the channel is auto-closed when an investigation is closed. Possible values are: true, false. Default is true. | Optional |
+| direction | The mirroring direction. Possible values are: Both, FromDemisto, ToDemisto. Default is Both. | Optional |
+| channel | The name of the channel. The default is "incident-<incidentID>". | Optional |
+| kickAdmin | Whether to remove the admin from the newly created channel. Default value is false. Possible values are: true, false. Default is false. | Optional |
+| mirrorTo | Mirrors the investigation to a group (private channel) or a public channel. Possible values are: group, channel. Default is group. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+### close-channel
+
+***
+Closes a mirrored Mattermost channel. If not provided, the mirrored investigation channel is archived (if the channel exists).
+
+#### Base Command
+
+`close-channel`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| team_name | The team name of the channel to delete. Default value is the team name from the integration configuration. | Optional |
+| channel_name | The channel name of the channel to delete. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+### mattermost-mirror-investigation
+
+***
+Mirrors the investigation between Mattermost and the Cortex XSOAR War Room.
+
+#### Required Permissions
+
+No permissions required.
+
+#### Base Command
+
+`mattermost-mirror-investigation`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| type | The mirroring type. Can be "all", which mirrors everything, "chat", which mirrors only chats (not commands), or "none", which stops all mirroring. Possible values are: all, chat, none. Default is all. | Optional |
+| autoclose | Whether the channel is auto-closed when an investigation is closed. Possible values are: true, false. Default is true. | Optional |
+| direction | The mirroring direction. Possible values are: Both, FromDemisto, ToDemisto. Default is Both. | Optional |
+| channel | The name of the channel. The default is "incident-<incidentID>". | Optional |
+| kickAdmin | Whether to remove the admin from the newly created channel. Default value is false. Possible values are: true, false. Default is false. | Optional |
+| mirrorTo | Mirrors the investigation to a group (private channel) or a public channel. Possible values are: group, channel. Default is group. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+## Breaking changes from the previous version of this integration - Mattermost v2
+
+A new required configuration parameter was added: *Bot Access Token*.
diff --git a/Packs/Mattermost/Integrations/MattermostV2/command_examples b/Packs/Mattermost/Integrations/MattermostV2/command_examples
new file mode 100644
index 000000000000..0041068ebdfd
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/command_examples
@@ -0,0 +1,4 @@
+!mattermost-get-team team_name=panw
+!mattermost-list-users limit=2 team_name=panw
+!mattermost-create-channel display_name=channel_name name=channel_name type=Private
+!mattermost-list-channels limit=2 include_private_channels=true
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/create_channel_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/create_channel_response.json
new file mode 100644
index 000000000000..dd0bc60383ce
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/create_channel_response.json
@@ -0,0 +1,23 @@
+{
+ "id": "id",
+ "create_at": 1697024204532,
+ "update_at": 1697024204532,
+ "delete_at": 0,
+ "team_id": "team_id",
+ "type": "O",
+ "display_name": "display_name",
+ "name": "name",
+ "header": "",
+ "purpose": "",
+ "last_post_at": 0,
+ "total_msg_count": 0,
+ "extra_update_at": 0,
+ "creator_id": "creator_id",
+ "scheme_id": null,
+ "props": null,
+ "group_constrained": null,
+ "shared": null,
+ "total_msg_count_root": 0,
+ "policy_id": null,
+ "last_root_post_at": 0
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/create_post_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/create_post_response.json
new file mode 100644
index 000000000000..6e28dee66daf
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/create_post_response.json
@@ -0,0 +1,21 @@
+{
+ "id": "id",
+ "create_at": 1712503619042,
+ "update_at": 1712503619042,
+ "edit_at": 0,
+ "delete_at": 0,
+ "is_pinned": false,
+ "user_id": "user_id",
+ "channel_id": "channel_id",
+ "root_id": "",
+ "original_id": "",
+ "message": "message",
+ "type": "",
+ "props": {},
+ "hashtags": "",
+ "pending_post_id": "",
+ "reply_count": 0,
+ "last_reply_at": 0,
+ "participants": null,
+ "metadata": {}
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/get_bot_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/get_bot_response.json
new file mode 100644
index 000000000000..b1b48c72de1a
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/get_bot_response.json
@@ -0,0 +1,39 @@
+{
+ "id": "id",
+ "create_at": 1696501254734,
+ "update_at": 1696501465092,
+ "delete_at": 0,
+ "username": "testbot",
+ "auth_data": "",
+ "auth_service": "",
+ "email": "testbot@test",
+ "nickname": "",
+ "first_name": "testbot",
+ "last_name": "",
+ "position": "",
+ "roles": "system_user system_post_all",
+ "notify_props": {
+ "channel": "true",
+ "comments": "never",
+ "desktop": "mention",
+ "desktop_sound": "true",
+ "desktop_threads": "all",
+ "email": "true",
+ "email_threads": "all",
+ "first_name": "false",
+ "mention_keys": "",
+ "push": "mention",
+ "push_status": "away",
+ "push_threads": "all"
+ },
+ "last_password_update": 1696501254734,
+ "last_picture_update": -1696501254843,
+ "locale": "en",
+ "timezone": {
+ "automaticTimezone": "",
+ "manualTimezone": "",
+ "useAutomaticTimezone": "true"
+ },
+ "is_bot": true,
+ "disable_welcome_email": false
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/get_team_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/get_team_response.json
new file mode 100644
index 000000000000..d564c2f8c616
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/get_team_response.json
@@ -0,0 +1,19 @@
+{
+ "id": "team_id",
+ "create_at": 1696486762638,
+ "update_at": 1696486762638,
+ "delete_at": 0,
+ "display_name": "display_name",
+ "name": "team_name",
+ "description": "mock team",
+ "email": "mock email",
+ "type": "O",
+ "company_name": "company_name",
+ "allowed_domains": "",
+ "invite_id": "invite_id",
+ "allow_open_invite": false,
+ "scheme_id": "scheme_id",
+ "group_constrained": false,
+ "policy_id": null,
+ "cloud_limits_archived": false
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/list_channels_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/list_channels_response.json
new file mode 100644
index 000000000000..48013977baac
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/list_channels_response.json
@@ -0,0 +1,16 @@
+[ {
+ "id": "channel_id",
+ "create_at": 1519847358431,
+ "update_at": 1519847358431,
+ "delete_at": 0,
+ "team_id": "team_id",
+ "type": "O",
+ "display_name": "display_name",
+ "name": "name",
+ "header": "",
+ "purpose": "",
+ "last_post_at": 1519847395298,
+ "total_msg_count": 0,
+ "extra_update_at": 1519847395295,
+ "creator_id": "creator_id"
+}]
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/list_users_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/list_users_response.json
new file mode 100644
index 000000000000..7e2bfa899206
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/list_users_response.json
@@ -0,0 +1,22 @@
+[{
+ "id": "user_id",
+ "create_at": 1696486752272,
+ "update_at": 1696486762658,
+ "delete_at": 0,
+ "username": "username",
+ "auth_data": "",
+ "auth_service": "",
+ "email": "email",
+ "nickname": "",
+ "first_name": "first_name",
+ "last_name": "last_name",
+ "position": "position",
+ "roles": "system_admin system_user",
+ "locale": "en",
+ "timezone": {
+ "automaticTimezone": "Asia/Jerusalem",
+ "manualTimezone": "",
+ "useAutomaticTimezone": "true"
+ },
+ "disable_welcome_email": false
+}]
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_bot.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_bot.json
new file mode 100644
index 000000000000..522d5bbd185b
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_bot.json
@@ -0,0 +1,21 @@
+{
+ "event": "posted",
+ "data": {
+ "channel_display_name": "channel_display_name",
+ "channel_name": "channel_name",
+ "channel_type": "O",
+ "post": "{\"id\":\"id\",\"create_at\":1712132106629,\"update_at\":1712132106629,\"edit_at\":0,\"delete_at\":0,\"is_pinned\":false,\"user_id\":\"user_id\",\"channel_id\":\"channel_id\",\"root_id\":\"\",\"original_id\":\"\",\"message\":\"admin: check\",\"type\":\"\",\"props\":{\"from_bot\":\"true\"},\"hashtags\":\"\",\"pending_post_id\":\"\",\"reply_count\":0,\"last_reply_at\":0,\"participants\":null,\"metadata\":{}}",
+ "sender_name": "MASKED_SECRET@testbot",
+ "set_online": true,
+ "team_id": "team_id"
+ },
+ "broadcast": {
+ "omit_users": null,
+ "user_id": "",
+ "channel_id": "channel_id",
+ "team_id": "",
+ "connection_id": "",
+ "omit_connection_id": ""
+ },
+ "seq": 15
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_user.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_user.json
new file mode 100644
index 000000000000..0913e22c3af8
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/posted_data_user.json
@@ -0,0 +1,21 @@
+{
+ "event": "posted",
+ "data": {
+ "channel_display_name": "channel_display_name",
+ "channel_name": "channel_name",
+ "channel_type": "O",
+ "post": "{\"id\":\"id\",\"create_at\":1712131194180,\"update_at\":1712131194180,\"edit_at\":0,\"delete_at\":0,\"is_pinned\":false,\"user_id\":\"user_id_not_bot\",\"channel_id\":\"channel_id\",\"root_id\":\"\",\"original_id\":\"\",\"message\":\"message\",\"type\":\"\",\"props\":{\"disable_group_highlight\":true},\"hashtags\":\"\",\"pending_post_id\":\"pending_post_id\",\"reply_count\":0,\"last_reply_at\":0,\"participants\":null,\"metadata\":{}}",
+ "sender_name": "sender_name",
+ "set_online": true,
+ "team_id": "team_id"
+ },
+ "broadcast": {
+ "omit_users": null,
+ "user_id": "",
+ "channel_id": "channel_id",
+ "team_id": "",
+ "connection_id": "",
+ "omit_connection_id": ""
+ },
+ "seq": 8
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/Integrations/MattermostV2/test_data/send_file_response.json b/Packs/Mattermost/Integrations/MattermostV2/test_data/send_file_response.json
new file mode 100644
index 000000000000..ace458f2b190
--- /dev/null
+++ b/Packs/Mattermost/Integrations/MattermostV2/test_data/send_file_response.json
@@ -0,0 +1,19 @@
+{
+ "file_infos": [
+ {
+ "id": "id",
+ "user_id": "user_id",
+ "channel_id": "",
+ "create_at": 1697024555904,
+ "update_at": 1697024555904,
+ "delete_at": 0,
+ "name": "name",
+ "extension": "",
+ "size": 2271,
+ "mime_type": "",
+ "remote_id": "",
+ "archived": false
+ }
+ ],
+ "client_ids": []
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/ReleaseNotes/2_0_0.json b/Packs/Mattermost/ReleaseNotes/2_0_0.json
new file mode 100644
index 000000000000..9da00b9971e1
--- /dev/null
+++ b/Packs/Mattermost/ReleaseNotes/2_0_0.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "The script 'MattermostAskUser' now only supported by Mattermost V2, as Mattermost integration is deprecated."
+}
\ No newline at end of file
diff --git a/Packs/Mattermost/ReleaseNotes/2_0_0.md b/Packs/Mattermost/ReleaseNotes/2_0_0.md
new file mode 100644
index 000000000000..4164e272a7d3
--- /dev/null
+++ b/Packs/Mattermost/ReleaseNotes/2_0_0.md
@@ -0,0 +1,19 @@
+
+#### Integrations
+
+##### New: Mattermost v2
+
+- New: Mattermost is an open-source, self-hostable online chat service with file sharing, search, and integrations. It is designed as an internal chat for organizations and companies.
+
+#### Scripts
+
+##### MattermostAskUser
+
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
+
+- The script is now only supported by Mattermost V2, as the Mattermost integration is deprecated.
+- Added new arguments:
+ - *reply*
+ - *lifetime*
+  - *default_response*
+
diff --git a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.py b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.py
index 1c5c1965140a..4196e89ee53b 100644
--- a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.py
+++ b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.py
@@ -1,6 +1,8 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # pragma: no cover
+
def main():
res = demisto.executeCommand('addEntitlement', {
@@ -22,18 +24,40 @@ def main():
option2 = 'no'
entitlementString = entitlement + '@' + demisto.investigation()['id']
- if demisto.get(demisto.args(), 'task'):
- entitlementString += '|' + demisto.get(demisto.args(), 'task')
- message = '%s - Please reply `%s %s` or `%s %s`' % (demisto.args()['message'],
- option1,
- entitlementString,
- option2,
- entitlementString)
- demisto.results(demisto.executeCommand('send-notification', {
- 'to': demisto.get(demisto.args(), 'user'),
+ args = demisto.args()
+ lifetime = args.get('lifetime', '1 day')
+ try:
+ parsed_date = arg_to_datetime('in ' + lifetime)
+ assert parsed_date is not None, f'Could not parse in {lifetime}'
+ expiry = datetime.strftime(parsed_date, DATE_FORMAT)
+ except Exception:
+ demisto.debug(f'Could not parse the argument "lifetime" , got {lifetime}. will use "in 1 day" instead')
+ parsed_date = arg_to_datetime('in 1 day')
+ assert parsed_date is not None
+ expiry = datetime.strftime(parsed_date,
+ DATE_FORMAT)
+ default_response = args.get('default_response')
+ reply = args.get('reply')
+
+ if task := demisto.get(args, 'task'):
+ entitlementString += '|' + task
+
+ message = f'**{args.get("message")}** - Please reply to this thread with `{option1}` or `{option2}`.'
+
+ message_dict = json.dumps({
'message': message,
+ 'entitlement': entitlementString,
+ 'reply': reply,
+ 'expiry': expiry,
+ 'default_response': default_response
+ })
+
+ return_results(demisto.executeCommand('send-notification', {
+ 'to': demisto.get(demisto.args(), 'user'),
+ 'message': message_dict,
'ignoreAddURL': 'true',
- 'using-brand': 'mattermost'
+ 'mattermost_ask': True,
+ 'using-brand': 'MattermostV2',
}))
diff --git a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.yml b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.yml
index df57e9575213..e5a3ba369f9e 100644
--- a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.yml
+++ b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser.yml
@@ -32,11 +32,17 @@ args:
- "false"
description: Indicates whether to use one-time entitlement or a persistent one.
defaultValue: "false"
+- defaultValue: Thank you **{user}**. You have answered **{response}** .
+ description: The reply to send to the user. Use the templates {user} and {response} to incorporate these in the reply. (i.e., "Thank you **{user}**. You have answered **{response}**.").
+ name: reply
+- defaultValue: 1 day
+ description: Time until the question expires. For example - 1 day. When it expires, a default response defined under the 'default_response' argument is sent.
+ name: lifetime
+- description: Default response in case the question expires.
+ name: default_response
+ defaultValue: No response was received from the user.
scripttarget: 0
-dependson:
- must:
- - mattermost-send
fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.13.83255
+dockerimage: demisto/python3:3.10.14.96411
tests:
-- No tests (auto formatted)
+- MattermostAskUser_testplaybook
diff --git a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser_test.py b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser_test.py
index 436d86b0592c..bc09840cac6c 100644
--- a/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser_test.py
+++ b/Packs/Mattermost/Scripts/MattermostAskUser/MattermostAskUser_test.py
@@ -36,11 +36,10 @@ def test_MattermostAskUser(mocker):
from MattermostAskUser import main
mocker.patch.object(demisto, 'args', return_value={'message': 'message', 'persistent': 'persistent',
'replyEntriesTag': 'replyEntriesTag',
- 'option1': {'no'}, 'option2': {'yes'},
- 'task': 'none', 'user': {'emai'}})
+ 'task': '1', 'user': {'email'}, 'lifetime': 'test'})
execute_command_add_entitlement_res = [{'Type': EntryType.NOTE, 'Contents': 'some-guid'}]
execute_command_send_notification_res = [{'Type': EntryType.NOTE, 'HumanReadable':
- 'Message sent to Slack successfully.'
+ 'Message sent to Mattermost successfully.'
' \nThread ID is: 1660645689.649679'}]
execute_mock = mocker.patch.object(demisto, 'executeCommand', side_effect=[execute_command_add_entitlement_res,
execute_command_send_notification_res])
diff --git a/Packs/Mattermost/Scripts/MattermostAskUser/README.md b/Packs/Mattermost/Scripts/MattermostAskUser/README.md
index 5e8c3ba409ea..367e42b8c72c 100644
--- a/Packs/Mattermost/Scripts/MattermostAskUser/README.md
+++ b/Packs/Mattermost/Scripts/MattermostAskUser/README.md
@@ -1,32 +1,40 @@
-Asks a user a question on `Mattermost` and expects a response. The response can also close a task, (this can be conditional) in a playbook.
+Ask a user a question on Mattermost and expect a response. The response can also close a task (might be conditional) in a playbook.
## Script Data
+
---
| **Name** | **Description** |
| --- | --- |
-| Script Type | python |
+| Script Type | python3 |
| Tags | mattermost |
-
+| Cortex XSOAR Version | 5.0.0 |
## Dependencies
+
---
This script uses the following commands and scripts.
+
* send-notification
## Inputs
+
---
| **Argument Name** | **Description** |
| --- | --- |
-| user | The Mattermost user to ask. Can be, "email" or "mattermost username". |
+| user | The Mattermost user to ask. Can be either an email or Mattermost username. |
| message | The message to ask the user. |
-| option1 | The first option for a user to reply. The default is "yes". |
-| option2 | The second option for the user reply. The default is "no". |
-| task | Whether the task should close with the reply. If "none" then no playbook tasks will be closed. |
-| replyEntriesTag | The tag to add on to the email reply entries. |
-| persistent | Whether to use a one-time entitlement or a persistent one. |
+| option1 | First option for a user reply. "yes" is the default. |
+| option2 | Second option for the user reply. "no" is the default. |
+| task | The task to close with the reply. If none, no playbook tasks will be closed. |
+| replyEntriesTag | Tag to add on email reply entries. |
+| persistent | Indicates whether to use one-time entitlement or a persistent one. |
+| reply | The reply to send to the user. Use the templates \{user\} and \{response\} to incorporate these in the reply. \(i.e., "Thank you \*\*\{user\}\*\*. You have answered \*\*\{response\}\*\*."\). |
+| lifetime | Time until the question expires. For example - 1 day. When it expires, a default response is sent. Default value is 1 day. |
+| default_response | Default response in case the question expires. |
## Outputs
+
---
There are no outputs for this script.
diff --git a/Packs/Mattermost/TestPlaybooks/MattermostAskUser_testplaybook.yml b/Packs/Mattermost/TestPlaybooks/MattermostAskUser_testplaybook.yml
new file mode 100644
index 000000000000..90c252562112
--- /dev/null
+++ b/Packs/Mattermost/TestPlaybooks/MattermostAskUser_testplaybook.yml
@@ -0,0 +1,85 @@
+id: MattermostAskUser_testplaybook
+version: -1
+name: MattermostAskUser_testplaybook
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: 004b674b-0db6-442f-8789-916397c9f3b4
+ type: start
+ task:
+ id: 004b674b-0db6-442f-8789-916397c9f3b4
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 50
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: 4b08bfc8-201b-4983-84c5-d6fb85c45ac1
+ type: regular
+ task:
+ id: 4b08bfc8-201b-4983-84c5-d6fb85c45ac1
+ version: -1
+ name: MattermostAskUser
+ description: Ask a user a question on Mattermost and expect a response. The response can also close a task (might be conditional) in a playbook.
+ scriptName: MattermostAskUser
+ type: regular
+ iscommand: false
+ brand: ""
+ scriptarguments:
+ message:
+ simple: test mattermostAskUser
+ user:
+ simple: demistodev
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 210
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 255,
+ "width": 380,
+ "x": 450,
+ "y": 50
+ }
+ }
+ }
+inputs: []
+outputs: []
+fromversion: 6.10.0
+description: ''
diff --git a/Packs/Mattermost/TestPlaybooks/Mattermost_V2_-_testplaybook.yml b/Packs/Mattermost/TestPlaybooks/Mattermost_V2_-_testplaybook.yml
new file mode 100644
index 000000000000..d30d59e3625b
--- /dev/null
+++ b/Packs/Mattermost/TestPlaybooks/Mattermost_V2_-_testplaybook.yml
@@ -0,0 +1,477 @@
+id: MattermostV2 - testplaybook
+version: -1
+name: MattermostV2 - testplaybook
+starttaskid: "0"
+tasks:
+ "0":
+ id: "0"
+ taskid: b9c0e612-09b8-4e05-80e1-498c7f3cfd30
+ type: start
+ task:
+ id: b9c0e612-09b8-4e05-80e1-498c7f3cfd30
+ version: -1
+ name: ""
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "1"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": -90
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "1":
+ id: "1"
+ taskid: d468b666-e9fa-4214-8a61-26afa164437e
+ type: regular
+ task:
+ id: d468b666-e9fa-4214-8a61-26afa164437e
+ version: -1
+ name: mattermost-get-team
+ description: Gets a team details.
+ script: MattermostV2|||mattermost-get-team
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ team_name:
+ simple: PANW
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 60
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "2":
+ id: "2"
+ taskid: fdc038b7-61e6-4c28-8a40-3fda07fa21fe
+ type: regular
+ task:
+ id: fdc038b7-61e6-4c28-8a40-3fda07fa21fe
+ version: -1
+ name: mattermost-create-channel
+ description: Creates a channel.
+ script: MattermostV2|||mattermost-create-channel
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ display_name:
+ simple: channel_test_playbook_${RandomString}
+ name:
+ simple: channel_test_playbook_${RandomString}
+ team:
+ simple: ${Mattermost.Team.name}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 380
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: 271aef1c-3a4e-481e-891f-1a8cb952352b
+ type: condition
+ task:
+ id: 271aef1c-3a4e-481e-891f-1a8cb952352b
+ version: -1
+ name: Check list-channels
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "5"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: Mattermost.Channel
+ iscontext: true
+ right:
+ value: {}
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 760
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 02af1a20-c1dc-44a5-84f4-e225f484a34e
+ type: regular
+ task:
+ id: 02af1a20-c1dc-44a5-84f4-e225f484a34e
+ version: -1
+ name: mattermost-list-users
+ description: Lists users.
+ script: MattermostV2|||mattermost-list-users
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "7"
+ scriptarguments:
+ limit:
+ simple: "1"
+ team_name:
+ simple: ${Mattermost.Team.name}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 990
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: 71ba0d16-6003-46b2-89c8-36ec0b812f25
+ type: regular
+ task:
+ id: 71ba0d16-6003-46b2-89c8-36ec0b812f25
+ version: -1
+ name: mattermost-add-channel-member
+ description: Adds a channel member.
+ script: MattermostV2|||mattermost-add-channel-member
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ channel:
+ simple: channel_test_playbook_${RandomString}
+ team:
+ simple: ${Mattermost.Team.name}
+ user_id:
+ simple: ${Mattermost.User.id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1180
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: f8265688-c1fa-4e1b-8b1e-81046a010b15
+ type: regular
+ task:
+ id: f8265688-c1fa-4e1b-8b1e-81046a010b15
+ version: -1
+ name: mattermost-remove-channel-member
+ description: Removes a channel member.
+ script: MattermostV2|||mattermost-remove-channel-member
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ channel:
+ simple: channel_test_playbook_${RandomString}
+ team:
+ simple: ${Mattermost.Team.name}
+ user_id:
+ simple: ${Mattermost.User.id}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: c6d57e48-cb3d-4c7d-8c40-c60d6bdf9b58
+ type: regular
+ task:
+ id: c6d57e48-cb3d-4c7d-8c40-c60d6bdf9b58
+ version: -1
+ name: mattermost-close-channel
+ description: Deletes a channel.
+ script: MattermostV2|||mattermost-close-channel
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ scriptarguments:
+ channel:
+ simple: ${Mattermost.Channel.name}
+ channel_name:
+ simple: channel_test_playbook_${RandomString}
+ team_name:
+ simple: ${Mattermost.Team.name}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1930
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: 61463f88-bea8-40f9-830e-1970e186433c
+ type: regular
+ task:
+ id: 61463f88-bea8-40f9-830e-1970e186433c
+ version: -1
+ name: mattermost-list-channels
+ description: Lists channels.
+ script: MattermostV2|||mattermost-list-channels
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "4"
+ scriptarguments:
+ limit:
+ simple: "2"
+ team:
+ simple: ${Mattermost.Team.name}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 560
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: ea7afcbe-22f3-4f68-87bb-7e5ae53a120d
+ type: regular
+ task:
+ id: ea7afcbe-22f3-4f68-87bb-7e5ae53a120d
+ version: -1
+ name: FileCreateAndUploadV2
+ description: |
+ Creates a file (using the given data input or entry ID) and uploads it to the current investigation War Room.
+ scriptName: FileCreateAndUploadV2
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ data:
+ simple: "123"
+ filename:
+ simple: MattermostV2test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1570
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: c73cc8f1-7dcf-4ec3-8545-f7d442ce47eb
+ type: regular
+ task:
+ id: c73cc8f1-7dcf-4ec3-8545-f7d442ce47eb
+ version: -1
+ name: mattermost-send-file
+      description: Sends a file to a channel.
+ script: MattermostV2|||mattermost-send-file
+ type: regular
+ iscommand: true
+ brand: MattermostV2
+ nexttasks:
+ '#none#':
+ - "9"
+ scriptarguments:
+ channel:
+ simple: ${Mattermost.Channel.name}
+ channel_name:
+ simple: ${Mattermost.Channel.name}
+ entry_id:
+ simple: ${File.EntryID}
+ message:
+ simple: test message with file
+ team_name:
+ simple: ${Mattermost.Team.name}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 1750
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 6c361b08-ebc7-4ce9-8313-deaf4c862a74
+ type: regular
+ task:
+ id: 6c361b08-ebc7-4ce9-8313-deaf4c862a74
+ version: -1
+ name: GenerateRandomString
+      description: Generates a random string.
+ scriptName: GenerateRandomString
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "2"
+ scriptarguments:
+ Digits:
+ simple: "True"
+ Length:
+ simple: "4"
+ Lowercase:
+ simple: "True"
+ Punctuation:
+ simple: "False"
+ Uppercase:
+ simple: "False"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 450,
+ "y": 220
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+view: |-
+ {
+ "linkLabelsPosition": {},
+ "paper": {
+ "dimensions": {
+ "height": 2115,
+ "width": 380,
+ "x": 450,
+ "y": -90
+ }
+ }
+ }
+inputs: []
+outputs: []
+fromversion: 6.10.0
+description: ''
diff --git a/Packs/Mattermost/pack_metadata.json b/Packs/Mattermost/pack_metadata.json
index 6f2e256e7ca5..fc610bccdae6 100644
--- a/Packs/Mattermost/pack_metadata.json
+++ b/Packs/Mattermost/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Mattermost",
"description": "Send messages and notifications to your Mattermost Team.",
"support": "xsoar",
- "currentVersion": "1.0.7",
+ "currentVersion": "2.0.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -14,7 +14,6 @@
"useCases": [],
"keywords": [],
"marketplaces": [
- "xsoar",
- "marketplacev2"
+ "xsoar_on_prem"
]
}
\ No newline at end of file
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.py b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.py
index b52f24a7b7a7..9178ff29d676 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.py
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.py
@@ -52,7 +52,7 @@ def __init__(self, app_id: str, verify: bool, proxy: bool, base_url: str = BASE_
certificate_thumbprint=certificate_thumbprint,
private_key=private_key,
managed_identities_client_id=managed_identities_client_id,
- managed_identities_resource_uri=Resources.security_center,
+ managed_identities_resource_uri=Resources.security,
command_prefix="microsoft-365-defender",
)
self.ms_client = MicrosoftClient(**client_args) # type: ignore
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.yml b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.yml
index f875021a0c91..dac1b536347e 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.yml
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender.yml
@@ -382,7 +382,7 @@ script:
type: string
- contextPath: Microsoft365Defender.Incident.alerts
description: List of alerts relevant for the incidents.
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
isfetch: true
script: ''
subtype: python3
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_dark.svg b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_dark.svg
index a77ab2f7a65c..3e29e1cfa5ce 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_dark.svg
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_dark.svg
@@ -1,18 +1,22 @@
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_description.md b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_description.md
index 44639e1732b7..cb3765cf4dee 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_description.md
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_description.md
@@ -1,5 +1,5 @@
-## Methods to Authenticate Microsoft 365 Defender
-You can use the following methods to authenticate Microsoft 365 Defender.
+## Methods to Authenticate Microsoft Defender XDR
+You can use the following methods to authenticate Microsoft Defender XDR.
- Device Code Flow
- Client Credentials Flow
- Azure Managed Identities
@@ -8,9 +8,9 @@ You can use the following methods to authenticate Microsoft 365 Defender.
___
Use the [device code flow](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#device-code-flow)
-to link Microsoft 365 Defender with Cortex XSOAR.
+to link Microsoft Defender XDR with Cortex XSOAR.
-To connect to the Microsoft 365 Defender:
+To connect to Microsoft Defender XDR:
1. Fill in the required parameters.
2. Run the ***!microsoft-365-defender-auth-start*** command.
3. Follow the instructions that appear.
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_image.png b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_image.png
index 51221d4e64ed..71b10075dc2a 100644
Binary files a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_image.png and b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_image.png differ
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_light.svg b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_light.svg
index 913acc6d5b92..04fcc2c0aa79 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_light.svg
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_light.svg
@@ -1,18 +1,22 @@
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_test.py b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_test.py
index e91789396a2a..f66ac3083f2f 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_test.py
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/Microsoft365Defender_test.py
@@ -183,5 +183,5 @@ def test_test_module_command_with_managed_identities(mocker, requests_mock, clie
assert 'ok' in Microsoft365Defender.return_results.call_args[0][0]
qs = get_mock.last_request.qs
- assert qs['resource'] == [Resources.security_center]
+ assert qs['resource'] == [Resources.security]
assert client_id and qs['client_id'] == [client_id] or 'client_id' not in qs
diff --git a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/README.md b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/README.md
index bc638eedb97c..88adb6777995 100644
--- a/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/README.md
+++ b/Packs/Microsoft365Defender/Integrations/Microsoft365Defender/README.md
@@ -1,12 +1,12 @@
-Microsoft 365 Defender is a unified pre- and post-breach enterprise defense suite that natively coordinates detection,
+Microsoft Defender XDR is a unified pre- and post-breach enterprise defense suite that natively coordinates detection,
prevention, investigation, and response across endpoints, identities, email, and applications to provide integrated
protection against sophisticated attacks.
## Authentication Using the Device Code Flow
Use the [device code flow](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication#device-code-flow)
-to link Microsoft 365 Defender with Cortex XSOAR.
+to link Microsoft Defender XDR with Cortex XSOAR.
-To connect to the Microsoft 365 Defender:
+To connect to Microsoft Defender XDR:
1. Fill in the required parameters.
2. Run the ***!microsoft-365-defender-auth-start*** command.
3. Follow the instructions that appear.
@@ -45,10 +45,10 @@ Follow these steps for a self-deployed configuration:
* AdvancedHunting.Read.All - Application
* Incident.ReadWrite.All - Application
-## Configure Microsoft 365 Defender on Cortex XSOAR
+## Configure Microsoft Defender XDR on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
-2. Search for Microsoft 365 Defender.
+2. Search for Microsoft Defender XDR.
3. Click **Add instance** to create and configure a new integration instance.
| **Parameter** | **Description** | **Required** |
@@ -166,7 +166,7 @@ There is no context output for this command.
### microsoft-365-defender-auth-test
***
-Tests the connectivity to the Microsoft 365 Defender.
+Tests the connectivity to the Microsoft Defender XDR.
#### Base Command
@@ -320,7 +320,7 @@ Update the incident with the given ID.
### microsoft-365-defender-advanced-hunting
***
-Advanced hunting is a threat-hunting tool that uses specially constructed queries to examine the past 30 days of event data in Microsoft 365 Defender.
+Advanced hunting is a threat-hunting tool that uses specially constructed queries to examine the past 30 days of event data in Microsoft Defender XDR.
Details on how to write queries you can find [here](https://docs.microsoft.com/en-us/microsoft-365/security/defender/advanced-hunting-query-language?view=o365-worldwide).
#### Base Command
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks.yml b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks.yml
index d6aec1ab30e1..609c8bba6e31 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks.yml
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks.yml
@@ -265,13 +265,13 @@ script:
description: "The date and time the Safe Links policy was modified. Time format: YYYY-MM-DDThh:mm:ss+00:00."
type: Date
- contextPath: O365Defender.SafeLinks.Policy.WhenChangedUTC
- description: "The date and time (in UTC) the Safe Links policy was modified. Time format: YYYY-MM-DDTHH:MM:SSZ"
+ description: "The date and time (in UTC) the Safe Links policy was modified. Time format: YYYY-MM-DDTHH:MM:SSZ."
type: Date
- contextPath: O365Defender.SafeLinks.Policy.WhenCreated
description: "The date and time the Safe Links policy was created. Time format: YYYY-MM-DDThh:mm:ss+00:00."
type: Date
- contextPath: O365Defender.SafeLinks.Policy.WhenCreatedUTC
- description: "The date and time (in UTC) the Safe Links policy was created. Time format: YYYY-MM-DDTHH:MM:SSZ"
+ description: "The date and time (in UTC) the Safe Links policy was created. Time format: YYYY-MM-DDTHH:MM:SSZ."
type: Date
description: Create a new Safe Links policy.
- name: o365-defender-safelinks-policy-update
@@ -819,10 +819,10 @@ script:
description: ""
type: Unknown
- contextPath: O365Defender.SafeLinks.DetailedReport.Data.Flags
- description: "0: Allowed 1: Blocked 2: ClickedEvenBlocked 3: ClickedDuringScan"
+ description: "0: Allowed 1: Blocked 2: ClickedEvenBlocked 3: ClickedDuringScan."
type: Number
- contextPath: O365Defender.SafeLinks.DetailedReport.ReportId
- description: "The report id, unique for every run"
+ description: "The report id, unique for every run."
type: Number
- name: o365-defender-safelinks-aggregate-report-get
description: general information about Safe Links results for the last 90 days. Yesterday is the most recent date that you can specify.
@@ -883,7 +883,7 @@ script:
description: Number of recipients of the link.
type: Number
- contextPath: O365Defender.SafeLinks.AggregateReport.ReportId
- description: "The report id, unique for every run"
+ description: "The report id, unique for every run."
type: Number
- name: o365-defender-safelinks-atp-policy-get
description: Get APT policy. In order to manage BlockURLs, use the o365-defender-safelinks-global-url-blocklist command.
@@ -925,7 +925,7 @@ script:
- "true"
- "false"
- name: enable_atp_spo_teams_odb
- description: Enable or disable O365 Defender for SharePoint, OneDrive, and Microsoft Teams
+ description: Enable or disable O365 Defender for SharePoint, OneDrive, and Microsoft Teams.
auto: PREDEFINED
predefined:
- "true"
@@ -939,7 +939,7 @@ script:
runonce: false
script: "-"
type: powershell
- dockerimage: demisto/pwsh-exchangev3:1.0.0.80547
+ dockerimage: demisto/pwsh-exchangev3:1.0.0.88371
fromversion: 6.0.0
tests:
- No Test
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks_description.md b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks_description.md
index 97d1606590ab..de924ff002dd 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks_description.md
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/O365DefenderSafeLinks_description.md
@@ -12,8 +12,8 @@ To use this integration, you need to add a new Azure App Registration in the Azu
Office 365 Exchange Online -> Exchange.ManageAsApp - Application
* To create, modify, and delete Safe Links policies, you need to be a member of the `Organization Management` or `Security Administrator` role groups.
-* To manage permissions in the Microsoft 365 Defender portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft 365 Defender portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft 365 Defender portal, and by default, that role is assigned only to the Organization Management role group.
-* See [Permissions in the Microsoft 365 Defender portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide) for more information.
+* To manage permissions in the Microsoft Defender XDR portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft Defender XDR portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft Defender XDR portal, and by default, that role is assigned only to the Organization Management role group.
+* See [Permissions in the Microsoft Defender XDR portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide) for more information.
-------
##### Note
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/README.md b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/README.md
index ac052b762106..c1b6a183e2a3 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/README.md
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinks/README.md
@@ -8,7 +8,7 @@ ___
* In the Azure Application, give the following application permission:
* Office 365 Exchange Online -> Exchange.ManageAsApp - Application
* To create, modify, and delete Safe Links policies, or use any of the report commands (detailed or aggregate report), you need to be a member of the `Organization Management` or `Security Administrator` role groups.
-* To manage permissions in the Microsoft 365 Defender portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft 365 Defender portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft 365 Defender portal, and by default, that role is assigned only to the Organization Management role group. See [Permissions in the Microsoft 365 Defender portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide)
+* To manage permissions in the Microsoft Defender XDR portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft Defender XDR portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft Defender XDR portal, and by default, that role is assigned only to the Organization Management role group. See [Permissions in the Microsoft Defender XDR portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide)
## Configure O365 Defender SafeLinks on Cortex XSOAR
@@ -37,7 +37,7 @@ The basic elements of a Safe Links policy are:
**The safe links policy**: Turn on Safe Links protection, turn on real-time URL scanning, specify whether to wait for real-time scanning to complete before delivering the message, turn on scanning for internal messages, specify whether to track user clicks on URLs, and specify whether to allow users to click trough to the original URL.
**The safe links rule**: Specifies the priority and recipient filters (who the policy applies to).
-The difference between these two elements isn't obvious when you manage Safe Links policies in the Microsoft 365 Defender portal:
+The difference between these two elements isn't obvious when you manage Safe Links policies in the Microsoft Defender XDR portal:
When you create a Safe Links policy, you're actually creating a safe links rule and the associated safe links policy at the same time using the same name for both.
When you modify a Safe Links policy, settings related to the name, priority, enabled or disabled, and recipient filters modify the safe links rule. All other settings modify the associated safe links policy.
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser.yml b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser.yml
index ca89917c26fb..c28222e5a2ce 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser.yml
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser.yml
@@ -24,8 +24,8 @@ configuration:
section: Connect
advanced: true
required: false
-description: Enables URL scanning, rewriting inbound email messages in the mail flow, time-of-click URL verification, and links in email messages and other locations.
-display: O365 Defender SafeLinks - Single User
+description: Deprecated. Use O365 Defender SafeLinks instead. Enables URL scanning, rewriting inbound email messages in the mail flow, time-of-click URL verification, and links in email messages and other locations.
+display: O365 Defender SafeLinks - Single User (Deprecated)
name: O365 Defender SafeLinks - Single User
script:
commands:
@@ -960,6 +960,7 @@ script:
script: '-'
type: powershell
dockerimage: demisto/powershell-ubuntu:7.4.1.86201
+deprecated: true
fromversion: 6.0.0
tests:
- No Test
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser_description.md b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser_description.md
index 358bdeb8a3a7..5131dae598c1 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser_description.md
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/O365DefenderSafeLinksSingleUser_description.md
@@ -11,5 +11,5 @@
#### Required Permissions
* To create, modify, and delete Safe Links policies, you need to be a member of the **Organization Management** or **Security Administrator** role groups.
-* To manage permissions in the Microsoft 365 Defender portal, go to **Permissions & roles** or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft 365 Defender portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft 365 Defender portal. By default, that role is assigned only to the Organization Management role group.
-See [Permissions in the Microsoft 365 Defender portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide) for more information.
+* To manage permissions in the Microsoft Defender XDR portal, go to **Permissions & roles** or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft Defender XDR portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft Defender XDR portal. By default, that role is assigned only to the Organization Management role group.
+See [Permissions in the Microsoft Defender XDR portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide) for more information.
diff --git a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/README.md b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/README.md
index a9646d88c002..5e9c89b53632 100644
--- a/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/README.md
+++ b/Packs/Microsoft365Defender/Integrations/O365DefenderSafeLinksSingleUser/README.md
@@ -6,7 +6,7 @@ This integration was integrated and tested with Exchange Online PowerShell V1 mo
### Required Permissions
___
* To create, modify, and delete Safe Links policies, or use any of the report commands (detailed or aggregate report), you need to be a member of the `Organization Management` or `Security Administrator` role groups.
-* To manage permissions in the Microsoft 365 Defender portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft 365 Defender portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft 365 Defender portal, and by default, that role is assigned only to the Organization Management role group. See [Permissions in the Microsoft 365 Defender portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide)
+* To manage permissions in the Microsoft Defender XDR portal, go to `Permissions & roles` or https://security.microsoft.com/securitypermissions. You need to be a global administrator or a member of the Organization Management role group in the Microsoft Defender XDR portal. Specifically, the Role Management role allows users to view, create, and modify role groups in the Microsoft Defender XDR portal, and by default, that role is assigned only to the Organization Management role group. See [Permissions in the Microsoft Defender XDR portal](https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/permissions-microsoft-365-security-center?view=o365-worldwide)
## Configure O365 Defender SafeLinks - Single User on Cortex XSOAR
@@ -30,7 +30,7 @@ The basic elements of a Safe Links policy are:
**The safe links policy**: Turn on Safe Links protection, turn on real-time URL scanning, specify whether to wait for real-time scanning to complete before delivering the message, turn on scanning for internal messages, specify whether to track user clicks on URLs, and specify whether to allow users to click trough to the original URL.
**The safe links rule**: Specifies the priority and recipient filters (who the policy applies to).
-The difference between these two elements isn't obvious when you manage Safe Links policies in the Microsoft 365 Defender portal:
+The difference between these two elements isn't obvious when you manage Safe Links policies in the Microsoft Defender XDR portal:
When you create a Safe Links policy, you're actually creating a safe links rule and the associated safe links policy at the same time using the same name for both.
When you modify a Safe Links policy, settings related to the name, priority, enabled or disabled, and recipient filters modify the safe links rule. All other settings modify the associated safe links policy.
diff --git a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Emails_Indicators_Hunt_README.md b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Emails_Indicators_Hunt_README.md
index a7297a9c2b92..9e456e0858ea 100644
--- a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Emails_Indicators_Hunt_README.md
+++ b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Emails_Indicators_Hunt_README.md
@@ -64,4 +64,4 @@ This playbook does not use any sub-playbooks.
## Playbook Image
---
-![Microsoft 365 Defender - Emails Indicators Hunt](../doc_files/Microsoft_365_Defender_-_Emails_Indicators_Hunt.png)
\ No newline at end of file
+![Microsoft Defender XDR - Emails Indicators Hunt](../doc_files/Microsoft_365_Defender_-_Emails_Indicators_Hunt.png)
\ No newline at end of file
diff --git a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Get_Email_URL_Clicks_README.md b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Get_Email_URL_Clicks_README.md
index ea33442e27ef..bd1432a5b3be 100644
--- a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Get_Email_URL_Clicks_README.md
+++ b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Get_Email_URL_Clicks_README.md
@@ -1,4 +1,4 @@
-This playbook retrieves email data based on the `URLDomain` and `MessageID` inputs. It uses the Microsoft 365 Defender's Advanced Hunting to search only for URL click events based on the playbook inputs and enriches it with the full email data.
+This playbook retrieves email data based on the `URLDomain` and `MessageID` inputs. It uses the Microsoft Defender XDR's Advanced Hunting to search only for URL click events based on the playbook inputs and enriches it with the full email data.
**URLDomain** - If the “URLDomain” value is found as a substring of the URL(s) in the body of the email, the email is retrieved.
@@ -76,4 +76,4 @@ This playbook does not use any sub-playbooks.
## Playbook Image
---
-![Microsoft 365 Defender - Get Email URL Clicks](../doc_files/Microsoft_365_Defender_-_Get_Email_URL_Clicks.png)
\ No newline at end of file
+![Microsoft Defender XDR - Get Email URL Clicks](../doc_files/Microsoft_365_Defender_-_Get_Email_URL_Clicks.png)
\ No newline at end of file
diff --git a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Threat_Hunting_Generic_README.md b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Threat_Hunting_Generic_README.md
index 3e214f28912c..64c5e8f56023 100644
--- a/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Threat_Hunting_Generic_README.md
+++ b/Packs/Microsoft365Defender/Playbooks/playbook-Microsoft_365_Defender_-_Threat_Hunting_Generic_README.md
@@ -83,4 +83,4 @@ This playbook does not use any commands.
## Playbook Image
---
-![Microsoft 365 Defender - Threat Hunting Generic](../doc_files/Microsoft_365_Defender_-_Threat_Hunting_Generic.png)
+![Microsoft Defender XDR - Threat Hunting Generic](../doc_files/Microsoft_365_Defender_-_Threat_Hunting_Generic.png)
diff --git a/Packs/Microsoft365Defender/README.md b/Packs/Microsoft365Defender/README.md
index 822d255787b2..7b9b75636d61 100644
--- a/Packs/Microsoft365Defender/README.md
+++ b/Packs/Microsoft365Defender/README.md
@@ -1,4 +1,4 @@
-With the Microsoft 365 Defender content pack, you can determine how a threat entered your environment and what part of your organization is affected.
+With the Microsoft Defender XDR content pack, you can determine how a threat entered your environment and what part of your organization is affected.
## What does this pack do?
diff --git a/Packs/Microsoft365Defender/ReleaseNotes/4_5_24.md b/Packs/Microsoft365Defender/ReleaseNotes/4_5_24.md
new file mode 100644
index 000000000000..22a2c7df6938
--- /dev/null
+++ b/Packs/Microsoft365Defender/ReleaseNotes/4_5_24.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft 365 Defender
+
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Fixed an issue where the wrong endpoint was accessed when using the *Azure Managed Identities* authentication flow.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
diff --git a/Packs/Microsoft365Defender/ReleaseNotes/4_5_25.md b/Packs/Microsoft365Defender/ReleaseNotes/4_5_25.md
new file mode 100644
index 000000000000..b280ad17b9f5
--- /dev/null
+++ b/Packs/Microsoft365Defender/ReleaseNotes/4_5_25.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft 365 Defender
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/Microsoft365Defender/ReleaseNotes/4_5_26.md b/Packs/Microsoft365Defender/ReleaseNotes/4_5_26.md
new file mode 100644
index 000000000000..df4f234036a8
--- /dev/null
+++ b/Packs/Microsoft365Defender/ReleaseNotes/4_5_26.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+##### Microsoft 365 Defender
+Rebranded **Microsoft 365 Defender** to **Microsoft Defender XDR**.
+
+##### O365 Defender SafeLinks
+- Rebranded **Microsoft 365 Defender** to **Microsoft Defender XDR**.
+- Updated the Docker image to: *demisto/pwsh-exchangev3:1.0.0.88371*.
+
+##### O365 Defender SafeLinks - Single User
+Rebranded **Microsoft 365 Defender** to **Microsoft Defender XDR**.
diff --git a/Packs/Microsoft365Defender/ReleaseNotes/4_5_27.md b/Packs/Microsoft365Defender/ReleaseNotes/4_5_27.md
new file mode 100644
index 000000000000..d6aeeeab44db
--- /dev/null
+++ b/Packs/Microsoft365Defender/ReleaseNotes/4_5_27.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### O365 Defender SafeLinks - Single User (Deprecated)
+
+Deprecated. Use O365 Defender SafeLinks instead.
diff --git a/Packs/Microsoft365Defender/pack_metadata.json b/Packs/Microsoft365Defender/pack_metadata.json
index 2601eece7e21..917e2db1c544 100644
--- a/Packs/Microsoft365Defender/pack_metadata.json
+++ b/Packs/Microsoft365Defender/pack_metadata.json
@@ -1,8 +1,8 @@
{
"name": "Microsoft 365 Defender",
- "description": "Microsoft 365 Defender is a unified pre- and post-breach enterprise defense suite that natively coordinates detection, prevention, investigation, and response across endpoints, identities, email, and applications to provide integrated protection against sophisticated attacks.",
+ "description": "Microsoft Defender XDR (formerly Microsoft 365 Defender) is a unified pre- and post-breach enterprise defense suite that natively coordinates detection, prevention, investigation, and response across endpoints, identities, email, and applications to provide integrated protection against sophisticated attacks.",
"support": "xsoar",
- "currentVersion": "4.5.23",
+ "currentVersion": "4.5.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -11,7 +11,9 @@
],
"tags": [],
"useCases": [],
- "keywords": [],
+ "keywords": [
+ "Microsoft Defender XDR"
+ ],
"dependencies": {},
"marketplaces": [
"xsoar",
diff --git a/Packs/MicrosoftCloudAppSecurity/Integrations/MicrosoftDefenderEventCollector/README.md b/Packs/MicrosoftCloudAppSecurity/Integrations/MicrosoftDefenderEventCollector/README.md
index b4f2b32ced8f..173f7b56cb18 100644
--- a/Packs/MicrosoftCloudAppSecurity/Integrations/MicrosoftDefenderEventCollector/README.md
+++ b/Packs/MicrosoftCloudAppSecurity/Integrations/MicrosoftDefenderEventCollector/README.md
@@ -1,5 +1,7 @@
Microsoft Defender for Cloud Apps Event Collector integration.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Microsoft Defender for Cloud Apps Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_61.md b/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_61.md
new file mode 100644
index 000000000000..6e5168440e67
--- /dev/null
+++ b/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_61.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud Apps Event Collector
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Defender for Cloud Apps
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_62.md b/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_62.md
new file mode 100644
index 000000000000..326c9d1df075
--- /dev/null
+++ b/Packs/MicrosoftCloudAppSecurity/ReleaseNotes/2_1_62.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft Defender for Cloud Apps
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
+
+##### Microsoft Defender for Cloud Apps Event Collector
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftCloudAppSecurity/pack_metadata.json b/Packs/MicrosoftCloudAppSecurity/pack_metadata.json
index 700429cc5793..c25b66383c5c 100644
--- a/Packs/MicrosoftCloudAppSecurity/pack_metadata.json
+++ b/Packs/MicrosoftCloudAppSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Defender for Cloud Apps",
"description": "Microsoft Cloud App Security Integration, a Cloud Access Security Broker that supports various deployment modes",
"support": "xsoar",
- "currentVersion": "2.1.60",
+ "currentVersion": "2.1.62",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Microsoft Defender for Cloud Apps Event Collector"
}
\ No newline at end of file
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/Microsoft365DefenderEventCollector/README.md b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/Microsoft365DefenderEventCollector/README.md
index 20c64d32c867..f6784c6549ee 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/Microsoft365DefenderEventCollector/README.md
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/Microsoft365DefenderEventCollector/README.md
@@ -1,5 +1,7 @@
Microsoft 365 Defender event collector integration for Cortex XSIAM.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Microsoft 365 Defender Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.py b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.py
index b75ff2a4b1f2..4fb7f68f1be8 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.py
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.py
@@ -1993,7 +1993,7 @@ def create_action(self, machine_id, request_body, overwrite_rate_limit_retry=Fal
def download_file(self, url_link):
try:
- response = requests.get(url=url_link, verify=self.ms_client.verify)
+ response = requests.get(url=url_link, verify=self.ms_client.verify, timeout=300)
except Exception as e:
raise Exception(f'Could not download file. {url_link=}. error: {str(e)}')
return response
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.yml b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.yml
index 36445ff76fa1..c1b0602e61ed 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.yml
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection.yml
@@ -5534,7 +5534,7 @@ script:
execution: false
name: microsoft-atp-auth-reset
arguments: []
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection_test.py b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection_test.py
index 259cfc8ba841..e0dd890ebe2e 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection_test.py
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/Integrations/MicrosoftDefenderAdvancedThreatProtection/MicrosoftDefenderAdvancedThreatProtection_test.py
@@ -2892,6 +2892,6 @@ def test_generate_login_url(mocker):
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20' \
'https://securitycenter.onmicrosoft.com/windowsatpservice/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = MicrosoftDefenderAdvancedThreatProtection.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_32.md b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_32.md
new file mode 100644
index 000000000000..785be59711e6
--- /dev/null
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_32.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### Microsoft Defender for Endpoint Event Collector
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Defender for Endpoint
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_33.md b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_33.md
new file mode 100644
index 000000000000..1f142baf07c5
--- /dev/null
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_33.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Defender for Endpoint
+
+- Fixed an issue with the timeout in the **download_file** function.
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_34.md b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_34.md
new file mode 100644
index 000000000000..d50274e9ec22
--- /dev/null
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/ReleaseNotes/1_16_34.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft Defender for Endpoint Event Collector
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
+
+##### Microsoft Defender for Endpoint
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/Test_Playbook_-_MDE_-_Retrieve_File.yml b/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/Test_Playbook_-_MDE_-_Retrieve_File.yml
index cf96369c4afa..5a35e4dd8594 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/Test_Playbook_-_MDE_-_Retrieve_File.yml
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/Test_Playbook_-_MDE_-_Retrieve_File.yml
@@ -12,10 +12,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 21896868-8531-4ae5-8666-cea52014a9ec
+ taskid: 217f0253-cd71-48fb-8b59-2e49f1b39d3b
type: start
task:
- id: 21896868-8531-4ae5-8666-cea52014a9ec
+ id: 217f0253-cd71-48fb-8b59-2e49f1b39d3b
version: -1
name: ""
iscommand: false
@@ -30,7 +30,7 @@ tasks:
{
"position": {
"x": -140,
- "y": -2175
+ "y": -2345
}
}
note: false
@@ -42,10 +42,10 @@ tasks:
isautoswitchedtoquietmode: false
"32":
id: "32"
- taskid: 4dbfaa04-78aa-482f-800b-e0ed5b5dd204
+ taskid: 3ce5da91-838b-4bd6-8938-3a0f802d8988
type: regular
task:
- id: 4dbfaa04-78aa-482f-800b-e0ed5b5dd204
+ id: 3ce5da91-838b-4bd6-8938-3a0f802d8988
version: -1
name: Delete Context
description: The task deletes all of the context data. Having a clean beginning to a test playbook ensures that a test can be sterile and that unrelated issues can be eliminated.
@@ -55,7 +55,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "286"
+ - "288"
scriptarguments:
all:
simple: "yes"
@@ -65,7 +65,7 @@ tasks:
{
"position": {
"x": -140,
- "y": -2045
+ "y": -2215
}
}
note: false
@@ -77,10 +77,10 @@ tasks:
isautoswitchedtoquietmode: false
"84":
id: "84"
- taskid: 4de8eaba-01aa-4a61-85df-65de61567633
+ taskid: 8921b137-fbcb-4a88-81c1-9a42b03d215e
type: title
task:
- id: 4de8eaba-01aa-4a61-85df-65de61567633
+ id: 8921b137-fbcb-4a88-81c1-9a42b03d215e
version: -1
name: Start Testing
type: title
@@ -106,94 +106,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "246":
- id: "246"
- taskid: dbeba00b-20ba-45cf-8042-0c9ddd6cad52
- type: regular
- task:
- id: dbeba00b-20ba-45cf-8042-0c9ddd6cad52
- version: -1
- name: Get MDE Available Alerts
- description: Retrieves a collection of alerts related to the SHA1 of 'taskhostw.exe' and 'svc.exe' common processes.
- script: Microsoft Defender Advanced Threat Protection|||microsoft-atp-get-file-alerts
- type: regular
- iscommand: true
- brand: Microsoft Defender Advanced Threat Protection
- nexttasks:
- '#none#':
- - "248"
- scriptarguments:
- file_hash:
- complex:
- root: incident
- accessor: filesha1
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -140,
- "y": -1720
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "248":
- id: "248"
- taskid: 65300448-711b-4bc6-8ec8-2691c350d913
- type: condition
- task:
- id: 65300448-711b-4bc6-8ec8-2691c350d913
- version: -1
- name: Check MDE Available Alerts & Related Files
- description: Checks if there are available alerts and related files for testing processes.
- type: condition
- iscommand: false
- brand: ""
- nexttasks:
- '#default#':
- - "285"
- "yes":
- - "290"
- separatecontext: false
- conditions:
- - label: "yes"
- condition:
- - - operator: isNotEmpty
- left:
- value:
- complex:
- root: MicrosoftATP.FileAlert.Alerts.Evidence
- accessor: filePath
- iscontext: true
- right:
- value: {}
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -140,
- "y": -1560
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"249":
id: "249"
- taskid: f7bc72bc-824f-493b-8fa3-7afddfba028e
+ taskid: 397af96b-90ed-4b1b-8cd0-61092c259f87
type: regular
task:
- id: f7bc72bc-824f-493b-8fa3-7afddfba028e
+ id: 397af96b-90ed-4b1b-8cd0-61092c259f87
version: -1
name: Verify Playbook Output Error - Extracted Files
description: Prints an error entry with a given message
@@ -225,10 +143,10 @@ tasks:
isautoswitchedtoquietmode: false
"250":
id: "250"
- taskid: f97ec921-a3d8-472a-8b05-3679cb95cf60
+ taskid: cc53f2cb-69a4-46be-82a5-6018d1984202
type: condition
task:
- id: f97ec921-a3d8-472a-8b05-3679cb95cf60
+ id: cc53f2cb-69a4-46be-82a5-6018d1984202
version: -1
name: Verify Extracted Files
description: |
@@ -276,10 +194,10 @@ tasks:
isautoswitchedtoquietmode: false
"255":
id: "255"
- taskid: aaa17405-11a3-4b8f-8ae4-feb2c518cd24
+ taskid: 3558fa0d-ead6-4011-8c3c-4ed1c6f38aef
type: title
task:
- id: aaa17405-11a3-4b8f-8ae4-feb2c518cd24
+ id: 3558fa0d-ead6-4011-8c3c-4ed1c6f38aef
version: -1
name: Check Context Data
type: title
@@ -329,10 +247,10 @@ tasks:
isautoswitchedtoquietmode: false
"257":
id: "257"
- taskid: 0b99697b-2595-4f6f-8e0e-a1d0ff2b6b5d
+ taskid: f49d404f-6ef0-418d-8a85-0f8b92e06441
type: condition
task:
- id: 0b99697b-2595-4f6f-8e0e-a1d0ff2b6b5d
+ id: f49d404f-6ef0-418d-8a85-0f8b92e06441
version: -1
name: Verify Cancellation Date Time UTC
description: |
@@ -373,10 +291,10 @@ tasks:
isautoswitchedtoquietmode: false
"259":
id: "259"
- taskid: adae7779-8b35-4d95-855f-a88533bd3973
+ taskid: ee17379a-8504-4c50-87fc-171ad329af47
type: condition
task:
- id: adae7779-8b35-4d95-855f-a88533bd3973
+ id: ee17379a-8504-4c50-87fc-171ad329af47
version: -1
name: Verify Cancellation Comment
description: |
@@ -417,10 +335,10 @@ tasks:
isautoswitchedtoquietmode: false
"261":
id: "261"
- taskid: e6b2e9b1-1488-468e-8de3-40c9decc211a
+ taskid: eaf885a5-4b50-4403-8633-88a22ad7f889
type: condition
task:
- id: e6b2e9b1-1488-468e-8de3-40c9decc211a
+ id: eaf885a5-4b50-4403-8633-88a22ad7f889
version: -1
name: Verify Cancellation Requestor
description: |
@@ -461,10 +379,10 @@ tasks:
isautoswitchedtoquietmode: false
"263":
id: "263"
- taskid: 65537d8d-053d-4e92-8e0b-5e1b82bfdfdc
+ taskid: 50cd9e90-dd00-40dc-810b-397c7f6e14e5
type: condition
task:
- id: 65537d8d-053d-4e92-8e0b-5e1b82bfdfdc
+ id: 50cd9e90-dd00-40dc-810b-397c7f6e14e5
version: -1
name: Verify Last Update Date Time UTC
description: |
@@ -507,10 +425,10 @@ tasks:
isautoswitchedtoquietmode: false
"265":
id: "265"
- taskid: d6e01094-8029-449a-889c-95b142e5d6b0
+ taskid: 54f4e353-e48a-4e65-8d48-8c55e25e73ab
type: condition
task:
- id: d6e01094-8029-449a-889c-95b142e5d6b0
+ id: 54f4e353-e48a-4e65-8d48-8c55e25e73ab
version: -1
name: Verify Creation Date Time UTC
description: |
@@ -553,10 +471,10 @@ tasks:
isautoswitchedtoquietmode: false
"267":
id: "267"
- taskid: 55535581-192f-4135-8126-d3ccb2bc1f65
+ taskid: bb2c0883-4a64-4408-8b11-a4bdc7b20ddb
type: condition
task:
- id: 55535581-192f-4135-8126-d3ccb2bc1f65
+ id: bb2c0883-4a64-4408-8b11-a4bdc7b20ddb
version: -1
name: Verify Computer DNS Name
description: |
@@ -599,10 +517,10 @@ tasks:
isautoswitchedtoquietmode: false
"269":
id: "269"
- taskid: af47adc5-8bff-49b4-8b1c-94383fc2dfb5
+ taskid: 6355a87e-4fa4-40d9-887c-29534f7a914d
type: condition
task:
- id: af47adc5-8bff-49b4-8b1c-94383fc2dfb5
+ id: 6355a87e-4fa4-40d9-887c-29534f7a914d
version: -1
name: Verify Machine ID
description: |
@@ -649,10 +567,10 @@ tasks:
isautoswitchedtoquietmode: false
"271":
id: "271"
- taskid: b0d6c00c-4d58-43b2-8c0f-4f5a3ee2db7a
+ taskid: c356c217-c6b6-4233-869a-23687a7df477
type: condition
task:
- id: b0d6c00c-4d58-43b2-8c0f-4f5a3ee2db7a
+ id: c356c217-c6b6-4233-869a-23687a7df477
version: -1
name: Verify Status
description: |
@@ -697,10 +615,10 @@ tasks:
isautoswitchedtoquietmode: false
"273":
id: "273"
- taskid: e8c2197f-42ea-4170-88e2-2d65d765f85a
+ taskid: b68bea93-d5f1-495f-86d5-91a9ff85310d
type: condition
task:
- id: e8c2197f-42ea-4170-88e2-2d65d765f85a
+ id: b68bea93-d5f1-495f-86d5-91a9ff85310d
version: -1
name: Verify Requestor Comment
description: |
@@ -743,10 +661,10 @@ tasks:
isautoswitchedtoquietmode: false
"275":
id: "275"
- taskid: 7965c159-558d-413e-8a5a-c20520169fad
+ taskid: 268c7919-408f-4308-8101-f515920870e7
type: condition
task:
- id: 7965c159-558d-413e-8a5a-c20520169fad
+ id: 268c7919-408f-4308-8101-f515920870e7
version: -1
name: Verify Requestor
description: |
@@ -789,10 +707,10 @@ tasks:
isautoswitchedtoquietmode: false
"279":
id: "279"
- taskid: 7d05fdaa-0cfc-4dcb-8365-2735324fc05b
+ taskid: dff79de2-94d8-4ed1-80c3-e19fd7168141
type: condition
task:
- id: 7d05fdaa-0cfc-4dcb-8365-2735324fc05b
+ id: dff79de2-94d8-4ed1-80c3-e19fd7168141
version: -1
name: Verify Type
description: |
@@ -835,10 +753,10 @@ tasks:
isautoswitchedtoquietmode: false
"280":
id: "280"
- taskid: 3ba539d7-84f9-402e-8797-33f28eeb546b
+ taskid: eeaed0e9-d4e7-4188-8d97-07fbd3029f19
type: regular
task:
- id: 3ba539d7-84f9-402e-8797-33f28eeb546b
+ id: eeaed0e9-d4e7-4188-8d97-07fbd3029f19
version: -1
name: Verify Context Error - ID
description: Prints an error entry with a given message
@@ -870,10 +788,10 @@ tasks:
isautoswitchedtoquietmode: false
"281":
id: "281"
- taskid: f1c22c3b-d8a0-4449-885d-093ed1221e59
+ taskid: 67aa147e-fd1d-430b-83da-4a86f45c07f7
type: condition
task:
- id: f1c22c3b-d8a0-4449-885d-093ed1221e59
+ id: 67aa147e-fd1d-430b-83da-4a86f45c07f7
version: -1
name: Verify ID
description: |
@@ -890,7 +808,7 @@ tasks:
conditions:
- label: ' Verified'
condition:
- - - operator: isEqualString
+ - - operator: containsGeneral
left:
value:
complex:
@@ -919,94 +837,32 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "285":
- id: "285"
- taskid: e0bb8dbb-a847-4029-8830-9f16edd0ed14
- type: title
- task:
- id: e0bb8dbb-a847-4029-8830-9f16edd0ed14
- version: -1
- name: Tests cannot be performed
- type: title
- iscommand: false
- brand: ""
- description: ''
- nexttasks:
- '#none#':
- - "289"
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": 250,
- "y": -1390
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
- "286":
- id: "286"
- taskid: bbd2db51-924a-496e-8e6d-d03dc9ddf92e
- type: regular
- task:
- id: bbd2db51-924a-496e-8e6d-d03dc9ddf92e
- version: -1
- name: Set SHA1 To Incident Field
- description: Publish the SHA1 of 'taskhostw.exe' and 'svc.exe' common processes in the 'incident.filesha1' incident field so that alerts relating to these files can be identified later.
- script: Builtin|||setIncident
- type: regular
- iscommand: true
- brand: Builtin
- nexttasks:
- '#none#':
- - "246"
- scriptarguments:
- filesha1:
- simple: '["6d9d0be989c8383c06b279a71f770edad617af27", "a1385ce20ad79f55df235effd9780c31442aa234"]'
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -140,
- "y": -1880
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"287":
id: "287"
- taskid: 453faecc-6a17-42d1-88ed-234a702ff29a
+ taskid: 40508e9b-9849-4bd1-8899-4bd5a71a8890
type: playbook
task:
- id: 453faecc-6a17-42d1-88ed-234a702ff29a
+ id: 40508e9b-9849-4bd1-8899-4bd5a71a8890
version: -1
name: MDE - Retrieve File
description: |-
- This playbook is part of the 'Malware Investigation And Response' pack. For more information, refer to https://xsoar.pan.dev/docs/reference/packs/malware-investigation-and-response.
- This playbook uses the Live Response feature to retrieve a file from an endpoint./nNote that the endpoint id will be set from the incident field "DeviceID".
+ This playbook is part of the 'Malware Investigation And Response' pack. For more information, refer to https://xsoar.pan.dev/docs/reference/packs/malware-investigation-and-response.
+ This playbook uses the Live Response feature to retrieve a file from an endpoint. The playbook supports a supplied machine id as an input. Otherwise, it will take the Device ID incident field.
+ The playbook supports only one element to be retrieved for each task (if more than one is needed - use the playbook loop feature).
playbookName: MDE - Retrieve File
type: playbook
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "84"
+ - "332"
scriptarguments:
paths:
+ simple: C:\Windows\System32\taskhostw.exe
+ MachineID:
complex:
- root: zipped_list
+ root: incident
+ accessor: deviceid
separatecontext: false
continueonerrortype: ""
loop:
@@ -1018,7 +874,7 @@ tasks:
{
"position": {
"x": -140,
- "y": -1060
+ "y": -1890
}
}
note: false
@@ -1030,10 +886,10 @@ tasks:
isautoswitchedtoquietmode: false
"288":
id: "288"
- taskid: 2623d7d2-2ced-4571-81b5-5a829b8c7c8e
+ taskid: d05dd61d-b027-4e25-8cb2-f6d3b1537fe3
type: regular
task:
- id: 2623d7d2-2ced-4571-81b5-5a829b8c7c8e
+ id: d05dd61d-b027-4e25-8cb2-f6d3b1537fe3
version: -1
name: Set Device ID To Incident Field
description: Change the properties of an incident
@@ -1046,23 +902,14 @@ tasks:
- "287"
scriptarguments:
deviceid:
- complex:
- root: MicrosoftATP.FileAlert.Alerts.MachineID
- filters:
- - - operator: isNotEmpty
- left:
- value:
- simple: MicrosoftATP.FileAlert.Alerts.MachineID
- iscontext: true
- transformers:
- - operator: FirstArrayElement
+ simple: 4cceb3c642212014e0e9553aa8b59e999ea515ff
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": -140,
- "y": -1225
+ "y": -2055
}
}
note: false
@@ -1074,10 +921,10 @@ tasks:
isautoswitchedtoquietmode: false
"289":
id: "289"
- taskid: 16da57ac-b698-488a-82bb-01649d64ab42
+ taskid: 4e9ea99b-ecd5-4008-8a36-bd2d42b0a2e7
type: title
task:
- id: 16da57ac-b698-488a-82bb-01649d64ab42
+ id: 4e9ea99b-ecd5-4008-8a36-bd2d42b0a2e7
version: -1
name: Done
type: title
@@ -1100,59 +947,12 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "290":
- id: "290"
- taskid: 63c7c980-eba3-42d5-8bb5-889e200c05ca
- type: regular
- task:
- id: 63c7c980-eba3-42d5-8bb5-889e200c05ca
- version: -1
- name: Join File Paths and File Names
- description: Joins values from two lists by index according to a given format.
- scriptName: ZipStrings
- type: regular
- iscommand: false
- brand: ""
- nexttasks:
- '#none#':
- - "288"
- scriptarguments:
- format:
- simple: '{1}\{2}'
- list1:
- complex:
- root: MicrosoftATP.FileAlert.Alerts.Evidence
- accessor: filePath
- transformers:
- - operator: FirstArrayElement
- list2:
- complex:
- root: MicrosoftATP.FileAlert.Alerts.Evidence
- accessor: fileName
- transformers:
- - operator: FirstArrayElement
- separatecontext: false
- continueonerrortype: ""
- view: |-
- {
- "position": {
- "x": -140,
- "y": -1390
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- isoversize: false
- isautoswitchedtoquietmode: false
"291":
id: "291"
- taskid: 8adcf714-e189-455f-85ef-a3b73017fc85
+ taskid: 38049694-2efd-4f46-8d1e-6d06cfc2f8f3
type: regular
task:
- id: 8adcf714-e189-455f-85ef-a3b73017fc85
+ id: 38049694-2efd-4f46-8d1e-6d06cfc2f8f3
version: -1
name: Verify Context Error - Type
description: Prints an error entry with a given message
@@ -1183,10 +983,10 @@ tasks:
isautoswitchedtoquietmode: false
"293":
id: "293"
- taskid: b3005e83-a8dc-4a7d-8af5-81c2bd05ea06
+ taskid: 5dfce3d7-aa24-47b2-8a9f-a432562234b9
type: regular
task:
- id: b3005e83-a8dc-4a7d-8af5-81c2bd05ea06
+ id: 5dfce3d7-aa24-47b2-8a9f-a432562234b9
version: -1
name: Verify Context Error - Requestor
description: Prints an error entry with a given message
@@ -1217,10 +1017,10 @@ tasks:
isautoswitchedtoquietmode: false
"294":
id: "294"
- taskid: 9689502e-43b4-446d-84f9-9a79c5a0926e
+ taskid: 306f4cfd-0d30-4b59-848d-296a0436a46a
type: regular
task:
- id: 9689502e-43b4-446d-84f9-9a79c5a0926e
+ id: 306f4cfd-0d30-4b59-848d-296a0436a46a
version: -1
name: Verify Context Error - Requestor Comment
description: Prints an error entry with a given message
@@ -1251,10 +1051,10 @@ tasks:
isautoswitchedtoquietmode: false
"295":
id: "295"
- taskid: 58cb2043-9c3c-4505-8247-bda18aaa6aeb
+ taskid: f6cff1b8-2571-42a1-8243-1e7e388ccc3b
type: regular
task:
- id: 58cb2043-9c3c-4505-8247-bda18aaa6aeb
+ id: f6cff1b8-2571-42a1-8243-1e7e388ccc3b
version: -1
name: Verify Context Error - Status
description: Prints an error entry with a given message
@@ -1285,10 +1085,10 @@ tasks:
isautoswitchedtoquietmode: false
"296":
id: "296"
- taskid: 41ac61f9-25b4-4718-8e16-1d78375b9418
+ taskid: ff68def4-e123-462c-8da4-63ec5173a460
type: regular
task:
- id: 41ac61f9-25b4-4718-8e16-1d78375b9418
+ id: ff68def4-e123-462c-8da4-63ec5173a460
version: -1
name: Verify Context Error - Machine ID
description: Prints an error entry with a given message
@@ -1319,10 +1119,10 @@ tasks:
isautoswitchedtoquietmode: false
"297":
id: "297"
- taskid: 12a18cb9-a76a-40e2-8099-2653d7e63516
+ taskid: 1dc6c39c-a3ac-4c70-8f86-f20905f59ccc
type: regular
task:
- id: 12a18cb9-a76a-40e2-8099-2653d7e63516
+ id: 1dc6c39c-a3ac-4c70-8f86-f20905f59ccc
version: -1
name: Verify Context Error - Computer DNS Name
description: Prints an error entry with a given message
@@ -1353,10 +1153,10 @@ tasks:
isautoswitchedtoquietmode: false
"298":
id: "298"
- taskid: c4a2f16c-551a-4aa0-83a5-c97cfb63c2a2
+ taskid: 0fca13c6-b848-4364-8fbc-1e475ef29452
type: regular
task:
- id: c4a2f16c-551a-4aa0-83a5-c97cfb63c2a2
+ id: 0fca13c6-b848-4364-8fbc-1e475ef29452
version: -1
name: Verify Context Error - Creation Date Time UTC
description: Prints an error entry with a given message
@@ -1387,10 +1187,10 @@ tasks:
isautoswitchedtoquietmode: false
"299":
id: "299"
- taskid: 6cdf206f-1c3b-4078-8681-89b462edf418
+ taskid: cbe84506-3bf4-496d-8b3a-654c9f09d565
type: regular
task:
- id: 6cdf206f-1c3b-4078-8681-89b462edf418
+ id: cbe84506-3bf4-496d-8b3a-654c9f09d565
version: -1
name: Verify Context Error - Last Update Date Time UTC
description: Prints an error entry with a given message
@@ -1421,10 +1221,10 @@ tasks:
isautoswitchedtoquietmode: false
"300":
id: "300"
- taskid: 37ce13f5-f25a-4759-8230-47ef6a210677
+ taskid: b595ee96-0d26-4289-8144-91731feb9870
type: regular
task:
- id: 37ce13f5-f25a-4759-8230-47ef6a210677
+ id: b595ee96-0d26-4289-8144-91731feb9870
version: -1
name: Verify Context Error - Cancellation Requestor
description: Prints an error entry with a given message
@@ -1455,10 +1255,10 @@ tasks:
isautoswitchedtoquietmode: false
"301":
id: "301"
- taskid: b5b064ff-344b-4fa9-8323-e1b47c5bd690
+ taskid: 2b4728d4-e1ef-4991-8e0c-d6253561a908
type: regular
task:
- id: b5b064ff-344b-4fa9-8323-e1b47c5bd690
+ id: 2b4728d4-e1ef-4991-8e0c-d6253561a908
version: -1
name: Verify Context Error - Cancellation Comment
description: Prints an error entry with a given message
@@ -1489,10 +1289,10 @@ tasks:
isautoswitchedtoquietmode: false
"302":
id: "302"
- taskid: bc65480c-5440-4ac1-84bb-99835fa4d550
+ taskid: 40f1bb55-d50e-4e51-8f3e-55b282347e14
type: regular
task:
- id: bc65480c-5440-4ac1-84bb-99835fa4d550
+ id: 40f1bb55-d50e-4e51-8f3e-55b282347e14
version: -1
name: Verify Context Error - Cancellation Date Time UTC
description: Prints an error entry with a given message
@@ -1523,10 +1323,10 @@ tasks:
isautoswitchedtoquietmode: false
"303":
id: "303"
- taskid: a476583c-152c-4a8e-895c-6f90b2a39f7b
+ taskid: 657c8549-2b9c-4746-8f93-4de1ef4e3d86
type: condition
task:
- id: a476583c-152c-4a8e-895c-6f90b2a39f7b
+ id: 657c8549-2b9c-4746-8f93-4de1ef4e3d86
version: -1
name: Verify Error Result
description: |
@@ -1570,10 +1370,10 @@ tasks:
isautoswitchedtoquietmode: false
"304":
id: "304"
- taskid: f24f0f53-919a-457c-8936-833ffe633e78
+ taskid: 917e8110-e279-48e7-8e28-9921f4a65df3
type: regular
task:
- id: f24f0f53-919a-457c-8936-833ffe633e78
+ id: 917e8110-e279-48e7-8e28-9921f4a65df3
version: -1
name: Verify Context Error - Error Result
description: Prints an error entry with a given message
@@ -1604,10 +1404,10 @@ tasks:
isautoswitchedtoquietmode: false
"309":
id: "309"
- taskid: 65282322-7a2a-4ee2-8dee-083eefd9d0a1
+ taskid: 75d01912-5cab-4653-82a2-369ac81abd4e
type: condition
task:
- id: 65282322-7a2a-4ee2-8dee-083eefd9d0a1
+ id: 75d01912-5cab-4653-82a2-369ac81abd4e
version: -1
name: Verify Request Source
description: |
@@ -1652,10 +1452,10 @@ tasks:
isautoswitchedtoquietmode: false
"310":
id: "310"
- taskid: 9a5a127b-c25e-4b7d-8c25-8e275511a461
+ taskid: 003036fc-5632-4a6f-8e9d-e4f0882731ec
type: regular
task:
- id: 9a5a127b-c25e-4b7d-8c25-8e275511a461
+ id: 003036fc-5632-4a6f-8e9d-e4f0882731ec
version: -1
name: Verify Context Error - Request Source
description: Prints an error entry with a given message
@@ -1686,10 +1486,10 @@ tasks:
isautoswitchedtoquietmode: false
"313":
id: "313"
- taskid: 83c411c5-53e9-4d88-8b81-7b468f6f035d
+ taskid: a56c6f71-2ff8-40a3-81dc-2faa51e480d7
type: condition
task:
- id: 83c411c5-53e9-4d88-8b81-7b468f6f035d
+ id: a56c6f71-2ff8-40a3-81dc-2faa51e480d7
version: -1
name: Verify Troubleshoot Info
description: |
@@ -1730,10 +1530,10 @@ tasks:
isautoswitchedtoquietmode: false
"314":
id: "314"
- taskid: 24ed6d53-8175-4bb5-823d-d63e729f36eb
+ taskid: b91fd6bb-0a25-477f-89b9-18bd5acb00c8
type: regular
task:
- id: 24ed6d53-8175-4bb5-823d-d63e729f36eb
+ id: b91fd6bb-0a25-477f-89b9-18bd5acb00c8
version: -1
name: Verify Context Error - Troubleshoot Info
description: Prints an error entry with a given message
@@ -1764,10 +1564,10 @@ tasks:
isautoswitchedtoquietmode: false
"315":
id: "315"
- taskid: 3db4af0a-a785-4188-86ec-e42ee61c2509
+ taskid: 7dd7faba-8930-41fb-89e2-48ca9e73b749
type: condition
task:
- id: 3db4af0a-a785-4188-86ec-e42ee61c2509
+ id: 7dd7faba-8930-41fb-89e2-48ca9e73b749
version: -1
name: Verify Commands Index
description: |
@@ -1808,10 +1608,10 @@ tasks:
isautoswitchedtoquietmode: false
"316":
id: "316"
- taskid: 1e6333c9-ac15-4cb3-82ba-588b642922e0
+ taskid: ac10107a-28e8-4fdd-83bc-c58fcb3533cf
type: regular
task:
- id: 1e6333c9-ac15-4cb3-82ba-588b642922e0
+ id: ac10107a-28e8-4fdd-83bc-c58fcb3533cf
version: -1
name: Verify Context Error - Commands Index
description: Prints an error entry with a given message
@@ -1842,10 +1642,10 @@ tasks:
isautoswitchedtoquietmode: false
"319":
id: "319"
- taskid: dff8235e-e04b-4687-83e7-a875d9ebbf96
+ taskid: f82635a8-0b9b-4173-854a-8e56eec4c54b
type: condition
task:
- id: dff8235e-e04b-4687-83e7-a875d9ebbf96
+ id: f82635a8-0b9b-4173-854a-8e56eec4c54b
version: -1
name: Verify Commands End Time
description: |
@@ -1886,10 +1686,10 @@ tasks:
isautoswitchedtoquietmode: false
"320":
id: "320"
- taskid: f8de9921-208f-4d91-8f2a-0f9a3339ce6b
+ taskid: c6985310-cd79-43e3-8775-51d68a05a833
type: regular
task:
- id: f8de9921-208f-4d91-8f2a-0f9a3339ce6b
+ id: c6985310-cd79-43e3-8775-51d68a05a833
version: -1
name: Verify Context Error - Commands End Time
description: Prints an error entry with a given message
@@ -1920,10 +1720,10 @@ tasks:
isautoswitchedtoquietmode: false
"321":
id: "321"
- taskid: a3056615-b210-4f85-86d8-3118cea352bb
+ taskid: da1872c2-0a71-498c-863f-d6030a556eda
type: condition
task:
- id: a3056615-b210-4f85-86d8-3118cea352bb
+ id: da1872c2-0a71-498c-863f-d6030a556eda
version: -1
name: Verify Commands Status
description: |
@@ -1968,10 +1768,10 @@ tasks:
isautoswitchedtoquietmode: false
"322":
id: "322"
- taskid: 0b18aa13-7cbe-4f5a-8ff8-5a3554e217fe
+ taskid: 61edcc18-e45e-4100-8030-0cb7bcb4d826
type: regular
task:
- id: 0b18aa13-7cbe-4f5a-8ff8-5a3554e217fe
+ id: 61edcc18-e45e-4100-8030-0cb7bcb4d826
version: -1
name: Verify Context Error - Commands Status
description: Prints an error entry with a given message
@@ -2002,10 +1802,10 @@ tasks:
isautoswitchedtoquietmode: false
"323":
id: "323"
- taskid: 3e99f9a2-760c-4137-852b-88231b283e51
+ taskid: 127ad472-1b96-46ac-8bfc-e1578c561143
type: condition
task:
- id: 3e99f9a2-760c-4137-852b-88231b283e51
+ id: 127ad472-1b96-46ac-8bfc-e1578c561143
version: -1
name: Verify Commands Error
description: |
@@ -2046,10 +1846,10 @@ tasks:
isautoswitchedtoquietmode: false
"324":
id: "324"
- taskid: 66511bcd-953f-4b1e-8c49-ff4841e4e934
+ taskid: 82bcd6ed-55ee-46a7-8a5e-565dfe8b8b48
type: regular
task:
- id: 66511bcd-953f-4b1e-8c49-ff4841e4e934
+ id: 82bcd6ed-55ee-46a7-8a5e-565dfe8b8b48
version: -1
name: Verify Context Error - Commands Error
description: Prints an error entry with a given message
@@ -2080,10 +1880,10 @@ tasks:
isautoswitchedtoquietmode: false
"325":
id: "325"
- taskid: cf9702f7-3162-4a5e-849d-fdb8d78bd5eb
+ taskid: 58a57397-e8c5-4687-859e-ecb26e6435ca
type: condition
task:
- id: cf9702f7-3162-4a5e-849d-fdb8d78bd5eb
+ id: 58a57397-e8c5-4687-859e-ecb26e6435ca
version: -1
name: Verify Command Type
description: |
@@ -2125,10 +1925,10 @@ tasks:
isautoswitchedtoquietmode: false
"326":
id: "326"
- taskid: ec91765d-4c27-49b5-8e71-5d0b933bc44f
+ taskid: d1643373-7191-4a30-8da8-22f719e5a155
type: regular
task:
- id: ec91765d-4c27-49b5-8e71-5d0b933bc44f
+ id: d1643373-7191-4a30-8da8-22f719e5a155
version: -1
name: Verify Context Error - Command Type
description: Prints an error entry with a given message
@@ -2159,10 +1959,10 @@ tasks:
isautoswitchedtoquietmode: false
"327":
id: "327"
- taskid: 6a7ab130-d7c5-43ae-8680-b804b1e74da1
+ taskid: 10ca92e4-aac7-4a8f-8aaf-f6ec9033e929
type: condition
task:
- id: 6a7ab130-d7c5-43ae-8680-b804b1e74da1
+ id: 10ca92e4-aac7-4a8f-8aaf-f6ec9033e929
version: -1
name: Verify Commands Params Key
description: |
@@ -2207,10 +2007,10 @@ tasks:
isautoswitchedtoquietmode: false
"328":
id: "328"
- taskid: 92f48efc-7a3a-4cb1-8f17-868aa2f320cb
+ taskid: f6114a43-02d6-4049-8d17-468b43a26506
type: regular
task:
- id: 92f48efc-7a3a-4cb1-8f17-868aa2f320cb
+ id: f6114a43-02d6-4049-8d17-468b43a26506
version: -1
name: Verify Context Error - Commands Params Key
description: Prints an error entry with a given message
@@ -2241,10 +2041,10 @@ tasks:
isautoswitchedtoquietmode: false
"329":
id: "329"
- taskid: ec918ad1-1a84-42cb-824d-4f8fd352c087
+ taskid: 60fd2b9e-b1d7-4d0a-83a2-4c2b8dd490db
type: condition
task:
- id: ec918ad1-1a84-42cb-824d-4f8fd352c087
+ id: 60fd2b9e-b1d7-4d0a-83a2-4c2b8dd490db
version: -1
name: Verify Commands Params Value
description: |
@@ -2261,18 +2061,13 @@ tasks:
conditions:
- label: ' Verified'
condition:
- - - operator: isEqualString
+ - - operator: isNotEmpty
left:
value:
complex:
root: MicrosoftATP.LiveResponseAction.commands.command.params
accessor: value
iscontext: true
- right:
- value:
- complex:
- root: zipped_list
- iscontext: true
ignorecase: true
continueonerrortype: ""
view: |-
@@ -2291,10 +2086,10 @@ tasks:
isautoswitchedtoquietmode: false
"330":
id: "330"
- taskid: 014b118f-1689-46f2-8c4a-5a4cd2e4ae1c
+ taskid: e1c95fc8-693a-4b7c-886b-bc32070fab74
type: regular
task:
- id: 014b118f-1689-46f2-8c4a-5a4cd2e4ae1c
+ id: e1c95fc8-693a-4b7c-886b-bc32070fab74
version: -1
name: Verify Context Error - Commands Params Value
description: Prints an error entry with a given message
@@ -2323,18 +2118,238 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
+ "332":
+ id: "332"
+ taskid: 302392ce-d584-4167-8f92-e5fe1b7dad2b
+ type: regular
+ task:
+ id: 302392ce-d584-4167-8f92-e5fe1b7dad2b
+ version: -1
+ name: Get Error Entries
+ description: Collect entries matching to the conditions in the war room
+ scriptName: GetEntries
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "333"
+ scriptarguments:
+ categories:
+ simple: playbookErrors
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": -1725
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "333":
+ id: "333"
+ taskid: 5c2f901b-fa01-429b-8246-5d0807db7dbb
+ type: regular
+ task:
+ id: 5c2f901b-fa01-429b-8246-5d0807db7dbb
+ version: -1
+ name: Get Errors Content
+ description: Get the error(s) associated with a given entry/entries. Use ${lastCompletedTaskEntries} to check the previous task entries. The automation will return an array of the error contents from those entries.
+ scriptName: GetErrorsFromEntry
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "334"
+ scriptarguments:
+ entry_id:
+ complex:
+ root: Entry
+ accessor: ID
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": -1565
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "334":
+ id: "334"
+ taskid: 6b7fdaae-9e67-4ae1-8af5-529ca260f069
+ type: condition
+ task:
+ id: 6b7fdaae-9e67-4ae1-8af5-529ca260f069
+ version: -1
+ name: Has 404 API Error?
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#default#':
+ - "84"
+ "yes":
+ - "335"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: containsString
+ left:
+ value:
+ complex:
+ root: ErrorEntries
+ iscontext: true
+ right:
+ value:
+ simple: ActiveRequestAlreadyExists
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -140,
+ "y": -1400
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "335":
+ id: "335"
+ taskid: 9c91dc7f-19c6-4cd5-8923-91f11f231aee
+ type: regular
+ task:
+ id: 9c91dc7f-19c6-4cd5-8923-91f11f231aee
+ version: -1
+ name: Cancel Current Action
+ description: Cancels an action with an unfinished status.
+ script: '|||microsoft-atp-live-response-cancel-action'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "336"
+ scriptarguments:
+ comment:
+ simple: Retrieving file - XSOAR PB ${incident.id}
+ machine_action_id:
+ complex:
+ root: ErrorEntries
+ filters:
+ - - operator: containsString
+ left:
+ value:
+ simple: ErrorEntries
+ iscontext: true
+ right:
+ value:
+ simple: ActiveRequestAlreadyExists
+ ignorecase: true
+ transformers:
+ - operator: RegexGroups
+ args:
+ flags: {}
+ groups: {}
+ keys: {}
+ regex:
+ value:
+ simple: action\sid\:\s(.*)\"\,\s
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": -390,
+ "y": -1230
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "336":
+ id: "336"
+ taskid: b7bf5d90-8942-4f38-811a-2549f4045179
+ type: playbook
+ task:
+ id: b7bf5d90-8942-4f38-811a-2549f4045179
+ version: -1
+ name: MDE - Retrieve File
+ description: |-
+ This playbook is part of the ‘Malware Investigation And Response’ pack. For more information, refer to https://xsoar.pan.dev/docs/reference/packs/malware-investigation-and-response.
+ This playbook uses the Live Response feature to retrieve a file from an endpoint. The playbook supports a supplied machine id as an input. Otherwise, it will take the Device ID incident field.
+ The playbook supports only one element to be retrieved for each task (if needed more then one - use the playbook loop feature).
+ playbookName: MDE - Retrieve File
+ type: playbook
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "84"
+ scriptarguments:
+ MachineID:
+ complex:
+ root: incident
+ accessor: deviceid
+ paths:
+ simple: C:\\Users\\Administrator\\Desktop\\test.txt
+ separatecontext: false
+ continueonerrortype: ""
+ loop:
+ iscommand: false
+ exitCondition: ""
+ wait: 1
+ max: 100
+ view: |-
+ {
+ "position": {
+ "x": -390,
+ "y": -1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 1870,
+ "height": 2040,
"width": 9490,
"x": -4900,
- "y": -2175
+ "y": -2345
}
}
}
inputs: []
outputs: []
-fromversion: 6.5.0
+fromversion: 6.5.0
\ No newline at end of file
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/playbook-Microsoft_ATP_indicators_SC_Test.yml b/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/playbook-Microsoft_ATP_indicators_SC_Test.yml
index dd8cd8948721..89b4413c8d70 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/playbook-Microsoft_ATP_indicators_SC_Test.yml
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/TestPlaybooks/playbook-Microsoft_ATP_indicators_SC_Test.yml
@@ -682,7 +682,7 @@ tasks:
- '2'
scriptarguments:
seconds:
- simple: '5'
+ simple: '7'
separatecontext: false
view: |-
{
diff --git a/Packs/MicrosoftDefenderAdvancedThreatProtection/pack_metadata.json b/Packs/MicrosoftDefenderAdvancedThreatProtection/pack_metadata.json
index 263adb854792..9b07e7222dfe 100644
--- a/Packs/MicrosoftDefenderAdvancedThreatProtection/pack_metadata.json
+++ b/Packs/MicrosoftDefenderAdvancedThreatProtection/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Defender for Endpoint",
"description": "Microsoft Defender for Endpoint (previously Microsoft Defender Advanced Threat Protection (ATP)) is a unified platform for preventative protection, post-breach detection, automated investigation, and response.",
"support": "xsoar",
- "currentVersion": "1.16.31",
+ "currentVersion": "1.16.34",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -65,5 +65,6 @@
"MalwareInvestigationAndResponse",
"CommonPlaybooks",
"CommonTypes"
- ]
+ ],
+ "defaultDataSource": "Microsoft 365 Defender Event Collector"
}
\ No newline at end of file
diff --git a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWS-Searchmailbox-Test.yml b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWS-Searchmailbox-Test.yml
index 210c4594bc09..983c84a27c74 100644
--- a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWS-Searchmailbox-Test.yml
+++ b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWS-Searchmailbox-Test.yml
@@ -1,23 +1,23 @@
id: EWS search-mailbox test
version: -1
name: EWS search-mailbox test
-deprecated: true
-description: Deprecated. Playbook is broken.
+description: Test search and move commands for ews and o365.
starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 2879eb8b-9405-463d-8667-9d57579eda52
+ taskid: f37f2377-0308-4b9c-81f0-0a1b2b471c4c
type: start
task:
- id: 2879eb8b-9405-463d-8667-9d57579eda52
+ id: f37f2377-0308-4b9c-81f0-0a1b2b471c4c
version: -1
name: ""
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- - "4"
+ - "35"
separatecontext: false
continueonerrortype: ""
view: |-
@@ -34,302 +34,523 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "1":
- id: "1"
- taskid: 0eacd260-0182-4c45-8f13-acdf8f793f34
+ "3":
+ id: "3"
+ taskid: e01275fd-036b-4e0f-8185-039d334bced3
+ type: title
+ task:
+ id: e01275fd-036b-4e0f-8185-039d334bced3
+ version: -1
+ name: Done
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 4160
+ }
+ }
+ note: false
+ timertriggers: []
+ continueonerrortype: ""
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "5":
+ id: "5"
+ taskid: 60cb5b86-06ed-4876-8dd9-8dec6a9bb6ea
type: regular
task:
- id: 0eacd260-0182-4c45-8f13-acdf8f793f34
+ id: 60cb5b86-06ed-4876-8dd9-8dec6a9bb6ea
version: -1
- name: Search Item In All Directories
- description: Searches for items in the specified mailbox. Specific permissions
- are needed for this operation to search in a target mailbox other than the
- default.
- script: '|||ews-search-mailbox'
+ name: Send Email (ews v2)
+ script: 'EWS v2|||send-mail'
type: regular
iscommand: true
- brand: ""
+ brand: EWS v2
+ description: Sends an email using EWS.
nexttasks:
'#none#':
- - "2"
+ - "6"
scriptarguments:
- folder-path: {}
- is-public: {}
- limit:
- simple: "1"
- query:
- simple: body:384868fafe3ef9190a8a5e59c5c195c
+ body:
+ simple: 384868fafe3ef9190a8a5e59c5c195c
+ from:
+ simple: demistoadmin@demisto.int
+ subject:
+ simple: EWS search-mailbox test
+ to:
+ simple: demistoadmin@demisto.int
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 370
+ "y": 2235
}
}
note: false
timertriggers: []
- "2":
- id: "2"
- taskid: 6d47eb4e-561c-458e-84a8-45515a17b9e8
- type: condition
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 978964c1-17d8-43a2-8ca5-c64ce6bbdfd7
+ type: regular
task:
- id: 6d47eb4e-561c-458e-84a8-45515a17b9e8
+ id: 978964c1-17d8-43a2-8ca5-c64ce6bbdfd7
version: -1
- name: Verify Context
- type: condition
+ name: Wait For Email
+ scriptName: Sleep
+ type: regular
iscommand: false
brand: ""
nexttasks:
- '#default#':
- - "14"
- "yes":
- - "3"
+ '#none#':
+ - "25"
+ scriptarguments:
+ seconds:
+ simple: "60"
separatecontext: false
- conditions:
- - label: "yes"
- condition:
- - - operator: isEqualString
- left:
- value:
- simple: EWS.Items.body
- iscontext: true
- right:
- value:
- simple: 384868fafe3ef9190a8a5e59c5c195c
- - - operator: isExists
- left:
- value:
- simple: EWS.Items.messageId
- iscontext: true
- - - operator: isExists
- left:
- value:
- simple: EWS.Items.author
- iscontext: true
- - - operator: isExists
- left:
- value:
- simple: EWS.Items.subject
- iscontext: true
+ continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 545
+ "y": 2410
}
}
note: false
timertriggers: []
- "3":
- id: "3"
- taskid: 35b0bf02-a656-4fb2-8762-b1d6d0fe9620
- type: title
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 322aa042-ae0e-4a44-8bd5-0a2443b4e27f
+ type: regular
task:
- id: 35b0bf02-a656-4fb2-8762-b1d6d0fe9620
+ id: 322aa042-ae0e-4a44-8bd5-0a2443b4e27f
version: -1
- name: Done
- type: title
+ name: Delete email from inbox
+ script: EWS v2|||ews-delete-items
+ type: regular
+ iscommand: true
+ brand: "EWS v2"
+ description: Delete items from mailbox.
+ nexttasks:
+ '#none#':
+ - "20"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3460
+ }
+ }
+ note: false
+ timertriggers: []
+ continueonerrortype: ""
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: fa38c987-bb3f-4c18-8e53-925deff10b1c
+ type: regular
+ task:
+ id: fa38c987-bb3f-4c18-8e53-925deff10b1c
+ version: -1
+ name: Search Item In Inbox/test
+ script: 'EWS v2|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: "EWS v2"
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ limit:
+ simple: "1"
+ query:
+ simple: body:384868fafe3ef9190a8a5e59c5c195c
+ folder-path:
+ simple: Inbox/Test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3285
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: 34afebc3-ef52-488d-8087-70e5b09ba012
+ type: regular
+ task:
+ id: 34afebc3-ef52-488d-8087-70e5b09ba012
+ version: -1
+ name: Wait
+ scriptName: Sleep
+ type: regular
iscommand: false
brand: ""
+ nexttasks:
+ '#none#':
+ - "19"
+ scriptarguments:
+ seconds:
+ simple: "90"
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 2645
+ "y": 2935
}
}
note: false
timertriggers: []
- "4":
- id: "4"
- taskid: aa408de4-70bf-4400-8f57-0661dc1a06dc
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: ccd1f742-b3db-4a8f-8d7c-c5f752d98f6a
type: regular
task:
- id: aa408de4-70bf-4400-8f57-0661dc1a06dc
+ id: ccd1f742-b3db-4a8f-8d7c-c5f752d98f6a
version: -1
- name: Delete Context
+ name: DeleteContext
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
scriptName: DeleteContext
type: regular
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "1"
+ - "23"
scriptarguments:
all:
simple: "yes"
- index: {}
- key: {}
- keysToKeep: {}
- subplaybook: {}
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
"x": 50,
- "y": 195
+ "y": 340
}
}
note: false
timertriggers: []
- "5":
- id: "5"
- taskid: 82e621c4-177d-444e-8d31-8f659ea8cdb5
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "17":
+ id: "17"
+ taskid: ebea068d-29f8-4775-89b4-33ae345f7b40
type: regular
task:
- id: 82e621c4-177d-444e-8d31-8f659ea8cdb5
+ id: ebea068d-29f8-4775-89b4-33ae345f7b40
version: -1
- name: Send Email
- script: 'EWSO365|||send-mail'
+ name: Search Item In "Sent Items" (o365)
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: EWSO365|||ews-search-mailbox
type: regular
iscommand: true
brand: EWSO365
nexttasks:
'#none#':
- - "6"
+ - "26"
scriptarguments:
- body:
- simple: 384868fafe3ef9190a8a5e59c5c195c
- from:
- simple: testbox@demistodev.onmicrosoft.com
- subject:
- simple: EWS search-mailbox test
- to:
- simple: demistoadmin@demisto.int
+ limit:
+ simple: "1"
+ query:
+ simple: body:384868fafe3ef9190a8a5e59c5c195c
+ folder-path:
+ simple: Sent items
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 895
+ "x": 50,
+ "y": 865
}
}
note: false
timertriggers: []
- "6":
- id: "6"
- taskid: 206a0560-eeb0-4e26-8142-a8dc13d0c9e8
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "18":
+ id: "18"
+ taskid: 641af685-0ccb-42de-81d8-e42f1a1aa93a
type: regular
task:
- id: 206a0560-eeb0-4e26-8142-a8dc13d0c9e8
+ id: 641af685-0ccb-42de-81d8-e42f1a1aa93a
version: -1
- name: Wait For Email
- scriptName: Sleep
+ name: Move (ews v2)
+ description: Move an item to different folder in the mailbox.
+ script: EWS v2|||ews-move-item
+ type: regular
+ iscommand: true
+ brand: EWS v2
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ item-id:
+ simple: ${EWS.Items.itemId}
+ target-folder-path:
+ simple: Inbox/Test
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 2760
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "19":
+ id: "19"
+ taskid: b2114585-1ef6-4e2f-81d5-a733ecc1f8bf
+ type: regular
+ task:
+ id: b2114585-1ef6-4e2f-81d5-a733ecc1f8bf
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
type: regular
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "7"
+ - "11"
scriptarguments:
- seconds:
- simple: "150"
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 3110
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "20":
+ id: "20"
+ taskid: e8c1d094-07e6-4882-88e8-23454a6cb658
+ type: regular
+ task:
+ id: e8c1d094-07e6-4882-88e8-23454a6cb658
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "21"
+ scriptarguments:
+ all:
+ simple: "yes"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1070
+ "x": 50,
+ "y": 3635
}
}
note: false
timertriggers: []
- "7":
- id: "7"
- taskid: eea5b310-ad37-46aa-85f7-a467d33f7577
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "21":
+ id: "21"
+ taskid: 32630e20-8123-49a3-84ed-35c0ef3d5b1e
type: regular
task:
- id: eea5b310-ad37-46aa-85f7-a467d33f7577
+ id: 32630e20-8123-49a3-84ed-35c0ef3d5b1e
version: -1
- name: Search Item In Inbox (ews 0365)
- description: Searches for items in the specified mailbox. Specific permissions
- are needed for this operation to search in a target mailbox other than the
- default.
- script: EWSO365|||ews-search-mailbox
+ name: Search Item In sent-items
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: EWS v2|||ews-search-mailbox
type: regular
iscommand: true
- brand: EWSO365
+ brand: EWS v2
nexttasks:
'#none#':
- - "9"
+ - "22"
scriptarguments:
+ folder-path:
+ simple: Sent Items
limit:
simple: "1"
query:
simple: body:384868fafe3ef9190a8a5e59c5c195c
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1245
+ "x": 50,
+ "y": 3810
}
}
note: false
timertriggers: []
- "8":
- id: "8"
- taskid: 9ec4bc36-4fe5-43ea-8fe1-a6ddd84ac9a0
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "22":
+ id: "22"
+ taskid: 097eb61d-086e-4cb2-8706-fb787c58360c
type: regular
task:
- id: 9ec4bc36-4fe5-43ea-8fe1-a6ddd84ac9a0
+ id: 097eb61d-086e-4cb2-8706-fb787c58360c
version: -1
- name: Verify Context
- script: VerifyContext
+ name: Delete email from sent items
+ description: Delete items from mailbox.
+ script: EWS v2|||ews-delete-items
type: regular
- iscommand: false
- brand: ""
+ iscommand: true
+ brand: EWS v2
nexttasks:
'#none#':
- "3"
scriptarguments:
- fields:
- simple: body,messageId,author,subject
- path:
- simple: EWS.Items
+ delete-type:
+ simple: hard
+ item-ids:
+ simple: ${EWS.Items.itemId}
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 2470
+ "x": 50,
+ "y": 3985
}
}
note: false
timertriggers: []
- "9":
- id: "9"
- taskid: fc5694c5-70de-47ec-85d4-507dc5d69c04
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "23":
+ id: "23"
+ taskid: 0bd79810-9b68-492c-8fc8-602d964e71c6
type: regular
task:
- id: fc5694c5-70de-47ec-85d4-507dc5d69c04
+ id: 0bd79810-9b68-492c-8fc8-602d964e71c6
version: -1
- name: Move (ews o365)
- description: Move an item to different folder in the mailbox.
- script: EWSO365|||ews-move-item
+ name: Send Email (o365)
+ description: Sends an email.
+ script: EWSO365|||send-mail
type: regular
iscommand: true
brand: EWSO365
nexttasks:
'#none#':
- - "10"
+ - "24"
scriptarguments:
- item-id:
- simple: ${EWS.Items.itemId}
- target-folder-path:
- simple: Inbox/TEST
+ body:
+ simple: 384868fafe3ef9190a8a5e59c5c195c
+ from:
+ simple: testbox@demistodev.onmicrosoft.com
+ subject:
+ simple: EWS search-mailbox test
+ to:
+ simple: testbox@demistodev.onmicrosoft.com
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1420
+ "x": 50,
+ "y": 515
}
}
note: false
@@ -339,15 +560,15 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "10":
- id: "10"
- taskid: c73506d8-b8a8-464b-899a-15838e386d77
+ "24":
+ id: "24"
+ taskid: c9aae28e-51ab-4fd5-84f8-229b2dc173b2
type: regular
task:
- id: c73506d8-b8a8-464b-899a-15838e386d77
+ id: c9aae28e-51ab-4fd5-84f8-229b2dc173b2
version: -1
- name: DeleteContext
- scriptName: DeleteContext
+ name: Wait For Email
+ scriptName: Sleep
type: regular
iscommand: false
brand: ""
@@ -355,15 +576,15 @@ tasks:
'#none#':
- "17"
scriptarguments:
- all:
- simple: "yes"
+ seconds:
+ simple: "60"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1595
+ "x": 50,
+ "y": 690
}
}
note: false
@@ -373,33 +594,34 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "11":
- id: "11"
- taskid: 470969b9-86ce-43c0-8e5d-448b83ea9876
+ "25":
+ id: "25"
+ taskid: a58ab584-2b32-43cd-88fc-1d263b2dcd21
type: regular
task:
- id: 470969b9-86ce-43c0-8e5d-448b83ea9876
+ id: a58ab584-2b32-43cd-88fc-1d263b2dcd21
version: -1
- name: Search Item In All Directories
- script: '|||ews-search-mailbox'
+ name: Search Item In Inbox (ews v2)
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: EWS v2|||ews-search-mailbox
type: regular
iscommand: true
- brand: ""
+ brand: EWS v2
nexttasks:
'#none#':
- - "8"
+ - "18"
scriptarguments:
limit:
simple: "1"
query:
- simple: body:384868fafe3ef9190a8a5e59c5c195c3
+ simple: body:384868fafe3ef9190a8a5e59c5c195c
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 2295
+ "x": 50,
+ "y": 2585
}
}
note: false
@@ -409,31 +631,68 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "12":
- id: "12"
- taskid: ea2b979c-60d1-4545-8379-d08353f7acbe
+ "26":
+ id: "26"
+ taskid: 266a3431-ffac-4ce0-8a0f-90c02370f86b
type: regular
task:
- id: ea2b979c-60d1-4545-8379-d08353f7acbe
+ id: 266a3431-ffac-4ce0-8a0f-90c02370f86b
version: -1
- name: Wait
+ name: Move (o365)
+ description: Move an item to different folder in the mailbox.
+ script: EWSO365|||ews-move-item
+ type: regular
+ iscommand: true
+ brand: EWSO365
+ nexttasks:
+ '#none#':
+ - "27"
+ scriptarguments:
+ item-id:
+ simple: ${EWS.Items.itemId}
+ target-folder-path:
+ simple: for_test_playbook-do not delete
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1040
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: 40d5f1ef-6530-458f-8d28-4815f82c481f
+ type: regular
+ task:
+ id: 40d5f1ef-6530-458f-8d28-4815f82c481f
+ version: -1
+ name: Wait 90 seconds
scriptName: Sleep
type: regular
iscommand: false
brand: ""
nexttasks:
'#none#':
- - "11"
+ - "28"
scriptarguments:
seconds:
- simple: "60"
+ simple: "90"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 2120
+ "x": 50,
+ "y": 1215
}
}
note: false
@@ -443,14 +702,14 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "14":
- id: "14"
- taskid: 442cf066-c615-4185-80c2-7a57d6a9a92a
+ "28":
+ id: "28"
+ taskid: 48e40e80-13b9-43d2-84c6-c49252673b8e
type: regular
task:
- id: 442cf066-c615-4185-80c2-7a57d6a9a92a
+ id: 48e40e80-13b9-43d2-84c6-c49252673b8e
version: -1
- name: DeleteContext
+ name: Delete Context
description: |-
Delete field from context.
@@ -463,7 +722,7 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "5"
+ - "34"
scriptarguments:
all:
simple: "yes"
@@ -472,8 +731,8 @@ tasks:
view: |-
{
"position": {
- "x": 162.5,
- "y": 720
+ "x": 50,
+ "y": 1390
}
}
note: false
@@ -483,36 +742,73 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "17":
- id: "17"
- taskid: a6b685f9-671b-4db0-826c-2f4deb148ba8
+ "30":
+ id: "30"
+ taskid: 22a118a2-16b3-4d43-8143-7258568ab0af
type: regular
task:
- id: a6b685f9-671b-4db0-826c-2f4deb148ba8
+ id: 22a118a2-16b3-4d43-8143-7258568ab0af
version: -1
- name: Search Item In Mailbox (ews v2)
- description: Searches for items in the specified mailbox. Specific permissions
- are needed for this operation to search in a target mailbox other than the
- default.
- script: EWS v2|||ews-search-mailbox
+ name: Delete email (o365)
+ description: Delete items from mailbox.
+ script: EWSO365|||ews-delete-items
type: regular
iscommand: true
- brand: EWS v2
+ brand: EWSO365
nexttasks:
'#none#':
- - "18"
+ - "36"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1740
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: c64fbfeb-5174-4e7c-8036-e26fc6a5dc69
+ type: regular
+ task:
+ id: c64fbfeb-5174-4e7c-8036-e26fc6a5dc69
+ version: -1
+ name: Search Item In for_test_playbook-do not delete
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: EWSO365|||ews-search-mailbox
+ type: regular
+ iscommand: true
+ brand: EWSO365
+ nexttasks:
+ '#none#':
+ - "30"
scriptarguments:
+ folder-path:
+ simple: for_test_playbook-do not delete
limit:
simple: "1"
query:
- simple: body:384868fafe3ef9190a8a5e59c5c195c3
+ simple: body:384868fafe3ef9190a8a5e59c5c195c
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1770
+ "x": 50,
+ "y": 1565
}
}
note: false
@@ -522,34 +818,99 @@ tasks:
quietmode: 0
isoversize: false
isautoswitchedtoquietmode: false
- "18":
- id: "18"
- taskid: a14ac252-82c9-4667-88f7-32fa23c91a5b
+ "35":
+ id: "35"
+ taskid: 7aeb2a57-363d-4919-83e9-bda34b9ef7c0
+ type: title
+ task:
+ id: 7aeb2a57-363d-4919-83e9-bda34b9ef7c0
+ version: -1
+ name: o365
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "14"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "36":
+ id: "36"
+ taskid: a028e8b6-0ad3-4efc-8f47-0181a1bb4c1a
+ type: title
+ task:
+ id: a028e8b6-0ad3-4efc-8f47-0181a1bb4c1a
+ version: -1
+ name: ews v2
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ nexttasks:
+ '#none#':
+ - "37"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1915
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "37":
+ id: "37"
+ taskid: 1f708aa7-9eea-4dd0-8bfe-fce34190989e
type: regular
task:
- id: a14ac252-82c9-4667-88f7-32fa23c91a5b
+ id: 1f708aa7-9eea-4dd0-8bfe-fce34190989e
version: -1
- name: Move (ews v2)
- description: Move an item to different folder in the mailbox.
- script: EWS v2|||ews-move-item
+ name: Delete context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
type: regular
- iscommand: true
- brand: EWS v2
+ iscommand: false
+ brand: ""
nexttasks:
'#none#':
- - "12"
+ - "5"
scriptarguments:
- item-id:
- simple: ${EWS.Items.itemId}
- target-folder-path:
- simple: Inbox/Test
+ all:
+ simple: "yes"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 162.5,
- "y": 1945
+ "x": 50,
+ "y": 2060
}
}
note: false
@@ -561,13 +922,11 @@ tasks:
isautoswitchedtoquietmode: false
view: |-
{
- "linkLabelsPosition": {
- "2_3_yes": 0.25
- },
+ "linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 2660,
- "width": 492.5,
+ "height": 4175,
+ "width": 380,
"x": 50,
"y": 50
}
diff --git a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWSv2_empty_attachment_test.yml b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWSv2_empty_attachment_test.yml
index 74309bb9df5a..18f183fae4e1 100644
--- a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWSv2_empty_attachment_test.yml
+++ b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-EWSv2_empty_attachment_test.yml
@@ -2,81 +2,374 @@ id: EWSv2_empty_attachment_test
version: -1
name: EWSv2_empty_attachment_test
starttaskid: "0"
-description: ""
tasks:
"0":
id: "0"
- taskid: 4398b442-0e88-4a0b-8f69-5d6704628f22
+ taskid: ada0286e-3be5-4c82-8264-9a4c7b232955
type: start
task:
- description: ""
- id: 4398b442-0e88-4a0b-8f69-5d6704628f22
+ id: ada0286e-3be5-4c82-8264-9a4c7b232955
version: -1
name: ""
iscommand: false
brand: ""
+ description: ''
nexttasks:
'#none#':
- - "1"
+ - "6"
separatecontext: false
view: |-
{
"position": {
- "x": 450,
+ "x": 50,
"y": 50
}
}
note: false
- evidencedata:
- description:
- occurred:
- tags:
- customfields: {}
+ continueonerrortype: ""
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: 9ee95d9f-d075-44e9-80c6-5796b2b5e0d6
+ taskid: 9d2e0189-85f6-41e5-812c-d14b7c92f6aa
type: regular
task:
- description: ""
- id: 9ee95d9f-d075-44e9-80c6-5796b2b5e0d6
+ description: "Retrieves the actual attachments from an item (email message). To get all attachments for a message, only specify the item-id argument."
+ id: 9d2e0189-85f6-41e5-812c-d14b7c92f6aa
version: -1
name: Verify getting empty attachment does not return error
- script: EWS v2|||ews-get-attachment
+ script: '|||ews-get-attachment'
type: regular
iscommand: true
- brand: EWS v2
+ brand: ""
scriptarguments:
- attachment-ids: {}
item-id:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAABe4mnAAABL/fkrvLLNQrnWSaIq1hMZAABe4vgwAAA=
- target-mailbox: {}
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1245
+ }
+ }
+ note: false
+ nexttasks:
+ '#none#':
+ - "10"
+ continueonerrortype: ""
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "3":
+ id: "3"
+ taskid: cf7da2d8-3fc8-4e7d-83ff-3b1ddecabdaf
+ type: condition
+ task:
+ id: cf7da2d8-3fc8-4e7d-83ff-3b1ddecabdaf
+ version: -1
+ name: |
+ check file is created
+ type: condition
+ iscommand: false
+ brand: ""
+ nexttasks:
+ "yes":
+ - "13"
+ separatecontext: false
+ conditions:
+ - label: "yes"
+ condition:
+ - - operator: isNotEmpty
+ left:
+ value:
+ simple: File.EntryID
+ iscontext: true
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "4":
+ id: "4"
+ taskid: b4ca1ef8-0e34-467f-8b7e-164316b28f71
+ type: regular
+ task:
+ id: b4ca1ef8-0e34-467f-8b7e-164316b28f71
+ version: -1
+ name: send-mail
+ description: Sends an email using EWS.
+ script: EWS v2|||send-mail
+ type: regular
+ iscommand: true
+ brand: EWS v2
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ attachIDs:
+ simple: ${File.EntryID}
+ body:
+ simple: EWSv2_empty_attachment_test, triggered in ${incident.incidentlink}
+ subject:
+ simple: EWSv2_empty_attachment_test_${ServerURL.Host}_${incident.created}
+ to:
+ simple: ${inputs.target_mail}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: 61257cb8-1267-4104-873c-2e664457499c
+ type: regular
+ task:
+ id: 61257cb8-1267-4104-873c-2e664457499c
+ version: -1
+ name: create empty file
+ description: |
+ Creates a file (using the given data input or entry ID) and uploads it to the current investigation War Room.
+ scriptName: FileCreateAndUploadV2
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ data:
+ simple: '""'
+ filename:
+ simple: emptyfile
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: 5b13b9b3-1cd9-4116-81e2-b866105ed9fe
+ type: title
+ task:
+ id: 5b13b9b3-1cd9-4116-81e2-b866105ed9fe
+ version: -1
+ name: End
+ type: title
+ iscommand: false
+ brand: ""
+ description: ''
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1595
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: 13c0cba9-3d1f-4540-88d5-fa1b8485cfcc
+ type: regular
+ task:
+ id: 13c0cba9-3d1f-4540-88d5-fa1b8485cfcc
+ version: -1
+ name: Delete the email we sent
+ description: Delete items from mailbox.
+ script: '|||ews-delete-items'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "9"
+ scriptarguments:
+ item-ids:
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1420
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 5f312abf-44f4-45b8-849c-7424f446202c
+ type: regular
+ task:
+ id: 5f312abf-44f4-45b8-849c-7424f446202c
+ version: -1
+ name: Sleep 120 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ seconds:
+ simple: "120"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 895
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: 9f31b387-bcdb-4264-8f93-e7620cd894dd
+ type: regular
+ task:
+ id: 9f31b387-bcdb-4264-8f93-e7620cd894dd
+ version: -1
+ name: Search The Email
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: EWS v2|||ews-search-mailbox
+ type: regular
+ iscommand: true
+ brand: EWS v2
+ nexttasks:
+ '#none#':
+ - "1"
+ scriptarguments:
+ query:
+ simple: EWSv2_empty_attachment_test_${ServerURL.Host}_${incident.created}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 50,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 7fd3e961-5568-49a8-8a98-efd105fd52ec
+ type: regular
+ task:
+ id: 7fd3e961-5568-49a8-8a98-efd105fd52ec
+ version: -1
+ name: GetServerURL
+ description: Get the Server URL.
+ scriptName: GetServerURL
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "4"
separatecontext: false
+ continueonerrortype: ""
view: |-
{
"position": {
- "x": 450,
- "y": 222
+ "x": 50,
+ "y": 545
}
}
note: false
- evidencedata:
- description:
- occurred:
- tags:
- customfields: {}
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 267,
+ "height": 1610,
"width": 380,
- "x": 450,
+ "x": 50,
"y": 50
}
}
}
-inputs: []
+inputs:
+- key: target_mail
+ value:
+ simple: demistoadmin@demisto.int
+ required: false
+ description: ""
+ playbookInputQuery:
outputs: []
fromversion: 5.0.0
-
+description: ''
diff --git a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-Get_Original_Email_-_EWS_v2_-_test.yml b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-Get_Original_Email_-_EWS_v2_-_test.yml
index 3299ad466b60..f1746e279596 100644
--- a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-Get_Original_Email_-_EWS_v2_-_test.yml
+++ b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-Get_Original_Email_-_EWS_v2_-_test.yml
@@ -6,10 +6,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 03873e01-5b6e-43e1-82ac-28d95a1288b0
+ taskid: 282d756e-df64-4190-817b-dd088b693ab5
type: start
task:
- id: 03873e01-5b6e-43e1-82ac-28d95a1288b0
+ id: 282d756e-df64-4190-817b-dd088b693ab5
version: -1
name: ""
iscommand: false
@@ -17,13 +17,13 @@ tasks:
description: ''
nexttasks:
'#none#':
- - "5"
+ - "6"
separatecontext: false
continueonerrortype: ""
view: |-
{
"position": {
- "x": 450,
+ "x": 265,
"y": 50
}
}
@@ -36,10 +36,10 @@ tasks:
isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: 1548afd0-21ca-4c8e-85d1-f2efa6e41c3a
+ taskid: cdcb88f6-daf2-443d-87ae-2e5f0c8e4b17
type: condition
task:
- id: 1548afd0-21ca-4c8e-85d1-f2efa6e41c3a
+ id: cdcb88f6-daf2-443d-87ae-2e5f0c8e4b17
version: -1
name: Check output
description: Check the playbook outputs.
@@ -50,7 +50,7 @@ tasks:
'#default#':
- "4"
"yes":
- - "3"
+ - "11"
separatecontext: false
conditions:
- label: "yes"
@@ -66,8 +66,8 @@ tasks:
view: |-
{
"position": {
- "x": 450,
- "y": 400
+ "x": 265,
+ "y": 1245
}
}
note: false
@@ -79,10 +79,10 @@ tasks:
isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 8b153235-d4eb-4f8c-898d-8cf985fe3dca
+ taskid: 0a855082-c7ca-4c29-8e2e-000da0170e76
type: regular
task:
- id: 8b153235-d4eb-4f8c-898d-8cf985fe3dca
+ id: 0a855082-c7ca-4c29-8e2e-000da0170e76
version: -1
name: Print success
description: Prints text to war room (Markdown supported)
@@ -98,8 +98,8 @@ tasks:
view: |-
{
"position": {
- "x": 680,
- "y": 570
+ "x": 480,
+ "y": 2120
}
}
note: false
@@ -111,10 +111,10 @@ tasks:
isautoswitchedtoquietmode: false
"4":
id: "4"
- taskid: 2634bc10-1180-4987-872e-80b1506eae70
+ taskid: 84b860cb-30ca-454a-871e-6d099d8ada7e
type: regular
task:
- id: 2634bc10-1180-4987-872e-80b1506eae70
+ id: 84b860cb-30ca-454a-871e-6d099d8ada7e
version: -1
name: Print Error
description: Prints an error entry with a given message
@@ -130,8 +130,8 @@ tasks:
view: |-
{
"position": {
- "x": 220,
- "y": 570
+ "x": 50,
+ "y": 1420
}
}
note: false
@@ -143,16 +143,16 @@ tasks:
isautoswitchedtoquietmode: false
"5":
id: "5"
- taskid: c82679a1-c7e3-4716-8444-e12f787602d6
+ taskid: e75db3de-209e-4fc4-8d31-8063f8374341
type: playbook
task:
- id: c82679a1-c7e3-4716-8444-e12f787602d6
+ id: e75db3de-209e-4fc4-8d31-8063f8374341
version: -1
name: Get Original Email - EWS v2
description: |-
- This v2 playbook retrieves the original email in the thread (as eml file) by using the EWS v2 integration.
- The main difference between this playbook and its previous version is that this playbook will retrieve the email as eml and not as an Email object. This version also reduces the amount of tasks needed to perform the fetch action.
- You must have the necessary permissions in the EWS integration to execute global search: eDiscovery
+ This v2 playbook retrieves the original email in a thread as an EML file (and not an email object as in the previous version) by using the EWS v2 or EWSO365 integration.
+ It also reduces the number of tasks to perform the fetch action.
+ Note: You must have the necessary eDiscovery permissions in the EWS integration to execute a global search.
playbookName: Get Original Email - EWS v2
type: playbook
iscommand: false
@@ -162,7 +162,7 @@ tasks:
- "2"
scriptarguments:
MessageID:
- simple: ${inputs.MessageID}
+ simple: ${EWS.Items.messageId}
TargetMailbox:
simple: ${inputs.UserID}
separatecontext: true
@@ -175,8 +175,356 @@ tasks:
view: |-
{
"position": {
- "x": 450,
- "y": 220
+ "x": 265,
+ "y": 1070
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "6":
+ id: "6"
+ taskid: e673d0cf-94f6-466a-88bc-45e877deac69
+ type: regular
+ task:
+ id: e673d0cf-94f6-466a-88bc-45e877deac69
+ version: -1
+ name: Upload file
+ description: |
+ Creates a file (using the given data input or entry ID) and uploads it to the current investigation War Room.
+ scriptName: FileCreateAndUploadV2
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "7"
+ scriptarguments:
+ data:
+ simple: hi i am a file
+ filename:
+ simple: for Get Original Email - EWS v2 - test TPB
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 195
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "7":
+ id: "7"
+ taskid: fcb59dff-f1d8-4872-8814-5132f2761d70
+ type: regular
+ task:
+ id: fcb59dff-f1d8-4872-8814-5132f2761d70
+ version: -1
+ name: Send mail with file
+ description: Sends an email.
+ script: '|||send-mail'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ attachIDs:
+ simple: ${File.EntryID}
+ body:
+ simple: for_Get_Original_Email-EWS_v2-test_TPB
+ from:
+ simple: ${inputs.UserID}
+ subject:
+ simple: for Get Original Email - EWS v2 - test TPB
+ to:
+ simple: ${inputs.UserID}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 370
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "8":
+ id: "8"
+ taskid: 45a1c01e-06c1-4dbc-8b5b-eb300db97ba0
+ type: regular
+ task:
+ id: 45a1c01e-06c1-4dbc-8b5b-eb300db97ba0
+ version: -1
+ name: Delete Context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 545
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "9":
+ id: "9"
+ taskid: aa9d38b9-2230-4a8d-828b-d8fb3c6592ee
+ type: regular
+ task:
+ id: aa9d38b9-2230-4a8d-828b-d8fb3c6592ee
+ version: -1
+ name: Search in mailbox
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: '|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "5"
+ scriptarguments:
+ folder-path:
+ simple: Inbox
+ limit:
+ simple: "1"
+ query:
+ simple: body:for_Get_Original_Email-EWS_v2-test_TPB
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 895
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "10":
+ id: "10"
+ taskid: c5b11532-9afc-47d5-858b-ccce7fe40feb
+ type: regular
+ task:
+ id: c5b11532-9afc-47d5-858b-ccce7fe40feb
+ version: -1
+ name: Sleep for 90 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "9"
+ scriptarguments:
+ seconds:
+ simple: "90"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 265,
+ "y": 720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "11":
+ id: "11"
+ taskid: 99ec473d-abee-4130-8aa1-ef34e0e91a33
+ type: regular
+ task:
+ id: 99ec473d-abee-4130-8aa1-ef34e0e91a33
+ version: -1
+ name: Delete mail
+ description: Delete items from mailbox.
+ script: '|||ews-delete-items'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "12"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1420
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "12":
+ id: "12"
+ taskid: e4b5c0be-0320-4b9b-8c5b-db46e6bc51ec
+ type: regular
+ task:
+ id: e4b5c0be-0320-4b9b-8c5b-db46e6bc51ec
+ version: -1
+ name: Delete context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "13"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1595
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "13":
+ id: "13"
+ taskid: 9e6745bd-b487-4692-8f66-413456e521b7
+ type: regular
+ task:
+ id: 9e6745bd-b487-4692-8f66-413456e521b7
+ version: -1
+ name: Search in Sent items
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: '|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "14"
+ scriptarguments:
+ folder-path:
+ simple: Sent Items
+ limit:
+ simple: "1"
+ query:
+ simple: body:for_Get_Original_Email-EWS_v2-test_TPB
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1770
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "14":
+ id: "14"
+ taskid: 59f33361-2da5-4dfe-8c25-2ddf12808e79
+ type: regular
+ task:
+ id: 59f33361-2da5-4dfe-8c25-2ddf12808e79
+ version: -1
+ name: Delete mail from sent items
+ description: Delete items from mailbox.
+ script: '|||ews-delete-items'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "3"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ simple: ${EWS.Items.itemId}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 480,
+ "y": 1945
}
}
note: false
@@ -191,9 +539,9 @@ view: |-
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 615,
- "width": 840,
- "x": 220,
+ "height": 2165,
+ "width": 810,
+ "x": 50,
"y": 50
}
}
diff --git a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-pyEWS_Test.yml b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-pyEWS_Test.yml
index 71afcaee6b8a..f2a45a730292 100644
--- a/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-pyEWS_Test.yml
+++ b/Packs/MicrosoftExchangeOnPremise/TestPlaybooks/playbook-pyEWS_Test.yml
@@ -7,10 +7,10 @@ starttaskid: "0"
tasks:
"0":
id: "0"
- taskid: 7ce53ad6-ea9f-4586-8279-983848313e89
+ taskid: fc0772ab-dc81-48be-8447-9a738e8547c3
type: start
task:
- id: 7ce53ad6-ea9f-4586-8279-983848313e89
+ id: fc0772ab-dc81-48be-8447-9a738e8547c3
version: -1
name: ""
iscommand: false
@@ -23,7 +23,7 @@ tasks:
view: |-
{
"position": {
- "x": 2200,
+ "x": 1125,
"y": 50
}
}
@@ -32,12 +32,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"1":
id: "1"
- taskid: ddb9d210-0a94-4c22-8956-846e9dc599fb
+ taskid: 260f1502-823f-4a7b-831b-591e1f9c36dc
type: regular
task:
- id: ddb9d210-0a94-4c22-8956-846e9dc599fb
+ id: 260f1502-823f-4a7b-831b-591e1f9c36dc
version: -1
name: Delete Context
scriptName: DeleteContext
@@ -50,11 +53,8 @@ tasks:
- "6"
- "13"
- "15"
- - "17"
- "21"
- "23"
- - "8"
- - "10"
scriptarguments:
all:
simple: "yes"
@@ -62,7 +62,7 @@ tasks:
view: |-
{
"position": {
- "x": 2200,
+ "x": 1125,
"y": 195
}
}
@@ -71,12 +71,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"2":
id: "2"
- taskid: 523a03ea-ae00-47e1-8b0b-1705218ea9aa
+ taskid: 4f7f7453-5365-4b3d-880c-4c414fc9f104
type: regular
task:
- id: 523a03ea-ae00-47e1-8b0b-1705218ea9aa
+ id: 4f7f7453-5365-4b3d-880c-4c414fc9f104
version: -1
name: Get searchable mailboxes
script: '|||ews-get-searchable-mailboxes'
@@ -90,8 +93,8 @@ tasks:
view: |-
{
"position": {
- "x": 480,
- "y": 545
+ "x": 50,
+ "y": 3695
}
}
note: false
@@ -99,12 +102,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"3":
id: "3"
- taskid: 12d4afe7-7e9a-4b0c-844b-a99df4e3d6ee
+ taskid: 4a5f5ea6-d9da-406f-8566-2195ea742f27
type: condition
task:
- id: 12d4afe7-7e9a-4b0c-844b-a99df4e3d6ee
+ id: 4a5f5ea6-d9da-406f-8566-2195ea742f27
version: -1
name: Verify Mailboxes context key
type: condition
@@ -130,8 +136,8 @@ tasks:
view: |-
{
"position": {
- "x": 480,
- "y": 720
+ "x": 50,
+ "y": 3870
}
}
note: false
@@ -139,12 +145,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"6":
id: "6"
- taskid: c7f3fb44-ed0a-459c-8252-34305b03d352
+ taskid: 758d522f-fb9d-4ea1-8dbb-86280bbd04d8
type: regular
task:
- id: c7f3fb44-ed0a-459c-8252-34305b03d352
+ id: 758d522f-fb9d-4ea1-8dbb-86280bbd04d8
version: -1
name: Get contacts
script: '|||ews-get-contacts'
@@ -163,8 +172,8 @@ tasks:
view: |-
{
"position": {
- "x": 3920,
- "y": 545
+ "x": 480,
+ "y": 3695
}
}
note: false
@@ -172,12 +181,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"7":
id: "7"
- taskid: 78727767-acc1-4070-861f-8cf6e67abd7e
+ taskid: 36661064-e0ce-47ca-8d77-63627e1d3b27
type: condition
task:
- id: 78727767-acc1-4070-861f-8cf6e67abd7e
+ id: 36661064-e0ce-47ca-8d77-63627e1d3b27
version: -1
name: Verify Contacts context key
type: condition
@@ -203,8 +215,8 @@ tasks:
view: |-
{
"position": {
- "x": 3920,
- "y": 720
+ "x": 480,
+ "y": 3870
}
}
note: false
@@ -212,12 +224,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"8":
id: "8"
- taskid: 412078ab-5a29-462d-860a-08ef147c8f4d
+ taskid: d8ff6bb9-f1c3-4b08-8721-6035d8ec8a9e
type: regular
task:
- id: 412078ab-5a29-462d-860a-08ef147c8f4d
+ id: d8ff6bb9-f1c3-4b08-8721-6035d8ec8a9e
version: -1
name: Get item attachment
script: '|||ews-get-attachment'
@@ -229,13 +244,15 @@ tasks:
- "9"
scriptarguments:
item-id:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAAAApMXAAABL/fkrvLLNQrnWSaIq1hMZAAAApM+rAAA=
+ complex:
+ root: EWS.Items
+ accessor: itemId
separatecontext: false
view: |-
{
"position": {
- "x": 910,
- "y": 545
+ "x": 2230,
+ "y": 1070
}
}
note: false
@@ -243,12 +260,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"9":
id: "9"
- taskid: e36aad59-231d-48db-86d0-456db16f0238
+ taskid: 0c2736e0-f748-448a-862d-f4a20b3c6572
type: condition
task:
- id: e36aad59-231d-48db-86d0-456db16f0238
+ id: 0c2736e0-f748-448a-862d-f4a20b3c6572
version: -1
name: Verify Items context
type: condition
@@ -256,7 +276,7 @@ tasks:
brand: ""
nexttasks:
"yes":
- - "25"
+ - "33"
separatecontext: false
conditions:
- label: "yes"
@@ -266,11 +286,8 @@ tasks:
value:
simple: EWS.Items.ItemAttachments.datetimeCreated
iscontext: true
- - - operator: isExists
- left:
- value:
- simple: EWS.Items.ItemAttachments.headers
- iscontext: true
+ right:
+ value: {}
- - operator: isExists
left:
value:
@@ -279,18 +296,18 @@ tasks:
- - operator: isExists
left:
value:
- simple: EWS.Items.ItemAttachments.body
+ simple: EWS.Items.ItemAttachments.hasAttachments
iscontext: true
- - operator: isExists
left:
value:
- simple: EWS.Items.ItemAttachments.hasAttachments
+ simple: EWS.Items.ItemAttachments.attachmentType
iscontext: true
view: |-
{
"position": {
- "x": 910,
- "y": 720
+ "x": 2230,
+ "y": 1245
}
}
note: false
@@ -298,12 +315,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"10":
id: "10"
- taskid: c8a1f539-5da9-4972-802d-3d5c57d78bd0
+ taskid: ec5ffa8a-5ba6-4fb8-8c90-bb75b1bc9900
type: regular
task:
- id: c8a1f539-5da9-4972-802d-3d5c57d78bd0
+ id: ec5ffa8a-5ba6-4fb8-8c90-bb75b1bc9900
version: -1
name: Get file attachment
script: '|||ews-get-attachment'
@@ -312,18 +332,20 @@ tasks:
brand: ""
nexttasks:
'#none#':
- - "11"
+ - "42"
scriptarguments:
attachment-ids:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAAAApMXAAABL/fkrvLLNQrnWSaIq1hMZAAAApM+qAAABEgAQAPINyV3OXiNAtzjDCQ1jdJw=
+ simple: ${EWS.Items.FileAttachments.attachmentId}
item-id:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAAAApMXAAABL/fkrvLLNQrnWSaIq1hMZAAAApM+qAAA=
+ complex:
+ root: EWS.Items
+ accessor: itemId
separatecontext: false
view: |-
{
"position": {
- "x": 4350,
- "y": 370
+ "x": 2455,
+ "y": 2470
}
}
note: false
@@ -331,12 +353,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"11":
id: "11"
- taskid: 14f2925e-4db9-4461-8da3-faec7665d677
+ taskid: 2d81d28b-de39-4988-8aa7-9bb807ee8e9a
type: condition
task:
- id: 14f2925e-4db9-4461-8da3-faec7665d677
+ id: 2d81d28b-de39-4988-8aa7-9bb807ee8e9a
version: -1
name: Verify EWS file attachment
type: condition
@@ -362,8 +387,8 @@ tasks:
view: |-
{
"position": {
- "x": 4350,
- "y": 545
+ "x": 2455,
+ "y": 2820
}
}
note: false
@@ -371,12 +396,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"12":
id: "12"
- taskid: 1fe0d1eb-2079-4a9c-8bd6-2fb5fabc6ae3
+ taskid: 23efabe3-e88c-44df-881f-5df4a83d9488
type: condition
task:
- id: 1fe0d1eb-2079-4a9c-8bd6-2fb5fabc6ae3
+ id: 23efabe3-e88c-44df-881f-5df4a83d9488
version: -1
name: Verify file context
type: condition
@@ -384,21 +412,23 @@ tasks:
brand: ""
nexttasks:
"yes":
- - "25"
+ - "36"
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: isExists
+ - - operator: isEqualString
left:
value:
- simple: File.Name
- iscontext: true
+ simple: ${File.Name}
+ right:
+ value:
+ simple: for-tpb-pyEWS
view: |-
{
"position": {
- "x": 4350,
- "y": 720
+ "x": 2465,
+ "y": 2985
}
}
note: false
@@ -406,12 +436,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"13":
id: "13"
- taskid: 22027822-7b5f-4109-86e1-22980fda37b1
+ taskid: 1631eadb-02b9-4051-8f1d-8e4116442ea5
type: regular
task:
- id: 22027822-7b5f-4109-86e1-22980fda37b1
+ id: 1631eadb-02b9-4051-8f1d-8e4116442ea5
version: -1
name: Get out of office status
script: '|||ews-get-out-of-office'
@@ -423,13 +456,13 @@ tasks:
- "14"
scriptarguments:
target-mailbox:
- simple: DEM174861@demisto.int
+ simple: demistoadmin@demisto.int
separatecontext: false
view: |-
{
"position": {
- "x": 1340,
- "y": 545
+ "x": 910,
+ "y": 3695
}
}
note: false
@@ -437,12 +470,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"14":
id: "14"
- taskid: 413adcf7-1c51-4d85-8acc-f1f08787174a
+ taskid: c0deb192-3a02-41d0-860f-6dcb0f43324b
type: condition
task:
- id: 413adcf7-1c51-4d85-8acc-f1f08787174a
+ id: c0deb192-3a02-41d0-860f-6dcb0f43324b
version: -1
name: Verify out of office context key
type: condition
@@ -478,8 +514,8 @@ tasks:
view: |-
{
"position": {
- "x": 1340,
- "y": 720
+ "x": 910,
+ "y": 3870
}
}
note: false
@@ -487,12 +523,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"15":
id: "15"
- taskid: ac7760c2-9759-4592-8e84-e07d6a8f5c18
+ taskid: 2cd7f7cc-fc5c-4494-8231-1d8efd07278c
type: regular
task:
- id: ac7760c2-9759-4592-8e84-e07d6a8f5c18
+ id: 2cd7f7cc-fc5c-4494-8231-1d8efd07278c
version: -1
name: Find folders
script: '|||ews-find-folders'
@@ -506,8 +545,8 @@ tasks:
view: |-
{
"position": {
- "x": 1770,
- "y": 545
+ "x": 1340,
+ "y": 3695
}
}
note: false
@@ -515,12 +554,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"16":
id: "16"
- taskid: 58455273-c84d-4de1-8064-78f89adff082
+ taskid: 4c801c6f-2b94-4b1a-83ea-ebf21a1d11d8
type: condition
task:
- id: 58455273-c84d-4de1-8064-78f89adff082
+ id: 4c801c6f-2b94-4b1a-83ea-ebf21a1d11d8
version: -1
name: Verify folders context key
type: condition
@@ -541,8 +583,8 @@ tasks:
view: |-
{
"position": {
- "x": 1770,
- "y": 720
+ "x": 1340,
+ "y": 3870
}
}
note: false
@@ -550,12 +592,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"17":
id: "17"
- taskid: 78588475-f2a8-4498-8f09-eac22b05a10d
+ taskid: c5ade2d7-dbf9-4d98-83b7-ad2308e15b40
type: regular
task:
- id: 78588475-f2a8-4498-8f09-eac22b05a10d
+ id: c5ade2d7-dbf9-4d98-83b7-ad2308e15b40
version: -1
name: Get items
script: '|||ews-get-items'
@@ -567,13 +612,15 @@ tasks:
- "18"
scriptarguments:
item-ids:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAAAApMXAAABL/fkrvLLNQrnWSaIq1hMZAAAApM+qAAA=
+ complex:
+ root: EWS.Items
+ accessor: itemId
separatecontext: false
view: |-
{
"position": {
- "x": 3275,
- "y": 370
+ "x": 2005,
+ "y": 2820
}
}
note: false
@@ -581,12 +628,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"18":
id: "18"
- taskid: 33a6df30-ab19-4994-8686-b0800f78e649
+ taskid: 84c25969-ef36-4249-8226-7fb570fdff65
type: condition
task:
- id: 33a6df30-ab19-4994-8686-b0800f78e649
+ id: 84c25969-ef36-4249-8226-7fb570fdff65
version: -1
name: Filter item id
type: condition
@@ -596,24 +646,21 @@ tasks:
'#default#':
- "19"
"yes":
- - "20"
+ - "36"
separatecontext: false
conditions:
- label: "yes"
condition:
- - - operator: inList
+ - - operator: isExists
left:
value:
simple: EWS.Items.itemId
iscontext: true
- right:
- value:
- simple: AAMkADQ1OWE0NzI5LTUxZTktNDE2Zi1hYTdkLWRiMDFmZDgxNDY1MABGAAAAAAAEzx7ZB+bhTYeJK95WqEa8BwBL/fkrvLLNQrnWSaIq1hMZAAAApMXAAABL/fkrvLLNQrnWSaIq1hMZAAAApM+qAAA=
view: |-
{
"position": {
- "x": 3275,
- "y": 545
+ "x": 2005,
+ "y": 2995
}
}
note: false
@@ -621,12 +668,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"19":
id: "19"
- taskid: bbee830d-d192-4dc4-8c8a-da2e17a3f2b0
+ taskid: 4f8772e3-921c-453d-8f9e-a2d13e6482ec
type: regular
task:
- id: bbee830d-d192-4dc4-8c8a-da2e17a3f2b0
+ id: 4f8772e3-921c-453d-8f9e-a2d13e6482ec
version: -1
name: Did not find item in content
description: Prints an error entry with a given message
@@ -641,33 +691,8 @@ tasks:
view: |-
{
"position": {
- "x": 3060,
- "y": 720
- }
- }
- note: false
- timertriggers: []
- ignoreworker: false
- skipunavailable: false
- quietmode: 0
- "20":
- id: "20"
- taskid: 71a45991-7602-4501-8f4f-7bcf695c1a98
- type: title
- task:
- id: 71a45991-7602-4501-8f4f-7bcf695c1a98
- version: -1
- name: Found
- type: title
- iscommand: false
- brand: ""
- description: ''
- separatecontext: false
- view: |-
- {
- "position": {
- "x": 3490,
- "y": 735
+ "x": 1995,
+ "y": 3170
}
}
note: false
@@ -675,12 +700,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"21":
id: "21"
- taskid: 584e2c4e-def6-4b24-8279-8f85b912afb5
+ taskid: e2424daf-8daa-4b72-86b9-d25b9d67b191
type: regular
task:
- id: 584e2c4e-def6-4b24-8279-8f85b912afb5
+ id: e2424daf-8daa-4b72-86b9-d25b9d67b191
version: -1
name: Get expanded group
script: '|||ews-expand-group'
@@ -697,8 +725,8 @@ tasks:
view: |-
{
"position": {
- "x": 2200,
- "y": 545
+ "x": 1770,
+ "y": 3695
}
}
note: false
@@ -706,12 +734,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"22":
id: "22"
- taskid: d915ce7c-81fd-4be6-88aa-a83537a9260f
+ taskid: 22b783f3-f158-4388-8a62-5b75653f7eeb
type: condition
task:
- id: d915ce7c-81fd-4be6-88aa-a83537a9260f
+ id: 22b783f3-f158-4388-8a62-5b75653f7eeb
version: -1
name: Verify Expanded Group members
type: condition
@@ -732,8 +763,8 @@ tasks:
view: |-
{
"position": {
- "x": 2200,
- "y": 720
+ "x": 1770,
+ "y": 3870
}
}
note: false
@@ -741,12 +772,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"23":
id: "23"
- taskid: d5f286a8-9d6d-45ff-8bbc-c7891d6d8fad
+ taskid: 25903e54-29c1-4b05-8c71-a8f12e398b74
type: regular
task:
- id: d5f286a8-9d6d-45ff-8bbc-c7891d6d8fad
+ id: 25903e54-29c1-4b05-8c71-a8f12e398b74
version: -1
name: Get items from folder
script: '|||ews-get-items-from-folder'
@@ -758,15 +792,15 @@ tasks:
- "24"
scriptarguments:
folder-path:
- simple: Inbox\TEST
+ simple: Inbox\DO_NOT_DELETE-TPB:pyEWS_TEST
limit:
simple: "10"
separatecontext: false
view: |-
{
"position": {
- "x": 2630,
- "y": 545
+ "x": 2230,
+ "y": 370
}
}
note: false
@@ -774,12 +808,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"24":
id: "24"
- taskid: 5ef115f9-6e8e-4513-8a24-f335aa599c1f
+ taskid: 2621f7d7-5d7e-4968-8acd-0a0d051df3c1
type: condition
task:
- id: 5ef115f9-6e8e-4513-8a24-f335aa599c1f
+ id: 2621f7d7-5d7e-4968-8acd-0a0d051df3c1
version: -1
name: Verify items from folder
type: condition
@@ -787,7 +824,7 @@ tasks:
brand: ""
nexttasks:
"yes":
- - "25"
+ - "31"
separatecontext: false
conditions:
- label: "yes"
@@ -800,8 +837,8 @@ tasks:
view: |-
{
"position": {
- "x": 2630,
- "y": 720
+ "x": 2230,
+ "y": 545
}
}
note: false
@@ -809,12 +846,15 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
"25":
id: "25"
- taskid: 0b969dae-3342-46d6-8f95-9501a484bf2f
+ taskid: 50cb5f28-b0a5-417e-875e-b67546c16a9e
type: title
task:
- id: 0b969dae-3342-46d6-8f95-9501a484bf2f
+ id: 50cb5f28-b0a5-417e-875e-b67546c16a9e
version: -1
name: Done
type: title
@@ -825,7 +865,207 @@ tasks:
view: |-
{
"position": {
- "x": 1770,
+ "x": 1125,
+ "y": 4045
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ continueonerrortype: ""
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "26":
+ id: "26"
+ taskid: aceefa3e-706e-4921-8d2c-a024de91f520
+ type: regular
+ task:
+ id: aceefa3e-706e-4921-8d2c-a024de91f520
+ version: -1
+ name: Send mail
+ description: Sends an email using Microsoft Graph.
+ script: '|||send-mail'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "28"
+ scriptarguments:
+ attachIDs:
+ simple: ${File.EntryID}
+ body:
+ simple: i_am_for_tpb_pyEWS
+ from:
+ simple: demistoadmin@demisto.int
+ subject:
+ simple: for tpb pyEWS with fileAttachment
+ to:
+ simple: demistoadmin@demisto.int
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 1770
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "27":
+ id: "27"
+ taskid: 05853e24-99f5-46f0-85b5-ad4823a431a2
+ type: regular
+ task:
+ id: 05853e24-99f5-46f0-85b5-ad4823a431a2
+ version: -1
+ name: upload a file
+ description: |
+ Creates a file (using the given data input or entry ID) and uploads it to the current investigation War Room.
+ scriptName: FileCreateAndUploadV2
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "26"
+ scriptarguments:
+ data:
+ simple: hi i am fot tpb
+ filename:
+ simple: for-tpb-pyEWS
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 1595
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "28":
+ id: "28"
+ taskid: 83f8ecee-9198-4f69-8bff-8b689c5e68ff
+ type: regular
+ task:
+ id: 83f8ecee-9198-4f69-8bff-8b689c5e68ff
+ version: -1
+ name: Sleep for 90 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "34"
+ scriptarguments:
+ seconds:
+ simple: "90"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 1945
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "31":
+ id: "31"
+ taskid: 42fdf67f-5555-45ca-8372-2d69880dc783
+ type: regular
+ task:
+ id: 42fdf67f-5555-45ca-8372-2d69880dc783
+ version: -1
+ name: Delete context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "32"
+ scriptarguments:
+ all:
+ simple: "yes"
+ key:
+ complex:
+ root: EWS
+ accessor: Items
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 720
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "32":
+ id: "32"
+ taskid: 404bd878-1620-4f54-8bbd-e0353d1748ed
+ type: regular
+ task:
+ id: 404bd878-1620-4f54-8bbd-e0353d1748ed
+ version: -1
+ name: search for mail with itemAttachment
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: '|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "8"
+ scriptarguments:
+ folder-path:
+ simple: Inbox/DO_NOT_DELETE-TPB:pyEWS_TEST
+ limit:
+ simple: "1"
+ query:
+ simple: 'body: this_is_for_pyEWS TPB'
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
"y": 895
}
}
@@ -834,13 +1074,400 @@ tasks:
ignoreworker: false
skipunavailable: false
quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "33":
+ id: "33"
+ taskid: 3e6c41e4-2329-4dab-8e01-78f871f993bd
+ type: regular
+ task:
+ id: 3e6c41e4-2329-4dab-8e01-78f871f993bd
+ version: -1
+ name: Delete context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "27"
+ scriptarguments:
+ all:
+ simple: "yes"
+ key:
+ simple: ${EWS.Items}
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 1420
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 2
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "34":
+ id: "34"
+ taskid: 8a19a778-2cc0-42db-8163-6aad5c464fd2
+ type: regular
+ task:
+ id: 8a19a778-2cc0-42db-8163-6aad5c464fd2
+ version: -1
+ name: Search mail with fileAttachment
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: '|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "35"
+ - "41"
+ scriptarguments:
+ folder-path:
+ simple: Inbox
+ limit:
+ simple: "1"
+ query:
+ simple: body:for tpb pyEWS with fileAttachment
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2230,
+ "y": 2120
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "35":
+ id: "35"
+ taskid: 3bcd53e2-03f3-425a-8728-111c66601804
+ type: regular
+ task:
+ id: 3bcd53e2-03f3-425a-8728-111c66601804
+ version: -1
+ name: Sleep for 60 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "17"
+ - "10"
+ scriptarguments:
+ seconds:
+ simple: "60"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2015,
+ "y": 2295
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "36":
+ id: "36"
+ taskid: d463d749-9260-4231-8114-0f00150abb19
+ type: regular
+ task:
+ id: d463d749-9260-4231-8114-0f00150abb19
+ version: -1
+ name: Delete mail from DO_NOT_DELETE-TPB:pyEWS_TEST
+ description: Delete items from mailbox.
+ script: '|||ews-delete-items'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "38"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ complex:
+ root: EWS.Items
+ accessor: itemId
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2445,
+ "y": 3170
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "37":
+ id: "37"
+ taskid: 26d64ecf-3b00-437b-85cf-6549788f6076
+ type: regular
+ task:
+ id: 26d64ecf-3b00-437b-85cf-6549788f6076
+ version: -1
+ name: search mail in sent Items
+ description: Searches for items in the specified mailbox. Specific permissions are needed for this operation to search in a target mailbox other than the default.
+ script: '|||ews-search-mailbox'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "40"
+ scriptarguments:
+ folder-path:
+ simple: Sent Items
+ query:
+ simple: body:for tpb pyEWS with fileAttachment
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2445,
+ "y": 3520
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "38":
+ id: "38"
+ taskid: ae8d4793-84be-49e7-8e92-f017b60520f9
+ type: regular
+ task:
+ id: ae8d4793-84be-49e7-8e92-f017b60520f9
+ version: -1
+ name: Delete context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "37"
+ scriptarguments:
+ all:
+ simple: "yes"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2445,
+ "y": 3345
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "39":
+ id: "39"
+ taskid: febd4367-7611-4f01-8920-c87f443ad963
+ type: regular
+ task:
+ id: febd4367-7611-4f01-8920-c87f443ad963
+ version: -1
+ name: Delete mail from sent items
+ description: Delete items from mailbox.
+ script: '|||ews-delete-items'
+ type: regular
+ iscommand: true
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "25"
+ scriptarguments:
+ delete-type:
+ simple: hard
+ item-ids:
+ complex:
+ root: EWS.Items
+ accessor: itemId
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2445,
+ "y": 3870
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "40":
+ id: "40"
+ taskid: 5bed75fc-afca-4d57-8095-cbe70c0453c9
+ type: regular
+ task:
+ id: 5bed75fc-afca-4d57-8095-cbe70c0453c9
+ version: -1
+ name: Sleep for 30 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "39"
+ scriptarguments:
+ seconds:
+ simple: "30"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2445,
+ "y": 3695
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "41":
+ id: "41"
+ taskid: 9f5668ac-5760-42fb-8fd9-88f2ffe8f2b4
+ type: regular
+ task:
+ id: 9f5668ac-5760-42fb-8fd9-88f2ffe8f2b4
+ version: -1
+ name: Delete file key context
+ description: |-
+ Delete field from context.
+
+ This automation runs using the default Limited User role, unless you explicitly change the permissions.
+ For more information, see the section about permissions here:
+ https://docs-cortex.paloaltonetworks.com/r/Cortex-XSOAR/6.10/Cortex-XSOAR-Administrator-Guide/Automations
+ scriptName: DeleteContext
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "10"
+ scriptarguments:
+ all:
+ simple: "no"
+ key:
+ simple: File
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2465,
+ "y": 2295
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
+ "42":
+ id: "42"
+ taskid: 5b8a8042-8908-4cdc-8d88-ae3fbcd9b1ae
+ type: regular
+ task:
+ id: 5b8a8042-8908-4cdc-8d88-ae3fbcd9b1ae
+ version: -1
+ name: Sleep for 45 seconds
+ description: Sleep for X seconds.
+ scriptName: Sleep
+ type: regular
+ iscommand: false
+ brand: ""
+ nexttasks:
+ '#none#':
+ - "11"
+ scriptarguments:
+ seconds:
+ simple: "45"
+ separatecontext: false
+ continueonerrortype: ""
+ view: |-
+ {
+ "position": {
+ "x": 2455,
+ "y": 2645
+ }
+ }
+ note: false
+ timertriggers: []
+ ignoreworker: false
+ skipunavailable: false
+ quietmode: 0
+ isoversize: false
+ isautoswitchedtoquietmode: false
view: |-
{
"linkLabelsPosition": {},
"paper": {
"dimensions": {
- "height": 910,
- "width": 4680,
+ "height": 4060,
+ "width": 2795,
"x": 50,
"y": 50
}
diff --git a/Packs/MicrosoftExchangeOnline/Integrations/EWSO365/EWSO365.py b/Packs/MicrosoftExchangeOnline/Integrations/EWSO365/EWSO365.py
index b870764c1810..f11789c2208c 100644
--- a/Packs/MicrosoftExchangeOnline/Integrations/EWSO365/EWSO365.py
+++ b/Packs/MicrosoftExchangeOnline/Integrations/EWSO365/EWSO365.py
@@ -2092,7 +2092,12 @@ def decode_email_data(email_obj: Message):
except UnicodeDecodeError:
# In case the detected encoding fails apply the default encoding
demisto.info(f'Could not decode attached email using detected encoding:{encoding}, retrying '
- f'using utf-8.\nAttached email:\n{email_obj}')
+ f'using utf-8.\nAttached email details:'
+ f'\nMessage-ID = {email_obj.get("Message-ID")}'
+ f'\nDate = {email_obj.get("Date")}'
+ f'\nSubject = {email_obj.get("Subject")}'
+ f'\nFrom = {email_obj.get("From")}'
+ f'\nTo = {email_obj.get("To")}')
try:
data = attached_email_bytes.decode('utf-8')
except UnicodeDecodeError:
diff --git a/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_10.md b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_10.md
new file mode 100644
index 000000000000..34471bce34eb
--- /dev/null
+++ b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_10.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### EWS O365
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_11.md b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_11.md
new file mode 100644
index 000000000000..003a8b295861
--- /dev/null
+++ b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_11.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### EWS O365
+
+- Fixed an issue where the ***fetch incidents*** command failed when parsing messages with incorrect encoding.
\ No newline at end of file
diff --git a/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_9.md b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_9.md
new file mode 100644
index 000000000000..e483dd7f5344
--- /dev/null
+++ b/Packs/MicrosoftExchangeOnline/ReleaseNotes/1_3_9.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### EWS O365
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftExchangeOnline/pack_metadata.json b/Packs/MicrosoftExchangeOnline/pack_metadata.json
index ea1bc12bb800..8cda09ba8124 100644
--- a/Packs/MicrosoftExchangeOnline/pack_metadata.json
+++ b/Packs/MicrosoftExchangeOnline/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Exchange Online",
"description": "Exchange Online and Office 365 (mail)",
"support": "xsoar",
- "currentVersion": "1.3.8",
+ "currentVersion": "1.3.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_46.md b/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_46.md
new file mode 100644
index 000000000000..565c5b85d399
--- /dev/null
+++ b/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_46.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Graph API
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_47.md b/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_47.md
new file mode 100644
index 000000000000..54cbbd48dd2a
--- /dev/null
+++ b/Packs/MicrosoftGraphAPI/ReleaseNotes/1_1_47.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Graph API
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphAPI/pack_metadata.json b/Packs/MicrosoftGraphAPI/pack_metadata.json
index 56187c3024a7..a551ea888cd1 100644
--- a/Packs/MicrosoftGraphAPI/pack_metadata.json
+++ b/Packs/MicrosoftGraphAPI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph API",
"description": "Use the Microsoft Graph API integration to interact with Microsoft APIs that do not have dedicated integrations in Cortex XSOAR, for example, Mail Single-User, etc.",
"support": "xsoar",
- "currentVersion": "1.1.45",
+ "currentVersion": "1.1.47",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_43.md b/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_43.md
new file mode 100644
index 000000000000..9b7db1790503
--- /dev/null
+++ b/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_43.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Active Directory Applications
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_44.md b/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_44.md
new file mode 100644
index 000000000000..b44dde582683
--- /dev/null
+++ b/Packs/MicrosoftGraphApplications/ReleaseNotes/1_2_44.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Applications
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphApplications/pack_metadata.json b/Packs/MicrosoftGraphApplications/pack_metadata.json
index 4f4adfb02395..a56fc1604991 100644
--- a/Packs/MicrosoftGraphApplications/pack_metadata.json
+++ b/Packs/MicrosoftGraphApplications/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Applications",
"description": "Use this pack to manage connected applications and services",
"support": "xsoar",
- "currentVersion": "1.2.42",
+ "currentVersion": "1.2.44",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_22.md b/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_22.md
new file mode 100644
index 000000000000..937fc85874ab
--- /dev/null
+++ b/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_22.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### O365 Outlook Calendar
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_23.md b/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_23.md
new file mode 100644
index 000000000000..8b746f0043d9
--- /dev/null
+++ b/Packs/MicrosoftGraphCalendar/ReleaseNotes/1_1_23.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### O365 Outlook Calendar
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphCalendar/pack_metadata.json b/Packs/MicrosoftGraphCalendar/pack_metadata.json
index dc5981319cc7..483eb793d8d7 100644
--- a/Packs/MicrosoftGraphCalendar/pack_metadata.json
+++ b/Packs/MicrosoftGraphCalendar/pack_metadata.json
@@ -1,7 +1,7 @@
{
"name": "Microsoft Graph Calendar",
"description": "Microsoft Graph Calendar enables you to create and manage different calendars and events\n according to your requirements.",
- "currentVersion": "1.1.21",
+ "currentVersion": "1.1.23",
"support": "xsoar",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
diff --git a/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_28.md b/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_28.md
new file mode 100644
index 000000000000..787a1e0da540
--- /dev/null
+++ b/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_28.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Endpoint Manager (Intune)
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_29.md b/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_29.md
new file mode 100644
index 000000000000..05621ec3fb56
--- /dev/null
+++ b/Packs/MicrosoftGraphDeviceManagement/ReleaseNotes/1_1_29.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Endpoint Manager (Intune)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphDeviceManagement/pack_metadata.json b/Packs/MicrosoftGraphDeviceManagement/pack_metadata.json
index e5c5a954adb8..e1988e159868 100644
--- a/Packs/MicrosoftGraphDeviceManagement/pack_metadata.json
+++ b/Packs/MicrosoftGraphDeviceManagement/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Device Management",
"description": "Microsoft Graph Device Management",
"support": "xsoar",
- "currentVersion": "1.1.27",
+ "currentVersion": "1.1.29",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles.yml b/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles.yml
index bad56de07738..017a21043dae 100644
--- a/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles.yml
+++ b/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles.yml
@@ -762,7 +762,7 @@ script:
- description: Generate the login URL used for Authorization code flow.
name: msgraph-files-generate-login-url
arguments: []
- dockerimage: demisto/python_pancloud:1.0.0.77747
+ dockerimage: demisto/crypto:1.0.0.96042
script: ''
subtype: python3
type: python
diff --git a/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles_test.py b/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles_test.py
index 2d43b5508c98..86bcf0566c25 100644
--- a/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles_test.py
+++ b/Packs/MicrosoftGraphFiles/Integrations/MicrosoftGraphFiles/MicrosoftGraphFiles_test.py
@@ -934,7 +934,7 @@ def test_generate_login_url(mocker):
main()
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20{Scopes.graph}' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_26.md b/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_26.md
new file mode 100644
index 000000000000..68c794a8691c
--- /dev/null
+++ b/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_26.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### O365 File Management (Onedrive/Sharepoint/Teams)
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_27.md b/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_27.md
new file mode 100644
index 000000000000..6288ad251842
--- /dev/null
+++ b/Packs/MicrosoftGraphFiles/ReleaseNotes/1_1_27.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### O365 File Management (Onedrive/Sharepoint/Teams)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphFiles/pack_metadata.json b/Packs/MicrosoftGraphFiles/pack_metadata.json
index faa30cef7177..b72a303f294b 100644
--- a/Packs/MicrosoftGraphFiles/pack_metadata.json
+++ b/Packs/MicrosoftGraphFiles/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Files",
"description": "Use the O365 File Management (Onedrive/Sharepoint/Teams) integration to enable your app get authorized access to files in OneDrive, SharePoint, and MS Teams across your entire organization. This integration requires admin consent.",
"support": "xsoar",
- "currentVersion": "1.1.25",
+ "currentVersion": "1.1.27",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_46.md b/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_46.md
new file mode 100644
index 000000000000..33d284c23f95
--- /dev/null
+++ b/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_46.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Active Directory Groups
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_47.md b/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_47.md
new file mode 100644
index 000000000000..062f91b0b4cf
--- /dev/null
+++ b/Packs/MicrosoftGraphGroups/ReleaseNotes/1_1_47.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Groups
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphGroups/pack_metadata.json b/Packs/MicrosoftGraphGroups/pack_metadata.json
index 31febfeeb627..c24e20742c97 100644
--- a/Packs/MicrosoftGraphGroups/pack_metadata.json
+++ b/Packs/MicrosoftGraphGroups/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Groups",
"description": "Microsoft Graph Groups enables you to create and manage different types of groups and group functionality according to your requirements.",
"support": "xsoar",
- "currentVersion": "1.1.45",
+ "currentVersion": "1.1.47",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_49.md b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_49.md
new file mode 100644
index 000000000000..b4c79b53ab9d
--- /dev/null
+++ b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_49.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Active Directory Identity And Access
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_50.md b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_50.md
new file mode 100644
index 000000000000..96ed7ec9d656
--- /dev/null
+++ b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_50.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Identity And Access
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_51.md b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_51.md
new file mode 100644
index 000000000000..3a9af2ea5e3a
--- /dev/null
+++ b/Packs/MicrosoftGraphIdentityandAccess/ReleaseNotes/1_2_51.md
@@ -0,0 +1,3 @@
+## Microsoft Graph Identity and Access
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphIdentityandAccess/pack_metadata.json b/Packs/MicrosoftGraphIdentityandAccess/pack_metadata.json
index 945033175be3..05e5d54eeb8b 100644
--- a/Packs/MicrosoftGraphIdentityandAccess/pack_metadata.json
+++ b/Packs/MicrosoftGraphIdentityandAccess/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Identity and Access",
"description": "Use this pack to manage roles and members in Microsoft.",
"support": "xsoar",
- "currentVersion": "1.2.48",
+ "currentVersion": "1.2.51",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/MicrosoftGraphListener_test.py b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/MicrosoftGraphListener_test.py
index 3fd6b3066a67..3a81b5319448 100644
--- a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/MicrosoftGraphListener_test.py
+++ b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/MicrosoftGraphListener_test.py
@@ -1291,6 +1291,6 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20{Scopes.graph}' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = MicrosoftGraphListener.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/README.md b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/README.md
index ca4d3ff96771..4d306082babd 100644
--- a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/README.md
+++ b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphListener/README.md
@@ -3,9 +3,11 @@ This integration was integrated and tested with version 1.0 of Microsoft Graph M
## Fetch Incidents
+
The integration imports email messages from the destination folder in the target mailbox as incidents. If the message contains any attachments, they are uploaded to the War Room as files. If the attachment is an email (item attachment), Cortex XSOAR fetches information about the attached email and downloads all of its attachments (if there are any) as files. To use Fetch incidents, configure a new instance and select the Fetches incidents option in the instance settings.
## OData Usage
+
The OData parameter can be used to create different queries for the `msgraph-mail-list-emails` and `msgraph-mail-get-email` commands. Please see [OData Docs](https://docs.microsoft.com/en-us/graph/query-parameters) for detailed information.
Examples:
!msgraph-mail-list-emails odata="$select=from"
@@ -16,22 +18,32 @@ Note:
The query parameter `$filter` is not supported when using the `search` parameter.
## Authentication
+
For more details about the authentication used in this integration, see [Microsoft Integrations - Authentication](https://xsoar.pan.dev/docs/reference/articles/microsoft-integrations---authentication).
-Note: For this integration, you cannot use a "Shared mailbox" regardless of the authentication method used.
## Email Attachments Limitations
+
* The maximum attachment size to be sent in an email can be 150-MB. [large-attachments](https://docs.microsoft.com/en-us/graph/outlook-large-attachments?tabs=http)
* The larger the attachment, the longer it would take for a command that supports adding attachments to run.
* Requires the permission of Mail.ReadWrite (Application) - to send attachments > 3mb
* When sending mails with large attachments, it could take up to 5 minutes for the mail to actually be sent.
### Required Permissions
+
The following permissions are required for all commands:
-- Mail.ReadWrite - Delegated
-- Mail.Send - Delegated
-- User.Read - Delegated
-- MailboxSettings.ReadWrite - Delegated
+
+* Mail.ReadWrite - Delegated
+* Mail.Send - Delegated
+* User.Read - Delegated
+* MailboxSettings.ReadWrite - Delegated
+
+The following permissions are required for Shared Mailbox:
+
+* Mail.Read.Shared
+* Mail.ReadBasic.Shared
+* Mail.ReadWrite.Shared
+* Mail.Send.Shared
## Configure Microsoft Graph Mail Single User on Cortex XSOAR
diff --git a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/README.md b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/README.md
index b02c2573c254..541a4cc8a247 100644
--- a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/README.md
+++ b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/README.md
@@ -1,6 +1,8 @@
Microsoft Graph lets your app get authorized access to a user's Outlook mail data in a personal or organization account.
This integration was integrated and tested with version v1 of Microsoft Graph.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure O365 Outlook Mail (Using Graph API) on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_8.md b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_8.md
new file mode 100644
index 000000000000..248367be3c0f
--- /dev/null
+++ b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_8.md
@@ -0,0 +1,12 @@
+
+#### Integrations
+
+##### O365 Outlook Mail (Using Graph API)
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Graph Mail Single User
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_9.md b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_9.md
new file mode 100644
index 000000000000..a4778485f197
--- /dev/null
+++ b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_9.md
@@ -0,0 +1,10 @@
+
+#### Integrations
+
+##### Microsoft Graph Mail Single User
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
+
+##### O365 Outlook Mail (Using Graph API)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphMail/pack_metadata.json b/Packs/MicrosoftGraphMail/pack_metadata.json
index c0a892885884..13a577face96 100644
--- a/Packs/MicrosoftGraphMail/pack_metadata.json
+++ b/Packs/MicrosoftGraphMail/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Mail",
"description": "Microsoft Graph lets your app get authorized access to a user's Outlook mail data in a personal or organization account.",
"support": "xsoar",
- "currentVersion": "1.6.7",
+ "currentVersion": "1.6.9",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "MicrosoftGraphMail"
}
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_11.md b/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_11.md
new file mode 100644
index 000000000000..d3ea06471da3
--- /dev/null
+++ b/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_11.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Graph Search
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_12.md b/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_12.md
new file mode 100644
index 000000000000..97a079160ff4
--- /dev/null
+++ b/Packs/MicrosoftGraphSearch/ReleaseNotes/1_0_12.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Graph Search
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphSearch/pack_metadata.json b/Packs/MicrosoftGraphSearch/pack_metadata.json
index 340baeb63a7c..5bc670197548 100644
--- a/Packs/MicrosoftGraphSearch/pack_metadata.json
+++ b/Packs/MicrosoftGraphSearch/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Search",
"description": "Use the Microsoft Search API in Microsoft Graph to search content stored in OneDrive or SharePoint: files, folders, lists, list items, or sites.",
"support": "community",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.0.12",
"author": "randomizerxd",
"url": "",
"email": "",
diff --git a/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_16.md b/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_16.md
new file mode 100644
index 000000000000..1322d9c25228
--- /dev/null
+++ b/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_16.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Graph Security
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_17.md b/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_17.md
new file mode 100644
index 000000000000..e46c712e736c
--- /dev/null
+++ b/Packs/MicrosoftGraphSecurity/ReleaseNotes/2_2_17.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Graph Security
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphSecurity/pack_metadata.json b/Packs/MicrosoftGraphSecurity/pack_metadata.json
index 283d1b93313c..517f960c7832 100644
--- a/Packs/MicrosoftGraphSecurity/pack_metadata.json
+++ b/Packs/MicrosoftGraphSecurity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph Security",
"description": "Unified gateway to security insights - all from a unified Microsoft Graph\n Security API.",
"support": "xsoar",
- "currentVersion": "2.2.15",
+ "currentVersion": "2.2.17",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_4.md b/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_4.md
new file mode 100644
index 000000000000..42fa13c0c6ed
--- /dev/null
+++ b/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_4.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### O365 Teams (Using Graph API)
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_5.md b/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_5.md
new file mode 100644
index 000000000000..4ce303bd418b
--- /dev/null
+++ b/Packs/MicrosoftGraphTeams/ReleaseNotes/1_1_5.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### O365 Teams (Using Graph API)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphTeams/pack_metadata.json b/Packs/MicrosoftGraphTeams/pack_metadata.json
index 8b22ad9ca4cf..13cd0ba52c42 100644
--- a/Packs/MicrosoftGraphTeams/pack_metadata.json
+++ b/Packs/MicrosoftGraphTeams/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "MicrosoftGraphTeams",
"description": "O365 Teams (Using Graph API) gives you authorized access to a user’s Teams enabling you to facilitate communication through teams as that user, or read conversations and/or messages of that user.",
"support": "community",
- "currentVersion": "1.1.3",
+ "currentVersion": "1.1.5",
"author": "Joachim Bockland",
"url": "",
"email": "",
diff --git a/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml b/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml
index 391e2783c4b3..b7d70f8c5f22 100644
--- a/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml
+++ b/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml
@@ -618,7 +618,7 @@ script:
- description: Run this command if for some reason you need to rerun the authentication process.
name: msgraph-user-auth-reset
arguments: []
- dockerimage: demisto/crypto:1.0.0.86361
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_test.py b/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_test.py
index 73eadac0d7a3..40b1bf9c233a 100644
--- a/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_test.py
+++ b/Packs/MicrosoftGraphUser/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_test.py
@@ -95,7 +95,7 @@ def test_get_user_command_url_saved_chars(mocker):
http_mock = mocker.patch.object(BaseClient, '_http_request')
mocker.patch.object(MicrosoftClient, 'get_access_token')
hr, _, _ = get_user_command(client, {'user': user_name})
- assert 'users/dbot%5E' == http_mock.call_args[1]["url_suffix"]
+ assert http_mock.call_args[1]["url_suffix"] == 'users/dbot%5E'
def test_get_unsupported_chars_in_user():
@@ -341,7 +341,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20{Scopes.graph}' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = MicrosoftGraphUser.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_34.md b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_34.md
new file mode 100644
index 000000000000..5bee28e91bee
--- /dev/null
+++ b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_34.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Azure Active Directory Users
+- Updated the Docker image to: *demisto/crypto:1.0.0.96042*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_35.md b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_35.md
new file mode 100644
index 000000000000..71c21796db72
--- /dev/null
+++ b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_35.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Azure Active Directory Users
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_36.md b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_36.md
new file mode 100644
index 000000000000..eab064bc7e58
--- /dev/null
+++ b/Packs/MicrosoftGraphUser/ReleaseNotes/1_5_36.md
@@ -0,0 +1,3 @@
+## Microsoft Graph User
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/MicrosoftGraphUser/pack_metadata.json b/Packs/MicrosoftGraphUser/pack_metadata.json
index fe4ac05518a9..eac7fd5d6804 100644
--- a/Packs/MicrosoftGraphUser/pack_metadata.json
+++ b/Packs/MicrosoftGraphUser/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Graph User",
"description": "Use the Microsoft Graph integration to connect to and interact with user objects on Microsoft Platforms.",
"support": "xsoar",
- "currentVersion": "1.5.33",
+ "currentVersion": "1.5.36",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity.yml b/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity.yml
index fd28172d9b6c..e83bdfe199be 100644
--- a/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity.yml
+++ b/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity.yml
@@ -290,7 +290,7 @@ script:
- description: Run this command if for some reason you need to rerun the authentication process.
name: ms-management-activity
arguments: []
- dockerimage: demisto/auth-utils:1.0.0.87472
+ dockerimage: demisto/auth-utils:1.0.0.96154
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity_test.py b/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity_test.py
index e6d216845304..609e5bf323a0 100644
--- a/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity_test.py
+++ b/Packs/MicrosoftManagementActivity/Integrations/MicrosoftManagementActivity/MicrosoftManagementActivity_test.py
@@ -733,7 +733,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://management.azure.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = MicrosoftManagementActivity.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_42.md b/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_42.md
new file mode 100644
index 000000000000..a39121752305
--- /dev/null
+++ b/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_42.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Microsoft Management Activity API (O365 Azure Events)
+- Updated the Docker image to: *demisto/auth-utils:1.0.0.96154*.
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_43.md b/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_43.md
new file mode 100644
index 000000000000..53bc5fd68ae2
--- /dev/null
+++ b/Packs/MicrosoftManagementActivity/ReleaseNotes/1_3_43.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Management Activity API (O365 Azure Events)
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftManagementActivity/pack_metadata.json b/Packs/MicrosoftManagementActivity/pack_metadata.json
index 287a0c099614..76763c4a3855 100644
--- a/Packs/MicrosoftManagementActivity/pack_metadata.json
+++ b/Packs/MicrosoftManagementActivity/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Management Activity API (O365/Azure Events)",
"description": "An integration for Microsoft's management activity API, which enables you to fetch content records and manage your subscriptions.",
"support": "xsoar",
- "currentVersion": "1.3.41",
+ "currentVersion": "1.3.43",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams.py b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams.py
index 071e6447427e..7e7752400001 100644
--- a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams.py
+++ b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams.py
@@ -2753,7 +2753,7 @@ def generate_login_url_command():
" and the bot is added to a team.")
login_url = f'https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
f'response_type=code&scope=offline_access%20https://graph.microsoft.com/.default' \
- f'&client_id={BOT_ID}&redirect_uri={REDIRECT_URI}&prompt=consent'
+ f'&client_id={BOT_ID}&redirect_uri={REDIRECT_URI}'
result_msg = f"""### Authorization instructions
1. Click on the [login URL]({login_url}) to sign in and grant Cortex XSOAR permissions for your Azure Service Management.
diff --git a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams_test.py b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams_test.py
index a0b188c14a0f..ae15fdca6b36 100644
--- a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams_test.py
+++ b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/MicrosoftTeams_test.py
@@ -2276,7 +2276,7 @@ def test_generate_login_url(mocker):
# assert
expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' \
'response_type=code&scope=offline_access%20https://graph.microsoft.com/.default' \
- f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)'
+ f'&client_id={client_id}&redirect_uri={redirect_uri})'
res = MicrosoftTeams.return_results.call_args[0][0].readable_output
assert expected_url in res
diff --git a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/README.md b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/README.md
index 5a99b223214e..292751f61ba7 100644
--- a/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/README.md
+++ b/Packs/MicrosoftTeams/Integrations/MicrosoftTeams/README.md
@@ -47,7 +47,7 @@ In order to verify that the messaging endpoint is open as expected, you can surf
- microsoft.com
- botframework.com
- microsoftonline.com
-
+When [installing the bot in Microsoft Teams](#add-the-demisto-bot-to-a-team), according to [Microsoft](https://learn.microsoft.com/en-us/answers/questions/1600179/ms-teams-custom-app-takes-very-long-time-to-show-u), it usually takes up to 3-5 business days for the app to reflect in the "built for your org" section.
## Migration from Cortex XSOAR 6 to Cortex XSOAR 8 and Cortex XSIAM.
@@ -231,6 +231,8 @@ Note: The [microsoft-teams-ring-user](https://learn.microsoft.com/en-us/graph/ap
- Chat.Create
- TeamsAppInstallation.ReadWriteForChat
- TeamsAppInstallation.ReadWriteSelfForChat
+ - User.Read.All
+ - AppCatalog.Read.All
5. Verify that all permissions were added, and click **Grant admin consent for Demisto**.
6. When prompted to verify granting permissions, click **Yes**, and verify that permissions were successfully added.
7. Click **Expose an API** and add **Application ID URI**
@@ -299,7 +301,9 @@ Note: The [microsoft-teams-ring-user](https://learn.microsoft.com/en-us/graph/ap
### Add the Demisto Bot to a Team
-- Note: The following needs to be done after configuring the integration on Cortex XSOAR/Cortex XSIAM (the previous step).
+**Notes:**
+- The following needs to be done after configuring the integration on Cortex XSOAR/Cortex XSIAM (the previous step).
+- According to [Microsoft](https://learn.microsoft.com/en-us/answers/questions/1600179/ms-teams-custom-app-takes-very-long-time-to-show-u) it usually takes up to 3-5 business days for the app to reflect in the "built for your org" section.
1. Download the ZIP file located at the bottom of this article.
2. Uncompress the ZIP file. You should see 3 files (`manifest.json`, `color.png` and `outline.png`).
diff --git a/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement.yml b/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement.yml
index 86cbd5b5ce27..4b7f3f815d4b 100644
--- a/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement.yml
+++ b/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement.yml
@@ -679,7 +679,7 @@ script:
- contextPath: MicrosoftTeams.Team.description
description: An optional description for the group.
type: String
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.96042
runonce: false
script: '-'
subtype: python3
diff --git a/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement_test.py b/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement_test.py
index 6441e07ba2f8..56a994886162 100644
--- a/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement_test.py
+++ b/Packs/MicrosoftTeams/Integrations/MicrosoftTeamsManagement/MicrosoftTeamsManagement_test.py
@@ -1,4 +1,3 @@
-import io
import json
import pytest
@@ -19,7 +18,7 @@ def client(mocker):
def load_test_data(path):
- with io.open(path, mode='r', encoding='utf-8') as f:
+ with open(path, encoding='utf-8') as f:
return json.loads(f.read())
diff --git a/Packs/MicrosoftTeams/ReleaseNotes/1_4_60.md b/Packs/MicrosoftTeams/ReleaseNotes/1_4_60.md
new file mode 100644
index 000000000000..f649404ca00d
--- /dev/null
+++ b/Packs/MicrosoftTeams/ReleaseNotes/1_4_60.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+
+##### Microsoft Teams
+
+- Updated the **microsoft-teams-generate-login-url** command to better handle the Authorization Code flow in the supported integrations.
+
+##### Microsoft Teams Management
+
+- Added support for Microsoft Defender 365 Endpoint in the Microsoft API Module.
+- Updated the **MicrosoftApiModule** to better handle the Authorization Code flow in the supported integrations.
\ No newline at end of file
diff --git a/Packs/MicrosoftTeams/ReleaseNotes/1_4_61.md b/Packs/MicrosoftTeams/ReleaseNotes/1_4_61.md
new file mode 100644
index 000000000000..2ddee38825ff
--- /dev/null
+++ b/Packs/MicrosoftTeams/ReleaseNotes/1_4_61.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Microsoft Teams Management
+
+- Fixed an issue where the GCC endpoints were incorrect in the supported integrations.
diff --git a/Packs/MicrosoftTeams/pack_metadata.json b/Packs/MicrosoftTeams/pack_metadata.json
index 607d5dc2e90b..4d610b7f7c76 100644
--- a/Packs/MicrosoftTeams/pack_metadata.json
+++ b/Packs/MicrosoftTeams/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Teams",
"description": "Send messages and notifications to your team members.",
"support": "xsoar",
- "currentVersion": "1.4.59",
+ "currentVersion": "1.4.61",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/MicrosoftWindowsSysmon/ModelingRules/MicrosoftWindowsSysmon/MicrosoftWindowsSysmon.xif b/Packs/MicrosoftWindowsSysmon/ModelingRules/MicrosoftWindowsSysmon/MicrosoftWindowsSysmon.xif
index bb65e5a79655..26c78da69f05 100644
--- a/Packs/MicrosoftWindowsSysmon/ModelingRules/MicrosoftWindowsSysmon/MicrosoftWindowsSysmon.xif
+++ b/Packs/MicrosoftWindowsSysmon/ModelingRules/MicrosoftWindowsSysmon/MicrosoftWindowsSysmon.xif
@@ -72,12 +72,12 @@ alter
clean_eventdata_details = lowercase(get_eventdata_Details),
clean_target_localip_10 = incidr(check_avilable_ip,"10.0.0.0/8"),
clean_target_localip_127 = incidr(check_avilable_ip,"127.0.0.0/8"),
- clean_target_localip_169 = incidr(check_avilable_ip,"169.254.0.0/168"),
+ clean_target_localip_169 = incidr(check_avilable_ip,"169.254.0.0/16"),
clean_target_localip_172 = incidr(check_avilable_ip,"172.16.0.0/12"),
clean_target_localip_192 = incidr(check_avilable_ip,"192.168.0.0/16"),
clean_source_localip_10 = incidr(check_avilable_ip2,"10.0.0.0/8"),
clean_source_localip_127 = incidr(check_avilable_ip2,"127.0.0.0/8"),
- clean_source_localip_169 = incidr(check_avilable_ip,"169.254.0.0/168"),
+    clean_source_localip_169 = incidr(check_avilable_ip2,"169.254.0.0/16"),
clean_source_localip_172 = incidr(check_avilable_ip2,"172.16.0.0/12"),
clean_source_localip_192 = incidr(check_avilable_ip2,"192.168.0.0/16")
| alter
diff --git a/Packs/MicrosoftWindowsSysmon/ReleaseNotes/1_0_1.md b/Packs/MicrosoftWindowsSysmon/ReleaseNotes/1_0_1.md
new file mode 100644
index 000000000000..cf356b6896a6
--- /dev/null
+++ b/Packs/MicrosoftWindowsSysmon/ReleaseNotes/1_0_1.md
@@ -0,0 +1,6 @@
+
+#### Modeling Rules
+
+##### Microsoft Windows Sysmon Modeling Rule
+
+Updated the Modeling Rule logic for 169.254.0.0 local IP range.
diff --git a/Packs/MicrosoftWindowsSysmon/pack_metadata.json b/Packs/MicrosoftWindowsSysmon/pack_metadata.json
index a6822b959078..d87d4059df5c 100644
--- a/Packs/MicrosoftWindowsSysmon/pack_metadata.json
+++ b/Packs/MicrosoftWindowsSysmon/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Sysmon",
"description": "System Monitor (Sysmon) is a Windows system service and device driver that, once installed on a system, remains resident across system reboots to monitor and log system activity to the Windows event log.",
"support": "xsoar",
- "currentVersion": "1.0.0",
+ "currentVersion": "1.0.1",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Mimecast/Integrations/MimecastEventCollector/README.md b/Packs/Mimecast/Integrations/MimecastEventCollector/README.md
index 5c308c44b11a..ab526d244537 100644
--- a/Packs/Mimecast/Integrations/MimecastEventCollector/README.md
+++ b/Packs/Mimecast/Integrations/MimecastEventCollector/README.md
@@ -1,3 +1,4 @@
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
## Configure Mimecast Event Collector on Cortex XSOAR
diff --git a/Packs/Mimecast/pack_metadata.json b/Packs/Mimecast/pack_metadata.json
index cfb9c2b1c635..1c086810c428 100644
--- a/Packs/Mimecast/pack_metadata.json
+++ b/Packs/Mimecast/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Mimecast Event Collector"
}
\ No newline at end of file
diff --git a/Packs/MobileIronUEM/Integrations/MobileIronCORE/README.md b/Packs/MobileIronUEM/Integrations/MobileIronCORE/README.md
index 4a613eb72362..5c0a9002512a 100644
--- a/Packs/MobileIronUEM/Integrations/MobileIronCORE/README.md
+++ b/Packs/MobileIronUEM/Integrations/MobileIronCORE/README.md
@@ -2,6 +2,8 @@
This integration was created and tested with version *11.0.0* of MobileIronCORE
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## MobileIron Core - Getting Started
1. Log in to the MobileIron Core Admin console.
diff --git a/Packs/MobileIronUEM/pack_metadata.json b/Packs/MobileIronUEM/pack_metadata.json
index 02f798ff8012..f9a960e3dda4 100644
--- a/Packs/MobileIronUEM/pack_metadata.json
+++ b/Packs/MobileIronUEM/pack_metadata.json
@@ -25,5 +25,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "MobileIronCORE"
}
\ No newline at end of file
diff --git a/Packs/Ncurion/Integrations/Ncurion/Ncurion.py b/Packs/Ncurion/Integrations/Ncurion/Ncurion.py
index 6ce46987a9bf..74923048ab5b 100644
--- a/Packs/Ncurion/Integrations/Ncurion/Ncurion.py
+++ b/Packs/Ncurion/Integrations/Ncurion/Ncurion.py
@@ -2,7 +2,6 @@
from CommonServerPython import * # noqa: F401
import json
import traceback
-from typing import Dict, List, Union, Tuple
import requests
from datetime import datetime
import time
@@ -45,7 +44,7 @@ def loglist(base_url, access_token, refresh_token, headers1):
return log_list
-def raw_response_to_context_rules(items: Union[Dict, List]) -> Union[Dict, List]:
+def raw_response_to_context_rules(items: dict | list) -> dict | list:
if isinstance(items, list):
return [raw_response_to_context_rules(item) for item in items]
return {
@@ -79,8 +78,8 @@ def get_log_list(base_url, username, password):
return_results(results)
-def fetch_incidents(base_url, username, password, last_run: Dict[str, int],
- first_fetch_time: int) -> Tuple[Dict[str, int], List[dict]]:
+def fetch_incidents(base_url, username, password, last_run: dict[str, int],
+ first_fetch_time: int) -> tuple[dict[str, int], list[dict]]:
access_token, refresh_token, headers1 = login(base_url, username, password)
log_list = loglist(base_url, access_token, refresh_token, headers1)
log_server_id = [e["id"] for e in log_list if e["is_connected"] is True]
diff --git a/Packs/Ncurion/Integrations/Ncurion/Ncurion.yml b/Packs/Ncurion/Integrations/Ncurion/Ncurion.yml
index 132522d25357..fdd2348b294b 100644
--- a/Packs/Ncurion/Integrations/Ncurion/Ncurion.yml
+++ b/Packs/Ncurion/Integrations/Ncurion/Ncurion.yml
@@ -50,10 +50,10 @@ name: Ncurion
script:
commands:
- arguments: []
- description: Get the Ncurion Log server list
+ description: Get the Ncurion Log server list.
name: ncurion-get-log-list
outputs: []
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
isFetchSamples: true
isfetch: true
script: ''
diff --git a/Packs/Ncurion/ReleaseNotes/1_0_2.md b/Packs/Ncurion/ReleaseNotes/1_0_2.md
new file mode 100644
index 000000000000..7e9286bc30f2
--- /dev/null
+++ b/Packs/Ncurion/ReleaseNotes/1_0_2.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Ncurion
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/Ncurion/pack_metadata.json b/Packs/Ncurion/pack_metadata.json
index cb365dcc7231..1171b61a3447 100644
--- a/Packs/Ncurion/pack_metadata.json
+++ b/Packs/Ncurion/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Ncurion",
"description": "Ncurion is a container-based, standardized detection engine that protects the nature of intrusion detection.",
"support": "partner",
- "currentVersion": "1.0.1",
+ "currentVersion": "1.0.2",
"author": "Ncurity",
"url": "www.ncurity.com",
"email": "paloalto@ncurity.com",
@@ -20,4 +20,4 @@
"xsoar",
"marketplacev2"
]
-}
+}
\ No newline at end of file
diff --git a/Packs/Netskope/Integrations/NetskopeEventCollector/README.md b/Packs/Netskope/Integrations/NetskopeEventCollector/README.md
index 07d5b917aec1..b9168ef5976c 100644
--- a/Packs/Netskope/Integrations/NetskopeEventCollector/README.md
+++ b/Packs/Netskope/Integrations/NetskopeEventCollector/README.md
@@ -1,3 +1,5 @@
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Netskope Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/Netskope/pack_metadata.json b/Packs/Netskope/pack_metadata.json
index 077462c79aa4..0d9a46419efd 100644
--- a/Packs/Netskope/pack_metadata.json
+++ b/Packs/Netskope/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "NetskopeEventCollector"
}
\ No newline at end of file
diff --git a/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.py b/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.py
index 2f39a9f8b9df..930b15c053c6 100644
--- a/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.py
+++ b/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.py
@@ -75,7 +75,7 @@ def get_urls(self) -> List:
return result
def get_ips(self, params: Dict[str, Any], limit: int) -> List:
- credentials: Dict = params.get('credentials', dict())
+ credentials: Dict = params.get('credentials', {})
global_username = credentials.get('identifier')
global_password = credentials.get('password')
global_usrn = params.get('usrn')
diff --git a/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.yml b/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.yml
index d85689699926..2692a4a6ec16 100644
--- a/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.yml
+++ b/Packs/NucleonCyber/Integrations/NucleonCyberFeed/NucleonCyberFeed.yml
@@ -129,13 +129,13 @@ script:
- contextPath: NucleonCyber.Indicators.exp
description: Indicators exp.
type: String
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
feed: true
runonce: false
script: '-'
subtype: python3
type: python
fromversion: 6.0.0
-description: This is the NucleonCyber Feed integration
+description: This is the NucleonCyber Feed integration.
tests:
- No tests (auto formatted)
diff --git a/Packs/NucleonCyber/ReleaseNotes/1_0_4.md b/Packs/NucleonCyber/ReleaseNotes/1_0_4.md
new file mode 100644
index 000000000000..33f46b680159
--- /dev/null
+++ b/Packs/NucleonCyber/ReleaseNotes/1_0_4.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### NucleonCyberFeed
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/NucleonCyber/pack_metadata.json b/Packs/NucleonCyber/pack_metadata.json
index a17909a4e6ce..36c2188d50e2 100644
--- a/Packs/NucleonCyber/pack_metadata.json
+++ b/Packs/NucleonCyber/pack_metadata.json
@@ -2,15 +2,14 @@
"name": "NucleonCyber",
"description": "NucleonCyber indicator data feed ",
"support": "partner",
- "currentVersion": "1.0.3",
+ "currentVersion": "1.0.4",
"author": "NucleonCyber",
"url": "",
"email": "support@nucleon.sh",
"categories": [
"Data Enrichment & Threat Intelligence"
],
- "tags": [
- ],
+ "tags": [],
"useCases": [],
"keywords": [],
"githubUser": [],
diff --git a/Packs/OctoxLabs/.pack-ignore b/Packs/OctoxLabs/.pack-ignore
index e69de29bb2d1..9e9f85aab1ad 100644
--- a/Packs/OctoxLabs/.pack-ignore
+++ b/Packs/OctoxLabs/.pack-ignore
@@ -0,0 +1,8 @@
+[known_words]
+Octox
+hostname
+Dict
+ldap
+URLs
+DBot
+Boolean
\ No newline at end of file
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.py b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.py
index 6c036ac4c4ca..df459c9d3065 100644
--- a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.py
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.py
@@ -32,6 +32,17 @@ def run_command(
"octoxlabs-get-queries": get_queries,
"octoxlabs-get-query-by-id": get_query_by_id,
"octoxlabs-get-query-by-name": get_query_by_name,
+ "octoxlabs-get-companies": get_companies,
+ "octoxlabs-get-company-by-id": get_company_by_id,
+ "octoxlabs-get-company-by-name": get_company_by_name,
+ "octoxlabs-get-domains": get_domains,
+ "octoxlabs-get-domain-by-id": get_domain_by_id,
+ "octoxlabs-get-domain-by-domain-name": get_domain_by_domain_name,
+ "octoxlabs-get-users": get_users,
+ "octoxlabs-get-user-by-id": get_user_by_id,
+ "octoxlabs-get-user-by-username": get_user_by_username,
+ "octoxlabs-get-groups": get_groups,
+ "octoxlabs-get-permissions": get_permissions,
}
command_function: Optional[Callable] = commands.get(command_name, None)
if command_function:
@@ -256,6 +267,246 @@ def get_query_by_name(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
)
+def get_companies(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ count, companies = octox.get_companies(
+ page=args.get("page", 1),
+ search=args.get("search", ""),
+ size=args.get("size", 20),
+ )
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Companies",
+ outputs={
+ "count": count,
+ "results": [
+ convert_to_json(
+ c,
+ keys=[
+ "id",
+ "name",
+ "domain",
+ "is_active",
+ ],
+ )
+ for c in companies
+ ],
+ },
+ )
+
+
+def get_company_by_id(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ company = octox.get_company_by_id(company_id=args.get("company_id"))
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Company",
+ outputs=convert_to_json(
+ obj=company,
+ keys=[
+ "id",
+ "name",
+ "domain",
+ "is_active",
+ ],
+ ),
+ )
+
+
+def get_company_by_name(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ company = octox.get_company_by_name(company_name=args.get("company_name"))
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Company",
+ outputs=convert_to_json(
+ obj=company,
+ keys=[
+ "id",
+ "name",
+ "domain",
+ "is_active",
+ ],
+ ),
+ )
+
+
+def get_domains(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ count, domains = octox.get_domains(
+ page=args.get("page", 1),
+ search=args.get("search", ""),
+ size=args.get("size", 20),
+ )
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Domains",
+ outputs={
+ "count": count,
+ "results": [
+ convert_to_json(
+ d,
+ keys=[
+ "id",
+ "domain",
+ "tenant_name",
+ "tenant",
+ ],
+ )
+ for d in domains
+ ],
+ },
+ )
+
+
+def get_domain_by_id(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ domain = octox.get_domain_by_id(domain_id=args.get("domain_id"))
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Domain",
+ outputs=convert_to_json(
+ obj=domain,
+ keys=[
+ "id",
+ "domain",
+ "tenant_name",
+ "tenant",
+ ],
+ ),
+ )
+
+
+def get_domain_by_domain_name(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ domain = octox.get_domains_by_domain_name(domain_name=args.get("domain_name"))
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Domain",
+ outputs=convert_to_json(
+ obj=domain,
+ keys=[
+ "id",
+ "domain",
+ "tenant_name",
+                "tenant", "is_primary",
+ ],
+ ),
+ )
+
+
+def get_users(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ count, users = octox.get_users(
+ page=args.get("page", 1),
+ search=args.get("search", ""),
+ size=args.get("size", 20),
+ )
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Users",
+ outputs={
+ "count": count,
+ "results": [
+ convert_to_json(
+ u,
+ keys=[
+ "id",
+ "name",
+ "email",
+ "username",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_ldap",
+ "groups",
+ ],
+ )
+ for u in users
+ ],
+ },
+ )
+
+
+def get_user_by_id(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ user = octox.get_user_by_id(user_id=args.get("user_id"))
+ return CommandResults(
+ outputs_prefix="OctoxLabs.User",
+ outputs=convert_to_json(
+ obj=user,
+ keys=[
+ "id",
+ "name",
+ "email",
+ "username",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_ldap",
+ "groups",
+ ],
+ ),
+ )
+
+
+def get_user_by_username(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ user = octox.get_user_by_username(username=args.get("username"))
+ return CommandResults(
+ outputs_prefix="OctoxLabs.User",
+ outputs=convert_to_json(
+ obj=user,
+ keys=[
+ "id",
+ "name",
+ "email",
+ "username",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_ldap",
+ "groups",
+ ],
+ ),
+ )
+
+
+def get_groups(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ count, groups = octox.get_groups(
+ page=args.get("page", 1),
+ search=args.get("search", ""),
+ size=args.get("size", 20),
+ )
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Groups",
+ outputs={
+ "count": count,
+ "results": [
+ convert_to_json(
+ g,
+ keys=["id", "name", "users_count"],
+ )
+ for g in groups
+ ],
+ },
+ )
+
+
+def get_permissions(octox: OctoxLabs, args: Dict[str, Any]) -> CommandResults:
+ count, permissions = octox.get_permissions(
+ page=args.get("page", 1),
+ search=args.get("search", ""),
+ size=args.get("size", 20),
+ )
+
+ return CommandResults(
+ outputs_prefix="OctoxLabs.Permissions",
+ outputs={
+ "count": count,
+ "results": [
+ convert_to_json(
+ p,
+ keys=["id", "name", "app"],
+ )
+ for p in permissions
+ ],
+ },
+ )
+
+
""" MAIN FUNCTION """
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.yml b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.yml
index 4c27bd859f28..bff736a12b76 100644
--- a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.yml
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs.yml
@@ -288,11 +288,308 @@ script:
- contextPath: OctoxLabs.Queries.results.is_temporary
description: "Query is temporary."
type: Boolean
+ - name: octoxlabs-get-companies
+ description: 'Fetch your Companies.'
+ arguments:
+ - default:
+ name: page
+ description: 'Company list page.'
+ - default:
+ name: search
+ description: 'Search text.'
+ - default:
+ name: size
+ description: 'Company list size.'
+ outputs:
+ - contextPath: OctoxLabs.Companies.count
+ description: 'Companies count.'
+ type: Number
+ - contextPath: OctoxLabs.Companies.results.id
+ description: 'Company id.'
+ type: Number
+ - contextPath: OctoxLabs.Companies.results.name
+ description: 'Company name.'
+ type: String
+ - contextPath: OctoxLabs.Companies.results.domain
+ description: 'Company domain.'
+ type: String
+ - contextPath: OctoxLabs.Companies.results.is_active
+ description: 'Company is active.'
+ type: Boolean
+ - name: octoxlabs-get-company-by-id
+ description: 'Fetch your Company by id.'
+ arguments:
+ - required: true
+ name: company_id
+ description: 'Company id.'
+ outputs:
+ - contextPath: OctoxLabs.Company.results.id
+ description: 'Company id.'
+ type: Number
+ - contextPath: OctoxLabs.Company.results.name
+ description: 'Company name.'
+ type: String
+ - contextPath: OctoxLabs.Company.results.domain
+ description: 'Company domain.'
+ type: String
+ - contextPath: OctoxLabs.Company.results.is_active
+ description: 'Company is active.'
+ type: Boolean
+ - name: octoxlabs-get-company-by-name
+ description: 'Fetch your Company by name.'
+ arguments:
+ - required: true
+ name: company_name
+ description: 'Company name.'
+ outputs:
+ - contextPath: OctoxLabs.Company.count
+ description: 'Companies count.'
+ type: Number
+ - contextPath: OctoxLabs.Company.results.id
+ description: 'Company id.'
+ type: Number
+ - contextPath: OctoxLabs.Company.results.name
+ description: 'Company name.'
+ type: String
+ - contextPath: OctoxLabs.Company.results.domain
+ description: 'Company domain.'
+ type: String
+ - contextPath: OctoxLabs.Company.results.is_active
+ description: 'Company is active.'
+ type: Boolean
+ - name: octoxlabs-get-domains
+ description: 'Fetch your Domains.'
+ arguments:
+ - default:
+ name: page
+ description: 'Domain list page.'
+ - default:
+ name: search
+ description: 'Search text.'
+ - default:
+ name: size
+ description: 'Domain list size.'
+ outputs:
+ - contextPath: OctoxLabs.Domains.count
+ description: 'Domains count.'
+ type: Number
+ - contextPath: OctoxLabs.Domains.results.id
+ description: 'Domain id.'
+ type: Number
+ - contextPath: OctoxLabs.Domains.results.domain
+ description: 'Domain domain.'
+ type: String
+ - contextPath: OctoxLabs.Domains.results.tenant_name
+ description: 'Domain tenant name.'
+ type: String
+ - contextPath: OctoxLabs.Domains.results.tenant
+ description: 'Domain tenant.'
+ type: Number
+ - name: octoxlabs-get-domain-by-id
+ description: 'Fetch your Domain by id.'
+ arguments:
+ - required: true
+ name: domain_id
+ description: 'Domain id.'
+ outputs:
+ - contextPath: OctoxLabs.Domain.results.id
+ description: 'Domain id.'
+ type: Number
+ - contextPath: OctoxLabs.Domain.results.domain
+ description: 'Domain domain.'
+ type: String
+ - contextPath: OctoxLabs.Domain.results.tenant_name
+ description: 'Domain tenant name.'
+ type: String
+ - contextPath: OctoxLabs.Domain.results.tenant
+ description: 'Domain tenant.'
+ type: Number
+ - name: octoxlabs-get-domain-by-domain-name
+ description: 'Fetch your Domain by Domain name.'
+ arguments:
+ - required: true
+ name: domain_name
+ description: 'Domain name.'
+ outputs:
+ - contextPath: OctoxLabs.Domain.results.id
+ description: 'Domain id.'
+ type: Number
+ - contextPath: OctoxLabs.Domain.results.domain
+ description: 'Domain domain.'
+ type: String
+ - contextPath: OctoxLabs.Domain.results.tenant_name
+ description: 'Domain tenant name.'
+ type: String
+ - contextPath: OctoxLabs.Domain.results.tenant
+ description: 'Domain tenant.'
+ type: Number
+ - name: octoxlabs-get-users
+ description: 'Fetch your Users.'
+ arguments:
+ - default:
+ name: page
+ description: 'User list page.'
+ - default:
+ name: search
+ description: 'Search text.'
+ - default:
+ name: size
+ description: 'User list size.'
+ outputs:
+ - contextPath: OctoxLabs.Users.count
+ description: 'Users count.'
+ type: Number
+ - contextPath: OctoxLabs.Users.results.id
+ description: 'User id.'
+ type: Number
+ - contextPath: OctoxLabs.Users.results.email
+ description: 'User email.'
+ type: String
+ - contextPath: OctoxLabs.Users.results.username
+ description: 'User username.'
+ type: String
+ - contextPath: OctoxLabs.Users.results.name
+ description: 'User name.'
+ type: String
+ - contextPath: OctoxLabs.Users.results.first_name
+ description: 'User first name.'
+ type: String
+ - contextPath: OctoxLabs.Users.results.last_name
+ description: 'User last name.'
+ type: String
+ - contextPath: OctoxLabs.Users.results.is_active
+ description: 'User is active.'
+ type: Boolean
+ - contextPath: OctoxLabs.Users.results.is_ldap
+ description: 'User is ldap.'
+ type: Boolean
+ - contextPath: OctoxLabs.Users.results.groups
+ description: 'List User groups.'
+ type: Unknown
+ - name: octoxlabs-get-user-by-id
+ description: 'Fetch your User by id.'
+ arguments:
+ - required: true
+ name: user_id
+ description: 'User id.'
+ outputs:
+ - contextPath: OctoxLabs.User.results.id
+ description: 'User id.'
+ type: Number
+ - contextPath: OctoxLabs.User.results.email
+ description: 'User email.'
+ type: String
+ - contextPath: OctoxLabs.User.results.username
+ description: 'User username.'
+ type: String
+ - contextPath: OctoxLabs.User.results.name
+ description: 'User name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.first_name
+ description: 'User first name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.last_name
+ description: 'User last name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.is_active
+ description: 'User is active.'
+ type: Boolean
+ - contextPath: OctoxLabs.User.results.is_ldap
+ description: 'User is ldap.'
+ type: Boolean
+ - contextPath: OctoxLabs.User.results.groups
+ description: 'List User groups.'
+ type: Unknown
+ - name: octoxlabs-get-user-by-username
+ description: 'Fetch your Users by username.'
+ arguments:
+ - required: true
+ name: username
+ description: 'User username.'
+ outputs:
+ - contextPath: OctoxLabs.User.results.id
+ description: 'User id.'
+ type: Number
+ - contextPath: OctoxLabs.User.results.email
+ description: 'User email.'
+ type: String
+ - contextPath: OctoxLabs.User.results.username
+ description: 'User username.'
+ type: String
+ - contextPath: OctoxLabs.User.results.name
+ description: 'User name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.first_name
+ description: 'User first name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.last_name
+ description: 'User last name.'
+ type: String
+ - contextPath: OctoxLabs.User.results.is_active
+ description: 'User is active.'
+ type: Boolean
+ - contextPath: OctoxLabs.User.results.is_ldap
+ description: 'User is ldap.'
+ type: Boolean
+    - contextPath: OctoxLabs.User.results.groups
+ description: 'List User groups.'
+ type: Unknown
+ - name: octoxlabs-get-groups
+ description: 'Fetch your Groups.'
+ arguments:
+ - default:
+ name: page
+ description: 'Group list page.'
+ - default:
+ name: search
+ description: 'Search text.'
+ - default:
+ name: size
+ description: 'Group list size.'
+ outputs:
+ - contextPath: OctoxLabs.Groups.count
+ description: 'Groups count.'
+ type: Number
+ - contextPath: OctoxLabs.Groups.results.id
+ description: 'Group id.'
+ type: Number
+ - contextPath: OctoxLabs.Groups.results.name
+ description: 'Group name.'
+ type: String
+ - contextPath: OctoxLabs.Groups.results.users_count
+ description: 'Group users count.'
+ type: Number
+ - name: octoxlabs-get-permissions
+ description: 'Fetch your Permissions.'
+ arguments:
+ - default:
+ name: page
+ description: 'Permission list page.'
+ - default:
+ name: search
+ description: 'Search text.'
+ - default:
+ name: size
+ description: 'Permission list size.'
+ outputs:
+ - contextPath: OctoxLabs.Permissions.count
+ description: 'Permissions count.'
+ type: Number
+ - contextPath: OctoxLabs.Permissions.results.id
+ description: 'Permission id.'
+ type: Number
+ - contextPath: OctoxLabs.Permissions.results.name
+ description: 'Permission name.'
+ type: String
+ - contextPath: OctoxLabs.Permissions.results.app
+ description: 'Permission app.'
+ type: String
+
runonce: false
script: '-'
type: python
subtype: python3
- dockerimage: demisto/octoxlabs:1.0.0.87398
+ dockerimage: demisto/octoxlabs:1.0.0.96081
fromversion: 6.0.0
tests:
- No tests (auto formatted)
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs_test.py b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs_test.py
index 488e8e90bc5d..cfaee19ede2e 100644
--- a/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs_test.py
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/OctoxLabs_test.py
@@ -10,17 +10,18 @@
import io
import json
+
import pytest
import requests_mock as rm
-
from octoxlabs import OctoxLabs
+from OctoxLabs import convert_to_json, run_command
from octoxlabs.exceptions import NotFound
from octoxlabs.models.adapter import Adapter
-from OctoxLabs import convert_to_json, run_command
@pytest.fixture()
-def octox_client() -> OctoxLabs:
+def octox_client(requests_mock) -> OctoxLabs:
+ requests_mock.post("/api/token/token", json={"access": "token"})
return OctoxLabs(ip="octoxlabs.test", token="xsoar")
@@ -170,3 +171,128 @@ def test_get_query_by_name(requests_mock, octox_client):
data = result.outputs
assert data["name"] == "cisco ise machines"
+
+
+def test_get_companies(requests_mock, octox_client):
+ companies_data = util_load_json(path="test_data/get_companies.json")
+ requests_mock.get("/companies/companies", json=companies_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-companies", args={}
+ )
+ first_data = result.outputs
+ assert first_data["count"] == 1
+ assert first_data["results"][0]["name"] == "Octoxlabs"
+
+
+def test_get_company_by_id(requests_mock, octox_client):
+ company_data = util_load_json(path="test_data/get_company.json")
+ requests_mock.get("/companies/companies/1", json=company_data)
+ result = run_command(
+ octox=octox_client,
+ command_name="octoxlabs-get-company-by-id",
+ args={"company_id": 1},
+ )
+ first_data = result.outputs
+ assert first_data["name"] == "Octoxlabs"
+
+
+def test_get_company_by_name(requests_mock, octox_client):
+ company_data = util_load_json(path="test_data/get_companies.json")
+ requests_mock.get("/companies/companies", json=company_data)
+ result = run_command(
+ octox=octox_client,
+ command_name="octoxlabs-get-company-by-name",
+ args={"company_name": "Octoxlabs"},
+ )
+ first_data = result.outputs
+ assert first_data["name"] == "Octoxlabs"
+
+
+def test_get_domains(requests_mock, octox_client):
+ domains_data = util_load_json(path="test_data/get_domains.json")
+ requests_mock.get("/companies/domains", json=domains_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-domains", args={}
+ )
+ first_data = result.outputs
+ assert first_data["count"] == 1
+ assert first_data["results"][0]["tenant_name"] == "Octoxlabs"
+
+
+def test_get_domain_by_id(requests_mock, octox_client):
+ domain_data = util_load_json(path="test_data/get_domain.json")
+ requests_mock.get("/companies/domains/1", json=domain_data)
+ result = run_command(
+ octox=octox_client,
+ command_name="octoxlabs-get-domain-by-id",
+ args={"domain_id": 1},
+ )
+ first_data = result.outputs
+ assert first_data["tenant_name"] == "Octoxlabs"
+
+
+def test_get_domain_by_domain_name(requests_mock, octox_client):
+ domain_data = util_load_json(path="test_data/get_domains.json")
+ requests_mock.get("/companies/domains", json=domain_data)
+ result = run_command(
+ octox=octox_client,
+ command_name="octoxlabs-get-domain-by-domain-name",
+ args={"domain_name": "localhost"},
+ )
+ first_data = result.outputs
+ assert first_data["domain"] == "localhost"
+
+
+def test_get_users(requests_mock, octox_client):
+ users_data = util_load_json(path="test_data/get_users.json")
+ requests_mock.get("/users/users", json=users_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-users", args={}
+ )
+ first_data = result.outputs
+ assert first_data["count"] == 1
+ assert first_data["results"][0]["name"] == "XSOAR OctoxLabs"
+
+
+def test_get_user_by_id(requests_mock, octox_client):
+ user_data = util_load_json(path="test_data/get_user.json")
+ requests_mock.get("/users/users/1", json=user_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-user-by-id", args={"user_id": 1}
+ )
+ first_data = result.outputs
+ assert first_data["name"] == "XSOAR OctoxLabs"
+
+
+def test_get_user_by_username(requests_mock, octox_client):
+ users_data = util_load_json(path="test_data/get_users.json")
+ requests_mock.get("/users/users", json=users_data)
+ result = run_command(
+ octox=octox_client,
+ command_name="octoxlabs-get-user-by-username",
+ args={"username": "xsoar"},
+ )
+ first_data = result.outputs
+ assert first_data["username"] == "xsoar"
+
+
+def test_get_groups(requests_mock, octox_client):
+ groups_data = util_load_json(path="test_data/get_groups.json")
+ requests_mock.get("/users/groups", json=groups_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-groups", args={}
+ )
+ first_data = result.outputs
+ assert first_data["count"] == 2
+ assert first_data["results"][0]["name"] == "Auditors"
+
+
+def test_get_permissions(requests_mock, octox_client):
+ permissions_data = util_load_json(path="test_data/get_permissions.json")
+ requests_mock.get("/users/permissions", json=permissions_data)
+ result = run_command(
+ octox=octox_client, command_name="octoxlabs-get-permissions", args={}
+ )
+ first_data = result.outputs
+ assert first_data["count"] == 1
+ assert first_data["results"][0]["app"] == "activities"
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/README.md b/Packs/OctoxLabs/Integrations/OctoxLabs/README.md
index 5aa860a0eb9d..298b03239abe 100644
--- a/Packs/OctoxLabs/Integrations/OctoxLabs/README.md
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/README.md
@@ -265,3 +265,297 @@ Fetch your queries by id
| OctoxLabs.Queries.results.updated_at | String | Query updated at |
| OctoxLabs.Queries.results.username | String | Query creator |
| OctoxLabs.Queries.results.is_temporary | Boolean | Query is temporary |
+
+### octoxlabs-get-user-by-username
+
+***
+Fetch your Users by username
+
+#### Base Command
+
+`octoxlabs-get-user-by-username`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| username | User username. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.User.results.id | Number | User id. |
+| OctoxLabs.User.results.email | String | User email. |
+| OctoxLabs.User.results.username | String | User username. |
+| OctoxLabs.User.results.name | String | User name. |
+| OctoxLabs.User.results.first_name | String | User first name |
+| OctoxLabs.User.results.last_name | String | User last name |
+| OctoxLabs.User.results.is_active | Boolean | User is active |
+| OctoxLabs.User.results.is_ldap | Boolean | User is ldap |
+| OctoxLabs.User.results.groups | Unknown | List<Dict> User groups |
+
+### octoxlabs-get-groups
+
+***
+Fetch your Groups
+
+#### Base Command
+
+`octoxlabs-get-groups`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| page | Group list page. | Optional |
+| search | Search text. | Optional |
+| size | Group list size. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Groups.count | Number | Groups count. |
+| OctoxLabs.Groups.results.id | Number | Group id. |
+| OctoxLabs.Groups.results.name | String | Group name. |
+| OctoxLabs.Groups.results.users_count | Number | Group users count. |
+
+### octoxlabs-get-companies
+
+***
+Fetch your Companies
+
+#### Base Command
+
+`octoxlabs-get-companies`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| page | Company list page. | Optional |
+| search | Search text. | Optional |
+| size | Company list size. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Companies.count | Number | Companies count. |
+| OctoxLabs.Companies.results.id | Number | Company id. |
+| OctoxLabs.Companies.results.name | String | Company name. |
+| OctoxLabs.Companies.results.domain | String | Company domain. |
+| OctoxLabs.Companies.results.is_active | Boolean | Company is active. |
+
+### octoxlabs-get-domain-by-domain-name
+
+***
+Fetch your Domain by Domain name.
+
+#### Base Command
+
+`octoxlabs-get-domain-by-domain-name`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain_name | Domain name. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Domain.results.id | Number | Domain id. |
+| OctoxLabs.Domain.results.domain | String | Domain domain. |
+| OctoxLabs.Domain.results.tenant_name | String | Domain tenant name. |
+| OctoxLabs.Domain.results.tenant | Number | Domain tenant. |
+
+### octoxlabs-get-company-by-id
+
+***
+Fetch your Company by id.
+
+#### Base Command
+
+`octoxlabs-get-company-by-id`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| company_id | Company id. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Company.results.id | Number | Company id. |
+| OctoxLabs.Company.results.name | String | Company name. |
+| OctoxLabs.Company.results.domain | String | Company domain. |
+| OctoxLabs.Company.results.is_active | Boolean | Company is active. |
+
+### octoxlabs-get-permissions
+
+***
+Fetch your Permissions
+
+#### Base Command
+
+`octoxlabs-get-permissions`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| page | Permission list page. | Optional |
+| search | Search text. | Optional |
+| size | Permission list size. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Permissions.count | Number | Permissions count. |
+| OctoxLabs.Permissions.results.id | Number | Permission id. |
+| OctoxLabs.Permissions.results.name | String | Permission name. |
+| OctoxLabs.Permissions.results.app | String | Permission app. |
+
+### octoxlabs-get-domains
+
+***
+Fetch your Domains
+
+#### Base Command
+
+`octoxlabs-get-domains`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| page | Domain list page. | Optional |
+| search | Search text. | Optional |
+| size | Domain list size. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Domains.count | Number | Domains count. |
+| OctoxLabs.Domains.results.id | Number | Domain id. |
+| OctoxLabs.Domains.results.domain | String | Domain domain. |
+| OctoxLabs.Domains.results.tenant_name | String | Domain tenant name. |
+| OctoxLabs.Domains.results.tenant | Number | Domain tenant. |
+
+### octoxlabs-get-domain-by-id
+
+***
+Fetch your Domain by id.
+
+#### Base Command
+
+`octoxlabs-get-domain-by-id`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| domain_id | Domain id. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Domain.results.id | Number | Domain id. |
+| OctoxLabs.Domain.results.domain | String | Domain domain. |
+| OctoxLabs.Domain.results.tenant_name | String | Domain tenant name. |
+| OctoxLabs.Domain.results.tenant | Number | Domain tenant. |
+
+### octoxlabs-get-company-by-name
+
+***
+Fetch your Company by name.
+
+#### Base Command
+
+`octoxlabs-get-company-by-name`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| company_name | Company name. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Company.count | Number | Companies count. |
+| OctoxLabs.Company.results.id | Number | Company id. |
+| OctoxLabs.Company.results.name | String | Company name. |
+| OctoxLabs.Company.results.domain | String | Company domain. |
+| OctoxLabs.Company.results.is_active | Boolean | Company is active. |
+
+### octoxlabs-get-users
+
+***
+Fetch your Users
+
+#### Base Command
+
+`octoxlabs-get-users`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| page | User list page. | Optional |
+| search | Search text. | Optional |
+| size | User list size. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.Users.count | Number | Users count. |
+| OctoxLabs.Users.results.id | Number | User id. |
+| OctoxLabs.Users.results.email | String | User email. |
+| OctoxLabs.Users.results.username | String | User username. |
+| OctoxLabs.Users.results.name | String | User name. |
+| OctoxLabs.Users.results.first_name | String | User first name |
+| OctoxLabs.Users.results.last_name | String | User last name |
+| OctoxLabs.Users.results.is_active | Boolean | User is active |
+| OctoxLabs.Users.results.is_ldap | Boolean | User is ldap |
+| OctoxLabs.Users.results.groups | Unknown | List<Dict> User groups |
+
+### octoxlabs-get-user-by-id
+
+***
+Fetch your User by id
+
+#### Base Command
+
+`octoxlabs-get-user-by-id`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| user_id | User id. | Required |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| OctoxLabs.User.results.id | Number | User id. |
+| OctoxLabs.User.results.email | String | User email. |
+| OctoxLabs.User.results.username | String | User username. |
+| OctoxLabs.User.results.name | String | User name. |
+| OctoxLabs.User.results.first_name | String | User first name |
+| OctoxLabs.User.results.last_name | String | User last name |
+| OctoxLabs.User.results.is_active | Boolean | User is active |
+| OctoxLabs.User.results.is_ldap | Boolean | User is ldap |
+| OctoxLabs.User.results.groups | Unknown | List<Dict> User groups |
+
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/command_examples b/Packs/OctoxLabs/Integrations/OctoxLabs/command_examples
index 8736327b7a4a..bae3fad5fe5d 100644
--- a/Packs/OctoxLabs/Integrations/OctoxLabs/command_examples
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/command_examples
@@ -3,4 +3,15 @@
!octoxlabs-get-discoveries page=1
!octoxlabs-get-last-discovery
!octoxlabs-search-devices query="Adapters = active-directory AND (Hostname = a* OR IpAddresses ~ 192)" fields="Adapters,Hostname,Groups"
-!octoxlabs-get-device hostname="octoxlabs01"
\ No newline at end of file
+!octoxlabs-get-device hostname="octoxlabs01"
+!octoxlabs-get-companies page=1
+!octoxlabs-get-company-by-id company_id=1
+!octoxlabs-get-company-by-name company_name="Octoxlabs"
+!octoxlabs-get-domains page=1
+!octoxlabs-get-domain-by-id domain_id=1
+!octoxlabs-get-domain-by-domain-name domain_name="localhost"
+!octoxlabs-get-users page=1
+!octoxlabs-get-user-by-id user_id=1
+!octoxlabs-get-user-by-username username="xsoar"
+!octoxlabs-get-groups page=1
+!octoxlabs-get-permissions page=1
\ No newline at end of file
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_companies.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_companies.json
new file mode 100644
index 000000000000..b90ef223cbae
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_companies.json
@@ -0,0 +1,12 @@
+{
+ "count": 1,
+ "results": [
+ {
+ "id": 1,
+ "name": "Octoxlabs",
+ "is_constant": true,
+ "domain": "localhost",
+ "is_active": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_company.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_company.json
new file mode 100644
index 000000000000..012e5998d2a2
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_company.json
@@ -0,0 +1,7 @@
+{
+ "id": 1,
+ "name": "Octoxlabs",
+ "is_constant": true,
+ "domain": "localhost",
+ "is_active": true
+}
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domain.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domain.json
new file mode 100644
index 000000000000..d15808cc25d8
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domain.json
@@ -0,0 +1,7 @@
+{
+ "id": 1,
+ "domain": "localhost",
+ "tenant": 1,
+ "is_primary": true,
+ "tenant_name": "Octoxlabs"
+}
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domains.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domains.json
new file mode 100644
index 000000000000..b527b424c066
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_domains.json
@@ -0,0 +1,12 @@
+{
+ "count": 1,
+ "results": [
+ {
+ "id": 1,
+ "domain": "localhost",
+ "tenant": 1,
+ "is_primary": true,
+ "tenant_name": "Octoxlabs"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_groups.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_groups.json
new file mode 100644
index 000000000000..9716e3b5c5ed
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_groups.json
@@ -0,0 +1,15 @@
+{
+ "count": 2,
+ "results": [
+ {
+ "id": 2,
+ "name": "Auditors",
+ "user_count": 0
+ },
+ {
+ "id": 1,
+ "name": "Administrators",
+ "user_count": 1
+ }
+ ]
+}
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_permissions.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_permissions.json
new file mode 100644
index 000000000000..a17e6af2d7d8
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_permissions.json
@@ -0,0 +1,11 @@
+{
+ "count": 1,
+ "results": [
+ {
+ "id": 1,
+ "name": "Can clear system logs",
+ "codename": "clear_system_log",
+ "app": "activities"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_user.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_user.json
new file mode 100644
index 000000000000..ad1dee314256
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_user.json
@@ -0,0 +1,16 @@
+{
+ "id": 1,
+ "email": "",
+ "username": "xsoar",
+ "name": "XSOAR OctoxLabs",
+ "first_name": "XSOAR",
+ "last_name": "OctoxLabs",
+ "is_active": true,
+ "is_ldap": false,
+ "groups": [
+ {
+ "id": 1,
+ "name": "Administrators"
+ }
+ ]
+}
diff --git a/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_users.json b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_users.json
new file mode 100644
index 000000000000..5c56e5c48034
--- /dev/null
+++ b/Packs/OctoxLabs/Integrations/OctoxLabs/test_data/get_users.json
@@ -0,0 +1,21 @@
+{
+ "count": 1,
+ "results": [
+ {
+ "id": 1,
+ "email": "",
+ "username": "xsoar",
+ "name": "XSOAR OctoxLabs",
+ "first_name": "XSOAR",
+ "last_name": "OctoxLabs",
+ "is_active": true,
+ "is_ldap": false,
+ "groups": [
+ {
+ "id": 1,
+ "name": "Administrators"
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Packs/OctoxLabs/ReleaseNotes/2_0_0.md b/Packs/OctoxLabs/ReleaseNotes/2_0_0.md
new file mode 100644
index 000000000000..e7d975a195f9
--- /dev/null
+++ b/Packs/OctoxLabs/ReleaseNotes/2_0_0.md
@@ -0,0 +1,17 @@
+
+#### Integrations
+
+##### OctoxLabs
+- Updated the Docker image to: *demisto/octoxlabs:1.0.0.96081*.
+- Added new commands:
+ - **octoxlabs-get-companies**
+ - **octoxlabs-get-company-by-id**
+ - **octoxlabs-get-company-by-name**
+ - **octoxlabs-get-domains**
+ - **octoxlabs-get-domain-by-id**
+ - **octoxlabs-get-domain-by-domain-name**
+ - **octoxlabs-get-users**
+ - **octoxlabs-get-user-by-id**
+ - **octoxlabs-get-user-by-username**
+ - **octoxlabs-get-groups**
+ - **octoxlabs-get-permissions**
\ No newline at end of file
diff --git a/Packs/OctoxLabs/pack_metadata.json b/Packs/OctoxLabs/pack_metadata.json
index 9ac280b3a25b..5b8bc430e070 100644
--- a/Packs/OctoxLabs/pack_metadata.json
+++ b/Packs/OctoxLabs/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "OctoxLabs",
"description": "Octox Labs Cyber Security Asset Management platform",
"support": "partner",
- "currentVersion": "1.1.16",
+ "currentVersion": "2.0.0",
"author": "OctoxLabs",
"url": "https://octoxlabs.com",
"email": "info@octoxlabs.com",
diff --git a/Packs/Okta/Integrations/OktaEventCollector/README.md b/Packs/Okta/Integrations/OktaEventCollector/README.md
index 69aa24094c34..37b837528e16 100644
--- a/Packs/Okta/Integrations/OktaEventCollector/README.md
+++ b/Packs/Okta/Integrations/OktaEventCollector/README.md
@@ -1,5 +1,7 @@
Collects the events log for authentication and Audit provided by Okta admin API
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Okta Log on Cortex XSIAM
1. Navigate to **Settings** > **Configurations** > **Automation & Feed Integrations**.
diff --git a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.py b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.py
index 73eef2710ac9..87e0041295f0 100644
--- a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.py
+++ b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.py
@@ -28,6 +28,8 @@
'"Fetch Query Filter" parameter instead.'
GET_USER_ATTRIBUTES = ['id', 'login', 'email']
+MAX_LOGS_LIMIT = 1000
+
'''CLIENT CLASS'''
@@ -246,7 +248,7 @@ def list_apps_batch(self, url_suffix='', params=None, full_url=''):
return logs_batch, next_page
def get_logs(self, next_page=None, last_run_time=None, time_now=None,
- query_filter=None, auto_generate_filter=False, context=None):
+ query_filter=None, auto_generate_filter=False, context=None, limit=None):
logs = []
uri = 'logs'
@@ -257,13 +259,27 @@ def get_logs(self, next_page=None, last_run_time=None, time_now=None,
params = {
'filter': query_filter,
'since': last_run_time,
- 'until': time_now
+ 'until': time_now,
+ 'limit': limit
}
+ limit = int(limit) if limit else None
+ if limit and limit <= MAX_LOGS_LIMIT:
+ return self._http_request(
+ method='GET',
+ url_suffix=uri,
+ params=params,
+ ), None
+
+ if limit and limit > MAX_LOGS_LIMIT:
+ params['limit'] = MAX_LOGS_LIMIT
+
logs_batch, next_page = self.get_logs_batch(url_suffix=uri, params=params, full_url=next_page)
try:
while logs_batch:
logs.extend(logs_batch)
+ if limit and len(logs) > limit:
+ return logs[:limit], next_page
logs_batch, next_page = self.get_logs_batch(full_url=next_page)
except DemistoException as e:
# in case of too many API calls, we return what we got and save the next_page for next fetch
@@ -899,7 +915,11 @@ def get_logs_command(client, args):
filter = args.get('filter')
since = args.get('since')
until = args.get('until')
- log_events, _ = client.get_logs(query_filter=filter, last_run_time=since, time_now=until)
+ limit = args.get('limit')
+ log_events, _ = client.get_logs(query_filter=filter, last_run_time=since, time_now=until, limit=limit)
+
+ if not log_events:
+ return CommandResults(readable_output='No logs found.', outputs={}, raw_response=log_events)
return CommandResults(
raw_response=log_events,
diff --git a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.yml b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.yml
index dce87724ee31..a627ed911e0d 100644
--- a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.yml
+++ b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM.yml
@@ -387,7 +387,7 @@ script:
description: Status of the application.
type: String
- arguments:
- - description: Group SCIM Data
+ - description: Group SCIM Data.
name: scim
required: true
- auto: PREDEFINED
@@ -436,6 +436,9 @@ script:
predefined:
- ASCENDING
- DESCENDING
+ - defaultValue: '100'
+ description: The maximum number of results to return, between 0 and 1000 maximum. The default is 100.
+ name: limit
description: Gets logs by providing optional filters.
name: okta-get-logs
outputs:
@@ -532,7 +535,7 @@ script:
- contextPath: Okta.Logs.Events.target.displayName
description: Display name of a target.
type: String
- dockerimage: demisto/python3:3.10.13.86272
+ dockerimage: demisto/python3:3.10.14.95663
isfetch: true
runonce: false
script: '-'
diff --git a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM_test.py b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM_test.py
index e2bfd6ff6cb1..cb526d5d51cf 100644
--- a/Packs/Okta/Integrations/Okta_IAM/Okta_IAM_test.py
+++ b/Packs/Okta/Integrations/Okta_IAM/Okta_IAM_test.py
@@ -478,16 +478,21 @@ def test_fetch_incidents__last_run(mocker):
assert isinstance(last_run_time, datetime)
+LOGS = [
+ {'mock_log1': 'mock_value1'},
+ {'mock_log2': 'mock_value2'},
+ {'mock_log3': 'mock_value3'}
+]
+
+
def mock_get_logs_batch(url_suffix='', params=None, full_url=''):
- first_batch = [{'mock_log1': 'mock_value1'}, {'mock_log2': 'mock_value2'}]
- second_batch = [{'mock_log3': 'mock_value3'}]
if url_suffix:
# first iteration
- return first_batch, 'mock_next_page'
+ return LOGS[:2], 'mock_next_page'
elif full_url:
# second iteration
- return second_batch, None
+ return LOGS[2:], None
# third iteration - nothing is returned
return None, None
@@ -513,3 +518,31 @@ def mock_get_logs_batch(url_suffix='', params=None, full_url=''):
def test_should_drop_event(log_entry, email_to_user_profile, expected):
from Okta_IAM import should_drop_event
assert should_drop_event(log_entry, email_to_user_profile) == expected
+
+
+LOGS_WITH_LIMIT = [
+ (None, 3),
+ (1, 1),
+ (3, 3),
+ (1001, 3)
+]
+
+
+@pytest.mark.parametrize('limit, logs_amount', LOGS_WITH_LIMIT)
+def test_get_logs_command(mocker, requests_mock, limit, logs_amount):
+ """
+ Given:
+ - An Okta IAM client object.
+ When:
+ - Calling function okta-get-logs
+ - Events should come in two batches of two events in the first batch, and one event in the second batch.
+ Then:
+ - Ensure three events are returned in the incident in the correct format.
+ """
+ from Okta_IAM import get_logs_command
+
+ mocker.patch.object(Client, 'get_logs_batch', side_effect=mock_get_logs_batch)
+ requests_mock.get(f"{BASE_URL}/logs?limit={limit}", json=LOGS[:limit])
+ args = {'limit': limit}
+ results = get_logs_command(client=mock_client(), args=args)
+ assert len(results.outputs) == logs_amount
diff --git a/Packs/Okta/Integrations/Okta_IAM/README.md b/Packs/Okta/Integrations/Okta_IAM/README.md
index 8ab7a6503260..4d52f08f3ce3 100644
--- a/Packs/Okta/Integrations/Okta_IAM/README.md
+++ b/Packs/Okta/Integrations/Okta_IAM/README.md
@@ -423,6 +423,7 @@ Gets logs by providing optional filters.
| since | Filters the lower time bound of the log events in the Internet Date/Time Format profile of ISO 8601. For example: 2017-05-03T16:22:18Z. | Optional |
| until | Filters the upper time bound of the log events in the Internet Date/Time Format profile of ISO 8601. For example: 2017-05-03T16:22:18Z. | Optional |
| sortOrder | The order of the returned events. Can be "ASCENDING" or "DESCENDING". The default is "ASCENDING". Possible values are: ASCENDING, DESCENDING. Default is ASCENDING. | Optional |
+| limit | The maximum number of results to return, between 0 and 1000 maximum. The default is 100. | Optional |
#### Context Output
diff --git a/Packs/Okta/Integrations/Okta_v2/Okta_v2.py b/Packs/Okta/Integrations/Okta_v2/Okta_v2.py
index d41c71c98dd3..873be199ab25 100644
--- a/Packs/Okta/Integrations/Okta_v2/Okta_v2.py
+++ b/Packs/Okta/Integrations/Okta_v2/Okta_v2.py
@@ -58,6 +58,8 @@
'description'
]
+MAX_LOGS_LIMIT = 1000
+
class Client(OktaClient):
# Getting Group Id with a given group name
@@ -513,7 +515,7 @@ def update_user(self, user_id, profile, cred):
json_data=body
)
- def get_paged_results(self, uri, query_param=None):
+ def get_paged_results(self, uri, query_param=None, max_limit=None):
response = self.http_request(
method="GET",
url_suffix=uri,
@@ -532,6 +534,8 @@ def get_paged_results(self, uri, query_param=None):
)
paged_results += response.json()
+ if max_limit and len(paged_results) >= max_limit:
+ return paged_results[:max_limit]
return paged_results
def get_group_members(self, group_id, limit):
@@ -606,13 +610,17 @@ def get_logs(self, args):
if key == 'query':
key = 'q'
query_params[key] = encode_string_results(value)
- if args.get('limit'):
+ limit = args.get('limit')
+ limit = int(limit) if limit else None
+ if limit and limit <= MAX_LOGS_LIMIT:
return self.http_request(
method='GET',
url_suffix=uri,
params=query_params
)
- return self.get_paged_results(uri, query_params)
+ if limit and limit > MAX_LOGS_LIMIT:
+ query_params['limit'] = MAX_LOGS_LIMIT
+ return self.get_paged_results(uri, query_params, max_limit=limit)
def delete_user(self, user_term):
uri = f"/api/v1/users/{encode_string_results(user_term)}"
diff --git a/Packs/Okta/Integrations/Okta_v2/Okta_v2.yml b/Packs/Okta/Integrations/Okta_v2/Okta_v2.yml
index 5f8981cb3fa9..ea46eea01fed 100644
--- a/Packs/Okta/Integrations/Okta_v2/Okta_v2.yml
+++ b/Packs/Okta/Integrations/Okta_v2/Okta_v2.yml
@@ -1485,7 +1485,7 @@ script:
type: String
- description: Reset OAuth authentication data (authentication process will start from the beginning, and a new token will be generated).
name: okta-auth-reset
- dockerimage: demisto/crypto:1.0.0.87358
+ dockerimage: demisto/crypto:1.0.0.95384
runonce: false
script: ""
subtype: python3
diff --git a/Packs/Okta/Integrations/Okta_v2/Okta_v2_test.py b/Packs/Okta/Integrations/Okta_v2/Okta_v2_test.py
index 9b190f6cbb18..7d0b6b22ecba 100644
--- a/Packs/Okta/Integrations/Okta_v2/Okta_v2_test.py
+++ b/Packs/Okta/Integrations/Okta_v2/Okta_v2_test.py
@@ -601,6 +601,12 @@
"type": "IP"
}
+LOGS = [
+ {'mock_log1': 'mock_value1'},
+ {'mock_log2': 'mock_value2'},
+ {'mock_log3': 'mock_value3'}
+]
+
def util_load_json(path: str):
"""
@@ -944,3 +950,40 @@ def test_set_temp_password_command():
result = set_password_command(client, {'username': 'test', 'password': 'a1b2c3', 'temporary_password': 'true'})
assert result[0] == 'test password was last changed on 2023-03-22T10:15:26.000Z'
+
+
+def mock_get_paged_results(url_suffix='', query_params=None, max_limit=None):
+ if max_limit:
+ return LOGS[:max_limit]
+ else:
+ return LOGS
+
+
+LOGS_WITH_LIMIT = [
+ (None, 3),
+ (1, 1),
+ (3, 3),
+ (1001, 3)
+]
+
+
+@pytest.mark.parametrize('limit, logs_amount', LOGS_WITH_LIMIT)
+def test_get_logs_command_with_limit(mocker, requests_mock, limit, logs_amount):
+ """
+ Given:
+ - An Okta v2 client object.
+ When:
+ - Calling function okta-get-logs
+ - Events should come in two batches of two events in the first batch, and one event in the second batch.
+ Then:
+ - Ensure three events are returned in the incident in the correct format.
+ """
+ from Okta_v2 import get_logs_command
+
+ client = Client(base_url='https://demisto.com', api_token="XXX")
+ mocker.patch.object(Client, 'get_paged_results', side_effect=mock_get_paged_results)
+ mocker.patch.object(Client, 'get_readable_logs', side_effect=mock_get_paged_results)
+ requests_mock.get(f"https://demisto.com/api/v1/logs?limit={limit}", json=LOGS[:limit])
+ args = {'limit': limit}
+ readable, outputs, raw_response = get_logs_command(client=client, args=args)
+ assert len(outputs.get('Okta.Logs.Events(val.uuid && val.uuid === obj.uuid)')) == logs_amount
diff --git a/Packs/Okta/ReleaseNotes/3_2_13.md b/Packs/Okta/ReleaseNotes/3_2_13.md
new file mode 100644
index 000000000000..6cd05e036284
--- /dev/null
+++ b/Packs/Okta/ReleaseNotes/3_2_13.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+
+##### Okta v2
+
+- Improved implementation of ***okta-get-logs*** command to support pagination even with *limit* argument.
+- Updated the Docker image to *demisto/crypto:1.0.0.95384*.
+
+##### Okta IAM
+- Added the *limit* argument to ***okta-get-logs*** command.
+- Updated the Docker image to *demisto/python3:3.10.14.95663*.
\ No newline at end of file
diff --git a/Packs/Okta/ReleaseNotes/3_2_14.md b/Packs/Okta/ReleaseNotes/3_2_14.md
new file mode 100644
index 000000000000..2a53fd3a920a
--- /dev/null
+++ b/Packs/Okta/ReleaseNotes/3_2_14.md
@@ -0,0 +1,7 @@
+- Updated the Docker image to *demisto/python3:3.10.14.95663*.
+#### Scripts
+
+##### IAMInitOktaUser
+
+- Replaced the *pyminizip* library with *pyzipper*.
+- Updated the Docker image to *demisto/py3-tools:1.0.0.95440*.
\ No newline at end of file
diff --git a/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.py b/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.py
index 8f7820db63f2..34a733801e35 100644
--- a/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.py
+++ b/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.py
@@ -2,7 +2,7 @@
from CommonServerPython import * # noqa: F401
import uuid
-import pyminizip
+from pyzipper import AESZipFile, ZIP_DEFLATED, WZ_AES
DEFAULT_PWD_GENERATION_SCRIPT = "GeneratePassword"
TEXT_FILE_NAME = "Okta_Password" # File name for the text file (within the zip file) to use
@@ -209,7 +209,10 @@ def create_zip_with_password(args: dict, generated_password: str, zip_password:
with open(text_file_name, 'w') as text_file:
text_file.write(generated_password)
- pyminizip.compress(text_file_name, '', zip_file_name, zip_password, 1)
+ demisto.debug(f'zipping {text_file_name=}')
+ with AESZipFile(zip_file_name, mode='w', compression=ZIP_DEFLATED, encryption=WZ_AES) as zf:
+ zf.pwd = bytes(zip_password, 'utf-8')
+ zf.write(text_file_name)
with open(zip_file_name, 'rb') as zip_file:
zip_content = zip_file.read()
diff --git a/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.yml b/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.yml
index b329179c9ccc..bd72654a0248 100644
--- a/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.yml
+++ b/Packs/Okta/Scripts/IAMInitOktaUser/IAMInitOktaUser.yml
@@ -81,7 +81,7 @@ outputs:
type: String
scripttarget: 0
subtype: python3
-dockerimage: demisto/py3-tools:1.0.0.81101
+dockerimage: demisto/py3-tools:1.0.0.95440
runas: DBotWeakRole
fromversion: 6.5.0
tests:
diff --git a/Packs/Okta/pack_metadata.json b/Packs/Okta/pack_metadata.json
index 56d073a9bacb..74787b663fb1 100644
--- a/Packs/Okta/pack_metadata.json
+++ b/Packs/Okta/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Okta",
"description": "Integration with Okta's cloud-based identity management service.",
"support": "xsoar",
- "currentVersion": "3.2.12",
+ "currentVersion": "3.2.14",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Okta Event Collector"
}
\ No newline at end of file
diff --git a/Packs/Oletools/ReleaseNotes/1_0_6.md b/Packs/Oletools/ReleaseNotes/1_0_6.md
new file mode 100644
index 000000000000..bd70a293ae29
--- /dev/null
+++ b/Packs/Oletools/ReleaseNotes/1_0_6.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### Oletools
+
+- Improved the message returned to the War Room when running the ***Oletools*** command with a corrupted file.
+- Updated the Docker image to: *demisto/parse-emails:1.0.0.95052*.
\ No newline at end of file
diff --git a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.py b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.py
index 3e9ebf55b188..4d92d9b01a59 100644
--- a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.py
+++ b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.py
@@ -1,15 +1,32 @@
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
-import oletools.oleid
-from oletools.olevba import VBA_Parser
+
import subprocess
-from oletools import crypto
import os
import hashlib
-# suppress logs from oletools
import logging
-vba_logger = logging.getLogger("olevba")
-vba_logger.setLevel(logging.CRITICAL)
+
+
+class CustomHandler(logging.Handler):
+ def __init__(self):
+ super().__init__()
+ self.last_log_msg = None
+
+ def emit(self, record):
+ self.last_log_msg = record.msg
+
+ def get_last_log_msg(self):
+ return self.last_log_msg
+
+
+custom_handler = CustomHandler()
+root_logger = logging.getLogger()
+root_logger.addHandler(custom_handler)
+root_logger.setLevel(logging.DEBUG)
+
+# should be imported after adding log handler to the root logger
+from oletools import crypto, oleid # noqa: E402
+from oletools.olevba import VBA_Parser # noqa: E402
class OleClient:
@@ -77,7 +94,7 @@ def replace_space_with_underscore(indicator: str):
return indicator.replace(' ', '_')
def oleid(self):
- oid = oletools.oleid.OleID(self.processed_file_path)
+ oid = oleid.OleID(self.processed_file_path)
indicators = oid.check()
indicators_list = []
dbot_score = None
@@ -208,7 +225,8 @@ def main(): # pragma: no cover
ole_client = OleClient(file_info, ole_command, password=password, decoded=show_decoded)
return_results(ole_client.run())
except Exception as e:
- return_error(f'The script failed with the following error:\n {e}')
+ return_error(f'The script failed with the following error:\n {e}'
+ f'\n Logs form oletools:\n {custom_handler.get_last_log_msg()}')
if __name__ in ('__builtin__', 'builtins', '__main__'):
diff --git a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.yml b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.yml
index fe537dd61c71..f233491f3e53 100644
--- a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.yml
+++ b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript.yml
@@ -125,7 +125,7 @@ outputs:
- contextPath: DBotScore.Score
description: The actual score.
subtype: python3
-dockerimage: demisto/parse-emails:1.0.0.87403
+dockerimage: demisto/parse-emails:1.0.0.95052
tests:
- No tests (auto formatted)
fromversion: 6.5.0
diff --git a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript_test.py b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript_test.py
index aa781cc156f3..9f419be31952 100644
--- a/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript_test.py
+++ b/Packs/Oletools/Scripts/OletoolsScript/OletoolsScript_test.py
@@ -13,10 +13,10 @@ def test_oleid(caplog):
ole_client = OleClient({
'path': 'test_data/ActiveBarcode-Demo-Bind-Text.docm',
'name': 'ActiveBarcode-Demo-Bind-Text.docm'}, 'oleid')
- caplog.clear()
cr = ole_client.run()
assert cr.outputs == oleid_output
assert cr.readable_output == read_file('test_data/oleid_readable.md')
+ caplog.clear()
def test_oleobj():
@@ -32,20 +32,20 @@ def test_olevba(caplog):
ole_client = OleClient({
'path': 'test_data/ActiveBarcode-Demo-Bind-Text.docm',
'name': 'ActiveBarcode-Demo-Bind-Text.docm'}, 'olevba')
- caplog.clear()
cr = ole_client.run()
assert cr.outputs == olevba_otuput
assert cr.readable_output == read_file('test_data/olevba_readable.md')
+ caplog.clear()
def test_oleid_decrypted(caplog):
ole_client = OleClient({
'path': 'test_data/protected.docm',
'name': 'ActiveBarcode-Demo-Bind-Text.docm'}, 'oleid', '123123')
- caplog.clear()
cr = ole_client.run()
assert cr.outputs == oleid_decrypted_output
assert cr.readable_output == read_file('test_data/oleid_decrypted_readable.md')
+ caplog.clear()
@pytest.mark.parametrize('password, non_secret_password, returned_password',
diff --git a/Packs/Oletools/pack_metadata.json b/Packs/Oletools/pack_metadata.json
index 141d4c871269..23c553c0318d 100644
--- a/Packs/Oletools/pack_metadata.json
+++ b/Packs/Oletools/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Oletools",
"description": "Oletools pack allows performing some basic oletools commands from Cortex XSOAR. oletools is a tool to analyze Microsoft OLE2 files",
"support": "xsoar",
- "currentVersion": "1.0.5",
+ "currentVersion": "1.0.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.py b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.py
index 498f5bab8e08..ac88e1842cb0 100644
--- a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.py
+++ b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.py
@@ -107,14 +107,21 @@ def get_indicators(client: OpenCTIApiClient, indicator_types: list[str], score=N
indicators: dict of indicators
"""
indicator_type = build_indicator_list(indicator_types)
- filters = [{
- 'key': 'entity_type',
- 'values': indicator_type
- }]
+ filters: dict[str, Any] = {
+ 'mode': 'and',
+ 'filters': [{
+ 'key': 'entity_type',
+ 'values': indicator_type,
+ 'operator': 'eq',
+ 'mode': 'or'
+ }],
+ 'filterGroups': []}
if score:
- filters.append({
+ filters["filters"].append({
'key': 'x_opencti_score',
- 'values': score
+ 'values': score,
+ 'operator': 'eq',
+ 'mode': 'or'
})
indicators = client.stix_cyber_observable.list(after=last_run_id, first=limit,
@@ -133,11 +140,11 @@ def get_indicators_command(client: OpenCTIApiClient, args: dict) -> CommandResul
Returns:
readable_output, raw_response
"""
- indicator_types = argToList(args.get("indicator_types"))
+ indicator_types = argToList(args.get("indicator_types", "ALL"))
last_run_id = args.get("last_run_id")
limit = arg_to_number(args.get('limit', 50))
- start = arg_to_number(args.get('score_start'))
- end = arg_to_number(args.get('score_end')) # type:ignore
+ start = arg_to_number(args.get('score_start', 0))
+ end = arg_to_number(args.get('score_end', 100)) # type:ignore
score = args.get('score')
search = args.get("search", "")
scores = None
diff --git a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.yml b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.yml
index 532ae2c5b2b1..3815d2627105 100644
--- a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.yml
+++ b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI.yml
@@ -26,7 +26,7 @@ configuration:
type: 8
additionalinfo: Used for troubleshooting purposes.
required: false
-description: Manages indicators from OpenCTI. Compatible with OpenCTI 4.X API and OpenCTI 5.X API versions.
+description: Manages indicators from OpenCTI. Compatible with OpenCTI 5.12.17 API version.
display: OpenCTI
name: OpenCTI
script:
@@ -293,7 +293,7 @@ script:
- contextPath: OpenCTI.MarkingDefinitions.markingsLastRun
description: The last ID of the previous fetch to use for pagination.
type: String
- dockerimage: demisto/vendors-sdk:1.0.0.76365
+ dockerimage: demisto/vendors-sdk:1.0.0.92984
runonce: false
script: '-'
subtype: python3
@@ -301,3 +301,4 @@ script:
tests:
- OpenCTI Test
fromversion: 5.0.0
+autoUpdateDockerImage: false
diff --git a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI_test.py b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI_test.py
index e25e182e2a2d..55004c81aed8 100644
--- a/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI_test.py
+++ b/Packs/OpenCTI/Integrations/OpenCTI/OpenCTI_test.py
@@ -75,6 +75,54 @@ def test_get_indicators_command(mocker):
assert "Indicators" in results.readable_output
+def test_get_indicators_command_no_parameters(mocker):
+    """Test get_indicators_command function where there are no parameters to filter by
+ Given
+ No parameters to filter by
+ When
+ Calling the `get_indicators_command`
+ Then
+ Return all indicators
+ """
+ client = Client
+ mocker.patch.object(client.stix_cyber_observable, 'list', return_value=RESPONSE_DATA)
+ all_indicators = get_indicators_command(client, args={'indicator_types': 'ALL'})
+ default_indicators = get_indicators_command(client, {})
+ assert len(all_indicators.raw_response) == len(default_indicators.raw_response)
+
+
+def test_get_indicators_command_with_just_score_end(mocker):
+    """Test get_indicators_command function where there is just the score_end parameter
+ Given
+ Filter score_end = 50
+ When
+ Calling the `get_indicators_command`
+ Then
+ Return all indicators with score = 0 until score = 50
+ """
+ client = Client
+ mocker.patch.object(client.stix_cyber_observable, 'list', return_value=RESPONSE_DATA)
+ indicators_with_end = get_indicators_command(client, args={'score_end': 50})
+ indicators_with_end_start = get_indicators_command(client, args={'score_end': 50, 'score_start': 0})
+ assert len(indicators_with_end.raw_response) == len(indicators_with_end_start.raw_response)
+
+
+def test_get_indicators_command_with_just_score_start(mocker):
+    """Test get_indicators_command function where there is just the score_start parameter
+ Given
+ Filter score_start = 50
+ When
+ Calling the `get_indicators_command`
+ Then
+ Return all indicators with score = 50 until score = 100
+ """
+ client = Client
+ mocker.patch.object(client.stix_cyber_observable, 'list', return_value=RESPONSE_DATA)
+ indicators_with_end = get_indicators_command(client, args={'score_start': 50})
+ indicators_with_end_start = get_indicators_command(client, args={'score_start': 50, 'score_end': 100})
+ assert len(indicators_with_end.raw_response) == len(indicators_with_end_start.raw_response)
+
+
def test_get_indicators_command_with_score(mocker):
"""Tests get_indicators_command function with a specified score
Given
@@ -132,9 +180,9 @@ def test_indicator_delete_command(mocker):
assert "Indicator deleted" in results.readable_output
-@pytest.mark.parametrize(argnames="field, value",
- argvalues=[('score', '50'),
- ('description', 'new description')])
+@pytest.mark.parametrize(argnames="field, value",
+                         argvalues=[('score', '50'),
+                                    ('description', 'new description')])
def test_indicator_field_update_command(mocker, field, value):
"""Tests indicator_field_update_command function
Given
diff --git a/Packs/OpenCTI/Integrations/OpenCTI/README.md b/Packs/OpenCTI/Integrations/OpenCTI/README.md
index 39d0e075ef81..4fdf2cc86ac8 100644
--- a/Packs/OpenCTI/Integrations/OpenCTI/README.md
+++ b/Packs/OpenCTI/Integrations/OpenCTI/README.md
@@ -1,7 +1,5 @@
Manages indicators from OpenCTI.
-This integration is compatible with OpenCTI versions from 4.X to 5.11.X.
-
-**Note**: Due to [breaking changes to the OpenCTI API on version 5.12.0](https://github.com/OpenCTI-Platform/opencti/releases/tag/5.12.0), this integration is not currently compatible with OpenCTI versions 5.12.0 and above.
+This integration was tested with version 5.12.17 of OpenCTI.
## Configure OpenCTI on Cortex XSOAR
@@ -33,8 +31,8 @@ Gets indicators from OpenCTI.
| **Argument Name** | **Description** | **Required** |
| --- | --- | --- |
| limit | The maximum number of indicators to return. Default value is 50. Maximum value is 500. | Optional |
-| score_start | Score minimum value to filter by. Values range is 1-100. | Optional |
-| score_end | Score maximum value to filter by. Values range is 1-100.| Optional |
+| score_start | Score minimum value to filter by. Values range is 0-100. | Optional |
+| score_end | Score maximum value to filter by. Values range is 0-100.| Optional |
| indicator_types | The indicator types to fetch. Out-of-the-box indicator types supported in XSOAR are: Account, Domain, Email, File, Host, IP, IPv6, Registry Key, and URL. Possible values are: ALL, Account, Domain, Email, File, Host, IP, IPv6, Registry Key, URL. Default is ALL. | Optional |
| last_run_id | The last ID from the previous call, from which to begin pagination for this call. You can find this value at the OpenCTI.IndicatorsList.LastRunID context path. | Optional |
diff --git a/Packs/OpenCTI/ReleaseNotes/1_0_11.md b/Packs/OpenCTI/ReleaseNotes/1_0_11.md
new file mode 100644
index 000000000000..2d8518b6157f
--- /dev/null
+++ b/Packs/OpenCTI/ReleaseNotes/1_0_11.md
@@ -0,0 +1,9 @@
+
+#### Integrations
+
+##### OpenCTI
+
+- Updated the Docker image to: *demisto/vendors-sdk:1.0.0.92984*.
+- Updated the **opencti-get-indicators** command to work with version 5.12.17 of OpenCTI.
+- You can now run the **opencti-get-indicators** command with no parameters.
+- Fixed an issue by adding default values for the **score_start** and **score_end** arguments in the **opencti-get-indicators** command.
\ No newline at end of file
diff --git a/Packs/OpenCTI/pack_metadata.json b/Packs/OpenCTI/pack_metadata.json
index 14fd62d7b658..39c8cb79a9eb 100644
--- a/Packs/OpenCTI/pack_metadata.json
+++ b/Packs/OpenCTI/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "OpenCTI",
"description": "Manages indicators from OpenCTI.",
"support": "xsoar",
- "currentVersion": "1.0.10",
+ "currentVersion": "1.0.11",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Orca/Integrations/OrcaEventCollector/README.md b/Packs/Orca/Integrations/OrcaEventCollector/README.md
index 68966b5fb175..851ed63a7357 100644
--- a/Packs/Orca/Integrations/OrcaEventCollector/README.md
+++ b/Packs/Orca/Integrations/OrcaEventCollector/README.md
@@ -1,6 +1,8 @@
Orca Security event collector integration for Cortex XSIAM.
This integration was integrated and tested with version 0.1.0 of Orca Event Collector
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Orca Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/Orca/pack_metadata.json b/Packs/Orca/pack_metadata.json
index 37e1ee8696b3..c3984f9eee3d 100644
--- a/Packs/Orca/pack_metadata.json
+++ b/Packs/Orca/pack_metadata.json
@@ -17,5 +17,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "Orca Event Collector"
}
\ No newline at end of file
diff --git a/Packs/PAN-OS/Integrations/Panorama/Panorama.py b/Packs/PAN-OS/Integrations/Panorama/Panorama.py
index 0941e320487d..03252e1263d1 100644
--- a/Packs/PAN-OS/Integrations/Panorama/Panorama.py
+++ b/Packs/PAN-OS/Integrations/Panorama/Panorama.py
@@ -3632,6 +3632,7 @@ def prettify_rule(rule: dict):
'DeviceGroup': DEVICE_GROUP,
'Location': rule.get('@loc', ''),
+ 'UUID': rule.get('@uuid', ''),
'NegateDestination': rule.get('negate-destination', ''),
'Disabled': rule.get('disabled', ''),
'ICMPUnreachable': rule.get('icmp-unreachable', ''),
diff --git a/Packs/PAN-OS/Integrations/Panorama/Panorama.yml b/Packs/PAN-OS/Integrations/Panorama/Panorama.yml
index a889878b63b7..80da69c3d741 100644
--- a/Packs/PAN-OS/Integrations/Panorama/Panorama.yml
+++ b/Packs/PAN-OS/Integrations/Panorama/Panorama.yml
@@ -9468,7 +9468,7 @@ script:
name: rule_name
required: true
- auto: PREDEFINED
- description: The rule type.
+ description: The rule type.
name: rule_type
required: true
predefined:
@@ -9495,7 +9495,7 @@ script:
- contextPath: Panorama.AuditComment.rule_type
description: The rule type.
type: String
- dockerimage: demisto/pan-os-python:1.0.0.91683
+ dockerimage: demisto/pan-os-python:1.0.0.95792
isfetch: true
runonce: false
script: ''
@@ -9508,4 +9508,4 @@ tests:
- PAN-OS-firewall-topology-test-pb
- PAN-OS-panorama-topology-test-pb
defaultmapperin: Panorama Mapper
-defaultclassifier: Panorama Classifier
+defaultclassifier: Panorama Classifier
\ No newline at end of file
diff --git a/Packs/PAN-OS/Integrations/Panorama/test_data/prettify_rule.json b/Packs/PAN-OS/Integrations/Panorama/test_data/prettify_rule.json
index b20f9eee152b..c65b427e6ddf 100644
--- a/Packs/PAN-OS/Integrations/Panorama/test_data/prettify_rule.json
+++ b/Packs/PAN-OS/Integrations/Panorama/test_data/prettify_rule.json
@@ -1,6 +1,7 @@
{
"Location": "SA_FWs",
"NegateDestination": "",
+ "UUID": "4c2d39ea-d2bd-45c1-8003-91264c4ade3b",
"Disabled": "",
"ICMPUnreachable": "yes",
"Description": "bbbbbbbbbb",
diff --git a/Packs/PAN-OS/ReleaseNotes/2_1_27.md b/Packs/PAN-OS/ReleaseNotes/2_1_27.md
new file mode 100644
index 000000000000..4de516af1ce4
--- /dev/null
+++ b/Packs/PAN-OS/ReleaseNotes/2_1_27.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Palo Alto Networks PAN-OS
+
+- Added the *UUID* output to the ***pan-os-list-rules*** command.
+- Updated the Docker image to: *demisto/pan-os-python:1.0.0.95792*.
diff --git a/Packs/PAN-OS/ReleaseNotes/2_1_28.md b/Packs/PAN-OS/ReleaseNotes/2_1_28.md
new file mode 100644
index 000000000000..d481f02058d3
--- /dev/null
+++ b/Packs/PAN-OS/ReleaseNotes/2_1_28.md
@@ -0,0 +1,3 @@
+## PAN-OS by Palo Alto Networks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/PAN-OS/pack_metadata.json b/Packs/PAN-OS/pack_metadata.json
index c2ae413384b3..20f2f3eea0e5 100644
--- a/Packs/PAN-OS/pack_metadata.json
+++ b/Packs/PAN-OS/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "PAN-OS by Palo Alto Networks",
"description": "Manage Palo Alto Networks Firewall and Panorama. Use this pack to manage Prisma Access through Panorama. For more information see Panorama documentation.",
"support": "xsoar",
- "currentVersion": "2.1.26",
+ "currentVersion": "2.1.28",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/PaloAltoNetworks_IoT/CONTRIBUTORS.json b/Packs/PaloAltoNetworks_IoT/CONTRIBUTORS.json
new file mode 100644
index 000000000000..06bbccec1766
--- /dev/null
+++ b/Packs/PaloAltoNetworks_IoT/CONTRIBUTORS.json
@@ -0,0 +1,3 @@
+[
+ "Masahiko Inoue"
+]
diff --git a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.py b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.py
index 1bf108f3f553..653c7c7e6711 100644
--- a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.py
+++ b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.py
@@ -526,7 +526,7 @@ def fetch_incidents(client, last_run, is_test=False):
break
if len(others) != max_fetch:
break
- demisto.debug(f"PaloAltoNetworks_IoT - Number of incidents- vulnerability before filtering: {len(alerts)}")
+ demisto.debug(f"PaloAltoNetworks_IoT - Number of incidents- vulnerability before filtering: {len(vulns)}")
for vuln in vulns:
detected_date = vuln['detected_date']
if detected_date and isinstance(detected_date, list):
@@ -538,7 +538,7 @@ def fetch_incidents(client, last_run, is_test=False):
incident = {
'name': vuln['name'],
"type": "IoT Vulnerability",
- 'occurred': vuln['detected_date'],
+ 'occurred': detected_date,
'rawJSON': json.dumps(vuln),
'details': f'Device {vuln["name"]} at IP {vuln["ip"]}: {vuln["vulnerability_name"]}',
'CustomFields': {
diff --git a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.yml b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.yml
index 42102094acf5..3734c64f4452 100644
--- a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.yml
+++ b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT.yml
@@ -102,7 +102,7 @@ script:
commands:
- name: iot-security-get-device
arguments:
- - description: The device uid (mac address)
+ - description: The device uid (mac address).
name: id
required: true
description: IoT get device command - get a single device's details.
@@ -123,7 +123,7 @@ script:
description: The device profile vertical.
type: String
- contextPath: PaloAltoNetworksIoT.Device.category
- description: The device category
+ description: The device category.
type: String
- contextPath: PaloAltoNetworksIoT.Device.profile
description: The device profile.
@@ -144,7 +144,7 @@ script:
description: The device risk score.
type: Number
- contextPath: PaloAltoNetworksIoT.Device.risk_level
- description: "The device risk level: Low, Medium, High, Critical"
+ description: "The device risk level: Low, Medium, High, Critical."
type: String
- contextPath: PaloAltoNetworksIoT.Device.subnet
description: The device subnet.
@@ -219,7 +219,7 @@ script:
arguments:
- name: ip
required: true
- description: The device ip (ip address)
+ description: The device ip (ip address).
outputs:
- contextPath: PaloAltoNetworksIoT.Device
description: Device details.
@@ -237,7 +237,7 @@ script:
description: The device profile vertical.
type: String
- contextPath: PaloAltoNetworksIoT.Device.category
- description: The device category
+ description: The device category.
type: String
- contextPath: PaloAltoNetworksIoT.Device.profile
description: The device profile.
@@ -258,7 +258,7 @@ script:
description: The device risk score.
type: Number
- contextPath: PaloAltoNetworksIoT.Device.risk_level
- description: 'The device risk level: Low, Medium, High, Critical'
+ description: 'The device risk level: Low, Medium, High, Critical.'
type: String
- contextPath: PaloAltoNetworksIoT.Device.subnet
description: The device subnet.
@@ -336,7 +336,7 @@ script:
name: offset
- description: The maximum size of the list of the devices.
name: limit
- description: IoT list devices command
+ description: IoT list devices command.
outputs:
- contextPath: PaloAltoNetworksIoT.DeviceList
description: List of devices.
@@ -369,7 +369,7 @@ script:
type: unknown
- name: iot-security-resolve-alert
arguments:
- - description: The alert ID
+ - description: The alert ID.
name: id
required: true
- description: The alert resolution reason.
@@ -395,7 +395,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.13.72123
+ dockerimage: demisto/python3:3.10.14.95956
fromversion: 5.0.0
tests:
- No tests (auto formatted)
diff --git a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT_test.py b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT_test.py
index 869c985da556..0f78d7587624 100644
--- a/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT_test.py
+++ b/Packs/PaloAltoNetworks_IoT/Integrations/PaloAltoNetworks_IoT/PaloAltoNetworks_IoT_test.py
@@ -57,7 +57,8 @@ def test_fetch_incidents(requests_mock, monkeypatch):
mock_vuln_response = json.loads('''{"ver":"v4.0","api":"/vulnerability/list","items":[{"name":"HPD41936",
"ip":"10.55.132.114","deviceid":"a0:d3:c1:d4:19:36","detected_date":"2020-05-31T23:59:59.000Z",
-"vulnerability_name":"SMB v1 Usage"}]}''')
+"vulnerability_name":"SMB v1 Usage"},{"name":"HPD41936","ip":"10.55.132.114","deviceid":"a0:d3:c1:d4:19:36",
+"detected_date":["2020-05-31T23:59:59.000Z"],"vulnerability_name":"SMB v1 Usage"}]}''')
requests_mock.get('https://test.com/pub/v4.0/vulnerability/list?customerid=foobar&offset=0&pagelength=10'
'&stime=1970-01-01T00:00:00.001000Z&type=vulnerability&status=Confirmed&groupby=device',
json=mock_vuln_response)
@@ -71,7 +72,9 @@ def test_fetch_incidents(requests_mock, monkeypatch):
'last_alerts_fetch': 1579064810.54,
'last_vulns_fetch': 1590969599.0
}
- assert len(incidents) == 3
+ assert len(incidents) == 4
+ for incident in incidents:
+ assert (isinstance(incident.get('occurred'), str))
def test_fetch_incidents_special(requests_mock, monkeypatch):
diff --git a/Packs/PaloAltoNetworks_IoT/ReleaseNotes/1_0_34.md b/Packs/PaloAltoNetworks_IoT/ReleaseNotes/1_0_34.md
new file mode 100644
index 000000000000..3e86a86d2f15
--- /dev/null
+++ b/Packs/PaloAltoNetworks_IoT/ReleaseNotes/1_0_34.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Palo Alto Networks IoT
+
+- Fixed an issue where **fetch_incidents** would fail when retrieving vulnerabilities.
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
diff --git a/Packs/PaloAltoNetworks_IoT/pack_metadata.json b/Packs/PaloAltoNetworks_IoT/pack_metadata.json
index 9131fd45453f..af170597c237 100644
--- a/Packs/PaloAltoNetworks_IoT/pack_metadata.json
+++ b/Packs/PaloAltoNetworks_IoT/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "IoT by Palo Alto Networks",
"description": "Palo Alto Networks IoT",
"support": "xsoar",
- "currentVersion": "1.0.33",
+ "currentVersion": "1.0.34",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Palo_Alto_Networks_WildFire/ReleaseNotes/2_1_47.md b/Packs/Palo_Alto_Networks_WildFire/ReleaseNotes/2_1_47.md
new file mode 100644
index 000000000000..bab82524757c
--- /dev/null
+++ b/Packs/Palo_Alto_Networks_WildFire/ReleaseNotes/2_1_47.md
@@ -0,0 +1,3 @@
+## WildFire by Palo Alto Networks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Palo_Alto_Networks_WildFire/pack_metadata.json b/Packs/Palo_Alto_Networks_WildFire/pack_metadata.json
index 7b616812391a..b5c2ef4897dc 100644
--- a/Packs/Palo_Alto_Networks_WildFire/pack_metadata.json
+++ b/Packs/Palo_Alto_Networks_WildFire/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "WildFire by Palo Alto Networks",
"description": "Perform malware dynamic analysis",
"support": "xsoar",
- "currentVersion": "2.1.46",
+ "currentVersion": "2.1.47",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/PhishLabs/pack_metadata.json b/Packs/PhishLabs/pack_metadata.json
index 7b3aa6234069..f6d747ae939d 100644
--- a/Packs/PhishLabs/pack_metadata.json
+++ b/Packs/PhishLabs/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "PhishLabs IOC"
}
\ No newline at end of file
diff --git a/Packs/PhishTank/Integrations/PhishTankV2/PhishTankV2.yml b/Packs/PhishTank/Integrations/PhishTankV2/PhishTankV2.yml
index f07f3ec3d33c..f077e60efacc 100644
--- a/Packs/PhishTank/Integrations/PhishTankV2/PhishTankV2.yml
+++ b/Packs/PhishTank/Integrations/PhishTankV2/PhishTankV2.yml
@@ -81,11 +81,11 @@ script:
name: phishtank-reload
- description: Shows the status (timestamp) of the last time that PhishTank database was loaded.
name: phishtank-status
- dockerimage: demisto/python3:3.10.13.87159
+ dockerimage: demisto/python3:3.10.14.96411
runonce: false
script: '-'
subtype: python3
type: python
tests:
-- PhishTank Testing
+- No tests
fromversion: 5.0.0
diff --git a/Packs/PhishTank/ReleaseNotes/2_0_31.md b/Packs/PhishTank/ReleaseNotes/2_0_31.md
new file mode 100644
index 000000000000..c3c86b902876
--- /dev/null
+++ b/Packs/PhishTank/ReleaseNotes/2_0_31.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### PhishTank v2
+
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
diff --git a/Packs/PhishTank/pack_metadata.json b/Packs/PhishTank/pack_metadata.json
index eab9df634856..1b39cc813201 100644
--- a/Packs/PhishTank/pack_metadata.json
+++ b/Packs/PhishTank/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "PhishTank",
"description": "PhishTank is a free community site where anyone can submit, verify, track and share phishing data",
"support": "xsoar",
- "currentVersion": "2.0.30",
+ "currentVersion": "2.0.31",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Phishing/TestPlaybooks/Phishing_v3_-_DomainSquatting+EML+MaliciousIndicators_-_Test.yml b/Packs/Phishing/TestPlaybooks/Phishing_v3_-_DomainSquatting+EML+MaliciousIndicators_-_Test.yml
index 3d310572fd15..4b784fdbc4bb 100644
--- a/Packs/Phishing/TestPlaybooks/Phishing_v3_-_DomainSquatting+EML+MaliciousIndicators_-_Test.yml
+++ b/Packs/Phishing/TestPlaybooks/Phishing_v3_-_DomainSquatting+EML+MaliciousIndicators_-_Test.yml
@@ -3,7 +3,12 @@ version: -1
contentitemexportablefields:
contentitemfields: {}
name: Phishing v3 - DomainSquatting+EML+MaliciousIndicators - Test
-description: Checks the domain-squatting flow, with an attached EML file. The EML file should contain some indicators. some of which should be detected as at least suspicious. The email authenticity should get a score of "Fail". Those results together with the domain-squatting, should eventually cause the incident to get a severity of 3.
+description: |-
+ Checks the domain-squatting flow, with an attached EML file. The EML file should contain some indicators. some of which should be detected as at least suspicious. The email authenticity should get a score of "Fail". Those results together with the domain-squatting, should eventually cause the incident to get a severity of 3.
+
+ Keep in mind when running Phishing tests:
+ - Never send an email from the phishing mailbox to the same phishing mailbox, as this can cause an infinite loop situation where an email is received, an acknowledgement email is sent to the sender, and then the acknowledgement email is received in the phishing mailbox, which causes a new incident to fetch, thus triggering another acknowledgement email, etc.
+ - It's important to ensure that acknowledgement emails are eventually deleted from the mailbox of the email that will be detected as the reporter of the email.
starttaskid: "0"
tasks:
"0":
diff --git a/Packs/Phishing/TestPlaybooks/Phishing_v3_-_Get_Original_Email_+_Search_&_Delete_-_Test.yml b/Packs/Phishing/TestPlaybooks/Phishing_v3_-_Get_Original_Email_+_Search_&_Delete_-_Test.yml
index 855193a278ee..215ba1538dea 100644
--- a/Packs/Phishing/TestPlaybooks/Phishing_v3_-_Get_Original_Email_+_Search_&_Delete_-_Test.yml
+++ b/Packs/Phishing/TestPlaybooks/Phishing_v3_-_Get_Original_Email_+_Search_&_Delete_-_Test.yml
@@ -8,6 +8,10 @@ description: |-
- A mail sender with sr-test02 is configured. That user will supposedly attack sr-test01.
- sr-test01 has a rule which forwards the test email to Testbox(but does not forward the acknowledgement emails it receives from the phishing playbook!)
- EWS O365 is configured as the mail listener with Testbox as the user.
+
+ Keep in mind when running Phishing tests:
+ - Never send an email from the phishing mailbox to the same phishing mailbox, as this can cause an infinite loop situation where an email is received, an acknowledgement email is sent to the sender, and then the acknowledgement email is received in the phishing mailbox, which causes a new incident to fetch, thus triggering another acknowledgement email, etc.
+ - It's important to ensure that acknowledgement emails are eventually deleted from the mailbox of the email that will be detected as the reporter of the email. In this case, the phishing playbook searches and deletes the email that gets sent.
starttaskid: '0'
tasks:
'0':
diff --git a/Packs/PhishingURL/ReleaseNotes/1_1_14.md b/Packs/PhishingURL/ReleaseNotes/1_1_14.md
new file mode 100644
index 000000000000..87b14eb71a42
--- /dev/null
+++ b/Packs/PhishingURL/ReleaseNotes/1_1_14.md
@@ -0,0 +1,7 @@
+
+#### Scripts
+
+##### DBotPredictURLPhishing
+
+- Fixed an issue in which an error was raised when the `rasterize` command failed.
+- Updated the Docker image to: *demisto/mlurlphishing:1.0.0.90588*.
diff --git a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.py b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.py
index e324094b938f..cbbadd76645e 100644
--- a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.py
+++ b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.py
@@ -292,13 +292,12 @@ def return_entry_summary(
return None
if verdict == BENIGN_VERDICT_WHITELIST:
verdict = BENIGN_VERDICT
- if is_white_listed or not pred_json:
+ if not pred_json:
url_score = SCORE_BENIGN
- url_score_colored = GREEN_COLOR.format(url_score) if url_score < SCORE_THRESHOLD else RED_COLOR.format(
- url_score)
+ url_score_colored = (GREEN_COLOR if url_score < SCORE_THRESHOLD else RED_COLOR).format(url_score)
else:
url_score = round(pred_json[MODEL_KEY_URL_SCORE], 2)
- url_score_colored = GREEN_COLOR.format(url_score) if url_score < SCORE_THRESHOLD else RED_COLOR.format(url_score)
+ url_score_colored = (GREEN_COLOR if url_score < SCORE_THRESHOLD else RED_COLOR).format(url_score)
pred_json_colored = get_colored_pred_json(pred_json) if pred_json else {}
domain = extract_domainv2(url)
explain = {
@@ -458,25 +457,21 @@ def extract_created_date(entry: dict):
return None
-def weed_rasterize_errors(urls: list[str], res_rasterize: list[dict]):
+def weed_rasterize_errors(urls: list[str], res_rasterize: list[Union[dict, str]]):
'''Remove the URLs that failed rasterization and return them.'''
- if len(urls) != len(res_rasterize):
- demisto.debug(f'{res_rasterize=}')
- raise DemistoException('Unexpected response from the "rasterize" command. '
- 'Please make sure the Rasterize pack version is above 2.0.7')
error_idx = [
i for (i, res) in enumerate(res_rasterize)
- if isinstance(res['Contents'], str)
+ if isinstance(res, str)
][::-1] # reverse the list as it will be used to remove elements.
if error_idx:
return_results(CommandResults(readable_output=tableToMarkdown(
'The following URLs failed rasterize and were skipped:',
- [{'URL': urls.pop(i), 'Message': res_rasterize.pop(i)['Contents']} for i in error_idx],
+ [{'URL': urls.pop(i), 'Message': res_rasterize.pop(i)} for i in error_idx],
['URL', 'Message']
)))
-def rasterize_urls(urls: list[str], rasterize_timeout: int) -> list[dict]:
+def rasterize_command(urls: Union[list[str], str], rasterize_timeout: int) -> list[Union[dict, str]]:
res_rasterize: list[dict] = demisto.executeCommand( # type: ignore
'rasterize',
{
@@ -487,10 +482,19 @@ def rasterize_urls(urls: list[str], rasterize_timeout: int) -> list[dict]:
}
)
demisto.debug(f'Rasterize Data: {res_rasterize}')
- weed_rasterize_errors(urls, res_rasterize)
return [res['Contents'] for res in res_rasterize]
+def rasterize_urls(urls: list[str], rasterize_timeout: int) -> list[dict]:
+ res_rasterize = rasterize_command(urls, rasterize_timeout)
+ if len(res_rasterize) < len(urls): # check for errors in the response
+ demisto.info(f'Rasterize response is too short, running command for each URL\n{res_rasterize=}\n{urls=}')
+ rasterize_runs = map(rasterize_command, urls, [rasterize_timeout] * len(urls))
+ res_rasterize = sum(rasterize_runs, [])
+ weed_rasterize_errors(urls, res_rasterize)
+ return cast(list[dict], res_rasterize)
+
+
def get_whois_verdict(domains: list[dict]) -> list:
default = [None] * len(domains)
@@ -535,7 +539,6 @@ def get_predictions_for_urls(model, urls, force_model, debug, rasterize_timeout)
x_pred = create_x_pred(output_rasterize, final_url)
pred_json = model.predict(x_pred)
-
if debug:
return_results(pred_json['debug_top_words'])
return_results(pred_json['debug_found_domains_list'])
diff --git a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.yml b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.yml
index 3d7b6c3dd406..73e2f7368070 100644
--- a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.yml
+++ b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/DBotPredictURLPhishing.yml
@@ -85,7 +85,7 @@ tags:
- ml
timeout: 480ns
type: python
-dockerimage: demisto/mlurlphishing:1.0.0.88055
+dockerimage: demisto/mlurlphishing:1.0.0.90588
runas: DBotRole
tests:
- DBotPredictURLPhishing_test
diff --git a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/dbotpredicturlphishing_test.py b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/dbotpredicturlphishing_test.py
index f4afddd0a127..38da39131368 100644
--- a/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/dbotpredicturlphishing_test.py
+++ b/Packs/PhishingURL/Scripts/DBotPredictURLPhishing/dbotpredicturlphishing_test.py
@@ -326,27 +326,51 @@ def test_weed_rasterize_errors(mocker: MockerFixture):
"""
return_results_mock = mocker.patch('DBotPredictURLPhishing.return_results')
urls = ['1', '2', '3']
- res_rasterize = [
- {'Contents': 'error 1'},
- {'Contents': {'success': True}},
- {'Contents': 'error 3'},
- ]
+ res_rasterize = ['error 1', {'success': True}, 'error 3']
weed_rasterize_errors(urls, res_rasterize)
assert urls == ['2']
- assert res_rasterize == [{'Contents': {'success': True}}]
+ assert res_rasterize == [{'success': True}]
assert 'error 1' in return_results_mock.call_args_list[0].args[0].readable_output
assert 'error 3' in return_results_mock.call_args_list[0].args[0].readable_output
-def test_weed_rasterize_errors_bad_rasterize_response():
+def test_return_entry_summary(mocker: MockerFixture):
+
+ mock_return_results = mocker.patch('DBotPredictURLPhishing.return_results')
+ pred_json = {
+ 'seo': True, 'login_form': False, 'debug_top_words': "['access']",
+ 'debug_found_domains_list': "['example.com']",
+ 'logo_name': float('nan'), 'logo_found': True, 'image_bytes': '',
+ 'debug_image': '{"example.png": "Less than MIN_MATCH_COUNT: 2"}',
+ 'url_score': 0.55, 'New domain (less than 6 months)': True
+ }
+ res = return_entry_summary(
+ pred_json=pred_json,
+ url='https://example.com', is_white_listed=False, output_rasterize={'image_b64': '1234'},
+ verdict='Benign - Top domains from Majestic', reliability=DBotScoreReliability.A_PLUS
+ )
+
+ assert res == {
+ 'BadSEOQuality': 'True', 'Domain': 'example.com', 'FinalVerdict': 'Benign', 'HasLoginForm': 'False', 'NewDomain': 'True',
+ 'TopMajesticDomain': 'False', 'URL': 'https://example.com', 'URLStaticScore': 0.55, 'UseOfSuspiciousLogo': 'True'}
+ assert mock_return_results.mock_calls[0].args[0]['HumanReadable'].startswith('### Phishing prediction evidence | example.com')
+ assert mock_return_results.mock_calls[1].args[0]['File'] == 'Logo detection engine'
+ assert mock_return_results.mock_calls[1].args[0]['Tags'] == ['DBOT_URL_PHISHING_MALICIOUS']
+
+
+def test_rasterize_urls_bad_rasterize_response(mocker: MockerFixture):
"""
Given: the results from calling rasterize are less than the amount of URLs given.
When: looking for rasterize error responses in the weed_rasterize_errors function.
- Then: Make sure the correct error is raised.
+ Then: Make sure the command is called for each URL.
"""
- with pytest.raises(DemistoException, match=(
- 'Unexpected response from the "rasterize" command. Please make sure the Rasterize pack version is above 2.0.7')
- ):
- weed_rasterize_errors(['1', '2'], [{}])
+ rasterize_command_mock = mocker.patch(
+ 'DBotPredictURLPhishing.rasterize_command', return_value=[{}]
+ )
+
+ res = rasterize_urls(['1', '2'], 0)
+
+ assert res == [{}, {}]
+ assert rasterize_command_mock.call_count == 3
diff --git a/Packs/PhishingURL/pack_metadata.json b/Packs/PhishingURL/pack_metadata.json
index 8e83523774be..15612c9cfaa3 100644
--- a/Packs/PhishingURL/pack_metadata.json
+++ b/Packs/PhishingURL/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Phishing URL",
"description": "Phishing URL is a project with the goal of detecting phishing URLs using machine learning",
"support": "xsoar",
- "currentVersion": "1.1.13",
+ "currentVersion": "1.1.14",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.py b/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.py
index 0b93e8cd7fe4..3eebb757fac1 100644
--- a/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.py
+++ b/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.py
@@ -230,8 +230,8 @@ def url_reputation(self,
artifacts = argToList(param[artifact_type])
for artifact in artifacts:
- title = 'PolySwarm %s Reputation for: %s' % (artifact_type.upper(),
- artifact)
+ title = 'PolySwarm {} Reputation for: {}'.format(artifact_type.upper(),
+ artifact)
demisto.debug(f'[url_reputation] {title}')
@@ -244,7 +244,7 @@ def url_reputation(self,
if artifact_type == 'ip':
try:
socket.inet_aton(artifact)
- except socket.error:
+ except OSError:
return_error('Invalid IP Address: {ip}'.
format(ip=artifact))
@@ -318,7 +318,7 @@ def get_report(self,
def main():
''' EXECUTION '''
- LOG('command is %s' % (demisto.command(),))
+ LOG(f'command is {demisto.command()}')
try:
polyswarm = PolyswarmConnector()
diff --git a/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.yml b/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.yml
index 205040e5e465..a86698e96b6b 100644
--- a/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.yml
+++ b/Packs/PolySwarm/Integrations/PolySwarmV2/PolySwarmV2.yml
@@ -405,5 +405,5 @@ script:
script: '-'
subtype: python3
type: python
- dockerimage: demisto/polyswarm:1.0.0.18926
+ dockerimage: demisto/polyswarm:1.0.0.96224
fromversion: 5.0.0
diff --git a/Packs/PolySwarm/ReleaseNotes/2_0_6.md b/Packs/PolySwarm/ReleaseNotes/2_0_6.md
new file mode 100644
index 000000000000..8dcf3b5cd548
--- /dev/null
+++ b/Packs/PolySwarm/ReleaseNotes/2_0_6.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### PolySwarm
+- Updated the Docker image to: *demisto/polyswarm:1.0.0.96224*.
+
+
diff --git a/Packs/PolySwarm/pack_metadata.json b/Packs/PolySwarm/pack_metadata.json
index 1353d56fddae..7595f21a8e56 100644
--- a/Packs/PolySwarm/pack_metadata.json
+++ b/Packs/PolySwarm/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "PolySwarm",
"description": "Real-time threat intelligence from a crowdsourced network of security experts and antivirus companies.",
"support": "partner",
- "currentVersion": "2.0.5",
+ "currentVersion": "2.0.6",
"author": "PolySwarm",
"url": "",
"email": "support@polyswarm.io",
diff --git a/Packs/PrismaCloud/ReleaseNotes/4_3_5.md b/Packs/PrismaCloud/ReleaseNotes/4_3_5.md
new file mode 100644
index 000000000000..460d4dda668d
--- /dev/null
+++ b/Packs/PrismaCloud/ReleaseNotes/4_3_5.md
@@ -0,0 +1,3 @@
+## Prisma Cloud by Palo Alto Networks
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/PrismaCloud/pack_metadata.json b/Packs/PrismaCloud/pack_metadata.json
index 468c5b7a959e..4a11ae85678e 100644
--- a/Packs/PrismaCloud/pack_metadata.json
+++ b/Packs/PrismaCloud/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Prisma Cloud by Palo Alto Networks",
"description": "Automate and unify security incident response across your cloud environments, while still giving a degree of control to dedicated cloud teams.",
"support": "xsoar",
- "currentVersion": "4.3.4",
+ "currentVersion": "4.3.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/PrismaSaasSecurity/Integrations/SaasSecurityEventCollector/README.md b/Packs/PrismaSaasSecurity/Integrations/SaasSecurityEventCollector/README.md
index 0dbe309d6f0e..ea7623ef82f5 100644
--- a/Packs/PrismaSaasSecurity/Integrations/SaasSecurityEventCollector/README.md
+++ b/Packs/PrismaSaasSecurity/Integrations/SaasSecurityEventCollector/README.md
@@ -4,6 +4,7 @@ the challenges of:
* maintaining compliance consistently in the cloud
* stopping threats to sensitive information, users, and resources
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
## Configure SaaS Security on Cortex XSIAM
diff --git a/Packs/PrismaSaasSecurity/pack_metadata.json b/Packs/PrismaSaasSecurity/pack_metadata.json
index d84784b6e42a..0d4425f61715 100644
--- a/Packs/PrismaSaasSecurity/pack_metadata.json
+++ b/Packs/PrismaSaasSecurity/pack_metadata.json
@@ -20,5 +20,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "SaaS Security Event Collector"
}
\ No newline at end of file
diff --git a/Packs/ProofpointThreatResponse/Integrations/ProofpointThreatResponseEventCollector/README.md b/Packs/ProofpointThreatResponse/Integrations/ProofpointThreatResponseEventCollector/README.md
index 32fb2a2ff273..5ca311daf6d4 100644
--- a/Packs/ProofpointThreatResponse/Integrations/ProofpointThreatResponseEventCollector/README.md
+++ b/Packs/ProofpointThreatResponse/Integrations/ProofpointThreatResponseEventCollector/README.md
@@ -1,5 +1,7 @@
Use the Proofpoint Threat Response integration to orchestrate and automate incident response.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Proofpoint Threat Response Event Collector on Cortex XSIAM
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/ProofpointThreatResponse/pack_metadata.json b/Packs/ProofpointThreatResponse/pack_metadata.json
index 925f63d368bc..1f16580ed2a4 100644
--- a/Packs/ProofpointThreatResponse/pack_metadata.json
+++ b/Packs/ProofpointThreatResponse/pack_metadata.json
@@ -16,5 +16,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "ProofpointThreatResponseEventCollector"
}
\ No newline at end of file
diff --git a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.py b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.py
index 467ada4eec1c..443d853d93c5 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.py
+++ b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.py
@@ -472,7 +472,8 @@ def http_request(self, method: str, url_suffix: str, params: Optional[dict] = No
headers=headers,
error_handler=self.qradar_error_handler,
timeout=timeout or self.timeout,
- resp_type=resp_type
+ resp_type=resp_type,
+ with_metrics=True
)
except (DemistoException, requests.ReadTimeout) as error:
demisto.error(f'Error {error} in time {_time}')
@@ -655,6 +656,18 @@ def search_status_get(self, search_id: str):
url_suffix=f'/ariel/searches/{search_id}',
)
+ def search_delete(self, search_id: str):
+ return self.http_request(
+ method='DELETE',
+ url_suffix=f'/ariel/searches/{search_id}',
+ )
+
+ def search_cancel(self, search_id: str):
+ return self.http_request(
+ method='POST',
+ url_suffix=f'/ariel/searches/{search_id}?status=CANCELED',
+ )
+
def search_results_get(self, search_id: str, range_: Optional[str] = None):
return self.http_request(
method='GET',
@@ -1232,6 +1245,7 @@ def safely_update_context_data(
should_update_last_mirror,
should_add_reset_key,
should_force_update)
+ print_debug_msg(f"{updated_context=}")
set_integration_context(updated_context, version=new_version)
print_debug_msg(f'Updated integration context after version {new_version}.')
@@ -2223,7 +2237,9 @@ def poll_offense_events_with_retry(
if retry < max_retries - 1:
time.sleep(EVENTS_INTERVAL_SECS)
- print_debug_msg(f'Max retries for getting events for offense {offense_id}.')
+ print_debug_msg(f'Max retries for getting events for offense {offense_id}. Cancel query search_id: {search_id}')
+ # need to cancel query
+ client.search_cancel(search_id=search_id)
return [], 'Fetching events is in progress'
@@ -3134,6 +3150,55 @@ def qradar_search_status_get_command(client: Client, args: dict) -> CommandResul
)
+def qradar_search_delete_command(client: Client, args: dict) -> CommandResults:
+ """
+ Delete search from QRadar service.
+ possible arguments:
+ - search_id (Required): The search ID to delete.
+ Args:
+ client (Client): QRadar client to perform the API call.
+ args (Dict): Demisto args.
+
+ Returns:
+ CommandResults.
+ """
+ search_id: str = args.get('search_id', '')
+
+ # if this call fails, raise an error and stop command execution
+ response = client.search_delete(search_id)
+
+ return CommandResults(
+ readable_output=f'Search ID {search_id} was successfully deleted.',
+ raw_response=response
+ )
+
+
+def qradar_search_cancel_command(client: Client, args: dict) -> CommandResults:
+ """
+ Cancels a search in the QRadar service.
+ possible arguments:
+ - search_id (Required): The ID of the search to cancel.
+ Args:
+ client (Client): QRadar client to perform the API call.
+ args (Dict): Demisto args.
+
+ Returns:
+ CommandResults.
+ """
+ search_id: str = args.get('search_id', '')
+
+ # if this call fails, raise an error and stop command execution
+ response = client.search_cancel(search_id)
+ if response.get('status') == 'COMPLETED':
+ output = f'Search ID {search_id} is already in a completed status.'
+ else:
+ output = f'Search ID {search_id} was successfully cancelled.'
+ return CommandResults(
+ readable_output=output,
+ raw_response=response
+ )
+
+
def qradar_search_results_get_command(client: Client, args: dict) -> CommandResults:
"""
Retrieves search results from QRadar service.
@@ -4189,7 +4254,12 @@ def qradar_search_retrieve_events_command(
print_debug_msg(f"Polling event failed due to {e}. Will try to poll again in the next interval.")
events = []
status = QueryStatus.WAIT.value
-
+ if is_last_run and status == QueryStatus.WAIT.value:
+ print_debug_msg("It's the last run of the polling, will cancel the query request. ")
+ client.search_cancel(search_id=search_id)
+ return CommandResults(
+ readable_output='Got polling timeout. Query got cancelled.',
+ )
if is_last_run and args.get('success') and not events:
# if last run, we want to get the events that were fetched in the previous calls
return CommandResults(
@@ -5134,6 +5204,12 @@ def main() -> None: # pragma: no cover
]:
return_results(qradar_search_results_get_command(client, args))
+ elif command == 'qradar-search-cancel':
+ return_results(qradar_search_cancel_command(client, args))
+
+ elif command == 'qradar-search-delete':
+ return_results(qradar_search_delete_command(client, args))
+
elif command in [
'qradar-reference-sets-list',
'qradar-get-reference-by-name',
@@ -5261,6 +5337,11 @@ def main() -> None: # pragma: no cover
except Exception as e:
print_debug_msg(f"The integration context_data is {get_integration_context()}")
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{traceback.format_exc()}\nException is: {str(e)}')
+ finally:
+ # CIAC-10628
+ if command not in ("test-module", "fetch-incidents", "long-running-execution"):
+ client._return_execution_metrics_results()
+ client.execution_metrics.metrics = None
''' ENTRY POINT '''
diff --git a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.yml b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.yml
index 2f524faf1dd2..ce16e6b4206c 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.yml
+++ b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.yml
@@ -880,6 +880,18 @@ script:
- name: range
description: 'Range of events to return. (e.g.: 0-20, 3-5, 3-3).'
defaultValue: 0-49
+ - name: qradar-search-delete
+ description: Deletes the search from QRadar.
+ arguments:
+ - name: search_id
+ required: true
+ description: The identifier for an Ariel search.
+ - name: qradar-search-cancel
+ description: Cancels the search in QRadar.
+ arguments:
+ - name: search_id
+ required: true
+ description: The identifier for an Ariel search.
- name: qradar-reference-sets-list
description: Retrieves a list of reference sets.
outputs:
@@ -3094,7 +3106,7 @@ script:
script: '-'
type: python
subtype: python3
- dockerimage: demisto/python3:3.10.14.92207
+ dockerimage: demisto/python3:3.10.14.96411
isremotesyncin: true
longRunning: true
isFetchSamples: true
diff --git a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3_test.py b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3_test.py
index 021b030184b1..870a58feaf4f 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3_test.py
+++ b/Packs/QRadar/Integrations/QRadar_v3/QRadar_v3_test.py
@@ -41,6 +41,7 @@
qradar_log_source_languages_list_command, qradar_log_source_groups_list_command, qradar_log_source_create_command, \
qradar_log_source_delete_command, qradar_log_source_update_command, convert_dict_to_actual_values, \
enrich_offense_with_events, perform_long_running_loop, validate_integration_context, convert_list_to_actual_values, \
+ qradar_search_cancel_command, \
MIRRORED_OFFENSES_FETCHED_CTX_KEY, FetchMode, IndicatorsSearcher
from CommonServerPython import DemistoException, set_integration_context, CommandResults, \
@@ -846,6 +847,7 @@ def test_outputs_enriches(mocker, enrich_func, mock_func_name, args, mock_respon
(qradar_search_create_command, 'search_create'),
(qradar_search_status_get_command, 'search_status_get'),
(qradar_search_results_get_command, 'search_results_get'),
+ (qradar_search_cancel_command, 'search_cancel'),
(qradar_reference_sets_list_command, 'reference_sets_list'),
(qradar_reference_set_create_command, 'reference_set_create'),
(qradar_reference_set_delete_command, 'reference_set_delete'),
diff --git a/Packs/QRadar/Integrations/QRadar_v3/README.md b/Packs/QRadar/Integrations/QRadar_v3/README.md
index 2aca88594b3b..aecade7fc576 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/README.md
+++ b/Packs/QRadar/Integrations/QRadar_v3/README.md
@@ -1,6 +1,8 @@
IBM QRadar SIEM helps security teams accurately detect and prioritize threats across the enterprise, supports API versions 10.1 and above. Provides intelligent insights that enable teams to respond quickly to reduce the impact of incidents.
This integration was integrated and tested with version 14-20 of QRadar v3
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure IBM QRadar v3 on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
@@ -127,6 +129,14 @@ Alternatively, the [retrieve events command](#qradar-search-retrieve-events) can
If the command takes too long to finish executing, try setting the `interval_in_seconds` to a lower value (down to a minimum of 10 seconds).
+### API Call Metrics
+
+This feature collects metadata on QRadar API calls and their success status.
+
+API Call metrics are not available for long-running commands such as `fetch incidents`.
+
+API Metrics are shown in the built-in **API Execution Metrics** dashboard, and are available to use in custom widgets.
+
## Commands
You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook.
@@ -1485,6 +1495,40 @@ Retrieves search results.
| --- | --- | --- |
| QRadar.Search.Result | Unknown | The result of the search. |
+### qradar-search-delete
+
+***
+Deletes a search from QRadar, based on the search ID.
+
+#### Base Command
+
+`qradar-search-delete`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| search_id | The identifier for an Ariel search. | Required |
+
+
+
+
+### qradar-search-cancel
+
+***
+Cancels a search in QRadar, based on the search ID.
+
+#### Base Command
+
+`qradar-search-cancel`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| search_id | The identifier for an Ariel search. | Required |
+
+
### qradar-reference-sets-list
***
diff --git a/Packs/QRadar/Integrations/QRadar_v3/command_examples.txt b/Packs/QRadar/Integrations/QRadar_v3/command_examples.txt
index 92200f302493..275c33a9877d 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/command_examples.txt
+++ b/Packs/QRadar/Integrations/QRadar_v3/command_examples.txt
@@ -11,6 +11,7 @@
!qradar-search-create query_expression="""SELECT "destinationPort" AS 'Destination Port', UniqueCount("sourceIP") AS 'Source IP (Unique Count)', UniqueCount("destinationIP") AS 'Destination IP (Unique Count)', UniqueCount(qid) AS 'Event Name (Unique Count)', UniqueCount(logSourceId) AS 'Log Source (Unique Count)', UniqueCount(category) AS 'Low Level Category (Unique Count)', UniqueCount("protocolId") AS 'Protocol (Unique Count)', UniqueCount("userName") AS 'Username (Unique Count)', MAX("magnitude") AS 'Magnitude (Maximum)', SUM("eventCount") AS 'Event Count (Sum)', COUNT(*) AS 'Count' from events where ( ("creEventList"='100120') or ("creEventList"='100122') or ("creEventList"='100135') AND "eventDirection"='R2L' ) GROUP BY "destinationPort" order by "Event Count (Sum)" desc last 6 hours"""
!qradar-search-status-get search_id=36227863-bc6a-488f-9aa4-0f9c6f767655
!qradar-search-results-get search_id=36227863-bc6a-488f-9aa4-0f9c6f767655 range=0-3
+!qradar-search-delete search_id=36227863-bc6a-488f-9aa4-0f9c6f767655
!qradar-reference-sets-list filter="timeout_type=FIRST_SEEN" range=0-2
!qradar-reference-set-create element_type=IP ref_name="Malicious IPs" time_to_live="1 year" timeout_type=FIRST_SEEN
!qradar-reference-set-value-upsert ref_name="Malicious IPs" value="1.2.3.4,1.2.3.5,192.168.1.3"
diff --git a/Packs/QRadar/Integrations/QRadar_v3/test_data/command_test_data.json b/Packs/QRadar/Integrations/QRadar_v3/test_data/command_test_data.json
index 251fb63c7ea5..109a3328efd1 100644
--- a/Packs/QRadar/Integrations/QRadar_v3/test_data/command_test_data.json
+++ b/Packs/QRadar/Integrations/QRadar_v3/test_data/command_test_data.json
@@ -1370,6 +1370,39 @@
"search_id": "abcd-1234"
}
},
+ "search_cancel":{
+ "response":{
+ "cursor_id":"abcd-1234",
+ "status":"SORTING",
+ "compressed_data_file_count":0,
+ "compressed_data_total_size":0,
+ "data_file_count":8560,
+ "data_total_size":876521343,
+ "index_file_count":0,
+ "index_total_size":0,
+ "processed_record_count":0,
+ "desired_retention_time_msec":86400000,
+ "progress":8,
+ "progress_details":[
+
+ ],
+ "query_execution_time":18767,
+ "query_string":"SELECT QIDNAME(qid), LOGSOURCENAME(logsourceid), CATEGORYNAME(highlevelcategory), CATEGORYNAME(category), PROTOCOLNAME(protocolid), sourceip, sourceport, destinationip, destinationport, QIDDESCRIPTION(qid), username, PROTOCOLNAME(protocolid), RULENAME(\"creEventList\"), sourcegeographiclocation, sourceMAC, sourcev6, destinationgeographiclocation, destinationv6, LOGSOURCETYPENAME(devicetype), credibility, severity, magnitude, eventcount, eventDirection, postNatDestinationIP, postNatDestinationPort, postNatSourceIP, postNatSourcePort, preNatDestinationPort, preNatSourceIP, preNatSourcePort, UTF8(payload), starttime, devicetime FROM events WHERE INOFFENSE(210) limit 20 START 1713099871139",
+ "record_count":0,
+ "size_on_disk":24,
+ "save_results":false,
+ "completed":true,
+ "subsearch_ids":[
+
+ ],
+ "snapshot":"None",
+ "search_id":"abcd-1234"
+ },
+ "expected": {},
+ "args": {
+ "search_id": "abcd-1234"
+ }
+ },
"reference_sets_list": {
"response": [
{
diff --git a/Packs/QRadar/ReleaseNotes/2_4_59.md b/Packs/QRadar/ReleaseNotes/2_4_59.md
new file mode 100644
index 000000000000..97b2641e30cb
--- /dev/null
+++ b/Packs/QRadar/ReleaseNotes/2_4_59.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+
+##### IBM QRadar v3
+
+- Added the following new commands:
+ - ***qradar-search-delete***
+ - ***qradar-search-cancel***
+- Improved implementation of the fetch mechanism to cancel unnecessary search queries.
+- Updated the **qradar-search-retrieve-events** command to automatically cancel the search query when reaching the polling timeout.
+- Updated the Docker image to: *demisto/python3:3.10.14.96411*.
\ No newline at end of file
diff --git a/Packs/QRadar/ReleaseNotes/2_5_0.md b/Packs/QRadar/ReleaseNotes/2_5_0.md
new file mode 100644
index 000000000000..84283bac1f01
--- /dev/null
+++ b/Packs/QRadar/ReleaseNotes/2_5_0.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### IBM QRadar v3
+
+Added support for API Execution Metric reporting for QRadar commands, excluding long-running commands such as fetch incidents.
diff --git a/Packs/QRadar/pack_metadata.json b/Packs/QRadar/pack_metadata.json
index 8fc8bd9fbc33..83b869a1143d 100644
--- a/Packs/QRadar/pack_metadata.json
+++ b/Packs/QRadar/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "IBM QRadar",
"description": "Fetch offenses as incidents and search QRadar",
"support": "xsoar",
- "currentVersion": "2.4.58",
+ "currentVersion": "2.5.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -45,5 +45,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "QRadar v3"
}
\ No newline at end of file
diff --git a/Packs/Rapid7_Nexpose/ReleaseNotes/1_2_23.md b/Packs/Rapid7_Nexpose/ReleaseNotes/1_2_23.md
new file mode 100644
index 000000000000..fddb45c32152
--- /dev/null
+++ b/Packs/Rapid7_Nexpose/ReleaseNotes/1_2_23.md
@@ -0,0 +1,3 @@
+## Rapid7 InsightVM
+
+- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release.
\ No newline at end of file
diff --git a/Packs/Rapid7_Nexpose/pack_metadata.json b/Packs/Rapid7_Nexpose/pack_metadata.json
index 3224eb0c05bd..2984bc59bebb 100644
--- a/Packs/Rapid7_Nexpose/pack_metadata.json
+++ b/Packs/Rapid7_Nexpose/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Rapid7 InsightVM",
"description": "Vulnerability management solution to help reduce threat exposure.",
"support": "xsoar",
- "currentVersion": "1.2.22",
+ "currentVersion": "1.2.23",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/Reco/Integrations/Reco/README.md b/Packs/Reco/Integrations/Reco/README.md
index 34b249d2e711..42c714c88261 100644
--- a/Packs/Reco/Integrations/Reco/README.md
+++ b/Packs/Reco/Integrations/Reco/README.md
@@ -251,9 +251,8 @@ Get 3rd parties accessible to sensitive assets
| --- | --- | --- |
| Reco.Domains.domain | String | The domain of the 3rd party |
| Reco.Domains.last_activity | String | The last interaction time with the 3rd party |
-| Reco.Domains.num_files | Number | The number of files the 3rd party has access to |
-| Reco.Domains.num_users | Number | The number of users the 3rd party has access to |
-| Reco.Domains.data_category | String | The data category of the assets the 3rd party has access to |
+| Reco.Domains.files_num | Number | The number of files the 3rd party has access to |
+| Reco.Domains.users_with_access_num | Number | The number of users the 3rd party has access to |
### reco-get-sensitive-assets-with-public-link
@@ -299,15 +298,16 @@ Get files shared with 3rd parties
#### Context Output
-| **Path** | **Type** | **Description** |
-| --- | --- | --- |
-| Reco.Assets.asset_id | String | The asset id of the file |
-| Reco.Assets.location | String | The location of the file |
-| Reco.Assets.users | String | Users the file is shared with |
-| Reco.Assets.asset | Unknown | The asset metadata |
-| Reco.Assets.data_category | String | The data category of the assets the 3rd party has access to |
-| Reco.Assets.last_access_date | String | The last access date of the asset |
-| Reco.Assets.domain | String | The domain of the 3rd party |
+| **Path** | **Type** | **Description** |
+|------------------------------|----------|-------------------------------------------------------------|
+| Reco.Assets.asset_id | String | The asset id of the file |
+| Reco.Assets.location | String | The location of the file |
+| Reco.Assets.users | String | Users the file is shared with |
+| Reco.Assets.file_owner | String | File Owner |
+| Reco.Assets.asset | Unknown | The asset metadata |
+| Reco.Assets.data_category | String | The data category of the assets the 3rd party has access to |
+| Reco.Assets.last_access_date | String | The last access date of the asset |
+| Reco.Assets.domain | String | The domain of the 3rd party |
### reco-change-alert-status
@@ -379,8 +379,8 @@ Get files exposed to a specific email address
| Reco.Assets.asset | Unknown | Json string of the asset's url and the name |
| Reco.Assets.data_category | String | The data category of the asset |
| Reco.Assets.data_categories | String | The data categories of the asset |
-| Reco.SensitiveAssets.location | String | The path of the asset. |
-| Reco.SensitiveAssets.source | String | SaaS tool source of the asset. |
+| Reco.Assets.location | String | The path of the asset. |
+| Reco.Assets.source | String | SaaS tool source of the asset. |
| Reco.Assets.last_access_date | String | The last access date of the asset |
| Reco.Assets.email_account | String | The last access date of the asset |
| Reco.Assets.file_owner | String | SaaS tool source of the asset |
@@ -408,7 +408,33 @@ Get files exposed to a specific email address
| Reco.Assets.asset | Unknown | Json string of the asset's url and the name |
| Reco.Assets.data_category | String | The data category of the asset |
| Reco.Assets.data_categories | String | The data categories of the asset |
-| Reco.Assets.location | String | The path of the asset |
-| Reco.Assets.source | String | SaaS tool source of the asset |
+| Reco.SensitiveAssets.location | String | The path of the asset |
+| Reco.SensitiveAssets.source | String | SaaS tool source of the asset |
| Reco.Assets.last_access_date | String | The last access date of the asset |
-| Reco.Assets.file_owner | String | SaaS tool source of the asset |
\ No newline at end of file
+| Reco.Assets.file_owner | String | SaaS tool source of the asset |
+
+
+### reco-get-private-email-list-with-access
+
+***
+Get private email list with access
+
+#### Base Command
+
+`reco-get-private-email-list-with-access`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- |----------|---------------------------|
+| Reco.privateEmails.email_account | String | The email account |
+| Reco.privateEmails.primary_email | String | The primary email account |
+| Reco.privateEmails.files_num | String | Number of files |
+| Reco.privateEmails.user_category | String | The category of the user |
+
+
diff --git a/Packs/Reco/Integrations/Reco/Reco.py b/Packs/Reco/Integrations/Reco/Reco.py
index 92b7b39fdbfe..ee374743481b 100644
--- a/Packs/Reco/Integrations/Reco/Reco.py
+++ b/Packs/Reco/Integrations/Reco/Reco.py
@@ -439,39 +439,78 @@ def get_files_exposed_to_email(self, email_account) -> list[dict[str, Any]]:
demisto.error(f"Validate API key ReadTimeout error: {str(e)}")
raise e
+ def get_list_of_private_emails_with_access(self) -> list[dict[str, Any]]:
+ """Returns a list of private email addresses with access."""
+ params = {
+ "getTableRequest": {
+ "tableName": "data_posture_view_private_email_with_access",
+ "pageSize": PAGE_SIZE,
+ "fieldSorts": {
+ "sorts": [
+ {
+ "sortBy": "files_num",
+ "sortDirection": "SORT_DIRECTION_DESC"
+ }
+ ]
+ },
+ "fieldFilters": {
+ "relationship": "FILTER_RELATIONSHIP_AND",
+ "fieldFilterGroups": {
+ "fieldFilters": []
+ }
+ }
+ }
+ }
+ try:
+ response = self._http_request(
+ method="PUT",
+ url_suffix="/risk-management/get-data-risk-management-table",
+ timeout=RECO_API_TIMEOUT_IN_SECONDS,
+ data=json.dumps(params),
+ )
+ return extract_response(response)
+ except Exception as e:
+ demisto.error(f"Validate API key ReadTimeout error: {str(e)}")
+ raise e
+
def get_3rd_parties_risk_list(self, last_interaction_time_before_in_days: int) -> list[dict[str, Any]]:
formatted_date = self.get_date_time_before_days_formatted(last_interaction_time_before_in_days)
params = {
"getTableRequest": {
- "tableName": "DATA_RISK_MANAGEMENT_VIEW_TOP_3RD_PARTIES_DOMAIN",
+ "tableName": "data_posture_view_3rd_parties_domain",
"pageSize": PAGE_SIZE,
"fieldSorts": {
"sorts": [
{
- "sortBy": "last_activity",
- "sortDirection": "SORT_DIRECTION_ASC"
- },
- {
- "sortBy": "num_files",
+ "sortBy": "files_num",
"sortDirection": "SORT_DIRECTION_DESC"
}
]
},
"fieldFilters": {
"relationship": "FILTER_RELATIONSHIP_AND",
- "filters": {
- "filters": [
+ "fieldFilterGroups": {
+ "fieldFilters": [
{
- "field": "last_activity",
- "before": {
- "value": f"{formatted_date}"
+ "relationship": "FILTER_RELATIONSHIP_AND",
+ "fieldFilterGroups": {
+ "fieldFilters": [
+ {
+ "relationship": "FILTER_RELATIONSHIP_AND",
+ "filters": {
+ "filters": [
+ {
+ "field": "last_activity",
+ "before": {
+ "value": f"{formatted_date}"
+ }
+ }
+ ]
+ }
+ }
+ ]
}
- },
- {
- "field": "data_category",
- "stringNotContains": {
- "value": "ALL"
- }}
+ }
]
}
}
@@ -504,12 +543,12 @@ def get_files_shared_with_3rd_parties(self,
formatted_date = self.get_date_time_before_days_formatted(last_interaction_time_before_in_days)
params = {
"getTableRequest": {
- "tableName": "DATA_RISK_MANAGEMENT_VIEW_SHARED_TOP_EXT_DOMAIN_FILES",
- "pageSize": 100,
+ "tableName": "data_posture_view_files_by_domain_slider",
+ "pageSize": PAGE_SIZE,
"fieldSorts": {
"sorts": [
{
- "sortBy": "data_category",
+ "sortBy": "last_access_date",
"sortDirection": "SORT_DIRECTION_ASC"
}
]
@@ -519,7 +558,7 @@ def get_files_shared_with_3rd_parties(self,
"fieldFilterGroups": {
"fieldFilters": [
{
- "relationship": "FILTER_RELATIONSHIP_OR",
+ "relationship": "FILTER_RELATIONSHIP_AND",
"filters": {
"filters": [
{
@@ -529,7 +568,7 @@ def get_files_shared_with_3rd_parties(self,
}
},
{
- "field": "last_access_time",
+ "field": "last_access_date",
"before": {
"value": f"{formatted_date}"
}
@@ -1219,9 +1258,8 @@ def get_3rd_parties_list(reco_client: RecoClient, last_interaction_time_in_days:
headers=[
"domain",
"last_activity",
- "num_files",
- "num_users",
- "data_category",
+ "files_num",
+ "users_with_access_num",
],
),
outputs_prefix="Reco.Domains",
@@ -1248,6 +1286,7 @@ def get_files_shared_with_3rd_parties(reco_client: RecoClient,
"domain",
"location",
"users",
+ "file_owner",
"data_category",
"asset",
"last_access_date",
@@ -1410,6 +1449,30 @@ def get_max_fetch(max_fetch: int) -> int:
return max_fetch
+def get_private_email_list_with_access(reco_client):
+ result = reco_client.get_list_of_private_emails_with_access()
+ identities_list = []
+ for identity in result:
+ asset_as_dict = parse_table_row_to_dict(identity.get("cells", {}))
+ identities_list.append(asset_as_dict)
+ return CommandResults(
+ readable_output=tableToMarkdown(
+ "PrivateEmails",
+ identities_list,
+ headers=[
+ "email_account",
+ "primary_email",
+ "files_num",
+ "user_category",
+ ],
+ ),
+ outputs_prefix="Reco.privateEmails",
+ outputs_key_field="email_account",
+ outputs=identities_list,
+ raw_response=result,
+ )
+
+
def main() -> None:
"""main function, parses params and runs command functions
@@ -1550,6 +1613,9 @@ def main() -> None:
elif command == "reco-get-assets-shared-externally":
result = get_assets_shared_externally_command(reco_client, demisto.args()["email_address"])
return_results(result)
+ elif command == "reco-get-private-email-list-with-access":
+ result = get_private_email_list_with_access(reco_client)
+ return_results(result)
else:
raise NotImplementedError(f"{command} is not an existing reco command")
except Exception as e:
diff --git a/Packs/Reco/Integrations/Reco/Reco.yml b/Packs/Reco/Integrations/Reco/Reco.yml
index b25c3b1076b8..626e03203792 100644
--- a/Packs/Reco/Integrations/Reco/Reco.yml
+++ b/Packs/Reco/Integrations/Reco/Reco.yml
@@ -193,15 +193,12 @@ script:
- contextPath: Reco.Domains.last_activity
description: The last interaction time with the 3rd party.
type: String
- - contextPath: Reco.Domains.num_files
+ - contextPath: Reco.Domains.files_num
description: The number of files the 3rd party has access to.
type: Number
- - contextPath: Reco.Domains.num_users
+ - contextPath: Reco.Domains.users_with_access_num
description: The number of users the 3rd party has access to.
type: Number
- - contextPath: Reco.Domains.data_category
- description: The data category of the assets the 3rd party has access to.
- type: String
- arguments: []
description: Get all sensitive assets with public link from Reco.
name: reco-get-sensitive-assets-with-public-link
@@ -243,6 +240,9 @@ script:
- contextPath: Reco.Assets.location
description: The location of the file.
type: String
+ - contextPath: Reco.Assets.file_owner
+ description: The owner of the file.
+ type: String
- contextPath: Reco.Assets.users
description: Users the file is shared with.
type: String
@@ -317,10 +317,10 @@ script:
- contextPath: Reco.Assets.data_categories
description: The data categories of the asset.
type: String
- - contextPath: Reco.SensitiveAssets.location
+ - contextPath: Reco.Assets.location
description: The path of the asset.
type: String
- - contextPath: Reco.SensitiveAssets.source
+ - contextPath: Reco.Assets.source
description: SaaS tool source of the asset.
type: String
- contextPath: Reco.Assets.last_access_date
@@ -365,6 +365,22 @@ script:
type: String
description: Get files user has access to from Reco.
name: reco-get-assets-shared-externally
+ - arguments: [ ]
+ description: Get private email addresses with access.
+ name: reco-get-private-email-list-with-access
+ outputs:
+ - contextPath: Reco.privateEmails.email_account
+ description: Private email account.
+ type: String
+ - contextPath: Reco.privateEmails.primary_email
+ description: Primary email account.
+ type: String
+ - contextPath: Reco.privateEmails.files_num
+ description: Number of files the private email account has access to.
+ type: Number
+ - contextPath: Reco.privateEmails.user_category
+ description: User category.
+ type: String
tests:
- No tests
fromversion: 6.5.0
diff --git a/Packs/Reco/Integrations/Reco/Reco_test.py b/Packs/Reco/Integrations/Reco/Reco_test.py
index 137b55ee5439..243b780efe4e 100644
--- a/Packs/Reco/Integrations/Reco/Reco_test.py
+++ b/Packs/Reco/Integrations/Reco/Reco_test.py
@@ -19,7 +19,8 @@
get_sensitive_assets_by_name,
get_sensitive_assets_by_id, get_link_to_user_overview_page, get_sensitive_assets_shared_with_public_link,
get_3rd_parties_list, get_files_shared_with_3rd_parties, map_reco_alert_score_to_demisto_score,
- get_user_context_by_email_address, get_assets_shared_externally_command, get_files_exposed_to_email_command
+ get_user_context_by_email_address, get_assets_shared_externally_command, get_files_exposed_to_email_command,
+ get_private_email_list_with_access
)
from test_data.structs import (
@@ -31,7 +32,6 @@
GetIncidentTableResponse,
)
-
DUMMY_RECO_API_DNS_NAME = "https://dummy.reco.ai/api"
INCIDET_ID_UUID = "87799f2f-c012-43b6-ace2-78ec984427f3"
ALERT_ID = "ee593dc2-a50e-415e-bed0-8403c18b26ca"
@@ -791,6 +791,18 @@ def test_get_exposed_publicly(requests_mock, reco_client: RecoClient) -> None:
assert actual_result.outputs[0].get("source") is not None
+def test_get_private_email_list_with_access(requests_mock, reco_client: RecoClient) -> None:
+ requests_mock.put(
+ f"{DUMMY_RECO_API_DNS_NAME}/risk-management/get-data-risk-management-table",
+ json={"getTableResponse": {}},
+ status_code=200
+ )
+ actual_result = get_private_email_list_with_access(
+ reco_client=reco_client
+ )
+ assert 0 == len(actual_result.outputs)
+
+
def test_get_assets_shared_externally_command(requests_mock, reco_client: RecoClient) -> None:
raw_result = get_random_assets_user_has_access_to_response()
requests_mock.post(
@@ -801,7 +813,6 @@ def test_get_assets_shared_externally_command(requests_mock, reco_client: RecoCl
email_address="g@example.com"
)
assert len(actual_result.outputs) == len(raw_result.getTableResponse.data.rows)
- assert actual_result.outputs[0].get("source") is not None
def test_get_files_exposed_to_email_command(requests_mock, reco_client: RecoClient) -> None:
diff --git a/Packs/Reco/README.md b/Packs/Reco/README.md
index 7989beeaa0f1..2f25cde3ac82 100644
--- a/Packs/Reco/README.md
+++ b/Packs/Reco/README.md
@@ -36,6 +36,7 @@ The Reco and Palo Alto Networks Cortex XSOAR integration empower organizations t
- **reco-get-3rd-parties-accessible-to-data-list** - Get files shared with 3rd parties
- **reco-get-sensitive-assets-with-public-link** - Get sensitive assets publicly exposed
- **reco-get-user-context-by-email-address** - Get user context by email address
+- **reco-get-private-email-list-with-access** - Get private email list with access
For more information on Reco, please visit www.reco.ai
diff --git a/Packs/Reco/ReleaseNotes/1_4_0.md b/Packs/Reco/ReleaseNotes/1_4_0.md
new file mode 100644
index 000000000000..4d76c8f2e33d
--- /dev/null
+++ b/Packs/Reco/ReleaseNotes/1_4_0.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Reco
+
+- Fixed the ***reco-get-3rd-parties-accessible-to-data-list*** command to use the new API endpoint.
+- Added the new ***reco-get-private-email-list-with-access*** command.
diff --git a/Packs/Reco/pack_metadata.json b/Packs/Reco/pack_metadata.json
index 5201eb989cbd..f628ab8e93a5 100644
--- a/Packs/Reco/pack_metadata.json
+++ b/Packs/Reco/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Reco",
"description": "Reco is an identity-first SaaS security solution that empowers organizations with full visibility into every app, identity, and their actions to seamlessly prioritize and control risks in the SaaS ecosystem",
"support": "partner",
- "currentVersion": "1.3.0",
+ "currentVersion": "1.4.0",
"author": "Reco",
"url": "https://reco.ai",
"email": "support@reco.ai",
diff --git a/Packs/RecordedFuture/Integrations/RecordedFutureEventCollector/README.md b/Packs/RecordedFuture/Integrations/RecordedFutureEventCollector/README.md
index 41c9c88dc64c..b88671076d14 100644
--- a/Packs/RecordedFuture/Integrations/RecordedFutureEventCollector/README.md
+++ b/Packs/RecordedFuture/Integrations/RecordedFutureEventCollector/README.md
@@ -1,6 +1,8 @@
This integration fetches alerts from Recorded Future.
This integration was integrated and tested with version 2 of the Recorded Future API.
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure Recorded Future Event Collector on Cortex XSOAR
1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
diff --git a/Packs/RecordedFuture/pack_metadata.json b/Packs/RecordedFuture/pack_metadata.json
index 6186cea246b9..989dc60006a8 100644
--- a/Packs/RecordedFuture/pack_metadata.json
+++ b/Packs/RecordedFuture/pack_metadata.json
@@ -41,5 +41,6 @@
"marketplaces": [
"xsoar",
"marketplacev2"
- ]
+ ],
+ "defaultDataSource": "RecordedFutureEventCollector"
}
\ No newline at end of file
diff --git a/Packs/Redmine/Integrations/Redmine/README.md b/Packs/Redmine/Integrations/Redmine/README.md
index acff48c141a1..664a54af9d46 100644
--- a/Packs/Redmine/Integrations/Redmine/README.md
+++ b/Packs/Redmine/Integrations/Redmine/README.md
@@ -53,7 +53,7 @@ After you successfully execute a command, a DBot message appears in the War Room
| fixed_version_id | The target version ID for this issue. | Optional |
| assigned_to_id | The ID of the user to assign the issue to. | Optional |
| parent_issue_id | The ID of the parent issue. | Optional |
-| custom_fields | The custom field to update. The format is costumFieldID:Value,costumFieldID:Value, etc. | Optional |
+| custom_fields | The custom field to update. - "The format is `{\"customFieldID2\": \"value3\", \"customFieldID1\": [\"value1\",\"value2\"]}`." - Use an array if the field is of multiselect type. - Instruction for each custom field type: - Boolean: use "0" for False and "1" for True. - Date: the format is "yyyy-mm-dd". - Float: A number with two digits after the decimal point. - Integer: A number. - Key/Value list: Use the Key (ID) of the value. - User: Use the user ID. - Version- use the version ID.| Optional |
| watcher_user_ids | An array with watcher user IDs for this issue -> 1,2,3. | Optional |
| is_private | Is the issue private?. Possible values are: True, False. | Optional |
| estimated_hours | The number of hours estimated for this issue. | Optional |
@@ -301,7 +301,7 @@ Update an existing issue. When attaching a file to an issue, include the entry I
| fixed_version_id | The ID of the fixed version for the issue. | Optional |
| assigned_to_id | The ID of the user to whom the issue is assigned. | Optional |
| parent_issue_id | The ID of the parent issue, if applicable. | Optional |
-| custom_fields | The custom field to update. The format is costumFieldID:Value,costumFieldID:Value etc. | Optional |
+| custom_fields | The custom field to update. - "The format is `{\"customFieldID2\": \"value3\", \"customFieldID1\": [\"value1\",\"value2\"]}`." - Use an array if the field is of multiselect type. - Instruction for each custom field type: - Boolean: use "0" for False and "1" for True. - Date: the format is "yyyy-mm-dd". - Float: A number with two digits after the decimal point. - Integer: A number. - Key/Value list: Use the Key (ID) of the value. - User: Use the user ID. - Version- use the version ID.| Optional |
| watcher_user_ids | A comma-separated list of watcher IDs. -> 1,2,3. | Optional |
| is_private | Is the issue private?. Possible values are: True, False. | Optional |
| estimated_hours | The estimated number of hours to complete the issue. | Optional |
diff --git a/Packs/Redmine/Integrations/Redmine/Redmine.py b/Packs/Redmine/Integrations/Redmine/Redmine.py
index 397fd58552d2..4d363947b1e6 100644
--- a/Packs/Redmine/Integrations/Redmine/Redmine.py
+++ b/Packs/Redmine/Integrations/Redmine/Redmine.py
@@ -165,15 +165,18 @@ def adjust_paging_to_request(page_number, page_size, limit):
def format_custom_field_to_request(args: Dict[str, Any]):
- if custom_fields := args.pop('custom_fields', None):
- custom_fields = argToList(custom_fields)
+ if custom_fields := args.pop('custom_fields', ''):
try:
- args['custom_fields'] = [{'id': field.split(":", 1)[0], 'value': field.split(":", 1)[1]}
- for field in custom_fields if field]
+ custom_fields_for_request = []
+ custom_fields_dict = json.loads(custom_fields)
+ for key, value in custom_fields_dict.items():
+ custom_fields_for_request.append({"id": key, "value": value})
+ args['custom_fields'] = custom_fields_for_request
except Exception as e:
- if 'list index out of range' in e.args[0] or 'substring not found' in e.args[0]:
- raise DemistoException("Custom fields not in format, please follow the instructions")
- raise
+ raise DemistoException(
+ f"Custom fields not in format, please follow this format:"
+ f" `{{\"customFieldID2\": \"value3\", \"customFieldID1\": [\"value1\",\"value2\"]}}` - Please use an array if "
+ f"the field is of multiselect type. with error: {e}")
def get_file_content(entry_id: str) -> bytes:
diff --git a/Packs/Redmine/Integrations/Redmine/Redmine.yml b/Packs/Redmine/Integrations/Redmine/Redmine.yml
index 79bbe5babaa2..e15e95002475 100644
--- a/Packs/Redmine/Integrations/Redmine/Redmine.yml
+++ b/Packs/Redmine/Integrations/Redmine/Redmine.yml
@@ -66,7 +66,18 @@ script:
- name: parent_issue_id
description: The ID of the parent issue.
- name: custom_fields
- description: The custom field to update. The format is costumFieldID:Value,costumFieldID:Value, etc.
+ description: |-
+ The custom field to update.
+ - "The format is `{\"customFieldID2\": \"value3\", \"customFieldID1\": [\"value1\",\"value2\"]}`."
+ - Please use an array if the field is of multiselect type.
+ - Instruction for each custom field type:
+ - Boolean: use "0" for False and "1" for True.
+ - Date: the format is "yyyy-mm-dd".
+ - Float: A number with two digits after the decimal point.
+ - Integer: A number.
+ - Key/Value list: Use the Key (ID) of the value.
+ - User: Use the user ID.
+ - Version: use the version ID.
isArray: true
- name: watcher_user_ids
description: An array with watcher user IDs for this issue -> 1,2,3.
@@ -202,7 +213,18 @@ script:
- name: parent_issue_id
description: The ID of the parent issue, if applicable.
- name: custom_fields
- description: The custom field to update. The format is costumFieldID:Value,costumFieldID:Value etc.
+ description: |-
+ The custom field to update.
+ - "The format is `{\"customFieldID2\": \"value3\", \"customFieldID1\": [\"value1\",\"value2\"]}`."
+ - Please use an array if the field is of multiselect type.
+ - Instruction for each custom field type:
+ - Boolean: use "0" for False and "1" for True.
+ - Date: the format is "yyyy-mm-dd".
+ - Float: A number with two digits after the decimal point.
+ - Integer: A number.
+ - Key/Value list: Use the Key (ID) of the value.
+ - User: Use the user ID.
+ - Version: use the version ID.
isArray: true
- name: watcher_user_ids
description: A comma-separated list of watcher IDs. -> 1,2,3.
diff --git a/Packs/Redmine/Integrations/Redmine/Redmine_test.py b/Packs/Redmine/Integrations/Redmine/Redmine_test.py
index 58d4ab74c736..8b4b707c54da 100644
--- a/Packs/Redmine/Integrations/Redmine/Redmine_test.py
+++ b/Packs/Redmine/Integrations/Redmine/Redmine_test.py
@@ -13,7 +13,7 @@ def redmine_client(url: str = 'url', verify_certificate: bool = True, proxy: boo
def test_create_issue_command(mocker, redmine_client):
"""
Given:
- - All relevant arguments for the command that is executed without list id
+ - All relevant arguments for the command that is executed
When:
- redmine-issue-create command is executed
Then:
@@ -23,7 +23,7 @@ def test_create_issue_command(mocker, redmine_client):
http_request = mocker.patch.object(redmine_client, '_http_request')
http_request.return_value = {"issue": {"id": "1"}}
args = {'project_id': '1', 'issue_id': '1', 'subject': 'changeFromCode', 'priority_id': '1', 'tracker_id': '1',
- 'watcher_user_ids': '[1]', 'custom_fields': '1:https://test:appear'}
+ 'watcher_user_ids': '[1]', 'custom_fields': '{"1":"https://test:appear"}'}
create_issue_command(redmine_client, args=args)
http_request.assert_called_with('POST', '/issues.json',
params={},
@@ -40,6 +40,34 @@ def test_create_issue_command(mocker, redmine_client):
headers={'Content-Type': 'application/json', 'X-Redmine-API-Key': True})
+def test_create_issue_command_with_multiselect_cf(mocker, redmine_client):
+ """
+ Given:
+ - All relevant arguments for the command that is executed (with multiselect custom field)
+ When:
+ - redmine-issue-create command is executed
+ Then:
+ - The http request is called with the right arguments
+ """
+ from Redmine import create_issue_command
+ http_request = mocker.patch.object(redmine_client, '_http_request')
+ http_request.return_value = {"issue": {"id": "1"}}
+ args = {'project_id': '1', 'issue_id': '1', 'subject': 'changeFromCode', 'priority_id': '1', 'tracker_id': '1',
+ 'watcher_user_ids': '[1]', 'custom_fields': '{"4":["a","b","c"],"1":"hello [], my name is paloalto, paloalto"}'}
+ create_issue_command(redmine_client, args=args)
+ http_request.assert_called_with('POST', '/issues.json', params={}, json_data={'issue':
+ {'issue_id': '1', 'subject': 'changeFromCode',
+ 'priority_id': '1', 'tracker_id': '1',
+ 'custom_fields': [
+ {'id': '4', 'value': ['a', 'b', 'c']},
+ {'id': '1',
+ 'value': ('hello [], my name is paloalto,'
+ ' paloalto')}
+ ],
+ 'project_id': '1', 'watcher_user_ids': [1]}},
+ headers={'Content-Type': 'application/json', 'X-Redmine-API-Key': True})
+
+
def test_create_issue_command_not_url_cf(mocker, redmine_client):
"""
Given:
@@ -53,7 +81,7 @@ def test_create_issue_command_not_url_cf(mocker, redmine_client):
http_request = mocker.patch.object(redmine_client, '_http_request')
http_request.return_value = {"issue": {"id": "1"}}
args = {'project_id': '1', 'issue_id': '1', 'subject': 'changeFromCode', 'tracker_id': '2', 'priority_id': '2',
- 'watcher_user_ids': '[1]', 'custom_fields': '1:hello'}
+ 'watcher_user_ids': '[1]', 'custom_fields': '{"1":"hello"}'}
create_issue_command(redmine_client, args=args)
http_request.assert_called_with('POST', '/issues.json',
params={},
@@ -82,7 +110,7 @@ def test_create_issue_command_response(mocker, redmine_client):
'issue_id': '1',
'subject': 'testResponse',
'tracker_id': '1',
- 'custom_fields': '1:https://test:appear,,,,'
+ 'custom_fields': '{"1":"https://test:appear,,,,"}'
}
create_issue_request_mock = mocker.patch.object(redmine_client, 'create_issue_request')
create_issue_request_mock.return_value = {'issue': {'id': '789', 'project': {'name': 'testing', 'id': '1'},
@@ -96,6 +124,38 @@ def test_create_issue_command_response(mocker, redmine_client):
'https://test:appear |\n')
+def test_create_issue_command_with_multiselect_cf_response(mocker, redmine_client):
+ """
+ Given:
+ - All relevant arguments for the command that is executed without list id (multiselect custom field)
+ When:
+ - redmine-issue-create command is executed
+ Then:
+ - The http request return with the expected response
+ """
+ from Redmine import create_issue_command
+ args = {
+ 'project_id': '1',
+ 'issue_id': '1',
+ 'subject': 'testResponse',
+ 'tracker_id': '1',
+ 'custom_fields': '{"4":["a","b","c"],"1":"hello [], my name is paloalto, paloalto"}'
+ }
+ create_issue_request_mock = mocker.patch.object(redmine_client, 'create_issue_request')
+ create_issue_request_mock.return_value = {'issue': {'id': '789', 'project': {'name': 'testing', 'id': '1'},
+ 'subject': 'testResponse', 'tracker': {'name': 'bug', 'id': '1'},
+ 'custom_fields': [{'name': 'test', 'value': ["a", "b", "c"]},
+ {'name': 'test2', 'value': ("hello [], my name is "
+ "paloalto, paloalto")}]
+ }
+ }
+ result = create_issue_command(redmine_client, args)
+ assert result.readable_output == ('### The issue you created:\n|Id|Project|Tracker|Subject|Custom Fields|\n|---|---|---|---'
+ '|---|\n| 789 | testing | bug | testResponse | **-**\t***name***: test \t**value**: '
+ '\t\t***values***: a, b, c **-**\t***name***: test2 \t***value***: hello [], my name'
+ ' is paloalto, paloalto |\n')
+
+
def test_create_issue_command_invalid_custom_fields(redmine_client):
"""
Given:
@@ -107,11 +167,13 @@ def test_create_issue_command_invalid_custom_fields(redmine_client):
"""
from Redmine import create_issue_command
from CommonServerPython import DemistoException
- args = {'project_id': '1', 'custom_fields': '1:https://test:appear,111', 'issue_id': '1', 'subject': 'testSub',
+ args = {'project_id': '1', 'custom_fields': '{"1https://test:appear,111"}', 'issue_id': '1', 'subject': 'testSub',
'tracker': 'bug', 'watcher_user_ids': '[1]', 'priority': 'high'}
with pytest.raises(DemistoException) as e:
create_issue_command(redmine_client, args)
- assert e.value.message == "Custom fields not in format, please follow the instructions"
+ assert e.value.message == ('Custom fields not in format, please follow this format: `{"customFieldID2": "value3", '
+ '"customFieldID1": ["value1","value2"]}` - Please use an array if the field is of multiselect type'
+ '. with error: Expecting \':\' delimiter: line 1 column 28 (char 27)')
def test_create_issue_command_no_token_created_for_file(mocker, redmine_client):
@@ -247,6 +309,33 @@ def test_update_issue_command(mocker, redmine_client):
empty_valid_codes=[204], return_empty_response=True)
+def test_update_issue_command_with_multiselect_cf(mocker, redmine_client):
+ """
+ Given:
+ - All relevant arguments for the command that is executed (with multiselect custom field)
+ When:
+ - redmine-issue-update command is executed
+ Then:
+ - The http request is called with the right arguments
+ """
+ from Redmine import update_issue_command
+ http_request = mocker.patch.object(redmine_client, '_http_request')
+ args = {'issue_id': '1', 'subject': 'changeFromCode', 'tracker_id': '1', 'status_id': '1', 'priority_id': '1',
+ 'watcher_user_ids': '[1]', 'custom_fields': '{"4":["a","b","c"],"1":"hello [], my name is paloalto, paloalto"}'
+ }
+ update_issue_command(redmine_client, args=args)
+ http_request.assert_called_with('PUT', '/issues/1.json', json_data={'issue': {'subject': 'changeFromCode', 'tracker_id': '1',
+ 'status_id': '1', 'priority_id': '1',
+ 'custom_fields': [{'id': '4', 'value':
+ ['a', 'b', 'c']},
+ {'id': '1', 'value':
+ ('hello [], my name is '
+ 'paloalto, paloalto')}],
+ 'watcher_user_ids': [1]}},
+ headers={'Content-Type': 'application/json', 'X-Redmine-API-Key': True},
+ empty_valid_codes=[204], return_empty_response=True)
+
+
def test_update_issue_command_response(mocker, redmine_client):
"""
Given:
@@ -260,7 +349,7 @@ def test_update_issue_command_response(mocker, redmine_client):
update_issue_request_mock = mocker.patch.object(redmine_client, 'update_issue_request')
args = {'issue_id': '1', 'subject': 'changefortest', 'tracker_id': '1',
'status_id': '1', 'priority_id': '1', 'watcher_user_ids': '[1]',
- 'custom_fields': '1:https://test:appear'}
+ 'custom_fields': '{"4":["a","b","c"],"1":"hello [], my name is paloalto, paloalto"}'}
update_issue_request_mock.return_value = {}
result = update_issue_command(redmine_client, args=args)
assert result.readable_output == 'Issue with id 1 was successfully updated.'
@@ -281,7 +370,9 @@ def test_update_issue_command_invalid_custom_fields(redmine_client):
'status_id': '1', 'priority_id': 'high'}
with pytest.raises(DemistoException) as e:
update_issue_command(redmine_client, args)
- assert e.value.message == "Custom fields not in format, please follow the instructions"
+ assert e.value.message == ('Custom fields not in format, please follow this format: `{"customFieldID2": "value3", '
+ '"customFieldID1": ["value1","value2"]}` - Please use an array if the field is of multiselect type'
+ '. with error: Expecting value: line 1 column 1 (char 0)')
def test_update_issue_command_no_token_created_for_file(mocker, redmine_client):
diff --git a/Packs/Redmine/ReleaseNotes/1_0_4.md b/Packs/Redmine/ReleaseNotes/1_0_4.md
new file mode 100644
index 000000000000..7f27caa062a6
--- /dev/null
+++ b/Packs/Redmine/ReleaseNotes/1_0_4.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### Redmine
+
+Fixed an issue where the *custom_fields* argument in the ***redmine-issue-create*** and ***redmine-issue-update*** commands didn't handle multiselect fields.
+
diff --git a/Packs/Redmine/pack_metadata.json b/Packs/Redmine/pack_metadata.json
index 69071053edbc..b70fc7272e92 100644
--- a/Packs/Redmine/pack_metadata.json
+++ b/Packs/Redmine/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Redmine",
"description": "A project management and issue tracking system.",
"support": "xsoar",
- "currentVersion": "1.0.3",
+ "currentVersion": "1.0.4",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
diff --git a/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.json b/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.json
new file mode 100644
index 000000000000..ba72d3e7f2f1
--- /dev/null
+++ b/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) ReversingLabs TitaniumCloud v2 will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.md b/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.md
new file mode 100644
index 000000000000..9f81bb860543
--- /dev/null
+++ b/Packs/ReversingLabs_Titanium_Cloud/ReleaseNotes/2_5_1.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### ReversingLabs TitaniumCloud v2
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now ReversingLabs TitaniumCloud v2 will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/ReversingLabs_Titanium_Cloud/pack_metadata.json b/Packs/ReversingLabs_Titanium_Cloud/pack_metadata.json
index bd387523c406..a8679dda8228 100644
--- a/Packs/ReversingLabs_Titanium_Cloud/pack_metadata.json
+++ b/Packs/ReversingLabs_Titanium_Cloud/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ReversingLabs TitaniumCloud",
"description": "ReversingLabs TitaniumCloud provides file reputation services, threat classification and rich context on over 10 billion files.",
"support": "partner",
- "currentVersion": "2.5.0",
+ "currentVersion": "2.5.1",
"author": "ReversingLabs",
"url": "https://www.reversinglabs.com/products/malware-analysis-platform",
"email": "support@reversinglabs.com",
diff --git a/Packs/RubrikPolaris/ReleaseNotes/1_3_3.md b/Packs/RubrikPolaris/ReleaseNotes/1_3_3.md
new file mode 100644
index 000000000000..a1a4a923051e
--- /dev/null
+++ b/Packs/RubrikPolaris/ReleaseNotes/1_3_3.md
@@ -0,0 +1,35 @@
+
+#### Scripts
+
+##### RubrikSonarSensitiveHits
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikCDMClusterConnectionState
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikRadarFilesDeleted
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikRadarFilesModified
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikSonarTotalHits
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikSonarOpenAccessFiles
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### RubrikRadarFilesAdded
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
+##### DateToTimeStamp
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/RubrikPolaris/Scripts/DateToTimeStamp/DateToTimeStamp.yml b/Packs/RubrikPolaris/Scripts/DateToTimeStamp/DateToTimeStamp.yml
index 79473626f132..69014de9103a 100644
--- a/Packs/RubrikPolaris/Scripts/DateToTimeStamp/DateToTimeStamp.yml
+++ b/Packs/RubrikPolaris/Scripts/DateToTimeStamp/DateToTimeStamp.yml
@@ -12,10 +12,10 @@ enabled: true
args:
- name: value
required: true
- description: Date to convert
+ description: Date to convert.
scripttarget: 0
subtype: python3
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
runas: DBotWeakRole
fromversion: 6.0.0
tests:
diff --git a/Packs/RubrikPolaris/Scripts/RubrikCDMClusterConnectionState/RubrikCDMClusterConnectionState.yml b/Packs/RubrikPolaris/Scripts/RubrikCDMClusterConnectionState/RubrikCDMClusterConnectionState.yml
index ad73bcd8105c..880ebb94784b 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikCDMClusterConnectionState/RubrikCDMClusterConnectionState.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikCDMClusterConnectionState/RubrikCDMClusterConnectionState.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Radar amount of Files Added.
commonfields:
id: RubrikCDMClusterConnectionState
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikCDMClusterConnectionState
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesAdded/RubrikRadarFilesAdded.yml b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesAdded/RubrikRadarFilesAdded.yml
index 890ae9eaad89..458e14a136fd 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesAdded/RubrikRadarFilesAdded.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesAdded/RubrikRadarFilesAdded.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Radar amount of Files Added.
commonfields:
id: RubrikRadarFilesAdded
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikRadarFilesAdded
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesDeleted/RubrikRadarFilesDeleted.yml b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesDeleted/RubrikRadarFilesDeleted.yml
index b448f3866b45..793c7cbdf7ee 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesDeleted/RubrikRadarFilesDeleted.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesDeleted/RubrikRadarFilesDeleted.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Radar amount of Files Deleted.
commonfields:
id: RubrikRadarFilesDeleted
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikRadarFilesDeleted
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesModified/RubrikRadarFilesModified.yml b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesModified/RubrikRadarFilesModified.yml
index 7acc2816bbce..09998966112d 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikRadarFilesModified/RubrikRadarFilesModified.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikRadarFilesModified/RubrikRadarFilesModified.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Radar amount of Files Modified.
commonfields:
id: RubrikRadarFilesModified
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikRadarFilesModified
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/Scripts/RubrikSonarOpenAccessFiles/RubrikSonarOpenAccessFiles.yml b/Packs/RubrikPolaris/Scripts/RubrikSonarOpenAccessFiles/RubrikSonarOpenAccessFiles.yml
index ae0eee8ba2ee..5d289478e6eb 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikSonarOpenAccessFiles/RubrikSonarOpenAccessFiles.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikSonarOpenAccessFiles/RubrikSonarOpenAccessFiles.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Polaris Sonar Open Access Files Count.
commonfields:
id: RubrikSonarOpenAccessFiles
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikSonarOpenAccessFiles
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/Scripts/RubrikSonarSensitiveHits/RubrikSonarSensitiveHits.yml b/Packs/RubrikPolaris/Scripts/RubrikSonarSensitiveHits/RubrikSonarSensitiveHits.yml
index e3b15426fb06..bd5a0d757e62 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikSonarSensitiveHits/RubrikSonarSensitiveHits.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikSonarSensitiveHits/RubrikSonarSensitiveHits.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Polaris Sonar data classification results.
commonfields:
id: RubrikSonarSensitiveHits
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
name: RubrikSonarSensitiveHits
runas: DBotWeakRole
script: ''
diff --git a/Packs/RubrikPolaris/Scripts/RubrikSonarTotalHits/RubrikSonarTotalHits.yml b/Packs/RubrikPolaris/Scripts/RubrikSonarTotalHits/RubrikSonarTotalHits.yml
index 34814ed3a786..fceb58103404 100644
--- a/Packs/RubrikPolaris/Scripts/RubrikSonarTotalHits/RubrikSonarTotalHits.yml
+++ b/Packs/RubrikPolaris/Scripts/RubrikSonarTotalHits/RubrikSonarTotalHits.yml
@@ -2,7 +2,7 @@ comment: Shows the Rubrik Polaris Sonar Total Hits.
commonfields:
id: RubrikSonarTotalHits
version: -1
-dockerimage: demisto/python3:3.10.11.61265
+dockerimage: demisto/python3:3.10.14.95956
enabled: true
name: RubrikSonarTotalHits
runas: DBotWeakRole
diff --git a/Packs/RubrikPolaris/pack_metadata.json b/Packs/RubrikPolaris/pack_metadata.json
index 830ea6cf704f..0cd21f4104e0 100644
--- a/Packs/RubrikPolaris/pack_metadata.json
+++ b/Packs/RubrikPolaris/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Rubrik Security Cloud",
"description": "Rubrik Security Cloud revolutionizes the data management market, specifically backup/recovery, archival, and DR, by providing a global policy framework, workflow orchestration, and deep data intelligence as cloud-delivered applications. The content pack fetches Rubrik Anomaly Event and provides playbooks to analyze, discover and recover -- to mention a few -- organizational data. The content pack is rich with commands to perform on-demand scans, backups, recoveries and many more features exposed by the RSC API.",
"support": "partner",
- "currentVersion": "1.3.2",
+ "currentVersion": "1.3.3",
"author": "Rubrik",
"url": "https://www.rubrik.com/support/",
"email": "support@rubrik.com",
diff --git a/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.py b/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.py
index c0be918c4749..6b734a29d622 100644
--- a/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.py
+++ b/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.py
@@ -194,6 +194,7 @@ def get_assets(asset_data):
api_suffix = "/assets"
return call_api('GET', api_suffix, asset_data)
return_error("Invalid call for assets data (missing parameters)")
+ return None
def get_asset_map(asset_details):
@@ -206,6 +207,7 @@ def get_asset_map(asset_details):
api_suffix = "/asset/map"
return call_api('GET', api_suffix, asset_details)
return_error("Invalid call for asset map (missing parameters)")
+ return None
def get_assets_map():
@@ -228,6 +230,7 @@ def get_asset_traffic(asset_details):
api_suffix = "/asset/traffic"
return call_api('GET', api_suffix, asset_details)
return_error("Invalid call for asset traffic (missing parameters)")
+ return None
def dest_endpoint(ep):
diff --git a/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.yml b/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.yml
index 6c7a3c5f99bc..98006e4ff70a 100644
--- a/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.yml
+++ b/Packs/SCADAfence_CNM/Integrations/SCADAfenceCNM/SCADAfenceCNM.yml
@@ -4,7 +4,7 @@ commonfields:
name: SCADAfence CNM
display: SCADAfence CNM
category: Network Security
-description: fetching data from CNM
+description: fetching data from CNM.
configuration:
- display: API auth secret
name: APISecret
@@ -57,32 +57,32 @@ script:
- Threat
- Critical
- Severe
- description: Required severity level of alert
+ description: Required severity level of alert.
- name: ipAddress
- description: get alerts for specified IP
+ description: get alerts for specified IP.
outputs:
- contextPath: SCADAfence.Alert.id
- description: alert ID
+ description: alert ID.
type: string
- contextPath: SCADAfence.Alert.ip
- description: asset IP
+ description: asset IP.
type: string
- contextPath: SCADAfence.Alert.severity
- description: alert severity level
+ description: alert severity level.
type: string
- contextPath: SCADAfence.Alert.type
- description: short alert description
+ description: short alert description.
type: string
- contextPath: SCADAfence.Alert.details
- description: extended alert description
+ description: extended alert description.
type: string
- description: query alerts data from SCADAfence CNM
+ description: query alerts data from SCADAfence CNM.
- name: scadafence-getAsset
arguments:
- name: ipAddress
- description: asset IP address
+ description: asset IP address.
- name: hostName
- description: Hostname
+ description: Hostname.
- name: assetType
auto: PREDEFINED
predefined:
@@ -99,93 +99,93 @@ script:
- terminal
- ftp
- Telnet
- description: type of the asset (one from list of options)
+ description: type of the asset (one from list of options).
outputs:
- contextPath: SCADAfence.Asset.ip
- description: IP address of the asset
+ description: IP address of the asset.
type: string
- contextPath: SCADAfence.Asset.assetTypes
- description: types of the asset (if detected)
+ description: types of the asset (if detected).
type: string
- contextPath: operatingSystem
- description: OS of the asset (if available)
+ description: OS of the asset (if available).
type: string
- contextPath: vendor
- description: asset vendor
+ description: asset vendor.
type: string
- description: fetch asset data from SCADAfence CNM
+ description: fetch asset data from SCADAfence CNM.
- name: scadafence-setAlertStatus
arguments:
- name: alertId
required: true
- description: Alert ID
+ description: Alert ID.
- name: alertStatus
required: true
auto: PREDEFINED
predefined:
- InProgress
- Resolved
- description: Alert status
+ description: Alert status.
outputs:
- contextPath: SCADAfence.Alert.status
- description: new status for the alert
+ description: new status for the alert.
type: string
- description: setting alert status
+ description: setting alert status.
- name: scadafence-getAssetConnections
arguments:
- name: ipAddress
- description: optional - ip address of the asset
+ description: optional - ip address of the asset.
- name: hostName
- description: hostname that corresponds to the asset of interest
+ description: hostname that corresponds to the asset of interest.
- name: macAddress
- description: MAC address of the asset
+ description: MAC address of the asset.
outputs:
- contextPath: SCADAfence.Asset.Conn.ip
- description: another endpoint's IP address
+ description: another endpoint's IP address.
type: string
- contextPath: SCADAfence.Asset.Conn.port
- description: another endpoint's port
+ description: another endpoint's port.
type: number
- contextPath: SCADAfence.Asset.Conn.proto
- description: L4 protocol used for the connection
+ description: L4 protocol used for the connection.
type: string
- contextPath: SCADAfence.Asset.Conn.traffic
- description: total bytes sent (both directions)
+ description: total bytes sent (both directions).
type: number
- contextPath: SCADAfence.Asset.Conn.hostname
- description: another endpoint's hostname
+ description: another endpoint's hostname.
type: string
- contextPath: SCADAfence.Asset.Conn.mac
- description: another endpoint's MAC address
+ description: another endpoint's MAC address.
type: string
- description: fetches asset connections data by one or more (combined) parameters
+ description: fetches asset connections data by one or more (combined) parameters.
- name: scadafence-getAssetTraffic
arguments:
- name: ipAddress
- description: optional - ip address of the asset
+ description: optional - ip address of the asset.
- name: macAddress
- description: optional - MAC address of the asset
+ description: optional - MAC address of the asset.
- name: hostName
- description: optional - hostname of the asset
+ description: optional - hostname of the asset.
outputs:
- contextPath: SCADAfence.AssetTraffic.TCP_tx_bytes
- description: bytes sent by the asset via TCP
+ description: bytes sent by the asset via TCP.
type: number
- contextPath: SCADAfence.AssetTraffic.TCP_rx_bytes
- description: bytes received by the asset via TCP
+ description: bytes received by the asset via TCP.
type: number
- contextPath: SCADAfence.AssetTraffic.UDP_tx_bytes
- description: bytes sent by the asset via UDP
+ description: bytes sent by the asset via UDP.
type: number
- contextPath: SCADAfence.AssetTraffic.UDP_rx_bytes
- description: bytes received by the asset via UDP
+ description: bytes received by the asset via UDP.
type: number
- description: fetch asset network activity data by one or more (combined) parameters
+ description: fetch asset network activity data by one or more (combined) parameters.
- name: scadafence-createAlert
arguments:
- name: ipAddress
required: true
- description: IP address of the asset that alert's related to
+ description: IP address of the asset that alert's related to.
- name: severity
required: true
auto: PREDEFINED
@@ -195,13 +195,13 @@ script:
- Threat
- Severe
- Critical
- description: desired alert severity level
+ description: desired alert severity level.
defaultValue: Information
- name: description
required: true
- description: human readable alert description
+ description: human readable alert description.
- name: remediationText
- description: instructions on issue remediation
+ description: instructions on issue remediation.
defaultValue: not provided
- name: alertIsActive
required: true
@@ -209,53 +209,53 @@ script:
predefined:
- "True"
- "False"
- description: set active=True to make the alert appear on SCADAfence UI
+ description: set active=True to make the alert appear on SCADAfence UI.
defaultValue: "True"
outputs:
- contextPath: SCADAfence.Alert.alertCreated
- description: flag defining alert creation status
+ description: flag defining alert creation status.
type: boolean
- contextPath: SCADAfence.Alert.id
- description: unique ID set to a new alert
+ description: unique ID set to a new alert.
type: string
- description: create alert in SCADAfence CNM
+ description: create alert in SCADAfence CNM.
- name: scadafence-getAllConnections
arguments: []
outputs:
- contextPath: SCADAfence.Connection.src_ip
- description: IP address of endpoint A
+ description: IP address of endpoint A.
type: string
- contextPath: SCADAfence.Connection.dest_ip
- description: IP address of endpoint B
+ description: IP address of endpoint B.
type: string
- contextPath: SCADAfence.Connection.src_port
- description: port of endpoint A
+ description: port of endpoint A.
type: number
- contextPath: SCADAfence.Connection.dest_port
- description: port of endpoint B
+ description: port of endpoint B.
type: number
- contextPath: SCADAfence.Connection.src_mac
- description: endpoint A MAC address
+ description: endpoint A MAC address.
type: string
- contextPath: SCADAfence.Connection.dest_mac
- description: endpoint B MAC address
+ description: endpoint B MAC address.
type: string
- contextPath: SCADAfence.Connection.src_cname
- description: endpoint A hostname
+ description: endpoint A hostname.
type: string
- contextPath: SCADAfence.Connection.dest_cname
- description: endpoint B hostname
+ description: endpoint B hostname.
type: string
- contextPath: SCADAfence.Connection.proto
- description: L4 protocol
+ description: L4 protocol.
type: string
- contextPath: SCADAfence.Connection.traffic
- description: total number of bytes sent in both directions
+ description: total number of bytes sent in both directions.
type: number
- description: Fetches all connections from the CNM
+ description: Fetches all connections from the CNM.
isfetch: true
runonce: false
- dockerimage: demisto/python3:3.10.12.63474
+ dockerimage: demisto/python3:3.10.14.95956
tests:
- SCADAfence_test
fromversion: 5.0.0
diff --git a/Packs/SCADAfence_CNM/ReleaseNotes/1_0_10.md b/Packs/SCADAfence_CNM/ReleaseNotes/1_0_10.md
new file mode 100644
index 000000000000..512850d06f26
--- /dev/null
+++ b/Packs/SCADAfence_CNM/ReleaseNotes/1_0_10.md
@@ -0,0 +1,7 @@
+
+#### Integrations
+
+##### SCADAfence CNM
+- Updated the Docker image to: *demisto/python3:3.10.14.95956*.
+
+
diff --git a/Packs/SCADAfence_CNM/pack_metadata.json b/Packs/SCADAfence_CNM/pack_metadata.json
index d6c5eca0099d..1348be23262f 100644
--- a/Packs/SCADAfence_CNM/pack_metadata.json
+++ b/Packs/SCADAfence_CNM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "SCADAfence CNM",
"description": "Fetching data from CNM",
"support": "partner",
- "currentVersion": "1.0.9",
+ "currentVersion": "1.0.10",
"author": "SCADAfence CNM",
"url": "www.scadafence.com",
"email": "support@scadafence.com",
diff --git a/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.json b/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.json
new file mode 100644
index 000000000000..e8e51c2a4afd
--- /dev/null
+++ b/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.json
@@ -0,0 +1,4 @@
+{
+ "breakingChanges": true,
+ "breakingChangesNotes": "Due to a fixed issue in `Base` pack (Version `1.33.52`) SEKOIAIntelligenceCenter will now correctly input email addresses into context under `Account.Email` and not under `Email` as it did up until now."
+}
\ No newline at end of file
diff --git a/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.md b/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.md
new file mode 100644
index 000000000000..66ddc982c365
--- /dev/null
+++ b/Packs/SEKOIAIntelligenceCenter/ReleaseNotes/1_2_32.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### SEKOIAIntelligenceCenter
+
+Fixed an issue in `Base` pack (Version `1.33.52`) so now SEKOIAIntelligenceCenter will correctly input email addresses into context under `Account.Email` and not under `Email`.
diff --git a/Packs/SEKOIAIntelligenceCenter/pack_metadata.json b/Packs/SEKOIAIntelligenceCenter/pack_metadata.json
index 289f651c8186..6d05f8dea994 100644
--- a/Packs/SEKOIAIntelligenceCenter/pack_metadata.json
+++ b/Packs/SEKOIAIntelligenceCenter/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "SEKOIAIntelligenceCenter",
"description": "Request SEKOIA.IO Intelligence Center from Cortex XSOAR",
"support": "partner",
- "currentVersion": "1.2.31",
+ "currentVersion": "1.2.32",
"author": "SEKOIA.IO",
"url": "https://www.sekoia.io/en/contact/",
"email": "contact@sekoia.io",
diff --git a/Packs/Safewalk/Integrations/SafewalkManagement/README.md b/Packs/Safewalk/Integrations/SafewalkManagement/README.md
index 348574c7736c..d137f41faeea 100644
--- a/Packs/Safewalk/Integrations/SafewalkManagement/README.md
+++ b/Packs/Safewalk/Integrations/SafewalkManagement/README.md
@@ -1,6 +1,8 @@
Safewalk server integration
This integration was integrated and tested with version 3 of SafewalkManagement
+This is the default integration for this content pack when configured by the Data Onboarder in Cortex XSIAM.
+
## Configure SafewalkManagement on Cortex XSOAR
1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
diff --git a/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.py b/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.py
index 1b8a9a4af356..214889dba029 100644
--- a/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.py
+++ b/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.py
@@ -19,11 +19,11 @@ def get_transactionlog(self, page, search, locked) -> Dict[str, Any]:
p_locked = ""
if locked is not None and locked:
- p_locked = '&locked=%s' % "true"
+ p_locked = '&locked={}'.format("true")
return self._http_request(
method='GET',
- url_suffix='/transactionlog/?page=%s%s%s' % (page, p_search, p_locked),
+ url_suffix=f'/transactionlog/?page={page}{p_search}{p_locked}',
resp_type='text'
)
@@ -37,11 +37,11 @@ def get_users(self, page, search, locked) -> Dict[str, Any]:
p_locked = ""
if locked is not None and locked:
- p_locked = '&locked=%s' % "true"
+ p_locked = '&locked={}'.format("true")
return self._http_request(
method='GET',
- url_suffix='/userlist/?page=%s%s%s' % (page, p_search, p_locked),
+ url_suffix=f'/userlist/?page={page}{p_search}{p_locked}',
resp_type='text'
)
@@ -55,11 +55,11 @@ def get_ldap_users(self, page, search, locked, ldap) -> Dict[str, Any]:
p_locked = ""
if locked is not None and locked:
- p_locked = '&locked=%s' % "true"
+ p_locked = '&locked={}'.format("true")
return self._http_request(
method='GET',
- url_suffix='/userlist/%s/?page=%s%s%s' % (ldap, page, p_search, p_locked),
+ url_suffix=f'/userlist/{ldap}/?page={page}{p_search}{p_locked}',
resp_type='text'
)
@@ -118,7 +118,7 @@ def create_user_token(self, username, post_params) -> Dict[str, Any]:
def update_user_token(self, username, token_devicetype, token_serialnumber, post_params) -> Dict[str, Any]:
return self._http_request(
method='PUT',
- url_suffix='/user/%s/devices/%s/%s/' % (username, token_devicetype, token_serialnumber),
+ url_suffix=f'/user/{username}/devices/{token_devicetype}/{token_serialnumber}/',
json_data=post_params,
resp_type='text'
)
@@ -133,21 +133,21 @@ def get_user_tokens(self, username) -> Dict[str, Any]:
def delete_user_token(self, username, token_devicetype, token_serialnumber) -> Dict[str, Any]:
return self._http_request(
method='DELETE',
- url_suffix='/user/%s/devices/%s/%s/' % (username, token_devicetype, token_serialnumber),
+ url_suffix=f'/user/{username}/devices/{token_devicetype}/{token_serialnumber}/',
resp_type='text'
)
def send_user_token(self, username, token_devicetype, token_serialnumber) -> Dict[str, Any]:
return self._http_request(
method='POST',
- url_suffix='/user/%s/devices/%s/%s/send/' % (username, token_devicetype, token_serialnumber),
+ url_suffix=f'/user/{username}/devices/{token_devicetype}/{token_serialnumber}/send/',
resp_type='text'
)
def send_user_virtualtoken(self, username, token_devicetype, token_serialnumber) -> Dict[str, Any]:
return self._http_request(
method='POST',
- url_suffix='/devices/%s/%s/code/' % (token_devicetype, token_serialnumber),
+ url_suffix=f'/devices/{token_devicetype}/{token_serialnumber}/code/',
resp_type='text'
)
@@ -188,7 +188,7 @@ def add_user_group(self, username, new_group_name) -> Dict[str, Any]:
def remove_user_group(self, username, old_group_name) -> Dict[str, Any]:
return self._http_request(
method='DELETE',
- url_suffix='/group/%s/member/%s/' % (old_group_name, username),
+ url_suffix=f'/group/{old_group_name}/member/{username}/',
resp_type='text'
)
@@ -1449,6 +1449,7 @@ def test_module(client):
return 'Failed to run test, invalid credentials.'
else:
return 'ok'
+ return None
else:
return 'Failed to run test.'
diff --git a/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.yml b/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.yml
index ce67cee198fd..8edca4eeb65b 100644
--- a/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.yml
+++ b/Packs/Safewalk/Integrations/SafewalkManagement/SafewalkManagement.yml
@@ -4,7 +4,7 @@ commonfields:
name: SafewalkManagement
display: Safewalk Management
category: Authentication & Identity Management
-description: Safewalk server integration
+description: Safewalk server integration.
configuration:
- display: First fetch timestamp (