diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d8a9a0b3fd2a..05266fb4689f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,33 +5,12 @@ /Config/approved_categories.json @yaakovpraisler @bakatzir @GuyLibermanPA @demisto/content-leaders /Config/core_packs_list.json @yaakovpraisler @bakatzir @GuyLibermanPA @demisto/content-leaders /Config/core_packs_mpv2_list.json @yaakovpraisler @bakatzir @GuyLibermanPA @demisto/content-leaders -/Tests/Marketplace/versions-metadata.json @yaakovpraisler /Config/corepacks_override.json @yaakovpraisler # Docker native image /Tests/docker_native_image_config.json @JudahSchwartz @shmuel44 -# Marketplace & Upload-Flow -/Tests/scripts/create_artifacts_graph/create_artifacts.py @RosenbergYehuda -/Tests/Marketplace/upload_git_snapshot.py @yaakovpraisler -/Tests/Marketplace/install_packs.sh @yaakovpraisler -/Tests/Marketplace/configure_and_install_packs.py @yaakovpraisler -/Tests/Marketplace/copy_and_upload_packs.py @yaakovpraisler -/Tests/Marketplace/marketplace_services.py @yaakovpraisler @RosenbergYehuda -/Tests/Marketplace/marketplace_statistics.py @yaakovpraisler -/Tests/Marketplace/marketplace_constants.py @yaakovpraisler -/Tests/Marketplace/zip_packs.py @yaakovpraisler -/Tests/Marketplace/upload_packs.py @yaakovpraisler @RosenbergYehuda -/Tests/Marketplace/packs_dependencies.py @yaakovpraisler -/Tests/Marketplace/search_and_install_packs.py @yaakovpraisler -/Tests/scripts/prepare_content_packs_for_testing.sh @yaakovpraisler -/Utils/trigger_test_upload_flow.sh @yaakovpraisler -/Utils/trigger_upload_packs_to_production.sh @yaakovpraisler -/Utils/should_trigger_test_upload.sh @yaakovpraisler -/Utils/test_upload_flow/* @yaakovpraisler - # Test Collection -/Tests/scripts/collect_tests @dorschw @michal-dagan /Tests/conf.json @sapirshuker # PANW Products @@ -73,28 +52,11 @@ /Packs/CommonScripts/Scripts/ExtractEmailV2/* @Ni-Knight /Packs/CommonScripts/Scripts/UnEscapeIPs/* @Ni-Knight - # Build related 
.circleci/config.yml @yucohen -.gitlab/ci/* @yucohen -.gitlab/* @yucohen -.gitlab-ci.yml @yucohen -/Tests/scripts/wait_in_line_for_cloud_env.sh @yucohen -/Tests/scripts/uninstall_packs_and_reset_bucket_cloud.sh @yucohen -/Tests/Marketplace/search_and_uninstall_pack.py @yucohen -/Tests/scripts/install_content_and_test_integrations.sh @yucohen -/Tests/configure_and_test_integration_instances.py @yucohen -/Tests/scripts/print_cloud_machine_details.sh @yucohen -/Tests/scripts/run_tests.sh @yucohen -/Tests/scripts/download_demisto_conf.sh @yucohen -Tests/scripts/test_modeling_rules.sh @AradCarmi -Tests/scripts/lock_cloud_machines.py @yucohen -Tests/Marketplace/server_content_items.json @dantavori -validation_config.toml @YuvHayun @JudahSchwartz @anara123 @SamuelFain # SDK Related -.gitlab/ci/.gitlab-ci.sdk-nightly.yml @SamuelFain -.pre-commit-config_template.yaml @SamuelFain +.pre-commit-config_template.yaml @SamuelFain @YuvHayun # XDR Related /Packs/CortexXDR/Integrations/ @maimorag diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py index 00307f4f6b12..cb8ee6515367 100644 --- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py @@ -2102,6 +2102,47 @@ def get_cps_change_status(self, headers=headers, ) + def cancel_cps_change(self, change_path: str, account_switch_key: str = "") -> dict: + """ + Cancels a pending change. + + Args: + change_path: Change path on which to perform the desired operation. + account_switch_key: For customers who manage more than one account, + this runs the operation from another account. The Identity and + Access Management API provides a list of available account switch keys. + + Returns: + The response provides a dict of change_path. 
+ + """ + method = 'delete' + headers = {"accept": "application/vnd.akamai.cps.change-id.v1+json"} + params = {"accountSwitchKey": account_switch_key} + return self._http_request(method=method, + url_suffix=change_path, + headers=headers, + params=params, + ) + + def get_cps_enrollment_by_id(self, + enrollment_id: int) -> dict: + """ + Returns the Enarollment by enrollment id + Args: + enrollment_id: Unique Identifier of the Enrollment on which to perform the desired operation. + + Returns: + The response provides a deployment associcated to the enrollment id + + """ + headers = {"accept": "application/vnd.akamai.cps.enrollment.v12+json"} + method = "GET" + return self._http_request(method=method, + url_suffix=f'cps/v2/enrollments/{enrollment_id}', + headers=headers, + ) + ''' HELPER FUNCTIONS ''' @@ -5912,6 +5953,91 @@ def get_cps_change_status_command(client: Client, return human_readable, context_entry, raw_response +@logger +def cancel_cps_change_command(client: Client, + change_id: str = '0', + enrollment_id: str = '0', + change_path: str = "", + account_switch_key: str = "", + ) -> tuple[str, dict, Union[list, dict]]: + """ + Cancels a pending change. + Reference: https://techdocs.akamai.com/cps/reference/delete-enrollment-change + Args: + client: + change_id: The change for this enrollment on which to perform the desired operation. Default is 0. + enrollment_id: Enrollment on which to perform the desired operation. Default is 0. + change_path: Change path on which to perform the desired operation. + - Sample: /cps/v2/enrollments/100000/changes/88888888 + - Note: change_path is not listed in the reference as a parameter. + However it can be extracted directly from "list_enrollments_command". + This should be the most common useage when generate RestAPI's URL. + account_switch_key: For customers who manage more than one account, this runs + the operation from another account. 
The Identity and Access Management API + provides a list of available account switch keys. + - Sample: "1-5C0YLB:1-8BYUX" + + NOTE: There is no need to provice "change_id"/"enrollment_id" and "change_path" + at the same time. "change_id"/"enrollment_id" can be used to generate + "change_path" as well. + + Returns: + human readable (markdown format), entry context and raw response + """ + + if not (change_id == '0' and enrollment_id == '0'): + change_path = f'/cps/v2/enrollments/{enrollment_id}/changes/{change_id}' + + raw_response: dict = client.cancel_cps_change(change_path=change_path, account_switch_key=account_switch_key) + + title = f'{INTEGRATION_NAME} - cps cancel change' + entry_context = raw_response + human_readable_ec = raw_response + context_entry: dict = { + f"{INTEGRATION_CONTEXT_NAME}.Cps.Change.Canceled": entry_context + } + + human_readable = tableToMarkdown( + name=title, + t=human_readable_ec, + removeNull=True, + ) + return human_readable, context_entry, raw_response + + +# Created by D.S. 2024-06-18 +@logger +def get_cps_enrollment_by_id_command(client: Client, + enrollment_id: int) -> tuple[str, dict, Union[list, dict]]: + """ + Returns the certification/Enarollment. + + Args: + client: + enrollment_id: Unique Identifier of the Enrollment on which to perform the desired operation. 
+ And it can be retrived via list_enrollments_command + + Returns: + human readable (markdown format), entry context and raw response + """ + + raw_response: dict = client.get_cps_enrollment_by_id(enrollment_id=enrollment_id) + + title = f'{INTEGRATION_NAME} - get cps enrollment by id command' + entry_context = raw_response + human_readable_ec = raw_response + context_entry: dict = { + f"{INTEGRATION_CONTEXT_NAME}.Cps.Enrollments": entry_context + } + + human_readable = tableToMarkdown( + name=title, + t=human_readable_ec, + removeNull=True, + ) + return human_readable, context_entry, raw_response + + ''' COMMANDS MANAGER / SWITCH PANEL ''' @@ -6006,6 +6132,8 @@ def main(): f'{INTEGRATION_COMMAND_NAME}-update-cps-enrollment': update_cps_enrollment_command, f'{INTEGRATION_COMMAND_NAME}-update-cps-enrollment-schedule': update_cps_enrollment_schedule_command, f'{INTEGRATION_COMMAND_NAME}-get-cps-change-status': get_cps_change_status_command, + f'{INTEGRATION_COMMAND_NAME}-cancel-cps-change': cancel_cps_change_command, + f'{INTEGRATION_COMMAND_NAME}-get-cps-enrollment-by-id': get_cps_enrollment_by_id_command, } try: readable_output, outputs, raw_response = commands[command](client=client, **demisto.args()) diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml index 092af00bb75d..610e839c0b5d 100644 --- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml @@ -1489,7 +1489,28 @@ script: - contextPath: Akamai.Enrollments.Change.Status description: Akamai enrollments change status. type: Dictionary - dockerimage: demisto/auth-utils:1.0.0.94075 + - arguments: + - defaultValue: '0' + description: The change for this enrollment on which to perform the desired operation. Default is 0. "change_path" is used. + name: change_id + required: true + - defaultValue: '0' + description: Enrollment on which to perform the desired operation. 
Default is 0. "change_path" is used. + name: enrollment_id + required: true + - description: "Change path on which to perform the desired operation. Sample: /cps/v2/enrollments/100000/changes/88888888. Note: change_path is not listed in the reference as a parameter. However it can be extracted directly from \"list_enrollments_command\". This should be the most common usage when generating the RestAPI's URL." + name: change_path + - description: For customers who manage more than one account, this runs the operation from another account. The Identity and Access Management API provides a list of available account switch keys. + name: account_switch_key + description: Cancels a pending change on CPS. + name: akamai-cancel-cps-change + - arguments: + - description: Enrollment ID on which to perform the desired operation. + name: enrollment_id + required: true + description: Get an enrollment in CPS by enrollment id. + name: akamai-get-cps-enrollment-by-id + dockerimage: demisto/auth-utils:1.0.0.105764 script: '' subtype: python3 type: python diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py index 72ae7e3c887e..83cb1a6f8f5a 100644 --- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF_test.py @@ -10,6 +10,14 @@ def util_load_json(path): return json.loads(f.read()) +def util_load_txt(path: str): + """ + Utility to load text data from a local folder. 
+ """ + with open(path, encoding='utf-8') as file: + return file.read() + + @pytest.fixture(scope='module') def akamai_waf_client(): return Client(base_url="https://hostname/", @@ -250,3 +258,52 @@ def test_acknowledge_warning_command(mocker, akamai_waf_client): assert expected_raw_response == raw_response assert expected_human_readable == human_readable assert expected_context_entry == context_entry + + +def test_cancel_cps_change_command(mocker, akamai_waf_client): + """ + Given: + - enrollment ID and change ID. + When: + - running the command cancel_cps_change_command. + Then: + - enrollment ID is cancelled correctly. + """ + from Akamai_WAF import cancel_cps_change_command + expected_raw_response = { + "change": "/cps/v2/enrollments/193622/changes/3914270" + } + expected_human_readable = "### Akamai WAF - cps cancel change\n|change|\n|---|\n|\ + /cps/v2/enrollments/193622/changes/3914270 |\n" + expected_context_entry = { + 'Akamai.Cps.Change.Canceled': { + 'change': '/cps/v2/enrollments/193622/changes/3914270' + } + } + mocker.patch.object(akamai_waf_client, 'cancel_cps_change', return_value=expected_raw_response) + human_readable, context_entry, raw_response = cancel_cps_change_command(client=akamai_waf_client, + enrollment_id="193622", + change_id="3914270") + assert expected_raw_response == raw_response + assert expected_human_readable == human_readable + assert expected_context_entry == context_entry + + +def test_get_cps_enrollment_by_id_command(mocker, akamai_waf_client): + """ + Given: + - enrollment ID. + When: + - running the command get_cps_enrollment_by_id_command. + Then: + - we get details of enrollment. 
+ """ + from Akamai_WAF import get_cps_enrollment_by_id_command + test_data = util_load_json('test_data/get_cps_enrollment_by_id_test.json') + expected_raw_response = test_data + expected_context_entry = util_load_json('test_data/get_cps_enrollment_by_id_context.json') + + mocker.patch.object(akamai_waf_client, 'get_cps_enrollment_by_id', return_value=expected_raw_response) + _, context_entry, raw_response = get_cps_enrollment_by_id_command(client=akamai_waf_client, enrollment_id=193622) + assert expected_raw_response == raw_response + assert expected_context_entry == context_entry diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md b/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md index f37d8639a161..0026b3f0eb1d 100644 --- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/README.md @@ -1683,4 +1683,43 @@ Gets the status of a pending change. } } }}}} -``` \ No newline at end of file +``` +### akamai-get-cps-enrollment-by-id + +*** +Get an enrollment in CPS by enrollment id + +#### Base Command + +`akamai-get-cps-enrollment-by-id` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| enrollment_id | Enrollment on which to perform the desired operation. | Required | + +#### Context Output + +There is no context output for this command. +### akamai-cancel-cps-change + +*** +Cancels a pending change on CPS. + +#### Base Command + +`akamai-cancel-cps-change` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| change_id | The change for this enrollment on which to perform the desired operation. Default is 0. "change_path" is used. Default is 0. | Required | +| enrollment_id | Enrollment on which to perform the desired operation. Default is 0. "change_path" is used. Default is 0. | Required | +| change_path | Change path on which to perform the desired operation. Sample: /cps/v2/enrollments/100000/changes/88888888. 
Note: change_path is not listed in the reference as a parameter. However it can be extracted directly from "list_enrollments_command". This should be the most common useage when generate RestAPI's URL. | Optional | +| account_switch_key | For customers who manage more than one account, this runs the operation from another account. The Identity and Access Management API provides a list of available account switch keys. | Optional | + +#### Context Output + +There is no context output for this command. diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_context.json b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_context.json new file mode 100644 index 000000000000..83f96240ab77 --- /dev/null +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_context.json @@ -0,0 +1 @@ +{"Akamai.Cps.Enrollments": {"adminContact": {"addressLineOne": "601 Riverside Avenue", "addressLineTwo": null, "city": null, "country": null, "email": "Akamaizers@test.com", "firstName": "FIS", "lastName": "Akamaizers", "organizationName": null, "phone": "123-123-1234", "postalCode": null, "region": null, "title": null}, "assignedSlots": [168334], "autoRenewalStartTime": null, "certificateChainType": "default", "certificateType": "third-party", "changeManagement": true, "csr": {"c": "US", "cn": "tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com", "l": "Jacksonville", "o": "Fidelity National Information Services", "ou": "Fidelity National Information Services", "preferredTrustChain": null, "sans": ["tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com"], "st": "Florida"}, "enableMultiStackedCertificates": true, "id": 190080, "location": "/cps/v2/enrollments/190080", "maxAllowedSanNames": 100, "maxAllowedWildcardSanNames": 100, "networkConfiguration": {"clientMutualAuthentication": null, "disallowedTlsVersions": ["TLSv1","TLSv1_1"], "dnsNameSettings": 
{"cloneDnsNames":true,"dnsNames":["tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com"]}, "fipsMode": null, "geography": "core", "mustHaveCiphers": "ak-akamai-2020q1", "ocspStapling": "on", "preferredCiphers": "ak-akamai-2020q1", "quicEnabled": true, "secureNetwork": "enhanced-tls", "sniOnly": true}, "org": {"addressLineOne": "601 Riverside Avenue", "addressLineTwo": null, "city": "Jacksonville", "country": "US", "name": "Fidelity National Information Services", "phone": "501-220-5100", "postalCode": "32204", "region": "Florida"}, "orgId": null, "pendingChanges": [{"changeType":"renewal","location":"/cps/v2/enrollments/190080/changes/5231996"}], "productionSlots": [168334], "ra": "third-party", "signatureAlgorithm": null, "stagingSlots": [168334], "techContact": {"addressLineOne": null, "addressLineTwo": null, "city": null, "country": null, "email": "test-ps@akamai.com", "firstName": "FIS", "lastName": "PS", "organizationName": null, "phone": "877-425-2832", "postalCode": null, "region": null, "title": null}, "thirdParty": {"excludeSans": false}, "validationType": "third-party"}} \ No newline at end of file diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_test.json b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_test.json new file mode 100644 index 000000000000..467d13885840 --- /dev/null +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/test_data/get_cps_enrollment_by_id_test.json @@ -0,0 +1,24 @@ +{ + "adminContact":{ + "addressLineOne":"601 Riverside Avenue", + "addressLineTwo":null, + "city":null, + "country":null, + "email":"Akamaizers@test.com", + "firstName":"FIS", + "lastName":"Akamaizers", + "organizationName":null, + "phone":"123-123-1234", + "postalCode":null, + "region":null, + "title":null + }, + "assignedSlots":[168334], + "autoRenewalStartTime":null, + "certificateChainType":"default", + "certificateType":"third-party", + "changeManagement":true, + 
"csr":{"c":"US","cn":"tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com","l":"Jacksonville","o":"Fidelity National Information Services","ou":"Fidelity National Information Services","preferredTrustChain":null,"sans":["tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com"],"st":"Florida"},"enableMultiStackedCertificates":true,"id":190080,"location":"/cps/v2/enrollments/190080","maxAllowedSanNames":100,"maxAllowedWildcardSanNames":100,"networkConfiguration":{"clientMutualAuthentication":null,"disallowedTlsVersions":["TLSv1","TLSv1_1"],"dnsNameSettings":{"cloneDnsNames":true,"dnsNames":["tools-portal-app-mbp-amex-batest.dev.fiscloudservices.com"]},"fipsMode":null,"geography":"core","mustHaveCiphers":"ak-akamai-2020q1","ocspStapling":"on","preferredCiphers":"ak-akamai-2020q1","quicEnabled":true,"secureNetwork":"enhanced-tls","sniOnly":true},"org":{"addressLineOne":"601 Riverside Avenue","addressLineTwo":null,"city":"Jacksonville","country":"US","name":"Fidelity National Information Services","phone":"501-220-5100","postalCode":"32204","region":"Florida"},"orgId":null,"pendingChanges":[{"changeType":"renewal","location":"/cps/v2/enrollments/190080/changes/5231996"}],"productionSlots":[168334],"ra":"third-party","signatureAlgorithm":null,"stagingSlots":[168334],"techContact":{"addressLineOne":null,"addressLineTwo":null,"city":null,"country":null,"email":"test-ps@akamai.com","firstName":"FIS","lastName":"PS","organizationName":null,"phone":"877-425-2832","postalCode":null,"region":null,"title":null}, + "thirdParty":{"excludeSans":false}, + "validationType":"third-party" +} \ No newline at end of file diff --git a/Packs/Akamai_WAF/ReleaseNotes/2_0_12.md b/Packs/Akamai_WAF/ReleaseNotes/2_0_12.md new file mode 100644 index 000000000000..09a70b100e2e --- /dev/null +++ b/Packs/Akamai_WAF/ReleaseNotes/2_0_12.md @@ -0,0 +1,9 @@ + +#### Integrations + +##### Akamai WAF +- Updated the Docker image to: *demisto/auth-utils:1.0.0.105764*. 
+ + - Added 2 commands: + - ***akamai-cancel-cps-change*** + - ***akamai-get-cps-enrollment-by-id*** diff --git a/Packs/Akamai_WAF/pack_metadata.json b/Packs/Akamai_WAF/pack_metadata.json index de330bb2ea05..5ea59016041d 100644 --- a/Packs/Akamai_WAF/pack_metadata.json +++ b/Packs/Akamai_WAF/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Akamai WAF", "description": "Use the Akamai WAF integration to manage common sets of lists used by various Akamai security products and features.", "support": "xsoar", - "currentVersion": "2.0.11", + "currentVersion": "2.0.12", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.py b/Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.py index 1cd98dd89b22..b297a5d3b0f1 100644 --- a/Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.py +++ b/Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.py @@ -37,6 +37,7 @@ def __init__(self, mailbox_to_fetch, folder_to_fetch, first_fetch_interval, emai mark_fetched_read: bool = False, look_back: int | None = 0, fetch_html_formatting=True, + legacy_name=False, **kwargs): super().__init__(retry_on_rate_limit=True, managed_identities_resource_uri=Resources.graph, command_prefix="msgraph-mail", @@ -49,6 +50,7 @@ def __init__(self, mailbox_to_fetch, folder_to_fetch, first_fetch_interval, emai self._mark_fetched_read = mark_fetched_read self._look_back = look_back self.fetch_html_formatting = fetch_html_formatting + self.legacy_name = legacy_name @classmethod def _build_inline_layout_attachments_input(cls, inline_from_layout_attachments): @@ -277,11 +279,11 @@ def _get_email_attachments(self, message_id, user_id=None, overwrite_rate_limit_ for attachment in attachments: attachment_type = attachment.get('@odata.type', '') - attachment_identifier_id = attachment.get('contentId') - if not 
attachment_identifier_id or attachment_identifier_id == "None": - attachment_identifier_id = attachment.get('id', '') - attachment_name = f"{attachment_identifier_id}-attachmentName-{attachment.get('name', 'untitled_attachment')}" - + attachment_content_id = attachment.get('contentId') + attachment_is_inline = attachment.get('isInline') + attachment_name = attachment.get('name', 'untitled_attachment') + if attachment_is_inline and not self.legacy_name and attachment_content_id and attachment_content_id != "None": + attachment_name = f"{attachment_content_id}-attachmentName-{attachment_name}" if not attachment_name.isascii(): try: demisto.debug(f"Trying to decode the attachment file name: {attachment_name}") @@ -1287,7 +1289,7 @@ def item_result_creator(raw_attachment, user_id) -> CommandResults: return CommandResults(readable_output=human_readable, raw_response=raw_attachment) @staticmethod - def file_result_creator(raw_attachment: dict) -> dict: + def file_result_creator(raw_attachment: dict, legacy_name=False) -> dict: """Create FileResult from the attachment Args: @@ -1299,10 +1301,11 @@ def file_result_creator(raw_attachment: dict) -> dict: Returns: dict: FileResult with the b64decode of the attachment content """ - identifier_id = raw_attachment.get('contentId') - if not identifier_id or identifier_id == "None": - identifier_id = raw_attachment.get('id', '') - name = f"{identifier_id}-attachmentName-{raw_attachment.get('name','')}" + name = raw_attachment.get('name', '') + content_id = raw_attachment.get('contentId') + is_inline = raw_attachment.get('isInline') + if is_inline and content_id and content_id != "None" and not legacy_name: + name = f"{content_id}-attachmentName-{name}" data = raw_attachment.get('contentBytes') try: data = base64.b64decode(data) # type: ignore @@ -1311,7 +1314,7 @@ def file_result_creator(raw_attachment: dict) -> dict: raise DemistoException('Attachment could not be decoded') @staticmethod - def create_attachment(raw_attachment, 
user_id) -> CommandResults | dict: + def create_attachment(raw_attachment, user_id, legacy_name=False) -> CommandResults | dict: attachment_type = raw_attachment.get('@odata.type', '') # Documentation about the different attachment types @@ -1319,7 +1322,7 @@ def create_attachment(raw_attachment, user_id) -> CommandResults | dict: if 'itemAttachment' in attachment_type: return GraphMailUtils.item_result_creator(raw_attachment, user_id) elif 'fileAttachment' in attachment_type: - return GraphMailUtils.file_result_creator(raw_attachment) + return GraphMailUtils.file_result_creator(raw_attachment, legacy_name) else: human_readable = f'Integration does not support attachments from type {attachment_type}' return CommandResults(readable_output=human_readable, raw_response=raw_attachment) @@ -1828,14 +1831,8 @@ def get_attachment_command(client: MsGraphMailBaseClient, args) -> list[CommandR kwargs = {arg_key: args.get(arg_key) for arg_key in ['message_id', 'folder_id', 'attachment_id']} kwargs['user_id'] = args.get('user_id', client._mailbox_to_fetch) raw_response = client.get_attachment(**kwargs) - identifiers_filter = argToList(args.get('identifiers_filter')) - return [GraphMailUtils.create_attachment(attachment, user_id=kwargs['user_id']) - for attachment in raw_response - if ( - (not identifiers_filter) - or (attachment.get('contentId') in identifiers_filter or attachment.get('id') in identifiers_filter) - ) - ] + return [GraphMailUtils.create_attachment(attachment, user_id=kwargs['user_id'], legacy_name=client.legacy_name) + for attachment in raw_response] def create_folder_command(client: MsGraphMailBaseClient, args) -> CommandResults: diff --git a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json index 0e41d719d4d5..5c376f2eeca5 100644 --- a/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json +++ 
b/Packs/CrowdStrikeFalcon/Classifiers/classifier-CrowdStrike_Falcon_Mapper_6.5.json @@ -808,28 +808,6 @@ "dbot_classification_incident_type_all": { "dontMapEventToLabels": false, "internalMapping": { - "External Start Time": { - "complex": { - "filters": [], - "root": "first_behavior", - "transformers": [ - { - "args": { - "item": { - "isContext": true, - "value": { - "simple": "start" - } - } - }, - "operator": "append" - } - ] - } - }, - "Additional Data": { - "simple": "behaviors" - }, "Agent Version": { "simple": "device.agent_version" }, @@ -854,10 +832,10 @@ } }, "CMD line": { - "simple": "behaviors.cmdline" + "simple": "cmdline" }, "Description": { - "simple": "behaviors.description" + "simple": "description" }, "Device External IPs": { "complex": { @@ -966,26 +944,7 @@ "simple": "incident_type" }, "External Confidence": { - "simple": "max_confidence" - }, - "External End Time": { - "complex": { - "filters": [], - "root": "last_behavior", - "transformers": [ - { - "args": { - "item": { - "isContext": true, - "value": { - "simple": "end" - } - } - }, - "operator": "append" - } - ] - } + "simple": "confidence" }, "External ID": { "complex": { @@ -1096,16 +1055,16 @@ } }, "File MD5": { - "simple": "behaviors.md5" + "simple": "md5" }, "File Names": { - "simple": "behaviors.filename" + "simple": "filename" }, "File Paths": { - "simple": "behaviors.filepath" + "simple": "filepath" }, "File SHA256": { - "simple": "behaviors.sha256" + "simple": "sha256" }, "Hostnames": { "complex": { @@ -1146,22 +1105,22 @@ "simple": "parent_details.cmdline" }, "Parent Process SHA256": { - "simple": "behaviors.parent_details.parent_sha256" + "simple": "parent_details.sha256" }, "Parent Process MD5": { - "simple": "behaviors.parent_details.parent_md5" + "simple": "parent_details.md5" }, "Process CMD": { "simple": "parent_details.cmdline" }, "Process MD5": { - "simple": "behaviors.md5" + "simple": "md5" }, "Process SHA256": { - "simple": "behaviors.sha256" + "simple": "sha256" }, 
"Scenario": { - "simple": "behaviors.scenario" + "simple": "scenario" }, "Source MAC Address": { "simple": "device.mac_address" @@ -1182,7 +1141,7 @@ "args": { "conditions": { "value": { - "simple": "[\n {\n \"condition\": \"#{incident_type} == 'detection'\",\n \"return\": \"Falcon Detection - \" + #{behaviors\\.display_name} + \" - Detection ID: \" + #{composite_id}\n },\n {\n \"condition\": \"#{incident_type} == 'incident'\",\n \"return\": \"Falcon Incident - ID: \" + #{incident_id}\n },\n {\n \"condition\": \"#{incident_type} == 'IDP detection'\",\n \"return\": #{id}\n },\n {\n \"condition\": \"#{incident_type} == 'iom_configurations'\",\n \"return\": #{id}\n },\n {\n \"condition\": \"#{incident_type} == 'ioa_events'\",\n \"return\": #{event_id}\n },\n {\n \"condition\": \"#{incident_type} == 'MOBILE detection'\",\n \"return\": #{mobile_detection_id}\n },\n {\n \"condition\": \"#{device\\.hostname} != None\",\n \"return\": #{incident_type} + \" - \" + #{composite_id} + \" - \" + #{device\\.hostname}\n },\n {\n \"condition\": \"#{hosts\\.hostname} != None\",\n \"return\": #{incident_type} + \" - \" + #{incident_id} + \" - \" + #{hosts\\.hostname}\n },\n {\n \"condition\": \"#{incident_id} != None\",\n \"return\": #{incident_type} + \" - \" + #{incident_id}\n },\n {\n \"condition\": \"#{composite_id} != None\",\n \"return\": #{incident_type} + \" - \" + #{composite_id}\n },\n {\n \"default\": #{incident_type}\n }\n]" + "simple": "[\n {\n \"condition\": \"#{incident_type} == 'detection'\",\n \"return\": \"Falcon Detection - \" + #{display_name} + \" - Detection ID: \" + #{composite_id}\n },\n {\n \"condition\": \"#{incident_type} == 'incident'\",\n \"return\": \"Falcon Incident - ID: \" + #{incident_id}\n },\n {\n \"condition\": \"#{incident_type} == 'IDP detection'\",\n \"return\": #{id}\n },\n {\n \"condition\": \"#{incident_type} == 'iom_configurations'\",\n \"return\": #{id}\n },\n {\n \"condition\": \"#{incident_type} == 'ioa_events'\",\n \"return\": 
#{event_id}\n },\n {\n \"condition\": \"#{incident_type} == 'MOBILE detection'\",\n \"return\": #{mobile_detection_id}\n },\n {\n \"condition\": \"#{device\\.hostname} != None\",\n \"return\": #{incident_type} + \" - \" + #{composite_id} + \" - \" + #{device\\.hostname}\n },\n {\n \"condition\": \"#{hosts\\.hostname} != None\",\n \"return\": #{incident_type} + \" - \" + #{incident_id} + \" - \" + #{hosts\\.hostname}\n },\n {\n \"condition\": \"#{incident_id} != None\",\n \"return\": #{incident_type} + \" - \" + #{incident_id}\n },\n {\n \"condition\": \"#{composite_id} != None\",\n \"return\": #{incident_type} + \" - \" + #{composite_id}\n },\n {\n \"default\": #{incident_type}\n }\n]" } }, "flags": {} diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py index 789f09a6e55c..2fd27dd34d0b 100644 --- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py +++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py @@ -2026,14 +2026,15 @@ def get_username_uuid(username: str): return resources[0] -def resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment): +def resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment, tag): """ Sends a resolve detection request - :param ids: Single or multiple ids in an array string format - :param status: New status of the detection - :param assigned_to_uuid: uuid to assign the detection to - :param show_in_ui: Boolean flag in string format (true/false) - :param comment: Optional comment to add to the detection + :param ids: Single or multiple ids in an array string format. + :param status: New status of the detection. + :param assigned_to_uuid: uuid to assign the detection to. + :param show_in_ui: Boolean flag in string format (true/false). + :param comment: Optional comment to add to the detection. + :param The tag to add. 
:return: Resolve detection response json """ payload = { @@ -2054,6 +2055,8 @@ def resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment): payload["assign_to_user_id"] = payload.pop("assigned_to_uuid") if "assigned_to_uuid" in payload else None payload["update_status"] = payload.pop("status") if "status" in payload else None payload["append_comment"] = payload.pop("comment") if "comment" in payload else None + if tag: + payload["add_tag"] = tag data = json.dumps(resolve_detections_prepare_body_request(ids, payload)) else: @@ -2213,7 +2216,7 @@ def update_detection_request(ids: list[str], status: str) -> dict: if status not in DETECTION_STATUS: raise DemistoException(f'CrowdStrike Falcon Error: ' f'Status given is {status} and it is not in {DETECTION_STATUS}') - return resolve_detection(ids=ids, status=status, assigned_to_uuid=None, show_in_ui=None, comment=None) + return resolve_detection(ids=ids, status=status, assigned_to_uuid=None, show_in_ui=None, comment=None, tag=None) def update_idp_or_mobile_detection_request(ids: list[str], status: str) -> dict: @@ -4213,10 +4216,13 @@ def resolve_detection_command(): assigned_to_uuid = get_username_uuid(username) status = args.get('status') + tag = args.get('tag') show_in_ui = args.get('show_in_ui') - if not (username or assigned_to_uuid or comment or status or show_in_ui): + if not (username or assigned_to_uuid or comment or status or show_in_ui or tag): raise DemistoException("Please provide at least one argument to resolve the detection with.") - raw_res = resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment) + if LEGACY_VERSION and tag: + raise DemistoException("tag argument is only relevant when running with API V3.") + raw_res = resolve_detection(ids, status, assigned_to_uuid, show_in_ui, comment, tag) args.pop('ids') hr = f"Detection {str(ids)[1:-1]} updated\n" hr += 'With the following values:\n' diff --git 
a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml index 5802e63e835b..41b585c46faa 100644 --- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml +++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml @@ -453,6 +453,8 @@ script: - 'false' - description: Username to assign the detections to. (This is usually the user's email address, but may vary based on your configuration). username and assigned_to_uuid are mutually exclusive. name: username + - description: The tag to add to the detection, supported only for API V3. + name: tag description: Resolves and updates a detection using the provided arguments. At least one optional argument must be passed, otherwise no change will take place. Note that IDP detections are not supported. name: cs-falcon-resolve-detection - arguments: diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py index 6364eca26093..b403410d9200 100644 --- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py +++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py @@ -7209,13 +7209,13 @@ def test_get_detection___url_and_params(mocker, Legacy_version, url_suffix, expe assert len(http_request_mocker.call_args_list[0][0]) == expected_len -@pytest.mark.parametrize('Legacy_version, url_suffix, data', [ - (False, "/alerts/entities/alerts/v3", - '{"action_parameters": [{"name": "show_in_ui", "value": "True"}, {"name": "assign_to_user_id", "value": "123"}, {"name": "update_status", "value": "resolved"}, {"name": "append_comment", "value": "comment"}], "composite_ids": ["123"]}'), # noqa: E501 - (True, '/detects/entities/detects/v2', +@pytest.mark.parametrize('Legacy_version, tag, url_suffix, data', [ + (False, 
"test_tag", "/alerts/entities/alerts/v3", + '{"action_parameters": [{"name": "show_in_ui", "value": "True"}, {"name": "assign_to_user_id", "value": "123"}, {"name": "update_status", "value": "resolved"}, {"name": "append_comment", "value": "comment"}, {"name": "add_tag", "value": "test_tag"}], "composite_ids": ["123"]}'), # noqa: E501 + (True, None, '/detects/entities/detects/v2', '{"ids": ["123"], "status": "resolved", "assigned_to_uuid": "123", "show_in_ui": "True", "comment": "comment"}') ]) -def test_resolve_detection(mocker, Legacy_version, url_suffix, data): +def test_resolve_detection(mocker, Legacy_version, tag, url_suffix, data): """ Given: - The Legacy_version flag @@ -7230,7 +7230,7 @@ def test_resolve_detection(mocker, Legacy_version, url_suffix, data): mocker.patch('CrowdStrikeFalcon.LEGACY_VERSION', Legacy_version) http_request_mocker = mocker.patch('CrowdStrikeFalcon.http_request') - resolve_detection(ids=["123"], status="resolved", assigned_to_uuid="123", show_in_ui="True", comment="comment") + resolve_detection(ids=["123"], status="resolved", assigned_to_uuid="123", show_in_ui="True", comment="comment", tag=tag) assert http_request_mocker.call_args_list[0][0][1] == url_suffix assert http_request_mocker.call_args_list[0][1]["data"] == data diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_7.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_7.md new file mode 100644 index 000000000000..0c540869a76e --- /dev/null +++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_7.md @@ -0,0 +1,6 @@ + +#### Mappers + +##### CrowdStrike Falcon Mapper + +Fixed an issue where some fields did not align with the newest version of CrowdStrike Falcon. Some fields have been modified, and others have been deleted as they no longer return from CrowdStrike Falcon. 
diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_8.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_8.md new file mode 100644 index 000000000000..714460ed9e82 --- /dev/null +++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/2_0_8.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### CrowdStrike Falcon + +- Added the **tag** argument to **cs-falcon-resolve-detection** command. diff --git a/Packs/CrowdStrikeFalcon/pack_metadata.json b/Packs/CrowdStrikeFalcon/pack_metadata.json index f28c755a7c56..d3e0620adc2c 100644 --- a/Packs/CrowdStrikeFalcon/pack_metadata.json +++ b/Packs/CrowdStrikeFalcon/pack_metadata.json @@ -2,7 +2,7 @@ "name": "CrowdStrike Falcon", "description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.", "support": "xsoar", - "currentVersion": "2.0.6", + "currentVersion": "2.0.8", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/CyberChef/CONTRIBUTORS.json b/Packs/CyberChef/CONTRIBUTORS.json new file mode 100644 index 000000000000..97cf21c7eaa9 --- /dev/null +++ b/Packs/CyberChef/CONTRIBUTORS.json @@ -0,0 +1,3 @@ +[ + "nikstuckenbrock" +] \ No newline at end of file diff --git a/Packs/CyberChef/Integrations/CyberChef/CyberChef.py b/Packs/CyberChef/Integrations/CyberChef/CyberChef.py index b1f5424e954a..865bb6313e0a 100644 --- a/Packs/CyberChef/Integrations/CyberChef/CyberChef.py +++ b/Packs/CyberChef/Integrations/CyberChef/CyberChef.py @@ -1,24 +1,48 @@ +from typing import Optional import demistomock as demisto # noqa: F401 from CommonServerPython import * # noqa: F401 import json +from subprocess import run -def test_module(client): +def build_params(data: dict) -> list[str]: + + params: list[str] = [] + for value in data.values(): + params.append(f'{json.dumps(value)}') + return params + +def test_module(client: Optional[BaseClient], 
local_execution: bool): data = {'input': 'One, two, three, four.', 'recipe': 'to decimal'} - result = client._http_request('POST', '/bake', json_data=data) + if not local_execution and client: + result = client._http_request('POST', '/bake', json_data=data) + else: + params = build_params(data) + + cmd = ['node', '/bake.js'] + cmd.extend(params) + process = run(cmd, capture_output=True, text=True) + result = process.stdout if result: return 'ok' else: return 'Test failed: ' + str(result) -def run_command(client, data, endpoint): - response = client._http_request('POST', endpoint, json_data=data) +def run_command(client: Optional[BaseClient], data: dict, endpoint: str, local_execution: bool): + if not local_execution and client: + response = client._http_request('POST', endpoint, json_data=data) + else: + params = build_params(data) + cmd = ['node', '/bake.js'] + cmd.extend(params) + process = run(cmd, capture_output=True, text=True) + response = process.stdout return response -def create_output(results, endpoint): +def create_output(results, endpoint: str): output = CommandResults( outputs_prefix=f'CyberChef.{endpoint}', outputs_key_field='', @@ -29,41 +53,42 @@ def create_output(results, endpoint): def main(): apikey = demisto.params().get('apikey') + local_execution = argToBoolean(demisto.params().get('local_execution', 'false')) # get the service API url - base_url = urljoin(demisto.params()['url'], '/cyberchef') - - verify_certificate = not demisto.params().get('insecure', False) - - proxy = demisto.params().get('proxy', False) - - headers = {'Content-Type': 'application/json', - 'x-api-key': apikey} + if not local_execution: + base_url = urljoin(demisto.params()['url'], '/cyberchef') + verify_certificate = not demisto.params().get('insecure', False) + proxy = demisto.params().get('proxy', False) + headers = {'Content-Type': 'application/json', 'x-api-key': apikey} demisto.info(f'Command being called is {demisto.command()}') try: - client = BaseClient( - 
base_url=base_url, - verify=verify_certificate, - headers=headers, - proxy=proxy) + if not local_execution: + client = BaseClient( + base_url=base_url, + verify=verify_certificate, + headers=headers, + proxy=proxy) + else: + client = None if demisto.command() == 'test-module': # This is the call made when pressing the integration Test button. - result = test_module(client) + result = test_module(client, local_execution) demisto.results(result) elif demisto.command() == 'cyberchef-bake': data = {'input': demisto.args().get('input'), 'recipe': json.loads(demisto.args().get('recipe')), 'outputType': demisto.args().get('outputType')} data = remove_empty_elements(data) - results = run_command(client, data, '/bake') + results = run_command(client, data, '/bake', local_execution) return_results(create_output(results, 'Bake')) elif demisto.command() == 'cyberchef-magic': data = {'input': demisto.args().get('input'), 'args': demisto.args().get('args')} data = remove_empty_elements(data) - results = run_command(client, data, '/magic') + results = run_command(client, data, '/magic', local_execution) return_results(create_output(results, 'Magic')) # Log exceptions except Exception as e: diff --git a/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml b/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml index 299688251ad2..74ab43db5d66 100644 --- a/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml +++ b/Packs/CyberChef/Integrations/CyberChef/CyberChef.yml @@ -3,16 +3,21 @@ commonfields: id: CyberChef version: -1 configuration: +- display: Local execution + additionalinfo: Uses the [cyberchef node package](https://github.com/polarityio/cyberchef-node) for local execution of your commands + name: local_execution + required: false + type: 8 - additionalinfo: URL or your CyberChef server or https://prod.apifor.io/ display: Server URL (e.g. 
https://prod.apifor.io/) name: url - required: true type: 0 -- additionalinfo: API key if you use https://prod.apifor.io/ - display: API Key + required: false +- display: API Key name: apikey type: 4 required: false + additionalinfo: API key if you use https://prod.apifor.io/ - display: Trust any certificate (not secure) name: insecure type: 8 @@ -53,7 +58,7 @@ script: - contextPath: CyberChef.Magic description: Output of the Magic operation. type: string - dockerimage: demisto/python3:3.11.9.101916 + dockerimage: demisto/cyberchef:1.0.0.104247 runonce: false script: '' subtype: python3 diff --git a/Packs/CyberChef/Integrations/CyberChef/README.md b/Packs/CyberChef/Integrations/CyberChef/README.md index 615ec2dd6bbc..1581efff1f8d 100644 --- a/Packs/CyberChef/Integrations/CyberChef/README.md +++ b/Packs/CyberChef/Integrations/CyberChef/README.md @@ -12,6 +12,7 @@ CyberChef is a web-application developed by GCHQ that's been called the “Cyber | API Key | API key if you use https://prod.apifor.io/ | False | | Trust any certificate (not secure) | | False | | Use system proxy settings | | False | + | Local execution | Uses the [cyberchef node package](https://github.com/polarityio/cyberchef-node) for local execution of your commands | False | 4. Click **Test** to validate the URLs, token, and connection. ## Commands diff --git a/Packs/CyberChef/ReleaseNotes/1_0_6.md b/Packs/CyberChef/ReleaseNotes/1_0_6.md index 571c7075133a..115c1c10b8a6 100644 --- a/Packs/CyberChef/ReleaseNotes/1_0_6.md +++ b/Packs/CyberChef/ReleaseNotes/1_0_6.md @@ -3,4 +3,4 @@ ##### CyberChef -- Updated the Docker image to: *demisto/python3:3.11.9.101916*. +- Updated the Docker image to: *demisto/python3:3.11.9.101916*. 
\ No newline at end of file diff --git a/Packs/CyberChef/ReleaseNotes/1_0_7.md b/Packs/CyberChef/ReleaseNotes/1_0_7.md new file mode 100644 index 000000000000..f1f6c79c7399 --- /dev/null +++ b/Packs/CyberChef/ReleaseNotes/1_0_7.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### CyberChef + +- Updated the Docker image to: *demisto/cyberchef:1.0.0.104247*. +- Added support for cyberchef node package as dependency to execute cyberchef locally. diff --git a/Packs/CyberChef/pack_metadata.json b/Packs/CyberChef/pack_metadata.json index eb6313832f7f..c32ecdbc9624 100644 --- a/Packs/CyberChef/pack_metadata.json +++ b/Packs/CyberChef/pack_metadata.json @@ -2,7 +2,7 @@ "name": "CyberChef", "description": "Integration with your CyberChef server or https://prod.apifor.io service for CyberChef.", "support": "community", - "currentVersion": "1.0.6", + "currentVersion": "1.0.7", "author": "Harri Ruuttila", "url": "", "email": "", diff --git a/Packs/EmailCommunication/ReleaseNotes/2_0_30.md b/Packs/EmailCommunication/ReleaseNotes/2_0_30.md new file mode 100644 index 000000000000..fc3199e770a4 --- /dev/null +++ b/Packs/EmailCommunication/ReleaseNotes/2_0_30.md @@ -0,0 +1,7 @@ + +#### Scripts + +##### PreprocessEmail + +Updated the script to handle both attachments names in the original format (e.g., image.png) and in the new format -attachmentName- (e.g., 123-attachmentName-image.png). 
+ diff --git a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py index 6352f3fe24ce..8a0eb3c5914f 100644 --- a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py +++ b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail.py @@ -53,7 +53,7 @@ def remove_html_conversation_history(email_html): return email_html -def create_email_html(email_html='', entry_id_list=None): +def create_email_html(email_html='', entry_id_list=[]): """Modify the email's html body to use entry IDs instead of CIDs and remove the original message body if exists. Args: email_html (str): The attachments of the email. @@ -95,14 +95,21 @@ def get_entry_id_list(attachments, files, email_html): matches = re.findall(r'src="cid:([^"]+)"', email_html) or [] entry_id_list = [] files = [files] if not isinstance(files, list) else files + legacy_name = not any('-attachmentName-' in attachment.get('name') for attachment in attachments) for attachment in attachments: attachment_name = attachment.get('name', '') - if '-attachmentName-' in attachment_name: - identifier_id = attachment_name.split('-attachmentName-', 1)[0] + if not legacy_name: + if '-attachmentName-' in attachment_name: + identifier_id = attachment_name.split('-attachmentName-', 1)[0] + for file in files: + file_name = file.get('Name') + if attachment_name == file_name and identifier_id in matches: + entry_id_list.append((attachment_name, file.get('EntryID'))) + else: for file in files: - file_name = file.get('Name') - if attachment_name == file_name and identifier_id in matches: + if attachment_name == file.get('Name') and attachment.get('description', '') != FileAttachmentType.ATTACHED: entry_id_list.append((attachment_name, file.get('EntryID'))) + return entry_id_list @@ -221,53 +228,22 @@ def get_attachments_using_instance(email_related_incident, labels, email_to, ide if integration_name in ['EWS v2', 'EWSO365']: 
demisto.executeCommand("executeCommandAt", {'command': 'ews-get-attachment', 'incidents': email_related_incident, - 'arguments': {'item-id': str(message_id), - 'identifiers-filter': identifier_ids, - 'using': instance_name}}) + 'arguments': {'item-id': str(message_id), 'using': instance_name}}) elif integration_name in ['Gmail', 'Gmail Single User']: demisto.executeCommand("executeCommandAt", {'command': 'gmail-get-attachments', 'incidents': email_related_incident, - 'arguments': {'user-id': 'me', 'message-id': str(message_id), - 'identifiers-filter': identifier_ids, - 'using': instance_name}}) + 'arguments': {'user-id': 'me', 'message-id': str(message_id), 'using': instance_name}}) elif integration_name in ['MicrosoftGraphMail', 'Microsoft Graph Mail Single User']: demisto.executeCommand("executeCommandAt", {'command': 'msgraph-mail-get-attachment', 'incidents': email_related_incident, - 'arguments': {'user_id': email_to, 'message_id': str(message_id), - 'identifiers_filter': identifier_ids, 'using': instance_name}}) + 'arguments': {'user_id': email_to, 'message_id': str(message_id), 'using': instance_name}}) else: demisto.debug('Attachments could only be retrieved from EWS v2 or Gmail') -def find_attachments_to_download(attachments, email_related_incident): - """ Filter only new attachment and their identifier. 
- - Args: - attachments (Attachment): All attachments from the current thread mail - email_related_incident (str): email related incident to retrieve previous files - labels : labels to find the integration name - """ - if attachments: - new_attachment_identifiers_list = ["dummyFileIdentifier"] - new_attachments = [] - previous_files = get_incident_related_files(email_related_incident) - previous_files = [previous_files] if not isinstance(previous_files, list) else previous_files - previous_file_names = [file.get("Name") for file in previous_files] - for attachment in attachments: - attachment_name = attachment.get('name', '') - if attachment_name not in previous_file_names: - if new_attachment_identifiers_list == ["dummyFileIdentifier"]: - new_attachment_identifiers_list = [] - identifier_id = attachment.get('name', '').split('-attachmentName-', 1)[0] - new_attachment_identifiers_list.append(identifier_id) - new_attachments.append(attachment) - return ",".join(new_attachment_identifiers_list), new_attachments - return "", [] - - def get_incident_related_files(incident_id): """Get the email reply attachments after they were uploaded to the server and saved to context of the email reply related incident. @@ -455,11 +431,7 @@ def main(): email_html = remove_html_conversation_history(email_html) - # Get attachments IDs for new attacments - attachment_identifiers_array, attachments = find_attachments_to_download(attachments, - email_related_incident - ) - get_attachments_using_instance(email_related_incident, incident.get('labels'), email_to, attachment_identifiers_array) + get_attachments_using_instance(email_related_incident, incident.get('labels'), email_to) # Adding a 5 seconds sleep in order to wait for all the attachments to get uploaded to the server. 
time.sleep(5) diff --git a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail_test.py b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail_test.py index dd9d12860982..cb20b92d219c 100644 --- a/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail_test.py +++ b/Packs/EmailCommunication/Scripts/PreprocessEmail/PreprocessEmail_test.py @@ -288,6 +288,98 @@ def test_get_entry_id_list(): def test_get_entry_id_list_with_attached_file(): + """ + Given + - List of the email's attachments - but one attachment is marked as ATTACHED (not inline image) + - List of files of the email's related incident + When + - building an entry id list in order to replace the email's attachments source path. + Then + - Ensures that the attached file (attachment_1.pdf) is excluded since it is marked as ATTACHED + """ + from PreprocessEmail import get_entry_id_list + attachments = [ + { + "description": "attached_file", + "name": "attachment_1.pdf", + "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", + "showMediaFile": False, + "type": "" + }, + { + "description": "", + "name": "image_1.png", + "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", + "showMediaFile": False, + "type": "" + }, + { + "description": "", + "name": "image_2.png", + "path": "131_17545998-4b16-4e58-8e6c-2221ada856d4", + "showMediaFile": False, + "type": "" + } + ] + files = [ + { + "EntryID": "30@119", + "Extension": "pdf", + "Info": "application/pdf", + "MD5": "md5", + "Name": "attachment_1.pdf", + "SHA1": "sha1", + "SHA256": "sha256", + "SHA512": "sha512", + "SSDeep": "ssdeep", + "Size": 63111, + "Type": "PDF document, version 1.4" + }, + { + "EntryID": "34@119", + "Extension": "png", + "Info": "image/png", + "MD5": "md5", + "Name": "attachment_2.png", + "SHA1": "4sha1", + "SHA256": "sha256", + "SHA512": "sha512", + "SSDeep": "ssdeep", + "Size": 9580, + "Type": "PNG image data, 264 x 60, 8-bit/color RGBA, non-interlaced" + }, + { + "EntryID": "35@119", + "Extension": "png", + 
"Info": "image/png", + "MD5": "md5", + "Name": "image_1.png", + "SHA1": "4sha1", + "SHA256": "sha256", + "SHA512": "sha512", + "SSDeep": "ssdeep", + "Size": 9580, + "Type": "PNG image data, 264 x 60, 8-bit/color RGBA, non-interlaced" + }, + { + "EntryID": "36@119", + "Extension": "png", + "Info": "image/png", + "MD5": "md5", + "Name": "image_2.png", + "SHA1": "4sha1", + "SHA256": "sha256", + "SHA512": "sha512", + "SSDeep": "ssdeep", + "Size": 9580, + "Type": "PNG image data, 264 x 60, 8-bit/color RGBA, non-interlaced" + }] + expected = [('image_1.png', '35@119'), ('image_2.png', '36@119')] + email_html = '' + assert expected == get_entry_id_list(attachments, files, email_html) + + +def test_get_entry_id_list_no_attachmentName(): """ Given - List of the email's attachments - but one attachment is marked as ATTACHED (not inline image) @@ -309,14 +401,14 @@ def test_get_entry_id_list_with_attached_file(): }, { "description": "", - "name": "123-attachmentName-image_1.png", + "name": "image_1.png", "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", "showMediaFile": False, "type": "" }, { - "description": "", - "name": "456-attachmentName-image_2.png", + "description": "attached_file", + "name": "image_2.png", "path": "131_17545998-4b16-4e58-8e6c-2221ada856d4", "showMediaFile": False, "type": "" @@ -354,7 +446,7 @@ def test_get_entry_id_list_with_attached_file(): "Extension": "png", "Info": "image/png", "MD5": "md5", - "Name": "123-attachmentName-image_1.png", + "Name": "image_1.png", "SHA1": "4sha1", "SHA256": "sha256", "SHA512": "sha512", @@ -367,7 +459,7 @@ def test_get_entry_id_list_with_attached_file(): "Extension": "png", "Info": "image/png", "MD5": "md5", - "Name": "456-attachmentName-image_2.png", + "Name": "image_2.png", "SHA1": "4sha1", "SHA256": "sha256", "SHA512": "sha512", @@ -375,7 +467,7 @@ def test_get_entry_id_list_with_attached_file(): "Size": 9580, "Type": "PNG image data, 264 x 60, 8-bit/color RGBA, non-interlaced" }] - expected = 
[('123-attachmentName-image_1.png', '35@119'), ('456-attachmentName-image_2.png', '36@119')] + expected = [('attachment_1.pdf', '30@119'), ('image_1.png', '35@119')] email_html = '' assert expected == get_entry_id_list(attachments, files, email_html) @@ -652,7 +744,6 @@ def test_main_untagged_email(mocker): Then - Validate that no relevant incident was created """ - from PreprocessEmail import main mocker.patch.object(demisto, 'incident', return_value={'CustomFields': {}}) mocker.patch.object(demisto, 'args', return_value={"CreateIncidentUntaggedEmail": False}) @@ -669,18 +760,18 @@ def test_main_untagged_email(mocker): [ ([{'type': 'Email/ID', 'value': 'foo@test.com'}, {'type': 'Instance', 'value': 'ews'}, {'type': 'Brand', 'value': 'EWSO365'}], 'test@test.com', - {'arguments': {'item-id': 'foo@test.com', 'identifiers-filter': '', 'using': 'ews'}, + {'arguments': {'item-id': 'foo@test.com', 'using': 'ews'}, 'command': 'ews-get-attachment', 'incidents': None} ), ([{'type': 'Email/ID', 'value': 'foo@test.com'}, {'type': 'Instance', 'value': 'gmail'}, {'type': 'Brand', 'value': 'Gmail'}], 'test@gmail.com', - {'arguments': {'message-id': 'foo@test.com', 'user-id': 'me', 'identifiers-filter': '', 'using': 'gmail'}, + {'arguments': {'message-id': 'foo@test.com', 'user-id': 'me', 'using': 'gmail'}, 'command': 'gmail-get-attachments', 'incidents': None} ), ([{'type': 'Email/ID', 'value': 'foo@outlook.com'}, {'type': 'Instance', 'value': 'MicrosoftGraphMail'}, {'type': 'Brand', 'value': 'MicrosoftGraphMail'}], 'test@outlook.com', {'command': 'msgraph-mail-get-attachment', 'incidents': None, - 'arguments': {'user_id': 'test@outlook.com', 'message_id': 'foo@outlook.com', 'identifiers_filter': '', + 'arguments': {'user_id': 'test@outlook.com', 'message_id': 'foo@outlook.com', 'using': 'MicrosoftGraphMail'}} ), ] @@ -693,6 +784,20 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): ATTACHMENTS = [{ + "description": "", + "name": "image_1.png", + 
"path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", + "showMediaFile": False, + "type": "" +}, { + "description": "", + "name": "image_2.png", + "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", + "showMediaFile": False, + "type": "" +}] + +ATTACHMENTS_2 = [{ "description": "", "name": "123-attachmentName-image_1.png", "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", @@ -704,6 +809,12 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", "showMediaFile": False, "type": "" +}, { + "description": "", + "name": "image_3.png", + "path": "131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215", + "showMediaFile": False, + "type": "" }] FILES_TEST1 = [{ @@ -711,7 +822,7 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): "Extension": "png", "Info": "image/png", "MD5": "605ebf7bc83a00840a3ea90c8ed56515", - "Name": "123-attachmentName-image_1.png", + "Name": "image_1.png", "SHA1": "SHA1", "SHA256": "SHA256", "SHA512": "SHA512", @@ -723,7 +834,7 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): "Extension": "png", "Info": "image/png", "MD5": "605ebf7bc83a00840a3ea90c8ed56515", - "Name": "456-attachmentName-image_2.png", + "Name": "image_2.png", "SHA1": "SHA1", "SHA256": "SHA256", "SHA512": "SHA512", @@ -732,7 +843,7 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): "Type": "PNG image data, 275 x 184, 8-bit/color RGBA, non-interlaced" }] -FILES_TEST2 = { +FILES_TEST2 = [{ "EntryID": "4@131", "Extension": "png", "Info": "image/png", @@ -744,46 +855,157 @@ def test_get_attachments_using_instance(labels, email_to, result, mocker): "SSDeep": "SSDeep", "Size": 127884, "Type": "PNG image data, 275 x 184, 8-bit/color RGBA, non-interlaced" -} - -FILES_TEST3 = {} +}, { + "EntryID": "5@131", + "Extension": "png", + "Info": "image/png", + "MD5": "605ebf7bc83a00840a3ea90c8ed56515", + "Name": "456-attachmentName-image_2.png", + "SHA1": 
"SHA1", + "SHA256": "SHA256", + "SHA512": "SHA512", + "SSDeep": "SSDeep", + "Size": 127884, + "Type": "PNG image data, 275 x 184, 8-bit/color RGBA, non-interlaced" +}, { + "EntryID": "5@131", + "Extension": "png", + "Info": "image/png", + "MD5": "605ebf7bc83a00840a3ea90c8ed56515", + "Name": "image_3.png", + "SHA1": "SHA1", + "SHA256": "SHA256", + "SHA512": "SHA512", + "SSDeep": "SSDeep", + "Size": 127884, + "Type": "PNG image data, 275 x 184, 8-bit/color RGBA, non-interlaced" +}] @pytest.mark.parametrize( - "attachments, email_related_incident, files, expected_result", + "attachments, files, html, expected_result", [ - (ATTACHMENTS, 'test@gmail.com', FILES_TEST1, ('dummyFileIdentifier', [])), - (ATTACHMENTS, 'test@gmail.com', FILES_TEST2, ('456', [{'description': '', 'name': '456-attachmentName-image_2.png', - 'path': '131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215', - 'showMediaFile': False, 'type': ''}])), - (ATTACHMENTS, 'test@gmail.com', FILES_TEST3, ('123,456', [{'description': '', - 'name': '123-attachmentName-image_1.png', - 'path': '131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215', - 'showMediaFile': False, 'type': ''}, - {'description': '', - 'name': '456-attachmentName-image_2.png', - 'path': '131_dd98957a-d5c3-42e0-8a81-f3ce7fa68215', - 'showMediaFile': False, 'type': ''}])), + (ATTACHMENTS, FILES_TEST1, "", [('image_1.png', '4@131'), ('image_2.png', '5@131')]), + ( + ATTACHMENTS_2, FILES_TEST2, """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ Inline Image 1 + Inline Image 2 + +""", + [('123-attachmentName-image_1.png', '4@131'), ('456-attachmentName-image_2.png', '5@131')] + ) ] ) -def test_find_attachments_to_download(attachments, email_related_incident, files, expected_result, mocker): +def test_get_entry_id_list_only_attachments(attachments, files, html, expected_result): """ - Test case to find attachments to download based on different scenarios. - - Given: - - Attachments of the incident. - - Email which is related to the incident. + Given + - case 1: all attachments are attached or in the original (plain) format. + - case 2: 2 attachments in the new format (-attachmentName-) 1 attachment attached. + When + - running get_entry_id_list. + Then + - case 1: returns the two original entry IDs. + - case 2: return only the new formatted attachments - as they should be replaced in the html. + """ + from PreprocessEmail import get_entry_id_list + assert get_entry_id_list(attachments, files, html) == expected_result - When: - - Scenario 1: no new images. - - Scenario 2: one new image. - - Scenario 3: all images are new. - Then: - - Return the corresponding amount of entry IDs. +@pytest.mark.parametrize( + "html, entry_id_list, expected_result", + [ + ( + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ image_1.png + +""", + [('123-attachmentName-image_1.png', '4@131'), ('image_2.png', '5@131')], + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ image_1.png + +""" + ), + ( + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ image_1.png + +""", + [('image_1.png', '4@131'), ('image_2.png', '5@131')], + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ image_1.png + +""" + ), + ( + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ +""", + [('image_1.png', '4@131'), ('image_2.png', '5@131')], + """ + + + Inline Images + + +

Hello, World!

+

This is a test email with inline images.

+ +""" + ) + ] +) +def test_create_email_html_no_image_to_insert(html, entry_id_list, expected_result): """ - import PreprocessEmail - mocker.patch.object(PreprocessEmail, 'get_incident_related_files', return_value=files) - result = PreprocessEmail.find_attachments_to_download(attachments, email_related_incident) - assert result[0] == expected_result[0] - assert result[1] == expected_result[1] + Given + - case 1: one image to replace in html with new name format. + - case 2: one image to replace in html with old name format. + - case 3: no images to replace. + When + - running create_email_html. + Then + returns the expected html. + """ + from PreprocessEmail import create_email_html + assert create_email_html(html, entry_id_list) == expected_result diff --git a/Packs/EmailCommunication/pack_metadata.json b/Packs/EmailCommunication/pack_metadata.json index f7f4adcd99e6..57a1eec31b8a 100644 --- a/Packs/EmailCommunication/pack_metadata.json +++ b/Packs/EmailCommunication/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Email Communication", "description": "Do you have to send multiple emails to end users? This content pack helps you streamline the process and automate updates, notifications and more.\n", "support": "xsoar", - "currentVersion": "2.0.29", + "currentVersion": "2.0.30", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "videos": [ diff --git a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml index fdf410b67c19..d80c63c3538a 100644 --- a/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml +++ b/Packs/FeedGitHub/Integrations/FeedGitHub/FeedGitHub.yml @@ -151,7 +151,7 @@ script: name: limit description: Gets indicators from the feed within a specified date range and up to a maximum limit.. 
name: github-get-indicators - dockerimage: demisto/taxii2:1.0.0.98729 + dockerimage: demisto/taxii2:1.0.0.105766 feed: true isfetch: false longRunning: false @@ -167,4 +167,3 @@ marketplaces: - marketplacev2 tests: - No tests (auto formatted) - diff --git a/Packs/FeedGitHub/ReleaseNotes/1_0_4.md b/Packs/FeedGitHub/ReleaseNotes/1_0_4.md new file mode 100644 index 000000000000..91385cf0a373 --- /dev/null +++ b/Packs/FeedGitHub/ReleaseNotes/1_0_4.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Github Feed + +- Updated the Docker image to: *demisto/taxii2:1.0.0.105766*. diff --git a/Packs/FeedGitHub/ReleaseNotes/1_0_5.md b/Packs/FeedGitHub/ReleaseNotes/1_0_5.md new file mode 100644 index 000000000000..157b6188b5e9 --- /dev/null +++ b/Packs/FeedGitHub/ReleaseNotes/1_0_5.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Github Feed + +- Documentation and metadata improvements. diff --git a/Packs/FeedGitHub/pack_metadata.json b/Packs/FeedGitHub/pack_metadata.json index c28a0fa44da8..d4972dc15bb8 100644 --- a/Packs/FeedGitHub/pack_metadata.json +++ b/Packs/FeedGitHub/pack_metadata.json @@ -2,7 +2,7 @@ "name": "GitHub Feed", "description": "A feed to ingest indicators of compromise from Github repositories. 
The feed supports general extraction of IOCs, extracting from STIX data format and parsing of YARA Rules out of the box.", "support": "xsoar", - "currentVersion": "1.0.3", + "currentVersion": "1.0.5", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Flashpoint/Integrations/Ignite/README.md b/Packs/Flashpoint/Integrations/Ignite/README.md index 75adecbe0eb3..6d3a59fd6c34 100644 --- a/Packs/Flashpoint/Integrations/Ignite/README.md +++ b/Packs/Flashpoint/Integrations/Ignite/README.md @@ -760,7 +760,7 @@ Retrieves a list of alerts based on the filter values provided in the command ar >### Alerts >|ID|Created at (UTC)|Query|Source|Resource URL|Site Title|Shodan Host|Repository|Owner|Origin|Ports|Services|Highlight Text| >|---|---|---|---|---|---|---|---|---|---|---|---|---| ->| 00000000-0000-0000-0000-000000000001 | Jun 17, 2024 05:54 | facebook | data_exposure__github | [https://dummyurl.com/naive-gabrie-white](https://dummyurl.com/naive-gabrie-white) | | | naive-gabrie-white.github.io | naive-gabrie-white | searches | | | data\-image="https://i.dummyurl.net/2021/02/24/000000000000001.png" data\-sites="facebook,twitter,wechat,weibo,qq">") - if not identifier_id or identifier_id == "None": - identifier_id = part['body'].get('attachmentId').strip("<>") + content_id = header.get('value').strip("<>") + if header.get('name') == 'Content-Disposition': + is_inline = 'inline' in header.get('value').strip('<>') + if is_inline and content_id and content_id != "None" and not LEGACY_NAME: + attachmentName = f"{content_id}-attachmentName-{attachmentName}" attachments.append({ 'ID': part['body']['attachmentId'], - 'Name': f"{identifier_id}-attachmentName-{part['filename']}", + 'Name': attachmentName, }) return body, html, attachments @@ -1526,14 +1531,13 @@ def get_attachments_command(): args = demisto.args() user_id = args.get('user-id') _id = args.get('message-id') - identifiers_filter = args.get('identifiers-filter', "") 
- attachments = get_attachments(user_id, _id, identifiers_filter) + attachments = get_attachments(user_id, _id) return [fileResult(name, data) for name, data in attachments] -def get_attachments(user_id, _id, identifiers_filter=""): +def get_attachments(user_id, _id): mail_args = { 'userId': user_id, 'id': _id, @@ -1553,14 +1557,10 @@ def get_attachments(user_id, _id, identifiers_filter=""): } files = [] for attachment in result.get('Attachments', []): - identifiers_filter_array = argToList(identifiers_filter) command_args['id'] = attachment['ID'] result = service.users().messages().attachments().get(**command_args).execute() - if (not identifiers_filter_array - or ('-attachmentName-' in attachment['Name'] - and attachment['Name'].split('-attachmentName-')[0] in identifiers_filter_array)): - file_data = base64.urlsafe_b64decode(result['data'].encode('ascii')) - files.append((attachment['Name'], file_data)) + file_data = base64.urlsafe_b64decode(result['data'].encode('ascii')) + files.append((attachment['Name'], file_data)) return files diff --git a/Packs/Gmail/Integrations/Gmail/Gmail.yml b/Packs/Gmail/Integrations/Gmail/Gmail.yml index 7994a7f6fc5e..1044b7b9ab65 100644 --- a/Packs/Gmail/Integrations/Gmail/Gmail.yml +++ b/Packs/Gmail/Integrations/Gmail/Gmail.yml @@ -69,6 +69,12 @@ configuration: section: Collect additionalinfo: A maximum of 200 emails per fetch (even if a higher number is configured). The default is 50. required: false +- display: Use legacy attachment name + name: legacy_name + section: Collect + type: 8 + advanced: true + defaultvalue: 'false' description: Gmail API and user management (This integration replaces the Gmail functionality in the GoogleApps API and G Suite integration). display: Gmail name: Gmail @@ -205,10 +211,6 @@ script: - description: The user's email address. The "me" special value can be used to indicate the authenticated user. 
name: user-id required: true - - description: Identifier ids to download to XSOAR (to avoid downloading all files to XSOAR during the preprocessing script). - name: identifiers-filter - required: false - hidden: true description: Retrieves attachments from a sent Gmail message. name: gmail-get-attachments outputs: diff --git a/Packs/Gmail/Integrations/Gmail/Gmail_test.py b/Packs/Gmail/Integrations/Gmail/Gmail_test.py index 05f914c9cc8a..d3bab63599e3 100644 --- a/Packs/Gmail/Integrations/Gmail/Gmail_test.py +++ b/Packs/Gmail/Integrations/Gmail/Gmail_test.py @@ -991,7 +991,7 @@ def test_handle_html_image_with_new_line(mocker): [ (part_test1, ('', '', [{'ID': '1234', 'Name': '5678-attachmentName-image-1.png'}])), (part_test2, ('', '', [{'ID': '1234', 'Name': '5678-attachmentName-image-1.png'}])), - (part_test3, ('', '', [{'ID': '1234', 'Name': '1234-attachmentName-image-1.png'}])), + (part_test3, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), ], ) def test_parse_mail_parts(part, expected_result): @@ -1006,3 +1006,26 @@ def test_parse_mail_parts(part, expected_result): from Gmail import parse_mail_parts result = parse_mail_parts(part) assert result == expected_result + + +@pytest.mark.parametrize( + "part, expected_result", + [ + (part_test1, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + (part_test2, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + (part_test3, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + ], +) +def test_parse_mail_parts_use_legacy_name(monkeypatch, part, expected_result): + """ + Given: + - Part of message from Gmail API response. + When: + - Run parse_mail_parts function LEGACY_NAME is true. + Then: + - Ensure attachment's name was correctly constructed and parsing was correctly done. 
+ """ + from Gmail import parse_mail_parts + monkeypatch.setattr('Gmail.LEGACY_NAME', True) + result = parse_mail_parts(part) + assert result == expected_result diff --git a/Packs/Gmail/ReleaseNotes/1_3_22.md b/Packs/Gmail/ReleaseNotes/1_3_22.md new file mode 100644 index 000000000000..87695e6d23d6 --- /dev/null +++ b/Packs/Gmail/ReleaseNotes/1_3_22.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Gmail + +- Added a Use Legacy Name configuration parameter in order to use the original attachment name. +- Updated the attachment name to be in the following format: -attachmentName- only if the attachment is inline and **Use Legacy Name** is disabled. diff --git a/Packs/Gmail/pack_metadata.json b/Packs/Gmail/pack_metadata.json index 962913feb018..3d001a3627d0 100644 --- a/Packs/Gmail/pack_metadata.json +++ b/Packs/Gmail/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Gmail", "description": "Gmail API and user management (This integration replaces the Gmail functionality in the GoogleApps API and G Suite integration).", "support": "xsoar", - "currentVersion": "1.3.21", + "currentVersion": "1.3.22", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py index a74e39b09a05..e53e4df7f78a 100644 --- a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py +++ b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.py @@ -40,6 +40,7 @@ MAX_FETCH = int(params.get('fetch_limit') or 50) AUTH_CODE = params.get('auth_code_creds', {}).get('password') or params.get('code') AUTH_CODE_UNQUOTE_PREFIX = 'code=' +LEGACY_NAME = argToBoolean(params.get('legacy_name', False)) OOB_CLIENT_ID = "391797357217-pa6jda1554dbmlt3hbji2bivphl0j616.apps.googleusercontent.com" # guardrails-disable-line CLIENT_ID = params.get('credentials', {}).get('identifier') or params.get('client_id') or 
OOB_CLIENT_ID @@ -307,22 +308,25 @@ def parse_mail_parts(self, parts: list[dict]): body += text else: - if part['body'].get('attachmentId') is not None and part.get('headers'): - identifier_id = "" - for header in part['headers']: + if part['body'].get('attachmentId') is not None: + content_id = "" + is_inline = False + attachmentName = part['filename'] + for header in part.get('headers', []): if header.get('name') == 'Content-ID': - identifier_id = header.get('value') - if not identifier_id or identifier_id == "None": - identifier_id = part['body'].get('attachmentId') - identifier_id = identifier_id.strip("<>") + content_id = header.get('value').strip("<>") + if header.get('name') == 'Content-Disposition': + is_inline = 'inline' in header.get('value').strip('<>') + if is_inline and content_id and content_id != "None" and not LEGACY_NAME: + attachmentName = f"{content_id}-attachmentName-{attachmentName}" attachments.append({ 'ID': part['body']['attachmentId'], - 'Name': f"{identifier_id}-attachmentName-{part['filename']}", + 'Name': attachmentName, }) return body, html, attachments - def get_attachments(self, user_id, _id, identifiers_filter=""): + def get_attachments(self, user_id, _id): mail_args = { 'userId': user_id, 'id': _id, @@ -340,14 +344,10 @@ def get_attachments(self, user_id, _id, identifiers_filter=""): } files = [] for attachment in result.get('Attachments', []): - identifiers_filter_array = argToList(identifiers_filter) command_args['id'] = attachment['ID'] result = execute_gmail_action(service, "get_attachments", command_args) - if (not identifiers_filter_array - or ('-attachmentName-' in attachment['Name'] - and attachment['Name'].split('-attachmentName-')[0] in identifiers_filter_array)): - file_data = base64.urlsafe_b64decode(result['data'].encode('ascii')) - files.append((attachment['Name'], file_data)) + file_data = base64.urlsafe_b64decode(result['data'].encode('ascii')) + files.append((attachment['Name'], file_data)) return files 
@staticmethod @@ -1136,9 +1136,8 @@ def reply_mail_command(client: Client): def get_attachments_command(client: Client): args = demisto.args() _id = args.get('message-id') - content_ids = args.get('identifiers-filter', "") - attachments = client.get_attachments('me', _id, content_ids) + attachments = client.get_attachments('me', _id) return [fileResult(name, data) for name, data in attachments] diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.yml b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.yml index 2ad0bee222e8..61a57faf35ba 100644 --- a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.yml +++ b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser.yml @@ -114,6 +114,12 @@ configuration: section: Connect advanced: true required: false +- display: Use legacy attachment name + name: legacy_name + section: Collect + type: 8 + advanced: true + defaultvalue: 'false' description: Gmail API using OAuth 2.0. display: Gmail Single User name: Gmail Single User @@ -346,10 +352,6 @@ script: - description: The ID of the message to retrieve. name: message-id required: true - - description: Identifier ids to download to XSOAR (to avoid downloading all files to XSOAR during the preprocessing script). - name: identifiers-filter - required: false - hidden: true description: Retrieves attachments from a sent Gmail message. 
name: gmail-get-attachments dockerimage: demisto/google-api-py3:1.0.0.98645 diff --git a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py index f4337201d6f8..fd28ebb4eedb 100644 --- a/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py +++ b/Packs/GmailSingleUser/Integrations/GmailSingleUser/GmailSingleUser_test.py @@ -516,7 +516,7 @@ def test_handle_html_image_with_new_line(mocker): 'filename': 'image-1.png', 'headers': [{ 'name': 'Content-ID', 'value': '5678'}, - {'name': 'Content-Disposition', 'value': 'inline'}], + {'name': 'Content-Disposition', 'value': 'attachment'}], 'body': { 'attachmentId': '1234'}, 'mimeType': '' @@ -537,8 +537,8 @@ def test_handle_html_image_with_new_line(mocker): "part, expected_result", [ (part_test1, ('', '', [{'ID': '1234', 'Name': '5678-attachmentName-image-1.png'}])), - (part_test2, ('', '', [{'ID': '1234', 'Name': '5678-attachmentName-image-1.png'}])), - (part_test3, ('', '', [{'ID': '1234', 'Name': '1234-attachmentName-image-1.png'}])), + (part_test2, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + (part_test3, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), ], ) def test_parse_mail_parts(part, expected_result): @@ -546,10 +546,33 @@ def test_parse_mail_parts(part, expected_result): Given: - Part of message from Gmail API response. When: - - Run parse_mail_parts function. + - Run parse_mail_parts function with LEGACY_NAME is false. + Then: + - Ensure attachment's name was correctly constructed and parsing was correctly done. 
+ """ + client = Client() + result = client.parse_mail_parts(part) + assert result == expected_result + + +@pytest.mark.parametrize( + "part, expected_result", + [ + (part_test1, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + (part_test2, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + (part_test3, ('', '', [{'ID': '1234', 'Name': 'image-1.png'}])), + ], +) +def test_parse_mail_parts_use_legacy_name(monkeypatch, part, expected_result): + """ + Given: + - Part of message from Gmail API response. + When: + - Run parse_mail_parts function LEGACY_NAME is true. Then: - Ensure attachment's name was correctly constructed and parsing was correctly done. """ client = Client() + monkeypatch.setattr('GmailSingleUser.LEGACY_NAME', True) result = client.parse_mail_parts(part) assert result == expected_result diff --git a/Packs/GmailSingleUser/ReleaseNotes/1_4_3.md b/Packs/GmailSingleUser/ReleaseNotes/1_4_3.md new file mode 100644 index 000000000000..b5feb94348a4 --- /dev/null +++ b/Packs/GmailSingleUser/ReleaseNotes/1_4_3.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Gmail Single User + +- Added a Use Legacy Name configuration parameter in order to use the original attachment name. +- Updated the attachment name to be in the following format: -attachmentName- only if the attachment is inline and **Use Legacy Name** is disabled. 
\ No newline at end of file diff --git a/Packs/GmailSingleUser/pack_metadata.json b/Packs/GmailSingleUser/pack_metadata.json index b406eeae0e6b..a4fad5b582ab 100644 --- a/Packs/GmailSingleUser/pack_metadata.json +++ b/Packs/GmailSingleUser/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Gmail Single User", "description": "Gmail API using OAuth 2.0.", "support": "xsoar", - "currentVersion": "1.4.2", + "currentVersion": "1.4.3", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/GoogleThreatIntelligence/README.md b/Packs/GoogleThreatIntelligence/README.md index 420b0e864813..8d8dcf23cc57 100644 --- a/Packs/GoogleThreatIntelligence/README.md +++ b/Packs/GoogleThreatIntelligence/README.md @@ -35,8 +35,8 @@ Security team challenges | Solving with Google Threat Intelligence + XSOAR - Is a vulnerability (CVE) that appeared in my environment being currently leveraged by malware? How popular is it? **Technical capabilities** -- Threat reputation for {files, hashes, domains, IPs, URLs} coming from [over 90 security vendors](https://support.virustotal.com/hc/en-us/articles/115002146809-Contributors) (antivirus solutions, nextgen EDRs, domain blocklists, network perimeter solutions, etc.). -- Multi-angular detection for files via crowdsourced {[YARA](https://virustotal.github.io/yara/), [SIGMA](https://blog.virustotal.com/2021/05/context-is-king-part-i-crowdsourced.html), [IDS](https://support.virustotal.com/hc/en-us/articles/360018436078-Crowdsourced-IDS-Rules)} rules. +- Threat reputation for {files, hashes, domains, IPs, URLs} coming from [over 90 security vendors](https://docs.virustotal.com/docs/contributors) (antivirus solutions, nextgen EDRs, domain blocklists, network perimeter solutions, etc.). 
+- Multi-angular detection for files via crowdsourced {[YARA](https://virustotal.github.io/yara/), [SIGMA](https://blog.virustotal.com/2021/05/context-is-king-part-i-crowdsourced.html), [IDS](https://docs.virustotal.com/docs/crowdsourced-ids-rules)} rules. - Allowlist (benign) information through the [aggregation of goodware indicators and provenance details](https://blog.virustotal.com/2021/08/introducing-known-distributors.html). - Dynamic analysis for files through detonation in [multiple home-grown and 3rd-party partner](https://blog.virustotal.com/search/label/multisandbox) sandbox solutions. - Extended file context and metadata through static analysis tools such as sigcheck’s authenticode signature extractor, MS Office macro VBA dissectors, Didier Stevens’ PDF tools, etc. diff --git a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.py b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.py index c2c57e77cedf..7f6e0d956c0c 100644 --- a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.py +++ b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.py @@ -1,15 +1,15 @@ import demistomock as demisto # noqa: F401 from CommonServerPython import * # noqa: F401 -from CommonServerUserPython import * ''' IMPORTS ''' -import dateparser import json import traceback +from datetime import datetime, timedelta + +import dateparser import urllib3 -from datetime import timedelta, datetime # Disable insecure warnings urllib3.disable_warnings() @@ -302,7 +302,7 @@ def get_livesearches(self): data=data ) - def get_search_id(self, query, time_range, limit=100, repos=[]): + def get_search_id(self, query, time_range, limit=100, repos=[], timeout=60): """ :param query: LogPoint search query @@ -312,6 +312,8 @@ def get_search_id(self, query, time_range, limit=100, repos=[]): :param repos: LogPoint repos 
from where logs should be fetched + :param timeout: LogPoint search timeout + :return: dict containing response from API call """ data = { @@ -321,7 +323,8 @@ def get_search_id(self, query, time_range, limit=100, repos=[]): "query": query, "time_range": time_range, "limit": limit, - "repos": repos + "repos": repos, + "timeout": timeout }) } return self._http_request( @@ -701,12 +704,13 @@ def get_searchid_command(client, args): time_range = args.get('time_range', 'Last 5 minutes') limit = args.get('limit', '100') repos = argToList(args.get('repos')) + timeout = args.get('timeout', '60') if limit: try: limit = int(limit) except ValueError: raise DemistoException(f"The provided argument '{limit}' for limit is not a valid integer.") - result = client.get_search_id(query, time_range, limit, repos) + result = client.get_search_id(query, time_range, limit, repos, timeout) if not result.get('success'): raise DemistoException(result.get('message')) search_id = result.get('search_id') @@ -729,10 +733,14 @@ def get_searchid_command(client, args): def search_logs_command(client, args): search_id = args.get('search_id') - search_result = client.get_search_results(search_id) - if not search_result.get('success'): - raise DemistoException(search_result.get('message')) - rows = search_result.get('rows', []) + rows = [] + while True: + search_result = client.get_search_results(search_id) + if not search_result.get('success'): + raise DemistoException(search_result.get('message')) + rows += search_result.get('rows', []) + if search_result.get('final'): + break if rows and len(rows) > 0: display_title = f"Found {len(rows)} logs" markdown = tableToMarkdown(display_title, rows, headers=None, diff --git a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.yml b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.yml index 75592883b978..78f124419a9c 100644 --- 
a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.yml +++ b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/LogPoint_SIEM_Integration.yml @@ -46,15 +46,20 @@ configuration: name: max_fetch type: 0 required: false +- defaultvalue: '1' + display: Incidents Fetch Interval + name: incidentFetchInterval + required: false + type: 19 description: Use this Content Pack to search logs, fetch incident logs from LogPoint, analyze them for underlying threats, and respond to these threats in real-time. display: LogPoint SIEM Integration name: LogPoint SIEM Integration script: commands: - arguments: - - description: From Timestamp + - description: From Timestamp. name: ts_from - - description: To Timestamp + - description: To Timestamp. name: ts_to - description: Number of incidents to fetch. Accepts integer value. name: limit @@ -62,82 +67,82 @@ script: name: lp-get-incidents outputs: - contextPath: LogPoint.Incidents.name - description: LogPoint Incident Name + description: LogPoint Incident Name. type: String - contextPath: LogPoint.Incidents.type - description: LogPoint Incident Type + description: LogPoint Incident Type. type: String - contextPath: LogPoint.Incidents.incident_id - description: LogPoint Incident ID + description: LogPoint Incident ID. type: String - contextPath: LogPoint.Incidents.assigned_to - description: LogPoint Incidents Assigned To + description: LogPoint Incidents Assigned To. type: String - contextPath: LogPoint.Incidents.status - description: LogPoint Incidents Status + description: LogPoint Incidents Status. type: String - contextPath: LogPoint.Incidents.id - description: LogPoint Incident Object ID + description: LogPoint Incident Object ID. type: String - contextPath: LogPoint.Incidents.detection_timestamp - description: LogPoint Incidents Detection Timestamp + description: LogPoint Incidents Detection Timestamp. 
type: Number - contextPath: LogPoint.Incidents.username - description: LogPoint Incident Username + description: LogPoint Incident Username. type: String - contextPath: LogPoint.Incidents.user_id - description: LogPoint Incidents User ID + description: LogPoint Incidents User ID. type: String - contextPath: LogPoint.Incidents.assigned_to - description: LogPoint Incidents Assigned To + description: LogPoint Incidents Assigned To. type: String - contextPath: LogPoint.Incidents.visible_to - description: LogPoint Incidents Visible To + description: LogPoint Incidents Visible To. type: String - contextPath: LogPoint.Incidents.tid - description: LogPoint Incidents Tid + description: LogPoint Incidents Tid. type: String - contextPath: LogPoint.Incidents.rows_count - description: LogPoint Incidents Rows Count + description: LogPoint Incidents Rows Count. type: String - contextPath: LogPoint.Incidents.risk_level - description: LogPoint Incidents Risk Level + description: LogPoint Incidents Risk Level. type: String - contextPath: LogPoint.Incidents.detection_timestamp - description: LogPoint Incidents Detection Timestamp + description: LogPoint Incidents Detection Timestamp. type: String - contextPath: LogPoint.Incidents.loginspect_ip_dns - description: LogPoint Incidents Loginspect IP DNS + description: LogPoint Incidents Loginspect IP DNS. type: String - contextPath: LogPoint.Incidents.status - description: LogPoint Incidents Status + description: LogPoint Incidents Status. type: String - contextPath: LogPoint.Incidents.comments - description: LogPoint Incidents Comments + description: LogPoint Incidents Comments. type: String - contextPath: LogPoint.Incidents.commentscount - description: LogPoint Incidents Comments Count + description: LogPoint Incidents Comments Count. type: Number - contextPath: LogPoint.Incidents.query - description: LogPoint Incidents Query + description: LogPoint Incidents Query. 
type: String - contextPath: LogPoint.Incidents.repos - description: LogPoint Incidents Repos + description: LogPoint Incidents Repos. type: String - contextPath: LogPoint.Incidents.time_range - description: LogPoint Incidents Time Range + description: LogPoint Incidents Time Range. type: String - contextPath: LogPoint.Incidents.alert_obj_id - description: LogPoint Incidents Alert Obj Id + description: LogPoint Incidents Alert Obj Id. type: String - contextPath: LogPoint.Incidents.throttle_enabled - description: LogPoint Incidents Throttle Enabled + description: LogPoint Incidents Throttle Enabled. type: Boolean - contextPath: LogPoint.Incidents.lastaction - description: LogPoint Incidents Last Action + description: LogPoint Incidents Last Action. type: String - contextPath: LogPoint.Incidents.description - description: LogPoint Incidents Description + description: LogPoint Incidents Description. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. @@ -149,403 +154,403 @@ script: - description: Incident Detection TImestamp. It is the value contained in 'detection_timestamp' key of the incidents obtained from 'lp-get-incidents' command. name: date required: true - description: Retrieves a Particular Incident's Data + description: Retrieves a Particular Incident's Data. name: lp-get-incident-data outputs: - contextPath: LogPoint.Incidents.data.use - description: LogPoint Incidents Data Use + description: LogPoint Incidents Data Use. type: String - contextPath: LogPoint.Incidents.data.used - description: LogPoint Incidents Data Used + description: LogPoint Incidents Data Used. type: String - contextPath: LogPoint.Incidents.data.log_ts - description: LogPoint Incidents Data Log Ts + description: LogPoint Incidents Data Log Ts. 
type: Number - contextPath: LogPoint.Incidents.data._type_str - description: LogPoint Incidents Data Type Str + description: LogPoint Incidents Data Type Str. type: String - contextPath: LogPoint.Incidents.data.msg - description: LogPoint Incidents Data Msg + description: LogPoint Incidents Data Msg. type: String - contextPath: LogPoint.Incidents.data.total - description: LogPoint Incidents Data Total + description: LogPoint Incidents Data Total. type: String - contextPath: LogPoint.Incidents.data.device_name - description: LogPoint Incidents Data Device Name + description: LogPoint Incidents Data Device Name. type: String - contextPath: LogPoint.Incidents.data._offset - description: LogPoint Incidents Data Offset + description: LogPoint Incidents Data Offset. type: String - contextPath: LogPoint.Incidents.data.logpoint_name - description: LogPoint Incidents Data LogPoint Name + description: LogPoint Incidents Data LogPoint Name. type: String - contextPath: LogPoint.Incidents.data.repo_name - description: LogPoint Incidents Data Repo Name + description: LogPoint Incidents Data Repo Name. type: String - contextPath: LogPoint.Incidents.data.free - description: LogPoint Incidents Data Free + description: LogPoint Incidents Data Free. type: String - contextPath: LogPoint.Incidents.data.source_name - description: LogPoint Incidents Data Source Name + description: LogPoint Incidents Data Source Name. type: String - contextPath: LogPoint.Incidents.data.col_ts - description: LogPoint Incidents Data Col Ts + description: LogPoint Incidents Data Col Ts. type: Number - contextPath: LogPoint.Incidents.data._tz - description: LogPoint Incidents Data Tz + description: LogPoint Incidents Data Tz. type: String - contextPath: LogPoint.Incidents.data.norm_id - description: LogPoint Incidents Data Norm Id + description: LogPoint Incidents Data Norm Id. 
type: String - contextPath: LogPoint.Incidents.data._identifier - description: LogPoint Incidents Data Identifier + description: LogPoint Incidents Data Identifier. type: String - contextPath: LogPoint.Incidents.data.collected_at - description: LogPoint Incidents Data Collected At + description: LogPoint Incidents Data Collected At. type: String - contextPath: LogPoint.Incidents.data.device_ip - description: LogPoint Incidents Data Device IP + description: LogPoint Incidents Data Device IP. type: String - contextPath: LogPoint.Incidents.data._fromV550 - description: LogPoint Incidents Data From V550 + description: LogPoint Incidents Data From V550. type: String - contextPath: LogPoint.Incidents.data._enrich_policy - description: LogPoint Incidents Data Enrich Policy + description: LogPoint Incidents Data Enrich Policy. type: String - contextPath: LogPoint.Incidents.data._type_num - description: LogPoint Incidents Data Type Num + description: LogPoint Incidents Data Type Num. type: String - contextPath: LogPoint.Incidents.data._type_ip - description: LogPoint Incidents Data Type IP + description: LogPoint Incidents Data Type IP. type: String - contextPath: LogPoint.Incidents.data.sig_id - description: LogPoint Incidents Data Sig Id + description: LogPoint Incidents Data Sig Id. type: String - contextPath: LogPoint.Incidents.data.col_type - description: LogPoint Incidents Data Col Type + description: LogPoint Incidents Data Col Type. type: String - contextPath: LogPoint.Incidents.data.object - description: LogPoint Incidents Data Object + description: LogPoint Incidents Data Object. type: String - contextPath: LogPoint.Incidents.data._labels - description: LogPoint Incidents Data Labels + description: LogPoint Incidents Data Labels. type: String - contextPath: LogPoint.Incidents.data.source_address - description: Source Address + description: Source Address. 
type: String - contextPath: LogPoint.Incidents.data.destination_address - description: Destination Address + description: Destination Address. type: String - contextPath: LogPoint.Incidents.data.workstation - description: Workstation + description: Workstation. type: String - contextPath: LogPoint.Incidents.data.domain - description: Domain + description: Domain. type: String - contextPath: LogPoint.Incidents.data.user - description: User + description: User. type: String - contextPath: LogPoint.Incidents.data.caller_user - description: Caller User + description: Caller User. type: String - contextPath: LogPoint.Incidents.data.target_user - description: Target User + description: Target User. type: String - contextPath: LogPoint.Incidents.data.source_machine_id - description: Source Machie Id + description: Source Machie Id. type: String - contextPath: LogPoint.Incidents.data.destination_machine_id - description: Destination Machine Id + description: Destination Machine Id. type: String - contextPath: LogPoint.Incidents.data.destination_port - description: Destination Port + description: Destination Port. type: String - contextPath: LogPoint.Incidents.data.event_type - description: Event Type + description: Event Type. type: String - contextPath: LogPoint.Incidents.data.share_path - description: Share Path + description: Share Path. type: String - contextPath: LogPoint.Incidents.data.object_name - description: Object Name + description: Object Name. type: String - contextPath: LogPoint.Incidents.data.sub_status_code - description: Sub Status Code + description: Sub Status Code. type: String - contextPath: LogPoint.Incidents.data.object_type - description: Object Type + description: Object Type. type: String - contextPath: LogPoint.Incidents.data.request_method - description: Request Method + description: Request Method. type: String - contextPath: LogPoint.Incidents.data.status_code - description: Status Code + description: Status Code. 
type: String - contextPath: LogPoint.Incidents.data.received_datasize - description: Received Datasize + description: Received Datasize. type: String - contextPath: LogPoint.Incidents.data.received_packet - description: Received Packet + description: Received Packet. type: String - contextPath: LogPoint.Incidents.data.user_agent - description: User Agent + description: User Agent. type: String - contextPath: LogPoint.Incidents.data.sent_datasize - description: Sent Datasize + description: Sent Datasize. type: String - contextPath: LogPoint.Incidents.data.sender - description: Sender + description: Sender. type: String - contextPath: LogPoint.Incidents.data.receiver - description: Receiver + description: Receiver. type: String - contextPath: LogPoint.Incidents.data.datasize - description: Datasize + description: Datasize. type: String - contextPath: LogPoint.Incidents.data.file - description: File + description: File. type: String - contextPath: LogPoint.Incidents.data.subject - description: Subject + description: Subject. type: String - contextPath: LogPoint.Incidents.data.status - description: Status + description: Status. type: String - contextPath: LogPoint.Incidents.data.file_count - description: File Count + description: File Count. type: String - contextPath: LogPoint.Incidents.data.protocol_id - description: Protocol Id + description: Protocol Id. type: String - contextPath: LogPoint.Incidents.data.sent_packet - description: Sent Packet + description: Sent Packet. type: String - contextPath: LogPoint.Incidents.data.service - description: Service + description: Service. type: String - contextPath: LogPoint.Incidents.data.printer - description: Printer + description: Printer. type: String - contextPath: LogPoint.Incidents.data.print_count - description: Print Count + description: Print Count. type: String - contextPath: LogPoint.Incidents.data.event_id - description: Event Id + description: Event Id. 
type: String - contextPath: LogPoint.Incidents.data.country_name - description: Country Name + description: Country Name. type: String - contextPath: LogPoint.Incidents.data.host - description: Host + description: Host. type: String - contextPath: LogPoint.Incidents.data.hash - description: Hash + description: Hash. type: String - contextPath: LogPoint.Incidents.data.hash_sha1 - description: Hash SHA1 + description: Hash SHA1. type: String - contextPath: LogPoint.Incidents.data.agent_address - description: Agent Address + description: Agent Address. type: String - contextPath: LogPoint.Incidents.data.attacker_address - description: Attacker Address + description: Attacker Address. type: String - contextPath: LogPoint.Incidents.data.broadcast_address - description: Broadcast Address + description: Broadcast Address. type: String - contextPath: LogPoint.Incidents.data.client_address - description: Client Address + description: Client Address. type: String - contextPath: LogPoint.Incidents.data.client_hardware_address - description: Client Hardware Address + description: Client Hardware Address. type: String - contextPath: LogPoint.Incidents.data.destination_hardware_address - description: Destination Hardware Address + description: Destination Hardware Address. type: String - contextPath: LogPoint.Incidents.data.destination_nat_address - description: Destination NAT Address + description: Destination NAT Address. type: String - contextPath: LogPoint.Incidents.data.device_address - description: Device Address + description: Device Address. type: String - contextPath: LogPoint.Incidents.data.external_address - description: External Address + description: External Address. type: String - contextPath: LogPoint.Incidents.data.gateway_address - description: Gateway Address + description: Gateway Address. type: String - contextPath: LogPoint.Incidents.data.hardware_address - description: Hardware Address + description: Hardware Address. 
type: String - contextPath: LogPoint.Incidents.data.host_address - description: Host Address + description: Host Address. type: String - contextPath: LogPoint.Incidents.data.interface_address - description: Interface Address + description: Interface Address. type: String - contextPath: LogPoint.Incidents.data.lease_address - description: Lease Address + description: Lease Address. type: String - contextPath: LogPoint.Incidents.data.local_address - description: Local Address + description: Local Address. type: String - contextPath: LogPoint.Incidents.data.nas_address - description: Nas ddress + description: Nas Address. type: String - contextPath: LogPoint.Incidents.data.nas_ipv6_address - description: Nas_IPV6 Address + description: Nas_IPV6 Address. type: String - contextPath: LogPoint.Incidents.data.nat_address - description: NAT Address + description: NAT Address. type: String - contextPath: LogPoint.Incidents.data.nat_source_address - description: NAT Source Address + description: NAT Source Address. type: String - contextPath: LogPoint.Incidents.data.network_address - description: Network Address + description: Network Address. type: String - contextPath: LogPoint.Incidents.data.new_hardware_address - description: New Hardware Address + description: New Hardware Address. type: String - contextPath: LogPoint.Incidents.data.old_hardware_address - description: Old Hardware Address + description: Old Hardware Address. type: String - contextPath: LogPoint.Incidents.data.original_address - description: Original Address + description: Original Address. type: String - contextPath: LogPoint.Incidents.data.original_client_address - description: Original Client Address + description: Original Client Address. type: String - contextPath: LogPoint.Incidents.data.original_destination_address - description: Original Destination Address + description: Original Destination Address. 
type: String - contextPath: LogPoint.Incidents.data.original_server_address - description: Original Server Address + description: Original Server Address. type: String - contextPath: LogPoint.Incidents.data.original_source_address - description: Original Source Address + description: Original Source Address. type: String - contextPath: LogPoint.Incidents.data.originating_address - description: Originating Address + description: Originating Address. type: String - contextPath: LogPoint.Incidents.data.peer_address - description: Peer Address + description: Peer Address. type: String - contextPath: LogPoint.Incidents.data.private_address - description: Private Address + description: Private Address. type: String - contextPath: LogPoint.Incidents.data.proxy_address - description: Proxy Address + description: Proxy Address. type: String - contextPath: LogPoint.Incidents.data.proxy_source_address - description: Proxy Source Address + description: Proxy Source Address. type: String - contextPath: LogPoint.Incidents.data.relay_address - description: Relay Address + description: Relay Address. type: String - contextPath: LogPoint.Incidents.data.remote_address - description: Remote Address + description: Remote Address. type: String - contextPath: LogPoint.Incidents.data.resolved_address - description: Resolved Address + description: Resolved Address. type: String - contextPath: LogPoint.Incidents.data.route_address - description: Route Address + description: Route Address. type: String - contextPath: LogPoint.Incidents.data.scanner_address - description: Scanner Address + description: Scanner Address. type: String - contextPath: LogPoint.Incidents.data.server_address - description: Server Address + description: Server Address. type: String - contextPath: LogPoint.Incidents.data.server_hardware_address - description: Server Hardware Address + description: Server Hardware Address. 
type: String - contextPath: LogPoint.Incidents.data.source_hardware_address - description: Source Hardware Address + description: Source Hardware Address. type: String - contextPath: LogPoint.Incidents.data.start_address - description: Start Address + description: Start Address. type: String - contextPath: LogPoint.Incidents.data.supplier_address - description: Supplier Address + description: Supplier Address. type: String - contextPath: LogPoint.Incidents.data.switch_address - description: Switch Address + description: Switch Address. type: String - contextPath: LogPoint.Incidents.data.translated_address - description: Translated Address + description: Translated Address. type: String - contextPath: LogPoint.Incidents.data.virtual_address - description: Virtual Address + description: Virtual Address. type: String - contextPath: LogPoint.Incidents.data.virtual_server_address - description: Virtual Server Address + description: Virtual Server Address. type: String - contextPath: LogPoint.Incidents.data.vpn_address - description: VPN Address + description: VPN Address. type: String - contextPath: LogPoint.Incidents.data.hash_length - description: Hash Length + description: Hash Length. type: String - contextPath: LogPoint.Incidents.data.hash_sha256 - description: Hash SHA256 + description: Hash SHA256. type: String - contextPath: LogPoint.Incidents.data.alternate_user - description: Alternate User + description: Alternate User. type: String - contextPath: LogPoint.Incidents.data.authenticated_user - description: Authenticated User + description: Authenticated User. type: String - contextPath: LogPoint.Incidents.data.authorized_user - description: Authorized User + description: Authorized User. type: String - contextPath: LogPoint.Incidents.data.certificate_user - description: Certificate User + description: Certificate User. type: String - contextPath: LogPoint.Incidents.data.current_user - description: Current User + description: Current User. 
type: String - contextPath: LogPoint.Incidents.data.database_user - description: Database User + description: Database User. type: String - contextPath: LogPoint.Incidents.data.destination_user - description: Destination User + description: Destination User. type: String - contextPath: LogPoint.Incidents.data.logon_user - description: Logon User + description: Logon User. type: String - contextPath: LogPoint.Incidents.data.new_max_user - description: New Max User + description: New Max User. type: String - contextPath: LogPoint.Incidents.data.new_user - description: New User + description: New User. type: String - contextPath: LogPoint.Incidents.data.old_max_user - description: Old Max User + description: Old Max User. type: String - contextPath: LogPoint.Incidents.data.os_user - description: OS User + description: OS User. type: String - contextPath: LogPoint.Incidents.data.remote_user - description: Remote User + description: Remote User. type: String - contextPath: LogPoint.Incidents.data.source_user - description: Source User + description: Source User. type: String - contextPath: LogPoint.Incidents.data.system_user - description: System User + description: System User. type: String - contextPath: LogPoint.Incidents.data.target_logon_user - description: Target Logon User + description: Target Logon User. type: String - contextPath: LogPoint.Incidents.data.zone_user - description: Zone User + description: Zone User. type: String - arguments: - - description: From Timestamp + - description: From Timestamp. name: ts_from - - description: To Timestamp + - description: To Timestamp. name: ts_to - description: Number of incident states data to fetch. Accepts integer value. name: limit @@ -553,16 +558,16 @@ script: name: lp-get-incident-states outputs: - contextPath: LogPoint.Incidents.states.id - description: LogPoint Incidents States Id + description: LogPoint Incidents States Id. 
type: String - contextPath: LogPoint.Incidents.states.status - description: LogPoint Incidents States Status + description: LogPoint Incidents States Status. type: String - contextPath: LogPoint.Incidents.states.assigned_to - description: LogPoint Incidents States Assigned To + description: LogPoint Incidents States Assigned To. type: String - contextPath: LogPoint.Incidents.states.comments - description: LogPoint Incidents States Comments + description: LogPoint Incidents States Comments. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. @@ -571,11 +576,11 @@ script: - description: Comment to be added to the incidents. name: comment required: true - description: Add comments to the incidents + description: Add comments to the incidents. name: lp-add-incident-comment outputs: - contextPath: LogPoint.Incidents.comment - description: LogPoint Incidents Comment + description: LogPoint Incidents Comment. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. Multiple id can be provided by separating them using comma. @@ -585,11 +590,11 @@ script: - description: Id of the user whom the incidents are assigned. It can be displayed using 'lp-get-users' command. name: new_assignee required: true - description: Assigning/Re-assigning Incidents + description: Assigning/Re-assigning Incidents. name: lp-assign-incidents outputs: - contextPath: LogPoint.Incidents.assign - description: LogPoint Incidents Assign + description: LogPoint Incidents Assign. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. Multiple id can be provided by separating them using comma. 
@@ -600,7 +605,7 @@ script: name: lp-resolve-incidents outputs: - contextPath: LogPoint.Incidents.resolve - description: LogPoint Incidents Resolve + description: LogPoint Incidents Resolve. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. Multiple id can be provided by separating them using comma. @@ -611,31 +616,32 @@ script: name: lp-close-incidents outputs: - contextPath: LogPoint.Incidents.close - description: LogPoint Incidents Close + description: LogPoint Incidents Close. type: String - arguments: - description: Object ID of a particular incident. It is the value contained in 'id' key of the incidents obtained from 'lp-get-incidents' command. Multiple id can be provided by separating them using comma. isArray: true name: incident_obj_ids required: true - description: Re-opens the closed incidents + description: Re-opens the closed incidents. name: lp-reopen-incidents outputs: - contextPath: LogPoint.Incidents.reopen - description: LogPoint Incidents Reopen + description: LogPoint Incidents Reopen. type: String - description: Gets Incident users and user groups. name: lp-get-users outputs: - contextPath: LogPoint.Incidents.users.id - description: LogPoint Incidents Users Id + description: LogPoint Incidents Users Id. type: String - contextPath: LogPoint.Incidents.users.name - description: LogPoint Incidents Users Name + description: LogPoint Incidents Users Name. type: String - contextPath: LogPoint.Incidents.users.usergroups - description: LogPoint Incidents Users Usergroups + description: LogPoint Incidents Users Usergroups. type: String + arguments: [] - description: Gets LogPoint user's preference such as timezone, date format, etc. name: lp-get-users-preference outputs: @@ -648,6 +654,7 @@ script: - contextPath: LogPoint.User.Preference.hour_format description: LogPoint user's hour format. 
type: String + arguments: [] - description: Gets user's LogPoints. name: lp-get-logpoints outputs: @@ -657,6 +664,7 @@ script: - contextPath: LogPoint.LogPoints.ip description: LogPoint's IP address. type: String + arguments: [] - description: Gets the list of LogPoint repos that can be accessed by the user. name: lp-get-repos outputs: @@ -666,6 +674,7 @@ script: - contextPath: LogPoint.Repos.address description: LogPoint repo address. type: String + arguments: [] - description: Gets devices associated with LogPoint. name: lp-get-devices outputs: @@ -675,6 +684,7 @@ script: - contextPath: LogPoint.Devices.address description: Device IP address. type: String + arguments: [] - description: Gets live search results of the alerts and dashboards. name: lp-get-livesearches outputs: @@ -690,6 +700,7 @@ script: - contextPath: LogPoint.LiveSearches.query description: The live search query. type: String + arguments: [] - arguments: - description: LogPoint search query. This should be the exact query to use to search logs in the LogPoint UI. name: query @@ -703,6 +714,9 @@ script: - description: A comma-separated list of LogPoint repos from which logs are to be fetched. If not provided, it will display logs from all repos. isArray: true name: repos + - defaultValue: '60' + description: LogPoint search timeout in seconds. + name: timeout description: Gets the search ID based on the provided search parameters. name: lp-get-searchid outputs: @@ -719,10 +733,10 @@ script: - contextPath: LogPoint.SearchLogs description: Search results. 
type: String - dockerimage: demisto/python3:3.10.14.90585 + dockerimage: demisto/python3:3.10.14.99865 isfetch: true runonce: false - script: '-' + script: '' subtype: python3 type: python tests: @@ -730,5 +744,4 @@ tests: - LogPoint SIEM Integration - Test Playbook 2 - LogPoint SIEM Integration - Test Playbook 3 defaultmapperin: LogPoint SIEM Integration - Incoming Mapper -defaultclassifier: fromversion: 6.0.0 diff --git a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/README.md b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/README.md index d86a8c1325ee..622b51cdbb9b 100644 --- a/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/README.md +++ b/Packs/LogPoint_SIEM_Integration/Integrations/LogPoint_SIEM_Integration/README.md @@ -17,17 +17,19 @@ This integration was integrated and tested with version 6.7.4 of LogPoint. | **Parameter** | **Description** | **Required** | | --- | --- | --- | - | url | LogPoint URL | True | - | username | LogPoint Username | True | - | apikey | API Key | True | - | insecure | Trust any certificate \(not secure\) | False | - | proxy | Use system proxy settings | False | - | first_fetch | First fetch timestamp (\ \