Merge branch 'contrib/SEKOIA-IO_Add/SekoiaXDR' into Add/SekoiaXDR
TOUFIKIzakarya authored Nov 18, 2024
Commit 8c2bdd0 (2 parents: 59ec574, 6411782)
Showing 428 changed files with 7,262 additions and 643 deletions.
12 changes: 6 additions & 6 deletions .github/content_roles.json
@@ -6,16 +6,16 @@
"TIM_REVIEWER": "The GitHub username for TIM reviews owner"
},
"CONTRIBUTION_REVIEWERS": [
"tcarmeli1",
"yaakovpraisler",
"aaron1535"
"MLainer1",
"YairGlik",
"amshamah419"
],
"CONTRIBUTION_TL": "jbabazadeh",
"CONTRIBUTION_TL": "samuelFain",
"CONTRIBUTION_SECURITY_REVIEWER": ["idovandijk"],
"ON_CALL_DEVS": [
"sshuker",
"yhayun"
"sberman"
],
"DOC_REVIEWER": "richardbluestone",
"DOC_REVIEWER": "ShirleyDenkberg",
"TIM_REVIEWER": "MLainer1"
}
7 changes: 7 additions & 0 deletions Packs/AbuseDB/ReleaseNotes/1_0_32.md
@@ -0,0 +1,7 @@

#### Scripts

##### AbuseIPDBPopulateIndicators


- Updated the Docker image to: *demisto/python3:3.11.10.115186*.
@@ -28,4 +28,4 @@ runas: DBotWeakRole
tests:
- AbuseIPDB PopulateIndicators Test
fromversion: 5.0.0
dockerimage: demisto/python3:3.11.10.113941
dockerimage: demisto/python3:3.11.10.115186
2 changes: 1 addition & 1 deletion Packs/AbuseDB/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AbuseIPDB",
"description": "Central repository to report and identify IP addresses that have been associated with malicious activity online. Check the Detailed Information section for more information on how to configure the integration.",
"support": "xsoar",
"currentVersion": "1.0.31",
"currentVersion": "1.0.32",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
7 changes: 7 additions & 0 deletions Packs/Active_Directory_Query/ReleaseNotes/1_6_41.md
@@ -0,0 +1,7 @@

#### Scripts

##### SendEmailToManager


- Updated the Docker image to: *demisto/python3:3.11.10.115186*.
@@ -42,4 +42,4 @@ dependson:
timeout: 0s
tests:
- No test
dockerimage: demisto/python3:3.11.10.113941
dockerimage: demisto/python3:3.11.10.115186
2 changes: 1 addition & 1 deletion Packs/Active_Directory_Query/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Active Directory Query",
"description": "Active Directory Query integration enables you to access and manage Active Directory objects (users, contacts, and computers).",
"support": "xsoar",
"currentVersion": "1.6.40",
"currentVersion": "1.6.41",
"author": "Cortex XSOAR",
"url": "",
"email": "",
17 changes: 12 additions & 5 deletions Packs/Akamai_SIEM/Integrations/Akamai_SIEM/Akamai_SIEM.py
@@ -103,9 +103,12 @@ def get_events_with_offset(
'limit': limit
}
if offset:
demisto.info(f"received {offset=} will run an offset based request.")
params["offset"] = offset
else:
params["from"] = int(from_epoch)
from_param = int(from_epoch)
params["from"] = from_param
demisto.info(f"did not receive an offset. will run a time based request with {from_param=}")
raw_response: str = self._http_request(
method='GET',
url_suffix=f'/{config_ids}',
@@ -427,7 +430,7 @@ def fetch_events_command(
total_events_count = 0
from_epoch, _ = parse_date_range(date_range=fetch_time, date_format='%s')
offset = ctx.get("offset")
hashed_events_from_previous_run = ctx.get("hashed_events_from_previous_run", set())
hashed_events_from_previous_run = set(ctx.get("hashed_events_from_previous_run", set()))
while total_events_count < int(fetch_limit):
demisto.info(f"Preparing to get events with {offset=}, {page_size=}, and {fetch_limit=}")
events, offset = client.get_events_with_offset(config_ids, offset, page_size, from_epoch)
@@ -452,7 +455,7 @@
config_id = event.get('attackData', {}).get('configId', "")
policy_id = event.get('attackData', {}).get('policyId', "")
demisto.debug(f"Couldn't decode event with {config_id=} and {policy_id=}, reason: {e}")
demisto.info("Preparing to deduplicate events, currently got {len(events)} events.")
demisto.info(f"Preparing to deduplicate events, currently got {len(events)} events.")
deduped_events, hashed_events_from_current_run = dedup_events(hashed_events_mapping, hashed_events_from_previous_run)
total_events_count += len(deduped_events)
demisto.info(f"After deduplicate events, Got {len(deduped_events)} events, and {offset=}")
@@ -518,7 +521,7 @@ def main(): # pragma: no cover
elif command == "fetch-events":
page_size = int(params.get("page_size", FETCH_EVENTS_PAGE_SIZE))
limit = int(params.get("fetchLimit", 300000))
for events, offset, total_events_count, hashed_events_from_previous_run in fetch_events_command( # noqa: B007
for events, offset, total_events_count, hashed_events_from_current_run in fetch_events_command( # noqa: B007
client,
"5 minutes",
fetch_limit=limit,
@@ -529,11 +532,15 @@
if events:
demisto.info(f"Sending events to xsiam with latest event time is: {events[-1]['_time']}")
send_events_to_xsiam(events, VENDOR, PRODUCT, should_update_health_module=False)
set_integration_context({"offset": offset, "hashed_events_from_previous_run": hashed_events_from_previous_run})
set_integration_context({"offset": offset,
"hashed_events_from_previous_run": list(hashed_events_from_current_run)})
demisto.updateModuleHealth({'eventsPulled': (total_events_count or 0)})
next_run = {}
if total_events_count >= limit:
demisto.info(f"got at least {limit} events this interval - will automatically trigger next run.")
next_run["nextTrigger"] = "0"
else:
demisto.info(f"Got less than {limit} events this interval - will not trigger next run automatically.")
demisto.setLastRun(next_run)

else:
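To make the fetch-events pattern in the diff above easier to follow, here is a loose, self-contained sketch of the same ideas: offset-based paging, hash-based deduplication against the previous run, and a JSON-serializable checkpoint with an immediate re-trigger flag. This is an editorial illustration rather than code from the commit; `get_page` stands in for the Akamai client request, and the returned `checkpoint` for the `set_integration_context` and `setLastRun` calls.

```python
import hashlib
import json


def _hash_event(raw_event: str) -> str:
    # Stable fingerprint used to recognize events already sent in an earlier run.
    return hashlib.sha256(raw_event.encode("utf-8")).hexdigest()


def fetch_interval(get_page, previous_checkpoint: dict, fetch_limit: int, page_size: int) -> dict:
    """Pull up to fetch_limit events per interval, skipping events seen in the previous run."""
    offset = previous_checkpoint.get("offset")
    previously_seen = set(previous_checkpoint.get("hashed_events_from_previous_run", []))
    hashes_this_run: set = set()
    total_sent = 0
    while total_sent < fetch_limit:
        raw_events, offset = get_page(offset=offset, limit=page_size)
        hashes_this_run = {_hash_event(event) for event in raw_events}
        deduped = [event for event in raw_events if _hash_event(event) not in previously_seen]
        total_sent += len(deduped)
        # ... forward `deduped` to the event collector here ...
        if len(raw_events) < page_size:
            break  # the source has no more data for this interval
    # Sets are not JSON serializable, so the stored checkpoint keeps a list of hashes.
    checkpoint = {
        "offset": offset,
        "hashed_events_from_previous_run": sorted(hashes_this_run),
        # Mirrors the nextTrigger idea: if the limit was reached, re-run immediately.
        "trigger_next_run_immediately": total_sent >= fetch_limit,
    }
    json.dumps(checkpoint)  # would raise TypeError if a set slipped back in
    return checkpoint
```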
2 changes: 1 addition & 1 deletion Packs/Akamai_SIEM/Integrations/Akamai_SIEM/Akamai_SIEM.yml
@@ -220,7 +220,7 @@ script:
- contextPath: IP.Geo.Country
description: The country in which the IP address is located.
type: String
dockerimage: demisto/auth-utils:1.0.0.115527
dockerimage: demisto/auth-utils:1.0.0.116752
isfetch: true
isfetch:marketplacev2: false
isfetchevents: true
@@ -288,6 +288,7 @@ def test_deduplication(self, mocker, client):
- Ensure that the events list returned doesn't include the filtered events and that the length of the list is 247.
- Ensure that on each iteration, the number of events in the hashed events is 50
(even in intervals where some events were deduped).
- Ensure the returned offset and hashed events are json serializable.
"""
num_of_results = 500
page_size = 50
@@ -318,6 +319,8 @@ def test_deduplication(self, mocker, client):
assert total_events_count == 247
for event_not_in_list in events_not_in_list:
assert event_not_in_list not in events
ctx = {"offset": offset, "hashed_events_from_previous_run": list(hashed)}
assert isinstance(json.dumps(ctx), str)


@pytest.mark.parametrize(
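The new assertion above verifies that the offset and event hashes written back to the integration context are JSON serializable. The standalone snippet below (an editorial illustration, not part of the commit) shows why storing a raw `set` was a problem and why the integration now converts it with `list(...)` first:

```python
import json

hashed_events = {"a1b2", "c3d4"}  # set of event fingerprints, as built during dedup

try:
    json.dumps({"offset": "abc123", "hashed_events_from_previous_run": hashed_events})
except TypeError as err:
    print(f"set is rejected: {err}")  # Object of type set is not JSON serializable

# Converting to a list before persisting produces a payload that serializes cleanly
# and round-trips back to a set on the next run.
serialized = json.dumps({"offset": "abc123",
                         "hashed_events_from_previous_run": list(hashed_events)})
restored = set(json.loads(serialized)["hashed_events_from_previous_run"])
assert restored == hashed_events
```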
8 changes: 7 additions & 1 deletion Packs/Akamai_SIEM/Integrations/Akamai_SIEM/README.md
@@ -269,4 +269,10 @@ The proposed solution in that case is to use the two parameters **Fetch limit**
**Fetch limit** is the number of total events we want to retrieve each fetch interval.
**Akamai Page size** is the number of events we want to retrieve each request. Note that the suggested maximum for **Akamai Page size** is 200k.
Meaning that an interval may execute multiple requests and therefore you should configure **Akamai Page size** < **Fetch limit**
You should work to find the balance between them in a way that both the command, and the request won't get any timeout.
You should work to find the balance between them in a way that both the command, and the request won't get any timeout.

### Known limitations

## The config ID can only be configured on one instance:
Due to limitations from Akamai, the config ID can only be configured on one instance on the same machine or on different machines (i.e. the same config ID can't be configured both on dev and prod tenants or twice on the same tenant).
Configuring on multiple machines may lead to duplications or missing events.
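As a concrete illustration of the guidance above (an editorial addition with made-up numbers), the relationship between the two parameters is simply the number of Akamai requests issued per fetch interval:

```python
import math

fetch_limit = 300_000  # total events pulled per fetch interval ("Fetch limit")
page_size = 150_000    # events requested per API call ("Akamai Page size", suggested max 200,000)

requests_per_interval = math.ceil(fetch_limit / page_size)
print(requests_per_interval)  # 2 -> each interval issues at most two Akamai requests
```

Keeping the page size large enough that an interval needs only a few requests, while staying under the suggested 200k, is what keeps both the individual request and the overall command inside their timeouts.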
1 change: 1 addition & 0 deletions Packs/Akamai_SIEM/ReleaseNotes/1_1_6.md
@@ -1,3 +1,4 @@
***WARNING: This version is invalid. Please install a different version.***

#### Integrations

8 changes: 8 additions & 0 deletions Packs/Akamai_SIEM/ReleaseNotes/1_1_7.md
@@ -0,0 +1,8 @@

#### Integrations

##### Akamai WAF SIEM

- Fixed an issue where the **fetch-events** command failed with a docker time out error.
- Documentation and metadata improvements.
- Updated the Docker image to: *demisto/auth-utils:1.0.0.116752*.
2 changes: 1 addition & 1 deletion Packs/Akamai_SIEM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Akamai WAF SIEM",
"description": "Use the Akamai WAF SIEM integration to retrieve security events from Akamai Web Application Firewall (WAF) service.",
"support": "xsoar",
"currentVersion": "1.1.6",
"currentVersion": "1.1.7",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
168 changes: 168 additions & 0 deletions Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.py
@@ -2102,6 +2102,22 @@ def get_cps_change_status(self,
headers=headers,
)

def list_cps_active_certificates(self, contract_id: str,) -> dict:
"""
Lists enrollments with active certificates.
Note that the rate limit for this operation is 10 requests per minute per account.
Args:
contract_id: Specify the contract on which to operate or view.
Returns:
The response provides a list of active certificates
"""
return self._http_request(method="GET",
url_suffix=f'cps/v2/active-certificates?contractId={contract_id}',
headers={"accept": "application/vnd.akamai.cps.active-certificates.v1+json"},
)

def cancel_cps_change(self, change_path: str, account_switch_key: str = "") -> dict:
"""
Cancels a pending change.
@@ -2143,6 +2159,36 @@ def get_cps_enrollment_by_id(self,
headers=headers,
)

# created by D.S.
def list_dns_zones(self):
"""
List Edge DNS Zones
Args:
Returns:
<Response [200]>
"""

return self._http_request(method='Get',
url_suffix='config-dns/v2/zones?showAll=true',
)

# created by D.S.

def list_dns_zone_recordsets(self, zone: str):
"""
List Edge DNS zone recordsets
Args:
zone: string. The name of the zone.
Returns:
<Response [200]>
"""

return self._http_request(method='Get',
url_suffix=f'config-dns/v2/zones/{zone}/recordsets?showAll=true',
)


''' HELPER FUNCTIONS '''

@@ -6038,6 +6084,124 @@ def get_cps_enrollment_by_id_command(client: Client,
return human_readable, context_entry, raw_response


@logger
def list_appsec_config_command(client: Client) -> tuple[str, dict, Union[list, dict]]:
"""
Lists available security configurations.
Args:
client:
Returns:
human readable (markdown format), entry context and raw response
"""

raw_response: dict = client.list_appsec_config()
title = f'{INTEGRATION_NAME} - list application configuration command'
entry_context = raw_response
human_readable_ec = raw_response
context_entry: dict = {
f"{INTEGRATION_CONTEXT_NAME}.AppSecurity": entry_context
}

human_readable = tableToMarkdown(
name=title,
t=human_readable_ec.get("configurations", ""),
removeNull=True,
)
return human_readable, context_entry, raw_response


@logger
def list_dns_zones_command(client: Client) -> tuple[str, dict, Union[list, dict]]:
"""
Lists all zones that the current user has access to manage.
Args:
client:
Returns:
human readable (markdown format), entry context and raw response
"""

raw_response: dict = client.list_dns_zones()
title = f'{INTEGRATION_NAME} - list dns zones command'
entry_context = raw_response
human_readable_ec = raw_response
context_entry: dict = {
f"{INTEGRATION_CONTEXT_NAME}.EdgeDns.Zones": entry_context
}

human_readable = tableToMarkdown(
name=title,
t=human_readable_ec,
removeNull=True,
)
return human_readable, context_entry, raw_response


@logger
def list_dns_zone_recordsets_command(client: Client, zone: str) -> tuple[str, dict, Union[list, dict]]:
"""
Lists all record sets for this Zone. It works only for PRIMARY and SECONDARY zones.
Args:
client:
zone: The name of the zone.
Returns:
human readable (markdown format), entry context and raw response
"""

raw_response: dict = client.list_dns_zone_recordsets(zone)
title = f'{INTEGRATION_NAME} - list dns zones command'
entry_context = raw_response
human_readable_ec = raw_response
context_entry: dict = {
f"{INTEGRATION_CONTEXT_NAME}.EdgeDns.ZoneRecordSets": entry_context
}

human_readable = tableToMarkdown(
name=title,
t=human_readable_ec.get("recordsets"),
removeNull=True,
)
return human_readable, context_entry, raw_response


@logger
def list_cps_active_certificates_command(client: Client,
contract_id: str,
) -> tuple[str, dict, Union[list, dict]]:
"""
lists enrollments with active certificates. Note that the rate limit for this
operation is 10 requests per minute per account.
Args:
client:
contract_id: Unique Identifier of the contract on which to operate or view.
Returns:
human readable (markdown format), entry context and raw response
"""

raw_response: dict = client.list_cps_active_certificates(contract_id=contract_id)

title = f'{INTEGRATION_NAME} - cps list active certificates command'
entry_context = raw_response
human_readable_ec = raw_response
context_entry: dict = {
f"{INTEGRATION_CONTEXT_NAME}.Cps.Active.Certificates.Enrollments": entry_context.get("enrollments")
}

human_readable = tableToMarkdown(
name=title,
t=human_readable_ec.get("enrollments"),
removeNull=True,
)
return human_readable, context_entry, raw_response


''' COMMANDS MANAGER / SWITCH PANEL '''


@@ -6134,6 +6298,10 @@ def main():
f'{INTEGRATION_COMMAND_NAME}-get-cps-change-status': get_cps_change_status_command,
f'{INTEGRATION_COMMAND_NAME}-cancel-cps-change': cancel_cps_change_command,
f'{INTEGRATION_COMMAND_NAME}-get-cps-enrollment-by-id': get_cps_enrollment_by_id_command,
f'{INTEGRATION_COMMAND_NAME}-list-appsec-config': list_appsec_config_command,
f'{INTEGRATION_COMMAND_NAME}-list-dns-zones': list_dns_zones_command,
f'{INTEGRATION_COMMAND_NAME}-list-dns-zone-recordsets': list_dns_zone_recordsets_command,
f'{INTEGRATION_COMMAND_NAME}-list-cps-active-certificates': list_cps_active_certificates_command,
}
try:
readable_output, outputs, raw_response = commands[command](client=client, **demisto.args())
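The new `list_dns_zones` and `list_dns_zone_recordsets` client methods added above wrap Akamai's Edge DNS `config-dns/v2` API. As an editorial aside, not part of the commit, the sketch below shows how those same endpoints could be called directly with `requests` and Akamai's EdgeGrid authentication helper; the API host and credentials are placeholders, and the `edgegrid-python` package and response shapes are assumptions based on Akamai's public documentation.

```python
import requests
from akamai.edgegrid import EdgeGridAuth  # pip install edgegrid-python (assumed available)

BASE_URL = "https://akaa-xxxxxxxx.luna.akamaiapis.net"  # placeholder Akamai API host

session = requests.Session()
session.auth = EdgeGridAuth(
    client_token="akab-client-token-xxxxxxxx",   # placeholder credentials
    client_secret="client-secret-xxxxxxxx",
    access_token="akab-access-token-xxxxxxxx",
)

# Same endpoint the new Client.list_dns_zones method calls.
zones_response = session.get(f"{BASE_URL}/config-dns/v2/zones", params={"showAll": "true"})
zones_response.raise_for_status()
print(zones_response.json())

# Same endpoint as Client.list_dns_zone_recordsets, for one example zone.
recordsets_response = session.get(
    f"{BASE_URL}/config-dns/v2/zones/example.com/recordsets", params={"showAll": "true"}
)
recordsets_response.raise_for_status()
print(recordsets_response.json().get("recordsets", []))
```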
