Skip to content

Commit

Permalink
Merge branch 'contrib/SEKOIA-IO_Add/SekoiaXDR' into Add/SekoiaXDR
Browse files Browse the repository at this point in the history
  • Loading branch information
TOUFIKIzakarya authored Jul 11, 2024
2 parents 09f648b + 9a79774 commit 5866479
Show file tree
Hide file tree
Showing 77 changed files with 5,973 additions and 1,487 deletions.
18 changes: 7 additions & 11 deletions Packs/Akamai_SIEM/Integrations/Akamai_SIEM/Akamai_SIEM.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,11 @@ def get_events_with_offset(
if offset:
params["offset"] = offset
else:
params["from"] = from_epoch
params["from"] = int(from_epoch)
raw_response: str = self._http_request(
method='GET',
url_suffix=f'/{config_ids}',
params=assign_params(**params),
params=params,
resp_type='text',
)
events: list[dict] = [json.loads(e) for e in raw_response.split('\n') if e]
Expand Down Expand Up @@ -396,16 +396,13 @@ def fetch_events_command(
(list[dict], str, int, str): events, new offset, total number of events fetched, and new last_run time to set.
"""
total_events_count = 0
new_from_time = ""
if not (from_epoch := ctx.get("last_run_time")):
from_epoch, _ = parse_date_range(date_range=fetch_time, date_format='%s')
from_epoch, _ = parse_date_range(date_range=fetch_time, date_format='%s')
offset = ctx.get("offset")
while total_events_count < int(fetch_limit):
demisto.info(f"Preparing to get events with {offset=}, {from_epoch=}, and {fetch_limit=}")
events, offset = client.get_events_with_offset(config_ids, offset, FETCH_EVENTS_PAGE_SIZE, from_epoch)
if not events:
demisto.info("Didn't receive any events, breaking.")
offset = None
break
for event in events:
try:
Expand All @@ -423,10 +420,9 @@ def fetch_events_command(
policy_id = event.get('attackData', {}).get('policyId', "")
demisto.debug(f"Couldn't decode event with {config_id=} and {policy_id=}, reason: {e}")
total_events_count += len(events)
new_from_time = str(max([int(event.get('httpMessage', {}).get('start')) for event in events]) + 1)
demisto.info(f"Got {len(events)} events, and {offset=}")
yield events, offset, total_events_count, new_from_time
yield [], offset, total_events_count, new_from_time or from_epoch
yield events, offset, total_events_count
yield [], offset, total_events_count


def decode_url(headers: str) -> dict:
Expand Down Expand Up @@ -483,7 +479,7 @@ def main(): # pragma: no cover
demisto.incidents(incidents)
demisto.setLastRun(new_last_run)
elif command == "fetch-events":
for events, offset, total_events_count, new_from_time in fetch_events_command( # noqa: B007
for events, offset, total_events_count in fetch_events_command( # noqa: B007
client,
params.get("fetchTime"),
int(params.get("fetchLimit", 20)),
Expand All @@ -492,7 +488,7 @@ def main(): # pragma: no cover
):
if events:
send_events_to_xsiam(events, VENDOR, PRODUCT, should_update_health_module=False)
set_integration_context({"offset": offset, "last_run_time": new_from_time})
set_integration_context({"offset": offset})
demisto.updateModuleHealth({'eventsPulled': (total_events_count or 0)})
else:
human_readable, entry_context, raw_response = commands[command](client, **demisto.args())
Expand Down
16 changes: 7 additions & 9 deletions Packs/Akamai_SIEM/Integrations/Akamai_SIEM/Akamai_SIEM_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def test_fetch_events_command__sanity(self, client, mocker):
])
total_events_count = 0

for events, offset, total_events_count, _ in Akamai_SIEM.fetch_events_command(client, # noqa: B007
for events, offset, total_events_count in Akamai_SIEM.fetch_events_command(client, # noqa: B007
'3 days',
220,
'',
Expand All @@ -184,11 +184,11 @@ def test_fetch_events_command__no_results(self, client, requests_mock):
last_offset = "11111"
requests_mock.get(f'{BASE_URL}/50170?limit={size}&offset={last_offset}', text=SEC_EVENTS_EMPTY_TXT)

for _, offset, total_events_count, _ in Akamai_SIEM.fetch_events_command(client, '12 hours', 6, # noqa: B007
for _, offset, total_events_count in Akamai_SIEM.fetch_events_command(client, '12 hours', 6, # noqa: B007
'50170', {"offset": last_offset}):
last_offset = offset
assert total_events_count == 0
assert not last_offset
assert last_offset == "318d8"

def test_fetch_events_command__limit_is_smaller_than_page_size(self, client, requests_mock, mocker):
"""
Expand All @@ -209,14 +209,13 @@ def test_fetch_events_command__limit_is_smaller_than_page_size(self, client, req
requests_mock.get(f'{BASE_URL}/50170?limit=6&from=1575966002&offset=218d9', text=SEC_EVENTS_TXT)
requests_mock.get(f'{BASE_URL}/50170?limit=6&from=1575966002&offset=318d8', text=SEC_EVENTS_EMPTY_TXT)

for _, offset, total_events_count, new_from_time in Akamai_SIEM.fetch_events_command(client, # noqa: B007
for _, offset, total_events_count in Akamai_SIEM.fetch_events_command(client, # noqa: B007
'12 hours',
4, '50170',
{}):
last_offset = offset
assert total_events_count == 6
assert last_offset == "218d9"
assert new_from_time == '1576002514'

def test_fetch_events_command__limit_is_higher_than_page_size(self, client, requests_mock, mocker):
"""
Expand All @@ -237,16 +236,15 @@ def test_fetch_events_command__limit_is_higher_than_page_size(self, client, requ
requests_mock.get(f'{BASE_URL}/50170?limit=6&offset=218d9', text=SEC_EVENTS_TXT)
requests_mock.get(f'{BASE_URL}/50170?limit=6&offset=318d8', text=SEC_EVENTS_EMPTY_TXT)

for _, offset, total_events_count, new_from_time in Akamai_SIEM.fetch_events_command(client, # noqa: B007
for _, offset, total_events_count in Akamai_SIEM.fetch_events_command(client, # noqa: B007
'12 hours',
20,
'50170',
{}
):
last_offset = offset
assert total_events_count == 8
assert not last_offset
assert new_from_time == '1576002508'
assert last_offset == "318d8"

def test_fetch_events_command__limit_reached(self, client, requests_mock, mocker):
"""
Expand All @@ -267,7 +265,7 @@ def test_fetch_events_command__limit_reached(self, client, requests_mock, mocker
requests_mock.get(f'{BASE_URL}/50170?limit=2&from=1575966002', text=SEC_EVENTS_TWO_RESULTS_TXT)
requests_mock.get(f'{BASE_URL}/50170?limit=2&offset=117d9', text=SEC_EVENTS_TXT)

for _, offset, total_events_count, _ in Akamai_SIEM.fetch_events_command(client, # noqa: B007
for _, offset, total_events_count in Akamai_SIEM.fetch_events_command(client, # noqa: B007
'12 hours',
2,
'50170',
Expand Down
6 changes: 6 additions & 0 deletions Packs/Akamai_SIEM/ReleaseNotes/1_1_5.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@

#### Integrations

##### Akamai WAF SIEM

- Fixed an issue where the ***fetch-events*** command caused duplicated and missing events.
2 changes: 1 addition & 1 deletion Packs/Akamai_SIEM/pack_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "Akamai WAF SIEM",
"description": "Use the Akamai WAF SIEM integration to retrieve security events from Akamai Web Application Firewall (WAF) service.",
"support": "xsoar",
"currentVersion": "1.1.4",
"currentVersion": "1.1.5",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
Expand Down
Empty file.
10 changes: 10 additions & 0 deletions Packs/AzureResourceGraph/.secrets-ignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
https://management.azure.com
https://xsoar.pan.dev*
https://portal.azure.com
https://login.microsoftonline.com'
AzureResourceGraphClient
Microsoft.ResourceGraph/*
"microsoft.network/*
azure-rg
Query
11.22.33.44
Loading

0 comments on commit 5866479

Please sign in to comment.