PAPP-35103: Add CrowdStrike incident ingestion
grokas-splunk committed Dec 6, 2024
1 parent a806930 commit 57ab553
Showing 3 changed files with 209 additions and 8 deletions.
16 changes: 11 additions & 5 deletions crowdstrikeoauthapi.json
@@ -75,33 +75,39 @@
"description": "Maximum events to get while POLL NOW",
"default": 2000
},
"collate": {
"ingest_incidents": {
"data_type": "boolean",
"order": 7,
"description": "Should ingest incidents in addition to detections during scheduled and interval polling",
"default": false
},
"collate": {
"data_type": "boolean",
"order": 8,
"description": "Merge containers for hostname and eventname",
"default": true
},
"merge_time_interval": {
"data_type": "numeric",
"order": 8,
"order": 9,
"description": "Merge same containers within specified seconds",
"default": 0
},
"max_crlf": {
"data_type": "numeric",
"order": 9,
"order": 10,
"default": 50,
"description": "Maximum allowed continuous blank lines"
},
"preprocess_script": {
"data_type": "file",
"description": "Script with functions to preprocess containers and artifacts",
"order": 10
"order": 11
},
"detonate_timeout": {
"data_type": "numeric",
"description": "Timeout for detonation result in minutes (Default: 15 minutes)",
"order": 11,
"order": 12,
"default": 15
}
},
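Note: the new `ingest_incidents` flag is opt-in per asset. A minimal sketch (illustrative values only, not a real asset) of how the connector would see these settings via `self.get_config()`:

# Illustrative asset-config values; keys mirror the JSON above.
config = {
    "max_events": 2000,          # cap reused as the incident 'limit' below
    "ingest_incidents": True,    # new flag added by this commit (defaults to False)
    "collate": True,             # reordered from 7 to 8
    "merge_time_interval": 0,
    "max_crlf": 50,
}

# The connector reads the new flag with a safe default, as in
# _validate_on_poll_config_params:
ingest_incidents = config.get("ingest_incidents", False)
print(ingest_incidents)  # True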
63 changes: 60 additions & 3 deletions crowdstrikeoauthapi_connector.py
Expand Up @@ -37,6 +37,7 @@
from requests_toolbelt.multipart.encoder import MultipartEncoder

import parse_cs_events as events_parser
+import parse_cs_incidents as incidents_parser

# THIS Connector imports
from crowdstrikeoauthapi_consts import *
@@ -1738,7 +1739,7 @@ def _handle_create_session(self, param):
        params = {
            "device_id": param["device_id"],
            "origin": "phantom",
-            "queue_offline": param.get("queue_offline", False)  # default to False to maintain original behavior
+            "queue_offline": param.get("queue_offline", False),  # default to False to maintain original behavior
        }

        ret_val, resp_json = self._make_rest_call_helper_oauth2(action_result, CROWDSTRIKE_RTR_SESSION_ENDPOINT, json_data=params, method="post")
@@ -2541,6 +2542,8 @@ def _validate_on_poll_config_params(self, action_result, config):
            action_result, config.get("merge_time_interval", 0), "merge_time_interval", allow_zero=True
        )

+        ingest_incidents = config.get("ingest_incidents", False)
+
        if self.is_poll_now():
            # Manual Poll Now
            try:
@@ -2559,7 +2562,7 @@ def _validate_on_poll_config_params(self, action_result, config):
            except Exception as ex:
                max_events = "{}: {}".format(DEFAULT_EVENTS_COUNT, self._get_error_message_from_exception(ex))

-        return max_crlf, merge_time_interval, max_events
+        return max_crlf, merge_time_interval, max_events, ingest_incidents

    def _on_poll(self, param):

@@ -2575,7 +2578,7 @@ def _on_poll(self, param):

        config = self.get_config()

-        max_crlf, merge_time_interval, max_events = self._validate_on_poll_config_params(action_result, config)
+        max_crlf, merge_time_interval, max_events, ingest_incidents = self._validate_on_poll_config_params(action_result, config)

        if max_crlf is None or merge_time_interval is None or max_events is None:
            return action_result.get_status()
@@ -2703,6 +2706,60 @@ def _on_poll(self, param):
            last_offset_id = last_event["metadata"]["offset"]
            self._state["last_offset_id"] = last_offset_id + 1

        # Handle incident ingestion if enabled
        if ingest_incidents:
            self.save_progress("Starting incident ingestion...")
            try:
                # Get incidents
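                # Ascending sort keeps the oldest unprocessed incidents inside the 'limit' window, so nothing is skipped between polls.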
params = {"limit": max_events, "sort": "modified_timestamp.asc"}

if not self.is_poll_now():
try:
last_ingestion_time = self._state.get("last_incident_timestamp", "")
params["filter"] = f"modified_timestamp:>'{last_ingestion_time}'"
except Exception as e:
self.debug_print(f"Error getting last incident timestamp, starting from epoch: {str(e)}")

self.send_progress(f"Fetching incidents with filter: {params}")

# Get incident IDs
incident_ids = self._get_ids(action_result, CROWDSTRIKE_LIST_INCIDENTS_ENDPOINT, params)
if incident_ids is None:
return action_result.get_status()

if not incident_ids:
self.save_progress("No incidents found")
                    return phantom.APP_SUCCESS

                # Get incident details
                ret_val, response = self._make_rest_call_helper_oauth2(
                    action_result, CROWDSTRIKE_GET_INCIDENT_DETAILS_ID_ENDPOINT, json_data={"ids": incident_ids}, method="post"
                )

                if phantom.is_fail(ret_val):
                    return action_result.get_status()

                incidents = response.get("resources", [])

                if incidents:
                    # Update timestamp for next poll if not poll_now
                    if not self.is_poll_now():
                        latest_timestamp = max(incident.get("modified_timestamp", "") for incident in incidents)
                        self._state["last_incident_timestamp"] = latest_timestamp

                    # Process incidents through parser
                    self.save_progress(f"Processing {len(incidents)} incidents...")
                    incident_results = incidents_parser.process_incidents(incidents)
                    self._save_results(incident_results, param)
                    self.save_progress("Successfully processed incidents")
                else:
                    self.save_progress("No incidents found in response")

            except Exception as e:
                error_message = self._get_error_message_from_exception(e)
                self.save_progress(f"Error ingesting incidents: {error_message}")
                return action_result.set_status(phantom.APP_ERROR, f"Error ingesting incidents: {error_message}")

        return action_result.set_status(phantom.APP_SUCCESS)

    def _handle_list_processes(self, param):
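Note: the incremental logic above hinges on the `last_incident_timestamp` checkpoint kept in `self._state`. A standalone sketch, with fabricated timestamps, of how the FQL filter evolves between scheduled polls:

# 'state' stands in for the connector's persisted self._state; values are made up.
state = {}

# First scheduled poll: no checkpoint yet, so the filter matches every incident.
last = state.get("last_incident_timestamp", "")
print(f"modified_timestamp:>'{last}'")  # modified_timestamp:>''

# Suppose the poll returned these incidents (fabricated):
incidents = [
    {"incident_id": "inc:aaa", "modified_timestamp": "2024-12-01T10:00:00Z"},
    {"incident_id": "inc:bbb", "modified_timestamp": "2024-12-02T09:30:00Z"},
]

# Checkpoint on the newest modified_timestamp, exactly as _on_poll does.
state["last_incident_timestamp"] = max(i.get("modified_timestamp", "") for i in incidents)

# The next poll only asks for incidents modified after the checkpoint.
last = state.get("last_incident_timestamp", "")
print(f"modified_timestamp:>'{last}'")  # modified_timestamp:>'2024-12-02T09:30:00Z'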
138 changes: 138 additions & 0 deletions parse_cs_incidents.py
@@ -0,0 +1,138 @@
# File: parse_cs_incidents.py
#
# Copyright (c) 2019-2024 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.

import sys

from bs4 import UnicodeDammit

_container_common = {
    "description": "Container added by Phantom",
    "run_automation": False,  # Don't run any playbooks when this container is added
}

_artifact_common = {
    "label": "incident",
    "type": "network",
    "description": "Artifact added by Phantom",
    "run_automation": False,  # Don't run any playbooks when this artifact is added
}

_host_artifact_common = {
    "label": "host",
    "type": "host",
    "description": "Artifact added by Phantom",
    "run_automation": False,  # Don't run any playbooks when this artifact is added
}


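# Map CrowdStrike's numeric fine_score to a Phantom severity bucket.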
def _get_incident_severity(fine_score):
    if fine_score >= 80:
        return "high"
    elif fine_score >= 60:
        return "medium"
    return "low"


def _create_incident_artifact(incident):
    artifact = dict(_artifact_common)
    artifact["name"] = "Incident Details"
    artifact["source_data_identifier"] = incident.get("incident_id")
    artifact["severity"] = _get_incident_severity(incident.get("fine_score", 0))

    # Key CEF mapping
    artifact["cef"] = {
        "status": incident.get("status"),
        "name": incident.get("name"),
        "description": incident.get("description"),
        "severity": incident.get("fine_score"),
        "state": incident.get("state"),
        "tags": incident.get("tags", []),
        "created_time": incident.get("created"),
        "modified_time": incident.get("modified_timestamp"),
        "incident_id": incident.get("incident_id"),
        "tactics": incident.get("tactics", []),
        "techniques": incident.get("techniques", []),
        "objectives": incident.get("objectives", []),
    }

    return artifact


def _create_host_artifact(host, incident):
    artifact = dict(_host_artifact_common)
    artifact["name"] = "Affected Host"
    artifact["source_data_identifier"] = f"{incident.get('incident_id')}_{host.get('device_id')}"
    artifact["severity"] = _get_incident_severity(incident.get("fine_score", 0))

    # Key CEF mapping
    artifact["cef"] = {
        "hostname": host.get("hostname"),
        "host_id": host.get("device_id"),
        "local_ip": host.get("local_ip"),
        "external_ip": host.get("external_ip"),
        "platform": host.get("platform_name"),
        "os_version": host.get("os_version"),
        "mac_address": host.get("mac_address"),
        "system_manufacturer": host.get("system_manufacturer"),
        "last_seen": host.get("last_seen"),
        "status": host.get("status"),
    }

    return artifact


def process_incidents(incidents):
    results = []

    for incident in incidents:
        ingest_event = dict()
        results.append(ingest_event)

        # Incident
        artifacts = [_create_incident_artifact(incident)]

        # Host
        for host in incident.get("hosts", []):
            artifacts.append(_create_host_artifact(host, incident))

        # Container
        container = dict()
        ingest_event["container"] = container
        container.update(_container_common)

        # Guard against a present-but-empty hosts list when building the name.
        first_host = (incident.get("hosts") or [{}])[0]
        if sys.version_info[0] == 2:
            container["name"] = "{0} on {1} at {2}".format(
                UnicodeDammit(incident.get("name", "Unnamed Incident")).unicode_markup.encode("utf-8"),
                UnicodeDammit(first_host.get("hostname", "Unknown Host")).unicode_markup.encode("utf-8"),
                incident.get("start", "Unknown Time"),
            )
        else:
            container["name"] = "{0} on {1} at {2}".format(
                incident.get("name", "Unnamed Incident"),
                first_host.get("hostname", "Unknown Host"),
                incident.get("start", "Unknown Time"),
            )

        # Set container properties
        container["description"] = incident.get("description", "No description available")
        container["source_data_identifier"] = incident.get("incident_id")
        container["severity"] = _get_incident_severity(incident.get("fine_score", 0))

        ingest_event["artifacts"] = artifacts

    return results

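Note: a hedged usage sketch of `process_incidents`; the incident dict below is fabricated and uses only keys the module actually reads:

from parse_cs_incidents import process_incidents

# Fabricated sample; field names mirror the .get() calls above.
sample_incident = {
    "incident_id": "inc:abc123:19001",
    "name": "Lateral movement detected",
    "description": "Suspicious activity across hosts",
    "fine_score": 75,
    "start": "2024-12-01T10:00:00Z",
    "modified_timestamp": "2024-12-01T10:05:00Z",
    "hosts": [{"device_id": "d1", "hostname": "WIN-SRV01", "local_ip": "10.0.0.5"}],
}

results = process_incidents([sample_incident])
event = results[0]
print(event["container"]["name"])      # Lateral movement detected on WIN-SRV01 at 2024-12-01T10:00:00Z
print(event["container"]["severity"])  # medium  (fine_score 75 falls in the >= 60 bucket)
print(len(event["artifacts"]))         # 2 -> one incident artifact plus one host artifact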