Skip to content

Commit 52e8eb8

Browse files
Sumit Tamgale authored and copybara-github committed
Internal change
PiperOrigin-RevId: 485118204
1 parent cce11bc commit 52e8eb8

2 files changed

Lines changed: 80 additions & 12 deletions

File tree

detect/v2/stream_detection_alerts.py

Lines changed: 26 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -182,15 +182,18 @@ def callback_slack_webhook(detection_batch: DetectionBatch):
182182
for detection in detections:
183183
# detection["detection"] is always a list that has one element.
184184
meta = detection["detection"][0]
185-
detection_metadatas.append(
186-
tuple((meta["ruleName"], meta["ruleId"], meta["ruleVersion"])))
185+
# ruleVersion is only populated for RULE_DETECTION type detections.
186+
rule_info = tuple((meta["ruleName"], meta["ruleId"], meta["ruleVersion"]
187+
)) if detection["type"] == "RULE_DETECTION" else tuple(
188+
(meta["ruleName"], meta["ruleId"]))
189+
detection_metadatas.append(rule_info)
187190

188191
for detection_metadata, count in collections.Counter(
189192
detection_metadatas).items():
190-
report_lines.append(
191-
f"\t{count} detections from Rule `{detection_metadata[0]}`" +
192-
f" (Rule ID `{detection_metadata[1]}`," +
193-
f" Version ID `{detection_metadata[2]}`)")
193+
line = f"\t{count} detections from Rule `{detection_metadata[0]}`" + f" (Rule ID `{detection_metadata[1]}`,"
194+
if len(detection_metadata) >= 3:
195+
line = line + f" Version ID `{detection_metadata[2]}`)"
196+
report_lines.append(line)
194197

195198
if batch_size > MAX_BATCH_SIZE_TO_REPORT_IN_DETAIL:
196199
# Avoid flooding our output channels.
@@ -207,8 +210,8 @@ def callback_slack_webhook(detection_batch: DetectionBatch):
207210
for idx, detection in enumerate(detections):
208211
report_lines.append(f"{idx})")
209212

210-
# This for loop includes rule name, rule ID, version ID,
211-
# rule type, and fields.
213+
# This for loop includes rule name, rule ID, rule type, rule version,
214+
# rule set and other fields.
212215
for meta_key, meta_value in detection["detection"][0].items():
213216
report_lines.append(f"\t{meta_key}: {meta_value}")
214217
report_lines.append(f"\tTime Window: {detection['timeWindow']}")
@@ -298,7 +301,7 @@ def stream_detection_alerts(
298301
The contents of a detection follow this format:
299302
{
300303
"id": "de_<UUID>",
301-
"type": "RULE_DETECTION",
304+
"type": "RULE_DETECTION"/"GCTI_FINDING",
302305
"createdTime": "yyyy-mm-ddThh:mm:ssZ",
303306
"detectionTime": "yyyy-mm-ddThh:mm:ssZ",
304307
"timeWindow": {
@@ -323,8 +326,9 @@ def stream_detection_alerts(
323326
],
324327
"detection": [ <-- this is always a list that has one element.
325328
{
326-
"ruleId": "ru_<UUID>",
329+
"ruleId": "ru_<UUID>"/"ur_ruleID",
327330
"ruleName": "<rule_name>",
331+
// ruleVersion is only populated for RULE_DETECTION type detections.
328332
"ruleVersion": "ru_<UUID>@v_<seconds>_<nanoseconds>",
329333
"urlBackToProduct": "<URL>",
330334
"alertState": "ALERTING"/"NOT_ALERTING",
@@ -334,9 +338,20 @@ def stream_detection_alerts(
334338
"key": "<field name>",
335339
"value": "<field value>"
336340
}
337-
]
341+
],
342+
// Following fields are only populated for "GCTI_FINDING" type
343+
// detections.
344+
"summary": "Rule Detection",
345+
"ruleSet": "<rule set ID>",
346+
"ruleSetDisplayName": "<rule set display name>",
347+
"description": "<rule description>",
348+
"severity": "INFORMATIONAL"/"LOW"/"HIGH"
338349
},
339350
],
351+
// Following fields are only populated for "GCTI_FINDING" type
352+
// detections.
353+
"lastUpdatedTime": "yyyy-mm-ddThh:mm:ssZ",
354+
"tags": ["<tag1>", "<tag2>", ...]
340355
}
341356
342357
Args:

detect/v2/stream_detection_alerts_test.py

Lines changed: 54 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -169,13 +169,62 @@ def tests_happy_path(self, mock_session, mock_init_session, mock_sleep):
169169
}],
170170
}
171171

172+
mock_uppercase_detection_template = {
173+
"id":
174+
"PLACEHOLDER", # To be replaced with unique ID.
175+
"type":
176+
"GCTI_FINDING",
177+
"createdTime":
178+
"2020-11-05T12:00:00Z",
179+
"detectionTime":
180+
"2020-11-05T01:00:00Z",
181+
"timeWindow": {
182+
"startTime": "2020-11-05T00:00:00Z",
183+
"endTime": "2020-11-05T01:00:00Z",
184+
},
185+
"lastUpdatedTime": "2020-11-05T12:00:00Z",
186+
"tags": ["TA0005", "TA0003", "T1098.004"],
187+
"detection": [{
188+
"ruleId":
189+
"ur_ttp_GCP__GlobalSSHKeys_Added",
190+
"ruleName":
191+
"GCP Global SSH Keys",
192+
"urlBackToProduct":
193+
"https://chronicle.security",
194+
"alertState":
195+
"ALERTING",
196+
"ruleType":
197+
"SINGLE_EVENT",
198+
"detectionFields": [{
199+
"key": "fieldName",
200+
"value": "fieldValue",
201+
}],
202+
"summary":
203+
"Rule Detection",
204+
"ruleSet":
205+
"11c505d4-b424-65e3-d918-1a81232cc76b",
206+
"ruleSetDisplayName":
207+
"Admin Action",
208+
"description":
209+
"Identifies instances of project-wide SSH keys being added "
210+
"where there were previously none.",
211+
"severity":
212+
"LOW"
213+
}],
214+
}
215+
172216
# Prepare string representations of detection batches that can
173217
# passed to callback functions.
174218
mock_detections = []
175219
for i in range(7):
176220
mock_detection = mock_detection_template.copy()
177221
mock_detection["id"] = str(i) # Not a valid ID format, just for tests.
178222
mock_detections.append(mock_detection)
223+
mock_uppercase_detections = []
224+
for i in range(5):
225+
mock_detection = mock_uppercase_detection_template.copy()
226+
mock_detection["id"] = str(i+7)
227+
mock_uppercase_detections.append(mock_detection)
179228

180229
mock_detection_batches = [
181230
# Normal stream responses, which will all be passed to the callback.
@@ -184,10 +233,14 @@ def tests_happy_path(self, mock_session, mock_init_session, mock_sleep):
184233
tuple(([], "2020-12-06T22:39:55.633014925Z")),
185234
tuple(([mock_detections[:3]], "2020-12-07T22:39:55.633014925Z")),
186235
tuple(([], "2020-12-08T22:39:55.633014925Z")),
187-
tuple(([mock_detections[3:4]], "2020-12-09T22:39:55.633014925Z")),
236+
tuple(([mock_detections[3:4], mock_uppercase_detections[0:3]],
237+
"2020-12-09T22:39:55.633014925Z")),
188238
tuple(([], "2020-12-10T22:39:55.633014925Z")),
189239
tuple(([mock_detections[4:]], "2020-12-11T22:39:55.633014925Z")),
190240
tuple(([], "2020-12-12T22:39:55.633014925Z")),
241+
tuple(([mock_uppercase_detections[3:]],
242+
"2020-12-12T22:39:55.633014925Z")),
243+
tuple(([], "2020-12-13T22:39:55.633014925Z")),
191244
]
192245

193246
# Serialize detection batches into dumps that will be sent as incremental

0 commit comments

Comments (0)