diff --git a/README.md b/README.md index dd37c73..e6c0055 100644 --- a/README.md +++ b/README.md @@ -57,3 +57,11 @@ python3 -m detect.v2. -h ```shell python3 -m lists. -h ``` + +### Lists API v1alpha + +``` +python -m lists.v1alpha.create_list -h +python -m lists.v1alpha.get_list -h +python -m lists.v1alpha.patch_list -h +``` diff --git a/common/project_id.py b/common/project_id.py new file mode 100644 index 0000000..baedf03 --- /dev/null +++ b/common/project_id.py @@ -0,0 +1,24 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Support for Project ID for v1alpha Chronicle API calls.""" +import argparse + + +def add_argument_project_id(parser: argparse.ArgumentParser): + """Adds a shared command-line argument to all the sample modules.""" + parser.add_argument( + "-p", "--project_id", type=str, required=True, + help="Your BYOP, project id", + ) diff --git a/common/project_instance.py b/common/project_instance.py new file mode 100644 index 0000000..764f501 --- /dev/null +++ b/common/project_instance.py @@ -0,0 +1,28 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Support for Project INSTANCE for v1alpha Chronicle API calls.""" + +import argparse + + +def add_argument_project_instance(parser: argparse.ArgumentParser): + """Adds a shared command-line argument to all the sample modules.""" + parser.add_argument( + "-i", + "--project_instance", + type=str, + required=True, + help="Customer ID for Chronicle instance", + ) diff --git a/common/regions.py b/common/regions.py index 2a90402..9c71bbb 100644 --- a/common/regions.py +++ b/common/regions.py @@ -19,6 +19,26 @@ import argparse +REGION_LIST = ( + "asia-northeast1", + "asia-south1", + "asia-southeast1", + "australia-southeast1", + "eu", + "europe", + "europe-west12", + "europe-west2", + "europe-west3", + "europe-west6", + "europe-west9", + "me-central1", + "me-central2", + "me-west1", + "northamerica-northeast2", + "southamerica-east1", + "us", +) + def add_argument_region(parser: argparse.ArgumentParser): """Adds a shared command-line argument to all the sample modules.""" @@ -28,8 +48,9 @@ def add_argument_region(parser: argparse.ArgumentParser): type=str, required=False, default="us", - choices=("asia-southeast1", "europe", "us"), - help="the region where the customer is located (default: us)") + choices=REGION_LIST, + help="the region where the customer is located (default: us)", + ) def url(base_url: str, region: str) -> str: @@ -37,3 +58,21 @@ def url(base_url: str, region: str) -> str: if region != "us": base_url = base_url.replace("https://", f"https://{region}-") return base_url + + +def url_always_prepend_region(base_url: str, region: str) -> str: + 
"""Returns a regionalized URL. + + Args: + base_url: URL pointing to Chronicle API + region: region in which the target project is located + + Returns: + A string containing a regionalized URL. Unlike the url() function, + this function always prepends region; this function also checks whether + the URL already has the region prefix, and if so, returns the URL unchanged. + v1alpha samples should use this function. + """ + if not base_url.startswith(f"https://{region}-"): + base_url = base_url.replace("https://", f"https://{region}-") + return base_url diff --git a/common/regions_test.py b/common/regions_test.py index 7ec47e9..99ba28a 100644 --- a/common/regions_test.py +++ b/common/regions_test.py @@ -26,13 +26,33 @@ def test_url_asia_southeast1(self): regions.url("https://test", "asia-southeast1"), "https://asia-southeast1-test") - def test_url_europe(self): + def test_url_eu(self): self.assertEqual( - regions.url("https://test", "europe"), "https://europe-test") + regions.url("https://test", "eu"), "https://eu-test") def test_url_us(self): self.assertEqual(regions.url("https://test", "us"), "https://test") + def test_url_always_prepend_region_us(self): + self.assertEqual( + regions.url_always_prepend_region("https://test", "us"), + "https://us-test", + ) + + def test_url_always_prepend_region_e(self): + self.assertEqual( + regions.url_always_prepend_region("https://test", "eu"), + "https://eu-test", + ) + + def test_url_always_prepend_region_twice(self): + url_once = regions.url_always_prepend_region("https://test", "eu") + url_twice = regions.url_always_prepend_region(url_once, "eu") + self.assertEqual( + "https://eu-test", + url_twice, + ) + if __name__ == "__main__": unittest.main() diff --git a/datatap/__init__.py b/datatap/__init__.py new file mode 100644 index 0000000..b00563f --- /dev/null +++ b/datatap/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/datatap/create_datatap.py b/datatap/create_datatap.py new file mode 100644 index 0000000..fa27aca --- /dev/null +++ b/datatap/create_datatap.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for creating a Datatap. 
+
+API reference:
+https://cloud.google.com/chronicle/docs/preview/datatap-config/datatapconfig-api?hl=en#create
+"""
+
+import argparse
+import json
+import sys
+from typing import Any, Mapping
+from typing import Optional
+from typing import Sequence
+
+from google.auth.transport import requests
+
+from common import chronicle_auth
+from common import regions
+
+CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com"
+
+
+def initialize_command_line_args(
+    args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]:
+  """Initializes and checks all the command-line arguments."""
+  parser = argparse.ArgumentParser()
+  chronicle_auth.add_argument_credentials_file(parser)
+  regions.add_argument_region(parser)
+  parser.add_argument(
+      "-n", "--name", type=str, required=True, help="display name")
+  parser.add_argument(
+      "-t",
+      "--topic",
+      type=str,
+      required=True,
+      help="Topic in format projects//topics/")
+  parser.add_argument(
+      "-f",
+      "--filter",
+      type=str,
+      required=True,
+      help="filter Type, Options: ALL_UDM_EVENTS/ALERT_UDM_EVENTS")
+  parser.add_argument(
+      "-sf",
+      "--serialization_format",
+      type=str,
+      required=False,
+      help="serialization Format, Options : MARSHALLED_PROTO/JSON")
+
+  # Sanity check for the filter type.
+  parsed_args = parser.parse_args(args)
+  if parsed_args.filter not in ("ALL_UDM_EVENTS", "ALERT_UDM_EVENTS"):
+    print("Error: filter type must be ALL_UDM_EVENTS or ALERT_UDM_EVENTS")
+    return None
+
+  return parsed_args
+
+
+def create_datatap(http_session: requests.AuthorizedSession, name: str,
+                   topic: str,
+                   filter_type: str,
+                   serialization_format: str,) -> Mapping[str, Sequence[Any]]:
+  """Creates a datatap.
+
+  Args:
+    http_session: Authorized session for HTTP requests.
+    name: name of the config to be created.
+    topic: topicId of the pubsub topic where events should be published.
+    filter_type: The filter type to filter events, e.g., ALL_UDM_EVENTS
+      or ALERT_UDM_EVENTS.
+ serialization_format: The serialization format in which events are + needed, e.g., MARSHALLED_PROTO or JSON. + + Returns: + Information about the newly created data in the form: + { + "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc", + "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "displayName": "tap1", + "filter": "ALL_UDM_EVENTS", + "serializationFormat": "MARSHALLED_PROTO" + "cloudPubsubSink": { + "topic": "projects/sample-project/topics/sample-topic", + } + } + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v1/dataTaps" + + body = { + "displayName": name, + "cloudPubsubSink": { + "topic": topic, + }, + "filter": filter_type, + "serializationFormat": serialization_format + } + + response = http_session.request("POST", url, json=body) + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + cli = initialize_command_line_args() + if not cli: + sys.exit(1) # A sanity check failed. + + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region) + session = chronicle_auth.initialize_http_session(cli.credentials_file) + print( + json.dumps( + create_datatap(session, cli.name, cli.topic, cli.filter, + cli.serialization_format), + indent=2)) diff --git a/datatap/create_datatap_test.py b/datatap/create_datatap_test.py new file mode 100644 index 0000000..7e718d7 --- /dev/null +++ b/datatap/create_datatap_test.py @@ -0,0 +1,105 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "create_datatap" module.""" + +import unittest +import argparse + +from unittest import mock + +from google.auth.transport import requests + +from . import create_datatap + + +class CreateDatatapTest(unittest.TestCase): + + def test_initialize_command_line_args(self): + actual = create_datatap.initialize_command_line_args([ + "--name=sink1", "--topic=projects/sample-project/topics/sample-topic", + "--filter=ALL_UDM_EVENTS", "--serialization_format=JSON" + ]) + self.assertEqual( + actual, + argparse.Namespace( + credentials_file=None, + name="sink1", + topic="projects/sample-project/topics/sample-topic", + filter="ALL_UDM_EVENTS", + serialization_format="JSON", + region="us")) + + def test_initialize_command_line_args_filter_type(self): + actual = create_datatap.initialize_command_line_args([ + "--name=sink1", "--topic=projects/sample-project/topics/sample-topic", + "--filter=INVALID_FILTER" + ]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_filter_missing(self): + with self.assertRaises(SystemExit) as error: + create_datatap.initialize_command_line_args([ + "--name=sink1", + "--topic=projects/sample-project/topics/sample-topic", + ]) + self.assertEqual(error.exception.code, 2) + + def test_initialize_command_line_args_name_missing(self): + with self.assertRaises(SystemExit) as error: + create_datatap.initialize_command_line_args([ + "--topic=projects/sample-project/topics/sample-topic", + "--filter=ALL_UDM_EVENTS", + ]) + self.assertEqual(error.exception.code, 2) + + @mock.patch.object(requests, 
"AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_create_datatap_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + create_datatap.create_datatap(mock_session, "", "", "", "") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_create_datatap(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + name = "tap1" + topic = "projects/sample-project/topics/sample-topic" + filter_type = "ALL_UDM_EVENTS" + serialization_format = "JSON" + expected = { + "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc", + "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "displayName": "tap1", + "filter": "ALL_UDM_EVENTS", + "serializationFormat": "JSON", + "cloudPubsubSink": { + "topic": "projects/sample-project/topics/sample-topic", + } + } + + mock_response.json.return_value = expected + actual = create_datatap.create_datatap(mock_session, name, topic, + filter_type, serialization_format) + self.assertEqual(actual, expected) + + +if __name__ == "__main__": + unittest.main() diff --git a/datatap/delete_datatap.py b/datatap/delete_datatap.py new file mode 100644 index 0000000..a78742e --- /dev/null +++ b/datatap/delete_datatap.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for deleting a Datatap. + +API reference: +https://cloud.google.com/chronicle/docs/preview/datatap-config/datatapconfig-api?hl=en#delete +""" + +import argparse +import json +import sys +from typing import Any, Mapping +from typing import Optional +from typing import Sequence + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def initialize_command_line_args( + args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]: + """Initializes and checks all the command-line arguments.""" + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-id", "--tapId", type=str, required=True, help="tap Id") + + return parser.parse_args(args) + + +def delete_datatap(http_session: requests.AuthorizedSession, + tap_id: str) -> Mapping[str, Sequence[Any]]: + """Deletes the given datatap. + + Args: + http_session: Authorized session for HTTP requests. + tap_id: unique datatap Id returned on Datatap creation. + + Returns: + Empty with 200 if success. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v1/dataTaps/{tap_id}" + + response = http_session.request("DELETE", url) + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + cli = initialize_command_line_args() + if not cli: + sys.exit(1) # A sanity check failed. + + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region) + session = chronicle_auth.initialize_http_session(cli.credentials_file) + print( + json.dumps( + delete_datatap(session, cli.tapId), + indent=2)) diff --git a/datatap/delete_datatap_test.py b/datatap/delete_datatap_test.py new file mode 100644 index 0000000..2f4b762 --- /dev/null +++ b/datatap/delete_datatap_test.py @@ -0,0 +1,65 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "delete_datatap" module.""" + +import unittest +import argparse + +from unittest import mock + +from google.auth.transport import requests + +from . 
import delete_datatap + + +class DeleteDatatapTest(unittest.TestCase): + + def test_initialize_command_line_args(self): + actual = delete_datatap.initialize_command_line_args( + ["--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"]) + self.assertEqual( + actual, + argparse.Namespace( + credentials_file=None, + tapId="aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + region="us")) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_delete_datatap_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + delete_datatap.delete_datatap(mock_session, "") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_delete_datatap(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + tap_id = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa" + expected = {} + + mock_response.json.return_value = expected + actual = delete_datatap.delete_datatap(mock_session, tap_id) + self.assertEqual(actual, expected) + + +if __name__ == "__main__": + unittest.main() + \ No newline at end of file diff --git a/datatap/get_datatap.py b/datatap/get_datatap.py new file mode 100644 index 0000000..cdd4b2a --- /dev/null +++ b/datatap/get_datatap.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for Fetching a Datatap. + +API reference: +https://cloud.google.com/chronicle/docs/preview/datatap-config/datatapconfig-api?hl=en#get +""" + +import argparse +import json +import sys +from typing import Any, Mapping +from typing import Optional +from typing import Sequence + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def initialize_command_line_args( + args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]: + """Initializes and checks all the command-line arguments.""" + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-id", "--tapId", type=str, required=True, help="tap Id") + + return parser.parse_args(args) + + +def get_datatap(http_session: requests.AuthorizedSession, + tap_id: str) -> Mapping[str, Sequence[Any]]: + """Fetches the given datatap. + + Args: + http_session: Authorized session for HTTP requests. + tap_id: unique datatap Id returned on Datatap creation. 
+ + Returns: + Information about the fetched data in the form: + { + "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc", + "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "displayName": "tap1", + "filter": "ALL_UDM_EVENTS" + "cloudPubsubSink": { + "topic": "projects/sample-project/topics/sample-topic", + } + } + + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v1/dataTaps/{tap_id}" + response = http_session.request("GET", url) + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + cli = initialize_command_line_args() + if not cli: + sys.exit(1) # A sanity check failed. + + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region) + session = chronicle_auth.initialize_http_session(cli.credentials_file) + print( + json.dumps( + get_datatap(session, cli.tapId), + indent=2)) diff --git a/datatap/get_datatap_test.py b/datatap/get_datatap_test.py new file mode 100644 index 0000000..16ad2c9 --- /dev/null +++ b/datatap/get_datatap_test.py @@ -0,0 +1,73 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "get_datatap" module.""" + +import unittest +import argparse + +from unittest import mock + +from google.auth.transport import requests + +from . 
import get_datatap


class GetDatatapTest(unittest.TestCase):

  def test_initialize_command_line_args(self):
    actual = get_datatap.initialize_command_line_args(
        ["--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"])
    self.assertEqual(
        actual,
        argparse.Namespace(
            credentials_file=None,
            tapId="aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
            region="us"))

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_get_datatap_error(self, mock_response, mock_session):
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=400)
    mock_response.raise_for_status.side_effect = (
        requests.requests.exceptions.HTTPError())

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      get_datatap.get_datatap(mock_session, "")

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_get_datatap(self, mock_response, mock_session):
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    tap_id = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
    expected = {
        "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc",
        "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
        "displayName": "tap1",
        "filter": "ALL_UDM_EVENTS",
        "cloudPubsubSink": {
            "topic": "projects/sample-project/topics/sample-topic",
        }
    }

    mock_response.json.return_value = expected
    actual = get_datatap.get_datatap(mock_session, tap_id)
    self.assertEqual(actual, expected)


if __name__ == "__main__":
  unittest.main()
 
\ No newline at end of file
diff --git a/datatap/list_datatap.py b/datatap/list_datatap.py
new file mode 100644
index 0000000..2348af2
--- /dev/null
+++ b/datatap/list_datatap.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for listing all Datataps for customer. + +API reference: +https://cloud.google.com/chronicle/docs/preview/datatap-config/datatapconfig-api?hl=en#list +""" + +import argparse +import json +import sys +from typing import Any, Mapping +from typing import Optional +from typing import Sequence + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def initialize_command_line_args( + args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]: + """Initializes and checks all the command-line arguments.""" + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + + return parser.parse_args(args) + + +def list_datatap( + http_session: requests.AuthorizedSession) -> Mapping[str, Sequence[Any]]: + """Lists all datataps of a customer. + + Args: + http_session: Authorized session for HTTP requests. 
+ + Returns: + Information about the all the datataps in the form: + { + "dataTaps": [ + { + "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc", + "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "displayName": "tap1", + "filter": "ALL_UDM_EVENTS", + "cloudPubsubSink": { + "topic": "projects/sample-project/topics/sample-topic", + } + } + ] + } + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v1/dataTaps" + response = http_session.request("GET", url) + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + cli = initialize_command_line_args() + if not cli: + sys.exit(1) # A sanity check failed. + + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region) + session = chronicle_auth.initialize_http_session(cli.credentials_file) + print(json.dumps(list_datatap(session), indent=2)) diff --git a/datatap/list_datatap_test.py b/datatap/list_datatap_test.py new file mode 100644 index 0000000..2928ab7 --- /dev/null +++ b/datatap/list_datatap_test.py @@ -0,0 +1,72 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "list_datatap" module.""" + +import unittest +import argparse + +from unittest import mock + +from google.auth.transport import requests + +from . 
import list_datatap + + +class ListDatatapTest(unittest.TestCase): + + def test_initialize_command_line_args(self): + actual = list_datatap.initialize_command_line_args([]) + self.assertEqual( + actual, + argparse.Namespace( + credentials_file=None, + region="us")) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_list_datatap_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + list_datatap.list_datatap(mock_session) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_list_datatap(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected = { + "dataTaps": [{ + "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc", + "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", + "displayName": "tap1", + "filter": "ALL_UDM_EVENTS", + "cloudPubsubSink": { + "topic": "projects/sample-project/topics/sample-topic", + } + }] + } + + mock_response.json.return_value = expected + actual = list_datatap.list_datatap(mock_session) + self.assertEqual(actual, expected) + + +if __name__ == "__main__": + unittest.main() + \ No newline at end of file diff --git a/datatap/update_datatap.py b/datatap/update_datatap.py new file mode 100644 index 0000000..0def10f --- /dev/null +++ b/datatap/update_datatap.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""Executable and reusable sample for updating a Datatap.

API reference:
https://cloud.google.com/chronicle/docs/preview/datatap-config/datatapconfig-api?hl=en#update
"""

import argparse
import json
import sys
from typing import Any, Mapping
from typing import Optional
from typing import Sequence

from google.auth.transport import requests

from common import chronicle_auth
from common import regions

CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com"

# Filter types accepted by the dataTaps API.
_ALLOWED_FILTERS = ("ALL_UDM_EVENTS", "ALERT_UDM_EVENTS")


def initialize_command_line_args(
    args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]:
  """Initializes and checks all the command-line arguments.

  Args:
    args: command-line arguments to parse (defaults to sys.argv).

  Returns:
    Parsed arguments, or None if the filter-type sanity check failed.
  """
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  parser.add_argument(
      "-n", "--name", type=str, required=True, help="display name")
  parser.add_argument(
      "-t",
      "--topic",
      type=str,
      required=True,
      help="Topic in format projects//topics/")
  parser.add_argument(
      "-f",
      "--filter",
      type=str,
      required=True,
      help="filter Type, Options: ALL_UDM_EVENTS/ALERT_UDM_EVENTS")
  parser.add_argument(
      "-sf",
      "--serialization_format",
      type=str,
      required=False,
      help="serialization Format, Options : MARSHALLED_PROTO/JSON")
  parser.add_argument("-id", "--tapId", type=str, required=True, help="tap Id")

  # Sanity check for the filter type. Parse once and return the parsed
  # namespace (the original parsed a second time on return, discarding the
  # validated result).
  parsed_args = parser.parse_args(args)
  if parsed_args.filter not in _ALLOWED_FILTERS:
    # Bug fix: the previous error message was truncated mid-sentence.
    print("Error: filter type must be one of: " + "/".join(_ALLOWED_FILTERS))
    return None

  return parsed_args


def update_datatap(http_session: requests.AuthorizedSession, name: str,
                   topic: str,
                   filter_type: str,
                   serialization_format: str,
                   tap_id: str) -> Mapping[str, Sequence[Any]]:
  """Update a datatap.

  Args:
    http_session: Authorized session for HTTP requests.
    name: display name of the config to be updated.
    topic: topicId of the pubsub topic where events should be published.
    filter_type: The filter type to filter events, e.g., ALL_UDM_EVENTS
      or ALERT_UDM_EVENTS.
    serialization_format: The serialization format in which events are
      needed, e.g., MARSHALLED_PROTO or JSON.
    tap_id: unique datatap Id returned on Datatap creation.

  Returns:
    Information about the updated datatap in the form:
    {
      "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc",
      "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
      "displayName": "tap1",
      "filter": "ALL_UDM_EVENTS",
      "serializationFormat": "MARSHALLED_PROTO",
      "cloudPubsubSink": {
        "topic": "projects/sample-project/topics/sample-topic",
      }
    }

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  url = f"{CHRONICLE_API_BASE_URL}/v1/dataTaps/{tap_id}"

  body = {
      "name": "dataTaps/" + tap_id,
      "displayName": name,
      "cloudPubsubSink": {
          "topic": topic,
      },
      "filter": filter_type,
      "serializationFormat": serialization_format
  }

  response = http_session.request("PATCH", url, json=body)

  # Surface the server's error payload before raising, for easier debugging.
  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  cli = initialize_command_line_args()
  if not cli:
    sys.exit(1)  # A sanity check failed.

  CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region)
  session = chronicle_auth.initialize_http_session(cli.credentials_file)
  print(
      json.dumps(
          update_datatap(session, cli.name, cli.topic, cli.filter,
                         cli.serialization_format, cli.tapId),
          indent=2))
"""Unit tests for the "update_datatap" module."""

import argparse
import unittest
from unittest import mock

from google.auth.transport import requests

from . import update_datatap


class UpdateDatatapTest(unittest.TestCase):
  """Tests for argument validation and the update_datatap request helper."""

  def test_initialize_command_line_args(self):
    # A fully-specified, valid command line parses into the expected namespace.
    actual = update_datatap.initialize_command_line_args([
        "--name=sink1", "--topic=projects/sample-project/topics/sample-topic",
        "--filter=ALL_UDM_EVENTS",
        "--serialization_format=JSON",
        "--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
    ])
    self.assertEqual(
        actual,
        argparse.Namespace(
            credentials_file=None,
            name="sink1",
            topic="projects/sample-project/topics/sample-topic",
            filter="ALL_UDM_EVENTS",
            serialization_format="JSON",
            tapId="aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
            region="us"))

  def test_initialize_command_line_args_filter_type(self):
    # An unknown filter type fails the sanity check and yields None.
    actual = update_datatap.initialize_command_line_args([
        "--name=sink1", "--topic=projects/sample-project/topics/sample-topic",
        "--filter=INVALID_FILTER",
        "--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
    ])
    self.assertIsNone(actual)

  def test_initialize_command_line_args_tap_id_missing(self):
    # Required --tapId missing: argparse exits with status 2.
    with self.assertRaises(SystemExit) as error:
      update_datatap.initialize_command_line_args([
          "--name=sink1",
          "--topic=projects/sample-project/topics/sample-topic",
          "--filter=ALL_UDM_EVENTS",
      ])
    self.assertEqual(error.exception.code, 2)

  def test_initialize_command_line_args_filter_missing(self):
    # Required --filter missing: argparse exits with status 2.
    with self.assertRaises(SystemExit) as error:
      update_datatap.initialize_command_line_args([
          "--name=sink1",
          "--topic=projects/sample-project/topics/sample-topic",
          "--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
      ])
    self.assertEqual(error.exception.code, 2)

  def test_initialize_command_line_args_name_missing(self):
    # Required --name missing: argparse exits with status 2.
    with self.assertRaises(SystemExit) as error:
      update_datatap.initialize_command_line_args([
          "--topic=projects/sample-project/topics/sample-topic",
          "--tapId=aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
          "--filter=ALL_UDM_EVENTS",
      ])
    self.assertEqual(error.exception.code, 2)

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_update_datatap_error(self, mock_response, mock_session):
    # A 4xx status must propagate as an HTTPError.
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=400)
    mock_response.raise_for_status.side_effect = (
        requests.requests.exceptions.HTTPError())

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      update_datatap.update_datatap(mock_session, "", "", "", "", "")

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_update_datatap(self, mock_response, mock_session):
    # A 200 response is returned to the caller as parsed JSON.
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    expected = {
        "customerId": "cccccccc-cccc-cccc-cccc-cccccccccccc",
        "tapId": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
        "displayName": "tap1",
        "filter": "ALL_UDM_EVENTS",
        "serializationFormat": "JSON",
        "cloudPubsubSink": {
            "topic": "projects/sample-project/topics/sample-topic",
        }
    }
    mock_response.json.return_value = expected

    actual = update_datatap.update_datatap(
        mock_session, "tap1", "projects/sample-project/topics/sample-topic",
        "ALL_UDM_EVENTS", "JSON", "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa")
    self.assertEqual(actual, expected)


if __name__ == "__main__":
  unittest.main()
r"""Executable sample for batch updating curated rule sets deployments.

Sample Commands (run from api_samples_python dir):
  # Modify the script to update the constants that point to deployments.
  python3 -m detect.v1alpha.batch_update_curated_rule_set_deployments \
    -r=<region> -p=<project_id> -i=<instance>

API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments/batchUpdate
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.curatedRuleSetCategories.curatedRuleSets.curatedRuleSetDeployments#CuratedRuleSetDeployment
"""
import argparse
import json
from typing import Any, Mapping
from common import chronicle_auth
from common import project_id
from common import project_instance
from common import regions
from google.auth.transport import requests

CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"

SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]


def batch_update_curated_rule_set_deployments(
    http_session: requests.AuthorizedSession,
    proj_region: str,
    proj_id: str,
    proj_instance: str,
) -> Mapping[str, Any]:
  """Batch update curated rule set deployments.

  Args:
    http_session: Authorized session for HTTP requests.
    proj_region: region in which the target project is located
    proj_id: GCP project id or number which the target instance belongs to
    proj_instance: uuid of the instance (with dashes)

  Returns:
    an object with information about the modified deployments

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  # Bug fix: use the proj_region parameter rather than the module-global
  # `args`, so this function also works when imported as a library.
  base_url_with_region = regions.url_always_prepend_region(
      CHRONICLE_API_BASE_URL,
      proj_region
  )
  # pylint: disable-next=line-too-long
  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"

  # We use "-" in the URL because we provide category and rule_set IDs
  # in the request data.
  url = f"{base_url_with_region}/v1alpha/{parent}/curatedRuleSetCategories/-/curatedRuleSets/-/curatedRuleSetDeployments:batchUpdate"

  # Helper function for making a deployment name. Use this as the
  # curated_rule_set_deployment.name field in the request data below.
  def make_deployment_name(category, rule_set, precision):
    return f"{parent}/curatedRuleSetCategories/{category}/curatedRuleSets/{rule_set}/curatedRuleSetDeployments/{precision}"

  # Note that IDs are hard-coded below, as examples.
  print("\nCategories, rule sets, and precisions are hard-coded as " +
        "examples. Update the script to provide actual IDs.\n"
        )

  # Modify the category/rule_set/precision for each deployment below.
  # Deployment A.
  category_a = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
  rule_set_a = "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
  precision_a = "broad"

  # Deployment B.
  category_b = "cccccccc-cccc-cccc-cccc-cccccccccccc"
  rule_set_b = "dddddddd-dddd-dddd-dddd-dddddddddddd"
  precision_b = "precise"

  # Modify the data below to change the behavior of the request.
  # - Add elements to `requests` to batch update multiple deployments
  # - Change the enabled and alerting fields as needed
  # - Change the update_mask to modify only certain properties
  json_data = {
      "parent": f"{parent}/curatedRuleSetCategories/-/curatedRuleSets/-",
      "requests": [
          {
              "curated_rule_set_deployment": {
                  "name": make_deployment_name(
                      category_a,
                      rule_set_a,
                      precision_a,
                  ),
                  "enabled": True,
                  "alerting": False,
              },
              "update_mask": {
                  "paths": ["alerting", "enabled"],
              },
          },
          {
              "curated_rule_set_deployment": {
                  "name": make_deployment_name(
                      category_b,
                      rule_set_b,
                      precision_b,
                  ),
                  "enabled": True,
                  "alerting": True,
              },
              "update_mask": {
                  "paths": ["alerting", "enabled"],
              },
          },
      ],
  }

  # See API reference links at top of this file, for response format.
  response = http_session.request("POST", url, json=json_data)

  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)

  args = parser.parse_args()
  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES
  )
  # Bug fix: removed a second, unused `initialize_http_session` call that
  # created an extra authorized session for nothing.
  print(
      json.dumps(
          batch_update_curated_rule_set_deployments(
              auth_session,
              args.region,
              args.project_id,
              args.project_instance,
          ),
          indent=2,
      )
  )
r"""Executable and reusable sample for bulk updating alerts.

The file provided to the --alert_ids_file parameter should have one alert
  ID per line like so:
```
de_ad9d2771-a567-49ee-6452-1b2db13c1d33
de_3c2e2556-aba1-a253-7518-b4ddb666cc32
```
Usage:
  python -m alerts.v1alpha.bulk_update_alerts \
    --project_id=<project_id> \
    --project_instance=<instance> \
    --alert_ids_file=<path> \
    [--confidence_score=<score>] \
    [--priority=<priority>] \
    [--reason=<reason>] \
    [--reputation=<reputation>] \
    [--status=<status>] \
    [--verdict=<verdict>] \
    [--risk_score=<score>] \
    [--disregarded=<bool>] \
    [--severity=<severity>] \
    [--comment=<comment>] \
    [--root_cause=<cause>] \
    [--severity_display=<text>]

# pylint: disable=line-too-long
API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacyUpdateAlert
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Priority
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Reason
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Reputation
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Status
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Verdict
"""
# pylint: enable=line-too-long

import json

from common import chronicle_auth

from . import update_alert


CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"
SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]
# Feedback applied to every alert unless overridden on the command line.
DEFAULT_FEEDBACK = {
    "comment": "automated cleanup",
    "reason": "REASON_MAINTENANCE",
    "reputation": "REPUTATION_UNSPECIFIED",
    "root_cause": "Other",
    "status": "CLOSED",
    "verdict": "VERDICT_UNSPECIFIED",
}


if __name__ == "__main__":
  # Reuse update_alert's parser so flags stay consistent between the single
  # and bulk update samples.
  parser = update_alert.get_update_parser()
  parser.add_argument(
      "--alert_ids_file", type=str, required=True,
      help="File with one alert ID per line."
  )
  parser.set_defaults(
      comment=DEFAULT_FEEDBACK["comment"],
      reason=DEFAULT_FEEDBACK["reason"],
      reputation=DEFAULT_FEEDBACK["reputation"],
      root_cause=DEFAULT_FEEDBACK["root_cause"],
      status=DEFAULT_FEEDBACK["status"],
      verdict=DEFAULT_FEEDBACK["verdict"],
  )
  args = parser.parse_args()

  # raise error if required args are not present
  update_alert.check_args(parser, args)

  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES,
  )
  with open(args.alert_ids_file) as fh:
    for line in fh:
      alert_id = line.strip()
      # Robustness fix: skip blank lines (e.g. a trailing newline) instead of
      # issuing an update with an empty alert ID.
      if not alert_id:
        continue
      a_list = update_alert.update_alert(
          auth_session,
          args.project_id,
          args.project_instance,
          args.region,
          alert_id,
          args.confidence_score,
          args.reason,
          args.reputation,
          args.priority,
          args.status,
          args.verdict,
          args.risk_score,
          args.disregarded,
          args.severity,
          args.comment,
          args.root_cause,
      )
      print(json.dumps(a_list, indent=2))
r"""Executable sample for creating a retrohunt.

Sample Commands (run from api_samples_python dir):
  python3 -m detect.v1alpha.create_retrohunt \
    -r=<region> -p=<project_id> -i=<instance> -rid=<rule_id> \
    -st="2023-10-02T18:00:00Z" -et="2023-10-02T20:00:00Z"

API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules.retrohunts/create
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.operations#Operation
"""
import argparse
import datetime
import json
from typing import Any, Mapping
from common import chronicle_auth
from common import datetime_converter
from common import project_id
from common import project_instance
from common import regions
from google.auth.transport import requests

CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"

SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]


def create_retrohunt(
    http_session: requests.AuthorizedSession,
    proj_region: str,
    proj_id: str,
    proj_instance: str,
    rule_id: str,
    start_time: datetime.datetime,
    end_time: datetime.datetime,
) -> Mapping[str, Any]:
  """Creates a retrohunt.

  Args:
    http_session: Authorized session for HTTP requests.
    proj_region: region in which the target project is located
    proj_id: GCP project id or number which the target instance belongs to
    proj_instance: uuid of the instance (with dashes)
    rule_id: Unique ID of the detection rule to retrieve ("ru_<UUID>").
    start_time: the start time of the event time range this retrohunt will be
      executed over
    end_time: the end time of the event time range this retrohunt will be
      executed over

  Returns:
    an Operation resource object containing relevant retrohunt's information

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  # Bug fix: use the proj_region parameter rather than the module-global
  # `args`, so this function also works when imported as a library.
  base_url_with_region = regions.url_always_prepend_region(
      CHRONICLE_API_BASE_URL,
      proj_region
  )
  # pylint: disable-next=line-too-long
  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
  url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}/retrohunts"
  body = {
      "process_interval": {
          "start_time": datetime_converter.strftime(start_time),
          "end_time": datetime_converter.strftime(end_time),
      },
  }

  # See API reference links at top of this file, for response format.
  response = http_session.request("POST", url, json=body)
  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)
  parser.add_argument(
      "-rid",
      "--rule_id",
      type=str,
      required=True,
      help='rule ID to create retrohunt for. In the form of "ru_<UUID>"',
  )
  parser.add_argument(
      "-st",
      "--start_time",
      type=datetime_converter.iso8601_datetime_utc,
      required=True,
      help="Retrohunt start time in UTC ('yyyy-mm-ddThh:mm:ssZ')",
  )
  parser.add_argument(
      "-et",
      "--end_time",
      type=datetime_converter.iso8601_datetime_utc,
      required=True,
      help="Retrohunt end time in UTC ('yyyy-mm-ddThh:mm:ssZ')",
  )
  args = parser.parse_args()
  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES
  )
  print(
      json.dumps(
          create_retrohunt(
              auth_session,
              args.region,
              args.project_id,
              args.project_instance,
              args.rule_id,
              args.start_time,
              args.end_time,
          ),
          indent=2,
      )
  )
r"""Executable and reusable sample for creating a detection rule.

Sample Commands (run from api_samples_python dir):
  # From file
  python3 -m detect.v1alpha.create_rule \
    --region $region \
    --project_instance $project_instance \
    --project_id $PROJECT_ID \
    --rule_file=./path/to/rule/rulename.yaral

  # From stdin
  cat ./path/rulename.yaral | python3 -m detect.v1alpha.create_rule \
    --region $region \
    --project_instance $project_instance \
    --project_id $PROJECT_ID \
    --rule_file -

API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/create
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules#Rule
"""

import argparse
import json
from typing import Any, Mapping

from common import chronicle_auth
from common import project_id
from common import project_instance
from common import regions
from google.auth.transport import requests

CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"

SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]


def create_rule(
    http_session: requests.AuthorizedSession,
    proj_id: str,
    proj_instance: str,
    proj_region: str,
    rule_file_path,
) -> Mapping[str, Any]:
  """Creates a new detection rule to find matches in logs.

  Args:
    http_session: Authorized session for HTTP requests.
    proj_id: GCP project id or number to which the target instance belongs.
    proj_instance: Customer ID (uuid with dashes) for the Chronicle instance.
    proj_region: region in which the target project is located.
    rule_file_path: Open file object (e.g. from argparse.FileType) whose
      contents are the new detection rule, used to evaluate logs.

  Returns:
    New detection rule.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  # Bug fix: use the proj_region parameter rather than the module-global
  # `args`, so this function also works when imported as a library.
  base_url_with_region = regions.url_always_prepend_region(
      CHRONICLE_API_BASE_URL,
      proj_region
  )
  # pylint: disable-next=line-too-long
  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
  url = f"{base_url_with_region}/v1alpha/{parent}/rules"

  body = {
      "text": rule_file_path.read(),
  }

  # See API reference links at top of this file, for response format.
  response = http_session.request("POST", url, json=body)
  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  # common
  chronicle_auth.add_argument_credentials_file(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)
  regions.add_argument_region(parser)
  # local
  parser.add_argument(
      "-f",
      "--rule_file",
      type=argparse.FileType("r"),
      required=True,
      help="path of a file with the desired rule's content, or - for STDIN",
  )
  args = parser.parse_args()

  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES
  )
  new_rule = create_rule(auth_session,
                         args.project_id,
                         args.project_instance,
                         args.region,
                         args.rule_file)
  print(json.dumps(new_rule, indent=2))
r"""Executable sample for deleting a rule.

Sample Commands (run from api_samples_python dir):
  python3 -m detect.v1alpha.delete_rule -r=<region> \
    -p=<project_id> -i=<instance> \
    -rid=<rule_id>

API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/delete
"""

import argparse
import json
from typing import Any, Mapping

from common import chronicle_auth
from common import project_id
from common import project_instance
from common import regions
from google.auth.transport import requests

CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"

SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]


def delete_rule(
    http_session: requests.AuthorizedSession,
    proj_region: str,
    proj_id: str,
    proj_instance: str,
    rule_id: str,
) -> Mapping[str, Any]:
  """Deletes a rule.

  Args:
    http_session: Authorized session for HTTP requests.
    proj_region: region in which the target project is located
    proj_id: GCP project id or number which the target instance belongs to
    proj_instance: uuid of the instance (with dashes)
    rule_id: Unique ID of the detection rule to delete ("ru_<UUID>").

  Returns:
    an empty response (an empty JSON object on success)

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  # Bug fix: use the proj_region parameter rather than the module-global
  # `args`, so this function also works when imported as a library.
  base_url_with_region = regions.url_always_prepend_region(
      CHRONICLE_API_BASE_URL,
      proj_region
  )
  # pylint: disable-next=line-too-long
  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
  url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}"

  # See API reference links at top of this file, for response format.
  response = http_session.request("DELETE", url)
  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)
  parser.add_argument(
      "-rid",
      "--rule_id",
      type=str,
      required=True,
      help='ID of rule to be deleted. In the form of "ru_<UUID>"',
  )
  args = parser.parse_args()
  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES
  )
  print(
      json.dumps(
          delete_rule(
              auth_session,
              args.region,
              args.project_id,
              args.project_instance,
              args.rule_id,
          ),
          indent=2,
      )
  )
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+r"""Executable sample for enabling a rule.
+
+Sample Commands (run from api_samples_python dir):
+  python3 -m detect.v1alpha.enable_rule -r= \
+    -p= -i= \
+    -rid=
+
+API reference:
+  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/updateDeployment
+  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/RuleDeployment
+"""
+import argparse
+import json
+from typing import Any, Mapping
+from common import chronicle_auth
+from common import project_id
+from common import project_instance
+from common import regions
+from google.auth.transport import requests
+
+CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com"
+
+SCOPES = [
+    "https://www.googleapis.com/auth/cloud-platform",
+]
+
+
+def enable_rule(
+    http_session: requests.AuthorizedSession,
+    proj_region: str,
+    proj_id: str,
+    proj_instance: str,
+    rule_id: str,
+) -> Mapping[str, Any]:
+  """Enables a rule.
+
+  Args:
+    http_session: Authorized session for HTTP requests.
+    proj_region: region in which the target project is located
+    proj_id: GCP project id or number which the target instance belongs to
+    proj_instance: uuid of the instance whose rules are being
+      created (with dashes)
+    rule_id: Unique ID of the detection rule to retrieve ("ru_").
+
+  Returns:
+    a rule deployment object containing relevant rule's deployment information
+
+  Raises:
+    requests.exceptions.HTTPError: HTTP request resulted in an error
+      (response.status_code >= 400).
+  """
+  base_url_with_region = regions.url_always_prepend_region(
+      CHRONICLE_API_BASE_URL,
+      proj_region
+  )
+  # pylint: disable-next=line-too-long
+  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
+  url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}/deployment"
+  body = {
+      # You can set enabled to False to disable a rule.
+ "enabled": True, + } + params = {"update_mask": "enabled"} + + # See API reference links at top of this file, for response format. + response = http_session.request("PATCH", url, params=params, json=body) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-rid", + "--rule_id", + type=str, + required=True, + help='ID of rule to be enabled. In the form of "ru_"', + ) + args = parser.parse_args() + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + print( + json.dumps( + enable_rule( + auth_session, + args.region, + args.project_id, + args.project_instance, + args.rule_id, + ), + indent=2, + ) + ) diff --git a/detect/v1alpha/get_alert.py b/detect/v1alpha/get_alert.py new file mode 100644 index 0000000..a56f768 --- /dev/null +++ b/detect/v1alpha/get_alert.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable and reusable sample for getting a Reference List. 
+ +Usage: + python -m alerts.v1alpha.get_alert \ + --project_id= \ + --project_instance= \ + --alert_id= + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacyGetAlert + +""" + +import argparse +import json +from typing import Any, Mapping + +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions + +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def get_alert( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + alert_id: str, + include_detections: bool = False, +) -> Mapping[str, Any]: + """Gets an Alert. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: region in which the target project is located. + alert_id: Identifier for the alert. + include_detections: Flag to include detections. + + Returns: + Dictionary representation of the Alert + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + + query_params = {"alertId": alert_id} + if include_detections: + query_params["includeDetections"] = True + + url = f"{base_url_with_region}/v1alpha/{parent}/legacy:legacyGetAlert" + + response = http_session.request("GET", url, params=query_params) + # Expected server response is described in: + # https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacyGetAlert + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument( + "--alert_id", type=str, required=True, + help="identifier for the alert" + ) + parser.add_argument( + "-d", "--include-detections", type=bool, default=False, required=False, + help="flag to include detections" + ) + args = parser.parse_args() + + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES, + ) + alert = get_alert( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.alert_id, + args.include_detections, + ) + print(json.dumps(alert, indent=2)) diff --git a/detect/v1alpha/get_retrohunt.py b/detect/v1alpha/get_retrohunt.py new file mode 100644 index 0000000..8977b58 --- /dev/null +++ b/detect/v1alpha/get_retrohunt.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for getting a retrohunt. + +Sample Commands (run from api_samples_python dir): + python3 -m detect.v1alpha.get_retrohunt -r= \ + -p= -i= \ + -rid= -oid= + + python3 -m detect.v1alpha.get_retrohunt -r= \ + -p= -i= \ + -rid=@v__ -oid= + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules.retrohunts/get + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules.retrohunts#Retrohunt +""" +import argparse +import json +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def get_retrohunt( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + rule_id: str, + op_id: str, +) -> Mapping[str, Any]: + """Get a retrohunt for a given rule. + + Args: + http_session: Authorized session for HTTP requests. + proj_region: region in which the target project is located + proj_id: GCP project id or number which the target instance belongs to + proj_instance: uuid of the instance (with dashes) + rule_id: Unique ID of the detection rule to retrieve ("ru_" or + "ru_@v__"). If a version suffix isn't + specified we use the rule's latest version. 
+    op_id: the operation ID of the retrohunt
+
+  Returns:
+    a retrohunt object containing relevant retrohunt's information
+
+  Raises:
+    requests.exceptions.HTTPError: HTTP request resulted in an error
+      (response.status_code >= 400).
+  """
+  base_url_with_region = regions.url_always_prepend_region(
+      CHRONICLE_API_BASE_URL,
+      proj_region
+  )
+  # pylint: disable-next=line-too-long
+  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
+  url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}/retrohunts/{op_id}"
+
+  # See API reference links at top of this file, for response format.
+  response = http_session.request("GET", url)
+  if response.status_code >= 400:
+    print(response.text)
+  response.raise_for_status()
+  return response.json()
+
+
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser()
+  chronicle_auth.add_argument_credentials_file(parser)
+  regions.add_argument_region(parser)
+  project_instance.add_argument_project_instance(parser)
+  project_id.add_argument_project_id(parser)
+  parser.add_argument(
+      "-rid",
+      "--rule_id",
+      type=str,
+      required=True,
+      help=(
+          'rule ID to get retrohunt for. can use both "ru_" or'
+          ' "ru_@v__"'
+      ),
+  )
+  parser.add_argument(
+      "-oid",
+      "--op_id",
+      type=str,
+      required=True,
+      help="operation ID for the retrohunt",
+  )
+  args = parser.parse_args()
+  auth_session = chronicle_auth.initialize_http_session(
+      args.credentials_file,
+      SCOPES
+  )
+  print(
+      json.dumps(
+          get_retrohunt(
+              auth_session,
+              args.region,
+              args.project_id,
+              args.project_instance,
+              args.rule_id,
+              args.op_id,
+          ),
+          indent=2,
+      )
+  )
diff --git a/detect/v1alpha/get_rule.py b/detect/v1alpha/get_rule.py
new file mode 100644
index 0000000..661cf60
--- /dev/null
+++ b/detect/v1alpha/get_rule.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for getting a rule. + +Sample Commands (run from api_samples_python dir): + python3 -m detect.v1alpha.get_rule -r= -p= \ + -i= -rid= + + python3 -m detect.v1alpha.get_rule -r= -p= \ + -i= -rid=@v__ + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/get + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules#Rule +""" + +import argparse +import json + +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def get_rule( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + rule_id: str, +) -> Mapping[str, Any]: + """Get a rule. + + Args: + http_session: Authorized session for HTTP requests. + proj_region: region in which the target project is located + proj_id: GCP project id or number which the target instance belongs to + proj_instance: uuid of the instance (with dashes) + rule_id: Unique ID of the detection rule to retrieve ("ru_" or + "ru_@v__"). If a version suffix isn't + specified we use the rule's latest version. 
+
+  Returns:
+    a rule object containing relevant rule's information
+  Raises:
+    requests.exceptions.HTTPError: HTTP request resulted in an error
+      (response.status_code >= 400).
+  """
+  base_url_with_region = regions.url_always_prepend_region(
+      CHRONICLE_API_BASE_URL,
+      proj_region
+  )
+  # pylint: disable-next=line-too-long
+  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
+  url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}"
+
+  # See API reference links at top of this file, for response format.
+  response = http_session.request("GET", url)
+  if response.status_code >= 400:
+    print(response.text)
+  response.raise_for_status()
+  return response.json()
+
+
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser()
+  chronicle_auth.add_argument_credentials_file(parser)
+  project_instance.add_argument_project_instance(parser)
+  project_id.add_argument_project_id(parser)
+  regions.add_argument_region(parser)
+  parser.add_argument(
+      "-rid",
+      "--rule_id",
+      type=str,
+      required=True,
+      help=(
+          'rule ID to get rule for. can use both "ru_" or'
+          ' "ru_@v__"'
+      ),
+  )
+  args = parser.parse_args()
+  auth_session = chronicle_auth.initialize_http_session(
+      args.credentials_file,
+      SCOPES
+  )
+  print(
+      json.dumps(
+          get_rule(
+              auth_session,
+              args.region,
+              args.project_id,
+              args.project_instance,
+              args.rule_id
+          ),
+          indent=2,
+      )
+  )
diff --git a/detect/v1alpha/list_detections.py b/detect/v1alpha/list_detections.py
new file mode 100644
index 0000000..00d5f95
--- /dev/null
+++ b/detect/v1alpha/list_detections.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3
+
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for listing detections for a rule. + +Sample Commands (run from api_samples_python dir): + python3 -m detect.v1alpha.list_detections -r= \ + -p= -i= \ + -rid= + + # Different variation on rid + python3 -m detect.v1alpha.list_detections -r= \ + -p= -i= \ + -rid=@v__ + + # Different variation on rid + python3 -m detect.v1alpha.list_detections -r= \ + -p= -i= \ + -rid=@- + + # With pagination options + python3 -m detect.v1alpha.list_detections -r= \ + -p= -i= -rid= \ + --page_size= --alert_state= --page_token= + + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacySearchDetections + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Collection +""" +import argparse +import json +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + +ALERT_STATES = ( + "UNSPECIFIED", + "NOT_ALERTING", + "ALERTING", +) + + +def list_detections( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + rule_id: str, + alert_state: str | None = None, + page_size: int | None = None, + page_token: str | None = None +) -> Mapping[str, Any]: + """List detections for a rule. + + Args: + http_session: Authorized session for HTTP requests. 
+    proj_region: region in which the target project is located
+    proj_id: GCP project id or number which the target instance belongs to
+    proj_instance: uuid of the instance (with dashes)
+    rule_id: Unique id of the rule to retrieve errors for. Options are (1)
+      {rule_id} (2) {rule_id}@v__ (3) {rule_id}@- which
+      matches on all versions.
+    alert_state: if provided, filter on alert_state
+    page_size: if provided, ask for a specific amount of detections
+    page_token: if provided, serves as a continuation token for pagination
+
+  Returns:
+    a list of detections
+
+  Raises:
+    requests.exceptions.HTTPError: HTTP request resulted in an error
+      (response.status_code >= 400).
+  """
+  base_url_with_region = regions.url_always_prepend_region(
+      CHRONICLE_API_BASE_URL,
+      proj_region
+  )
+  # pylint: disable-next=line-too-long
+  parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
+  url = f"{base_url_with_region}/v1alpha/{parent}/legacy:legacySearchDetections"
+  params = {
+      "rule_id": rule_id,
+  }
+  if alert_state:
+    params["alertState"] = alert_state
+  if page_size:
+    params["pageSize"] = page_size
+  if page_token:
+    params["pageToken"] = page_token
+
+  # See API reference links at top of this file, for response format.
+  response = http_session.request("GET", url, params=params)
+  if response.status_code >= 400:
+    print(response.text)
+  response.raise_for_status()
+  return response.json()
+
+
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser()
+  chronicle_auth.add_argument_credentials_file(parser)
+  regions.add_argument_region(parser)
+  project_instance.add_argument_project_instance(parser)
+  project_id.add_argument_project_id(parser)
+  parser.add_argument(
+      "-rid",
+      "--rule_id",
+      type=str,
+      required=True,
+      help=(
+          "rule id to list detections for. Options are (1) rule_id (2)"
+          " rule_id@v__ (3) rule_id@- which matches on"
+          " all versions."
+ ), + ) + parser.add_argument( + "--alert_state", + choices=ALERT_STATES, + required=False, + default=None, + ) + parser.add_argument( + "--page_size", + type=int, + required=False, + default=None, + ) + parser.add_argument( + "--page_token", + type=str, + required=False, + default=None, + ) + args = parser.parse_args() + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + print( + json.dumps( + list_detections( + auth_session, + args.region, + args.project_id, + args.project_instance, + args.rule_id, + args.alert_state, + args.page_size, + args.page_token, + ), + indent=2, + ) + ) diff --git a/detect/v1alpha/list_errors.py b/detect/v1alpha/list_errors.py new file mode 100644 index 0000000..bb68b03 --- /dev/null +++ b/detect/v1alpha/list_errors.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for listing errors for a rule. 
+ +Sample Commands (run from api_samples_python dir): + python3 -m detect.v1alpha.list_errors -r= \ + -p= -i= \ + -rid= + + python3 -m detect.v1alpha.list_errors -r= \ + -p= -i= \ + -rid=@v__ + + python3 -m detect.v1alpha.list_errors -r= \ + -p= -i= \ + -rid=@- + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.ruleExecutionErrors/list + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.ruleExecutionErrors/list#RuleExecutionError +""" +import argparse +import json +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def list_errors( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + rule_id: str, +) -> Mapping[str, Any]: + """Listing errors for rules. + + Args: + http_session: Authorized session for HTTP requests. + proj_region: region in which the target project is located + proj_id: GCP project id or number which the target instance belongs to + proj_instance: uuid of the instance (with dashes) + rule_id: Unique id of the rule to retrieve errors for. Options are (1) + {rule_id} (2) {rule_id}@v__ (3) {rule_id}@- which + matches on all versions. + + Returns: + a rule execution error object containing relevant error's information + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + args.region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/ruleExecutionErrors" + rule_filter = ( + "rule =" + f' "projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}/rules/{rule_id}"' + ) + params = { + "filter": rule_filter, + } + + # See API reference links at top of this file, for response format. + response = http_session.request("GET", url, params=params) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + parser.add_argument( + "-rid", + "--rule_id", + type=str, + required=True, + help=( + "rule id to list errors for. Options are (1) rule_id (2)" + " rule_id@v__ (3) rule_id@- which matches on" + " all versions." + ), + ) + args = parser.parse_args() + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + print( + json.dumps( + list_errors( + auth_session, + args.region, + args.project_id, + args.project_instance, + args.rule_id, + ), + indent=2, + ) + ) diff --git a/detect/v1alpha/list_rules.py b/detect/v1alpha/list_rules.py new file mode 100644 index 0000000..d870104 --- /dev/null +++ b/detect/v1alpha/list_rules.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable and reusable sample for retrieving a list of rules. + +Sample Commands (run from api_samples_python dir): + python3 -m detect.v1alpha.list_rules -r= \ + -p= -i= + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/list + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules#Rule +""" + +import argparse +import json +from typing import Any, Mapping + +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def list_rules( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + ) -> Mapping[str, Any]: + """Gets a list of rules. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: region in which the target project is located. + Returns: + Array containing information about rules. + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + args.region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/rules" + + # See API reference links at top of this file, for response format. + response = http_session.request("GET", url) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + args = parser.parse_args() + session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + rules = list_rules( + session, + args.project_id, + args.project_instance, + args.region + ) + print(json.dumps(rules, indent=2)) diff --git a/detect/v1alpha/search_rules_alerts.py b/detect/v1alpha/search_rules_alerts.py new file mode 100644 index 0000000..32f60de --- /dev/null +++ b/detect/v1alpha/search_rules_alerts.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for getting a list of generated alerts. 
+ +Sample Command (run from api_samples_python dir): + python3 -m detect.v1alpha.search_rules_alerts \ + --region=$REGION \ + --project_id=$PROJECT_ID \ + --project_instance=$PROJECT_INSTANCE \ + --credentials_file=$CREDENTIALS_FILE \ + --start_time="2024-11-11T13:37:32Z" \ + --start_time="2024-11-19T13:37:32Z" \ + --rule_status=ALL \ + --page_size=10 + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacySearchRulesAlerts +""" +import argparse +import datetime +import json +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + +RULE_STATUS = ( + "ACTIVE", + "ARCHIVED", + "ALL", +) + + +def search_rules_alerts( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + start_time: str, + end_time: str, + rule_status: str | None = None, + page_size: int | None = None, +) -> Mapping[str, Any]: + """... + + Args: + http_session: Authorized session for HTTP requests. + proj_region: region in which the target project is located + proj_id: GCP project id or number which the target instance belongs to + proj_instance: uuid of the instance (with dashes) + start_time: A timestamp in RFC3339 UTC "Zulu" format, with nanosecond + resolution and up to nine fractional digits. + end_time: A timestamp in RFC3339 UTC "Zulu" format, with nanosecond + resolution and up to nine fractional digits. + rule_status: if provided, limit the alerts to ACTIVE | ARCHIVED | ALL + page_size: if provided, limit the number of alerts returned + + Returns: + a list of detections + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, args.region + ) + # pylint: disable-next=line-too-long + instance = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{instance}/legacy:legacySearchRulesAlerts" + params = {"timeRange.start_time": start_time, "timeRange.end_time": end_time} + if rule_status: + if rule_status not in RULE_STATUS: + raise ValueError( + f"rule_status must be one of {RULE_STATUS}, got {rule_status}" + ) + params["ruleStatus"] = rule_status + if page_size: + params["maxNumAlertsToReturn"] = page_size + + # See API reference links at top of this file, for response format. + response = http_session.request("GET", url, params=params) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + now = datetime.datetime.now() + yesterday = now - datetime.timedelta(hours=24) + # Format the datetime object into the desired string + start_time_string = yesterday.strftime("%Y-%m-%dT%H:%M:%SZ") + + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + parser.add_argument( + "--start_time", + type=str, + required=False, + default=start_time_string, + ) + parser.add_argument( + "--end_time", + type=str, + required=False, + default=now.strftime("%Y-%m-%dT%H:%M:%SZ"), + ) + parser.add_argument( + "--rule_status", + choices=RULE_STATUS, + required=False, + default="ALL", + ) + parser.add_argument( + "--page_size", + type=int, + required=False, + default=10, + ) + args = parser.parse_args() + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, SCOPES + ) + print( + json.dumps( + search_rules_alerts( + auth_session, + args.region, + args.project_id, + 
args.project_instance, + args.start_time, + args.end_time, + args.rule_status, + args.page_size, + ), + indent=2, + ) + ) diff --git a/detect/v1alpha/update_alert.py b/detect/v1alpha/update_alert.py new file mode 100644 index 0000000..cf3aaf6 --- /dev/null +++ b/detect/v1alpha/update_alert.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable and reusable sample for updating an Alert. + +Usage: + python -m alerts.v1alpha.update_alert \ + --project_id= \ + --project_instance= \ + --alert_id= \ + --confidence_score= \ + --priority= \ + --reason= \ + --reputation= \ + --priority= \ + --status= \ + --verdict= \ + --risk_score= \ + --disregarded= \ + --severity= \ + --comment= \ + --root_cause= \ + --severity_display= + +# pylint: disable=line-too-long +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacyUpdateAlert + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Priority + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Reason + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Reputation + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Priority + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Status + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/Noun#Verdict +""" +# pylint: enable=line-too-long + 
+import argparse +import json +from typing import Any, Literal, Mapping + +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions + +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + +PRIORITY_ENUM = ( + "PRIORITY_UNSPECIFIED", + "PRIORITY_INFO", + "PRIORITY_LOW", + "PRIORITY_MEDIUM", + "PRIORITY_HIGH", + "PRIORITY_CRITICAL", +) +REASON_ENUM = ( + "REASON_UNSPECIFIED", + "REASON_NOT_MALICIOUS", + "REASON_MALICIOUS", + "REASON_MAINTENANCE", +) +REPUTATION_ENUM = ( + "REPUTATION_UNSPECIFIED", + "USEFUL", + "NOT_USEFUL", +) +STATUS_ENUM = ( + "STATUS_UNSPECIFIED", + "NEW", + "REVIEWED", + "CLOSED", + "OPEN", +) +VERDICT_ENUM = ( + "VERDICT_UNSPECIFIED", + "TRUE_POSITIVE", + "FALSE_POSITIVE", +) + + +def get_update_parser(): + """Returns an argparse.ArgumentParser for the update_alert command.""" + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument( + "--comment", + type=str, + required=False, + default=None, + help="Analyst comment.", + ) + parser.add_argument( + "--confidence_score", + type=int, + required=False, + default=None, + help="confidence score [0-100] of the finding", + ) + parser.add_argument( + "--disregarded", + type=lambda x: x.lower() == "true", + required=False, + default=None, + help="Analyst disregard (or un-disregard) the event", + ) + parser.add_argument( + "--priority", + choices=PRIORITY_ENUM, + required=False, + default=None, + help="alert priority.", + ) + parser.add_argument( + "--reason", + choices=REASON_ENUM, + required=False, + default=None, + help="reason for closing an Alert", + ) + parser.add_argument( + "--reputation", + choices=REPUTATION_ENUM, + required=False, + 
default=None, + help="A categorization of the finding as useful or not useful", + ) + parser.add_argument( + "--risk_score", + type=int, + required=False, + default=None, + help="risk score [0-100] of the finding", + ) + parser.add_argument( + "--root_cause", + type=str, + required=False, + default=None, + help="Alert root cause.", + ) + parser.add_argument( + "--status", + choices=STATUS_ENUM, + required=False, + default=None, + help="alert status", + ) + parser.add_argument( + "--verdict", + choices=VERDICT_ENUM, + required=False, + default=None, + help="a verdict on whether the finding reflects a security incident", + ) + parser.add_argument( + "--severity", + type=int, + required=False, + default=None, + help="severity score [0-100] of the finding", + ) + return parser + + +def check_args( + parser: argparse.ArgumentParser, + args_to_check: argparse.Namespace): + """Checks if at least one of the required arguments is provided. + + Args: + parser: instance of argparse.ArgumentParser (to raise error if needed). + args_to_check: instance of argparse.Namespace with the arguments to check. 
+ """ + if not any( + [ + args_to_check.comment or args_to_check.comment == "", # pylint: disable=g-explicit-bool-comparison + args_to_check.disregarded, + args_to_check.priority, + args_to_check.reason, + args_to_check.reputation, + args_to_check.risk_score or args_to_check.risk_score == 0, + args_to_check.root_cause or args_to_check.root_cause == "", # pylint: disable=g-explicit-bool-comparison + args_to_check.severity or args_to_check.severity == 0, + args_to_check.status, + args_to_check.verdict, + ] + ): + parser.error("At least one of the arguments " + "--comment, " + "--disregarded, " + "--priority, " + "--reason, " + "--reputation, " + "--risk_score, " + "--root_cause, " + "--severity, " + "--status, " + "or --verdict " + "is required.") + + +def update_alert( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + alert_id: str, + confidence_score: int | None = None, + reason: str | None = None, + reputation: str | None = None, + priority: str | None = None, + status: str | None = None, + verdict: str | None = None, + risk_score: int | None = None, + disregarded: bool | None = None, + severity: int | None = None, + comment: str | Literal[""] | None = None, + root_cause: str | Literal[""] | None = None, + ) -> Mapping[str, Any]: + """Updates an Alert. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: Region in which the target project is located. + alert_id: Identifier for the alert. + confidence_score: Confidence score [0-100] of the finding. + reason: Reason for closing an Alert. + reputation: A categorization of the finding as useful or not useful. + priority: Alert priority. + status: Status of the alert. + verdict: Verdict of the alert. + risk_score: Risk score [0-100] of the finding. 
+ disregarded: Analyst disregard (or un-disregard) the event. + severity: Severity score [0-100] of the finding. + comment: Analyst comment in free text. Empty string is a valid value. + root_cause: Alert root cause in free text. Empty string unsets the value. + + Returns: + Dictionary representation of the Alert + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/legacy:legacyUpdateAlert/" + + feedback = {} + if confidence_score or confidence_score == 0: + feedback["confidence_score"] = confidence_score + if reason: + feedback["reason"] = reason + if reputation: + feedback["reputation"] = reputation + if priority: + feedback["priority"] = priority + if status: + feedback["status"] = status + if verdict: + feedback["verdict"] = verdict + if risk_score or risk_score == 0: + feedback["risk_score"] = risk_score + if disregarded: + feedback["disregarded"] = disregarded + if severity or severity == 0: + feedback["severity"] = severity + if comment or comment == "": # pylint: disable=g-explicit-bool-comparison + feedback["comment"] = comment + if root_cause or root_cause == "": # pylint: disable=g-explicit-bool-comparison + feedback["root_cause"] = root_cause + + payload = { + "alert_id": alert_id, + "feedback": feedback, + } + + response = http_session.request("POST", url, json=payload) + + # Expected server response is described in: + # https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.legacy/legacyUpdateAlert + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + main_parser = get_update_parser() + 
main_parser.add_argument( + "--alert_id", type=str, required=True, + help="identifier for the alert" + ) + args = main_parser.parse_args() + + # Check if at least one of the specific arguments is provided + check_args(main_parser, args) + + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES, + ) + a_list = update_alert( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.alert_id, + args.confidence_score, + args.reason, + args.reputation, + args.priority, + args.status, + args.verdict, + args.risk_score, + args.disregarded, + args.severity, + args.comment, + args.root_cause, + ) + print(json.dumps(a_list, indent=2)) diff --git a/detect/v1alpha/update_rule.py b/detect/v1alpha/update_rule.py new file mode 100644 index 0000000..745dc60 --- /dev/null +++ b/detect/v1alpha/update_rule.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable sample for updating a rule. 
+ +Sample Commands (run from api_samples_python dir): + # From file + python3 -m detect.v1alpha.update_rule -r= -p= \ + -i= -rid= \ + --rule_file=./path/to/rule/rulename.yaral + + # From stdin + cat ./path/rulename.yaral | python3 -m detect.v1alpha.update_rule \ + -r= -p= \ + -i= -rid= \ + --rule_file - + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules/patch + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.rules#Rule +""" +import argparse +import json +from typing import Any, Mapping +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def update_rule( + http_session: requests.AuthorizedSession, + proj_region: str, + proj_id: str, + proj_instance: str, + rule_id: str, + rule_file_path: str, +) -> Mapping[str, Any]: + """Updates a rule. + + Args: + http_session: Authorized session for HTTP requests. + proj_region: region in which the target project is located + proj_id: GCP project id or number which the target instance belongs to + proj_instance: uuid of the instance (with dashes) + rule_id: Unique ID of the detection rule to retrieve ("ru_"). + rule_file_path: Content of the new detection rule. + + Returns: + a rule object containing relevant rule's information + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/rules/{rule_id}" + + body = { + "text": rule_file_path.read(), + } + params = {"update_mask": "text"} + + # See API reference links at top of this file, for response format. + response = http_session.request("PATCH", url, params=params, json=body) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + parser.add_argument( + "-rid", + "--rule_id", + type=str, + required=True, + help='ID of the rule to be updated. In the form of "ru_"', + ) + parser.add_argument( + "-f", + "--rule_file", + type=argparse.FileType("r"), + required=True, + help="path of a file with the desired rule's content, or - for STDIN", + ) + args = parser.parse_args() + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + print( + json.dumps( + update_rule( + auth_session, + args.region, + args.project_id, + args.project_instance, + args.rule_id, + args.rule_file, + ), + indent=2, + ) + ) diff --git a/detect/v2/create_rule.py b/detect/v2/create_rule.py index 3f26508..6cc45d7 100755 --- a/detect/v2/create_rule.py +++ b/detect/v2/create_rule.py @@ -68,6 +68,7 @@ def create_rule(http_session: requests.AuthorizedSession, # "compilationState": "SUCCEEDED"/"FAILED", # "compilationError": "", <-- IFF compilation failed. # "archivedTime": "yyyy-mm-ddThh:mm:ss.ssssssZ", <-- IFF archived.
+ # "ruleType": "MULTI_EVENT"/"SINGLE_EVENT", # } if response.status_code >= 400: diff --git a/detect/v2/create_rule_version.py b/detect/v2/create_rule_version.py index 3605b4c..0436c3c 100755 --- a/detect/v2/create_rule_version.py +++ b/detect/v2/create_rule_version.py @@ -69,6 +69,7 @@ def create_rule_version(http_session: requests.AuthorizedSession, rule_id: str, # "compilationState": "SUCCEEDED"/"FAILED", # "compilationError": "", <-- IFF compilation failed. # "archivedTime": "yyyy-mm-ddThh:mm:ss.ssssssZ", <-- IFF archived. + # "ruleType": "MULTI_EVENT"/"SINGLE_EVENT", # } if response.status_code >= 400: diff --git a/detect/v2/get_rule.py b/detect/v2/get_rule.py index eb0ab57..9feac3a 100755 --- a/detect/v2/get_rule.py +++ b/detect/v2/get_rule.py @@ -70,6 +70,7 @@ def get_rule(http_session: requests.AuthorizedSession, # "compilationState": "SUCCEEDED"/"FAILED", # "compilationError": "", <-- IFF compilation failed. # "archivedTime": "yyyy-mm-ddThh:mm:ss.ssssssZ", <-- IFF archived. + # "ruleType": "MULTI_EVENT"/"SINGLE_EVENT", # } if response.status_code >= 400: diff --git a/detect/v2/list_curated_rule_detections.py b/detect/v2/list_curated_rule_detections.py new file mode 100644 index 0000000..0bc5787 --- /dev/null +++ b/detect/v2/list_curated_rule_detections.py @@ -0,0 +1,244 @@ +#!/usr/bin/env python3 + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for listing curated rule detections. 
+ +API reference: +https://cloud.google.com/chronicle/docs/reference/detection-engine-api#listcuratedruledetections +""" + +import argparse +import datetime +import json +import sys +from typing import Any, Mapping, Optional, Sequence, Tuple + +from google.auth.transport import requests + +from common import chronicle_auth +from common import datetime_converter +from common import regions + +_chronicle_api_base_url = "https://backstory.googleapis.com" + + +def initialize_command_line_args( + args: Optional[Sequence[str]] = None) -> Optional[argparse.Namespace]: + """Initializes and checks all the command-line arguments.""" + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-ri", + "--rule_id", + type=str, + required=True, + help=("rule ID of the curated rule to list detections for " + "('ur_xxx')")) + parser.add_argument( + "-a", + "--alert_state", + type=str, + required=False, + help="alert state (i.e. 'ALERTING', 'NOT_ALERTING')") + parser.add_argument( + "-st", + "--start_time", + type=datetime_converter.iso8601_datetime_utc, + required=False, + help="detection start time in UTC ('yyyy-mm-ddThh:mm:ssZ')") + parser.add_argument( + "-et", + "--end_time", + type=datetime_converter.iso8601_datetime_utc, + required=False, + help="detection end time in UTC ('yyyy-mm-ddThh:mm:ssZ')") + parser.add_argument( + "-lb", + "--list_basis", + type=str, + required=False, + help="list basis (i.e. 'DETECTION_TIME', 'CREATED_TIME')") + parser.add_argument( + "-s", + "--page_size", + type=int, + required=False, + help="maximum number of rules to return") + parser.add_argument( + "-t", + "--page_token", + type=str, + required=False, + help="page token from a previous ListCuratedRuleDetections call used for pagination" + ) + + # Sanity checks for the command-line arguments. 
+ parsed_args = parser.parse_args(args) + + s, e = parsed_args.start_time, parsed_args.end_time + if s is not None and s > datetime.datetime.now().astimezone( + datetime.timezone.utc): + print("Error: start time should not be in the future") + return None + if s is not None and e is not None and s >= e: + print("Error: start time should not be same as or later than the end time") + return None + if parsed_args.alert_state not in (None, "ALERTING", "NOT_ALERTING"): + print( + "Error: alert_state should be one of ALERTING, NOT_ALERTING, or empty") + return None + if parsed_args.list_basis not in (None, "DETECTION_TIME", "CREATED_TIME"): + print( + "Error: list_basis should be one of DETECTION_TIME, CREATED_TIME, or empty" + ) + return None + + return parsed_args + + +def list_curated_rule_detections( + http_session: requests.AuthorizedSession, + rule_id: str, + alert_state: str = "", + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + list_basis: str = "", + page_size: int = 0, + page_token: str = "") -> Tuple[Sequence[Mapping[str, Any]], str]: + """Retrieves all the detections of the specified curated rule by rule_id. + + Args: + http_session: Authorized session for HTTP requests. + rule_id: Unique ID of the curated rule to list detections for (i.e. ur_xxx). + alert_state: A string that filters which detections are returned, based on + their AlertState: "ALERTING" or "NOT_ALERTING" (default = no filtering). + start_time: The time to start listing detections from, inclusive (default = + no min detection_start_time). + end_time: The time to end listing detections to, exclusive (default = no max + detection_end_time). + list_basis: A string that determines whether start_time and end_time refer + to the detection time (DETECTION_TIME) or creation time (CREATED_TIME) of + the detection results (default = filter by detection time). + page_size: Maximum number of detections in the response. 
Must be + non-negative, and is capped at a server-side limit of 1000. Optional - we + use a server-side default of 100 if the size is 0 or a None value. + page_token: Base64-encoded string token to retrieve a specific page of + results. Optional - we retrieve the first page if the token is an empty + string or a None value. + + Returns: + All the detections (within the defined page) ordered by descending + detection_time, as well as a Base64 token for getting the detections of the + next page (an empty token string means the currently retrieved page is the + last one). + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{_chronicle_api_base_url}/v2/detect/curatedRules/{rule_id}/detections" + params_list = [ + ("alert_state", alert_state), + ("start_time", datetime_converter.strftime(start_time)), + ("end_time", datetime_converter.strftime(end_time)), + ("list_basis", list_basis), + ("page_size", page_size), + ("page_token", page_token), + ] + + params = {k: v for k, v in params_list if v} + + response = http_session.request("GET", url, params=params) + # Expected server response: + # { + # "curatedRuleDetections": [ + # { + # "id": "de_", + # "type": "GCTI_FINDING", + # "createdTime": "yyyy-mm-ddThh:mm:ssZ", + # "lastUpdatedTime": "yyyy-mm-ddThh:mm:ssZ", + # "detectionTime": "yyyy-mm-ddThh:mm:ssZ", + # "tags": [ + # "TA###", + # "T###", + # ... additional MITRE tactics and techniques + # ], + # "timeWindow": { + # "startTime": "yyyy-mm-ddThh:mm:ssZ", + # "endTime": "yyyy-mm-ddThh:mm:ssZ", + # }, + # "collectionElements": [ + # { + # "label": "e1", + # "references": [ + # { + # "event": ... + # }, + # ... + # ], + # }, + # { + # "label": "e2", + # ... + # }, + # ... 
+ # ], + # "detection": [ + # { + # "ruleId": "ur_xxx", + # "ruleName": "", + # "summary": "Rule Detection", + # "description": "", + # "urlBackToProduct": "", + # "alertState": "ALERTING"/"NOT_ALERTING", + # "ruleType": "SINGLE_EVENT"/"MULTI_EVENT", + # "severity": "INFO"/"LOW"/"HIGH", + # "ruleLabels": [ + # { + # "key": "", + # "value": "" + # } + # ] + # "ruleSet": "", + # "ruleSetDisplayName": "", + # }, + # ], + # }, + # ... + # ], + # "nextPageToken": "" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + j = response.json() + return j.get("curatedRuleDetections", []), j.get("nextPageToken", "") + + +if __name__ == "__main__": + cli = initialize_command_line_args() + if not cli: + sys.exit(1) # A sanity check failed. + + _chronicle_api_base_url = regions.url(_chronicle_api_base_url, cli.region) + session = chronicle_auth.initialize_http_session(cli.credentials_file) + detections, next_page_token = list_curated_rule_detections( + session, cli.rule_id, cli.alert_state, cli.start_time, cli.end_time, + cli.list_basis, cli.page_size, cli.page_token) + print(json.dumps(detections, indent=2)) + print(f"Next page token: {next_page_token}") diff --git a/detect/v2/list_curated_rule_detections_test.py b/detect/v2/list_curated_rule_detections_test.py new file mode 100644 index 0000000..8f07128 --- /dev/null +++ b/detect/v2/list_curated_rule_detections_test.py @@ -0,0 +1,139 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "list_curated_rule_detections" module.""" + +import datetime +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import list_curated_rule_detections + + +class ListCuratedRuleDetectionsTest(unittest.TestCase): + + def test_initialize_command_line_args_rule_id(self): + actual = list_curated_rule_detections.initialize_command_line_args([ + "--rule_id=ur_sample_rule", + ]) + self.assertIsNotNone(actual) + + def test_initialize_command_line_args_with_valid_parameters(self): + actual = list_curated_rule_detections.initialize_command_line_args([ + "--rule_id=ur_sample_rule", + "--start_time=2023-01-04T00:00:00", + "--end_time=2023-01-05T00:00:00", + "--page_size=1000", + "--alert_state=ALERTING", + "--list_basis=CREATED_TIME", + ]) + self.assertIsNotNone(actual) + + def test_initialize_command_line_args_future_start_time(self): + start_time = datetime.datetime.now().astimezone( + datetime.timezone.utc) + datetime.timedelta(hours=1) + end_time = start_time + datetime.timedelta(hours=1) + actual = list_curated_rule_detections.initialize_command_line_args([ + "--rule_id=ur_sample_rule", + start_time.strftime("-st=%Y-%m-%dT%H:%M:%SZ"), + end_time.strftime("-et=%Y-%m-%dT%H:%M:%SZ"), + ]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_end_time_before_start_time(self): + actual = list_curated_rule_detections.initialize_command_line_args([ + "--rule_id=ur_sample_rule", + "--start_time=2023-01-05T00:00:00", + "--end_time=2023-01-04T00:00:00", + ]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_invalid_alert_state(self): + actual = list_curated_rule_detections.initialize_command_line_args( + ["--rule_id=ur_sample_rule", "--alert_state=ALERT"]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_invalid_list_basis(self): + actual = 
list_curated_rule_detections.initialize_command_line_args( + ["--rule_id=ur_sample_rule", "--list_basis=COMMIT_TIME"]) + self.assertIsNone(actual) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + list_curated_rule_detections.list_curated_rule_detections( + mock_session, "") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path_without_page_size(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + rule_id = "ur_sample_rule" + expected_detection = { + "id": + "de_12345678-1234-1234-1234-1234567890ab", + "type": + "GCTI_FINDING", + "createdTime": + "2023-01-01T12:00:00Z", + "detectionTime": + "2023-01-01T01:00:00Z", + "tags": [ + "TA0043", + "T1595.001", + ], + "timeWindow": { + "startTime": "2023-01-01T00:00:00Z", + "endTime": "2023-01-01T01:00:00Z", + }, + "detection": [{ + "ruleId": "ur_sample_rule", + "ruleName": "Sample Rule", + "summary": "Sample Rule Summary", + "description": "Sample Rule Description", + "urlBackToProduct": "https://chronicle.security", + "alertState": "ALERTING", + "ruleType": "MULTI_EVENT", + "detectionFields": [{ + "key": "fieldName", + "value": "fieldValue", + }], + "ruleSet": "87654321-4321-4321-4321-ba0987654321", + "ruleSetDisplayName": "Rule Set Display Name" + }], + } + expected_page_token = "page token here" + mock_response.json.return_value = { + "curatedRuleDetections": [expected_detection], + 
"nextPageToken": expected_page_token, + } + + detections, next_page_token = list_curated_rule_detections.list_curated_rule_detections( + mock_session, rule_id) + self.assertEqual(len(detections), 1) + self.assertEqual(detections[0], expected_detection) + self.assertEqual(next_page_token, expected_page_token) + + +if __name__ == "__main__": + unittest.main() diff --git a/detect/v2/list_curated_rules.py b/detect/v2/list_curated_rules.py new file mode 100644 index 0000000..6000b50 --- /dev/null +++ b/detect/v2/list_curated_rules.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for listing curated rules. + +API reference: +https://cloud.google.com/chronicle/docs/reference/detection-engine-api#listcuratedrules +""" + +import argparse +import json +from typing import Any, Mapping, Sequence, Tuple + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +_chronicle_api_base_url = "https://backstory.googleapis.com" + + +def list_curated_rules( + http_session: requests.AuthorizedSession, + page_size: int = 0, + page_token: str = "") -> Tuple[Sequence[Mapping[str, Any]], str]: + """List curated rules. + + Args: + http_session: Authorized session for HTTP requests. + page_size: Maximum number of rules to return. Must be non-negative, and is + capped at a server-side limit of 1000. 
Optional - a server-side default of + 100 is used if the size is 0 or a None value. + page_token: Page token from a previous ListCuratedRules call used for + pagination. Optional - the first page is retrieved if the token is the + empty string or a None value. + + Returns: + List of curated rules and a page token for the next page of rules, if there + are any. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{_chronicle_api_base_url}/v2/detect/curatedRules" + params_list = [("page_size", page_size), ("page_token", page_token)] + params = {k: v for k, v in params_list if v} + + response = http_session.request("GET", url, params=params) + # Expected server response: + # { + # "curatedRules": [ + # { + # "ruleId": "ur_xxx", + # "ruleName": "", + # "metadata": { <-- IFF there is additional metadata + # "": "", + # "": "", + # ... + # }, + # "severity": "Info"/"Low"/"High", + # "ruleType": "MULTI_EVENT"/"SINGLE_EVENT", + # "precision": "PRECISE"/"BROAD", + # "tactics": [ + # "TA####" + # ], + # "techniques": [ + # "T####" + # ], + # "updateTime": "yyyy-mm-ddThh:mm:ss.ssssssZ", + # "ruleSet": "", + # "description": "", + # }, + # ... 
+ # ], + # "nextPageToken": "" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + j = response.json() + return j.get("curatedRules", []), j.get("nextPageToken", "") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-s", + "--page_size", + type=int, + required=False, + help="maximum number of rules to return") + parser.add_argument( + "-t", + "--page_token", + type=str, + required=False, + help="page token from a previous ListCuratedRules call used for pagination" + ) + + args = parser.parse_args() + _chronicle_api_base_url = regions.url(_chronicle_api_base_url, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + curated_rules, next_page_token = list_curated_rules(session, args.page_size, + args.page_token) + print(json.dumps(curated_rules, indent=2)) + print(f"Next page token: {next_page_token}") diff --git a/detect/v2/list_curated_rules_and_detections.py b/detect/v2/list_curated_rules_and_detections.py new file mode 100644 index 0000000..a645b1b --- /dev/null +++ b/detect/v2/list_curated_rules_and_detections.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for listing curated rules followed by a sample of their detections. 
def list_curated_rules_and_detections(
    http_session: requests.AuthorizedSession,
    page_size: int = 10,
    sleep_seconds: int = _DEFAULT_SLEEP_SECONDS
) -> List[Tuple[str, Sequence[Mapping[str, Any]], str]]:
  """Fetches every curated rule, then the first detections page for each rule.

  Args:
    http_session: Authorized session for HTTP requests.
    page_size: Maximum number of detections to retrieve in the first page of
      detections per curated rule. Defaults to 10 if not specified.
    sleep_seconds: Pause between successive ListCuratedRuleDetections calls so
      the QPM quota is not exceeded. Defaults to 6 seconds if not specified.

  Returns:
    One (curated rule ID, detections, next-page token) tuple per curated rule.
    The detections are at most page_size entries ordered by descending
    detection_time; an empty token string means the retrieved page was the
    last one.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  # Drain every page of ListCuratedRules into one flat list of rules.
  curated_rules = []
  next_token = ""
  while True:
    rules_page, next_token = list_curated_rules.list_curated_rules(
        http_session, page_token=next_token)
    curated_rules.extend(rules_page)
    if not next_token:
      break

  # Fetch the first detections page for each rule, sleeping between calls
  # to stay within the rate limit.
  results = []
  for rule in curated_rules:
    rule_id = rule["ruleId"]
    detections, detections_token = (
        list_curated_rule_detections.list_curated_rule_detections(
            http_session, rule_id, page_size=page_size))
    results.append((rule_id, detections, detections_token))
    print(
        f"Received {len(detections)} detection(s) for rule {rule_id} with next_page_token {detections_token}"
    )
    time.sleep(sleep_seconds)

  return results


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  parser.add_argument(
      "-s",
      "--page_size",
      type=int,
      required=False,
      help="maximum number of detections to return in the first page per curated rule"
  )

  args = parser.parse_args()
  # Rewrite the base URL so it targets the regional endpoint the user chose.
  _chronicle_api_base_url = regions.url(_chronicle_api_base_url, args.region)
  session = chronicle_auth.initialize_http_session(args.credentials_file)
  responses = list_curated_rules_and_detections(session, args.page_size)

  for rule_id, detections, next_page_token in responses:
    print("====================================")
    print(
        f"Displaying detections for the first page of detections for rule ID {rule_id}"
    )
    print(json.dumps(detections, indent=2))
    print(f"Next page token: {next_page_token}")
class ListCuratedRulesAndDetectionsTest(unittest.TestCase):
  """Tests for the combined rules-and-detections workflow."""

  @mock.patch("time.sleep", return_value=None)
  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_happy_path(self, mock_response, mock_session, mock_sleep):
    """Both RPCs succeed: one rule with one page of detections comes back."""
    # First 200 answers ListCuratedRules, second answers
    # ListCuratedRuleDetections.
    type(mock_response).status_code = mock.PropertyMock(side_effect=[200, 200])
    mock_response.raise_for_status.side_effect = [None, None]

    rule_id = "ur_sample_rule"
    # Payload returned for ListCuratedRules.
    sample_rule = {
        "ruleId": rule_id,
        "ruleName": "Sample Rule",
        "severity": "Info",
        "ruleType": "SINGLE_EVENT",
        "precision": "PRECISE",
        "tactics": ["TA0042"],
        "techniques": ["T1595.001"],
        "updateTime": "2023-01-01T00:00:00Z",
        "ruleSet": "87654321-4321-4321-4321-ba0987654321",
        "description": "Sample Rule Description",
    }

    # Payload returned for ListCuratedRuleDetections.
    sample_detection = {
        "id": "de_12345678-1234-1234-1234-1234567890ab",
        "type": "GCTI_FINDING",
        "createdTime": "2023-01-01T12:00:00Z",
        "detectionTime": "2023-01-01T01:00:00Z",
        "tags": ["TA0043", "T1595.001"],
        "timeWindow": {
            "startTime": "2023-01-01T00:00:00Z",
            "endTime": "2023-01-01T01:00:00Z",
        },
        "detection": [{
            "ruleId": "ur_sample_rule",
            "ruleName": "Sample Rule",
            "summary": "Sample Rule Summary",
            "description": "Sample Rule Description",
            "urlBackToProduct": "https://chronicle.security",
            "alertState": "ALERTING",
            "ruleType": "MULTI_EVENT",
            "detectionFields": [{
                "key": "fieldName",
                "value": "fieldValue",
            }],
            "ruleSet": "87654321-4321-4321-4321-ba0987654321",
            "ruleSetDisplayName": "Rule Set Display Name",
        }],
    }
    next_token = "page token here"
    mock_response.json.side_effect = [
        {"curatedRules": [sample_rule]},
        {
            "curatedRuleDetections": [sample_detection],
            "nextPageToken": next_token,
        },
    ]
    mock_session.request.return_value = mock_response

    responses = (
        list_curated_rules_and_detections.list_curated_rules_and_detections(
            mock_session, page_size=1))
    # Exactly one 3-tuple should come back for the single rule.
    self.assertEqual(len(responses), 1)
    got_rule_id, got_detections, got_token = responses[0]
    self.assertEqual(got_rule_id, rule_id)
    self.assertEqual(got_detections, [sample_detection])
    self.assertEqual(got_token, next_token)
    # One ListCuratedRuleDetections call implies exactly one sleep.
    self.assertEqual(mock_sleep.call_count, 1)

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_list_curated_rules_error(self, mock_response, mock_session):
    """A failure in ListCuratedRules aborts the whole workflow early."""
    type(mock_response).status_code = mock.PropertyMock(side_effect=[400])
    mock_response.raise_for_status.side_effect = [
        requests.requests.exceptions.HTTPError()
    ]
    mock_response.json.side_effect = [None]
    mock_session.request.return_value = mock_response

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      list_curated_rules_and_detections.list_curated_rules_and_detections(
          mock_session)

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_list_curated_rule_detections_error(self, mock_response,
                                              mock_session):
    """ListCuratedRules succeeds but ListCuratedRuleDetections fails."""
    type(mock_response).status_code = mock.PropertyMock(side_effect=[200, 400])
    mock_response.raise_for_status.side_effect = [
        None, requests.requests.exceptions.HTTPError()
    ]

    # Payload returned for the (successful) ListCuratedRules call.
    sample_rule = {
        "ruleId": "ur_sample_rule",
        "ruleName": "Sample Rule",
        "severity": "Info",
        "ruleType": "SINGLE_EVENT",
        "precision": "PRECISE",
        "tactics": ["TA0042"],
        "techniques": ["T1595.001"],
        "updateTime": "2023-01-01T00:00:00Z",
        "ruleSet": "87654321-4321-4321-4321-ba0987654321",
        "description": "Sample Rule Description",
    }
    mock_response.json.side_effect = [{"curatedRules": [sample_rule]}, None]
    mock_session.request.return_value = mock_response

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      list_curated_rules_and_detections.list_curated_rules_and_detections(
          mock_session)


if __name__ == "__main__":
  unittest.main()
class ListCuratedRulesTest(unittest.TestCase):
  """Tests for the ListCuratedRules sample wrapper."""

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_http_error(self, mock_response, mock_session):
    """An HTTP 400 response surfaces as HTTPError to the caller."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=400)
    mock_response.raise_for_status.side_effect = (
        requests.requests.exceptions.HTTPError())

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      list_curated_rules.list_curated_rules(mock_session)

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_happy_path_without_page_size(self, mock_response, mock_session):
    """A 200 response yields the rule list plus the pagination token."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    sample_rule = {
        "ruleId": "ur_sample_rule",
        "ruleName": "Sample Rule",
        "severity": "Info",
        "ruleType": "SINGLE_EVENT",
        "precision": "PRECISE",
        "tactics": ["TA0042"],
        "techniques": ["T1595.001"],
        "updateTime": "2023-01-01T00:00:00Z",
        "ruleSet": "87654321-4321-4321-4321-ba0987654321",
        "description": "Sample Rule Description",
    }
    token = "page token here"
    mock_response.json.return_value = {
        "curatedRules": [sample_rule],
        "nextPageToken": token,
    }

    rules, next_page_token = list_curated_rules.list_curated_rules(mock_session)
    self.assertEqual(len(rules), 1)
    self.assertEqual(rules[0], sample_rule)
    self.assertEqual(next_page_token, token)


if __name__ == "__main__":
  unittest.main()
The original error " + "message is as follows: {}".format( repr(e)), } } @@ -182,15 +184,18 @@ def callback_slack_webhook(detection_batch: DetectionBatch): for detection in detections: # detection["detection"] is always a list that has one element. meta = detection["detection"][0] - detection_metadatas.append( - tuple((meta["ruleName"], meta["ruleId"], meta["ruleVersion"]))) + # ruleVersion is only populated for RULE_DETECTION type detections. + rule_info = tuple((meta["ruleName"], meta["ruleId"], meta["ruleVersion"] + )) if detection["type"] == "RULE_DETECTION" else tuple( + (meta["ruleName"], meta["ruleId"])) + detection_metadatas.append(rule_info) for detection_metadata, count in collections.Counter( detection_metadatas).items(): - report_lines.append( - f"\t{count} detections from Rule `{detection_metadata[0]}`" + - f" (Rule ID `{detection_metadata[1]}`," + - f" Version ID `{detection_metadata[2]}`)") + line = f"\t{count} detections from Rule `{detection_metadata[0]}`" + f" (Rule ID `{detection_metadata[1]}`," + if len(detection_metadata) >= 3: + line = line + f" Version ID `{detection_metadata[2]}`)" + report_lines.append(line) if batch_size > MAX_BATCH_SIZE_TO_REPORT_IN_DETAIL: # Avoid flooding our output channels. @@ -207,8 +212,8 @@ def callback_slack_webhook(detection_batch: DetectionBatch): for idx, detection in enumerate(detections): report_lines.append(f"{idx})") - # This for loop includes rule name, rule ID, version ID, - # rule type, and fields. + # This for loop includes rule name, rule ID, rule type, rule version, + # rule set and other fields. 
for meta_key, meta_value in detection["detection"][0].items(): report_lines.append(f"\t{meta_key}: {meta_value}") report_lines.append(f"\tTime Window: {detection['timeWindow']}") @@ -298,7 +303,7 @@ def stream_detection_alerts( The contents of a detection follow this format: { "id": "de_", - "type": "RULE_DETECTION", + "type": "RULE_DETECTION"/"GCTI_FINDING", "createdTime": "yyyy-mm-ddThh:mm:ssZ", "detectionTime": "yyyy-mm-ddThh:mm:ssZ", "timeWindow": { @@ -323,8 +328,9 @@ def stream_detection_alerts( ], "detection": [ <-- this is always a list that has one element. { - "ruleId": "ru_", + "ruleId": "ru_"/"ur_ruleID", "ruleName": "", + // ruleVersion is only populated for RULE_DETECTION type detections. "ruleVersion": "ru_@v__", "urlBackToProduct": "", "alertState": "ALERTING"/"NOT_ALERTING", @@ -334,9 +340,20 @@ def stream_detection_alerts( "key": "", "value": "" } - ] + ], + // Following fields are only populated for "GCTI_FINDING" type + // detections. + "summary": "Rule Detection", + "ruleSet": "", + "ruleSetDisplayName": "", + "description": "", + "severity": "INFORMATIONAL"/"LOW"/"HIGH" }, ], + // Following fields are only populated for "GCTI_FINDING" type + // detections. + "lastUpdatedTime": "yyyy-mm-ddThh:mm:ssZ", + "tags": ["", "", ...] } Args: @@ -358,11 +375,17 @@ def stream_detection_alerts( disconnection_reason = "" continuation_time = "" - # Heartbeats are sent by the server, approximately every 15s. We impose a - # client-side timeout; if more than 60s pass between messages from the - # server, the client cancels connection (then retries). + # Heartbeats are sent by the server, approximately every 15s. Even if + # no new detections are being produced, the server sends empty + # batches. + # We impose a client-side timeout of 300s (5 mins) between messages from the + # server. We expect the server to send messages much more frequently due + # to the heratbeats though; this timeout should never be hit, and serves + # as a safety measure. 
+ # If no messages are received after this timeout, the client cancels + # connection (then retries). with http_session.post( - url, stream=True, data=req_data, timeout=60) as response: + url, stream=True, data=req_data, timeout=300) as response: # Expected server response is a continuous stream of # bytes that represent a never-ending JSON array. The parsing # is handed by parse_stream. See docstring above for diff --git a/detect/v2/stream_detection_alerts_test.py b/detect/v2/stream_detection_alerts_test.py index d49e5c8..abb9a3b 100644 --- a/detect/v2/stream_detection_alerts_test.py +++ b/detect/v2/stream_detection_alerts_test.py @@ -169,6 +169,50 @@ def tests_happy_path(self, mock_session, mock_init_session, mock_sleep): }], } + mock_uppercase_detection_template = { + "id": + "PLACEHOLDER", # To be replaced with unique ID. + "type": + "GCTI_FINDING", + "createdTime": + "2020-11-05T12:00:00Z", + "detectionTime": + "2020-11-05T01:00:00Z", + "timeWindow": { + "startTime": "2020-11-05T00:00:00Z", + "endTime": "2020-11-05T01:00:00Z", + }, + "lastUpdatedTime": "2020-11-05T12:00:00Z", + "tags": ["TA0005", "TA0003", "T1098.004"], + "detection": [{ + "ruleId": + "ur_ttp_GCP__GlobalSSHKeys_Added", + "ruleName": + "GCP Global SSH Keys", + "urlBackToProduct": + "https://chronicle.security", + "alertState": + "ALERTING", + "ruleType": + "SINGLE_EVENT", + "detectionFields": [{ + "key": "fieldName", + "value": "fieldValue", + }], + "summary": + "Rule Detection", + "ruleSet": + "11c505d4-b424-65e3-d918-1a81232cc76b", + "ruleSetDisplayName": + "Admin Action", + "description": + "Identifies instances of project-wide SSH keys being added " + "where there were previously none.", + "severity": + "LOW" + }], + } + # Prepare string representations of detection batches that can # passed to callback functions. 
mock_detections = [] @@ -176,6 +220,11 @@ def tests_happy_path(self, mock_session, mock_init_session, mock_sleep): mock_detection = mock_detection_template.copy() mock_detection["id"] = str(i) # Not a valid ID format, just for tests. mock_detections.append(mock_detection) + mock_uppercase_detections = [] + for i in range(5): + mock_detection = mock_uppercase_detection_template.copy() + mock_detection["id"] = str(i+7) + mock_uppercase_detections.append(mock_detection) mock_detection_batches = [ # Normal stream responses, which will all be passed to the callback. @@ -184,10 +233,14 @@ def tests_happy_path(self, mock_session, mock_init_session, mock_sleep): tuple(([], "2020-12-06T22:39:55.633014925Z")), tuple(([mock_detections[:3]], "2020-12-07T22:39:55.633014925Z")), tuple(([], "2020-12-08T22:39:55.633014925Z")), - tuple(([mock_detections[3:4]], "2020-12-09T22:39:55.633014925Z")), + tuple(([mock_detections[3:4], mock_uppercase_detections[0:3]], + "2020-12-09T22:39:55.633014925Z")), tuple(([], "2020-12-10T22:39:55.633014925Z")), tuple(([mock_detections[4:]], "2020-12-11T22:39:55.633014925Z")), tuple(([], "2020-12-12T22:39:55.633014925Z")), + tuple(([mock_uppercase_detections[3:]], + "2020-12-12T22:39:55.633014925Z")), + tuple(([], "2020-12-13T22:39:55.633014925Z")), ] # Serialize detection batches into dumps that will be sent as incremental diff --git a/detect/v2/stream_test_rule.py b/detect/v2/stream_test_rule.py index ecc6e31..c1b5070 100644 --- a/detect/v2/stream_test_rule.py +++ b/detect/v2/stream_test_rule.py @@ -102,10 +102,12 @@ def parse_stream( # so the client may report the error. yield { "error": { - "code": 500, + "code": 503, "status": "UNAVAILABLE", - "message": "exception caught while reading " - "stream response: {}".format(repr(e)), + "message": "exception caught while reading stream response. This " + "python client is catching all errors and is returning " + "error code 503 as a catch-all. 
The original error " + "message is as follows: {}".format(repr(e)), } } diff --git a/detect/v2/stream_test_rule_test.py b/detect/v2/stream_test_rule_test.py index 9a97c46..d0ecde6 100644 --- a/detect/v2/stream_test_rule_test.py +++ b/detect/v2/stream_test_rule_test.py @@ -104,9 +104,9 @@ def test_connection_failure_error(self, mock_session): # This error should not be considered a rule execution error, # and should stop any further stream processing. mock_stream_error = { - "code": 500, + "code": 503, "status": "UNAVAILABLE", - "message": "exception caught while reading stream response" + "message": "exception caught while reading..." } mock_detections = [] diff --git a/feeds/create_azure_ad_context_feed.py b/feeds/create_azure_ad_context_feed.py index 5e22baa..40adbdc 100644 --- a/feeds/create_azure_ad_context_feed.py +++ b/feeds/create_azure_ad_context_feed.py @@ -32,18 +32,20 @@ def create_azure_ad_context_feed(http_session: requests.AuthorizedSession, - tokenendpoint: str, clientid: str, + tenantid: str, clientid: str, clientsecret: str, retrievedevices: bool, - retrievegroups: bool) -> Mapping[str, Any]: + retrievegroups: bool, displayname: str + ) -> Mapping[str, Any]: """Creates a new Azure AD Context feed. Args: http_session: Authorized session for HTTP requests. - tokenendpoint: A string which represents endpoint to connect to. + tenantid: A string which represents the tenant id. clientid: A string which represents Id of the credential to use. clientsecret: A string which represents secret of the credential to use. retrievedevices: A boolean to indicate whether to retrieve devices or not. retrievegroups: A boolean to indicate whether to retrieve groups or not. + displayname: A string which describes the feed. Returns: New Azure AD Feed. 
@@ -59,33 +61,35 @@ def create_azure_ad_context_feed(http_session: requests.AuthorizedSession, "logType": "AZURE_AD_CONTEXT", "azureAdContextSettings": { "authentication": { - "tokenEndpoint": tokenendpoint, "clientId": clientid, "clientSecret": clientsecret }, "retrieveDevices": retrievedevices, - "retrieveGroups": retrievegroups + "retrieveGroups": retrievegroups, + "tenantId": tenantid, } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/e0eb5fb0-8fbd-4f0f-b063-710943ad7812", + # "display_name": "my feed name", # "details": { # "logType": "AZURE_AD_CONTEXT", # "feedSourceType": "API", # "azureAdContextSettings": { # "authentication": { - # "tokenEndpoint": "tokenendpoint.example.com", # "clientId": "clientid_example", # "clientSecret": "clientsecret_example" # }, - # "retrieveDevices": true + # "retrieveDevices": true, + # "tenantId": "0fc279f9-fe30-41be-97d3-abe1d7681418" # } # }, - # "feedState": "PENDING_ENABLEMENT" + # "feedState": "ACTIVE" # } if response.status_code >= 400: @@ -99,23 +103,11 @@ def create_azure_ad_context_feed(http_session: requests.AuthorizedSession, chronicle_auth.add_argument_credentials_file(parser) regions.add_argument_region(parser) parser.add_argument( - "-te", - "--tokenendpoint", - type=str, - required=True, - help="token endpoint") + "-ti", "--tenantid", type=str, required=True, help="tenant id") parser.add_argument( - "-ci", - "--clientid", - type=str, - required=True, - help="client id") + "-ci", "--clientid", type=str, required=True, help="client id") parser.add_argument( - "-cs", - "--clientsecret", - type=str, - required=True, - help="client secret") + "-cs", "--clientsecret", type=str, required=True, help="client secret") parser.add_argument( "-rd", "--retrievedevices", @@ -128,12 +120,19 @@ def create_azure_ad_context_feed(http_session: requests.AuthorizedSession, type=str, required=True, help="retrieve groups") + 
parser.add_argument( + "-dn", + "--displayname", + type=str, + required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) - new_feed = create_azure_ad_context_feed(session, args.tokenendpoint, - args.clientid, args.clientsecret, + new_feed = create_azure_ad_context_feed(session, args.tenantid, args.clientid, + args.clientsecret, args.retrievedevices, - args.retrievegroups) + args.retrievegroups, + args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_azure_ad_context_feed_test.py b/feeds/create_azure_ad_context_feed_test.py index 84aa9cd..44b045e 100644 --- a/feeds/create_azure_ad_context_feed_test.py +++ b/feeds/create_azure_ad_context_feed_test.py @@ -35,7 +35,7 @@ def test_http_error(self, mock_response, mock_session): with self.assertRaises(requests.requests.exceptions.HTTPError): create_azure_ad_context_feed.create_azure_ad_context_feed( mock_session, "tokenendpoint.example.com", "clientid_example", - "clientsecret_example", False, False) + "clientsecret_example", False, False, "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -44,6 +44,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf49ebc5-e7bf-4562-8061-cab43cecba35", + "display_name": "my feed name", "details": { "logType": "AZURE_AD", "feedSourceType": "API", @@ -64,7 +65,7 @@ def test_happy_path(self, mock_response, mock_session): actual_feed = create_azure_ad_context_feed.create_azure_ad_context_feed( mock_session, "tokenendpoint.example.com", "clientid_example", - "clientsecret_example", False, False) + "clientsecret_example", False, False, "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git 
a/feeds/create_azure_ad_feed.py b/feeds/create_azure_ad_feed.py index e1b6a02..a3a9b66 100644 --- a/feeds/create_azure_ad_feed.py +++ b/feeds/create_azure_ad_feed.py @@ -32,15 +32,17 @@ def create_azure_ad_feed(http_session: requests.AuthorizedSession, - tokenendpoint: str, clientid: str, - clientsecret: str) -> Mapping[str, Any]: + tenantid: str, clientid: str, + clientsecret: str, displayname: str + ) -> Mapping[str, Any]: """Creates a new Azure AD feed. Args: http_session: Authorized session for HTTP requests. - tokenendpoint: A string which represents endpoint to connect to. + tenantid: A string which represents the tenant id. clientid: A string which represents Id of the credential to use. clientsecret: A string which represents secret of the credential to use. + displayname: A string which describes the feed. Returns: New Azure AD Feed. @@ -56,30 +58,32 @@ def create_azure_ad_feed(http_session: requests.AuthorizedSession, "logType": "AZURE_AD", "azureAdSettings": { "authentication": { - "tokenEndpoint": tokenendpoint, "clientId": clientid, "clientSecret": clientsecret }, + "tenantId": tenantid, } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/cf49ebc5-e7bf-4562-8061-cab43cecba35", + # "display_name": "my feed name", # "details": { # "logType": "AZURE_AD", # "feedSourceType": "API", # "azureAdSettings": { # "authentication": { - # "tokenEndpoint": "tokenendpoint.example.com", # "clientId": "clientid_example", # "clientSecret": "clientsecret_example" - # } + # }, + # "tenantId": "0fc279f9-fe30-41be-97d3-abe1d7681418" # } # }, - # "feedState": "PENDING_ENABLEMENT" + # "feedState": "ACTIVE" # } if response.status_code >= 400: @@ -93,27 +97,18 @@ def create_azure_ad_feed(http_session: requests.AuthorizedSession, chronicle_auth.add_argument_credentials_file(parser) regions.add_argument_region(parser) parser.add_argument( - "-te", - "--tokenendpoint", - type=str, - 
required=True, - help="token endpoint") + "-ti", "--tenantid", type=str, required=True, help="tenant id") parser.add_argument( - "-ci", - "--clientid", - type=str, - required=True, - help="client id") + "-ci", "--clientid", type=str, required=True, help="client id") parser.add_argument( - "-cs", - "--clientsecret", - type=str, - required=True, - help="client secret") + "-cs", "--clientsecret", type=str, required=True, help="client secret") + parser.add_argument( + "-dn", "--displayname", type=str, required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) - new_feed = create_azure_ad_feed(session, args.tokenendpoint, args.clientid, - args.clientsecret) + new_feed = create_azure_ad_feed(session, args.tenantid, args.clientid, + args.clientsecret, args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_azure_ad_feed_test.py b/feeds/create_azure_ad_feed_test.py index a4c1f35..2d1d532 100644 --- a/feeds/create_azure_ad_feed_test.py +++ b/feeds/create_azure_ad_feed_test.py @@ -36,7 +36,8 @@ def test_http_error(self, mock_response, mock_session): create_azure_ad_feed.create_azure_ad_feed(mock_session, "tokenendpoint.example.com", "clientid_example", - "clientsecret_example") + "clientsecret_example", + "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -45,6 +46,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf49ebc5-e7bf-4562-8061-cab43cecba35", + "display_name": "my feed name", "details": { "logType": "AZURE_AD", "feedSourceType": "API", @@ -63,7 +65,7 @@ def test_happy_path(self, mock_response, mock_session): actual_feed = create_azure_ad_feed.create_azure_ad_feed( mock_session, 
"tokenendpoint.example.com", "clientid_example", - "clientsecret_example") + "clientsecret_example", "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git a/feeds/create_okta_feed.py b/feeds/create_okta_feed.py index f6e06f6..f9b75b3 100644 --- a/feeds/create_okta_feed.py +++ b/feeds/create_okta_feed.py @@ -32,13 +32,15 @@ def create_okta_feed(http_session: requests.AuthorizedSession, - secret: str, hostname: str) -> Mapping[str, Any]: + secret: str, hostname: str, displayname: str + ) -> Mapping[str, Any]: """Creates a new Okta feed. Args: http_session: Authorized session for HTTP requests. secret: A string which represents Okta auth user's secret. hostname: A string which represents hostname to connect to. + displayname: A string which describes the feed. Returns: New Okta Feed. @@ -61,13 +63,15 @@ def create_okta_feed(http_session: requests.AuthorizedSession, }, "hostname": hostname } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/7921585c-b0b5-490a-a8bd-ff011d7011a5", + # "display_name": "my feed name", # "details": { # "logType": "OKTA", # "feedSourceType": "API", @@ -108,9 +112,16 @@ def create_okta_feed(http_session: requests.AuthorizedSession, type=str, required=True, help="hostname") + parser.add_argument( + "-dn", + "--displayname", + type=str, + required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) - new_feed = create_okta_feed(session, args.secret, args.hostname) + new_feed = create_okta_feed(session, args.secret, args.hostname, + args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_okta_feed_test.py b/feeds/create_okta_feed_test.py index a008cb0..4471176 100644 --- a/feeds/create_okta_feed_test.py +++ b/feeds/create_okta_feed_test.py @@ -34,7 +34,7 @@ 
def test_http_error(self, mock_response, mock_session): with self.assertRaises(requests.requests.exceptions.HTTPError): create_okta_feed.create_okta_feed(mock_session, "secret_example", - "hostname.example.com") + "hostname.example.com", "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -43,6 +43,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf49ebc5-e7bf-4562-8061-cab43cecba35", + "display_name": "my feed name", "details": { "logType": "OKTA", "feedSourceType": "API", @@ -61,7 +62,7 @@ def test_happy_path(self, mock_response, mock_session): mock_response.json.return_value = expected_feed actual_feed = create_okta_feed.create_okta_feed( - mock_session, "secret_example", "hostname.example.com") + mock_session, "secret_example", "hostname.example.com", "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git a/feeds/create_okta_user_context_feed.py b/feeds/create_okta_user_context_feed.py index 632fbec..4c87a50 100644 --- a/feeds/create_okta_user_context_feed.py +++ b/feeds/create_okta_user_context_feed.py @@ -33,13 +33,15 @@ def create_okta_user_context_feed(http_session: requests.AuthorizedSession, secret: str, - hostname: str) -> Mapping[str, Any]: + hostname: str, + displayname: str) -> Mapping[str, Any]: """Creates a new Okta User Context feed. Args: http_session: Authorized session for HTTP requests. secret: A string which represents Okta auth user's secret. hostname: A string which represents hostname to connect to. + displayname: A string which represents customer-provided feed name. Returns: New Okta Feed. 
@@ -62,13 +64,15 @@ def create_okta_user_context_feed(http_session: requests.AuthorizedSession, }, "hostname": hostname } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/7c420442-6b73-439e-ae8b-563618b8fc71", + # "display_name": "my feed name", # "details": { # "logType": "OKTA_USER_CONTEXT", # "feedSourceType": "API", @@ -109,9 +113,16 @@ def create_okta_user_context_feed(http_session: requests.AuthorizedSession, type=str, required=True, help="hostname") + parser.add_argument( + "-dn", + "--displayname", + type=str, + required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) - new_feed = create_okta_user_context_feed(session, args.secret, args.hostname) + new_feed = create_okta_user_context_feed(session, args.secret, args.hostname, + args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_okta_user_context_feed_test.py b/feeds/create_okta_user_context_feed_test.py index 7e25f92..2860958 100644 --- a/feeds/create_okta_user_context_feed_test.py +++ b/feeds/create_okta_user_context_feed_test.py @@ -34,7 +34,8 @@ def test_http_error(self, mock_response, mock_session): with self.assertRaises(requests.requests.exceptions.HTTPError): create_okta_user_context_feed.create_okta_user_context_feed( - mock_session, "secret_example", "hostname.example.com") + mock_session, "secret_example", "hostname.example.com", + "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -43,6 +44,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf49ebc5-e7bf-4562-8061-cab43cecba35", + "display_name": "my feed name", 
"details": { "logType": "OKTA_USER_CONTEXT", "feedSourceType": "API", @@ -61,7 +63,7 @@ def test_happy_path(self, mock_response, mock_session): mock_response.json.return_value = expected_feed actual_feed = create_okta_user_context_feed.create_okta_user_context_feed( - mock_session, "secret_example", "hostname.example.com") + mock_session, "secret_example", "hostname.example.com", "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git a/feeds/create_workspace_activity_feed.py b/feeds/create_workspace_activity_feed.py index f0abac6..3e2266d 100644 --- a/feeds/create_workspace_activity_feed.py +++ b/feeds/create_workspace_activity_feed.py @@ -35,7 +35,8 @@ def create_workspace_activity_feed(http_session: requests.AuthorizedSession, tokenendpoint: str, issuer: str, subject: str, audience: str, privatekey: str, workspacecustomerid: str, - applications: str) -> Mapping[str, Any]: + applications: str, displayname: str + ) -> Mapping[str, Any]: """Creates a new Workspace Activity feed. Args: @@ -49,6 +50,7 @@ def create_workspace_activity_feed(http_session: requests.AuthorizedSession, http://phpseclib.sourceforge.net/rsa/examples.html workspacecustomerid: A string which represents workspace customer id. applications: A string which represents list of applications to be allowed. + displayname: A string which represents customer-provided feed name. Returns: New Workspace Activity Feed. 
@@ -79,13 +81,15 @@ def create_workspace_activity_feed(http_session: requests.AuthorizedSession, applications ] } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/cf91de35-1256-48f5-8a36-9503e532b879", + # "display_name": "my feed name", # "details": { # "logType": "WORKSPACE_ACTIVITY", # "feedSourceType": "API", @@ -162,6 +166,12 @@ def create_workspace_activity_feed(http_session: requests.AuthorizedSession, type=str, required=True, help="applications") + parser.add_argument( + "-dn", + "--displayname", + type=str, + required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) @@ -169,5 +179,5 @@ def create_workspace_activity_feed(http_session: requests.AuthorizedSession, new_feed = create_workspace_activity_feed( session, args.tokenendpoint, args.claimsissuer, args.claimssubject, args.claimsaudience, args.credentialsprivatekey, args.workspacecustomerid, - args.applications) + args.applications, args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_workspace_activity_feed_test.py b/feeds/create_workspace_activity_feed_test.py index 93aadad..7408803 100644 --- a/feeds/create_workspace_activity_feed_test.py +++ b/feeds/create_workspace_activity_feed_test.py @@ -36,7 +36,7 @@ def test_http_error(self, mock_response, mock_session): create_workspace_activity_feed.create_workspace_activity_feed( mock_session, "hostname.example.com", "issuer_example", "subject_example", "audience_example", "privatekey_example", - "customerid_example", "applications_example") + "customerid_example", "applications_example", "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -45,6 +45,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = 
mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf91de35-1256-48f5-8a36-9503e532b879", + "display_name": "my feed name", "details": { "logType": "WORKSPACE_ACTIVITY", "feedSourceType": "API", @@ -72,7 +73,7 @@ def test_happy_path(self, mock_response, mock_session): actual_feed = create_workspace_activity_feed.create_workspace_activity_feed( mock_session, "hostname.example.com", "issuer_example", "subject_example", "audience_example", "privatekey_example", - "customerid_example", "applications_example") + "customerid_example", "applications_example", "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git a/feeds/create_workspace_alerts_feed.py b/feeds/create_workspace_alerts_feed.py index 730e0d1..28cea63 100644 --- a/feeds/create_workspace_alerts_feed.py +++ b/feeds/create_workspace_alerts_feed.py @@ -34,7 +34,8 @@ def create_workspace_alerts_feed(http_session: requests.AuthorizedSession, tokenendpoint: str, issuer: str, subject: str, audience: str, privatekey: str, - workspacecustomerid: str) -> Mapping[str, Any]: + workspacecustomerid: str, displayname: str + ) -> Mapping[str, Any]: """Creates a new Workspace Alerts feed. Args: @@ -47,6 +48,7 @@ def create_workspace_alerts_feed(http_session: requests.AuthorizedSession, note private key should have new line characters in it, sample at: http://phpseclib.sourceforge.net/rsa/examples.html workspacecustomerid: A string which represents workspace customer id. + displayname: A string which represents customer-provided feed name. Returns: New Workspace Alerts Feed. 
@@ -74,13 +76,15 @@ def create_workspace_alerts_feed(http_session: requests.AuthorizedSession, }, "workspaceCustomerId": workspacecustomerid.lstrip("C"), } - } + }, + "display_name": displayname, } response = http_session.request("POST", url, json=body) # Expected server response: # { # "name": "feeds/cf91de35-1256-48f5-8a36-9503e532b879", + # "display_name": "my feed name", # "details": { # "logType": "WORKSPACE_ACTIVITY", # "feedSourceType": "API", @@ -148,6 +152,12 @@ def create_workspace_alerts_feed(http_session: requests.AuthorizedSession, type=str, required=True, help="workspace customer id") + parser.add_argument( + "-dn", + "--displayname", + type=str, + required=False, + help="display name") args = parser.parse_args() CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) @@ -156,5 +166,6 @@ def create_workspace_alerts_feed(http_session: requests.AuthorizedSession, args.claimsissuer, args.claimssubject, args.claimsaudience, args.credentialsprivatekey, - args.workspacecustomerid) + args.workspacecustomerid, + args.displayname) print(json.dumps(new_feed, indent=2)) diff --git a/feeds/create_workspace_alerts_feed_test.py b/feeds/create_workspace_alerts_feed_test.py index d03f06e..37e7046 100644 --- a/feeds/create_workspace_alerts_feed_test.py +++ b/feeds/create_workspace_alerts_feed_test.py @@ -36,7 +36,7 @@ def test_http_error(self, mock_response, mock_session): create_workspace_alerts_feed.create_workspace_alerts_feed( mock_session, "hostname.example.com", "issuer_example", "subject_example", "audience_example", "privatekey_example", - "Ccustomerid_example") + "Ccustomerid_example", "my feed name") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -45,6 +45,7 @@ def test_happy_path(self, mock_response, mock_session): type(mock_response).status_code = mock.PropertyMock(return_value=200) expected_feed = { "name": "feeds/cf91de35-1256-48f5-8a36-9503e532b879", 
+ "display_name": "my feed name", "details": { "logType": "WORKSPACE_ALERTS", "feedSourceType": "API", @@ -71,7 +72,7 @@ def test_happy_path(self, mock_response, mock_session): actual_feed = create_workspace_alerts_feed.create_workspace_alerts_feed( mock_session, "hostname.example.com", "issuer_example", "subject_example", "audience_example", "privatekey_example", - "customerid_example") + "customerid_example", "my feed name") self.assertEqual(actual_feed, expected_feed) diff --git a/feeds/disable_feed.py b/feeds/disable_feed.py new file mode 100644 index 0000000..a748c53 --- /dev/null +++ b/feeds/disable_feed.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for disabling a feed.""" + +import argparse + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def disable_feed(http_session: requests.AuthorizedSession, name: str): + """Disable a specific feed. + + Args: + http_session: Authorized session for HTTP requests. + name: Unique name for the feed. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v1/feeds/{name}:disable" + + response = http_session.request("POST", url) + # Expected server response: + # {} + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", "--name", type=str, required=True, help="unique name for the feed") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + disable_feed(session, args.name) diff --git a/feeds/disable_feed_test.py b/feeds/disable_feed_test.py new file mode 100644 index 0000000..e8c1128 --- /dev/null +++ b/feeds/disable_feed_test.py @@ -0,0 +1,48 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "disable_feed" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . 
import disable_feed + + +class DeleteFeedTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + disable_feed.disable_feed(mock_session, "feed name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + + disable_feed.disable_feed(mock_session, "feed name") + + +if __name__ == "__main__": + unittest.main() diff --git a/feeds/enable_feed.py b/feeds/enable_feed.py new file mode 100644 index 0000000..598f58e --- /dev/null +++ b/feeds/enable_feed.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Executable and reusable sample for enabling a feed.""" + +import argparse + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def enable_feed(http_session: requests.AuthorizedSession, name: str): + """Enable a specific feed. + + Args: + http_session: Authorized session for HTTP requests. + name: Unique name for the feed. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v1/feeds/{name}:enable" + + response = http_session.request("POST", url) + # Expected server response: + # {} + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", "--name", type=str, required=True, help="unique name for the feed") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + enable_feed(session, args.name) diff --git a/feeds/enable_feed_test.py b/feeds/enable_feed_test.py new file mode 100644 index 0000000..42563f1 --- /dev/null +++ b/feeds/enable_feed_test.py @@ -0,0 +1,48 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "enable_feed" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import enable_feed + + +class DeleteFeedTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + enable_feed.enable_feed(mock_session, "feed name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + + enable_feed.enable_feed(mock_session, "feed name") + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/README.md b/forwarders/README.md new file mode 100644 index 0000000..fe69678 --- /dev/null +++ b/forwarders/README.md @@ -0,0 +1,528 @@ +# Forwarder Management APIs + +Forwarders are used to ingest security telemetry into a Chronicle instance. While typically found in on-premise environments, they can be deployed almost anywhere that a Docker container can. For more information regarding the installation and hardware requirements, please see [this installation guide](https://cloud.google.com/chronicle/docs/install/forwarder-linux). + +## Overview + +At a high-level, a Forwarder is composed of one or more Collectors. Each Collector has its own **ingestion mechanism** (e.g. 
File, Kafka, PCAP, Splunk, Syslog) and ingests data for a specific **log type**. + +Assuming hardware requirements are met, there may be many Collectors on the same Forwarder to ingest data from a variety of mechanisms and log types. For example, a Forwarder with two syslog Collectors listening for PAN_FIREWALL and CISCO_ASA_FIREWALL data on separate ports, respectively. + +## Forwarder API Samples + +A Forwarder **must** be created first, and one or more Collectors can be created on that Forwarder. Optionally, configuration settings for Metadata and Regex Filters **may** be configured at the Forwarder level and apply to all of a Forwarder's Collectors. These may be optionally overridden at the Collector level, within the Collector's configuration. + +Additionally, default settings for Forwarders and Collectors **may** be set depending on the configuration provided. + +### Create Forwarder + +Creates a new Forwarder on the Chronicle instance using the configuration specified in the request body. + +```shell +$ python -m forwarders.create_forwarder -h +usage: create_forwarder.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) +``` +**Example**: Creating a forwarder with the minimum required configuration (`display_name`). By default, the **upload_compression** option will be set. +```shell +$ python -m forwarders.create_forwarder +{ + "name": "forwarders/928b3c1e-1430-4511-892d-2202206b4d8c", + "displayName": "TestForwarder", + "config": { + "uploadCompression": true + }, + "state": "ACTIVE" +} +``` + +### Get Forwarder + +The format of a Forwarder's **name** is `forwarders/{UUID}`. 
+ +```shell +$ python -m forwarders.get_forwarder -h +usage: get_forwarder.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME unique name for the forwarder +``` + +**Example**: Retrieving the configuration for `forwarders/928b3c1e-1430-4511-892d-2202206b4d8c`. To retrieve the configuration for the Forwarder's corresponding collectors, the `ListCollectors` API must be used (see below). + +```shell +$ python -m forwarders.get_forwarder -n forwarders/928b3c1e-1430-4511-892d-2202206b4d8c +{ + "name": "forwarders/928b3c1e-1430-4511-892d-2202206b4d8c", + "displayName": "TestForwarder", + "config": { + "uploadCompression": true + }, + "state": "ACTIVE" +} +``` + +### List Forwarders + +Retrieves all Forwarders for the Chronicle instance. + +```shell +$ python -m forwarders.list_forwarders -h +usage: list_forwarders.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) +``` +Example: Retrieving two Forwarders. 
+```shell +$ python -m forwarders.list_forwarders +{ + "forwarders": [ + { + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b", + "displayName": "TestForwarder2", + "config": { + "uploadCompression": true, + "metadata": { + "assetNamespace": "FORWARDER", + "labels": [ + { + "key": "office", + "value": "corporate" + }, + { + "key": "building", + "value": "001" + } + ] + }, + "regexFilters": [ + { + "description": "TestFilter", + "regexp": ".*", + "behavior": "ALLOW" + } + ], + "serverSettings": { + "gracefulTimeout": 15, + "drainTimeout": 10, + "httpSettings": { + "port": 8080, + "host": "0.0.0.0", + "readTimeout": 3, + "readHeaderTimeout": 3, + "writeTimeout": 3, + "idleTimeout": 3, + "routeSettings": { + "availableStatusCode": 204, + "readyStatusCode": 204, + "unreadyStatusCode": 503 + } + }, + "state": "ACTIVE" + } + }, + "state": "ACTIVE" + }, + { + "name": "forwarders/928b3c1e-1430-4511-892d-2202206b4d8c", + "displayName": "TestForwarder", + "config": { + "uploadCompression": true + }, + "state": "ACTIVE" + } + ] +} +``` + +### Update Forwarder + +An update mask **must** be provided as a request parameter, not in the request body, and indicates which Forwarder fields to update. When updating a list, the entire list will be replaced. In order to append an item to a list, the entire new list must be provided. + +```shell +$ python -m forwarders.update_forwarder -h +usage: update_forwarder.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME unique name for the forwarder +``` + +**Example**: Updating the display name and adding a metadata label to a Forwarder. 
+```shell +$ python -m forwarders.create_forwarder +{ + "name": "forwarders/fd9ef30f-79f7-4acb-adfc-32650f6b4c83", + "displayName": "TestForwarder", + "config": { + "uploadCompression": true + }, + "state": "ACTIVE" +} + +$ python -m forwarders.update_forwarder -n forwarders/fd9ef30f-79f7-4acb-adfc-32650f6b4c83 +{ + "name": "forwarders/fd9ef30f-79f7-4acb-adfc-32650f6b4c83", + "displayName": "UpdatedForwarder", + "config": { + "uploadCompression": true, + "metadata": { + "labels": [ + { + "key": "office", + "value": "corporate" + } + ] + } + }, + "state": "ACTIVE" +} +``` + +### Delete Forwarder + +After successfully deleting a Forwarder, the response message is expected to be **empty**. + +```shell +$ python -m forwarders.delete_forwarder -h +usage: delete_forwarder.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME unique name for the forwarder +``` +Example: Deleting forwarder with name `forwarders/928b3c1e-1430-4511-892d-2202206b4d8c`. +```shell +$ python -m forwarders.delete_forwarder -n forwarders/928b3c1e-1430-4511-892d-2202206b4d8c +{} +``` + +## Collector Commands + +A Forwarder must exist **before** a Collector can be created. 
+ +### Create Collector + +```shell +$ python -m forwarders.create_collector -h +usage: create_collector.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -f FORWARDER + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -f FORWARDER, --forwarder FORWARDER + name of the forwarder on which to add the collector +``` + +**Example**: Creating a syslog Collector which uses TCP to listen on port 10514 on Forwarder (`forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b`). + +```shell +$ python -m forwarders.create_collector -f forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b +{ + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/5d346ec1-ece1-44c6-94bc-04681e5d9d8a", + "displayName": "SyslogCollector1", + "config": { + "logType": "PAN_FIREWALL", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "syslogSettings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + "bufferSize": "65536", + "connectionTimeout": 60 + } + }, + "state": "ACTIVE" +} +``` + +### Get Collector + +The format of a Collector's **name** is `forwarders/{forwarderUUID}/collectors/{collectorUUID}`. 
+ +```shell +$ python -m forwarders.get_collector -h +usage: get_collector.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME unique name for the collector +``` + +**Example**: Retrieving the configuration for Collector with name `forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/5d346ec1-ece1-44c6-94bc-04681e5d9d8a`. + +```shell +$ python -m forwarders.get_collector -n forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/5d346ec1-ece1-44c6-94bc-04681e5d9d8a +{ + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/5d346ec1-ece1-44c6-94bc-04681e5d9d8a", + "displayName": "SyslogCollector1", + "config": { + "logType": "PAN_FIREWALL", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "syslogSettings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + "bufferSize": "65536", + "connectionTimeout": 60 + } + }, + "state": "ACTIVE" +} +``` + +### List Collectors + +Retrieves all Collectors belonging to the specified Forwarder. 
+ +```shell +$ python -m forwarders.list_collectors -h +usage: list_collectors.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -f FORWARDER + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -f FORWARDER, --forwarder FORWARDER + unique name for the forwarder +``` + +Example: Retrieving all Collectors associated with Forwarder (`forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b`) + +```shell +$ python -m forwarders.list_collectors -f forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b +{ + "collectors": [ + { + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/52d658fc-2d51-4a8a-8986-425195a28ffb", + "displayName": "SplunkCollector", + "config": { + "logType": "WINDOWS_DNS", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "splunkSettings": { + "host": "127.0.0.1", + "minimumWindowSize": 10, + "maximumWindowSize": 30, + "queryString": "search index=* sourcetype=dns", + "queryMode": "realtime", + "port": 8089 + } + }, + "state": "ACTIVE" + }, + { + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/5d346ec1-ece1-44c6-94bc-04681e5d9d8a", + "displayName": "SyslogCollector1", + "config": { + "logType": "PAN_FIREWALL", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "syslogSettings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + "bufferSize": "65536", + "connectionTimeout": 60 + } + }, + "state": "ACTIVE" + } + ] +} +``` + +### Update Collector + +An update mask **must** be provided as a request parameter, not in the request body, and indicates which Collector fields to update. When updating a list, the entire list will be replaced. In order to append an item to a list, the entire new list must be provided. 
+ +```shell +$ python -m forwarders.update_collector -h +usage: update_collector.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) +``` + +**Example**: Creating and then updating a file Collector's `display_name`, `log_type`, and `file_path`. + +```shell +$ python -m forwarders.create_collector -f forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b +{ + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/f6da4e72-52e1-4a41-8979-17d5cabd78c5", + "displayName": "FileCollector", + "config": { + "logType": "WINDOWS_DNS", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "fileSettings": { + "filePath": "/path/to/log.file" + } + }, + "state": "ACTIVE" +} + +$ python -m forwarders.update_collector -n forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/f6da4e72-52e1-4a41-8979-17d5cabd78c5 +{ + "name": "forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/f6da4e72-52e1-4a41-8979-17d5cabd78c5", + "displayName": "UpdatedCollector", + "config": { + "logType": "WINDOWS_DNS", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "fileSettings": { + "filePath": "/new/path/to/file.txt" + } + }, + "state": "ACTIVE" +} +``` + +### Delete Collector + +The format of a Collector's **name** is `forwarders/{forwarderUUID}/collectors/{collectorUUID}`. 
+ +```shell +python -m forwarders.delete_collector -h +usage: delete_collector.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME unique name for the collector +``` + +**Example**: Deleting a Collector with name `forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/f6da4e72-52e1-4a41-8979-17d5cabd78c5`. + +```shell +$ python -m forwarders.delete_collector -n forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b/collectors/f6da4e72-52e1-4a41-8979-17d5cabd78c5 +{} +``` + +## Generating Configuration Files + +To generate a Forwarder's configuration files, at least one Collector **must** exist. By default, this command will print the file contents to the terminal, though the `-o` option may be provided to write the configuration and auth files to `forwarder.conf` and `forwarder_auth.conf`, respectively. These files will need to be transferred to the Forwarder's host and the Forwarder must be restarted for changes to take effect. + +Note: The files **must not** be modified. Any changes should be applied using the Update methods above, and then the configuration may be re-generated. 
+ +```shell +$ python -m forwarders.generate_files -h +usage: generate_files.py [-h] [-c CREDENTIALS_FILE] [-r {asia-southeast1,europe,us}] -n NAME [-o OUTPUT] + +optional arguments: + -h, --help show this help message and exit + -c CREDENTIALS_FILE, --credentials_file CREDENTIALS_FILE + credentials file path (default: '~/.chronicle_credentials.json') + -r {asia-southeast1,europe,us}, --region {asia-southeast1,europe,us} + the region where the customer is located (default: us) + -n NAME, --name NAME name of the forwarder + -o OUTPUT, --output OUTPUT + Writes configuration files to the specified output directory. +``` + +**Example**: Printing the file contents to the terminal. + +```shell +$ python -m forwarders.generate_files -v -n forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b +forwarder.conf: + output: + compression: true + url: test-malachiteingestion-pa.sandbox.googleapis.com:443 + identity: + collector_id: 86372ddf-3736-42e8-a78e-aee1a6e3517b + customer_id: c2966ae6-d4c3-4c3b-a315-e672b3a0d498 +regex_filters: + TestFilter: + regexp: .* + behavior_on_match: allow +metadata: + labels: + building: "001" + office: corporate + namespace: FORWARDER +collectors: +- splunk: + common: + enabled: true + data_type: WINDOWS_DNS + batch_n_seconds: 10 + batch_n_bytes: 1048576 + url: 127.0.0.1:8089 + minimum_window_size: 10 + maximum_window_size: 30 + query_string: search index=* sourcetype=dns + query_mode: realtime +- syslog: + common: + enabled: true + data_type: PAN_FIREWALL + batch_n_seconds: 10 + batch_n_bytes: 1048576 + tcp_address: 0.0.0.0:10514 + tcp_buffer_size: 65536 + connection_timeout_sec: 60 + +forwarder_auth.conf: + output: + identity: + secret_key: +collectors: +- splunk: + auth: true + username: admin + password: pass +- syslog: + auth: true +``` + +**Example**: Writing the configuration files to the `~/Downloads` directory. 
+ +```shell +$ python -m forwarders.generate_files -n forwarders/86372ddf-3736-42e8-a78e-aee1a6e3517b -o ~/Downloads + +$ ll ~/Downloads +-rw-r--r-- 1 user primarygroup 1.3K Oct 31 15:08 forwarder.conf +-rw-r--r-- 1 user primarygroup 2.5K Oct 31 15:08 forwarder_auth.conf +``` \ No newline at end of file diff --git a/forwarders/__init__.py b/forwarders/__init__.py new file mode 100644 index 0000000..b00563f --- /dev/null +++ b/forwarders/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/forwarders/create_collector.py b/forwarders/create_collector.py new file mode 100644 index 0000000..13d6864 --- /dev/null +++ b/forwarders/create_collector.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable sample for creating a collector. + +Creating other collectors requires changing this sample code. 
+""" + +import argparse +import json +from typing import Any, Mapping + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def create_collector(http_session: requests.AuthorizedSession, + forwarder_name: str) -> Mapping[str, Any]: + """Creates a collector on an existing forwarder. + + Args: + http_session: Authorized session for HTTP requests. + forwarder_name: Resource name for the Forwarder (forwarders/{UUID}). + + Returns: + Collector + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{forwarder_name}/collectors" + + body = { + "display_name": "SyslogCollector", + "config": { + "log_type": "PAN_FIREWALL", + "syslog_settings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + } + } + } + + # # Example of a File collector: + # body = { + # "display_name": "FileCollector", + # "config": { + # "log_type": "WINDOWS_DNS", + # "file_settings": { + # "file_path": "/path/to/log.file" + # } + # } + # } + +# # Example of a Splunk collector: + # body = { + # "display_name": "SplunkCollector", + # "config": { + # "log_type": "WINDOWS_DNS", + # "splunk_settings": { + # "host": "127.0.0.1", + # "port": 8089, + # "query_string": "search index=* sourcetype=dns", + # "query_mode": "realtime", + # "authentication": { + # "username": "admin", + # "password": "pass", + # } + # } + # } + # } + + response = http_session.request("POST", url, json=body) + # Example server response for Syslog collector: + # { + # "name": "forwarders/{forwarderUUID}/collectors/{collectorUUID}", + # "displayName": "TestCollector1", + # "config": { + # "logType": "PAN_FIREWALL", + # "maxSecondsPerBatch": 10, + # "maxBytesPerBatch": "1048576", + # "syslogSettings": { + # "protocol": "TCP", + # "address": "0.0.0.0", + # "port": 10514, + # "bufferSize": "65536", + # 
"connectionTimeout": 60 + # } + # }, + # "state": "ACTIVE" + # } + + # Example server response for Splunk collector: + # { + # "name": "forwarders/{forwarderUUID}/collectors/{collectorUUID}", + # "displayName": "SplunkCollector", + # "config": { + # "logType": "WINDOWS_DNS", + # "maxSecondsPerBatch": 10, + # "maxBytesPerBatch": "1048576", + # "splunkSettings": { + # "host": "127.0.0.1", + # "minimumWindowSize": 10, + # "maximumWindowSize": 30, + # "queryString": "search index=* sourcetype=dns", + # "queryMode": "realtime", + # "port": 8089 + # } + # }, + # "state": "ACTIVE" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-f", + "--forwarder_name", + type=str, + required=True, + help="resource name for the Forwarder (forwarders/{UUID})") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + new_forwarder = create_collector(session, args.forwarder_name) + print(json.dumps(new_forwarder, indent=2)) diff --git a/forwarders/create_collector_test.py b/forwarders/create_collector_test.py new file mode 100644 index 0000000..109c8ec --- /dev/null +++ b/forwarders/create_collector_test.py @@ -0,0 +1,69 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "create_collector" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import create_collector + + +class CreateCollectorTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + create_collector.create_collector(mock_session, "forwarder name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_collector = { + "name": "forwarders/uuid/collectors/uuid", + "displayName": "TestCollector1", + "config": { + "logType": "PAN_FIREWALL", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "syslogSettings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + "bufferSize": "65536", + "connectionTimeout": 60 + } + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_collector + + actual_collector = create_collector.create_collector( + mock_session, "forwarder name") + self.assertEqual(actual_collector, expected_collector) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/create_forwarder.py b/forwarders/create_forwarder.py new file mode 100644 index 0000000..039a16f --- /dev/null +++ 
b/forwarders/create_forwarder.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable sample for creating a Chronicle forwarder. + +Creating other forwarders requires changing this sample code. +""" + +import argparse +import json +from typing import Any, Mapping + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def create_forwarder( + http_session: requests.AuthorizedSession) -> Mapping[str, Any]: + """Creates a new Chronicle forwarder. + + Args: + http_session: Authorized session for HTTP requests. + + Returns: + Newly created Chronicle Forwarder. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v2/forwarders/" + + # The minimal configuration requires only a name. + body = { + "display_name": "TestForwarder", + } + + # Example of a more advanced Forwarder configuration. 
+ # body = { + # "display_name": "TestForwarder2", + # "config": { + # "upload_compression": True, + # "metadata": { + # "asset_namespace": "FORWARDER", + # "labels": [ + # { + # "key": "office", + # "value": "corporate", + # }, + # { + # "key": "building", + # "value": "001" + # } + # ] + # }, + # "regex_filters": [ + # { + # "description": "TestFilter", + # "regexp": ".*", # Must be valid RE2 syntax + # "behavior": "ALLOW", # Allowed values: ALLOW, BLOCK + # }, + # ], + # "server_settings": { + # "state": "ACTIVE", # Allowed values: ACTIVE, SUSPENDED + # } + # } + # } + + response = http_session.request("POST", url, json=body) + # Expected server response: + # { + # "name": "forwarders/{forwarderUUID}", + # "displayName": "TestForwarder", + # "config": { + # "uploadCompression": true + # }, + # "state": "ACTIVE" + # } + + # # Expected server response for advanced configuration: + # { + # "name": "forwarders/{forwarderUUID}", + # "displayName": "TestForwarder2", + # "config": { + # "uploadCompression": true, + # "metadata": { + # "assetNamespace": "FORWARDER", + # "labels": [ + # { + # "key": "office", + # "value": "corporate" + # }, + # { + # "key": "building", + # "value": "001" + # } + # ] + # }, + # "regexFilters": [ + # { + # "description": "TestFilter", + # "regexp": ".*", + # "behavior": "ALLOW" + # } + # ], + # "serverSettings": { + # "gracefulTimeout": 15, + # "drainTimeout": 10, + # "httpSettings": { + # "port": 8080, + # "host": "0.0.0.0", + # "readTimeout": 3, + # "readHeaderTimeout": 3, + # "writeTimeout": 3, + # "idleTimeout": 3, + # "routeSettings": { + # "availableStatusCode": 204, + # "readyStatusCode": 204, + # "unreadyStatusCode": 503 + # } + # }, + # "state": "ACTIVE" + # } + # }, + # "state": "ACTIVE" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + 
regions.add_argument_region(parser) + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + new_forwarder = create_forwarder(session) + print(json.dumps(new_forwarder, indent=2)) diff --git a/forwarders/create_forwarder_test.py b/forwarders/create_forwarder_test.py new file mode 100644 index 0000000..f9fdad4 --- /dev/null +++ b/forwarders/create_forwarder_test.py @@ -0,0 +1,59 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "create_forwarder" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . 
import create_forwarder + + +class CreateForwarderTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + create_forwarder.create_forwarder(mock_session) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_forwarder = { + "name": "forwarders/c37e1d99-f6ba-43da-b591-788261d32cae", + "displayName": "TestForwarder", + "config": { + "uploadCompression": True + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_forwarder + + actual_forwarder = create_forwarder.create_forwarder(mock_session) + self.assertEqual(actual_forwarder, expected_forwarder) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/delete_collector.py b/forwarders/delete_collector.py new file mode 100644 index 0000000..548e44c --- /dev/null +++ b/forwarders/delete_collector.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for deleting a collector.""" + +import argparse + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def delete_collector(http_session: requests.AuthorizedSession, name: str): + """Delete a specific collector. + + Args: + http_session: Authorized session for HTTP requests. + name: Resource name for the Collector (forwarders/{UUID}/collectors/{UUID}). + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{name}" + + response = http_session.request("DELETE", url) + # Expected server response: + # {} + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", + "--name", + type=str, + required=True, + help="resource name for the Collector (collectors/{UUID})") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + delete_collector(session, args.name) diff --git a/forwarders/delete_collector_test.py b/forwarders/delete_collector_test.py new file mode 100644 index 0000000..08a3886 --- /dev/null +++ b/forwarders/delete_collector_test.py @@ -0,0 +1,48 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "delete_collector" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . 
import delete_collector + + +class DeleteCollectorTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + delete_collector.delete_collector(mock_session, "collector name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + + delete_collector.delete_collector(mock_session, "collector name") + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/delete_forwarder.py b/forwarders/delete_forwarder.py new file mode 100644 index 0000000..fa6c7b4 --- /dev/null +++ b/forwarders/delete_forwarder.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Executable and reusable sample for deleting a forwarder.""" + +import argparse + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def delete_forwarder(http_session: requests.AuthorizedSession, name: str): + """Delete a specific forwarder. + + Args: + http_session: Authorized session for HTTP requests. + name: Resource name for the Forwarder (forwarders/{UUID}). + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{name}" + + response = http_session.request("DELETE", url) + # Expected server response: + # {} + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", + "--name", + type=str, + required=True, + help="resource name for the Forwarder (forwarders/{UUID})") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + delete_forwarder(session, args.name) diff --git a/forwarders/delete_forwarder_test.py b/forwarders/delete_forwarder_test.py new file mode 100644 index 0000000..65f4324 --- /dev/null +++ b/forwarders/delete_forwarder_test.py @@ -0,0 +1,48 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "delete_forwarder" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import delete_forwarder + + +class DeleteForwarderTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + delete_forwarder.delete_forwarder(mock_session, "forwarder name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + + delete_forwarder.delete_forwarder(mock_session, "forwarder name") + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/generate_files.py b/forwarders/generate_files.py new file mode 100644 index 0000000..de8d92b --- /dev/null +++ b/forwarders/generate_files.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
 the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for generating a forwarder's configuration files.""" + +import argparse +from typing import Mapping, Any + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def generate_files(http_session: requests.AuthorizedSession, + name: str) -> Mapping[str, Any]: + """Generates configuration files for an existing forwarder. + + Args: + http_session: Authorized session for HTTP requests. + name: Resource name for the Forwarder (forwarders/{UUID}). + + Returns: + Mapping containing the generated "config" and "auth" file contents. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{name}:generateForwarderFiles" + + response = http_session.request("GET", url) + # Expected server response: + # { + # "config": "", + # "auth": "", + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", + "--name", + type=str, + required=True, + help="resource name for the Forwarder (forwarders/{UUID})") + parser.add_argument( + "-o", + "--output", + type=str, + help="Writes configuration files to the specified output directory.") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + res = generate_files(session, args.name) + + output_dir = args.output + if "config" in res: + config = res["config"] + if output_dir: + with open(output_dir + "/forwarder.conf", "w") as f: + f.write(config) + else: + print("forwarder.conf:\n", config) + if "auth" in res: + auth = res["auth"] + if output_dir: + with open(output_dir + "/forwarder_auth.conf", "w") as f: + f.write(auth) + else: + print("forwarder_auth.conf:\n", auth) diff --git a/forwarders/generate_files_test.py b/forwarders/generate_files_test.py new file mode 100644 index 0000000..e9f8b2a --- /dev/null +++ b/forwarders/generate_files_test.py @@ -0,0 +1,55 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "generate_files" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import generate_files + + +class GenerateFilesTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + generate_files.generate_files(mock_session, "forwarder name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_files = { + "config": "CONFIG FILE CONTENTS", + "auth": "AUTH FILE CONTENTS", + } + + mock_response.json.return_value = expected_files + + actual_files = generate_files.generate_files(mock_session, "forwarder name") + self.assertEqual(actual_files, expected_files) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/get_collector.py b/forwarders/get_collector.py new file mode 100644 index 0000000..0d469f6 --- /dev/null +++ b/forwarders/get_collector.py @@ 
 -0,0 +1,88 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for retrieving a collector.""" + +import argparse +import json +from typing import Mapping, Any + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def get_collector(http_session: requests.AuthorizedSession, + name: str) -> Mapping[str, Any]: + """Retrieves a collector. + + Args: + http_session: Authorized session for HTTP requests. + name: Resource name for the Collector (forwarders/{UUID}/collectors/{UUID}). + + Returns: + Collector resource as returned by the server. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{name}" + + response = http_session.request("GET", url) + # Example server response: + # { + # "name": "forwarders/{UUID}/collectors/{UUID}", + # "displayName": "SyslogCollector1", + # "config": { + # "logType": "PAN_FIREWALL", + # "maxSecondsPerBatch": 10, + # "maxBytesPerBatch": "1048576", + # "syslogSettings": { + # "protocol": "TCP", + # "address": "0.0.0.0", + # "port": 10514, + # "bufferSize": "65536", + # "connectionTimeout": 60 + # } + # }, + # "state": "ACTIVE" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", + "--name", + type=str, + required=True, + help="resource name for the Collector (collectors/{UUID})") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + print(json.dumps(get_collector(session, args.name), indent=2)) diff --git a/forwarders/get_collector_test.py b/forwarders/get_collector_test.py new file mode 100644 index 0000000..e9425ea --- /dev/null +++ b/forwarders/get_collector_test.py @@ -0,0 +1,69 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Unit tests for the "get_collector" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import get_collector + + +class GetCollectorTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + get_collector.get_collector(mock_session, "collector name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_collector = { + "name": "forwarders/{UUID}/collectors/{UUID}", + "displayName": "SyslogCollector1", + "config": { + "logType": "PAN_FIREWALL", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "syslogSettings": { + "protocol": "TCP", + "address": "0.0.0.0", + "port": 10514, + "bufferSize": "65536", + "connectionTimeout": 60 + } + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_collector + + actual_collector = get_collector.get_collector(mock_session, + "collector name") + self.assertEqual(actual_collector, expected_collector) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/get_forwarder.py b/forwarders/get_forwarder.py new file mode 100644 index 0000000..418e2db --- /dev/null +++ b/forwarders/get_forwarder.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for retrieving a forwarder.""" + +import argparse +import json +from typing import Mapping, Any + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def get_forwarder(http_session: requests.AuthorizedSession, + name: str) -> Mapping[str, Any]: + """Retrieves a forwarder. + + Args: + http_session: Authorized session for HTTP requests. + name: Resource name for the Forwarder (forwarders/{UUID}). + + Returns: + Array containing each line of the forwarder's content. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + url = f"{CHRONICLE_API_BASE_URL}/v2/{name}" + + response = http_session.request("GET", url) + # Expected server response: + # { + # "name": "forwarders/{forwarderUUID}", + # "displayName": "TestForwarder", + # "config": { + # "uploadCompression": true + # }, + # "state": "ACTIVE" + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", + "--name", + type=str, + required=True, + help="resource name for the Forwarder (forwarders/{UUID})") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + print(json.dumps(get_forwarder(session, args.name), indent=2)) diff --git a/forwarders/get_forwarder_test.py b/forwarders/get_forwarder_test.py new file mode 100644 index 0000000..d53eba3 --- /dev/null +++ b/forwarders/get_forwarder_test.py @@ -0,0 +1,60 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "get_forwarder" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . 
import get_forwarder + + +class GetForwarderTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + get_forwarder.get_forwarder(mock_session, "forwarder name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_forwarder = { + "name": "forwarders/928b3c1e-1430-4511-892d-2202206b4d8c", + "displayName": "TestForwarder", + "config": { + "uploadCompression": True + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_forwarder + + actual_forwarder = get_forwarder.get_forwarder(mock_session, + "forwarder name") + self.assertEqual(actual_forwarder, expected_forwarder) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/list_collectors.py b/forwarders/list_collectors.py new file mode 100644 index 0000000..a98111b --- /dev/null +++ b/forwarders/list_collectors.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
def list_collectors(http_session: requests.AuthorizedSession,
                    name: str) -> Mapping[str, Any]:
  """Retrieves all collectors belonging to one forwarder.

  Note: despite the previous docstring ("for the tenant"), the endpoint is
  scoped to the given forwarder, not the whole tenant.

  Args:
    http_session: Authorized session for HTTP requests.
    name: Resource name for the parent Forwarder (forwarders/{UUID}).

  Returns:
    Dict (parsed JSON) with a "collectors" key listing each collector
    belonging to the forwarder.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  url = f"{CHRONICLE_API_BASE_URL}/v2/{name}/collectors"

  response = http_session.request("GET", url)
  # Example server response (abridged):
  # {
  #   "collectors": [
  #     {"name": "forwarders/{UUID}/collectors/{UUID}",
  #      "displayName": "SplunkCollector",
  #      "config": {"logType": "WINDOWS_DNS", "splunkSettings": {...}},
  #      "state": "ACTIVE"},
  #     {"name": "forwarders/{UUID}/collectors/{UUID}",
  #      "displayName": "SyslogCollector1",
  #      "config": {"logType": "PAN_FIREWALL", "syslogSettings": {...}},
  #      "state": "ACTIVE"}
  #   ]
  # }
  if response.status_code >= 400:
    # Print the error payload before raising, to aid debugging.
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  parser.add_argument(
      "-f",
      "--forwarder_name",
      type=str,
      required=True,
      help="resource name for the Forwarder (forwarders/{UUID})")

  args = parser.parse_args()
  # Regionalize the base URL before building any request URLs.
  CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region)
  session = chronicle_auth.initialize_http_session(args.credentials_file)
  print(json.dumps(list_collectors(session, args.forwarder_name), indent=2))
class ListCollectorsTest(unittest.TestCase):
  """Unit tests for list_collectors.list_collectors."""

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_http_error(self, mock_response, mock_session):
    # A 4xx response must propagate as an HTTPError.
    mock_session.request.return_value = mock_response
    mock_response.status_code = 400
    mock_response.raise_for_status.side_effect = (
        requests.requests.exceptions.HTTPError())

    with self.assertRaises(requests.requests.exceptions.HTTPError):
      list_collectors.list_collectors(mock_session, "forwarder name")

  @mock.patch.object(requests, "AuthorizedSession", autospec=True)
  @mock.patch.object(requests.requests, "Response", autospec=True)
  def test_happy_path(self, mock_response, mock_session):
    # A 200 response is returned to the caller as parsed JSON, unchanged.
    mock_session.request.return_value = mock_response
    mock_response.status_code = 200
    splunk_collector = {
        "name": "forwarders/uuid/collectors/uuid",
        "displayName": "SplunkCollector",
        "config": {
            "logType": "WINDOWS_DNS",
            "maxSecondsPerBatch": 10,
            "maxBytesPerBatch": "1048576",
            "splunkSettings": {
                "host": "127.0.0.1",
                "minimumWindowSize": 10,
                "maximumWindowSize": 30,
                "queryString": "search index=* sourcetype=dns",
                "queryMode": "realtime",
                "port": 8089
            }
        },
        "state": "ACTIVE"
    }
    syslog_collector = {
        "name": "forwarders/uuid/collectors/uuid",
        "displayName": "SyslogCollector",
        "config": {
            "logType": "PAN_FIREWALL",
            "maxSecondsPerBatch": 10,
            "maxBytesPerBatch": "1048576",
            "syslogSettings": {
                "protocol": "TCP",
                "address": "0.0.0.0",
                "port": 10514,
                "bufferSize": "65536",
                "connectionTimeout": 60
            }
        },
        "state": "ACTIVE"
    }
    expected = {"collectors": [splunk_collector, syslog_collector]}
    mock_response.json.return_value = expected

    self.assertEqual(
        list_collectors.list_collectors(mock_session, "forwarder name"),
        expected)


if __name__ == "__main__":
  unittest.main()
+ + Args: + http_session: Authorized session for HTTP requests. + + Returns: + Array containing each forwarder associated with the instance. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v2/forwarders" + + response = http_session.request("GET", url) + # Expected server response: + # { + # "forwarders": [ + # { + # "name": "forwarders/{forwarderUUID}", + # "displayName": "TestForwarder1", + # "config": { + # "uploadCompression": true, + # }, + # "state": "ACTIVE" + # } + # ] + # } + + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + print(json.dumps(list_forwarders(session), indent=2)) diff --git a/forwarders/list_forwarders_test.py b/forwarders/list_forwarders_test.py new file mode 100644 index 0000000..1664092 --- /dev/null +++ b/forwarders/list_forwarders_test.py @@ -0,0 +1,61 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Unit tests for the "list_forwarders" function.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import list_forwarders + + +class ListForwardersTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + list_forwarders.list_forwarders(mock_session) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_forwarders = { + "forwarders": [{ + "name": "forwarders/af2235e5-08d0-45e6-aaf8-98dbfa83a178", + "displayName": "TestForwarder1", + "config": { + "uploadCompression": True, + }, + "state": "ACTIVE" + }] + } + + mock_response.json.return_value = expected_forwarders + + actual_forwarders = list_forwarders.list_forwarders(mock_session) + self.assertEqual(actual_forwarders, expected_forwarders) + + +if __name__ == "__main__": + unittest.main() diff --git a/forwarders/update_collector.py b/forwarders/update_collector.py new file mode 100644 index 0000000..9a433e4 --- /dev/null +++ b/forwarders/update_collector.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
def update_collector(http_session: requests.AuthorizedSession,
                     name: str) -> Mapping[str, Any]:
  """Updates a collector (display name, log type, namespace, protocol).

  The fields to change are hard-coded in this sample; edit `body` and
  `update_fields` to suit your own update.

  Args:
    http_session: Authorized session for HTTP requests.
    name: Resource name for the Collector (collectors/{UUID}).

  Returns:
    The entire collector configuration with update(s) applied, as a dict
    parsed from the JSON server response. (Fixed typo "appled".)

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  url = f"{CHRONICLE_API_BASE_URL}/v2/{name}"

  body = {
      "display_name": "UpdatedCollector",
      "config": {
          "metadata": {
              "asset_namespace": "COLLECTOR",
          },
          "log_type": "CISCO_ASA_FIREWALL",
          "syslog_settings": {
              "protocol": "TCP",
          }
      }
  }

  # update_mask restricts the PATCH to exactly these fields.
  update_fields = [
      "display_name", "config.log_type", "config.metadata.asset_namespace",
      "config.syslog_settings.protocol"
  ]
  params = {"update_mask": ",".join(update_fields)}

  response = http_session.request("PATCH", url, params=params, json=body)
  # Example server response (shape only — values reflect the request above;
  # the previous example showed an unrelated WINDOWS_DHCP/fileSettings
  # collector):
  # {
  #   "name": "forwarders/{forwarderUUID}/collectors/{collectorUUID}",
  #   "displayName": "UpdatedCollector",
  #   "config": {"logType": "CISCO_ASA_FIREWALL",
  #              "syslogSettings": {"protocol": "TCP", ...}},
  #   "state": "ACTIVE"
  # }
  if response.status_code >= 400:
    # Print the error payload before raising, to aid debugging.
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  parser.add_argument(
      "-n",
      "--name",
      type=str,
      required=True,
      help="resource name for the Collector (collectors/{UUID})")

  args = parser.parse_args()
  # Regionalize the base URL before building any request URLs.
  CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region)
  session = chronicle_auth.initialize_http_session(args.credentials_file)
  print(json.dumps(update_collector(session, args.name), indent=2))
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "update_collector" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import update_collector + + +class UpdateCollectorTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + update_collector.update_collector(mock_session, "collector name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_collector = { + "name": "forwarders/{forwarderUUID}/collectors/{collectorUUID}", + "displayName": "UpdatedCollector", + "config": { + "logType": "WINDOWS_DHCP", + "maxSecondsPerBatch": 10, + "maxBytesPerBatch": "1048576", + "fileSettings": { + "filePath": "/new/path/to/file.txt" + } + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_collector + + actual_collector = update_collector.update_collector( + mock_session, "collector name") + 
def update_forwarder(http_session: requests.AuthorizedSession,
                     name: str) -> Mapping[str, Any]:
  """Updates a forwarder (display name and metadata labels).

  The fields to change are hard-coded in this sample; edit `body` and
  `update_fields` to suit your own update.

  Args:
    http_session: Authorized session for HTTP requests.
    name: Resource name for the Forwarder (forwarders/{UUID}).

  Returns:
    The entire forwarder configuration with update(s) applied, as a dict
    parsed from the JSON server response. (Fixed typo "appled".)

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  url = f"{CHRONICLE_API_BASE_URL}/v2/{name}"

  body = {
      "display_name": "UpdatedForwarder",
      "config": {
          "metadata": {
              "labels": [{
                  "key": "office",
                  "value": "corporate",
              }]
          }
      }
  }

  # update_mask restricts the PATCH to exactly these fields.
  update_fields = ["display_name", "config.metadata.labels"]
  params = {"update_mask": ",".join(update_fields)}

  response = http_session.request("PATCH", url, params=params, json=body)
  # Example server response:
  # {
  #   "name": "forwarders/{forwarderUUID}",
  #   "displayName": "UpdatedForwarder",
  #   "config": {
  #     "uploadCompression": true,
  #     "metadata": {"labels": [{"key": "office", "value": "corporate"}]}
  #   },
  #   "state": "ACTIVE"
  # }
  if response.status_code >= 400:
    # Print the error payload before raising, to aid debugging.
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(parser)
  regions.add_argument_region(parser)
  parser.add_argument(
      "-n",
      "--name",
      type=str,
      required=True,
      help="resource name for the Forwarder (forwarders/{UUID})")

  args = parser.parse_args()
  # Regionalize the base URL before building any request URLs.
  CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region)
  session = chronicle_auth.initialize_http_session(args.credentials_file)
  print(json.dumps(update_forwarder(session, args.name), indent=2))
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "update_forwarder" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import update_forwarder + + +class UpdateForwarderTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + update_forwarder.update_forwarder(mock_session, "forwarder name") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_forwarder = { + "name": "forwarders/c37e1d99-f6ba-43da-b591-788261d32cae", + "displayName": "UpdatedForwarder", + "config": { + "uploadCompression": True + }, + "state": "ACTIVE" + } + + mock_response.json.return_value = expected_forwarder + + actual_forwarder = update_forwarder.update_forwarder( + mock_session, "forwarder name") + self.assertEqual(actual_forwarder, expected_forwarder) + + +if __name__ == "__main__": + unittest.main() diff --git 
def create_udm_events(
    http_session: requests.AuthorizedSession, json_events: str
) -> None:
  """Sends a collection of UDM events to Google SecOps for ingestion.

  A Unified Data Model (UDM) event is a structured representation of an
  event regardless of the log source.

  NOTE(review): this function reads the module-level `args` (region,
  project_id, project_instance) parsed under __main__, so it is only
  usable from this script — TODO: accept these values as parameters.

  Args:
    http_session: Authorized session for HTTP requests.
    json_events: A collection of UDM events in (serialized) JSON format —
      a JSON list; every element is ingested.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).

  Requires the following IAM permission on the parent resource:
  chronicle.events.import

  POST https://chronicle.googleapis.com/v1alpha/{parent}/events:import

  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.events/import
  """
  base_url_with_region = regions.url_always_prepend_region(
      CHRONICLE_API_BASE_URL,
      args.region
  )
  parent = (
      f"projects/{args.project_id}/locations/{args.region}"
      f"/instances/{args.project_instance}"
  )
  url = f"{base_url_with_region}/v1alpha/{parent}/events:import"
  # Fix: wrap and send EVERY event from the input list; the previous code
  # silently ingested only the first element (json.loads(...)[0]).
  body = {
      "inline_source": {
          "events": [{"udm": event} for event in json.loads(json_events)]
      }
  }

  response = http_session.request("POST", url, json=body)
  # (Removed stray debug `print(response)`.)
  if response.status_code >= 400:
    # Print the error payload before raising, to aid debugging.
    print(response.text)
  response.raise_for_status()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  # common
  chronicle_auth.add_argument_credentials_file(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)
  regions.add_argument_region(parser)
  # local
  parser.add_argument(
      "--json_events_file",
      type=argparse.FileType("r"),
      required=True,
      help=(
          "path to a file containing a list of UDM events in json format"
      ),
  )
  args = parser.parse_args()

  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES,
  )
  create_udm_events(auth_session, args.json_events_file.read())
#!/usr/bin/env python3

# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=line-too-long
r"""Executable and reusable v1alpha API sample for getting a UDM event by ID.

  API reference:
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.events/get
"""
# pylint: enable=line-too-long

import argparse
import json

from google.auth.transport import requests

from common import chronicle_auth
from common import project_id
from common import project_instance
from common import regions

SCOPES = [
    "https://www.googleapis.com/auth/cloud-platform",
]


def get_udm_event(
    http_session: requests.AuthorizedSession,
    proj_id: str,
    proj_instance: str,
    proj_region: str,
    event_id: str):
  """Fetches a single UDM event, looked up by its metadata.id.

  A Unified Data Model (UDM) event is a structured representation of an event
  regardless of the log source.

  Args:
    http_session: Authorized session for HTTP requests.
    proj_id: GCP project id or number to which the target instance belongs.
    proj_instance: Customer ID (uuid with dashes) for the Chronicle instance.
    proj_region: region in which the target project is located.
    event_id: URL-encoded Base64 for the UDM Event ID.

  Returns:
    dict/json representation of the UDM Event.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).

  Requires the following IAM permission on the parent resource:
  chronicle.events.get

  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.events/get
  """
  # pylint: disable=line-too-long
  instance_path = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}"
  endpoint = f"https://{proj_region}-chronicle.googleapis.com/v1alpha/{instance_path}/events/{event_id}"
  # pylint: enable=line-too-long

  response = http_session.request("GET", endpoint)
  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  # common
  chronicle_auth.add_argument_credentials_file(parser)
  project_instance.add_argument_project_instance(parser)
  project_id.add_argument_project_id(parser)
  regions.add_argument_region(parser)
  # local
  parser.add_argument(
      "--event_id",
      type=str,
      required=True,
      help=("URL-encoded Base64 ID of the Event"),
  )
  args = parser.parse_args()

  auth_session = chronicle_auth.initialize_http_session(
      args.credentials_file,
      SCOPES,
  )
  fetched_event = get_udm_event(
      auth_session,
      args.project_id,
      args.project_instance,
      args.region,
      args.event_id)
  print(json.dumps(fetched_event, indent=2))
#!/usr/bin/env python3

# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Executable and reusable sample for appending to a list."""

import argparse
from typing import Sequence

from common import chronicle_auth
from common import regions

from google.auth.transport import requests

from . import get_list

BACKSTORY_API_BASE_URL = "https://backstory.googleapis.com"


# pylint: disable=bad-continuation
def append_to_list(http_session: requests.AuthorizedSession,
                   list_api_url: str,
                   list_id: str,
                   content_lines: Sequence[str]) -> str:
  """Append items to an existing reference list.

  Lines already present in the list, and duplicates within the new input,
  are appended only once; the first-seen order of new lines is preserved.

  Args:
    http_session: Authorized session for HTTP requests.
    list_api_url: Regionalizied API endpoint.
    list_id: ID of existing list.
    content_lines: Iterable containing items to append to the existing list.

  Returns:
    List update timestamp.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  existing_lines = get_list.get_list(http_session, list_id)

  # Collect only lines not already present, skipping repeats in the input.
  seen = set(existing_lines)
  additions = []
  for line in content_lines:
    if line not in seen:
      seen.add(line)
      additions.append(line)

  body = {
      "name": list_id,
      "lines": existing_lines + additions,
  }
  params = {"update_mask": "list.lines"}

  response = http_session.request("PATCH",
                                  list_api_url,
                                  params=params,
                                  json=body)
  # Expected server response:
  # {
  #   "name": "...",
  #   "description": "...",
  #   "createTime": "yyyy-mm-ddThh:mm:ss.ssssssZ",
  #   "lines": [...],
  #   "contentType": "..."
  # }

  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()["createTime"]


if __name__ == "__main__":
  arg_parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(arg_parser)
  regions.add_argument_region(arg_parser)
  arg_parser.add_argument(
      "-n", "--name", type=str, required=True, help="unique name for the list")
  arg_parser.add_argument(
      "-f",
      "--list_file",
      type=argparse.FileType("r"),
      required=True,
      # File example:
      #   python3 -m lists.append_to_list -f <path>
      # STDIN example:
      #   cat <path> | python3 -m lists.append_to_list -f -
      help="path to file containing the list content to append, or - for STDIN")
  cli_args = arg_parser.parse_args()

  http = chronicle_auth.initialize_http_session(cli_args.credentials_file)
  endpoint = f"{regions.url(BACKSTORY_API_BASE_URL, cli_args.region)}/v2/lists"
  new_list_create_time = append_to_list(
      http,
      endpoint,
      cli_args.name,
      cli_args.list_file.read().splitlines()
  )
  print(f"List successfully appended at {new_list_create_time}")
@@ -49,6 +51,7 @@ def create_list(http_session: requests.AuthorizedSession, name: str, "name": name, "description": description, "lines": content_lines, + "content_type": content_type, } response = http_session.request("POST", url, json=body) @@ -61,7 +64,8 @@ def create_list(http_session: requests.AuthorizedSession, name: str, # "", # "", # ... - # ] + # ], + # "contentType": "" # } if response.status_code >= 400: @@ -82,6 +86,12 @@ def create_list(http_session: requests.AuthorizedSession, name: str, type=str, required=True, help="description of the list") + parser.add_argument( + "-t", + "--content_type", + type=str, + default="CONTENT_TYPE_DEFAULT_STRING", + help="type of list lines") parser.add_argument( "-f", "--list_file", @@ -97,5 +107,7 @@ def create_list(http_session: requests.AuthorizedSession, name: str, CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) new_list_create_time = create_list(session, args.name, args.description, - args.list_file.read().splitlines()) + args.list_file.read().splitlines(), + args.content_type, + ) print(f"New list created successfully, at {new_list_create_time}") diff --git a/lists/create_list_test.py b/lists/create_list_test.py index 58ac154..e7c3f6e 100644 --- a/lists/create_list_test.py +++ b/lists/create_list_test.py @@ -34,7 +34,7 @@ def test_http_error(self, mock_response, mock_session): with self.assertRaises(requests.requests.exceptions.HTTPError): create_list.create_list(mock_session, "name", "description", - ["content line"]) + ["content line"], "CONTENT_TYPE_DEFAULT_STRING") @mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -46,13 +46,15 @@ def test_happy_path(self, mock_response, mock_session): "name": "name", "description": "description", "lines": ["content line 1", "content line 2"], + "contentType": "CONTENT_TYPE_DEFAULT_STRING", 
"createTime": expected_create_time } mock_response.json.return_value = expected_list actual_create_time = create_list.create_list( mock_session, "name", "description", - ["content line 1", "content line 2"]) + ["content line 1", "content line 2"], + "CONTENT_TYPE_DEFAULT_STRING",) self.assertEqual(actual_create_time, expected_create_time) diff --git a/lists/example_input/coldriver_sha256.txt b/lists/example_input/coldriver_sha256.txt new file mode 100644 index 0000000..b87193e --- /dev/null +++ b/lists/example_input/coldriver_sha256.txt @@ -0,0 +1,6 @@ +0f6b9d2ada67cebc8c0f03786c442c61c05cef5b92641ec4c1bdd8f5baeb2ee1 +A949ec428116489f5e77cefc67fea475017e0f50d2289e17c3eb053072adcf24 +C97acea1a6ef59d58a498f1e1f0e0648d6979c4325de3ee726038df1fc2e831d +Ac270310b5410e7430fe7e36a079525cd8724b002b38e13a6ee6e09b326f4847 +84523ddad722e205e2d52eedfb682026928b63f919a7bf1ce6f1ad4180d0f507 +37c52481711631a5c73a6341bd8bea302ad57f02199db7624b580058547fb5a9 \ No newline at end of file diff --git a/lists/example_input/foo.txt b/lists/example_input/foo.txt new file mode 100644 index 0000000..5e140a5 --- /dev/null +++ b/lists/example_input/foo.txt @@ -0,0 +1,6 @@ +foo +bar +baz +foo +bar +foo \ No newline at end of file diff --git a/lists/get_list.py b/lists/get_list.py index a63787d..39975b8 100755 --- a/lists/get_list.py +++ b/lists/get_list.py @@ -54,7 +54,8 @@ def get_list(http_session: requests.AuthorizedSession, # "", # "", # ... - # ] + # ], + # "contentType": "" # } if response.status_code >= 400: diff --git a/lists/list_lists.py b/lists/list_lists.py index e263b42..e257e7e 100755 --- a/lists/list_lists.py +++ b/lists/list_lists.py @@ -65,7 +65,8 @@ def list_lists(http_session: requests.AuthorizedSession, # "lines": [ # "rule_line", # ... - # ] + # ], + # "contentType": "" # }, # ... 
#!/usr/bin/env python3

# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Executable and reusable sample for appending to an existing reference list."""

import argparse
from typing import Sequence

from common import chronicle_auth
from common import regions

from google.auth.transport import requests

from . import get_list

BACKSTORY_API_BASE_URL = "https://backstory.googleapis.com"


# pylint: disable=bad-continuation
def remove_from_list(http_session: requests.AuthorizedSession,
                     list_api_url: str,
                     list_id: str,
                     content_lines: Sequence[str]) -> str:
  """Remove items from an existing reference list.

  Fetches the current list contents, filters out every line that appears in
  `content_lines`, and writes back the remainder.

  Args:
    http_session: Authorized session for HTTP requests.
    list_api_url: Regionalizied API endpoint.
    list_id: ID of existing list.
    content_lines: Iterable containing items to remove from the existing list.

  Returns:
    List update timestamp.

  Raises:
    requests.exceptions.HTTPError: HTTP request resulted in an error
      (response.status_code >= 400).
  """
  current_lines = get_list.get_list(http_session, list_id)
  removals = set(content_lines)
  kept_lines = [line for line in current_lines if line not in removals]

  body = {
      "name": list_id,
      "lines": kept_lines,
  }
  params = {"update_mask": "list.lines"}

  response = http_session.request("PATCH",
                                  list_api_url,
                                  params=params,
                                  json=body)
  # Expected server response:
  # {
  #   "name": "...",
  #   "description": "...",
  #   "createTime": "yyyy-mm-ddThh:mm:ss.ssssssZ",
  #   "lines": [...],
  #   "contentType": "..."
  # }

  if response.status_code >= 400:
    print(response.text)
  response.raise_for_status()
  return response.json()["createTime"]


if __name__ == "__main__":
  arg_parser = argparse.ArgumentParser()
  chronicle_auth.add_argument_credentials_file(arg_parser)
  regions.add_argument_region(arg_parser)
  arg_parser.add_argument(
      "-n", "--name", type=str, required=True, help="unique name for the list")
  arg_parser.add_argument(
      "-f",
      "--list_file",
      type=argparse.FileType("r"),
      required=True,
      # File example:
      #   python3 -m lists.remove_from_list -f <path>
      # STDIN example:
      #   cat <path> | python3 -m lists.remove_from_list -f -
      help="path to file containing the list content to remove, "
           "or - for STDIN")
  cli_args = arg_parser.parse_args()

  endpoint = f"{regions.url(BACKSTORY_API_BASE_URL, cli_args.region)}/v2/lists"
  http = chronicle_auth.initialize_http_session(cli_args.credentials_file)
  new_list_create_time = remove_from_list(
      http,
      endpoint,
      cli_args.name,
      cli_args.list_file.read().splitlines()
  )
  print(f"Items successfully removed from list at {new_list_create_time}")
content_type: str) -> str: """Updates a list. Args: @@ -37,6 +38,7 @@ def update_list(http_session: requests.AuthorizedSession, name: str, name: Name of existing list. description: Optional new description of the list. content_lines: Array containing each line of the list's content. + content_type: Type of list content, indicating how to interpret this list. Returns: Timestamp of when the updated list was written. @@ -49,6 +51,7 @@ def update_list(http_session: requests.AuthorizedSession, name: str, body = { "name": name, "lines": content_lines, + "content_type": content_type, } update_fields = ["list.lines"] @@ -68,7 +71,8 @@ def update_list(http_session: requests.AuthorizedSession, name: str, # "", # "", # ... - # ] + # ], + # "contentType": "" # } if response.status_code >= 400: @@ -89,6 +93,12 @@ def update_list(http_session: requests.AuthorizedSession, name: str, type=str, help="description of the list. Omit this to use previous description." ) + parser.add_argument( + "-t", + "--content_type", + type=str, + default="CONTENT_TYPE_DEFAULT_STRING", + help="type of list lines") parser.add_argument( "-f", "--list_file", @@ -104,5 +114,6 @@ def update_list(http_session: requests.AuthorizedSession, name: str, CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) session = chronicle_auth.initialize_http_session(args.credentials_file) t = update_list(session, args.name, args.description, - args.list_file.read().splitlines()) + args.list_file.read().splitlines(), + args.content_type) print(f"List updated successfully, at {t}") diff --git a/lists/update_list_test.py b/lists/update_list_test.py index 1c07a52..499b553 100644 --- a/lists/update_list_test.py +++ b/lists/update_list_test.py @@ -34,7 +34,7 @@ def test_http_error(self, mock_response, mock_session): with self.assertRaises(requests.requests.exceptions.HTTPError): update_list.update_list(mock_session, "name", "description", - ["content line"]) + ["content line"], "CONTENT_TYPE_DEFAULT_STRING") 
@mock.patch.object(requests, "AuthorizedSession", autospec=True) @mock.patch.object(requests.requests, "Response", autospec=True) @@ -46,13 +46,16 @@ def test_happy_path(self, mock_response, mock_session): "name": "name", "description": "description", "lines": ["content line 1", "content line 2"], - "createTime": expected_time + "createTime": expected_time, + "contentType": "CONTENT_TYPE_DEFAULT_STRING", } mock_response.json.return_value = expected_list actual_time = update_list.update_list( mock_session, "name", "description", - ["content line 1", "content line 2"]) + ["content line 1", "content line 2"], + "CONTENT_TYPE_DEFAULT_STRING", + ) self.assertEqual(actual_time, expected_time) diff --git a/lists/v1alpha/create_list.py b/lists/v1alpha/create_list.py new file mode 100644 index 0000000..3700889 --- /dev/null +++ b/lists/v1alpha/create_list.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# pylint: disable=line-too-long +"""Executable and reusable sample for creating a Reference List. 
+ + Requires the following IAM permission on the parent resource: + chronicle.referenceLists.create + + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists/create +""" +# pylint: enable=line-too-long + +import argparse +from typing import Any, Dict, Optional, Sequence + +from google.auth.transport import requests + +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] +PREFIX = "REFERENCE_LIST_SYNTAX_TYPE_" +SYNTAX_TYPE_ENUM = [ + f"{PREFIX}UNSPECIFIED", # Defaults to ..._PLAIN_TEXT_STRING. + f"{PREFIX}PLAIN_TEXT_STRING", # List contains plain text patterns. + f"{PREFIX}REGEX", # List contains only Regular Expression patterns. + f"{PREFIX}CIDR", # List contains only CIDR patterns. +] + + +def create_list( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + name: str, + description: str, + content_lines: Sequence[str], + content_type: str, + scope_name: Optional[str] | None = None, +) -> Dict[str, Any]: + """Creates a list. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: region in which the target project is located. + name: Unique name for the list. + description: Description of the list. + content_lines: Array containing each line of the list's content. + content_type: Type of list content, indicating how to interpret this list. + scope_name: (Optional) Data RBAC scope name for the list. + Returns: + Dictionary representation of the created Reference List. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + # pylint: disable=line-too-long + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/referenceLists" + # pylint: enable=line-too-long + + # entries are list like [{"value": }, ...] + # pylint: disable-next=line-too-long + # https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#resource:-referencelist + entries = [] + for content_line in content_lines: + entries.append({"value": content_line.strip()}) + + body = { + "name": name, + "description": description, + "entries": entries, + "syntax_type": content_type, + } + if scope_name: + body["scope_info"] = { + "referenceListScope": { + "scopeNames": [ + f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}/dataAccessScopes/{scope_name}" + ] + } + } + else: + body["scope_info"] = None + params = {"referenceListId": name} + response = http_session.request("POST", url, params=params, json=body) + # Expected server response: + # ['name', 'displayName', 'revisionCreateTime', 'description', + # 'entries', 'syntaxType']) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", "--name", type=str, required=True, help="unique name for the list" + ) + parser.add_argument( + "-d", + "--description", + type=str, + required=True, + help="description of the list", + ) + parser.add_argument( + "-s", "--scope_name", type=str, help="data RBAC scope name for the list" + ) + parser.add_argument( + "-t", + "--syntax_type", + type=str, + 
required=False, + default="REFERENCE_LIST_SYNTAX_TYPE_PLAIN_TEXT_STRING", + choices=SYNTAX_TYPE_ENUM, + # pylint: disable-next=line-too-long + help="syntax type of the list, used for validation (default: REFERENCE_LIST_SYNTAX_TYPE_PLAIN_TEXT_STRING)", + ) + parser.add_argument( + "-f", + "--list_file", + type=argparse.FileType("r"), + required=True, + # File example: + # python3 -m lists.v1alpha.create_list -f + # STDIN example: + # cat | python3 -m lists.v1alpha.create_list -f - + help="path of a file containing the list content, or - for STDIN", + ) + args = parser.parse_args() + + # pylint: disable-next=line-too-long + auth_session = chronicle_auth.initialize_http_session(args.credentials_file, SCOPES) + response_json = create_list( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.name, + args.description, + args.list_file.read().splitlines(), + args.syntax_type, + args.scope_name, + ) + print("New list created successfully, at " + f"{response_json.get('revisionCreateTime')}") diff --git a/lists/v1alpha/get_list.py b/lists/v1alpha/get_list.py new file mode 100644 index 0000000..f329631 --- /dev/null +++ b/lists/v1alpha/get_list.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +r"""Executable and reusable sample for getting a Reference List. 
+ +Usage: + python -m lists.v1alpha.get_list \ + --project_id= \ + --project_instance= \ + --name="COLDRIVER_SHA256" + +API reference: + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists/get + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#ReferenceList + https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#ReferenceListEntry +""" + +import argparse +import json +from typing import Dict + +from common import chronicle_auth +from common import project_id +from common import project_instance +from common import regions + +from google.auth.transport import requests + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] + + +def get_list( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + name: str, +) -> Dict[str, any]: + """Gets a Reference List. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: region in which the target project is located. + name: name that identifies the list to get. + + Returns: + Dictionary representation of the Reference List + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/referenceLists/{name}" + + response = http_session.request("GET", url) + # Expected server response is described in: + # https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#ReferenceList + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-n", "--name", type=str, required=True, + help="name that identifies the list to get" + ) + args = parser.parse_args() + + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES, + ) + a_list = get_list( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.name, + ) + print(json.dumps(a_list, indent=2)) diff --git a/lists/v1alpha/patch_list.py b/lists/v1alpha/patch_list.py new file mode 100644 index 0000000..2092ed0 --- /dev/null +++ b/lists/v1alpha/patch_list.py @@ -0,0 +1,292 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
r"""Executable and reusable sample for patching a Reference List.

Command supports add, remove, and replace via the [--add, --remove] flags.

Sample Commands (run from api_samples_python dir):

# Add
python -m lists.v1alpha.patch_list \
  --project_id=$PROJECT_ID \
  --project_instance=$PROJECT_INSTANCE \
  --name="COLDRIVER_SHA256" \
  --list_file=./lists/example_input/foo.txt \
  --add

# Remove
python -m lists.v1alpha.patch_list \
  --project_id=$PROJECT_ID \
  --project_instance=$PROJECT_INSTANCE \
  --name="COLDRIVER_SHA256" \
  --list_file=./lists/example_input/foo.txt \
  --remove

# Replace (when no --add or --remove flags are provided)
python -m lists.v1alpha.patch_list \
  --project_id=$PROJECT_ID \
  --project_instance=$PROJECT_INSTANCE \
  --name="COLDRIVER_SHA256" \
  --list_file=./lists/example_input/coldriver_sha256.txt

API reference:

  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists/patch
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#ReferenceList
  https://cloud.google.com/chronicle/docs/reference/rest/v1alpha/projects.locations.instances.referenceLists#resource:-referencelist
"""

import argparse
import json
import random
import sys
import time
from typing import Any, Dict, Optional, Sequence

from common import chronicle_auth
from common import project_id
from common import project_instance
from common import regions
from google.auth.transport import requests

# pylint: disable=g-import-not-at-top
try:
  from .
import get_list +except ImportError: + from lists.v1alpha import get_list + +CHRONICLE_API_BASE_URL = "https://chronicle.googleapis.com" +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", +] +PREFIX = "REFERENCE_LIST_SYNTAX_TYPE_" +SYNTAX_TYPE_ENUM = [ + f"{PREFIX}UNSPECIFIED", # Defaults to ..._PLAIN_TEXT_STRING. + f"{PREFIX}PLAIN_TEXT_STRING", # List contains plain text patterns. + f"{PREFIX}REGEX", # List contains only Regular Expression patterns. + f"{PREFIX}CIDR", # List contains only CIDR patterns. +] + + +def patch_list( + http_session: requests.AuthorizedSession, + proj_id: str, + proj_instance: str, + proj_region: str, + name: str, + content_lines: Sequence[str], + syntax_type: Optional[str] = None, + description: Optional[str] = None, + scope_name: Optional[str] = None, +) -> Dict[str, Any]: + """Updates a Reference List. + + After update, the contents of the list are verified with Get List. + If the contents are found to be different, updates are retried + N times (configurable) with exponential backoff. + + Args: + http_session: Authorized session for HTTP requests. + proj_id: GCP project id or number to which the target instance belongs. + proj_instance: Customer ID (uuid with dashes) for the Chronicle instance. + proj_region: region in which the target project is located. + name: name that identifies the list to update. + content_lines: Array containing each line of the list's content. + syntax_type: (Optional) List content type; how to interpret this list. + description: (Optional) Description of the list. + scope_name: (Optional) Data RBAC scope name. + Returns: + Dictionary representation of the updated Reference List. + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). 
+ """ + base_url_with_region = regions.url_always_prepend_region( + CHRONICLE_API_BASE_URL, + proj_region + ) + + # pylint: disable-next=line-too-long + parent = f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}" + url = f"{base_url_with_region}/v1alpha/{parent}/referenceLists/{name}" + body = { + "entries": [{"value": line.strip()} for line in content_lines], + } + if scope_name: + body["scope_info"] = { + "referenceListScope": { + "scopeNames": [ + f"projects/{proj_id}/locations/{proj_region}/instances/{proj_instance}/dataAccessScopes/{scope_name}" + ] + } + } + else: + body["scope_info"] = None # does *not* remove scope_info + if description: + body["description"] = description + if syntax_type: + body["syntax_type"] = syntax_type + + params = {"updateMask": ",".join(body.keys())} + body["name"] = name + response = http_session.request("PATCH", url, params=params, json=body) + if response.status_code >= 400: + print(response.text) + response.raise_for_status() + return response.json() + + +def parse_arguments(): + """Parses command line arguments.""" + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + chronicle_auth.add_argument_credentials_file(parser) + project_instance.add_argument_project_instance(parser) + project_id.add_argument_project_id(parser) + regions.add_argument_region(parser) + parser.add_argument("-n", "--name", type=str, required=True, + help="unique name for the list") + parser.add_argument("-f", "--list_file", type=argparse.FileType("r"), + required=True, + help="path of a file containing the list content") + parser.add_argument("-d", "--description", type=str, + help="description of the list") + parser.add_argument( + "-s", "--scope_name", type=str, help="data RBAC scope name for the list" + ) + parser.add_argument("-t", "--syntax_type", type=str, default=None, + choices=SYNTAX_TYPE_ENUM, + help="syntax type of the list, used for validation") + 
add_delete_group = parser.add_mutually_exclusive_group() + add_delete_group.add_argument("--add", action="store_true", + help="only append to the existing list") + add_delete_group.add_argument("--remove", action="store_true", + help="only remove from the existing list") + parser.add_argument("--force", action="store_true", + help="patch regardless of pre-check on changes to list") + parser.add_argument("--max_attempts", type=int, default=6, + help="how many times to attempt the patch operation") + parser.add_argument("--quiet", action="store_true", + help="only print the updated list") + return parser.parse_args() + + +def read_content_lines(file_handle): + """Reads content lines from a file into a list.""" + file_handle.seek(0) # rewind in case this isn't 1st read + return [line.strip() for line in file_handle] + + +def exponential_backoff(attempt, max_attempts, wait_time=1, quiet=False): + """Exponential backoff for a given attempt.""" + if attempt > 0: + jitter = random.uniform(0, wait_time / 2) + wait_time = wait_time * 1.5 + time_to_sleep = wait_time + jitter + if not quiet: + print(f"Attempt {attempt} of {max_attempts} failed, " + f"retrying in {time_to_sleep:.2f} seconds...") + time.sleep(time_to_sleep) + return wait_time + + +def get_current_state(auth_session, args): + """Gets the current state of a Reference List.""" + curr_json = get_list.get_list( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.name, + ) + curr_list = [_["value"] for _ in curr_json.get("entries", [])] + return curr_list, curr_json["revisionCreateTime"] + + +def op_update_content_lines(operation_type, curr_list, content_lines, + force=False): + """Updates the content lines of a Reference List.""" + if operation_type == "add": + seen = set(curr_list) + deduplicated_list = [x for x in content_lines + if not (x in seen or seen.add(x))] + content_lines = curr_list + deduplicated_list + elif operation_type == "remove": + content_lines = [item for item in 
curr_list if item not in content_lines] + if set(curr_list) == set(content_lines) and not force: + print(f"Patch {operation_type or ''} would not change list. Exiting.") + sys.exit(0) + return content_lines + + +def main(): + args = parse_arguments() + og_content_lines = read_content_lines(args.list_file) + + auth_session = chronicle_auth.initialize_http_session( + args.credentials_file, + SCOPES + ) + + operation_type = "add" if args.add else "remove" if args.remove else None + + curr_list, _ = get_current_state(auth_session, args) + + content_lines = op_update_content_lines( + operation_type, curr_list, og_content_lines, args.force) + + attempt, wait_time = 0, 1 + while attempt < args.max_attempts: + patched_json = patch_list( + auth_session, + args.project_id, + args.project_instance, + args.region, + args.name, + content_lines, + args.syntax_type, + args.description, + args.scope_name, + ) + updated_list, ts = get_current_state(auth_session, args) + # no need to compare sets if updated ts matches + success = ts == patched_json["revisionCreateTime"] + if not success: + success = set(content_lines) == set(updated_list) + + if success: + if not args.quiet: + print(f"Patch {operation_type or ''} success.") + print(json.dumps(patched_json, indent=2)) + break + wait_time = exponential_backoff(attempt, args.max_attempts, + wait_time, args.quiet) + # read and verify again in case other processes updated while waiting + og_content_lines = read_content_lines(args.list_file) + curr_list, _ = get_current_state(auth_session, args) + content_lines = op_update_content_lines(operation_type, + curr_list, + og_content_lines, + args.force) + attempt += 1 + + +if __name__ == "__main__": + main() diff --git a/lists/v1alpha/patch_list_test.py b/lists/v1alpha/patch_list_test.py new file mode 100644 index 0000000..a957574 --- /dev/null +++ b/lists/v1alpha/patch_list_test.py @@ -0,0 +1,192 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for "v1alpha.patch_list" module.""" +import unittest +from unittest import mock + +from lists.v1alpha import patch_list + + +class MockArgs: + pass + + +class PatchListGetCurrentState(unittest.TestCase): + + def setUp(self): + super().setUp() + self.args = MockArgs() + self.args.project_id = None + self.args.project_instance = None + self.args.region = None + self.args.name = None + + @mock.patch("lists.v1alpha.get_list.get_list") + def test_extract_values_with_valid_entries(self, mocked_get_list): + """Test extraction of values when entries are well-formed.""" + + mocked_get_list.return_value = { + "entries": [ + {"value": 1}, + {"value": 2}, + {"value": 3}, + ], + "revisionCreateTime": None, + } + expected = [1, 2, 3], None + self.assertEqual(patch_list.get_current_state(None, self.args), expected) + + @mock.patch("lists.v1alpha.get_list.get_list") + def test_get_current_state_without_entries(self, mocked_get_list): + """Test when "entries" key is missing.""" + mocked_get_list.return_value = {"revisionCreateTime": None} + expected_result = [], None + self.assertEqual(patch_list.get_current_state(None, self.args), + expected_result) + + @mock.patch("lists.v1alpha.get_list.get_list") + def test_get_current_state_with_empty_entries(self, mocked_get_list): + """Test when "entries" is an empty list.""" + mocked_get_list.return_value = { + "entries": [], + "revisionCreateTime": None, + } + expected_result = [], None + 
self.assertEqual(patch_list.get_current_state(None, self.args), + expected_result) + + @mock.patch("lists.v1alpha.get_list.get_list") + def test_get_current_state_with_some_entries_missing_value(self, + mocked_get_list): + """Test when some "entries" are missing the "value" key.""" + mocked_get_list.return_value = { + "entries": [ + {"value": 1}, + {}, # Missing "value" + {"value": 3}, + ], + "revisionCreateTime": None, + } + with self.assertRaises(KeyError): + patch_list.get_current_state(None, self.args) + + +class PatchListExponentialBackoffTest(unittest.TestCase): + + @mock.patch("time.sleep", return_value=None) + @mock.patch("random.uniform", return_value=0.5) + def test_wait_time_calculation(self, _, mock_sleep): + actual_wait_time = patch_list.exponential_backoff(1, 3) + expected_wait_time = 1.5 # 1 * 1.5 + expected_time_to_sleep = 2.0 # 1 * 1.5 + 0.5 + self.assertEqual(actual_wait_time, expected_wait_time) + mock_sleep.assert_called_once_with(expected_time_to_sleep) + + @mock.patch("time.sleep", return_value=None) + def test_function_returns_correct_wait_time_without_jitter(self, mock_sleep): + # Testing without jitter to simplify calculation + with unittest.mock.patch("random.uniform", return_value=0): + wait_time = patch_list.exponential_backoff(1, 3, wait_time=2) + self.assertEqual(wait_time, 3) # 2 * 1.5 + 0 + mock_sleep.assert_called_once_with(3.0) + + @mock.patch("time.sleep", return_value=None) + @mock.patch("random.uniform", return_value=0.5) + def test_quiet_mode_suppresses_output(self, _, mock_sleep): + with unittest.mock.patch( + "sys.stdout", + new_callable=unittest.mock.MagicMock) as mock_stdout: + patch_list.exponential_backoff(1, 3, quiet=True) + mock_stdout.write.assert_not_called() + mock_sleep.assert_called_once_with(2.0) + + @mock.patch("time.sleep", return_value=None) + def test_negative_attempt_number(self, mock_sleep): + """Test handling of a non-positive attempt number, which should ideally return the base wait time.""" + # Attempt is 
not greater than 0 + wait_time = patch_list.exponential_backoff(0, 3) + self.assertEqual(wait_time, 1) # Base wait_time returned directly + mock_sleep.assert_not_called() + + +class PatchListOpUpdateContentLinesTest(unittest.TestCase): + + def test_add_with_duplicates(self): + curr_list = ["item1", "item2"] + content_lines = ["item2", "item3"] + result = patch_list.op_update_content_lines("add", + curr_list, content_lines) + self.assertEqual(result, ["item1", "item2", "item3"]) + + def test_remove(self): + curr_list = ["item1", "item2", "item3"] + content_lines = ["item2"] # Items to remove + result = patch_list.op_update_content_lines("remove", + curr_list, content_lines) + self.assertEqual(result, ["item1", "item3"]) + + def test_no_change_no_force(self): + curr_list = ["item1"] + content_lines = ["item1"] + with self.assertRaises(SystemExit) as cm: # Expect the exit behavior + patch_list.op_update_content_lines("add", + curr_list, content_lines, force=False) + self.assertEqual(cm.exception.code, 0) + + def test_no_change_force(self): + curr_list = ["item1"] + content_lines = ["item1"] + result = patch_list.op_update_content_lines("add", + curr_list, content_lines, + force=True) + self.assertEqual(result, ["item1"]) + + +class PatchListReadContentLinesTest(unittest.TestCase): + + def helper(self, mock_file_content, expected_result): + with unittest.mock.patch( + "builtins.open", + unittest.mock.mock_open(read_data=mock_file_content)) as mocked_file: + # Call the function with the mocked file handle + file_handle = open("dummy_file", "r") # mocked file name doesn't matter + result = patch_list.read_content_lines(file_handle) + # Assert that the read content matches the expected result + self.assertEqual(result, expected_result) + # Ensure that seek(0) was called on the file handle to rewind it + file_handle.seek.assert_called_with(0) + # Ensure the file was opened in read mode + mocked_file.assert_called_with("dummy_file", "r") + + def test_three_entries(self): + 
# Mock content to be read + mock_file_content = "Line 1\nLine 2\nLine 3" + # Expected result after stripping + expected_result = ["Line 1", "Line 2", "Line 3"] + self.helper(mock_file_content, expected_result) + + def test_three_crlf_entries(self): + mock_file_content = "Line 1\r\nLine 2\r\nLine 3" + expected_result = ["Line 1", "Line 2", "Line 3"] + self.helper(mock_file_content, expected_result) + + def test_zero_entries(self): + mock_file_content = "" + expected_result = [] + self.helper(mock_file_content, expected_result) + + +if __name__ == "__main__": + unittest.main() diff --git a/lists/verify_list.py b/lists/verify_list.py new file mode 100644 index 0000000..a28f6aa --- /dev/null +++ b/lists/verify_list.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Executable and reusable sample for creating a list.""" + +import argparse +from typing import Sequence + +from google.auth.transport import requests + +from common import chronicle_auth +from common import regions + +CHRONICLE_API_BASE_URL = "https://backstory.googleapis.com" + + +def verify_list(http_session: requests.AuthorizedSession, + content_lines: Sequence[str], content_type: str): + """Verifies a list. + + Args: + http_session: Authorized session for HTTP requests. + content_lines: Array containing each line of the list's content. + content_type: Type of list content, indicating how to interpret this list. 
+ + Returns: + None + + Raises: + requests.exceptions.HTTPError: HTTP request resulted in an error + (response.status_code >= 400). + """ + url = f"{CHRONICLE_API_BASE_URL}/v2/lists:verifyReferenceList" + body = { + "lines": content_lines, + "content_type": content_type, + } + + response = http_session.request("POST", url, json=body) + # Expected server response: + # { "success": true } + # + # or + # + # { + # "errors": [ + # { + # "lineNumber": , + # "errorMessage": "" + # } + # ] + # } + + if response.ok: + # Verification request succeeded. Response contents indicates whether or not + # the list is valid. + if response.json().get("success"): + print("List content is valid.") + else: + print("List content is invalid. Errors below.") + for e in response.json().get("errors"): + print(e) + + if response.status_code >= 400: + # There was an error performing the verification. + print(response.text) + response.raise_for_status() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + chronicle_auth.add_argument_credentials_file(parser) + regions.add_argument_region(parser) + parser.add_argument( + "-t", + "--content_type", + type=str, + default="CONTENT_TYPE_DEFAULT_STRING", + help="type of list lines") + parser.add_argument( + "-f", + "--list_file", + type=argparse.FileType("r"), + required=True, + # File example: + # python3 -m lists.verify_list -f + # STDIN example: + # cat | python3 -m lists.verify_list -f - + help="path of a file containing the list content, or - for STDIN") + + args = parser.parse_args() + CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, args.region) + session = chronicle_auth.initialize_http_session(args.credentials_file) + verify_list( + session, + args.list_file.read().splitlines(), + args.content_type, + ) diff --git a/lists/verify_list_test.py b/lists/verify_list_test.py new file mode 100644 index 0000000..d963a87 --- /dev/null +++ b/lists/verify_list_test.py @@ -0,0 +1,52 @@ +# Copyright 2021 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Unit tests for the "verify_list" module.""" + +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . import verify_list + + +class verifyListTest(unittest.TestCase): + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_http_error(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=400) + mock_response.raise_for_status.side_effect = ( + requests.requests.exceptions.HTTPError()) + + with self.assertRaises(requests.requests.exceptions.HTTPError): + verify_list.verify_list(mock_session, ["content line"], + "CONTENT_TYPE_DEFAULT_STRING") + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_happy_path(self, mock_response, mock_session): + mock_session.request.return_value = mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + expected_resp = { + "success": True, + } + + mock_response.json.return_value = expected_resp + + +if __name__ == "__main__": + unittest.main() diff --git a/requirements.txt b/requirements.txt index b5d334d..43c3784 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-auth -requests +requests < 2.32 diff --git 
a/search/list_alerts.py b/search/list_alerts.py index db8ea7d..fee7c65 100644 --- a/search/list_alerts.py +++ b/search/list_alerts.py @@ -45,12 +45,14 @@ def initialize_command_line_args( "-ts", "--start_time", type=datetime_converter.iso8601_datetime_utc, + required=True, help=("beginning of time range, as an ISO 8601 string " + "('yyyy-mm-ddThh:mm:ss')")) parser.add_argument( "-te", "--end_time", type=datetime_converter.iso8601_datetime_utc, + required=True, help=("end of time range, as an ISO 8601 string ('yyyy-mm-ddThh:mm:ss')")) parser.add_argument( "-tl", diff --git a/search/list_iocs.py b/search/list_iocs.py index 3f53fc1..28ea819 100644 --- a/search/list_iocs.py +++ b/search/list_iocs.py @@ -45,6 +45,7 @@ def initialize_command_line_args( "-ts", "--start_time", type=datetime_converter.iso8601_datetime_utc, + required=True, help=("beginning of time range, as an ISO 8601 string " + "('yyyy-mm-ddThh:mm:ss')")) parser.add_argument( diff --git a/search/list_structured_query_events_test.py b/search/list_structured_query_events_test.py deleted file mode 100644 index 96aa8dc..0000000 --- a/search/list_structured_query_events_test.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Tests for the "list_structured_query_events" module.""" - -import datetime -import unittest -from unittest import mock - -from google.auth.transport import requests - -from . 
import list_structured_query_events - - -class ListStructuredQueryEventsTest(unittest.TestCase): - - def test_initialize_command_line_args_local_time(self): - actual = list_structured_query_events.initialize_command_line_args([ - "--raw_query=\"principal.hostname=\"hostname\"\"", - "--start_time=2021-05-07T11:22:33", "--end_time=2021-05-08T11:22:33", - "--local_time" - ]) - self.assertIsNotNone(actual) - - def test_initialize_command_line_args_utc(self): - actual = list_structured_query_events.initialize_command_line_args([ - "-rq=\"principal.hostname=\"hostname\"\"", "-ts=2021-05-07T11:22:33Z", - "-te=2021-05-08T11:22:33Z" - ]) - self.assertIsNotNone(actual) - - def test_initialize_command_line_args_future_start(self): - start_time = datetime.datetime.utcnow().astimezone(datetime.timezone.utc) - start_time += datetime.timedelta(days=2) - end_time = start_time + datetime.timedelta(days=1) - actual = list_structured_query_events.initialize_command_line_args([ - "-rq=\"principal.hostname=\"hostname\"\"", - start_time.strftime("-ts=%Y-%m-%dT%H:%M:%SZ"), - end_time.strftime("-te=%Y-%m-%dT%H:%M:%SZ") - ]) - self.assertIsNone(actual) - - def test_initialize_command_line_args_future_end(self): - start_time = datetime.datetime.utcnow().astimezone(datetime.timezone.utc) - start_time -= datetime.timedelta(days=2) - end_time = start_time + datetime.timedelta(days=4) - actual = list_structured_query_events.initialize_command_line_args([ - "-rq=\"principal.hostname=\"hostname\"\"", - start_time.strftime("-ts=%Y-%m-%dT%H:%M:%SZ"), - end_time.strftime("-te=%Y-%m-%dT%H:%M:%SZ") - ]) - self.assertIsNone(actual) - - def test_initialize_command_line_args_empty_range(self): - start_time = datetime.datetime.utcnow().astimezone(datetime.timezone.utc) - start_time -= datetime.timedelta(days=2) - actual = list_structured_query_events.initialize_command_line_args([ - "-rq=\"principal.hostname=\"hostname\"\"", - start_time.strftime("-ts=%Y-%m-%dT%H:%M:%SZ"), - 
start_time.strftime("-te=%Y-%m-%dT%H:%M:%SZ") - ]) - self.assertIsNone(actual) - - def test_initialize_command_line_args_negative_range(self): - start_time = datetime.datetime.utcnow().astimezone(datetime.timezone.utc) - start_time -= datetime.timedelta(days=2) - end_time = start_time - datetime.timedelta(days=4) - actual = list_structured_query_events.initialize_command_line_args([ - "-rq=\"principal.hostname=\"hostname\"\"", - start_time.strftime("-ts=%Y-%m-%dT%H:%M:%SZ"), - end_time.strftime("-te=%Y-%m-%dT%H:%M:%SZ"), - ]) - self.assertIsNone(actual) - - @mock.patch.object(requests, "AuthorizedSession", autospec=True) - @mock.patch.object(requests.requests, "Response", autospec=True) - def test_list_structured_query_events(self, mock_response, mock_session): - mock_session.request.return_value = mock_response - type(mock_response).status_code = mock.PropertyMock(return_value=200) - mock_response.json.return_value = {"mock": "json"} - actual = list_structured_query_events.list_structured_query_events( - mock_session, "principal.hostname=\"hostname\"", - datetime.datetime(2021, 5, 7, 11, 22, 33), - datetime.datetime(2021, 5, 8, 11, 22, 33), 10) - self.assertEqual(actual, {"mock": "json"}) - - -if __name__ == "__main__": - unittest.main() diff --git a/search/list_structured_query_events.py b/search/udm_search.py similarity index 59% rename from search/list_structured_query_events.py rename to search/udm_search.py index db6ef52..b6fe69e 100644 --- a/search/list_structured_query_events.py +++ b/search/udm_search.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,7 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -"""Executable and reusable sample for listing structured query events.""" +"""Executable and reusable sample for UDM Search. + +API reference: +https://cloud.google.com/chronicle/docs/reference/search-api#udmsearch +""" import argparse import datetime @@ -38,23 +42,19 @@ def initialize_command_line_args( chronicle_auth.add_argument_credentials_file(parser) regions.add_argument_region(parser) parser.add_argument( - "-rq", - "--raw_query", - type=str, - required=True, - help="query to search for UDM events") + "-q", "--query", type=str, required=True, help=("UDM Search query")) parser.add_argument( "-ts", "--start_time", type=datetime_converter.iso8601_datetime_utc, required=True, - help=("beginning of time range, as an ISO 8601 string " + - "('yyyy-mm-ddThh:mm:ss')")) + help=( + "start of time range, as an ISO 8601 string ('yyyy-mm-ddThh:mm:ss')")) parser.add_argument( "-te", "--end_time", - type=datetime_converter.iso8601_datetime_utc, required=True, + type=datetime_converter.iso8601_datetime_utc, help=("end of time range, as an ISO 8601 string ('yyyy-mm-ddThh:mm:ss')")) parser.add_argument( "-tl", @@ -62,15 +62,16 @@ def initialize_command_line_args( action="store_true", help=("time is specified in the system's local timezone (default = UTC)")) parser.add_argument( - "-s", - "--page_size", + "-l", + "--limit", type=int, - required=False, - help="maximum number of events to return") + default=1000, + help=("Limit on the maximum number of matches to return, up to 1,000" + + "(default = 1,000)")) # Sanity checks for the command-line arguments. 
parsed_args = parser.parse_args(args) - s, e = parsed_args.start_time, parsed_args.end_time + s, e, limit = parsed_args.start_time, parsed_args.end_time, parsed_args.limit if parsed_args.local_time: s = s.replace(tzinfo=None).astimezone(datetime.timezone.utc) e = e.replace(tzinfo=None).astimezone(datetime.timezone.utc) @@ -83,50 +84,50 @@ def initialize_command_line_args( if s >= e: print("Error: start time should not be same as or later than end time") return None + if limit > 1000 or limit < 1: + print("Error: limit can not be more than 1,000 or less than 1") + return None return parsed_args -def list_structured_query_events( - http_session: requests.AuthorizedSession, - raw_query: str, - start_time: datetime.datetime, - end_time: datetime.datetime, - page_size: Optional[int] = 100000) -> Mapping[str, Sequence[Any]]: - """Lists up to 10,000 UDM events that match the query. - - If you receive the maximum number of results, there might still be more - discovered within the specified time range. You might want to narrow the time - range and issue the call again to ensure you have visibility on the results. - - You can use the API call ListStructuredQueryEvents to search for UDM events - by UDM field. +def udm_search(http_session: requests.AuthorizedSession, + query: str, + start_time: datetime.datetime, + end_time: datetime.datetime, + limit: Optional[int] = 1000) -> Mapping[str, Any]: + """Performs a UDM search across the specified time range. Args: http_session: Authorized session for HTTP requests. - raw_query: Query for searching UDM events. - start_time: Inclusive beginning of the time range of events to return, with - any timezone (even a timezone-unaware datetime object, i.e. local time). - end_time: The exclusive end of the time range of events to return, with any + query: UDM search query. + start_time: Inclusive beginning of the time range to search, with any timezone (even a timezone-unaware datetime object, i.e. local time). 
- page_size: Maximum number of events to return, up to 10,000 (default = - 10,000). + end_time: Exclusive end of the time range to search, with any timezone (even + a timezone-unaware datetime object, i.e. local time). + limit: Maximum number of matched events to return, up to 1,000 (default = + 1,000). Returns: { - "results": [ + "events": [ { - "event": "..." <-- UDM event - "eventLogToken": "..." + "name": "...", + "udm": { + "metadata": { ... }, + "principal": { ... }, + "target": { ... }, + }, }, - ...More events... - ], - "moreDataAvailable": true - "runtimeErrors": [ { - "errorText": "..." + "name": "...", + "udm": { + "metadata": { ... }, + "principal": { ... }, + "target": { ... }, + }, }, - ...More errors... + ...More matched events... ] } @@ -134,12 +135,14 @@ def list_structured_query_events( requests.exceptions.HTTPError: HTTP request resulted in an error (response.status_code >= 400). """ - url = f"{CHRONICLE_API_BASE_URL}/v1/events/liststructuredqueryevents" + url = f"{CHRONICLE_API_BASE_URL}/v1/events:udmSearch" + s = datetime_converter.strftime(start_time) + e = datetime_converter.strftime(end_time) params = { - "raw_query": raw_query, - "start_time": datetime_converter.strftime(start_time), - "end_time": datetime_converter.strftime(end_time), - "page_size": page_size + "query": query, + "time_range.start_time": s, + "time_range.end_time": e, + "limit": limit } response = http_session.request("GET", url, params=params) @@ -154,12 +157,10 @@ def list_structured_query_events( if not cli: sys.exit(1) # A sanity check failed. 
- start, end = cli.start_time, cli.end_time + q, start, end, l = cli.query, cli.start_time, cli.end_time, cli.limit if cli.local_time: - start, end = start.replace(tzinfo=None), end.replace(tzinfo=None) + start = start.replace(tzinfo=None) CHRONICLE_API_BASE_URL = regions.url(CHRONICLE_API_BASE_URL, cli.region) session = chronicle_auth.initialize_http_session(cli.credentials_file) - events = list_structured_query_events(session, cli.raw_query, start, end, - cli.page_size) - print(json.dumps(events, indent=2)) + print(json.dumps(udm_search(session, q, start, end, l), indent=2)) diff --git a/search/udm_search_test.py b/search/udm_search_test.py new file mode 100644 index 0000000..375eee7 --- /dev/null +++ b/search/udm_search_test.py @@ -0,0 +1,86 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Tests for the "udm_search" module.""" + +import datetime +import unittest +from unittest import mock + +from google.auth.transport import requests + +from . 
import udm_search + + +class UDMSearchTest(unittest.TestCase): + + def test_initialize_command_line_args_utc(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2022-08-01T00:00:00", "--end_time=2022-08-01T01:00:00" + ]) + self.assertIsNotNone(actual) + + def test_initialize_command_line_args_local_time(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2022-08-01T00:00:00", "--end_time=2022-08-01T01:00:00", + "--local_time" + ]) + self.assertIsNotNone(actual) + + def test_initialize_command_line_args_limit(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2022-08-01T00:00:00", "--end_time=2022-08-01T01:00:00", + "--limit=100" + ]) + self.assertIsNotNone(actual) + + def test_initialize_command_line_args_invalid_limit(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2022-08-01T00:00:00", "--end_time=2022-08-01T01:00:00", + "--limit=100000" + ]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_invalid_start_time(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2100-08-01T00:00:00", "--end_time=2022-08-01T01:00:00" + ]) + self.assertIsNone(actual) + + def test_initialize_command_line_args_invalid_end_time(self): + actual = udm_search.initialize_command_line_args([ + "--query=metadata.event_type=\"NETWORK_CONNECTION\"", + "--start_time=2022-08-01T00:00:00", "--end_time=2022-07-01T01:00:00" + ]) + self.assertIsNone(actual) + + @mock.patch.object(requests, "AuthorizedSession", autospec=True) + @mock.patch.object(requests.requests, "Response", autospec=True) + def test_udm_search(self, mock_response, mock_session): + mock_session.request.return_value 
= mock_response + type(mock_response).status_code = mock.PropertyMock(return_value=200) + mock_response.json.return_value = {"mock": "json"} + actual = udm_search.udm_search(mock_session, "principal.ip=\"10.1.2.3\"", + datetime.datetime(2022, 8, 1, 00, 00, 00), + datetime.datetime(2022, 8, 2, 0, 0, 0)) + self.assertEqual(actual, {"mock": "json"}) + + +if __name__ == "__main__": + unittest.main()