From ca114ad56984bb9246ef80f0821161cba326a2c1 Mon Sep 17 00:00:00 2001 From: israelpoli <72099621+israelpoli@users.noreply.github.com> Date: Thu, 14 Nov 2024 14:55:19 +0200 Subject: [PATCH] Symantec Endpoint Security (New pack) (#36694) * folder structure * commit * commit * continue development * continue development * Add UTs * Add UT and add docstring for some functions and update Docker * pre-commit * add docstring * add UT and fix filter_duplicate_events function * add UTs * change the Client Secret to Token * add image * commit * add UTs * commit * commit * add UT * autopep8 * commit * more changes * commit * commit * corrections * commit end * update Docker and add UT * some comment corrections * some comment corrections * fix UTs * generated docs * comment corrections * comment corrections * Apply suggestions from code review docs review Co-authored-by: ShirleyDenkberg <62508050+ShirleyDenkberg@users.noreply.github.com> * add link for more information to obtain the Token parameter * commit * ruff fix * change the test-module command * remove UT un-necessary * commit * comment corrections * comment corrections * comment correction --------- Co-authored-by: ShirleyDenkberg <62508050+ShirleyDenkberg@users.noreply.github.com> --- Packs/SymantecEndpointSecurity/.pack-ignore | 0 .../SymantecEndpointSecurity/.secrets-ignore | 0 .../SymantecEndpointSecurity/README.md | 38 ++ .../SymantecEndpointSecurity.py | 474 ++++++++++++++++++ .../SymantecEndpointSecurity.yml | 60 +++ .../SymantecEndpointSecurity_description.md | 18 + .../SymantecEndpointSecurity_image.png | Bin 0 -> 3675 bytes .../SymantecEndpointSecurity_test.py | 336 +++++++++++++ Packs/SymantecEndpointSecurity/README.md | 0 .../pack_metadata.json | 20 + 10 files changed, 946 insertions(+) create mode 100644 Packs/SymantecEndpointSecurity/.pack-ignore create mode 100644 Packs/SymantecEndpointSecurity/.secrets-ignore create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/README.md create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.py create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.yml create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_description.md create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_image.png create mode 100644 Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_test.py create mode 100644 Packs/SymantecEndpointSecurity/README.md create mode 100644 Packs/SymantecEndpointSecurity/pack_metadata.json diff --git a/Packs/SymantecEndpointSecurity/.pack-ignore b/Packs/SymantecEndpointSecurity/.pack-ignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/Packs/SymantecEndpointSecurity/.secrets-ignore b/Packs/SymantecEndpointSecurity/.secrets-ignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/README.md b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/README.md new file mode 100644 index 000000000000..bec18affd393 --- /dev/null +++ b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/README.md @@ -0,0 +1,38 @@ +Symantec Endpoint Security Event Collector for Cortex XSIAM. + +## Configure Symantec Endpoint Security on Cortex XSIAM + +1. 
Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**.
+2. Search for **Symantec Endpoint Security**.
+3. Click **Add instance** to create and configure a new integration instance.
+
+   | **Parameter** | **Required** |
+   | --- | --- |
+   | Server URL | True |
+   | OAuth credential | True |
+   | Stream ID | True |
+   | Channel ID | True |
+   | Fetch interval in seconds | True |
+   | Use system proxy settings | False |
+   | Trust any certificate (not secure) | False |
+
+4. Click **Test** to validate the URL, token, and connection.
+
+
+### To generate a token for the ***OAuth credential*** parameter:
+
+1. Log in to the Symantec Endpoint Security console.
+2. Click **Integration** > **Client Applications**.
+3. Click **Add Client Application**.
+4. Enter a name for the application, then click **Add**. The client application details screen appears.
+5. Click **⋮** and select **Client Secret**.
+6. Click the **copy** icon next to **OAuth Credentials**.
+
+For more information on obtaining *OAuth Credentials*, refer to [this documentation](https://apidocs.securitycloud.symantec.com/#/doc?id=ses_auth) or watch [this video](https://youtu.be/d7LRygRfDLc?si=NNlERXtfzv4LjpsB).
+
+**Note:**
+
+- There is no need to generate a bearer token; the integration uses the provided `OAuth Credentials` to generate one.
+- The `test_module` check validates only the `OAuth credential` parameter; it does not validate the `Channel ID` and `Stream ID` parameters.
+- Fetching events that occurred at a specific time may be delayed due to delays in event ingestion on Symantec's side.
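+
+If you need to verify the credential outside of Cortex XSIAM, the sketch below reproduces the same token exchange the integration performs against `/v1/oauth2/tokens` (illustrative only; `OAUTH_CREDENTIAL` is a placeholder for the value copied in the steps above, and the `requests` library is assumed to be available):
+
+```python
+import requests
+
+OAUTH_CREDENTIAL = "<paste the copied OAuth credential>"  # placeholder, not a real value
+
+# Same endpoint and headers the integration itself uses to obtain a bearer token.
+response = requests.post(
+    "https://api.sep.securitycloud.symantec.com/v1/oauth2/tokens",
+    headers={
+        "accept": "application/json",
+        "content-type": "application/x-www-form-urlencoded",
+        "Authorization": f"Basic {OAUTH_CREDENTIAL}",
+    },
+    data={},
+)
+response.raise_for_status()
+print(response.json()["access_token"])  # the bearer token the integration generates internally
+```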
\ No newline at end of file
diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.py b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.py
new file mode 100644
index 000000000000..160fc0a00d21
--- /dev/null
+++ b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.py
@@ -0,0 +1,474 @@
+import itertools
+import demistomock as demisto
+from CommonServerPython import *  # noqa # pylint: disable=unused-wildcard-import
+from CommonServerUserPython import *  # noqa
+from datetime import datetime
+import dateparser
+import time
+
+
+# CONSTANTS
+VENDOR = "symantec"
+PRODUCT = "endpoint_security"
+DEFAULT_CONNECTION_TIMEOUT = 30
+MAX_CHUNK_SIZE_TO_READ = 1024 * 1024 * 150  # 150 MB
+DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+
+"""
+Sleep time between fetch attempts when an error occurs in the retrieval process,
+primarily used to avoid overloading with consecutive API calls
+if an error is received from the API.
+"""
+FETCH_INTERVAL = 60
+
+
+class UnauthorizedToken(Exception):
+    """
+    Exception raised when the authentication token is unauthorized.
+    """
+
+    ...
+
+
+class NextPointingNotAvailable(Exception):
+    """
+    Exception raised when the `next` stream pointer is no longer available.
+    """
+
+    ...
+
+
+class Client(BaseClient):
+    def __init__(
+        self,
+        base_url: str,
+        token: str,
+        stream_id: str,
+        channel_id: str,
+        verify: bool,
+        proxy: bool,
+    ) -> None:
+
+        self.headers: dict[str, str] = {}
+        self.token = token
+        self.stream_id = stream_id
+        self.channel_id = channel_id
+
+        super().__init__(
+            base_url=base_url,
+            verify=verify,
+            proxy=proxy,
+            timeout=180,
+        )
+
+        self._update_access_token_in_headers()
+
+    def _update_access_token_in_headers(self):
+        """
+        Retrieves an access token using the `token` provided in the params, and updates `self.headers`.
+        """
+        get_token_headers: dict[str, str] = {
+            "accept": "application/json",
+            "content-type": "application/x-www-form-urlencoded",
+            "Authorization": f"Basic {self.token}",
+        }
+        try:
+            res = self._http_request(
+                "POST",
+                url_suffix="/v1/oauth2/tokens",
+                headers=get_token_headers,
+                data={},
+            )
+        except Exception as e:
+            raise DemistoException("Failed getting an access token") from e
+
+        if "access_token" not in res:
+            raise DemistoException(
+                f"The key 'access_token' is missing from the response. Response from API: {res}",
+                res=res,
+            )
+        self.headers = {
+            "Authorization": f'Bearer {res["access_token"]}',
+            "Accept": "application/x-ndjson",
+            "Content-Type": "application/json",
+            "Accept-Encoding": "gzip",
+        }
+
+    def get_events(self, payload: dict[str, str]) -> list[dict]:
+        """
+        Streaming API call that fetches events and returns the parsed stream chunks.
+        """
+        res = self._http_request(
+            method="POST",
+            url_suffix=f"/v1/event-export/stream/{self.stream_id}/{self.channel_id}",
+            json_data=payload,
+            params={"connectionTimeout": DEFAULT_CONNECTION_TIMEOUT},
+            resp_type="text",
+            headers=self.headers,
+        )
+        # Reshape the NDJSON response into a valid JSON array.
+        res = res.replace("}\n{", "},{")
+        if not res.startswith("["):
+            res = f"[{res}]"
+        return json.loads(res)
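+
+
+# Illustration of the reshaping above (values are placeholders): the stream returns
+# newline-delimited JSON chunks such as
+#     '{"next": "<hash1>", "events": [...]}\n{"next": "<hash2>", "events": [...]}'
+# and the replace/wrap turns them into a single parseable JSON array:
+#     '[{"next": "<hash1>", "events": [...]},{"next": "<hash2>", "events": [...]}]'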
+
+
+def sleep_if_necessary(last_run_duration: float) -> None:
+    """
+    Manages the fetch interval by sleeping if necessary.
+
+    This function compares the fetch runtime against FETCH_INTERVAL.
+    If the runtime is shorter than FETCH_INTERVAL, it sleeps
+    for the time difference between FETCH_INTERVAL and the fetch runtime.
+    Otherwise, the next fetch occurs immediately.
+    """
+    fetch_sleep = FETCH_INTERVAL - last_run_duration
+    if fetch_sleep > 0:
+        demisto.debug(f"Sleeping for {fetch_sleep} seconds")
+        time.sleep(fetch_sleep)
+        return
+
+    demisto.debug("Not sleeping, next fetch will take place immediately")
+
+
+def normalize_date_format(date_str: str) -> str:
+    """
+    Normalize the given date string by removing milliseconds.
+
+    Args:
+        date_str (str): The input date string to be normalized.
+
+    Returns:
+        str: The normalized date string without milliseconds.
+    """
+    # Parse the original date string with milliseconds
+    if not (timestamp := dateparser.parse(date_str)):
+        raise DemistoException(f"Failed to parse date string: {date_str}")
+
+    # Convert back to the desired format without milliseconds
+    return timestamp.strftime(DATE_FORMAT)
+
+
+def calculate_next_fetch(
+    filtered_events: list[dict[str, str]],
+    next_hash: str,
+    include_last_fetch_events: bool,
+    last_integration_context: dict[str, str],
+) -> None:
+    """
+    Calculate and update the integration context for the next fetch operation.
+
+    - Extracts the time of the latest event.
+    - Extracts all event IDs with a time matching the latest event time.
+    - If the latest event time matches the latest time from the previous fetch,
+      extends the suspected duplicate IDs from the previous fetch.
+    - If a push to XSIAM fails, stores all events in the `integration_context`
+      to be pushed in the next fetch.
+    - Updates the integration_context.
+
+    Args:
+        filtered_events (list[dict[str, str]]): A list of filtered events.
+        next_hash (str): The hash for the next fetch operation.
+        include_last_fetch_events (bool): Flag to include last fetched events in the integration context.
+        last_integration_context (dict[str, str]): The previous integration context.
+    """
+
+    if filtered_events:
+        events_suspected_duplicates = extract_events_suspected_duplicates(
+            filtered_events
+        )
+
+        # Determine the latest event time among the filtered events.
+        latest_event_time = normalize_date_format(
+            max(filtered_events, key=parse_event_time)["log_time"]
+        )
+    else:
+        events_suspected_duplicates = []
+        latest_event_time = last_integration_context.get("latest_event_time", "")
+
+    if latest_event_time == last_integration_context.get("latest_event_time", ""):
+        # If the latest event time matches the previous one,
+        # extend the suspected duplicates list with events from the previous context,
+        # to control deduplication across multiple fetches.
+        demisto.debug(
+            "The latest event time equals the latest event time from the previous fetch,"
+            " adding the suspected duplicates from last time"
+        )
+        events_suspected_duplicates.extend(
+            last_integration_context.get("events_suspected_duplicates", [])
+        )
+
+    integration_context = {
+        "latest_event_time": latest_event_time,
+        "events_suspected_duplicates": events_suspected_duplicates,
+        "next_fetch": {"next": next_hash} if next_hash else {},
+        "last_fetch_events": filtered_events if include_last_fetch_events else [],
+    }
+
+    demisto.debug(f"Updating integration context with new data: {integration_context}")
+    set_integration_context(integration_context)
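+
+
+# For reference, the integration context persisted above has the following shape
+# (illustrative values; "next" is an opaque stream pointer returned by Symantec):
+# {
+#     "latest_event_time": "2024-10-09T12:34:56Z",
+#     "events_suspected_duplicates": ["<uuid>", ...],
+#     "next_fetch": {"next": "<hash>"},
+#     "last_fetch_events": [],  # non-empty only if the previous push to XSIAM failed
+# }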
+
+
+def push_events(events: list[dict]):
+    """
+    Push events to XSIAM.
+    """
+    demisto.debug(f"Pushing {len(events)} events to XSIAM")
+    send_events_to_xsiam(events=events, vendor=VENDOR, product=PRODUCT)
+    demisto.debug(f"Pushed {len(events)} events to XSIAM successfully")
+
+
+def parse_event_time(event) -> datetime:
+    """
+    Parse the event time from the given event dict to a datetime object.
+    """
+    return datetime.strptime(normalize_date_format(event["log_time"]), DATE_FORMAT)
+
+
+def extract_events_suspected_duplicates(events: list[dict]) -> list[str]:
+    """
+    Extract event IDs of potentially duplicate events.
+
+    This function identifies events with the latest timestamp and considers them as
+    potential duplicates. It returns a list of their unique identifiers (UUIDs).
+    """
+
+    # Find the maximum event time
+    latest_event_time = normalize_date_format(
+        max(events, key=parse_event_time)["log_time"]
+    )
+
+    # Filter all events with the maximum event time
+    filtered_events = filter(
+        lambda event: normalize_date_format(event["log_time"]) == latest_event_time,
+        events,
+    )
+
+    # Extract the event IDs from the filtered events
+    return [event["uuid"] for event in filtered_events]
+
+
+def is_duplicate(
+    event_id: str,
+    event_time: datetime,
+    latest_event_time: datetime,
+    events_suspected_duplicates: set[str],
+) -> bool:
+    """
+    Determine if an event is a duplicate based on its time and ID.
+
+    This function checks if an event is considered a duplicate by comparing its
+    timestamp with the latest event time and checking if its ID is in the set of
+    suspected duplicates.
+
+    Args:
+        event_id (str): The unique identifier of the event.
+        event_time (datetime): The timestamp of the event.
+        latest_event_time (datetime): The timestamp of the last event from the last fetch.
+        events_suspected_duplicates (set): A set of event IDs suspected to be duplicates.
+
+    Returns:
+        bool: True if the event's time is earlier than the latest event time, or if
+            its time is identical to the latest and its ID is in the set of
+            suspected duplicates; False otherwise.
+    """
+    if event_time < latest_event_time:
+        return True
+    elif event_time == latest_event_time and event_id in events_suspected_duplicates:
+        return True
+    return False
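+
+
+# Worked example of the dedup decision (assumed values, mirroring the unit tests):
+# with latest_event_time = 2024-10-09T12:34:56Z and events_suspected_duplicates = {"123"},
+# an event with uuid "123" at exactly that second is dropped as a duplicate,
+# an event with uuid "456" at the same second is kept,
+# and any event with an earlier timestamp is dropped regardless of its uuid.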
+
+
+def filter_duplicate_events(
+    events: list[dict[str, str]], integration_context: dict
+) -> list[dict[str, str]]:
+    """
+    Filter out duplicate events from the given list of events.
+
+    Args:
+        events (list[dict[str, str]]): A list of event dicts, each containing 'uuid' and 'log_time' keys.
+        integration_context (dict): The integration context from the previous fetch,
+            holding the latest event time and the suspected duplicate IDs.
+
+    Returns:
+        list[dict[str, str]]: A list of event dicts with duplicates removed.
+    """
+    events_suspected_duplicates = set(
+        integration_context.get("events_suspected_duplicates", [])
+    )
+    latest_event_time = integration_context.get(
+        "latest_event_time"
+    ) or datetime.min.strftime(DATE_FORMAT)
+
+    latest_event_time = datetime.strptime(
+        normalize_date_format(latest_event_time), DATE_FORMAT
+    )
+
+    filtered_events: list[dict[str, str]] = []
+
+    for event in events:
+        if not is_duplicate(
+            event["uuid"],
+            datetime.strptime(normalize_date_format(event["log_time"]), DATE_FORMAT),
+            latest_event_time,
+            events_suspected_duplicates,
+        ):
+            event["_time"] = event["time"]
+            filtered_events.append(event)
+
+    return filtered_events
+
+
+def get_events_command(client: Client, integration_context: dict) -> None:
+    """
+    Fetch events from the stream, filter out duplicates, push the events to XSIAM,
+    and update the integration context for the next fetch.
+    """
+    next_fetch: dict[str, str] = integration_context.get("next_fetch", {})
+
+    try:
+        json_res = client.get_events(payload=next_fetch)
+    except DemistoException as e:
+        if e.res is not None:
+            if e.res.status_code == 401:
+                demisto.info(
+                    "Unauthorized access token, trying to obtain a new access token"
+                )
+                raise UnauthorizedToken
+            if e.res.status_code == 410:
+                raise NextPointingNotAvailable
+        raise
+
+    events: list[dict] = list(
+        itertools.chain.from_iterable(chunk["events"] for chunk in json_res)
+    )
+    next_hash = json_res[0].get("next", "") if json_res else ""
+
+    if not events:
+        demisto.info("No events received")
+        return
+
+    demisto.debug(f"Starting event filtering. Initial number of events: {len(events)}")
+    filtered_events = filter_duplicate_events(events, integration_context)
+    demisto.debug(
+        f"Filtering completed. Total number of events: {len(filtered_events)}"
+    )
+
+    filtered_events.extend(integration_context.get("last_fetch_events", []))
+    demisto.debug(
+        f"Total number of events after merging with last fetch events: {len(filtered_events)}"
+    )
+
+    try:
+        push_events(filtered_events)
+    except Exception as e:
+        # If the push of events to XSIAM fails,
+        # the current fetch's events are stored in `integration_context`,
+        # ensuring they are pushed in the next fetch operation.
+        calculate_next_fetch(
+            filtered_events=filtered_events,
+            next_hash=next_hash,
+            include_last_fetch_events=True,
+            last_integration_context=integration_context,
+        )
+        raise DemistoException(
+            "Failed to push events to XSIAM; the integration context was updated"
+        ) from e
+
+    calculate_next_fetch(
+        filtered_events=filtered_events,
+        next_hash=next_hash,
+        include_last_fetch_events=False,
+        last_integration_context=integration_context,
+    )
+
+
+def perform_long_running_loop(client: Client):
+    """
+    Manages the fetch process.
+    Due to a limitation on Symantec's side,
+    the integration is configured as long-running,
+    since API calls can take over 5 minutes.
+
+    Fetch process:
+    - In every iteration except the first,
+      the fetch is performed with the `next_fetch` argument,
+      which acts as a pointer for Symantec.
+    - When an error is received from Symantec,
+      it is handled based on the error type, and before the next iteration
+      the process enters a brief sleep period governed by `FETCH_INTERVAL`
+      to avoid overloading with API calls.
+    """
+    while True:
+        # Used to calculate the duration of the fetch run.
+        start_timestamp = time.time()
+        try:
+            integration_context = get_integration_context()
+            demisto.info(f"Starting new fetch with {integration_context=}")
+            get_events_command(client, integration_context=integration_context)
+
+        except UnauthorizedToken:
+            try:
+                client._update_access_token_in_headers()
+            except Exception as e:
+                raise DemistoException("Failed obtaining a new access token") from e
+        except NextPointingNotAvailable:
+            demisto.debug(
+                "The 'next' pointer references an older event that is no longer available for streaming. "
+                "Clearing next_fetch; the integration's dedup mechanism ensures that duplicate events are not inserted. "
+                "A different pointer will eventually be received, and fetching will recover from this edge case."
+            )
+            integration_context.pop("next_fetch", None)
+            set_integration_context(integration_context)
+        except Exception as e:
+            raise DemistoException("Failed to fetch logs from API") from e
+
+        # Used to calculate the duration of the fetch run.
+        end_timestamp = time.time()
+
+        sleep_if_necessary(end_timestamp - start_timestamp)
+
+
+def test_module() -> str:
+    """
+    The test is performed by obtaining the `access_token` during `Client`'s initialization,
+    avoiding a `test_module` implementation that calls `get_events` due to the one-minute timeout
+    set for the `test-module` command by the server.
+    """
+    return "ok"
+
+
+def main() -> None:  # pragma: no cover
+    params = demisto.params()
+
+    host = params["host"]
+    token = params["token"]["password"]
+    stream_id = params["stream_id"]
+    channel_id = params["channel_id"]
+    verify = not argToBoolean(params.get("insecure", False))
+    proxy = argToBoolean(params.get("proxy", False))
+
+    command = demisto.command()
+    try:
+        client = Client(
+            base_url=host,
+            token=token,
+            stream_id=stream_id,
+            channel_id=channel_id,
+            verify=verify,
+            proxy=proxy,
+        )
+
+        if command == "test-module":
+            return_results(test_module())
+        elif command == "long-running-execution":
+            demisto.info("Starting long running execution")
+            perform_long_running_loop(client)
+        else:
+            raise NotImplementedError(f"Command {command} is not implemented.")
+
+    except Exception as e:
+        return_error(
+            f"Failed to execute {command} command. Error in Symantec Endpoint Security Integration [{e}]."
+        )
+
+
+""" ENTRY POINT """
+
+if __name__ in ("__main__", "__builtin__", "builtins"):
+    main()
diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.yml b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.yml
new file mode 100644
index 000000000000..0ecf35544252
--- /dev/null
+++ b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity.yml
@@ -0,0 +1,60 @@
+commonfields:
+  id: Symantec Endpoint Security
+  version: -1
+name: Symantec Endpoint Security
+display: Symantec Endpoint Security
+category: Analytics & SIEM
+description: "Symantec Endpoint Security Event Collector for Cortex XSIAM."
+configuration:
+- display: Server URL
+  name: host
+  type: 0
+  defaultvalue: https://api.sep.securitycloud.symantec.com
+  required: true
+  section: Connect
+- displaypassword: OAuth credential
+  name: token
+  hiddenusername: true
+  type: 9
+  required: true
+  section: Connect
+- display: Stream ID
+  name: stream_id
+  type: 0
+  required: true
+  additionalinfo: ""
+  section: Connect
+- display: Channel ID
+  name: channel_id
+  type: 0
+  required: true
+  additionalinfo: ""
+  section: Connect
+- display: Use system proxy settings
+  name: proxy
+  required: false
+  type: 8
+  section: Connect
+- display: Trust any certificate (not secure)
+  name: insecure
+  required: false
+  type: 8
+  section: Connect
+- defaultvalue: 'true'
+  display: Long Running Instance
+  hidden: true
+  name: longRunning
+  type: 8
+  section: Connect
+script:
+  script: ""
+  type: python
+  commands: []
+  dockerimage: demisto/python3:3.11.10.113941
+  longRunning: true
+  subtype: python3
+marketplaces:
+- marketplacev2
+fromversion: 6.8.0
+tests:
+- No tests
diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_description.md b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_description.md
new file mode 100644
index 000000000000..83e0e540897b
--- /dev/null
+++ b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_description.md
@@ -0,0 +1,18 @@
+To generate a token for the *OAuth credential* parameter:
+
+1. Log in to the Symantec Endpoint Security console.
+2. Click **Integration** > **Client Applications**.
+3. Click **Add Client Application**.
+4. Enter a name for the application, then click **Add**. The client application details screen appears.
+5. Click **⋮** and select **Client Secret**.
+6. Click the **copy** icon next to **OAuth Credentials**.
+
+For more information on obtaining *OAuth Credentials*, refer to [this documentation](https://apidocs.securitycloud.symantec.com/#/doc?id=ses_auth) or watch [this video](https://youtu.be/d7LRygRfDLc?si=NNlERXtfzv4LjpsB).
+
+
+**Note:**
+
+- There is no need to generate a bearer token; the integration uses the provided `OAuth Credentials` to generate one.
+- The `test_module` check validates only the `OAuth credential` parameter; it does not validate the `Channel ID` and `Stream ID` parameters.
+- Fetching events that occurred at a specific time may be delayed due to delays in event ingestion on Symantec's side.
\ No newline at end of file diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_image.png b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_image.png new file mode 100644 index 0000000000000000000000000000000000000000..f1cf202e307fcb7d1cf1a763dba9495d19584717 GIT binary patch literal 3675 zcmZvf=Q|q=!^NXU6rpb1ND-wsYAcGAShdwI+S+cdy;qGAF=|)sRVqaygw`HK?1mbp z(pGFSYR@3l(+}^b_jx{?bFS<62b}BrC7KxNfEd6G0001_r~AO{&xZcVU3$7dom&a{ z{%5EH%ycvW*ioJh0Dzf8?}55SDAjfWW7eZ@FQTu)*G4lJ7X|5VK=>a~PdG?`5&Wf2 zUvtK}s@=*S%ge)@Jf2LJXIEnjng*7ro&Wk+g%)ZQk}t)He9tp6b>OGJGhqq7CG<)v zhT$NOJo4$XvsYzuSQ-KDtFBnHkJySl`4L|7>5A}!JeN3hds3|eZb+R^cNg?8cu4NG z8~^_Y3tv1+8-%c)k6Gu#4h28H^~t3>@F6^)jNtY^+b)xe4lbl$G7 z7E_N~cR%c;U;U0fG@J2ooud%A4NH09DT zYV7k*mTJ0lIbS!usUU^EeCtj9lrU{C5bFpu$vXEHh>80}_13}(GgD#upvQsVOu529 z(*4!J&;qI0tJn!&v1(xD&c6iRn@xh&Q`VT%>#CHAT%f@o^aYc5x+ z(U9ZLFcE`$+6G1D^LzJ-{aTle4hI#_nsyC#wOf(DM$7jldpx}*2E4pxisNNGpIXV>YYgevJTntty?x>NJg7l#6$zhGZmWWh z3iLb|f6R;AvEypZ=nbjdpEMc?O1s~56xVtK|;M(XM462>>FBZtW{0i75 z_FzeV<{N{i?%qj%+3RPu5&E#x6)lxsFL;{MKVz_pQj*Gny4dngNquwU#IGn;)3B}0 ze-FOxvy8Uaejwhw&WGPY3@(_qaAM(^j_FqPge$A^f;bkqdY#U6gwDN&`a2cD)k&+< z*{1IOEw&gZ9ZLFA?IqU{9b;bKrHhGsb^6kpWJ=TSEdKPHsryroc4%P_)7jt6K&6`N;@x;=# z(Iw%k)W^$5e2#w$0bS=GZJYHvTqlIzgT&Fd3{p6hj>l!e+J)t(4z#+Q0u zuXSDbrzh1{n87m{ZKOG{l1kr=4KWAo~fQ@9&L;9^BajSp%dr zMP*SfzAl^>lgGLJg2l!A(k1CE{xE#~X9mqya#^wHipj6aoZ4IyJG#yQj{1S{ZkdYE z!NO@p?%S+J8M-7D7Jp?KW_XRm-&{<)*VYa@4;Op%sv2yCXQ-s$+h{p6C9V`Xd)Bu= zySVMwNzduX4ut`VtI{ZH3P!ki@&w_)B*LSaDc^dpK0A`5gcNEp#y{FlD@y2h5r7et zY`YFWF=K0MJ561yllEIRc<@xB?q|$ZpCoda^Ik{9yBJ>3kH#rU|K$z;QW^teL@N|j zbXlt2vh#Y(J*Yfs{TKO!%Tc4Y% zgql!yqX{Rx=;(Wq;K-2YHjVh9t~mp{ZgC*%3%sBSEOT}g=zyfr5YS~-fD)D^T9dQ{ z0|&Q!x2)$lX_FuGw3PIO!-YT-x*l=gbo;oVJ#XDXh`KFOm6mZ~ush-F1LB&s@8#&` zd5|OpMuTzm5ez_u#gcveTST#oyhQer3TIL8^9Ykx=C>^1^W9R5RSrD*weAeQh4@0b z`k#a*m;JLNJ1unO6`)A7H-c9(I@9sS`?8o>)MX02LLR=v?1Qa~TUPQg!TWSxee5?k zN}6&B6bH_f&<_6SWJsasSckT$A>9Lc`$r}~EIl_&x&=%oV8g9m^UTi?mv5t= z)y+1#%FP&2Blqq0xYKNzJaC;|fS?k5pr|o|+?2BkEh1ps0pWV^2IUM9gV%`DF&({2<4089!TczG!XC(EYqt^AM^%bi zXo&nf2!tZiyHOI}7?@|WlKs_BEqQrAQ-6`AjMmX{vGC;h z4&K!mePb6Ghf1@~tGNljX?Ck#8JBu3NSVMY+>}ITIJUeHvLMIBd~?Cnnht_6@BWfX z1M;~Y2?%sC+Wo4S(t)COSo5z#tkd;<{oHh@U$!2d9X-2(Mr0Lo2mk zTclWOsqmFd1!LTyK}fsC){y=+UJ%rzicFe{Ommt~wLH&M7G7;vQVLnK)=)55o^QQ* z?=~Bm@6ZcuX(2*OQ| z1fjKxKAfi&-O*L#7zHR)FH6MFy%8k@TYeS1Y@ES~oQAgm4Kvo@6DJ0`{l_OcAMXo) z$sYTK@gZ3g=B>T=lyYOjuf(~&6b^qM^L&HHxRJhO#SLulTMT!Hra_pr{}JB!#ibAt zu%yRwT%cAS#U_+J!)elKffepwaNJin8W&bHrh0VX9hCzSN*V8b;9?<)8~REAj5alr zDFr}Lq0&CwcYrq9P82o^HC|H2Je^KX*`#Bl=)o@pi>S4aW+a{^%N6EPNm9?S7q_tm z4n6N)HzL(F96ZA-56=(evXT!ogwF1~M&Cq@Ey+b%K-luGR9ONRf_-l#Ho*7<{oZ&e zz5hnZbh--x*4@g?Bpdvckk~{M0kSxRMtL5XMN*-|2+g(?4au5;87jTAxl+LE3*S-b zBK2%Jh)%a88J&n!15>+H&01l_jY#GE-KnWFVD=0InN?m2FNu@bvp6dd7>%W}#QWcM zY1?V2sT6;$Z?NsLBsqtJ45}p*+R2Q5=Gs`De&iQ)iT>=T!n+6N06k??L2n*cC<@tg z)So#y^K?nl9-a<=6*8I%xgP~>DeH(nuUtg#>FLfpgM;rf(YY+{2;?uT|0;nahrKF9 z1y8qcaMa3{`~RKIxu`2$Mb;rkL7OQQ_N`csy@%3*`SG<06bDWopt%FL4Q=OuT-$&O z$Qx18?R&~E4u3pOZ;q?3a84U!j~K3#o>nkUe%#pF!iccQc!D{r@}r}J1{3gkvizKk z{=3FyLpxxlj2aIo-$<*XVX22!j>KgdEb}s@8G{kT@+#g+jBW^bK?f)Ug>kwj@17w< z$6us~@NP)zYQ?SZFXKUAa-9FHzz>77wvTqU6N6{wXiuNgeHqtl^}S3c`6m+@5oxt? 
zfpfoC9my&Q@4&y%BgnF)FE;ixVba0t!jMpj+IpdW{B69)rP4u=75F>a$XF< z86p*pp_LTfex)56j@=71QM12k9&xLVwrDsolCed**j;YLflZm=nY7@O$H2ATLA7F~ zblfz}Je+|0Fx>sZJ|@9yJ)=f71qVv?lbATo4Xn9qD`peoIMifw2dX=(#gs2aDvNKh zOZf0X9-HczVFU}FoqCh}BjLiQPgvpn%vkCh^=t&6oVBp4tGQwMY0(b{yjez=e>v{x z=+CRNE1*wP;@>wtZy)2((m80wI`S)OyHW3Qn~qK}VTU~?-z`1au2g&}jj}2!0VlYB zytigz-ly*`#hZ(@RS}sBu`6yfDzcb;j)7p&Z$vHf?Je@23dZ8>yQ9H*>n=;9lX)c z=E!s1Rf5h=JJL2hy!xiUb;HWQ$@@Nq(3Pj1+wB|a4)?{c%|FaWrw<#RPj+`Q6{X-_ zDZRWB=@<$3w|+FW<(^nD5WgPUdLKHJUmSt^u2r=o2q%+=nu%{aDuK)l5 literal 0 HcmV?d00001 diff --git a/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_test.py b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_test.py new file mode 100644 index 000000000000..9244a5f6ef63 --- /dev/null +++ b/Packs/SymantecEndpointSecurity/Integrations/SymantecEndpointSecurity/SymantecEndpointSecurity_test.py @@ -0,0 +1,336 @@ +import pytest +from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import +from pytest_mock import MockerFixture +from SymantecEndpointSecurity import ( + normalize_date_format, + extract_events_suspected_duplicates, + calculate_next_fetch, + filter_duplicate_events, + perform_long_running_loop, + UnauthorizedToken, + NextPointingNotAvailable, + Client, + test_module as _test_module, + get_events_command, + sleep_if_necessary, +) + + +def mock_client() -> Client: + return Client( + base_url="test", + token="test_token", + stream_id="test_stream_id", + channel_id="test_channel_id", + verify=True, + proxy=False, + ) + + +@pytest.mark.parametrize( + "date_str, expected_result", + [ + ("2024-10-09T12:34:56.789Z", "2024-10-09T12:34:56Z"), + ("2024-10-09T12:34:56.789324959595959959595Z", "2024-10-09T12:34:56Z"), + ], +) +def test_normalize_date_format(date_str: str, expected_result: str): + """ + Given: + - A date string with microseconds + When: + - The `normalize_date_format` function is called + Then: + - Ensure that return a date string without microseconds + """ + assert normalize_date_format(date_str) == expected_result + + +@pytest.mark.parametrize( + "events, expected_results", + [ + ( + [ + {"uuid": "123", "log_time": "2024-10-09T12:34:56Z"}, + {"uuid": "456", "log_time": "2024-10-09T12:34:56.789Z"}, + {"uuid": "789", "log_time": "2024-10-09T12:34:55.789Z"}, + ], + ["123", "456"], + ) + ], +) +def test_extract_events_suspected_duplicates( + events: list[dict], expected_results: list[str] +): + """ + Given + - A list of events with timestamps + When: + - The `extract_events_suspected_duplicates` function is called + Then: + - Ensure that return a list of UUIDs for events suspected to be duplicates + """ + assert extract_events_suspected_duplicates(events) == expected_results + + +@pytest.mark.parametrize( + "integration_context, events, expected_filtered_events", + [ + pytest.param( + { + "events_suspected_duplicates": ["123", "456"], + "latest_event_time": "2024-10-09T12:34:56Z", + }, + [ + { + "uuid": "123", + "log_time": "2024-10-09T12:34:56Z", + "time": "2024-10-09T12:34:56Z", + }, + { + "uuid": "456", + "log_time": "2024-10-09T12:34:56.789Z", + "time": "2024-10-09T12:34:56.789Z", + }, + { + "uuid": "789", + "log_time": "2024-10-09T12:34:55.789Z", + "time": "2024-10-09T12:34:55.789Z", + }, + ], + [], + id="Event time is equal to or less than last_event_time", + ), + pytest.param( + { + "events_suspected_duplicates": ["123"], + "latest_event_time": "2024-10-09T12:34:56Z", + }, + [ + { + "uuid": "123", + "log_time": 
"2024-10-09T12:34:56Z", + "time": "2024-10-09T12:34:56Z", + }, + { + "uuid": "456", + "log_time": "2024-10-09T12:34:56.789Z", + "time": "2024-10-09T12:34:56.789Z", + }, + ], + [ + { + "uuid": "456", + "log_time": "2024-10-09T12:34:56.789Z", + "time": "2024-10-09T12:34:56.789Z", + "_time": "2024-10-09T12:34:56.789Z", + } + ], + id="Events time is equal to last_event_time but one of them not include in suspected duplicates", + ), + pytest.param( + { + "events_suspected_duplicates": ["123"], + "latest_event_time": "2024-10-09T12:34:56Z", + }, + [ + { + "uuid": "456", + "log_time": "2024-10-09T12:35:56.789Z", + "time": "2024-10-09T12:35:56.789Z", + }, + ], + [ + { + "uuid": "456", + "log_time": "2024-10-09T12:35:56.789Z", + "time": "2024-10-09T12:35:56.789Z", + "_time": "2024-10-09T12:35:56.789Z", + } + ], + id="Events time is greater than last_event_time", + ), + ], +) +def test_filter_duplicate_events( + integration_context: dict[str, str], + events: list[dict[str, str]], + expected_filtered_events: list[dict[str, str]], +): + """ + Given: + - A list of events with timestamps + When: + - The `filter_duplicate_events` function is called + Then: + - Ensure that a list of the events that are not duplicates is returned + """ + filtered_events = filter_duplicate_events(events, integration_context) + assert filtered_events == expected_filtered_events + + +@pytest.mark.parametrize( + "filtered_events, next_hash, include_last_fetch_events, last_integration_context, expected_integration_context", + [ + ( + [ + {"uuid": "12", "log_time": "2024-10-09T12:34:56Z"}, + {"uuid": "34", "log_time": "2024-10-09T12:34:56Z"}, + {"uuid": "56", "log_time": "2024-10-09T12:34:56Z"}, + ], + "hash_test_1", + False, + { + "latest_event_time": "2024-10-09T12:34:56Z", + "events_suspected_duplicates": ["78", "90"], + "next_fetch": {"next": "hash_test"}, + "last_fetch_events": [], + }, + { + "latest_event_time": "2024-10-09T12:34:56Z", + "events_suspected_duplicates": ["12", "34", "56", "78", "90"], + "next_fetch": {"next": "hash_test_1"}, + "last_fetch_events": [], + }, + ) + ], +) +def test_calculate_next_fetch_last_latest_event_time_are_equal( + mocker: MockerFixture, + filtered_events: list[dict[str, str]], + next_hash: str, + include_last_fetch_events: bool, + last_integration_context: dict[str, str], + expected_integration_context: dict, +): + """ + Given: + - A set of filtered events, next hash, and last integration context + When: + - The `calculate_next_fetch` function is called + Then: + - Ensure that updated the 'integration_context' with new events in addition to the old ones, and the next hash + """ + mock_set_integration_context = mocker.patch( + "SymantecEndpointSecurity.set_integration_context" + ) + calculate_next_fetch( + filtered_events, next_hash, include_last_fetch_events, last_integration_context + ) + + assert mock_set_integration_context.call_args[0][0] == expected_integration_context + + +def test_perform_long_running_loop_unauthorized_token(mocker: MockerFixture): + """ + Given: + - The `perform_long_running_loop` function is called + When: + - The function is called + Then: + - Ensure that the function runs indefinitely until the container is stopped + """ + mocker.patch( + "SymantecEndpointSecurity.get_events_command", + side_effect=[UnauthorizedToken, Exception("Stop")], + ) + mock_get_token = mocker.patch.object(Client, "_update_access_token_in_headers") + mocker.patch("SymantecEndpointSecurity.sleep_if_necessary") + with pytest.raises(DemistoException, match="Failed to fetch logs from API"): 
+        perform_long_running_loop(mock_client())
+    assert mock_get_token.call_count == 2
+
+
+def test_perform_long_running_loop_next_pointing_not_available(mocker: MockerFixture):
+    """
+    Given:
+    - An integration context holding a `next_fetch` pointer, and a fetch that raises NextPointingNotAvailable
+    When:
+    - The `perform_long_running_loop` function is called
+    Then:
+    - Ensure that `next_fetch` is removed from the integration context
+    """
+    mock_integration_context = {"next_fetch": {"next": "test"}}
+    mocker.patch(
+        "SymantecEndpointSecurity.get_events_command",
+        side_effect=[NextPointingNotAvailable, Exception("Stop")],
+    )
+    mocker.patch.object(Client, "_update_access_token_in_headers")
+    mocker.patch(
+        "SymantecEndpointSecurity.get_integration_context",
+        return_value=mock_integration_context,
+    )
+    mocker.patch("SymantecEndpointSecurity.sleep_if_necessary")
+    with pytest.raises(DemistoException, match="Failed to fetch logs from API"):
+        perform_long_running_loop(mock_client())
+    assert mock_integration_context == {}
+
+
+def test_test_module(mocker: MockerFixture):
+    """
+    Given:
+    - A client
+    When:
+    - The `test_module` function is called
+    Then:
+    - Ensure there is no API call in the test_module function
+      (see the docstring in the `test_module` function).
+    """
+    mock__http_request = mocker.patch.object(Client, "_http_request")
+    assert _test_module() == "ok"
+    mock__http_request.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "mock_status_code, exception_type",
+    [
+        (500, DemistoException),
+        (401, UnauthorizedToken),
+        (410, NextPointingNotAvailable),
+    ],
+)
+def test_get_events_command_with_raises(
+    mocker: MockerFixture,
+    mock_status_code: int,
+    exception_type: type[Exception],
+):
+    """
+    Given:
+    - A client and a mocked integration context
+    When:
+    - The `get_events_command` function is called
+    Then:
+    - Ensure that the function raises an exception matching the status code returned from the API call
+    """
+
+    class MockException:
+        status_code = mock_status_code
+
+    mocker.patch.object(Client, "_update_access_token_in_headers")
+    mocker.patch.object(
+        Client, "get_events", side_effect=DemistoException("Test", res=MockException())
+    )
+
+    with pytest.raises(exception_type):
+        get_events_command(mock_client(), {"next_fetch": {"next": "test"}})
+
+
+@pytest.mark.parametrize(
+    "start_run, end_run, call_count",
+    [
+        pytest.param(10, 20, 1, id="The sleep function should be called once"),
+        pytest.param(10, 70, 0, id="The sleep function should not be called"),
+    ],
+)
+def test_sleep_if_necessary(mocker: MockerFixture, start_run: int, end_run: int, call_count: int):
+    """
+    Given:
+    - A mocked fetch run duration
+    When:
+    - The `sleep_if_necessary` function is called
+    Then:
+    - Ensure that the sleep function is called with the appropriate interval value,
+      or not called at all if unnecessary.
+    """
+    mock_sleep = mocker.patch("SymantecEndpointSecurity.time.sleep")
+    sleep_if_necessary(end_run - start_run)
+    assert mock_sleep.call_count == call_count
diff --git a/Packs/SymantecEndpointSecurity/README.md b/Packs/SymantecEndpointSecurity/README.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Packs/SymantecEndpointSecurity/pack_metadata.json b/Packs/SymantecEndpointSecurity/pack_metadata.json
new file mode 100644
index 000000000000..38246d153d43
--- /dev/null
+++ b/Packs/SymantecEndpointSecurity/pack_metadata.json
@@ -0,0 +1,20 @@
+{
+    "name": "Symantec Endpoint Security",
+    "description": "Use Cloud Platform Connections, a cloud-based security feature in Symantec Endpoint Security, to discover and protect instances of public cloud platforms and their workloads.",
+    "support": "xsoar",
+    "currentVersion": "1.0.0",
+    "author": "Cortex XSOAR",
+    "url": "https://www.paloaltonetworks.com/cortex",
+    "email": "",
+    "categories": [
+        "Analytics & SIEM"
+    ],
+    "tags": [],
+    "useCases": [],
+    "keywords": [
+        "ses"
+    ],
+    "marketplaces": [
+        "marketplacev2"
+    ]
+}
\ No newline at end of file