diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e210658..2b3392e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,11 +1,11 @@ repos: - repo: https://github.com/phantomcyber/dev-cicd-tools - rev: v1.17 + rev: v1.23 hooks: - id: org-hook - id: package-app-dependencies - repo: https://github.com/Yelp/detect-secrets - rev: v1.4.0 + rev: v1.5.0 hooks: - id: detect-secrets args: ['--no-verify', '--exclude-files', '^office365.json$'] diff --git a/README.md b/README.md index cd1ecbc..5121f71 100644 --- a/README.md +++ b/README.md @@ -2,11 +2,11 @@ # MS Graph for Office 365 Publisher: Splunk -Connector Version: 3.0.0 +Connector Version: 3.0.1 Product Vendor: Microsoft Product Name: Office 365 (MS Graph) Product Version Supported (regex): ".\*" -Minimum Product Version: 6.1.1 +Minimum Product Version: 6.2.2 This app connects to Office 365 using the MS Graph API to support investigate and generic actions related to the email messages and calendar events @@ -350,6 +350,10 @@ VARIABLE | REQUIRED | TYPE | DESCRIPTION [get folder id](#action-get-folder-id) - Get the API ID of the folder [send email](#action-send-email) - Sends an email with optional text rendering. Attachments are allowed a Content-ID tag for reference within the html [on poll](#action-on-poll) - Ingest emails from Office 365 using Graph API +[update email](#action-update-email) - Update an email on the server +[block sender](#action-block-sender) - Add the sender email into the block list +[unblock sender](#action-unblock-sender) - Remove the sender email from the block list +[resolve name](#action-resolve-name) - Verify aliases and resolve display names to the appropriate user ## action: 'test connectivity' Use supplied credentials to generate a token with MS Graph @@ -1523,4 +1527,162 @@ PARAMETER | REQUIRED | DESCRIPTION | TYPE | CONTAINS **artifact_count** | optional | Parameter Ignored in this app | numeric | #### Action Output -No Output \ No newline at end of file +No Output + +## action: 'update email' +Update an email on the server + +Type: **generic** +Read only: **False** + +Currently, this action only updates the categories and subject of an email. To set multiple categories, please pass a comma-separated list to the category parameter.
NOTE: If the user tries to update the categories, then the existing categories of the email will be replaced with the new categories provided as input.
+
+#### Action Parameters
+PARAMETER | REQUIRED | DESCRIPTION | TYPE | CONTAINS
+--------- | -------- | ----------- | ---- | --------
+**id** | required | Message ID of the email to update | string | `msgoffice365 message id`
+**email_address** | required | Email address of the mailbox owner | string | `email`
+**subject** | optional | Subject to set | string |
+**categories** | optional | Categories to set | string |
+
+#### Action Output
+DATA PATH | TYPE | CONTAINS | EXAMPLE VALUES
+--------- | ---- | -------- | --------------
+action_result.status | string | | success failed
+action_result.parameter.categories | string | | Yellow, Blue, Purple, red
+action_result.parameter.email_address | string | `email` | test@sample.com
+action_result.parameter.id | string | `msgoffice365 message id` | AAMkAGIyMTUxYTkzLWRjYjctNDFjMi04NTAxLTQzMDFkNDhlZmI5MQBGAAAAAACxQSnX8n2GS4cunBIQ2sV7BwCQhMsoV7EYSJF42ChR9SCxAAAAYCbsAACQhMsoV7EYSJF42ChR9SCxAAAAjh8bAAA=
+action_result.parameter.subject | string | | Both value are modified
+action_result.data.\*.@odata.context | string | `url` | https://test.abc.com/v1.0/$metadata#users('user%40.abc.com')/messages(internetMessageHeaders,body,uniqueBody,sender,subject)/$entity
+action_result.data.\*.@odata.etag | string | | W/"CQAAABYAAABBKXVvwEWISZupmqX4mJS3AAO8DBJl"
+action_result.data.\*.body.content | string | | `Have a good time with these.\\r\\n`
+action_result.data.\*.body.contentType | string | | html
+action_result.data.\*.bodyPreview | string | | Have a good time with these.
+action_result.data.\*.changeKey | string | | CQAAABYAAADTteE6Q2eCQKSqg19j6T+NAAYzSv5R
+action_result.data.\*.conversationId | string | | AAQkAGYxNGJmOWQyLTlhMjctNGRiOS1iODU0LTA1ZWE3ZmQ3NDU3MQAQAORC3aOpHnZMsHD4-7L40sY=
+action_result.data.\*.conversationIndex | string | | AQHZopYz5ELdo6kedkywcPj/svjSxg==
+action_result.data.\*.createdDateTime | string | | 2023-06-19T10:09:58Z
+action_result.data.\*.flag.flagStatus | string | | notFlagged
+action_result.data.\*.from.emailAddress.address | string | `email` | test@test.com
+action_result.data.\*.from.emailAddress.name | string | | Ryan Edwards
+action_result.data.\*.hasAttachments | boolean | | True False
+action_result.data.\*.id | string | `msgoffice365 message id` | AQMkADU3NDk3MzJlLTY3MDQtNDE2Ny1iZDk1LTc4YjEwYzhmZDc5YQBGAAADyW3X5P7Hb0_MMHKonvdoWQcAQSl1b8BFiEmbqZql_JiUtwAAAgEMAAAAQSl1b8BFiEmbqZql_JiUtwADu9Tv8QAAAA==
+action_result.data.\*.importance | string | | normal
+action_result.data.\*.inferenceClassification | string | | focused
+action_result.data.\*.internetMessageId | string | `msgoffice365 internet message id` |
+action_result.data.\*.isDeliveryReceiptRequested | boolean | | True False
+action_result.data.\*.isDraft | boolean | | True False
+action_result.data.\*.isRead | boolean | | True False
+action_result.data.\*.isReadReceiptRequested | boolean | | True False
+action_result.data.\*.lastModifiedDateTime | string | | 2023-06-19T10:09:58Z
+action_result.data.\*.parentFolderId | string | `msgoffice365 folder id` | AQMkAGYxNGJmOWQyLTlhMjctNGRiOS1iODU0LTA1ZWE3ZmQ3NDU3MQAuAAADeDDJKaEf4EihMWU6SZgKbAEA07XhOkNngkCkqoNfY_k-jQAAAgEPAAAA
+action_result.data.\*.receivedDateTime | string | | 2020-06-18T09:11:31Z
+action_result.data.\*.sender.emailAddress.address | string | `email` | notifications@testdomain.com
+action_result.data.\*.sender.emailAddress.name | string | `email` | notifications@testdomain.com
+action_result.data.\*.sentDateTime | string | | 2023-06-19T10:09:58Z
+action_result.data.\*.subject | string | | test html
+action_result.data.\*.toRecipients.\*.emailAddress.address | string | `email` | test@test.com
+action_result.data.\*.toRecipients.\*.emailAddress.name | string | | Ryan Edwards
+action_result.data.\*.webLink | string | | https://outlook.office365.com/owa/?ItemID=AAkALgAAAAAAHYQDEapmEc2byACqAC%2FEWg0A07XhOkNngkCkqoNfY%2Bk%2FjQAGNNQOowAA&exvsurl=1&viewmodel=ReadMessageItem
+action_result.summary | string | |
+action_result.message | string | | Create time: 2017-10-05T20:19:58Z
+Subject: Both value are modified
+Sent time: 2017-10-03T21:31:20Z
+summary.total_objects | numeric | | 1
+summary.total_objects_successful | numeric | | 1
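+
+For reference, this action lines up with Microsoft Graph's `PATCH /users/{id | userPrincipalName}/messages/{id}` call, and Graph replaces the whole `categories` collection on update, which is why existing categories are overwritten. A minimal sketch of the underlying request, assuming a valid OAuth bearer token (the token and IDs below are illustrative placeholders, not values used by this app):
+
+```python
+import json
+
+import requests
+
+TOKEN = "..."  # assumption: a valid Microsoft Graph access token
+EMAIL_ADDRESS = "test@sample.com"  # mailbox owner (illustrative)
+MESSAGE_ID = "AAMkAGIyMTUxYTkz..."  # truncated placeholder
+
+url = f"https://graph.microsoft.com/v1.0/users/{EMAIL_ADDRESS}/messages/{MESSAGE_ID}"
+# Graph overwrites the categories collection in place, mirroring how this
+# action replaces existing categories with the comma-separated input.
+body = {"subject": "Updated subject", "categories": ["Yellow", "Blue", "Purple"]}
+
+resp = requests.patch(
+    url,
+    headers={"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"},
+    data=json.dumps(body),
+)
+resp.raise_for_status()
+print(resp.json()["subject"], resp.json()["categories"])
+```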
+
+## action: 'block sender'
+Add the sender email into the block list
+
+Type: **contain**
+Read only: **False**
+
+This action takes as input an email whose sender will be added to the Blocked Senders List. The message ID changes after execution; because the message ID is a required request parameter, undoing this action requires the new, unique ID. Note that a message from the email address must exist in the user's mailbox before you can add the email address to or remove it from the Blocked Senders List. To view the current Blocked Senders List, refer to the relevant PowerShell documentation.
+
+#### Action Parameters
+PARAMETER | REQUIRED | DESCRIPTION | TYPE | CONTAINS
+--------- | -------- | ----------- | ---- | --------
+**message_id** | required | Message ID of the email whose sender will be blocked | string |
+**user_id** | required | User ID of the mailbox owner | string |
+**move_to_junk_folder** | optional | Should the email be moved to the junk folder | boolean |
+
+#### Action Output
+DATA PATH | TYPE | CONTAINS | EXAMPLE VALUES
+--------- | ---- | -------- | --------------
+action_result.parameter.message_id | string | |
+action_result.parameter.move_to_junk_folder | boolean | |
+action_result.parameter.user_id | string | |
+action_result.status | string | | success failed
+action_result.summary | string | |
+action_result.message | string | |
+summary.total_objects | numeric | |
+summary.total_objects_successful | numeric | |
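+
+The `move_to_junk_folder` parameter and the changing message ID are consistent with Microsoft Graph's beta `markAsJunk` message action, which blocks the sender and optionally moves the message; treating that as the underlying call is an assumption here, not something this app documents. A hedged sketch (token and IDs are placeholders):
+
+```python
+import json
+
+import requests
+
+TOKEN = "..."  # assumption: a valid Microsoft Graph access token
+USER_ID = "user@sample.com"  # mailbox owner (illustrative)
+MESSAGE_ID = "AAMkAGIy..."  # truncated placeholder
+
+# Assumed endpoint: the beta markAsJunk action adds the sender to the
+# Blocked Senders List and can move the message to the Junk Email folder.
+url = f"https://graph.microsoft.com/beta/users/{USER_ID}/messages/{MESSAGE_ID}/markAsJunk"
+resp = requests.post(
+    url,
+    headers={"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"},
+    data=json.dumps({"moveToJunk": True}),  # mirrors move_to_junk_folder
+)
+resp.raise_for_status()
+# Moving the message assigns it a new ID, which is why undoing this action
+# needs the new, unique message ID.
+print(resp.json().get("id"))
+```
+
+The 'unblock sender' action below is the mirror image (presumably `markAsNotJunk` with a move-to-inbox flag).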
+
+## action: 'unblock sender'
+Remove the sender email from the block list
+
+Type: **contain**
+Read only: **False**
+
+This action takes as input an email whose sender will be removed from the Blocked Senders List. The message ID changes after execution; because the message ID is a required request parameter, undoing this action requires the new, unique ID. Note that a message from the email address must exist in the user's mailbox before you can add the email address to or remove it from the Blocked Senders List. To view the current Blocked Senders List, refer to the relevant PowerShell documentation.
+
+#### Action Parameters
+PARAMETER | REQUIRED | DESCRIPTION | TYPE | CONTAINS
+--------- | -------- | ----------- | ---- | --------
+**message_id** | required | Message ID of the email whose sender will be unblocked | string |
+**user_id** | required | User ID of the mailbox owner | string |
+**move_to_inbox** | optional | Should the email be moved to the inbox folder | boolean |
+
+#### Action Output
+DATA PATH | TYPE | CONTAINS | EXAMPLE VALUES
+--------- | ---- | -------- | --------------
+action_result.parameter.message_id | string | |
+action_result.parameter.move_to_inbox | boolean | |
+action_result.parameter.user_id | string | |
+action_result.status | string | | success failed
+action_result.summary | string | |
+action_result.message | string | |
+summary.total_objects | numeric | |
+summary.total_objects_successful | numeric | |
+
+## action: 'resolve name'
+Verify aliases and resolve display names to the appropriate user
+
+Type: **investigate**
+Read only: **True**
+
+Resolve an alias or email address, gathering detailed information about the user.
+
+#### Action Parameters
+PARAMETER | REQUIRED | DESCRIPTION | TYPE | CONTAINS
+--------- | -------- | ----------- | ---- | --------
+**email** | required | Alias, display name, or email address to resolve | string | `email` `string`
+
+#### Action Output
+DATA PATH | TYPE | CONTAINS | EXAMPLE VALUES
+--------- | ---- | -------- | --------------
+action_result.parameter.email | string | `email` `string` |
+action_result.data.\*.id | string | `msgoffice365 id` |
+action_result.data.\*.userPrincipalName | string | `msgoffice365 user principal name` |
+action_result.data.\*.givenName | string | `msgoffice365 given name` |
+action_result.data.\*.surname | string | `msgoffice365 surname` |
+action_result.data.\*.displayName | string | `msgoffice365 display name` |
+action_result.data.\*.mailNickname | string | `msgoffice365 mail nickname` |
+action_result.data.\*.mail | string | `email` |
+action_result.data.\*.otherMails | string | `email list` |
+action_result.data.\*.proxyAddresses | string | `email list` |
+action_result.data.\*.jobTitle | string | `msgoffice365 job title` |
+action_result.data.\*.officeLocation | string | `msgoffice365 office location` |
+action_result.data.\*.value | string | `msgoffice365 user purpose` |
+action_result.data.\*.mobilePhone | string | `msgoffice365 mobile phone` |
+action_result.data.\*.businessPhones | string | `msgoffice365 business phones` |
+action_result.data.\*.preferredLanguage | string | `msgoffice365 preferred language` |
+action_result.data.\*.state | string | `msgoffice365 state` |
+action_result.data.\*.postalCode | string | `msgoffice365 postal code` |
+action_result.summary | string | |
+action_result.status | string | | success failed
+action_result.message | string | |
+summary.total_objects | numeric | |
+summary.total_objects_successful | numeric | |
\ No newline at end of file
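For illustration, Microsoft Graph exposes user lookups through the `/users` collection, and one plausible way to resolve an alias or display name is a `$filter` query like the one below. The exact filter this connector builds is not shown in this document, so treat the sketch as an assumption (token and name are placeholders):

```python
import requests

TOKEN = "..."  # assumption: a valid Microsoft Graph access token
NAME = "test"  # alias, display name, or email address to resolve

url = "https://graph.microsoft.com/v1.0/users"
params = {
    "$filter": (
        f"startswith(displayName,'{NAME}') "
        f"or mail eq '{NAME}' or userPrincipalName eq '{NAME}'"
    ),
    "$select": "id,userPrincipalName,displayName,mail,jobTitle,officeLocation",
}
resp = requests.get(url, headers={"Authorization": f"Bearer {TOKEN}"}, params=params)
resp.raise_for_status()
for user in resp.json().get("value", []):  # zero or more matching users
    print(user["userPrincipalName"], "->", user["displayName"])
```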
"phantom_msgraphoffice365", "main_module": "office365_connector.py", - "min_phantom_version": "6.1.1", + "min_phantom_version": "6.2.2", "latest_tested_versions": [ "Cloud 9 January, 2024" ], @@ -7130,37 +7130,800 @@ }, "output": [], "versions": "EQ(*)" + }, + { + "action": "update email", + "description": "Update an email on the server", + "verbose": "Currently, this action only updates the categories and subject of an email. To set multiple categories, please pass a comma-separated list to the category parameter.
+        "type": "generic",
+        "identifier": "update_email",
+        "read_only": false,
+        "parameters": {
+            "id": {
+                "description": "Message ID of the email to update",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [
+                    "msgoffice365 message id"
+                ],
+                "order": 0
+            },
+            "email_address": {
+                "description": "Email address of the mailbox owner",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [
+                    "email"
+                ],
+                "order": 1
+            },
+            "subject": {
+                "description": "Subject to set",
+                "data_type": "string",
+                "order": 2
+            },
+            "categories": {
+                "description": "Categories to set",
+                "data_type": "string",
+                "order": 3,
+                "allow_list": true
+            }
+        },
+        "render": {
+            "type": "table"
+        },
+        "output": [
+            {
+                "data_path": "action_result.status",
+                "data_type": "string",
+                "example_values": [
+                    "success",
+                    "failed"
+                ]
+            },
+            {
+                "data_path": "action_result.parameter.categories",
+                "data_type": "string",
+                "example_values": [
+                    "Yellow, Blue, Purple, red"
+                ]
+            },
+            {
+                "data_path": "action_result.parameter.email_address",
+                "data_type": "string",
+                "example_values": [
+                    "test@sample.com"
+                ],
+                "contains": [
+                    "email"
+                ]
+            },
+            {
+                "data_path": "action_result.parameter.id",
+                "data_type": "string",
+                "example_values": [
+                    "AAMkAGIyMTUxYTkzLWRjYjctNDFjMi04NTAxLTQzMDFkNDhlZmI5MQBGAAAAAACxQSnX8n2GS4cunBIQ2sV7BwCQhMsoV7EYSJF42ChR9SCxAAAAYCbsAACQhMsoV7EYSJF42ChR9SCxAAAAjh8bAAA="
+                ],
+                "contains": [
+                    "msgoffice365 message id"
+                ]
+            },
+            {
+                "data_path": "action_result.parameter.subject",
+                "data_type": "string",
+                "example_values": [
+                    "Both value are modified"
+                ]
+            },
+            {
+                "data_path": "action_result.data.*.@odata.context",
+                "data_type": "string",
+                "example_values": [
+                    "https://test.abc.com/v1.0/$metadata#users('user%40.abc.com')/messages(internetMessageHeaders,body,uniqueBody,sender,subject)/$entity"
+                ],
+                "contains": [
+                    "url"
+                ]
+            },
+            {
+                "data_path": "action_result.data.*.@odata.etag",
+                "data_type": "string",
+                "example_values": [
+                    "W/\"CQAAABYAAABBKXVvwEWISZupmqX4mJS3AAO8DBJl\""
+                ]
+            },
+            {
+                "data_path": "action_result.data.*.body.content",
+                "data_type": "string",
+                "example_values": [
+                    "`Have a good time with these.\\r\\n`"
+                ]
+            },
+            {
+                "data_path": "action_result.data.*.body.contentType",
+                "data_type": "string",
+                "example_values": [
+                    "html"
+                ]
+            },
+            {
+                "data_path": "action_result.data.*.bodyPreview",
+                "data_type": "string",
+                "example_values": [
+                    "Have a good time with these."
+ ], + "column_name": "Body Preview", + "column_order": 2 + }, + { + "data_path": "action_result.data.*.changeKey", + "data_type": "string", + "example_values": [ + "CQAAABYAAADTteE6Q2eCQKSqg19j6T+NAAYzSv5R" + ] + }, + { + "data_path": "action_result.data.*.conversationId", + "data_type": "string", + "example_values": [ + "AAQkAGYxNGJmOWQyLTlhMjctNGRiOS1iODU0LTA1ZWE3ZmQ3NDU3MQAQAORC3aOpHnZMsHD4-7L40sY=" + ] + }, + { + "data_path": "action_result.data.*.conversationIndex", + "data_type": "string", + "example_values": [ + "AQHZopYz5ELdo6kedkywcPj/svjSxg==" + ] + }, + { + "data_path": "action_result.data.*.createdDateTime", + "data_type": "string", + "example_values": [ + "2023-06-19T10:09:58Z" + ] + }, + { + "data_path": "action_result.data.*.flag.flagStatus", + "data_type": "string", + "example_values": [ + "notFlagged" + ] + }, + { + "data_path": "action_result.data.*.from.emailAddress.address", + "data_type": "string", + "example_values": [ + "test@test.com" + ], + "contains": [ + "email" + ] + }, + { + "data_path": "action_result.data.*.from.emailAddress.name", + "data_type": "string", + "example_values": [ + "Ryan Edwards" + ] + }, + { + "data_path": "action_result.data.*.hasAttachments", + "data_type": "boolean", + "example_values": [ + true, + false + ], + "column_name": "Has Attachments", + "column_order": 3 + }, + { + "data_path": "action_result.data.*.id", + "data_type": "string", + "example_values": [ + "AQMkADU3NDk3MzJlLTY3MDQtNDE2Ny1iZDk1LTc4YjEwYzhmZDc5YQBGAAADyW3X5P7Hb0_MMHKonvdoWQcAQSl1b8BFiEmbqZql_JiUtwAAAgEMAAAAQSl1b8BFiEmbqZql_JiUtwADu9Tv8QAAAA==" + ], + "contains": [ + "msgoffice365 message id" + ], + "column_name": "Message ID", + "column_order": 0 + }, + { + "data_path": "action_result.data.*.importance", + "data_type": "string", + "example_values": [ + "normal" + ] + }, + { + "data_path": "action_result.data.*.inferenceClassification", + "data_type": "string", + "example_values": [ + "focused" + ] + }, + { + "data_path": "action_result.data.*.internetMessageId", + "data_type": "string", + "example_values": [ + "" + ], + "contains": [ + "msgoffice365 internet message id" + ] + }, + { + "data_path": "action_result.data.*.isDeliveryReceiptRequested", + "data_type": "boolean", + "example_values": [ + true, + false + ] + }, + { + "data_path": "action_result.data.*.isDraft", + "data_type": "boolean", + "example_values": [ + true, + false + ] + }, + { + "data_path": "action_result.data.*.isRead", + "data_type": "boolean", + "example_values": [ + true, + false + ] + }, + { + "data_path": "action_result.data.*.isReadReceiptRequested", + "data_type": "boolean", + "example_values": [ + true, + false + ] + }, + { + "data_path": "action_result.data.*.lastModifiedDateTime", + "data_type": "string", + "example_values": [ + "2023-06-19T10:09:58Z" + ] + }, + { + "data_path": "action_result.data.*.parentFolderId", + "data_type": "string", + "example_values": [ + "AQMkAGYxNGJmOWQyLTlhMjctNGRiOS1iODU0LTA1ZWE3ZmQ3NDU3MQAuAAADeDDJKaEf4EihMWU6SZgKbAEA07XhOkNngkCkqoNfY_k-jQAAAgEPAAAA" + ], + "contains": [ + "msgoffice365 folder id" + ] + }, + { + "data_path": "action_result.data.*.receivedDateTime", + "data_type": "string", + "example_values": [ + "2020-06-18T09:11:31Z" + ] + }, + { + "data_path": "action_result.data.*.sender.emailAddress.address", + "data_type": "string", + "example_values": [ + "notifications@testdomain.com" + ], + "contains": [ + "email" + ] + }, + { + "data_path": "action_result.data.*.sender.emailAddress.name", + "data_type": "string", + "example_values": [ + 
"notifications@testdomain.com" + ], + "contains": [ + "email" + ] + }, + { + "data_path": "action_result.data.*.sentDateTime", + "data_type": "string", + "example_values": [ + "2023-06-19T10:09:58Z" + ] + }, + { + "data_path": "action_result.data.*.subject", + "data_type": "string", + "example_values": [ + "test html" + ], + "column_name": "Subject", + "column_order": 1 + }, + { + "data_path": "action_result.data.*.toRecipients.*.emailAddress.address", + "data_type": "string", + "example_values": [ + "test@test.com" + ], + "contains": [ + "email" + ] + }, + { + "data_path": "action_result.data.*.toRecipients.*.emailAddress.name", + "data_type": "string", + "example_values": [ + "Ryan Edwards" + ] + }, + { + "data_path": "action_result.data.*.webLink", + "data_type": "string", + "example_values": [ + "https://outlook.office365.com/owa/?ItemID=AAkALgAAAAAAHYQDEapmEc2byACqAC%2FEWg0A07XhOkNngkCkqoNfY%2Bk%2FjQAGNNQOowAA&exvsurl=1&viewmodel=ReadMessageItem" + ] + }, + { + "data_path": "action_result.summary", + "data_type": "string" + }, + { + "data_path": "action_result.message", + "data_type": "string", + "example_values": [ + "Create time: 2017-10-05T20:19:58Z\nSubject: Both value are modified\nSent time: 2017-10-03T21:31:20Z" + ] + }, + { + "data_path": "summary.total_objects", + "data_type": "numeric", + "example_values": [ + 1 + ] + }, + { + "data_path": "summary.total_objects_successful", + "data_type": "numeric", + "example_values": [ + 1 + ] + } + ], + "versions": "EQ(*)" + }, + { + "action": "block sender", + "identifier": "block_sender", + "description": "Add the sender email into the block list", + "verbose": "This action takes as input an email whose sender will be added to the Block Senders List. The message ID changes after the execution and is a required parameter for request hence undo action would require unique ID. 
+        "type": "contain",
+        "read_only": false,
+        "parameters": {
+            "message_id": {
+                "description": "Message ID of the email whose sender will be blocked",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [],
+                "value_list": [],
+                "default": "",
+                "order": 0,
+                "name": "message_id"
+            },
+            "user_id": {
+                "description": "User ID of the mailbox owner",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [],
+                "value_list": [],
+                "default": "",
+                "order": 1,
+                "name": "user_id"
+            },
+            "move_to_junk_folder": {
+                "description": "Should the email be moved to the junk folder",
+                "data_type": "boolean",
+                "required": false,
+                "primary": false,
+                "contains": [],
+                "default": "",
+                "order": 2,
+                "name": "move_to_junk_folder"
+            }
+        },
+        "output": [
+            {
+                "data_path": "action_result.parameter.message_id",
+                "data_type": "string",
+                "contains": [],
+                "column_name": "message id",
+                "column_order": 0
+            },
+            {
+                "data_path": "action_result.parameter.move_to_junk_folder",
+                "data_type": "boolean",
+                "contains": [],
+                "column_name": "move to junk folder",
+                "column_order": 1
+            },
+            {
+                "data_path": "action_result.parameter.user_id",
+                "data_type": "string",
+                "contains": [],
+                "column_name": "user id",
+                "column_order": 2
+            },
+            {
+                "data_path": "action_result.status",
+                "data_type": "string",
+                "example_values": [
+                    "success",
+                    "failed"
+                ],
+                "column_name": "status",
+                "column_order": 3
+            },
+            {
+                "data_path": "action_result.summary",
+                "data_type": "string"
+            },
+            {
+                "data_path": "action_result.message",
+                "data_type": "string"
+            },
+            {
+                "data_path": "summary.total_objects",
+                "data_type": "numeric"
+            },
+            {
+                "data_path": "summary.total_objects_successful",
+                "data_type": "numeric"
+            }
+        ],
+        "render": {
+            "type": "table"
+        },
+        "versions": "EQ(*)"
+    },
+    {
+        "action": "unblock sender",
+        "identifier": "unblock_sender",
+        "description": "Remove the sender email from the block list",
+        "verbose": "This action takes as input an email whose sender will be removed from the Blocked Senders List. The message ID changes after execution; because the message ID is a required request parameter, undoing this action requires the new, unique ID. Note that a message from the email address must exist in the user's mailbox before you can add the email address to or remove it from the Blocked Senders List. To view the current Blocked Senders List, refer to the relevant PowerShell documentation.",
+        "type": "contain",
+        "read_only": false,
+        "parameters": {
+            "message_id": {
+                "description": "Message ID of the email whose sender will be unblocked",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [],
+                "value_list": [],
+                "default": "",
+                "order": 0,
+                "name": "message_id"
+            },
+            "user_id": {
+                "description": "User ID of the mailbox owner",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [],
+                "value_list": [],
+                "default": "",
+                "order": 1,
+                "name": "user_id"
+            },
+            "move_to_inbox": {
+                "description": "Should the email be moved to the inbox folder",
+                "data_type": "boolean",
+                "required": false,
+                "primary": false,
+                "contains": [],
+                "default": "",
+                "order": 2,
+                "name": "move_to_inbox"
+            }
+        },
+        "output": [
+            {
+                "data_path": "action_result.parameter.message_id",
+                "data_type": "string",
+                "contains": [],
+                "column_name": "message id",
+                "column_order": 0
+            },
+            {
+                "data_path": "action_result.parameter.move_to_inbox",
+                "data_type": "boolean",
+                "contains": [],
+                "column_name": "move to inbox folder",
+                "column_order": 1
+            },
+            {
+                "data_path": "action_result.parameter.user_id",
+                "data_type": "string",
+                "contains": [],
+                "column_name": "user id",
+                "column_order": 2
+            },
+            {
+                "data_path": "action_result.status",
+                "data_type": "string",
+                "example_values": [
+                    "success",
+                    "failed"
+                ],
+                "column_name": "status",
+                "column_order": 3
+            },
+            {
+                "data_path": "action_result.summary",
+                "data_type": "string"
+            },
+            {
+                "data_path": "action_result.message",
+                "data_type": "string"
+            },
+            {
+                "data_path": "summary.total_objects",
+                "data_type": "numeric"
+            },
+            {
+                "data_path": "summary.total_objects_successful",
+                "data_type": "numeric"
+            }
+        ],
+        "render": {
+            "type": "table"
+        },
+        "versions": "EQ(*)"
+    },
+    {
+        "action": "resolve name",
+        "identifier": "resolve_name",
+        "description": "Verify aliases and resolve display names to the appropriate user",
+        "verbose": "Resolve an alias or email address, gathering detailed information about the user.",
+        "type": "investigate",
+        "read_only": true,
+        "parameters": {
+            "email": {
+                "description": "Alias, display name, or email address to resolve",
+                "data_type": "string",
+                "required": true,
+                "primary": true,
+                "contains": [
+                    "email",
+                    "string"
+                ],
+                "value_list": [],
+                "default": "",
+                "order": 0,
+                "name": "email"
+            }
+        },
+        "output": [
+            {
+                "data_path": "action_result.parameter.email",
+                "data_type": "string",
+                "contains": [
+                    "email",
+                    "string"
+                ],
+                "column_name": "email",
+                "column_order": 0
+            },
+            {
+                "data_path": "action_result.data.*.id",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 id"
+                ],
+                "column_name": "id",
+                "column_order": 1
+            },
+            {
+                "data_path": "action_result.data.*.userPrincipalName",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 user principal name"
+                ],
+                "column_name": "principal name",
+                "column_order": 2
+            },
+            {
+                "data_path": "action_result.data.*.givenName",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 given name"
+                ],
+                "column_name": "given name",
+                "column_order": 3
+            },
+            {
+                "data_path": "action_result.data.*.surname",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 surname"
+                ],
+                "column_name": "surname",
+                "column_order": 4
+            },
+            {
+                "data_path": "action_result.data.*.displayName",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 display name"
+                ],
+                "column_name": "display name",
+                "column_order": 5
+            },
+            {
+                "data_path": "action_result.data.*.mailNickname",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 mail nickname"
+                ],
+                "column_name": "mail nickname",
+                "column_order": 6
+            },
+            {
+                "data_path": "action_result.data.*.mail",
+                "data_type": "string",
+                "contains": [
+                    "email"
+                ],
+                "column_name": "mail",
+                "column_order": 7
+            },
+            {
+                "data_path": "action_result.data.*.otherMails",
+                "data_type": "string",
+                "contains": [
+                    "email list"
+                ],
+                "column_name": "other mails",
+                "column_order": 8
+            },
+            {
+                "data_path": "action_result.data.*.proxyAddresses",
+                "data_type": "string",
+                "contains": [
+                    "email list"
+                ],
+                "column_name": "proxy email addresses",
+                "column_order": 9
+            },
+            {
+                "data_path": "action_result.data.*.jobTitle",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 job title"
+                ],
+                "column_name": "job title",
+                "column_order": 10
+            },
+            {
+                "data_path": "action_result.data.*.officeLocation",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 office location"
+                ],
+                "column_name": "office location",
+                "column_order": 11
+            },
+            {
+                "data_path": "action_result.data.*.value",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 user purpose"
+                ],
+                "column_name": "user purpose",
+                "column_order": 12
+            },
+            {
+                "data_path": "action_result.data.*.mobilePhone",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 mobile phone"
+                ],
+                "column_name": "mobile phone",
+                "column_order": 13
+            },
+            {
+                "data_path": "action_result.data.*.businessPhones",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 business phones"
+                ],
+                "column_name": "business phones",
+                "column_order": 14
+            },
+            {
+                "data_path": "action_result.data.*.preferredLanguage",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 preferred language"
+                ],
+                "column_name": "preferred language",
+                "column_order": 15
+            },
+            {
+                "data_path": "action_result.data.*.state",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 state"
+                ],
+                "column_name": "state",
+                "column_order": 16
+            },
+            {
+                "data_path": "action_result.data.*.postalCode",
+                "data_type": "string",
+                "contains": [
+                    "msgoffice365 postal code"
+                ],
+                "column_name": "postal code",
+                "column_order": 17
+            },
+            {
+                "data_path": "action_result.summary",
+                "data_type": "string"
+            },
+            {
+                "data_path": "action_result.status",
+                "data_type": "string",
+                "example_values": [
+                    "success",
+                    "failed"
+                ]
+            },
+            {
+                "data_path": "action_result.message",
+                "data_type": "string"
+            },
+            {
+                "data_path": "summary.total_objects",
+                "data_type": "numeric"
+            },
+            {
+                "data_path": "summary.total_objects_successful",
+                "data_type": "numeric"
+            }
+        ],
+        "render": {
+            "width": 12,
+            "title": "resolve name",
+            "type": "custom",
+            "height": 5,
+            "view": "office365_view.display_view"
+        },
+        "versions": "EQ(*)"
     }
   ],
   "pip_dependencies": {
     "wheel": [
-      {
-        "module": "beautifulsoup4",
-        "input_file": "wheels/py3/beautifulsoup4-4.9.1-py3-none-any.whl"
-      },
-      {
-        "module": "python_magic",
-        "input_file": "wheels/shared/python_magic-0.4.18-py2.py3-none-any.whl"
-      },
-      {
-        "module": "soupsieve",
-        "input_file": "wheels/py3/soupsieve-2.3.2.post1-py3-none-any.whl"
-      }
-    ]
-  },
-  "pip39_dependencies": {
-    "wheel": [
-      {
-        "module": "beautifulsoup4",
-        "input_file": "wheels/py3/beautifulsoup4-4.9.1-py3-none-any.whl"
-      },
       {
         "module": "python_magic",
"input_file": "wheels/shared/python_magic-0.4.18-py2.py3-none-any.whl" - }, - { - "module": "soupsieve", - "input_file": "wheels/py3/soupsieve-2.5-py3-none-any.whl" } ] } diff --git a/office365_connector.py b/office365_connector.py index 45fc1fd..a15200e 100644 --- a/office365_connector.py +++ b/office365_connector.py @@ -43,7 +43,7 @@ TC_FILE = "oauth_task.out" SERVER_TOKEN_URL = "https://login.microsoftonline.com/{0}/oauth2/v2.0/token" -MSGRAPH_API_URL = "https://graph.microsoft.com/v1.0" +MSGRAPH_API_URL = "https://graph.microsoft.com" MAX_END_OFFSET_VAL = 2147483646 @@ -86,7 +86,7 @@ def _load_app_state(asset_id, app_connector=None): except Exception as e: if app_connector: error_msg = _get_error_msg_from_exception(e, app_connector) - app_connector.debug_print('In _load_app_state: {0}'.format(error_msg)) + app_connector.debug_print("In _load_app_state: {0}".format(error_msg)) if app_connector: app_connector.debug_print("Loaded state: ", state) @@ -144,9 +144,7 @@ def _save_app_state(state, asset_id, app_connector=None): except Exception as e: error_msg = _get_error_msg_from_exception(e, app_connector) if app_connector: - app_connector.debug_print( - "Unable to save state file: {0}".format(error_msg) - ) + app_connector.debug_print("Unable to save state file: {0}".format(error_msg)) print("Unable to save state file: {0}".format(error_msg)) return phantom.APP_ERROR @@ -197,18 +195,14 @@ def _validate_integer(action_result, parameter, key, allow_zero=False): try: if not float(parameter).is_integer(): return ( - action_result.set_status( - phantom.APP_ERROR, MSGOFFICE365_VALID_INT_MSG.format(param=key) - ), + action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_VALID_INT_MSG.format(param=key)), None, ) parameter = int(parameter) except Exception: return ( - action_result.set_status( - phantom.APP_ERROR, MSGOFFICE365_VALID_INT_MSG.format(param=key) - ), + action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_VALID_INT_MSG.format(param=key)), None, ) @@ -254,9 +248,7 @@ def _handle_oauth_result(request, path_parts): if not admin_consent and not code: return HttpResponse( - "ERROR: admin_consent or authorization code not found in URL\n{0}".format( - json.dumps(request.GET) - ), + "ERROR: admin_consent or authorization code not found in URL\n{0}".format(json.dumps(request.GET)), content_type="text/plain", status=400, ) @@ -300,9 +292,7 @@ def _handle_oauth_start(request, path_parts): # get the asset id, the state file is created for each asset asset_id = request.GET.get("asset_id") if not asset_id: - return HttpResponse( - "ERROR: Asset ID not found in URL", content_type="text/plain", status=404 - ) + return HttpResponse("ERROR: Asset ID not found in URL", content_type="text/plain", status=404) # Load the state that was created for the asset state = _load_app_state(asset_id) @@ -360,9 +350,7 @@ def handle_request(request, path_parts): auth_status_file_path = "{0}/{1}_{2}".format(app_dir, asset_id, TC_FILE) real_auth_status_file_path = os.path.abspath(auth_status_file_path) if not os.path.dirname(real_auth_status_file_path) == app_dir: - return HttpResponse( - "Error: Invalid asset_id", content_type="text/plain", status=400 - ) + return HttpResponse("Error: Invalid asset_id", content_type="text/plain", status=400) open(auth_status_file_path, "w").close() try: uid = pwd.getpwnam("apache").pw_uid @@ -374,9 +362,7 @@ def handle_request(request, path_parts): return ret_val - return HttpResponse( - "error: Invalid endpoint", content_type="text/plain", status=404 - ) + return 
HttpResponse("error: Invalid endpoint", content_type="text/plain", status=404) def _get_dir_name_from_app_name(app_name): @@ -410,9 +396,7 @@ def __init__(self): self._scope = None self._access_token = None self._refresh_token = None - self._REPLACE_CONST = ( - "C53CEA8298BD401BA695F247633D0542" # pragma: allowlist secret - ) + self._REPLACE_CONST = "C53CEA8298BD401BA695F247633D0542" # pragma: allowlist secret self._duplicate_count = 0 self._asset_id = None @@ -425,9 +409,7 @@ def load_state(self): state = super().load_state() if not isinstance(state, dict): self.debug_print("Resetting the state file with the default format") - state = { - "app_version": self.get_app_json().get('app_version') - } + state = {"app_version": self.get_app_json().get("app_version")} return state return self._decrypt_state(state) @@ -444,19 +426,16 @@ def update_state_fields(self, value, helper_function, error_message): try: return helper_function(value, self._asset_id) except Exception as ex: - self.debug_print("{}: {}".format(error_message, - _get_error_msg_from_exception(ex, self))) + self.debug_print("{}: {}".format(error_message, _get_error_msg_from_exception(ex, self))) return None def check_state_fields(self, state, helper_function, error_message): access_token = state.get("non_admin_auth", {}).get("access_token") if access_token: - state["non_admin_auth"]["access_token"] = self.update_state_fields(access_token, - helper_function, error_message) + state["non_admin_auth"]["access_token"] = self.update_state_fields(access_token, helper_function, error_message) refresh_token = state.get("non_admin_auth", {}).get("refresh_token") if refresh_token: - state["non_admin_auth"]["refresh_token"] = self.update_state_fields(refresh_token, - helper_function, error_message) + state["non_admin_auth"]["refresh_token"] = self.update_state_fields(refresh_token, helper_function, error_message) access_token = state.get("admin_auth", {}).get("access_token") if access_token: state["admin_auth"]["access_token"] = self.update_state_fields(access_token, helper_function, error_message) @@ -514,7 +493,7 @@ def _process_html_response(self, response, action_result): msg = "Status Code: {0}. 
Data from server:\n{1}\n".format(status_code, error_text) - msg = msg.replace('{', '{{').replace('}', '}}') + msg = msg.replace("{", "{{").replace("}", "}}") return RetVal(action_result.set_status(phantom.APP_ERROR, msg), None) @@ -536,17 +515,15 @@ def _process_json_response(self, r, action_result): error_text = "" error_msg = "" - error = resp_json.get('error', '') - error_desc = resp_json.get('error_description', '') + error = resp_json.get("error", "") + error_desc = resp_json.get("error_description", "") if isinstance(error, dict): - error_code = error.get('code') - error_msg = error.get('message') + error_code = error.get("code") + error_msg = error.get("message") if error_msg: try: - soup = BeautifulSoup( - resp_json.get("error", {}).get("message"), "html.parser" - ) + soup = BeautifulSoup(resp_json.get("error", {}).get("message"), "html.parser") # Remove the script, style, footer and navigation part from the HTML message for element in soup(["script", "style", "footer", "nav"]): element.extract() @@ -572,8 +549,7 @@ def _process_json_response(self, r, action_result): # You should process the error returned in the json - msg = "Error: Status Code: {0} Data from server: {1}".format( - r.status_code, error_text) + msg = "Error: Status Code: {0} Data from server: {1}".format(r.status_code, error_text) return RetVal(action_result.set_status(phantom.APP_ERROR, msg), None) @@ -600,9 +576,7 @@ def _process_response(self, r, action_result): return self._process_html_response(r, action_result) if r.status_code == 404: - return RetVal( - action_result.set_status(phantom.APP_ERROR, "Email not found"), None - ) + return RetVal(action_result.set_status(phantom.APP_ERROR, "Email not found"), None) if 200 <= r.status_code <= 204: return RetVal(phantom.APP_SUCCESS, None) @@ -614,7 +588,8 @@ def _process_response(self, r, action_result): # everything else is actually an error at this point msg = "Can't process response from server. 
Status Code: {0} Data from server: {1}".format( - r.status_code, r.text.replace('{', '{{').replace('}', '}}')) + r.status_code, r.text.replace("{", "{{").replace("}", "}}") + ) return RetVal(action_result.set_status(phantom.APP_ERROR, msg), None) @@ -636,9 +611,7 @@ def _make_rest_call( request_func = getattr(requests, method) except AttributeError: return RetVal( - action_result.set_status( - phantom.APP_ERROR, "Invalid method: {0}".format(method) - ), + action_result.set_status(phantom.APP_ERROR, "Invalid method: {0}".format(method)), resp_json, ) @@ -672,9 +645,7 @@ def _make_rest_call( def _get_asset_name(self, action_result): - rest_endpoint = SPLUNK_SOAR_ASSET_INFO_URL.format( - url=self.get_phantom_base_url(), asset_id=self._asset_id - ) + rest_endpoint = SPLUNK_SOAR_ASSET_INFO_URL.format(url=self.get_phantom_base_url(), asset_id=self._asset_id) ret_val, resp_json = self._make_rest_call(action_result, rest_endpoint, False) @@ -701,9 +672,7 @@ def _update_container(self, action_result, container_id, container): :param container: container's payload to update :return: status phantom.APP_ERROR/phantom.APP_SUCCESS with status message """ - rest_endpoint = SPLUNK_SOAR_CONTAINER_INFO_URL.format( - url=self.get_phantom_base_url(), container_id=container_id - ) + rest_endpoint = SPLUNK_SOAR_CONTAINER_INFO_URL.format(url=self.get_phantom_base_url(), container_id=container_id) try: data = json.dumps(container) @@ -716,9 +685,7 @@ def _update_container(self, action_result, container_id, container): ).format(container_id, error_msg) return action_result.set_status(phantom.APP_ERROR, msg) - ret_val, _ = self._make_rest_call( - action_result, rest_endpoint, False, data=data, method="post" - ) + ret_val, _ = self._make_rest_call(action_result, rest_endpoint, False, data=data, method="post") if phantom.is_fail(ret_val): return action_result.get_status() @@ -765,9 +732,7 @@ def _get_url_to_app_rest(self, action_result=None): if phantom.is_fail(ret_val): return (action_result.get_status(), None) - self.save_progress( - "Using Splunk SOAR base URL as: {0}".format(phantom_base_url) - ) + self.save_progress("Using Splunk SOAR base URL as: {0}".format(phantom_base_url)) app_json = self.get_app_json() @@ -775,9 +740,7 @@ def _get_url_to_app_rest(self, action_result=None): app_dir_name = _get_dir_name_from_app_name(app_name) - url_to_app_rest = "{0}/rest/handler/{1}_{2}/{3}".format( - phantom_base_url, app_dir_name, app_json["appid"], asset_name - ) + url_to_app_rest = "{0}/rest/handler/{1}_{2}/{3}".format(phantom_base_url, app_dir_name, app_json["appid"], asset_name) return (phantom.APP_SUCCESS, url_to_app_rest) @@ -792,29 +755,30 @@ def _make_rest_call_helper( method="get", nextLink=None, download=False, + beta=False, ): if nextLink: url = nextLink else: - url = "{0}{1}".format(MSGRAPH_API_URL, endpoint) + if not beta: + url = f"{MSGRAPH_API_URL}/v1.0{endpoint}" + else: + url = f"{MSGRAPH_API_URL}/beta{endpoint}" if headers is None: headers = {} - headers.update({ - 'Authorization': 'Bearer {0}'.format(self._access_token), - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }) + headers.update( + {"Authorization": "Bearer {0}".format(self._access_token), "Accept": "application/json", "Content-Type": "application/json"} + ) ret_val, resp_json = self._make_rest_call(action_result, url, verify, headers, params, data, method, download=download) # If token is expired, generate a new token msg = action_result.get_message() - if msg and (('token' in msg and 'expired' in msg) or 
any(failure_msg in msg for failure_msg in AUTH_FAILURE_MSG)): - self.debug_print("MSGRAPH", - f"Error '{msg}' found in API response. Requesting new access token using refresh token") + if msg and (("token" in msg and "expired" in msg) or any(failure_msg in msg for failure_msg in AUTH_FAILURE_MSG)): + self.debug_print("MSGRAPH", f"Error '{msg}' found in API response. Requesting new access token using refresh token") ret_val = self._get_token(action_result) if phantom.is_fail(ret_val): return action_result.get_status(), None @@ -838,7 +802,7 @@ def _make_rest_call_helper( return phantom.APP_SUCCESS, resp_json def _sanitize_file_name(self, file_name): - return re.sub('[,"\']', '', file_name) + return re.sub("[,\"']", "", file_name) def _add_attachment_to_vault(self, attachment, container_id, file_data): fd, tmp_file_path = tempfile.mkstemp(dir=Vault.get_vault_tmp_dir()) @@ -865,19 +829,13 @@ def _handle_attachment(self, attachment, container_id, artifact_json=None): vault_id = None try: - if ( - "contentBytes" in attachment - ): # Check whether the attachment contains the data + if "contentBytes" in attachment: # Check whether the attachment contains the data file_data = base64.b64decode(attachment.pop("contentBytes")) - ret_val, vault_id = self._add_attachment_to_vault( - attachment, container_id, file_data - ) + ret_val, vault_id = self._add_attachment_to_vault(attachment, container_id, file_data) if phantom.is_fail(ret_val): return phantom.APP_ERROR else: - self.debug_print( - "No content found in the attachment. Hence, skipping the vault file creation." - ) + self.debug_print("No content found in the attachment. Hence, skipping the vault file creation.") except Exception as e: error_msg = _get_error_msg_from_exception(e, self) @@ -906,37 +864,25 @@ def _handle_attachment(self, attachment, container_id, artifact_json=None): return phantom.APP_SUCCESS - def _handle_item_attachment( - self, attachment, container_id, endpoint, action_result - ): + def _handle_item_attachment(self, attachment, container_id, endpoint, action_result): vault_id = None try: attach_endpoint = "{}/{}/$value".format(endpoint, attachment["id"]) - ret_val, rfc822_email = self._make_rest_call_helper( - action_result, attach_endpoint, download=True - ) + ret_val, rfc822_email = self._make_rest_call_helper(action_result, attach_endpoint, download=True) if phantom.is_fail(ret_val): - self.debug_print( - "Error while downloading the file content, for attachment id: {}".format( - attachment["id"] - ) - ) + self.debug_print("Error while downloading the file content, for attachment id: {}".format(attachment["id"])) return phantom.APP_ERROR attachment["name"] = "{}.eml".format(attachment["name"]) if rfc822_email: # Check whether the API returned any data - ret_val, vault_id = self._add_attachment_to_vault( - attachment, container_id, rfc822_email - ) + ret_val, vault_id = self._add_attachment_to_vault(attachment, container_id, rfc822_email) if phantom.is_fail(ret_val): return phantom.APP_ERROR else: - self.debug_print( - "No content found for the item attachment. Hence, skipping the vault file creation." - ) + self.debug_print("No content found for the item attachment. 
Hence, skipping the vault file creation.") except Exception as e: error_msg = _get_error_msg_from_exception(e, self) @@ -946,9 +892,7 @@ def _handle_item_attachment( attachment["vaultId"] = vault_id return phantom.APP_SUCCESS - def _create_reference_attachment_artifact( - self, container_id, attachment, artifact_json - ): + def _create_reference_attachment_artifact(self, container_id, attachment, artifact_json): """ Create reference attachment artifact. @@ -993,9 +937,7 @@ def _create_email_artifacts(self, container_id, email, artifact_id=None, create_ # Set email ID contains self._process_email._set_email_id_contains(email["id"]) - email_artifact["cef_types"] = { - "messageId": self._process_email._email_id_contains - } + email_artifact["cef_types"] = {"messageId": self._process_email._email_id_contains} email_artifact["source_data_identifier"] = artifact_id @@ -1013,9 +955,7 @@ def _create_email_artifacts(self, container_id, email, artifact_id=None, create_ # add first email to To recipients = v if len(recipients): - cef["toEmail"] = ( - recipients[0].get("emailAddress", {}).get("address", "") - ) + cef["toEmail"] = recipients[0].get("emailAddress", {}).get("address", "") elif k == "id": cef["messageId"] = v elif k == "internetMessageHeaders": @@ -1031,9 +971,7 @@ def _create_email_artifacts(self, container_id, email, artifact_id=None, create_ else: cef[k] = v - if cef.get("body", {}).get("content") and ( - cef.get("body", {}).get("contentType") == "html" - ): + if cef.get("body", {}).get("content") and (cef.get("body", {}).get("contentType") == "html"): html_body = cef["body"]["content"] try: @@ -1053,7 +991,7 @@ def _create_email_artifacts(self, container_id, email, artifact_id=None, create_ if not create_iocs: return [email_artifact] - body = email['body']['content'] + body = email["body"]["content"] ips = [] self._process_email._get_ips(body, ips) @@ -1133,12 +1071,8 @@ def _extract_attachments( # We need to expand the item attachment only once if first_time: - sub_email_endpoint = ( - attach_endpoint + "/{0}?$expand=microsoft.graph.itemattachment/item".format(attachment["id"]) - ) - ret_val, sub_email_resp = self._make_rest_call_helper( - action_result, sub_email_endpoint - ) + sub_email_endpoint = attach_endpoint + "/{0}?$expand=microsoft.graph.itemattachment/item".format(attachment["id"]) + ret_val, sub_email_resp = self._make_rest_call_helper(action_result, sub_email_endpoint) if phantom.is_fail(ret_val): return action_result.get_status() sub_email = sub_email_resp.get("item", {}) @@ -1147,7 +1081,7 @@ def _extract_attachments( sub_email = attachment.get("item", {}) if sub_email: - sub_artifacts = self._create_email_artifacts(container_id, sub_email, attachment['id'], create_iocs=False) + sub_artifacts = self._create_email_artifacts(container_id, sub_email, attachment["id"], create_iocs=False) artifacts += sub_artifacts # Use recursive approach to extract the reference attachment @@ -1162,27 +1096,15 @@ def _extract_attachments( container_id, ) if phantom.is_fail(ret_val): - self.debug_print( - "Error while processing nested attachments, for attachment id: {}".format( - attachment["id"] - ) - ) + self.debug_print("Error while processing nested attachments, for attachment id: {}".format(attachment["id"])) if first_time: # Fetch the rfc822 content for the item attachment - sub_email_endpoint = "{0}/{1}/$value".format( - attach_endpoint, attachment["id"] - ) + sub_email_endpoint = "{0}/{1}/$value".format(attach_endpoint, attachment["id"]) attachment["name"] = 
"{}.eml".format(attachment["name"]) - ret_val, rfc822_email = self._make_rest_call_helper( - action_result, sub_email_endpoint, download=True - ) + ret_val, rfc822_email = self._make_rest_call_helper(action_result, sub_email_endpoint, download=True) if phantom.is_fail(ret_val): - self.debug_print( - "Error while downloading the email content, for attachment id: {}".format( - attachment["id"] - ) - ) + self.debug_print("Error while downloading the email content, for attachment id: {}".format(attachment["id"])) if rfc822_email: # Create ProcessEmail Object for email item attachment @@ -1190,25 +1112,17 @@ def _extract_attachments( process_email_obj._trigger_automation = False ret_val, msg = process_email_obj.process_email( - rfc822_email, attachment['id'], epoch=None, - container_id=container_id, ingest_email=False) + rfc822_email, attachment["id"], epoch=None, container_id=container_id, ingest_email=False + ) if phantom.is_fail(ret_val): - self.debug_print( - "Error while processing the email content, for attachment id: {}".format( - attachment["id"] - ) - ) + self.debug_print("Error while processing the email content, for attachment id: {}".format(attachment["id"])) if config.get("ingest_eml", False): # Add eml file into the vault if ingest_email is checked - ret_val, vault_id = self._add_attachment_to_vault( - attachment, container_id, rfc822_email - ) + ret_val, vault_id = self._add_attachment_to_vault(attachment, container_id, rfc822_email) if phantom.is_fail(ret_val): - self.debug_print( - "Could not process item attachment. See logs for details" - ) + self.debug_print("Could not process item attachment. See logs for details") else: # If success, create vault artifact artifact_json = { @@ -1230,19 +1144,13 @@ def _extract_attachments( artifacts.append(artifact_json) else: - self.debug_print( - "No content found for the item attachment. Hence, skipping the email file processing." - ) + self.debug_print("No content found for the item attachment. Hence, skipping the email file processing.") - elif ( - attachment.get("@odata.type") == "#microsoft.graph.referenceAttachment" - ): + elif attachment.get("@odata.type") == "#microsoft.graph.referenceAttachment": attach_artifact = {} artifacts.append(attach_artifact) - self._create_reference_attachment_artifact( - container_id, attachment, attach_artifact - ) + self._create_reference_attachment_artifact(container_id, attachment, attach_artifact) elif attachment.get("name", "").endswith(".eml"): if "contentBytes" in attachment: @@ -1258,21 +1166,17 @@ def _extract_attachments( process_email_obj = ProcessEmail(self, config) process_email_obj._trigger_automation = False - ret_val, msg = process_email_obj.process_email(rfc822_email, attachment['id'], epoch=None, container_id=container_id) + ret_val, msg = process_email_obj.process_email(rfc822_email, attachment["id"], epoch=None, container_id=container_id) if phantom.is_fail(ret_val): return action_result.set_status(phantom.APP_ERROR, msg) else: - self.debug_print( - "No content found in the .eml file attachment. Hence, skipping the email file processing." - ) + self.debug_print("No content found in the .eml file attachment. 
Hence, skipping the email file processing.") elif first_time: attach_artifact = {} artifacts.append(attach_artifact) - if not self._handle_attachment( - attachment, container_id, artifact_json=attach_artifact - ): + if not self._handle_attachment(attachment, container_id, artifact_json=attach_artifact): return action_result.set_status( phantom.APP_ERROR, "Could not process attachment. See logs for details.", @@ -1292,11 +1196,11 @@ def _process_email_data(self, config, action_result, endpoint, email): """ container = {} - container['name'] = email['subject'] if email['subject'] else email['id'] - container_description = MSGOFFICE365_CONTAINER_DESCRIPTION.format(last_modified_time=email['lastModifiedDateTime']) - container['description'] = container_description - container['source_data_identifier'] = email['id'] - container['data'] = {'raw_email': email} + container["name"] = email["subject"] if email["subject"] else email["id"] + container_description = MSGOFFICE365_CONTAINER_DESCRIPTION.format(last_modified_time=email["lastModifiedDateTime"]) + container["description"] = container_description + container["source_data_identifier"] = email["id"] + container["data"] = {"raw_email": email} ret_val, msg, container_id = self.save_container(container) @@ -1308,21 +1212,15 @@ def _process_email_data(self, config, action_result, endpoint, email): self._duplicate_count += 1 # Prevent further processing if the email is not modified - ret_val, container_info, status_code = self.get_container_info( - container_id=container_id - ) + ret_val, container_info, status_code = self.get_container_info(container_id=container_id) if phantom.is_fail(ret_val): return action_result.set_status( phantom.APP_ERROR, - "Status Code: {}. Error occurred while fetching the container info for container ID: {}".format( - status_code, container_id - ), + "Status Code: {}. Error occurred while fetching the container info for container ID: {}".format(status_code, container_id), ) if container_info.get("description", "") == container_description: - msg = "Email ID: {} has not been modified. Hence, skipping the artifact ingestion.".format( - email["id"] - ) + msg = "Email ID: {} has not been modified. 
Hence, skipping the artifact ingestion.".format(email["id"]) self.debug_print(msg) return action_result.set_status(phantom.APP_SUCCESS, msg) else: @@ -1339,8 +1237,8 @@ def _process_email_data(self, config, action_result, endpoint, email): if config.get("extract_eml", True): subject = email.get("subject") email_message = { - "id": email['id'], - "name": subject if subject else "email_message_{}".format(email['id']), + "id": email["id"], + "name": subject if subject else "email_message_{}".format(email["id"]), } if not self._handle_item_attachment( email_message, @@ -1366,18 +1264,14 @@ def _process_email_data(self, config, action_result, endpoint, email): } if email_message["vaultId"]: - artifact_cef.update({ - "vaultId": email_message["vaultId"] - }) + artifact_cef.update({"vaultId": email_message["vaultId"]}) artifact_json["cef"] = artifact_cef attachment_artifacts.append(artifact_json) if email["hasAttachments"] and config.get("extract_attachments", False): attach_endpoint = endpoint + "/{0}/attachments".format(email["id"]) - ret_val, attach_resp = self._make_rest_call_helper( - action_result, attach_endpoint - ) + ret_val, attach_resp = self._make_rest_call_helper(action_result, attach_endpoint) if phantom.is_fail(ret_val): return action_result.get_status() @@ -1428,11 +1322,7 @@ def _handle_test_connectivity(self, param): ret_val, app_rest_url = self._get_url_to_app_rest(action_result) app_state = {} if phantom.is_fail(ret_val): - self.save_progress( - "Unable to get the URL to the app's REST Endpoint. Error: {0}".format( - action_result.get_message() - ) - ) + self.save_progress("Unable to get the URL to the app's REST Endpoint. Error: {0}".format(action_result.get_message())) return action_result.set_status(phantom.APP_ERROR) # create the url that the oauth server should re-direct to after the auth is completed @@ -1446,11 +1336,7 @@ def _handle_test_connectivity(self, param): if self._admin_access: # Create the url for fetching administrator consent - admin_consent_url = ( - "https://login.microsoftonline.com/{0}/adminconsent".format( - self._tenant - ) - ) + admin_consent_url = "https://login.microsoftonline.com/{0}/adminconsent".format(self._tenant) admin_consent_url += "?client_id={0}".format(self._client_id) admin_consent_url += "&redirect_uri={0}".format(redirect_uri) admin_consent_url += "&state={0}".format(self._asset_id) @@ -1462,9 +1348,7 @@ def _handle_test_connectivity(self, param): "Please provide scope for non-admin access in the asset configuration", ) # Create the url authorization, this is the one pointing to the oauth server side - admin_consent_url = "https://login.microsoftonline.com/{0}/oauth2/v2.0/authorize".format( - self._tenant - ) + admin_consent_url = "https://login.microsoftonline.com/{0}/oauth2/v2.0/authorize".format(self._tenant) admin_consent_url += "?client_id={0}".format(self._client_id) admin_consent_url += "&redirect_uri={0}".format(redirect_uri) admin_consent_url += "&state={0}".format(self._asset_id) @@ -1475,16 +1359,12 @@ def _handle_test_connectivity(self, param): # The URL that the user should open in a different tab. 
# This is pointing to a REST endpoint that points to the app - url_to_show = "{0}/start_oauth?asset_id={1}&".format( - app_rest_url, self._asset_id - ) + url_to_show = "{0}/start_oauth?asset_id={1}&".format(app_rest_url, self._asset_id) # Save the state, will be used by the request handler _save_app_state(app_state, self._asset_id, self) - self.save_progress( - "Please connect to the following URL from a different tab to continue the connectivity process" - ) + self.save_progress("Please connect to the following URL from a different tab to continue the connectivity process") self.save_progress(url_to_show) self.save_progress(MSGOFFICE365_AUTHORIZE_TROUBLESHOOT_MSG) @@ -1493,9 +1373,7 @@ def _handle_test_connectivity(self, param): completed = False app_dir = os.path.dirname(os.path.abspath(__file__)) - auth_status_file_path = "{0}/{1}_{2}".format( - app_dir, self._asset_id, TC_FILE - ) + auth_status_file_path = "{0}/{1}_{2}".format(app_dir, self._asset_id, TC_FILE) if self._admin_access: self.save_progress("Waiting for Admin Consent to complete") @@ -1514,9 +1392,7 @@ def _handle_test_connectivity(self, param): time.sleep(TC_STATUS_SLEEP) if not completed: - self.save_progress( - "Authentication process does not seem to be completed. Timing out" - ) + self.save_progress("Authentication process does not seem to be completed. Timing out") return action_result.set_status(phantom.APP_ERROR) self.send_progress("") @@ -1536,15 +1412,13 @@ def _handle_test_connectivity(self, param): return action_result.set_status(phantom.APP_ERROR) else: if not self._state.get("code"): - self.save_progress( - "Authorization code not received or not given" - ) + self.save_progress("Authorization code not received or not given") self.save_progress("Test Connectivity Failed") return action_result.set_status(phantom.APP_ERROR) # Deleting the local state file because of it replicates with actual state file while installing the app current_file_path = pathlib.Path(__file__).resolve() - input_file = f'{self._asset_id}_state.json' + input_file = f"{self._asset_id}_state.json" state_file_path = current_file_path.with_name(input_file) state_file_path.unlink() @@ -1555,21 +1429,17 @@ def _handle_test_connectivity(self, param): self._remove_tokens(action_result) return action_result.get_status() - params = {'$top': '1'} + params = {"$top": "1"} message_failed = "" if self._admin_access: message_failed = "API to fetch details of all the users failed" self.save_progress("Getting info about all users to verify token") - ret_val, response = self._make_rest_call_helper( - action_result, "/users", params=params - ) + ret_val, response = self._make_rest_call_helper(action_result, "/users", params=params) else: message_failed = "API to get user details failed" self.save_progress("Getting info about a single user to verify token") - ret_val, response = self._make_rest_call_helper( - action_result, "/me", params=params - ) + ret_val, response = self._make_rest_call_helper(action_result, "/me", params=params) if phantom.is_fail(ret_val): self.save_progress(message_failed) @@ -1587,9 +1457,7 @@ def _handle_test_connectivity(self, param): def _handle_copy_email(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -1615,23 +1483,17 @@ def _handle_copy_email(self, param): 
self.save_progress(error) return action_result.set_status(phantom.APP_ERROR, error) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, data=json.dumps(body), method="post" - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, data=json.dumps(body), method="post") if phantom.is_fail(ret_val): return action_result.get_status() action_result.add_data(response) - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully copied email" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully copied email") def _handle_move_email(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -1644,9 +1506,7 @@ def _handle_move_email(self, param): body = {"DestinationId": folder} if param.get("get_folder_id", True): try: - dir_id, error, _ = self._get_folder_id( - action_result, folder, email_addr - ) + dir_id, error, _ = self._get_folder_id(action_result, folder, email_addr) except ReturnException as e: self._dump_error_log(e) @@ -1659,9 +1519,7 @@ def _handle_move_email(self, param): self.save_progress(error) return action_result.set_status(phantom.APP_ERROR, error) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, data=json.dumps(body), method="post" - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, data=json.dumps(body), method="post") if phantom.is_fail(ret_val): return action_result.get_status() @@ -1671,9 +1529,7 @@ def _handle_move_email(self, param): def _handle_delete_email(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -1682,21 +1538,15 @@ def _handle_delete_email(self, param): endpoint += "/messages/{0}".format(message_id) - ret_val, _ = self._make_rest_call_helper( - action_result, endpoint, method="delete" - ) + ret_val, _ = self._make_rest_call_helper(action_result, endpoint, method="delete") if phantom.is_fail(ret_val): return action_result.get_status() - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully deleted email" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully deleted email") def _handle_delete_event(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -1710,29 +1560,21 @@ def _handle_delete_event(self, param): endpoint += "/decline" data = json.dumps({"sendResponse": True}) - ret_val, _ = self._make_rest_call_helper( - action_result, endpoint, method=method, data=data - ) + ret_val, _ = self._make_rest_call_helper(action_result, endpoint, method=method, data=data) if phantom.is_fail(ret_val): return action_result.get_status() - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully deleted event" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully deleted event") def _handle_oof_check(self, param): - self.save_progress( - "In action 
handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) user_id = param["user_id"] endpoint = "/users/{0}/mailboxSettings/automaticRepliesSetting".format(user_id) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, method="get" - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, method="get") if phantom.is_fail(ret_val): return action_result.get_status() @@ -1740,14 +1582,10 @@ def _handle_oof_check(self, param): action_result.update_summary({"events_matched": action_result.get_data_size()}) - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully retrieved out of office status" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully retrieved out of office status") def _handle_list_events(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) user_id = param.get("user_id") if param.get("user_id") else None @@ -1795,15 +1633,10 @@ def _handle_list_events(self, param): if not events: # No events found is a valid scenario that there can be 0 events returned # even if the API call is a success for the correct given inputs and hence, returning APP_SUCCESS. - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) for event in events: - attendees = [ - attendee.get("emailAddress", {}).get("name") - for attendee in event.get("attendees", []) - ] + attendees = [attendee.get("emailAddress", {}).get("name") for attendee in event.get("attendees", [])] event["attendee_list"] = ", ".join(attendees) action_result.add_data(event) @@ -1812,16 +1645,12 @@ def _handle_list_events(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} event{}".format( - num_events, "" if num_events == 1 else "s" - ), + "Successfully retrieved {} event{}".format(num_events, "" if num_events == 1 else "s"), ) def _handle_list_groups(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) limit = param.get("limit") @@ -1841,9 +1670,7 @@ def _handle_list_groups(self, param): return action_result.get_status() if not groups: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) for group in groups: action_result.add_data(group) @@ -1853,16 +1680,12 @@ def _handle_list_groups(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} group{}".format( - num_groups, "" if num_groups == 1 else "s" - ), + "Successfully retrieved {} group{}".format(num_groups, "" if num_groups == 1 else "s"), ) def _handle_list_group_members(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) limit 
= param.get("limit") @@ -1874,16 +1697,16 @@ def _handle_list_group_members(self, param): if phantom.is_fail(ret_val): return action_result.get_status() - method = param.get('method', 'Group ID') - group_id = identificator = param['identificator'] - if param.get('filter'): - query = param.get('filter') + method = param.get("method", "Group ID") + group_id = identificator = param["identificator"] + if param.get("filter"): + query = param.get("filter") is_advance_query = True - if method.lower() not in ('group id', 'group e-mail'): + if method.lower() not in ("group id", "group e-mail"): return action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_INVALID_METHOD) - if method.lower() == 'group e-mail': + if method.lower() == "group e-mail": if not util.is_email(identificator): return action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_INVALID_EMAIL) @@ -1895,14 +1718,14 @@ def _handle_list_group_members(self, param): if not group: return action_result.set_status( - phantom.APP_ERROR, "There is no such {} group name, Please check the correct " - "spelling or existence".format(identificator)) - group_id = group[0]['id'] + phantom.APP_ERROR, "There is no such {} group name, Please check the correct " "spelling or existence".format(identificator) + ) + group_id = group[0]["id"] - transitive_members = param.get('get_transitive_members', True) - endpoint = '/groups/{0}/members'.format(group_id) + transitive_members = param.get("get_transitive_members", True) + endpoint = "/groups/{0}/members".format(group_id) if transitive_members: - endpoint = '/groups/{0}/transitiveMembers'.format(group_id) + endpoint = "/groups/{0}/transitiveMembers".format(group_id) ret_val, members = self._paginator(action_result, endpoint, limit, query=query, is_advance_query=is_advance_query) @@ -1910,9 +1733,7 @@ def _handle_list_group_members(self, param): return action_result.get_status() if not members: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) for member in members: action_result.add_data(member) @@ -1922,16 +1743,12 @@ def _handle_list_group_members(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} group member{}".format( - num_members, "" if num_members == 1 else "s" - ), + "Successfully retrieved {} group member{}".format(num_members, "" if num_members == 1 else "s"), ) def _handle_list_users(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) limit = param.get("limit") @@ -1951,9 +1768,7 @@ def _handle_list_users(self, param): return action_result.get_status() if not users: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) for user in users: action_result.add_data(user) @@ -1963,23 +1778,17 @@ def _handle_list_users(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} user{}".format( - num_users, "" if num_users == 1 else "s" - ), + "Successfully retrieved {} user{}".format(num_users, "" if num_users == 1 else "s"), ) def _handle_list_rules(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + 
self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) user_id = param["user_id"] - endpoint = "/users/{user_id}/mailFolders/inbox/messageRules".format( - user_id=user_id - ) + endpoint = "/users/{user_id}/mailFolders/inbox/messageRules".format(user_id=user_id) ret_val, rules = self._paginator(action_result, endpoint) @@ -1987,9 +1796,7 @@ def _handle_list_rules(self, param): return action_result.get_status() if not rules: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) for rule in rules: action_result.add_data(rule) @@ -1999,9 +1806,7 @@ def _handle_list_rules(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} rule{}".format( - num_rules, "" if num_rules == 1 else "s" - ), + "Successfully retrieved {} rule{}".format(num_rules, "" if num_rules == 1 else "s"), ) def flatten_json(self, y): @@ -2024,17 +1829,13 @@ def flatten(x, name=""): def _handle_get_rule(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) user_id = param["user_id"] rule_id = param["rule_id"] - endpoint = "/users/{user_id}/mailFolders/inbox/messageRules/{rule_id}".format( - user_id=user_id, rule_id=rule_id - ) + endpoint = "/users/{user_id}/mailFolders/inbox/messageRules/{rule_id}".format(user_id=user_id, rule_id=rule_id) ret_val, rule = self._make_rest_call_helper(action_result, endpoint) @@ -2042,23 +1843,17 @@ def _handle_get_rule(self, param): return action_result.get_status() if not rule: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) rule = self.flatten_json(rule) self.debug_print(rule) action_result.add_data(rule) - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully retrieved specified inbox rule" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully retrieved specified inbox rule") def _handle_list_folders(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) list_folder = list() @@ -2091,9 +1886,7 @@ def _handle_list_folders(self, param): if phantom.is_fail(ret_val): return action_result.get_status() else: - ret_val = self._list_child_folders( - action_result, list_folder, user_id=user_id, folder_id=folder_id - ) + ret_val = self._list_child_folders(action_result, list_folder, user_id=user_id, folder_id=folder_id) if phantom.is_fail(ret_val): return action_result.get_status() @@ -2106,9 +1899,7 @@ def _handle_list_folders(self, param): return action_result.set_status( phantom.APP_SUCCESS, - "Successfully retrieved {} mail folder{}".format( - num_folders, "" if num_folders == 1 else "s" - ), + "Successfully retrieved {} mail folder{}".format(num_folders, "" if num_folders == 1 else "s"), ) def _fetch_root_folders(self, action_result, user_id): @@ -2122,27 +1913,19 @@ def _fetch_root_folders(self, action_result, user_id): if not folders: return ( - action_result.set_status( - 
phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ), + action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND), None, ) return phantom.APP_SUCCESS, folders - def _list_child_folders( - self, action_result, list_folder, user_id, parent_folder=None, folder_id=None - ): + def _list_child_folders(self, action_result, list_folder, user_id, parent_folder=None, folder_id=None): # fetching root level folders if not folder_id: - ret_val, child_folders = self._fetch_child_folders( - action_result, user_id, parent_folder["id"] - ) + ret_val, child_folders = self._fetch_child_folders(action_result, user_id, parent_folder["id"]) else: - ret_val, child_folders = self._fetch_child_folders( - action_result, user_id, folder_id - ) + ret_val, child_folders = self._fetch_child_folders(action_result, user_id, folder_id) if phantom.is_fail(ret_val): return action_result.get_status() @@ -2169,9 +1952,7 @@ def _list_child_folders( def _fetch_child_folders(self, action_result, user_id, folder_id): - endpoint = "/users/{user_id}/mailFolders/{folder_id}/childFolders".format( - user_id=user_id, folder_id=folder_id - ) + endpoint = "/users/{user_id}/mailFolders/{folder_id}/childFolders".format(user_id=user_id, folder_id=folder_id) ret_val, folders = self._paginator(action_result, endpoint) @@ -2200,9 +1981,7 @@ def _flatten_headers(self, headers): def _handle_get_email(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -2217,26 +1996,18 @@ def _handle_get_email(self, param): if param.get("extract_headers"): header_endpoint = endpoint + "?$select=internetMessageHeaders" - ret_val, header_response = self._make_rest_call_helper( - action_result, header_endpoint - ) + ret_val, header_response = self._make_rest_call_helper(action_result, header_endpoint) if phantom.is_fail(ret_val): return action_result.get_status() # For Drafts there might not be any internetMessageHeaders, # so we have to use get() fetching instead of directly fetching from dictionary - response["internetMessageHeaders"] = header_response.get( - "internetMessageHeaders" - ) + response["internetMessageHeaders"] = header_response.get("internetMessageHeaders") if param.get("download_attachments", False) and response.get("hasAttachments"): endpoint += "/attachments" - attachment_endpoint = ( - "{}?$expand=microsoft.graph.itemattachment/item".format(endpoint) - ) - ret_val, attach_resp = self._make_rest_call_helper( - action_result, attachment_endpoint - ) + attachment_endpoint = "{}?$expand=microsoft.graph.itemattachment/item".format(endpoint) + ret_val, attach_resp = self._make_rest_call_helper(action_result, attachment_endpoint) if phantom.is_fail(ret_val): return action_result.get_status() @@ -2250,9 +2021,7 @@ def _handle_get_email(self, param): "Could not process attachment. See logs for details", ) elif attachment.get("@odata.type") == "#microsoft.graph.itemAttachment": - if not self._handle_item_attachment( - attachment, self.get_container_id(), endpoint, action_result - ): + if not self._handle_item_attachment(attachment, self.get_container_id(), endpoint, action_result): return action_result.set_status( phantom.APP_ERROR, "Could not process item attachment. 
See logs for details", @@ -2266,21 +2035,15 @@ def _handle_get_email(self, param): "#microsoft.graph.eventMessageResponse", ]: - event_endpoint = "{}/?$expand=Microsoft.Graph.EventMessage/Event".format( - endpoint - ) - ret_val, event_resp = self._make_rest_call_helper( - action_result, event_endpoint - ) + event_endpoint = "{}/?$expand=Microsoft.Graph.EventMessage/Event".format(endpoint) + ret_val, event_resp = self._make_rest_call_helper(action_result, event_endpoint) if phantom.is_fail(ret_val): return action_result.get_status() response["event"] = event_resp["event"] if "internetMessageHeaders" in response: - response["internetMessageHeaders"] = self._flatten_headers( - response["internetMessageHeaders"] - ) + response["internetMessageHeaders"] = self._flatten_headers(response["internetMessageHeaders"]) # If the response has attachments, update every attachment data with its type # 'attachmentType' key - indicates type of attachment @@ -2291,9 +2054,7 @@ def _handle_get_email(self, param): attachment_type = attachment.get("@odata.type", "") attachment["attachmentType"] = attachment_type if attachment_type == "#microsoft.graph.itemAttachment": - attachment["itemType"] = attachment.get("item", {}).get( - "@odata.type", "" - ) + attachment["itemType"] = attachment.get("item", {}).get("@odata.type", "") if param.get("download_email"): subject = response.get("subject") @@ -2315,15 +2076,11 @@ def _handle_get_email(self, param): action_result.add_data(response) - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully fetched email" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully fetched email") def _handle_get_email_properties(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email_addr = param["email_address"] @@ -2343,11 +2100,7 @@ def _handle_get_email_properties(self, param): select_list.append("sender") if "properties_list" in param: properties_list = param["properties_list"] - properties_list = [ - property.strip() - for property in properties_list.strip().split(",") - if property.strip() - ] + properties_list = [property.strip() for property in properties_list.strip().split(",") if property.strip()] select_list += properties_list if select_list: @@ -2358,15 +2111,11 @@ def _handle_get_email_properties(self, param): return action_result.get_status() if "internetMessageHeaders" in response: - response["internetMessageHeaders"] = self._flatten_headers( - response["internetMessageHeaders"] - ) + response["internetMessageHeaders"] = self._flatten_headers(response["internetMessageHeaders"]) action_result.add_data(response) - return action_result.set_status( - phantom.APP_SUCCESS, "Successfully fetched email" - ) + return action_result.set_status(phantom.APP_SUCCESS, "Successfully fetched email") def _manage_data_duplication(self, emails, total_ingested, limit, max_emails): """ @@ -2399,9 +2148,7 @@ def _manage_data_duplication(self, emails, total_ingested, limit, max_emails): def _handle_on_poll(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) config = self.get_config() @@ -2432,13 +2179,9 @@ def _handle_on_poll(self, param): 
start_time = self._state["last_time"] if not config.get("email_address"): - return action_result.set_status( - phantom.APP_ERROR, "Email Address to ingest must be supplied in asset!" - ) + return action_result.set_status(phantom.APP_ERROR, "Email Address to ingest must be supplied in asset!") elif not config.get("folder"): - return action_result.set_status( - phantom.APP_ERROR, "Folder to ingest from must be supplied in asset!" - ) + return action_result.set_status(phantom.APP_ERROR, "Folder to ingest from must be supplied in asset!") endpoint = "/users/{0}".format(config.get("email_address")) @@ -2446,7 +2189,7 @@ def _handle_on_poll(self, param): folder = config.get("folder", "") if config.get("get_folder_id", True): try: - dir_id, error, _ = self._get_folder_id(action_result, folder, config.get('email_address')) + dir_id, error, _ = self._get_folder_id(action_result, folder, config.get("email_address")) except ReturnException as e: self._dump_error_log(e) @@ -2463,7 +2206,7 @@ def _handle_on_poll(self, param): params = {"$orderBy": "lastModifiedDateTime {}".format(order)} - params['$select'] = ','.join(MSGOFFICE365_SELECT_PARAMETER_LIST) + params["$select"] = ",".join(MSGOFFICE365_SELECT_PARAMETER_LIST) if start_time: params["$filter"] = "lastModifiedDateTime ge {0}".format(start_time) @@ -2478,40 +2221,28 @@ def _handle_on_poll(self, param): while True: self._duplicate_count = 0 - ret_val, emails = self._paginator( - action_result, endpoint, limit=cur_limit, params=params - ) + ret_val, emails = self._paginator(action_result, endpoint, limit=cur_limit, params=params) if phantom.is_fail(ret_val): return action_result.get_status() if not emails: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) failed_email_ids = 0 total_emails = len(emails) self.save_progress("Total emails fetched: {}".format(total_emails)) if self.is_poll_now(): - self.save_progress( - "Ingesting all possible artifacts (ignoring maximum artifacts value) for POLL NOW" - ) + self.save_progress("Ingesting all possible artifacts (ignoring maximum artifacts value) for POLL NOW") for index, email in enumerate(emails): try: - self.send_progress( - "Processing email # {} with ID ending in: {}".format( - index + 1, email["id"][-10:] - ) - ) - ret_val = self._process_email_data( - config, action_result, endpoint, email - ) + self.send_progress("Processing email # {} with ID ending in: {}".format(index + 1, email["id"][-10:])) + ret_val = self._process_email_data(config, action_result, endpoint, email) if phantom.is_fail(ret_val): failed_email_ids += 1 - self.debug_print("Error occurred while processing email ID: {}. {}".format(email.get('id'), action_result.get_message())) + self.debug_print("Error occurred while processing email ID: {}. 
{}".format(email.get("id"), action_result.get_message())) except Exception as e: failed_email_ids += 1 error_msg = _get_error_msg_from_exception(e, self) @@ -2524,9 +2255,7 @@ def _handle_on_poll(self, param): ) if not self.is_poll_now(): - last_time = datetime.strptime( - emails[email_index]["lastModifiedDateTime"], O365_TIME_FORMAT - ).strftime(O365_TIME_FORMAT) + last_time = datetime.strptime(emails[email_index]["lastModifiedDateTime"], O365_TIME_FORMAT).strftime(O365_TIME_FORMAT) self._state["last_time"] = last_time self.save_state(deepcopy(self._state)) @@ -2535,9 +2264,7 @@ def _handle_on_poll(self, param): # Duplication logic should only work for the oldest first order and if we have more data on the server. if total_emails >= cur_limit and email_index == -1: - cur_limit, total_ingested = self._manage_data_duplication( - emails, total_ingested, cur_limit, max_emails - ) + cur_limit, total_ingested = self._manage_data_duplication(emails, total_ingested, cur_limit, max_emails) if not cur_limit: break else: @@ -2576,18 +2303,14 @@ def _validate_range(self, email_range, action_result): if maxi > MAX_END_OFFSET_VAL: return action_result.set_status( phantom.APP_ERROR, - "Invalid range value. The max_offset value cannot be greater than {0}".format( - MAX_END_OFFSET_VAL - ), + "Invalid range value. The max_offset value cannot be greater than {0}".format(MAX_END_OFFSET_VAL), ) return phantom.APP_SUCCESS def _handle_generate_token(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) ret_val = self._get_token(action_result) if phantom.is_fail(ret_val): @@ -2599,9 +2322,7 @@ def _handle_generate_token(self, param): def _handle_run_query(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) limit = param.get("limit") @@ -2617,11 +2338,7 @@ def _handle_run_query(self, param): params = dict() if "internet_message_id" in param: - params = { - "$filter": "internetMessageId eq '{0}'".format( - param["internet_message_id"] - ) - } + params = {"$filter": "internetMessageId eq '{0}'".format(param["internet_message_id"])} elif "query" in param: query = "?{0}".format(param["query"]) @@ -2646,20 +2363,14 @@ def _handle_run_query(self, param): if param.get("search_well_known_folders", False): endpoint += "/mailFolders" - folder_params = { - "$filter": "{}".format(MSGOFFICE365_WELL_KNOWN_FOLDERS_FILTER) - } - ret_val, response = self._paginator( - action_result, endpoint, params=folder_params - ) + folder_params = {"$filter": "{}".format(MSGOFFICE365_WELL_KNOWN_FOLDERS_FILTER)} + ret_val, response = self._paginator(action_result, endpoint, params=folder_params) if phantom.is_fail(ret_val): return action_result.set_status(phantom.APP_ERROR) if not response: - return action_result.set_status( - phantom.APP_SUCCESS, "No well known folders found" - ) + return action_result.set_status(phantom.APP_SUCCESS, "No well known folders found") folders = response @@ -2709,9 +2420,7 @@ def _handle_run_query(self, param): messages.extend(folder_messages) else: - ret_val, messages = self._paginator( - action_result, endpoint, limit, params=params - ) + ret_val, messages = self._paginator(action_result, endpoint, 
limit, params=params) if phantom.is_fail(ret_val): msg = action_result.get_message() @@ -2722,9 +2431,7 @@ def _handle_run_query(self, param): return action_result.get_status() if not messages: - return action_result.set_status( - phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND - ) + return action_result.set_status(phantom.APP_SUCCESS, MSGOFFICE365_NO_DATA_FOUND) action_result.update_data(messages) action_result.update_summary({"emails_matched": action_result.get_data_size()}) @@ -2756,7 +2463,7 @@ def _get_folder_id(self, action_result, folder, email): try: for i, subf in enumerate(path[1:]): - subpath = "/".join(path[0: i + 2]) + subpath = "/".join(path[0 : i + 2]) parent_id = dir_id dir_id = self._get_child_folder(action_result, subf, parent_id, email) @@ -2780,9 +2487,7 @@ def _get_folder(self, action_result, folder, email): params["$filter"] = "displayName eq '{}'".format(folder) endpoint = "/users/{}/mailFolders".format(email) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, params=params - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, params=params) if phantom.is_fail(ret_val): raise ReturnException(action_result.get_message()) @@ -2800,9 +2505,7 @@ def _get_child_folder(self, action_result, folder, parent_id, email): params["$filter"] = "displayName eq '{}'".format(folder) endpoint = "/users/{}/mailFolders/{}/childFolders".format(email, parent_id) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, params=params - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, params=params) if phantom.is_fail(ret_val): raise ReturnException() @@ -2819,9 +2522,7 @@ def _new_folder(self, action_result, folder, email): data = json.dumps({"displayName": folder}) endpoint = "/users/{}/mailFolders".format(email) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, data=data, method="post" - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, data=data, method="post") if phantom.is_fail(ret_val): raise ReturnException() @@ -2840,33 +2541,23 @@ def _new_child_folder(self, action_result, folder, parent_id, email, pathsofar): data = json.dumps({"displayName": folder}) endpoint = "/users/{}/mailFolders/{}/childFolders".format(email, parent_id) - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, data=data, method="post" - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, data=data, method="post") if phantom.is_fail(ret_val): raise ReturnException() if response.get("id", False): self._currentdir = response - self.save_progress( - "Success({}): created child folder in folder {}".format( - folder, pathsofar - ) - ) + self.save_progress("Success({}): created child folder in folder {}".format(folder, pathsofar)) return response["id"] - msg = "Error({}): unable to create child folder in folder {}".format( - folder, pathsofar - ) + msg = "Error({}): unable to create child folder in folder {}".format(folder, pathsofar) self.save_progress(msg) action_result.set_status(phantom.APP_ERROR, msg) raise ReturnException() def _handle_create_folder(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email = param["email_address"] @@ -2916,9 +2607,7 @@ def _handle_create_folder(self, param): 
action_result.add_data(self._currentdir) else: - msg = "Error({}): folder doesn't exists in mailbox".format( - path[0] - ) + msg = "Error({}): folder doesn't exist in mailbox".format(path[0]) self.save_progress(msg) return action_result.set_status(phantom.APP_ERROR, msg) @@ -2932,21 +2621,15 @@ def _handle_create_folder(self, param): # next all the childFolders in between for subf in path: - dir_id = self._get_child_folder( - action_result, subf, parent_id, email - ) + dir_id = self._get_child_folder(action_result, subf, parent_id, email) if not dir_id: if minusp: - dir_id = self._new_child_folder( - action_result, subf, parent_id, email, pathsofar - ) + dir_id = self._new_child_folder(action_result, subf, parent_id, email, pathsofar) action_result.add_data(self._currentdir) else: - msg = "Error({}): child folder doesn't exists in folder {}".format( - subf, pathsofar - ) + msg = "Error({}): child folder doesn't exist in folder {}".format(subf, pathsofar) self.save_progress(msg) return action_result.set_status(phantom.APP_ERROR, msg) @@ -2956,15 +2639,11 @@ def _handle_create_folder(self, param): # finally, the actual folder dir_id = self._get_child_folder(action_result, final, parent_id, email) if dir_id: - msg = "Error: child folder {0} already exists in the folder {1}".format( - final, pathsofar - ) + msg = "Error: child folder {0} already exists in the folder {1}".format(final, pathsofar) self.save_progress(msg) return action_result.set_status(phantom.APP_ERROR, msg) - dir_id = self._new_child_folder( - action_result, final, parent_id, email, pathsofar - ) + dir_id = self._new_child_folder(action_result, final, parent_id, email, pathsofar) action_result.add_data(self._currentdir) except ReturnException as e: @@ -2981,9 +2660,7 @@ def _handle_create_folder(self, param): def _handle_get_folder_id(self, param): - self.save_progress( - "In action handler for: {0}".format(self.get_action_identifier()) - ) + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) action_result = self.add_action_result(ActionResult(dict(param))) email = param["email_address"] @@ -3009,48 +2686,45 @@ def _handle_get_folder_id(self, param): return action_result.set_status(phantom.APP_ERROR, error) def _email_to_recipient(self, email: str): - recipient = { - "emailAddress": { - "address": email - } - } + recipient = {"emailAddress": {"address": email}} return recipient - def _create_draft_message(self, action_result, subject: str, body: str, from_email: str, - *, to_emails: list[str], cc_emails: list[str], bcc_emails: list[str], headers: dict[str, str]): - endpoint = '/users/{}/messages'.format(from_email) - req_headers = { - 'Prefer': 'IdType="ImmutableId"' - } - msg = { - "subject": subject, - "body": { - "contentType": "HTML", - "content": body - } - } + def _create_draft_message( + self, + action_result, + subject: str, + body: str, + from_email: str, + *, + to_emails: list[str], + cc_emails: list[str], + bcc_emails: list[str], + headers: dict[str, str], + ): + endpoint = "/users/{}/messages".format(from_email) + req_headers = {"Prefer": 'IdType="ImmutableId"'} + msg = {"subject": subject, "body": {"contentType": "HTML", "content": body}} if to_emails: - msg['toRecipients'] = [self._email_to_recipient(email) for email in to_emails] + msg["toRecipients"] = [self._email_to_recipient(email) for email in to_emails] if cc_emails: - msg['ccRecipients'] = [self._email_to_recipient(email) for email in cc_emails] + msg["ccRecipients"] = [self._email_to_recipient(email) for email in
cc_emails] if bcc_emails: - msg['bccRecipients'] = [self._email_to_recipient(email) for email in bcc_emails] + msg["bccRecipients"] = [self._email_to_recipient(email) for email in bcc_emails] if headers: - msg['internetMessageHeaders'] = [ - {'name': key, 'value': value} for key, value in headers.items()] + msg["internetMessageHeaders"] = [{"name": key, "value": value} for key, value in headers.items()] - ret_val, response = self._make_rest_call_helper(action_result, endpoint, method='post', headers=req_headers, data=json.dumps(msg)) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, method="post", headers=req_headers, data=json.dumps(msg)) if phantom.is_fail(ret_val): return action_result.get_status(), None - message_id = response['id'] + message_id = response["id"] return action_result, message_id def _send_draft_message(self, action_result, user_id, message_id): endpoint = "/users/{}/messages/{}/send".format(user_id, message_id) - ret_val, _ = self._make_rest_call_helper(action_result, endpoint, method='post') + ret_val, _ = self._make_rest_call_helper(action_result, endpoint, method="post") if phantom.is_fail(ret_val): return action_result.set_status(phantom.APP_ERROR, "Failed to send draft email with id: {}".format(message_id)), None @@ -3067,7 +2741,7 @@ def _add_attachment_to_message(self, action_result, vault_id, user_id, message_i if not vault_info: return action_result.set_status(phantom.APP_ERROR, "Failed to find vault entry {}".format(vault_id)), None - if vault_info['size'] > MSGOFFICE365_UPLOAD_SESSION_CUTOFF: + if vault_info["size"] > MSGOFFICE365_UPLOAD_SESSION_CUTOFF: ret_val, attachment_id = self._upload_large_attachment(action_result, vault_info, user_id, message_id) else: ret_val, attachment_id = self._upload_small_attachment(action_result, vault_info, user_id, message_id) @@ -3076,80 +2750,90 @@ def _add_attachment_to_message(self, action_result, vault_id, user_id, message_i def _upload_small_attachment(self, action_result, vault_info, user_id, message_id): endpoint = "/users/{}/messages/{}/attachments".format(user_id, message_id) - with open(vault_info['path'], mode='rb') as file: + with open(vault_info["path"], mode="rb") as file: file_content = file.read() data = { - '@odata.type': '#microsoft.graph.fileAttachment', - 'name': vault_info['name'], - 'contentType': vault_info['mime_type'], - 'contentBytes': base64.b64encode(file_content).decode('ascii'), - 'contentId': vault_info['vault_id'] + "@odata.type": "#microsoft.graph.fileAttachment", + "name": vault_info["name"], + "contentType": vault_info["mime_type"], + "contentBytes": base64.b64encode(file_content).decode("ascii"), + "contentId": vault_info["vault_id"], } - ret_val, response = self._make_rest_call_helper(action_result, endpoint, method='post', data=json.dumps(data)) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, method="post", data=json.dumps(data)) if phantom.is_fail(ret_val): return action_result.set_status(phantom.APP_ERROR, "Failed to upload vault entry {}".format(vault_info["vault_id"])), None - attachment_id = response['id'] + attachment_id = response["id"] return phantom.APP_SUCCESS, attachment_id def _upload_large_attachment(self, action_result, vault_info, user_id, message_id): endpoint = "/users/{}/messages/{}/attachments/createUploadSession".format(user_id, message_id) - file_size = vault_info['size'] + file_size = vault_info["size"] data = { - 'AttachmentItem': { - 'attachmentType': 'file', - 'name': vault_info['name'], - 'contentType': 
vault_info['mime_type'], - 'contentId': vault_info['vault_id'], - 'size': file_size + "AttachmentItem": { + "attachmentType": "file", + "name": vault_info["name"], + "contentType": vault_info["mime_type"], + "contentId": vault_info["vault_id"], + "size": file_size, } } - ret_val, response = self._make_rest_call_helper(action_result, endpoint, method='post', data=json.dumps(data)) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, method="post", data=json.dumps(data)) if phantom.is_fail(ret_val): return action_result.set_status(phantom.APP_ERROR, "Failed to upload vault entry {}".format(vault_info["vault_id"])), None - upload_url = response['uploadUrl'] + upload_url = response["uploadUrl"] - with open(vault_info['path'], mode='rb') as file: + with open(vault_info["path"], mode="rb") as file: for start_position in range(0, file_size, MSGOFFICE365_UPLOAD_LARGE_FILE_CUTOFF): file_content = file.read(MSGOFFICE365_UPLOAD_LARGE_FILE_CUTOFF) end_position = start_position + len(file_content) - 1 headers = { - 'Content-Type': 'application/octet-stream', - 'Content-Range': "bytes {}-{}/{}".format(start_position, end_position, file_size) + "Content-Type": "application/octet-stream", + "Content-Range": "bytes {}-{}/{}".format(start_position, end_position, file_size), } flag = True while flag: response = requests.put(upload_url, headers=headers, data=file_content) - if response.status_code == 429 and response.headers['Retry-After']: - retry_time = int(response.headers['Retry-After']) + if response.status_code == 429 and response.headers["Retry-After"]: + retry_time = int(response.headers["Retry-After"]) if retry_time > 300: # throw error if wait time greater than 300 seconds self.debug_print("Retry is canceled as retry time is greater than 300 seconds") self._process_response(response, action_result) - return action_result.set_status( - phantom.APP_ERROR, "Failed to upload file, {} Please retry after \ - {} seconds".format(action_result.get_message(), retry_time ) - ), None + return ( + action_result.set_status( + phantom.APP_ERROR, + "Failed to upload file, {}. " + "Please retry after {} seconds".format( + action_result.get_message(), retry_time + ), + ), + None, + ) self.debug_print("Retrying after {} seconds".format(retry_time)) time.sleep(retry_time + 1) elif not response.ok: - return action_result.set_status( - phantom.APP_ERROR, "Failed to upload file, Error occurred : {}, {}".format(response.status_code, str(response.text)) - ), None + return ( + action_result.set_status( + phantom.APP_ERROR, + "Failed to upload file. Error occurred: {}, {}".format(response.status_code, str(response.text)), + ), + None, + ) else: flag = False - result_location = response.headers.get('Location', 'no_location_found') + result_location = response.headers.get("Location", "no_location_found") match = re.search(r"Attachments\('(?P<attachment_id>[^']+)'\)", result_location) if not match: return action_result.set_status(phantom.APP_ERROR, "Unable to extract attachment id from url {}".format(result_location)), None - attachment_id = match.group('attachment_id') + attachment_id = match.group("attachment_id") return phantom.APP_SUCCESS, attachment_id def _get_message(self, action_result, user_id, message_id): endpoint = "/users/{}/messages/{}".format(user_id, message_id) - ret_val, response = self._make_rest_call_helper(action_result, endpoint, method='get') + ret_val, response = self._make_rest_call_helper(action_result, endpoint, method="get") if phantom.is_fail(ret_val): return
action_result.set_status(phantom.APP_ERROR, "Failed to get email with id: {}".format(message_id)), None @@ -3160,24 +2844,25 @@ def _handle_send_email(self, param): action_result = self.add_action_result(ActionResult(dict(param))) config = self.get_config() - from_email = param.get('from') or config.get('email_address') - to_emails = [email for x in param.get('to', '').split(',') if (email := x.strip())] - cc_emails = [email for x in param.get('cc', '').split(',') if (email := x.strip())] - bcc_emails = [email for x in param.get('bcc', '').split(',') if (email := x.strip())] + from_email = param.get("from") or config.get("email_address") + to_emails = [email for x in param.get("to", "").split(",") if (email := x.strip())] + cc_emails = [email for x in param.get("cc", "").split(",") if (email := x.strip())] + bcc_emails = [email for x in param.get("bcc", "").split(",") if (email := x.strip())] - subject = param['subject'] + subject = param["subject"] try: - headers = json.loads(param.get('headers', '{}')) + headers = json.loads(param.get("headers", "{}")) except Exception: return action_result.set_status(phantom.APP_ERROR, "Please enter headers in a valid JSON format") - body = param['body'] - vault_ids = [vault_id for x in param.get('attachments', '').split(',') if (vault_id := x.strip())] + body = param["body"] + vault_ids = [vault_id for x in param.get("attachments", "").split(",") if (vault_id := x.strip())] self.save_progress("Creating draft message") - ret_val, message_id = self._create_draft_message(action_result, subject, body, from_email, headers=headers, - to_emails=to_emails, cc_emails=cc_emails, bcc_emails=bcc_emails) + ret_val, message_id = self._create_draft_message( + action_result, subject, body, from_email, headers=headers, to_emails=to_emails, cc_emails=cc_emails, bcc_emails=bcc_emails + ) if phantom.is_fail(ret_val): return action_result self.save_progress("Created draft message with id: {}".format(message_id)) @@ -3189,10 +2874,7 @@ def _handle_send_email(self, param): if phantom.is_fail(ret_val): return action_result self.save_progress("Created attachment with id: {}".format(attachment_id)) - attachment = { - 'vault_id': vault_id, - 'attachment_id': attachment_id - } + attachment = {"vault_id": vault_id, "attachment_id": attachment_id} attachments.append(attachment) self.save_progress("Sending draft email with id: {}".format(message_id)) @@ -3235,16 +2917,14 @@ def _paginator(self, action_result, endpoint, limit=None, params=None, query=Non params = {"$top": page_size} if query: - params.update({'$filter': query}) + params.update({"$filter": query}) if is_advance_query: - params['$count'] = 'true' - headers['ConsistencyLevel'] = 'eventual' + params["$count"] = "true" + headers["ConsistencyLevel"] = "eventual" while True: - ret_val, response = self._make_rest_call_helper( - action_result, endpoint, nextLink=next_link, params=params, headers=headers - ) + ret_val, response = self._make_rest_call_helper(action_result, endpoint, nextLink=next_link, params=params, headers=headers) if phantom.is_fail(ret_val): return action_result.get_status(), None @@ -3263,6 +2943,118 @@ def _paginator(self, action_result, endpoint, limit=None, params=None, query=Non return phantom.APP_SUCCESS, list_items + def _handle_update_email(self, param): + self.save_progress(f"In action handler for: {self.get_action_identifier()}") + action_result = self.add_action_result(ActionResult(param)) + + email_addr = param["email_address"] + message_id = param["id"] + + endpoint = 
f"/users/{email_addr}/messages/{message_id}" + + categories = param.get("categories") + subject = param.get("subject") + + if subject is None and categories is None: + return action_result.set_status(phantom.APP_ERROR, "Please specify one of the email properties to update") + + data_to_send = {} + if categories is not None: + categories = [x.strip() for x in categories.split(",")] + data_to_send["categories"] = categories + + if subject is not None: + data_to_send["subject"] = subject + + self.save_progress("Updating email") + ret_val, _ = self._make_rest_call_helper(action_result, endpoint, method="patch", data=json.dumps(data_to_send)) + if phantom.is_fail(ret_val): + return action_result.get_status() + + self.save_progress(f"Getting sent email details with id: {message_id}") + ret_val, message_details = self._get_message(action_result, email_addr, message_id) + if phantom.is_fail(ret_val): + return action_result + self.save_progress("Got sent email details.") + + action_result.add_data(message_details) + + return action_result.set_status(phantom.APP_SUCCESS, "Successfully updated email") + + def _handle_block_sender(self, param): + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) + action_result = self.add_action_result(ActionResult(dict(param))) + + message = param["message_id"] + user = param["user_id"] + move_to_junk_folder = param.get("move_to_junk_folder", False) + + endpoint = f"/users/{user}/messages/{message}/markAsJunk" + self.save_progress(f"endpoint {endpoint}") + + ret_val, response = self._make_rest_call_helper( + action_result, endpoint, data=json.dumps({"moveToJunk": move_to_junk_folder}), method="post", beta=True + ) + + if phantom.is_fail(ret_val): + return action_result.set_status(phantom.APP_ERROR, f"Moving email with id: {message} to junk folder failed") + + action_result.add_data(response) + return action_result.set_status(phantom.APP_SUCCESS) + + def _handle_unblock_sender(self, param): + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) + action_result = self.add_action_result(ActionResult(dict(param))) + + message = param["message_id"] + user = param["user_id"] + move_to_inbox = param.get("move_to_inbox", False) + + endpoint = f"/users/{user}/messages/{message}/markAsNotJunk" + self.save_progress(f"endpoint {endpoint}") + + ret_val, response = self._make_rest_call_helper( + action_result, endpoint, data=json.dumps({"moveToInbox": move_to_inbox}), method="post", beta=True + ) + + if phantom.is_fail(ret_val): + return action_result.set_status(phantom.APP_ERROR, f"Moving email with id: {message} to inbox folder failed") + + action_result.add_data(response) + return action_result.set_status(phantom.APP_SUCCESS) + + def _handle_resolve_name(self, param): + self.save_progress("In action handler for: {0}".format(self.get_action_identifier())) + action_result = self.add_action_result(ActionResult(dict(param))) + + # id or userPrincipalName + email = param["email"] + + endpoint = f"/users?$filter=startswith(displayName,'{email}') or startswith(mail,'{email}')" + ret_val, responses = self._make_rest_call_helper(action_result, endpoint) + self.save_progress(f"Fetching user ended witch {ret_val}") + + if phantom.is_fail(ret_val): + return action_result.set_status(phantom.APP_ERROR, "Got invalid ret val") + + for response in responses.get("value"): + + user_id = response.get("id") + + endpoint_other_mails = f"/users/{user_id}?$select=mailNickname,proxyAddresses,otherMails" + endpoint_other_address = 
f"/users/{user_id}?$select=city,state,street,postalCode" + endpoint_mailbox = f"/users/{user_id}/mailboxSettings/userPurpose" + + ret_val_proxy, response_proxy = self._make_rest_call_helper(action_result, endpoint_other_mails) + ret_val_address, response_address = self._make_rest_call_helper(action_result, endpoint_other_address) + ret_val_mailbox, response_mailbox = self._make_rest_call_helper(action_result, endpoint_mailbox) + + self.save_progress(f"Got statuses: mails: {ret_val_proxy}, address: {ret_val_address}, mailbox: {ret_val_mailbox}") + + action_result.add_data(response | response_proxy | response_address | (response_mailbox or {"userPurpose": None})) + + return action_result.set_status(phantom.APP_SUCCESS) + def handle_action(self, param): ret_val = phantom.APP_SUCCESS @@ -3272,6 +3064,15 @@ def handle_action(self, param): self.debug_print("action_id", self.get_action_identifier()) + if action_id == "resolve_name": + ret_val = self._handle_resolve_name(param) + + if action_id == "block_sender": + ret_val = self._handle_block_sender(param) + + if action_id == "unblock_sender": + ret_val = self._handle_unblock_sender(param) + if action_id == "test_connectivity": ret_val = self._handle_test_connectivity(param) @@ -3305,7 +3106,7 @@ def handle_action(self, param): elif action_id == "list_groups": ret_val = self._handle_list_groups(param) - elif action_id == 'list_group_members': + elif action_id == "list_group_members": ret_val = self._handle_list_group_members(param) elif action_id == "list_users": @@ -3332,9 +3133,12 @@ def handle_action(self, param): elif action_id == "get_rule": ret_val = self._handle_get_rule(param) - elif action_id == 'send_email': + elif action_id == "send_email": ret_val = self._handle_send_email(param) + elif action_id == "update_email": + ret_val = self._handle_update_email(param) + return ret_val def _get_token(self, action_result): @@ -3371,9 +3175,7 @@ def _get_token(self, action_result): ) self.debug_print("Generating token...") - ret_val, resp_json = self._make_rest_call( - action_result, req_url, headers=headers, data=data, method="post" - ) + ret_val, resp_json = self._make_rest_call(action_result, req_url, headers=headers, data=data, method="post") if phantom.is_fail(ret_val): return action_result.get_status() # Save the response on the basis of admin_access @@ -3407,10 +3209,10 @@ def _get_token(self, action_result): if self._admin_access: - if self._access_token != self._state.get('admin_auth', {}).get('access_token'): + if self._access_token != self._state.get("admin_auth", {}).get("access_token"): return action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_INVALID_PERMISSION_ERROR) else: - if self._access_token != self._state.get('non_admin_auth', {}).get('access_token'): + if self._access_token != self._state.get("non_admin_auth", {}).get("access_token"): return action_result.set_status(phantom.APP_ERROR, MSGOFFICE365_INVALID_PERMISSION_ERROR) self.debug_print("Token generated successfully") @@ -3437,23 +3239,22 @@ def initialize(self): # Load all the asset configuration in global variables self._state = self.load_state() - self._tenant = config['tenant'] - self._client_id = config['client_id'] - self._client_secret = config['client_secret'] - self._admin_access = config.get('admin_access') - self._admin_consent = config.get('admin_consent') - self._scope = config.get('scope') if config.get('scope') else None + self._tenant = config["tenant"] + self._client_id = config["client_id"] + self._client_secret = config["client_secret"] + 
self._admin_access = config.get("admin_access") + self._admin_consent = config.get("admin_consent") + self._scope = config.get("scope") if config.get("scope") else None self._number_of_retries = config.get("retry_count", MSGOFFICE365_DEFAULT_NUMBER_OF_RETRIES) - ret_val, self._number_of_retries = _validate_integer(self, self._number_of_retries, - "'Maximum attempts to retry the API call' asset configuration") + ret_val, self._number_of_retries = _validate_integer( + self, self._number_of_retries, "'Maximum attempts to retry the API call' asset configuration" + ) if phantom.is_fail(ret_val): return self.get_status() - self._retry_wait_time = config.get( - "retry_wait_time", MSGOFFICE365_DEFAULT_RETRY_WAIT_TIME - ) + self._retry_wait_time = config.get("retry_wait_time", MSGOFFICE365_DEFAULT_RETRY_WAIT_TIME) ret_val, self._retry_wait_time = _validate_integer( self, self._retry_wait_time, @@ -3469,17 +3270,11 @@ def initialize(self): "Please provide scope for non-admin access in the asset configuration", ) - self._access_token = self._state.get("non_admin_auth", {}).get( - "access_token", None - ) - self._refresh_token = self._state.get("non_admin_auth", {}).get( - "refresh_token", None - ) + self._access_token = self._state.get("non_admin_auth", {}).get("access_token", None) + self._refresh_token = self._state.get("non_admin_auth", {}).get("refresh_token", None) else: - self._access_token = self._state.get("admin_auth", {}).get( - "access_token", None - ) + self._access_token = self._state.get("admin_auth", {}).get("access_token", None) if action_id == "test_connectivity": # User is trying to complete the authentication flow, so just return True from here so that test connectivity continues @@ -3489,9 +3284,7 @@ def initialize(self): # if it was not and the current action is not test connectivity then it's an error if self._admin_access and not admin_consent: - return self.set_status( - phantom.APP_ERROR, MSGOFFICE365_RUN_CONNECTIVITY_MSG - ) + return self.set_status(phantom.APP_ERROR, MSGOFFICE365_RUN_CONNECTIVITY_MSG) if not self._admin_access and (not self._access_token or not self._refresh_token): ret_val = self._get_token(action_result) @@ -3499,9 +3292,7 @@ def initialize(self): if phantom.is_fail(ret_val): return self.set_status( phantom.APP_ERROR, - "{0}. {1}".format( - MSGOFFICE365_RUN_CONNECTIVITY_MSG, action_result.get_message() - ), + "{0}. {1}".format(MSGOFFICE365_RUN_CONNECTIVITY_MSG, action_result.get_message()), ) # Create ProcessEmail Object for on_poll @@ -3559,9 +3350,7 @@ def finalize(self): login_url = "{}login".format(BaseConnector._get_phantom_base_url()) try: print("Accessing the Login page") - r = requests.get( - login_url, verify=verify, timeout=MSGOFFICE365_DEFAULT_REQUEST_TIMEOUT - ) + r = requests.get(login_url, verify=verify, timeout=MSGOFFICE365_DEFAULT_REQUEST_TIMEOUT) csrftoken = r.cookies["csrftoken"] data = { "username": args.username, @@ -3584,9 +3373,7 @@ def finalize(self): session_id = r2.cookies["sessionid"] except Exception as e: - print( - "Unable to get session id from the platform. Error: {0}".format(str(e)) - ) + print("Unable to get session id from the platform. 
Error: {0}".format(str(e))) sys.exit(1) if len(sys.argv) < 2: diff --git a/office365_consts.py b/office365_consts.py index 4e72ea9..8a699bf 100644 --- a/office365_consts.py +++ b/office365_consts.py @@ -20,26 +20,34 @@ SPLUNK_SOAR_ASSET_INFO_URL = "{url}rest/asset/{asset_id}" SPLUNK_SOAR_CONTAINER_INFO_URL = "{url}rest/container/{container_id}" O365_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" -MSGOFFICE365_RUN_CONNECTIVITY_MSG = "Please run test connectivity first to complete authorization flow and "\ - "generate a token that the app can use to make calls to the server " -MSGOFFICE365_WELL_KNOWN_FOLDERS_FILTER = "displayName eq 'archive' or displayName eq 'clutter' or "\ - "displayName eq 'conflicts' or displayName eq 'conversation history' or displayName eq 'deleted items' or "\ - "displayName eq 'drafts' or displayName eq 'inbox' or displayName eq 'junk email' or displayName eq 'local failures' or"\ - " displayName eq 'msg folder root' or displayName eq 'outbox' or displayName eq 'recoverable items deletions' or "\ - "displayName eq 'scheduled' or displayName eq 'search folders' or displayName eq 'sent items' or displayName eq 'server failures' or "\ +MSGOFFICE365_RUN_CONNECTIVITY_MSG = ( + "Please run test connectivity first to complete authorization flow and " "generate a token that the app can use to make calls to the server " +) +MSGOFFICE365_WELL_KNOWN_FOLDERS_FILTER = ( + "displayName eq 'archive' or displayName eq 'clutter' or " + "displayName eq 'conflicts' or displayName eq 'conversation history' or displayName eq 'deleted items' or " + "displayName eq 'drafts' or displayName eq 'inbox' or displayName eq 'junk email' or displayName eq 'local failures' or" + " displayName eq 'msg folder root' or displayName eq 'outbox' or displayName eq 'recoverable items deletions' or " + "displayName eq 'scheduled' or displayName eq 'search folders' or displayName eq 'sent items' or displayName eq 'server failures' or " "displayName eq 'sync issues'" -MSGOFFICE365_STATE_FILE_CORRUPT_ERROR = "Error occurred while loading the state file. " \ - "Resetting the state file with the default format. Please test the connectivity." +) +MSGOFFICE365_STATE_FILE_CORRUPT_ERROR = ( + "Error occurred while loading the state file. " "Resetting the state file with the default format. Please test the connectivity." +) -MSGOFFICE365_AUTHORIZE_TROUBLESHOOT_MSG = 'If authorization URL fails to communicate with your '\ - 'Splunk SOAR instance, check whether you have: '\ - ' 1. Specified the Web Redirect URL of your App -- The Redirect URL should be /result . '\ - ' 2. Configured the base URL of your Splunk SOAR Instance at Administration -> Company Settings -> Info' -MSGOFFICE365_INVALID_PERMISSION_ERROR = "Error occurred while saving the newly generated access token "\ - "(in place of the expired token) in the state file." +MSGOFFICE365_AUTHORIZE_TROUBLESHOOT_MSG = ( + "If authorization URL fails to communicate with your " + "Splunk SOAR instance, check whether you have: " + " 1. Specified the Web Redirect URL of your App -- The Redirect URL should be /result . " + " 2. Configured the base URL of your Splunk SOAR Instance at Administration -> Company Settings -> Info" +) +MSGOFFICE365_INVALID_PERMISSION_ERROR = ( + "Error occurred while saving the newly generated access token " "(in place of the expired token) in the state file." +) MSGOFFICE365_INVALID_PERMISSION_ERROR += " Please check the owner, owner group, and the permissions of the state file. 
The Splunk SOAR " -MSGOFFICE365_INVALID_PERMISSION_ERROR += "user should have the correct access rights and ownership for the corresponding state file "\ - "(refer to readme file for more information)." +MSGOFFICE365_INVALID_PERMISSION_ERROR += ( + "user should have the correct access rights and ownership for the corresponding state file " "(refer to readme file for more information)." +) MSGOFFICE365_NO_DATA_FOUND = "No data found" MSGOFFICE365_DUPLICATE_CONTAINER_FOUND_MSG = "duplicate container found" MSGOFFICE365_ERROR_EMPTY_RESPONSE = "Status Code {code}. Empty response and no information in the header." @@ -47,13 +55,17 @@ MSGOFFICE365_DEFAULT_REQUEST_TIMEOUT = 30 # in seconds MSGOFFICE365_DEFAULT_NUMBER_OF_RETRIES = 3 MSGOFFICE365_DEFAULT_RETRY_WAIT_TIME = 60 # in seconds -MSGOFFICE365_CONTAINER_DESCRIPTION = 'Email ingested using MS Graph API - {last_modified_time}' -MSGOFFICE365_HTTP_401_STATUS_CODE = '401' -MSGOFFICE365_INVALID_CLIENT_ID_ERROR_CODE = 'AADSTS700016' -MSGOFFICE365_INVALID_TENANT_ID_FORMAT_ERROR_CODE = 'AADSTS900023' -MSGOFFICE365_INVALID_TENANT_ID_NOT_FOUND_ERROR_CODE = 'AADSTS90002' -MSGOFFICE365_ASSET_PARAM_CHECK_LIST_ERROR = [MSGOFFICE365_HTTP_401_STATUS_CODE, MSGOFFICE365_INVALID_CLIENT_ID_ERROR_CODE, - MSGOFFICE365_INVALID_TENANT_ID_FORMAT_ERROR_CODE, MSGOFFICE365_INVALID_TENANT_ID_NOT_FOUND_ERROR_CODE] +MSGOFFICE365_CONTAINER_DESCRIPTION = "Email ingested using MS Graph API - {last_modified_time}" +MSGOFFICE365_HTTP_401_STATUS_CODE = "401" +MSGOFFICE365_INVALID_CLIENT_ID_ERROR_CODE = "AADSTS700016" +MSGOFFICE365_INVALID_TENANT_ID_FORMAT_ERROR_CODE = "AADSTS900023" +MSGOFFICE365_INVALID_TENANT_ID_NOT_FOUND_ERROR_CODE = "AADSTS90002" +MSGOFFICE365_ASSET_PARAM_CHECK_LIST_ERROR = [ + MSGOFFICE365_HTTP_401_STATUS_CODE, + MSGOFFICE365_INVALID_CLIENT_ID_ERROR_CODE, + MSGOFFICE365_INVALID_TENANT_ID_FORMAT_ERROR_CODE, + MSGOFFICE365_INVALID_TENANT_ID_NOT_FOUND_ERROR_CODE, +] # Constants relating to '_get_error_message_from_exception' @@ -61,9 +73,7 @@ # Constants relating to 'validate_integer' MSGOFFICE365_VALID_INT_MSG = "Please provide a valid integer value in the {param} parameter" -MSGOFFICE365_NON_NEG_NON_ZERO_INT_MSG = ( - "Please provide a valid non-zero positive integer value in the {param} parameter" -) +MSGOFFICE365_NON_NEG_NON_ZERO_INT_MSG = "Please provide a valid non-zero positive integer value in the {param} parameter" AUTH_FAILURE_MSG = [ "token is invalid", "Access token has expired", @@ -71,7 +81,7 @@ "AuthenticationFailed", "TokenExpired", "InvalidAuthenticationToken", - "Lifetime validation failed, the token is expired." 
+ "Lifetime validation failed, the token is expired.", ] MSGOFFICE365_NON_NEG_INT_MSG = "Please provide a valid non-negative integer value in the {param} parameter" MSGOFFICE365_ENCRYPTION_ERROR = "Error occurred while encrypting the state file" @@ -111,5 +121,5 @@ "toRecipients", "uniqueBody", "webLink", - "internetMessageId" + "internetMessageId", ] diff --git a/office365_resolve_name.html b/office365_resolve_name.html new file mode 100644 index 0000000..0c4c166 --- /dev/null +++ b/office365_resolve_name.html @@ -0,0 +1,379 @@ +{% extends 'widgets/widget_template.html' %} +{% load custom_template %} + +{% block custom_title_prop %}{% if title_logo %}style="background-size: auto 60%; background-position: 50%; background-repeat: no-repeat; background-image: url('/app_resource/{{ title_logo }}');"{% endif %}{% endblock %} +{% block title1 %}{{ title1 }}{% endblock %} +{% block title2 %}{{ title2 }}{% endblock %} +{% block custom_tools %} +{% endblock %} + +{% block widget_content %} + + + + +
+ {% for result in results %}
+ {% if not result.data %}
+ <h4 class="wf-h4-style">No data found</h4>
+ {% else %}
+ <div class="resolve-name-data">
+ <h4 class="wf-h4-style">Action Parameters</h4>
+ <table class="phantom-table dataTable">
+ <tr>
+ <th>Email Address</th>
+ <td>{{ result.param.email }}</td>
+ </tr>
+ </table>
+ <h4 class="wf-h4-style">Data</h4>
+ <table class="phantom-table dataTable">
+ <tr>
+ <th>id</th>
+ <th>Principal name</th>
+ <th>given name</th>
+ <th>surname</th>
+ <th>display name</th>
+ <th>mail nickname</th>
+ <th>mail</th>
+ <th>other mails</th>
+ <th>proxy email addresses</th>
+ <th>job title</th>
+ <th>office location</th>
+ <th>value</th>
+ <th>mobile phone</th>
+ <th>business phones</th>
+ <th>preferred language</th>
+ <th>state</th>
+ <th>postal code</th>
+ </tr>
+ {% for data in result.data %}
+ <tr>
+ <td>{% if data.id is not None %}{{ data.id }}{% else %}None{% endif %}</td>
+ <td>{% if data.userPrincipalName is not None %}{{ data.userPrincipalName }}{% else %}None{% endif %}</td>
+ <td>{% if data.givenName is not None %}{{ data.givenName }}{% else %}None{% endif %}</td>
+ <td>{% if data.surname is not None %}{{ data.surname }}{% else %}None{% endif %}</td>
+ <td>{% if data.displayName is not None %}{{ data.displayName }}{% else %}None{% endif %}</td>
+ <td>{% if data.mailNickname is not None %}{{ data.mailNickname }}{% else %}None{% endif %}</td>
+ <td>{% if data.mail is not None %}{{ data.mail }}{% else %}None{% endif %}</td>
+ <td>{% if data.otherMails is not None %}{{ data.otherMails }}{% else %}None{% endif %}</td>
+ <td>{% if data.proxyAddresses is not None %}{{ data.proxyAddresses }}{% else %}None{% endif %}</td>
+ <td>{% if data.jobTitle is not None %}{{ data.jobTitle }}{% else %}None{% endif %}</td>
+ <td>{% if data.officeLocation is not None %}{{ data.officeLocation }}{% else %}None{% endif %}</td>
+ <td>{% if data.value is not None %}{{ data.value }}{% else %}None{% endif %}</td>
+ <td>{% if data.mobilePhone is not None %}{{ data.mobilePhone }}{% else %}None{% endif %}</td>
+ <td>{% if data.businessPhones is not None %}{{ data.businessPhones }}{% else %}None{% endif %}</td>
+ <td>{% if data.preferredLanguage is not None %}{{ data.preferredLanguage }}{% else %}None{% endif %}</td>
+ <td>{% if data.state is not None %}{{ data.state }}{% else %}None{% endif %}</td>
+ <td>{% if data.postalCode is not None %}{{ data.postalCode }}{% else %}None{% endif %}</td>
+ </tr>
+ {% endfor %}
+ </table>
+ </div>
+ {% endif %}
+ {% endfor %}
+ + +{% endblock %} diff --git a/office365_view.py b/office365_view.py index 538f096..2008a9a 100644 --- a/office365_view.py +++ b/office365_view.py @@ -105,4 +105,7 @@ def display_view(provides, all_app_runs, context): if provides == "list rules": return_page = "office365_list_rules.html" + if provides == "resolve name": + return_page = "office365_resolve_name.html" + return return_page diff --git a/process_email.py b/process_email.py index 2de3122..616eefe 100644 --- a/process_email.py +++ b/process_email.py @@ -38,30 +38,26 @@ from office365_consts import ERROR_MSG_UNAVAILABLE -_container_common = { - "run_automation": False # Don't run any playbooks, when this artifact is added -} +_container_common = {"run_automation": False} # Don't run any playbooks, when this artifact is added -_artifact_common = { - "run_automation": False # Don't run any playbooks, when this artifact is added -} +_artifact_common = {"run_automation": False} # Don't run any playbooks, when this artifact is added FILE_EXTENSIONS = { - '.vmsn': ['os memory dump', 'vm snapshot file'], - '.vmss': ['os memory dump', 'vm suspend file'], - '.js': ['javascript'], - '.doc': ['doc'], - '.docx': ['doc'], - '.xls': ['xls'], - '.xlsx': ['xls'], + ".vmsn": ["os memory dump", "vm snapshot file"], + ".vmss": ["os memory dump", "vm suspend file"], + ".js": ["javascript"], + ".doc": ["doc"], + ".docx": ["doc"], + ".xls": ["xls"], + ".xlsx": ["xls"], } MAGIC_FORMATS = [ - (re.compile('^PE.* Windows'), ['pe file', 'hash']), - (re.compile('^MS-DOS executable'), ['pe file', 'hash']), - (re.compile('^PDF '), ['pdf']), - (re.compile('^MDMP crash'), ['process dump']), - (re.compile('^Macromedia Flash'), ['flash']), + (re.compile("^PE.* Windows"), ["pe file", "hash"]), + (re.compile("^MS-DOS executable"), ["pe file", "hash"]), + (re.compile("^PDF "), ["pdf"]), + (re.compile("^MDMP crash"), ["process dump"]), + (re.compile("^Macromedia Flash"), ["flash"]), ] MSG_DEFAULT_ARTIFACT_COUNT = 100 @@ -89,26 +85,39 @@ PROC_EMAIL_JSON_EMAIL_HEADERS = "email_headers" PROC_EMAIL_CONTENT_TYPE_MSG = "message/rfc822" -URI_REGEX = r'([Hh][Tt][Tt][Pp][Ss]?:\/\/)((?:[:@\.\-_0-9]|[^ -@\[-\`\{-\~\s]|' \ - r'[\[\(][^\s\[\]\(\)]*[\]\)])+)((?:[\/\?]+(?:[^\[\'\"\(\{\)\]\}\s]|[\[\(][^\[\]\(\)]*[\]\)])*)*)[\/]?' +URI_REGEX = ( + r"([Hh][Tt][Tt][Pp][Ss]?:\/\/)((?:[:@\.\-_0-9]|[^ -@\[-\`\{-\~\s]|" + r"[\[\(][^\s\[\]\(\)]*[\]\)])+)((?:[\/\?]+(?:[^\[\'\"\(\{\)\]\}\s]|[\[\(][^\[\]\(\)]*[\]\)])*)*)[\/]?" 
+) EMAIL_REGEX = r"\b[A-Z0-9._%+-]+@+[A-Z0-9.-]+\.[A-Z]{2,}\b" EMAIL_REGEX2 = r'".*"@[A-Z0-9.-]+\.[A-Z]{2,}\b' HASH_REGEX = r"\b[0-9a-fA-F]{32}\b|\b[0-9a-fA-F]{40}\b|\b[0-9a-fA-F]{64}\b" -IP_REGEX = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}' -IPV6_REGEX = r'\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|' -IPV6_REGEX += r'(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))' -IPV6_REGEX += (r'|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|') -IPV6_REGEX += (r'(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|') -IPV6_REGEX += (r'(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|') -IPV6_REGEX += (r'(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|') -IPV6_REGEX += (r'(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|') -IPV6_REGEX += (r'(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)' - r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*') +IP_REGEX = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}" +IPV6_REGEX = r"\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|" +IPV6_REGEX += r"(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))" +IPV6_REGEX += ( + r"|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|" +) +IPV6_REGEX += ( + r"(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" + r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|" +) +IPV6_REGEX += ( + r"(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" + r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|" +) +IPV6_REGEX += ( + r"(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" + r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|" +) +IPV6_REGEX += ( + r"(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" + r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|" +) +IPV6_REGEX += ( + r"(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)" + r"(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*" +) uri_regexc = re.compile(URI_REGEX) @@ -197,7 +206,7 @@ def is_ipv6(self, input_ip): def _debug_print(self, *args): - if self._base_connector and hasattr(self._base_connector, 'debug_print'): + if self._base_connector and hasattr(self._base_connector, "debug_print"): self._base_connector.debug_print(*args) return @@ -211,22 +220,20 @@ def _get_string(self, input_str, charset): input_str = str(make_header(decode_header(input_str))) except Exception: input_str = self._decode_uni_string(input_str, input_str) - self._debug_print( - "Error occurred while converting to string with specific encoding {}".format(input_str) - ) + self._debug_print("Error occurred while converting to string 
with specific encoding {}".format(input_str)) return input_str def _clean_url(self, url): - url = url.strip('\r\n') + url = url.strip("\r\n") # Check before splicing, find returns -1 if not found # _and_ you will end up splicing on -1 (incorrectly) - if '<' in url: - url = url[:url.find('<')] + if "<" in url: + url = url[: url.find("<")] - if '>' in url: - url = url[:url.find('>')] + if ">" in url: + url = url[: url.find(">")] return url.strip() @@ -242,7 +249,7 @@ def _extract_urls_domains(self, file_data, urls, domains, parent_id=None): emails.extend(re.findall(email_regexc2, file_data)) for curr_email in emails: - domain = curr_email[curr_email.rfind('@') + 1:] + domain = curr_email[curr_email.rfind("@") + 1 :] if domain and (not ph_utils.is_ip(domain)): extracted_domains.add(domain) @@ -268,16 +275,16 @@ def _extract_urls_domains(self, file_data, urls, domains, parent_id=None): # in the email uri_text.append(self._clean_url(link.get_text())) # it's html, so get all the urls - if not link['href'].startswith('mailto:'): - uris.append(link['href']) + if not link["href"].startswith("mailto:"): + uris.append(link["href"]) for src in srcs: uri_text.append(self._clean_url(src.get_text())) # it's html, so get all the urls - uris.append(src['src']) + uris.append(src["src"]) if uri_text: - uri_text = [x for x in uri_text if x.startswith('http')] + uri_text = [x for x in uri_text if x.startswith("http")] if uri_text: uris.extend(uri_text) @@ -291,7 +298,7 @@ def _extract_urls_domains(self, file_data, urls, domains, parent_id=None): unique_uris = set(uris) # Validate the uris - validate_uri = URLValidator(schemes=['http', 'https']) + validate_uri = URLValidator(schemes=["http", "https"]) validated_uris = list() for uri in unique_uris: try: @@ -304,9 +311,9 @@ def _extract_urls_domains(self, file_data, urls, domains, parent_id=None): # add the uris to the urls for uri in validated_uris: if parent_id: - uri_dict = {'requestURL': uri, 'parentInternetMessageId': parent_id} + uri_dict = {"requestURL": uri, "parentInternetMessageId": parent_id} else: - uri_dict = {'requestURL': uri} + uri_dict = {"requestURL": uri} urls.append(uri_dict) if self._config[PROC_EMAIL_JSON_EXTRACT_DOMAINS]: @@ -316,24 +323,24 @@ def _extract_urls_domains(self, file_data, urls, domains, parent_id=None): extracted_domains.add(domain) # work on any mailto urls if present if links: - mailtos = [x['href'] for x in links if (x['href'].startswith('mailto:'))] + mailtos = [x["href"] for x in links if (x["href"].startswith("mailto:"))] for curr_email in mailtos: - domain = curr_email[curr_email.find('@') + 1:] + domain = curr_email[curr_email.find("@") + 1 :] if domain and (not self._is_ip(domain)): - if '?' in domain: - domain = domain[:domain.find('?')] + if "?" 
in domain: + domain = domain[: domain.find("?")] extracted_domains.add(domain) for domain in extracted_domains: if parent_id: - domains.append({'destinationDnsDomain': domain, 'parentInternetMessageId': parent_id}) + domains.append({"destinationDnsDomain": domain, "parentInternetMessageId": parent_id}) else: - domains.append({'destinationDnsDomain': domain}) + domains.append({"destinationDnsDomain": domain}) return def _sanitize_file_name(self, file_name): - return re.sub('[,"\']', '', file_name) + return re.sub("[,\"']", "", file_name) def _get_ips(self, file_data, ips, parent_id=None): @@ -359,14 +366,14 @@ def _get_ips(self, file_data, ips, parent_id=None): unique_ips = set(ips_in_mail) for ip in unique_ips: if parent_id: - ips.append({'sourceAddress': ip, 'parentInternetMessageId': parent_id}) + ips.append({"sourceAddress": ip, "parentInternetMessageId": parent_id}) else: - ips.append({'sourceAddress': ip}) + ips.append({"sourceAddress": ip}) def _sanitize_dict(self, obj): if isinstance(obj, str): - return obj.replace('\x00', '') + return obj.replace("\x00", "") if isinstance(obj, list): return [self._sanitize_dict(item) for item in obj] if isinstance(obj, set): @@ -386,19 +393,19 @@ def _extract_hashes(self, file_data, hashes, parent_id=None): unique_hashes = set(hashs_in_mail) for hash in unique_hashes: if parent_id: - hashes.append({'fileHash': hash, 'parentInternetMessageId': parent_id}) + hashes.append({"fileHash": hash, "parentInternetMessageId": parent_id}) else: - hashes.append({'fileHash': hash}) + hashes.append({"fileHash": hash}) def _handle_body(self, body, parsed_mail, body_index, email_id): - local_file_path = body['file_path'] - charset = body.get('charset') + local_file_path = body["file_path"] + charset = body.get("charset") parent_id = None - if 'True' in local_file_path: - for item in parsed_mail['email_headers']: - parent_id = item['cef'].get('parentInternetMessageId') + if "True" in local_file_path: + for item in parsed_mail["email_headers"]: + parent_id = item["cef"].get("parentInternetMessageId") if parent_id: break @@ -408,10 +415,10 @@ def _handle_body(self, body, parsed_mail, body_index, email_id): domains = parsed_mail[PROC_EMAIL_JSON_DOMAINS] file_data = None - with open(local_file_path, 'rb') as f: # noqa + with open(local_file_path, "rb") as f: # noqa file_data = f.read() self._base_connector.debug_print("Reading file data using binary mode") - file_data = self._get_string(file_data, 'utf-8') + file_data = self._get_string(file_data, "utf-8") if file_data is None or len(file_data) == 0: return phantom.APP_ERROR @@ -437,10 +444,10 @@ def _add_artifacts(self, input_set, artifact_name, start_index, artifacts): artifact = {} artifact.update(_artifact_common) - artifact['source_data_identifier'] = start_index + added_artifacts - artifact['cef'] = item - artifact['name'] = artifact_name - self._debug_print('Artifact:', artifact) + artifact["source_data_identifier"] = start_index + added_artifacts + artifact["cef"] = item + artifact["name"] = artifact_name + self._debug_print("Artifact:", artifact) artifacts.append(artifact) added_artifacts += 1 @@ -449,8 +456,8 @@ def _add_artifacts(self, input_set, artifact_name, start_index, artifacts): def _parse_email_headers_as_inline(self, file_data, parsed_mail, charset, email_id): # remove the 'Forwarded Message' from the email text and parse it - p = re.compile(r'.*Forwarded Message.*\r\n(.*)', re.IGNORECASE) - email_text = p.sub(r'\1', file_data.strip()[:500]) + p = re.compile(r".*Forwarded Message.*\r\n(.*)", 
re.IGNORECASE) + email_text = p.sub(r"\1", file_data.strip()[:500]) mail = email.message_from_string(email_text) self._parse_email_headers(parsed_mail, mail, charset, add_email_id=email_id) @@ -461,7 +468,7 @@ def _add_email_header_artifacts(self, email_header_artifacts, start_index, artif added_artifacts = 0 for artifact in email_header_artifacts: - artifact['source_data_identifier'] = start_index + added_artifacts + artifact["source_data_identifier"] = start_index + added_artifacts artifacts.append(artifact) added_artifacts += 1 @@ -481,16 +488,16 @@ def _create_artifacts(self, parsed_mail, ingest_email=True): artifact_id = 0 # add artifacts - added_artifacts = self._add_artifacts(ips, 'IP Artifact', artifact_id, self._artifacts) + added_artifacts = self._add_artifacts(ips, "IP Artifact", artifact_id, self._artifacts) artifact_id += added_artifacts - added_artifacts = self._add_artifacts(hashes, 'Hash Artifact', artifact_id, self._artifacts) + added_artifacts = self._add_artifacts(hashes, "Hash Artifact", artifact_id, self._artifacts) artifact_id += added_artifacts - added_artifacts = self._add_artifacts(urls, 'URL Artifact', artifact_id, self._artifacts) + added_artifacts = self._add_artifacts(urls, "URL Artifact", artifact_id, self._artifacts) artifact_id += added_artifacts - added_artifacts = self._add_artifacts(domains, 'Domain Artifact', artifact_id, self._artifacts) + added_artifacts = self._add_artifacts(domains, "Domain Artifact", artifact_id, self._artifacts) artifact_id += added_artifacts if ingest_email: @@ -504,7 +511,7 @@ def _decode_uni_string(self, input_str, def_name): # try to find all the decoded strings, we could have multiple decoded strings # or a single decoded string between two normal strings separated by \r\n # YEAH...it could get that messy - encoded_strings = re.findall(r'=\?.*\?=', input_str, re.I) + encoded_strings = re.findall(r"=\?.*\?=", input_str, re.I) # return input_str as is, no need to do any conversion if not encoded_strings: @@ -513,7 +520,7 @@ def _decode_uni_string(self, input_str, def_name): # get the decoded strings try: decoded_strings = [decode_header(x)[0] for x in encoded_strings] - decoded_strings = [{'value': x[0], 'encoding': x[1]} for x in decoded_strings] + decoded_strings = [{"value": x[0], "encoding": x[1]} for x in decoded_strings] except Exception as e: error_msg = _get_error_msg_from_exception(e) self._debug_print("Decoding: {0}. 
{1}".format(encoded_strings, error_msg)) @@ -522,7 +529,7 @@ def _decode_uni_string(self, input_str, def_name): # convert to dict for safe access, if it's an empty list, the dict will be empty decoded_strings = dict(enumerate(decoded_strings)) - new_str = '' + new_str = "" new_str_create_count = 0 for i, encoded_string in enumerate(encoded_strings): @@ -532,8 +539,8 @@ def _decode_uni_string(self, input_str, def_name): # nothing to replace with continue - value = decoded_string.get('value') - encoding = decoded_string.get('encoding') + value = decoded_string.get("value") + encoding = decoded_string.get("encoding") if not encoding or not value: # nothing to replace with @@ -549,7 +556,7 @@ def _decode_uni_string(self, input_str, def_name): new_str_create_count += 1 except Exception: try: - if encoding != 'utf-8': + if encoding != "utf-8": value = str(value, encoding) except Exception: pass @@ -595,8 +602,8 @@ def _handle_if_body(self, content_disp, content_id, content_type, part, bodies, if content_disp is None: process_as_body = True # if content disposition is inline - elif content_disp.lower().strip() == 'inline': - if 'text/html' in content_type or 'text/plain' in content_type: + elif content_disp.lower().strip() == "inline": + if "text/html" in content_type or "text/plain" in content_type: process_as_body = True if not process_as_body: @@ -607,31 +614,31 @@ def _handle_if_body(self, content_disp, content_id, content_type, part, bodies, if not part_payload: return (phantom.APP_SUCCESS, False) - if 'text/html' in content_type and self._parsed_mail[PROC_EMAIL_JSON_EMAIL_HEADERS]: + if "text/html" in content_type and self._parsed_mail[PROC_EMAIL_JSON_EMAIL_HEADERS]: # Get Email Header Artifact email_header_artifact = self._parsed_mail[PROC_EMAIL_JSON_EMAIL_HEADERS][0] - artifact_cef = email_header_artifact['cef'] + artifact_cef = email_header_artifact["cef"] html_body = self._get_string(part_payload, charset=content_charset) - artifact_cef['bodyHtml'] = html_body + artifact_cef["bodyHtml"] = html_body try: soup = BeautifulSoup(html_body, "html.parser") # Remove the script, style, footer, title and navigation part from the HTML message for element in soup(["script", "style", "footer", "title", "nav"]): element.extract() - body_text = soup.get_text(separator=' ') - split_lines = body_text.split('\n') + body_text = soup.get_text(separator=" ") + split_lines = body_text.split("\n") split_lines = [x.strip() for x in split_lines if x.strip()] - body_text = '\n'.join(split_lines) + body_text = "\n".join(split_lines) if body_text: artifact_cef["bodyText"] = body_text except Exception: self._debug_print("Cannot parse email body text details") - with open(file_path, 'wb') as f: # noqa + with open(file_path, "wb") as f: # noqa f.write(part_payload) - bodies.append({'file_path': file_path, 'charset': part.get_content_charset()}) + bodies.append({"file_path": file_path, "charset": part.get_content_charset()}) return (phantom.APP_SUCCESS, False) @@ -649,51 +656,51 @@ def _handle_attachment(self, part, tmp_dir, file_name, file_path): attach_meta_info = dict() if headers: - attach_meta_info = {'headers': dict(headers)} + attach_meta_info = {"headers": dict(headers)} for curr_attach in self._attachments_from_msgraph: - if curr_attach.get('should_ignore', False): + if curr_attach.get("should_ignore", False): continue try: - attach_content = curr_attach['content'] + attach_content = curr_attach["content"] except Exception: continue - if attach_content.strip().replace('\r\n', '') == 
part_base64_encoded.strip().replace('\r\n', ''): + if attach_content.strip().replace("\r\n", "") == part_base64_encoded.strip().replace("\r\n", ""): attach_meta_info.update(dict(curr_attach)) - del attach_meta_info['content'] - curr_attach['should_ignore'] = True + del attach_meta_info["content"] + curr_attach["should_ignore"] = True part_payload = part.get_payload(decode=True) if not part_payload: return phantom.APP_SUCCESS try: - with open(file_path, 'wb') as f: # noqa + with open(file_path, "wb") as f: # noqa f.write(part_payload) - files.append({'file_name': file_name, 'file_path': file_path, 'meta_info': attach_meta_info}) + files.append({"file_name": file_name, "file_path": file_path, "meta_info": attach_meta_info}) except IOError as ioerr: error_msg = _get_error_msg_from_exception(ioerr) if "File name too long" in error_msg: self.write_with_new_filename(part_payload, files, file_name, as_byte=False) else: - self._debug_print('Failed to write file: {}'.format(ioerr)) + self._debug_print("Failed to write file: {}".format(ioerr)) def write_with_new_filename(self, data, dict_to_fill, file_name, as_byte=False): try: fd, full_path = tempfile.mkstemp(dir=Vault.get_vault_tmp_dir()) os.close(fd) - with open(full_path, 'wb') as f: # noqa + with open(full_path, "wb") as f: # noqa if as_byte: f.write(data.as_bytes()) else: f.write(data) - dict_to_fill.append({'file_name': file_name, 'file_path': full_path}) + dict_to_fill.append({"file_name": file_name, "file_path": full_path}) except Exception as e: - self._base_connector.debug_print('Exception while writing file: {}'.format(e)) + self._base_connector.debug_print("Exception while writing file: {}".format(e)) def _handle_part(self, part, part_index, tmp_dir, extract_attach, parsed_mail, child=False): @@ -701,9 +708,9 @@ def _handle_part(self, part, part_index, tmp_dir, extract_attach, parsed_mail, c # get the file_name file_name = part.get_filename() - content_disp = part.get('Content-Disposition') - content_type = part.get('Content-Type') - content_id = part.get('Content-ID') + content_disp = part.get("Content-Disposition") + content_type = part.get("Content-Type") + content_id = part.get("Content-ID") if file_name is None: # init name and extension to default values @@ -712,7 +719,7 @@ def _handle_part(self, part, part_index, tmp_dir, extract_attach, parsed_mail, c # Try to create an extension from the content type if possible if content_type is not None: - extension = mimetypes.guess_extension(re.sub(';.*', '', content_type)) + extension = mimetypes.guess_extension(re.sub(";.*", "", content_type)) # Try to create a name from the content id if possible if content_id is not None: @@ -723,7 +730,7 @@ def _handle_part(self, part, part_index, tmp_dir, extract_attach, parsed_mail, c file_name = self._sanitize_file_name(self._decode_uni_string(file_name, file_name)) # Remove any chars that we don't want in the name - file_path = "{0}/{1}_{2}_{3}".format(tmp_dir, part_index, file_name.replace('<', '').replace('>', '').replace(' ', ''), child) + file_path = "{0}/{1}_{2}_{3}".format(tmp_dir, part_index, file_name.replace("<", "").replace(">", "").replace(" ", ""), child) self._debug_print("file_path: {0}".format(file_path)) @@ -755,7 +762,7 @@ def _update_headers(self, headers): headers_ci = CaseInsensitiveDict(headers) for curr_header_lower in self._headers_from_ews: - if headers_ci.get('message-id', 'default_value1').strip() == curr_header_lower.get('message-id', 'default_value2').strip(): + if headers_ci.get("message-id", "default_value1").strip() 
== curr_header_lower.get("message-id", "default_value2").strip(): # the headers match with the one that we got from the ews API, so update it headers.update(curr_header_lower) @@ -770,7 +777,7 @@ def _get_email_headers_from_part(self, part, charset=None): charset = part.get_content_charset() if charset is None: - charset = 'utf8' + charset = "utf8" if not email_headers: return {} @@ -787,20 +794,20 @@ def _get_email_headers_from_part(self, part, charset=None): # Handle received seperately received_headers = list() try: - received_headers = [self._get_string(x[1], charset) for x in email_headers if x[0].lower() == 'received'] + received_headers = [self._get_string(x[1], charset) for x in email_headers if x[0].lower() == "received"] except Exception as e: error_msg = _get_error_msg_from_exception(e) err = "Error occurred while handling the received header tuple separately" self._debug_print("{}. {}".format(err, error_msg)) if received_headers: - headers['Received'] = received_headers + headers["Received"] = received_headers # handle the subject string, if required add a new key - subject = headers.get('Subject') + subject = headers.get("Subject") if subject: - if type(subject) == str: - headers['decodedSubject'] = self._decode_uni_string(subject, subject) + if isinstance(subject, str): + headers["decodedSubject"] = self._decode_uni_string(subject, subject) return headers @@ -817,40 +824,40 @@ def _parse_email_headers(self, parsed_mail, part, charset=None, add_email_id=Non cef_artifact = {} cef_types = {} - if headers.get('From'): - emails = headers['From'] + if headers.get("From"): + emails = headers["From"] if emails: - cef_artifact.update({'fromEmail': emails}) + cef_artifact.update({"fromEmail": emails}) - if headers.get('To'): - emails = headers['To'] + if headers.get("To"): + emails = headers["To"] if emails: - cef_artifact.update({'toEmail': emails}) + cef_artifact.update({"toEmail": emails}) # if the header did not contain any email addresses then ignore this artifact - message_id = headers.get('message-id') + message_id = headers.get("message-id") if not cef_artifact and message_id is None: return 0 - cef_types.update({'fromEmail': ['email'], 'toEmail': ['email']}) + cef_types.update({"fromEmail": ["email"], "toEmail": ["email"]}) if headers: self._update_headers(headers) - cef_artifact['emailHeaders'] = dict(headers) + cef_artifact["emailHeaders"] = dict(headers) - for curr_key in list(cef_artifact['emailHeaders'].keys()): - if curr_key.lower().startswith('body'): - curr_value = cef_artifact['emailHeaders'].pop(curr_key) + for curr_key in list(cef_artifact["emailHeaders"].keys()): + if curr_key.lower().startswith("body"): + curr_value = cef_artifact["emailHeaders"].pop(curr_key) if self._config.get(PROC_EMAIL_JSON_EXTRACT_BODY, False): cef_artifact.update({curr_key: curr_value}) - elif curr_key == 'parentInternetMessageId': - curr_value = cef_artifact['emailHeaders'].pop(curr_key) + elif curr_key == "parentInternetMessageId": + curr_value = cef_artifact["emailHeaders"].pop(curr_key) cef_artifact.update({curr_key: curr_value}) - elif curr_key == 'parentGuid': - curr_value = cef_artifact['emailHeaders'].pop(curr_key) + elif curr_key == "parentGuid": + curr_value = cef_artifact["emailHeaders"].pop(curr_key) cef_artifact.update({curr_key: curr_value}) - elif curr_key == 'emailGuid': - curr_value = cef_artifact['emailHeaders'].pop(curr_key) + elif curr_key == "emailGuid": + curr_value = cef_artifact["emailHeaders"].pop(curr_key) cef_artifact.update({curr_key: curr_value}) # Adding the 
email id as a cef artifact crashes the UI when trying to show the action dialog box @@ -858,17 +865,17 @@ def _parse_email_headers(self, parsed_mail, part, charset=None, add_email_id=Non # from adding the emailId # add_email_id = False if add_email_id: - cef_artifact['emailId'] = add_email_id + cef_artifact["emailId"] = add_email_id if self._email_id_contains: - cef_types.update({'emailId': self._email_id_contains}) + cef_types.update({"emailId": self._email_id_contains}) artifact = {} artifact.update(_artifact_common) - artifact['name'] = 'Email Artifact' - artifact['label'] = 'email' - artifact['severity'] = self._base_connector.get_config().get('container_severity', 'medium') - artifact['cef'] = cef_artifact - artifact['cef_types'] = cef_types + artifact["name"] = "Email Artifact" + artifact["label"] = "email" + artifact["severity"] = self._base_connector.get_config().get("container_severity", "medium") + artifact["cef"] = cef_artifact + artifact["cef_types"] = cef_types email_header_artifacts.append(artifact) return len(email_header_artifacts) @@ -886,14 +893,14 @@ def _handle_mail_object(self, mail, email_id, rfc822_email, tmp_dir, start_time_ charset = mail.get_content_charset() if charset is None: - charset = 'utf-8' + charset = "utf-8" # Extract fields and place it in a dictionary - self._parsed_mail[PROC_EMAIL_JSON_SUBJECT] = mail.get('Subject', '') - self._parsed_mail[PROC_EMAIL_JSON_FROM] = mail.get('From', '') - self._parsed_mail[PROC_EMAIL_JSON_TO] = mail.get('To', '') - self._parsed_mail[PROC_EMAIL_JSON_DATE] = mail.get('Date', '') - self._parsed_mail[PROC_EMAIL_JSON_MSG_ID] = mail.get('Message-ID', '') + self._parsed_mail[PROC_EMAIL_JSON_SUBJECT] = mail.get("Subject", "") + self._parsed_mail[PROC_EMAIL_JSON_FROM] = mail.get("From", "") + self._parsed_mail[PROC_EMAIL_JSON_TO] = mail.get("To", "") + self._parsed_mail[PROC_EMAIL_JSON_DATE] = mail.get("Date", "") + self._parsed_mail[PROC_EMAIL_JSON_MSG_ID] = mail.get("Message-ID", "") self._parsed_mail[PROC_EMAIL_JSON_FILES] = files = [] self._parsed_mail[PROC_EMAIL_JSON_BODIES] = bodies = [] self._parsed_mail[PROC_EMAIL_JSON_START_TIME] = start_time_epoch @@ -909,10 +916,10 @@ def _handle_mail_object(self, mail, email_id, rfc822_email, tmp_dir, start_time_ add_email_id = email_id self._parse_email_headers(self._parsed_mail, part, add_email_id=add_email_id) - if message_id is None and part.get('Message-ID'): - message_id = part.get('Message-ID') + if message_id is None and part.get("Message-ID"): + message_id = part.get("Message-ID") child = False - elif message_id and part.get('Message-ID'): + elif message_id and part.get("Message-ID"): child = True self._debug_print("part: {0}".format(part.__dict__)) @@ -931,9 +938,9 @@ def _handle_mail_object(self, mail, email_id, rfc822_email, tmp_dir, start_time_ else: self._parse_email_headers(self._parsed_mail, mail, add_email_id=email_id) file_path = "{0}/part_1.text".format(tmp_dir) - with open(file_path, 'wb') as f: # noqa + with open(file_path, "wb") as f: # noqa f.write(mail.get_payload(decode=True)) - bodies.append({'file_path': file_path, 'charset': mail.get_content_charset()}) + bodies.append({"file_path": file_path, "charset": mail.get_content_charset()}) # get the container name container_name = self._get_container_name(self._parsed_mail, email_id) @@ -947,11 +954,11 @@ def _handle_mail_object(self, mail, email_id, rfc822_email, tmp_dir, start_time_ container_data = dict(self._parsed_mail) # delete the header info, we dont make it a part of the container json - 
del(container_data[PROC_EMAIL_JSON_EMAIL_HEADERS]) + del container_data[PROC_EMAIL_JSON_EMAIL_HEADERS] container.update(_container_common) - self._container['source_data_identifier'] = email_id - self._container['name'] = container_name - self._container['data'] = {'raw_email': rfc822_email} + self._container["source_data_identifier"] = email_id + self._container["name"] = container_name + self._container["data"] = {"raw_email": rfc822_email} # Create the sets before handling the bodies If both the bodies add the same ip # only one artifact should be created @@ -986,7 +993,7 @@ def _set_email_id_contains(self, email_id): return try: - email_id = self._get_string(email_id, 'utf-8') + email_id = self._get_string(email_id, "utf-8") except Exception: email_id = str(email_id) @@ -1002,7 +1009,7 @@ def _int_process_email(self, rfc822_email, email_id, start_time_epoch, ingest_em ret_val = phantom.APP_SUCCESS - tmp_dir = tempfile.mkdtemp(prefix='ph_email_phmsgo365') + tmp_dir = tempfile.mkdtemp(prefix="ph_email_phmsgo365") self._tmp_dirs.append(tmp_dir) try: @@ -1012,7 +1019,7 @@ def _int_process_email(self, rfc822_email, email_id, start_time_epoch, ingest_em self._debug_print(msg) return (phantom.APP_ERROR, msg, []) - results = [{'container': self._container, 'artifacts': self._artifacts, 'files': self._attachments, 'temp_directory': tmp_dir}] + results = [{"container": self._container, "artifacts": self._artifacts, "files": self._attachments, "temp_directory": tmp_dir}] return (ret_val, "Email Parsed", results) @@ -1048,26 +1055,17 @@ def process_email(self, rfc822_email, email_id, epoch, container_id=None, email_ def _save_ingested(self, container, using_dummy): if using_dummy: - cid = container['id'] - artifacts = container['artifacts'] + cid = container["id"] + artifacts = container["artifacts"] for artifact in artifacts: - artifact['container_id'] = cid + artifact["container_id"] = cid ret_val, msg, ids = self._base_connector.save_artifacts(artifacts) - self._base_connector.debug_print( - "save_artifacts returns, value: {0}, reason: {1}".format( - ret_val, - msg - ) - ) + self._base_connector.debug_print("save_artifacts returns, value: {0}, reason: {1}".format(ret_val, msg)) else: ret_val, msg, cid = self._base_connector.save_container(container) self._base_connector.debug_print( - "save_container (with artifacts) returns, value: {0}, reason: {1}, id: {2}".format( - ret_val, - msg, - cid - ) + "save_container (with artifacts) returns, value: {0}, reason: {1}, id: {2}".format(ret_val, msg, cid) ) return ret_val, msg, cid @@ -1080,25 +1078,25 @@ def _handle_save_ingested(self, artifacts, container, container_id, files): # We are adding artifacts to an existing container using_dummy = True container = { - 'name': 'Dummy Container', - 'dummy': True, - 'id': container_id, - 'artifacts': artifacts, + "name": "Dummy Container", + "dummy": True, + "id": container_id, + "artifacts": artifacts, } else: # Create a new container - container['artifacts'] = artifacts + container["artifacts"] = artifacts - if hasattr(self._base_connector, '_preprocess_container'): + if hasattr(self._base_connector, "_preprocess_container"): container = self._base_connector._preprocess_container(container) - for artifact in list([x for x in container.get('artifacts', []) if not x.get('source_data_identifier')]): + for artifact in list([x for x in container.get("artifacts", []) if not x.get("source_data_identifier")]): self._set_sdi(artifact) - if files and container.get('artifacts'): + if files and 
container.get("artifacts"): # Make sure the playbook only runs once # We will instead set run_automation on the last vault artifact which is added - container['artifacts'][-1]['run_automation'] = False + container["artifacts"][-1]["run_automation"] = False ret_val, msg, container_id = self._save_ingested(container, using_dummy) @@ -1117,9 +1115,7 @@ def _handle_save_ingested(self, artifacts, container, container_id, files): last_file = len(files) - 1 for i, curr_file in enumerate(files): run_automation = self._trigger_automation if i == last_file else False - ret_val, added_to_vault = self._handle_file( - curr_file, container_id, run_automation - ) + ret_val, added_to_vault = self._handle_file(curr_file, container_id, run_automation) if added_to_vault: vault_artifacts_added += 1 @@ -1141,7 +1137,7 @@ def _parse_results(self, results, container_id=None): if container_id is None: - container = result.get('container') + container = result.get("container") if not container: continue @@ -1152,7 +1148,7 @@ def _parse_results(self, results, container_id=None): container = None # run a loop to first set the sdi which will create the hash - artifacts = result.get('artifacts', []) + artifacts = result.get("artifacts", []) for j, artifact in enumerate(artifacts): if not artifact: @@ -1173,20 +1169,20 @@ def _parse_results(self, results, container_id=None): # if it is the last artifact of the last container if (j + 1) == len_artifacts: # mark it such that active playbooks get executed if trigger automation is set to True - artifact['run_automation'] = self._trigger_automation + artifact["run_automation"] = self._trigger_automation - cef_artifact = artifact.get('cef') - if 'parentGuid' in cef_artifact: - parent_guid = cef_artifact.pop('parentGuid') + cef_artifact = artifact.get("cef") + if "parentGuid" in cef_artifact: + parent_guid = cef_artifact.pop("parentGuid") if parent_guid in self._guid_to_hash: - cef_artifact['parentSourceDataIdentifier'] = self._guid_to_hash[parent_guid] - if 'emailGuid' in cef_artifact: - del cef_artifact['emailGuid'] + cef_artifact["parentSourceDataIdentifier"] = self._guid_to_hash[parent_guid] + if "emailGuid" in cef_artifact: + del cef_artifact["emailGuid"] - self._handle_save_ingested(artifacts, container, container_id, result.get('files')) + self._handle_save_ingested(artifacts, container, container_id, result.get("files")) # delete any temp directories that were created by the email parsing function - [shutil.rmtree(x['temp_directory'], ignore_errors=True) for x in results if x.get('temp_directory')] + [shutil.rmtree(x["temp_directory"], ignore_errors=True) for x in results if x.get("temp_directory")] return self._base_connector.set_status(phantom.APP_SUCCESS) @@ -1201,22 +1197,22 @@ def _add_vault_hashes_to_dictionary(self, cef_artifact, vault_id): # matching the vault id, the info that we are looking for (the hashes) # will be the same for every entry, so just access the first one try: - metadata = vault_info[0].get('metadata') + metadata = vault_info[0].get("metadata") except Exception: return (phantom.APP_ERROR, "Failed to get vault item metadata") try: - cef_artifact['fileHashSha256'] = metadata['sha256'] + cef_artifact["fileHashSha256"] = metadata["sha256"] except Exception: pass try: - cef_artifact['fileHashMd5'] = metadata['md5'] + cef_artifact["fileHashMd5"] = metadata["md5"] except Exception: pass try: - cef_artifact['fileHashSha1'] = metadata['sha1'] + cef_artifact["fileHashSha1"] = metadata["sha1"] except Exception: pass @@ -1224,9 +1220,9 @@ def 
_add_vault_hashes_to_dictionary(self, cef_artifact, vault_id): def _handle_file(self, curr_file, container_id, run_automation=False): - file_name = curr_file.get('file_name') + file_name = curr_file.get("file_name") - local_file_path = curr_file['file_path'] + local_file_path = curr_file["file_path"] contains = self._get_file_contains(local_file_path) @@ -1249,10 +1245,7 @@ def _handle_file(self, curr_file, container_id, run_automation=False): try: vault_add_success, vault_add_msg, vault_id = phantom_rules.vault_add( - file_location=local_file_path, - container=container_id, - file_name=file_name, - metadata=vault_attach_dict + file_location=local_file_path, container=container_id, file_name=file_name, metadata=vault_attach_dict ) except Exception as e: self._debug_print(phantom.APP_ERR_FILE_ADD_TO_VAULT.format(e)) @@ -1263,12 +1256,12 @@ def _handle_file(self, curr_file, container_id, run_automation=False): return (phantom.APP_ERROR, phantom.APP_ERROR) # add the vault id artifact to the container - cef_artifact = curr_file.get('meta_info', {}) + cef_artifact = curr_file.get("meta_info", {}) if file_name: - cef_artifact.update({'fileName': file_name}) + cef_artifact.update({"fileName": file_name}) if vault_id: - cef_artifact.update({'vaultId': vault_id}) + cef_artifact.update({"vaultId": vault_id}) # now get the rest of the hashes and add them to the cef artifact self._add_vault_hashes_to_dictionary(cef_artifact, vault_id) @@ -1278,17 +1271,17 @@ def _handle_file(self, curr_file, container_id, run_automation=False): artifact = {} artifact.update(_artifact_common) - artifact['container_id'] = container_id - artifact['name'] = 'Vault Artifact' - artifact['cef'] = cef_artifact - artifact['run_automation'] = run_automation + artifact["container_id"] = container_id + artifact["name"] = "Vault Artifact" + artifact["cef"] = cef_artifact + artifact["run_automation"] = run_automation if contains: - artifact['cef_types'] = {'vaultId': contains} + artifact["cef_types"] = {"vaultId": contains} self._set_sdi(artifact) - if 'parentGuid' in cef_artifact: - parent_guid = cef_artifact.pop('parentGuid') - cef_artifact['parentSourceDataIdentifier'] = self._guid_to_hash[parent_guid] + if "parentGuid" in cef_artifact: + parent_guid = cef_artifact.pop("parentGuid") + cef_artifact["parentSourceDataIdentifier"] = self._guid_to_hash[parent_guid] ret_val, status_string, artifact_id = self._base_connector.save_artifact(artifact) self._base_connector.debug_print("save_artifact returns, value: {0}, reason: {1}, id: {2}".format(ret_val, status_string, artifact_id)) @@ -1297,30 +1290,30 @@ def _handle_file(self, curr_file, container_id, run_automation=False): def _set_sdi(self, input_dict): - if 'source_data_identifier' in input_dict: - del input_dict['source_data_identifier'] + if "source_data_identifier" in input_dict: + del input_dict["source_data_identifier"] input_dict_hash = input_dict - cef = input_dict.get('cef') + cef = input_dict.get("cef") curr_email_guid = None if cef is not None: - if 'parentGuid' in cef or 'emailGuid' in cef: + if "parentGuid" in cef or "emailGuid" in cef: # make a copy since the dictionary will have to be different input_dict_hash = deepcopy(input_dict) - cef = input_dict_hash['cef'] - if 'parentGuid' in cef: - del cef['parentGuid'] - curr_email_guid = cef.get('emailGuid') + cef = input_dict_hash["cef"] + if "parentGuid" in cef: + del cef["parentGuid"] + curr_email_guid = cef.get("emailGuid") if curr_email_guid is not None: - del cef['emailGuid'] + del cef["emailGuid"] - 
input_dict['source_data_identifier'] = self._create_dict_hash(input_dict_hash) + input_dict["source_data_identifier"] = self._create_dict_hash(input_dict_hash) if curr_email_guid: - self._guid_to_hash[curr_email_guid] = input_dict['source_data_identifier'] + self._guid_to_hash[curr_email_guid] = input_dict["source_data_identifier"] return phantom.APP_SUCCESS @@ -1334,14 +1327,14 @@ def _create_dict_hash(self, input_dict): try: input_dict_str = json.dumps(input_dict, sort_keys=True) except Exception as e: - self._base_connector.debug_print('Handled exception in _create_dict_hash', e) + self._base_connector.debug_print("Handled exception in _create_dict_hash", e) return None fips_enabled = self._base_connector._get_fips_enabled() if not fips_enabled: - return hashlib.md5(UnicodeDammit(input_dict_str).unicode_markup.encode('utf-8')).hexdigest() # nosemgrep + return hashlib.md5(UnicodeDammit(input_dict_str).unicode_markup.encode("utf-8")).hexdigest() # nosemgrep - return hashlib.sha256(UnicodeDammit(input_dict_str).unicode_markup.encode('utf-8')).hexdigest() + return hashlib.sha256(UnicodeDammit(input_dict_str).unicode_markup.encode("utf-8")).hexdigest() def _del_tmp_dirs(self): """Remove any tmp_dirs that were created.""" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..4c594fc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,8 @@ +[tool.black] +line-length = 145 +target-version = ['py39'] +verbose = true + +[tool.isort] +line_length = 145 +profile = "black" diff --git a/release_notes/3.0.1.md b/release_notes/3.0.1.md new file mode 100644 index 0000000..ad146f5 --- /dev/null +++ b/release_notes/3.0.1.md @@ -0,0 +1,2 @@ +* added the 'update email' action [PAPP-34617] +* added 'resolve name', 'unblock sender' and 'block sender' actions [PAPP-34619] \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index d9e3a42..4266143 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1 @@ -beautifulsoup4==4.9.1 python-magic==0.4.18 diff --git a/tox.ini b/tox.ini index c4644ad..720a141 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,4 @@ [flake8] max-line-length = 145 max-complexity = 28 -extend-ignore = F403,E128,E126,E111,E121,E127,E731,E201,E202,F405,E722,D,W292 - -[isort] -line_length = 145 +extend-ignore = F403,E128,E126,E121,E127,E731,E201,E202,E203,E701,F405,E722,D,W503 diff --git a/wheels/py3/beautifulsoup4-4.9.1-py3-none-any.whl b/wheels/py3/beautifulsoup4-4.9.1-py3-none-any.whl deleted file mode 100644 index 080b2f8..0000000 Binary files a/wheels/py3/beautifulsoup4-4.9.1-py3-none-any.whl and /dev/null differ diff --git a/wheels/py3/soupsieve-2.3.2.post1-py3-none-any.whl b/wheels/py3/soupsieve-2.3.2.post1-py3-none-any.whl deleted file mode 100644 index b363a9b..0000000 Binary files a/wheels/py3/soupsieve-2.3.2.post1-py3-none-any.whl and /dev/null differ diff --git a/wheels/py3/soupsieve-2.5-py3-none-any.whl b/wheels/py3/soupsieve-2.5-py3-none-any.whl deleted file mode 100644 index e1be128..0000000 Binary files a/wheels/py3/soupsieve-2.5-py3-none-any.whl and /dev/null differ
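
For reference, the artifact deduplication that `_set_sdi` relies on (see the `_create_dict_hash` hunk in process_email.py above) reduces to hashing a canonical JSON serialization of the artifact dict. The following is a minimal standalone sketch of that idea, not part of the patch: it omits the connector's UnicodeDammit normalization, and the `fips_enabled` argument here is a stand-in for the connector's `self._base_connector._get_fips_enabled()` lookup.

import hashlib
import json

def dict_hash(artifact: dict, fips_enabled: bool = False) -> str:
    # sort_keys gives a canonical serialization (all nested dict keys sorted),
    # so logically equal artifacts always map to the same source_data_identifier
    serialized = json.dumps(artifact, sort_keys=True)
    digest = hashlib.sha256 if fips_enabled else hashlib.md5  # md5 is disallowed under FIPS
    return digest(serialized.encode("utf-8")).hexdigest()

# Two dicts with identical content, in any key order, hash identically
a = dict_hash({"name": "Email Artifact", "cef": {"fromEmail": "user@example.com"}})
b = dict_hash({"cef": {"fromEmail": "user@example.com"}, "name": "Email Artifact"})
assert a == b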