Esempio n. 1
0
class Automation():
    """Handles Responsible Disclosure webhooks from TheHive: when an RD
    alert is imported, downloads the originating e-mail's attachments and
    adds them to the case as file observables."""

    def __init__(self, webhook, cfg):
        """
        :param webhook: Webhook object wrapping TheHive's webhook payload
        :param cfg: Synapse's config (ConfigParser)
        """
        self.logger = logger
        self.logger.info('Initiating RD Automation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        self.report_action = report_action
        self.RDConnector = RDConnector(cfg)

    def parse_hooks(self):
        """Process an 'RD alert imported' webhook.

        Extracts the mail message id from the case description, fetches the
        mail's attachments and uploads each one as a case observable.

        :return: True on success, False otherwise
        :rtype: bool
        """
        self.logger.info(f'{__name__}.parse_hooks starts')

        # Only continue if the right webhook is triggered
        if not self.webhook.isResponsibleDisclosureAlertImported():
            return False
        try:
            self.case_id = self.webhook.data['object']['case']

            # Parse the Msg ID field from the webhook (a markdown table cell)
            self.email_id = re.search(r"Msg ID[\s\S]+?\|\s+(\S+)\s+\|",
                                      str(self.webhook.data['object']))
            self.logger.debug(f"regex match {self.email_id.group(1)}")

            # Get all the attachments and upload them to the hive observables
            attachment_data = self.RDConnector.listAttachment(
                self.cfg.get('ResponsibleDisclosure', 'email_address'),
                self.email_id.group(1))

            all_attachments = []

            if attachment_data:
                for att in attachment_data:
                    # BUGFIX: file_name must be bound before the finally
                    # block runs; if downloadAttachments raised, the old
                    # code hit a NameError inside finally.
                    file_name = None
                    try:
                        file_name = self.RDConnector.downloadAttachments(
                            att['name'], att['attachment_id'], att['isInline'],
                            att['contentType'])
                        all_attachments.append(file_name)
                        self.TheHiveConnector.addFileObservable(
                            self.case_id, file_name, "email attachment")
                        self.logger.info(
                            f"Observable: {file_name} has been updated to Case: {self.case_id}"
                        )

                    except Exception as e:
                        self.logger.error(e, exc_info=True)
                        continue
                    finally:
                        # Remove the temp file only when it was created
                        if file_name is not None:
                            os.remove(file_name)
                            self.logger.debug(
                                f"File: {file_name} has been removed")

            return True
        except Exception as e:
            self.logger.error(e, exc_info=True)
            return False
Esempio n. 2
0
 def __init__(self, webhook, cfg):
     """Store the webhook/config references and build the connectors."""
     logger.info('Initiating QRadarAutomation')
     self.webhook = webhook
     self.cfg = cfg
     self.report_action = report_action
     self.TheHiveConnector = TheHiveConnector(cfg)
     self.QRadarConnector = QRadarConnector(cfg)
Esempio n. 3
0
    def __init__(self, webhook, cfg, automation_config):
        """
            Class constructor

            :param webhook: the webhook object to process
            :param cfg: Synapse's config
            :type cfg: ConfigParser
            :param automation_config: automation ruleset for this integration
        """
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating Siem Integration')

        self.cfg = cfg
        self.app_dir = os.path.dirname(os.path.abspath(__file__)) + "/.."
        self.automation_config = automation_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.webhook = webhook

        if cfg.getboolean('Automation', 'enable_customer_list',
                          fallback=False):
            self.logger.info('Loading Customer configuration')
            # Load optional customer config; option values may be
            # ';'-separated lists, exposed via the 'list' converter
            self.customer_cfg = ConfigParser(
                converters={
                    'list': lambda x: [i.strip() for i in x.split(';')]
                })
            self.confPath = self.app_dir + '/conf/customers.conf'
            try:
                self.logger.debug('Loading configuration from %s' %
                                  self.confPath)
                self.customer_cfg.read(self.confPath)
                self.customers = self.customer_cfg.sections()
                self.logger.debug('Loaded configuration for %s' %
                                  self.customers)
            except Exception as e:
                # BUGFIX: the original logged the module name ('%s', __name__)
                # and ignored the exception it had just caught.
                self.logger.error('Failed to load customer configuration: %s',
                                  e, exc_info=True)
Esempio n. 4
0
 def __init__(self, webhook, cfg):
     """Prepare the Responsible Disclosure automation helpers."""
     self.logger = logger
     self.logger.info('Initiating RD Automation')
     self.webhook = webhook
     self.cfg = cfg
     self.report_action = report_action
     self.TheHiveConnector = TheHiveConnector(cfg)
     self.RDConnector = RDConnector(cfg)
Esempio n. 5
0
    def __init__(self, cfg, use_case_config):
        """Set up the QRadar automators and their connectors."""
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating QRadar Automators')

        self.use_case_config = use_case_config
        self.cfg = cfg
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.TheHiveAutomators = TheHiveAutomators(cfg, use_case_config)
        self.QRadarConnector = QRadarConnector(cfg)
Esempio n. 6
0
 def __init__(self, webhook, cfg):
     """Initialise the MISP automation: connectors, webhook reference and
     the QRadar option map copied from the config."""
     logger.info('Initiating MISPautomation')
     # BUGFIX: self.cfg was read below but never assigned, which raised
     # AttributeError on the Cortex check.
     self.cfg = cfg
     self.TheHiveConnector = TheHiveConnector(cfg)
     if self.cfg.getboolean('Cortex', 'enabled'):
         self.CortexConnector = CortexConnector(cfg)
     self.webhook = webhook
     self.report_action = report_action
     # Copy every option of the [QRadar] section into a plain dict
     self.qr_config = {}
     for key, value in cfg.items('QRadar'):
         self.qr_config[key] = value
Esempio n. 7
0
    def __init__(self, cfg, use_case_config):
        """Initialise TheHive automator and, when enabled, Cortex."""
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating The Hive Automator')

        self.cfg = cfg
        self.TheHiveConnector = TheHiveConnector(cfg)
        # Cortex is optional; only build its connector when switched on
        if cfg.getboolean('Cortex', 'enabled'):
            self.CortexConnector = CortexConnector(cfg)

        # Read mail config
        self.mailsettings = cfg.get('TheHive', 'mail')
Esempio n. 8
0
 def __init__(self, webhook, cfg):
     """Wire up TheHive and Azure Sentinel connectors for this webhook."""
     logger.info('Initiating AzureSentinel Automation')
     self.webhook = webhook
     self.cfg = cfg
     self.report_action = report_action
     self.TheHiveConnector = TheHiveConnector(cfg)
     self.AzureSentinelConnector = AzureSentinelConnector(cfg)
     # NOTE(review): presumably maps TheHive resolution statuses to Azure
     # Sentinel classification values — confirm against the consumer.
     self.closure_status = {
         "Indeterminate": "Undetermined",
         "FalsePositive": "FalsePositive",
         "TruePositive": "TruePositive",
         "Other": "BenignPositive"
     }
Esempio n. 9
0
 def __init__(self, webhook, cfg):
     """Store references and create the TheHive/Azure Sentinel connectors."""
     logger.info('Initiating AzureSentinel Automation')
     self.webhook = webhook
     self.cfg = cfg
     self.report_action = report_action
     self.TheHiveConnector = TheHiveConnector(cfg)
     self.AzureSentinelConnector = AzureSentinelConnector(cfg)
Esempio n. 10
0
    def __init__(self, webhookData, cfg):
        """
            Class constructor

            :param cfg: Synapse's config
            :type cfg: ConfigParser

            :param webhookData: the json webhook from TheHive
            :type webhookData: dict

            :return: Object Webhook
            :rtype: Webhook
        """

        self.logger = logging.getLogger('workflows.' + __name__)
        # sha1 of the serialized payload serves as a stable id; json.dumps
        # is needed to turn the dict into a byte string first
        serialized = json.dumps(webhookData).encode('utf-8')
        self.id = hashlib.sha1(serialized).hexdigest()
        self.data = webhookData
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.ext_alert_ids = []
Esempio n. 11
0
def connectEws():
    """Scan the EWS mailbox and create/update TheHive cases from unread
    e-mails, one case per mail conversation.

    Each message body is appended as a task log on the case's
    'Communication' task and non-inline attachments are uploaded as file
    observables.

    :return: report dict with a 'success' boolean
    :rtype: dict
    """
    logger = logging.getLogger(__name__)
    logger.info('%s.connectEws starts', __name__)

    report = dict()
    report['success'] = bool()

    try:
        cfg = getConf()

        ewsConnector = EwsConnector(cfg)
        folder_name = cfg.get('EWS', 'folder_name')
        unread = ewsConnector.scan(folder_name)

        # BUGFIX: the original assigned to the name 'TheHiveConnector',
        # making it a function-local variable, so the right-hand call
        # raised UnboundLocalError before the connector was ever built.
        theHiveConnector = TheHiveConnector(cfg)

        for msg in unread:
            # msg is an exchangelib Message
            conversationId = msg.conversation_id.id

            # Search if a case has already been created from this email
            # conversation
            esCaseId = theHiveConnector.searchCaseByDescription(conversationId)

            if esCaseId is None:
                # No case previously created from the conversation
                caseTitle = str(msg.subject)
                caseDescription = ('```\n' + 'Case created by Synapse\n' +
                                   'conversation_id: "' +
                                   str(msg.conversation_id.id) + '"\n' + '```')
                if msg.categories:
                    assignee = msg.categories[0]
                else:
                    assignee = 'synapse'

                case = theHiveConnector.craftCase(caseTitle, caseDescription)
                createdCase = theHiveConnector.createCase(case)
                caseUpdated = theHiveConnector.assignCase(
                    createdCase, assignee)

                commTask = theHiveConnector.craftCommTask()
                esCaseId = caseUpdated.id
                commTaskId = theHiveConnector.createTask(esCaseId, commTask)

            else:
                # Case previously created from the conversation
                commTaskId = theHiveConnector.getTaskIdByTitle(
                    esCaseId, 'Communication')

                if commTaskId is None:
                    # Case already exists but no Communication task found:
                    # create the comm task
                    commTask = theHiveConnector.craftCommTask()
                    commTaskId = theHiveConnector.createTask(
                        esCaseId, commTask)

            fullBody = getEmailBody(msg)
            taskLog = theHiveConnector.craftTaskLog(fullBody)
            theHiveConnector.addTaskLog(commTaskId, taskLog)

            ewsConnector.markAsRead(msg)

            for attachmentLvl1 in msg.attachments:
                # Upload each attachment as a file observable; if the
                # attachment is a .msg, the eml version of the file is
                # uploaded instead
                tempAttachment = TempAttachment(attachmentLvl1)

                if not tempAttachment.isInline:
                    # Adding the attachment only if it is not inline;
                    # inline attachments are pictures in the email body
                    tmpFilepath = tempAttachment.writeFile()
                    to = str()
                    for recipient in msg.to_recipients:
                        to = to + recipient.email_address + ' '
                    comment = 'Attachment from email sent by '
                    comment += str(msg.author.email_address).lower()
                    comment += ' and received by '
                    comment += str(to).lower()
                    comment += ' with subject: <'
                    comment += msg.subject
                    comment += '>'
                    theHiveConnector.addFileObservable(esCaseId, tmpFilepath,
                                                       comment)

                    if tempAttachment.isEmailAttachment:
                        # If the attachment is an email, attachments of
                        # this email are also uploaded to TheHive
                        for attachmentLvl2 in tempAttachment.attachments:
                            tempAttachmentLvl2 = TempAttachment(attachmentLvl2)
                            tmpFilepath = tempAttachmentLvl2.writeFile()
                            comment = 'Attachment from the email attached'
                            theHiveConnector.addFileObservable(
                                esCaseId, tmpFilepath, comment)

        report['success'] = True
        return report

    except Exception:
        logger.error('Failed to create case from email', exc_info=True)
        report['success'] = False
        return report
Esempio n. 12
0
class Integration(Main):
    """Responsible Disclosure integration: polls the RD mailbox via MS
    Graph and mirrors each e-mail into TheHive as an alert, or updates the
    existing alert/case when the conversation is already known."""

    def __init__(self):
        super().__init__()
        self.RDConnector = RDConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        """API entry point: run the workflow and map its report onto an
        HTTP response tuple.

        :return: (json body, status code)
        :rtype: tuple
        """
        workflowReport = self.connectRD()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectRD(self):
        """Scan the RD mailbox and create/update TheHive alerts and cases.

        :return: report dict with a 'success' boolean
        :rtype: dict
        """
        self.logger.info('%s.connectResponsibleDisclosure starts', __name__)

        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['Responsible disclosure', 'Synapse']

        # The tracker file persists the Graph delta link between runs
        tracker_file = "./modules/ResponsibleDisclosure/email_tracker"
        link_to_load = ""
        if os.path.exists(tracker_file):
            self.logger.debug("Reading from the tracker file...")
            with open(tracker_file, "r") as tracker:
                link_to_load = tracker.read()

        if not link_to_load:
            link_to_load = self.cfg.get('ResponsibleDisclosure', 'list_endpoint')

        emails, new_link = self.RDConnector.scan(link_to_load)

        try:
            for email in emails:
                try:
                    # BUGFIX: the sender address was wrapped in a list
                    # ([addr] in <str>), which raises TypeError and can
                    # never match; compare the address string itself.
                    if ('@removed' in email) or email["from"]["emailAddress"]["address"] in self.cfg.get('ResponsibleDisclosure', 'excluded_senders'):
                        continue
                    self.logger.debug("Found unread E-mail with id: {}".format(email['id']))

                    # Get the conversation id from the email
                    CID = email["conversationId"]
                    # Conversation id hash will be used as a unique identifier for the alert
                    CIDHash = hashlib.md5(CID.encode()).hexdigest()

                    email_date = datetime.strptime(email["receivedDateTime"], "%Y-%m-%dT%H:%M:%SZ")
                    epoch_email_date = email_date.timestamp() * 1000

                    alertTitle = "Responsible Disclosure - {}".format(email["subject"])

                    alertDescription = self.createDescription(email)

                    # Moving the email from Inbox to the new folder defined by variable to_move_folder in synapse.conf
                    # Disabled temporarily
                    # self.RDConnector.moveToFolder(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'], self.cfg.get('ResponsibleDisclosure', 'to_move_folder'))

                    # Get all the attachments and upload to the hive observables
                    attachment_data = self.RDConnector.listAttachment(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'])

                    all_artifacts = []
                    all_attachments = []

                    if attachment_data:
                        for att in attachment_data:
                            file_name = self.RDConnector.downloadAttachments(att['name'], att['attachment_id'], att['isInline'], att['contentType'])
                            all_attachments.append(file_name)

                            self.af = AlertArtifact(dataType='file', data=file_name, tlp=2, tags=['Responsible disclosure', 'Synapse'], ioc=True)

                            all_artifacts.append(self.af)

                    # Create the alert in thehive
                    alert = self.TheHiveConnector.craftAlert(
                        alertTitle,
                        alertDescription,
                        1,
                        epoch_email_date,
                        self.tags, 2,
                        "New",
                        "internal",
                        "ResponsibleDisclosure",
                        CIDHash,
                        all_artifacts,
                        self.cfg.get('ResponsibleDisclosure', 'case_template'))

                    # Check if the alert was created successfully
                    query = dict()
                    query['sourceRef'] = str(CIDHash)

                    # Look up if any existing alert in theHive
                    alert_results = self.TheHiveConnector.findAlert(query)

                    # If no alerts are found for corresponding CIDHASH, create a new alert
                    if len(alert_results) == 0:
                        createdAlert = self.TheHiveConnector.createAlert(alert)

                        # Auto-reply to the original email sender from the
                        # responsible disclosure email address
                        autoreply_subject_name = "RE: {}".format(email["subject"])

                        self.RDConnector.sendAutoReply("*****@*****.**", email["from"]["emailAddress"]["address"], self.cfg.get('ResponsibleDisclosure', 'email_body_filepath'), autoreply_subject_name)

                    # If alert is found update the alert or it may have been migrated to case so update the case
                    if len(alert_results) > 0:
                        alert_found = alert_results[0]

                        # Check if alert is promoted to a case
                        if 'case' in alert_found:

                            case_found = self.TheHiveConnector.getCase(alert_found['case'])

                            # BUGFIX: instantiate a Case model; the original
                            # assigned the Case class itself and mutated
                            # shared class attributes.
                            self.updated_case = Case()

                            # Update the case with new description
                            # What if the email body is empty for new email, then use the old description
                            self.updated_case.description = case_found['description'] + "\n\n" + alertDescription

                            self.updated_case.id = alert_found['case']
                            self.TheHiveConnector.updateCase(self.updated_case, ["description"])
                            self.logger.info("updated the description of the case with id: {}".format(alert_found['case']))

                            # Check if there new observables available
                            if all_attachments:
                                for att in all_attachments:
                                    try:
                                        self.TheHiveConnector.addFileObservable(alert_found['case'], att, "email attachment")
                                    except Exception as e:
                                        self.logger.error(f"Encountered an error while creating a new file based observable: {e}", exc_info=True)
                                        continue
                        # Else it means there is no corresponding case so update the alert
                        else:
                            # BUGFIX: instantiate an Alert model (same class
                            # vs instance defect as above).
                            self.updated_alert = Alert()

                            # Update the alert with new description
                            # What if the email body is empty for new email, then use the old description
                            self.updated_alert.description = alert_found['description'] + "\n\n" + alertDescription

                            self.TheHiveConnector.updateAlert(alert_found['id'], self.updated_alert, ["description"])
                            self.logger.info("updated the description of the alert with id: {}".format(alert_found['id']))
                except Exception as e:
                    self.logger.error(e, exc_info=True)
                    continue

                # Remove the downloaded attachment temp files
                if all_attachments:
                    for att in all_attachments:
                        os.remove(att)

            # Write the delta link to the tracker
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error(e)
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createDescription(self, email):
        """Build the markdown alert description for *email*.

        :param email: MS Graph message resource (dict)
        :return: markdown description
        :rtype: str
        """

        email_body = email['body']['content']
        subject = email["subject"]
        # Get the conversation id from the email
        CID = email["conversationId"]
        # Conversation id hash will be used as a unique identifier for the alert
        CIDHash = hashlib.md5(CID.encode()).hexdigest()

        # Parse all the URLs and add them to a field in the description table
        urls_list = re.findall(r'\<(https?://[\S]+?)\>', email_body)
        # "&#13;" is ascii for next line
        urls_str = ' &#13; '.join(str(x) for x in urls_list)

        from_e = email["from"]["emailAddress"]["address"]
        to_e = "N/A"
        if email["toRecipients"]:
            to_e = email["toRecipients"][0]["emailAddress"]["address"]

        OriginatingIP = "N/A"
        for header in email['internetMessageHeaders']:
            if header['name'] == 'X-Originating-IP':
                # Formatting the ip value, bydefault it comesup like [x.x.x.x]
                OriginatingIP = (header['value'][1:-1])

        # putting together the markdown table
        temp_fullbody = []
        temp_fullbody.append("|     |     |")
        temp_fullbody.append("|:-----|:-----|")
        temp_fullbody.append("|  " + "**" + "Subject" + "**" + "  |  " + subject + "  |")
        temp_fullbody.append("|  " + "**" + "Sender" + "**" + "  |  " + from_e + "  |")
        temp_fullbody.append("|  " + "**" + "Recipient" + "**" + "  |  " + to_e + "  |")
        temp_fullbody.append("|  " + "**" + "Originating IP" + "**" + "  |  " + OriginatingIP + "  |")
        temp_fullbody.append("|  " + "**" + "Received at" + "**" + "  |  " + email["receivedDateTime"] + "  |")
        temp_fullbody.append("|  " + "**" + "URL(s) in email" + "**" + "  |  " + urls_str + "  |")
        temp_fullbody.append("|  " + "**" + "Msg ID" + "**" + "  |  " + email['id'] + "  |")
        temp_fullbody.append("**" + "Email body" + "**")
        temp_fullbody.append("```")
        temp_fullbody.append(email_body)
        temp_fullbody.append("```")

        alertDescription = '\r\n'.join(str(x) for x in temp_fullbody)
        return alertDescription
Esempio n. 13
0
class Webhook:
    'Webhook class to identify where the webhook comes from, usual case, QRadar, etc..'

    def __init__(self, webhookData, cfg):
        """
            Class constructor

            :param cfg: Synapse's config
            :type cfg: ConfigParser

            :param webhookData: the json webhook from TheHive
            :type webhookData: dict

            :return: Object Webhook
            :rtype: Webhook
        """

        self.logger = logging.getLogger('workflows.' + __name__)
        # sha1 of the serialized payload gives a stable id; json.dumps is
        # used to turn the dict into a byte string first
        serialized = json.dumps(webhookData).encode('utf-8')
        self.id = hashlib.sha1(serialized).hexdigest()
        self.data = webhookData
        self.theHiveConnector = TheHiveConnector(cfg)
        self.offenseIds = []

    def isAlert(self):
        """
            Check if the webhook describes an alert

            :return: True if it is an alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAlert starts', __name__)

        if self.data['objectType'] == 'alert':
            return True
        else:
            return False

    def isCase(self):
        """
            Check if the webhook describes a case

            :return: True if it is a case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isCase starts', __name__)

        if self.data['objectType'] == 'case':
            return True
        else:
            return False

    def isArtifact(self):
        """
            Check if the webhook describes an artifact

            :return: True if it is an artifact, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isArtifact starts', __name__)

        if self.data['objectType'] == 'case_artifact':
            return True
        else:
            return False

    def isNewArtifact(self):
        """
            Check if the webhook describes a artifact that is created

            :return: True if it is a artifact created, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewArtifact starts', __name__)

        if (self.isArtifact() and self.isNew()):
            return True
        return False

    def isCaseArtifactJob(self):
        """
            Check if the webhook describes a case artifact job

            :return: True if it is a case artifact job, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isCaseArtifactJob starts', __name__)

        if self.data['objectType'] == 'case_artifact_job':
            return True
        else:
            return False

    def isNew(self):
        """
            Check if the webhook describes a new item

            :return: True if it is new, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNew starts', __name__)

        if self.data['operation'] == 'Creation':
            return True
        else:
            return False

    def isUpdate(self):
        """
            Check if the webhook describes an update

            :return: True if it is an update, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isUpdate starts', __name__)

        if self.data['operation'] == 'Update':
            return True
        else:
            return False

    def isMarkedAsRead(self):
        """
            Check if the webhook describes an marked as read alert

            :return: True if it is marked as read, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isMarkedAsRead starts', __name__)

        try:
            if self.data['details']['status'] == 'Ignored':
                return True
            else:
                return False
        except KeyError:
            # when the alert is ignored (ignore new updates), the webhook does
            # not have the status key, this exception handles that
            return False

    def isClosed(self):
        """
            Check if the webhook describes a closing event
            if it returns false, it doesn't mean that the case is open
            if a case is already closed, and a user update something
            the webhook will not describe a closing event but an update

            :return: True if it is a closing event, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosed starts', __name__)

        try:
            if self.data['details']['status'] == 'Resolved':
                return True
            else:
                return False
        except KeyError:
            # happens when the case is already closed
            # and user updates the case with a custom field (for example)
            # then status key is not included in the webhook
            return False

    def isDeleted(self):
        """
            Check if the webhook describes a deleted event
            if it returns false, it doesn't mean that the case is
            not deleted. It might already be deleted.

            :return: True if it is a deleting event, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeleted starts', __name__)

        if self.data['operation'] == 'Delete':
            return True
        else:
            return False

    def isMergedInto(self):
        """
            Check if the webhook describes a case merging

            :return: True if it is a merging event
            :rtype: boolean
        """

        self.logger.debug('%s.isMergedInto starts', __name__)

        if 'mergeInto' in self.data['object']:
            return True
        else:
            return False

    def isFromMergedCases(self):
        """
            Check if the webhook describes a case that comes from a merging action

            :return: True if it is case the comes from a merging action
            :rtype: boolean
        """

        self.logger.debug('%s.isFromMergedCases starts', __name__)

        if 'mergeFrom' in self.data['object']:
            return True
        else:
            return False

    def isSuccess(self):
        """
            Check if the webhook describes a successful action

            :return: True if it is a successful action, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isSuccess starts', __name__)

        if self.data['details']['status'] == "Success":
            return True
        else:
            return False

    def isNewAlert(self):
        """
            Check if the webhook describes a new alert.

            :return: True if it is a new alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewAlert starts', __name__)

        if (self.isAlert() and self.isNew()):
            return True
        else:
            return False

    def isImportedAlert(self):
        """
            Check if the webhook describes an imported alert.

            :return: True if it is an imported alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isImportedAlert starts', __name__)

        if (self.isAlert() and self.isUpdate()
                and 'status' in self.data['details']
                and self.data['details']['status'] == 'Imported'):
            return True
        else:
            return False

    def isFromAlert(self, esCaseId):
        """
            For a given esCaseId, search if the case has been opened from
            a QRadar offense, if so adds the offenseId attribute to this object

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if it is a QRadar case, false if not
            :rtype: bool
        """

        query = dict()
        query['case'] = esCaseId
        results = self.theHiveConnector.findAlert(query)

        if len(results) == 1:
            # Case is based on a single alert
            self.alert = results[0]
            return True
        elif len(results) > 1:
            # Case is based on multiple alerts
            self.alerts = results
            return True
        else:
            return False

    def isNewCase(self):
        """
            Check if the webhook describes a new case.

            :return: True if it is a new case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewCase starts', __name__)

        if (self.isCase() and self.isNew()):
            return True
        else:
            return False

    def isQRadar(self):
        """
            Check if the webhook describes a QRadar Offense

            :return: True if it is a QRadar Offense, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadar starts', __name__)

        if ('tags' in self.data['details']
                and 'QRadar' in self.data['details']['tags']) or (
                    'tags' in self.data['object']
                    and 'QRadar' in self.data['object']['tags']):
            return True
        else:
            return False

    def isQRadarAlertImported(self):
        """
            Check if the webhook describes an Imported QRadar alert

            :return: True if it is a QRadar alert is imported, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertImported starts', __name__)

        if (self.isImportedAlert() and self.isQRadar()):
            return True
        else:
            return False

    def isQRadarAlertUpdateFollowTrue(self):
        """
            Check if the webhook describes an update to a QRadar alert
            whose 'follow' flag is still set to True.

            :return: True if it is an updated QRadar alert with follow
                enabled, False if not
            :rtype: boolean
        """

        # Fixed copy-pasted log message (previously logged
        # 'isQRadarAlertImported starts').
        self.logger.debug('%s.isQRadarAlertUpdateFollowTrue starts', __name__)

        if (self.isAlert() and self.isUpdate() and self.isQRadar()
                and 'follow' in self.data['details']
                and self.data['details']['follow']):
            return True
        else:
            return False

    def isQRadarAlertWithArtifacts(self):
        """
            Check if the webhook describes a QRadar alert carrying both
            artifacts and case information.

            :return: True if it is a QRadar alert containing artifacts,
                False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertWithArtifacts starts', __name__)

        # Must be a QRadar alert AND carry artifact data AND reference a case.
        if not (self.isAlert() and self.isQRadar()):
            return False
        if 'artifacts' not in self.data['details']:
            return False
        if 'case' not in self.data['object']:
            return False
        return True

    def isQRadarAlertMarkedAsRead(self):
        """
            Check if the webhook describes a QRadar alert marked as read;
            "store" the offenseId in the webhook attribute "offenseId".

            :return: True if it is a QRadar alert marked as read, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertMarkedAsRead starts', __name__)

        if not (self.isAlert() and self.isMarkedAsRead()):
            return False
        # 'QRadar_Offenses' is the source value hardcoded at alert
        # creation by the QRadar2alert workflow.
        if self.data['object']['source'] != 'QRadar_Offenses':
            return False
        self.offenseId = self.data['object']['sourceRef']
        return True

    def isNewQRadarCase(self):
        """
            Check if the webhook describes a new QRadar case, i.e. a case
            opened from a QRadar alert.

            :return: True if it is a new QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewQRadarCase starts', __name__)

        # QRadar-tagged AND a case AND newly created.
        return bool(self.isQRadar() and self.isCase() and self.isNew())

    def isUpdateQRadarCase(self):
        """
            Check if the webhook describes an update to a QRadar case
            (a case that was opened from a QRadar alert).

            :return: True if it is an updated QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isUpdateQRadarCase starts', __name__)

        # QRadar-tagged AND a case AND an update event.
        if self.isQRadar() and self.isCase() and self.isUpdate():
            return True
        else:
            return False

    def isClosedQRadarCase(self):
        """
            Check if the webhook describes a closing QRadar case,
            returns True if the case has been opened from a QRadar alert.
            "store" the offenseId in the webhook attribute "offenseId"
            (set by fromQRadar).

            :return: True if it is a closed QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosedQRadarCase starts', __name__)

        try:
            if self.isCase() and self.isClosed():
                # searching in alerts if the case comes from a QRadar alert
                esCaseId = self.data['objectId']
                if self.fromQRadar(esCaseId):
                    return True
                # Fixed: previously this path fell through and implicitly
                # returned None; return an explicit False to honour the
                # documented boolean contract.
                # NOTE(review): unlike isClosedAzureSentinelCase, merged
                # cases are not inspected here and isMergedInto() is not
                # checked -- confirm this is intended.
                return False
            else:
                # not a case, or it had not been closed when the webhook
                # was issued (might be open or already closed)
                return False

        except Exception:
            self.logger.error('%s.isClosedQRadarCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isDeletedQRadarCase(self):
        """
            Check if the webhook describes deleting a QRadar case.

            "store" the offenseId in the webhook attribute "offenseId"
            (set by fromQRadar)

            :return: True if it is deleting a QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeletedQRadarCase starts', __name__)

        try:
            if self.isCase() and self.isDeleted():
                # searching in alerts if the case comes from a QRadar alert
                esCaseId = self.data['objectId']
                if self.fromQRadar(esCaseId):
                    return True
                # Fixed: previously this path fell through and implicitly
                # returned None; return an explicit False to honour the
                # documented boolean contract.
                return False
            else:
                # not a case, or it had not been deleted when the webhook
                # was issued
                return False

        except Exception:
            self.logger.error('%s.isDeletedQRadarCase failed',
                              __name__,
                              exc_info=True)
            raise

    def fromQRadar(self, esCaseId):
        """
            For a given esCaseId, search if the case has been opened from
            a QRadar offense; if so adds the offenseId attribute to this
            object (or appends to offenseIds for multi-alert cases).

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if it is a QRadar case, False if not
            :rtype: bool
        """

        if not self.isFromAlert(esCaseId):
            return False

        if hasattr(self, 'alert') and self.alert['source'] == 'QRadar_Offenses':
            # case opened from a single alert coming from QRadar
            self.offenseId = self.alert['sourceRef']
            return True

        if hasattr(self, 'alerts'):
            # case built from several alerts: collect every QRadar offense
            # NOTE(review): self.offenseIds is assumed to be initialised
            # elsewhere (e.g. in __init__) -- confirm against full source.
            for alert in self.alerts:
                if alert['source'] == 'QRadar_Offenses':
                    self.offenseIds.append(alert['sourceRef'])
            if len(self.offenseIds) > 0:
                return True
            # Fixed: previously this path implicitly returned None when
            # none of the merged alerts came from QRadar.
            return False

        # case opened from an alert, but not from QRadar
        return False

    def isAzureSentinel(self):
        """
            Check if the webhook describes an Azure Sentinel Incident.

            :return: True if it is an Azure Sentinel Incident, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinel starts', __name__)

        # The incident is flagged with an 'AzureSentinel' tag either on
        # the event details or on the object itself; check in that order.
        for section in ('details', 'object'):
            payload = self.data[section]
            if 'tags' in payload and 'AzureSentinel' in payload['tags']:
                return True
        return False

    def isAzureSentinelAlertMarkedAsRead(self):
        """
            Check if the webhook describes an Azure Sentinel alert marked
            as read; "store" the incidentId in the webhook attribute
            "incidentId".

            :return: True if it is an Azure Sentinel alert marked as read,
                False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinelAlertMarkedAsRead starts',
                          __name__)

        if not (self.isAlert() and self.isMarkedAsRead()):
            return False
        # 'Azure_Sentinel_incidents' is the source value hardcoded at
        # alert creation by the AzureSentinel2alert workflow.
        if self.data['object']['source'] != 'Azure_Sentinel_incidents':
            return False
        self.incidentId = self.data['object']['sourceRef']
        return True

    def isAzureSentinelAlertImported(self):
        """
            Check if the webhook describes an imported Azure Sentinel alert.

            :return: True if an Azure Sentinel alert was imported,
                False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinelAlertImported starts', __name__)

        # Imported alert AND tagged as Azure Sentinel.
        return bool(self.isImportedAlert() and self.isAzureSentinel())

    def fromAzureSentinel(self, esCaseId):
        """
            For a given esCaseId, search if the case has been opened from
            an Azure Sentinel incident; if so adds the incidentId
            attribute to this object.

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if it is an Azure Sentinel case, False if not
            :rtype: bool
        """

        hits = self.theHiveConnector.findAlert({'case': esCaseId})

        # Expect exactly one matching alert; anything else is not a
        # Sentinel-originated case.
        if len(hits) != 1:
            return False
        if hits[0]['source'] != 'Azure_Sentinel_incidents':
            # case opened from an alert, but not from Azure Sentinel
            return False

        # case opened from an Azure Sentinel incident
        self.incidentId = hits[0]['sourceRef']
        return True

    def isClosedAzureSentinelCase(self):
        """
            Check if the webhook describes a closing Azure Sentinel case,
            i.e. a case opened from an Azure Sentinel alert.
            "store" the incidentId in the webhook attribute "incidentId"
            (set by fromAzureSentinel).
            A merged case is not considered closed (even if it is from
            TheHive's perspective), so it will not close an incident.
            However a case created from merged cases, where one of the
            merged cases relates to Azure Sentinel, will close the linked
            Azure Sentinel incident.

            :return: True if it is a closed Azure Sentinel case,
                False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosedAzureSentinelCase starts', __name__)

        try:
            # Merged cases are excluded via the isMergedInto() check.
            if not (self.isCase() and self.isClosed()
                    and not self.isMergedInto()):
                # not a case, or not closed when the webhook was issued
                return False

            if self.fromAzureSentinel(self.data['objectId']):
                # case opened directly from a Sentinel incident
                return True

            # The case may result from merged cases: if any parent case
            # is linked to Azure Sentinel, treat this one as Sentinel too.
            if self.isFromMergedCases():
                return any(
                    self.fromAzureSentinel(esCaseId)
                    for esCaseId in self.data['object']['mergeFrom'])

            # not an Azure Sentinel case
            return False

        except Exception:
            self.logger.error('%s.isClosedAzureSentinelCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isDeletedAzureSentinelCase(self):
        """
            Check if the webhook describes deleting an Azure Sentinel case.

            "store" the incidentId in the webhook attribute "incidentId"
            (set by fromAzureSentinel).

            :return: True if it is deleting an Azure Sentinel case,
                False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeletedAzureSentinelCase starts', __name__)

        try:
            if not (self.isCase() and self.isDeleted()):
                # not a case, or not deleted when the webhook was issued
                return False

            if self.fromAzureSentinel(self.data['objectId']):
                # case opened directly from a Sentinel incident
                return True

            # A merged case counts as Sentinel if any of its parent
            # cases was opened from a Sentinel incident.
            if self.isFromMergedCases():
                return any(
                    self.fromAzureSentinel(esCaseId)
                    for esCaseId in self.data['object']['mergeFrom'])

            # not an Azure Sentinel case
            return False

        except Exception:
            self.logger.error('%s.isDeletedAzureSentinelCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isMisp(self):
        """
            Check if the webhook describes a MISP-related event.

            :return: True if the event relates to MISP, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isMisp starts', __name__)

        # MISP is signalled in one of four ways, checked in order:
        # object type, object tags, details tags, or a 'MISP:type=' tag.
        obj = self.data['object']
        if 'type' in obj and obj['type'] == 'misp':
            return True
        if 'tags' in obj and 'misp' in obj['tags']:
            return True
        details = self.data['details']
        if 'tags' in details and 'misp' in details['tags']:
            return True
        if 'tags' in details and any('MISP:type=' in tag
                                     for tag in details['tags']):
            return True
        return False

    def isNewMispCase(self):
        """
            Check if the webhook describes a new MISP case, i.e. a case
            opened from a MISP alert.

            :return: True if it is a new MISP case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispCase starts', __name__)

        # MISP-related AND a case AND newly created.
        return bool(self.isMisp() and self.isCase() and self.isNew())

    def isNewMispAlert(self):
        """
            Check if the webhook describes a newly created MISP alert.

            :return: True if a MISP alert was created, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispAlert starts', __name__)

        # Alert AND new AND MISP-related.
        return bool(self.isAlert() and self.isNew() and self.isMisp())

    def isNewMispArtifact(self):
        """
            Check if the webhook describes a newly created MISP artifact.

            :return: True if a MISP artifact was created, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispArtifact starts', __name__)

        # Artifact AND new AND MISP-related.
        return bool(self.isArtifact() and self.isNew() and self.isMisp())
Esempio n. 14
0
 def __init__(self):
     """Wire up the MLabs and TheHive connectors for this integration."""
     # NOTE(review): this fragment relies on the base class __init__
     # to provide self.cfg -- confirm against the full source file.
     super().__init__()
     self.mlabsConnector = MLabsConnector(self.cfg)
     self.theHiveConnector = TheHiveConnector(self.cfg)
Esempio n. 15
0
class Automation():
    """Sync QRadar offenses with TheHive cases/alerts based on webhooks."""

    def __init__(self, webhook, cfg):
        """
            :param webhook: parsed TheHive webhook to act upon
            :param cfg: application configuration object
        """
        logger.info('Initiating QRadarAutomation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.QRadarConnector = QRadarConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        # default report action; overwritten by parse_hooks on success
        self.report_action = report_action

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        """
            Check whether the third-party ticket is linked to an alert
            marked as read, or to a closed case, within TheHive.

            :param sourceref: third-party ticket id (offense sourceRef)
            :return: dict with 'resolutionStatus' and 'summary' when
                closure info is found, False otherwise
        """
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug(
            'Checking if third party ticket({}) is linked to a closed case'.
            format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(
                    f"{sourceref} is found in alert {alert_found['id']} that has been marked as read"
                )
                return {
                    "resolutionStatus":
                    "Indeterminate",
                    "summary":
                    "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if alert is present in closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    logger.info(
                        f"{sourceref} was found in a closed case {case_found['id']}"
                    )
                    # fall-back values when the case lacks resolution metadata
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return information required to sync with third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        """
            Route the webhook to the matching QRadar action (close the
            offense when its case/alert is closed, deleted or read).

            :return: the report action performed
        """
        # Update incident status to active when imported as Alert
        if self.webhook.isQRadarAlertImported():
            self.offense_id = self.webhook.data['object']['sourceRef']

            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                self.offense_id)
            if closure_info:
                logger.info(
                    'Qradar offense({}) is linked to a closed case'.format(
                        self.offense_id))
                # Close incident and continue with the next incident
                self.QRadarConnector.closeOffense(self.offense_id)

        # Close offenses in QRadar
        if self.webhook.isClosedQRadarCase(
        ) or self.webhook.isDeletedQRadarCase(
        ) or self.webhook.isQRadarAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                logger.info('Case {} has been deleted'.format(self.case_id))

            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                logger.info('Alert {} has been marked as read'.format(
                    self.alert_id))
                self.QRadarConnector.closeOffense(
                    self.webhook.data['object']['sourceRef'])

            else:
                self.case_id = self.webhook.data['object']['id']
                logger.info('Case {} has been marked as resolved'.format(
                    self.case_id))

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing offense {} for case {}".format(
                        self.webhook.ext_alert_id, self.case_id))
                    self.QRadarConnector.closeOffense(
                        self.webhook.ext_alert_id)

                # Robustness fix: use getattr with a default so a webhook
                # without ext_alert_ids no longer raises AttributeError.
                elif len(getattr(self.webhook, 'ext_alert_ids', [])) > 0:
                    # Close offense for every linked offense
                    logger.info(
                        "Found multiple offenses {} for case {}".format(
                            self.webhook.ext_alert_ids, self.case_id))
                    for offense_id in self.webhook.ext_alert_ids:
                        logger.info("Closing offense {} for case {}".format(
                            offense_id, self.case_id))
                        self.QRadarConnector.closeOffense(offense_id)

            self.report_action = 'closeOffense'

        return self.report_action
Esempio n. 16
0
class Automator():
    """Match webhook tags against configured automation ids and run tasks."""

    def __init__(self, webhook, cfg, automation_config):
        """
            Class constructor

            :param webhook: parsed TheHive webhook
            :param cfg: application configuration object
            :param automation_config: mapping with an 'automation_ids' dict
        """
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating Siem Integration')

        self.cfg = cfg
        self.app_dir = os.path.dirname(os.path.abspath(__file__)) + "/.."
        self.automation_config = automation_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.webhook = webhook

        if cfg.getboolean('Automation', 'enable_customer_list',
                          fallback=False):
            self.logger.info('Loading Customer configuration')
            # Load optional customer config; the 'list' converter splits
            # ';'-separated values
            self.customer_cfg = ConfigParser(
                converters={
                    'list': lambda x: [i.strip() for i in x.split(';')]
                })
            self.confPath = self.app_dir + '/conf/customers.conf'
            try:
                self.logger.debug('Loading configuration from %s' %
                                  self.confPath)
                self.customer_cfg.read(self.confPath)
                self.customers = self.customer_cfg.sections()
                self.logger.debug('Loaded configuration for %s' %
                                  self.customers)
            except Exception:
                # best-effort: a broken customer config should not stop
                # the automator from starting
                self.logger.error('%s', __name__, exc_info=True)

    def check_automation(self):
        """
            Scan the webhook tags for automation ids and execute the
            actions configured for every matching id.

            :return: the report action ('None' when nothing matched)
        """
        self.logger.info(
            'Start parsing use cases for the SIEM based alerts/cases')
        self.ucTaskId = False
        self.report_action = 'None'

        if 'tags' in self.webhook.data['object']:
            self.tags = self.webhook.data['object']['tags']
        # Add tagging to webhooks that are missing tags
        # NOTE(review): isCaseArtifactJob is referenced without being
        # called; if it is a method this condition is always truthy --
        # confirm intended behaviour.
        elif 'artifactId' in self.webhook.data[
                'object'] and self.webhook.isCaseArtifactJob:
            self.logger.debug(
                'Found artifact id {} for webhook {}. Retrieving tags from there'
                .format(self.webhook.data['object']['artifactId'],
                        self.webhook.id))
            self.tags = self.TheHiveConnector.getCaseObservable(
                self.webhook.data['object']['artifactId'])['tags']
        else:
            self.tags = []
            self.logger.warning("no tags found for webhook {}".format(
                self.webhook.id))
        self.automation_regexes = self.cfg.get('Automation',
                                               'automation_regexes',
                                               fallback=None)
        if not self.automation_regexes:
            self.logger.error(
                "Could not find any regexes to find tags for automation")
            return self.report_action
        self.automation_ids = self.automation_config['automation_ids']

        # loop through tags to see if there is a use case present
        for tag in self.tags:
            for automation_regex in self.automation_regexes:
                # The tag should match this regex otherwise it is no use case.
                # Narrowed from a bare except: AttributeError covers a
                # non-matching search (None.group), re.error a bad pattern.
                try:
                    tag = re.search(automation_regex, tag).group(0)
                except (AttributeError, re.error):
                    self.logger.info("Tag: %s is not matching the uc regex" %
                                     tag)
                    continue

                # check if use case that is provided, matches the case
                if tag in self.automation_ids:
                    self.found_a_id = tag

                    # Try to retrieve the defined actions
                    self.use_case_actions = self.automation_ids[
                        self.found_a_id]['automation']
                    # perform actions defined for the use case
                    for action, action_config in self.use_case_actions.items():
                        self.action_config = action_config
                        # Give automator information regarding the webhook as
                        # some actions are limited to the state of the
                        # alert/case
                        self.logger.info(
                            'Found the following action for %s: %s, with task %s'
                            % (self.found_a_id, action, action_config['task']))

                        # Add support for multiple tasks, loop them 1 by 1
                        if 'tasks' in self.action_config:
                            for task in self.action_config['tasks']:
                                self.action_config['task'] = task

                                # Run actions through the automator
                                if self.Automate(self.action_config,
                                                 self.webhook):
                                    continue
                                else:
                                    self.logger.info(
                                        'Did not find any supported actions')
                        # Run actions through the automator
                        else:
                            if self.Automate(self.action_config, self.webhook):
                                continue
                            else:
                                self.logger.info(
                                    'Did not find any supported actions')
        return self.report_action

    def Automate(self, task_config, webhook):
        """
            Resolve '<module>.<function>' from task_config and execute it.

            :param task_config: dict holding at least the 'task' key
            :param webhook: webhook passed through to the task function
            :return: task results, False on failure, None on a bad task name
        """
        # Split the task name on the dot to have a module and a function
        # variable in a list
        try:
            self.task = task_config['task'].split(".")
            # Should probably also do some matching for words to mitigate
            # some security concerns?
            module_name = self.task[0]
            function_name = self.task[1]
        except (KeyError, IndexError, AttributeError, TypeError):
            # Narrowed from a bare except. Log the raw config instead of
            # self.task, which may be unset when 'task' is missing.
            self.logger.error(
                "{} does not seem to be a valid automator task name".format(
                    task_config))
            return

        try:
            # Load the Automators class from the module to initialise it
            automators = loaded_modules[module_name].Automators(
                self.cfg, self.automation_config)
        except KeyError:
            self.logger.warning(
                "Automator module not found: {}".format(module_name),
                exc_info=True)
            return False

        try:
            # Run the function for the task and return the results
            self.results = getattr(automators, function_name)(task_config,
                                                              webhook)

            # Return the results or True if the task was succesful
            # without returning information
            if self.results:
                return self.results
            else:
                return False
        except KeyError:
            self.logger.warning("Automator task not found for {}: {}".format(
                module_name, function_name),
                                exc_info=True)
            return False
Esempio n. 17
0
 def __init__(self):
     """Wire up the Azure Sentinel and TheHive connectors."""
     # NOTE(review): this fragment relies on the base class __init__
     # to provide self.cfg -- confirm against the full source file.
     super().__init__()
     self.azureSentinelConnector = AzureSentinelConnector(self.cfg)
     self.theHiveConnector = TheHiveConnector(self.cfg)
Esempio n. 18
0
class Integration(Main):
    """Imports IBM QRadar offenses into TheHive as alerts.

    Offenses are fetched from QRadar, enriched with observables, rule names
    and raw logs, converted to TheHive alerts and kept in sync: new offenses
    create alerts, existing alerts are checked for updates, and offenses
    whose TheHive case was closed are closed back in QRadar.
    """

    def __init__(self):
        super().__init__()
        # Both connectors are configured from the config object loaded by Main.
        self.qradarConnector = QRadarConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def enrichOffense(self, offense):
        """Return a deep copy of *offense* enriched for alert creation.

        Adds a readable offense type string, observables (artifacts) derived
        from the offense source/destination IPs and the configured
        ``observables_in_offense_type`` extraction rules, the triggered rule
        names and the first raw logs of the offense.
        """
        enriched = copy.deepcopy(offense)

        artifacts = []

        enriched['offense_type_str'] = \
            self.qradarConnector.getOffenseTypeStr(offense['offense_type'])

        # Add the offense source explicitly
        if enriched['offense_type_str'] == 'Username':
            artifacts.append({
                'data': offense['offense_source'],
                'dataType': 'user-account',
                'message': 'Offense Source',
                'tags': ['QRadar']
            })

        # Add the local and remote sources
        # srcIps contains offense source IPs
        srcIps = list()
        # dstIps contains offense destination IPs
        dstIps = list()
        # srcDstIps contains IPs which are both source and destination of offense
        srcDstIps = list()
        for ip in self.qradarConnector.getSourceIPs(enriched):
            srcIps.append(ip)

        for ip in self.qradarConnector.getLocalDestinationIPs(enriched):
            dstIps.append(ip)

        # making copies is needed since we want to
        # access and delete data from the list at the same time
        s = copy.deepcopy(srcIps)
        d = copy.deepcopy(dstIps)

        for srcIp in s:
            for dstIp in d:
                if srcIp == dstIp:
                    srcDstIps.append(srcIp)
                    srcIps.remove(srcIp)
                    dstIps.remove(dstIp)

        for ip in srcIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source IP',
                'tags': ['QRadar', 'src']
            })
        for ip in dstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Local destination IP',
                'tags': ['QRadar', 'dst']
            })
        for ip in srcDstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source and local destination IP',
                'tags': ['QRadar', 'src', 'dst']
            })

        # Parse offense types to add the offense source as an observable when a valid type is used
        for offense_type, extraction_config in self.cfg.get(
                'QRadar', 'observables_in_offense_type', fallback={}).items():
            if enriched['offense_type_str'] == offense_type:
                if isinstance(extraction_config, str):
                    # Simple form: the config value is the observable datatype.
                    observable_type = extraction_config
                    artifacts.append({
                        'data': enriched['offense_source'],
                        'dataType': observable_type,
                        'message': 'QRadar Offense source',
                        'tags': ['QRadar']
                    })
                elif isinstance(extraction_config, list):
                    # Extended form: regexes with named match groups mapped to
                    # observable datatypes.
                    for extraction in extraction_config:
                        regex = re.compile(extraction['regex'])
                        matches = regex.findall(str(
                            enriched['offense_source']))
                        if len(matches) > 0:
                            # if isinstance(found_observable, tuple): << Fix later loop through matches as well
                            for match_group, observable_type in extraction[
                                    'match_groups'].items():
                                try:
                                    artifacts.append({
                                        'data':
                                        matches[0][match_group],
                                        'dataType':
                                        observable_type,
                                        'message':
                                        'QRadar Offense Type based observable',
                                        'tags': ['QRadar', 'offense_type']
                                    })
                                except Exception:
                                    self.logger.warning(
                                        "Could not find match group {} in {}".
                                        format(match_group,
                                               enriched['offense_type_str']))
                else:
                    self.logger.error(
                        "Configuration for observables_in_offense_type is wrongly formatted. Please fix this to enable this functionality"
                    )

        # Remove observables that are to be excluded based on the configuration
        artifacts = self.checkObservableExclusionList(artifacts)

        # Match observables against the TLP list
        artifacts = self.checkObservableTLP(artifacts)

        # Add all the observables
        enriched['artifacts'] = artifacts

        # Add rule names to offense
        enriched['rules'] = self.qradarConnector.getRuleNames(offense)

        # waiting 1s to make sure the logs are searchable
        sleep(1)
        # adding the first 3 raw logs
        enriched['logs'] = self.qradarConnector.getOffenseLogs(enriched)

        return enriched

    def qradarOffenseToHiveAlert(self, offense):
        """Convert an enriched QRadar offense into a TheHive alert object."""
        def getHiveSeverity(offense):
            # severity in TheHive is either low, medium or high
            # while severity in QRadar is from 1 to 10
            # low will be [1;4] => 1
            # medium will be [5;6] => 2
            # high will be [7;10] => 3
            if offense['severity'] < 5:
                return 1
            elif offense['severity'] < 7:
                return 2
            elif offense['severity'] < 11:
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['QRadar', 'Offense', 'Synapse']
        # Add the offense type as a tag
        if 'offense_type_str' in offense:
            tags.append("qr_offense_type: {}".format(
                offense['offense_type_str']))

        # Always defined so the artifact tagging loop below cannot raise a
        # NameError when automation id extraction is disabled (bug fix).
        tags_extracted = []

        # Check if the automation ids need to be extracted
        if self.cfg.getboolean('QRadar', 'extract_automation_identifiers'):

            # Run the extraction function and add it to the offense data
            # Extract automation ids
            tags_extracted = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'tag_regexes'))
            # Extract any possible name for a document on a knowledge base
            offense['use_case_names'] = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'uc_kb_name_regexes'))
            if len(tags_extracted) > 0:
                tags.extend(tags_extracted)
            else:
                self.logger.info('No match found for offense %s',
                                 offense['id'])

        # Check if the mitre ids need to be extracted
        if self.cfg.getboolean('QRadar', 'extract_mitre_ids'):
            # Extract mitre tactics
            offense['mitre_tactics'] = self.tagExtractor(
                offense, ["rules"], [r'[tT][aA]\d{4}'])
            if 'mitre_tactics' in offense:
                tags.extend(offense['mitre_tactics'])

            # Extract mitre techniques
            offense['mitre_techniques'] = self.tagExtractor(
                offense, ["rules"], [r'[tT]\d{4}'])
            if 'mitre_techniques' in offense:
                tags.extend(offense['mitre_techniques'])

        if "categories" in offense:
            for cat in offense['categories']:
                tags.append(cat)

        # Datatypes TheHive accepts natively; anything else is mapped to 'other'.
        defaultObservableDatatype = [
            'autonomous-system', 'domain', 'file', 'filename', 'fqdn', 'hash',
            'ip', 'mail', 'mail_subject', 'other', 'process_filename',
            'regexp', 'registry', 'uri_path', 'url', 'user-account',
            'user-agent'
        ]

        artifacts = []
        for artifact in offense['artifacts']:
            # Add automation tagging and mitre tagging to observables
            if len(tags_extracted) > 0:
                artifact['tags'].extend(tags_extracted)
            if 'mitre_tactics' in offense:
                artifact['tags'].extend(offense['mitre_tactics'])
            if 'mitre_techniques' in offense:
                artifact['tags'].extend(offense['mitre_techniques'])

            if artifact['dataType'] in defaultObservableDatatype:
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType=artifact['dataType'],
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            else:
                artifact['tags'].append('type:' + artifact['dataType'])
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType='other',
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            artifacts.append(hiveArtifact)

        # Retrieve the configured case_template
        qradarCaseTemplate = self.cfg.get('QRadar', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}, {}".format(offense['id'], offense['description']),
            self.craftAlertDescription(offense), getHiveSeverity(offense),
            offense['start_time'],
            tags, 2, 'Imported', 'internal', 'QRadar_Offenses',
            str(offense['id']), artifacts, qradarCaseTemplate)

        return alert

    def validateRequest(self, request):
        """Validate an incoming sync request and run the workflow.

        Expects a JSON body containing a ``timerange`` key (minutes).
        Returns a ``(json_body, http_status)`` tuple suitable for Flask.
        """
        if request.is_json:
            content = request.get_json()
            if 'timerange' in content:
                workflowReport = self.allOffense2Alert(content['timerange'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing <timerange> key/value')
                # NOTE: the 'sucess' key typo is preserved for backward
                # compatibility with existing consumers of this endpoint.
                return json.dumps({
                    'sucess': False,
                    'message': "timerange key missing in request"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'sucess': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allOffense2Alert(self, timerange):
        """
        Get all opened offenses created within the last
        <timerange> minutes and create alerts for them in
        TheHive. Returns a report dict with per-offense results.
        """
        self.logger.info('%s.allOffense2Alert starts', __name__)

        report = dict()
        report['success'] = True
        report['offenses'] = list()

        try:
            offensesList = self.qradarConnector.getOffenses(timerange)
            # Check for offenses that should have been closed
            for offense in offensesList:
                closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                    offense['id'])
                if closure_info:
                    # Close incident and continue with the next incident
                    self.logger.info(
                        "Closed case found for {}. Closing offense...".format(
                            offense['id']))
                    self.qradarConnector.closeOffense(offense['id'])
                    continue

                matched = False
                # Filter based on regexes in configuration
                for offense_exclusion_regex in self.cfg.get(
                        'QRadar', 'offense_exclusion_regexes', fallback=[]):
                    self.logger.debug(
                        "Offense exclusion regex found '{}'. Matching against offense {}"
                        .format(offense_exclusion_regex, offense['id']))
                    regex = re.compile(offense_exclusion_regex, flags=re.I)
                    if regex.match(offense['description']):
                        self.logger.debug(
                            "Found exclusion match for offense {} and regex {}"
                            .format(offense['id'], offense_exclusion_regex))
                        matched = True
                if matched:
                    continue

                # Prepare new alert
                offense_report = dict()
                self.logger.debug("offense: %s" % offense)
                self.logger.info("Enriching offense...")
                enrichedOffense = self.enrichOffense(offense)
                self.logger.debug("Enriched offense: %s" % enrichedOffense)
                theHiveAlert = self.qradarOffenseToHiveAlert(enrichedOffense)

                # searching if the offense has already been converted to alert
                query = dict()
                query['sourceRef'] = str(offense['id'])
                self.logger.info('Looking for offense %s in TheHive alerts',
                                 str(offense['id']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'Offense %s not found in TheHive alerts, creating it',
                        str(offense['id']))

                    try:
                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']

                        offense_report['raised_alert_id'] = theHiveEsAlertId
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allOffense2Alert failed',
                                          __name__,
                                          exc_info=True)
                        offense_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            offense_report['message'] = errorMessage
                        else:
                            offense_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        offense_report['offense_id'] = offense['id']
                        # Set overall success if any fails
                        report['success'] = False
                else:
                    self.logger.info(
                        'Offense %s already imported as alert, checking for updates',
                        str(offense['id']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found, offense['id']):
                        offense_report['updated_alert_id'] = alert_found['id']
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                    else:
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                # Single append covering both branches; the create branch used
                # to append as well, duplicating its entry in the report
                # (bug fix).
                report['offenses'].append(offense_report)

        except Exception as e:
            self.logger.error(
                'Failed to create alert from QRadar offense (retrieving offenses failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from offense" % str(e)

        return report

    def craftAlertDescription(self, offense):
        """
            From the offense metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add url to Offense
        qradar_ip = self.cfg.get('QRadar', 'server')
        url = (
            '[%s](https://%s/console/qradar/jsp/QRadar.jsp?appName=Sem&pageId=OffenseSummary&summaryId=%s)'
            % (str(offense['id']), qradar_ip, str(offense['id'])))

        description += '#### Offense: \n - ' + url + '\n\n'

        # Format associated rules
        rule_names_formatted = "#### Rules triggered: \n"
        rules = offense['rules']
        if len(rules) > 0:
            for rule in rules:
                if 'name' in rule:
                    rule_names_formatted += "- %s \n" % rule['name']
                else:
                    continue

        # Add rules overview to description
        description += rule_names_formatted + '\n\n'

        # Format associated documentation
        uc_links_formatted = "#### Use Case documentation: \n"
        kb_url = self.cfg.get('QRadar', 'kb_url')
        if 'use_case_names' in offense and offense['use_case_names']:
            for uc in offense['use_case_names']:
                replaced_kb_url = kb_url.replace('<uc_kb_name>', uc)
                uc_links_formatted += f"- [{uc}]({replaced_kb_url}) \n"

            # Add associated documentation
            description += uc_links_formatted + '\n\n'

        # Add mitre Tactic information
        mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        if 'mitre_tactics' in offense and offense['mitre_tactics']:
            for tactic in offense['mitre_tactics']:
                mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (
                    tactic, 'https://attack.mitre.org/tactics/', tactic)

            # Add associated documentation
            description += mitre_ta_links_formatted + '\n\n'

        # Add mitre Technique information
        mitre_t_links_formatted = "#### MITRE Techniques: \n"
        if 'mitre_techniques' in offense and offense['mitre_techniques']:
            for technique in offense['mitre_techniques']:
                mitre_t_links_formatted += "- [%s](%s/%s) \n" % (
                    technique, 'https://attack.mitre.org/techniques/',
                    technique)

            # Add associated documentation
            description += mitre_t_links_formatted + '\n\n'

        # Add offense details table
        description += (
            '#### Summary:\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' +
            str(self.qradarConnector.formatDate(offense['start_time'])) +
            ' |\n' + '| **Offense ID**          | ' + str(offense['id']) +
            ' |\n' + '| **Description**         | ' +
            str(offense['description'].replace('\n', '')) + ' |\n' +
            '| **Offense Type**        | ' + str(offense['offense_type_str']) +
            ' |\n' + '| **Offense Source**      | ' +
            str(offense['offense_source']) + ' |\n' +
            '| **Destination Network** | ' +
            str(offense['destination_networks']) + ' |\n' +
            '| **Source Network**      | ' + str(offense['source_network']) +
            ' |\n\n\n' + '\n\n\n\n')

        # Add raw payload
        description += '#### Payload:\n```\n'
        for log in offense['logs']:
            description += log['utf8_payload'] + '\n'
        description += '```\n\n'

        return description
Esempio n. 19
0
 def __init__(self):
     """Initialise the integration with its QRadar and TheHive clients."""
     # NOTE(review): fragment — the enclosing class header is not visible here.
     super().__init__()
     # Client for the QRadar REST API; configured from the shared config.
     self.qradarConnector = QRadarConnector(self.cfg)
     # Client used to create and search alerts/cases in TheHive.
     self.TheHiveConnector = TheHiveConnector(self.cfg)
Esempio n. 20
0
class Integration(Main):
    """Imports Azure Sentinel incidents into TheHive as alerts.

    Incidents are fetched from Azure Sentinel, converted to TheHive alerts
    and kept in sync: new incidents create alerts, and already-imported
    incidents are checked for changes and updated.
    """

    def __init__(self):
        super().__init__()
        # Both connectors are configured from the config object loaded by Main.
        self.azureSentinelConnector = AzureSentinelConnector(self.cfg)
        self.theHiveConnector = TheHiveConnector(self.cfg)

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        self.description = ""

        # Add url to incident
        self.url = ('[%s](%s)' %
                    (str(incident['properties']['incidentNumber']),
                     str(incident['properties']['incidentUrl'])))
        self.description += '#### Incident: \n - ' + self.url + '\n\n'

        # Format associated rules
        self.rule_names_formatted = "#### Rules triggered: \n"
        self.rules = incident['properties']['relatedAnalyticRuleIds']
        if len(self.rules) > 0:
            for rule in self.rules:
                self.rule_info = self.azureSentinelConnector.getRule(rule)
                self.logger.debug(
                    'Received the following rule information: {}'.format(
                        self.rule_info))
                self.rule_name = self.rule_info['properties']['displayName']
                # (an unused local building a management.azure.com rule URL
                # was removed here; it was never rendered in the output)
                self.rule_names_formatted += "- %s \n" % (self.rule_name)

        # Add rules overview to description
        self.description += self.rule_names_formatted + '\n\n'

        # Add mitre Tactic information
        # https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json

        # mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        # if 'mitre_tactics' in offense and offense['mitre_tactics']:
        #     for tactic in offense['mitre_tactics']:
        #         mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (tactic, 'https://attack.mitre.org/tactics/', tactic)

        #     #Add associated documentation
        #     self.description += mitre_ta_links_formatted + '\n\n'

        # #Add mitre Technique information
        # mitre_t_links_formatted = "#### MITRE Techniques: \n"
        # if 'mitre_techniques' in offense and offense['mitre_techniques']:
        #     for technique in offense['mitre_techniques']:
        #         mitre_t_links_formatted += "- [%s](%s/%s) \n" % (technique, 'https://attack.mitre.org/techniques/', technique)

        # Add a custom description when the incident does not contain any
        if 'description' not in incident['properties']:
            incident['properties']['description'] = "N/A"

        # Add incident details table
        self.description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' + str(
                self.azureSentinelConnector.formatDate(
                    "description", incident['properties']['createdTimeUtc'])) +
            ' |\n' + '| **incident ID**          | ' +
            str(incident['properties']['incidentNumber']) + ' |\n' +
            '| **Description**         | ' +
            str(incident['properties']['description'].replace('\n', '')) +
            ' |\n' + '| **incident Type**        | ' + str(incident['type']) +
            ' |\n' + '| **incident Source**      | ' +
            str(incident['properties']['additionalData']['alertProductNames'])
            + ' |\n' + '| **incident Status**      | ' +
            str(incident['properties']['status']) + ' |\n' + '\n\n\n\n')

        return self.description

    def sentinelIncidentToHiveAlert(self, incident):
        """Convert a Sentinel incident into a TheHive alert object."""
        def getHiveSeverity(incident):
            # severity in TheHive is either low, medium or high
            # while severity in Sentinel is from Low to High
            if incident['properties']['severity'] == "Low":
                return 1
            elif incident['properties']['severity'] == "Medium":
                return 2
            elif incident['properties']['severity'] == "High":
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        self.tags = ['AzureSentinel', 'incident', 'Synapse']

        # Skip for now
        self.artifacts = []

        # Retrieve the configured case_template
        self.sentinelCaseTemplate = self.cfg.get('AzureSentinel',
                                                 'case_template')

        # Build TheHive alert
        self.alert = self.theHiveConnector.craftAlert(
            "{}, {}".format(incident['properties']['incidentNumber'],
                            incident['properties']['title']),
            self.craftAlertDescription(incident), getHiveSeverity(incident),
            self.azureSentinelConnector.formatDate(
                "alert_timestamp", incident['properties']['createdTimeUtc']),
            self.tags, 2, 'New', 'internal', 'Azure_Sentinel_incidents',
            str(incident['name']), self.artifacts, self.sentinelCaseTemplate)

        return self.alert

    def validateRequest(self, request):
        """Validate an incoming sync request and run the workflow.

        Expects a JSON body with ``type`` set to "Active". Returns a
        ``(json_body, http_status)`` tuple suitable for Flask.
        """
        if request.is_json:
            self.content = request.get_json()
            if 'type' in self.content and self.content['type'] == "Active":
                self.workflowReport = self.allIncidents2Alert(
                    self.content['type'])
                if self.workflowReport['success']:
                    return json.dumps(self.workflowReport), 200
                else:
                    return json.dumps(self.workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                # NOTE: the 'sucess' key typo is preserved for backward
                # compatibility with existing consumers of this endpoint.
                return json.dumps({
                    'sucess':
                    False,
                    'message':
                    "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'sucess': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all opened incidents created within Azure Sentinel
        and create alerts for them in TheHive. Returns a report
        dict with per-incident results.
        """
        self.logger.info('%s.allincident2Alert starts', __name__)

        self.report = dict()
        self.report['success'] = True
        self.report['incidents'] = list()

        try:
            self.incidentsList = self.azureSentinelConnector.getIncidents()

            # each incidents in the list is represented as a dict
            # we enrich this dict with additional details
            for incident in self.incidentsList:

                # Prepare new alert
                self.incident_report = dict()
                self.logger.debug("incident: %s" % incident)
                # self.logger.info("Enriching incident...")
                # enrichedincident = enrichIncident(incident)
                # self.logger.debug("Enriched incident: %s" % enrichedincident)
                self.theHiveAlert = self.sentinelIncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                self.query = dict()
                self.query['sourceRef'] = str(incident['name'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['name']))
                self.results = self.theHiveConnector.findAlert(self.query)
                if len(self.results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['name']))

                    try:
                        self.theHiveEsAlertId = self.theHiveConnector.createAlert(
                            self.theHiveAlert)['id']

                        self.incident_report[
                            'raised_alert_id'] = self.theHiveEsAlertId
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allincident2Alert failed',
                                          __name__,
                                          exc_info=True)
                        self.incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            self.incident_report['message'] = errorMessage
                        else:
                            self.incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        self.incident_report['incident_id'] = incident['name']
                        # Set overall success if any fails
                        self.report['success'] = False

                    self.report['incidents'].append(self.incident_report)
                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['name']))
                    self.alert_found = self.results[0]

                    # Check if alert is already created, but needs updating
                    if self.check_if_updated(self.alert_found,
                                             vars(self.theHiveAlert)):
                        self.logger.info(
                            "Found changes for %s, updating alert" %
                            self.alert_found['id'])

                        # update alert
                        self.theHiveConnector.updateAlert(
                            self.alert_found['id'],
                            self.theHiveAlert,
                            fields=["tags", "artifacts"])
                        self.incident_report[
                            'updated_alert_id'] = self.alert_found['id']
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True
                    else:
                        self.logger.info("No changes found for %s" %
                                         self.alert_found['id'])
                        continue
                    # Record the update outcome; previously updates were
                    # silently omitted from the report (bug fix, consistent
                    # with the create branch above).
                    self.report['incidents'].append(self.incident_report)

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Azure Sentinel incident (retrieving incidents failed)',
                exc_info=True)
            self.report['success'] = False
            self.report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return self.report
Esempio n. 21
0
 def __init__(self):
     """Initialise the integration with its RD and TheHive clients."""
     # NOTE(review): fragment — the enclosing class header is not visible here.
     super().__init__()
     # Client for the Responsible Disclosure mailbox/API; configured from self.cfg.
     self.RDConnector = RDConnector(self.cfg)
     # Client used to create and search alerts/cases in TheHive.
     self.TheHiveConnector = TheHiveConnector(self.cfg)
Esempio n. 22
0
 def __init__(self):
     """Initialise the integration with its Lexsi and TheHive clients."""
     # NOTE(review): fragment — the enclosing class header is not visible here.
     super().__init__()
     # Client for the Lexsi service; configured from the shared config.
     self.lexsi = LexsiConnector(self.cfg)
     # Client used to create and search alerts/cases in TheHive.
     self.TheHiveConnector = TheHiveConnector(self.cfg)
Esempio n. 23
0
class Integration(Main):
    """Synchronise open Lexsi incidents with TheHive.

    Pulls the list of open incidents from Lexsi, raises each one as a
    TheHive alert (promoted straight to a case), updates alerts that were
    already imported, and closes TheHive cases whose Lexsi incident is no
    longer in the open list.
    """

    def __init__(self):
        super().__init__()
        self.lexsi = LexsiConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        """Validate the incoming HTTP request and run the sync workflow.

        Expects a JSON body containing ``{"type": "Active"}``.

        Returns:
            tuple: (JSON string, HTTP status code).
        """
        if request.is_json:
            content = request.get_json()
            if 'type' in content and content['type'] == "Active":
                workflowReport = self.allIncidents2Alert(content['type'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                # BUGFIX: key was misspelled 'sucess'; 'success' matches the
                # report returned by allIncidents2Alert on the happy path.
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            # BUGFIX: key was misspelled 'sucess' (see above).
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all opened incidents created within Lexsi
        and create alerts for them in TheHive.

        Returns:
            dict: overall 'success' flag plus a per-incident 'incidents'
            list describing what was created/updated.
        """
        self.logger.info('%s.allincident2Alert starts', __name__)

        incidentsList = self.lexsi.getOpenItems()['result']

        report = dict()
        report['success'] = True
        report['incidents'] = list()

        try:
            # each incident in the list is represented as a dict
            # we enrich this dict with additional details
            for incident in incidentsList:

                # Prepare new alert
                incident_report = dict()
                self.logger.debug("incident: %s" % incident)

                theHiveAlert = self.IncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                query = dict()
                query['sourceRef'] = str(incident['incident'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['incident']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['incident']))
                    try:
                        # Create the alert and immediately promote it to a case
                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']
                        self.TheHiveConnector.promoteAlertToCase(
                            theHiveEsAlertId)

                        incident_report['raised_alert_id'] = theHiveEsAlertId
                        incident_report['lexsi_incident_id'] = incident[
                            'incident']
                        incident_report['success'] = True

                    except Exception as e:
                        self.logger.error(incident_report)
                        self.logger.error('%s.allincident2Alert failed',
                                          __name__,
                                          exc_info=True)
                        incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            incident_report['message'] = errorMessage
                        else:
                            incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        incident_report['incident_id'] = incident['incident']
                        # Set overall success if any fails
                        report['success'] = False

                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['incident']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found,
                            str(incident['incident'])):
                        # Mark the alert as read
                        self.TheHiveConnector.markAlertAsRead(
                            alert_found['id'])
                        incident_report['updated_alert_id'] = alert_found['id']
                        # NOTE: key name 'sentinel_incident_id' kept for
                        # backward compatibility with report consumers.
                        incident_report['sentinel_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                    else:
                        incident_report['sentinel_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                report['incidents'].append(incident_report)

            # Close TheHive cases whose Lexsi incident is no longer open
            thehiveAlerts, open_lexsi_cases = self.lexsi_opened_alerts_thehive(
            )
            self.set_alert_status_ignored(incidentsList, thehiveAlerts,
                                          open_lexsi_cases)

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Lexsi incident (retrieving incidents failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return report

    def IncidentToHiveAlert(self, incident):
        """Craft a TheHive alert object from a Lexsi incident dict."""

        # Setup Tags
        tags = ['Lexsi', 'incident', 'Synapse']

        # Observables are skipped for now
        artifacts = []

        # Retrieve the configured case_template
        CaseTemplate = self.cfg.get('Lexsi', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}: {}".format(incident['incident'], incident['title']),
            self.craftAlertDescription(incident),
            self.getHiveSeverity(incident),
            self.timestamp_to_epoch(incident['detected'], "%Y-%m-%d %H:%M:%S"),
            tags, 2, 'New', 'internal', 'Lexsi', str(incident['incident']),
            artifacts, CaseTemplate)

        return alert

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add incident details table
        description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **URL**          | ' +
            "{}{}{}".format("```", str(incident['url']), "```") + ' |\n' +
            '| **Type**          | ' + str(incident['type']) + ' |\n' +
            '| **Severity**          | ' + str(incident['severity']) + ' |\n' +
            '| **Category**         | ' + str(incident['category']) + ' |\n' +
            '| **Updated**        | ' + str(incident['updated']) + ' |\n' +
            '| **Detected**        | ' + str(incident['detected']) + ' |\n' +
            '| **Source**        | ' + str(incident['source']) + ' |\n' +
            '| **Analyst Name(Lexsi)**        | ' +
            str(incident['analystName']) + ' |\n' +
            '| **Link to Orange Portal**        | ' +
            str("https://portal.cert.orangecyberdefense.com/cybercrime/{}".
                format(incident['id'])) + ' |\n' + '\n\n\n\n')

        return description

    def timestamp_to_epoch(self, date_time, pattern):
        """Convert a local date/time string to epoch milliseconds."""
        return int(time.mktime(time.strptime(date_time, pattern))) * 1000

    def getHiveSeverity(self, incident):
        # severity in TheHive is either low, medium, high or critical
        # while severity in Lexsi is from 0 to 5
        # NOTE(review): mapping severities 0 and 5 to low (1) and everything
        # else to medium (2) looks deliberate (see commented variants) but
        # is worth confirming with the original author.
        if int(incident['severity']) in {0, 5}:
            return 1
        # elif int(incident['severity']) in {2,3}:
        #    return 2
        # elif int(incident['severity']) in {4,5}:
        #    return 3
        else:
            return 2

    def lexsi_opened_alerts_thehive(self):
        """Collect Lexsi-tagged TheHive alerts whose linked case is open.

        Returns:
            tuple: (list of sourceRefs, dict mapping sourceRef -> case).
        """
        thehiveAlerts = []
        open_lexsi_cases = {}
        query = In('tags', ['Lexsi'])

        self.logger.info(
            'Looking for incident in TheHive alerts with tag Lexsi')
        results = self.TheHiveConnector.findAlert(query)
        for alert_found in results:
            # Check if a case is linked
            if 'case' in alert_found:
                try:
                    case_found = self.TheHiveConnector.getCase(
                        alert_found['case'])
                    # Check if the status is open. Only then append it to the list
                    if case_found['status'] == "Open":
                        open_lexsi_cases[alert_found['sourceRef']] = case_found
                        thehiveAlerts.append(alert_found['sourceRef'])
                except Exception as e:
                    self.logger.error("Could not find case: {}".format(e),
                                      exc_info=True)
                    continue
        self.logger.debug(
            "Lexsi Alerts opened in theHive: {}".format(thehiveAlerts))
        return thehiveAlerts, open_lexsi_cases

    def compare_lists(self, list1, list2):
        """Return the elements of list1 that are not in list2."""
        return list(set(list1) - set(list2))

    def set_alert_status_ignored(self, incidentsList, thehiveAlerts,
                                 open_lexsi_cases):
        """Close open TheHive cases whose incident is absent from the
        current list of open Lexsi incidents."""
        lexsi_reporting = []

        for incident in incidentsList:
            lexsi_reporting.append(incident['incident'])

        self.logger.debug(
            "the list of opened Lexsi Incidents: {}".format(lexsi_reporting))
        uncommon_elements = self.compare_lists(thehiveAlerts, lexsi_reporting)
        self.logger.debug(
            "Open cases present in TheHive but not in list of opened Lexsi Incidents: {}"
            .format((uncommon_elements)))

        for element in uncommon_elements:
            self.logger.info(
                "Preparing to close the case for {}".format(element))
            query = dict()
            query['sourceRef'] = str(element)
            self.logger.debug('Looking for incident %s in TheHive alerts',
                              str(element))
            try:
                if element in open_lexsi_cases:
                    # Resolve the case
                    case_id = open_lexsi_cases[element]['id']
                    self.logger.debug("Case id for element {}: {}".format(
                        element, case_id))
                    self.logger.debug("Preparing to resolve the case")
                    self.TheHiveConnector.closeCase(case_id)
                    self.logger.debug("Closed case with id {} for {}".format(
                        case_id, element))

            except Exception as e:
                self.logger.error("Could not close case: {}".format(e),
                                  exc_info=True)
                continue
Esempio n. 24
0
class Automators(Main):
    """QRadar automation actions driven by TheHive webhooks.

    Renders Jinja2 AQL query templates with values parsed from the
    alert/case description, runs them against QRadar, and feeds the results
    back into TheHive (case tasks, observables, description enrichment).
    """

    def __init__(self, cfg, use_case_config):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating QRadar Automators')

        self.cfg = cfg
        self.use_case_config = use_case_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.TheHiveAutomators = TheHiveAutomators(cfg, use_case_config)
        self.QRadarConnector = QRadarConnector(cfg)

    def search(self, action_config, webhook):
        """Run configured AQL searches for an imported alert's case.

        For each query in action_config: render its Jinja2 template using
        values looked up in the case description table, execute it in
        QRadar and, per query config, create a TheHive task with the result
        table and/or add result values as observables.

        Returns True once all queries were processed (failures of
        individual queries are logged and skipped), or False when the
        webhook is not an imported alert.
        """
        # Only continue if the right webhook is triggered
        self.logger.debug("action_config:{}".format(action_config))
        if webhook.isImportedAlert():
            pass
        else:
            return False

        # Define variables and actions based on certain webhook types
        self.case_id = webhook.data['object']['case']

        self.logger.debug(self.case_id)

        self.enriched = False
        for query_name, query_config in action_config.items():
            try:
                self.logger.debug('Found the following query: {}'.format(
                    query_config['query']))
                self.query_variables = {}
                self.query_variables['input'] = {}

                # Render query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(
                        query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(
                        self.template_parsed)
                    self.logger.debug(
                        "Found the following variables in query: {}".format(
                            self.template_vars))

                    for template_var in self.template_vars:

                        # Skip dynamically generated Stop_time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug(
                            "Looking up variable required for template: {}".
                            format(template_var))
                        # Replace the underscore from the variable name to a white space as this is used in the description table
                        self.template_var_with_ws = template_var.replace(
                            "_", " ")
                        self.case_data = self.TheHiveConnector.getCase(
                            self.case_id)
                        self.logger.debug('output for get_case: {}'.format(
                            self.case_data))

                        self.query_variables['input'][
                            template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                                self.case_data['description'],
                                self.template_var_with_ws)

                        if 'Start_Time' not in self.query_variables['input']:
                            self.logger.warning(
                                "Could not find Start Time value required to build the search"
                            )

                        # Parse times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug(
                                "Found Start Time: %s" %
                                self.query_variables['input']['Start_Time'])
                            if 'start_time_offset' in query_config:
                                self.query_variables['input'][
                                    'Start_Time'] = self.parseTimeOffset(
                                        self.query_variables['input']
                                        ['Start_Time'],
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'),
                                        query_config['start_time_offset'],
                                        self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input'][
                                    'Start_Time'] = self.query_variables[
                                        'input']['Start_Time']

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input'][
                                    'Stop_Time'] = self.parseTimeOffset(
                                        self.query_variables['input']
                                        ['Start_Time'],
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'),
                                        query_config['stop_time_offset'],
                                        self.cfg.get('QRadar', 'time_format'))
                            else:
                                # No offset configured: default Stop_Time to "now"
                                self.query_variables['input'][
                                    'Stop_Time'] = datetime.now().strftime(
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'))

                    self.rendered_query = self.template.render(
                        self.query_variables['input'])
                    self.logger.debug("Rendered the following query: %s" %
                                      self.rendered_query)
                except Exception as e:
                    self.logger.warning(
                        "Could not render query due to missing variables",
                        exc_info=True)
                    continue

                # Perform search queries
                try:
                    self.rendered_query_result = self.QRadarConnector.aqlSearch(
                        self.rendered_query)
                    # Check results
                    self.logger.debug(
                        'The search result returned the following information: \n %s'
                        % self.rendered_query_result)
                except Exception as e:
                    self.logger.warning("Could not perform query",
                                        exc_info=True)
                    continue

                # Optionally render the results into a markdown table task
                try:
                    if query_config['create_thehive_task']:
                        self.logger.debug("create task is enabled")
                        # Task name
                        self.uc_task_title = query_config['thehive_task_title']
                        self.uc_task_description = "The following information is found. Investigate the results and act accordingly:\n\n\n\n"

                        # create a table header
                        self.table_header = "|"
                        self.rows = "|"
                        if len(self.rendered_query_result['events']) != 0:
                            for key in self.rendered_query_result['events'][
                                    0].keys():
                                self.table_header = self.table_header + " %s |" % key
                                self.rows = self.rows + "---|"
                            self.table_header = self.table_header + "\n" + self.rows + "\n"
                            self.uc_task_description = self.uc_task_description + self.table_header

                            # Create the data table for the results
                            for event in self.rendered_query_result['events']:
                                self.table_data_row = "|"
                                for field_key, field_value in event.items():
                                    # Escape pipe signs
                                    if field_value:
                                        field_value = field_value.replace(
                                            "|", "&#124;")
                                    # Use &nbsp; to create some additional spacing
                                    self.table_data_row = self.table_data_row + " %s &nbsp;|" % field_value
                                self.table_data_row = self.table_data_row + "\n"
                                self.uc_task_description = self.uc_task_description + self.table_data_row
                        else:
                            self.uc_task_description = self.uc_task_description + "No results \n"

                        # Add the case task
                        self.uc_task = self.TheHiveAutomators.craftUcTask(
                            self.uc_task_title, self.uc_task_description)
                        self.TheHiveConnector.createTask(
                            self.case_id, self.uc_task)
                except Exception as e:
                    # Task creation is best-effort; log and move on
                    self.logger.debug(e)
                    pass
                # Optionally add result values as observables
                try:
                    if query_config['create_ioc']:
                        self.logger.debug("create IOC is enabled")
                        self.comment = "offense enrichment"
                        # static tags list
                        self.tags = ['synapse']
                        # want to add SECID of the rule as well in the tag
                        rule_secid = [
                            x for x in webhook.data['object']['tags']
                            if x.startswith('SEC')
                        ]
                        self.tags.extend(rule_secid)

                        self.uc_ioc_type = query_config['ioc_type']
                        if len(self.rendered_query_result['events']) != 0:
                            for event in self.rendered_query_result['events']:
                                for field_key, field_value in event.items():
                                    self.TheHiveConnector.addObservable(
                                        self.case_id, self.uc_ioc_type,
                                        list(field_value.split(",")),
                                        self.tags, self.comment)
                except Exception as e:
                    # Observable creation is best-effort; log and move on
                    self.logger.debug(e)
                    pass

            except Exception as e:
                self.logger.debug(
                    'Could not process the following query: {}\n{}'.format(
                        query_config, e))
                continue

        # Return True when succesful
        return True

    def enrichAlert(self, action_config, webhook):
        """Enrich a new alert's description with QRadar query results.

        Renders and runs each configured query, then writes the (cleaned)
        enrichment result back into the alert description. Returns True
        when all queries were attempted, False when the webhook is not a
        new alert.
        """
        # Only continue if the right webhook is triggered
        if webhook.isNewAlert():
            pass
        else:
            return False

        # Define variables and actions based on certain webhook types
        # Alerts
        self.alert_id = webhook.data['object']['id']
        self.alert_description = webhook.data['object']['description']

        self.query_variables = {}
        self.query_variables['input'] = {}
        self.enriched = False
        # Prepare search queries for searches
        for query_name, query_config in action_config.items():
            try:
                self.logger.info('Found the following query: %s' %
                                 (query_name))
                self.query_variables[query_name] = {}

                # Render query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(
                        query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(
                        self.template_parsed)
                    self.logger.debug(
                        "Found the following variables in query: {}".format(
                            self.template_vars))

                    for template_var in self.template_vars:

                        # Skip dynamically generated Stop_time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug(
                            "Looking up variable required for template: {}".
                            format(template_var))
                        # Replace the underscore from the variable name to a white space as this is used in the description table
                        self.template_var_with_ws = template_var.replace(
                            "_", " ")
                        self.alert_data = self.TheHiveConnector.getAlert(
                            self.alert_id)
                        self.logger.debug('output for get_alert: {}'.format(
                            self.alert_data))

                        self.query_variables['input'][
                            template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                                self.alert_data['description'],
                                self.template_var_with_ws)

                        # Parse times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug(
                                "Found Start Time: %s" %
                                self.query_variables['input']['Start_Time'])
                            if 'start_time_offset' in query_config:
                                self.query_variables['input'][
                                    'Start_Time'] = self.parseTimeOffset(
                                        self.query_variables['input']
                                        ['Start_Time'],
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'),
                                        query_config['start_time_offset'],
                                        self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input'][
                                    'Start_Time'] = self.query_variables[
                                        'input']['Start_Time']

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input'][
                                    'Stop_Time'] = self.parseTimeOffset(
                                        self.query_variables['input']
                                        ['Start_Time'],
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'),
                                        query_config['stop_time_offset'],
                                        self.cfg.get('QRadar', 'time_format'))
                            else:
                                # No offset configured: default Stop_Time to "now"
                                self.query_variables['input'][
                                    'Stop_Time'] = datetime.now().strftime(
                                        self.cfg.get(
                                            'Automation',
                                            'event_start_time_format'))

                    if not self.query_variables['input']['Start_Time']:
                        self.logger.warning("Could not find Start Time value ")
                        raise GetOutOfLoop

                    self.query_variables[query_name][
                        'query'] = self.template.render(
                            self.query_variables['input'])
                    self.logger.debug(
                        "Rendered the following query: %s" %
                        self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning(
                        "Could not render query due to missing variables",
                        exc_info=True)
                    raise GetOutOfLoop

                # Perform search queries
                try:
                    self.query_variables[query_name][
                        'result'] = self.QRadarConnector.aqlSearch(
                            self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not perform query",
                                        exc_info=True)
                    raise GetOutOfLoop

                # Check results
                self.logger.debug(
                    'The search result returned the following information: \n %s'
                    % self.query_variables[query_name]['result'])

                # making enrichment results presentable
                clean_enrichment_results = self.TheHiveAutomators.make_it_presentable(
                    self.query_variables[query_name]['result']['events'][0]
                    ['enrichment_result'])

                # Add results to description
                # BUGFIX: pass the cleaned-up results (previously computed but
                # never used) instead of the raw enrichment output.
                success = self.enrichAlertDescription(
                    self.alert_data['description'], query_name,
                    clean_enrichment_results)
                if not success:
                    # BUGFIX: the original message referenced an undefined
                    # name 'e' here, which would raise NameError.
                    self.logger.warning(
                        "Could not add results from the query to the description"
                    )
                    raise GetOutOfLoop

            except GetOutOfLoop:
                pass
        return True
Esempio n. 25
0
class Automators(Main):
    """Automation runner that executes QRadar AQL queries for TheHive.

    Depending on the triggering webhook it either:
      * runs ``search_queries`` and attaches the results as a case task, or
      * runs ``enrichment_queries`` and folds the results into the alert
        description table.
    """

    def __init__(self, cfg, use_case_config):
        """Wire up connectors.

        :param cfg: parsed application configuration (ConfigParser-like)
        :param use_case_config: per-use-case automation configuration
        """
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating QRadar Automators')

        self.cfg = cfg
        self.use_case_config = use_case_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.TheHiveAutomators = TheHiveAutomators(cfg, use_case_config)
        self.QRadarConnector = QRadarConnector(cfg)

    def checkSiem(self, action_config, webhook):
        """Render and run the configured AQL queries for a webhook event.

        :param action_config: dict holding 'search_queries' and/or
            'enrichment_queries' definitions (jinja2 templates + metadata)
        :param webhook: parsed TheHive webhook wrapper
        :return: True when the webhook was handled, False when it is not a
            supported webhook type. Individual query failures are logged and
            skipped (GetOutOfLoop), they do not abort the whole run.
        """
        # Only continue if the right webhook is triggered
        if webhook.isImportedAlert() or webhook.isNewAlert() or webhook.isQRadarAlertUpdateFollowTrue():
            pass
        else:
            return False

        # Define variables and actions based on certain webhook types
        # Alerts -> enrichment queries (results go into the description)
        if webhook.isNewAlert() or webhook.isQRadarAlertUpdateFollowTrue():
            self.alert_id = webhook.data['object']['id']
            self.alert_description = webhook.data['object']['description']
            self.supported_query_type = 'enrichment_queries'

        # Cases -> search queries (results go into a case task)
        elif webhook.isImportedAlert():
            self.case_id = webhook.data['object']['case']
            self.supported_query_type = 'search_queries'

        self.query_variables = {}
        self.query_variables['input'] = {}
        self.enriched = False
        # Prepare search queries for searches
        for query_name, query_config in action_config[self.supported_query_type].items():
            try:
                self.logger.info('Found the following query: %s' % (query_name))
                self.query_variables[query_name] = {}

                # Render query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(self.template_parsed)
                    self.logger.debug("Found the following variables in query: {}".format(self.template_vars))

                    for template_var in self.template_vars:

                        # Skip dynamically generated Stop_time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug("Looking up variable required for template: {}".format(template_var))
                        # Replace the underscore from the variable name to a white space as this is used in the description table
                        self.template_var_with_ws = template_var.replace("_", " ")
                        self.query_variables['input'][template_var] = self.TheHiveAutomators.fetchValueFromDescription(webhook, self.template_var_with_ws)

                        # Parse times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug("Found Start Time: %s" % self.query_variables['input']['Start_Time'])
                            if 'start_time_offset' in query_config:
                                self.query_variables['input']['Start_Time'] = self.parseTimeOffset(self.query_variables['input']['Start_Time'], self.cfg.get('Automation', 'event_start_time_format'), query_config['start_time_offset'], self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input']['Start_Time'] = self.query_variables['input']['Start_Time']

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input']['Stop_Time'] = self.parseTimeOffset(self.query_variables['input']['Start_Time'], self.cfg.get('Automation', 'event_start_time_format'), query_config['stop_time_offset'], self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input']['Stop_Time'] = datetime.now().strftime(self.cfg.get('Automation', 'event_start_time_format'))

                    # Use .get() so a template without a Start_Time variable hits
                    # this dedicated warning instead of raising a KeyError that
                    # would be mislabelled as a rendering failure below.
                    if not self.query_variables['input'].get('Start_Time'):
                        self.logger.warning("Could not find Start Time value ")
                        raise GetOutOfLoop

                    self.query_variables[query_name]['query'] = self.template.render(self.query_variables['input'])
                    self.logger.debug("Rendered the following query: %s" % self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not render query due to missing variables", exc_info=True)
                    raise GetOutOfLoop

                # Perform search queries
                try:
                    self.query_variables[query_name]['result'] = self.QRadarConnector.aqlSearch(self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not perform query", exc_info=True)
                    raise GetOutOfLoop

                # Check results
                self.logger.debug('The search result returned the following information: \n %s' % self.query_variables[query_name]['result'])

                if self.supported_query_type == "search_queries":
                    # Task name
                    self.uc_task_title = query_config['task_title']

                    self.uc_task_description = "The following information is found. Investigate the results and act accordingly:\n\n\n\n"

                    # create a table header
                    self.table_header = "|"
                    self.rows = "|"
                    if len(self.query_variables[query_name]['result']['events']) != 0:
                        for key in self.query_variables[query_name]['result']['events'][0].keys():
                            self.table_header = self.table_header + " %s |" % key
                            self.rows = self.rows + "---|"
                        self.table_header = self.table_header + "\n" + self.rows + "\n"
                        self.uc_task_description = self.uc_task_description + self.table_header

                        # Create the data table for the results
                        for event in self.query_variables[query_name]['result']['events']:
                            self.table_data_row = "|"
                            for field_key, field_value in event.items():
                                # Escape pipe signs
                                if field_value:
                                    field_value = field_value.replace("|", "&#124;")
                                # Use &nbsp; to create some additional spacing
                                self.table_data_row = self.table_data_row + " %s &nbsp;|" % field_value
                            self.table_data_row = self.table_data_row + "\n"
                            self.uc_task_description = self.uc_task_description + self.table_data_row
                    else:
                        self.uc_task_description = self.uc_task_description + "No results \n"

                    # Add the case task
                    self.uc_task = self.TheHiveAutomators.craftUcTask(self.uc_task_title, self.uc_task_description)
                    self.TheHiveConnector.createTask(self.case_id, self.uc_task)

                if self.supported_query_type == "enrichment_queries":

                    # Add results to description
                    try:
                        if self.TheHiveAutomators.fetchValueFromDescription(webhook, query_name) != self.query_variables[query_name]['result']['events'][0]['enrichment_result']:
                            self.regex_end_of_table = ' \|\\n\\n\\n'
                            self.end_of_table = ' |\n\n\n'
                            self.replacement_description = '|\n | **%s**  | %s %s' % (query_name, self.query_variables[query_name]['result']['events'][0]['enrichment_result'], self.end_of_table)
                            self.th_alert_description = self.TheHiveConnector.getAlert(self.alert_id)['description']
                            self.alert_description = re.sub(self.regex_end_of_table, self.replacement_description, self.th_alert_description)
                            self.enriched = True
                            # Update Alert with the new description field.
                            # BUGFIX: instantiate Alert() instead of binding the
                            # class itself, which mutated a class attribute
                            # shared by every Alert in the process.
                            self.updated_alert = Alert()
                            self.updated_alert.description = self.alert_description
                            self.TheHiveConnector.updateAlert(self.alert_id, self.updated_alert, ["description"])
                    except Exception as e:
                        self.logger.warning("Could not add results from the query to the description. Error: {}".format(e))
                        raise GetOutOfLoop

            except GetOutOfLoop:
                # A single query failed or was skipped; continue with the next one.
                pass
        return True
Esempio n. 26
0
class Integration(Main):
    """MessageLabs phishing-report ingestion: turns unread notification
    e-mails into TheHive alerts with a parsed markdown description."""

    def __init__(self):
        super().__init__()
        self.mlabsConnector = MLabsConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        """Flask-style entry point: run the sync and map it to an HTTP tuple.

        :param request: incoming request object (unused, kept for interface)
        :return: (json_body, status_code) — 200 on success, 500 on failure
        """
        workflowReport = self.connectMLabs()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectMLabs(self):
        """Scan the MessageLabs mailbox and create one alert per new report.

        Uses a tracker file to resume from the last processed link and the
        MD5 of the reported Message ID as the alert sourceRef to deduplicate.

        :return: dict with a boolean 'success' key
        """
        self.logger.info('%s.connectMLabs starts', __name__)

        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['MessageLabs', 'Synapse']

        try:
            tracker_file = "./modules/MessageLabs/phishing_tracker"
            link_to_load = ""
            if os.path.exists(tracker_file):
                self.logger.debug(
                    "MessageLabs: phishing Reading from the tracker file...")
                with open(tracker_file, "r") as tracker:
                    link_to_load = tracker.read()

            # First run (or empty tracker): start from the configured endpoint
            if not link_to_load:
                link_to_load = self.cfg.get('MessageLabs', 'list_endpoint')

            unread, new_link = self.mlabsConnector.scan(link_to_load)

            for msg in unread:
                self.logger.debug("Found unread E-mail with id: {}".format(
                    msg['id']))
                # Skip deleted messages and anything that is not a report
                if ('@removed' in msg) or msg['subject'] != self.cfg.get(
                        'MessageLabs', 'subject_contains'):
                    continue

                fullBody = msg['body']['content']
                subject = ""
                MIDHash = ""

                email_date = datetime.strptime(msg["receivedDateTime"],
                                               "%Y-%m-%dT%H:%M:%SZ")
                # TheHive expects epoch milliseconds
                epoch_email_date = email_date.timestamp() * 1000

                for line in fullBody.splitlines():
                    if line.startswith("Subject"):
                        subject = line
                    if line.startswith("Message ID:"):
                        # Hash of the original Message ID doubles as sourceRef
                        MIDHash = hashlib.md5(
                            line.split(" ID: ")[-1].encode()).hexdigest()

                caseTitle = str(
                    self.cfg.get('MessageLabs', 'subject_contains') + " - " +
                    str(subject))
                caseDescription = self.createFullBody(fullBody)

                alert = self.TheHiveConnector.craftAlert(
                    caseTitle, caseDescription, 1, epoch_email_date, self.tags,
                    2, "New", "internal", "MessageLabs", MIDHash, [],
                    self.cfg.get('MessageLabs', 'case_template'))

                # Only create the alert when no alert with this sourceRef exists
                query = dict()
                query['sourceRef'] = str(MIDHash)
                results = self.TheHiveConnector.findAlert(query)

                if len(results) == 0:
                    self.TheHiveConnector.createAlert(alert)

            # Persist the continuation link for the next run
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createFullBody(self, fullbody):
        """Parse a MessageLabs notification body into a markdown table.

        Falls back to the unparsed body (fenced as code) when the expected
        field layout cannot be matched or parsing raises.

        :param fullbody: raw text content of the notification e-mail
        :return: markdown string for the alert description
        """
        try:
            r = re.findall(
                r".*Policy name:\s([^\n\r]*)[\r\n]+.*Subject:\s([^\n\r]*)[\r\n]+.*Sender:\s([^\n\r]*)[\r\n]+Message ID: <([^\n\r]*)>[\r\n]+Sending server IP:\s([\d\.]*)[\r\n]+Date:\s([^\n\r]*)[\r\n]+Recipient:\s(.*)Attachments:\s(.*)Matched Content:\s(.*)Message body:\s(.*)",
                fullbody, re.MULTILINE | re.DOTALL)
            fields = [
                'Policy name', 'Subject', 'Sender', 'Message ID', 'Server IP',
                'Date', 'Recipients', 'Attachments', 'Matched Content',
                'E-mail body'
            ]
            values = []
            temp_fullbody = []
            if len(r) > 0:
                for it in range(0, 10):
                    values.append(r[0][it])
                values[3] = "<" + values[3] + ">"  # modify Message ID
                values[6] = re.sub(
                    r'<[^<>]*>', '',
                    values[6].strip().replace("\r\n", " ").replace("\n", " ")
                )  # modify Recipients, so all of them will be in 1 table field
                values[7] = values[7].strip(
                )  # remove empty lines/new lines from attachments
                values[8] = values[8].strip(
                )  # remove empty lines/new lines from matched content

                # putting together the markdown table
                temp_fullbody.append("|     |     |")
                temp_fullbody.append("|-----|-----|")
                for it in range(0, 9):
                    temp_fullbody.append("|  " + fields[it] + "  |  " +
                                         values[it] + "  |")
                temp_fullbody.append("**" + fields[9] + "**")
                temp_fullbody.append("```")
                temp_fullbody.append(values[9])
                temp_fullbody.append("```")
            else:
                # if the email can't be parsed with the regex above, then we provide it to SOC in an unparsed way
                temp_fullbody.append("```")
                temp_fullbody.append("**Unparsed E-mail**")
                temp_fullbody.append(str(fullbody))
                temp_fullbody.append("```")

            return '\r\n'.join(str(x) for x in temp_fullbody)

        except Exception as e:
            self.logger.error('Parsing error: ' + str(e), exc_info=True)
            # BUGFIX: previously returned None on a parsing exception, which
            # became the alert description; return the raw body instead.
            return str(fullbody)
Esempio n. 27
0
 def __init__(self, webhook, cfg):
     """Initialize the ELK automation handler.

     :param webhook: parsed TheHive webhook wrapper for this event
     :param cfg: parsed application configuration
     """
     logger.info('Initiating ELKAutomation')
     self.TheHiveConnector = TheHiveConnector(cfg)
     self.webhook = webhook
     # Store the configuration for later use, consistent with the other
     # Automation classes (was previously accepted but never kept).
     self.cfg = cfg
     self.report_action = report_action
Esempio n. 28
0
class Integration(Main):
    """MessageLabs phishing-report ingestion (raw-body variant): turns
    unread notification e-mails into TheHive alerts, using the original
    Message ID string as the alert sourceRef."""

    def __init__(self):
        super().__init__()
        self.mlabsConnector = MLabsConnector(self.cfg)
        self.theHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        """Flask-style entry point: run the sync and map it to an HTTP tuple.

        :param request: incoming request object (unused, kept for interface)
        :return: (json_body, status_code) — 200 on success, 500 on failure
        """
        workflowReport = self.connectMLabs()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectMLabs(self):
        """Scan the MessageLabs mailbox and create one alert per new report.

        Resumes from the link stored in the tracker file. Duplicate alerts
        are handled by catching the ValueError raised on an existing
        sourceRef.

        :return: dict with a boolean 'success' key
        """
        self.logger.info('%s.connectMLabs starts', __name__)

        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['MessageLabs', 'Synapse']

        try:
            tracker_file = "./modules/MessageLabs/phishing_tracker"
            link_to_load = ""
            if os.path.exists(tracker_file):
                self.logger.debug(
                    "MessageLabs: phishing Reading from the tracker file...")
                with open(tracker_file, "r") as tracker:
                    link_to_load = tracker.read()

            # First run (or empty tracker): start from the configured endpoint
            if not link_to_load:
                link_to_load = self.cfg.get('MessageLabs', 'list_endpoint')

            unread, new_link = self.mlabsConnector.scan(link_to_load)

            for msg in unread:
                if msg['subject'] != self.cfg.get('MessageLabs',
                                                  'subject_contains'):
                    continue

                fullBody = msg['body']['content']
                subject = ""
                internalMessageId = ""
                # BUGFIX: default the alert date to "now" so a body without a
                # "Date:" line no longer raises NameError at craftAlert below.
                epoch_email_date = datetime.now().timestamp() * 1000

                for line in fullBody.splitlines():
                    if line.startswith("Subject"):
                        subject = line
                    if line.startswith("Message ID:"):
                        internalMessageId = line.split(" ID: ")[-1]
                    if line.startswith("Date:"):
                        email_date = datetime.strptime(
                            line.split("Date: ")[-1],
                            "%a, %d %b %Y %H:%M:%S %z")
                        # TheHive expects epoch milliseconds
                        epoch_email_date = email_date.timestamp() * 1000

                caseTitle = str(
                    self.cfg.get('MessageLabs', 'subject_contains') + " - " +
                    str(subject))
                caseDescription = fullBody

                alert = self.theHiveConnector.craftAlert(
                    caseTitle, caseDescription, 1, epoch_email_date, self.tags,
                    2, "New", "internal", "MessageLabs", internalMessageId, [],
                    self.cfg.get('MessageLabs', 'case_template'))
                try:
                    createdCase = self.theHiveConnector.createAlert(alert)
                except ValueError as e:
                    # Alert with this sourceRef already exists — skip quietly
                    self.logger.info(
                        "Alert with sourceRef '{}' already exists".format(
                            internalMessageId))

            # Persist the continuation link for the next run
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report
Esempio n. 29
0
class Automation():
    """Keeps Azure Sentinel incidents in sync with TheHive alerts/cases:
    activates incidents on import and closes them when the corresponding
    alert is marked as read or the case is closed/deleted/merged."""

    def __init__(self, webhook, cfg):
        """
        :param webhook: parsed TheHive webhook wrapper for this event
        :param cfg: parsed application configuration
        """
        logger.info('Initiating AzureSentinel Automation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.AzureSentinelConnector = AzureSentinelConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        self.report_action = report_action
        # Maps TheHive resolutionStatus values to Sentinel classifications
        self.closure_status = {
            "Indeterminate": "Undetermined",
            "FalsePositive": "FalsePositive",
            "TruePositive": "TruePositive",
            "Other": "BenignPositive"
        }

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        """Check whether a Sentinel incident already belongs to a closed
        case or to an alert marked as read in TheHive.

        Follows "Duplicated" merge chains to the final merged case.

        :param sourceref: Sentinel incident id used as alert sourceRef
        :return: dict with 'resolutionStatus' and 'summary' when closure
            info is found, False otherwise
        """
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug(
            'Checking if third party ticket({}) is linked to a closed case'.
            format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(
                    f"{sourceref} is found in alert {alert_found['id']} that has been marked as read"
                )
                return {
                    "resolutionStatus":
                    "Indeterminate",
                    "summary":
                    "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if alert is present in closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    if 'resolutionStatus' in case_found and case_found[
                            'resolutionStatus'] == "Duplicated":
                        # Follow the merge chain to the case it ended up in
                        merged_case_found = self.getFinalMergedCase(case_found)
                        logger.debug(f"found merged cases {merged_case_found}")
                        if merged_case_found:
                            if merged_case_found['status'] != "Resolved":
                                return False
                            else:
                                case_found = merged_case_found
                    logger.info(
                        f"{sourceref} was found in a closed case {case_found['id']}"
                    )
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return information required to sync with third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        """Dispatch on the webhook type and sync Sentinel accordingly.

        :return: the report action string ('updateIncident'/'closeIncident'
            or the initial default)
        """
        # Update incident status to active when imported as Alert
        if self.webhook.isAzureSentinelAlertImported():
            self.incidentId = self.webhook.data['object']['sourceRef']

            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                self.incidentId)
            if closure_info:
                logger.info(
                    'Sentinel incident({}) is linked to a closed case'.format(
                        self.incidentId))
                # Translation table for case statusses

                classification = self.closure_status[
                    closure_info['resolutionStatus']]
                classification_comment = "Closed by Synapse with summary: {}".format(
                    closure_info['summary'])
                # Close incident and continue with the next incident
                self.AzureSentinelConnector.closeIncident(
                    self.incidentId, classification, classification_comment)

            else:
                logger.info(
                    'Incident {} needs to be updated to status Active'.format(
                        self.incidentId))
                self.AzureSentinelConnector.updateIncidentStatusToActive(
                    self.incidentId)
                self.report_action = 'updateIncident'

        # Close incidents in Azure Sentinel
        if self.webhook.isClosedAzureSentinelCase(
        ) or self.webhook.isDeletedAzureSentinelCase(
        ) or self.webhook.isAzureSentinelAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Deleted within The Hive"
                logger.info('Case {} has been deleted'.format(self.case_id))

            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                self.incidentId = self.webhook.data['object']['sourceRef']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Marked as Read within The Hive"
                logger.info('Alert {} has been marked as read'.format(
                    self.webhook.data['object']['sourceRef']))
                self.AzureSentinelConnector.closeIncident(
                    self.incidentId, self.classification,
                    self.classification_comment)

            # Ensure duplicated incidents don't get closed when merged, but only when merged case is closed
            elif 'resolutionStatus' in self.webhook.data[
                    'details'] and self.webhook.data['details'][
                        'resolutionStatus'] != "Duplicated":
                self.case_id = self.webhook.data['object']['id']
                self.classification = self.closure_status[
                    self.webhook.data['details']['resolutionStatus']]
                self.classification_comment = "Closed by Synapse with summary: {}".format(
                    self.webhook.data['details']['summary'])
                logger.info('Case {} has been marked as resolved'.format(
                    self.case_id))

                if 'mergeFrom' in self.webhook.data['object']:
                    logger.info(
                        f'Case {self.case_id} is a merged case. Finding original cases'
                    )
                    original_cases = []
                    for merged_case in self.webhook.data['object'][
                            'mergeFrom']:
                        original_cases.extend(
                            self.getOriginalCases(merged_case))
                    # Find alerts for each original case
                    for original_case in original_cases:
                        query = {'case': original_case['id']}
                        found_alerts = self.TheHiveConnector.findAlert(query)
                        # Close alerts that have been found
                        for found_alert in found_alerts:
                            logger.info(
                                "Closing incident {} for case {}".format(
                                    found_alert['sourceRef'], self.case_id))
                            self.AzureSentinelConnector.closeIncident(
                                found_alert['sourceRef'], self.classification,
                                self.classification_comment)

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing incident {} for case {}".format(
                        self.webhook.ext_alert_id, self.case_id))
                    self.AzureSentinelConnector.closeIncident(
                        self.webhook.ext_alert_id, self.classification,
                        self.classification_comment)

                elif len(self.webhook.ext_alert_ids) > 0:
                    # Close incident for every linked incident
                    logger.info(
                        "Found multiple incidents {} for case {}".format(
                            self.webhook.ext_alert_ids, self.case_id))
                    for incident_id in self.webhook.ext_alert_ids:
                        logger.info("Closing incident {} for case {}".format(
                            incident_id, self.case_id))
                        self.AzureSentinelConnector.closeIncident(
                            incident_id, self.classification,
                            self.classification_comment)

            self.report_action = 'closeIncident'

        return self.report_action

    def getOriginalCases(self, merged_from_case_id, handled_cases=None):
        """Recursively resolve a merged case back to its original cases.

        :param merged_from_case_id: id of a case listed in a 'mergeFrom'
        :param handled_cases: case ids already visited (cycle guard)
        :return: list of original (non-merged) case dicts
        """
        # BUGFIX: avoid the shared mutable default argument, which leaked
        # visited ids across independent calls.
        if handled_cases is None:
            handled_cases = []
        cases_found = []
        case_found = self.TheHiveConnector.getCase(merged_from_case_id)
        if 'mergeFrom' in case_found:
            if merged_from_case_id not in handled_cases:
                handled_cases.append(merged_from_case_id)
                # BUGFIX: recurse on the fetched case's own mergeFrom list
                # (previously re-iterated the webhook's list every level).
                for merged_case in case_found['mergeFrom']:
                    cases_found.extend(
                        self.getOriginalCases(merged_case, handled_cases))
        else:
            cases_found.append(case_found)
        # BUGFIX: always return the list; the mergeFrom branch previously
        # returned None, crashing the caller's extend().
        return cases_found

    def getFinalMergedCase(self, duplicated_case, handled_cases=None):
        """Follow a 'Duplicated' merge chain to the final merged case.

        :param duplicated_case: case dict that may carry 'mergeInto'
        :param handled_cases: case ids already visited (cycle guard)
        :return: the case dict at the end of the merge chain
        """
        # BUGFIX: avoid the shared mutable default argument.
        if handled_cases is None:
            handled_cases = []
        if 'mergeInto' in duplicated_case:
            merged_into = duplicated_case['mergeInto']
            case_found = self.TheHiveConnector.getCase(merged_into)
            if 'resolutionStatus' in case_found:
                if case_found[
                        'resolutionStatus'] == "Duplicated" and merged_into not in handled_cases:
                    handled_cases.append(merged_into)
                    case_found = self.getFinalMergedCase(
                        case_found, handled_cases)
        else:
            case_found = duplicated_case
        return case_found
Esempio n. 30
0
class Automation():
    def __init__(self, webhook, cfg):
        """
        Wire up the MISP automation: TheHive connector, optional Cortex
        connector, and a flattened copy of the [QRadar] config section.

        Args:
            webhook: parsed TheHive webhook wrapper object.
            cfg: ConfigParser-style configuration object.
        """
        logger.info('Initiating MISPautomation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        # Store the config like the other Automation classes in this
        # module do. The original read self.cfg.getboolean(...) before
        # self.cfg was ever assigned, which raised AttributeError on
        # every instantiation.
        self.cfg = cfg
        if cfg.getboolean('Cortex', 'enabled'):
            self.CortexConnector = CortexConnector(cfg)
        self.webhook = webhook
        self.report_action = report_action
        # Flatten the [QRadar] section into a plain dict; configparser
        # items() yields string values.
        self.qr_config = {}
        for key, value in cfg.items('QRadar'):
            self.qr_config[key] = value

    def parse_hooks(self):
        """
        Dispatch MISP-related webhooks through four sequential scenarios:

        1. New MISP-tagged alert: promote it to a case (template
           "MISP Event") when it contains at least one supported IOC type.
        2. New MISP-tagged case: stamp firstSearched/lastSearched custom
           fields with the current time.
        3. New MISP-extern artifact on a case: launch the QRadar search
           analyzer via Cortex and refresh lastSearched.
        4. Finished successful Cortex job on a MISP case with >= 1 hit:
           create an investigation task (reopening the case if resolved).

        Returns:
            self.report_action — the last action taken ('createCase',
            'updateCase', 'createTask', or its initial value when no
            scenario matched).
        """

        if self.webhook.isNewMispAlert():
            logger.info(
                'Alert {} has been tagged as MISP and is just created'.format(
                    self.webhook.data['rootId']))

            #Check alert for supported ioc types
            # NOTE(review): supported_datatypes is read straight from the
            # config file, so this is a substring match against a string,
            # not membership in a list — confirm that is intended.
            supported_iocs = False
            for artifact in self.webhook.data['object']['artifacts']:
                if artifact['dataType'] in self.qr_config[
                        'supported_datatypes']:
                    supported_iocs = True

            #Promote alert to case if there are support ioc types
            if supported_iocs:
                alert_id = self.webhook.data['rootId']
                casetemplate = "MISP Event"

                logger.info('Alert {} contains IOCs that are supported'.format(
                    alert_id))

                # response is not used afterwards; kept for debugging.
                response = self.TheHiveConnector.createCaseFromAlert(
                    alert_id, casetemplate)

                self.report_action = 'createCase'
        """
        Add timestamps to keep track of the search activity per case (we do not want to keep searching forever)
        """
        #Perform automated Analyzer runs for supported observables in a case that has been created from a MISP alert
        if self.webhook.isNewMispCase():
            logger.info(
                'Case {} has been tagged as MISP and is just created'.format(
                    self.webhook.data['rootId']))

            #Retrieve caseid
            caseid = self.webhook.data['object']['id']

            #Add customFields firstSearched and lastSearched
            #Create a Case object? Or whatever it is
            case = Case()

            #Add the case id to the object
            case.id = caseid

            #Debug output
            logger.info('Updating case %s' % case.id)

            #Define which fields need to get updated
            fields = ['customFields']

            #Retrieve all required attributes from the alert and add them as custom fields to the case
            # Both stamps start at "now" (epoch milliseconds, as TheHive
            # expects for date custom fields).
            current_time = int(round(time.time() * 1000))
            customFields = CustomFieldHelper()\
                .add_date('firstSearched', current_time)\
                .add_date('lastSearched', current_time)\
                .build()

            #Add custom fields to the case object
            case.customFields = customFields

            #Update the case
            self.TheHiveConnector.updateCase(case, fields)
            self.report_action = 'updateCase'
        """
        Start the analyzers automatically for MISP observables that are supported and update the case with a new timestamp
        """
        #Automatically run Analyzers for newly created MISP cases where supported IOC's are present
        if self.webhook.isNewMispArtifact():
            logger.info(
                'Case artifact is tagged with "MISP-extern". Checking if observable is of a supported type'
            )

            #Retrieve caseid
            caseid = self.webhook.data['rootId']

            #Retrieve case data
            case_data = self.TheHiveConnector.getCase(caseid)

            #List all supported ioc's for the case
            observable = self.webhook.data['object']

            #When supported, start a cortex analyzer for it
            # NOTE(review): same substring-style check as above.
            if observable['dataType'] in self.qr_config['supported_datatypes']:
                supported_observable = observable['_id']

                #Trigger a search for the supported ioc
                # NOTE(review): self.CortexConnector only exists when
                # [Cortex] enabled is true in the config (see __init__) —
                # this branch assumes it is; otherwise AttributeError.
                logger.info('Launching analyzers for observable: {}'.format(
                    observable['_id']))
                response = self.CortexConnector.runAnalyzer(
                    "Cortex-intern", supported_observable,
                    "IBMQRadar_Search_Manual_0_1")

                #Add customFields firstSearched and lastSearched
                #Create a Case object
                case = Case()

                #Add the case id to the object
                case.id = caseid

                #Debug output
                logger.info('Updating case %s' % case.id)

                #Define which fields need to get updated
                fields = ['customFields']

                #Retrieve all required attributes from the alert and add them as custom fields to the case
                # Preserve the original firstSearched stamp and only move
                # lastSearched forward. Presumes the case already carries
                # the firstSearched custom field (set by the isNewMispCase
                # branch); a KeyError is raised otherwise — TODO confirm.
                current_time = int(round(time.time() * 1000))
                customFields = CustomFieldHelper()\
                    .add_date('firstSearched', case_data['customFields']['firstSearched']['date'])\
                    .add_date('lastSearched', current_time)\
                    .build()

                #Add custom fields to the case object
                case.customFields = customFields

                #Update the case
                self.TheHiveConnector.updateCase(case, fields)
                self.report_action = 'updateCase'
        """
        Automatically create a task for a found IOC
        """
        #If the Job result contains a successful search with minimum of 1 hit, create a task to investigate the results
        if self.webhook.isCaseArtifactJob() and self.webhook.isSuccess(
        ) and self.webhook.isMisp():
            #Case ID
            caseid = self.webhook.data['rootId']
            #Load Case information
            case_data = self.TheHiveConnector.getCase(caseid)

            logger.info(
                'Job {} is part of a case that has been tagged as MISP case and has just finished'
                .format(self.webhook.data['object']['cortexJobId']))

            #Check if the result count higher than 0
            # The analyzer reports its hit count as the first taxonomy
            # value; it arrives as a string, hence float() then int().
            if int(
                    float(self.webhook.data['object']['report']['summary']
                          ['taxonomies'][0]['value'])) > 0:
                logger.info(
                    'Job {} contains hits, checking if a task is already present for this observable'
                    .format(self.webhook.data['object']['cortexJobId']))
                #Retrieve case task information
                response = self.TheHiveConnector.getCaseTasks(caseid)
                case_tasks = response.json()

                #Load CaseTask template
                casetask = CaseTask()

                #Observable + Link
                # NOTE(review): TheHive here appears to be a module-level
                # config object providing the base URL — confirm.
                observable = self.webhook.data['object']['artifactId']
                observable_link = TheHive.get(
                    'url'
                ) + "/index.html#!/case/" + caseid + "/observables/" + self.webhook.data[
                    'object']['artifactId']

                #Task name
                # The title doubles as the dedup key (compared against
                # existing task titles below).
                casetask.title = "Investigate found IOC with id: {}".format(
                    observable)

                #Date
                date_found = time.strftime("%d-%m-%Y %H:%M")

                case_task_found = False
                for case_task in case_tasks:

                    #Check if task is present for investigating the new results
                    if casetask.title == case_task['title']:
                        case_task_found = True

                if not case_task_found:
                    logger.info(
                        'No task found, creating task for observable found in job {}'
                        .format(self.webhook.data['object']['cortexJobId']))
                    #Add description
                    casetask.description = "The following ioc is hit in the environment. Investigate the results and act accordingly:\n\n"
                    casetask.description = casetask.description + "{} is seen on {}\n".format(
                        observable_link, date_found)

                    #Check if case is closed
                    if case_data['status'] == "Resolved":
                        #Create a Case object? Or whatever it is
                        case = Case()

                        #Add the case id to the object
                        case.id = caseid

                        logger.info('Updating case %s' % case.id)

                        #Define which fields need to get updated
                        fields = ['status']

                        #Reopen the case
                        case.status = "Open"

                        #Update the case
                        self.TheHiveConnector.updateCase(case, fields)

                    #Add the case task
                    self.TheHiveConnector.createTask(caseid, casetask)
                    self.report_action = 'createTask'

        return self.report_action