Example no. 1
class Webhook:
    'Webhook class used to identify what a TheHive webhook describes and where it comes from (QRadar, Azure Sentinel, MISP, etc.).'

    def __init__(self, webhookData, cfg):
        """
            Class constructor

            :param cfg: Synapse's config
            :type cfg: ConfigParser

            :param webhookData: the json webhook from TheHive
            :type webhookData: dict

            :return: Object Webhook
            :rtype: Webhook
        """

        self.logger = logging.getLogger('workflows.' + __name__)
        # One liner to generate a sha1 hash from the data to use as an id. Requires json to create a byte array from the dict
        self.id = hashlib.sha1(
            json.dumps(webhookData).encode('utf-8')).hexdigest()
        self.data = webhookData
        self.theHiveConnector = TheHiveConnector(cfg)
        self.offenseIds = []

    def isAlert(self):
        """
            Check if the webhook describes an alert

            :return: True if it is an alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAlert starts', __name__)

        if self.data['objectType'] == 'alert':
            return True
        else:
            return False

    def isCase(self):
        """
            Check if the webhook describes a case

            :return: True if it is a case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isCase starts', __name__)

        if self.data['objectType'] == 'case':
            return True
        else:
            return False

    def isArtifact(self):
        """
            Check if the webhook describes an artifact

            :return: True if it is an artifact, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isArtifact starts', __name__)

        if self.data['objectType'] == 'case_artifact':
            return True
        else:
            return False

    def isNewArtifact(self):
        """
            Check if the webhook describes an artifact that has just been created

            :return: True if it is a newly created artifact, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewArtifact starts', __name__)

        if (self.isArtifact() and self.isNew()):
            return True
        return False

    def isCaseArtifactJob(self):
        """
            Check if the webhook describes a case artifact job

            :return: True if it is a case artifact job, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isCaseArtifactJob starts', __name__)

        if self.data['objectType'] == 'case_artifact_job':
            return True
        else:
            return False

    def isNew(self):
        """
            Check if the webhook describes a new item

            :return: True if it is new, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNew starts', __name__)

        if self.data['operation'] == 'Creation':
            return True
        else:
            return False

    def isUpdate(self):
        """
            Check if the webhook describes an update

            :return: True if it is an update, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isUpdate starts', __name__)

        if self.data['operation'] == 'Update':
            return True
        else:
            return False

    def isMarkedAsRead(self):
        """
            Check if the webhook describes an alert marked as read

            :return: True if it is marked as read, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isMarkedAsRead starts', __name__)

        try:
            if self.data['details']['status'] == 'Ignored':
                return True
            else:
                return False
        except KeyError:
            # when the alert is ignored (ignore new updates), the webhook does
            # not have the status key, this exception handles that
            return False

    def isClosed(self):
        """
            Check if the webhook describes a closing event.
            A False return does not mean that the case is open:
            if a case is already closed and a user updates something,
            the webhook describes an update, not a closing event.

            :return: True if it is a closing event, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosed starts', __name__)

        try:
            if self.data['details']['status'] == 'Resolved':
                return True
            else:
                return False
        except KeyError:
            # happens when the case is already closed
            # and user updates the case with a custom field (for example)
            # then status key is not included in the webhook
            return False

    def isDeleted(self):
        """
            Check if the webhook describes a delete event.
            A False return does not mean that the case is not deleted:
            it might already have been deleted earlier.

            :return: True if it is a deleting event, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeleted starts', __name__)

        if self.data['operation'] == 'Delete':
            return True
        else:
            return False

    def isMergedInto(self):
        """
            Check if the webhook describes a case merging

            :return: True if it is a merging event, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isMergedInto starts', __name__)

        if 'mergeInto' in self.data['object']:
            return True
        else:
            return False

    def isFromMergedCases(self):
        """
            Check if the webhook describes a case that comes from a merging action

            :return: True if it is a case that comes from a merging action, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isFromMergedCases starts', __name__)

        if 'mergeFrom' in self.data['object']:
            return True
        else:
            return False

    def isSuccess(self):
        """
            Check if the webhook describes a successful action

            :return: True if it is a successful action, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isSuccess starts', __name__)

        if self.data['details']['status'] == "Success":
            return True
        else:
            return False

    def isNewAlert(self):
        """
            Check if the webhook describes a new alert.

            :return: True if it is a new alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewAlert starts', __name__)

        if (self.isAlert() and self.isNew()):
            return True
        else:
            return False

    def isImportedAlert(self):
        """
            Check if the webhook describes an imported alert.

            :return: True if it is an imported alert, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isImportedAlert starts', __name__)

        if (self.isAlert() and self.isUpdate()
                and 'status' in self.data['details']
                and self.data['details']['status'] == 'Imported'):
            return True
        else:
            return False

    def isFromAlert(self, esCaseId):
        """
            For a given esCaseId, search for the alert(s) the case has been
            created from; if found, store them in the alert or alerts attribute

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if the case was created from one or more alerts, False if not
            :rtype: bool
        """

        query = dict()
        query['case'] = esCaseId
        results = self.theHiveConnector.findAlert(query)

        if len(results) == 1:
            # Case is based on a single alert
            self.alert = results[0]
            return True
        elif len(results) > 1:
            # Case is based on multiple alerts
            self.alerts = results
            return True
        else:
            return False

    def isNewCase(self):
        """
            Check if the webhook describes a new case.

            :return: True if it is a new case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewCase starts', __name__)

        if (self.isCase() and self.isNew()):
            return True
        else:
            return False

    def isQRadar(self):
        """
            Check if the webhook describes a QRadar Offense

            :return: True if it is a QRadar Offense, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadar starts', __name__)

        if ('tags' in self.data['details']
                and 'QRadar' in self.data['details']['tags']) or (
                    'tags' in self.data['object']
                    and 'QRadar' in self.data['object']['tags']):
            return True
        else:
            return False

    def isQRadarAlertImported(self):
        """
            Check if the webhook describes an Imported QRadar alert

            :return: True if the QRadar alert has been imported, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertImported starts', __name__)

        if (self.isImportedAlert() and self.isQRadar()):
            return True
        else:
            return False

    def isQRadarAlertUpdateFollowTrue(self):
        """
            Check if the webhook describes an update to a QRadar alert
            where the follow flag is still set to True

            :return: True if it is an updated QRadar alert with follow enabled, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertUpdateFollowTrue starts', __name__)

        if (self.isAlert() and self.isUpdate() and self.isQRadar()
                and 'follow' in self.data['details']
                and self.data['details']['follow']):
            return True
        else:
            return False

    def isQRadarAlertWithArtifacts(self):
        """
            Check if the webhook describes a QRadar alert containing artifacts and case information

            :return: True if it is a QRadar alert containing artifacts, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertWithArtifacts starts', __name__)

        if (self.isAlert() and self.isQRadar()) and 'artifacts' in self.data[
                'details'] and 'case' in self.data['object']:
            return True
        else:
            return False

    def isQRadarAlertMarkedAsRead(self):
        """
            Check if the webhook describes a QRadar alert marked as read
            "store" the offenseId in the webhook attribute "offenseId"

            :return: True if it is a QRadar alert marked as read, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isQRadarAlertMarkedAsRead starts', __name__)

        if (self.isAlert() and self.isMarkedAsRead()):
            # the value 'QRadar_Offenses' is hardcoded at creation by
            # workflow QRadar2alert
            if self.data['object']['source'] == 'QRadar_Offenses':
                self.offenseId = self.data['object']['sourceRef']
                return True
        return False

    def isNewQRadarCase(self):
        """
            Check if the webhook describes a new QRadar case,
            if the case has been opened from a QRadar alert
            returns True

            :return: True if it is a new QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewQRadarCase starts', __name__)

        if self.isQRadar() and self.isCase() and self.isNew():
            return True
        else:
            return False

    def isUpdateQRadarCase(self):
        """
            Check if the webhook describes an update to a QRadar case,
            i.e. a QRadar-related case that is being updated

            :return: True if it is an updated QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isUpdateQRadarCase starts', __name__)

        if self.isQRadar() and self.isCase() and self.isUpdate():
            return True
        else:
            return False

    def isClosedQRadarCase(self):
        """
            Check if the webhook describes a closing QRadar case,
            if the case has been opened from a QRadar alert
            returns True
            "store" the offenseId in the webhook attribute "offenseId"
            If the case is merged, it is not considered to be closed (even if it is
            from TheHive perspective), as a result, a merged qradar case will not close
            an offense.
            However a case created from merged case, where one of the merged case is
            related to QRadar, will close the linked QRadar offense.

            :return: True if it is a closed QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosedQRadarCase starts', __name__)

        try:
            if self.isCase() and self.isClosed():
                # searching in alerts if the case comes from a QRadar alert
                esCaseId = self.data['objectId']
                if self.fromQRadar(esCaseId):
                    return True
                # closed case, but not opened from a QRadar alert
                return False

            else:
                # not a case, or it had not been closed when
                # the webhook was issued
                # (might be open or already closed)
                return False

        except Exception as e:
            self.logger.error('%s.isClosedQRadarCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isDeletedQRadarCase(self):
        """
            Check if the webhook describes deleting a QRadar case,

            "store" the offenseId in the webhook attribute "offenseId"

            :return: True if it is deleting a QRadar case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeletedQRadarCase starts', __name__)

        try:
            if self.isCase() and self.isDeleted():
                # searching in alerts if the case comes from a QRadar alert
                esCaseId = self.data['objectId']
                if self.fromQRadar(esCaseId):
                    return True
                # deleted case, but not opened from a QRadar alert
                return False
            else:
                # not a case, or it had not been deleted when
                # the webhook was issued
                return False

        except Exception as e:
            self.logger.error('%s.isDeletedQRadarCase failed',
                              __name__,
                              exc_info=True)
            raise

    def fromQRadar(self, esCaseId):
        """
            For a given esCaseId, search if the case has been opened from
            a QRadar offense, if so adds the offenseId attribute to this object

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if it is a QRadar case, False if not
            :rtype: bool
        """

        if self.isFromAlert(esCaseId):
            if hasattr(self,
                       'alert') and self.alert['source'] == 'QRadar_Offenses':
                # case opened from alert
                # and from QRadar
                self.offenseId = self.alert['sourceRef']
                return True
            elif hasattr(self, 'alerts'):
                # case opened from several alerts, collect the QRadar ones
                for alert in self.alerts:
                    if alert['source'] == 'QRadar_Offenses':
                        self.offenseIds.append(alert['sourceRef'])
                if len(self.offenseIds) > 0:
                    return True
                # none of the alerts come from QRadar
                return False
            else:
                # case opened from an alert but
                # not from QRadar
                return False
        else:
            return False

    def isAzureSentinel(self):
        """
            Check if the webhook describes an AzureSentinel Incident

            :return: True if it is an AzureSentinel Incident, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinel starts', __name__)

        if ('tags' in self.data['details']
                and 'AzureSentinel' in self.data['details']['tags']) or (
                    'tags' in self.data['object']
                    and 'AzureSentinel' in self.data['object']['tags']):
            return True
        else:
            return False

    def isAzureSentinelAlertMarkedAsRead(self):
        """
            Check if the webhook describes an AzureSentinel alert marked as read
            and stores the incidentId in the webhook attribute "incidentId"

            :return: True if it is an AzureSentinel alert marked as read, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinelAlertMarkedAsRead starts',
                          __name__)

        if (self.isAlert() and self.isMarkedAsRead()):
            # the value 'Azure_Sentinel_incidents' is hardcoded at creation by
            # workflow AzureSentinel2alert
            if self.data['object']['source'] == 'Azure_Sentinel_incidents':
                self.incidentId = self.data['object']['sourceRef']
                return True
        return False

    def isAzureSentinelAlertImported(self):
        """
            Check if the webhook describes an Imported AzureSentinel alert

            :return: True if the AzureSentinel alert has been imported, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isAzureSentinelAlertImported starts', __name__)

        if (self.isImportedAlert() and self.isAzureSentinel()):
            return True
        else:
            return False

    def fromAzureSentinel(self, esCaseId):
        """
            For a given esCaseId, search if the case has been opened from
            an AzureSentinel incident, if so adds the incidentId attribute to this object

            :param esCaseId: elasticsearch case id
            :type esCaseId: str

            :return: True if it is an AzureSentinel case, False if not
            :rtype: bool
        """

        query = dict()
        query['case'] = esCaseId
        results = self.theHiveConnector.findAlert(query)

        if len(results) == 1:
            # should only have one hit
            if results[0]['source'] == 'Azure_Sentinel_incidents':
                # case opened from incident
                # and from AzureSentinel
                self.incidentId = results[0]['sourceRef']
                return True
            else:
                # case opened from an alert but
                # not from AzureSentinel
                return False
        else:
            return False

    def isClosedAzureSentinelCase(self):
        """
            Check if the webhook describes the closing of an AzureSentinel case,
            i.e. a case that has been opened from an AzureSentinel alert,
            and stores the incidentId in the webhook attribute "incidentId".
            If the case is merged, it is not considered to be closed (even if it is
            from TheHive's perspective); as a result, a merged AzureSentinel case will
            not close an incident.
            However, a case created from merged cases, where one of the merged cases
            is related to AzureSentinel, will close the linked AzureSentinel incident.

            :return: True if it is a closed AzureSentinel case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isClosedAzureSentinelCase starts', __name__)

        try:
            if self.isCase() and self.isClosed() and not self.isMergedInto():
                # searching in alerts if the case comes from an AzureSentinel alert
                esCaseId = self.data['objectId']
                if self.fromAzureSentinel(esCaseId):
                    return True
                else:
                    # at this point, the case was not opened from an AzureSentinel alert
                    # however, it could be a case created from merged cases;
                    # if one of the merged cases is related to an AzureSentinel alert
                    # then we consider the case as being from AzureSentinel
                    if self.isFromMergedCases():
                        for esCaseId in self.data['object']['mergeFrom']:
                            if self.fromAzureSentinel(esCaseId):
                                return True
                        # went through all merged cases and none were from AzureSentinel
                        return False
                    else:
                        # not an AzureSentinel case
                        return False
            else:
                # not a case, or it had not been closed when
                # the webhook was issued
                # (might be open or already closed)
                return False

        except Exception as e:
            self.logger.error('%s.isClosedAzureSentinelCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isDeletedAzureSentinelCase(self):
        """
            Check if the webhook describes the deletion of an AzureSentinel case
            and stores the incidentId in the webhook attribute "incidentId"

            :return: True if it is the deletion of an AzureSentinel case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isDeletedAzureSentinelCase starts', __name__)

        try:
            if self.isCase() and self.isDeleted():
                # searching in alerts if the case comes from an AzureSentinel alert
                esCaseId = self.data['objectId']
                if self.fromAzureSentinel(esCaseId):
                    return True
                else:
                    # at this point, the case was not opened from an AzureSentinel alert
                    # however, it could be a case created from merged cases;
                    # if one of the merged cases is related to an AzureSentinel alert
                    # then we consider the case as being from AzureSentinel
                    if self.isFromMergedCases():
                        for esCaseId in self.data['object']['mergeFrom']:
                            if self.fromAzureSentinel(esCaseId):
                                return True
                        # went through all merged cases and none were from AzureSentinel
                        return False
                    else:
                        # not an AzureSentinel case
                        return False
            else:
                # not a case, or it had not been deleted when
                # the webhook was issued
                return False

        except Exception as e:
            self.logger.error('%s.isDeletedAzureSentinelCase failed',
                              __name__,
                              exc_info=True)
            raise

    def isMisp(self):
        """
            Check if the webhook relates to MISP, based on the object type or tags

            :return: True if it is MISP related, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isMisp starts', __name__)

        if ('type' in self.data['object']
                and self.data['object']['type'] == 'misp') or (
                    'tags' in self.data['object']
                    and 'misp' in self.data['object']['tags']) or (
                        'tags' in self.data['details']
                        and 'misp' in self.data['details']['tags']) or (
                            'tags' in self.data['details']
                            and any('MISP:type=' in tag
                                    for tag in self.data['details']['tags'])):
            return True
        else:
            return False

    def isNewMispCase(self):
        """
            Check if the webhook describes a new MISP case,
            if the case has been opened from a MISP alert
            returns True

            :return: True if it is a new MISP case, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispCase starts', __name__)

        if self.isMisp() and self.isCase() and self.isNew():
            return True
        else:
            return False

    def isNewMispAlert(self):
        """
            Check if the webhook describes a MISP alert that is created

            :return: True if it is a MISP alert created, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispAlert starts', __name__)

        if (self.isAlert() and self.isNew() and self.isMisp()):
            return True
        return False

    def isNewMispArtifact(self):
        """
            Check if the webhook describes a MISP artifact that is created

            :return: True if it is a MISP artifact created, False if not
            :rtype: boolean
        """

        self.logger.debug('%s.isNewMispArtifact starts', __name__)

        if (self.isArtifact() and self.isNew() and self.isMisp()):
            return True
        return False
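
The Webhook class above is essentially a set of predicates over the raw TheHive webhook payload (objectType, operation, details, object). As a rough illustration of how those fields drive the checks, here is a minimal, hypothetical sketch that classifies a payload the same way isNewAlert() and isUpdate() do, and reproduces the sha1-based id from the constructor; the sample payload and the classify() helper are illustrative only and not part of Synapse.

# Minimal sketch (not Synapse code) showing how the predicate style above
# maps onto a raw TheHive webhook payload; the sample payload is made up.
import hashlib
import json


def classify(webhook_data):
    """Return a short label for the webhook, mimicking Webhook.isNewAlert() etc."""
    obj_type = webhook_data.get('objectType')
    operation = webhook_data.get('operation')
    if obj_type == 'alert' and operation == 'Creation':
        return 'new_alert'
    if obj_type == 'case' and operation == 'Update':
        return 'case_update'
    return 'other'


if __name__ == '__main__':
    sample = {'objectType': 'alert', 'operation': 'Creation', 'object': {}}
    # same id scheme as Webhook.__init__: sha1 of the JSON-serialised payload
    webhook_id = hashlib.sha1(json.dumps(sample).encode('utf-8')).hexdigest()
    print(webhook_id, classify(sample))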
Example no. 2
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.mlabsConnector = MLabsConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectMLabs()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectMLabs(self):
        self.logger.info('%s.connectMLabs starts', __name__)

        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['MessageLabs', 'Synapse']

        try:
            tracker_file = "./modules/MessageLabs/phishing_tracker"
            link_to_load = ""
            if os.path.exists(tracker_file):
                self.logger.debug(
                    "MessageLabs: phishing Reading from the tracker file...")
                with open(tracker_file, "r") as tracker:
                    link_to_load = tracker.read()

            if not link_to_load:
                link_to_load = self.cfg.get('MessageLabs', 'list_endpoint')

            unread, new_link = self.mlabsConnector.scan(link_to_load)

            for msg in unread:
                self.logger.debug("Found unread E-mail with id: {}".format(
                    msg['id']))
                if ('@removed' in msg) or msg['subject'] != self.cfg.get(
                        'MessageLabs', 'subject_contains'):
                    continue

                fullBody = msg['body']['content']
                subject = ""
                MIDHash = ""

                email_date = datetime.strptime(msg["receivedDateTime"],
                                               "%Y-%m-%dT%H:%M:%SZ")
                epoch_email_date = email_date.timestamp() * 1000

                for line in fullBody.splitlines():
                    if line.startswith("Subject"):
                        subject = line
                    if line.startswith("Message ID:"):
                        MIDHash = hashlib.md5(
                            line.split(" ID: ")[-1].encode()).hexdigest()

                caseTitle = str(
                    self.cfg.get('MessageLabs', 'subject_contains') + " - " +
                    str(subject))
                caseDescription = self.createFullBody(fullBody)

                alert = self.TheHiveConnector.craftAlert(
                    caseTitle, caseDescription, 1, epoch_email_date, self.tags,
                    2, "New", "internal", "MessageLabs", MIDHash, [],
                    self.cfg.get('MessageLabs', 'case_template'))

                query = dict()
                query['sourceRef'] = str(MIDHash)
                results = self.TheHiveConnector.findAlert(query)

                if len(results) == 0:
                    # alert not yet in TheHive, create it
                    createdAlert = self.TheHiveConnector.createAlert(alert)

            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createFullBody(self, fullbody):
        try:
            r = re.findall(
                r".*Policy name:\s([^\n\r]*)[\r\n]+.*Subject:\s([^\n\r]*)[\r\n]+.*Sender:\s([^\n\r]*)[\r\n]+Message ID: <([^\n\r]*)>[\r\n]+Sending server IP:\s([\d\.]*)[\r\n]+Date:\s([^\n\r]*)[\r\n]+Recipient:\s(.*)Attachments:\s(.*)Matched Content:\s(.*)Message body:\s(.*)",
                fullbody, re.MULTILINE | re.DOTALL)
            fields = [
                'Policy name', 'Subject', 'Sender', 'Message ID', 'Server IP',
                'Date', 'Recipients', 'Attachments', 'Matched Content',
                'E-mail body'
            ]
            values = []
            temp_fullbody = []
            if len(r) > 0:
                for it in range(0, 10):
                    values.append(r[0][it])
                values[3] = "<" + values[3] + ">"  # modify Message ID
                values[6] = re.sub(
                    r'<[^<>]*>', '',
                    values[6].strip().replace("\r\n", " ").replace("\n", " ")
                )  # modify Recipients, so all of them will be in 1 table field
                values[7] = values[7].strip(
                )  # remove empty lines/new lines from attachments
                values[8] = values[8].strip(
                )  # remove empty lines/new lines from matched content

                # putting together the markdown table
                temp_fullbody.append("|     |     |")
                temp_fullbody.append("|-----|-----|")
                for it in range(0, 9):
                    temp_fullbody.append("|  " + fields[it] + "  |  " +
                                         values[it] + "  |")
                temp_fullbody.append("**" + fields[9] + "**")
                temp_fullbody.append("```")
                temp_fullbody.append(values[9])
                temp_fullbody.append("```")
            else:
                # if the email can't be parsed with the regex above, then we provide it to SOC in an unparsed way
                temp_fullbody.append("```")
                temp_fullbody.append("**Unparsed E-mail**")
                temp_fullbody.append(str(fullbody))
                temp_fullbody.append("```")

            return '\r\n'.join(str(x) for x in temp_fullbody)

        except Exception as e:
            self.logger.error('Parsing error: ' + str(e), exc_info=True)
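
connectMLabs above persists the pagination link between runs in a tracker file, falling back to the configured endpoint on the first run and saving the new link after processing. The snippet below is a standalone, hypothetical sketch of just that persistence pattern; the TRACKER_FILE path, DEFAULT_ENDPOINT URL and helper names are illustrative stand-ins for the real configuration and MLabsConnector.scan() result.

# Hypothetical sketch (not Synapse code) of the tracker-file pattern used by
# connectMLabs: reuse the last saved pagination link if present, otherwise
# fall back to a configured endpoint, and save the new link afterwards.
import os

TRACKER_FILE = "./phishing_tracker"                    # illustrative path
DEFAULT_ENDPOINT = "https://example.invalid/messages"  # stands in for cfg list_endpoint


def load_link():
    # return the last saved link, or the default endpoint on first run
    if os.path.exists(TRACKER_FILE):
        with open(TRACKER_FILE, "r") as tracker:
            saved = tracker.read().strip()
            if saved:
                return saved
    return DEFAULT_ENDPOINT


def save_link(new_link):
    # persist the link for the next run
    with open(TRACKER_FILE, "w+") as tracker:
        tracker.write(new_link)


if __name__ == '__main__':
    link = load_link()
    print("would scan:", link)
    save_link(link)  # in the real workflow this is the new link returned by scan()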
Example no. 3
class Automation():
    def __init__(self, webhook, cfg):
        logger.info('Initiating QRadarAutomation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.QRadarConnector = QRadarConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        self.report_action = report_action

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug(
            'Checking if third party ticket({}) is linked to a closed case'.
            format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(
                    f"{sourceref} is found in alert {alert_found['id']} that has been marked as read"
                )
                return {
                    "resolutionStatus":
                    "Indeterminate",
                    "summary":
                    "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if alert is present in closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    logger.info(
                        f"{sourceref} was found in a closed case {case_found['id']}"
                    )
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return information required to sync with third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        # When an offense has been imported as an alert, check whether it should be closed right away
        if self.webhook.isQRadarAlertImported():
            self.offense_id = self.webhook.data['object']['sourceRef']

            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                self.offense_id)
            if closure_info:
                logger.info(
                    'Qradar offense({}) is linked to a closed case'.format(
                        self.offense_id))
                # Close the offense and continue with the next one
                self.QRadarConnector.closeOffense(self.offense_id)

        # Close offenses in QRadar
        if self.webhook.isClosedQRadarCase(
        ) or self.webhook.isDeletedQRadarCase(
        ) or self.webhook.isQRadarAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                logger.info('Case {} has been deleted'.format(self.case_id))

            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                logger.info('Alert {} has been marked as read'.format(
                    self.alert_id))
                self.QRadarConnector.closeOffense(
                    self.webhook.data['object']['sourceRef'])

            else:
                self.case_id = self.webhook.data['object']['id']
                logger.info('Case {} has been marked as resolved'.format(
                    self.case_id))

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing offense {} for case {}".format(
                        self.webhook.ext_alert_id, self.case_id))
                    self.QRadarConnector.closeOffense(
                        self.webhook.ext_alert_id)

                elif len(self.webhook.ext_alert_ids) > 0:
                    # Close offense for every linked offense
                    logger.info(
                        "Found multiple offenses {} for case {}".format(
                            self.webhook.ext_alert_ids, self.case_id))
                    for offense_id in self.webhook.ext_alert_ids:
                        logger.info("Closing offense {} for case {}".format(
                            offense_id, self.case_id))
                        self.QRadarConnector.closeOffense(offense_id)

            self.report_action = 'closeOffense'

        return self.report_action
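
The heart of checkIfInClosedCaseOrAlertMarkedAsRead above is a small decision: return closure details when the linked alert is Ignored or its case is Resolved, otherwise return False. Here is a standalone, illustrative sketch of that decision over plain dicts; closure_info_for(), the get_case callable and the sample data are assumptions for demonstration, not Synapse APIs.

# Standalone sketch (illustrative only) of the closure decision implemented by
# checkIfInClosedCaseOrAlertMarkedAsRead, using plain dicts instead of
# TheHiveConnector lookups.
def closure_info_for(alert, get_case):
    """Return closure info if the alert is read or its case is resolved, else False."""
    if alert.get('status') == 'Ignored':
        return {"resolutionStatus": "Indeterminate",
                "summary": "Closed by Synapse with summary: Marked as Read within The Hive"}
    if 'case' in alert:
        case = get_case(alert['case'])
        if case.get('status') == 'Resolved':
            return {"resolutionStatus": case.get('resolutionStatus', 'N/A'),
                    "summary": case.get('summary', 'N/A')}
    return False


if __name__ == '__main__':
    cases = {'case-1': {'status': 'Resolved', 'resolutionStatus': 'TruePositive',
                        'summary': 'Confirmed phishing'}}
    alert = {'status': 'Imported', 'case': 'case-1'}
    print(closure_info_for(alert, cases.get))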
Example no. 4
class Automation():
    def __init__(self, webhook, cfg):
        logger.info('Initiating AzureSentinel Automation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.AzureSentinelConnector = AzureSentinelConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        self.report_action = report_action
        self.closure_status = {
            "Indeterminate": "Undetermined",
            "FalsePositive": "FalsePositive",
            "TruePositive": "TruePositive",
            "Other": "BenignPositive"
        }

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug(
            'Checking if third party ticket({}) is linked to a closed case'.
            format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(
                    f"{sourceref} is found in alert {alert_found['id']} that has been marked as read"
                )
                return {
                    "resolutionStatus":
                    "Indeterminate",
                    "summary":
                    "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if alert is present in closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    if 'resolutionStatus' in case_found and case_found[
                            'resolutionStatus'] == "Duplicated":
                        merged_case_found = self.getFinalMergedCase(case_found)
                        logger.debug(f"found merged cases {merged_case_found}")
                        if merged_case_found:
                            if merged_case_found['status'] != "Resolved":
                                return False
                            else:
                                case_found = merged_case_found
                    logger.info(
                        f"{sourceref} was found in a closed case {case_found['id']}"
                    )
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return information required to sync with third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        # Update incident status to active when imported as Alert
        if self.webhook.isAzureSentinelAlertImported():
            self.incidentId = self.webhook.data['object']['sourceRef']

            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                self.incidentId)
            if closure_info:
                logger.info(
                    'Sentinel incident({}) is linked to a closed case'.format(
                        self.incidentId))
                # Translate the case resolution status via self.closure_status

                classification = self.closure_status[
                    closure_info['resolutionStatus']]
                classification_comment = "Closed by Synapse with summary: {}".format(
                    closure_info['summary'])
                # Close incident and continue with the next incident
                self.AzureSentinelConnector.closeIncident(
                    self.incidentId, classification, classification_comment)

            else:
                logger.info(
                    'Incident {} needs to be updated to status Active'.format(
                        self.incidentId))
                self.AzureSentinelConnector.updateIncidentStatusToActive(
                    self.incidentId)
                self.report_action = 'updateIncident'

        # Close incidents in Azure Sentinel
        if self.webhook.isClosedAzureSentinelCase(
        ) or self.webhook.isDeletedAzureSentinelCase(
        ) or self.webhook.isAzureSentinelAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Deleted within The Hive"
                logger.info('Case {} has been deleted'.format(self.case_id))

            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                self.incidentId = self.webhook.data['object']['sourceRef']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Marked as Read within The Hive"
                logger.info('Alert {} has been marked as read'.format(
                    self.webhook.data['object']['sourceRef']))
                self.AzureSentinelConnector.closeIncident(
                    self.incidentId, self.classification,
                    self.classification_comment)

            # Ensure duplicated incidents don't get closed on merge, but only when the merged (target) case is closed
            elif 'resolutionStatus' in self.webhook.data[
                    'details'] and self.webhook.data['details'][
                        'resolutionStatus'] != "Duplicated":
                self.case_id = self.webhook.data['object']['id']
                self.classification = self.closure_status[
                    self.webhook.data['details']['resolutionStatus']]
                self.classification_comment = "Closed by Synapse with summary: {}".format(
                    self.webhook.data['details']['summary'])
                logger.info('Case {} has been marked as resolved'.format(
                    self.case_id))

                if 'mergeFrom' in self.webhook.data['object']:
                    logger.info(
                        f'Case {self.case_id} is a merged case. Finding original cases'
                    )
                    original_cases = []
                    for merged_case in self.webhook.data['object'][
                            'mergeFrom']:
                        original_cases.extend(
                            self.getOriginalCases(merged_case))
                    # Find alerts for each original case
                    for original_case in original_cases:
                        query = {'case': original_case['id']}
                        found_alerts = self.TheHiveConnector.findAlert(query)
                        # Close alerts that have been found
                        for found_alert in found_alerts:
                            logger.info(
                                "Closing incident {} for case {}".format(
                                    found_alert['sourceRef'], self.case_id))
                            self.AzureSentinelConnector.closeIncident(
                                found_alert['sourceRef'], self.classification,
                                self.classification_comment)

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing incident {} for case {}".format(
                        self.webhook.ext_alert_id, self.case_id))
                    self.AzureSentinelConnector.closeIncident(
                        self.webhook.ext_alert_id, self.classification,
                        self.classification_comment)

                elif len(self.webhook.ext_alert_ids) > 0:
                    # Close incident for every linked incident
                    logger.info(
                        "Found multiple incidents {} for case {}".format(
                            self.webhook.ext_alert_ids, self.case_id))
                    for incident_id in self.webhook.ext_alert_ids:
                        logger.info("Closing incident {} for case {}".format(
                            incident_id, self.case_id))
                        self.AzureSentinelConnector.closeIncident(
                            incident_id, self.classification,
                            self.classification_comment)

            self.report_action = 'closeIncident'

        return self.report_action

    def getOriginalCases(self, merged_from_case_id, handled_cases=None):
        # avoid the mutable default argument pitfall
        if handled_cases is None:
            handled_cases = []
        cases_found = []
        case_found = self.TheHiveConnector.getCase(merged_from_case_id)
        if 'mergeFrom' in case_found:
            if merged_from_case_id not in handled_cases:
                handled_cases.append(merged_from_case_id)
                # recurse into the cases this case was merged from
                for merged_case in case_found['mergeFrom']:
                    cases_found.extend(
                        self.getOriginalCases(merged_case, handled_cases))
        else:
            cases_found.append(case_found)
        return cases_found

    def getFinalMergedCase(self, duplicated_case, handled_cases=None):
        # avoid the mutable default argument pitfall
        if handled_cases is None:
            handled_cases = []
        if 'mergeInto' in duplicated_case:
            merged_into = duplicated_case['mergeInto']
            case_found = self.TheHiveConnector.getCase(merged_into)
            if 'resolutionStatus' in case_found:
                if (case_found['resolutionStatus'] == "Duplicated"
                        and merged_into not in handled_cases):
                    handled_cases.append(merged_into)
                    case_found = self.getFinalMergedCase(
                        case_found, handled_cases)
        else:
            case_found = duplicated_case
        return case_found
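
getFinalMergedCase above walks mergeInto links until it reaches a case that is not itself marked as Duplicated, keeping track of visited cases to avoid loops. The following is an illustrative, self-contained sketch of that chain-following idea over an in-memory dict of cases; final_merged_case() and the sample data are hypothetical and only demonstrate the traversal, not TheHive's API.

# Illustrative sketch (not Synapse code) of the mergeInto chain-following idea
# behind getFinalMergedCase, over an in-memory dict of cases.
def final_merged_case(case, cases_by_id, handled=None):
    """Follow mergeInto links until a case that is not marked Duplicated."""
    if handled is None:
        handled = set()
    while 'mergeInto' in case:
        target_id = case['mergeInto']
        target = cases_by_id[target_id]
        if target.get('resolutionStatus') == 'Duplicated' and target_id not in handled:
            handled.add(target_id)  # cycle protection
            case = target
            continue
        return target
    return case


if __name__ == '__main__':
    cases = {
        'a': {'id': 'a', 'mergeInto': 'b', 'resolutionStatus': 'Duplicated'},
        'b': {'id': 'b', 'mergeInto': 'c', 'resolutionStatus': 'Duplicated'},
        'c': {'id': 'c', 'status': 'Resolved', 'resolutionStatus': 'TruePositive'},
    }
    print(final_merged_case(cases['a'], cases))  # ends at case 'c'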
Example no. 5
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.qradarConnector = QRadarConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def enrichOffense(self, offense):

        enriched = copy.deepcopy(offense)

        artifacts = []

        enriched['offense_type_str'] = \
            self.qradarConnector.getOffenseTypeStr(offense['offense_type'])

        # Add the offense source explicitly
        if enriched['offense_type_str'] == 'Username':
            artifacts.append({
                'data': offense['offense_source'],
                'dataType': 'user-account',
                'message': 'Offense Source',
                'tags': ['QRadar']
            })

        # Add the local and remote sources
        # scrIps contains offense source IPs
        srcIps = list()
        # dstIps contains offense destination IPs
        dstIps = list()
        # srcDstIps contains IPs which are both source and destination of offense
        srcDstIps = list()
        for ip in self.qradarConnector.getSourceIPs(enriched):
            srcIps.append(ip)

        for ip in self.qradarConnector.getLocalDestinationIPs(enriched):
            dstIps.append(ip)

        # making copies is needed since we want to
        # access and delete data from the list at the same time
        s = copy.deepcopy(srcIps)
        d = copy.deepcopy(dstIps)

        for srcIp in s:
            for dstIp in d:
                if srcIp == dstIp:
                    srcDstIps.append(srcIp)
                    srcIps.remove(srcIp)
                    dstIps.remove(dstIp)

        for ip in srcIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source IP',
                'tags': ['QRadar', 'src']
            })
        for ip in dstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Local destination IP',
                'tags': ['QRadar', 'dst']
            })
        for ip in srcDstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source and local destination IP',
                'tags': ['QRadar', 'src', 'dst']
            })

        # Parse offense types to add the offense source as an observable when a valid type is used
        for offense_type, extraction_config in self.cfg.get(
                'QRadar', 'observables_in_offense_type', fallback={}).items():
            if enriched['offense_type_str'] == offense_type:
                if isinstance(extraction_config, str):
                    observable_type = extraction_config
                    artifacts.append({
                        'data': enriched['offense_source'],
                        'dataType': observable_type,
                        'message': 'QRadar Offense source',
                        'tags': ['QRadar']
                    })
                elif isinstance(extraction_config, list):
                    for extraction in extraction_config:
                        regex = re.compile(extraction['regex'])
                        matches = regex.findall(str(
                            enriched['offense_source']))
                        if len(matches) > 0:
                            # if isinstance(found_observable, tuple): << Fix later loop through matches as well
                            for match_group, observable_type in extraction[
                                    'match_groups'].items():
                                try:
                                    artifacts.append({
                                        'data':
                                        matches[0][match_group],
                                        'dataType':
                                        observable_type,
                                        'message':
                                        'QRadar Offense Type based observable',
                                        'tags': ['QRadar', 'offense_type']
                                    })
                                except Exception as e:
                                    self.logger.warning(
                                        "Could not find match group {} in {}".
                                        format(match_group,
                                               enriched['offense_type_str']))
                else:
                    self.logger.error(
                        "Configuration for observables_in_offense_type is wrongly formatted. Please fix this to enable this functionality"
                    )

        # Remove observables that are to be excluded based on the configuration
        artifacts = self.checkObservableExclusionList(artifacts)

        # Match observables against the TLP list
        artifacts = self.checkObservableTLP(artifacts)

        # Add all the observables
        enriched['artifacts'] = artifacts

        # Add rule names to offense
        enriched['rules'] = self.qradarConnector.getRuleNames(offense)

        # waiting 1s to make sure the logs are searchable
        sleep(1)
        # adding the first 3 raw logs
        enriched['logs'] = self.qradarConnector.getOffenseLogs(enriched)

        return enriched

    def qradarOffenseToHiveAlert(self, offense):
        def getHiveSeverity(offense):
            # severity in TheHive is either low, medium or high
            # while severity in QRadar is from 1 to 10
            # low will be [1;4] => 1
            # medium will be [5;6] => 2
            # high will be [7;10] => 3
            if offense['severity'] < 5:
                return 1
            elif offense['severity'] < 7:
                return 2
            elif offense['severity'] < 11:
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['QRadar', 'Offense', 'Synapse']
        # Add the offense type as a tag
        if 'offense_type_str' in offense:
            tags.append("qr_offense_type: {}".format(
                offense['offense_type_str']))

        # Check if the automation ids need to be extracted
        tags_extracted = []  # default when extraction is disabled (avoids a NameError below)
        if self.cfg.getboolean('QRadar', 'extract_automation_identifiers'):

            # Run the extraction function and add it to the offense data
            # Extract automation ids
            tags_extracted = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'tag_regexes'))
            # Extract any possible name for a document on a knowledge base
            offense['use_case_names'] = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'uc_kb_name_regexes'))
            if len(tags_extracted) > 0:
                tags.extend(tags_extracted)
            else:
                self.logger.info('No match found for offense %s',
                                 offense['id'])

        # Check if the mitre ids need to be extracted
        if self.cfg.getboolean('QRadar', 'extract_mitre_ids'):
            # Extract mitre tactics
            offense['mitre_tactics'] = self.tagExtractor(
                offense, ["rules"], [r'[tT][aA]\d{4}'])
            if 'mitre_tactics' in offense:
                tags.extend(offense['mitre_tactics'])

            # Extract mitre techniques
            offense['mitre_techniques'] = self.tagExtractor(
                offense, ["rules"], [r'[tT]\d{4}'])
            if 'mitre_techniques' in offense:
                tags.extend(offense['mitre_techniques'])

        if "categories" in offense:
            for cat in offense['categories']:
                tags.append(cat)

        defaultObservableDatatype = [
            'autonomous-system', 'domain', 'file', 'filename', 'fqdn', 'hash',
            'ip', 'mail', 'mail_subject', 'other', 'process_filename',
            'regexp', 'registry', 'uri_path', 'url', 'user-account',
            'user-agent'
        ]

        artifacts = []
        for artifact in offense['artifacts']:
            # Add automation tagging and mitre tagging to observables
            if len(tags_extracted) > 0:
                artifact['tags'].extend(tags_extracted)
            if 'mitre_tactics' in offense:
                artifact['tags'].extend(offense['mitre_tactics'])
            if 'mitre_techniques' in offense:
                artifact['tags'].extend(offense['mitre_techniques'])

            if artifact['dataType'] in defaultObservableDatatype:
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType=artifact['dataType'],
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            else:
                artifact['tags'].append('type:' + artifact['dataType'])
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType='other',
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            artifacts.append(hiveArtifact)

        # Retrieve the configured case_template
        qradarCaseTemplate = self.cfg.get('QRadar', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}, {}".format(offense['id'], offense['description']),
            self.craftAlertDescription(offense), getHiveSeverity(offense),
            offense['start_time'],
            tags, 2, 'Imported', 'internal', 'QRadar_Offenses',
            str(offense['id']), artifacts, qradarCaseTemplate)

        return alert

    def validateRequest(self, request):
        if request.is_json:
            content = request.get_json()
            if 'timerange' in content:
                workflowReport = self.allOffense2Alert(content['timerange'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing <timerange> key/value')
                return json.dumps({
                    'success': False,
                    'message': "timerange key missing in request"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allOffense2Alert(self, timerange):
        """
        Get all open offenses created within the last
        <timerange> minutes and create alerts for them in
        TheHive
        """
        self.logger.info('%s.allOffense2Alert starts', __name__)

        report = dict()
        report['success'] = True
        report['offenses'] = list()

        try:
            offensesList = self.qradarConnector.getOffenses(timerange)
            # Check for offenses that should have been closed
            for offense in offensesList:
                closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                    offense['id'])
                if closure_info:
                    # Close the offense and continue with the next one
                    self.logger.info(
                        "Closed case found for {}. Closing offense...".format(
                            offense['id']))
                    self.qradarConnector.closeOffense(offense['id'])
                    continue

                matched = False
                # Filter based on regexes in configuration
                for offense_exclusion_regex in self.cfg.get(
                        'QRadar', 'offense_exclusion_regexes', fallback=[]):
                    self.logger.debug(
                        "Offense exclusion regex found '{}'. Matching against offense {}"
                        .format(offense_exclusion_regex, offense['id']))
                    regex = re.compile(offense_exclusion_regex, flags=re.I)
                    if regex.match(offense['description']):
                        self.logger.debug(
                            "Found exclusion match for offense {} and regex {}"
                            .format(offense['id'], offense_exclusion_regex))
                        matched = True
                if matched:
                    continue

                # Prepare new alert
                offense_report = dict()
                self.logger.debug("offense: %s" % offense)
                self.logger.info("Enriching offense...")
                enrichedOffense = self.enrichOffense(offense)
                self.logger.debug("Enriched offense: %s" % enrichedOffense)
                theHiveAlert = self.qradarOffenseToHiveAlert(enrichedOffense)

                # searching if the offense has already been converted to alert
                query = dict()
                query['sourceRef'] = str(offense['id'])
                self.logger.info('Looking for offense %s in TheHive alerts',
                                 str(offense['id']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'Offense %s not found in TheHive alerts, creating it',
                        str(offense['id']))

                    try:
                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']

                        offense_report['raised_alert_id'] = theHiveEsAlertId
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allOffense2Alert failed',
                                          __name__,
                                          exc_info=True)
                        offense_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            offense_report['message'] = errorMessage
                        else:
                            offense_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        offense_report['offense_id'] = offense['id']
                        # Mark the overall run as failed if any offense fails
                        report['success'] = False

                else:
                    self.logger.info(
                        'Offense %s already imported as alert, checking for updates',
                        str(offense['id']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found, offense['id']):
                        offense_report['updated_alert_id'] = alert_found['id']
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                    else:
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                report['offenses'].append(offense_report)
                ##########################################################

        except Exception as e:
            self.logger.error(
                'Failed to create alert from QRadar offense (retrieving offenses failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from offense" % str(e)

        return report

    def craftAlertDescription(self, offense):
        """
            From the offense metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add url to Offense
        qradar_ip = self.cfg.get('QRadar', 'server')
        url = (
            '[%s](https://%s/console/qradar/jsp/QRadar.jsp?appName=Sem&pageId=OffenseSummary&summaryId=%s)'
            % (str(offense['id']), qradar_ip, str(offense['id'])))

        description += '#### Offense: \n - ' + url + '\n\n'

        # Format associated rules
        rule_names_formatted = "#### Rules triggered: \n"
        rules = offense['rules']
        if len(rules) > 0:
            for rule in rules:
                if 'name' in rule:
                    rule_names_formatted += "- %s \n" % rule['name']
                else:
                    continue

        # Add rules overview to description
        description += rule_names_formatted + '\n\n'

        # Format associated documentation
        uc_links_formatted = "#### Use Case documentation: \n"
        kb_url = self.cfg.get('QRadar', 'kb_url')
        if 'use_case_names' in offense and offense['use_case_names']:
            for uc in offense['use_case_names']:
                replaced_kb_url = kb_url.replace('<uc_kb_name>', uc)
                uc_links_formatted += f"- [{uc}]({replaced_kb_url}) \n"

            # Add associated documentation
            description += uc_links_formatted + '\n\n'

        # Add mitre Tactic information
        mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        if 'mitre_tactics' in offense and offense['mitre_tactics']:
            for tactic in offense['mitre_tactics']:
                mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (
                    tactic, 'https://attack.mitre.org/tactics/', tactic)

            # Add associated documentation
            description += mitre_ta_links_formatted + '\n\n'

        # Add mitre Technique information
        mitre_t_links_formatted = "#### MITRE Techniques: \n"
        if 'mitre_techniques' in offense and offense['mitre_techniques']:
            for technique in offense['mitre_techniques']:
                mitre_t_links_formatted += "- [%s](%s/%s) \n" % (
                    technique, 'https://attack.mitre.org/techniques/',
                    technique)

            # Add associated documentation
            description += mitre_t_links_formatted + '\n\n'

        # Add offense details table
        description += (
            '#### Summary:\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' +
            str(self.qradarConnector.formatDate(offense['start_time'])) +
            ' |\n' + '| **Offense ID**          | ' + str(offense['id']) +
            ' |\n' + '| **Description**         | ' +
            str(offense['description'].replace('\n', '')) + ' |\n' +
            '| **Offense Type**        | ' + str(offense['offense_type_str']) +
            ' |\n' + '| **Offense Source**      | ' +
            str(offense['offense_source']) + ' |\n' +
            '| **Destination Network** | ' +
            str(offense['destination_networks']) + ' |\n' +
            '| **Source Network**      | ' + str(offense['source_network']) +
            ' |\n\n\n' + '\n\n\n\n')

        # Add raw payload
        description += '#### Payload:\n```\n'
        for log in offense['logs']:
            description += log['utf8_payload'] + '\n'
        description += '```\n\n'

        return description
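
A minimal sketch of how this QRadar workflow might be exposed over HTTP. The import path and the Integration class name are assumptions (the class definition is not shown in this excerpt); the route and payload are illustrative only, but validateRequest() does return a (json_string, http_status) tuple that Flask accepts directly.

# Illustrative sketch only: wiring validateRequest() behind a Flask route.
# The module path and the Integration class name are assumptions.
from flask import Flask, request

from modules.QRadar.integration import Integration  # hypothetical module path

app = Flask(__name__)
integration = Integration()


@app.route('/QRadar2alert', methods=['POST'])
def qradar2alert():
    # validateRequest expects a JSON body such as {"timerange": 10} and
    # returns a (json_string, http_status_code) tuple.
    return integration.validateRequest(request)
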
Example n. 6
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.lexsi = LexsiConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):

        if request.is_json:
            content = request.get_json()
            if 'type' in content and content['type'] == "Active":
                workflowReport = self.allIncidents2Alert(content['type'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all opened incidents created within Lexsi
        and create alerts for them in TheHive
        """
        self.logger.info('%s.allIncidents2Alert starts', __name__)

        incidentsList = self.lexsi.getOpenItems()['result']

        report = dict()
        report['success'] = True
        report['incidents'] = list()

        try:
            # each incident in the list is represented as a dict
            for incident in incidentsList:

                # Prepare new alert
                incident_report = dict()
                self.logger.debug("incident: %s" % incident)

                theHiveAlert = self.IncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                query = dict()
                query['sourceRef'] = str(incident['incident'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['incident']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['incident']))
                    try:

                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']
                        self.TheHiveConnector.promoteAlertToCase(
                            theHiveEsAlertId)

                        incident_report['raised_alert_id'] = theHiveEsAlertId
                        incident_report['lexsi_incident_id'] = incident[
                            'incident']
                        incident_report['success'] = True

                    except Exception as e:
                        self.logger.error(incident_report)
                        self.logger.error('%s.allIncidents2Alert failed',
                                          __name__,
                                          exc_info=True)
                        incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            incident_report['message'] = errorMessage
                        else:
                            incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        incident_report['incident_id'] = incident['incident']
                        # Mark the overall run as failed if any incident fails
                        report['success'] = False

                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['incident']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found,
                            str(incident['incident'])):
                        # Mark the alert as read
                        self.TheHiveConnector.markAlertAsRead(
                            alert_found['id'])
                        incident_report['updated_alert_id'] = alert_found['id']
                        incident_report['lexsi_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                    else:
                        incident_report['lexsi_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                report['incidents'].append(incident_report)

            thehiveAlerts, open_lexsi_cases = self.lexsi_opened_alerts_thehive(
            )
            self.set_alert_status_ignored(incidentsList, thehiveAlerts,
                                          open_lexsi_cases)

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Lexsi incident (retrieving incidents failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return report

    def IncidentToHiveAlert(self, incident):

        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['Lexsi', 'incident', 'Synapse']

        # Skip for now
        artifacts = []

        # Retrieve the configured case_template
        CaseTemplate = self.cfg.get('Lexsi', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}: {}".format(incident['incident'], incident['title']),
            self.craftAlertDescription(incident),
            self.getHiveSeverity(incident),
            self.timestamp_to_epoch(incident['detected'], "%Y-%m-%d %H:%M:%S"),
            tags, 2, 'New', 'internal', 'Lexsi', str(incident['incident']),
            artifacts, CaseTemplate)

        return alert

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add incident details table
        description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **URL**          | ' +
            "{}{}{}".format("```", str(incident['url']), "```") + ' |\n' +
            '| **Type**          | ' + str(incident['type']) + ' |\n' +
            '| **Severity**          | ' + str(incident['severity']) + ' |\n' +
            '| **Category**         | ' + str(incident['category']) + ' |\n' +
            '| **Updated**        | ' + str(incident['updated']) + ' |\n' +
            '| **Detected**        | ' + str(incident['detected']) + ' |\n' +
            '| **Source**        | ' + str(incident['source']) + ' |\n' +
            '| **Analyst Name(Lexsi)**        | ' +
            str(incident['analystName']) + ' |\n' +
            '| **Link to Orange Portal**        | ' +
            str("https://portal.cert.orangecyberdefense.com/cybercrime/{}".
                format(incident['id'])) + ' |\n' + '\n\n\n\n')

        return description

    def timestamp_to_epoch(self, date_time, pattern):
        return int(time.mktime(time.strptime(date_time, pattern))) * 1000

    def getHiveSeverity(self, incident):
        # severity in TheHive is either low, medium, high or critical
        # while severity in Lexsi is from 0 to 5
        if int(incident['severity']) in {0, 5}:
            return 1
        # elif int(incident['severity']) in {2,3}:
        #    return 2
        # elif int(incident['severity']) in {4,5}:
        #    return 3
        else:
            return 2

    def lexsi_opened_alerts_thehive(self):
        thehiveAlerts = []
        open_lexsi_cases = {}
        query = In('tags', ['Lexsi'])

        self.logger.info(
            'Looking for incident in TheHive alerts with tag Lexsi')
        # self.logger.info(query)
        results = self.TheHiveConnector.findAlert(query)
        for alert_found in results:
            # Check if a case is linked
            if 'case' in alert_found:
                try:
                    case_found = self.TheHiveConnector.getCase(
                        alert_found['case'])
                    # Check if the status is open. Only then append it to the list
                    if case_found['status'] == "Open":
                        open_lexsi_cases[alert_found['sourceRef']] = case_found
                        thehiveAlerts.append(alert_found['sourceRef'])
                except Exception as e:
                    self.logger.error("Could not find case: {}".format(e),
                                      exc_info=True)
                    continue
        self.logger.debug(
            "Lexsi Alerts opened in theHive: {}".format(thehiveAlerts))
        return thehiveAlerts, open_lexsi_cases

    def compare_lists(self, list1, list2):
        return list(set(list1) - set(list2))

    def set_alert_status_ignored(self, incidentsList, thehiveAlerts,
                                 open_lexsi_cases):
        lexsi_reporting = []
        # incidentsList = self.lexsi.getOpenItems()['result']

        for incident in incidentsList:
            lexsi_reporting.append(incident['incident'])

        self.logger.debug(
            "the list of opened Lexsi Incidents: {}".format(lexsi_reporting))
        uncommon_elements = self.compare_lists(thehiveAlerts, lexsi_reporting)
        # uncommon_elements=['476121']
        self.logger.debug(
            "Open cases present in TheHive but not in list of opened Lexsi Incidents: {}"
            .format((uncommon_elements)))

        for element in uncommon_elements:
            self.logger.info(
                "Preparing to close the case for {}".format(element))
            query = dict()
            query['sourceRef'] = str(element)
            self.logger.debug('Looking for incident %s in TheHive alerts',
                              str(element))
            try:
                if element in open_lexsi_cases:
                    # Resolve the case
                    case_id = open_lexsi_cases[element]['id']
                    self.logger.debug("Case id for element {}: {}".format(
                        element, case_id))
                    self.logger.debug("Preparing to resolve the case")
                    self.TheHiveConnector.closeCase(case_id)
                    self.logger.debug("Closed case with id {} for {}".format(
                        case_id, element))

            except Exception as e:
                self.logger.error("Could not close case: {}".format(e),
                                  exc_info=True)
                continue
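
The closing step above is driven by a plain set difference between the sourceRefs of open Lexsi-tagged cases in TheHive and the incidents Lexsi still reports as open. A standalone sketch of that reconciliation, with made-up IDs:

# Standalone sketch of the reconciliation performed by compare_lists /
# set_alert_status_ignored; all IDs below are made up.
open_in_thehive = ['476121', '476130', '476142']  # sourceRefs of open TheHive cases tagged Lexsi
open_in_lexsi = ['476130', '476142']              # incidents Lexsi still reports as open

# Same operation as compare_lists: anything known to TheHive but no longer
# open in Lexsi becomes a candidate for closing.
to_close = list(set(open_in_thehive) - set(open_in_lexsi))
print(to_close)  # ['476121']
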
Example n. 7
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.azureSentinelConnector = AzureSentinelConnector(self.cfg)
        self.theHiveConnector = TheHiveConnector(self.cfg)

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        self.description = ""

        # Add url to incident
        self.url = ('[%s](%s)' %
                    (str(incident['properties']['incidentNumber']),
                     str(incident['properties']['incidentUrl'])))
        self.description += '#### Incident: \n - ' + self.url + '\n\n'

        # Format associated rules
        self.rule_names_formatted = "#### Rules triggered: \n"
        self.rules = incident['properties']['relatedAnalyticRuleIds']
        if len(self.rules) > 0:
            for rule in self.rules:
                self.rule_info = self.azureSentinelConnector.getRule(rule)
                self.logger.debug(
                    'Received the following rule information: {}'.format(
                        self.rule_info))
                self.rule_name = self.rule_info['properties']['displayName']
                rule_url = "https://management.azure.com{}".format(rule)
                self.rule_names_formatted += "- %s \n" % (self.rule_name)

        # Add rules overview to description
        self.description += self.rule_names_formatted + '\n\n'

        # Add mitre Tactic information
        # https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json

        # mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        # if 'mitre_tactics' in offense and offense['mitre_tactics']:
        #     for tactic in offense['mitre_tactics']:
        #         mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (tactic, 'https://attack.mitre.org/tactics/', tactic)

        #     #Add associated documentation
        #     self.description += mitre_ta_links_formatted + '\n\n'

        # #Add mitre Technique information
        # mitre_t_links_formatted = "#### MITRE Techniques: \n"
        # if 'mitre_techniques' in offense and offense['mitre_techniques']:
        #     for technique in offense['mitre_techniques']:
        #         mitre_t_links_formatted += "- [%s](%s/%s) \n" % (technique, 'https://attack.mitre.org/techniques/', technique)

        # Add a custom description when the incident does not contain any
        if 'description' not in incident['properties']:
            incident['properties']['description'] = "N/A"

        # Add incident details table
        self.description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' + str(
                self.azureSentinelConnector.formatDate(
                    "description", incident['properties']['createdTimeUtc'])) +
            ' |\n' + '| **incident ID**          | ' +
            str(incident['properties']['incidentNumber']) + ' |\n' +
            '| **Description**         | ' +
            str(incident['properties']['description'].replace('\n', '')) +
            ' |\n' + '| **incident Type**        | ' + str(incident['type']) +
            ' |\n' + '| **incident Source**      | ' +
            str(incident['properties']['additionalData']['alertProductNames'])
            + ' |\n' + '| **incident Status**      | ' +
            str(incident['properties']['status']) + ' |\n' + '\n\n\n\n')

        return self.description

    def sentinelIncidentToHiveAlert(self, incident):
        def getHiveSeverity(incident):
            # severity in TheHive is either low, medium or high
            # while severity in Sentinel is from Low to High
            if incident['properties']['severity'] == "Low":
                return 1
            elif incident['properties']['severity'] == "Medium":
                return 2
            elif incident['properties']['severity'] == "High":
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        self.tags = ['AzureSentinel', 'incident', 'Synapse']

        # Skip for now
        self.artifacts = []

        # Retrieve the configured case_template
        self.sentinelCaseTemplate = self.cfg.get('AzureSentinel',
                                                 'case_template')

        # Build TheHive alert
        self.alert = self.theHiveConnector.craftAlert(
            "{}, {}".format(incident['properties']['incidentNumber'],
                            incident['properties']['title']),
            self.craftAlertDescription(incident), getHiveSeverity(incident),
            self.azureSentinelConnector.formatDate(
                "alert_timestamp", incident['properties']['createdTimeUtc']),
            self.tags, 2, 'New', 'internal', 'Azure_Sentinel_incidents',
            str(incident['name']), self.artifacts, self.sentinelCaseTemplate)

        return self.alert

    def validateRequest(self, request):
        if request.is_json:
            self.content = request.get_json()
            if 'type' in self.content and self.content['type'] == "Active":
                self.workflowReport = self.allIncidents2Alert(
                    self.content['type'])
                if self.workflowReport['success']:
                    return json.dumps(self.workflowReport), 200
                else:
                    return json.dumps(self.workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all opened incidents created within Azure Sentinel
        and create alerts for them in TheHive
        """
        self.logger.info('%s.allIncidents2Alert starts', __name__)

        self.report = dict()
        self.report['success'] = True
        self.report['incidents'] = list()

        try:
            self.incidentsList = self.azureSentinelConnector.getIncidents()

            # each incident in the list is represented as a dict
            # we enrich this dict with additional details
            for incident in self.incidentsList:

                # Prepare new alert
                self.incident_report = dict()
                self.logger.debug("incident: %s" % incident)
                # self.logger.info("Enriching incident...")
                # enrichedincident = enrichIncident(incident)
                # self.logger.debug("Enriched incident: %s" % enrichedincident)
                self.theHiveAlert = self.sentinelIncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                self.query = dict()
                self.query['sourceRef'] = str(incident['name'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['name']))
                self.results = self.theHiveConnector.findAlert(self.query)
                if len(self.results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['name']))

                    try:
                        self.theHiveEsAlertId = self.theHiveConnector.createAlert(
                            self.theHiveAlert)['id']

                        self.incident_report[
                            'raised_alert_id'] = self.theHiveEsAlertId
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allIncidents2Alert failed',
                                          __name__,
                                          exc_info=True)
                        self.incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            self.incident_report['message'] = errorMessage
                        else:
                            self.incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        self.incident_report['incident_id'] = incident['name']
                        # Mark the overall run as failed if any incident fails
                        self.report['success'] = False

                    self.report['incidents'].append(self.incident_report)
                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['name']))
                    self.alert_found = self.results[0]

                    # Check if alert is already created, but needs updating
                    if self.check_if_updated(self.alert_found,
                                             vars(self.theHiveAlert)):
                        self.logger.info(
                            "Found changes for %s, updating alert" %
                            self.alert_found['id'])

                        # update alert
                        self.theHiveConnector.updateAlert(
                            self.alert_found['id'],
                            self.theHiveAlert,
                            fields=["tags", "artifacts"])
                        self.incident_report[
                            'updated_alert_id'] = self.alert_found['id']
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True
                    else:
                        self.logger.info("No changes found for %s" %
                                         self.alert_found['id'])
                        continue

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Azure Sentinel incident (retrieving incidents failed)',
                exc_info=True)
            self.report['success'] = False
            self.report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return self.report
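
The nested getHiveSeverity helper in sentinelIncidentToHiveAlert maps Sentinel's Low/Medium/High severity strings to TheHive's numeric 1/2/3 scale, falling back to 1. An equivalent, table-driven sketch (the constant and function names are illustrative, not part of the original module):

# Table-driven equivalent of the severity mapping used above; names are
# illustrative only.
SENTINEL_TO_THEHIVE_SEVERITY = {"Low": 1, "Medium": 2, "High": 3}


def get_hive_severity(incident):
    # Unknown or missing severities fall back to 1 (low), matching the
    # original helper's final "return 1".
    severity = incident.get('properties', {}).get('severity')
    return SENTINEL_TO_THEHIVE_SEVERITY.get(severity, 1)
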
Example n. 8
class Integration(Main):

    def __init__(self):
        super().__init__()
        self.RDConnector = RDConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectRD()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectRD(self):
        self.logger.info('%s.connectRD starts', __name__)

        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['Responsible disclosure', 'Synapse']

        tracker_file = "./modules/ResponsibleDisclosure/email_tracker"
        link_to_load = ""
        if os.path.exists(tracker_file):
            self.logger.debug("Reading from the tracker file...")
            with open(tracker_file, "r") as tracker:
                link_to_load = tracker.read()

        if not link_to_load:
            link_to_load = self.cfg.get('ResponsibleDisclosure', 'list_endpoint')

        emails, new_link = self.RDConnector.scan(link_to_load)

        try:
            for email in emails:
                try:
                    if ('@removed' in email) or email["from"]["emailAddress"]["address"] in self.cfg.get('ResponsibleDisclosure', 'excluded_senders'):
                        continue
                    self.logger.debug("Found unread E-mail with id: {}".format(email['id']))

                    # Get the conversation id from the email
                    CID = email["conversationId"]
                    # Conversation id hash will be used as a unique identifier for the alert
                    CIDHash = hashlib.md5(CID.encode()).hexdigest()

                    email_date = datetime.strptime(email["receivedDateTime"], "%Y-%m-%dT%H:%M:%SZ")
                    epoch_email_date = email_date.timestamp() * 1000

                    alertTitle = "Responsible Disclosure - {}".format(email["subject"])

                    alertDescription = self.createDescription(email)

                    # Moving the email from Inbox to the new folder defined by variable to_move_folder in synapse.conf
                    # Disabled temporarily
                    # self.RDConnector.moveToFolder(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'], self.cfg.get('ResponsibleDisclosure', 'to_move_folder'))

                    # Get all the attachments and upload to the hive observables
                    attachment_data = self.RDConnector.listAttachment(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'])

                    all_artifacts = []
                    all_attachments = []

                    if attachment_data:
                        for att in attachment_data:
                            file_name = self.RDConnector.downloadAttachments(att['name'], att['attachment_id'], att['isInline'], att['contentType'])
                            all_attachments.append(file_name)

                            self.af = AlertArtifact(dataType='file', data=file_name, tlp=2, tags=['Responsible disclosure', 'Synapse'], ioc=True)

                            all_artifacts.append(self.af)

                    # Create the alert in thehive
                    alert = self.TheHiveConnector.craftAlert(
                        alertTitle,
                        alertDescription,
                        1,
                        epoch_email_date,
                        self.tags, 2,
                        "New",
                        "internal",
                        "ResponsibleDisclosure",
                        CIDHash,
                        all_artifacts,
                        self.cfg.get('ResponsibleDisclosure', 'case_template'))

                    # Check if the alert was created successfully
                    query = dict()
                    query['sourceRef'] = str(CIDHash)

                    # Look up if any existing alert in theHive
                    alert_results = self.TheHiveConnector.findAlert(query)

                    # If no alerts are found for corresponding CIDHASH, create a new alert
                    if len(alert_results) == 0:
                        createdAlert = self.TheHiveConnector.createAlert(alert)

                        # Send an automatic reply to the original email sender from the responsible disclosure email address
                        autoreply_subject_name = "RE: {}".format(email["subject"])

                        self.RDConnector.sendAutoReply("*****@*****.**", email["from"]["emailAddress"]["address"], self.cfg.get('ResponsibleDisclosure', 'email_body_filepath'), autoreply_subject_name)

                    # If alert is found update the alert or it may have been migrated to case so update the case
                    if len(alert_results) > 0:
                        alert_found = alert_results[0]

                        # Check if alert is promoted to a case
                        if 'case' in alert_found:

                            case_found = self.TheHiveConnector.getCase(alert_found['case'])

                            # Create a case model
                            self.updated_case = Case()

                            # Update the case with new description
                            # What if the email body is empty for new email, then use the old description
                            self.updated_case.description = case_found['description'] + "\n\n" + alertDescription

                            self.updated_case.id = alert_found['case']
                            self.TheHiveConnector.updateCase(self.updated_case, ["description"])
                            self.logger.info("updated the description of the case with id: {}".format(alert_found['case']))

                            # Check if there are new observables available
                            if all_attachments:
                                for att in all_attachments:
                                    try:
                                        self.TheHiveConnector.addFileObservable(alert_found['case'], att, "email attachment")
                                    except Exception as e:
                                        self.logger.error(f"Encountered an error while creating a new file based observable: {e}", exc_info=True)
                                        continue
                        # Else it means there is no corresponding case so update the alert
                        else:
                            # create an alert model
                            self.updated_alert = Alert()

                            # Update the alert with new description
                            # What if the email body is empty for new email, then use the old description
                            self.updated_alert.description = alert_found['description'] + "\n\n" + alertDescription

                            self.TheHiveConnector.updateAlert(alert_found['id'], self.updated_alert, ["description"])
                            self.logger.info("updated the description of the alert with id: {}".format(alert_found['id']))
                except Exception as e:
                    self.logger.error(e, exc_info=True)
                    continue

                if all_attachments:
                    for att in all_attachments:
                        os.remove(att)

            # Write the delta link to the tracker
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error(e)
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createDescription(self, email):

        email_body = email['body']['content']
        subject = email["subject"]
        # Get the conversation id from the email
        CID = email["conversationId"]
        # Conversation id hash will be used as a unique identifier for the alert
        CIDHash = hashlib.md5(CID.encode()).hexdigest()

        # Parse all the URLs and add them to a field in the description table
        urls_list = re.findall(r'\<(https?://[\S]+?)\>', email_body)
        # "&#13;" is ascii for next line
        urls_str = ' &#13; '.join(str(x) for x in urls_list)

        from_e = email["from"]["emailAddress"]["address"]
        to_e = "N/A"
        if email["toRecipients"]:
            to_e = email["toRecipients"][0]["emailAddress"]["address"]

        OriginatingIP = "N/A"
        for header in email['internetMessageHeaders']:
            if header['name'] == 'X-Originating-IP':
                # Format the IP value; by default it comes in as [x.x.x.x]
                OriginatingIP = (header['value'][1:-1])

        # putting together the markdown table
        temp_fullbody = []
        temp_fullbody.append("|     |     |")
        temp_fullbody.append("|:-----|:-----|")
        temp_fullbody.append("|  " + "**" + "Subject" + "**" + "  |  " + subject + "  |")
        temp_fullbody.append("|  " + "**" + "Sender" + "**" + "  |  " + from_e + "  |")
        temp_fullbody.append("|  " + "**" + "Recipient" + "**" + "  |  " + to_e + "  |")
        temp_fullbody.append("|  " + "**" + "Originating IP" + "**" + "  |  " + OriginatingIP + "  |")
        temp_fullbody.append("|  " + "**" + "Received at" + "**" + "  |  " + email["receivedDateTime"] + "  |")
        temp_fullbody.append("|  " + "**" + "URL(s) in email" + "**" + "  |  " + urls_str + "  |")
        temp_fullbody.append("|  " + "**" + "Msg ID" + "**" + "  |  " + email['id'] + "  |")
        temp_fullbody.append("**" + "Email body" + "**")
        temp_fullbody.append("```")
        temp_fullbody.append(email_body)
        temp_fullbody.append("```")

        alertDescription = '\r\n'.join(str(x) for x in temp_fullbody)
        return alertDescription
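
Alert deduplication in this integration hinges on the sourceRef being an MD5 of the Outlook conversationId, so every reply in the same thread maps to the same alert or case. A minimal sketch of that derivation (the conversationId value below is made up):

import hashlib

# Minimal sketch of the sourceRef derivation used above; the conversationId
# value is made up.
conversation_id = "AAQkADAwATM3ZmYAZS0yMzUxLTc4YmMtMDACLTAwCgAQAExample"
source_ref = hashlib.md5(conversation_id.encode()).hexdigest()

# Every email in the same Outlook conversation yields the same 32-character
# hex digest, which is what findAlert({'sourceRef': ...}) matches on.
print(source_ref)
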
Example n. 9
def logstash2Alert(event):
    """
       Parse the received logstash notification
       Original example logstash output:

       Nice example input:
        
    """
    #logger = logging.getLogger(__name__)
    logger.info('%s.logstash2Alert starts', __name__)

    report = dict()
    report['success'] = True

    try:
        cfg = getConf()

        theHiveConnector = TheHiveConnector(cfg)

        # Map the logstash alert to the alert that will be enhanced
        logger.info('Looking for Logstash Alert %s in TheHive alerts',
                    str(event['sourceRef']))

        # TODO: find a way to generate a shorter, more useful sourceRef from within Synapse
        q = dict()
        q['sourceRef'] = str(event['sourceRef'])
        results = theHiveConnector.findAlert(q)
        if len(results) == 0:
            logger.info(
                'Logstash Alert %s not found in TheHive alerts, creating it',
                str(event['sourceRef']))
            event_report = dict()

            event['case_template'] = "ELK-Anomalies"

            #Enrichment is not in scope yet
            #enrichedAlert = enrichAlert(elkConnector, event)

            try:
                theHiveAlert = ELKToHiveAlert(theHiveConnector, event)
                theHiveEsAlertId = theHiveConnector.createAlert(theHiveAlert)

                event_report['raised_alert_id'] = theHiveEsAlertId
                event_report['alert_id'] = event['sourceRef']
                event_report['success'] = True

            except Exception as e:
                logger.error('%s.logstash2Alert failed',
                             __name__,
                             exc_info=True)
                event_report['success'] = False
                if isinstance(e, ValueError):
                    errorMessage = json.loads(str(e))['message']
                    event_report['message'] = errorMessage
                else:
                    event_report['message'] = str(
                        e) + ": Couldn't raise alert in TheHive"
                event_report['alert_id'] = event['sourceRef']
                # Mark the overall run as failed if the alert could not be raised
                report['success'] = False

            report['event'] = event_report
        else:
            logger.info('Logstash Alert %s already imported as alert',
                        str(event['sourceRef']))

    except Exception as e:

        logger.error('Failed to create alert from Logstash Alert',
                     exc_info=True)
        report['success'] = False
        report[
            'message'] = "%s: Failed to create alert from Logstash Alert" % str(
                e)

    return report
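
The function above only requires the event dict to carry a sourceRef for the deduplication lookup; the remaining fields are handed to ELKToHiveAlert. A hypothetical minimal payload, with made-up values (actually raising the alert still needs a reachable TheHive instance and a valid configuration):

# Hypothetical minimal event for logstash2Alert; every value is made up and
# only sourceRef is strictly required by the lookup logic above.
event = {
    "sourceRef": "logstash-watch-000123",
    "title": "Anomalous login volume",
    "description": "some description",
    "source": "Elastic",
    "type": "asml",
}

report = logstash2Alert(event)
print(report["success"])
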
Example n. 10
def ml2Alert(mlalert):
    """
       Parse the received ml watcher notification
       Original example Watch Actions:
       
        "TheHive": {
            "webhook": {
                "scheme": "http",
                "host": "machine.domain.com",
                "port": 5000,
                "method": "post",
                "path": "/ELK2alert",
                "params": {},
                "headers": {
                    "Authorization": "Bearer 2WTbTHH8iaSeoo8yk8y0GA96dX7/Tz7s",
                    "Cookie": "cookie=no",
                    "Content-Type": "application/json"
                },
                "body": "{\"ml_job_id\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}\",\n\"description\": \"some description\",\n\"start_time\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_iso8601.0}}\",\n\"anomaly_score\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.score.0}}\",\n\"url\": \"https://machine.domain.com:5601/app/ml#/explorer/?_g=(ml:(jobIds:!('{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}')),refreshInterval:(display:Off,pause:!f,value:0),time:(from:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.start.0}}',mode:absolute,to:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.end.0}}'))&_a=(filters:!(),mlAnomaliesTable:(intervalValue:auto,thresholdValue:0),mlExplorerSwimlane:(selectedLane:Overall,selectedTime:{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_epoch.0}},selectedType:overall),query:(query_string:(analyze_wildcard:!t,query:'**')))\",\n\"influencers\": \"{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\\n{{_source.function}}({{_source.field_name}}) {{_source.by_field_value}} {{_source.over_field_value}} {{_source.partition_field_value}} [{{fields.score.0}}]\\n{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\",\n\"type\": \"asml\",\n\"source\": \"Elastic\",\n\"sourceRef\": \"{{ctx.payload.as_watch_id}}\"}"
            }
        }

       Nice example input:
        "{
            \"ml_job_id\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}\",\n
            \"description\": \"some description\",\n
            \"start_time\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_iso8601.0}}\",\n
            \"anomaly_score\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.score.0}}\",\n
            \"url\": \"https://machine.domain.com:5601/app/ml#/explorer/?_g=(ml:(jobIds:!('{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}')),refreshInterval:(display:Off,pause:!f,value:0),time:(from:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.start.0}}',mode:absolute,to:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.end.0}}'))&_a=(filters:!(),mlAnomaliesTable:(intervalValue:auto,thresholdValue:0),mlExplorerSwimlane:(selectedLane:Overall,selectedTime:{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_epoch.0}},selectedType:overall),query:(query_string:(analyze_wildcard:!t,query:'**')))\",\n
            \"influencers\": \"{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\\n
                               {{_source.function}}({{_source.field_name}}) {{_source.by_field_value}} {{_source.over_field_value}} {{_source.partition_field_value}} [{{fields.score.0}}]\\n
                               {{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\",\n
            \"type\": \"asml\",\n
            \"source\": \"Elastic\",\n
            \"sourceRef\": \"{{ctx.payload.as_watch_id}}\"
        }"
    """
    #logger = logging.getLogger(__name__)
    logger.info('%s.ml2Alert starts', __name__)

    report = dict()
    report['success'] = True

    try:
        cfg = getConf()

        theHiveConnector = TheHiveConnector(cfg)

        #Map the ml watcher alert to the alert that will be enhanced
        logger.info('Looking for ML Alert %s in TheHive alerts',
                    str(mlalert['sourceRef']))

        # TODO: find a way to generate a shorter, more useful sourceRef from within Synapse
        q = dict()
        q['sourceRef'] = str(mlalert['sourceRef'])
        results = theHiveConnector.findAlert(q)
        if len(results) == 0:
            logger.info('ML Alert %s not found in TheHive alerts, creating it',
                        str(mlalert['sourceRef']))
            mlalert_report = dict()

            #Set generic parameters
            mlalert['title'] = "ML: " + mlalert['ml_job_id']
            mlalert['description'] = craftMLAlertDescription(mlalert)
            mlalert['case_template'] = "ELK-ML"

            #Enrichment is not in scope yet
            #enrichedAlert = enrichAlert(elkConnector, mlalert)

            try:
                theHiveAlert = ELKToHiveAlert(theHiveConnector, mlalert)
                theHiveEsAlertId = theHiveConnector.createAlert(theHiveAlert)

                mlalert_report['raised_alert_id'] = theHiveEsAlertId
                mlalert_report['ml_alert_id'] = mlalert['sourceRef']
                mlalert_report['success'] = True

            except Exception as e:
                logger.error('%s.ml2Alert failed', __name__, exc_info=True)
                mlalert_report['success'] = False
                if isinstance(e, ValueError):
                    errorMessage = json.loads(str(e))['message']
                    mlalert_report['message'] = errorMessage
                else:
                    mlalert_report['message'] = str(
                        e) + ": Couldn't raise alert in TheHive"
                mlalert_report['ml_alert_id'] = mlalert['sourceRef']
                # Mark the overall run as failed if the alert could not be raised
                report['success'] = False

            report['mlalert'] = mlalert_report
        else:
            logger.info('ML Alert %s already imported as alert',
                        str(mlalert['sourceRef']))

    except Exception as e:

        logger.error('Failed to create alert from ML Alert', exc_info=True)
        report['success'] = False
        report['message'] = "%s: Failed to create alert from ML Alert" % str(e)

    return report
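
The Watcher webhook body quoted in the docstring arrives as JSON and, once parsed, becomes the mlalert dict consumed here. A hypothetical minimal example with made-up values (running it end to end still requires a reachable TheHive instance and a valid configuration):

# Hypothetical minimal payload for ml2Alert, mirroring the Watcher webhook
# body shown in the docstring above; all values are made up.
mlalert = {
    "ml_job_id": "auth_rare_user",
    "description": "some description",
    "start_time": "2021-01-01T00:00:00Z",
    "anomaly_score": "87.4",
    "url": "https://machine.domain.com:5601/app/ml#/explorer/",
    "influencers": "count() user=jdoe [87.4]",
    "type": "asml",
    "source": "Elastic",
    "sourceRef": "auth_rare_user-2021.01.01-00",
}

report = ml2Alert(mlalert)
print(report["success"])
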