Example #1
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.mlabsConnector = MLabsConnector(self.cfg)
        self.theHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectMLabs()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectMLabs(self):
        self.logger.info('%s.connectMLabs starts', __name__)

        report = dict()
        report['success'] = False

        # Setup Tags
        self.tags = ['MessageLabs', 'Synapse']

        try:
            tracker_file = "./modules/MessageLabs/phishing_tracker"
            link_to_load = ""
            if os.path.exists(tracker_file):
                self.logger.debug(
                    "MessageLabs: phishing Reading from the tracker file...")
                with open(tracker_file, "r") as tracker:
                    link_to_load = tracker.read()

            if not link_to_load:
                link_to_load = self.cfg.get('MessageLabs', 'list_endpoint')

            unread, new_link = self.mlabsConnector.scan(link_to_load)

            for msg in unread:
                if msg['subject'] != self.cfg.get('MessageLabs',
                                                  'subject_contains'):
                    continue

                fullBody = msg['body']['content']
                subject = ""
                internalMessageId = ""

                for line in fullBody.splitlines():
                    if line.startswith("Subject"):
                        subject = line
                    if line.startswith("Message ID:"):
                        internalMessageId = line.split(" ID: ")[-1]
                    if line.startswith("Date:"):
                        email_date = datetime.strptime(
                            line.split("Date: ")[-1],
                            "%a, %d %b %Y %H:%M:%S %z")
                        epoch_email_date = email_date.timestamp() * 1000

                caseTitle = str(
                    self.cfg.get('MessageLabs', 'subject_contains') + " - " +
                    str(subject))
                caseDescription = fullBody

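                # craftAlert argument order as used throughout these integrations
                # (assumed): title, description, severity, date, tags, tlp,
                # status, type, source, sourceRef, artifacts, caseTemplate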
                alert = self.theHiveConnector.craftAlert(
                    caseTitle, caseDescription, 1, epoch_email_date, self.tags,
                    2, "New", "internal", "MessageLabs", internalMessageId, [],
                    self.cfg.get('MessageLabs', 'case_template'))
                try:
                    createdCase = self.theHiveConnector.createAlert(alert)
                except ValueError as e:
                    self.logger.info(
                        "Alert with sourceRef '{}' already exists".format(
                            internalMessageId))

            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report
Example #2
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.qradarConnector = QRadarConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def enrichOffense(self, offense):

        enriched = copy.deepcopy(offense)

        artifacts = []

        enriched['offense_type_str'] = \
            self.qradarConnector.getOffenseTypeStr(offense['offense_type'])

        # Add the offense source explicitly
        if enriched['offense_type_str'] == 'Username':
            artifacts.append({
                'data': offense['offense_source'],
                'dataType': 'user-account',
                'message': 'Offense Source',
                'tags': ['QRadar']
            })

        # Add the local and remote sources
        # srcIps contains offense source IPs
        srcIps = list()
        # dstIps contains offense destination IPs
        dstIps = list()
        # srcDstIps contains IPs which are both source and destination of offense
        srcDstIps = list()
        for ip in self.qradarConnector.getSourceIPs(enriched):
            srcIps.append(ip)

        for ip in self.qradarConnector.getLocalDestinationIPs(enriched):
            dstIps.append(ip)

        # making copies is needed since we want to
        # access and delete data from the list at the same time
        s = copy.deepcopy(srcIps)
        d = copy.deepcopy(dstIps)

        for srcIp in s:
            for dstIp in d:
                if srcIp == dstIp:
                    srcDstIps.append(srcIp)
                    srcIps.remove(srcIp)
                    dstIps.remove(dstIp)

        for ip in srcIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source IP',
                'tags': ['QRadar', 'src']
            })
        for ip in dstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Local destination IP',
                'tags': ['QRadar', 'dst']
            })
        for ip in srcDstIps:
            artifacts.append({
                'data': ip,
                'dataType': 'ip',
                'message': 'Source and local destination IP',
                'tags': ['QRadar', 'src', 'dst']
            })

        # Parse offense types to add the offense source as an observable when a valid type is used
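        # Assumed configuration shape (illustrative only): a plain string maps the
        # offense source straight to an observable dataType, while a list defines
        # regex extractions with capture-group-index -> dataType mappings:
        #   observables_in_offense_type:
        #     Username: user-account
        #     Hostname:
        #       - regex: 'host=(\S+)'
        #         match_groups: {0: 'fqdn'}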
        for offense_type, extraction_config in self.cfg.get(
                'QRadar', 'observables_in_offense_type', fallback={}).items():
            if enriched['offense_type_str'] == offense_type:
                if isinstance(extraction_config, str):
                    observable_type = extraction_config
                    artifacts.append({
                        'data': enriched['offense_source'],
                        'dataType': observable_type,
                        'message': 'QRadar Offense source',
                        'tags': ['QRadar']
                    })
                elif isinstance(extraction_config, list):
                    for extraction in extraction_config:
                        regex = re.compile(extraction['regex'])
                        matches = regex.findall(str(
                            enriched['offense_source']))
                        if len(matches) > 0:
                            # TODO: loop over all matches instead of only matches[0]
                            for match_group, observable_type in extraction[
                                    'match_groups'].items():
                                try:
                                    artifacts.append({
                                        'data':
                                        matches[0][match_group],
                                        'dataType':
                                        observable_type,
                                        'message':
                                        'QRadar Offense Type based observable',
                                        'tags': ['QRadar', 'offense_type']
                                    })
                                except Exception as e:
                                    self.logger.warning(
                                        "Could not find match group {} in {}".
                                        format(match_group,
                                               enriched['offense_type_str']))
                else:
                    self.logger.error(
                        "Configuration for observables_in_offense_type is wrongly formatted. Please fix this to enable this functionality"
                    )

        # Remove observables that are to be excluded based on the configuration
        artifacts = self.checkObservableExclusionList(artifacts)

        # Match observables against the TLP list
        artifacts = self.checkObservableTLP(artifacts)

        # Add all the observables
        enriched['artifacts'] = artifacts

        # Add rule names to offense
        enriched['rules'] = self.qradarConnector.getRuleNames(offense)

        # waiting 1s to make sure the logs are searchable
        sleep(1)
        # adding the first 3 raw logs
        enriched['logs'] = self.qradarConnector.getOffenseLogs(enriched)

        return enriched

    def qradarOffenseToHiveAlert(self, offense):
        def getHiveSeverity(offense):
            # severity in TheHive is either low, medium or high
            # while severity in QRadar is from 1 to 10
            # low will be [1;4] => 1
            # medium will be [5;6] => 2
            # high will be [7;10] => 3
            if offense['severity'] < 5:
                return 1
            elif offense['severity'] < 7:
                return 2
            elif offense['severity'] < 11:
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['QRadar', 'Offense', 'Synapse']
        # Add the offense type as a tag
        if 'offense_type_str' in offense:
            tags.append("qr_offense_type: {}".format(
                offense['offense_type_str']))

        # Check if the automation ids need to be extracted
        tags_extracted = []
        if self.cfg.getboolean('QRadar', 'extract_automation_identifiers'):

            # Run the extraction function and add it to the offense data
            # Extract automation ids
            tags_extracted = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'tag_regexes'))
            # Extract any possible name for a document on a knowledge base
            offense['use_case_names'] = self.tagExtractor(
                offense, self.cfg.get('QRadar', 'automation_fields'),
                self.cfg.get('QRadar', 'uc_kb_name_regexes'))
            if len(tags_extracted) > 0:
                tags.extend(tags_extracted)
            else:
                self.logger.info('No match found for offense %s',
                                 offense['id'])

        # Check if the mitre ids need to be extracted
        if self.cfg.getboolean('QRadar', 'extract_mitre_ids'):
            # Extract mitre tactics
            offense['mitre_tactics'] = self.tagExtractor(
                offense, ["rules"], [r'[tT][aA]\d{4}'])
            if 'mitre_tactics' in offense:
                tags.extend(offense['mitre_tactics'])

            # Extract mitre techniques
            offense['mitre_techniques'] = self.tagExtractor(
                offense, ["rules"], [r'[tT]\d{4}'])
            if 'mitre_techniques' in offense:
                tags.extend(offense['mitre_techniques'])

        if "categories" in offense:
            for cat in offense['categories']:
                tags.append(cat)

        defaultObservableDatatype = [
            'autonomous-system', 'domain', 'file', 'filename', 'fqdn', 'hash',
            'ip', 'mail', 'mail_subject', 'other', 'process_filename',
            'regexp', 'registry', 'uri_path', 'url', 'user-account',
            'user-agent'
        ]
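        # Observables whose dataType is not in this list are submitted to TheHive
        # as dataType 'other' with an extra 'type:<original dataType>' tag (see below)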

        artifacts = []
        for artifact in offense['artifacts']:
            # Add automation tagging and mitre tagging to observables
            if len(tags_extracted) > 0:
                artifact['tags'].extend(tags_extracted)
            if 'mitre_tactics' in offense:
                artifact['tags'].extend(offense['mitre_tactics'])
            if 'mitre_techniques' in offense:
                artifact['tags'].extend(offense['mitre_techniques'])

            if artifact['dataType'] in defaultObservableDatatype:
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType=artifact['dataType'],
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            else:
                artifact['tags'].append('type:' + artifact['dataType'])
                hiveArtifact = self.TheHiveConnector.craftAlertArtifact(
                    dataType='other',
                    data=artifact['data'],
                    message=artifact['message'],
                    tags=artifact['tags'],
                    tlp=artifact['tlp'])
            artifacts.append(hiveArtifact)

        # Retrieve the configured case_template
        qradarCaseTemplate = self.cfg.get('QRadar', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}, {}".format(offense['id'], offense['description']),
            self.craftAlertDescription(offense), getHiveSeverity(offense),
            offense['start_time'],
            tags, 2, 'Imported', 'internal', 'QRadar_Offenses',
            str(offense['id']), artifacts, qradarCaseTemplate)

        return alert

    def validateRequest(self, request):
        if request.is_json:
            content = request.get_json()
            if 'timerange' in content:
                workflowReport = self.allOffense2Alert(content['timerange'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing <timerange> key/value')
                return json.dumps({
                    'success': False,
                    'message': "timerange key missing in request"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allOffense2Alert(self, timerange):
        """
        Get all open offenses created within the last
        <timerange> minutes and create alerts for them in
        TheHive
        """
        self.logger.info('%s.allOffense2Alert starts', __name__)

        report = dict()
        report['success'] = True
        report['offenses'] = list()

        try:
            offensesList = self.qradarConnector.getOffenses(timerange)
            # Check for offenses that should have been closed
            for offense in offensesList:
                closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(
                    offense['id'])
                if closure_info:
                    # Close incident and continue with the next incident
                    self.logger.info(
                        "Closed case found for {}. Closing offense...".format(
                            offense['id']))
                    self.qradarConnector.closeOffense(offense['id'])
                    continue

                matched = False
                # Filter based on regexes in configuration
                for offense_exclusion_regex in self.cfg.get(
                        'QRadar', 'offense_exclusion_regexes', fallback=[]):
                    self.logger.debug(
                        "Offense exclusion regex found '{}'. Matching against offense {}"
                        .format(offense_exclusion_regex, offense['id']))
                    regex = re.compile(offense_exclusion_regex, flags=re.I)
                    if regex.match(offense['description']):
                        self.logger.debug(
                            "Found exclusion match for offense {} and regex {}"
                            .format(offense['id'], offense_exclusion_regex))
                        matched = True
                if matched:
                    continue

                # Prepare new alert
                offense_report = dict()
                self.logger.debug("offense: %s" % offense)
                self.logger.info("Enriching offense...")
                enrichedOffense = self.enrichOffense(offense)
                self.logger.debug("Enriched offense: %s" % enrichedOffense)
                theHiveAlert = self.qradarOffenseToHiveAlert(enrichedOffense)

                # searching if the offense has already been converted to alert
                query = dict()
                query['sourceRef'] = str(offense['id'])
                self.logger.info('Looking for offense %s in TheHive alerts',
                                 str(offense['id']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'Offense %s not found in TheHive alerts, creating it',
                        str(offense['id']))

                    try:
                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']

                        offense_report['raised_alert_id'] = theHiveEsAlertId
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allOffense2Alert failed',
                                          __name__,
                                          exc_info=True)
                        offense_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            offense_report['message'] = errorMessage
                        else:
                            offense_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        offense_report['offense_id'] = offense['id']
                        # Mark the overall run as failed if any offense fails
                        report['success'] = False

                else:
                    self.logger.info(
                        'Offense %s already imported as alert, checking for updates',
                        str(offense['id']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found, offense['id']):
                        offense_report['updated_alert_id'] = alert_found['id']
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                    else:
                        offense_report['qradar_offense_id'] = offense['id']
                        offense_report['success'] = True
                report['offenses'].append(offense_report)

        except Exception as e:
            self.logger.error(
                'Failed to create alert from QRadar offense (retrieving offenses failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from offense" % str(e)

        return report

    def craftAlertDescription(self, offense):
        """
            From the offense metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add url to Offense
        qradar_ip = self.cfg.get('QRadar', 'server')
        url = (
            '[%s](https://%s/console/qradar/jsp/QRadar.jsp?appName=Sem&pageId=OffenseSummary&summaryId=%s)'
            % (str(offense['id']), qradar_ip, str(offense['id'])))

        description += '#### Offense: \n - ' + url + '\n\n'

        # Format associated rules
        rule_names_formatted = "#### Rules triggered: \n"
        rules = offense['rules']
        if len(rules) > 0:
            for rule in rules:
                if 'name' in rule:
                    rule_names_formatted += "- %s \n" % rule['name']
                else:
                    continue

        # Add rules overview to description
        description += rule_names_formatted + '\n\n'

        # Format associated documentation
        uc_links_formatted = "#### Use Case documentation: \n"
        kb_url = self.cfg.get('QRadar', 'kb_url')
        if 'use_case_names' in offense and offense['use_case_names']:
            for uc in offense['use_case_names']:
                replaced_kb_url = kb_url.replace('<uc_kb_name>', uc)
                uc_links_formatted += f"- [{uc}]({replaced_kb_url}) \n"

            # Add associated documentation
            description += uc_links_formatted + '\n\n'

        # Add mitre Tactic information
        mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        if 'mitre_tactics' in offense and offense['mitre_tactics']:
            for tactic in offense['mitre_tactics']:
                mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (
                    tactic, 'https://attack.mitre.org/tactics/', tactic)

            # Add associated documentation
            description += mitre_ta_links_formatted + '\n\n'

        # Add mitre Technique information
        mitre_t_links_formatted = "#### MITRE Techniques: \n"
        if 'mitre_techniques' in offense and offense['mitre_techniques']:
            for technique in offense['mitre_techniques']:
                mitre_t_links_formatted += "- [%s](%s/%s) \n" % (
                    technique, 'https://attack.mitre.org/techniques/',
                    technique)

            # Add associated documentation
            description += mitre_t_links_formatted + '\n\n'

        # Add offense details table
        description += (
            '#### Summary:\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' +
            str(self.qradarConnector.formatDate(offense['start_time'])) +
            ' |\n' + '| **Offense ID**          | ' + str(offense['id']) +
            ' |\n' + '| **Description**         | ' +
            str(offense['description'].replace('\n', '')) + ' |\n' +
            '| **Offense Type**        | ' + str(offense['offense_type_str']) +
            ' |\n' + '| **Offense Source**      | ' +
            str(offense['offense_source']) + ' |\n' +
            '| **Destination Network** | ' +
            str(offense['destination_networks']) + ' |\n' +
            '| **Source Network**      | ' + str(offense['source_network']) +
            ' |\n\n\n' + '\n\n\n\n')

        # Add raw payload
        description += '#### Payload:\n```\n'
        for log in offense['logs']:
            description += log['utf8_payload'] + '\n'
        description += '```\n\n'

        return description
Example #3
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.mlabsConnector = MLabsConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectMLabs()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectMLabs(self):
        self.logger.info('%s.connectMLabs starts', __name__)

        report = dict()
        report['success'] = False

        # Setup Tags
        self.tags = ['MessageLabs', 'Synapse']

        try:
            tracker_file = "./modules/MessageLabs/phishing_tracker"
            link_to_load = ""
            if os.path.exists(tracker_file):
                self.logger.debug(
                    "MessageLabs: phishing Reading from the tracker file...")
                with open(tracker_file, "r") as tracker:
                    link_to_load = tracker.read()

            if not link_to_load:
                link_to_load = self.cfg.get('MessageLabs', 'list_endpoint')

            unread, new_link = self.mlabsConnector.scan(link_to_load)

            for msg in unread:
                self.logger.debug("Found unread E-mail with id: {}".format(
                    msg['id']))
                if ('@removed' in msg) or msg['subject'] != self.cfg.get(
                        'MessageLabs', 'subject_contains'):
                    continue

                fullBody = msg['body']['content']
                subject = ""
                MIDHash = ""

                email_date = datetime.strptime(msg["receivedDateTime"],
                                               "%Y-%m-%dT%H:%M:%SZ")
                epoch_email_date = email_date.timestamp() * 1000

                for line in fullBody.splitlines():
                    if line.startswith("Subject"):
                        subject = line
                    if line.startswith("Message ID:"):
                        MIDHash = hashlib.md5(
                            line.split(" ID: ")[-1].encode()).hexdigest()

                caseTitle = str(
                    self.cfg.get('MessageLabs', 'subject_contains') + " - " +
                    str(subject))
                caseDescription = self.createFullBody(fullBody)

                alert = self.TheHiveConnector.craftAlert(
                    caseTitle, caseDescription, 1, epoch_email_date, self.tags,
                    2, "New", "internal", "MessageLabs", MIDHash, [],
                    self.cfg.get('MessageLabs', 'case_template'))

                query = dict()
                query['sourceRef'] = str(MIDHash)
                results = self.TheHiveConnector.findAlert(query)

                if len(results) == 0:
                    createdCase = self.TheHiveConnector.createAlert(alert)

            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createFullBody(self, fullbody):
        try:
            r = re.findall(
                r".*Policy name:\s([^\n\r]*)[\r\n]+.*Subject:\s([^\n\r]*)[\r\n]+.*Sender:\s([^\n\r]*)[\r\n]+Message ID: <([^\n\r]*)>[\r\n]+Sending server IP:\s([\d\.]*)[\r\n]+Date:\s([^\n\r]*)[\r\n]+Recipient:\s(.*)Attachments:\s(.*)Matched Content:\s(.*)Message body:\s(.*)",
                fullbody, re.MULTILINE | re.DOTALL)
            fields = [
                'Policy name', 'Subject', 'Sender', 'Message ID', 'Server IP',
                'Date', 'Recipients', 'Attachments', 'Matched Content',
                'E-mail body'
            ]
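            # fields[i] corresponds one-to-one with capture group i of the regex above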
            values = []
            temp_fullbody = []
            if len(r) > 0:
                for it in range(0, 10):
                    values.append(r[0][it])
                values[3] = "<" + values[3] + ">"  # modify Message ID
                values[6] = re.sub(
                    r'<[^<>]*>', '',
                    values[6].strip().replace("\r\n", " ").replace("\n", " ")
                )  # modify Recipients, so all of them will be in 1 table field
                values[7] = values[7].strip(
                )  # remove empty lines/new lines from attachments
                values[8] = values[8].strip(
                )  # remove empty lines/new lines from matched content

                # putting together the markdown table
                temp_fullbody.append("|     |     |")
                temp_fullbody.append("|-----|-----|")
                for it in range(0, 9):
                    temp_fullbody.append("|  " + fields[it] + "  |  " +
                                         values[it] + "  |")
                temp_fullbody.append("**" + fields[9] + "**")
                temp_fullbody.append("```")
                temp_fullbody.append(values[9])
                temp_fullbody.append("```")
            else:
                # if the e-mail can't be parsed with the regex above, provide it to the SOC unparsed
                temp_fullbody.append("```")
                temp_fullbody.append("**Unparsed E-mail**")
                temp_fullbody.append(str(fullbody))
                temp_fullbody.append("```")

            return '\r\n'.join(str(x) for x in temp_fullbody)

        except Exception as e:
            self.logger.error('Parsing error: ' + str(e), exc_info=True)
Example #4
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.lexsi = LexsiConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):

        if request.is_json:
            content = request.get_json()
            if 'type' in content and content['type'] == "Active":
                workflowReport = self.allIncidents2Alert(content['type'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all open incidents created within Lexsi
        and create alerts for them in TheHive
        """
        self.logger.info('%s.allIncidents2Alert starts', __name__)

        incidentsList = self.lexsi.getOpenItems()['result']

        report = dict()
        report['success'] = True
        report['incidents'] = list()

        try:
            # each incident in the list is represented as a dict
            # we enrich this dict with additional details
            for incident in incidentsList:

                # Prepare new alert
                incident_report = dict()
                self.logger.debug("incident: %s" % incident)

                theHiveAlert = self.IncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                query = dict()
                query['sourceRef'] = str(incident['incident'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['incident']))
                results = self.TheHiveConnector.findAlert(query)
                if len(results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['incident']))
                    try:

                        theHiveEsAlertId = self.TheHiveConnector.createAlert(
                            theHiveAlert)['id']
                        self.TheHiveConnector.promoteAlertToCase(
                            theHiveEsAlertId)

                        incident_report['raised_alert_id'] = theHiveEsAlertId
                        incident_report['lexsi_incident_id'] = incident[
                            'incident']
                        incident_report['success'] = True

                    except Exception as e:
                        self.logger.error(incident_report)
                        self.logger.error('%s.allIncidents2Alert failed',
                                          __name__,
                                          exc_info=True)
                        incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            incident_report['message'] = errorMessage
                        else:
                            incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        incident_report['incident_id'] = incident['incident']
                        # Mark the overall run as failed if any incident fails
                        report['success'] = False

                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['incident']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(
                            theHiveAlert, alert_found,
                            str(incident['incident'])):
                        # Mark the alert as read
                        self.TheHiveConnector.markAlertAsRead(
                            alert_found['id'])
                        incident_report['updated_alert_id'] = alert_found['id']
                        incident_report['lexsi_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                    else:
                        incident_report['lexsi_incident_id'] = str(
                            incident['incident'])
                        incident_report['success'] = True
                report['incidents'].append(incident_report)

            thehiveAlerts, open_lexsi_cases = self.lexsi_opened_alerts_thehive(
            )
            self.set_alert_status_ignored(incidentsList, thehiveAlerts,
                                          open_lexsi_cases)

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Lexsi incident (retrieving incidents failed)',
                exc_info=True)
            report['success'] = False
            report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return report

    def IncidentToHiveAlert(self, incident):

        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['Lexsi', 'incident', 'Synapse']

        # Skip for now
        artifacts = []

        # Retrieve the configured case_template
        CaseTemplate = self.cfg.get('Lexsi', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}: {}".format(incident['incident'], incident['title']),
            self.craftAlertDescription(incident),
            self.getHiveSeverity(incident),
            self.timestamp_to_epoch(incident['detected'], "%Y-%m-%d %H:%M:%S"),
            tags, 2, 'New', 'internal', 'Lexsi', str(incident['incident']),
            artifacts, CaseTemplate)

        return alert

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        description = ""

        # Add incident details table
        description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **URL**          | ' +
            "{}{}{}".format("```", str(incident['url']), "```") + ' |\n' +
            '| **Type**          | ' + str(incident['type']) + ' |\n' +
            '| **Severity**          | ' + str(incident['severity']) + ' |\n' +
            '| **Category**         | ' + str(incident['category']) + ' |\n' +
            '| **Updated**        | ' + str(incident['updated']) + ' |\n' +
            '| **Detected**        | ' + str(incident['detected']) + ' |\n' +
            '| **Source**        | ' + str(incident['source']) + ' |\n' +
            '| **Analyst Name(Lexsi)**        | ' +
            str(incident['analystName']) + ' |\n' +
            '| **Link to Orange Portal**        | ' +
            str("https://portal.cert.orangecyberdefense.com/cybercrime/{}".
                format(incident['id'])) + ' |\n' + '\n\n\n\n')

        return description

    def timestamp_to_epoch(self, date_time, pattern):
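        # Parses date_time using the given pattern (interpreted as local time)
        # and returns the corresponding epoch timestamp in milliseconds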
        return int(time.mktime(time.strptime(date_time, pattern))) * 1000

    def getHiveSeverity(self, incident):
        # severity in TheHive is either low, medium, high or critical
        # while severity in Lexsi is from 0 to 5
        if int(incident['severity']) in {0, 5}:
            return 1
        # elif int(incident['severity']) in {2,3}:
        #    return 2
        # elif int(incident['severity']) in {4,5}:
        #    return 3
        else:
            return 2

    def lexsi_opened_alerts_thehive(self):
        thehiveAlerts = []
        open_lexsi_cases = {}
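        # thehive4py-style query: select all alerts carrying the 'Lexsi' tag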
        query = In('tags', ['Lexsi'])

        self.logger.info(
            'Looking for incident in TheHive alerts with tag Lexsi')
        # self.logger.info(query)
        results = self.TheHiveConnector.findAlert(query)
        for alert_found in results:
            # Check if a case is linked
            if 'case' in alert_found:
                try:
                    case_found = self.TheHiveConnector.getCase(
                        alert_found['case'])
                    # Check if the status is open. Only then append it to the list
                    if case_found['status'] == "Open":
                        open_lexsi_cases[alert_found['sourceRef']] = case_found
                        thehiveAlerts.append(alert_found['sourceRef'])
                except Exception as e:
                    self.logger.error("Could not find case: {}".format(e),
                                      exc_info=True)
                    continue
        self.logger.debug(
            "Lexsi Alerts opened in theHive: {}".format(thehiveAlerts))
        return thehiveAlerts, open_lexsi_cases

    def compare_lists(self, list1, list2):
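        # Returns the elements of list1 that are not in list2
        # (set difference, so duplicates and ordering are not preserved)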
        return list(set(list1) - set(list2))

    def set_alert_status_ignored(self, incidentsList, thehiveAlerts,
                                 open_lexsi_cases):
        lexsi_reporting = []
        # incidentsList = self.lexsi.getOpenItems()['result']

        for incident in incidentsList:
            lexsi_reporting.append(incident['incident'])

        self.logger.debug(
            "the list of opened Lexsi Incidents: {}".format(lexsi_reporting))
        uncommon_elements = self.compare_lists(thehiveAlerts, lexsi_reporting)
        # uncommon_elements=['476121']
        self.logger.debug(
            "Open cases present in TheHive but not in list of opened Lexsi Incidents: {}"
            .format((uncommon_elements)))

        for element in uncommon_elements:
            self.logger.info(
                "Preparing to close the case for {}".format(element))
            query = dict()
            query['sourceRef'] = str(element)
            self.logger.debug('Looking for incident %s in TheHive alerts',
                              str(element))
            try:
                if element in open_lexsi_cases:
                    # Resolve the case
                    case_id = open_lexsi_cases[element]['id']
                    self.logger.debug("Case id for element {}: {}".format(
                        element, case_id))
                    self.logger.debug("Preparing to resolve the case")
                    self.TheHiveConnector.closeCase(case_id)
                    self.logger.debug("Closed case with id {} for {}".format(
                        case_id, element))

            except Exception as e:
                self.logger.error("Could not close case: {}".format(e),
                                  exc_info=True)
                continue
Example #5
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.azureSentinelConnector = AzureSentinelConnector(self.cfg)
        self.theHiveConnector = TheHiveConnector(self.cfg)

    def craftAlertDescription(self, incident):
        """
            From the incident metadata, crafts a nice description in markdown
            for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Start empty
        self.description = ""

        # Add url to incident
        self.url = ('[%s](%s)' %
                    (str(incident['properties']['incidentNumber']),
                     str(incident['properties']['incidentUrl'])))
        self.description += '#### Incident: \n - ' + self.url + '\n\n'

        # Format associated rules
        self.rule_names_formatted = "#### Rules triggered: \n"
        self.rules = incident['properties']['relatedAnalyticRuleIds']
        if len(self.rules) > 0:
            for rule in self.rules:
                self.rule_info = self.azureSentinelConnector.getRule(rule)
                self.logger.debug(
                    'Received the following rule information: {}'.format(
                        self.rule_info))
                self.rule_name = self.rule_info['properties']['displayName']
                rule_url = "https://management.azure.com{}".format(rule)
                self.rule_names_formatted += "- %s \n" % (self.rule_name)

        # Add rules overview to description
        self.description += self.rule_names_formatted + '\n\n'

        # Add mitre Tactic information
        # https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json

        # mitre_ta_links_formatted = "#### MITRE Tactics: \n"
        # if 'mitre_tactics' in offense and offense['mitre_tactics']:
        #     for tactic in offense['mitre_tactics']:
        #         mitre_ta_links_formatted += "- [%s](%s/%s) \n" % (tactic, 'https://attack.mitre.org/tactics/', tactic)

        #     #Add associated documentation
        #     self.description += mitre_ta_links_formatted + '\n\n'

        # #Add mitre Technique information
        # mitre_t_links_formatted = "#### MITRE Techniques: \n"
        # if 'mitre_techniques' in offense and offense['mitre_techniques']:
        #     for technique in offense['mitre_techniques']:
        #         mitre_t_links_formatted += "- [%s](%s/%s) \n" % (technique, 'https://attack.mitre.org/techniques/', technique)

        # Add a custom description when the incident does not contain any
        if 'description' not in incident['properties']:
            incident['properties']['description'] = "N/A"

        # Add incident details table
        self.description += (
            '#### Summary\n\n' +
            '|                         |               |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **Start Time**          | ' + str(
                self.azureSentinelConnector.formatDate(
                    "description", incident['properties']['createdTimeUtc'])) +
            ' |\n' + '| **incident ID**          | ' +
            str(incident['properties']['incidentNumber']) + ' |\n' +
            '| **Description**         | ' +
            str(incident['properties']['description'].replace('\n', '')) +
            ' |\n' + '| **incident Type**        | ' + str(incident['type']) +
            ' |\n' + '| **incident Source**      | ' +
            str(incident['properties']['additionalData']['alertProductNames'])
            + ' |\n' + '| **incident Status**      | ' +
            str(incident['properties']['status']) + ' |\n' + '\n\n\n\n')

        return self.description

    def sentinelIncidentToHiveAlert(self, incident):
        def getHiveSeverity(incident):
            # severity in TheHive is either low, medium or high
            # while severity in Sentinel is from Low to High
            if incident['properties']['severity'] == "Low":
                return 1
            elif incident['properties']['severity'] == "Medium":
                return 2
            elif incident['properties']['severity'] == "High":
                return 3

            return 1

        #
        # Creating the alert
        #

        # Setup Tags
        self.tags = ['AzureSentinel', 'incident', 'Synapse']

        # Skip for now
        self.artifacts = []

        # Retrieve the configured case_template
        self.sentinelCaseTemplate = self.cfg.get('AzureSentinel',
                                                 'case_template')

        # Build TheHive alert
        self.alert = self.theHiveConnector.craftAlert(
            "{}, {}".format(incident['properties']['incidentNumber'],
                            incident['properties']['title']),
            self.craftAlertDescription(incident), getHiveSeverity(incident),
            self.azureSentinelConnector.formatDate(
                "alert_timestamp", incident['properties']['createdTimeUtc']),
            self.tags, 2, 'New', 'internal', 'Azure_Sentinel_incidents',
            str(incident['name']), self.artifacts, self.sentinelCaseTemplate)

        return self.alert

    def validateRequest(self, request):
        if request.is_json:
            self.content = request.get_json()
            if 'type' in self.content and self.content['type'] == "Active":
                self.workflowReport = self.allIncidents2Alert(
                    self.content['type'])
                if self.workflowReport['success']:
                    return json.dumps(self.workflowReport), 200
                else:
                    return json.dumps(self.workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Not json request')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all open incidents created within Azure Sentinel
        and create alerts for them in TheHive
        """
        self.logger.info('%s.allIncidents2Alert starts', __name__)

        self.report = dict()
        self.report['success'] = True
        self.report['incidents'] = list()

        try:
            self.incidentsList = self.azureSentinelConnector.getIncidents()

            # each incident in the list is represented as a dict
            # we enrich this dict with additional details
            for incident in self.incidentsList:

                # Prepare new alert
                self.incident_report = dict()
                self.logger.debug("incident: %s" % incident)
                # self.logger.info("Enriching incident...")
                # enrichedincident = enrichIncident(incident)
                # self.logger.debug("Enriched incident: %s" % enrichedincident)
                self.theHiveAlert = self.sentinelIncidentToHiveAlert(incident)

                # searching if the incident has already been converted to alert
                self.query = dict()
                self.query['sourceRef'] = str(incident['name'])
                self.logger.info('Looking for incident %s in TheHive alerts',
                                 str(incident['name']))
                self.results = self.theHiveConnector.findAlert(self.query)
                if len(self.results) == 0:
                    self.logger.info(
                        'incident %s not found in TheHive alerts, creating it',
                        str(incident['name']))

                    try:
                        self.theHiveEsAlertId = self.theHiveConnector.createAlert(
                            self.theHiveAlert)['id']

                        self.incident_report[
                            'raised_alert_id'] = self.theHiveEsAlertId
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True

                    except Exception as e:
                        self.logger.error('%s.allIncidents2Alert failed',
                                          __name__,
                                          exc_info=True)
                        self.incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            self.incident_report['message'] = errorMessage
                        else:
                            self.incident_report['message'] = str(
                                e) + ": Couldn't raise alert in TheHive"
                        self.incident_report['incident_id'] = incident['name']
                        # Mark the overall run as failed if any incident fails
                        self.report['success'] = False

                    self.report['incidents'].append(self.incident_report)
                else:
                    self.logger.info(
                        'incident %s already imported as alert, checking for updates',
                        str(incident['name']))
                    self.alert_found = self.results[0]

                    # Check if alert is already created, but needs updating
                    if self.check_if_updated(self.alert_found,
                                             vars(self.theHiveAlert)):
                        self.logger.info(
                            "Found changes for %s, updating alert" %
                            self.alert_found['id'])

                        # update alert
                        self.theHiveConnector.updateAlert(
                            self.alert_found['id'],
                            self.theHiveAlert,
                            fields=["tags", "artifacts"])
                        self.incident_report[
                            'updated_alert_id'] = self.alert_found['id']
                        self.incident_report[
                            'sentinel_incident_id'] = incident['name']
                        self.incident_report['success'] = True
                    else:
                        self.logger.info("No changes found for %s" %
                                         self.alert_found['id'])
                        continue

        except Exception as e:

            self.logger.error(
                'Failed to create alert from Azure Sentinel incident (retrieving incidents failed)',
                exc_info=True)
            self.report['success'] = False
            self.report[
                'message'] = "%s: Failed to create alert from incident" % str(
                    e)

        return self.report
Example #6
class Integration(Main):

    def __init__(self):
        super().__init__()
        self.RDConnector = RDConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectRD()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectRD(self):
        self.logger.info('%s.connectRD starts', __name__)

        report = dict()
        report['success'] = False

        # Setup Tags
        self.tags = ['Responsible disclosure', 'Synapse']

        tracker_file = "./modules/ResponsibleDisclosure/email_tracker"
        link_to_load = ""
        if os.path.exists(tracker_file):
            self.logger.debug("Reading from the tracker file...")
            with open(tracker_file, "r") as tracker:
                link_to_load = tracker.read()

        if not link_to_load:
            link_to_load = self.cfg.get('ResponsibleDisclosure', 'list_endpoint')

        emails, new_link = self.RDConnector.scan(link_to_load)

        try:
            for email in emails:
                try:
                    if ('@removed' in email) or email["from"]["emailAddress"]["address"] in self.cfg.get('ResponsibleDisclosure', 'excluded_senders'):
                        continue
                    self.logger.debug("Found unread E-mail with id: {}".format(email['id']))

                    # Get the conversation id from the email
                    CID = email["conversationId"]
                    # Conversation id hash will be used as a unique identifier for the alert
                    CIDHash = hashlib.md5(CID.encode()).hexdigest()

                    email_date = datetime.strptime(email["receivedDateTime"], "%Y-%m-%dT%H:%M:%SZ")
                    epoch_email_date = email_date.timestamp() * 1000

                    alertTitle = "Responsible Disclosure - {}".format(email["subject"])

                    alertDescription = self.createDescription(email)

                    # Moving the email from Inbox to the new folder defined by variable to_move_folder in synapse.conf
                    # Disabled temporarily
                    # self.RDConnector.moveToFolder(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'], self.cfg.get('ResponsibleDisclosure', 'to_move_folder'))

                    # Get all the attachments and upload to the hive observables
                    attachment_data = self.RDConnector.listAttachment(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'])

                    all_artifacts = []
                    all_attachments = []

                    if attachment_data:
                        for att in attachment_data:
                            file_name = self.RDConnector.downloadAttachments(att['name'], att['attachment_id'], att['isInline'], att['contentType'])
                            all_attachments.append(file_name)

                            self.af = AlertArtifact(dataType='file', data=file_name, tlp=2, tags=['Responsible disclosure', 'Synapse'], ioc=True)

                            all_artifacts.append(self.af)

                    # Create the alert in thehive
                    alert = self.TheHiveConnector.craftAlert(
                        alertTitle,
                        alertDescription,
                        1,
                        epoch_email_date,
                        self.tags, 2,
                        "New",
                        "internal",
                        "ResponsibleDisclosure",
                        CIDHash,
                        all_artifacts,
                        self.cfg.get('ResponsibleDisclosure', 'case_template'))

                    # Check if the alert was created successfully
                    query = dict()
                    query['sourceRef'] = str(CIDHash)

                    # Look up if any existing alert in theHive
                    alert_results = self.TheHiveConnector.findAlert(query)

                    # If no alerts are found for corresponding CIDHASH, create a new alert
                    if len(alert_results) == 0:
                        createdAlert = self.TheHiveConnector.createAlert(alert)

                        # Send an automatic reply to the original sender from the responsible disclosure mailbox
                        autoreply_subject_name = "RE: {}".format(email["subject"])

                        self.RDConnector.sendAutoReply("*****@*****.**", email["from"]["emailAddress"]["address"], self.cfg.get('ResponsibleDisclosure', 'email_body_filepath'), autoreply_subject_name)

                    # If an alert is found, update it; if it was promoted to a case, update the case instead
                    if len(alert_results) > 0:
                        alert_found = alert_results[0]

                        # Check if alert is promoted to a case
                        if 'case' in alert_found:

                            case_found = self.TheHiveConnector.getCase(alert_found['case'])

                            # Create a case model
                            self.updated_case = Case()

                            # Append the new description to the existing one
                            # TODO: if the new e-mail body is empty, keep only the old description
                            self.updated_case.description = case_found['description'] + "\n\n" + alertDescription

                            self.updated_case.id = alert_found['case']
                            self.TheHiveConnector.updateCase(self.updated_case, ["description"])
                            self.logger.info("updated the description of the case with id: {}".format(alert_found['case']))

                            # Check if there are new observables available
                            if all_attachments:
                                for att in all_attachments:
                                    try:
                                        self.TheHiveConnector.addFileObservable(alert_found['case'], att, "email attachment")
                                    except Exception as e:
                                        self.logger.error(f"Encountered an error while creating a new file based observable: {e}", exc_info=True)
                                        continue
                        # Otherwise there is no corresponding case, so update the alert instead
                        else:
                            # create an alert model
                            self.updated_alert = Alert()

                            # Update the alert with the new description by appending it
                            # (the existing description is preserved even if the new email body is empty)
                            self.updated_alert.description = alert_found['description'] + "\n\n" + alertDescription

                            self.TheHiveConnector.updateAlert(alert_found['id'], self.updated_alert, ["description"])
                            self.logger.info("updated the description of the alert with id: {}".format(alert_found['id']))
                except Exception as e:
                    self.logger.error(e, exc_info=True)
                    continue

                if all_attachments:
                    for att in all_attachments:
                        os.remove(att)

            # Write the delta link to the tracker
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)

            report['success'] = True
            return report

        except Exception as e:
            self.logger.error(e)
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createDescription(self, email):

        email_body = email['body']['content']
        subject = email["subject"]
        # Get the conversation id from the email
        CID = email["conversationId"]
        # Conversation id hash will be used as a unique identifier for the alert
        CIDHash = hashlib.md5(CID.encode()).hexdigest()

        # Parse all the URLs and add them to a field in the description table
        urls_list = re.findall(r'\<(https?://[\S]+?)\>', email_body)
        # "&#13;" is ascii for next line
        urls_str = ' &#13; '.join(str(x) for x in urls_list)

        from_e = email["from"]["emailAddress"]["address"]
        to_e = "N/A"
        if email["toRecipients"]:
            to_e = email["toRecipients"][0]["emailAddress"]["address"]

        OriginatingIP = "N/A"
        for header in email['internetMessageHeaders']:
            if header['name'] == 'X-Originating-IP':
                # Strip the surrounding brackets; the header value arrives formatted as "[x.x.x.x]"
                OriginatingIP = header['value'][1:-1]

        # putting together the markdown table
        temp_fullbody = []
        temp_fullbody.append("|     |     |")
        temp_fullbody.append("|:-----|:-----|")
        temp_fullbody.append("|  " + "**" + "Subject" + "**" + "  |  " + subject + "  |")
        temp_fullbody.append("|  " + "**" + "Sender" + "**" + "  |  " + from_e + "  |")
        temp_fullbody.append("|  " + "**" + "Recipient" + "**" + "  |  " + to_e + "  |")
        temp_fullbody.append("|  " + "**" + "Originating IP" + "**" + "  |  " + OriginatingIP + "  |")
        temp_fullbody.append("|  " + "**" + "Received at" + "**" + "  |  " + email["receivedDateTime"] + "  |")
        temp_fullbody.append("|  " + "**" + "URL(s) in email" + "**" + "  |  " + urls_str + "  |")
        temp_fullbody.append("|  " + "**" + "Msg ID" + "**" + "  |  " + email['id'] + "  |")
        temp_fullbody.append("**" + "Email body" + "**")
        temp_fullbody.append("```")
        temp_fullbody.append(email_body)
        temp_fullbody.append("```")

        alertDescription = '\r\n'.join(str(x) for x in temp_fullbody)
        return alertDescription
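
# The sketch below is not part of the original example; it illustrates, as an
# assumption, the Microsoft Graph-style message dict that createDescription()
# appears to expect. The field names are taken from the lookups above (id,
# subject, conversationId, receivedDateTime, from, toRecipients,
# internetMessageHeaders, body.content); all values are purely illustrative.
sample_email = {
    "id": "AAMkAGI2...",
    "subject": "Possible vulnerability in login form",
    "conversationId": "AAQkAGI2...",
    "receivedDateTime": "2021-06-01T09:30:00Z",
    "from": {"emailAddress": {"address": "reporter@example.org"}},
    "toRecipients": [{"emailAddress": {"address": "disclosure@example.com"}}],
    "internetMessageHeaders": [
        {"name": "X-Originating-IP", "value": "[203.0.113.7]"}
    ],
    "body": {"content": "Hello, I found an issue: <https://example.org/poc>"},
}

# The MD5 of the conversation id is what the deduplication above keys on (it
# becomes the alert sourceRef), while createDescription() renders the other
# fields into the markdown table followed by the raw body.
import hashlib
sample_cidhash = hashlib.md5(sample_email["conversationId"].encode()).hexdigest()
# description = Integration().createDescription(sample_email)  # hypothetical call; Integration() needs a valid config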
Example #7
0
def logstash2Alert(event):
    """
       Parse the received Logstash alert notification
       Original example logstash output:

       Nice example input:
        
    """
    #logger = logging.getLogger(__name__)
    logger.info('%s.logstash2Alert starts', __name__)

    report = dict()
    report['success'] = True

    try:
        cfg = getConf()

        theHiveConnector = TheHiveConnector(cfg)

        # Map the Logstash alert to the TheHive alert that will be enhanced
        logger.info('Looking for Logstash Alert %s in TheHive alerts',
                    str(event['sourceRef']))

        # TODO: find a way to generate a shorter, more useful sourceRef from within Synapse
        q = dict()
        q['sourceRef'] = str(event['sourceRef'])
        results = theHiveConnector.findAlert(q)
        if len(results) == 0:
            logger.info(
                'Logstash Alert %s not found in TheHive alerts, creating it',
                str(event['sourceRef']))
            event_report = dict()

            event['case_template'] = "ELK-Anomalies"

            #Enrichment is not in scope yet
            #enrichedAlert = enrichAlert(elkConnector, event)

            try:
                theHiveAlert = ELKToHiveAlert(theHiveConnector, event)
                theHiveEsAlertId = theHiveConnector.createAlert(theHiveAlert)

                event_report['raised_alert_id'] = theHiveEsAlertId
                event_report['alert_id'] = event['sourceRef']
                event_report['success'] = True

            except Exception as e:
                logger.error('%s.logstash2Alert failed',
                             __name__,
                             exc_info=True)
                event_report['success'] = False
                if isinstance(e, ValueError):
                    errorMessage = json.loads(str(e))['message']
                    event_report['message'] = errorMessage
                else:
                    event_report['message'] = str(e) + ": Couldn't raise alert in TheHive"
                event_report['alert_id'] = event['sourceRef']
                # Mark the overall report as failed if any event fails
                report['success'] = False

            report['event'] = event_report
        else:
            logger.info('Logstash Alert %s already imported as alert',
                        str(event['sourceRef']))

    except Exception as e:

        logger.error('Failed to create alert from Logstash Alert',
                     exc_info=True)
        report['success'] = False
        report['message'] = "%s: Failed to create alert from Logstash Alert" % str(e)

    return report
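
# The sketch below is not from the original example; it shows, as an
# assumption, the kind of event dict logstash2Alert() might receive on the
# /ELK2alert webhook. Only 'sourceRef' is read directly in this function; any
# other fields would be consumed by the ELKToHiveAlert() helper, whose exact
# contract is not shown here, so every field apart from 'sourceRef' is a guess.
sample_event = {
    "sourceRef": "logstash-watch-0001",   # used to deduplicate against existing TheHive alerts
    "title": "Suspicious login burst",    # assumed field, illustrative only
    "description": "Raised by a Logstash/Watcher pipeline",  # assumed field
    "source": "Elastic",                  # assumed field
}

# report = logstash2Alert(sample_event)
# report['success'] would be False if the alert could not be created in TheHive.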
Example #8
0
def ml2Alert(mlalert):
    """
       Parse the received ml watcher notification
       Original example Watch Actions:
       
        "TheHive": {
            "webhook": {
                "scheme": "http",
                "host": "machine.domain.com",
                "port": 5000,
                "method": "post",
                "path": "/ELK2alert",
                "params": {},
                "headers": {
                    "Authorization": "Bearer 2WTbTHH8iaSeoo8yk8y0GA96dX7/Tz7s",
                    "Cookie": "cookie=no",
                    "Content-Type": "application/json"
                },
                "body": "{\"ml_job_id\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}\",\n\"description\": \"some description\",\n\"start_time\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_iso8601.0}}\",\n\"anomaly_score\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.score.0}}\",\n\"url\": \"https://machine.domain.com:5601/app/ml#/explorer/?_g=(ml:(jobIds:!('{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}')),refreshInterval:(display:Off,pause:!f,value:0),time:(from:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.start.0}}',mode:absolute,to:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.end.0}}'))&_a=(filters:!(),mlAnomaliesTable:(intervalValue:auto,thresholdValue:0),mlExplorerSwimlane:(selectedLane:Overall,selectedTime:{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_epoch.0}},selectedType:overall),query:(query_string:(analyze_wildcard:!t,query:'**')))\",\n\"influencers\": \"{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\\n{{_source.function}}({{_source.field_name}}) {{_source.by_field_value}} {{_source.over_field_value}} {{_source.partition_field_value}} [{{fields.score.0}}]\\n{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\",\n\"type\": \"asml\",\n\"source\": \"Elastic\",\n\"sourceRef\": \"{{ctx.payload.as_watch_id}}\"}"
            }
        }

       Nice example input:
        "{
            \"ml_job_id\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}\",\n
            \"description\": \"some description\",\n
            \"start_time\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_iso8601.0}}\",\n
            \"anomaly_score\": \"{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.score.0}}\",\n
            \"url\": \"https://machine.domain.com:5601/app/ml#/explorer/?_g=(ml:(jobIds:!('{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0._source.job_id}}')),refreshInterval:(display:Off,pause:!f,value:0),time:(from:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.start.0}}',mode:absolute,to:'{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.end.0}}'))&_a=(filters:!(),mlAnomaliesTable:(intervalValue:auto,thresholdValue:0),mlExplorerSwimlane:(selectedLane:Overall,selectedTime:{{ctx.payload.aggregations.bucket_results.top_bucket_hits.hits.hits.0.fields.timestamp_epoch.0}},selectedType:overall),query:(query_string:(analyze_wildcard:!t,query:'**')))\",\n
            \"influencers\": \"{{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\\n
                               {{_source.function}}({{_source.field_name}}) {{_source.by_field_value}} {{_source.over_field_value}} {{_source.partition_field_value}} [{{fields.score.0}}]\\n
                               {{ctx.payload.aggregations.record_results.top_record_hits.hits.hits}}\",\n
            \"type\": \"asml\",\n
            \"source\": \"Elastic\",\n
            \"sourceRef\": \"{{ctx.payload.as_watch_id}}\"
        }"
    """
    #logger = logging.getLogger(__name__)
    logger.info('%s.ml2Alert starts', __name__)

    report = dict()
    report['success'] = True

    try:
        cfg = getConf()

        theHiveConnector = TheHiveConnector(cfg)

        # Map the ML watcher alert to the alert that will be enhanced
        logger.info('Looking for ML Alert %s in TheHive alerts',
                    str(mlalert['sourceRef']))

        # TODO: find a way to generate a shorter, more useful sourceRef from within Synapse
        q = dict()
        q['sourceRef'] = str(mlalert['sourceRef'])
        results = theHiveConnector.findAlert(q)
        if len(results) == 0:
            logger.info('ML Alert %s not found in TheHive alerts, creating it',
                        str(mlalert['sourceRef']))
            mlalert_report = dict()

            #Set generic parameters
            mlalert['title'] = "ML: " + mlalert['ml_job_id']
            mlalert['description'] = craftMLAlertDescription(mlalert)
            mlalert['case_template'] = "ELK-ML"

            #Enrichment is not in scope yet
            #enrichedAlert = enrichAlert(elkConnector, mlalert)

            try:
                theHiveAlert = ELKToHiveAlert(theHiveConnector, mlalert)
                theHiveEsAlertId = theHiveConnector.createAlert(theHiveAlert)

                mlalert_report['raised_alert_id'] = theHiveEsAlertId
                mlalert_report['ml_alert_id'] = mlalert['sourceRef']
                mlalert_report['success'] = True

            except Exception as e:
                logger.error('%s.ml2Alert failed', __name__, exc_info=True)
                mlalert_report['success'] = False
                if isinstance(e, ValueError):
                    errorMessage = json.loads(str(e))['message']
                    mlalert_report['message'] = errorMessage
                else:
                    mlalert_report['message'] = str(e) + ": Couldn't raise alert in TheHive"
                mlalert_report['ml_alert_id'] = mlalert['sourceRef']
                # Mark the overall report as failed if any ML alert fails
                report['success'] = False

            report['mlalert'] = mlalert_report
        else:
            logger.info('ML Alert %s already imported as alert',
                        str(mlalert['sourceRef']))

    except Exception as e:

        logger.error('Failed to create alert from ML Alert', exc_info=True)
        report['success'] = False
        report['message'] = "%s: Failed to create alert from ML Alert" % str(e)

    return report
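
# The sketch below is not from the original example; it is an assumed ML
# watcher payload whose field names come from the webhook "body" template in
# the docstring above (ml_job_id, description, start_time, anomaly_score, url,
# influencers, type, source, sourceRef); the values are illustrative.
sample_mlalert = {
    "ml_job_id": "auth_rare_user",
    "description": "some description",
    "start_time": "2021-06-01T09:00:00.000Z",
    "anomaly_score": "87.3",
    "url": "https://machine.domain.com:5601/app/ml#/explorer/...",
    "influencers": "count() user.name=svc-backup [87.3]",
    "type": "asml",
    "source": "Elastic",
    "sourceRef": "auth_rare_user_2021-06-01T09:00",
}

# report = ml2Alert(sample_mlalert)
# The alert title becomes "ML: " + ml_job_id and the case template "ELK-ML";
# a second delivery with the same sourceRef is skipped as already imported.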