def run(self, case_id, data_type, analyzer_name, linked_task_name=None):
        """Run a Cortex analyzer on the single case observable of *data_type*.

        If *linked_task_name* is given, the task with that title is resolved
        first and its id is stored in the datastore under the resulting
        job id, so a later workflow step can find the task for this job.

        :param case_id: TheHive case id to search observables in.
        :param data_type: observable dataType the analyzer applies to.
        :param analyzer_name: Cortex analyzer name.
        :param linked_task_name: optional task title to link the job to.
        :returns: the Cortex job object (JSON dict) on success.
        :raises ValueError: on any non-200 API response, when the linked
            task is not found, or when there is not exactly one observable.
        """
        api = TheHiveApiExtended(self.config['thehive_url'],
                                 self.config['thehive_api_key'])
        linked_task_id = None
        if linked_task_name:
            response = api.get_case_tasks(case_id,
                                          query=Eq('title', linked_task_name))
            if response.status_code == 200:
                tasks = response.json()
                if len(tasks) == 1:
                    linked_task_id = tasks[0]['id']
                else:
                    raise ValueError(
                        '[RunAnalyzerOnDataTypeAction]: task not found')
            else:
                raise ValueError(
                    '[RunAnalyzerOnDataTypeAction]: tasks status_code %d' %
                    response.status_code)

        analyzer = api.get_analyzer_by_name_and_data_type(
            analyzer_name, data_type)
        response = api.get_case_observables(case_id,
                                            query=Eq('dataType', data_type))
        if response.status_code == 200:
            observables = response.json()
            # Deliberately requires exactly one matching observable.
            if len(observables) == 1:
                for observable in observables:
                    # Pick any Cortex instance exposing this analyzer.
                    cortex_id = random.choice(analyzer['cortexIds'])
                    response_job = api.run_analyzer(cortex_id,
                                                    observable['id'],
                                                    analyzer['id'])
                    if response_job.status_code == 200:
                        job = response_job.json()
                        if linked_task_id:
                            # Persist job->task mapping for later steps.
                            self.action_service.set_value(
                                name='thehive_job_{}'.format(job['id']),
                                value=linked_task_id,
                                local=False)
                        return job
                    else:
                        # BUG FIX: report the job response's status code,
                        # not the observable query's (which was 200 here).
                        raise ValueError(
                            '[RunAnalyzerOnDataTypeAction]: job status_code %d'
                            % response_job.status_code)
            else:
                raise ValueError(
                    '[RunAnalyzerOnDataTypeAction]: no observable')
        else:
            raise ValueError('[RunAnalyzerOnDataTypeAction]: status_code %d' %
                             response.status_code)
    def run(self, case_id, task_name, status):
        """Find a case task by its title and change its status.

        The task is also assigned to the configured bot user. Raises
        ValueError when the task cannot be uniquely resolved or any API
        call returns a non-200 response. Returns True on success.
        """
        api = TheHiveApiExtended(self.config['thehive_url'], self.config['thehive_api_key'])

        # Resolve the task title to its internal id.
        lookup = api.get_case_tasks(case_id, query=Eq('title', task_name))
        if lookup.status_code != 200:
            raise ValueError('[ChangeStatusTaskByNameAction]: tasks status_code %d'
                 % lookup.status_code)
        matches = lookup.json()
        if len(matches) != 1:
            raise ValueError('[ChangeStatusTaskByNameAction]: task not found')
        task_id = matches[0]['id']

        # Fetch the full task, then push back the updated status/owner.
        detail = api.get_task(task_id)
        if detail.status_code != 200:
            raise ValueError('[ChangeStatusTaskByNameAction]: status_code %d'
                 % detail.status_code)
        task = CaseTask(json=detail.json())
        task.id = task_id
        task.status = status
        task.owner = self.config['thehive_bot_username']
        api.update_case_task(task)

        return True
# Example #3
    def get_hive_case_id(self, mi_caseid):
        """Resolve a McAfee Investigator case ID to a Hive case id.

        The MI case ID is stored as a tag on the Hive case. Caveats:

        * If that tag is removed, this mapping fails.
        * The Hive uses Elasticsearch with the default index refresh
          interval (1s), so a freshly created case may not be searchable
          yet; we retry for up to 5 seconds while nothing is found.

        If this is not reliable enough for your deployment, the
        recommendation is to set up some local storage and change this
        implementation. That said, this should work most of the time.
        """
        for attempt in range(5, 0, -1):  # countdown: 5 attempts left, 4, ...
            try:
                response = self.api.find_cases(query=Eq('tags', mi_caseid))
                if response.status_code == requests.codes.ok:
                    return response.json()[0]['id']
            except Exception:
                log.warning(
                    "Exception during case lookup attempt #%s (will retry)",
                    attempt,
                    exc_info=True)
            time.sleep(1)  # wait out the Elasticsearch refresh interval

        raise Exception(
            "Could not lookup case with ID '{}' in The Hive".format(mi_caseid))
# Example #4
    def poll(self):
        """Find 'Waiting' tasks assigned to the bot user and dispatch a
        trigger for each one; raises ValueError on a non-200 response."""
        criteria = And(Eq('owner', self._config['thehive_bot_username']),
                       Eq('status', 'Waiting'))
        result = self._client.find_tasks(query=criteria)

        if result.status_code != 200:
            self._logger.exception(
                'TheHive sensor failed with status_code %d' %
                result.status_code)
            raise ValueError('[TheHiveTasksSearchSensor]: status_code %d' %
                             result.status_code)

        pending = result.json()
        self._logger.debug('%d tasks found' % len(pending))
        for item in pending:
            self._logger.debug('New task %s' % item['title'])
            self._sensor_service.dispatch(trigger=self._trigger_ref,
                                          payload=item)
# Example #5
    def findFirstMatchingTemplate(self, searchstring):
        """Return the first 'Ok' case template whose name contains
        *searchstring*, or None when nothing matches."""
        self.logger.info('%s.findFirstMatchingTemplate starts', __name__)

        candidates = self.theHiveApi.find_case_templates(query=Eq('status', 'Ok'))
        if candidates.status_code != 200:
            raise ValueError('Could not find matching template !')

        return next(
            (tpl for tpl in candidates.json() if searchstring in tpl['name']),
            None)
# Example #6
 def run(self, case_id, analyzer_name, data_type):
     """Run the named Cortex analyzer on every case observable of the
     given data type. Returns True; raises ValueError on a non-200
     observable query."""
     api = TheHiveApiExtended(self.config['thehive_url'],
                              self.config['thehive_api_key'])
     analyzer = api.get_analyzer_by_name_and_data_type(
         analyzer_name, data_type)
     result = api.get_case_observables(case_id,
                                       query=Eq('dataType', data_type))
     if result.status_code != 200:
         raise ValueError('[RunAnalyzerAction]: status_code %d' %
                          result.status_code)
     for obs in result.json():
         # Pick any Cortex instance that exposes this analyzer.
         api.run_analyzer(random.choice(analyzer['cortexIds']),
                          obs['id'], analyzer['id'])
     return True
# Example #7
    def run(self, task_id):
        """Assign the task identified by *task_id* to the bot user and
        mark it as 'InProgress'. Returns True on success."""
        api = TheHiveApi(self.config['thehive_url'],
                         self.config['thehive_api_key'])
        response = api.find_tasks(query=Eq('_id', task_id))
        if response.status_code != 200:
            raise ValueError('[TakeTaskAction]: status_code %d' %
                             response.status_code)

        found = response.json()
        if len(found) != 1:
            raise ValueError('[TakeTaskAction]: no tasks with this id')

        task = CaseTask(json=found[0])
        task.id = task_id
        task.status = 'InProgress'
        task.owner = self.config['thehive_bot_username']
        api.update_case_task(task)

        return True
# Example #8
def main():
    """Build and publish a report of stale open cases.

    Queries The Hive for open cases; for each case created more than a
    week ago, counts its tasks still in 'Waiting' or 'InProgress' and
    records a summary entry keyed by the case's position in the result.
    The collected data is handed to ``build`` (an empty dict when the
    query fails or nothing qualifies).
    """
    data = {}
    api = TheHiveApi(server_address, api_credentials)
    r = api.find_cases(query=Eq('status', 'Open'), range='all', sort=[])
    if r.status_code == 200:
        # PERF FIX: parse each HTTP response exactly once — the original
        # called r.json()/tasks.json() on every element access, re-parsing
        # the body O(n^2) times.
        cases = r.json()
        check_date = datetime.date.today() - datetime.timedelta(days=7)
        cutoff = time.mktime(check_date.timetuple())
        for i, case in enumerate(cases):
            # createdAt is epoch milliseconds; compare in seconds.
            if (case['createdAt'] / 1000) < cutoff:
                tasks = api.get_case_tasks(case['id']).json()
                pending = sum(
                    1 for t in tasks
                    if t['status'] in ('Waiting', 'InProgress'))
                data[i] = {
                    'id': case['id'],
                    'owner': case['owner'],
                    'createdAt': time.strftime(
                        '%m/%d/%Y %H:%M:%S',
                        time.gmtime(case['createdAt'] / 1000.)),
                    'totalTasks': len(tasks),
                    'pendingTasks': pending,
                }
    build(data)
def get_case_by_casenum(casenum):
    """Return the first Hive case whose public caseId equals *casenum*."""
    return hive_api.find_first(query=And(Eq('caseId', casenum)))
# Example #10
        filterRead = configuration.checkAndValidate(result, "read", default=FILTER_READ_DEFAULT, is_mandatory=False)
        filterSource = configuration.checkAndValidate(result, "source", default=FILTER_SOURCE_DEFAULT, is_mandatory=False)
        filterDate = configuration.checkAndValidate(result, "date", default=FILTER_DATE_DEFAULT, is_mandatory=False)
        maxAlerts = configuration.checkAndValidate(result, "max_alerts", default=defaults["MAX_ALERTS_DEFAULT"], is_mandatory=False)
        sortAlerts = configuration.checkAndValidate(result, "sort_alerts", default=defaults["SORT_ALERTS_DEFAULT"], is_mandatory=False)

        logger.debug("[THSA-5] Filters are: filterType: "+filterType+", filterSeverity: "+filterSeverity+", filterTags: "+filterTags+", filterTitle: "+filterTitle+", filterRead: "+filterRead+", filterSource: "+filterSource+", filterDate: "+filterDate+", max_alerts: "+maxAlerts+", sort_alerts: "+sortAlerts)

        # Format the query
        query = {}
        elements = []
        if filterType != FILTER_TYPE_DEFAULT:
            element = Or(*[Like("type",s) for s in filterType.replace(" ","").split(";") if s != "*"])
            elements.append(element)
        if filterSeverity != FILTER_SEVERITY_DEFAULT:
            element = Or(*[Eq("severity",int(s)) for s in filterSeverity.replace(" ","").split(";") if s != "*"])
            elements.append(element)
        if filterTags != FILTER_TAGS_DEFAULT:
            element = Or(*[Eq("tags",s) for s in filterTags.replace(" ","").split(";") if s != "*"])
            elements.append(element)
        if filterTitle != FILTER_TITLE_DEFAULT:
            element = Like("title",filterTitle)
            elements.append(element)
        if filterRead != FILTER_READ_DEFAULT:
            read = False if int(filterRead)==0 else True
            element = Eq("read",read)
            elements.append(element)
        if filterSource != FILTER_SOURCE_DEFAULT:
            element = Or(*[Eq("source",s) for s in filterSource.replace(" ","").split(";") if s != "*"])
            elements.append(element)
        if filterDate != FILTER_DATE_DEFAULT:
def threats2Alert():
    """Mirror SentinelOne threats into TheHive alerts.

    For every organization configured in conf/sentinelone.json, fetch its
    threats and create a TheHive alert for each one not already imported
    (matched by sourceRef). Returns {'success': True, 'message': ""}.
    """
    logger = logging.getLogger('workflows.' + __name__)
    logger.info('%s.threats2Alert starts', __name__)

    theHiveConnector = TheHiveConnector(getConf())

    # Per-organization SentinelOne connection settings (mgt_url, token,
    # name, trigram).
    organizations = None
    with open(os.path.join(current_dir, "..", "conf", "sentinelone.json")) as fd:
        organizations = json.load(fd)['organizations']

    for org in organizations:
        threats = SentinelOneConnector(org['mgt_url'], org['token']).get_threats()

        for threat in threats:
            # Tag the alert with every indicator category and tactic name.
            tags = []
            for indicator in threat['indicators']:
                tags.append(indicator['category'])
                for tactic in indicator['tactics']:
                    tags.append(tactic['name'])

            customFields = CustomFieldHelper()\
                .add_string('client', org['name'])\
                .add_string('hostname', str(threat['agentRealtimeInfo']['agentComputerName']))\
                .build()

            # Unique reference "<org trigram>-<threat id>", used below to
            # detect whether this threat was already raised as an alert.
            sourceRef = "{}-{}".format(org['trigram'],threat['id'])
            # Map SentinelOne confidence level to TheHive severity 1-3.
            severity = {"na":1, "suspicious":2, "malicious":3}[threat['threatInfo']['confidenceLevel']]

            # external link attribute is TheHive 4 only
            alert = Alert(
                title="{} performing {} activity".format(threat['threatInfo']['threatName'], threat['threatInfo']['classification']),
                description=descriptionCrafter(threat, org),
                severity=severity,
                tags=tags,
                tlp=2,
                date=int(dateutil_parse(threat['threatInfo']['createdAt']).timestamp()) * 1000,
                status="New",
                type='EDR',
                source='Sentinel One',
                sourceRef=sourceRef,
                artifacts=artifactsCrafter(threat),
                caseTemplate='Sentinel One Case',
                customFields=customFields,
                externalLink="{mgt_url}/incidents/threats/{threat_id}/overview"\
                    .format(mgt_url=org['mgt_url'], threat_id=threat['id']))

            # Check for an existing alert with the same sourceRef.
            find = theHiveConnector.theHiveApi.find_alerts(query=Eq('sourceRef', sourceRef))
            find.raise_for_status()
            find = find.json()

            if len(find) > 0:
                pass
                # update because already exists
                # disabled for now
                #ret = theHiveConnector.theHiveApi.update_alert(find['id'], alert)
                #ret.raise_for_status()
                #logger.info("Alert {} updated in TheHive".format(find['id']))
            else:
                # create because does not exists in TheHive
                ret = theHiveConnector.theHiveApi.create_alert(alert)
                ret.raise_for_status()
                logger.info('Alert {} created in TheHive'.format(ret.json()['id']))

    return {'success': True, 'message': ""}
# Example #12
def main():
    """Interactively merge TheHive alerts into an existing case.

    Prompts for an alert title, a case number, and how many alerts back
    to search, then POSTs a merge request to TheHive for every 'New'
    alert whose title matches exactly.
    """
    api_key = '**YOUR_API_KEY**'
    hive_url = 'http://127.0.0.1:9000'
    api = TheHiveApi(hive_url, api_key)
    alert_title = input(
        '[*] What is the title of the alert you want to merge?: ')
    case_num = input(
        '[*] What is the case number you want to merge the alert(s) into?: ')

    # New alerts start at 0, so if a new alert just came in it'd be 0, where an
    # alert that arrived last week may be number 100
    total_alerts = input('[*] How many alerts back do you want to query?: ')

    alert_id = []  # This is the list of alert id(s) to be merged into the case

    try:

        def search_cases(query, range, sort):
            """Return matching cases as JSON, or exit on a non-200 response."""
            case_response = api.find_cases(query=query, range=range, sort=sort)
            if case_response.status_code == 200:
                return case_response.json()
            print('[*] Invalid HTTP Response. Response != 200')
            # BUG FIX: exit with a non-zero status on failure (was 0).
            sys.exit(1)

        # Find the case and grab its internal '_id' — not visible in the
        # GUI, but required by the merge endpoint below.
        case_prep = search_cases(Eq('caseId', case_num), 'all', [])
        case_id = case_prep[0]['_id']
    except Exception as e:
        print(e)
        print('[*] Failed to retrieve Case ID')
        sys.exit(1)

    try:

        def search_alerts(query, range, sort):
            """Return matching alerts as JSON, or exit on a non-200 response."""
            response = api.find_alerts(query=query, range=range, sort=sort)
            if response.status_code == 200:
                return response.json()
            print('[*] Invalid HTTP Response. Response != 200')
            sys.exit(1)

        # All alerts with a matching title, as far back as requested.
        all_alert_info = search_alerts(String("title:'%s'" % (alert_title)),
                                       '0-' + str(total_alerts), [])
    except Exception as e:
        print(e)
        print('[*] Failed to connect/retrieve alerts')
        sys.exit(1)

    try:
        for alert in all_alert_info:
            # Only merge unread ('New') alerts whose title matches exactly —
            # the full-text query above can also return partial matches.
            if (alert['status'] == 'New'
                    and alert['title'].lower().strip()
                    == alert_title.lower().strip()):
                # The alert 'id' field is not visible in the GUI.
                alert_id.append(alert['id'])
    except Exception as e:
        print(e)
        print('[*] Failed to append alerts to list')
        sys.exit(1)

    try:
        # Hoisted out of the loop: the auth header never changes.
        headers = {'Authorization': 'Bearer ' + api_key}
        # tqdm is optional — it only adds a progress bar.
        for alerts in tqdm(alert_id):
            # DO NOT add a / after the last %s or the endpoint will not work.
            requests.post(hive_url + '/api/alert/%s/merge/%s' %
                          (alerts, case_id),
                          headers=headers)
            time.sleep(.1)  # prevent merge errors from sending too quickly
        print('[*] Merge Complete.')
    except Exception as e:
        print(e)
# Example #13
def allOffense2Alert(timerange):
    """Create or update TheHive alerts for recent QRadar offenses.

    Gets all open offenses created within the last *timerange* minutes.
    Offenses not yet known to TheHive (matched by sourceRef) are raised
    as new alerts; existing alerts that are neither 'Ignored' nor
    'Imported' get their title and 'lastEventCount'/'lastUpdated'/
    'offenseSource' custom fields refreshed.

    :returns: report dict with overall 'success' and a per-offense
        'offenses' list (and a 'message' on global failure).
    """
    logger = logging.getLogger(__name__)
    logger.info('%s.allOffense2Alert starts', __name__)

    report = dict()
    report['success'] = True
    report['offenses'] = list()

    try:
        cfg = getConf()

        qradarConnector = QRadarConnector(cfg)
        theHiveConnector = TheHiveConnector(cfg)

        offensesList = qradarConnector.getOffenses(timerange)

        # each offense in the list is represented as a dict
        # we enrich this dict with additional details
        for offense in offensesList:
            # searching if the offense has already been converted to an alert
            logger.info('Looking for offense %s in TheHive alerts',
                        str(offense['id']))
            # Update only new alerts: 'Ignored' ones will be closed on QRadar
            # and should not be updated; 'Imported' ones are refreshed by a
            # responder that fetches the latest info into the case.
            results = theHiveConnector.findAlert(
                Eq("sourceRef", str(offense['id'])))
            offense_report = dict()
            try:
                if len(results) == 0:
                    logger.info(
                        'Offense %s not found in TheHive alerts, creating it',
                        str(offense['id']))
                    enrichedOffense = enrichOffense(qradarConnector, offense)

                    theHiveAlert = qradarOffenseToHiveAlert(
                        theHiveConnector, enrichedOffense)
                    theHiveEsAlertId = theHiveConnector.createAlert(
                        theHiveAlert)['id']

                    offense_report['type'] = "Creation"
                    offense_report['raised_alert_id'] = theHiveEsAlertId
                    offense_report['qradar_offense_id'] = offense['id']
                    offense_report['success'] = True

                    report['offenses'].append(offense_report)

                elif results[0]['status'] not in ['Ignored', 'Imported']:
                    # update alert if it is not imported and not dismissed;
                    # only 'lastEventCount'/'lastUpdated'/'offenseSource'
                    # custom fields and the title are refreshed
                    logger.info('Updating offense %s', str(offense['id']))

                    # CLEANUP: removed unused local `cf = CustomFieldHelper()`
                    alert = Alert(json=results[0])

                    alert.title = offense['description']

                    # Make sure the custom-field slots exist before writing.
                    if 'lastEventCount' not in alert.customFields:
                        alert.customFields['lastEventCount'] = {}

                    if 'lastUpdated' not in alert.customFields:
                        alert.customFields['lastUpdated'] = {}

                    if 'offenseSource' not in alert.customFields:
                        alert.customFields['offenseSource'] = {}

                    alert.customFields['lastEventCount']['number'] = offense[
                        'event_count']
                    alert.customFields['lastUpdated']['date'] = offense[
                        'last_updated_time']
                    alert.customFields['offenseSource']['string'] = offense[
                        'offense_source']  # updated maybe ?

                    # should improve TheHiveConnector.updateAlert() rather
                    # than using the raw API here
                    updatedAlert = theHiveConnector.theHiveApi.update_alert(
                        results[0]['id'],
                        alert,
                        fields=['customFields', 'title'])
                    if not updatedAlert.ok:
                        raise ValueError(json.dumps(updatedAlert.json()))

                    offense_report['type'] = "Update"
                    offense_report['updated_alert_id'] = updatedAlert.json(
                    )['id']
                    offense_report['qradar_offense_id'] = offense['id']
                    offense_report['success'] = True

                    report['offenses'].append(offense_report)

                else:
                    logger.info("Offense already exists")

            except Exception as e:
                logger.error('%s.allOffense2Alert failed',
                             __name__,
                             exc_info=True)
                offense_report['success'] = False
                if isinstance(e, ValueError):
                    # update_alert failures carry a JSON body (raised above).
                    errorMessage = json.loads(str(e))['message']
                    offense_report['message'] = errorMessage
                else:
                    offense_report['message'] = str(
                        e) + ": Couldn't raise alert in TheHive"
                offense_report['offense_id'] = offense['id']
                # Any per-offense failure marks the whole run as failed.
                report['success'] = False

    except Exception as e:

        logger.error(
            'Failed to create alert from QRadar offense (retrieving offenses failed)',
            exc_info=True)
        report['success'] = False
        report['message'] = "%s: Failed to create alert from offense" % str(e)

    return report
# Example #14
            i += 1
    else:
        print('fubard')
    update_sirp(data)

def update_sirp(data):
    """Auto-close The Hive cases that were resolved in Sentinel One.

    Iterates entries indexable by 0..len(data)-1; every entry with
    'SentinelResolved' set to True gets its Hive case (key 'sirpId')
    closed as 'Resolved'. Update failures are ignored (best effort).
    """
    for i in range(len(data)):
        if data[i]['SentinelResolved'] is True:
            try:
                API.case.update(data[i]['sirpId'],
                                status='Resolved',
                                resolutionStatus='Other',
                                summary='Case Resolved at Sentinel One Console, autoclosed',
                                tags=['SentinelOne API'])
            except Exception:
                # BUG FIX: the bare 'except:' also swallowed SystemExit and
                # KeyboardInterrupt; keep the best-effort behaviour but only
                # for ordinary exceptions.
                pass

# Fetch every open case currently assigned to the 'sentinelone' user,
# run the status check over the result, then exit.
RESPONSE = API.find_cases(query=And(Eq('status', 'Open'), Eq('owner', 'sentinelone')),
                          range='all',
                          sort=[])
check_status(RESPONSE)
exit()
# Example #15
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import print_function
from __future__ import unicode_literals

import sys
import json
from thehive4py.api import TheHiveApi
from thehive4py.query import Eq

api = TheHiveApi('http://127.0.0.1:9000', '**YOUR_API_KEY**')

print('Search for case templates')
print('-----------------------------')

# Fetch all case templates whose status is 'Ok'.
response = api.find_case_templates(query=Eq("status", "Ok"))

if response.status_code == 200:
    print(json.dumps(response.json(), indent=4, sort_keys=True))
    print('')

else:
    print('ko: {}/{}'.format(response.status_code, response.text))
    # BUG FIX: signal failure with a non-zero exit status (was 0).
    sys.exit(1)
# Example #16
        print("fubard")
    update_sirp(data)


def update_sirp(data):
    """Auto-close The Hive cases that were resolved in Sentinel One.

    Every entry whose 'SentinelResolved' flag is exactly True has its
    Hive case (key 'sirpId') closed as 'Resolved'.
    """
    for idx in range(len(data)):
        entry = data[idx]
        if entry["SentinelResolved"] is True:
            API.case.update(
                entry["sirpId"],
                status="Resolved",
                resolutionStatus="Other",
                summary="Resolved at Sentinel One Console, autoclosed",
                tags=["SentinelOne API"],
            )


RESPONSE = API.find_cases(query=And(Eq("status", "Open"),
                                    Eq("owner", "sentinelone")),
                          range="all",
                          sort=[])
check_status(RESPONSE)
sys.exit()
        logger.debug("[THSC-5] Filters are: filterKeyword: " + filterKeyword +
                     ", filterStatus: " + filterStatus + ", filterSeverity: " +
                     filterSeverity + ", filterTags: " + filterTags +
                     ", filterTitle: " + filterTitle + ", filterAssignee: " +
                     filterAssignee + ", filterDate: " + filterDate +
                     ", max_cases: " + maxCases + ", sort_cases: " + sortCases)

        # Format the query
        query = {}
        elements = []
        if filterKeyword != FILTER_KEYWORD_DEFAULT:
            element = String(filterKeyword)
            elements.append(element)
        if filterStatus != FILTER_STATUS_DEFAULT:
            element = Or(*[
                Eq("status", s)
                for s in filterStatus.replace(" ", "").split(";") if s != "*"
            ])
            elements.append(element)
        if filterSeverity != FILTER_SEVERITY_DEFAULT:
            element = Or(*[
                Eq("severity", s)
                for s in filterSeverity.replace(" ", "").split(";") if s != "*"
            ])
            elements.append(element)
        if filterTags != FILTER_TAGS_DEFAULT:
            element = Or(*[
                Eq("tags", s) for s in filterTags.replace(" ", "").split(";")
                if s != "*"
            ])
            elements.append(element)
                     filterSource + ", filterDate: " + filterDate +
                     ", max_alerts: " + maxAlerts + ", sort_alerts: " +
                     sortAlerts)

        # Format the query
        query = {}
        elements = []
        if filterType != FILTER_TYPE_DEFAULT:
            element = Or(*[
                Like("type", s) for s in filterType.replace(" ", "").split(";")
                if s != "*"
            ])
            elements.append(element)
        if filterSeverity != FILTER_SEVERITY_DEFAULT:
            element = Or(*[
                Eq("severity", int(s))
                for s in filterSeverity.replace(" ", "").split(";") if s != "*"
            ])
            elements.append(element)
        if filterTags != FILTER_TAGS_DEFAULT:
            element = Or(*[
                Eq("tags", s) for s in filterTags.replace(" ", "").split(";")
                if s != "*"
            ])
            elements.append(element)
        if filterTitle != FILTER_TITLE_DEFAULT:
            element = Like("title", filterTitle)
            elements.append(element)
        if filterRead != FILTER_READ_DEFAULT:
            read = False if int(filterRead) == 0 else True
            element = Eq("read", read)