class Executestory(GeneratingCommand):
    """Generating command that runs or schedules an ASX analytic story.

    ``mode=now`` executes the story's searches immediately (preferring the
    time-picker range when one is available); ``mode=schedule`` registers
    the story as a scheduled search using the supplied ``cron`` expression.
    """

    logger = splunk.mining.dcutils.getLogger()

    story = Option(doc='''
        **Syntax:** **story=***<story name>*
        **Description:** Story to update.
        ''',
                   name='story',
                   require=True,
                   default=None)

    # "now" runs immediately, "schedule" creates a scheduled search.
    mode = Option(require=True)
    # Cron expression; only meaningful when mode=schedule.
    cron = Option(require=False)
    earliest_time = Option(require=False)
    latest_time = Option(require=False)

    def getURL(self):
        """Return the ASX API URL from the [settings] stanza of asx.conf."""
        cfg = cli.getConfStanza('asx', 'settings')
        self.logger.info("executestory.py - asx_conf: {0}".format(
            cfg['api_url']))
        return cfg['api_url']

    def generate(self):
        """Execute or schedule the analytic story and yield a status event."""
        # Initialise the search window up front: the original code left these
        # names unbound when the options were empty and the time picker did
        # not supply a range, raising UnboundLocalError further down.
        earliest_time = self.earliest_time or None
        latest_time = self.latest_time or None

        # connect to splunk and start execution
        port = splunk.getDefault('port')
        service = splunklib.client.connect(
            token=self._metadata.searchinfo.session_key,
            port=port,
            owner="nobody",
            app="Splunk_ASX")
        self.logger.info("executestory.py - starting ASX - {0} ".format(
            self.story))

        API_URL = self.getURL()
        asx_lib = ASXLib(service, API_URL)

        # Running the selected analytic story immediately
        if self.mode == "now":

            # Prefer the time range coming from the search time picker when
            # the search results info carries one.
            if hasattr(self.search_results_info, 'search_et') and hasattr(
                    self.search_results_info, 'search_lt'):
                earliest_time = self.search_results_info.search_et
                latest_time = self.search_results_info.search_lt

            search_name = asx_lib.run_analytics_story(self.story,
                                                      earliest_time,
                                                      latest_time)

            yield {
                '_time': time.time(),
                'sourcetype': "_json",
                '_raw': {
                    'analytic_story': self.story,
                    'search_name': search_name,
                    'mode': self.mode,
                    'status':
                    "Successfully executed the searches in the analytic story"
                }
            }

        # Schedule the selected analytic story if a cron expression was given
        if self.mode == "schedule":
            if self.cron:
                search_name = asx_lib.schedule_analytics_story(
                    self.story, earliest_time, latest_time, self.cron)
                yield {
                    '_time': time.time(),
                    'sourcetype': "_json",
                    '_raw': {
                        'analytic_story': self.story,
                        'search_name': search_name,
                        'mode': self.mode,
                        # NOTE(review): key kept as-is ("schecule" typo) for
                        # backward compatibility with existing consumers.
                        'cron_schecule': self.cron,
                        'status': "Successfully scheduled the analytic story"
                    }
                }

        self.logger.info("executestory.py - completed ASX - {0} ".format(
            self.story))

    def __init__(self):
        super(Executestory, self).__init__()
class SimulateCommand(GeneratingCommand):
    """ Generates a sequence of events drawn from a CSV file using repeated random sampling

    ##Syntax

    .. code-block::
        simulate csv=<path> rate=<expected_event_count> interval=<sampling_period> duration=<execution_period>
        [seed=<string>]

    ##Description

    The :code:`simulate` command uses repeated random samples of the event records in :code:`csv` for the execution
    period of :code:`duration`. Sample sizes are determined for each time :code:`interval` in :code:`duration`
    using a Poisson distribution with an average :code:`rate` specifying the expected event count during
    :code:`interval`.

    ##Example

    .. code-block::
        | simulate csv=population.csv rate=50 interval=00:00:01
            duration=00:00:05 | countmatches fieldname=word_count
            pattern="\\w+" text | stats mean(word_count) stdev(word_count)

    This example generates events drawn from repeated random sampling of events from :code:`population.csv`. Events
    are drawn at an average rate of 50 per second for a duration of 5 seconds. Events are piped to the example
    :code:`countmatches` command which adds a :code:`word_count` field containing the number of words in the
    :code:`text` field of each event. The mean and standard deviation of the :code:`word_count` are then computed by
    the builtin :code:`stats` command.


    """
    csv_file = Option(doc='''**Syntax:** **csv=***<path>*
        **Description:** CSV file from which repeated random samples will be
        drawn''',
                      name='csv',
                      require=True,
                      validate=validators.File())

    duration = Option(doc='''**Syntax:** **duration=***<time-interval>*
        **Description:** Duration of simulation''',
                      require=True,
                      validate=validators.Duration())

    interval = Option(doc='''**Syntax:** **interval=***<time-interval>*
        **Description:** Sampling interval''',
                      require=True,
                      validate=validators.Duration())

    rate = Option(doc='''**Syntax:** **rate=***<expected-event-count>*
        **Description:** Average event count during sampling `interval`''',
                  require=True,
                  validate=validators.Integer(1))

    seed = Option(doc='''**Syntax:** **seed=***<string>*
        **Description:** Value for initializing the random number generator '''
                  )

    def generate(self):
        """Yield randomly sampled CSV records for the configured duration."""
        if not self.records:
            if self.seed is not None:
                random.seed(self.seed)
            self.records = [record for record in csv.DictReader(self.csv_file)]
            # Mean parameter of the exponential draw below: `rate` events are
            # expected per `interval` seconds.
            self.lambda_value = 1.0 / (self.rate / float(self.interval))

        duration = self.duration

        while duration > 0:
            count = int(round(random.expovariate(self.lambda_value)))
            # random.sample raises ValueError when the sample size exceeds
            # the population, so cap the draw at the record count.
            count = min(count, len(self.records))
            # time.clock() was removed in Python 3.8; perf_counter() is the
            # portable monotonic replacement for measuring elapsed time.
            start_time = time.perf_counter()
            for record in random.sample(self.records, count):
                yield record
            interval = time.perf_counter() - start_time
            if interval < self.interval:
                time.sleep(self.interval - interval)
            duration -= max(interval, self.interval)

    def __init__(self):
        super(SimulateCommand, self).__init__()
        self.lambda_value = None
        self.records = None
# --- Example #3 ---
class mispgetioc(ReportingCommand):
    """Reporting command that fetches IOC attributes from a MISP instance.

    Connection parameters fall back to ``local/misp.conf`` when the
    corresponding options are not supplied on the search line.  The actual
    MISP query is delegated to an external python3 helper script.
    """
    mispsrv = Option(require=False,
                     validate=validators.Match(
                         "mispsrv", r"^https?:\/\/[0-9a-zA-Z\.]+(?:\:\d+)?$"))
    mispkey = Option(require=False,
                     validate=validators.Match("mispkey",
                                               r"^[0-9a-zA-Z]{40}$"))
    sslcheck = Option(require=False,
                      validate=validators.Match("sslcheck", r"^[yYnN01]$"))
    eventid = Option(require=False,
                     validate=validators.Match("eventid", r"^[0-9]+$"))
    last = Option(require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdwm]$"))
    onlyids = Option(require=False,
                     validate=validators.Match("onlyids", r"^[yYnN01]+$"))
    getuuid = Option(require=False,
                     validate=validators.Match("getuuid", r"^[yYnN01]+$"))
    # Fixed copy/paste bug: this validator was mislabelled "getuuid".
    getorg = Option(require=False,
                    validate=validators.Match("getorg", r"^[yYnN01]+$"))
    category = Option(require=False)
    type = Option(require=False)

    @Configuration()
    def map(self, records):
        # Map phase does no work; everything happens in reduce().
        self.logger.debug('mispgetioc.map')
        yield {}
        return

    def reduce(self, records):
        """Query MISP through the helper script and yield matching IOCs."""
        self.logger.debug('mispgetioc.reduce')
        if self.sslcheck is None:
            self.sslcheck = 'n'

        # open misp.conf
        config_file = '/opt/splunk/etc/apps/misp42splunk/local/misp.conf'
        config = ConfigParser.RawConfigParser()
        config.read(config_file)

        # Generate args, falling back to misp.conf for any missing option
        my_args = {}
        if self.mispsrv:
            my_args['mispsrv'] = self.mispsrv
        else:
            my_args['mispsrv'] = config.get('mispsetup', 'mispsrv')
        if self.mispkey:
            my_args['mispkey'] = self.mispkey
        else:
            my_args['mispkey'] = config.get('mispsetup', 'mispkey')
        if self.sslcheck:
            my_args['sslcheck'] = self.sslcheck in ('Y', 'y', '1')
        else:
            my_args['sslcheck'] = config.getboolean('mispsetup', 'sslcheck')

        # Normalise the y/Y/1 string flags to booleans.
        onlyids = self.onlyids in ('Y', 'y', '1')
        getuuid = self.getuuid in ('Y', 'y', '1')
        getorg = self.getorg in ('Y', 'y', '1')

        if self.eventid and self.last:
            print('DEBUG Options "eventid" and "last" are mutually exclusive')
            exit(2)

        if self.eventid:
            my_args['eventid'] = self.eventid
        elif self.last:
            my_args['last'] = self.last
        else:
            print('DEBUG Missing "eventid" or "last" argument')
            exit(1)

        _SPLUNK_PATH = '/opt/splunk'
        _NEW_PYTHON_PATH = '/usr/bin/python3'
        _SPLUNK_PYTHON_PATH = os.environ['PYTHONPATH']
        os.environ['PYTHONPATH'] = _NEW_PYTHON_PATH
        my_process = _SPLUNK_PATH + '/etc/apps/misp42splunk/bin/pymisp_getioc.py'

        # Remove LD_LIBRARY_PATH from the environment (otherwise we may face
        # SSL issues); pop() avoids a KeyError when it is not set at all.
        env = dict(os.environ)
        env.pop('LD_LIBRARY_PATH', None)

        # Run the python3 helper and capture its stdout; the with-block
        # guarantees the devnull handle is closed (it leaked before).
        with open(os.devnull, 'w') as FNULL:
            p = subprocess.Popen([_NEW_PYTHON_PATH, my_process,
                                  str(my_args)],
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE,
                                 stderr=FNULL,
                                 env=env)
            output = p.communicate()[0]

        # SECURITY: the helper prints a Python literal (list of dicts);
        # parse it with ast.literal_eval instead of eval() so arbitrary
        # expressions in the child's output cannot be executed.
        import ast
        for v in ast.literal_eval(output):
            # Do not display deleted attributes
            if v['deleted'] == False:
                # If specified, do not display attributes with the non-ids flag set to False
                if onlyids == True and v['to_ids'] == False:
                    continue
                if self.category is not None and self.category != v['category']:
                    continue
                if self.type is not None and self.type != v['type']:
                    continue
                # Build a fresh dict per attribute: the original reused and
                # yielded one mutable dict, so every yielded reference ended
                # up aliasing the final record's values.
                results = {}
                if getuuid == True:
                    results['uuid'] = v['uuid']
                if getorg == True:
                    results['orgc'] = v['orgc']

                results['eventid'] = v['event_id']
                results['value'] = v['value']
                results['category'] = v['category']
                results['type'] = v['type']
                results['to_ids'] = str(v['to_ids'])
                yield results
class SVCPostRestore(GeneratingCommand):
    """POST a restore request to a remote Splunk Version Control endpoint."""

    url = Option(require=True)
    splunk_vc_name = Option(require=True)
    app = Option(require=True)
    type = Option(require=True)
    obj_name = Option(require=True)
    tag = Option(require=True)
    restoreAsUser = Option(require=True)
    scope = Option(require=True)
    timeout = Option(require=True)
    sslVerify = Option(require=False,
                       default=False,
                       validate=OrValidator(Boolean(), Filename()))
    requestingAddress = Option(require=False, default=False)

    def generate(self):
        """
          The logic is:
            If the requested savedsearch is owned by the current user, or the requesting user is an admin user, then
            change the dispatch.ttl value of the saved search to the requested newttl value passed in
            If the optional sharing level is not specified check for the savedsearch in the private / user context first
            then app context
            If the owner is specified look under the particular owner context, only someone with admin access can use this option
        """
        # Raw string: the original non-raw pattern relied on invalid escape
        # sequences ("\/", "\.") which raise DeprecationWarning on Python 3.
        urlregex = re.compile(
            r"^(?:https:\/\/)[\w0-9_\.-]+:8089/services/splunkversioncontrol_rest_restore$"
        )
        if urlregex.match(self.url):
            url = self.url
        else:
            logger.error(
                "Requested to post to remote url=%s but this did not match the regex"
                % (self.url))
            yield {
                'result':
                'Invalid url passed in, url must begin with https:// and would normally end in :8089/services/splunkversioncontrol_rest_restore, url=%s'
                % (self.url)
            }
            return

        body = {}
        body['splunk_vc_name'] = self.splunk_vc_name
        body['app'] = self.app
        body['type'] = self.type
        body['obj_name'] = self.obj_name
        body['tag'] = self.tag
        body['restoreAsUser'] = self.restoreAsUser
        body['scope'] = self.scope
        body['timeout'] = self.timeout
        if self.requestingAddress:
            body['requestingAddress'] = self.requestingAddress

        # Log the body BEFORE the Authorization token is added so the
        # session key never reaches the info log.
        logger.info("Attempting POST request to url=%s with body=\"%s\"" %
                    (url, body))

        body[
            'Authorization'] = 'Splunk ' + self._metadata.searchinfo.session_key

        # SECURITY: the original logged the full session key at debug level;
        # only record that the token was attached.
        logger.debug("Authorization token attached to request body")

        # NOTE(review): no request timeout is set, so a hung remote blocks
        # this search indefinitely — consider passing timeout= to post().
        attempt = requests.post(url, verify=self.sslVerify, data=body)
        if attempt.status_code != 200:
            logger.error(
                "POST request failed with status_code=%s, reason=%s, text=%s on url=%s"
                % (attempt.status_code, attempt.reason, attempt.text, url))
            yield {
                'result':
                'Unknown failure, received a non-200 response code of %s on the url %s, reason %s, text result is %s'
                % (attempt.status_code, url, attempt.reason, attempt.text)
            }
            return
        else:
            logger.debug("Received result of result=%s" % (attempt.text))
            yield {'result': attempt.text}
# --- Example #5 ---
class MispGetIocCommand(GeneratingCommand):
    """ get the attributes from a MISP instance.
    ##Syntax
    .. code-block::
        | mispgetioc misp_instance=<input> last=<int>(d|h|m)
        | mispgetioc misp_instance=<input> event=<id1>(,<id2>,...)
        | mispgetioc misp_instance=<input> date=<<YYYY-MM-DD>
                                           (date_to=<YYYY-MM-DD>)
    ##Description
    {
        "returnFormat": "mandatory",
        "page": "optional",
        "limit": "optional",
        "value": "optional",
        "type": "optional",
        "category": "optional",
        "org": "optional",
        "tags": "optional",
        "date": "optional",
        "last": "optional",
        "eventid": "optional",
        "withAttachments": "optional",
        "uuid": "optional",
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "enforceWarninglist": "optional",
        "to_ids": "optional",
        "deleted": "optional",
        "includeEventUuid": "optional",
        "includeEventTags": "optional",
        "event_timestamp": "optional",
        "threat_level_id": "optional",
        "eventinfo": "optional",
        "includeProposals": "optional",
        "includeDecayScore": "optional",
        "includeFullModel": "optional",
        "decayingModel": "optional",
        "excludeDecayed": "optional",
        "score": "optional"
    }
    # status
        "returnFormat": forced to json,
        "page": param,
        "limit": param,
        "value": not managed,
        "type": param, CSV string,
        "category": param, CSV string,
        "org": not managed,
        "tags": param, see also not_tags
        "date": param,
        "last": param,
        "eventid": param,
        "withAttachments": forced to false,
        "uuid": not managed,
        "publish_timestamp": managed via param last
        "timestamp": not managed,
        "enforceWarninglist": param,
        "to_ids": param,
        "deleted": forced to False,
        "includeEventUuid": set to True,
        "includeEventTags": param,
        "event_timestamp":  not managed,
        "threat_level_id":  not managed,
        "eventinfo": not managed,
        "includeProposals": not managed
        "includeDecayScore": not managed,
        "includeFullModel": not managed,
        "decayingModel": not managed,
        "excludeDecayed": not managed,
        "score": not managed
    }
    """
    # MANDATORY MISP instance for this search
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:** MISP instance parameters
        as described in local/misp42splunk_instances.conf.''',
                           require=True)
    # MANDATORY: json_request XOR eventid XOR last XOR date
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)
    eventid = Option(doc='''
        **Syntax:** **eventid=***id1(,id2,...)*
        **Description:**list of event ID(s) or event UUID(s).''',
                     require=False,
                     validate=validators.Match("eventid", r"^[0-9a-f,\-]+$"))
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:** publication duration in day(s), hour(s) or minute(s).
        **nota bene:** last is an alias of published_timestamp''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    date = Option(doc='''
        **Syntax:** **date=***The user set event date field
         - any of valid time related filters"*
        **Description:**starting date.
         **eventid**, **last** and **date** are mutually exclusive''',
                  require=False)
    # Other params
    add_description = Option(doc='''
        **Syntax:** **add_description=***<1|y|Y|t|true|True
        |0|n|N|f|false|False>*
        **Description:**Boolean to return misp_description.''',
                             require=False,
                             validate=validators.Boolean())
    category = Option(doc='''
        **Syntax:** **category=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for.
         Wildcard is %.''',
                      require=False)
    expand_object = Option(doc='''
        **Syntax:** **gexpand_object=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to have object attributes expanded (one per line).
        By default, attributes of one object are displayed on same line.''',
                           require=False,
                           validate=validators.Boolean())
    geteventtag = Option(doc='''
        **Syntax:** **geteventtag=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean includeEventTags. By default only
         attribute tag(s) are returned.''',
                         require=False,
                         validate=validators.Boolean())
    getorg = Option(doc='''
        **Syntax:** **getorg=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to return the ID of the organisation that
         created the event.''',
                    require=False,
                    validate=validators.Boolean())
    getuuid = Option(doc='''
        **Syntax:** **getuuid=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to return attribute UUID.''',
                     require=False,
                     validate=validators.Boolean())
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search;
         default 1000. 0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    not_tags = Option(doc='''
        **Syntax:** **not_tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to exclude.
         Wildcard is %.''',
                      require=False)
    output = Option(doc='''
        **Syntax:** **output=***<default|rawy>*
        **Description:**selection between the default behaviou or JSON output by attribute.''',
                    require=False,
                    validate=validators.Match("output", r"(default|raw)"))
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
                  require=False,
                  validate=validators.Match("page", r"^[0-9]+$"))
    pipesplit = Option(doc='''
        **Syntax:** **pipesplit=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to split multivalue attributes.''',
                       require=False,
                       validate=validators.Boolean())
    tags = Option(doc='''
        **Syntax:** **tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to search for.
         Wildcard is %.''',
                  require=False)
    to_ids = Option(doc='''
        **Syntax:** **to_ids=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to search only attributes with the flag
         "to_ids" set to true.''',
                    require=False,
                    validate=validators.Boolean())
    type = Option(doc='''
        **Syntax:** **type=***CSV string*
        **Description:**Comma(,)-separated string of types to search for.
         Wildcard is %.''',
                  require=False)
    warning_list = Option(doc='''
        **Syntax:** **warning_list=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to filter out well known values.''',
                          require=False,
                          validate=validators.Boolean())

    def log_error(self, msg):
        # Thin wrapper so the command logs through the root logger whose
        # level is configured by set_log_level().
        logging.error(msg)

    def log_info(self, msg):
        # Thin wrapper around the root logger (level set by set_log_level()).
        logging.info(msg)

    def log_debug(self, msg):
        # Thin wrapper around the root logger (level set by set_log_level()).
        logging.debug(msg)

    def log_warn(self, msg):
        # Thin wrapper around the root logger (level set by set_log_level()).
        logging.warning(msg)

    def set_log_level(self):
        """Configure the root logger level from the app's logging setting.

        The two messages are deliberately emitted at ERROR level so they
        always appear in search.log regardless of the level just applied.
        (The dead bare `logging.root` statement was removed.)
        """
        loglevel = logging_level('misp42splunk')
        logging.root.setLevel(loglevel)
        logging.error('[IO-101] logging level is set to %s', loglevel)
        logging.error('[IO-102] PYTHON VERSION: ' + sys.version)

    @staticmethod
    def _record(serial_number,
                time_stamp,
                host,
                attributes,
                attribute_names,
                encoder,
                condensed=False):
        """Build a Splunk result record from one MISP attribute dict.

        The first record (serial_number == 0) is an OrderedDict listing every
        name in attribute_names (missing ones as '') so the full field set is
        established; later records carry only the fields actually present.
        Unless condensed, the JSON-encoded attribute is included as _raw.
        """
        if condensed is False:
            raw = encoder.encode(attributes)

        # Keep only the requested attribute fields that are present.
        fields = {name: attributes[name]
                  for name in attribute_names if name in attributes}

        if serial_number > 0:
            fields['_serial'] = serial_number
            fields['_time'] = time_stamp
            if condensed is False:
                fields['_raw'] = raw
            fields['host'] = host
            return fields

        # serial_number == 0: single merged construction of the header
        # pairs followed by the full, ordered attribute-name list.
        header = [('_serial', serial_number), ('_time', time_stamp)]
        if condensed is False:
            header.append(('_raw', raw))
        header.append(('host', host))
        return OrderedDict(
            chain(header,
                  ((name, fields.get(name, '')) for name in attribute_names)))

    def generate(self):
        # loggging
        self.set_log_level()
        # Phase 1: Preparation
        misp_instance = self.misp_instance
        storage = self.service.storage_passwords
        my_args = prepare_config(self, 'misp42splunk', misp_instance, storage)
        if my_args is None:
            raise Exception(
                "Sorry, no configuration for misp_instance={}".format(
                    misp_instance))
        my_args['host'] = my_args['misp_url'].replace('https://', '')
        my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'

        # check that ONE of mandatory fields is present
        mandatory_arg = 0
        if self.json_request is not None:
            mandatory_arg = mandatory_arg + 1
        if self.eventid:
            mandatory_arg = mandatory_arg + 1
        if self.last:
            mandatory_arg = mandatory_arg + 1
        if self.date:
            mandatory_arg = mandatory_arg + 1

        if mandatory_arg == 0:
            self.log_error(
                'Missing "json_request", eventid", "last" or "date" argument')
            raise Exception(
                'Missing "json_request", "eventid", "last" or "date" argument')
        elif mandatory_arg > 1:
            self.log_error(
                'Options "json_request", eventid", "last" and "date" are mutually exclusive'
            )
            raise Exception(
                'Options "json_request", "eventid", "last" and "date" are mutually exclusive'
            )

        body_dict = dict()
        # Only ONE combination was provided
        if self.json_request is not None:
            body_dict = json.loads(self.json_request)
            self.log_info('Option "json_request" set')
        elif self.eventid:
            if "," in self.eventid:
                event_criteria = {}
                event_list = self.eventid.split(",")
                event_criteria['OR'] = event_list
                body_dict['eventid'] = event_criteria
            else:
                body_dict['eventid'] = self.eventid
            self.log_info('Option "eventid" set with {}'.format(
                json.dumps(body_dict['eventid'])))
        elif self.last:
            body_dict['last'] = self.last
            self.log_info('Option "last" set with {}'.format(
                body_dict['last']))
        else:
            body_dict['date'] = self.date.split()
            self.log_info('Option "date" set with {}'.format(
                json.dumps(body_dict['date'])))

        # Force some values on JSON request
        body_dict['returnFormat'] = 'json'
        body_dict['withAttachments'] = False
        body_dict['deleted'] = False
        body_dict['includeEventUuid'] = True
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        # Search pagination
        pagination = True
        if self.limit is not None:
            limit = int(self.limit)
        elif 'limit' in body_dict:
            limit = int(body_dict['limit'])
        else:
            limit = 1000
        if limit == 0:
            pagination = False
        if self.page is not None:
            page = int(self.page)
        elif 'page' in body_dict:
            page = body_dict['page']
        else:
            page = 1

        # Search parameters: boolean and filter
        # manage to_ids and enforceWarninglist
        # to avoid FP enforceWarninglist is set to True if
        # to_ids is set to True (search criterion)
        if self.to_ids is True:
            body_dict['to_ids'] = True
            body_dict['enforceWarninglist'] = True  # protection
        elif self.to_ids is False:
            body_dict['to_ids'] = False
        if self.warning_list is True:
            body_dict['enforceWarninglist'] = True
        elif self.warning_list is False:
            body_dict['enforceWarninglist'] = False
        if self.geteventtag is True:
            body_dict['includeEventTags'] = True
        if self.category is not None:
            if "," in self.category:
                cat_criteria = {}
                cat_list = self.category.split(",")
                cat_criteria['OR'] = cat_list
                body_dict['category'] = cat_criteria
            else:
                body_dict['category'] = self.category
        if self.type is not None:
            if "," in self.type:
                type_criteria = {}
                type_list = self.type.split(",")
                type_criteria['OR'] = type_list
                body_dict['type'] = type_criteria
            else:
                body_dict['type'] = self.type
        if self.tags is not None or self.not_tags is not None:
            tags_criteria = {}
            if self.tags is not None:
                tags_list = self.tags.split(",")
                tags_criteria['OR'] = tags_list
            if self.not_tags is not None:
                tags_list = self.not_tags.split(",")
                tags_criteria['NOT'] = tags_list
            body_dict['tags'] = tags_criteria

        # output filter parameters
        if self.add_description is True:
            my_args['add_desc'] = True
        else:
            my_args['add_desc'] = False
        if self.expand_object is True:
            my_args['expand'] = True
        else:
            my_args['expand'] = False
        if self.getorg is True:
            my_args['getorg'] = True
        else:
            my_args['getorg'] = False
        if self.getuuid is True:
            my_args['getuuid'] = True
        else:
            my_args['getuuid'] = False
        if self.pipesplit is True:
            my_args['pipe'] = True
        else:
            my_args['pipe'] = False
        if self.output is not None:
            my_args['output'] = self.output
        else:
            my_args['output'] = "default"

        # add colums for each type in results
        results = []
        typelist = []
        if pagination is True:
            body_dict['page'] = page
            body_dict['limit'] = limit
        body = json.dumps(body_dict)
        # search
        r = requests.post(my_args['misp_url'],
                          headers=headers,
                          data=body,
                          verify=my_args['misp_verifycert'],
                          cert=my_args['client_cert_full_path'],
                          proxies=my_args['proxies'])
        # check if status is anything other than 200;
        # throw an exception if it is
        if r.status_code in (200, 201, 204):
            self.log_info(
                "[IO301] INFO mispgetioc successful. url={}, HTTP status={}".
                format(my_args['misp_url'], r.status_code))
        else:
            self.log_error(
                "[IO302] ERROR mispgetioc failed. url={}, data={}, HTTP Error={}, content={}"
                .format(my_args['misp_url'], body, r.status_code, r.text))
            raise Exception(
                "[IO302] ERROR mispgetioc failed for url={} with HTTP Error={}. Check search.log for details"
                .format(my_args['misp_url'], r.status_code))

        # response is 200 by this point or we would have thrown an exception
        response = r.json()
        encoder = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'))
        # if raw output, returns JSON 1st-level keys as columns
        if my_args['output'] == "raw":
            if 'response' in response:
                if 'Attribute' in response['response']:
                    attribute_names = list()
                    serial_number = 0
                    for a in response['response']['Attribute']:
                        yield MispGetIocCommand._record(
                            serial_number, a['timestamp'], my_args['host'], a,
                            attribute_names, encoder)
                        serial_number += 1
                        GeneratingCommand.flush
        # default output: extract some values from JSON attributes
        else:
            if 'response' in response:
                if 'Attribute' in response['response']:
                    for a in response['response']['Attribute']:
                        v = {}
                        v['misp_category'] = str(a['category'])
                        v['misp_attribute_id'] = str(a['id'])
                        v['misp_event_id'] = str(a['event_id'])
                        v['misp_timestamp'] = str(a['timestamp'])
                        v['misp_to_ids'] = str(a['to_ids'])
                        v['misp_comment'] = str(a['comment'])
                        tag_list = []
                        if 'Tag' in a:
                            for tag in a['Tag']:
                                try:
                                    tag_list.append(str(tag['name']))
                                except Exception:
                                    pass
                        v['misp_tag'] = tag_list
                        # include ID of the organisation that
                        # created the attribute if requested
                        if 'Event' in a:
                            v['misp_event_uuid'] = str(a['Event']['uuid'])
                            if my_args['getorg']:
                                v['misp_orgc_id'] = str(a['Event']['orgc_id'])
                            if my_args['add_desc'] is True:
                                v['misp_event_info'] = str(a['Event']['info'])
                        # include attribute UUID if requested
                        if my_args['getuuid']:
                            v['misp_attribute_uuid'] = str(a['uuid'])
                        # handle object and multivalue attributes
                        v['misp_object_id'] = str(a['object_id'])
                        if my_args['add_desc'] is True:
                            if int(a['object_id']) == 0:
                                v['misp_description'] = 'MISP e' \
                                    + str(a['event_id']) + ' attribute ' \
                                    + str(a['uuid']) + ' of type "' \
                                    + str(a['type']) \
                                    + '" in category "' + str(a['category']) \
                                    + '" (to_ids:' + str(a['to_ids']) + ')'
                            else:
                                v['misp_description'] = 'MISP e' \
                                    + str(a['event_id']) + ' attribute ' \
                                    + str(a['uuid']) + ' of type "' \
                                    + str(a['type']) + '" in category "' \
                                    + str(a['category']) \
                                    + '" (to_ids:' + str(a['to_ids']) \
                                    + ' - o' + str(a['object_id']) + ' )'
                        current_type = str(a['type'])
                        # combined: not part of an object
                        # AND multivalue attribute AND to be split
                        if int(a['object_id']) == 0 and '|' in current_type \
                           and my_args['pipe'] is True:
                            mv_type_list = current_type.split('|')
                            mv_value_list = str(a['value']).split('|')
                            left_v = v.copy()
                            left_v['misp_type'] = mv_type_list.pop()
                            left_v['misp_value'] = mv_value_list.pop()
                            results.append(left_v)
                            if left_v['misp_type'] not in typelist:
                                typelist.append(left_v['misp_type'])
                            right_v = v.copy()
                            right_v['misp_type'] = mv_type_list.pop()
                            right_v['misp_value'] = mv_value_list.pop()
                            results.append(right_v)
                            if right_v['misp_type'] not in typelist:
                                typelist.append(right_v['misp_type'])
                        else:
                            v['misp_type'] = current_type
                            v['misp_value'] = str(a['value'])
                            results.append(v)
                            if current_type not in typelist:
                                typelist.append(current_type)

            self.log_info(json.dumps(typelist))

            # consolidate attribute values under output table
            if my_args['expand'] is True:
                output_dict = {}
                for r in results:
                    key = str(r['misp_event_id']) + \
                        '_' + str(r['misp_attribute_id'])
                    if key not in output_dict:
                        v = dict(r)
                        for t in typelist:
                            misp_t = 'misp_' + t.replace('-', '_').replace(
                                '|', '_p_')
                            v[misp_t] = []
                            if t == r['misp_type']:
                                v[misp_t].append(r['misp_value'])
                        output_dict[key] = dict(v)
                    else:
                        v = dict(output_dict[key])
                        misp_t = 'misp_' + r['misp_type'].replace(
                            '-', '_').replace('|', '_p_')
                        v[misp_t].append(r['misp_value'])  # set value for type
                        misp_type = r['misp_type'] + '|' + v['misp_type']
                        v['misp_type'] = misp_type
                        misp_value = str(r['misp_value']) + '|' + str(
                            v['misp_value'])
                        v['misp_value'] = misp_value
                        output_dict[key] = dict(v)
            else:
                output_dict = {}
                for r in results:
                    if int(r['misp_object_id']) == 0:  # not an object
                        key = str(r['misp_event_id']) + \
                            '_' + str(r['misp_attribute_id'])
                        is_object_member = False
                    else:  # this is a  MISP object
                        key = str(r['misp_event_id']) \
                            + '_object_' + str(r['misp_object_id'])
                        is_object_member = True
                    if key not in output_dict:
                        v = dict(r)
                        for t in typelist:
                            misp_t = 'misp_' + t.replace('-', '_').replace(
                                '|', '_p_')
                            v[misp_t] = []
                            if t == r['misp_type']:
                                v[misp_t].append(r['misp_value'])
                        v['misp_to_ids'] = []
                        v['misp_to_ids'].append(r['misp_to_ids'])
                        v['misp_category'] = []
                        v['misp_category'].append(r['misp_category'])
                        v['misp_attribute_id'] = []
                        v['misp_attribute_id'].append(r['misp_attribute_id'])
                        if my_args['getuuid'] is True:
                            v['misp_attribute_uuid'] = []
                            v['misp_attribute_uuid'].append(
                                r['misp_attribute_uuid'])
                        if my_args['add_desc'] is True:
                            v['misp_description'] = []
                            v['misp_description'].append(r['misp_description'])
                        v['misp_type'] = []
                        v['misp_type'].append(r['misp_type'])
                        v['misp_value'] = []
                        v['misp_value'].append(str(r['misp_value']))
                        output_dict[key] = dict(v)
                    else:
                        v = dict(output_dict[key])
                        misp_t = 'misp_' + r['misp_type'].replace(
                            '-', '_').replace('|', '_p_')
                        v[misp_t].append(r['misp_value'])  # set value for type
                        v['misp_to_ids'].append(r['misp_to_ids'])
                        v['misp_category'].append(r['misp_category'])
                        tag_list = v['misp_tag']
                        for tag in r['misp_tag']:
                            if tag not in tag_list:
                                tag_list.append(tag)
                        v['misp_tag'] = tag_list
                        if my_args['add_desc'] is True:
                            description = v['misp_description']
                            if r['misp_description'] not in description:
                                description.append(r['misp_description'])
                            v['misp_description'] = description
                        if is_object_member is False:
                            if r['misp_attribute_id'] not in v[
                                    'misp_attribute_id']:
                                v['misp_attribute_id'].append(
                                    r['misp_attribute_id'])
                            if my_args['getuuid'] is True:
                                if r['misp_attribute_uuid'] not in v[
                                        'misp_attribute_uuid']:
                                    v['misp_attribute_uuid'].append(
                                        r['misp_attribute_uuid'])
                            misp_type = []
                            misp_type.append(r['misp_type'] + '|' +
                                             v['misp_type'][0])
                            v['misp_type'] = misp_type
                            misp_value = []
                            misp_value.append(
                                str(r['misp_value']) + '|' +
                                str(v['misp_value'][0]))
                            v['misp_value'] = misp_value
                        else:
                            v['misp_attribute_id'].append(
                                r['misp_attribute_id'])
                            if my_args['getuuid'] is True:
                                v['misp_attribute_uuid'].append(
                                    r['misp_attribute_uuid'])
                            v['misp_type'].append(r['misp_type'])
                            v['misp_value'].append(r['misp_value'])
                        output_dict[key] = dict(v)

            # return output table
            attribute_names = list()
            init_attribute_names = True
            serial_number = 0
            for v in output_dict.values():
                if init_attribute_names is True:
                    for key in v.keys():
                        if key not in attribute_names:
                            attribute_names.append(key)
                    attribute_names.sort()
                    init_attribute_names = False
                yield MispGetIocCommand._record(serial_number,
                                                v['misp_timestamp'],
                                                my_args['host'], v,
                                                attribute_names, encoder, True)
                serial_number += 1
                GeneratingCommand.flush
Example #6
0
class TtrCommand(EventingCommand):
    """ Computes the time-to-repair (TTR) of a set of fields.

    ##Syntax

    .. code-block::
        ttr ttrfield=<field> resultfield=<field> passvalue=<value> <field-list>

    ##Example

    ..code-block::
        ttr ttrfield=ttr resultfield=result passvalue=SUCCESS instance ci stage release
    """
    ttrfield = Option(doc='''
        **Syntax:** **ttrfield=***<fieldname>*
        **Description:** Name of the field that will hold the computed TTR
        ''',
                      require=True,
                      validate=validators.Fieldname())

    resultfield = Option(doc='''
        **Syntax:** **resultfield=***<fieldname>*
        **Description:** Name of the field that holds the result of the event
        ''',
                         require=True,
                         validate=validators.Fieldname())

    passvalue = Option(doc='''
        **Syntax:** **passvalue=***<value>*
        **Description:** Value that *resultfield* needs to be to consider an event to be passing
        ''',
                       require=True)

    timefield = Option(doc='''
        **Syntax:** **timefield=***<fieldname>*
        **Description:** Field holding the event timestamp. Defaults to `_time`.
        ''',
                       default='_time',
                       require=False,
                       validate=validators.Fieldname())

    lookbeyondboundary = Option(doc='''
        **Syntax:** **lookbeyondboundary=***<bool>*
        **Description:** When true, also consider events from before the search
        window so a repair inside the window can be paired with a failure
        streak that started before it. Defaults to false.
        ''',
                                default=False,
                                require=False,
                                validate=validators.Boolean())

    def transform(self, records):
        """Annotate each event with its TTR and return the in-window events.

        Events are grouped by the free-form field list (`self.fieldnames`);
        within each group the TTR is the time from the first failure of a
        streak to the next passing event.
        """
        earliest_time = self.metadata.searchinfo.earliest_time
        by_fieldnames = self.fieldnames

        self.logger.debug(
            'ttrfield: %s, resultfield: %s, passvalue: %s, timefield: %s, lookbeyondboundary: %s, fieldnames: %s, earliest: %s',
            self.ttrfield, self.resultfield, self.passvalue, self.timefield,
            self.lookbeyondboundary, by_fieldnames, earliest_time)

        # get all events needed for TTR calculation (possibly including
        # events from before the search window)
        if self.lookbeyondboundary:
            events_for_ttr = self.get_events_for_ttr(records, earliest_time,
                                                     by_fieldnames)
        else:
            events_for_ttr = list(records)

        # calculate TTR: cluster events by group fields, oldest first
        sort_by_args = list(by_fieldnames)
        sort_by_args.append(self.timefield)
        events_for_ttr.sort(key=operator.itemgetter(*sort_by_args))

        metadata = {}
        self.init_metadata(metadata)

        last_by_fields = []

        for event in events_for_ttr:
            by_fields = self.get_by_fields(by_fieldnames, event)
            if by_fields != last_by_fields:
                # entering a new group: reset the failure-tracking state
                self.init_metadata(metadata)

            ttr = self.calc_single_ttr(float(event[self.timefield]),
                                       event[self.resultfield], metadata)
            event[self.ttrfield] = ttr

            last_by_fields = by_fields

        final_events = []
        events_for_ttr.sort(key=operator.itemgetter(self.timefield),
                            reverse=True)

        for event in events_for_ttr:
            timestamp = float(event[self.timefield])
            if self.lookbeyondboundary and timestamp < earliest_time:
                # sorted newest-first, so every remaining event is before
                # earliest_time: drop the borrowed pre-window events
                break
            final_events.append(event)

        return final_events

    def get_events_for_ttr(self, records, earliest_time, by_fieldnames):
        """Collect in-window events plus the pre-window events needed to
        close out failure streaks that started before the search window.

        `records` must arrive newest-first (Splunk's default order); a
        violation raises an exception below.
        """
        events_for_ttr = []

        # per-group bookkeeping, keyed by the joined by-field values
        metadata = {}
        within_time_window = True

        last_timestamp = -1

        for record in records:
            timestamp = float(record[self.timefield])

            # make sure timestamps are decreasing
            if last_timestamp != -1 and timestamp > last_timestamp:
                raise Exception('events aren\'t in decreasing time order!')

            last_timestamp = timestamp

            result = record[self.resultfield]
            by_fields = self.get_by_fields(by_fieldnames, record)
            key = self.gen_ttr_prep_metadata_key(by_fields)

            if timestamp >= earliest_time:
                events_for_ttr.append(record)

                if key not in metadata:
                    metadata[key] = {
                        'last_result': None,
                        'staged_events': [],
                        'has_success': False
                    }

                combo = metadata[key]

                combo['last_result'] = result
                if result == self.passvalue:
                    combo['has_success'] = True
            else:
                if within_time_window:
                    # First event past the window boundary: drop groups that
                    # never passed inside the window -- they have no repair
                    # to pair a pre-window failure with.
                    # BUGFIX: iterate over a snapshot (deleting entries while
                    # iterating a dict raises RuntimeError on Python 3) and
                    # use a distinct loop variable so the current record's
                    # `key` is not clobbered before the lookup below.
                    for group_key in list(metadata.keys()):
                        if not metadata[group_key]['has_success']:
                            del metadata[group_key]

                    within_time_window = False

                if not metadata:
                    # nothing left to close out; stop consuming records
                    break

                if key in metadata:
                    combo = metadata[key]
                    combo['staged_events'].append(record)
                    if result == self.passvalue:
                        # found the passing event that precedes the failure
                        # streak: keep the staged events and close the group
                        events_for_ttr += combo['staged_events']
                        del metadata[key]

        return events_for_ttr

    @staticmethod
    def gen_ttr_prep_metadata_key(by_fields):
        """Join the by-field values into a single ':'-separated dict key."""
        key = ''
        for by_field in by_fields:
            key += by_field + ':'
        return key[:-1]

    @staticmethod
    def init_metadata(metadata):
        """Reset the per-group TTR state used by calc_single_ttr."""
        metadata['last_result'] = ''
        metadata['first_failure_time'] = 0
        metadata['encountered_success'] = False

    def calc_single_ttr(self, timestamp, result, metadata):
        """Return the TTR for this event, or None when no repair completes here.

        A failure streak starts at the first non-passing event that follows a
        pass (after at least one pass has been seen); the streak's TTR is
        emitted on the next passing event.
        """
        ttr = None

        last_result = metadata['last_result']
        first_failure_time = metadata['first_failure_time']
        encountered_success = metadata['encountered_success']

        if not encountered_success and self.passing(result):
            metadata['encountered_success'] = True

        if (self.passing(last_result) or last_result
                == '') and not self.passing(result) and encountered_success:
            # pass -> fail transition: remember when the streak began
            metadata['first_failure_time'] = timestamp
        elif not self.passing(last_result) and self.passing(
                result) and last_result != '' and first_failure_time != 0:
            # fail -> pass transition: the repair completed now
            ttr = timestamp - first_failure_time

        metadata['last_result'] = result

        return ttr

    def passing(self, result):
        """Return True when `result` equals the configured pass value."""
        return result == self.passvalue

    @staticmethod
    def get_by_fields(by_fieldnames, event):
        """Extract the group-by values from an event, in option order."""
        return [event[x] for x in by_fieldnames]
Example #7
0
class MACFormatCommand(StreamingCommand):
    """ Convert a given MAC address field to specified format.

    ##Syntax

    .. code-block::
        | macformat input=field-list output=field-list format=[cisco|dash|ieee|none]

    ## Description

    Convert the fields in the `input` field list to the ones in the `output` list; Both lists are
    optional. The `input` list defaults to `macaddress`. The`output` list is filled with fields in
    the `input` list it the `output` list is shorter than the `input`.

    The `format` option is one of [cisco|dash|ieee|none]. The default is `none`.

    Raises a ValueError exception if the MAC address is invalid.
    """
    format = Option(doc='''
        **Syntax:** **format=**`[cisco|dash|ieee|none]`
        **Description:** Format of the output MAC address. Defaults to `none`.''',
                    require=False,
                    validate=validators.Set('cisco', 'dash', 'ieee', 'none'))

    inputs = Option(doc='''
        **Syntax:** **inputs=***<field-list>*
        **Description:** A comma-delimited list of input fields to convert. Defaults to `macaddress`.''',
                    require=False,
                    validate=validators.List())

    outputs = Option(doc='''
        **Syntax:** **outputs=***<field-list>*
        **Description:** A comma-delimited list of fields for the results. Defaults to `inputs`.''',
                     require=False,
                     validate=validators.List())

    def prepare(self):
        """ Prepare the options.

        Resolves the converter function and pads `outputs` with names from
        `inputs` when the output list is shorter.

        :return: :const:`None`
        :rtype: NoneType
        """
        # pick the module-level converter _cisco/_dash/_ieee/_none by name
        self.toform = globals()['_' + (self.format or self.def_format)]
        inputs = self.inputs
        if inputs is None:
            self.inputs = inputs = self.def_inputs
        outputs = self.outputs
        if outputs is None:
            outputs = inputs
        elif len(outputs) < len(inputs):
            # pad outputs with the unmatched input field names
            outputs += inputs[len(outputs):]
        self.outputs = outputs
        self.logger.debug(
            'MACFormatCommand.prepare: inputs = %s, outputs = %s', self.inputs,
            outputs)

    def stream(self, records):
        """Convert each input field's MAC address, writing to the output field.

        On conversion failure the original value is kept and the error logged.
        """
        toform = self.toform
        inputs = self.inputs
        outputs = self.outputs
        # defensive re-padding in case prepare() was not invoked
        if outputs is None:
            outputs = inputs
        elif len(outputs) < len(inputs):
            outputs += inputs[len(outputs):]
        for record in records:
            self.logger.debug('MACFormatCommand: record = %s', record)
            for infield, outfield in zip(inputs, outputs):
                mac = record.get(infield)
                if mac is not None:
                    try:
                        record[outfield] = toform(mac)
                    except Exception as err:
                        # keep the unconverted value so data is not lost
                        record[outfield] = mac
                        # BUGFIX: Python 3 exceptions have no .message
                        # attribute -- log the exception object itself
                        self.logger.error('(input=%s) %s', infield, err)
            yield record

    def __init__(self):
        """Load [macformat] defaults from default/app.conf, overridden by
        local/app.conf when present."""
        StreamingCommand.__init__(self)
        appdir = path.dirname(path.dirname(__file__))
        defconfpath = path.join(appdir, "default", "app.conf")
        defconf = cli.readConfFile(defconfpath).get('macformat') or {}
        localconfpath = path.join(appdir, "local", "app.conf")
        localconf = (cli.readConfFile(localconfpath).get('macformat')
                     or {}) if path.exists(localconfpath) else {}
        self.def_format = localconf.get('format') or defconf.get(
            'format') or 'none'
        inputs = localconf.get('inputs') or defconf.get('inputs')
        # raw string: '\s' in a plain literal is an invalid escape sequence
        self.def_inputs = re.split(r'[\s,]',
                                   inputs) if inputs else ['macaddress']
Example #8
0
class mispgetioc(ReportingCommand):
    """ Pull IOC attributes from a MISP instance.

    Connection parameters fall back to the [mispsetup] stanza of
    local/misp.conf when not supplied on the search line.  Exactly one of
    `eventid` or `last` must be given.  The query itself is delegated to an
    external Python 3 helper (pymisp_getioc.py); arguments and results are
    exchanged through pickle files under TMP_PATH.
    """
    mispsrv = Option(require=False,
                     validate=validators.Match(
                         "mispsrv",
                         r"^https?:\/\/[0-9a-zA-Z\-\.]+(?:\:\d+)?$"))
    mispkey = Option(require=False,
                     validate=validators.Match("mispkey",
                                               r"^[0-9a-zA-Z]{40}$"))
    sslcheck = Option(require=False,
                      validate=validators.Match("sslcheck", r"^[yYnN01]$"))
    eventid = Option(require=False,
                     validate=validators.Match("eventid", r"^[0-9]+$"))
    last = Option(require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdwm]$"))
    onlyids = Option(require=False,
                     validate=validators.Match("onlyids", r"^[yYnN01]+$"))
    getuuid = Option(require=False,
                     validate=validators.Match("getuuid", r"^[yYnN01]+$"))
    getorg = Option(require=False,
                    validate=validators.Match("getorg", r"^[yYnN01]+$"))
    category = Option(require=False)
    type = Option(require=False)
    tags = Option(require=False)
    not_tags = Option(require=False)

    @Configuration()
    def map(self, records):
        """Map phase is a no-op; all work happens in reduce()."""
        self.logger.debug('mispgetioc.map')
        yield {}
        return

    def reduce(self, records):
        """Build the helper's argument dict, run it, and yield its results."""
        self.logger.debug('mispgetioc.reduce')
        if self.sslcheck is None:
            self.sslcheck = 'n'

        _SPLUNK_PATH = os.environ['SPLUNK_HOME']

        # open misp.conf
        config_file = _SPLUNK_PATH + '/etc/apps/misp42splunk/local/misp.conf'
        mispconf = ConfigParser.RawConfigParser()
        mispconf.read(config_file)

        # Generate args: search-line options win over misp.conf defaults
        my_args = {}
        # MISP instance parameters
        if self.mispsrv:
            my_args['mispsrv'] = self.mispsrv
        else:
            my_args['mispsrv'] = mispconf.get('mispsetup', 'mispsrv')
        if self.mispkey:
            my_args['mispkey'] = self.mispkey
        else:
            my_args['mispkey'] = mispconf.get('mispsetup', 'mispkey')
        if self.sslcheck:
            # y/Y/1 enables certificate verification
            my_args['sslcheck'] = self.sslcheck in ('Y', 'y', '1')
        else:
            my_args['sslcheck'] = mispconf.getboolean('mispsetup', 'sslcheck')

        # Search parameters: booleans (y/Y/1 -> True) and pass-through filters
        my_args['onlyids'] = self.onlyids in ('Y', 'y', '1')
        my_args['getuuid'] = self.getuuid in ('Y', 'y', '1')
        my_args['getorg'] = self.getorg in ('Y', 'y', '1')
        # pass-through filters default to None when not provided
        my_args['category'] = self.category
        my_args['type'] = self.type
        my_args['tags'] = self.tags
        my_args['not_tags'] = self.not_tags

        # check that exactly ONE of the mandatory fields is present
        if self.eventid and self.last:
            print('DEBUG Options "eventid" and "last" are mutually exclusive')
            exit(2)
        elif self.eventid:
            my_args['eventid'] = self.eventid
        elif self.last:
            my_args['last'] = self.last
        else:
            print('DEBUG Missing "eventid" or "last" argument')
            exit(1)

        # paths to main components: either use default values or configured ones
        if mispconf.has_option('mispsetup', 'P3_PATH'):
            _NEW_PYTHON_PATH = mispconf.get('mispsetup', 'P3_PATH')
        else:
            _NEW_PYTHON_PATH = '/usr/bin/python3'
        if mispconf.has_option('mispsetup', 'TMP_PATH'):
            _TMP_PATH = mispconf.get('mispsetup', 'TMP_PATH')
        else:
            _TMP_PATH = '/tmp'

        # saved but never restored; the override stays for this process
        _SPLUNK_PYTHON_PATH = os.environ['PYTHONPATH']
        os.environ['PYTHONPATH'] = _NEW_PYTHON_PATH
        my_process = _SPLUNK_PATH + '/etc/apps/misp42splunk/bin/pymisp_getioc.py'

        # Remove LD_LIBRARY_PATH from the environment (otherwise, we will face some SSL issues
        env = dict(os.environ)
        # BUGFIX: pop() instead of del -- del raises KeyError when the
        # variable is not set in the environment
        env.pop('LD_LIBRARY_PATH', None)

        # exchange data with the helper through pickle files
        # NOTE(review): fixed, predictable paths under TMP_PATH are
        # race-prone, and pickle.load below trusts whatever the file
        # contains -- consider tempfile.mkstemp and a safer format
        swap_file = _TMP_PATH + '/mispgetioc_config'
        with open(swap_file, "wb") as f:
            pickle.dump(my_args, f, protocol=2)
        env_file = _TMP_PATH + '/mispgetioc_env'
        with open(env_file, "wb") as f:
            pickle.dump(env, f, protocol=2)

        p = subprocess.Popen([_NEW_PYTHON_PATH, my_process, swap_file],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=env)
        stdout, stderr = p.communicate()

        # the helper overwrites swap_file with its result list
        with open(swap_file, "rb") as f:
            output = pickle.load(f)

        if output:
            for v in output:
                yield v
Example #9
0
class MakeHolidays(StreamingCommand):
    """ 
    A wrapper for python-holidays providing holidays, business days, and business holidays when presented a _time (or specified) field.

    ##Syntax

    .. code-block::
       * | holidays [timefield=<field>] [country=<string>] [province=<string>] [state=<string>] [business_days=<comma_sep_int>] [custom_holiday=<date>]

    ##Description

    A wrapper for for the python library python-holidays (https://github.com/dr-prodigy/python-holidays) 
    to enrich existing data with timestamps to know if the given timestamp is a holiday, business day,
    or business holiday (a holiday that occurs on a normal business day). Default settings: country is 
    set to US and timestamp is expected in the `_time` field.

    ##Example

    .. code-block::
        * | holidays
    """

    timefield = Option(
        default='_time',
        doc='''
        **Syntax:** **timefield=***<fieldname>*
        **Description:** The field containing the timestamp in unix epoch, normally this is `_time` which is the default if not set.''',
        validate=validators.Fieldname())

    country = Option(
        default='US',
        doc='''
        **Syntax:** **country=***<string>*
        **Description:** Country code string from https://github.com/dr-prodigy/python-holidays/blob/master/README.rst, defaults to US''',
        )

    province = Option(
        default=None,
        doc='''
        **Syntax:** **province=***<string>*
        **Description:** Province code string from https://github.com/dr-prodigy/python-holidays/blob/master/README.rst, defaults to None''',
        )

    state = Option(
        default=None,
        doc='''
        **Syntax:** **state=***<string>*
        **Description:** State code string from https://github.com/dr-prodigy/python-holidays/blob/master/README.rst, defaults to None''',
        )

    business_days = Option(
        default=None,
        doc='''
        **Syntax:** **business_days=***<comma-sep-int>*
        **Description:** Defaults to Monday-Friday (1-5) but if different days are business days they can be specified using numbers (Saturday and Sunday are 6 and 7 respectively) ''',
        )

    custom_holiday = Option(
        default=None,
        doc='''
        **Syntax:** **custom_holiday=***<date>*
        **Description:** Ability to supply a date for a non-standard holiday.''',
        )

    def stream(self, records):
        """Annotate each record with holiday and business-day flags."""
        # NOTE(review): province/state are passed positionally -- confirm the
        # ordering matches the installed python-holidays CountryHoliday
        # signature
        holiday_list = holidays.CountryHoliday(
             self.country,
             self.province,
             self.state
        )
        if self.custom_holiday:
             holiday_list.append([self.custom_holiday])

        # Default to Monday-Friday; override only when business_days parses.
        # BUGFIX: working_days was previously left unbound (UnboundLocalError
        # below) when business_days was set but did not match the pattern.
        working_days = [1, 2, 3, 4, 5]
        if self.business_days and re.search(r'(\d+,?)+', self.business_days):
            working_days = [int(i) for i in self.business_days.split(',')]

        for record in records:
            converted_time = time.localtime(float(record[self.timefield]))
            converted_date = datetime.fromtimestamp(mktime(converted_time))
            record['is_holiday'] = converted_date in holiday_list
            record['holiday_name'] = holiday_list.get(converted_date)
            # %u gives ISO weekday 1-7 (Monday=1)
            record['is_business_day'] = int(strftime('%u',converted_time)) in working_days
            record['is_business_holiday'] = int(strftime('%u',converted_time)) in working_days and converted_date in holiday_list

            yield record
class GNEnrichCommand(EventingCommand):
    """
    gnenrich - Transforming Command.

    Transforming command that enriches Splunk search events with the context information of the IP addresses
    present as values in the IP field passed in ip_field parameter.
    Data pulled from: /v2/noise/context/{ip}

    **Syntax**::
    `index=firewall | gnenrich ip_field="ip"

    **Description**::
    The `gnenrich` command uses the IP represented by IP field in `ip_field` to return
    context information using method :method:`quick` from GreyNoise Python SDK.
    """

    ip_field = Option(doc='''
        **Syntax:** **ip_field=***<ip_field>*
        **Description:** Name of the field representing IP address in Splunk events''',
                      name='ip_field',
                      require=True)

    # Class-level guard so the API key is validated at most once per
    # command invocation (set to True after the first validation attempt).
    api_validation_flag = False

    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Only run on finalized (non-preview) search results.
        if self.search_results_info and not self.metadata.preview:

            # Defaults; THREADS/USE_CACHE are tuned below once the chunk
            # count is known.
            EVENTS_PER_CHUNK = 1
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field

            logger.info(
                "Started retrieving context information for the IP addresses present in field: {}"
                .format(str(ip_field)))

            try:
                # Strip the spaces from the parameter value if given
                if ip_field:
                    ip_field = ip_field.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    # NOTE(review): exit(1) aborts the whole search pipeline.
                    exit(1)

                # Fetch the GreyNoise API key from Splunk storage.
                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occured while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(
                        api_key, logger)
                    logger.debug(
                        "API validation status: {}, message: {}".format(
                            api_key_validation, str(message)))
                    # Flag is set even when validation failed, so a retry
                    # within the same invocation is never attempted.
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records,
                                                   ip_field,
                                                   EVENTS_PER_CHUNK,
                                                   logger,
                                                   optimize_requests=False)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or below IPs to call in the entire bunch of records
                # Use one thread with single thread with caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.debug(
                        "Less then 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Opting timout 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name=INTEGRATION_NAME)

                # Stream enriched events back to Splunk as they are produced.
                if len(chunk_dict) > 0:
                    for event in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key,
                            api_client,
                            'enrich',
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        yield event

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                # Catch-all boundary: log the traceback and surface a
                # generic error to the search UI instead of crashing.
                logger.info(
                    "Exception occured while getting context information for events events, Error: {}"
                    .format(traceback.format_exc()))
                self.write_error(
                    "Exception occured while enriching events with the context information of IP addresses. "
                    "See greynoise_main.log for more details.")

    def __init__(self):
        """Initialize custom command class."""
        super(GNEnrichCommand, self).__init__()
Example #11
0
class StreamFilterCommand(StreamingCommand):
    """ Returns a field with a list of non-overlapping matches to a regular expression in a set of fields.

    ##Syntax

    .. code-block::
        StreamFilterCommand fieldname=<field> pattern=<field containing regex pattern> <field-list>

    ##Description

    Returns the non-overlapping matches to the regular expression contained in the field specified by `pattern`
    The result is stored in the field specified by `fieldname`. If `fieldname` exists, its value
    is replaced. If `fieldname` does not exist, it is created. Event records are otherwise passed through to the next
    pipeline processor unmodified.

    ##Example

    Return the regular expression matches in the `text` field (field named text) of each tweet in tweets.csv and store the result in `word_count`.

    .. code-block::
        | inputlookup tweets | eval pattern="\\w+" | streamfilter fieldname=word_count pattern=pattern text

    """
    fieldname = Option(doc='''
        **Syntax:** **fieldname=***<fieldname>*
        **Description:** Name of the field that will hold the match count''',
                       require=True,
                       validate=validators.Fieldname())

    pattern = Option(doc='''
        **Syntax:** **pattern=***<fieldname>* 
        **Description:** Field name containign the regular expression pattern to match''',
                     require=True,
                     validate=validators.Fieldname())

    @staticmethod
    def _to_text(value):
        """Return *value* as text, decoding UTF-8 only when it is bytes.

        BUG FIX: the original unconditionally called ``value.decode("utf-8")``,
        which raises AttributeError on Python 3 ``str`` values.
        """
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        return six.text_type(value)

    #Filtering function created so we can handle multi-value pattern fields
    def thefilter(self, record, pattern):
        """Concatenate (space-separated) all matches of *pattern* found in
        the configured fieldnames of *record*; multivalue fields (lists)
        are scanned entry by entry."""
        values = ""
        for fieldname in self.fieldnames:
            if fieldname not in record:
                continue
            field_value = record[fieldname]
            # multivalue fields come through as a list; normalize to a list
            # so single-valued fields take the same path
            entries = field_value if isinstance(field_value, list) else [field_value]
            for entry in entries:
                for match in pattern.findall(self._to_text(entry)):
                    values = values + " " + match
        return values

    #stream function to work on each event which may or may not be multi-valued
    def stream(self, records):
        """Process each event, storing all regex matches in self.fieldname."""
        self.logger.debug('StreamFilterCommand: %s', self)  # logs command line
        for record in records:
            values = ""
            pattern = self.pattern
            if pattern not in record:
                # BUG FIX: the original passed `self` as a stray extra
                # argument to the logger after %-formatting was already done.
                self.logger.warn(
                    "StreamFilterCommand: pattern field is %s but cannot find this field"
                    % (pattern))
                sys.exit(-1)
            # The pattern field may itself be multivalued: compile and apply
            # each regex in turn, accumulating all matches.
            if isinstance(record[pattern], list):
                for aPattern in record[pattern]:
                    values = values + self.thefilter(record, re.compile(aPattern))
            else:
                values = values + self.thefilter(record, re.compile(record[pattern]))

            record[self.fieldname] = values
            yield record
Example #12
0
class Outliers(OptionRemoteStreamingCommand):
    """Streaming command that flags outlier events with scikit-learn.

    NOTE(review): the computation itself is performed remotely — the
    ``code`` string below appears to be shipped to an execnet-style
    channel (``__channelexec__``) by the OptionRemoteStreamingCommand
    base class; confirm against the base class implementation.
    """

    # Fraction of events to treat as outliers (used as the contamination /
    # nu parameter inside the remote code).
    threshold = Option(require=False,
                       default=0.01,
                       validate=FloatValidator(minimum=0, maximum=1))

    # One-Class SVM arguments
    kernel = Option(require=False, default='rbf')
    degree = Option(require=False,
                    default=3,
                    validate=validators.Integer(minimum=1))
    gamma = Option(require=False,
                   default=0.1,
                   validate=FloatValidator(minimum=0, maximum=1))
    coef0 = Option(require=False, default=0.0, validate=FloatValidator())

    # Covariance Estimator arguments
    support_fraction = Option(require=False,
                              validate=FloatValidator(minimum=0, maximum=1))
    showmah = Option(require=False,
                     default=False,
                     validate=validators.Boolean())

    # Which classifier to run remotely: 'one_class_svm' or
    # 'covariance_estimator' (see the classifiers dict in `code`).
    classifier = Option(require=False, default='one_class_svm')

    # Remote-executed source: receives the option dict over the channel,
    # vectorizes the incoming records (numeric fields directly, string
    # fields via HashingVectorizer), fits the chosen classifier and sends
    # back only the records scored as outliers.  This string is runtime
    # data and must stay byte-for-byte intact.
    code = """
import os, sys, numbers, math
import numpy as np
import scipy.sparse as sp
from scipy import stats

from sklearn import svm
from sklearn.covariance import EllipticEnvelope
from sklearn.feature_extraction.text import HashingVectorizer

if __name__ == '__channelexec__':
	args = channel.receive()

	fraction = 1 - args['threshold']
	fields = args.get('fieldnames') or ['_raw']
	by_fields = None
	try:
		by_index = fields.index("by")
		by_fields = fields[(by_index+1):]
		fields = fields[:by_index]
	except:
		pass
	classifier = args['classifier']

	svm_args = {
		'nu': 0.95 * fraction + 0.05,
		'kernel': args['kernel'],
		'degree': args['degree'],
		'gamma': args['gamma'],
		'coef0': args['coef0']
	}

	rc_args = {
		'contamination': args['threshold'],
		'support_fraction': args['support_fraction']
	}

	classifiers = {
		'one_class_svm': svm.OneClassSVM(**svm_args),
		'covariance_estimator': EllipticEnvelope(**rc_args)
	}

	records = []
	for record in channel:
		if not record:
			break
		records.append(record)

	if records:
		vectorizer = HashingVectorizer(ngram_range=(1,3), n_features=int(math.sqrt(len(records))))
		X = sp.lil_matrix((len(records),vectorizer.n_features))

		for i, record in enumerate(records):
			nums = []
			strs = []
			for field in fields:
				if isinstance(record.get(field), numbers.Number):
					nums.append(record[field])
				else:
					strs.append(str(record.get(field) or ""))
			if nums:
				X[i] = np.array(nums, dtype=np.float64)
			elif strs:
				X[i] = vectorizer.transform([" ".join(strs)])

		X = X.toarray()
		y_pred = None
		mah = None

		clf = classifiers.get(classifier)
		if clf:
			try:
				clf.fit(X)
				y = clf.decision_function(X).ravel()
				threshold = stats.scoreatpercentile(y, 100 * fraction)
				y_pred = y > threshold
				if classifier == 'covariance_estimator' and args['showmah']:
					mah = clf.mahalanobis(X)
			except ValueError:
				y_pred = np.zeros((X.shape[0]))

			for i, y in enumerate(y_pred):
				if y:
					record = records[i]
					if mah is not None:
						record['mahalanobis'] = mah[i].item()
					channel.send(record)
		else:
			channel.send({ "error": "Incorrect classifier specified %s" % classifier })
"""

    def __dir__(self):
        """List the option names shipped to the remote code as ``args``."""
        return [
            'threshold', 'kernel', 'degree', 'gamma', 'coef0',
            'support_fraction', 'showmah', 'classifier'
        ]
Example #13
0
class MispGetEventCommand(GeneratingCommand):
    """ get the attributes from a MISP instance.
    ##Syntax
    .. code-block::
        | MispGetEventCommand misp_instance=<input> last=<int>(d|h|m)
        | MispGetEventCommand misp_instance=<input> event=<id1>(,<id2>,...)
        | MispGetEventCommand misp_instance=<input> date=<<YYYY-MM-DD>
                                            (date_to=<YYYY-MM-DD>)
    ##Description
    {
        "returnFormat": "mandatory",
        "page": "optional",
        "limit": "optional",
        "value": "optional",
        "type": "optional",
        "category": "optional",
        "org": "optional",
        "tag": "optional",
        "tags": "optional",
        "searchall": "optional",
        "date": "optional",
        "last": "optional",
        "eventid": "optional",
        "withAttachments": "optional",
        "metadata": "optional",
        "uuid": "optional",
        "published": "optional",
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "enforceWarninglist": "optional",
        "sgReferenceOnly": "optional",
        "eventinfo": "optional",
        "excludeLocalTags": "optional"
    }
    # status
        "tag": "optional",
        "searchall": "optional",
        "metadata": "optional",
        "published": "optional",
        "sgReferenceOnly": "optional",
        "eventinfo": "optional",
        "excludeLocalTags": "optional"

        "returnFormat": forced to json,
        "page": param,
        "limit": param,
        "value": not managed,
        "type": param, CSV string,
        "category": param, CSV string,
        "org": not managed,
        "tags": param, see also not_tags
        "date": param,
        "last": param,
        "eventid": param,
        "withAttachments": forced to false,
        "uuid": not managed,
        "publish_timestamp": managed via param last
        "timestamp": not managed,
        "enforceWarninglist": not managed,
    }
    """
    # MANDATORY MISP instance for this search
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:**MISP instance parameters as described
         in local/misp42splunk_instances.conf.''',
                           require=True)
    # MANDATORY: json_request XOR eventid XOR last XOR date
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)
    eventid = Option(doc='''
        **Syntax:** **eventid=***id1(,id2,...)*
        **Description:**list of event ID(s) or event UUID(s).''',
                     require=False,
                     validate=validators.Match("eventid", r"^[0-9a-f,\-]+$"))
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:** publication duration in day(s), hour(s) or minute(s).
        **nota bene:** last is an alias of published_timestamp''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    date = Option(doc='''
        **Syntax:** **date=***The user set event date field
         - any of valid time related filters"*
        **Description:**starting date. **eventid**, **last**
         and **date** are mutually exclusive''',
                  require=False)
    # Other params
    category = Option(doc='''
        **Syntax:** **category=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for.
         Wildcard is %.''',
                      require=False)
    getioc = Option(doc='''
        **Syntax:** **getioc=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to return the list of attributes
         together with the event.''',
                    require=False,
                    validate=validators.Boolean())
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search; default 1000.
         0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    not_tags = Option(doc='''
        **Syntax:** **not_tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to exclude.
         Wildcard is %.''',
                      require=False)
    output = Option(doc='''
        **Syntax:** **output=***<default|rawy>*
        **Description:**selection between a tabular or JSON output.''',
                    require=False,
                    validate=validators.Match("output", r"(default|raw)"))
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
                  require=False,
                  validate=validators.Match("page", r"^[0-9]+$"))
    pipesplit = Option(doc='''
        **Syntax:** **pipesplit=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to split multivalue attributes.''',
                       require=False,
                       validate=validators.Boolean())
    published = Option(doc='''
        **Syntax:** **published=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**select only published events (for option from to) .''',
                       require=False,
                       validate=validators.Boolean())
    tags = Option(doc='''
        **Syntax:** **tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to search for.
         Wildcard is %.''',
                  require=False)
    type = Option(doc='''
        **Syntax:** **type=***CSV string*
        **Description:**Comma(,)-separated string of types to search for.
         Wildcard is %.''',
                  require=False)
    warning_list = Option(doc='''
        **Syntax:** **warning_list=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to filter out well known values.''',
                          require=False,
                          validate=validators.Boolean())

    @staticmethod
    def _record(serial_number,
                time_stamp,
                host,
                attributes,
                attribute_names,
                encoder,
                condensed=False):
        """Build one Splunk output record from a MISP attribute dict.

        When condensed is False the full JSON-encoded attributes are kept
        in _raw.  Records after the first (serial_number > 0) are returned
        as a plain dict; the very first record is an OrderedDict so the
        Splunk-reserved fields (_serial, _time, _raw, host) lead the
        column order.
        """

        if condensed is False:
            raw = encoder.encode(attributes)
        # Formulate record
        fields = dict()
        for f in attribute_names:
            if f in attributes:
                fields[f] = attributes[f]

        if serial_number > 0:
            fields['_serial'] = serial_number
            fields['_time'] = time_stamp
            if condensed is False:
                fields['_raw'] = raw
            fields['host'] = host
            return fields

        if condensed is False:
            record = OrderedDict(
                chain((('_serial', serial_number), ('_time', time_stamp),
                       ('_raw', raw), ('host', host)),
                      map(lambda name: (name, fields.get(name, '')),
                          attribute_names)))
        else:
            record = OrderedDict(
                chain((('_serial', serial_number), ('_time', time_stamp),
                       ('host', host)),
                      map(lambda name: (name, fields.get(name, '')),
                          attribute_names)))

        return record

    def generate(self):
        """Query MISP /events/restSearch and yield one record per event
        (or per attribute/object when getioc is set)."""

        # Phase 1: Preparation
        misp_instance = self.misp_instance
        storage = self.service.storage_passwords
        my_args = prepare_config(self, 'misp42splunk', misp_instance, storage)
        if my_args is None:
            raise Exception(
                "Sorry, no configuration for misp_instance={}".format(
                    misp_instance))
        my_args['host'] = my_args['misp_url'].replace('https://', '')
        my_args['misp_url'] = my_args['misp_url'] + '/events/restSearch'

        # check that ONE of mandatory fields is present
        mandatory_arg = 0
        if self.json_request is not None:
            mandatory_arg = mandatory_arg + 1
        if self.eventid:
            mandatory_arg = mandatory_arg + 1
        if self.last:
            mandatory_arg = mandatory_arg + 1
        if self.date:
            mandatory_arg = mandatory_arg + 1

        if mandatory_arg == 0:
            logging.error('Missing "json_request", eventid", \
                "last" or "date" argument')
            raise Exception('Missing "json_request", "eventid", \
                "last" or "date" argument')
        elif mandatory_arg > 1:
            logging.error('Options "json_request", eventid", "last" \
                and "date" are mutually exclusive')
            raise Exception('Options "json_request", "eventid", "last" \
                and "date" are mutually exclusive')

        # Phase 2: build the restSearch request body from exactly one of
        # the mutually-exclusive selectors.
        body_dict = dict()
        # Only ONE combination was provided
        if self.json_request is not None:
            body_dict = json.loads(self.json_request)
            logging.info('Option "json_request" set')
        elif self.eventid:
            if "," in self.eventid:
                event_criteria = {}
                event_list = self.eventid.split(",")
                event_criteria['OR'] = event_list
                body_dict['eventid'] = event_criteria
            else:
                body_dict['eventid'] = self.eventid
            logging.info('Option "eventid" set with %s',
                         json.dumps(body_dict['eventid']))
        elif self.last:
            body_dict['last'] = self.last
            logging.info('Option "last" set with %s', str(body_dict['last']))
        else:
            body_dict['date'] = self.date.split()
            logging.info('Option "date" set with %s',
                         json.dumps(body_dict['date']))

        # Force some values on JSON request
        body_dict['returnFormat'] = 'json'
        body_dict['withAttachments'] = False
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        # Search pagination: explicit options win over values embedded in
        # json_request; limit == 0 disables pagination entirely.
        pagination = True
        if self.limit is not None:
            limit = int(self.limit)
        elif 'limit' in body_dict:
            limit = int(body_dict['limit'])
        else:
            limit = 1000
        if limit == 0:
            pagination = False
        if self.page is not None:
            page = int(self.page)
        elif 'page' in body_dict:
            page = body_dict['page']
        else:
            page = 1
        if self.published is True:
            body_dict['published'] = True
        elif self.published is False:
            body_dict['published'] = False
        # Search parameters: boolean and filter
        # manage enforceWarninglist
        if self.warning_list is True:
            body_dict['enforceWarninglist'] = True
        elif self.warning_list is False:
            body_dict['enforceWarninglist'] = False
        if self.category is not None:
            if "," in self.category:
                cat_criteria = {}
                cat_list = self.category.split(",")
                cat_criteria['OR'] = cat_list
                body_dict['category'] = cat_criteria
            else:
                body_dict['category'] = self.category
        if self.type is not None:
            if "," in self.type:
                type_criteria = {}
                type_list = self.type.split(",")
                type_criteria['OR'] = type_list
                body_dict['type'] = type_criteria
            else:
                body_dict['type'] = self.type
        if self.tags is not None or self.not_tags is not None:
            tags_criteria = {}
            if self.tags is not None:
                tags_list = self.tags.split(",")
                tags_criteria['OR'] = tags_list
            if self.not_tags is not None:
                tags_list = self.not_tags.split(",")
                tags_criteria['NOT'] = tags_list
            body_dict['tags'] = tags_criteria
        # output filter parameters
        if self.getioc is True:
            getioc = True
        else:
            getioc = False
        if self.pipesplit is True:
            pipesplit = True
        else:
            pipesplit = False
        if self.output is not None:
            output = self.output
        else:
            output = "default"

        if pagination is True:
            body_dict['page'] = page
            body_dict['limit'] = limit

        body = json.dumps(body_dict)
        # NOTE(review): request body logged at ERROR level — looks like it
        # should be debug/info; left unchanged to preserve behavior.
        logging.error('MispGetEventCommand request body: %s', body)
        # Phase 3: POST the search to MISP
        r = requests.post(my_args['misp_url'],
                          headers=headers,
                          data=body,
                          verify=my_args['misp_verifycert'],
                          cert=my_args['client_cert_full_path'],
                          proxies=my_args['proxies'])
        # check if status is anything other than 200;
        # throw an exception if it is
        # check if status is anything other than 200;
        # throw an exception if it is
        if r.status_code in (200, 201, 204):
            logging.info("[EV301] INFO mispgetevent successful. "
                         "url={}, HTTP status={}".format(
                             my_args['misp_url'], r.status_code))
        else:
            logging.error("[EV302] ERROR mispgetevent failed. "
                          "url={}, data={}, HTTP Error={}, content={}".format(
                              my_args['misp_url'], body, r.status_code,
                              r.text))
            raise Exception(
                "[EV302] ERROR mispgetevent failed. "
                "url={}, data={}, HTTP Error={}, content={}".format(
                    my_args['misp_url'], body, r.status_code, r.text))
        # response is 200 by this point or we would have thrown an exception
        response = r.json()

        # Phase 4: shape and yield output records
        encoder = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'))
        if output == "raw":
            # raw mode: yield each Event object as-is (JSON in _raw)
            if 'response' in response:
                attribute_names = list()
                serial_number = 0
                for r_item in response['response']:
                    if 'Event' in r_item:
                        for e in r_item.values():
                            yield MispGetEventCommand._record(
                                serial_number, e['timestamp'], my_args['host'],
                                e, attribute_names, encoder)
                        # NOTE(review): serial_number increments once per
                        # r_item, not per yielded value — confirm intent.
                        serial_number += 1
                        # NOTE(review): attribute access only — this does
                        # not call flush(); presumably self.flush() was
                        # intended.
                        GeneratingCommand.flush
        else:
            # build output table and list of types
            events = []
            typelist = []
            column_list = format_output_table(response, events, typelist,
                                              getioc, pipesplit)
            logging.info('typelist containss %s values', str(len(typelist)))
            logging.debug('typelist is %s', json.dumps(typelist))
            logging.info('results contains %s records', str(len(events)))

            if getioc is False:
                # one condensed record per event; column set is derived
                # from the first event's keys (sorted once)
                attribute_names = list()
                init_attribute_names = True
                serial_number = 0
                for e in events:
                    if init_attribute_names is True:
                        for key in e.keys():
                            if key not in attribute_names:
                                attribute_names.append(key)
                        attribute_names.sort()
                        init_attribute_names = False
                    yield MispGetEventCommand._record(serial_number,
                                                      e['misp_timestamp'],
                                                      my_args['host'], e,
                                                      attribute_names, encoder,
                                                      True)
                    serial_number += 1
                    # NOTE(review): no-op attribute access (see above).
                    GeneratingCommand.flush
            else:
                # getioc mode: aggregate attributes per event (or per MISP
                # object within an event) into one row keyed by
                # "<event>_<attribute>" or "<event>_object_<object>"
                output_dict = {}
                for e in events:
                    if 'Attribute' in e:
                        for a in e['Attribute']:
                            if int(a['misp_object_id']) == 0:  # not an object
                                key = str(e['misp_event_id']) + '_' \
                                    + str(a['misp_attribute_id'])
                                is_object_member = False
                            else:  # this is a  MISP object
                                key = str(e['misp_event_id']) + \
                                    '_object_' + str(a['misp_object_id'])
                                is_object_member = True
                            if key not in output_dict:
                                # first attribute for this key: initialize
                                # the row with one misp_<type> column per
                                # known type
                                v = init_misp_output(e, a, column_list)
                                for t in typelist:
                                    misp_t = 'misp_' \
                                        + t.replace('-', '_')\
                                             .replace('|', '_p_')
                                    v[misp_t] = []
                                    if t == a['misp_type']:
                                        v[misp_t].append(a['misp_value'])
                                if is_object_member is True:
                                    v['misp_type'] = v['misp_object_name']
                                    v['misp_value'] = v['misp_object_id']
                                output_dict[key] = dict(v)
                            else:
                                # subsequent attribute: merge values into
                                # the existing row, de-duplicating flags,
                                # categories, tags and comments
                                v = dict(output_dict[key])
                                misp_t = 'misp_' + a['misp_type']\
                                    .replace('-', '_').replace('|', '_p_')
                                v[misp_t].append(a['misp_value'])
                                if a['misp_to_ids'] not in v['misp_to_ids']:
                                    v['misp_to_ids'].append(a['misp_to_ids'])
                                if a['misp_category'] not in v[
                                        'misp_category']:
                                    v['misp_category'].append(
                                        a['misp_category'])
                                v['misp_attribute_uuid']\
                                    .append(a['misp_attribute_uuid'])
                                v['misp_attribute_id']\
                                    .append(a['misp_attribute_id'])
                                if a['misp_attribute_tag'] is not None:
                                    a_tag = v['misp_attribute_tag']
                                    for t in a['misp_attribute_tag']:
                                        if t not in a_tag:
                                            a_tag.append(t)
                                    v['misp_attribute_tag'] = a_tag
                                if a['misp_comment'] not in v['misp_comment']:
                                    v['misp_comment'].append(a['misp_comment'])
                                if is_object_member is False:
                                    misp_type = a['misp_type'] \
                                        + '|' + v['misp_type']
                                    v['misp_type'] = misp_type
                                    misp_value = a['misp_value'] + \
                                        '|' + v['misp_value']
                                    v['misp_value'] = misp_value
                                output_dict[key] = dict(v)

                if output_dict is not None:
                    attribute_names = list()
                    init_attribute_names = True
                    serial_number = 0
                    for v in output_dict.values():
                        if init_attribute_names is True:
                            for key in v.keys():
                                if key not in attribute_names:
                                    attribute_names.append(key)
                            attribute_names.sort()
                            init_attribute_names = False
                        yield MispGetEventCommand._record(
                            serial_number, v['misp_timestamp'],
                            my_args['host'], v, attribute_names, encoder, True)
                        serial_number += 1
                        # NOTE(review): no-op attribute access (see above).
                        GeneratingCommand.flush
Example #14
0
class MispCollectCommand(GeneratingCommand):
    """ get the attributes from a MISP instance.
    ##Syntax
    .. code-block::
        | mispgetioc misp_instance=<input> last=<int>(d|h|m)
        | mispgetioc misp_instance=<input> event=<id1>(,<id2>,...)
        | mispgetioc misp_instance=<input> date=<<YYYY-MM-DD>
                                           (date_to=<YYYY-MM-DD>)
    ##Description
    {
        "returnFormat": "mandatory",
        "page": "optional",
        "limit": "optional",
        "value": "optional",
        "type": "optional",
        "category": "optional",
        "org": "optional",
        "tags": "optional",
        "date": "optional",
        "last": "optional",
        "eventid": "optional",
        "withAttachments": "optional",
        "uuid": "optional",
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "enforceWarninglist": "optional",
        "to_ids": "optional",
        "deleted": "optional",
        "includeEventUuid": "optional",
        "includeEventTags": "optional",
        "event_timestamp": "optional",
        "threat_level_id": "optional",
        "eventinfo": "optional",
        "includeProposals": "optional",
        "includeDecayScore": "optional",
        "includeFullModel": "optional",
        "decayingModel": "optional",
        "excludeDecayed": "optional",
        "score": "optional"
    }
    # status
        "returnFormat": forced to json,
        "page": param,
        "limit": param,
        "value": not managed,
        "type": param, CSV string,
        "category": param, CSV string,
        "org": not managed,
        "tags": param, see also not_tags
        "date": param,
        "last": param,
        "eventid": param,
        "withAttachments": forced to false,
        "uuid": not managed,
        "publish_timestamp": managed via param last
        "timestamp": not managed,
        "enforceWarninglist": param,
        "to_ids": param,
        "deleted": forced to False,
        "includeEventUuid": set to True,
        "includeEventTags": param,
        "event_timestamp":  not managed,
        "threat_level_id":  not managed,
        "eventinfo": not managed,
        "includeProposals": not managed
        "includeDecayScore": not managed,
        "includeFullModel": not managed,
        "decayingModel": not managed,
        "excludeDecayed": not managed,
        "score": not managed
    }
    """
    # MANDATORY MISP instance for this search
    misp_instance = Option(
        doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:** MISP instance parameters
        as described in local/misp42splunk_instances.conf.''',
        require=True)
    # MANDATORY: json_request XOR eventid XOR last XOR date
    json_request = Option(
        doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
        require=False)
    eventid = Option(
        doc='''
        **Syntax:** **eventid=***id1(,id2,...)*
        **Description:**list of event ID(s) or event UUID(s).''',
        require=False, validate=validators.Match("eventid", r"^[0-9a-f,\-]+$"))
    last = Option(
        doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:** publication duration in day(s), hour(s) or minute(s).
        **nota bene:** last is an alias of published_timestamp''',
        require=False, validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    date = Option(
        doc='''
        **Syntax:** **date=***The user set event date field
         - any of valid time related filters"*
        **Description:**starting date.
         **eventid**, **last** and **date** are mutually exclusive''',
        require=False)
    # Other params
    category = Option(
        doc='''
        **Syntax:** **category=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for.
         Wildcard is %.''',
        require=False)
    endpoint = Option(
        doc='''
        **Syntax:** **endpoint=***<events|attributes>*
        **Description:**selection of MISP API restSearch endpoint.
        **default**: /attributes/restSearch''',
        require=False, validate=validators.Match("endpoint", r"(events|attributes)"))
    geteventtag = Option(
        doc='''
        **Syntax:** **geteventtag=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean includeEventTags. By default only
         attribute tag(s) are returned.''',
        require=False, validate=validators.Boolean())
    keep_related = Option(
        doc='''
        **Syntax:** **keep_related=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to keep related events.
        default is to drop  RelatedEvents to reduce volume.''',
        require=False, validate=validators.Boolean())
    limit = Option(
        doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search;
         default 1000. 0 = no pagination.''',
        require=False, validate=validators.Match("limit", r"^[0-9]+$"))
    not_tags = Option(
        doc='''
        **Syntax:** **not_tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to exclude.
         Wildcard is %.''',
        require=False)
    page = Option(
        doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
        require=False, validate=validators.Match("page", r"^[0-9]+$"))
    tags = Option(
        doc='''
        **Syntax:** **tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to search for.
         Wildcard is %.''',
        require=False)
    to_ids = Option(
        doc='''
        **Syntax:** **to_ids=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to search only attributes with the flag
         "to_ids" set to true.''',
        require=False, validate=validators.Boolean())
    type = Option(
        doc='''
        **Syntax:** **type=***CSV string*
        **Description:**Comma(,)-separated string of types to search for.
         Wildcard is %.''',
        require=False)
    warning_list = Option(
        doc='''
        **Syntax:** **warning_list=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to filter out well known values.''',
        require=False, validate=validators.Boolean())

    def log_error(self, msg):
        """Log *msg* at ERROR level on the root logger."""
        logging.error(msg)

    def log_info(self, msg):
        """Log *msg* at INFO level on the root logger."""
        logging.info(msg)

    def log_debug(self, msg):
        """Log *msg* at DEBUG level on the root logger."""
        logging.debug(msg)

    def log_warn(self, msg):
        """Log *msg* at WARNING level on the root logger."""
        logging.warning(msg)

    def set_log_level(self):
        """Apply the app-configured level to the root logger.

        The two banner lines are deliberately logged at ERROR level so
        they always show up in search.log whatever level is configured.
        """
        loglevel = logging_level('misp42splunk')
        logging.root.setLevel(loglevel)
        logging.error('[CO-101] logging level is set to %s', loglevel)
        logging.error('[CO-102] PYTHON VERSION: ' + sys.version)

    @staticmethod
    def _record(serial_number, time_stamp, host, attributes, attribute_names, encoder):
        """Build one Splunk result record from a MISP object.

        :param serial_number: 0-based index of the record in this batch
        :param time_stamp: value placed in _time
        :param host: value placed in host
        :param attributes: dict of the MISP event/attribute fields
        :param attribute_names: field names to expose on the record
        :param encoder: json.JSONEncoder used to build _raw
        :return: a dict (serial > 0) or an OrderedDict (first record);
                 the first record carries a stable field order so Splunk
                 discovers every attribute name up front
        """
        raw = encoder.encode(attributes)
        # keep only the fields that were announced in attribute_names
        fields = dict()
        for f in attribute_names:
            if f in attributes:
                fields[f] = attributes[f]

        if serial_number > 0:
            fields['_serial'] = serial_number
            fields['_time'] = time_stamp
            fields['_raw'] = raw
            fields['host'] = host
            return fields

        # first record: fixed ordering (_serial, _time, _raw, host, then
        # the attribute names), with '' for any missing attribute
        record = OrderedDict(chain(
            (('_serial', serial_number), ('_time', time_stamp),
             ('_raw', raw), ('host', host)),
            map(lambda name: (name, fields.get(name, '')), attribute_names)))

        return record

    def generate(self):
        """Query the configured MISP instance via restSearch and yield one
        Splunk record per returned event or attribute.

        Raises Exception when the instance configuration is missing, when
        the mutually exclusive selectors are mis-used, or when the MISP
        API answers with an HTTP error status.
        """
        # logging
        self.set_log_level()
        # Phase 1: Preparation
        misp_instance = self.misp_instance
        storage = self.service.storage_passwords
        my_args = prepare_config(self, 'misp42splunk', misp_instance, storage)
        if my_args is None:
            raise Exception("Sorry, no configuration for misp_instance={}".format(misp_instance))
        my_args['host'] = my_args['misp_url'].replace('https://', '')
        # check that exactly ONE of the mandatory selectors is present
        mandatory_arg = 0
        if self.json_request is not None:
            mandatory_arg = mandatory_arg + 1
        if self.eventid:
            mandatory_arg = mandatory_arg + 1
        if self.last:
            mandatory_arg = mandatory_arg + 1
        if self.date:
            mandatory_arg = mandatory_arg + 1

        if mandatory_arg == 0:
            raise Exception('Missing "json_request", "eventid", "last" or "date" argument')
        elif mandatory_arg > 1:
            raise Exception('Options "json_request", "eventid", "last" and "date" are mutually exclusive')

        body_dict = dict()
        # Only ONE combination was provided
        if self.json_request is not None:
            body_dict = json.loads(self.json_request)
            self.log_info('Option "json_request" set')
        elif self.eventid:
            # several IDs become an OR criteria object
            if "," in self.eventid:
                event_criteria = {}
                event_list = self.eventid.split(",")
                event_criteria['OR'] = event_list
                body_dict['eventid'] = event_criteria
            else:
                body_dict['eventid'] = self.eventid
            self.log_info('Option "eventid" set with {}'
                          .format(json.dumps(body_dict['eventid'])))
        elif self.last:
            body_dict['last'] = self.last
            self.log_info('Option "last" set with {}'
                          .format(str(body_dict['last'])))
        else:
            body_dict['date'] = self.date.split()
            self.log_info('Option "date" set with {}'
                          .format(json.dumps(body_dict['date'])))

        # Force some values on JSON request
        body_dict['returnFormat'] = 'json'
        body_dict['withAttachments'] = False
        body_dict['deleted'] = False
        body_dict['includeEventUuid'] = True
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        # Search pagination; limit=0 disables pagination entirely.
        # Explicit options override values carried by json_request.
        pagination = True
        if self.limit is not None:
            limit = int(self.limit)
        elif 'limit' in body_dict:
            limit = int(body_dict['limit'])
        else:
            limit = 1000
        if limit == 0:
            pagination = False
        if self.page is not None:
            page = int(self.page)
        elif 'page' in body_dict:
            # normalise to int, consistent with the option path above
            page = int(body_dict['page'])
        else:
            page = 1

        # Search parameters: boolean and filter
        # manage to_ids and enforceWarninglist
        # to avoid FP enforceWarninglist is set to True if
        # to_ids is set to True (search criterion)
        if self.category is not None:
            if "," in self.category:
                cat_criteria = {}
                cat_list = self.category.split(",")
                cat_criteria['OR'] = cat_list
                body_dict['category'] = cat_criteria
            else:
                body_dict['category'] = self.category
        if self.endpoint == 'events':
            my_args['misp_url'] = my_args['misp_url'] + '/events/restSearch'
        else:
            my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'
        if self.geteventtag is True:
            body_dict['includeEventTags'] = True
        if self.keep_related is True:
            keep_related = True
        else:
            keep_related = False
        if self.to_ids is True:
            body_dict['to_ids'] = True
            body_dict['enforceWarninglist'] = True  # protection
        elif self.to_ids is False:
            body_dict['to_ids'] = False
        if self.type is not None:
            if "," in self.type:
                type_criteria = {}
                type_list = self.type.split(",")
                type_criteria['OR'] = type_list
                body_dict['type'] = type_criteria
            else:
                body_dict['type'] = self.type
        if self.warning_list is True:
            body_dict['enforceWarninglist'] = True
        elif self.warning_list is False:
            body_dict['enforceWarninglist'] = False
        if self.tags is not None or self.not_tags is not None:
            tags_criteria = {}
            if self.tags is not None:
                tags_list = self.tags.split(",")
                tags_criteria['OR'] = tags_list
            if self.not_tags is not None:
                tags_list = self.not_tags.split(",")
                tags_criteria['NOT'] = tags_list
            body_dict['tags'] = tags_criteria

        if pagination is True:
            body_dict['page'] = page
            body_dict['limit'] = limit

        body = json.dumps(body_dict)
        # search
        r = requests.post(my_args['misp_url'], headers=headers, data=body,
                          verify=my_args['misp_verifycert'],
                          cert=my_args['client_cert_full_path'],
                          proxies=my_args['proxies'])
        # check if status is anything other than 200;
        # throw an exception if it is
        if r.status_code in (200, 201, 204):
            self.log_info(
                "[CO301] INFO mispcollect successful. url={}, HTTP status={}".format(my_args['misp_url'], r.status_code)
            )
        else:
            self.log_error(
                "[CO302] ERROR mispcollect failed. url={}, data={}, HTTP Error={}, content={}".format(my_args['misp_url'], body, r.status_code, r.text)
            )
            raise Exception(
                "[CO302] ERROR mispcollect failed for url={} with HTTP Error={}. Check search.log for details".format(my_args['misp_url'], r.status_code)
            )
        # response is 200 by this point or we would have thrown an exception
        response = r.json()
        encoder = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'))
        if self.endpoint == "events":
            if 'response' in response:
                for r_item in response['response']:
                    if 'Event' in r_item:
                        attribute_names = []
                        serial_number = 0
                        for e in list(r_item.values()):
                            if keep_related is False:
                                # drop RelatedEvent block to reduce volume
                                e.pop('RelatedEvent', None)
                            if serial_number == 0:
                                for k in list(e.keys()):
                                    attribute_names.append(k)
                            yield MispCollectCommand._record(
                                serial_number, e['timestamp'], my_args['host'],
                                e, attribute_names, encoder)
                            # BUGFIX: increment and flush once per yielded
                            # event; the original incremented outside the
                            # loop, so every event was emitted with serial 0
                            serial_number += 1
                            self.flush()
        else:
            if 'response' in response:
                if 'Attribute' in response['response']:
                    attribute_names = []
                    serial_number = 0
                    for a in response['response']['Attribute']:
                        if serial_number == 0:
                            for k in list(a.keys()):
                                attribute_names.append(k)
                        yield MispCollectCommand._record(
                            serial_number, a['timestamp'], my_args['host'],
                            a, attribute_names, encoder)
                        serial_number += 1
                        # BUGFIX: the original bare "GeneratingCommand.flush"
                        # was a no-op attribute access; actually flush
                        self.flush()
# Example #15
class TestSearchCommand(SearchCommand):
    """Search command declaring one optional and one required Option for
    every splunklib validator (Boolean, Code, Duration, Fieldname, File,
    Integer, Float, Map, Match, OptionName, RegularExpression, Set), plus
    one aliased option — used to exercise Option/validator handling.

    NOTE(review): the option names ``float``, ``map``, ``set`` and the
    Match/RegularExpression names shadow Python builtins inside the class
    body; they are part of the command's external interface and are kept.
    """

    # Optional/required pair for validators.Boolean()
    boolean = Option(
        doc='''
        **Syntax:** **boolean=***<value>*
        **Description:** A boolean value''',
        validate=validators.Boolean())

    required_boolean = Option(
        doc='''
        **Syntax:** **boolean=***<value>*
        **Description:** A boolean value''',
        require=True, validate=validators.Boolean())

    # Same validator, but exposed to the search language under the
    # alias "foo" via the name= keyword.
    aliased_required_boolean = Option(
        doc='''
        **Syntax:** **boolean=***<value>*
        **Description:** A boolean value''',
        name='foo', require=True, validate=validators.Boolean())

    code = Option(
        doc='''
        **Syntax:** **code=***<value>*
        **Description:** A Python expression, if mode == "eval", or statement, if mode == "exec"''',
        validate=validators.Code())

    required_code = Option(
        doc='''
        **Syntax:** **code=***<value>*
        **Description:** A Python expression, if mode == "eval", or statement, if mode == "exec"''',
        require=True, validate=validators.Code())

    duration = Option(
        doc='''
        **Syntax:** **duration=***<value>*
        **Description:** A length of time''',
        validate=validators.Duration())

    required_duration = Option(
        doc='''
        **Syntax:** **duration=***<value>*
        **Description:** A length of time''',
        require=True, validate=validators.Duration())

    fieldname = Option(
        doc='''
        **Syntax:** **fieldname=***<value>*
        **Description:** Name of a field''',
        validate=validators.Fieldname())

    required_fieldname = Option(
        doc='''
        **Syntax:** **fieldname=***<value>*
        **Description:** Name of a field''',
        require=True, validate=validators.Fieldname())

    file = Option(
        doc='''
        **Syntax:** **file=***<value>*
        **Description:** Name of a file''',
        validate=validators.File())

    required_file = Option(
        doc='''
        **Syntax:** **file=***<value>*
        **Description:** Name of a file''',
        require=True, validate=validators.File())

    integer = Option(
        doc='''
        **Syntax:** **integer=***<value>*
        **Description:** An integer value''',
        validate=validators.Integer())

    required_integer = Option(
        doc='''
        **Syntax:** **integer=***<value>*
        **Description:** An integer value''',
        require=True, validate=validators.Integer())

    float = Option(
        doc='''
        **Syntax:** **float=***<value>*
        **Description:** An float value''',
        validate=validators.Float())

    required_float = Option(
        doc='''
        **Syntax:** **float=***<value>*
        **Description:** An float value''',
        require=True, validate=validators.Float())

    map = Option(
        doc='''
        **Syntax:** **map=***<value>*
        **Description:** A mapping from one value to another''',
        validate=validators.Map(foo=1, bar=2, test=3))

    required_map = Option(
        doc='''
        **Syntax:** **map=***<value>*
        **Description:** A mapping from one value to another''',
        require=True, validate=validators.Map(foo=1, bar=2, test=3))

    match = Option(
        doc='''
        **Syntax:** **match=***<value>*
        **Description:** A value that matches a regular expression pattern''',
        validate=validators.Match('social security number', r'\d{3}-\d{2}-\d{4}'))

    required_match = Option(
        doc='''
        **Syntax:** **required_match=***<value>*
        **Description:** A value that matches a regular expression pattern''',
        require=True, validate=validators.Match('social security number', r'\d{3}-\d{2}-\d{4}'))

    optionname = Option(
        doc='''
        **Syntax:** **optionname=***<value>*
        **Description:** The name of an option (used internally)''',
        validate=validators.OptionName())

    required_optionname = Option(
        doc='''
        **Syntax:** **optionname=***<value>*
        **Description:** The name of an option (used internally)''',
        require=True, validate=validators.OptionName())

    regularexpression = Option(
        doc='''
        **Syntax:** **regularexpression=***<value>*
        **Description:** Regular expression pattern to match''',
        validate=validators.RegularExpression())

    required_regularexpression = Option(
        doc='''
        **Syntax:** **regularexpression=***<value>*
        **Description:** Regular expression pattern to match''',
        require=True, validate=validators.RegularExpression())

    set = Option(
        doc='''
        **Syntax:** **set=***<value>*
        **Description:** A member of a set''',
        validate=validators.Set('foo', 'bar', 'test'))

    required_set = Option(
        doc='''
        **Syntax:** **set=***<value>*
        **Description:** A member of a set''',
        require=True, validate=validators.Set('foo', 'bar', 'test'))

    class ConfigurationSettings(SearchCommand.ConfigurationSettings):
        @classmethod
        def fix_up(cls, command_class):
            """No configuration fix-up needed for this test command."""
            pass
# Example #16
class MispSearchCommand(StreamingCommand):
    """
    search in MISP for attributes matching the value of field.

    ##Syntax

        code-block::
        mispsearch field=<field> to_ids=y|n

    ##Description

        body =  {
                    "returnFormat": "mandatory",
                    "page": "optional",
                    "limit": "optional",
                    "value": "optional",
                    "type": "optional",
                    "category": "optional",
                    "org": "optional",
                    "tags": "optional",
                    "from": "optional",
                    "to": "optional",
                    "last": "optional",
                    "eventid": "optional",
                    "withAttachments": "optional",
                    "uuid": "optional",
                    "publish_timestamp": "optional",
                    "timestamp": "optional",
                    "enforceWarninglist": "optional",
                    "to_ids": "optional",
                    "deleted": "optional",
                    "includeEventUuid": "optional",
                    "includeEventTags": "optional",
                    "event_timestamp": "optional",
                    "threat_level_id": "optional",
                    "eventinfo": "optional"
                }

    ##Example

    Search in MISP for value of fieldname r_ip (remote IP in proxy logs).

        code-block::
         * | mispsearch field=r_ip

    """

    # MANDATORY MISP instance for this search
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:**MISP instance parameters as \
        described in local/misp42splunk_instances.conf''',
                           require=True)
    # MANDATORY field whose value is looked up in MISP
    field = Option(doc='''
        **Syntax:** **field=***<fieldname>*
        **Description:**Name of the field containing \
        the value to search for.''',
                   require=True,
                   validate=validators.Fieldname())
    to_ids = Option(doc='''
        **Syntax:** **to_ids=***<y|n>*
        **Description:** Boolean to search only attributes with to_ids set''',
                    require=False,
                    validate=validators.Boolean())
    includeEventUuid = Option(doc='''
        **Syntax:** **includeEventUuid=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include event UUID(s) to results.''',
                              require=False,
                              validate=validators.Boolean())
    includeEventTags = Option(doc='''
        **Syntax:** **includeEventTags=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include Event Tags to results.''',
                              require=False,
                              validate=validators.Boolean())
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:**Publication duration in day(s), hour(s) or minute(s) 
        to limit search scope only to published events in last X timerange.''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search; \
        default 1000. 0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
                  require=False,
                  validate=validators.Match("page", r"^[0-9]+$"))
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)

    def log_error(self, msg):
        """Log *msg* at ERROR level on the root logger."""
        logging.error(msg)

    def log_info(self, msg):
        """Log *msg* at INFO level on the root logger."""
        logging.info(msg)

    def log_debug(self, msg):
        """Log *msg* at DEBUG level on the root logger."""
        logging.debug(msg)

    def log_warn(self, msg):
        """Log *msg* at WARNING level on the root logger."""
        logging.warning(msg)

    def set_log_level(self):
        """Apply the app-configured level to the root logger.

        The two banner lines are deliberately logged at ERROR level so
        they always show up in search.log whatever level is configured.
        """
        # NOTE(review): the bare expression below is a no-op attribute
        # access; left unchanged here
        logging.root
        loglevel = logging_level('misp42splunk')
        logging.root.setLevel(loglevel)
        logging.error('[SE-101] logging level is set to %s', loglevel)
        logging.error('[SE-102] PYTHON VERSION: ' + sys.version)

    def stream(self, records):
        """For each incoming record, POST the value of ``self.field`` to
        the MISP attributes/restSearch endpoint and enrich the record
        with misp_* multivalue fields built from the matches.

        One HTTP request is issued per record that carries the field;
        records without the field pass through unchanged.
        """
        # logging
        self.set_log_level()
        # Phase 1: Preparation
        misp_instance = self.misp_instance
        storage = self.service.storage_passwords
        my_args = prepare_config(self, 'misp42splunk', misp_instance, storage)
        if my_args is None:
            raise Exception(
                "Sorry, no configuration for misp_instance={}".format(
                    misp_instance))
        my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        fieldname = str(self.field)
        # pagination handling: limit=0 disables pagination
        pagination = True
        if self.limit is not None:
            if int(self.limit) == 0:
                pagination = False
            else:
                limit = int(self.limit)
        else:
            limit = 1000
        # NOTE(review): when limit=0, "limit" stays unbound; this is safe
        # only because it is read solely when pagination is True
        if self.page is not None:
            page = int(self.page)
        else:
            page = 1

        if self.json_request is not None:
            # caller supplied the full request body; force format values
            body_dict = json.loads(self.json_request)
            self.log_info('Option "json_request" set')
            body_dict['returnFormat'] = 'json'
            body_dict['withAttachments'] = False
            if 'limit' in body_dict:
                limit = int(body_dict['limit'])
                if limit == 0:
                    pagination = False
            if 'page' in body_dict:
                # the request already carries its own page; do not override
                page = body_dict['page']
                pagination = False
        else:
            # build search JSON object
            body_dict = {"returnFormat": "json", "withAttachments": False}
            if self.to_ids is True:
                # NOTE(review): "True" is sent as a string here (the
                # collect command sends a real boolean) — confirm MISP
                # accepts both forms
                body_dict['to_ids'] = "True"
            if self.includeEventUuid is not None:
                body_dict['includeEventUuid'] = self.includeEventUuid
            if self.includeEventTags is not None:
                body_dict['includeEventTags'] = self.includeEventTags
            if self.last is not None:
                body_dict['last'] = self.last
        for record in records:
            if fieldname in record:
                value = record.get(fieldname, None)
                if value is not None:
                    body_dict['value'] = str(value)
                    # accumulators for the misp_* multivalue output fields
                    misp_category = []
                    misp_event_id = []
                    misp_event_uuid = []
                    misp_orgc_id = []
                    misp_to_ids = []
                    misp_comment = []
                    misp_tag = []
                    misp_type = []
                    misp_value = []
                    misp_uuid = []
                    # search
                    if pagination is True:
                        body_dict['page'] = page
                        body_dict['limit'] = limit
                    body = json.dumps(body_dict)
                    r = requests.post(my_args['misp_url'],
                                      headers=headers,
                                      data=body,
                                      verify=my_args['misp_verifycert'],
                                      cert=my_args['client_cert_full_path'],
                                      proxies=my_args['proxies'])
                    # check if status is anything other than 200;
                    # throw an exception if it is
                    if r.status_code in (200, 201, 204):
                        self.log_info(
                            "[SE301] INFO mispsearch successful. url={}, HTTP status={}"
                            .format(my_args['misp_url'], r.status_code))
                    else:
                        self.log_error(
                            "[SE302] ERROR mispsearch failed. url={}, data={}, HTTP Error={}, content={}"
                            .format(my_args['misp_url'], body, r.status_code,
                                    r.text))
                        raise Exception(
                            "[SE302] ERROR mispsearch failed for url={} with HTTP Error={}. Check search.log for details"
                            .format(my_args['misp_url'], r.status_code))
                    # response is 200 by this point or we would have thrown an exception
                    response = r.json()
                    if 'response' in response:
                        if 'Attribute' in response['response']:
                            # de-duplicate each attribute field into its
                            # corresponding multivalue list
                            for a in response['response']['Attribute']:
                                if str(a['type']) not in misp_type:
                                    misp_type.append(str(a['type']))
                                if str(a['value']) not in misp_value:
                                    misp_value.append(str(a['value']))
                                if str(a['to_ids']) not in misp_to_ids:
                                    misp_to_ids.append(str(a['to_ids']))
                                if str(a['comment']) not in misp_comment:
                                    misp_comment.append(str(a['comment']))
                                if str(a['category']) not in misp_category:
                                    misp_category.append(str(a['category']))
                                if str(a['uuid']) not in misp_uuid:
                                    misp_uuid.append(str(a['uuid']))
                                if str(a['event_id']) not in misp_event_id:
                                    misp_event_id.append(str(a['event_id']))
                                if 'Tag' in a:
                                    for tag in a['Tag']:
                                        if str(tag['name']) not in misp_tag:
                                            misp_tag.append(str(tag['name']))
                                if 'Event' in a:
                                    if a['Event']['uuid'] \
                                       not in misp_event_uuid:
                                        misp_event_uuid.append(
                                            str(a['Event']['uuid']))
                                    if a['Event']['orgc_id'] \
                                       not in misp_orgc_id:
                                        misp_orgc_id.append(
                                            str(a['Event']['orgc_id']))
                            record['misp_type'] = misp_type
                            record['misp_value'] = misp_value
                            record['misp_to_ids'] = misp_to_ids
                            record['misp_comment'] = misp_comment
                            record['misp_category'] = misp_category
                            record['misp_attribute_uuid'] = misp_uuid
                            record['misp_event_id'] = misp_event_id
                            record['misp_event_uuid'] = misp_event_uuid
                            record['misp_orgc_id'] = misp_orgc_id
                            record['misp_tag'] = misp_tag
            yield record
# Example #17
class StreamFilterWildcardCommand(StreamingCommand):
    """ Returns a field with a list of non-overlapping matches to a wildcard pattern in a set of fields.

    ##Syntax

    .. code-block::
        StreamFilterWildcardCommand fieldname=<field> pattern=<field containing wildcard pattern> <field-list>

    ##Description

    Returns the non-overlapping matches to the wildcard pattern contained in the field specified by `pattern`
    The result is stored in the field specified by `fieldname`. If `fieldname` exists, its value
    is replaced. If `fieldname` does not exist, it is created. Event records are otherwise passed through to the next
    pipeline processor unmodified.

    ##Example

    Return the wildcard pattern matches in the `text` of each tweet in tweets.csv and store the result in `word_count`.

    .. code-block::
        | inputlookup tweets | eval pattern="\\w+" | streamfilter fieldname=word_count pattern=pattern text

    """
    fieldname = Option(
        doc='''
        **Syntax:** **fieldname=***<fieldname>*
        **Description:** Name of the field that will hold the match count''',
        require=True, validate=validators.Fieldname())

    pattern = Option(
        doc='''
        **Syntax:** **pattern=***<fieldname>* 
        **Description:** Field name containing the wildcard pattern pattern to match''',
        require=True, validate=validators.Fieldname())

    @staticmethod
    def _to_text(value):
        # Splunk may hand field values over as raw bytes; decode those as
        # UTF-8.  Values that are already text pass through unchanged —
        # calling .decode() on a Python 3 str (as the original code did)
        # raises AttributeError.
        if isinstance(value, bytes):
            return value.decode("utf-8")
        return str(value)

    # Filter the data based on the passed-in compiled pattern.  This function
    # exists so we can handle multi-value pattern fields.
    def thefilter(self, record, pattern):
        """Return a space-joined string of all matches of *pattern* across
        the requested fieldnames of *record* (multivalue fields included)."""
        values = ""
        for fieldname in self.fieldnames:
            field_value = record[fieldname]
            # Multivalue fields come through as a list; run the pattern
            # against each entry.  (`types.ListType` was removed in
            # Python 3 — test against the builtin `list` instead.)
            entries = field_value if isinstance(field_value, list) else [field_value]
            for entry in entries:
                for match in pattern.findall(self._to_text(entry)):
                    values = values + " " + match
        return values

    # Change a glob-style wildcard pattern into an anchored, case-insensitive
    # regular expression string.
    def changeToWildcard(self, pattern):
        pattern = pattern.replace("\"", "")
        pattern = pattern.replace("'", "")
        pattern = pattern.replace("*", ".*")
        # A leading wildcard must not match Splunk-internal fields, which
        # begin with an underscore.
        if pattern.find(".*") == 0:
            pattern = "[^_].*" + pattern[2:]
        pattern = "(?i)^" + pattern + "$"
        return pattern

    # Streaming command entry point: annotate each record with the matches.
    def stream(self, records):
        self.logger.debug('StreamFilterWildcardCommand: %s', self)  # logs command line
        for record in records:
            values = ""
            pattern = self.pattern
            # dict.has_key() was removed in Python 3; use the `in` operator.
            if pattern not in record:
               self.logger.warn("StreamFilterWildcardCommand: pattern field is %s but cannot find this field" % (pattern), self)
               sys.exit(-1)
            if isinstance(record[pattern], list):
                for aPattern in record[pattern]:
                    compiled = re.compile(self.changeToWildcard(aPattern))
                    values = values + self.thefilter(record, compiled)
            else:
                compiled = re.compile(self.changeToWildcard(record[pattern]))
                values = values + self.thefilter(record, compiled)

            record[self.fieldname] = values
            yield record
class AwsSnsAlertCommand(StreamingCommand, SNSPublisher):
    """Publish search results to an AWS SNS topic.

    By default only the first record is published; set publish_all to a
    truthy value to publish every record.  One response event is yielded
    per processed record, and a summary error is written if any publish
    attempt failed.
    """

    account = Option(require=True)
    region = Option(require=True)
    topic_name = Option(require=True)
    publish_all = Option(require=False)

    def stream(self, records):
        logger.info('Search Alert - Started')
        splunkd_uri = self.search_results_info.splunkd_uri
        session_key = self.search_results_info.auth_token
        # publish_all is an optional string option; util.is_true parses it.
        publish_all = util.is_true(self.publish_all or 'false')

        err = 0
        count = 0
        for i, rec in enumerate(records):
            try:
                count += 1
                yield self._handle_record(splunkd_uri, session_key, rec, i)
            except Exception as exc:
                err += 1
                yield self._handle_error(exc, traceback.format_exc(), rec, i)
            # Unless publish_all is set, stop after the first record.
            if not publish_all:
                break

        if err:
            self.write_error('%s in %s events failed. '
                             'Check response events for detail' % (err, count))

    def _handle_record(self, splunkd_uri, session_key, record, serial):
        """Publish one record via SNSPublisher.publish and build the
        success response event."""
        resp = self.publish(splunkd_uri,
                            session_key,
                            self.account,
                            self.region,
                            self.topic_name,
                            record=record)

        result = {'result': 'Success', 'response': json.dumps(resp)}
        res = AwsSnsAlertCommand.make_event(**result)
        logger.debug('Search Alert', **result)
        return {'_serial': serial, '_time': record.get('_time'), '_raw': res}

    def _handle_error(self, exc, tb, record, serial):
        """Log a failed publish and build the failure response event."""
        logger.error('Search Alert', result='Failed', error=tb)
        res = AwsSnsAlertCommand.make_event('Failed', error=exc)
        return {'_serial': serial, '_time': record.get('_time'), '_raw': res}

    @staticmethod
    def make_event(result, **kwargs):
        """Render a 'Search Alert' event string from the result and any
        extra key="value" pairs."""
        event = 'Search Alert - result="{result}"'.format(result=result)
        # dict.iteritems() was removed in Python 3; use items().
        arr = ['%s="%s"' % (key, val) for key, val in kwargs.items()]
        arr.insert(0, event)
        return ', '.join(arr)

    def make_subject(self, *args, **kwargs):
        # SNSPublisher hook: fixed subject line for all alerts.
        return 'Splunk - Alert from Search'

    def make_message(self, *args, **kwargs):
        # SNSPublisher hook: build the SNS message body from the record,
        # falling back to search metadata where fields are absent.
        record = kwargs['record']
        return SNSMessageContent(
            message=record.get('message', ''),
            timestamp=record.get('timestamp', record.get('_time')),
            entity=record.get('entity', ''),
            correlation_id=record.get('record', self.search_results_info.sid),
            source=record.get('source', ''),
            event=record.get('event', record.get('_raw')),
            search_name='',
            results_link='',
            app=self.search_results_info.ppc_app,
            owner=self.search_results_info.ppc_user,
        )
Example #19
0
class abuseipCommand(StreamingCommand):
    """ %(synopsis)

    ##Syntax

    %(syntax)

    ##Description

    %(description)

    """

    ipfield = Option(doc='''
        **Syntax:** **ipfield=***<fieldname>*
        **Description:** Name of the IP address field to look up''',
                     require=True,
                     validate=validators.Fieldname())

    def stream(self, events):
        """Enrich each event with AbuseIPDB reputation data for the IP in
        `ipfield`; on failure, set AbuseApiError instead."""
        # Load config with user specified API key, if this file does not
        # exist copy it from ../default
        with open('../local/config.json') as config_file:
            api_key = json.load(config_file)['abuseip'][0]['api_key']

        # Headers required by the AbuseIPDB API
        headers = {
            'Key': api_key,
            'Accept': 'application/json',
        }

        for event in events:
            # Parameters required by the AbuseIPDB API
            params = (
                ('ipAddress', event[self.ipfield]),
                ('maxAgeInDays', '90'),
                ('verbose', ''),
            )
            response = req.get('https://api.abuseipdb.com/api/v2/check',
                               headers=headers,
                               params=params)

            # Non-200 response: record the error and pass the event on.
            if response.status_code != 200:
                event['AbuseApiError'] = "Invalid Request:status_code=" + str(
                    response.status_code)
                yield event
                continue

            payload = response.json()
            # 200 response without the expected body: record the error.
            if 'data' not in payload:
                event[
                    'AbuseApiError'] = "Invalid Response:Missing data key"
                yield event
                continue

            # Successful lookup: copy the reputation fields onto the event.
            info = payload['data']
            event["CountryName"] = info['countryName']
            event["Domain"] = info['domain']
            event["ISP"] = info['isp']
            event["LastReportedAt"] = info['lastReportedAt']
            event["AbuseConfidence"] = info['abuseConfidenceScore']
            yield event
class EmailValidationCommand(StreamingCommand):
    """Validate email addresses in events through the IPQualityScore API.

    Records that contain `field` are sent (multithreaded) to the API; each
    gets the detection results copied in under the IPQualityScore prefix plus
    a "<prefix>_status" field.  Records missing `field` are dropped with an
    error log.  Raises if no credentials are stored or no record has the
    field.
    """

    field = Option(require=True, default=True, validate=validators.Fieldname())
    fast = Option(require=False, default=False, validate=validators.Boolean())
    timeout = Option(require=False, default=7, validate=validators.Integer())
    suggest_domain = Option(require=False,
                            default=False,
                            validate=validators.Boolean())
    strictness = Option(require=False,
                        default=0,
                        validate=validators.Integer())
    abuse_strictness = Option(require=False,
                              default=0,
                              validate=validators.Integer())

    def stream(self, records):
        logger = setup_logging()

        # Split records by whether they carry the email field at all.
        correct_records = []
        incorrect_records = []
        for record in records:
            if self.field in record:
                correct_records.append(record)
            else:
                incorrect_records.append(record)

        if len(incorrect_records) > 0:
            self.logger.error('email field missing from ' +
                              str(len(incorrect_records)) +
                              " events. They will be ignored.")

        if len(correct_records) > 0:
            # BUGFIX: usercreds must be initialised before the loop —
            # previously, if no stored credential matched the realm, the
            # `if usercreds is not None` check raised NameError instead of
            # reaching the intended "No credentials" exception.
            usercreds = None
            storage_passwords = self.service.storage_passwords
            for credential in storage_passwords:
                if credential.content.get('realm') != 'ipqualityscore_realm':
                    continue
                usercreds = {
                    'username': credential.content.get('username'),
                    'password': credential.content.get('clear_password')
                }
            if usercreds is not None:
                ipqualityscoreclient = IPQualityScoreClient(
                    usercreds.get('password'), logger)

                emails = []
                rs = []
                for record in correct_records:
                    emails.append(record.get(self.field))
                    rs.append(record)

                # One batched, multithreaded API call for all emails.
                results_dict = ipqualityscoreclient.email_validation_multithreaded(
                    emails,
                    fast=self.fast,
                    timeout=self.timeout,
                    suggest_domain=self.suggest_domain,
                    strictness=self.strictness,
                    abuse_strictness=self.abuse_strictness)
                for record in rs:
                    detection_result = results_dict.get(record[self.field])

                    if detection_result is not None:
                        # Prefix every API result key before storing it.
                        for key, val in detection_result.items():
                            new_key = ipqualityscoreclient.get_prefix(
                            ) + "_" + key
                            record[new_key] = val
                        record[ipqualityscoreclient.get_prefix() +
                               "_status"] = 'api call success'
                    else:
                        record[ipqualityscoreclient.get_prefix() +
                               "_status"] = 'api call failed'

                    yield record
            else:
                raise Exception("No credentials have been found")
        else:
            raise Exception("There are no events with email field.")
Example #21
0
class mispapireport(ReportingCommand):
    """ MISP API wrapper for endpoint /attributes/restSearch.
    return format is JSON for the moment
    ##Syntax
    use parameter names to set values in the POST request body below.
    .. code-block::
        | mispapireport page=<int> limit=<int> value=string type=CSVstring category=CSVstring org=string 
                        tags=CSVstring not_tags=CSVstrings date_from=date_string date_to=date_string last=<int>(d|h|m)
                        eventid=CSVint uuid=CSVuuid_string enforceWarninglist=True|False 
                        to_ids=True|False deleted=True|False includeEventUuid=True|False includeEventTags==True|False
                        threat_level_id=<int> eventinfo=string

    forced parameters:
        "returnFormat": "json"
        withAttachments: False
    not handled parameters:
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "event_timestamp": "optional",


    ##Description
    {
        "returnFormat": "mandatory",
        "page": "optional",
        "limit": "optional",
        "value": "optional",
        "type": "optional",
        "category": "optional",
        "org": "optional",
        "tags": "optional",
        "from": "optional",
        "to": "optional",
        "last": "optional",
        "eventid": "optional",
        "withAttachments": "optional",
        "uuid": "optional",
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "enforceWarninglist": "optional",
        "to_ids": "optional",
        "deleted": "optional",
        "includeEventUuid": "optional",
        "includeEventTags": "optional",
        "event_timestamp": "optional",
        "threat_level_id": "optional",
        "eventinfo": "optional"
    }

    """
    # Supersede MISP instance for this search
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:**MISP instance parameters as decibed in lookup/misp_instances.csv.''',
                           require=False)
    misp_url = Option(doc='''
        **Syntax:** **misp_url=***<MISP URL>*
        **Description:**URL of MISP instance.''',
                      require=False,
                      validate=validators.Match(
                          "misp_url",
                          r"^https?:\/\/[0-9a-zA-Z\-\.]+(?:\:\d+)?$"))
    misp_key = Option(doc='''
        **Syntax:** **misp_key=***<AUTH_KEY>*
        **Description:**MISP API AUTH KEY.''',
                      require=False,
                      validate=validators.Match("misp_key",
                                                r"^[0-9a-zA-Z]{40}$"))
    misp_verifycert = Option(doc='''
        **Syntax:** **misp_verifycert=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Verify or not MISP certificate.''',
                             require=False,
                             validate=validators.Boolean())
    # mode: p - give parameters one by one / j provide a complete JSON string
    # default is mode=p
    mode = Option(doc='''
        **Syntax:** **mode=***p|j<AUTH_KEY>*
        **Description:**mode to build the JSON request.''',
                  require=False,
                  validate=validators.Match("mode", r"^(p|j)$"))
    # if mode=j a complete JSON request has to be provided
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)
    # specific formats
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:**publication duration in day(s), hour(s) or minute(s).''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    date_from = Option(doc='''
        **Syntax:** **date_from=***date_string"*
        **Description:**starting date.''',
                       require=False)
    date_to = Option(doc='''
        **Syntax:** **date_to=***date_string"*
        **Description:**(optional)ending date in searches with date_from. if not set default is now''',
                     require=False)
    threat_level_id = Option(doc='''
        **Syntax:** **threat_level_id=***1-4*
        **Description:**Threat level.''',
                             require=False,
                             validate=validators.Match("threat_level_id",
                                                       r"^[1-4]$"))
    org = Option(doc='''
        **Syntax:** **org=***CSV string*
        **Description:**Comma(,)-separated string of org name(s), id(s), uuid(s).''',
                 require=False)
    # CSV numeric list
    eventid = Option(doc='''
        **Syntax:** **eventid=***id1(,id2,...)*
        **Description:**list of event ID(s).''',
                     require=False,
                     validate=validators.Match("eventid", r"^[0-9,]+$"))
    # strings
    value = Option(doc='''
        **Syntax:** **value=***string*
        **Description:**value.''',
                   require=False)
    eventinfo = Option(doc='''
        **Syntax:** **eventinfo=***string*
        **Description:**eventinfo string''',
                       require=False)
    # numeric values
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search; default 10000. 0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    # BUGFIX: this validator was named "limit" (copy-paste), so validation
    # errors for page= reported the wrong option name.
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page of result to get.''',
                  require=False,
                  validate=validators.Match("page", r"^[0-9]+$"))
    # CSV strings
    uuid = Option(doc='''
        **Syntax:** **uuid=***id1(,id2,...)*
        **Description:**list of event UUID(s).''',
                  require=False)
    type = Option(doc='''
        **Syntax:** **type=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for. Wildcard is %.''',
                  require=False)
    category = Option(doc='''
        **Syntax:** **category=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for. Wildcard is %.''',
                      require=False)
    tags = Option(doc='''
        **Syntax:** **tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to search for. Wildcard is %.''',
                  require=False)
    not_tags = Option(doc='''
        **Syntax:** **not_tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to exclude from results. Wildcard is %.''',
                      require=False)
    # Booleans
    to_ids = Option(doc='''
        **Syntax:** **to_ids=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to search only attributes with the flag "to_ids" set to true.''',
                    require=False,
                    validate=validators.Boolean())
    enforceWarninglist = Option(doc='''
        **Syntax:** **enforceWarninglist=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to apply warning lists to results.''',
                                require=False,
                                validate=validators.Boolean())
    deleted = Option(doc='''
        **Syntax:** **deleted=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include deleted attributes to results.''',
                     require=False,
                     validate=validators.Boolean())
    includeEventUuid = Option(doc='''
        **Syntax:** **includeEventUuid=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include event UUID(s) to results.''',
                              require=False,
                              validate=validators.Boolean())
    includeEventTags = Option(doc='''
        **Syntax:** **includeEventTags=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include event UUID(s) to results.''',
                              require=False,
                              validate=validators.Boolean())
    pipesplit = Option(doc='''
        **Syntax:** **pipesplit=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to split multivalue attributes into 2 attributes.''',
                       require=False,
                       validate=validators.Boolean())

    @Configuration()
    def map(self, records):
        # Pass events straight through; all work happens in reduce().
        return records

    def reduce(self, records):
        """Query MISP /attributes/restSearch (paginated) and yield one
        flattened result dict per attribute, with one misp_<type> column
        per attribute type seen across the results."""

        # Phase 1: Preparation — resolve instance config and endpoint URL.
        my_args = prepare_config(self)
        my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'

        # mode=j with a json_request means the caller supplies the whole body.
        jsonmode = False
        if self.mode is not None:
            if 'j' in self.mode and self.json_request is not None:
                jsonmode = True

        if jsonmode is True:
            pagination = True
            other_page = True
            body_dict = json.loads(self.json_request)
            logging.info('Option "json_request" set with %s',
                         self.json_request)
            # Forced parameters, regardless of what the caller provided.
            body_dict['returnFormat'] = 'json'
            body_dict['withAttachments'] = False
            if 'limit' in body_dict:
                limit = int(body_dict['limit'])
                if limit == 0:
                    pagination = False
            else:
                limit = 10000

            if 'page' in body_dict:
                page = body_dict['page']
            else:
                page = 1
            attr_count = 0
        else:
            # build search JSON object from individual options
            body_dict = {"returnFormat": "json", "withAttachments": False}

            # add provided parameters to JSON request body
            # specific formats
            if self.last is not None:
                body_dict['last'] = self.last
                logging.info('Option "last" set with %s', body_dict['last'])

            if self.date_from is not None:
                body_dict['from'] = self.date_from
                logging.info('Option "date_from" set with %s',
                             body_dict['from'])
                if self.date_to is not None:
                    body_dict['to'] = self.date_to
                    logging.info('Option "date_to" set with %s',
                                 body_dict['to'])
                else:
                    logging.info('Option "date_to" will be set to now().')

            if self.threat_level_id is not None:
                body_dict['threat_level_id'] = self.threat_level_id
                logging.info('Option "threat_level_id" set with %s',
                             body_dict['threat_level_id'])

            if self.org is not None:
                body_dict['org'] = self.org
                logging.info('Option "org" set with %s', body_dict['org'])

            if self.eventid:
                # CSV event lists become an OR criteria object.
                if "," in self.eventid:
                    event_criteria = {}
                    event_list = self.eventid.split(",")
                    event_criteria['OR'] = event_list
                    body_dict['eventid'] = event_criteria
                else:
                    body_dict['eventid'] = self.eventid
                logging.info('Option "eventid" set with %s',
                             body_dict['eventid'])

            if self.value is not None:
                body_dict['value'] = self.value
                logging.info('Option "value" set with %s', body_dict['value'])

            if self.eventinfo is not None:
                body_dict['eventinfo'] = self.eventinfo
                logging.info('Option "eventinfo" set with %s',
                             body_dict['eventinfo'])

            # CSV strings become OR (and NOT, for not_tags) criteria objects.
            if self.category is not None:
                cat_criteria = {}
                cat_list = self.category.split(",")
                cat_criteria['OR'] = cat_list
                body_dict['category'] = cat_criteria
            if self.type is not None:
                type_criteria = {}
                type_list = self.type.split(",")
                type_criteria['OR'] = type_list
                body_dict['type'] = type_criteria
            if self.tags is not None or self.not_tags is not None:
                tags_criteria = {}
                if self.tags is not None:
                    tags_list = self.tags.split(",")
                    tags_criteria['OR'] = tags_list
                if self.not_tags is not None:
                    tags_list = self.not_tags.split(",")
                    tags_criteria['NOT'] = tags_list
                body_dict['tags'] = tags_criteria
            if self.uuid is not None:
                uuid_criteria = {}
                uuid_list = self.uuid.split(",")
                uuid_criteria['OR'] = uuid_list
                body_dict['uuid'] = uuid_criteria

            # Booleans
            if self.to_ids is not None:
                body_dict['to_ids'] = self.to_ids
                logging.info('Option "to_ids" set with %s',
                             body_dict['to_ids'])

            if self.enforceWarninglist is not None:
                body_dict['enforceWarninglist'] = self.enforceWarninglist
                logging.info('Option "enforceWarninglist" set with %s',
                             body_dict['enforceWarninglist'])

            if self.deleted is not None:
                body_dict['deleted'] = self.deleted
                logging.info('Option "deleted" set with %s',
                             body_dict['deleted'])

            if self.includeEventUuid is not None:
                body_dict['includeEventUuid'] = self.includeEventUuid
                logging.info('Option "includeEventUuid" set with %s',
                             body_dict['includeEventUuid'])

            if self.includeEventTags is not None:
                body_dict['includeEventTags'] = self.includeEventTags
                logging.info('Option "includeEventTags" set with %s',
                             body_dict['includeEventTags'])
            # Search pagination; limit=0 disables it entirely.
            pagination = True
            other_page = True
            if self.page:
                page = self.page
            else:
                page = 1
            attr_count = 0
            if self.limit is not None:
                if int(self.limit) == 0:
                    pagination = False
                else:
                    limit = int(self.limit)
            else:
                limit = 10000

        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        results = []
        # Phase 2: fetch pages until a short page (or single shot if
        # pagination is disabled).
        while other_page:
            if pagination:
                body_dict['page'] = page
                body_dict['limit'] = limit

            body = json.dumps(body_dict)
            # BUGFIX: this informational message was logged at error level.
            logging.info('MISP REST API REQUEST: %s', body)
            # search
            r = requests.post(my_args['misp_url'],
                              headers=headers,
                              data=body,
                              verify=my_args['misp_verifycert'],
                              cert=my_args['client_cert_full_path'],
                              proxies=my_args['proxies'])
            # check if status is anything other than 200; throw an exception if it is
            r.raise_for_status()
            # response is 200 by this point or we would have thrown an exception
            response = r.json()
            if 'response' in response:
                if 'Attribute' in response['response']:
                    attr_count = len(response['response']['Attribute'])
                    for a in response['response']['Attribute']:
                        v = {}
                        v['misp_Object'] = "-"
                        if self.includeEventTags is True:
                            v['misp_tag'] = "-"
                        for ak, av in a.items():
                            if ak == 'Event':
                                # Flatten event metadata into misp_event_* keys.
                                json_event = a['Event']
                                for ek, ev in json_event.items():
                                    key = 'misp_event_' + ek
                                    v[key] = str(ev)
                            elif ak == 'Tag':
                                tag_list = []
                                for tag in a['Tag']:
                                    try:
                                        tag_list.append(str(tag['name']))
                                    except Exception:
                                        pass
                                v['misp_tag'] = tag_list
                            else:
                                vkey = 'misp_' + ak
                                v[vkey] = av
                        results.append(v)

            if pagination:
                # A page shorter than the limit is the last one.
                if attr_count < limit:
                    other_page = False
                else:
                    page = page + 1
            else:
                other_page = False

        # Phase 3: add columns for each attribute type seen in results.
        typelist = []
        for r in results:
            if r['misp_type'] not in typelist:
                typelist.append(r['misp_type'])

        output_dict = {}
        increment = 1
        for r in results:
            # Key each row by event id plus a running counter for uniqueness.
            key = str(r['misp_event_id']) + '_' + str(increment)
            increment = increment + 1
            v = r
            for t in typelist:
                misp_t = 'misp_' + t.replace('-', '_').replace('|', '_p_')
                if t == r['misp_type']:
                    v[misp_t] = r['misp_value']
                else:
                    v[misp_t] = ''
            output_dict[key] = v

        for k, v in output_dict.items():
            yield v
Example #22
0
class BlaggertCommand(StreamingCommand):
    """Forward each record to a Splunk HTTP Event Collector (HEC) endpoint
    and annotate the record with the outcome in `blaggert_says`."""

    opt_token = Option(doc='''
        **Syntax:** **token=***<fieldname>*
        **Description:** HEC token to use.
        **Default:** None''',
                       name='token',
                       require=True,
                       validate=validators.Fieldname())

    opt_server = Option(doc='''
        **Syntax:** **server=***<fieldname>*
        **Description:** Server to send the payload to.
        **Default:** localhost''',
                        name='server',
                        require=False,
                        default='localhost',
                        validate=validators.Fieldname())

    opt_port = Option(doc='''
        **Syntax:** **port=***<fieldname>*
        **Description:** HEC Port, not fortified red wine.
        **Default:** 8088''',
                      name='port',
                      require=False,
                      default=8088,
                      validate=validators.Integer())

    def __init__(self):
        super(BlaggertCommand, self).__init__()

    def prepare(self):
        return

    def stream(self, records):
        """Wrap each record in an HEC event payload, POST it, and yield the
        record with a success/failure annotation."""
        url = "https://{}:{}/services/collector/event".format(
            self.opt_server, self.opt_port)
        headers = {"Authorization": "Splunk {}".format(self.opt_token)}
        for record in records:
            self.logger.info('Record {0}'.format(record))

            # Every payload gets a fresh event_id for traceability.
            payload = {"event": {"event_id": str(uuid.uuid4())}}
            # dict.iteritems() was removed in Python 3; use items().
            for k, v in record.items():
                payload["event"][k] = v

            payload_str = json.dumps(payload)
            # BUGFIX: the format string had only one placeholder, so the
            # payload argument was silently dropped from the log line.
            self.logger.info('send to HEC url={} - payload={}'.format(
                url, payload_str))
            try:
                # NOTE(review): verify=False disables TLS certificate
                # validation — presumably intended for a local HEC endpoint;
                # confirm before using against a remote collector.
                res = requests.post(url,
                                    data=payload_str,
                                    headers=headers,
                                    verify=False)
                res.raise_for_status()
                self.logger.info("Sweet as {} {}".format(
                    res.status_code, res.text))
                record["blaggert_says"] = "Done it"
            except Exception as e:
                self.logger.error('Send HEC Caught exception: {}'.format(e))
                record["blaggert_says"] = "Buggered it {}".format(e)

            yield record
Example #23
0
class GNQuickCommand(EventingCommand):
    """
    Return GreyNoise noise status for IP addresses.

    This command can be used as generating command as well as transforming command:
    when used as generating command (``ip`` option), it returns the noise status of
    the given IP addresses; when used as transforming command (``ip_field`` option),
    it adds the noise status information to the events that are returned from the
    Splunk search.  Exactly one of the two options must be supplied.

    Data pulled from /v2/noise/multi/quick?ips=<ip_address1>,<ip_address2> using the
    GreyNoise Python SDK (:method:`quick`).

    **Syntax**::
    `| gnquick ip="10.0.1.254"`
    `| gnquick ip="1.2.3.4,8.8.8.8"`
    `index=_internal | gnquick ip_field="ip"`
    """

    # Generating-mode option: comma-separated literal IP addresses.
    ip = Option(
        doc='''**Syntax:** **ip=***<ip_address>*
        **Description:** IP address(es) for which noise status needs to be retrieved from GreyNoise''',
        name='ip', require=False
    )

    # Transforming-mode option: name of the event field holding the IP.
    ip_field = Option(
        doc='''
        **Syntax:** **ip_field=***<ip_field>*
        **Description:** Name of the field representing IP address in Splunk events''',
        name='ip_field', require=False
    )

    def transform(self, records):
        """Generate or enrich events with GreyNoise noise status.

        Dispatches on which of ``ip`` / ``ip_field`` was supplied; errors out
        when both or neither are given.
        """
        ip_addresses = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 1000
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        # The two modes are mutually exclusive.
        if ip_addresses and ip_field:
            logger.error("Please use parameter ip to work gnquick as generating command or use parameter ip_field to work gnquick as transforming command.")
            self.write_error("Please use parameter ip to work gnquick as generating command or use parameter ip_field to work gnquick as transforming command")
            exit(1)

        # Retrieve the API key; any failure is surfaced to the user and aborts.
        try:
            message = ''
            api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error("Error occured while retrieving API key, Error: {}".format(message))
            exit(1)

        if ip_addresses and not ip_field:
            # This piece of code will work as generating command and will not use the Splunk events.
            # Splitting the ip_addresses by commas and stripping spaces from both the sides for each IP address
            ip_addresses = [ip.strip() for ip in ip_addresses.split(',')]

            logger.info("Started retrieving results")
            try:
                logger.debug("Initiating to fetch noise status for IP address(es): {}".format(str(ip_addresses)))
                # Opting for a 120 second timeout on the API requests.
                api_client = GreyNoise(api_key=api_key, timeout=120, integration_name="Splunk")
                noise_status = api_client.quick(ip_addresses)
                logger.info("Retrieved results successfully")

                # Process the API response and send the noise status information of each IP
                # (with field extractions) to Splunk.  Only fields extracted from the first
                # event generated by a custom command are extracted for all events, so this
                # flag marks the first record specially.
                first_record_flag = True

                # Flag to indicate whether erroneous IPs are present
                erroneous_ip_present = False
                for ip in ip_addresses:
                    for sample in noise_status:
                        if ip == sample['ip']:
                            yield event_generator.make_valid_event('quick', sample, first_record_flag)
                            if first_record_flag:
                                first_record_flag = False
                            logger.debug("Fetched noise status for ip={} from GreyNoise API".format(str(ip)))
                            break
                    else:
                        # for/else: no API sample matched this IP (no break fired),
                        # so emit a manual "invalid IP" event instead.
                        erroneous_ip_present = True
                        logger.debug("Generating noise status for ip={} manually".format(str(ip)))
                        event = {
                            'ip': ip,
                            'error': 'IP address doesn\'t match the valid IP format'
                        }
                        yield event_generator.make_invalid_event('quick', event, first_record_flag)

                        if first_record_flag:
                            first_record_flag = False

                if erroneous_ip_present:
                   logger.warn("Value of one or more IP address(es) is invalid")
                   self.write_warning("Value of one or more IP address(es) passed to {command_name} is invalid".format(command_name=str(self._metadata.searchinfo.command)))

            except RateLimitError:
                logger.error("Rate limit error occured while fetching the context information for ips={}".format(str(ip_addresses)))
                self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Need to handle this, as splunklib is unable to handle the exception with (400, {'error': 'error_reason'}) format
                    msg = "The API call to the GreyNoise platform have been failed with status_code: {} and error: {}".format(response_code, response_message['error'] if isinstance(response_message, dict) else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error("Error while connecting to the Server. Please check your connection and try again.")
                self.write_error("Error while connecting to the Server. Please check your connection and try again.")
            except RequestException:
                logger.error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
                self.write_error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
            except Exception as e:
                logger.error("Exception: {} ".format(str(traceback.format_exc())))
                self.write_error("Exception occured while fetching the noise status of the IP address(es). See greynoise_main.log for more details.")

        elif ip_field:
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:

                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    api_key_validation, message = utility.validate_api_key(api_key, logger)
                    logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                    # This piece of code will work as transforming command and will use the Splunk ingested events and field which is specified in ip_field.
                    chunk_dict = event_generator.batch(records, ip_field, EVENTS_PER_CHUNK, logger)

                    # This means there are only 1000 or below IPs to call in the entire bunch of records.
                    # Use a single thread with the caching mechanism enabled for the chunk.
                    if len(chunk_dict) == 1:
                        logger.info("Less then 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API...")
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(api_key=api_key, timeout=120, use_cache=USE_CACHE, integration_name="Splunk")

                    # When no records found, batch will return {0:([],[])}
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(api_client, 'multi', ip_field, chunk_dict, logger, threads=THREADS):
                            yield event
                    else:
                        logger.info("No events found, please increase the search timespan to have more search results.")
                except Exception as e:
                    logger.info("Exception occured while adding the noise status to the events, Error: {}".format(traceback.format_exc()))
                    self.write_error("Exception occured while adding the noise status of the IP addresses to events. See greynoise_main.log for more details.")

        else:
            # Neither option supplied.
            logger.error("Please specify exactly one parameter from ip and ip_field with some value.")
            self.write_error("Please specify exactly one parameter from ip and ip_field with some value.")

    def __init__(self):
        """Initialize the command by delegating to the splunklib base class."""
        super(GNQuickCommand, self).__init__()
Example #24
0
class GNFilterCommand(EventingCommand):
    """
    gnfilter - Transforming Command.

    Transforming command that returns events having noisy/not noisy IP addresses
    as specified with the noise_events parameter, defaults to true.
    Data pulled from: /v2/noise/multi/quick

    **Syntax**::
    `index=firewall | gnfilter ip_field="ip" noise_events="false"`

    **Description**::
    The `gnfilter` command returns the events having noisy/not noisy IP addresses represented by `ip_field` parameter
    using method :method:`quick` from GreyNoise Python SDK.
    """

    # Name of the event field holding the IP address to filter on.
    ip_field = Option(
        doc='''
        **Syntax:** **ip_field=***<ip_field>*
        **Description:** Name of the field representing IP address in Splunk events''',
        name='ip_field', require=True
    )

    # Whether to keep noisy (true) or non-noisy (false) events.
    noise_events = Option(
        doc='''
        **Syntax:** **noise_events=***<true/false>*
        **Description:** Flag specifying whether to return events having noisy IP or
        events having non-noisy IP addresses''',
        name='noise_events', require=False, default="True"
    )

    # Class-level flag so the API key is validated only once per search
    # (transform may be invoked multiple times).
    api_validation_flag = False

    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        method = 'filter'

        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1000
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field
            noise_events = self.noise_events

            logger.info("Started filtering the IP address(es) present in field: {}, with noise_status: {}".format(
                str(ip_field), str(noise_events)))

            try:
                # Strip surrounding whitespace from the parameter values if given.
                if ip_field:
                    ip_field = ip_field.strip()
                if noise_events:
                    noise_events = noise_events.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    noise_events = validator.Boolean(option_name='noise_events').validate(noise_events)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                # Retrieve the API key; any failure is surfaced to the user and aborts.
                try:
                    message = ''
                    api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error("Error occured while retrieving API key, Error: {}".format(message))
                    exit(1)

                # API key validation (performed at most once, see api_validation_flag)
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(api_key, logger)
                    logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records, ip_field, EVENTS_PER_CHUNK, logger)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or below IPs to call in the entire bunch of records.
                # Use a single thread with the caching mechanism enabled for the chunk.
                if len(chunk_dict) == 1:
                    logger.info(
                        "Less then 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API...")
                    THREADS = 1
                    USE_CACHE = True

                # Opting for a 120 second timeout on the API requests.
                api_client = GreyNoise(api_key=api_key, timeout=120,
                                       use_cache=USE_CACHE, integration_name=INTEGRATION_NAME)

                # When no records found, batch will return {0:([],[])}
                if len(list(chunk_dict.values())[0][0]) >= 1:
                    for chunk_index, result in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key, api_client, method, ip_field, chunk_dict, logger,
                            threads=THREADS):
                        # Pass the collected data to the event filter method
                        for event in event_filter(
                                chunk_index, result, chunk_dict[chunk_index], ip_field, noise_events, method):
                            yield event

                        # Deleting the chunk with the events that are already indexed
                        del chunk_dict[chunk_index]

                    logger.info("Successfully sent all the results to the Splunk")
                else:
                    logger.info("No events found, please increase the search timespan to have more search results.")

            except Exception:
                logger.info("Exception occured while filtering events, Error: {}".format(traceback.format_exc()))
                self.write_error("Exception occured while filtering the events based on noise status. "
                                 "See greynoise_main.log for more details.")

    def __init__(self):
        """Initialize custom command class."""
        super(GNFilterCommand, self).__init__()
class ListAlertsCommand(GeneratingCommand):
    """Generating command that streams alerts from the alert collection.

    Optional filters (``status``, ``type``, ``severity``, ``analyst``) are
    comma-separated lists; the search time range bounds the alerts returned.
    Yields one event per matching alert.
    """

    data = Option(doc='''
        **Syntax:** **data=***<field>*
        **Description:** Field name that will receive the alert data in json format''',
                  require=False,
                  validate=validators.Fieldname())
    data_prefix = Option(doc='''
        **Syntax:** **data_prefix=***<string>*
        **Description:** Prefix that will be inserted before the data fields. Each data field will appear as a separate field.''',
                         require=False)
    json_field = Option(doc='''
        **syntax:** **raw=***<field>*
        **description:** field name that will receive the entire record as a json object.''',
                        require=False,
                        name='json',
                        validate=validators.Fieldname())
    status = Option(doc='''
        **syntax:** **status=***<comma_separated_list_of_status>*
        **description:** Only selects alerts with the provided statuses''',
                    require=False)
    type = Option(doc='''
        **syntax:** **type=***<comma_separated_list_of_types>*
        **description:** Only selects alerts with the provided types''',
                  require=False)
    severity = Option(doc='''
        **syntax:** **severity=***<comma_separated_list_of_severity>*
        **description:** Only selects alerts with the provided severity''',
                      require=False)
    analyst = Option(doc='''
        **syntax:** **analyst=***<comma_separated_list_of_analyst>*
        **description:** Only selects alerts with the provided analysts''',
                     require=False)
    # Lazily-created AlertCollection handle (see generate()).
    alerts = None

    @staticmethod
    def _csv_list(value):
        """Split a comma-separated option value into a list ([] when unset)."""
        return value.split(',') if value else []

    def generate(self):
        """Yield one event per alert matching the configured filters."""
        self.logger.info('ListAlertsCommand: %s', self)
        if not self.alerts:
            # Created on first use so option parsing happens first.
            self.alerts = AlertCollection(
                self._metadata.searchinfo.session_key)

        # A time bound of 0 means "all time"; pass None to leave it unbounded.
        if self._metadata.searchinfo.earliest_time != 0:
            earliest_time = self._metadata.searchinfo.earliest_time
        else:
            earliest_time = None
        if self._metadata.searchinfo.latest_time != 0:
            latest_time = self._metadata.searchinfo.latest_time
        else:
            latest_time = None

        for record in self.alerts.list(status=self._csv_list(self.status),
                                       type=self._csv_list(self.type),
                                       severity=self._csv_list(self.severity),
                                       analyst=self._csv_list(self.analyst),
                                       earliest_time=earliest_time,
                                       latest_time=latest_time,
                                       logger=self.logger):
            event = {
                '_time': record['time'],
                'sourcetype': 'alerts',
                'type': record['type'],
                'severity': record.get('severity'),
                'entity': record['entity'],
                'kv_key': record['_key'],
                'analyst': record.get('analyst'),
                'status': record['status'],
                'sid': record['sid']
            }
            data = record['data']

            if self.data:
                event[self.data] = json.dumps(data)
            if self.data_prefix is not None:
                # items() (not Python-2-only iteritems()) for Py3 compatibility.
                for key, value in data.items():
                    event[self.data_prefix + key] = value
            if self.json_field:
                event[self.json_field] = json.dumps(record)

            yield event
Example #26
0
class MispSearchCommand(StreamingCommand):
    """
    search in MISP for attributes matching the value of field.

    ##Syntax

        code-block::
        mispsearch field=<field> to_ids=y|n

    ##Description

        body =  {
                    "returnFormat": "mandatory",
                    "page": "optional",
                    "limit": "optional",
                    "value": "optional",
                    "type": "optional",
                    "category": "optional",
                    "org": "optional",
                    "tags": "optional",
                    "from": "optional",
                    "to": "optional",
                    "last": "optional",
                    "eventid": "optional",
                    "withAttachments": "optional",
                    "uuid": "optional",
                    "publish_timestamp": "optional",
                    "timestamp": "optional",
                    "enforceWarninglist": "optional",
                    "to_ids": "optional",
                    "deleted": "optional",
                    "includeEventUuid": "optional",
                    "includeEventTags": "optional",
                    "event_timestamp": "optional",
                    "threat_level_id": "optional",
                    "eventinfo": "optional"
                }

    ##Example

    Search in MISP for value of fieldname r_ip (remote IP in proxy logs).

        code-block::
         * | mispsearch field=r_ip

    """

    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:**MISP instance parameters as \
        described in local/inputs.conf''',
                           require=True)
    field = Option(doc='''
        **Syntax:** **field=***<fieldname>*
        **Description:**Name of the field containing \
        the value to search for.''',
                   require=True,
                   validate=validators.Fieldname())
    to_ids = Option(doc='''
        **Syntax:** **to_ids=***<y|n>*
        **Description:** Boolean to search only attributes with to_ids set''',
                    require=False,
                    validate=validators.Boolean())
    includeEventUuid = Option(doc='''
        **Syntax:** **includeEventUuid=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include event UUID(s) to results.''',
                              require=False,
                              validate=validators.Boolean())
    includeEventTags = Option(doc='''
        **Syntax:** **includeEventTags=***y|Y|1|true|True|n|N|0|false|False*
        **Description:**Boolean to include event UUID(s) to results.''',
                              require=False,
                              validate=validators.Boolean())
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:**publication duration in day(s), hour(s) or minute(s). \
        **eventid**, **last** and **date_from** are mutually exclusive''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search; \
        default 1000. 0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
                  require=False,
                  # Fixed copy-paste bug: validator was named "limit", so a bad
                  # page value produced an error message about "limit".
                  validate=validators.Match("page", r"^[0-9]+$"))
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)

    def stream(self, records):
        """Enrich each record with MISP attributes matching the value of `field`.

        Builds one restSearch request body (from json_request or the individual
        options), then POSTs it once per record with the record's field value,
        appending misp_* multivalue fields to matching records.
        """
        # Generate args
        my_args = prepare_config(self, 'misp42splunk')
        my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        fieldname = str(self.field)
        # Pagination defaults: limit=1000, page=1; limit=0 disables pagination.
        pagination = True
        if self.limit is not None:
            if int(self.limit) == 0:
                pagination = False
            else:
                limit = int(self.limit)
        else:
            limit = 1000
        if self.page is not None:
            page = int(self.page)
        else:
            page = 1

        if self.json_request is not None:
            # A raw JSON request overrides the individual options (returnFormat
            # and withAttachments are still forced).
            body_dict = json.loads(self.json_request)
            logging.info('Option "json_request" set')
            body_dict['returnFormat'] = 'json'
            body_dict['withAttachments'] = False
            if 'limit' in body_dict:
                limit = int(body_dict['limit'])
                if limit == 0:
                    pagination = False
            if 'page' in body_dict:
                page = body_dict['page']
                pagination = False
        else:
            # build search JSON object
            body_dict = {"returnFormat": "json", "withAttachments": False}
            if self.to_ids is True:
                # NOTE(review): MISP API expects the string "True" here —
                # preserved from the original implementation.
                body_dict['to_ids'] = "True"
            if self.includeEventUuid is not None:
                body_dict['includeEventUuid'] = self.includeEventUuid
            if self.includeEventTags is not None:
                body_dict['includeEventTags'] = self.includeEventTags
            if self.last is not None:
                body_dict['last'] = self.last
        for record in records:
            if fieldname in record:
                value = record.get(fieldname, None)
                if value is not None:
                    body_dict['value'] = str(value)
                    # Deduplicated multivalue accumulators for the result fields.
                    misp_category = []
                    misp_event_id = []
                    misp_event_uuid = []
                    misp_orgc_id = []
                    misp_to_ids = []
                    misp_comment = []
                    misp_tag = []
                    misp_type = []
                    misp_value = []
                    misp_uuid = []
                    # search
                    if pagination is True:
                        body_dict['page'] = page
                        body_dict['limit'] = limit
                    body = json.dumps(body_dict)
                    logging.debug('mispsearch request body: %s', body)
                    r = requests.post(my_args['misp_url'],
                                      headers=headers,
                                      data=body,
                                      verify=my_args['misp_verifycert'],
                                      cert=my_args['client_cert_full_path'],
                                      proxies=my_args['proxies'])
                    # check if status is anything other than 200;
                    # throw an exception if it is
                    r.raise_for_status()
                    # response is 200 by this point or we would have
                    # thrown an exception
                    response = r.json()
                    if 'response' in response:
                        if 'Attribute' in response['response']:
                            for a in response['response']['Attribute']:
                                if str(a['type']) not in misp_type:
                                    misp_type.append(str(a['type']))
                                if str(a['value']) not in misp_value:
                                    misp_value.append(str(a['value']))
                                if str(a['to_ids']) not in misp_to_ids:
                                    misp_to_ids.append(str(a['to_ids']))
                                if str(a['comment']) not in misp_comment:
                                    misp_comment.append(str(a['comment']))
                                if str(a['category']) not in misp_category:
                                    misp_category.append(str(a['category']))
                                if str(a['uuid']) not in misp_uuid:
                                    misp_uuid.append(str(a['uuid']))
                                if str(a['event_id']) not in misp_event_id:
                                    misp_event_id.append(str(a['event_id']))
                                if 'Tag' in a:
                                    for tag in a['Tag']:
                                        if str(tag['name']) not in misp_tag:
                                            misp_tag.append(str(tag['name']))
                                if 'Event' in a:
                                    if a['Event']['uuid'] \
                                       not in misp_event_uuid:
                                        misp_event_uuid.append(
                                            str(a['Event']['uuid']))
                                    if a['Event']['orgc_id'] \
                                       not in misp_orgc_id:
                                        misp_orgc_id.append(
                                            str(a['Event']['orgc_id']))
                            record['misp_type'] = misp_type
                            record['misp_value'] = misp_value
                            record['misp_to_ids'] = misp_to_ids
                            record['misp_comment'] = misp_comment
                            record['misp_category'] = misp_category
                            record['misp_attribute_uuid'] = misp_uuid
                            record['misp_event_id'] = misp_event_id
                            record['misp_event_uuid'] = misp_event_uuid
                            record['misp_orgc_id'] = misp_orgc_id
                            record['misp_tag'] = misp_tag
            yield record
Example #27
0
class mispgetioc(ReportingCommand):
    """ get the attributes from a MISP instance.
    ##Syntax
    .. code-block::
        | mispgetioc misp_instance=<input> last=<int>(d|h|m)
        | mispgetioc misp_instance=<input> event=<id1>(,<id2>,...)
        | mispgetioc misp_instance=<input> date=<<YYYY-MM-DD>
                                           (date_to=<YYYY-MM-DD>)
    ##Description
    {
        "returnFormat": "mandatory",
        "page": "optional",
        "limit": "optional",
        "value": "optional",
        "type": "optional",
        "category": "optional",
        "org": "optional",
        "tags": "optional",
        "date": "optional",
        "last": "optional",
        "eventid": "optional",
        "withAttachments": "optional",
        "uuid": "optional",
        "publish_timestamp": "optional",
        "timestamp": "optional",
        "enforceWarninglist": "optional",
        "to_ids": "optional",
        "deleted": "optional",
        "includeEventUuid": "optional",
        "includeEventTags": "optional",
        "event_timestamp": "optional",
        "threat_level_id": "optional",
        "eventinfo": "optional",
        "includeProposals": "optional",
        "includeDecayScore": "optional",
        "includeFullModel": "optional",
        "decayingModel": "optional",
        "excludeDecayed": "optional",
        "score": "optional"
    }
    # status
        "returnFormat": forced to json,
        "page": param,
        "limit": param,
        "value": not managed,
        "type": param, CSV string,
        "category": param, CSV string,
        "org": not managed,
        "tags": param, see also not_tags
        "date": param,
        "last": param,
        "eventid": param,
        "withAttachments": forced to false,
        "uuid": not managed,
        "publish_timestamp": managed via param last
        "timestamp": not managed,
        "enforceWarninglist": param,
        "to_ids": param,
        "deleted": forced to False,
        "includeEventUuid": set to True,
        "includeEventTags": param,
        "event_timestamp":  not managed,
        "threat_level_id":  not managed,
        "eventinfo": not managed,
        "includeProposals": not managed
        "includeDecayScore": not managed,
        "includeFullModel": not managed,
        "decayingModel": not managed,
        "excludeDecayed": not managed,
        "score": not managed
    }
    """
    # MANDATORY MISP instance for this search
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:** MISP instance parameters
        as described in local/inputs.conf.''',
                           require=True)
    # MANDATORY: json_request XOR eventid XOR last XOR date
    json_request = Option(doc='''
        **Syntax:** **json_request=***valid JSON request*
        **Description:**Valid JSON request''',
                          require=False)
    eventid = Option(doc='''
        **Syntax:** **eventid=***id1(,id2,...)*
        **Description:**list of event ID(s) or event UUID(s).''',
                     require=False,
                     validate=validators.Match("eventid", r"^[0-9a-f,\-]+$"))
    last = Option(doc='''
        **Syntax:** **last=***<int>d|h|m*
        **Description:** publication duration in day(s), hour(s) or minute(s).
        **nota bene:** last is an alias of published_timestamp''',
                  require=False,
                  validate=validators.Match("last", r"^[0-9]+[hdm]$"))
    date = Option(doc='''
        **Syntax:** **date=***The user set event date field
         - any of valid time related filters"*
        **Description:**starting date.
         **eventid**, **last** and **date** are mutually exclusive''',
                  require=False)
    # Other params
    page = Option(doc='''
        **Syntax:** **page=***<int>*
        **Description:**define the page for each MISP search; default 1.''',
                  require=False,
                  validate=validators.Match("limit", r"^[0-9]+$"))
    limit = Option(doc='''
        **Syntax:** **limit=***<int>*
        **Description:**define the limit for each MISP search;
         default 1000. 0 = no pagination.''',
                   require=False,
                   validate=validators.Match("limit", r"^[0-9]+$"))
    type = Option(doc='''
        **Syntax:** **type=***CSV string*
        **Description:**Comma(,)-separated string of types to search for.
         Wildcard is %.''',
                  require=False)
    category = Option(doc='''
        **Syntax:** **category=***CSV string*
        **Description:**Comma(,)-separated string of categories to search for.
         Wildcard is %.''',
                      require=False)
    tags = Option(doc='''
        **Syntax:** **tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to search for.
         Wildcard is %.''',
                  require=False)
    not_tags = Option(doc='''
        **Syntax:** **not_tags=***CSV string*
        **Description:**Comma(,)-separated string of tags to exclude.
         Wildcard is %.''',
                      require=False)
    warning_list = Option(doc='''
        **Syntax:** **warning_list=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to filter out well known values.''',
                          require=False,
                          validate=validators.Boolean())
    to_ids = Option(doc='''
        **Syntax:** **to_ids=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to search only attributes with the flag
         "to_ids" set to true.''',
                    require=False,
                    validate=validators.Boolean())
    geteventtag = Option(doc='''
        **Syntax:** **geteventtag=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean includeEventTags. By default only
         attribute tag(s) are returned.''',
                         require=False,
                         validate=validators.Boolean())
    getuuid = Option(doc='''
        **Syntax:** **getuuid=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to return attribute UUID.''',
                     require=False,
                     validate=validators.Boolean())
    getorg = Option(doc='''
        **Syntax:** **getorg=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to return the ID of the organisation that
         created the event.''',
                    require=False,
                    validate=validators.Boolean())
    pipesplit = Option(doc='''
        **Syntax:** **pipesplit=***<1|y|Y|t|true|True|0|n|N|f|false|False>*
        **Description:**Boolean to split multivalue attributes.''',
                       require=False,
                       validate=validators.Boolean())
    add_description = Option(doc='''
        **Syntax:** **add_description=***<1|y|Y|t|true|True
        |0|n|N|f|false|False>*
        **Description:**Boolean to return misp_description.''',
                             require=False,
                             validate=validators.Boolean())

    @Configuration()
    def map(self, records):
        """Map phase: pass records through unchanged; all work is in reduce."""
        # self.logger.debug('mispgetioc.map')
        return records

    def reduce(self, records):
        """Query MISP /attributes/restSearch and yield one result per
        attribute (or per MISP object), with one ``misp_<type>`` column for
        each attribute type seen in the response.

        Exactly one of json_request, eventid, last or date must be set;
        the other options refine the query or the output columns.
        Raises on missing/conflicting selectors and on non-2xx HTTP status.
        """

        # Phase 1: Preparation
        my_args = prepare_config(self)
        my_args['misp_url'] = my_args['misp_url'] + '/attributes/restSearch'

        # check that exactly ONE of the mandatory selector fields is present
        mandatory_arg = sum([
            self.json_request is not None,
            bool(self.eventid),
            bool(self.last),
            bool(self.date),
        ])

        if mandatory_arg == 0:
            logging.error('Missing "json_request", "eventid", '
                          '"last" or "date" argument')
            raise Exception('Missing "json_request", "eventid", '
                            '"last" or "date" argument')
        elif mandatory_arg > 1:
            logging.error('Options "json_request", "eventid", "last" '
                          'and "date" are mutually exclusive')
            raise Exception('Options "json_request", "eventid", "last" '
                            'and "date" are mutually exclusive')

        body_dict = dict()
        # Only ONE selector was provided; build the base request body from it
        if self.json_request is not None:
            body_dict = json.loads(self.json_request)
            logging.info('Option "json_request" set')
        elif self.eventid:
            # a CSV list of event ids becomes an OR criteria
            if "," in self.eventid:
                event_criteria = {}
                event_list = self.eventid.split(",")
                event_criteria['OR'] = event_list
                body_dict['eventid'] = event_criteria
            else:
                body_dict['eventid'] = self.eventid
            logging.info('Option "eventid" set with %s',
                         json.dumps(body_dict['eventid']))
        elif self.last:
            body_dict['last'] = self.last
            logging.info('Option "last" set with %s', str(body_dict['last']))
        else:
            body_dict['date'] = self.date.split()
            logging.info('Option "date" set with %s',
                         json.dumps(body_dict['date']))

        # Force some values on JSON request
        body_dict['returnFormat'] = 'json'
        body_dict['withAttachments'] = False
        body_dict['deleted'] = False
        body_dict['includeEventUuid'] = True
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        # Search pagination; limit=0 disables pagination entirely
        pagination = True
        if self.limit is not None:
            limit = int(self.limit)
        elif 'limit' in body_dict:
            limit = int(body_dict['limit'])
        else:
            limit = 1000
        if limit == 0:
            pagination = False
        if self.page is not None:
            page = int(self.page)
        elif 'page' in body_dict:
            page = body_dict['page']
        else:
            page = 1

        # Search parameters: boolean and filter
        if self.to_ids is True:
            body_dict['to_ids'] = True
            # NOTE(review): MISP's documented key is "enforceWarninglist";
            # "warning_list" may be ignored server-side — confirm against
            # the MISP REST API before relying on this protection.
            body_dict['warning_list'] = True  # protection
        elif self.to_ids is False:
            body_dict['to_ids'] = False
        if self.warning_list is True:
            body_dict['warning_list'] = True
        elif self.warning_list is False:
            body_dict['warning_list'] = False
        if self.geteventtag is True:
            body_dict['includeEventTags'] = True
        # category/type accept a CSV string, translated to an OR criteria
        if self.category is not None:
            if "," in self.category:
                cat_criteria = {}
                cat_list = self.category.split(",")
                cat_criteria['OR'] = cat_list
                body_dict['category'] = cat_criteria
            else:
                body_dict['category'] = self.category
        if self.type is not None:
            if "," in self.type:
                type_criteria = {}
                type_list = self.type.split(",")
                type_criteria['OR'] = type_list
                body_dict['type'] = type_criteria
            else:
                body_dict['type'] = self.type
        if self.tags is not None or self.not_tags is not None:
            tags_criteria = {}
            if self.tags is not None:
                tags_list = self.tags.split(",")
                tags_criteria['OR'] = tags_list
            if self.not_tags is not None:
                tags_list = self.not_tags.split(",")
                tags_criteria['NOT'] = tags_list
            body_dict['tags'] = tags_criteria

        # output filter parameters (which optional columns to emit);
        # validators.Boolean() yields True/False/None, so compare to True
        my_args['getuuid'] = self.getuuid is True
        my_args['getorg'] = self.getorg is True
        my_args['pipe'] = self.pipesplit is True
        my_args['add_desc'] = self.add_description is True

        results = []
        # add colums for each type in results
        typelist = []

        if pagination is True:
            body_dict['page'] = page
            body_dict['limit'] = limit

        body = json.dumps(body_dict)
        logging.debug('mispgetioc request body: %s', body)
        # search
        r = requests.post(my_args['misp_url'],
                          headers=headers,
                          data=body,
                          verify=my_args['misp_verifycert'],
                          cert=my_args['client_cert_full_path'],
                          proxies=my_args['proxies'])
        # check if status is anything other than 200;
        # throw an exception if it is
        r.raise_for_status()
        # response is 200 by this point or we would have thrown an exception
        response = r.json()
        if 'response' in response:
            if 'Attribute' in response['response']:
                for a in response['response']['Attribute']:
                    # flatten one MISP attribute into a candidate result row
                    v = {}
                    v['misp_category'] = str(a['category'])
                    v['misp_attribute_id'] = str(a['id'])
                    v['misp_event_id'] = str(a['event_id'])
                    v['misp_timestamp'] = str(a['timestamp'])
                    v['misp_to_ids'] = str(a['to_ids'])
                    v['misp_comment'] = str(a['comment'])
                    tag_list = []
                    if 'Tag' in a:
                        for tag in a['Tag']:
                            try:
                                tag_list.append(str(tag['name']))
                            except Exception:
                                pass
                    v['misp_tag'] = tag_list
                    # include ID of the organisation that
                    # created the attribute if requested
                    if 'Event' in a:
                        v['misp_event_uuid'] = str(a['Event']['uuid'])
                        if my_args['getorg']:
                            v['misp_orgc_id'] = str(a['Event']['orgc_id'])
                        if my_args['add_desc'] is True:
                            v['misp_event_info'] = str(a['Event']['info'])
                    # include attribute UUID if requested
                    if my_args['getuuid']:
                        v['misp_attribute_uuid'] = str(a['uuid'])
                    # handle object and multivalue attributes
                    v['misp_object_id'] = str(a['object_id'])
                    if my_args['add_desc'] is True:
                        if int(a['object_id']) == 0:
                            v['misp_description'] = 'MISP e' \
                                + str(a['event_id']) + ' attribute ' \
                                + str(a['uuid']) + ' of type "' \
                                + str(a['type']) \
                                + '" in category "' + str(a['category']) \
                                + '" (to_ids:' + str(a['to_ids']) + ')'
                        else:
                            v['misp_description'] = 'MISP e' \
                                + str(a['event_id']) + ' attribute ' \
                                + str(a['uuid']) + ' of type "' \
                                + str(a['type']) + '" in category "' \
                                + str(a['category']) \
                                + '" (to_ids:' + str(a['to_ids']) \
                                + ' - o' + str(a['object_id']) + ' )'
                    current_type = str(a['type'])
                    # combined: not part of an object
                    # AND multivalue attribute AND to be split
                    if int(a['object_id']) == 0 and '|' in current_type \
                       and my_args['pipe'] is True:
                        # split 'type1|type2' / 'value1|value2' into two rows
                        # sharing the same attribute id (merged in phase 2);
                        # note pop() takes elements from the END of the lists
                        mv_type_list = current_type.split('|')
                        mv_value_list = str(a['value']).split('|')
                        left_v = v.copy()
                        left_v['misp_type'] = mv_type_list.pop()
                        left_v['misp_value'] = mv_value_list.pop()
                        results.append(left_v)
                        if left_v['misp_type'] not in typelist:
                            typelist.append(left_v['misp_type'])
                        right_v = v.copy()
                        right_v['misp_type'] = mv_type_list.pop()
                        right_v['misp_value'] = mv_value_list.pop()
                        results.append(right_v)
                        if right_v['misp_type'] not in typelist:
                            typelist.append(right_v['misp_type'])
                    else:
                        v['misp_type'] = current_type
                        v['misp_value'] = str(a['value'])
                        results.append(v)
                        if current_type not in typelist:
                            typelist.append(current_type)

        logging.info(json.dumps(typelist))

        # Phase 2: group rows by attribute (or by object) and pivot the
        # (type, value) pairs into one misp_<type> column per type seen
        output_dict = {}
        # relevant_cat = ['Artifacts dropped', 'Financial fraud',
        # 'Network activity','Payload delivery','Payload installation']
        for r in results:
            if int(r['misp_object_id']) == 0:  # not an object
                key = str(r['misp_event_id']) + '_' + r['misp_attribute_id']
                is_object_member = False
            else:  # this is a  MISP object
                key = str(r['misp_event_id']) \
                    + '_object_' + str(r['misp_object_id'])
                is_object_member = True
            if key not in output_dict:
                # first row for this attribute/object: create all type
                # columns, filling only the one matching this row's type
                v = dict(r)
                for t in typelist:
                    misp_t = 'misp_' + t.replace('-', '_').replace('|', '_p_')
                    if t == r['misp_type']:
                        v[misp_t] = r['misp_value']
                    else:
                        v[misp_t] = ''
                to_ids = []
                to_ids.append(r['misp_to_ids'])
                v['misp_to_ids'] = to_ids
                category = []
                category.append(r['misp_category'])
                v['misp_category'] = category
                if my_args['add_desc'] is True:
                    description = []
                    description.append(r['misp_description'])
                    v['misp_description'] = description
                if my_args['getuuid'] is True:
                    attribute_uuid = []
                    attribute_uuid.append(r['misp_attribute_uuid'])
                    v['misp_attribute_uuid'] = attribute_uuid
                if is_object_member is True:
                    v['misp_type'] = 'misp_object'
                    v['misp_value'] = r['misp_object_id']
                output_dict[key] = dict(v)
            else:
                # subsequent row for the same key: merge into existing entry
                v = dict(output_dict[key])
                # build the column name the same way as from typelist above
                # ('-' -> '_' AND '|' -> '_p_'), so composite types land in
                # the column that was actually created
                misp_t = 'misp_' \
                    + r['misp_type'].replace('-', '_').replace('|', '_p_')
                v[misp_t] = r['misp_value']  # set value for relevant type
                to_ids = v['misp_to_ids']
                if r['misp_to_ids'] not in to_ids:
                    to_ids.append(r['misp_to_ids'])
                    v['misp_to_ids'] = to_ids
                category = v['misp_category']
                if r['misp_category'] not in category:  # append category
                    category.append(r['misp_category'])
                    v['misp_category'] = category
                if my_args['add_desc'] is True:
                    description = v['misp_description']
                    if r['misp_description'] not in description:
                        description.append(r['misp_description'])
                    v['misp_description'] = description
                if my_args['getuuid'] is True:
                    attribute_uuid = v['misp_attribute_uuid']
                    if r['misp_attribute_uuid'] not in attribute_uuid:
                        attribute_uuid.append(r['misp_attribute_uuid'])
                    v['misp_attribute_uuid'] = attribute_uuid
                if is_object_member is False:
                    # re-assemble the composite type/value for display
                    misp_type = r['misp_type'] + '|' + v['misp_type']
                    v['misp_type'] = misp_type
                    misp_value = r['misp_value'] + '|' + v['misp_value']
                    v['misp_value'] = misp_value
                output_dict[key] = dict(v)

        for v in output_dict.values():
            yield v
# Example #28
# 0
class ip2intCommand(StreamingCommand):
    """ Converts IPv4 to integer

    ##Syntax

    .. code-block::
        ip2int fields="<ipv4_field>[,<ipv4_field>]*" [destfield=<new_field>]

    ##Parameters:
    fields = Comma seperated list of fields containing IPv4 addresses

    destfield = *optional* If provided then the result of the command
                will be stored in this new *destfield*.
                Only 1 destination field can be provided so if the fields
                parameter contains a list then the destfield will be a
                overwritten with the last result!

                If destfield is not provided then a new field will be
                created as <field_int>

    ##Description

    Splunk Streaming command (SCPv2) that will change an IPv4 address
    into an integer value. Very often IP addresses are stored in
    integer format in databases.

    ##Example

    Convert a single field containing an IPv4 address into an integer. The
    result will be stored in a new field called ipv4_int

    .. code-block::
        | makeresults
        | eval ipv4="192.168.1.10"
        | ip2int fields=ipv4

    Convert a single field containing an IPv4 address into an integer. The
    result will be stored in a new field called ip_as_int

    .. code-block::
        | makeresults
        | eval ipv4="192.168.1.10"
        | ip2int fields=ipv4 destfield=ip_as_int


    """
    destfield = Option(doc='''
        **Syntax:** **destfield=***<fieldname>*
        **Description:** Name of the field that will be created''',
                       require=False,
                       validate=validators.Fieldname())

    @Option()
    def fields(self):
        """ **Syntax:** **fields=***"<ipv4_field>[,<ipv4_field>]*"*
        **Description:** Comma-separated list of fields containing IPv4
        addresses to convert. Exposed as a list via this property.
        """
        return self._fields.split(",")

    @fields.setter
    def fields(self, value):
        if value is not None:
            self._fields = value

    def __init__(self, *args, **kwargs):
        # super() must name THIS class, not the parent: using
        # StreamingCommand here would skip StreamingCommand's own
        # __init__ in the MRO.
        super(ip2intCommand, self).__init__(*args, **kwargs)
        self._fields = None

    def stream(self, records):
        """Convert each configured field of every record to its integer
        form.

        Missing fields are passed through with a warning; values that
        cannot be parsed as IPv4 set the destination field to
        "conversion error" (best-effort, never raises).
        """
        self.logger.debug('ip2intCommand: %s', self)

        for record in records:
            for fld in self.fields:
                if fld in record:
                    # default destination is "<field>_int"
                    destfield = self.destfield if self.destfield \
                        else "{}_int".format(fld)
                    try:
                        record[destfield] = int(self.ip2int(record[fld]))
                    except Exception:
                        # narrow from bare except: keep best-effort
                        # behavior without swallowing SystemExit et al.
                        self.logger.error(
                            "unable to convert IP '{}' to an integer".format(
                                record[fld]))
                        record[destfield] = "conversion error"
                else:
                    self.logger.warning("field '{}' is not found".format(fld))

            yield record

    def ip2int(self, addr):
        """Return *addr* (dotted-quad IPv4 string) as an unsigned 32-bit
        big-endian integer."""
        return struct.unpack("!I", socket.inet_aton(str(addr)))[0]
# Example #29
# 0
class mispsight(StreamingCommand):
    """ search in MISP for attributes matching the value of field.

    ##Syntax

        code-block::
        mispsight field=<field> misp_instance=<instance_name>

    ##Description

        For each record, the value of `field` is searched on the MISP
        instance (/attributes/restSearch); for every matching attribute
        its sightings (/sightings/restSearch/attribute) are aggregated
        and appended to the record.

        search_body = {"returnFormat": "json",
                "value": "optional",
                "type": "optional",
                "category": "optional",
                "org": "optional",
                "tags": "optional",
                "from": "optional",
                "to": "optional",
                "last": "optional",
                "eventid": "optional",
                "withAttachments": "optional",
                "uuid": "optional",
                "publish_timestamp": "optional",
                "timestamp": "optional",
                "enforceWarninglist": "optional",
                "to_ids": "optional",
                "deleted": "optional",
                "includeEventUuid": "optional",
                "event_timestamp": "optional",
                "threat_level_id": "optional"
                }

    ##Example

    Search in MISP for value of field r_ip (remote IP in proxy logs).

        code-block::
         * | mispsight field=r_ip misp_instance=default_misp

    """

    field = Option(doc='''
        **Syntax:** **field=***<fieldname>*
        **Description:**Name of the field containing the value
         to search for.''',
                   require=True,
                   validate=validators.Fieldname())
    misp_instance = Option(doc='''
        **Syntax:** **misp_instance=instance_name*
        **Description:**MISP instance parameters as described
         in local/inputs.conf.''',
                           require=True)

    def stream(self, records):
        """Enrich each record with MISP sighting information for the
        value found in self.field.

        Adds misp_sight_* fields when true sightings exist and
        misp_fp_* fields when a false-positive sighting exists.
        Raises on non-2xx HTTP status from MISP.
        """
        # Generate args
        my_args = prepare_config(self)
        # set proper headers
        headers = {'Content-type': 'application/json'}
        headers['Authorization'] = my_args['misp_key']
        headers['Accept'] = 'application/json'

        fieldname = str(self.field)
        search_url = my_args['misp_url'] + '/attributes/restSearch'
        sight_url = my_args['misp_url'] \
            + '/sightings/restSearch/attribute'

        for record in records:
            value = record.get(fieldname, None)
            if fieldname in record and value is not None:
                search_dict = {"returnFormat": "json"}
                search_dict['value'] = str(value)
                # note: no trailing comma here — it previously turned
                # this value into the tuple ("false",)
                search_dict['withAttachments'] = "false"
                search_body = json.dumps(search_dict)

                sight_dict = {"returnFormat": "json"}

                misp_value = ''
                misp_fp = False
                misp_fp_timestamp = 0
                misp_fp_event_id = ''
                misp_sight_seen = False
                misp_sight = {
                    'count': 0,
                    'first': 0,
                    'first_event_id': 0,
                    'last': 0,
                    'last_event_id': 0
                }
                # search attributes matching the value
                # (was logging the undefined name 'body' -> NameError)
                logging.debug('mispsight request body: %s', search_body)
                r = requests.post(search_url,
                                  headers=headers,
                                  data=search_body,
                                  verify=my_args['misp_verifycert'],
                                  cert=my_args['client_cert_full_path'],
                                  proxies=my_args['proxies'])
                # raise on any status other than 2xx
                r.raise_for_status()
                # response is 200 by this point or we would have raised
                response = r.json()
                logging.info(
                    "MISP REST API %s has got a response "
                    "with status code 200", search_url)
                logging.debug("MISP REST API %s has got a response: %s"
                              % (search_url, response))
                if 'response' in response:
                    if 'Attribute' in response['response']:
                        for a in response['response']['Attribute']:
                            if misp_value == '':
                                misp_value = str(a['value'])
                            # stop querying sightings once a false
                            # positive has been found
                            if not misp_fp:
                                sight_dict['id'] = str(a['id'])
                                sight_body = json.dumps(sight_dict)
                                s = requests.post(
                                    sight_url,
                                    headers=headers,
                                    data=sight_body,
                                    verify=my_args['misp_verifycert'],
                                    cert=my_args['client_cert_full_path'],
                                    proxies=my_args['proxies'])
                                # raise on any status other than 2xx
                                s.raise_for_status()
                                sight = s.json()
                                logging.info(
                                    "MISP REST API %s has got a response "
                                    "with status code 200", sight_url)
                                logging.debug(
                                    "MISP REST API %s has got a response: %s"
                                    % (sight_url, sight))
                                if 'response' in sight:
                                    for se in sight['response']:
                                        if 'Sighting' not in se:
                                            continue
                                        s_type = int(se['Sighting']['type'])
                                        if s_type == 0:  # true sighting
                                            d_s = int(se['Sighting']
                                                      ['date_sighting'])
                                            e_id = se['Sighting']['event_id']
                                            misp_sight_seen = True
                                            misp_sight['count'] += 1
                                            # track earliest sighting
                                            if misp_sight['first'] == 0 or \
                                               misp_sight['first'] > d_s:
                                                misp_sight['first'] = d_s
                                                misp_sight[
                                                    'first_event_id'] = e_id
                                            # track latest sighting
                                            if misp_sight['last'] < d_s:
                                                misp_sight['last'] = d_s
                                                misp_sight[
                                                    'last_event_id'] = e_id
                                        elif s_type == 1:  # false positive
                                            misp_fp = True
                                            misp_fp_timestamp = int(
                                                se['Sighting']
                                                ['date_sighting'])
                                            misp_fp_event_id = se[
                                                'Sighting']['event_id']
                        if misp_fp:
                            record['misp_value'] = misp_value
                            record['misp_fp'] = "True"
                            record['misp_fp_timestamp'] = str(
                                misp_fp_timestamp)
                            record['misp_fp_event_id'] = str(
                                misp_fp_event_id)
                        if misp_sight_seen:
                            record['misp_value'] = misp_value
                            record['misp_sight_count'] = str(
                                misp_sight['count'])
                            record['misp_sight_first'] = str(
                                misp_sight['first'])
                            record['misp_sight_first_event_id'] = str(
                                misp_sight['first_event_id'])
                            record['misp_sight_last'] = str(
                                misp_sight['last'])
                            record['misp_sight_last_event_id'] = str(
                                misp_sight['last_event_id'])
            yield record
class StackOperation(GeneratingCommand):
    """Run a stack lifecycle command ("up", "down" or "kill") against a
    stack and stream every log record produced while doing so back to
    Splunk as search events."""

    command = Option(require=True)   # one of "up", "down", "kill"
    stack_id = Option(require=True)  # identifier of the stack to operate on

    def generate(self):
        """Execute the requested command and yield buffered log lines.

        Yields:
            dict: one event per log record, with a `_raw` field of the
            form `<epoch>, level="...", msg="..."`.
        """
        root_logger = logging.getLogger()
        root_logger.setLevel("DEBUG")

        class EventBufferHandler(logging.Handler):
            # Buffers log records as Splunk events with strictly
            # increasing timestamps so event ordering is stable.
            _events = None
            _last_time = None

            def __init__(self):
                logging.Handler.__init__(self)
                self._events = []

            def emit(self, record):
                msg = record.getMessage().replace('"', '\\"')
                # Round to ms and bump forward when two records share the
                # same creation time, so timestamps never collide.
                # (Renamed from `time`, which shadowed the time module.)
                ts = round(record.created, 3)
                if self._last_time and ts <= self._last_time:
                    ts = self._last_time + 0.001
                self._last_time = ts
                self._events.append({
                    "_raw": "%.3f, level=\"%s\", msg=\"%s\"" % (
                        ts,
                        record.levelname,
                        msg
                    ),
                })

            @property
            def events(self):
                return self._events

        buffer_handler = EventBufferHandler()
        root_logger.addHandler(buffer_handler)

        logging.debug("running '%s' command .." % self.command)

        # HTTPError: HTTP 503 Service Unavailable - - KV Store is initializing.
        # Please try again later.
        try:
            _ = self.service.kvstore["stacks"].data
        except splunklib.binding.HTTPError as e:
            if e.status == 503:
                logging.warning("%s" % e)
                # Detach the handler on this early exit too, so it does
                # not accumulate on the root logger across invocations.
                root_logger.removeHandler(buffer_handler)
                return
            raise

        try:
            if self.command == "up":
                up(self.service, self.stack_id)
                # Early return: the finally below still flushes events,
                # but unschedule_operation is deliberately skipped.
                return
            elif self.command == "down" or self.command == "kill":
                down(self.service, self.stack_id,
                     force=self.command == "kill")
            else:
                logging.error("unknown command: %s" % self.command)
            logging.debug("will stop '%s' command" % self.command)
        except errors.RetryOperation as e:
            # Expected signal: the operation wants to be re-run later.
            msg = "%s" % e
            if msg:
                logging.info("%s" % msg)
            logging.debug("will check in 1m")
            return
        except Exception:
            # Was a bare `except:`, which also caught GeneratorExit —
            # yielding from the `finally` afterwards would then raise
            # "generator ignored GeneratorExit". Catching Exception
            # keeps the best-effort retry behavior without swallowing
            # SystemExit/KeyboardInterrupt/GeneratorExit.
            import traceback
            logging.error("%s\n(will try again in 1m)" %
                          traceback.format_exc())
            return
        finally:
            logging.shutdown()
            # Remove the buffer handler before yielding so repeated
            # invocations don't leak handlers on the root logger.
            root_logger.removeHandler(buffer_handler)
            for e in buffer_handler.events:
                yield e

        unschedule_operation(self.service, self.stack_id)