Example #1
def toAtomEntry(lxmlNode):
    '''
    Converts an lxml node into an AtomEntry object
    '''

    root = lxmlNode

    #SPL-20024
    link_nodes = root.findall(ATOM_TAGF % 'link')
    link = []
    for ln in link_nodes:
        link.append((ln.attrib['rel'], ln.attrib['href']))

    # extract props
    params = {
        'id': root.findtext(ATOM_TAGF % 'id'),
        'title': root.findtext(ATOM_TAGF % 'title'),
        'published': util.parseISO(root.findtext(ATOM_TAGF % 'published', '')),
        'updated': util.parseISO(root.findtext(ATOM_TAGF % 'updated', '')),
        'summary': root.findtext(ATOM_TAGF % 'summary'),
        'author': root.findtext('/'.join([ATOM_TAGF % 'author', ATOM_TAGF % 'name'])),
        #SPL-20024
        'link': link,
    }

    output = AtomEntry(**params)

    contentNodes = root.xpath('a:content', namespaces={'a': ATOM_NS})

    if contentNodes:

        output.contentType = contentNodes[0].get('type')

        if output.contentType == 'text':
            output.rawcontents = contentNodes[0].text.strip()
        elif len(contentNodes[0]) > 0:
            #logger.debug('toAtomEntry - content is of type: %s' % contentNodes[0][0])
            output.rawcontents = contentNodes[0][0]
        elif contentNodes[0].text:
            #logger.debug('toAtomEntry - content is text')
            output.rawcontents = contentNodes[0].text.strip()
        else:
            raise Exception("No idea what content type is")

    return output
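For context, here is a minimal, runnable sketch of the pieces this converter relies on. The ATOM_TAGF format string, ATOM_NS value, and the AtomEntry stub are assumptions reconstructed from how the code uses them, not the module's actual definitions.

# A minimal sketch, assuming Clark-notation tag lookups for lxml.
# ATOM_NS/ATOM_TAGF values and the AtomEntry stub are assumptions.
from lxml import etree

ATOM_NS = 'http://www.w3.org/2005/Atom'      # standard Atom namespace
ATOM_TAGF = '{%s}%%s' % ATOM_NS              # yields '{...Atom}%s'

class AtomEntry(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

xml = b'''<entry xmlns="http://www.w3.org/2005/Atom">
  <id>urn:example:1</id>
  <title>demo</title>
  <link rel="alternate" href="/services/demo"/>
</entry>'''

root = etree.fromstring(xml)
print(root.findtext(ATOM_TAGF % 'title'))    # -> demo
print([(ln.attrib['rel'], ln.attrib['href'])
       for ln in root.findall(ATOM_TAGF % 'link')])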
Example #2
def createIncident(metadata, config, incident_status, sessionKey):
    alert_time = int(float(sutil.dt2epoch(sutil.parseISO(metadata['alert_time'], True))))
    entry = {}
    entry['title'] = metadata['title']
    entry['category'] = config['category']
    entry['subcategory'] = config['subcategory']
    entry['tags'] = config['tags']
    entry['incident_id'] = metadata['incident_id']
    entry['alert_time'] = alert_time
    entry['job_id'] = metadata['job_id']
    entry['result_id'] = metadata['result_id']
    entry['alert'] = metadata['alert']
    entry['app'] = metadata['app']
    entry['status'] = incident_status
    entry['ttl'] = metadata['ttl']
    entry['impact'] = metadata['impact']
    entry['urgency'] = metadata['urgency']
    entry['priority'] = metadata['priority']
    entry['owner'] = metadata['owner']
    entry['display_fields'] = config['display_fields']
    entry['search'] = metadata['entry'][0]['name']

    entry = json.dumps(entry, sort_keys=True)
    #log.debug("createIncident(): Entry: %s" % entry)
    uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents'
    response = getRestData(uri, sessionKey, entry)
    return response["_key"]
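getRestData is not shown in this example; the sketch below is one plausible shape for it, assuming a plain HTTP POST to the KV store collection endpoint with the standard "Splunk <sessionKey>" Authorization header. The helper name, host, and port are illustrative.

# Hypothetical getRestData-style helper (an assumption, not the app's code):
# POST a JSON document to a Splunk KV store collection and return the parsed
# response; its "_key" field identifies the newly created record.
import json
import urllib.request

def getRestData(uri, sessionKey, body, host='https://localhost:8089'):
    # note: Splunk's default self-signed cert may require an SSL context in practice
    req = urllib.request.Request(
        host + uri,
        data=body.encode('utf-8'),
        headers={'Authorization': 'Splunk %s' % sessionKey,
                 'Content-Type': 'application/json'},
        method='POST')
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())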
Example #3
def toAtomFeed(lxmlNode):
    '''
    Converts an lxml node into an AtomFeed object
    '''

    root = lxmlNode
    output = AtomFeed()

    # extract props
    output.id = root.findtext(ATOM_TAGF % 'id')
    output.title = root.findtext(ATOM_TAGF % 'title')
    output.updated = util.parseISO(root.findtext(ATOM_TAGF % 'updated', ''))

    # extract OpenSearch props
    output.os_totalResults = root.findtext(OPENSEARCH_TAGF % 'totalResults')
    output.os_itemsPerPage = root.findtext(OPENSEARCH_TAGF % 'itemsPerPage')
    output.os_startIndex   = root.findtext(OPENSEARCH_TAGF % 'startIndex')

    # extract messages
    for msg in root.xpath('//s:msg', namespaces={'s': SPLUNK_NS}):
        output.messages.append({'type': msg.get('type','error').lower(), 'text': msg.text})
    
    # extract links
    try:
        output.links = [(link.attrib['rel'], link.attrib['href'])
                        for link in root.findall(ATOM_TAGF % 'link')]
    except KeyError:
        pass  # SPL-21884
                        
    # iterate over entries
    output.entries = [toAtomEntry(node)
                      for node in root.xpath('//a:entry', namespaces={'a': ATOM_NS})]

    return output
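In addition to ATOM_TAGF from the earlier sketch, this feed converter needs Splunk and OpenSearch namespace constants; the values below are assumptions based on the public namespace URIs.

# Assumed namespace constants for the lookups above (illustrative values).
SPLUNK_NS = 'http://dev.splunk.com/ns/rest'
OPENSEARCH_NS = 'http://a9.com/-/spec/opensearch/1.1/'
OPENSEARCH_TAGF = '{%s}%%s' % OPENSEARCH_NS  # e.g. '{...}totalResults'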
Example #4
def createIncident(metadata, config, incident_status, sessionKey):
    alert_time = int(
        float(util.dt2epoch(util.parseISO(metadata['alert_time'], True))))
    entry = {}
    entry['title'] = metadata['title']
    entry['category'] = config['category']
    entry['subcategory'] = config['subcategory']
    entry['tags'] = config['tags']
    entry['incident_id'] = metadata['incident_id']
    entry['alert_time'] = alert_time
    entry['job_id'] = metadata['job_id']
    entry['result_id'] = metadata['result_id']
    entry['alert'] = metadata['alert']
    entry['app'] = metadata['app']
    entry['status'] = incident_status
    entry['ttl'] = metadata['ttl']
    entry['impact'] = metadata['impact']
    entry['urgency'] = metadata['urgency']
    entry['priority'] = metadata['priority']
    entry['owner'] = metadata['owner']
    entry['display_fields'] = config['display_fields']

    entry = json.dumps(entry)
    #log.debug("createIncident(): Entry: %s" % entry)
    uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents'
    response = getRestData(uri, sessionKey, entry)
    return response["_key"]
Example #5
def createNewIncident(alert_time, incident_id, job_id, result_id, alert, status, ttl, impact, urgency, priority, owner, user_list, notifier, digest_mode, results):
    alert_time = int(float(util.dt2epoch(util.parseISO(alert_time, True))))
    entry = {}
    entry['incident_id'] = incident_id
    entry['alert_time'] = alert_time
    entry['job_id'] = job_id
    entry['result_id'] = result_id
    entry['alert'] = alert
    entry['status'] = status
    entry['ttl'] = ttl
    entry['impact'] = impact
    entry['urgency'] = urgency
    entry['priority'] = priority
    entry['owner'] = owner

    if incident_config['auto_assign'] and incident_config['auto_assign_owner'] != 'unassigned':
        entry['owner'] = incident_config['auto_assign_owner']
        owner = incident_config['auto_assign_owner']
        log.info("Assigning incident to %s" % incident_config['auto_assign_owner'])
        auto_assigned = True
        status = 'auto_assigned'
        entry['status'] = status
        notifyAutoAssign(user_list, notifier, digest_mode, results, job_id, result_id, ttl, impact, urgency, priority)

    entry = json.dumps(entry)

    writeIncidentToCollection(entry)

    logCreateEvent(alert, incident_id, job_id, result_id, owner, urgency, ttl, alert_time)
    logChangeEvent(incident_id, job_id, result_id, status, owner)
Example #6
def createNewIncident(alert_time, incident_id, job_id, result_id, alert, status, ttl, impact, urgency, priority, owner, user_list, notifier, digest_mode, results):
    alert_time = int(float(util.dt2epoch(util.parseISO(alert_time, True))))
    entry = {}
    entry['incident_id'] = incident_id
    entry['alert_time'] = alert_time
    entry['job_id'] = job_id
    entry['result_id'] = result_id
    entry['alert'] = alert
    entry['status'] = status
    entry['ttl'] = ttl
    entry['impact'] = impact
    entry['urgency'] = urgency
    entry['priority'] = priority
    entry['owner'] = owner

    if incident_config['auto_assign'] and incident_config['auto_assign_owner'] != 'unassigned':
        entry['owner'] = incident_config['auto_assign_owner']
        owner = incident_config['auto_assign_owner']
        log.info("Assigning incident to %s" % incident_config['auto_assign_owner'])
        auto_assigned = True
        status = 'auto_assigned'
        entry['status'] = status
        notifyAutoAssign(user_list, notifier, digest_mode, results, job_id, result_id, ttl, impact, urgency, priority)

    entry = json.dumps(entry)

    writeIncidentToCollection(entry)

    logCreateEvent(alert, incident_id, job_id, result_id, owner, urgency, ttl, alert_time)
    logChangeEvent(incident_id, job_id, result_id, status, owner)
Example #7
def createIncident(metadata, config, incident_status, sessionKey):
    alert_time = int(float(util.dt2epoch(util.parseISO(metadata["alert_time"], True))))
    entry = {}
    entry["title"] = metadata["title"]
    entry["category"] = config["category"]
    entry["subcategory"] = config["subcategory"]
    entry["tags"] = config["tags"]
    entry["incident_id"] = metadata["incident_id"]
    entry["alert_time"] = alert_time
    entry["job_id"] = metadata["job_id"]
    entry["result_id"] = metadata["result_id"]
    entry["alert"] = metadata["alert"]
    entry["app"] = metadata["app"]
    entry["status"] = incident_status
    entry["ttl"] = metadata["ttl"]
    entry["impact"] = metadata["impact"]
    entry["urgency"] = metadata["urgency"]
    entry["priority"] = metadata["priority"]
    entry["owner"] = metadata["owner"]
    entry["display_fields"] = config["display_fields"]

    entry = json.dumps(entry)
    # log.debug("createIncident(): Entry: %s" % entry)
    uri = "/servicesNS/nobody/alert_manager/storage/collections/data/incidents"
    response = getRestData(uri, sessionKey, entry)
    return response["_key"]
Example #8
def createIncident(metadata, config, incident_status, sessionKey):
    alert_time = int(
        float(sutil.dt2epoch(sutil.parseISO(metadata['alert_time'], True))))
    entry = {}
    entry['title'] = metadata['title']
    entry['category'] = metadata["category"]
    entry['subcategory'] = metadata["subcategory"]
    entry['tags'] = metadata["tags"]
    entry['display_fields'] = metadata['display_fields']
    entry['incident_id'] = metadata['incident_id']
    entry['alert_time'] = alert_time
    entry['job_id'] = metadata['job_id']
    entry['result_id'] = metadata['result_id']
    entry['alert'] = metadata['alert']
    entry['app'] = metadata['app']
    entry['status'] = incident_status
    entry['ttl'] = metadata['ttl']
    entry['impact'] = metadata['impact']
    entry['urgency'] = metadata['urgency']
    entry['priority'] = metadata['priority']
    if metadata.get('owner') is not None:
        entry['owner'] = metadata['owner']
    else:
        entry['owner'] = 'unassigned'
    entry['search'] = metadata['entry'][0]['name']
    entry['external_reference_id'] = metadata['external_reference_id']

    entry = json.dumps(entry, sort_keys=True)
    log.debug("createIncident(): Entry: {}".format(entry))

    uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents'
    response = getRestData(uri, sessionKey, entry)
    return response["_key"]
Example #9
def toAtomEntry(lxmlNode):
    '''
    Converts an lxml node into an AtomEntry object
    '''
    
    root = lxmlNode

    #SPL-20024
    link_nodes = root.findall(ATOM_TAGF % 'link')
    link = []
    for ln in link_nodes:
        link.append((ln.attrib['rel'], ln.attrib['href']))

    # extract props
    params = {
        'id': root.findtext(ATOM_TAGF % 'id'),
        'title': root.findtext(ATOM_TAGF % 'title'),
        'published': util.parseISO(root.findtext(ATOM_TAGF % 'published', '')),
        'updated': util.parseISO(root.findtext(ATOM_TAGF % 'updated', '')),
        'summary': root.findtext(ATOM_TAGF % 'summary'),
        'author': root.findtext('/'.join([ATOM_TAGF % 'author', ATOM_TAGF % 'name'])),
        #SPL-20024
        'link':link,
    }

    output = AtomEntry(**params)

    contentNodes = root.xpath('a:content', namespaces={'a': ATOM_NS})
    
    if contentNodes:
        
        output.contentType = contentNodes[0].get('type')
        
        if output.contentType == 'text':
            output.rawcontents = contentNodes[0].text.strip()
        elif len(contentNodes[0]) > 0:
            #logger.debug('toAtomEntry - content is of type: %s' % contentNodes[0][0])
            output.rawcontents = contentNodes[0][0]
        elif contentNodes[0].text:
            #logger.debug('toAtomEntry - content is text')
            output.rawcontents = contentNodes[0].text.strip()
        else:
            raise Exception("No idea what content type is")

    return output
Example #10
    def timeParser(ts='now', session_key=None):
        getargs = {}
        getargs['time'] = ts

        tsStatus, tsResp = rest.simpleRequest('/search/timeparser', sessionKey=session_key, getargs=getargs)
        
        root = et.fromstring(tsResp)  
    
        ts = root.find('dict/key')
        if ts is not None:
            return util.parseISO(ts.text, strict=True)
        else:
            logger.warn("Could not retrieve timestamp for specifier '%s' from /search/timeparser" % (getargs['time']))
            return False
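A usage sketch: /search/timeparser resolves Splunk time specifiers, so this helper turns a relative specifier like '-24h@h' into a parsed datetime, or False on failure. The session-key acquisition below is illustrative.

# Usage sketch (session-key handling is illustrative).
import splunk.auth

session_key = splunk.auth.getSessionKey('admin', 'changeme')
dt = timeParser('-24h@h', session_key=session_key)
if dt is not False:
    print("resolved: %s" % dt.isoformat())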
Example #11
def createNewIncident(alert_time, incident_id, job_id, result_id, alert, status, ttl, impact, urgency, priority, owner, digest_mode, results, title):
    alert_time = int(float(util.dt2epoch(util.parseISO(alert_time, True))))
    entry = {}
    entry['title'] = title
    entry['incident_id'] = incident_id
    entry['alert_time'] = alert_time
    entry['job_id'] = job_id
    entry['result_id'] = result_id
    entry['alert'] = alert
    entry['app'] = alert_app
    entry['status'] = status
    entry['ttl'] = ttl
    entry['impact'] = impact
    entry['urgency'] = urgency
    entry['priority'] = priority
    entry['owner'] = owner

    incident_key = writeIncidentToCollection(entry)

    return incident_key
Example #12
 def _makeInstance(self, obj):
     '''convert jv result to dict. jv result is not editable. also has _time in iso format.'''
     result = ScriptResult()
     for a in obj:
         v = obj.get(a)
         # if this is a multivalued field, return list of strings. (for some reason the python api is returning
         # _raw with a length of the string, rather than number of mv-values as for every other field.)
         if a != '_raw' and len(v) > 1:
             val = [str(x) for x in v]
         else: # otherwise just a string
             val = str(obj.get(a))
         try:
             if a == '_time':
                 val = sutil.dt2epoch(sutil.parseISO(val))
             else:
                 val = float(val) if '.' in val else int(val)
         except:
             pass
         result[a] = val
     if self._callback is not None:
         # run callback.  callback takes a result and returns a result.
         result = self._callback(result)
     return result
Example #13
 def _makeInstance(self, obj):
     """convert jv result to dict. jv result is not editable. also has _time in iso format."""
     result = ScriptResult()
     for a in obj:
         v = obj.get(a)
         # if this is a multivalued field, return list of strings. (for some reason the python api is returning
         # _raw with a length of the string, rather than number of mv-values as for every other field.)
         if a != "_raw" and len(v) > 1:
             val = [str(x) for x in v]
         else:  # otherwise just a string
             val = str(obj.get(a))
         try:
             if a == "_time":
                 val = sutil.dt2epoch(sutil.parseISO(val))
             else:
                 val = float(val) if "." in val else int(val)
         except:
             pass
         result[a] = val
     if self._callback is not None:
         # run callback.  callback takes a result and returns a result.
         result = self._callback(result)
     return result
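The per-field coercion rule in _makeInstance can be isolated for testing; here is a standalone sketch of the same rule (the function name is hypothetical):

# Standalone sketch of the coercion used above: numeric-looking strings become
# int or float, everything else (including multivalue lists) is returned unchanged.
def coerce(val):
    try:
        return float(val) if '.' in val else int(val)
    except (TypeError, ValueError):
        return val

assert coerce('42') == 42
assert coerce('3.14') == 3.14
assert coerce('abc') == 'abc'
assert coerce(['a', 'b']) == ['a', 'b']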
Example #14
def findTransaction(tname, tconstraint, useORs, eventsOnly, maxTerms, messages, **kwargs):

    base_search, fields, maxspan = getTransactionInfo(tname, **kwargs)

    if maxspan is None:
        si.addWarnMessage(messages, "Add a maxspan constraint to the %s transactiontype definition to improve performance.  Searching over all time for transitive values." % tname)

    log("MAXSPAN: %s" % maxspan)
    
    # require one field in transaction definition
    fieldsearch = " OR ".join(["%s=*" % field for field in fields])

    initialConstraint = tconstraint
    if useORs:
        ## forces an OR of terms. slow and unnecessary
        ## initialConstraint = disjunctify(tconstraint)
        # get the most restrictive term in the search and use that as the initial constraint to find events
        restrictiveTerm = getMostRestrictiveTerm(tconstraint, **kwargs)
        log("MOST RESTRICTIVE: %s" % restrictiveTerm)
        initialConstraint = restrictiveTerm
    # e.g., "sourcetype=sendmail" + "from=amrit" + "(qid=* OR mid=* OR pid=*)"
    index_search = "search (%s) (%s) (%s)" % (base_search, initialConstraint, fieldsearch)
    log("INDEX SEARCH: %s" % index_search)
    
    field_list_str = " ".join(fields)
    max_combos = maxTerms // len(fields)  # integer division: the head count must be whole
    log("MAX_COMBINATION: %s" % max_combos)


    needsTIME = ""
    if maxspan is not None:
        needsTIME = "_time"
        
    # make search to get field value pairs.
    #    # e.g. | stats values(qid) as qid values(mid) as mid values(pid) as pid
    #    stats_search = "| stats " + " ".join("values(%s) as %s" % (field, field) for field in fields)
    #    # use top
    #    stats_search = '| fillnull value="%s" %s | top %s %s showperc=false | addcoltotals' % (NULL_VAL, field_list_str, MAX_FIELD_COMBOS, field_list_str)
    #
    # TODO: if transactiondefinition contains maxspan, consider making
    # first stats_search return time ranges to limit values of fields
    stats_search = '| table %s %s | fillnull value="%s" %s | dedup %s | head %s' % (field_list_str, needsTIME, NULL_VAL, field_list_str, field_list_str, max_combos)

    seenFields = set()

    while True:

        search =  index_search + stats_search

        log("running search: %s" % search)
        results = splunk.search.searchAll(search, **kwargs)

        ## generate an OR of ANDS of field combinations -- (qid=1 pid=2) OR (qid=3 pid=4)..."
        ors = []
        # for each top permutation of field values
        for result in results:
            ands = []
            # for each field
            for field in result:
                if field == '_time': # if we have time field we must have maxspan
                    # if we have maxspan info about event, use it to limit window of events to +/- maxspan of window
                    # we don't need float precision, because subseconds don't matter in maxspan spec
                    eventtime = int(util.dt2epoch(util.parseISO(str(result['_time']))))
                    ands.append('_time>=%s' % (eventtime - maxspan))
                    ands.append('_time<=%s' % (eventtime + maxspan))
                else:
                    val = result[field]
                    # ignore empty values
                    if val != NULL_VAL:
                        seenFields.add(field) # add to list of fields with a value
                        ands.append('%s="%s"' % (field, escVal(result[field])))
                                
            ands_str = "(" + " ".join(ands) + ")"
            ors.append(ands_str)
        field_constraints = " OR ".join(ors)
        # e.g., "sourcetype=sendmail (qid=1 pid=2) OR (qid=3 pid=4)..."
        index_search = "search (%s) (%s)" % (base_search, field_constraints)
        log("INDEXSEARCH: %s" % index_search)
        
        if len(results) >= max_combos:
            si.addWarnMessage(messages, "Reached max complexity in trying to find transaction events with %s unique values per field.  Preferring more recent values.  A more detailed initial transaction constraint will allow more complete transactions" % max_combos)

        if seenFields == set(fields):
            log("SEEN VALUES FOR ALL FIELDS: %s" % fields)
            break

        if len(results) == 0:
            msg = "No results in searching for required fields"
            si.addWarnMessage(messages, msg)
            return []

    # we've retrieved all the events we're going to with the last index_search!

    if eventsOnly:
        # no transaction search, just return the events
        transaction_search = ""
    else:
        # this is it, find the transactions!
        transaction_search = '| transaction name="%s" | search %s' % (tname, tconstraint)

    search =  index_search + transaction_search
    log("running final search! %s" % search)
    results = splunk.search.searchAll(search, **kwargs)
        
    return results
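The heart of the loop is the OR-of-ANDs constraint built from each batch of field-value rows. Isolated, with rows as plain dicts and NULL_VAL standing in for the fillnull placeholder (both assumptions for illustration), it looks like this:

# Sketch of the OR-of-ANDs constraint construction.
NULL_VAL = 'VALUE_NULL'   # assumed fillnull placeholder

def build_constraints(rows):
    ors = []
    for row in rows:
        ands = ['%s="%s"' % (f, v) for f, v in row.items() if v != NULL_VAL]
        ors.append('(' + ' '.join(ands) + ')')
    return ' OR '.join(ors)

print(build_constraints([{'qid': '1', 'pid': '2'},
                         {'qid': '3', 'pid': NULL_VAL}]))
# -> (qid="1" pid="2") OR (qid="3")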
Example #15
def main():
    if len(sys.argv) < 3:
        usage()

    tname = sys.argv[1]
    #log("args")
    #for v in sys.argv:
    #    log(v)

    options = ["max_terms", "use_disjunct", "eventsonly"]
    srchargs = []
    log("ARGS: %s" % sys.argv[2:])
    for arg in sys.argv[2:]:
        for option in options:
            if arg.startswith(option):
                break
        else:
            srchargs.append(arg)
    if len(srchargs) == 0:
        usage()

    tsearch = ' '.join(srchargs)
    log("SEARCH: %s" % tsearch)

    results, dummyresults, settings = si.getOrganizedResults()
    results = []  # we don't care about incoming results

    ########TEST#####################
    if 'sessionKey' not in settings:
        settings['owner'] = 'admin'
        settings['password'] = '******'
        settings['namespace'] = 'search'
        settings['sessionKey'] = splunk.auth.getSessionKey('admin', 'changeme')
    ########TEST####################
    kwargs = {}
    for f in ['owner', 'namespace', 'sessionKey', 'hostPath']:
        if f in settings:
            kwargs[f] = settings[f]

    messages = {}
    try:
        maxTerms = int(settings.get("max_terms", MAX_SEARCH_COMPLEXITY))
        if maxTerms > MAX_SEARCH_COMPLEXITY or maxTerms < 1:
            si.addWarnMessage(
                messages,
                "max_terms must be between 1 and %s.  Using default." %
                MAX_SEARCH_COMPLEXITY)
            maxTerms = MAX_SEARCH_COMPLEXITY
    except Exception as e:
        maxTerms = MAX_SEARCH_COMPLEXITY

    dummy, options = si.getKeywordsAndOptions()
    makeORs = isTrue(options.get("use_disjunct", "t"))
    eventsOnly = isTrue(options.get("eventsonly", "f"))

    log("MAXTERMS: %s MAKEORS: %s eventsOnly: %s" %
        (maxTerms, makeORs, eventsOnly))
    log("tsearch: %s" % tsearch)

    results = []
    try:
        results = findTransaction(tname, tsearch, makeORs, eventsOnly,
                                  maxTerms, messages, **kwargs)
    except Exception as e:
        error(e)

    events = []
    log("RESULTS: %s" % len(results))
    for result in results:  # api fail
        event = {}
        for field in result:
            if field == '_time':
                event['_time'] = util.dt2epoch(
                    util.parseISO(str(result['_time'])))
            else:
                event[field] = result[field]
        events.append(event)

    si.outputResults(events, messages)
Example #16
def findTransaction(tname, tconstraint, useORs, eventsOnly, maxTerms, messages,
                    **kwargs):

    base_search, fields, maxspan = getTransactionInfo(tname, **kwargs)

    if maxspan is None:
        si.addWarnMessage(
            messages,
            "Add a maxspan constraint to the %s transactiontype definition to improve performance.  Searching over all time for transitive values."
            % tname)

    log("MAXSPAN: %s" % maxspan)

    # require one field in transaction definition
    fieldsearch = " OR ".join(["%s=*" % field for field in fields])

    initialConstraint = tconstraint
    if useORs:
        ## forces an OR of terms. slow and unnecessary
        ## initialConstraint = disjunctify(tconstraint)
        # get the most restrictive term in the search and use that as the initial constraint to find events
        restrictiveTerm = getMostRestrictiveTerm(tconstraint, **kwargs)
        log("MOST RESTRICTIVE: %s" % restrictiveTerm)
        initialConstraint = restrictiveTerm
    # e.g., "sourcetype=sendmail" + "from=amrit" + "(qid=* OR mid=* OR pid=*)"
    index_search = "search (%s) (%s) (%s)" % (base_search, initialConstraint,
                                              fieldsearch)
    log("INDEX SEARCH: %s" % index_search)

    field_list_str = " ".join(fields)
    max_combos = maxTerms // len(fields)  # integer division: the head count must be whole
    log("MAX_COMBINATION: %s" % max_combos)

    needsTIME = ""
    if maxspan is not None:
        needsTIME = "_time"

    # make search to get field value pairs.
    #    # e.g. | stats values(qid) as qid values(mid) as mid values(pid) as pid
    #    stats_search = "| stats " + " ".join("values(%s) as %s" % (field, field) for field in fields)
    #    # use top
    #    stats_search = '| fillnull value="%s" %s | top %s %s showperc=false | addcoltotals' % (NULL_VAL, field_list_str, MAX_FIELD_COMBOS, field_list_str)
    #
    # TODO: if transactiondefinition contains maxspan, consider making
    # first stats_search return time ranges to limit values of fields
    stats_search = '| table %s %s | fillnull value="%s" %s | dedup %s | head %d' % (
        field_list_str, needsTIME, NULL_VAL, field_list_str, field_list_str,
        max_combos)

    seenFields = set()

    while True:

        search = index_search + stats_search

        log("running search: %s" % search)
        results = splunk.search.searchAll(search, **kwargs)

        ## generate an OR of ANDS of field combinations -- (qid=1 pid=2) OR (qid=3 pid=4)..."
        ors = []
        # for each top permutation of field values
        for result in results:
            ands = []
            # for each field
            for field in result:
                if field == '_time':  # if we have time field we must have maxspan
                    # if we have maxspan info about event, use it to limit window of events to +/- maxspan of window
                    # we don't need float precision, because subseconds don't matter in maxspan spec
                    eventtime = int(
                        util.dt2epoch(util.parseISO(str(result['_time']))))
                    ands.append('_time>=%s' % (eventtime - maxspan))
                    ands.append('_time<=%s' % (eventtime + maxspan))
                else:
                    val = result[field]
                    # ignore empty values
                    if val != NULL_VAL:
                        seenFields.add(
                            field)  # add to list of fields with a value
                        ands.append('%s="%s"' % (field, escVal(result[field])))

            ands_str = "(" + " ".join(ands) + ")"
            ors.append(ands_str)
        field_constraints = " OR ".join(ors)
        # e.g., "sourcetype=sendmail (qid=1 pid=2) OR (qid=3 pid=4)..."
        index_search = "search (%s) (%s)" % (base_search, field_constraints)
        log("INDEXSEARCH: %s" % index_search)

        if len(results) >= max_combos:
            si.addWarnMessage(
                messages,
                "Reached max complexity in trying to find transaction events with %s unique values per field.  Preferring more recent values.  A more detailed initial transaction constraint will allow more complete transactions"
                % max_combos)

        if seenFields == set(fields):
            log("SEEN VALUES FOR ALL FIELDS: %s" % fields)
            break

        if len(results) == 0:
            msg = "No results in searching for required fields"
            si.addWarnMessage(messages, msg)
            return []

    # we've retrieved all the events we're going to with the last index_search!

    if eventsOnly:
        # no transaction search, just return the events
        transaction_search = ""
    else:
        # this is it, find the transactions!
        transaction_search = '| transaction name="%s" | search %s' % (
            tname, tconstraint)

    search = index_search + transaction_search
    log("running final search! %s" % search)
    results = splunk.search.searchAll(search, **kwargs)

    return results
Example #17
        (maxTerms, makeORs, eventsOnly))
    log("tsearch: %s" % tsearch)

    results = []
    try:
        results = findTransaction(tname, tsearch, makeORs, eventsOnly,
                                  maxTerms, messages, **kwargs)
    except Exception as e:
        error(e)

    events = []
    log("RESULTS: %s" % len(results))
    for result in results:  # api fail
        event = {}
        for field in result:
            if field == '_time':
                event['_time'] = util.dt2epoch(
                    util.parseISO(str(result['_time'])))
            else:
                event[field] = result[field]
        events.append(event)

    si.outputResults(events, messages)


if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        error(e)
Example #18
    log("MAXTERMS: %s MAKEORS: %s eventsOnly: %s" % (maxTerms, makeORs, eventsOnly))
    log("tsearch: %s" % tsearch)

    results = []
    try:
        results = findTransaction(tname, tsearch, makeORs, eventsOnly, maxTerms, messages, **kwargs)
    except Exception as e:
        error(e)

    events = []
    log("RESULTS: %s" % len(results))
    for result in results:  # api fail
        event = {}
        for field in result:
            if field == '_time':
                event['_time'] = util.dt2epoch(util.parseISO(str(result['_time'])))
            else:
                event[field] = result[field]
        events.append(event)

    si.outputResults(events, messages)


if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        error(e)
        
Example #19
                notifier.send_notification(alert, user['email'], "notify_user",
                                           context)

            else:
                log.info(
                    "Auto-assign user %s is configured not to receive notifications or has no email address. Won't send any notification."
                    % alert_config['auto_assign_owner'])

    else:
        entry['owner'] = config['default_owner']
        owner = config['default_owner']
        log.info("Assigning incident to default owner %s" %
                 config['default_owner'])

    log.debug("Alert time: %s" %
              util.dt2epoch(util.parseISO(alert_time, True)))

    log.debug("Incident not found")

    # Write incident to collection
    uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents'
    alert_time = int(float(util.dt2epoch(util.parseISO(alert_time, True))))
    entry['alert_time'] = alert_time
    entry['job_id'] = job_id
    entry['alert'] = alert
    entry['status'] = status
    entry['ttl'] = ttl
    entry['priority'] = alert_config['priority']
    entry['severity_id'] = savedsearchContent['entry'][0]['content'][
        'alert.severity']
    entry = json.dumps(entry)