Example #1
def append_account_to_summary(name=None,
                              account_id=None,
                              category=None,
                              session_key=None):
    search.dispatch(ACCOUNT_APPEND_SPL % (account_id, name, category),
                    sessionKey=session_key)
    return
Example #2
def append_assume_role_to_summary(name=None, arn=None, session_key=None):
    account_id = extract_account_id_from_role_arn(arn)

    if account_id:
        search.dispatch(ACCOUNT_APPEND_SPL % (account_id, name, 'N/A'),
                        sessionKey=session_key)

    return
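
In both snippets ACCOUNT_APPEND_SPL is a module-level SPL template that gets filled in with % formatting before being dispatched. The template itself is not shown on this page; a minimal sketch of the pattern, with a hypothetical template and summary index, might look like this:

import splunk.search as search

# Hypothetical template -- the real ACCOUNT_APPEND_SPL is defined in the
# source module and is not shown here.
ACCOUNT_APPEND_SPL = (
    '| makeresults '
    '| eval account_id="%s", name="%s", category="%s" '
    '| collect index=account_summary'
)

def append_account(account_id, name, category, session_key):
    # Fill the template and dispatch it; the job runs asynchronously,
    # just like append_account_to_summary() above.
    return search.dispatch(ACCOUNT_APPEND_SPL % (account_id, name, category),
                           sessionKey=session_key)
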
Example #3
    def searchSplunkSummarize(self):
        # /////////////////////////////////////////////////////////////////////////////
        # Scenario 2: do a search for all web server logs and summarize
        # /////////////////////////////////////////////////////////////////////////////

        # start search
        job = search.dispatch('search sourcetype="access_combined" | timechart count')

        # the 'job' object has 2 distinct result containers: 'events' and 'results'
        # 'events' contains the data in a non-transformed manner
        # 'results' contains the data that is post-transformed, i.e. after being
        # processed by the 'timechart' operator

        # wait for search to complete, and make the results available
        while not job.isDone:
            time.sleep(1)

        # print out the results
        for result in job.results:
            print result

        # because we used the 'timechart' operator, the previous loop will output a
        # compacted string; to get at a native dictionary of fields:
        for result in job.results:
            print result.fields     # prints a standard python str() of a dict object

        # or, if we just want the raw events
#        for event in job.events:
#            print event
#            print event.time    # returns a datetime.datetime object

        # clean up
        job.cancel()
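
The isDone polling loop above recurs in most of the examples on this page. A small helper with an optional timeout keeps the calling code short; this is only a sketch of that pattern (splunk.search also provides waitForJob(), used in Example #8):

import time

def wait_for_job(job, timeout=60, poll_interval=1):
    # Poll job.isDone until the search finishes or `timeout` seconds elapse.
    waited = 0
    while not job.isDone:
        if waited >= timeout:
            return False            # caller decides whether to job.cancel()
        time.sleep(poll_interval)
        waited += poll_interval
    return True

# Usage with the job dispatched above:
#   if wait_for_job(job):
#       for result in job.results:
#           print result.fields
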
Example #4
def dispatchJob(search, sessionKey, namespace, owner, argList):
    """
   helpers fun used by both sync/async search
   """

    search = search.strip()

    argListRem = copy.deepcopy(argList)

    if len(search) == 0 or search[0] != '|':
        search = "search " + search

    #strip the args handled locally; any remaining keys (e.g. the 'id' arg) are passed through to the dispatch call so the endpoint can handle them
    for remove_key in ['maxout', 'buckets', 'maxtime', 'authstr', 'terms']:
        try:
            argListRem.pop(remove_key)
        except:
            pass

    #rename the maxout/buckets/maxtime args - SPL-20794/SPL-20916
    argListRem['max_count'] = argList.get('maxout', 100)
    argListRem['status_buckets'] = argList.get('buckets', 0)
    argListRem['max_time'] = argList.get('maxtime', 0)

    argListRem['sessionKey'] = sessionKey
    argListRem['namespace'] = namespace
    argListRem['owner'] = owner

    try:
        searchjob = dispatch(search, **argListRem)
    except splunk.SearchException, e:
        raise
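
The point of dispatchJob() is the renaming flagged by SPL-20794/SPL-20916: callers pass maxout/buckets/maxtime, but the dispatch endpoint expects max_count/status_buckets/max_time, and everything else in argList is forwarded untouched. A hedged usage sketch, where the argument values and the 'id' key are illustrative assumptions:

import splunk.auth as auth

sessionKey = auth.getSessionKey('admin', 'changeme')

# Hypothetical argList -- values are made up for illustration.
argList = {
    'maxout': 500,         # forwarded to dispatch() as max_count=500
    'buckets': 300,        # forwarded as status_buckets=300
    'maxtime': 60,         # forwarded as max_time=60
    'id': 'my_saved_job',  # not in the remove list, so passed through as-is
}
# The query gains a leading "search " because it does not start with '|'.
dispatchJob('error | head 10', sessionKey, 'search', 'admin', argList)
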
Example #5
File: rcUtils.py, Project: DRArpitha/splunk
def dispatchJob(search, sessionKey, namespace, owner, argList):
   """
   helpers fun used by both sync/async search
   """

   search = search.strip()

   argListRem = copy.deepcopy(argList)

   if len(search) == 0 or search[0] != '|':
      search = "search " + search

   #strip the args handled locally; any remaining keys (e.g. the 'id' arg) are passed through to the dispatch call so the endpoint can handle them
   for remove_key in ['maxout', 'buckets', 'maxtime', 'authstr', 'terms']:
      try:
         argListRem.pop(remove_key)
      except:
         pass

   #rename the maxout/buckets/maxtime args - SPL-20794/SPL-20916
   argListRem['max_count'] = argList.get('maxout', 100)
   argListRem['status_buckets'] = argList.get('buckets', 0)
   argListRem['max_time'] = argList.get('maxtime', 0)

   argListRem['sessionKey'] = sessionKey
   argListRem['namespace'] = namespace
   argListRem['owner'] = owner

   try:
      searchjob = dispatch(search, **argListRem)
   except splunk.SearchException, e:
      raise
Example #6
 def testSimple(self):
     sessionKey = auth.getSessionKey('admin', 'changeme')
     job = search.dispatch('windbag', sessionKey=sessionKey)
     time.sleep(1)
     event = job.results[0]
     custom = Custom(namespace='search')
     renderer = custom.getRenderer(event.fields)
     self.assertEquals(renderer.get('eventtype', None), None)
     self.assertEquals(renderer.get('priority'), 0)
     self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
     self.assertEquals(renderer.get('css_class', None), None)
Example #7
 def testSimple(self):
     sessionKey = auth.getSessionKey('admin', 'changeme')
     job = search.dispatch('windbag', sessionKey=sessionKey)
     time.sleep(1)
     event = job.results[0]
     custom = Custom(namespace='search')
     renderer = custom.getRenderer(event.fields)
     self.assertEquals(renderer.get('eventtype', None), None)
     self.assertEquals(renderer.get('priority'), 0)
     self.assertEquals(renderer.get('template'),
                       '//results/EventsViewer_default_renderer.html')
     self.assertEquals(renderer.get('css_class', None), None)
Example #8
 def get_metadata(session_key=None):
     """
     Dispatch a metadata search to retrieve firstTime values
     per sources (correlation search names) in the notable event index
     """
     logger.info('Dispatching metadata search')
     job = search.dispatch('| metadata type=sources `get_notable_index` | fields firstTime, source', sessionKey=session_key)
     
     logger.info('Waiting for metadata search')
     search.waitForJob(job)
     
     logger.info('Returning metadata search results')
     return job.results
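
Once waitForJob() returns, job.results holds one row per source with its firstTime. A short sketch of consuming those rows, assuming each row exposes the source and firstTime fields requested by the | fields clause:

def earliest_times_by_source(session_key=None):
    # Builds on get_metadata() above: correlation search name -> firstTime.
    earliest = {}
    for row in get_metadata(session_key=session_key):
        earliest[str(row['source'])] = str(row['firstTime'])
    return earliest
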
Example #9
    def searchSplunk(self):
        # /////////////////////////////////////////////////////////////////////////////
        # Scenario 1: do a simple search for all web server logs
        # /////////////////////////////////////////////////////////////////////////////

        # start search
        job = search.dispatch('search index="coherence" host="*hou" source="coherence_gc_log" sourcetype="garbagecollection" | timechart max(gctime) by host')

        # at this point, Splunk is running the search in the background; how long it
        # takes depends on how much data is indexed, and the scope of the search
        #
        # from this point, we explore some of the things you can do:
        #
        #
        # Option A: return all of the matched events

        # this will stream events back until the last event is reached
#        for event in job:
#            print event

        # Option B: just return the host field all of the matched events
#        for event in job:
#                print event['host']

        # Option C: return specific events

        # wait until the job has completed before trying to access arbitrary indices
        while not job.isDone:
            time.sleep(1)

        # print the total number of matched events
        print len(job)
        print job.count

        # print the second event (remember that python is 0-indexed)
        print job[1]

        # print the first 10
        for event in job[0:10]:
            print event

        # print the last 5
        for event in job[-5:]:
            print event

        # clean up
        job.cancel()
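
A compact variant of Options A through C: wait for completion, then pull a single field from the first few matched events. This sketch assumes the dispatched query yields a host field, as the one in this example does:

import time

def first_hosts(job, limit=10):
    # Wait for the job, then return the 'host' field of the first
    # `limit` matched events, using the same indexing shown above.
    while not job.isDone:
        time.sleep(1)
    return [event['host'] for event in job[0:limit]]
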
Example #10
 def findNonDupes(self):
     try:
         cdjob = search.dispatch(self.dedup_search, sessionKey=self.sessionKey, 
                                 namespace=self.bf.namespace, owner=self.owner)
     except:
         raise
     existmap = {}
     while not cdjob.isDone:
         time.sleep(1)
     for r in cdjob.results:
         if self.time_field in r:
             existmap[str(math.trunc(float(str(r[self.time_field]))))] = 1 
     not_skipping = []
     for st in self.time_list:
         if str(math.trunc(float(st))) not in existmap:
             not_skipping.append(st)
     return not_skipping
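
The deduplication hinges on normalizing both sides to the same key: each timestamp is converted to a float, truncated to whole seconds, and stringified before the lookup. A standalone illustration of that keying, with made-up sample values:

import math

def truncate_key(value):
    # The same normalization findNonDupes() applies to both the search
    # results and self.time_list: string -> float -> whole seconds -> string.
    return str(math.trunc(float(str(value))))

existing = {truncate_key(t): 1 for t in ['1700000000.123', '1700000100.500']}
candidates = ['1700000000.999', '1700000200.000']
not_skipping = [t for t in candidates if truncate_key(t) not in existing]
# only '1700000200.000' survives; the 1700000000.* bucket already exists
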
Example #11
File: bfsum.py, Project: mealy/splunk
 def findNonDupes(self):
     try:
         cdjob = search.dispatch(
             self.dedup_search, sessionKey=self.sessionKey, namespace=self.bf.namespace, owner=self.owner
         )
     except:
         raise
     existmap = {}
     while not cdjob.isDone:
         time.sleep(1)
     for r in cdjob.results:
         if self.time_field in r:
             existmap[str(math.trunc(float(str(r[self.time_field]))))] = 1
     not_skipping = []
     for st in self.time_list:
         if str(math.trunc(float(st))) not in existmap:
             not_skipping.append(st)
     return not_skipping
Example #12
        def testDuplicateEventtypePriority(self):
            sessionKey = auth.getSessionKey('admin', 'changeme')
            job = search.dispatch('| windbag | eval eventtype="testeventtype"',
                                  sessionKey=sessionKey)
            time.sleep(1)
            event = job.results[0]
            conf = splunk.bundle.getConf('event_renderers',
                                         sessionKey=sessionKey,
                                         namespace='search')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 300
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 400)
            self.assertEquals(renderer.get('template'),
                              '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass2')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 500
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 500)
            self.assertEquals(renderer.get('template'),
                              '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass1')
Example #13
        def testDuplicateEventtypePriority(self):
            sessionKey = auth.getSessionKey('admin', 'changeme')
            job = search.dispatch('| windbag | eval eventtype="testeventtype"', sessionKey=sessionKey)
            time.sleep(1)
            event = job.results[0]
            conf = splunk.bundle.getConf('event_renderers', sessionKey=sessionKey, namespace='search')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 300
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 400)
            self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass2')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 500
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 500)
            self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass1')
Example #14
File: recipe.py, Project: DRArpitha/splunk
    def run(self, workspace={}, debug=None):
        import splunk.search as se

        q = self.args['search']
        q = substVars(q, workspace)
        workspace['_'] = se.dispatch(q, **workspace)
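
run() forwards every key in the workspace dict to dispatch() as a keyword argument and stores the resulting job under '_'. A hedged driver sketch: the workspace keys shown are assumptions based on the dispatch signatures used elsewhere on this page, and substVars() (defined in recipe.py, not shown here) is assumed to splice workspace values into the query string:

import splunk.auth as auth
import splunk.search as se

sessionKey = auth.getSessionKey('admin', 'changeme')
workspace = {'sessionKey': sessionKey, 'namespace': 'search', 'owner': 'admin'}

q = 'search index=_internal | head 10'   # stands in for self.args['search']
workspace['_'] = se.dispatch(q, **workspace)
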
Example #15
			try:
				key = au.getSessionKey(self.splunkuser, self.splunkpassword)
			except httplib2.ServerNotFoundError, e:
				raise LoginError("Unable to find the server at %s" % self.splunkHostPath)
			except Exception, e:
				raise LoginError("userid/password combination for splunk user is invalid...")
			
			if not key:
				raise LoginError("userid/password combination for splunk user is invalid...")
			
			if box[0] == "'" or box[0] == '"':
				ss = 'search index=mail mailbox=' + box + ' | head 1 | stats max(Date)'
			else:
				ss = 'search index=mail mailbox="' + box + '" | head 1 | stats max(Date)'

			job = se.dispatch(ss, sessionKey=key)

			start = datetime.datetime.now()

			logging.debug("dispatched search = " + ss)
			logging.debug("dispatched job to splunk through the REST API. Waiting for response...")

			while not job.isDone:
				time.sleep(1)
				logging.debug("*** waiting ")
				now = datetime.datetime.now()
				#if (now - start).seconds > self.timeout:
				if int((now - start).seconds) > int(self.timeout):
					logging.debug("REST response took more than %s seconds, timing out...using default UID of 0 i.e. same as noCache" % str(self.timeout))
					break
Example #16
    def run(self, workspace={}, debug=None):
        import splunk.search as se

        q = self.args['search']
        q = substVars(q, workspace)
        workspace['_'] = se.dispatch(q, **workspace)
Example #17
 def _search_for_last_updated_issue(self, repository):
     search_string = 'search index=splunkgit sourcetype="github_data" repository=%s github_issue_update_time=* | sort -str(github_issue_update_time) | head 1' % repository
     issue_search = search.dispatch(search_string)
     while not issue_search.isDone:
         time.sleep(0.5) #for a while
     return issue_search
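
When the loop exits the job is done, so the single row returned by head 1 carries the most recent update time. A sketch of reading it back, mirroring the try/except that Example #18 uses for possibly-empty results (the field name is assumed to survive the sort unchanged):

def last_updated_time(issue_search):
    # issue_search: the completed job returned by _search_for_last_updated_issue().
    try:
        return str(issue_search.results[0]['github_issue_update_time'])
    except (IndexError, KeyError):
        return None    # no issue events indexed yet for this repository
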
Example #18
    def getCacheIDForMailbox(self, box):
        if not self.noCache:

            #If we are here it means we have to extract the last used UID from splunk...
            import splunk.auth as au
            import splunk.search as se
            import splunk
            import httplib2
            import time
            import string

            if self.splunkxpassword:
                try:
                    p = subprocess.Popen(
                        'openssl bf -d -a -pass file:"%s"' %
                        (os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                      'splunk.secret')),
                        shell=True,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
                    self.splunkpassword = p.communicate(self.splunkxpassword +
                                                        '\n')[0]
                except Exception as e:
                    if self.debug:
                        logging.error(e)
                        print(traceback.print_exc(file=sys.stderr))
                    raise ConfigError('Could not decrypt splunkxpassword')

            logging.debug("decrypted splunk password")

            splunk.mergeHostPath(self.splunkHostPath, True)
            try:
                key = au.getSessionKey(self.splunkuser, self.splunkpassword)
            except httplib2.ServerNotFoundError as e:
                raise LoginError("Unable to find the server at %s" %
                                 self.splunkHostPath)
            except Exception as e:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if not key:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if box[0] == "'" or box[0] == '"':
                ss = 'search index=mail mailbox=' + box + ' | head 1 | stats max(Date)'
            else:
                ss = 'search index=mail mailbox="' + box + '" | head 1 | stats max(Date)'

            job = se.dispatch(ss, sessionKey=key)

            start = datetime.datetime.now()

            logging.debug("dispatched search = " + ss)
            logging.debug(
                "dispatched job to splunk through the REST API. Waiting for response..."
            )

            while not job.isDone:
                time.sleep(1)
                logging.debug("*** waiting ")
                now = datetime.datetime.now()
                #if (now - start).seconds > self.timeout:
                if int((now - start).seconds) > int(self.timeout):
                    logging.debug(
                        "REST response took more than %s seconds, timing out...using default UID of 0 i.e. same as noCache",
                        self.timeout)
                    break

            #if we have caching on, and we run this for the first time, the result will not have any key like UID
            #Hence it will throw a KeyError or IndexError. Just ignore that error and return 0
            try:
                retVal = str(job.results[0]['max(Date)'])
                logging.debug(" got back " + str(retVal))
            except Exception as e:
                logging.debug(str(e))
                logging.debug(" mailbox was empty ")
                retVal = ""

            job.cancel()

            return retVal

        else:
            return ""
Example #19
            except Exception, e:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if not key:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if box[0] == "'" or box[0] == '"':
                ss = 'search index=mail mailbox=' + box + ' | head 1 | stats max(Date)'
            else:
                ss = 'search index=mail mailbox="' + box + '" | head 1 | stats max(Date)'

            job = se.dispatch(ss, sessionKey=key)

            start = datetime.datetime.now()

            logging.debug("dispatched search = " + ss)
            logging.debug(
                "dispatched job to splunk through the REST API. Waiting for response..."
            )

            while not job.isDone:
                time.sleep(1)
                logging.debug("*** waiting ")
                now = datetime.datetime.now()
                #if (now - start).seconds > self.timeout:
                if int((now - start).seconds) > int(self.timeout):
                    logging.debug(