def __init__(self):
    """Configure the default splunkd protocol and host for the splunk.* SDK."""
    raw_ssl_setting = self.conf(key='enableSplunkdSSL', name="server",
                                stanza="sslConfig", default="true")
    # normalizeBoolean doesn't do its job, so we clean up for unusual cases
    try:
        ssl_enabled = splunk.util.normalizeBoolean(raw_ssl_setting,
                                                   enableStrictMode=True)
    except ValueError:
        ssl_enabled = False
    protocol = 'https' if ssl_enabled else 'http'
    # old way
    self._splunkd_urlhost = '%s://%s' % (protocol, self.conf('mgmtHostPort'))
    # better way: this sets the global default for any object that uses the
    # splunk.* SDK
    splunk.setDefault('protocol', protocol)
    splunk.mergeHostPath(self.conf('mgmtHostPort'), True)
def getCacheIDForMailbox(self, box):
    """Authenticate against splunkd so the last-used UID for *box* can be read.

    Decrypts self.splunkxpassword (if set) via openssl, then logs in to the
    splunkd at self.splunkHostPath.  Raises ConfigError on decryption failure
    and LoginError when the server is unreachable.
    """
    if not self.noCache:
        # If we are here it means we have to extract the last used UID from splunk...
        import splunk.auth as au
        import splunk.search as se
        import splunk
        import httplib2
        import time
        import string
        if self.splunkxpassword:
            try:
                # FIX: quote the secret-file path (as the other copy of this
                # routine does) so a SPLUNK_HOME containing spaces doesn't
                # break the shell command.
                p = subprocess.Popen(
                    'openssl bf -d -a -pass file:"%s"' % (
                        os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                     'splunk.secret')),
                    shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                self.splunkpassword = p.communicate(self.splunkxpassword + '\n')[0]
            except Exception as e:  # was py2-only "except Exception, e"
                if self.debug:
                    logging.error(e)
                    print(traceback.print_exc(file=sys.stderr))
                raise ConfigError('Could not decrypt splunkxpassword')
        logging.debug("decrypted splunk password")
        splunk.mergeHostPath(self.splunkHostPath, True)
        try:
            key = au.getSessionKey(self.splunkuser, self.splunkpassword)
        except httplib2.ServerNotFoundError as e:
            raise LoginError("Unable to find the server at %s" % self.splunkHostPath)
def get_total_event_count(server, index, username, password):
    """Return the total event count of *index* on *server*, or 0 if unknown."""
    from splunk import entity, auth, mergeHostPath
    mergeHostPath(server, True)
    auth.getSessionKey(username=username, password=password)
    props = entity.getEntity(entityPath='/data/indexes',
                            entityName=index).properties
    # missing key means the index reports no count yet
    return int(props['totalEventCount']) if 'totalEventCount' in props else 0
def parseSearchToXML(search, hostPath=None, sessionKey=None, parseOnly='t',
                     namespace=None, owner=None):
    """
    Given a valid search string, return the XML from the splunk parsing
    endpoint that represents the search.  Returns None for an empty search;
    raises splunk.SearchException on a 4xx response.
    """
    if search is None or len(search) == 0:  # was "search == None" (identity test preferred)
        return None
    if not owner:
        owner = auth.getCurrentUser()['name']
    uri = entity.buildEndpoint('/search/parser', namespace=namespace, owner=owner)
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    args = {'q': search, 'parse_only': parseOnly}
    serverResponse, serverContent = rest.simpleRequest(uri, getargs=args,
                                                       sessionKey=sessionKey)
    # normal messages from splunkd are propogated via SplunkdException;
    if 400 <= serverResponse.status < 500:
        root = et.fromstring(serverContent)
        extractedMessages = rest.extractMessages(root)
        for msg in extractedMessages:
            # py3-compatible raise (was py2-only "raise E, v" form)
            raise splunk.SearchException(msg['text'])
    return serverContent
def getSessionKey(username, password, hostPath=None):
    '''
    Get a session key from the auth system
    '''
    uri = '/services/auth/login'
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    credentials = {'username': username, 'password': password}

    # To prove the theory of timing issue of Splunkd not in running state
    # in Windows Bamboo tests, sleep for 10 seconds
    # An attempt to fix SPL-37413
    # if platform.system() == 'Windows':
    #     time.sleep(10)

    serverResponse, serverContent = rest.simpleRequest(uri, postargs=credentials)
    if serverResponse.status != 200:
        logger.error('getSessionKey - unable to login; check credentials')
        rest.extractMessages(et.fromstring(serverContent))
        return None

    sessionKey = et.fromstring(serverContent).findtext('sessionKey')
    # remember the credentials as process-wide defaults for the SDK
    splunk.setDefault('username', username)
    splunk.setDefault('sessionKey', sessionKey)
    return sessionKey
def open(hostname=None, source=None, sourcetype=None, index=None, type='http',
         sessionKey=None, host_regex=None, host_segment=None):
    """ the interface to the 'stream' receivers endpoint """
    # construct the uri to POST to
    base_uri = splunk.mergeHostPath()
    postargs = {'source': source, 'sourcetype': sourcetype, 'index': index}
    # forward exactly one host-identification argument, in priority order
    if host_regex:
        postargs['host_regex'] = host_regex
    elif host_segment:
        postargs['host_segment'] = host_segment
    elif hostname:
        postargs['host'] = hostname
    endpoint = '/services/receivers/stream?%s' % urlencode(postargs)
    # get default session key. If none exists, the rest call will raise a
    # splunk.AuthenticationFailed exception
    sessionKey = sessionKey or splunk.getSessionKey()
    proto, host_colon_port = base_uri.split("://", 1)
    return StreamHandler(host_colon_port, endpoint, sessionKey, type,
                         proto != 'http')
def dispatchSavedSearch(savedSearchName, sessionKey=None, namespace=None,
                        owner=None, hostPath=None, now=0, triggerActions=0,
                        **kwargs):
    """Initiates a new job based on a saved search.

    Returns a splunk.search.SearchJob; raises splunk.SearchException when
    splunkd does not answer 201.
    """
    uri = entity.buildEndpoint(['saved', 'searches', savedSearchName, 'dispatch'],
                               namespace=namespace, owner=owner)
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    args = {'now': now, 'trigger_actions': triggerActions}
    for key, val in kwargs.items():
        if key in SAVED_SEARCH_DISPATCH_ARG_MAP:
            args[SAVED_SEARCH_DISPATCH_ARG_MAP[key]] = val
        # Pass through for dispatch.* formated kwargs
        elif key.startswith('dispatch.'):
            args[key] = val
    serverResponse, serverContent = rest.simpleRequest(uri, postargs=args,
                                                       sessionKey=sessionKey)
    root = et.fromstring(serverContent)
    # normal messages from splunkd are propogated via SplunkdException;
    if not 201 == serverResponse.status:
        extractedMessages = rest.extractMessages(root)
        for msg in extractedMessages:
            # py3-compatible raise (was py2-only "raise E, v" form)
            raise splunk.SearchException(msg['text'])
    # get the search ID
    sid = root.findtext('sid').strip()
    # instantiate result object
    return splunk.search.SearchJob(sid, hostPath, sessionKey, namespace, owner)
def generateResults(self, host_app=None, client_app=None, savedSearchName=None,
                    useHistory=None):
    """Return the named saved search (plus its latest scheduler job, if any)
    serialized as JSON; returns "" on any failure."""
    if savedSearchName:
        jsonSearch = None
        owner = 'nobody'
        try:
            savedSearchObject = splunk.search.getSavedSearch(
                label=savedSearchName, namespace=client_app, owner=owner)
            jsonSearch = splunk.appserver.mrsparkle.util.resurrectFromSavedSearch(
                savedSearchObject=savedSearchObject,
                hostPath=splunk.mergeHostPath(),
                namespace=client_app,
                owner=owner)
            job = splunk.search.getJobForSavedSearch(
                savedSearchName, useHistory="True", namespace=client_app,
                owner=owner, search='name=scheduler*')
            if (job):
                jsonSearch["job"] = job.toJsonable(timeFormat='unix')
            return json.dumps(jsonSearch)
        except Exception as e:  # was py2-only "except Exception, e"
            logger.exception(e)
            return ""
def getSessionKey(username, password, hostPath=None):
    '''
    Get a session key from the auth system
    '''
    uri = '/services/auth/login'
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri

    # To prove the theory of timing issue of Splunkd not in running state
    # in Windows Bamboo tests, sleep for 10 seconds
    # An attempt to fix SPL-37413
    # if platform.system() == 'Windows':
    #     time.sleep(10)

    response, content = rest.simpleRequest(
        uri, postargs={'username': username, 'password': password})
    if response.status != 200:
        logger.error('getSessionKey - unable to login; check credentials')
        rest.extractMessages(et.fromstring(content))
        return None

    root = et.fromstring(content)
    sessionKey = root.findtext('sessionKey')
    # cache credentials as SDK-wide defaults
    splunk.setDefault('username', username)
    splunk.setDefault('sessionKey', sessionKey)
    return sessionKey
def _redirect_to_local(self, b):
    """Build the URL of the 'local' copy of *b* under this endpoint's base."""
    segments = list(self.pathParts[:(self.BASE_DEPTH - 1)])
    segments.append('local')
    segments.append(urllib.quote(b.prettyname()))
    return splunk.mergeHostPath() + '/' + '/'.join(segments)
def generateResults(self, host_app=None, client_app=None, savedSearchName=None,
                    useHistory=None):
    """Return the named saved search (plus its latest scheduler job, if any)
    serialized as JSON; returns "" on any failure."""
    if savedSearchName:
        jsonSearch = None
        owner = 'nobody'
        try:
            savedSearchObject = splunk.search.getSavedSearch(
                label=savedSearchName, namespace=client_app, owner=owner)
            jsonSearch = splunk.appserver.mrsparkle.util.resurrectFromSavedSearch(
                savedSearchObject=savedSearchObject,
                hostPath=splunk.mergeHostPath(),
                namespace=client_app,
                owner=owner)
            job = splunk.search.getJobForSavedSearch(
                savedSearchName, useHistory="True", namespace=client_app,
                owner=owner, search='name=scheduler*')
            if (job):
                jsonSearch["job"] = job.toJsonable(timeFormat='unix')
            return json.dumps(jsonSearch)
        except Exception as e:  # was py2-only "except Exception, e"
            logger.exception(e)
            return ""
def generateByline():
    '''This should be safe'''
    # FIX: fallback value was misspelled 'UNKONWN_HOST'
    appHost = cherrypy.request.headers.get('host', 'UNKNOWN_HOST')
    splunkdHost = splunk.mergeHostPath()
    buildNumber = cherrypy.config.get('build_number', 'UNKNOWN_BUILD')
    currentTime = str(datetime.datetime.now().ctime())
    return ('You are using <span>%s</span>, which is connected to splunkd '
            '<span>@%s</span> at <span>%s</span> on <span>%s</span>'
            % (appHost, buildNumber, splunkdHost, currentTime))
def generateByline():
    '''This should be safe'''
    # FIX: fallback value was misspelled 'UNKONWN_HOST'
    appHost = cherrypy.request.headers.get('host', 'UNKNOWN_HOST')
    splunkdHost = splunk.mergeHostPath()
    buildNumber = cherrypy.config.get('build_number', 'UNKNOWN_BUILD')
    currentTime = str(datetime.datetime.now().ctime())
    return ('You are using <span>%s</span>, which is connected to splunkd '
            '<span>@%s</span> at <span>%s</span> on <span>%s</span>' % (
                appHost, buildNumber, splunkdHost, currentTime))
def _foreign_to_native_url(self, url):
    """
    Convert a remote-provider URL into a URL pointing to this endpoint.
    Non-matching URLs are returned untouched.
    """
    if not url.startswith(self._base):
        return url
    converted_base = splunk.mergeHostPath()
    # prepend this endpoint's path prefix
    for segment in self.pathParts[:self.BASE_DEPTH]:
        converted_base = converted_base + '/' + segment
    tail = url[len(self._base):]
    return converted_base + tail
def submit(event, hostname=None, source=None, sourcetype=None, index=None):
    """ the interface to the 'simple' receivers endpoint

    Raises splunk.RESTException on 4xx and splunk.SplunkdException on 5xx;
    returns the raw server response otherwise.
    """
    global h
    # construct the uri to POST to
    base_uri = splunk.mergeHostPath()
    postargs = {'host': hostname, 'source': source, 'sourcetype': sourcetype,
                'index': index}
    uri = base_uri + '/services/receivers/simple?%s' % urlencode(postargs)
    # get default session key. If none exists, the rest call will raise a
    # splunk.AuthenticationFailed exception
    sessionKey = splunk.getDefault('sessionKey')
    # make the call, we cannot use the rest interface here as it urlencodes
    # the payload
    serverResponse, serverContent = h.request(
        uri, "POST",
        headers={'Authorization': 'Splunk %s' % sessionKey},
        body=event)
    # process results
    root = et.fromstring(serverContent)
    # 4xx error messages indicate a client side error e.g. bad request,
    # unauthorized etc so raise a RESTException
    if 400 <= serverResponse.status < 500:
        extractedMessages = rest.extractMessages(root)
        msg_text = []
        for msg in extractedMessages:
            msg_text.append('message type=%(type)s code=%(code)s text=%(text)s;' % msg)
        # py3-compatible raise (was py2-only "raise E, (a, b)" form)
        raise splunk.RESTException(serverResponse.status, msg_text)
    # 5xx error messages indicate server side error e.g. Internal server error
    # etc so raise a SplunkdException
    elif serverResponse.status >= 500:
        extractedMessages = rest.extractMessages(root)
        msg_text = []
        for msg in extractedMessages:
            msg_text.append('message type=%(type)s code=%(code)s text=%(text)s;' % msg)
        raise splunk.SplunkdException(serverResponse.status, msg_text)
    # everything is kosher...
    else:
        return serverResponse
def ping(hostPath=None, sessionKey=None):
    '''
    Pings services server and returns a bool for a users session. This method
    is useful for synchronizing an applications authentication with Splunk's
    services authentication.
    '''
    uri = '/services'
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    try:
        serverResponse, serverContent = rest.simpleRequest(uri,
                                                           sessionKey=sessionKey)
        return True
    except Exception:  # was a bare "except:", which also swallowed SystemExit/KeyboardInterrupt
        return False
def getCacheIDForMailbox(self, box):
    """Authenticate against splunkd so the last-used UID for *box* can be read.

    Decrypts self.splunkxpassword (if set) via openssl, then logs in to the
    splunkd at self.splunkHostPath.  Raises ConfigError on decryption failure
    and LoginError when the server is unreachable.
    """
    if not self.noCache:
        # If we are here it means we have to extract the last used UID from splunk...
        import splunk.auth as au
        import splunk.search as se
        import splunk
        import httplib2
        import time
        import string
        if self.splunkxpassword:
            try:
                # FIX: quote the secret-file path (as the other copy of this
                # routine does) so a SPLUNK_HOME containing spaces doesn't
                # break the shell command.
                p = subprocess.Popen(
                    'openssl bf -d -a -pass file:"%s"' % (
                        os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                     'splunk.secret')),
                    shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                self.splunkpassword = p.communicate(self.splunkxpassword + '\n')[0]
            except Exception as e:  # was py2-only "except Exception, e"
                if self.debug:
                    logging.error(e)
                    print(traceback.print_exc(file=sys.stderr))
                raise ConfigError('Could not decrypt splunkxpassword')
        logging.debug("decrypted splunk password")
        splunk.mergeHostPath(self.splunkHostPath, True)
        try:
            key = au.getSessionKey(self.splunkuser, self.splunkpassword)
        except httplib2.ServerNotFoundError as e:
            raise LoginError("Unable to find the server at %s" % self.splunkHostPath)
def ping(hostPath=None, sessionKey=None):
    '''
    Pings services server and returns a bool for a users session. This method
    is useful for synchronizing an applications authentication with Splunk's
    services authentication.
    '''
    uri = '/services'
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    try:
        serverResponse, serverContent = rest.simpleRequest(
            uri, sessionKey=sessionKey)
        return True
    except Exception:  # was a bare "except:", which also swallowed SystemExit/KeyboardInterrupt
        return False
def status(self, **args):
    '''
    Provides a debug output page for appserver config
    '''
    hasReadPerms = self._hasReadPerms()

    # get overview items
    general = splunk.util.OrderedDict()
    general['Appserver boot path'] = getattr(__main__, '__file__', '<N/A>')
    general['Splunkd URI'] = splunk.mergeHostPath()
    general['Debug Mode'] = __debug__

    # get various dicts
    configController = ConfigController()
    uiConfig = configController.index(asDict=True)
    mm = moduleMapper
    moduleMap = mm.getInstalledModules()

    uiPanels = splunk.util.OrderedDict()
    uiPanels['config'] = uiConfig
    uiPanels['views'] = en.getEntities(
        view.VIEW_ENTITY_CLASS, namespace=splunk.getDefault('namespace'))
    uiPanels['modules'] = moduleMap
    uiPanels['cherrypy'] = cherrypy.config
    uiPanels['request'] = args
    uiPanels['wsgi'] = cherrypy.request.wsgi_environ

    splunkdPanels = splunk.util.OrderedDict()

    # code to display splunkd debug information as well
    try:
        serverResponse, serverContent = splunk.rest.simpleRequest(
            '/services/debug/status',
            sessionKey=cherrypy.session['sessionKey'])
        atomFeed = splunk.rest.format.parseFeedDocument(serverContent)
        atomFeed_prim = atomFeed.toPrimitive()
        general['Splunkd time'] = splunk.util.getISOTime(atomFeed.updated)
        general['Splunkd home'] = atomFeed_prim.get('SPLUNK_HOME', '<unknown>')
        for key in atomFeed_prim:
            splunkdPanels[key] = atomFeed_prim[key]
    except splunk.AuthenticationFailed as e:  # was py2-only "except E, e"
        splunkdPanels['errors'] = ('The appserver is not authenticated with '
                                   'splunkd; retry login')
def status(self, **args):
    '''
    Provides a debug output page for appserver config
    '''
    hasReadPerms = self._hasReadPerms()

    # get overview items
    general = splunk.util.OrderedDict()
    general['Appserver boot path'] = getattr(__main__, '__file__', '<N/A>')
    general['Splunkd URI'] = splunk.mergeHostPath()
    general['Debug Mode'] = __debug__

    # get various dicts
    configController = ConfigController()
    uiConfig = configController.index(asDict=True)
    mm = moduleMapper
    moduleMap = mm.getInstalledModules()

    uiPanels = splunk.util.OrderedDict()
    uiPanels['config'] = uiConfig
    uiPanels['views'] = en.getEntities(view.VIEW_ENTITY_CLASS,
                                       namespace=splunk.getDefault('namespace'))
    uiPanels['modules'] = moduleMap
    uiPanels['cherrypy'] = cherrypy.config
    uiPanels['request'] = args
    uiPanels['wsgi'] = cherrypy.request.wsgi_environ

    splunkdPanels = splunk.util.OrderedDict()

    # code to display splunkd debug information as well
    try:
        serverResponse, serverContent = splunk.rest.simpleRequest(
            '/services/debug/status', sessionKey=cherrypy.session['sessionKey'])
        atomFeed = splunk.rest.format.parseFeedDocument(serverContent)
        atomFeed_prim = atomFeed.toPrimitive()
        general['Splunkd time'] = splunk.util.getISOTime(atomFeed.updated)
        general['Splunkd home'] = atomFeed_prim.get('SPLUNK_HOME', '<unknown>')
        for key in atomFeed_prim:
            splunkdPanels[key] = atomFeed_prim[key]
    except splunk.AuthenticationFailed as e:  # was py2-only "except E, e"
        splunkdPanels['errors'] = ('The appserver is not authenticated with '
                                   'splunkd; retry login')
def submit(event, hostname=None, source=None, sourcetype=None, index=None):
    """ the interface to the 'simple' receivers endpoint

    Raises splunk.RESTException on 4xx and splunk.SplunkdException on 5xx;
    returns the raw server response otherwise.
    """
    global h
    # construct the uri to POST to
    base_uri = splunk.mergeHostPath()
    postargs = {'host': hostname, 'source': source,
                'sourcetype': sourcetype, 'index': index}
    uri = base_uri + '/services/receivers/simple?%s' % urlencode(postargs)
    # get default session key. If none exists, the rest call will raise a
    # splunk.AuthenticationFailed exception
    sessionKey = splunk.getDefault('sessionKey')
    # make the call, we cannot use the rest interface here as it urlencodes
    # the payload
    serverResponse, serverContent = h.request(
        uri, "POST", headers={'Authorization': 'Splunk %s' % sessionKey},
        body=event)
    # process results
    root = et.fromstring(serverContent)
    # 4xx error messages indicate a client side error e.g. bad request,
    # unauthorized etc so raise a RESTException
    if 400 <= serverResponse.status < 500:
        extractedMessages = rest.extractMessages(root)
        msg_text = []
        for msg in extractedMessages:
            msg_text.append('message type=%(type)s code=%(code)s text=%(text)s;' % msg)
        # py3-compatible raise (was py2-only "raise E, (a, b)" form)
        raise splunk.RESTException(serverResponse.status, msg_text)
    # 5xx error messages indicate server side error e.g. Internal server error
    # etc so raise a SplunkdException
    elif serverResponse.status >= 500:
        extractedMessages = rest.extractMessages(root)
        msg_text = []
        for msg in extractedMessages:
            msg_text.append('message type=%(type)s code=%(code)s text=%(text)s;' % msg)
        raise splunk.SplunkdException(serverResponse.status, msg_text)
    # everything is kosher...
    else:
        return serverResponse
def parseSearchToXML(search, hostPath=None, sessionKey=None, parseOnly='t',
                     timeline=None, namespace=None, owner=None):
    """
    Given a valid search string, return the XML from the splunk parsing
    endpoint that represents the search.  Returns None for an empty search;
    raises splunk.SearchException on a 4xx response.
    """
    if search is None or len(search) == 0:  # was "search == None" (identity test preferred)
        return None
    if not owner:
        owner = auth.getCurrentUser()['name']
    uri = entity.buildEndpoint('/search/parser', namespace=namespace, owner=owner)
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    args = {'q': search, 'parse_only': parseOnly}
    if timeline is not None:
        args['timeline'] = timeline
    serverResponse, serverContent = rest.simpleRequest(uri, getargs=args,
                                                       sessionKey=sessionKey)
    # normal messages from splunkd are propogated via SplunkdException;
    if 400 <= serverResponse.status < 500:
        root = et.fromstring(serverContent)
        extractedMessages = rest.extractMessages(root)
        for msg in extractedMessages:
            # py3-compatible raise (was py2-only "raise E, v" form)
            raise splunk.SearchException(msg['text'])
    return serverContent
def open(hostname=None, source=None, sourcetype=None, index=None, type='http',
         sessionKey=None, host_regex=None, host_segment=None):
    """ the interface to the 'stream' receivers endpoint """
    # construct the uri to POST to
    base_uri = splunk.mergeHostPath()
    postargs = {'source': source, 'sourcetype': sourcetype, 'index': index}
    # forward exactly one host-identification argument, in priority order
    if host_regex:
        postargs['host_regex'] = host_regex
    elif host_segment:
        postargs['host_segment'] = host_segment
    elif hostname:
        postargs['host'] = hostname
    endpoint = '/services/receivers/stream?%s' % urlencode(postargs)
    # get default session key. If none exists, the rest call will raise a
    # splunk.AuthenticationFailed exception
    if not sessionKey:
        sessionKey = splunk.getSessionKey()
    proto, host_colon_port = base_uri.split("://", 1)
    use_ssl = proto != 'http'
    return StreamHandler(host_colon_port, endpoint, sessionKey, type, use_ssl)
def getRemoteSessionKey(username, password, hostPath):
    '''
    Get a remote session key from the auth system
    If fails return None
    '''
    uri = splunk.mergeHostPath(hostPath) + "/services/auth/login"
    args = {"username": username, "password": password}
    try:
        # FIX: was "splunk_rest_equest(...)", an undefined name (typo);
        # use the same rest.simpleRequest helper as the other auth calls.
        serverResponse, serverContent = rest.simpleRequest(uri, postargs=args)
    except splunk.AuthenticationFailed:
        return None
    if serverResponse.status != 200:
        logger.error(
            "getRemoteSessionKey - unable to login; check credentials")
        rest.extractMessages(et.fromstring(serverContent))
        return None
    root = et.fromstring(serverContent)
    sessionKey = root.findtext("sessionKey")
    return sessionKey
def getSessionKeyForTrustedUser(username, hostPath=None):
    '''
    Get a session key from the auth system
    '''
    uri = '/services/auth/trustedlogin'
    if hostPath:
        uri = splunk.mergeHostPath(hostPath) + uri
    response, content = rest.simpleRequest(uri, postargs={'username': username})
    if response.status != 200:
        logger.error('getSessionKey - unable to login; check credentials')
        rest.extractMessages(et.fromstring(content))
        return None
    sessionKey = et.fromstring(content).findtext('sessionKey')
    # remember the session as SDK-wide defaults
    splunk.setDefault('username', username)
    splunk.setDefault('sessionKey', sessionKey)
    return sessionKey
class TestParse(unittest.TestCase):
    """Unit tests for search-string parsing (ParsedClause / parseSearch)."""

    # NOTE(review): these run at class-definition (import) time, not in
    # setUp, so a local splunkd with admin/changeme must be reachable —
    # TODO confirm intended.
    _sessionKey = auth.getSessionKey('admin', 'changeme')
    _hostPath = splunk.mergeHostPath()

    # searches
    q = {
        'single': "search foo bar baz",
        'two': "search quux | diff position1=1 position2=2",
        'quotes': 'search twikiuser="******" | diff position1=1 position2=2',
    }

    def testCreateClause(self):
        """ Test the creation of new clause objects """
        clause1 = ParsedClause()
        clause1.command = "search"
        clause1.args = "foo"
        self.assertEquals(clause1.serialize(), "search foo")
        # python dicts (the structure in which args are stored in the
        # ParsedClause class) no longer maintain determinate ordering.
        # therefore, the output of this test can be either
        #   search index="_audit" foo bar baz, or
        #   search foo bar baz index="_audit"
        # both are identical searches
        clause2 = ParsedClause()
        clause2.command = "search"
        clause2.args = {'index': '_audit', 'search': "foo bar baz"}
        clause2String = clause2.serialize()
        self.assertTrue(
            clause2String == 'search index="_audit" foo bar baz'
            or clause2String == 'search foo bar baz index="_audit"')
        clause3 = ParsedClause(command="search", args="quux")
        self.assertEquals(clause3.serialize(), 'search quux')
        clause4 = ParsedClause(command="loglady")
        self.assertEquals(clause4.serialize(), 'loglady')

    def testEqualsOperatorClause(self):
        """ Test the equals operator in ParsedClause """
        # two clauses, including kv's that should be ignored in the compare, string case
        clause1 = ParsedClause()
        clause1.command = "search"
        clause1.args = "foo readlevel=2"
        clause2 = ParsedClause()
        clause2.command = "search"
        clause2.args = "foo index=default"
        self.assert_(clause1 == clause2)
        # two clauses, including kv's that should be ignored in the compare, dict case
        clause3 = ParsedClause()
        clause3.command = "search"
        clause3.args = {"index": "_internal", "user": "******"}
        clause4 = ParsedClause()
        clause4.command = "search"
        clause4.args = {
            "index": "_internal",
            "user": "******",
            "readlevel": "2"
        }
        self.assert_(clause3 == clause4)
        # two clauses, including kv's that should be not ignored in the compare, string case
        clause5 = ParsedClause()
        clause5.command = "search"
        clause5.args = "foo readlevel=11"
        clause6 = ParsedClause()
        clause6.command = "search"
        clause6.args = "foo index=default"
        self.failIf(clause5 == clause6)
        # test indiv clauses pulled out of ParsedSearch
        search1 = parseSearch(self.q['two'], hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
        search2 = parseSearch(self.q['two'], hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
        self.assert_(search1.clauses[1] == search2.clauses[1])

    def testEqualsOperatorSearch(self):
        """ Test the equals operator in ParsedSearch """
        ps1 = parseSearch(self.q['single'], hostPath=self._hostPath,
                          sessionKey=self._sessionKey)
        ps2 = parseSearch(self.q['single'], hostPath=self._hostPath,
                          sessionKey=self._sessionKey)
        self.assert_(ps1 == ps2)
        ps3 = parseSearch(self.q['single'], hostPath=self._hostPath,
                          sessionKey=self._sessionKey)
        ps4 = parseSearch(self.q['two'], hostPath=self._hostPath,
                          sessionKey=self._sessionKey)
        self.assert_(ps3 != ps4)

    def testParseOneClause(self):
        """ Test the parsing of a single clause search """
        ps = parseSearch(self.q['single'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(len(ps.clauses), 1)
        self.assertEquals(ps.clauses[0].command, 'search')
        self.assertEquals(ps.clauses[0].serialize(), 'search foo bar baz')
        self.assert_(ps.clauses[0].properties['streamType'] == 'SP_STREAM')

    def testParseTwoClause(self):
        """ Test the parsing of a single clause search """
        ps = parseSearch(self.q['two'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(len(ps.clauses), 2)
        self.assertEquals(ps.clauses[0].command, 'search')
        self.assertEquals(ps.clauses[1].command, 'diff')
        self.assertEquals(normalizeListArgs(ps.clauses[0].args['search']), 'quux')
        self.assertEquals(normalizeListArgs(ps.clauses[1].args),
                          'position1=1 position2=2')
        # NOTE(review): py2 print statement preserved as-is
        print "PROPS:", ps.clauses[1].properties
        self.assertEquals(ps.clauses[1].properties['streamType'], 'SP_EVENTS')

    def testSerialize(self):
        """ Test search serialization/tostring"""
        ps = parseSearch(self.q['single'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(str(ps), self.q['single'])
        ps = parseSearch(self.q['two'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(str(ps), self.q['two'])
        ps = parseSearch(self.q['quotes'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(str(ps), self.q['quotes'])
        indexSearch = 'search index="_audit"'
        ps = parseSearch(indexSearch, hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        self.assertEquals(str(ps), indexSearch)

    def testJsonable(self):
        """ Test JSONable """
        ps = parseSearch(self.q['single'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        print "\n\t", json.dumps(ps.jsonable())
        ps = parseSearch(self.q['quotes'], hostPath=self._hostPath,
                         sessionKey=self._sessionKey)
        print "\t", json.dumps(ps.jsonable())

    def test_chartSerializer(self):
        """ Test chart serialization """
        pc = ParsedClause()
        # map of expected serialized string -> charting properties dict
        cases = {
            'chart sum(events) by hello,world': {
                'xfield': 'hello',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }],
                'seriesfield': 'world'
            },
            'chart sum(events),count by hello,world': {
                'xfield': 'hello',
                'seriesfield': 'world',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }, {
                    'function': 'count',
                    'rename': 'count'
                }]
            },
            'timechart sum(events) by world': {
                'xfield': '_time',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }],
                'seriesfield': 'world'
            },
            'timechart sum(events),count by hello': {
                'xfield': '_time',
                'seriesfield': 'hello',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }, {
                    'function': 'count',
                    'rename': 'count'
                }]
            },
            'timechart span="1d" sum(events) by world': {
                'xfield': '_time',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }],
                'seriesfield': 'world',
                'span': '1d'
            },
            'timechart bins=5 sum(events) by world': {
                'xfield': '_time',
                'stat-specifiers': [{
                    'function': 'sum',
                    'field': 'events',
                    'rename': 'sum(events)'
                }],
                'seriesfield': 'world',
                'bins': 5
            },
        }
        for k, v in cases.items():
            command = k.split()[0]
            out = str(ParsedClause(None, command, v))
            #out = pc._chartingSerializer(command, v)
            if out != k:
                print "\n\nINPUT: ", v
                print "GOAL: ", k
                print "OUTPUT:", out
            self.assertEquals(k, out)
def getLicenseInfo():
    """Return the splunkd server/info entity (includes license details)."""
    import splunk.entity  # avoid circ dep
    # set this as a default since getEntity doesn't take proto/host/port just yet...
    splunk.mergeHostPath(getMgmtUri(), saveAsDefault=True)
    return splunk.entity.getEntity("server", "info")
"""The Splunk Controller API Provides the Splink class for subclassing. """ import time import splunk from splunk import auth, search from pylons import config HOST = config.get('splunk.host') splunk.mergeHostPath(HOST, True) class Splunk: # first get the session key # (the method will automatically cache during the interactive session) auth.getSessionKey('admin','changeme') def searchSplunk(self): # ///////////////////////////////////////////////////////////////////////////// # Scenario 1: do a simple search for all web server logs # ///////////////////////////////////////////////////////////////////////////// # start search job = search.dispatch('search index="coherence" host="*hou" source="coherence_gc_log" sourcetype="garbagecollection" | timechart max(gctime) by host') # at this point, Splunk is running the search in the background; how long it # takes depends on how much data is indexed, and the scope of the search # # from this point, we explore some of the things you can do: #
def getCacheIDForMailbox(self, box):
    """Return the most recent indexed Date for mailbox *box*, or "" if none.

    Used as a cache marker so already-indexed mail is not re-fetched.
    Raises ConfigError if the obfuscated password cannot be decrypted and
    LoginError if splunkd login fails.
    """
    if not self.noCache:
        # If we are here it means we have to extract the last used UID from splunk...
        import splunk.auth as au
        import splunk.search as se
        import splunk
        import httplib2
        import time
        import string
        if self.splunkxpassword:
            try:
                # decrypt the obfuscated password with splunk.secret as the key
                p = subprocess.Popen(
                    'openssl bf -d -a -pass file:"%s"' % (
                        os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                     'splunk.secret')),
                    shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                self.splunkpassword = p.communicate(self.splunkxpassword + '\n')[0]
            except Exception as e:
                if self.debug:
                    logging.error(e)
                    print(traceback.print_exc(file=sys.stderr))
                raise ConfigError('Could not decrypt splunkxpassword')
        logging.debug("decrypted splunk password")
        splunk.mergeHostPath(self.splunkHostPath, True)
        try:
            key = au.getSessionKey(self.splunkuser, self.splunkpassword)
        except httplib2.ServerNotFoundError as e:
            raise LoginError("Unable to find the server at %s" % self.splunkHostPath)
        except Exception as e:
            raise LoginError(
                "userid/password combination for splunk user is invalid..."
            )
        if not key:
            raise LoginError(
                "userid/password combination for splunk user is invalid..."
            )
        # quote the mailbox name unless the caller already quoted it
        if box[0] == "'" or box[0] == '"':
            ss = 'search index=mail mailbox=' + box + ' | head 1 | stats max(Date)'
        else:
            ss = 'search index=mail mailbox="' + box + '" | head 1 | stats max(Date)'
        job = se.dispatch(ss, sessionKey=key)
        start = datetime.datetime.now()
        logging.debug("dispatched search = " + ss)
        logging.debug(
            "dispatched job to splunk through the REST API. Waiting for response..."
        )
        # poll until the job completes, giving up after self.timeout seconds
        while not job.isDone:
            time.sleep(1)
            logging.debug("*** waiting ")
            now = datetime.datetime.now()
            #if (now - start).seconds > self.timeout:
            if int((now - start).seconds) > int(self.timeout):
                logging.debug(
                    "REST response took more than %s seconds, timing out...using default UID of 0 i.e. same as noCache",
                    self.timeout)
                break
        #if we have caching on, and we run this for the first time, the result will not have any key like UID
        #Hence it will throw a KeyError or IndexError. Just ignore that error and return 0
        try:
            retVal = str(job.results[0]['max(Date)'])
            logging.debug(" got back " + str(retVal))
        except Exception as e:
            logging.debug(str(e))
            logging.debug(" mailbox was empty ")
            retVal = ""
        job.cancel()
        return retVal
    else:
        return ""
    # NOTE(review): fragment — these first lines are the tail of an enclosing
    # test helper (presumably _get_final_count) whose "def" line is not
    # visible here; job/fail_msg/ok_msg come from that missing part.
    count = 0
    for ele in job.events:
        count += 1
    job.cancel()
    assert count == 3, fail_msg % count
    print ok_msg

# -------------------------
# -------------------------

if __name__ == '__main__':
    import splunk.auth as au
    import splunk.search
    splunk.mergeHostPath('localhost:8089', True)
    key = au.getSessionKey('admin', 'changeme')
    raw_data = """Apr 29 19:11:54 AAA\nApr 29 19:12:54 BBB\nApr 29 19:13:54 CCC\n"""

    # -------------------------------
    # test simple receivers endpoint
    # -------------------------------
    resp = submit(raw_data, sourcetype='http-receivers', index='default',
                  source='http-test', hostname='simple-receivers-test')
    print 'insertion for simple receivers complete...querying splunk...waiting 60 seconds...'
    try:
        _get_final_count('simple-receivers-test', key,
                         'inserted 3 events via simple receivers end point, but found %d',
                         'insert via simple receivers endpoint - OK')
    except AssertionError, e:
        # test failed, continue to next
        print e
        # NOTE(review): fragment — this begins inside an exception handler of
        # a dispatcher function whose "def"/"try" lines are outside this view.
        # On script failure, answer 500 with the plain-text error.
        responseObject.setStatus(500)
        responseObject.setHeader('content-type', 'text/plain')
        responseObject.write(str(e), True)
        return responseObject.toXml()
    finally:
        # always restore the streams that were redirected around script exec
        sys.stdout = org_stdout
        sys.stderr = org_stderr

    # if the script writer has used the HTTPResponse.write() method, then use
    # that as the raw output
    if responseObject.hasBufferedData():
        return responseObject.toXml()

    # otherwise, methods that return dictionaries or lists or strings get their
    # contents auto-converted into individual entries
    feed = format.primitiveToAtomFeed(splunk.mergeHostPath(),
                                      requestDict['path'], methodOutput)
    feed.messages = classInstance.messages
    responseObject.setHeader('content-type', 'text/xml; charset=utf-8')
    responseObject.write(feed.toXml())
    return responseObject.toXml()


# /////////////////////////////////////////////////////////////////////////////
# Define classes used by dispatcher system
# /////////////////////////////////////////////////////////////////////////////

class HTTPResponse(object):
    """
    Represents a complete HTTP response to pass back to main HTTP server
    # NOTE(review): fragment — tail of a setupLogger() helper whose "def"
    # line is outside this view; log_name/log_format/logger come from the
    # missing part.
    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', log_name]),
        maxBytes=2500000, backupCount=5)
    formatter = logging.Formatter(log_format)
    file_handler.setFormatter(formatter)
    logger.handlers = []
    logger.addHandler(file_handler)
    logger.debug("init read structures service logger")
    return logger


logger = setupLogger()
splunk.setDefault()
local_host_path = splunk.mergeHostPath()


class SOLNSelectorError(cherrypy.HTTPError):
    """
    This error class will be used to set the status and msg on the error
    responses.
    """

    def get_error_page(self, *args, **kwargs):
        # suppress the exception-name line on cherrypy's rendered error page
        kwargs['noexname'] = 'true'
        return super(SOLNSelectorError, self).get_error_page(*args, **kwargs)


class read_structures_service(controllers.BaseController):
    '''Read Structures Service Controller'''

    # Dictionary for single entity views
def main(): # Call argument_parser function and store returned value into ko_value variable ko_value = argument_parser() if ko_value[2]!="": mergeHostPath(ko_value[2], True) session_key = user_check(ko_value) ko_name = ko_value[1] filter = ko_value[3] owner = ko_value[4] file = ko_value[5] # Retrieve knowledge objects if ko_value[0] == 'list': retrieve_content(session_key, ko_name, owner, file, filter) elif ko_value[0] == 'change': new_owner = ko_value[6] sharing = ko_value[7] read_perm = ko_value[8] write_perm = ko_value[9] if read_perm: if ',' in read_perm: r_perm = read_perm.split(',') else: r_perm = read_perm.split(' ') if write_perm: if ',' in write_perm: w_perm = write_perm.split(',') else: w_perm = write_perm.split(' ') # Check whether role exist or not for Read and Write permission if read_perm and write_perm: for readrole in r_perm: if readrole != '*': role_check(readrole) else: if len(r_perm) > 1: print ('You can\'t supply \'*\' with any other role in read permission') sys.exit(1) for writerole in w_perm: if writerole != '*': role_check(writerole) else: if len(w_perm) > 1: print ('You can\'t supply \'*\' with any other role in write permission') sys.exit(1) elif read_perm: for readrole in r_perm: if readrole != '*': role_check(readrole) else: if len(r_perm) > 1: print ('You can\'t supply \'*\' with any other role in read permission') sys.exit(1) elif write_perm: for writerole in w_perm: if writerole != '*': role_check(writerole) else: if len(w_perm) > 1: print ('You can\'t supply \'*\' with any other role in write permission') sys.exit(1) change_permission(session_key, ko_name, owner, new_owner, file, filter, sharing, read_perm, write_perm) elif ko_value[0] == 'move': appname = ko_value[6] if appname: app_check(appname, session_key) move_app(session_key, ko_name, owner, file, filter, appname)
"moid": cluster_id, "name": cluster_name, "type": "ClusterComputeResource", "hosts": hosts }) cluster_index = cluster_index + 1 return (vc, { "clusters": clusters, "existing_hosts_count": existing_hosts_count, "existing_vms_count": existing_vms_count }) if __name__ == "__main__": local_session_key = sys.stdin.readline().strip("\r\n") local_host_path = mergeHostPath() #Get Data Collection Nodes nodes = get_node_adapters(local_host_path, local_session_key) if len(nodes) == 0: logger.info( "could not authenticate with any data collection nodes, exiting run of vmware hierarchy agent" ) sys.exit(0) #Get target Virtual Centers vcs = get_virtual_centers(local_host_path, local_session_key) if len(vcs) == 0: logger.info( "could not find any configured virtual centers, exiting run of vmware hierarchy agent"
class TestTransformUtil(unittest.TestCase):
    """Unit tests for the search-string transform utilities: hasTerm,
    removeTerm, tokenize, stringToSearchKV, _equalKVStringTerms,
    searchKToString, searchVToString and unfilterize.

    NOTE(review): the session key and host path are resolved at class
    definition time, so merely importing this module requires a reachable
    splunkd accepting admin/changeme credentials -- consider moving these
    into setUpClass.
    """

    # Imported at class scope, so Parser is bound as a class attribute.
    import Parser

    # Shared live-connection fixtures, evaluated once when the class is defined.
    _sessionKey = splunk.auth.getSessionKey('admin', 'changeme')
    _hostPath = splunk.mergeHostPath()

    def testHasTerms(self):
        """ Are terms found correctly in search strings? """
        # NOTE(review): several literals below read "******" -- they appear
        # scrubbed by a credential-masking pass, which is why the same
        # username dict is asserted both present and absent further down.
        # The original (unmasked) values were presumably distinct; verify
        # against revision history before trusting these expectations.
        searchString = 'search userid=6 username="******" owner=ivan login'
        ps = Parser.parseSearch(searchString, hostPath=self._hostPath, sessionKey=self._sessionKey)
        sc = getClauseWithCommand(ps, "search")
        self.assert_(hasTerm(sc, "login"))
        self.assert_(hasTerm(sc, {"username": "******"}))
        self.assert_(hasTerm(sc, {"userid": 6}))
        self.assert_(hasTerm(sc, 'username="******"'))
        self.assert_(hasTerm(sc, 'username=nick'))
        self.assert_(hasTerm(sc, 'owner="ivan"'))
        self.assert_(hasTerm(sc, 'owner=ivan'))
        self.assert_(hasTerm(sc, "userid=6"))
        self.assert_(not hasTerm(sc, "shouldNotBeHere"))
        self.assert_(not hasTerm(sc, {"username": "******"}))
        self.assert_(not hasTerm(sc, {"userid": 7}))

    def testRemoveTerms(self):
        """
        Are search terms correctly removed from search strings?
        """
        searchString = 'search loglevel=7 userid=6 username="******" owner=ivan target="mars" destination=home login'
        ps = Parser.parseSearch(searchString, hostPath=self._hostPath, sessionKey=self._sessionKey)
        sc = getClauseWithCommand(ps, "search")
        # Remove every term in turn; after all removals only the implicit
        # wildcard clause 'search *' should remain.
        removeTerm(sc, "login")
        self.assert_(sc.serialize().find("login") == -1)
        removeTerm(sc, {"username": "******"})
        self.assert_(sc.serialize().find('username="******"') == -1)
        removeTerm(sc, {"owner": "ivan"})
        self.assert_(sc.serialize().find('owner=ivan') == -1)
        removeTerm(sc, {"userid": 6})
        self.assert_(sc.serialize().find('userid=6') == -1)
        removeTerm(sc, 'target=mars')
        removeTerm(sc, 'destination="home"')
        removeTerm(sc, "loglevel=7")
        self.assert_(sc.serialize().find('loglevel=7') == -1)
        # ELVIS
        # NOTE(review): leftover debug print -- consider removing.
        print sc.serialize()
        self.assert_(sc.serialize() == 'search *')

        # SPL-32258
        searchString = 'search index=_internal sourcetype=splunkd OR sourcetype=searches'
        ps = Parser.parseSearch(searchString, hostPath=self._hostPath, sessionKey=self._sessionKey)
        sc = getClauseWithCommand(ps, "search")
        removeTerm(sc, {'sourcetype': 'searches'})
        # NOTE(review): self.assert_(sc, msg) only checks that sc is truthy;
        # the second argument is the failure *message*, so the intended
        # comparison against the serialized string is never performed.
        # Should presumably be an assertEquals on sc.serialize().
        self.assert_(sc, 'index="_internal" sourcetype="splunkd"')

        searchString = 'search index=_internal sourcetype=splunkd OR sourcetype=searches'
        ps = Parser.parseSearch(searchString, hostPath=self._hostPath, sessionKey=self._sessionKey)
        sc = getClauseWithCommand(ps, "search")
        removeTerm(sc, {'sourcetype': 'splunkd'})
        # NOTE(review): same no-op assertion pattern as above.
        self.assert_(sc, 'index="_internal" sourcetype="searches"')

    def testRemoveTermsEscaped(self):
        '''
        Verify remove term behavior when presented with terms that contain escape character
        '''
        # Double-backslash form (a literal backslash in the search string).
        beforeSearchString = r'search this \\that foo'
        parser = Parser.parseSearch(beforeSearchString, hostPath=self._hostPath, sessionKey=self._sessionKey)
        searchClause = getClauseWithCommand(parser, 'search')
        removeTerm(searchClause, r'\\that')
        self.assertEquals(searchClause.serialize(), 'search this foo')

        # Single-backslash form.
        beforeSearchString = r'search this \that foo'
        parser = Parser.parseSearch(beforeSearchString,
            hostPath=self._hostPath, sessionKey=self._sessionKey)
        searchClause = getClauseWithCommand(parser, 'search')
        removeTerm(searchClause, r'\that')
        self.assertEquals(searchClause.serialize(), 'search this foo')

    def testTokenize(self):
        """ Are search strings correctly tokenized? """
        # (input string, expected token list) pairs; covers quoting,
        # parenthesis balancing/imbalance, NOT handling, '::' normalization
        # and subsearch brackets.
        tokenTests = [
            ( 'johnsmith', ['johnsmith'] ),
            ( 'john smith', ['john','smith'] ),
            ( 'x="y z"', ['x="y z"'] ),
            ( 'user=Main.JohnSmith', ['user=Main.JohnSmith'] ),
            ( 'superman "Lex Luther"', ['superman', '"Lex Luther"'] ),
            ( 'sourcetype=bar', ['sourcetype=bar'] ),
            ( 'sourcetype::bar', ['sourcetype=bar'] ),
            ( '( sourcetype=bar )', ['sourcetype=bar'] ),
            ( 'source="/var/log/*"', ['source="/var/log/*"'] ),
            ( 'x=p', ['x=p'] ),
            ( 'x="p"', ['x="p"'] ),
            ( 'NOT x', ['NOT x'] ),
            ( 'x NOT y', ['x','NOT y'] ),
            ( 'x NOT y z', ['x','NOT y','z'] ),
            ( '(toBe OR notToBe) question', ['(toBe OR notToBe)', 'question'] ),
            ( 'toBe OR notToBe) question', ['toBe', 'OR', 'notToBe)', 'question'] ),
            ( 'toBe OR notToBe) ) question', ['toBe', 'OR', 'notToBe)', ')', 'question'] ),
            ( 'toBe OR notToBe)) question', ['toBe', 'OR', 'notToBe))', 'question'] ),
            ( '((toBe OR notToBe)) question', ['((toBe OR notToBe))', 'question'] ),
            ( '((toBe OR notToBe question', ['((toBe OR notToBe question'] ),
            ( '(toBe OR (notToBe)) question', ['(toBe OR (notToBe))', 'question'] ),
            ( '(toBe (OR (not)ToBe)) question', ['(toBe (OR (not)ToBe))', 'question'] ),
            ( 'error OR failed OR severe OR ( sourcetype=access_* ( 404 OR 500 OR 503 ) ) starthoursago::24',\
                ['error', 'OR', 'failed', 'OR', 'severe', 'OR', '( sourcetype=access_* ( 404 OR 500 OR 503 ) )', 'starthoursago::24']),
            ( 'error OR failed OR severe OR ( sourcetype="access_*" ( 404 OR 500 OR 503 ) ) starthoursago::24',\
                ['error', 'OR', 'failed', 'OR', 'severe', 'OR', '( sourcetype="access_*" ( 404 OR 500 OR 503 ) )', 'starthoursago::24']),
            ('search foo [search bar | top host | format]', ['search', 'foo', '[search bar | top host | format]']),
            ('search foo [search bar [search wunderbar] | top host | format]', ['search', 'foo', '[search bar [search wunderbar] | top host | format]']),
            ('search "["', ['search', '"["']),
            ('search "]"', ['search', '"]"']),
            ('search "[[]"', ['search', '"[[]"']),
            ('search "("', ['search', '"("']),
            ('search "(["', ['search', '"(["']),
            ('search "]"', ['search', '"]"']),
            ('search this [search "]"]', ['search', 'this', '[search "]"]']),
            ('search this (that OR ")")', ['search', 'this', '(that OR ")")']),
        ]
        for test in tokenTests:
            self.assertEquals(tokenize(test[0]), test[1])

    def testStringToKV(self):
        """ Are terms correctly tokenized in KV pairs? """
        # Bare terms are folded under the special 'search' key.
        self.assertEquals(stringToSearchKV("index=_audit login"), { "index": "_audit", "search": "login" })

    def testEqualStringTerms(self):
        """ Are quotes in kv pairs ignored? """
        self.assert_(_equalKVStringTerms('hello=world', 'hello=world'))
        self.assert_(_equalKVStringTerms('hello=world', 'hello="world"'))
        self.assert_(_equalKVStringTerms("hello='world'", 'hello="world"'))
        self.assert_(_equalKVStringTerms("hello='world'", 'hello=world'))
        self.assertFalse( _equalKVStringTerms("hello='world'", 'hello=wxrld'))

    def testKToString(self):
        """ Are K fields correctly quoted when needed? """
        # Keys are quoted only when they contain whitespace.
        self.assertEquals(searchKToString("johnsmith"), 'johnsmith')
        self.assertEquals(searchKToString("john smith"), '"john smith"')
        self.assertEquals(searchKToString('boo'), 'boo')

    def testKVToString(self):
        """ Are KV pairs correctly merged into search string terms? """
        # String values are always quoted; only non-string numerics are not.
        self.assertEquals(searchVToString("johnsmith"), '"johnsmith"')
        self.assertEquals(searchVToString("john smith"), '"john smith"')
        self.assertEquals(searchVToString(26), '26')
        self.assertEquals(searchVToString('26'), '"26"')
        self.assertEquals(searchVToString('"6"'), '"6"')
        self.assertEquals(searchVToString('"boo"'), '"boo"')
        self.assertEquals(searchVToString('boo'), '"boo"')

    def testUnfilter(self):
        """ Are the search terms correctly parsed out of search filter wrappers?
        """
        # Leading/trailing whitespace around the wrapped filter is tolerated.
        self.assertEquals(unfilterize("( index=_audit login ) ( ( * ) )"), "index=_audit login")
        self.assertEquals( unfilterize(" ( index=_audit login ) ( ( * ) )"), "index=_audit login")
        self.assertEquals( unfilterize(" ( index=_audit login ) ( ( * ) ) "), "index=_audit login")
responseObject.write(str(e), True) # this is the python-to-splunk XML, which contains a base64 encoded payload that may be in a non-XML format. return responseObject.toXml() finally: sys.stdout = org_stdout sys.stderr = org_stderr # if the script writer has used the HTTPResponse.write() method, then use # that as the raw output if responseObject.hasBufferedData(): # this is the python-to-splunk XML, which contains a base64 encoded payload that may be in a non-XML format. return responseObject.toXml() # otherwise, methods that return dictionaries or lists or strings get their # contents auto-converted into individual entries feed = format.primitiveToAtomFeed(splunk.mergeHostPath(), requestDict['path'], methodOutput) feed.messages = classInstance.messages if requestDict["explicit_output_mode"]: if requestDict["output_mode"] == "json": import json responseObject.setHeader('content-type', 'application/json; charset=utf-8') responseObject.write(json.dumps(feed.asJsonStruct(), separators=(',', ':'))) elif requestDict["output_mode"] == "xml": responseObject.setHeader('content-type', 'text/xml; charset=utf-8') responseObject.write(feed.toXml()) else: raise splunk.BadRequest("Output mode='%s' not supported by this endpoint." % requestDict["output_mode"]) else: responseObject.setHeader('content-type', 'text/xml; charset=utf-8')
if serverResponse.status != 200: logger.error( "getRemoteSessionKey - unable to login; check credentials") rest.extractMessages(et.fromstring(serverContent)) return None root = et.fromstring(serverContent) sessionKey = root.findtext("sessionKey") return sessionKey logger = setupLogger() splunk.setDefault() local_host_path = splunk.mergeHostPath() def readRestConfigForCsv(): path = CSV_PATH conf = [] f = open(path, "r") try: info_file = csv.reader(f) for line in info_file: conf = line except: logger.error('file=clayrest.py, msg=Read clay_rest_info.csv Error') stack = traceback.format_exc() logger.error(stack) finally:
responseObject.setStatus(500) responseObject.setHeader('content-type', 'text/plain') responseObject.write(str(e), True) return responseObject.toXml() finally: sys.stdout = org_stdout sys.stderr = org_stderr # if the script writer has used the HTTPResponse.write() method, then use # that as the raw output if responseObject.hasBufferedData(): return responseObject.toXml() # otherwise, methods that return dictionaries or lists or strings get their # contents auto-converted into individual entries feed = format.primitiveToAtomFeed(splunk.mergeHostPath(), requestDict['path'], methodOutput) feed.messages = classInstance.messages responseObject.setHeader('content-type', 'text/xml; charset=utf-8') responseObject.write(feed.toXml()) return responseObject.toXml() # ///////////////////////////////////////////////////////////////////////////// # Define classes used by dispatcher system # ///////////////////////////////////////////////////////////////////////////// class HTTPResponse(object): """ Represents a complete HTTP response to pass back to main HTTP server """