def test_read_outputs_with_type(self):
    """Create a tcp syslog output, verify its type reads back, then delete it.

    Restarts Splunk whenever the server reports a restart is required.
    """
    name = testlib.tmpname()
    service = client.connect(**self.opts.kwargs)
    service.post('data/outputs/tcp/syslog', name=name, type='tcp')
    entity = client.Entity(service, 'data/outputs/tcp/syslog/' + name)
    # BUG FIX: assertTrue('tcp', x) always passed (the second argument is the
    # failure message); the intent was an equality check on the entity's type.
    self.assertEqual('tcp', entity.content.type)
    if service.restart_required:
        self.restartSplunk()
    # Reconnect after a possible restart before cleaning up.
    service = client.connect(**self.opts.kwargs)
    client.Entity(service, 'data/outputs/tcp/syslog/' + name).delete()
    if service.restart_required:
        self.restartSplunk()
def test_read(self):
    """Check every message as listed, and again after an explicit refresh."""
    service = client.connect(**self.opts.kwargs)
    for msg in service.messages:
        self.check_message(msg)
        msg.refresh()
        self.check_message(msg)
def test_read(self):
    """Check every role as listed, and again after an explicit refresh."""
    service = client.connect(**self.opts.kwargs)
    for current_role in service.roles:
        self.check_role(current_role)
        current_role.refresh()
        self.check_role(current_role)
def rds_controller(rds_list, username, password, hostname, splunkusername, splunkpassword, port, indexname):
    """For each RDS instance, extract the latest InnoDB deadlock report and index it in Splunk.

    Args:
        rds_list: list of dicts with "Endpoint" and "Port" keys describing RDS hosts.
        username, password: MySQL credentials.
        hostname, port, splunkusername, splunkpassword: Splunk management endpoint/credentials.
        indexname: name of the target Splunk index.
    """
    # Compile once outside the loop; matches the "LATEST DETECTED DEADLOCK"
    # section of SHOW ENGINE INNODB STATUS output.
    deadlock_re = re.compile(r"-{4,}\sLATEST DETECTED DEADLOCK\s-{4,}\s((.*)\s)*?-{4,}")
    timestamp_re = re.compile(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
    for item in rds_list:
        rds_host_endpoint = item["Endpoint"]
        rds_port = item["Port"]
        connection = pymysql.connect(host=rds_host_endpoint, port=rds_port,
                                     user=username, password=password)
        try:
            cursor = connection.cursor()
            try:
                cursor.execute(""" SHOW ENGINE INNODB STATUS; """)
                rds_result = cursor.fetchall()
            finally:
                cursor.close()
        finally:
            # FIX: always release the DB connection, even if the query fails.
            connection.close()
        global_str = ""
        for row in rds_result:
            # The status text is the third column of each row.
            for match in deadlock_re.finditer(row[2]):
                global_str = match.group()
        # Strip timestamps so identical deadlocks don't look like new events.
        global_str = timestamp_re.sub('', global_str)
        # to avoid empty dead locks
        if len(global_str) > 0:
            service = splunk_client.connect(host=hostname, port=port,
                                            username=splunkusername,
                                            password=splunkpassword)
            myindex = service.indexes[indexname]
            # Open a socket
            mysocket = myindex.attach(host=rds_host_endpoint,
                                      source="INNODB STATUS", sourcetype="RDS")
            try:
                # Send events to it
                mysocket.send(global_str)
            finally:
                # FIX: close the socket even if send() raises.
                mysocket.close()
def setUpClass(cls):
    """Load .splunkrc options and clear any pending restart before the suite runs."""
    cls.opts = parse([], {}, ".splunkrc")
    # Before we start, make sure splunk doesn't need a restart.
    svc = client.connect(**cls.opts.kwargs)
    if svc.restart_required:
        svc.restart(timeout=120)
def start(self):
    """Connect to splunkd and grab a handle on the 'cowrie' index."""
    self.service = client.connect(host=self.host,
                                  port=self.port,
                                  username=self.username,
                                  password=self.password)
    self.index = self.service.indexes['cowrie']
def ConnectToSplunk(splunk_host, port, user, passwd, **kwargs):
    """ Make the connection to the splunk search-head """
    print("Connecting to: %s:%d" % (splunk_host, port))
    connection = client.connect(host=str(splunk_host),
                                port=int(port),
                                username=str(user),
                                password=str(passwd),
                                **kwargs)
    return connection
def test_read(self):
    """Check every app as listed, and again after an explicit refresh."""
    service = client.connect(**self.opts.kwargs)
    for current_app in service.apps:
        self.check_app(current_app)
        current_app.refresh()
        self.check_app(current_app)
def main(argv):
    """Run a search job, optionally show progress, stream the results to stdout, then cancel.

    Exits with status 2 when no search expression is given or the query is invalid.
    """
    usage = 'usage: %prog [options] "search"'
    flags = []
    flags.extend(FLAGS_TOOL)
    flags.extend(FLAGS_CREATE)
    flags.extend(FLAGS_RESULTS)
    opts = cmdline(argv, flags, usage=usage)
    if len(opts.args) != 1:
        error("Search expression required", 2)
    search = opts.args[0]
    verbose = opts.kwargs.get("verbose", 0)
    kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK)
    kwargs_create = dslice(opts.kwargs, FLAGS_CREATE)
    kwargs_results = dslice(opts.kwargs, FLAGS_RESULTS)
    service = client.connect(**kwargs_splunk)
    try:
        service.parse(search, parse_only=True)
    except HTTPError as e:
        # FIX: HTTPError.message is not portable to Python 3; str(e) is.
        cmdopts.error("query '%s' is invalid:\n\t%s" % (search, str(e)), 2)
        return
    job = service.jobs.create(search, **kwargs_create)
    while True:
        # Busy-wait until the job becomes queryable.
        while not job.is_ready():
            pass
        stats = {'isDone': job['isDone'],
                 'doneProgress': job['doneProgress'],
                 'scanCount': job['scanCount'],
                 'eventCount': job['eventCount'],
                 'resultCount': job['resultCount']}
        progress = float(stats['doneProgress']) * 100
        scanned = int(stats['scanCount'])
        matched = int(stats['eventCount'])
        results = int(stats['resultCount'])
        if verbose > 0:
            status = ("\r%03.1f%% | %d scanned | %d matched | %d results" % (
                progress, scanned, matched, results))
            sys.stdout.write(status)
            sys.stdout.flush()
        if stats['isDone'] == '1':
            if verbose > 0:
                sys.stdout.write('\n')
            break
        sleep(2)
    # FIX: dict.has_key() was removed in Python 3; use the `in` operator.
    if 'count' not in kwargs_results:
        kwargs_results['count'] = 0
    results = job.results(**kwargs_results)
    while True:
        content = results.read(1024)
        if len(content) == 0:
            break
        sys.stdout.write(content)
        sys.stdout.flush()
    sys.stdout.write('\n')
    job.cancel()
def test_crud(self):
    """Exercise create/read/update/enable/delete on event types."""
    service = client.connect(**self.opts.kwargs)
    event_types = service.event_types

    # Start from a clean slate.
    if 'sdk-test' in event_types:
        event_types.delete('sdk-test')
    self.assertFalse('sdk-test' in event_types)

    kwargs = {}
    kwargs['search'] = "index=_internal *"
    kwargs['description'] = "An internal event"
    kwargs['disabled'] = 1
    kwargs['priority'] = 2
    event_type = event_types.create('sdk-test', **kwargs)
    self.assertTrue('sdk-test' in event_types)
    self.assertEqual('sdk-test', event_type.name)
    self.check_content(event_type, **kwargs)

    kwargs['search'] = "index=_audit *"
    kwargs['description'] = "An audit event"
    kwargs['priority'] = 3
    event_type.update(**kwargs)
    event_type.refresh()
    self.check_content(event_type, **kwargs)

    event_type.enable()
    event_type.refresh()
    self.check_content(event_type, disabled=0)

    event_types.delete('sdk-test')
    # BUG FIX: this assertion checked the misspelled name 'sdk-teset', which
    # made it pass vacuously; it must verify that 'sdk-test' is gone.
    self.assertFalse('sdk-test' in event_types)
def start_proxy(config, daemonize=False):
    """Launch the node.js proxy process.

    When daemonized, stdio is redirected to os.devnull and the child's pid is
    written to the proxy pid file; otherwise a cleanup handler that kills the
    child is registered with atexit.

    Returns the object produced by connect() for the spawned process.
    """
    pid_file = get_proxy_pid_file(config)
    argv = [config['node'], path.join(MAIN_DIR, "proxy", "proxy.js")]
    # Get the appropriate stdin/stdout/stderr.
    # FIX: the file() builtin was removed in Python 3; open() is the portable
    # equivalent. (The original passed buffering=0 for stderr, which is only
    # valid for binary mode in Python 3, so it is dropped here.)
    stdin = None if not daemonize else open(os.devnull, 'r')
    stdout = sys.stdout if not daemonize else open(os.devnull, 'a+')
    stderr = sys.stderr if not daemonize else open(os.devnull, 'a+')
    connected = connect(argv, stdin=stdin, stdout=stdout, stderr=stderr)
    if daemonize:
        # We already forked the process, we simply need to get the pid
        # and then write it out to the file.
        pid = str(connected.pid)
        with open(pid_file, 'w+') as f:
            f.write("%s\n" % pid)
    else:
        def cleanup():
            try:
                connected.kill()
            # FIX: bare except narrowed; still deliberately best-effort.
            except Exception:
                # Ignore errors during cleanup
                pass
        atexit.register(cleanup)
    return connected
def test_results(self):
    """Run a small stats search and verify the preview and result records."""
    service = client.connect(**self.opts.kwargs)
    jobs = service.jobs

    # Run a new job to get the results, but we also make
    # sure that there is at least one event in the index already
    index = service.indexes['sdk-tests']
    old_event_count = int(index['totalEventCount'])
    if old_event_count == 0:
        index.submit("test event")
        testlib.wait(index, lambda index: index['totalEventCount'] == '1')

    job = jobs.create("search index=sdk-tests | head 1 | stats count")
    testlib.wait(job, lambda job: job['isDone'] == '1')
    self.assertEqual(job['isDone'], '1')

    # Fetch the results
    reader = results.ResultsReader(job.results())

    # FIX: iterator.next() was removed in Python 3; use the next() builtin.
    # The first one should always be RESULTS
    kind, result = next(reader)
    self.assertEqual(results.RESULTS, kind)
    self.assertEqual(int(result["preview"]), 0)

    # The second is always the actual result
    kind, result = next(reader)
    self.assertEqual(results.RESULT, kind)
    self.assertEqual(int(result["count"]), 1)
def test_read(self):
    """Check every job as listed, and again after an explicit refresh."""
    service = client.connect(**self.opts.kwargs)
    for current_job in service.jobs:
        self.check_job(current_job)
        current_job.refresh()
        self.check_job(current_job)
def main(argv):
    """List every splunkd logger together with its current level."""
    usage = "usage: %prog [options]"
    opts = parse(argv, {}, ".splunkrc", usage=usage)
    service = client.connect(**opts.kwargs)
    for logger in service.loggers:
        # FIX: Python 2 print statement -> print() function (the single-argument
        # parenthesized form is valid on both Python 2 and 3).
        print("%s (%s)" % (logger.name, logger['level']))
def doSplunkSummarizationSearch(electricDeviceIdList, StartTime, EndTime, span, summariesToDo):
    """Run a bucketed stats search over the given device ids and return a ResultsReader.

    Args:
        electricDeviceIdList: iterable of device ids OR-ed together in the search.
        StartTime, EndTime: search time bounds (earliest_time/latest_time).
        span: bucket span passed to `bucket _time span=...`.
        summariesToDo: list of dicts with 'function', 'inVariable', 'outVariable' keys.

    Returns:
        A results.ResultsReader over the oneshot search output.
    """
    #### do splunk search
    #### Organize into pandas dataFrame, myDataDF
    service = client.connect(host='localhost', port=8089,
                             username='******', password='******')
    kwargs_oneshot = {"earliest_time": StartTime, "latest_time": EndTime, "count": 0}
    # Build the stats clause, e.g. "avg(power) as avgPower ".
    statsStr = ''
    for summary in summariesToDo:
        statsStr += summary['function'] + '(' + summary['inVariable'] + ') as ' + summary['outVariable'] + ' '
    # Build the "(id=1 OR id=2 ...)" selector.
    idSelectionRequirements = []
    for myID in electricDeviceIdList:
        thisIDStr = 'id=' + str(myID)
        idSelectionRequirements.append(thisIDStr)
    idSelectionStr = '(' + (' OR '.join(idSelectionRequirements)) + ')'
    jobSearchString = ("search " + idSelectionStr + " | bucket _time span=" + span +
                      " | stats " + statsStr + " by _time | sort _time ")
    # FIX: Python 2 print statement -> print() function.
    print('jobSearchString: ', jobSearchString)
    job_results = service.jobs.oneshot(jobSearchString, **kwargs_oneshot)
    reader = results.ResultsReader(io.BufferedReader(
        responseReaderWrapper.ResponseReaderWrapper(job_results)))
    return reader
def main():
    """Main program."""
    usage = "usage: %prog [options] <command> [<args>]"
    argv = sys.argv[1:]
    known_commands = ['create', 'delete', 'list']

    # parse args, connect and setup
    opts = parse(argv, {}, ".splunkrc", usage=usage)
    service = connect(**opts.kwargs)
    program = Program(service)

    if len(opts.args) == 0:
        # no args means list
        command = "list"
    elif opts.args[0] in known_commands:
        # first arg is one of our commands: extract it and
        # remove it from the regular args
        command = opts.args[0]
        opts.args.remove(command)
    else:
        # first one not in our list, default to list
        command = "list"

    program.run(command, opts)
def main():
    """Stream a realtime export search and pretty-print each result until interrupted."""
    usage = "usage: %prog <search>"
    opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage)
    if len(opts.args) != 1:
        utils.error("Search expression required", 2)
    search = opts.args[0]
    service = connect(**opts.kwargs)
    try:
        result = service.get(
            "search/jobs/export",
            search=search,
            earliest_time="rt",
            latest_time="rt",
            search_mode="realtime")
        reader = results.ResultsReader(result.body)
        while True:
            kind = reader.read()
            # FIX: comparison with None should use identity (`is None`).
            if kind is None:
                break
            if kind == results.RESULT:
                event = reader.value
                pprint(event)
    except KeyboardInterrupt:
        # FIX: Python 2 print statement -> print() function.
        print("\nInterrupted.")
def main():
    """Follow a realtime search, choosing preview or event retrieval as appropriate."""
    usage = "usage: follow.py <search>"
    opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage)
    if len(opts.args) != 1:
        utils.error("Search expression required", 2)
    search = opts.args[0]
    service = client.connect(**opts.kwargs)
    job = service.jobs.create(
        search,
        earliest_time="rt",
        latest_time="rt",
        search_mode="realtime")

    # Wait for the job to transition out of QUEUED and PARSING so that
    # we can if its a transforming search, or not.
    while True:
        job.refresh()
        if job["dispatchState"] not in ["QUEUED", "PARSING"]:
            break
        time.sleep(2)  # Wait

    if job["reportSearch"] is not None:  # Is it a transforming search?
        count = lambda: int(job["numPreviews"])
        items = lambda _: job.preview()
    else:
        count = lambda: int(job["eventCount"])
        items = lambda offset: job.events(offset=offset)

    try:
        follow(job, count, items)
    except KeyboardInterrupt:
        # FIX: Python 2 print statement -> print() function.
        print("\nInterrupted.")
    finally:
        job.cancel()
def on_message(identifier, channel, payload):
    """Handle a published sample: log it to the Splunk 'testing' index and
    write the base64-decoded file contents into OUTDIR keyed by md5.
    """
    # FIX: Python 2 print statements -> print() function.
    print("on_message: %s" % payload)
    print(identifier)
    print(channel)
    print(payload)
    service = client.connect(username="******", password="******")
    cn = service.indexes["testing"].attach()
    try:
        decoded = json.loads(str(payload))
    # FIX: bare except narrowed; non-JSON payloads are wrapped so the
    # downstream key checks still work.
    except Exception:
        decoded = {'raw': payload}
    # FIX: idiomatic membership tests (`not in` instead of `not x in`).
    if 'md5' not in decoded or 'data' not in decoded:
        log.info("Received message does not contain hash or data - Ignoring it")
        return
    csv = ', '.join(['{0} = {1}'.format(i, decoded[i]) for i in ['url', 'md5', 'sha1', 'type']])
    outmsg = 'PUBLISH channel = %s, identifier = %s, %s' % (channel, identifier, csv)
    log.info(outmsg)
    cn.write(outmsg)
    # FIX: str.decode('base64') was removed in Python 3; use the base64 module.
    import base64
    filedata = base64.b64decode(decoded['data'])
    fpath = os.path.join(OUTDIR, decoded['md5'])
    with open(fpath, 'wb') as fd:
        fd.write(filedata)
def connect(self):
    """Open the splunkd connection; on failure, report the error when verbose."""
    try:
        self.service = client.connect(host=self.host, port=self.port,
                                      username=self.user, password=self.pasw)
    except Exception as e:
        msg = '[!] Error: %s' % str(e)
        if self.verbose:
            # FIX: Python 2 print statement -> print() function.
            print(msg)
def test_read(self):
    """Check every index as listed, and again after an explicit refresh."""
    service = client.connect(**self.opts.kwargs)
    for current_index in service.indexes:
        self.check_index(current_index)
        current_index.refresh()
        self.check_index(current_index)
def test_name_collision(self):
    """Create two saved searches with the same name in different namespaces and
    verify that lookups require an explicit namespace to disambiguate.
    """
    opts = self.opts.kwargs.copy()
    # Wildcard user-level namespace so both app- and user-scoped objects are visible.
    opts['owner'] = '-'
    opts['app'] = '-'
    opts['sharing'] = 'user'
    service = client.connect(**opts)
    logging.debug("Namespace for collision testing: %s", service.namespace)
    saved_searches = service.saved_searches
    name = testlib.tmpname()
    query1 = '* earliest=-1m | head 1'
    query2 = '* earliest=-2m | head 2'
    namespace1 = client.namespace(app='search', sharing='app')
    namespace2 = client.namespace(owner='admin', app='search', sharing='user')
    # NOTE(review): saved_search2 is created with query2/namespace1 and
    # saved_search1 with query1/namespace2 — the crossed numbering appears
    # intentional but is worth confirming against the original test intent.
    saved_search2 = saved_searches.create(
        name, query2,
        namespace=namespace1)
    saved_search1 = saved_searches.create(
        name, query1,
        namespace=namespace2)
    # A bare-name lookup must fail: two entities now share the name.
    self.assertRaises(client.AmbiguousReferenceException,
                      saved_searches.__getitem__, name)
    # Namespace-qualified lookups succeed and update independently.
    search1 = saved_searches[name, namespace1]
    self.check_saved_search(search1)
    search1.update(**{'action.email.from': '*****@*****.**'})
    search1.refresh()
    self.assertEqual(search1['action.email.from'], '*****@*****.**')
    search2 = saved_searches[name, namespace2]
    search2.update(**{'action.email.from': '*****@*****.**'})
    search2.refresh()
    self.assertEqual(search2['action.email.from'], '*****@*****.**')
    self.check_saved_search(search2)
def test_crud(self):
    """Exercise create/read/update/delete on saved searches."""
    service = client.connect(**self.opts.kwargs)
    searches = service.saved_searches

    # Start from a clean slate.
    if 'sdk-test1' in searches:
        searches.delete('sdk-test1')
    self.assertFalse('sdk-test1' in searches)

    query = "search index=sdk-tests * earliest=-1m"
    created = searches.create('sdk-test1', query)
    self.assertEqual('sdk-test1', created.name)
    self.assertTrue('sdk-test1' in searches)

    # Read back and flip visibility via update().
    fetched = searches['sdk-test1']
    self.check_content(fetched, is_visible=1)
    fetched.update(is_visible=False)
    fetched.refresh()
    self.check_content(fetched, is_visible=0)

    searches.delete('sdk-test1')
    self.assertFalse('sdk-test1' in searches)

    # Create again with the kwarg applied at creation time.
    recreated = searches.create(
        'sdk-test1', query,
        is_visible=False)
    self.assertEqual('sdk-test1', recreated.name)
    self.assertTrue('sdk-test1' in searches)
    self.check_content(recreated, is_visible=0)

    searches.delete('sdk-test1')
    self.assertFalse('sdk-test1' in searches)
def test_settings(self):
    """Verify expected settings keys exist and that sessionTimeout can be updated."""
    service = client.connect(**self.opts.kwargs)
    settings = service.settings

    # Verify that settings contains the keys we expect.
    expected_keys = [
        "SPLUNK_DB", "SPLUNK_HOME", "enableSplunkWebSSL", "host",
        "httpport", "mgmtHostPort", "minFreeSpace", "pass4SymmKey",
        "serverName", "sessionTimeout", "startwebserver", "trustedIP"
    ]
    for expected in expected_keys:
        self.assertTrue(expected in settings.content)

    # Verify that we can update the settings.
    original = settings['sessionTimeout']
    self.assertTrue(original != "42h")
    settings.update(sessionTimeout="42h")
    settings.refresh()
    self.assertEqual(settings['sessionTimeout'], "42h")

    # Restore (and verify) the original value.
    settings.update(sessionTimeout=original)
    settings.refresh()
    self.assertEqual(settings['sessionTimeout'], original)
def test_dispatch(self):
    """Dispatch a saved search with and without extra options, then clean up."""
    service = client.connect(**self.opts.kwargs)
    searches = service.saved_searches
    if 'sdk-test1' in searches:
        searches.delete('sdk-test1')
    self.assertFalse('sdk-test1' in searches)

    query = "search index=sdk-tests * earliest=-1m"
    saved = searches.create('sdk-test1', query)
    self.assertEqual('sdk-test1', saved.name)
    self.assertTrue('sdk-test1' in searches)

    # Plain dispatch: wait for completion, drop the results stream, cancel.
    job = saved.dispatch()
    testlib.wait(job, lambda job: bool(int(job['isDone'])))
    job.results().close()
    job.cancel()

    # Dispatch with some additional options.
    job = saved.dispatch(**{'dispatch.buckets': 100})
    testlib.wait(job, lambda job: bool(int(job['isDone'])))
    job.timeline().close()
    job.cancel()

    searches.delete('sdk-test1')
    self.assertFalse('sdk-test1' in searches)
def __init__(self, splunk_conf, splunk_evt, splunk_index):
    """Connect to Splunk, create the index if missing, and attach an event socket.

    Args:
        splunk_conf: kwargs forwarded to client.connect().
        splunk_evt: kwargs forwarded to Index.attach().
        splunk_index: name of the target index.
    """
    self.splunk = client.connect(**splunk_conf)
    # FIX: idiomatic membership test (`not in` instead of `not x in`).
    if splunk_index not in self.splunk.indexes:
        self.index = self.splunk.indexes.create(splunk_index)
    else:
        self.index = self.splunk.indexes[splunk_index]
    self.socket = self.index.attach(**splunk_evt)
def test_read(self):
    """Touch each fired-alert group's count and each alert's content so they load."""
    service = client.connect(**self.opts.kwargs)
    for group in service.fired_alerts:
        group.count
        for fired in group.alerts:
            fired.content
def main():
    """Follow a realtime search for instantaneous_eps and speak each event via `say`."""
    usage = "usage: %prog <search>"
    opts = utils.parse(sys.argv[:], {}, ".splunkrc", usage=usage)
    service = connect(**opts.kwargs)
    try:
        response = service.get(
            "search/jobs/export",
            search="search instantaneous_eps",
            index="_internal",
            earliest_time="rt",
            latest_time="rt",
            search_mode="realtime")
        # FIX: the loop variable previously shadowed the response object
        # (both were named `result`), making the code fragile to reorder.
        for result in ResultsReader(response.body):
            if result is not None:
                if isinstance(result, dict):
                    # extract only the event contents
                    # FIX: dict.items() is a view on Python 3; materialize it
                    # before indexing. NOTE(review): index [2][1] relies on the
                    # field ordering of the result dict — confirm.
                    event = list(result.items())[2][1]
                    # strip out the leading timestamp fields, they don't read well
                    shorte = event[61:]
                    # send the shortened event contents to the speech synth
                    subprocess.call(["/usr/bin/say", shorte])
    except KeyboardInterrupt:
        # FIX: Python 2 print statement -> print() function.
        print("\nInterrupted.")
def handle(self, **options):
    """Reinstall all configured user apps: remove existing appfx-marked apps,
    restart splunkd if anything was deleted, then recreate each app with an
    appfx stanza and a default nav/view.
    """
    service = connect(
        username=options['username'],
        password=options['password'],
        host=settings.SPLUNKD_HOST,
        port=settings.SPLUNKD_PORT,
    )
    user_apps = list(settings.USER_APPS)
    apps = service.apps
    did_delete = False
    for app in apps:
        # Temporarily switch into the app's namespace to inspect its conf.
        namespace = service.namespace
        service.namespace = binding.namespace(owner="nobody", app=app.name)
        is_appfx = app.name in user_apps and 'appfx' in service.confs['app']
        service.namespace = namespace
        if is_appfx:
            # FIX: Python 2 print statements -> print() function throughout.
            print("Uninstalling '%s'" % app.name)
            service.namespace = namespace
            apps.delete(app.name)
            did_delete = True
    if did_delete:
        print("Restarting...")
        restart(service)
    for user_app in user_apps:
        print("Installing '%s'" % user_app)
        user_app_module = importlib.import_module(user_app)
        # The app label defaults to its module name unless the module says otherwise.
        label = user_app
        if hasattr(user_app_module, 'NAME'):
            label = user_app_module.NAME
        apps.create(user_app, visible=True, label=label)
        service.namespace = binding.namespace(owner="nobody", app=user_app)
        # Mark the app as appfx-managed.
        stanza = service.confs['app'].create('appfx')
        stanza.submit("appfx=1")
        nav_kwargs = {
            "eai:data": '<nav><view name="default" default="true"/></nav>'
        }
        view_kwargs = {
            "name": "default",
            "eai:data": '<view template="appfx_base:/templates/redirect.html"></view>'
        }
        service.post(
            'data/ui/views',
            **view_kwargs
        )
        service.post(
            'data/ui/nav/default',
            **nav_kwargs
        )
def test_crud(self):
    """Exercise the job lifecycle: create, cancel, finalize, and the property setters."""
    service = client.connect(**self.opts.kwargs)
    jobs = service.jobs
    # Ensure the test index exists and is empty.
    if not service.indexes.contains("sdk-tests"):
        service.indexes.create("sdk-tests")
    service.indexes['sdk-tests'].clean()
    # Make sure we can create a job
    job = jobs.create("search index=sdk-tests")
    self.assertTrue(jobs.contains(job.sid))
    # Make sure we can cancel the job
    job.cancel()
    self.assertFalse(jobs.contains(job.sid))
    # Search for non-existant data
    job = jobs.create("search index=sdk-tests TERM_DOES_NOT_EXIST")
    testlib.wait(job, lambda job: job['isDone'] == '1')
    self.assertEqual(job['isDone'], '1')
    self.assertEqual(job['eventCount'], '0')
    job.finalize()
    # Create a new job
    job = jobs.create("search * | head 1 | stats count")
    self.assertTrue(jobs.contains(job.sid))
    # Set various properties on it
    job.disable_preview()
    job.pause()
    job.set_ttl(1000)
    job.set_priority(5)
    job.touch()
    job.refresh()
    # Assert that the properties got set properly
    self.check_properties(job, {
        'isPreviewEnabled': '0',
        'isPaused': '1',
        'ttl': '1000',
        'priority': '5'
    })
    # Set more properties
    job.enable_preview()
    job.unpause()
    job.finalize()
    job.refresh()
    # Assert that they got set properly
    self.check_properties(job, {
        'isPreviewEnabled': '1',
        'isPaused': '0',
        'isFinalized': '1'
    })
    job.cancel()
    self.assertFalse(jobs.contains(job.sid))
def GetTokens(sessionKey):
    """Return the storage passwords collection for the GoogleDrive add-on app.

    BUG FIX: the parameter was misspelled `sesssionKey` while the body
    referenced `sessionKey`, raising NameError on every call.
    """
    splunkService = client.connect(token=sessionKey, app='GoogleDriveAddonforSplunk')
    return splunkService.storage_passwords
def test_analytics(self):
    """End-to-end test of the analytics tracker/retriever pair against a live index."""
    # We have to add the current path to the PYTHONPATH,
    # otherwise the import doesn't work quite right
    sys.path.append(os.getcwd())
    import analytics

    # Create a tracker
    tracker = analytics.input.AnalyticsTracker(
        "sdk-test", self.opts.kwargs, index="sdk-test")

    service = client.connect(**self.opts.kwargs)

    # Before we start, we'll clean the index
    index = service.indexes["sdk-test"]
    index.clean()

    # Two events: one with two properties, one with a single property.
    tracker.track("test_event", distinct_id="abc123", foo="bar", abc="123")
    tracker.track("test_event", distinct_id="123abc", abc="12345")

    # Wait until the events get indexed
    self.assertEventuallyTrue(lambda: index.refresh()['totalEventCount'] == '2', timeout=200)

    # Now, we create a retriever to retrieve the events
    retriever = analytics.output.AnalyticsRetriever(
        "sdk-test", self.opts.kwargs, index="sdk-test")

    # Assert applications
    applications = retriever.applications()
    self.assertEquals(len(applications), 1)
    self.assertEquals(applications[0]["name"], "sdk-test")
    self.assertEquals(applications[0]["count"], 2)

    # Assert events
    events = retriever.events()
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0]["name"], "test_event")
    self.assertEqual(events[0]["count"], 2)

    # Assert properties: 'abc' was sent twice, 'foo' once.
    expected_properties = {"abc": 2, "foo": 1}
    properties = retriever.properties("test_event")
    self.assertEqual(len(properties), len(expected_properties))
    for prop in properties:
        name = prop["name"]
        count = prop["count"]
        self.assertTrue(name in list(expected_properties.keys()))
        self.assertEqual(count, expected_properties[name])

    # Assert property values: each distinct value of 'abc' appeared once.
    expected_property_values = {"123": 1, "12345": 1}
    values = retriever.property_values("test_event", "abc")
    self.assertEqual(len(values), len(expected_property_values))
    for value in values:
        name = value["name"]
        count = value["count"]
        self.assertTrue(name in list(expected_property_values.keys()))
        self.assertEqual(count, expected_property_values[name])

    # Assert event over time
    over_time = retriever.events_over_time(
        time_range=analytics.output.TimeRange.MONTH)
    self.assertEquals(len(over_time), 1)
    self.assertEquals(len(over_time["test_event"]), 1)
    self.assertEquals(over_time["test_event"][0]["count"], 2)

    # Now that we're done, we'll clean the index
    index.clean()
def CreateToken(sessionKey, password, user, realm):
    """Store a credential in the GoogleDrive add-on's storage passwords."""
    svc = client.connect(token=sessionKey, app='GoogleDriveAddonforSplunk')
    svc.storage_passwords.create(password, user, realm)
return { 'status': response.code, # type: ignore 'reason': response.msg, # type: ignore 'headers': response.info().dict, # type: ignore 'body': StringIO(response.read()) # type: ignore } service = None proxy = demisto.params()['proxy'] if proxy: try: service = client.connect( handler=handler(proxy), host=demisto.params()['host'], port=demisto.params()['port'], app=demisto.params().get('app'), username=demisto.params()['authentication']['identifier'], password=demisto.params()['authentication']['password'], verify=VERIFY_CERTIFICATE) except urllib2.URLError as e: if e.reason.errno == 1 and sys.version_info < (2, 6, 3): # type: ignore pass else: raise else: service = client.connect( host=demisto.params()['host'], port=demisto.params()['port'], app=demisto.params().get('app'), username=demisto.params()['authentication']['identifier'],
def ListTokens(sessionKey):
    """Log the name of every stored credential for the GoogleDrive add-on app."""
    svc = client.connect(token=sessionKey, app='GoogleDriveAddonforSplunk')
    for stored in svc.storage_passwords:
        logger.info(stored.name)
def read_splunk(query, username, password, host, port, time_limit=5):
    '''This function queries Splunk database.

    Note that function dependencies include installing splunklib.

    Args:
        query (str): SPL query.
        username (str): Splunk username.
        password (str): Splunk password.
        host (str): Splunk host.
        port (int): Splunk management port.
        time_limit (int): time limit (minutes) for the query (default: 5).

    Returns:
        DataFrame: query results, or None on timeout / unreadable results.
    '''
    # save start time
    start_time = time()
    print('start..\n')
    print('your query:\n {}\n'.format("|\n".join(query.split("|"))))
    # connect to splunk
    service = client.connect(host=host, port=port, username=username, password=password)
    print('connection succeed\n')
    # query splunk
    kwargs_normalsearch = {"exec_mode": "normal", "count": 0}
    job = service.jobs.create(query, **kwargs_normalsearch)
    deadline = start_time + (time_limit * 60)
    # A normal search returns the job's SID right away, so we need to poll for completion
    while True:
        while not job.is_ready():
            if time() > deadline:
                break
        if time() > deadline:
            # FIX: message typo corrected ("more then" -> "more than").
            print("\n\njob stopped - query ran more than {} minutes. "
                  "You can change this limitation ('time_limit')\n".format(time_limit))
            return None
        stats = {
            "isDone": job["isDone"],
            "doneProgress": float(job["doneProgress"]) * 100,
            "scanCount": int(job["scanCount"]),
            "eventCount": int(job["eventCount"]),
            "resultCount": int(job["resultCount"])
        }
        status = (
            "\rquery status: %(doneProgress)03.1f%% %(scanCount)d scanned "
            "%(eventCount)d matched %(resultCount)d results") % stats
        sys.stdout.write(status)
        sys.stdout.flush()
        if stats["isDone"] == "1":
            sys.stdout.write("\n\nDone!\n\n")
            break
        sleep(0.01)
    job_results = job.results(output_mode='csv', count=0)
    print('query succeed\n')
    # read results
    raw_results = job_results.read()
    print('read results succeed\n')
    # `job` is always bound at this point, so cancel unconditionally.
    job.cancel()
    print("job finished and canceled\n")
    # transform results to DataFrame
    try:
        df = pd.read_csv(BytesIO(raw_results), encoding='utf8', sep=',', low_memory=False)
    # FIX: bare except narrowed to Exception.
    except Exception:
        print('finished! number of rows: {}\n'.format(0))
        return None
    # drop Splunk columns which didn't get declared in the query
    df.drop(columns=[col for col in df.columns if col[0] == '_'], inplace=True)
    print('finished! number of rows: {}\n'.format(len(df)))
    # missing results warning
    # FIX: message typo corrected ("resutls" -> "results").
    if len(df) == 50000:
        print("Warning! Splunk API results is limited to 50,000 rows. "
              "You may have missing rows. please update your query\n")
    return df
More details here: https://github.com/MHaggis/notes/blob/master/Splunk-Python-SDK/ """

# Built-in/Generic Imports
import sys

# Libs
import splunklib.results as results
import splunklib.client as client
import pandas as pd

__author__ = 'mhaggis'

# Connect to the splunkd management port.
service = client.connect(
    username="******",
    password="******",
    # change this to ip address of machine where the splunk monitoring instance is located (remote splunk instance)
    host="localhost",
    port=8089)

# Run a one-shot search and display the results using the results reader
# Set the parameters for the search:
# - Search everything in a 30 day time range
kwargs_oneshot = {"earliest_time": "-30d", "latest_time": "now"}
searchquery_oneshot = "search index=_internal sourcetype=splunkd component=UiAuth | table _time user clientip"

# Our results
oneshotsearch_results = service.jobs.oneshot(searchquery_oneshot, **kwargs_oneshot)

# Another way to output the data using splunklib.results
# Get the results and display them using the ResultsReader
#reader = results.ResultsReader(oneshotsearch_results)
""" 获取 Entity 信息,比如应用、索引、用户、角色、版本 """ import splunklib.client as client HOST = "192.168.2.131" PORT = 8089 USERNAME = "******" PASSWORD = "******" service = client.connect( host=HOST, port=PORT, username=USERNAME, password=PASSWORD) print "********Splunk Apps********" for app in service.apps: print app.name print "********Splunk Indexes*********" for index in service.indexes: print index.name print "********Splunk Users*********" for user in service.users:
import splunklib.client as client
from pprint import pprint

# Connect over plain http and dump every conf file, stanza by stanza.
service = client.connect(schema='http', host='127.0.0.1', port=8089,
                         username='******', password='******', app='adhfile')

for conf_file in service.confs:
    print("\n--[[%s]]--" % conf_file.name)
    for stanza in conf_file:
        print("\n[%s]" % stanza.name)
        for key, value in stanza.content.items():
            print("%s = %s" % (key, value))
def __init__(self, application_name, splunk_info, index=ANALYTICS_INDEX_NAME):
    """Bind the tracker to an application name, a Splunk connection, and a target index."""
    self.application_name = application_name
    self.splunk = client.connect(**splunk_info)
    self.index = index
## Add the Splunk SDK path to our import search path ## so we don't have to install the SDK on the server site.addsitedir("./splunk-sdk-python-1.6.16") import splunklib.client as client import splunklib.results as results host = "localhost" username = "******" password = "" app = "search" service = client.connect(host=host, port=8089, app=app, username=username, password=password) search = ''' | rest /services/search/jobs | fields - fieldMetadata*, custom*, performance* | where isRealTimeSearch=0 AND isDone=0 | where 'eai:acl.app' != "" | where NOT match(label, "^_ACCELERATE_DM_") | where runDuration >= 3600 | table sid, eai:acl.app, label, dispatchState, runDuration '''
def delete_data_sampling_models_del(self, request_info, **kwargs):
    """Delete a custom data sampling model from the TrackMe KVstore.

    Expects a DELETE payload containing `model_name` and optionally
    `update_comment`; a `describe` payload (or a missing body) returns
    usage documentation instead. Successful deletions are recorded in
    the audit-changes collection.
    """
    # Declare
    model_name = None
    query_string = None
    describe = False

    # Retrieve from data
    try:
        resp_dict = json.loads(str(request_info.raw_args['payload']))
    except Exception as e:
        resp_dict = None

    if resp_dict is not None:
        try:
            describe = resp_dict['describe']
            if describe in ("true", "True"):
                describe = True
        except Exception as e:
            describe = False
        if not describe:
            model_name = resp_dict['model_name']
    else:
        # body is required in this endpoint, if not submitted describe the usage
        describe = True

    if describe:
        response = "{\"describe\": \"This endpoint deletes a custom data sampling model, it requires a DELETE call with the following data:\""\
            + ", \"options\" : [ { "\
            + "\"model_name\": \"name of the custom model\", "\
            + "\"update_comment\": \"OPTIONAL: a comment for the update, comments are added to the audit record, if unset will be defined to: API update\""\
            + " } ] }"
        return {
            "payload": json.dumps(json.loads(str(response)), indent=1),
            'status': 200  # HTTP status code
        }

    # Update comment is optional and used for audit changes
    try:
        update_comment = resp_dict['update_comment']
    except Exception as e:
        update_comment = "API update"

    # Define the KV query
    query_string = '{ "model_name": "' + model_name + '" }'

    # Get splunkd port
    entity = splunk.entity.getEntity('/server', 'settings',
                                     namespace='trackme',
                                     sessionKey=request_info.session_key,
                                     owner='-')
    splunkd_port = entity['mgmtHostPort']

    try:
        # Data collection
        collection_name = "kv_trackme_data_sampling_custom_models"
        service = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection = service.kvstore[collection_name]

        # Audit collection
        collection_name_audit = "kv_trackme_audit_changes"
        service_audit = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection_audit = service_audit.kvstore[collection_name_audit]

        # Get the current record
        # Notes: the record is returned as an array, as we search for a
        # specific record, we expect one record only
        try:
            record = collection.data.query(query=str(query_string))
            key = record[0].get('_key')
        except Exception as e:
            key = None

        # Render result
        if key is not None and len(key) > 2:
            # This record exists already

            # Store the record for audit purposes
            record = str(json.dumps(collection.data.query_by_id(key), indent=1))

            # Record an audit change
            import time
            current_time = int(round(time.time() * 1000))
            user = request_info.user

            try:
                # Remove the record
                collection.data.delete(json.dumps({"_key": key}))

                # Insert the record
                collection_audit.data.insert(json.dumps({
                    "time": str(current_time),
                    "user": str(user),
                    "action": "success",
                    "change_type": "delete data parsing custom rule",
                    "object": str(model_name),
                    "object_category": "data_source",
                    "object_attrs": str(record),
                    "result": "N/A",
                    "comment": str(update_comment)
                }))
            except Exception as e:
                return {
                    'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
                }

            return {
                "payload": "Record with _key " + str(key) + " was deleted from the collection.",
                'status': 200  # HTTP status code
            }
        else:
            return {
                "payload": 'Warn: resource not found ' + str(key),
                'status': 404  # HTTP status code
            }

    except Exception as e:
        return {
            'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
        }
def start(self):
    """Open the Splunk service connection and look up the 'cowrie' index."""
    self.service = client.connect(
        host=self.host, port=self.port,
        username=self.username, password=self.password)
    self.index = self.service.indexes['cowrie']
def post_data_sampling_models_add(self, request_info, **kwargs):
    """Create or update a custom data sampling model in the KVstore.

    Expects a POST call with a JSON payload containing:
      - model_name: name of the custom model (required)
      - model_regex: regular expression used by the model (required)
      - model_type: "inclusive" (rule must match) or "exclusive"
        (rule must not match) (required)
      - sourcetype_scope: optional sourcetype filter, defaults to "*"
      - update_comment: optional audit comment, defaults to "API update"

    If the model already exists it is updated in place (keeping its
    model_id), otherwise a new record with an md5-derived model_id is
    inserted. Either path writes an audit record. If no usable payload is
    submitted (or describe is true), returns the endpoint usage instead.
    """
    import time
    import hashlib

    # Declare
    model_name = None
    model_regex = None
    model_type = None
    describe = False

    # Retrieve from data
    try:
        resp_dict = json.loads(str(request_info.raw_args['payload']))
    except Exception:
        resp_dict = None

    if resp_dict is not None:
        try:
            describe = resp_dict['describe']
            if describe in ("true", "True"):
                describe = True
        except Exception:
            describe = False
        if not describe:
            model_name = resp_dict['model_name']
            model_regex = resp_dict['model_regex']
            model_type = resp_dict['model_type']
            # Update comment is optional and used for audit changes
            try:
                update_comment = resp_dict['update_comment']
            except Exception:
                update_comment = "API update"
            # sourcetype_scope is optional, if unset it will be defined to * (any)
            try:
                sourcetype_scope = resp_dict['sourcetype_scope']
            except Exception:
                sourcetype_scope = "*"
    else:
        # body is required in this endpoint, if not submitted describe the usage
        describe = True

    if describe:
        response = "{\"describe\": \"This endpoint creates a new data sampling custom model, it requires a POST call with the following data:\""\
            + ", \"options\" : [ { "\
            + "\"model_name\": \"name of the custom model\", "\
            + "\"model_regex\": \"The regular expression to be used by the custom model, special characters should be escaped.\", "\
            + "\"model_type\": \"The type of match for this model, valid options are “inclusive” (rule must match) and “exclusive” (rule must not match).\", "\
            + "\"sourcetype_scope\": \"OPTIONAL: value of the sourcetype to match, if unset defaults to “*”. You can enter a list of sourcetypes as a comma separated list of values, wildcards and spaces should not be used.\", "\
            + "\"update_comment\": \"OPTIONAL: a comment for the update, comments are added to the audit record, if unset will be defined to: API update.\""\
            + " } ] }"
        return {
            "payload": json.dumps(json.loads(str(response)), indent=1),
            'status': 200  # HTTP status code
        }

    # Validate model_type up front: an invalid value previously only
    # surfaced after two splunkd connections and a KV query; short-circuit
    # with the same response instead
    if model_type not in ("inclusive", "exclusive"):
        return {
            "payload": "bad request",
            'status': 404  # HTTP status code
        }

    # Define the KV query; json.dumps guarantees proper escaping of the
    # model name (raw string concatenation broke on quotes/backslashes)
    query_string = json.dumps({"model_name": model_name})

    # Get splunkd port
    entity = splunk.entity.getEntity('/server', 'settings', namespace='trackme',
                                     sessionKey=request_info.session_key, owner='-')
    splunkd_port = entity['mgmtHostPort']

    try:
        # Data collection
        collection_name = "kv_trackme_data_sampling_custom_models"
        service = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection = service.kvstore[collection_name]

        # Audit collection
        collection_name_audit = "kv_trackme_audit_changes"
        service_audit = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection_audit = service_audit.kvstore[collection_name_audit]

        # Get the current record
        # Notes: the record is returned as an array, as we search for a
        # specific record, we expect one record only
        try:
            record = collection.data.query(query=str(query_string))
            key = record[0].get('_key')
        except Exception:
            key = None

        # Record an audit change
        current_time = int(round(time.time() * 1000))
        user = request_info.user

        # Render result
        if key is not None and len(key) > 2:
            # This record exists already: update it in place, keeping the
            # existing model_id
            model_id = record[0].get('model_id')

            # Update the record
            collection.data.update(str(key), json.dumps({
                "model_name": model_name,
                "model_regex": model_regex,
                "model_type": model_type,
                "model_id": model_id,
                "sourcetype_scope": sourcetype_scope,
                "mtime": current_time}))

            # Store the record for audit purposes
            record = str(json.dumps(collection.data.query_by_id(key), indent=1))
        else:
            # This record does not exist yet: derive a stable model_id from
            # the model name and insert a new record
            model_id = hashlib.md5(model_name.encode('utf-8')).hexdigest()

            # Insert the record
            collection.data.insert(json.dumps({
                "model_name": model_name,
                "model_regex": model_regex,
                "model_type": model_type,
                "model_id": model_id,
                "sourcetype_scope": sourcetype_scope,
                "mtime": current_time}))

            # Get record
            record = json.dumps(collection.data.query(query=str(query_string)), indent=1)

        try:
            # Insert the audit record
            collection_audit.data.insert(json.dumps({
                "time": str(current_time),
                "user": str(user),
                "action": "success",
                "change_type": "add data parsing custom rule",
                "object": str(model_name),
                "object_category": "data_source",
                "object_attrs": str(record),
                "result": "N/A",
                "comment": str(update_comment)
            }))
        except Exception as e:
            return {
                'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
            }

        return {
            "payload": str(record),
            'status': 200  # HTTP status code
        }

    except Exception as e:
        return {
            'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
        }
def get_data_sampling_by_name(self, request_info, **kwargs):
    """Retrieve a data sampling record by data source name.

    Expects a GET call with a JSON payload containing:
      - data_name: name of the data source (required)

    Returns the matching KVstore record as JSON, a 404 when no record
    matches, or a description of the endpoint usage when no usable
    payload is submitted (or describe is true).
    """

    # By data_name
    data_name = None
    describe = False

    # Retrieve from data
    try:
        resp_dict = json.loads(str(request_info.raw_args['payload']))
    except Exception:
        resp_dict = None

    if resp_dict is not None:
        try:
            describe = resp_dict['describe']
            if describe in ("true", "True"):
                describe = True
        except Exception:
            describe = False
        if not describe:
            # Robustness: a payload without data_name previously raised an
            # unhandled KeyError; fall back to the usage description instead
            data_name = resp_dict.get('data_name')
            if data_name is None:
                describe = True
    else:
        # body is required in this endpoint, if not submitted describe the usage
        describe = True

    if describe:
        response = "{\"describe\": \"This endpoint retrieves a data sampling record, it requires a GET call with the following data:\""\
            + ", \"options\" : [ { "\
            + "\"data_name\": \"name of the data source\""\
            + " } ] }"
        return {
            "payload": json.dumps(json.loads(str(response)), indent=1),
            'status': 200  # HTTP status code
        }

    # Define the KV query; json.dumps guarantees proper escaping of the
    # data name (raw string concatenation broke on quotes/backslashes)
    query_string = json.dumps({"data_name": data_name})

    # Get splunkd port
    entity = splunk.entity.getEntity('/server', 'settings', namespace='trackme',
                                     sessionKey=request_info.session_key, owner='-')
    splunkd_port = entity['mgmtHostPort']

    try:
        collection_name = "kv_trackme_data_sampling"
        service = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection = service.kvstore[collection_name]

        # Get the current record
        # Notes: the record is returned as an array, as we search for a
        # specific record, we expect one record only
        try:
            record = collection.data.query(query=str(query_string))
            key = record[0].get('_key')
        except Exception:
            key = None

        # Render result
        if key is not None and len(key) > 2:
            return {
                "payload": json.dumps(collection.data.query_by_id(key), indent=1),
                'status': 200  # HTTP status code
            }
        else:
            return {
                "payload": 'Warn: resource not found ' + str(key),
                'status': 404  # HTTP status code
            }

    except Exception as e:
        return {
            'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
        }
# Script driver: build the cliauto configuration, connect to splunkd, and
# run the sshcom heartbeat tester, printing the result.
ppid = os.getpid()

# Create configuration
objcfg = configconf(fargs, sargs, ppid)
if objcfg.status != 'Success':
    # fatal condition: log at ERROR (previously logged at DEBUG, which is
    # invisible under the default log level) and exit
    logging.error('Error, creating configconf object, err = %s', str(objcfg.status))
    sys.exit(1)

# If getconfig unsuccessful, log errmsg and exit script
gc = objcfg.getconfig(False)
if gc != 'Success':
    logging.error('Error, getconfig function, err = %s', str(gc))
    sys.exit(1)

logging.debug('endpoint: objcfg.hosts[0] = %s', str(objcfg.hosts[0]))

# Open both a low-level (binding) and high-level (client) service handle
binding_service = binding.connect(token=sargs['authtoken'])
client_service = client.connect(token=sargs['authtoken'])

sk = cliauto_kvstore(binding_service)

#def __init__(self, fargs, sargs, cargs, ppid, sk, binding_service, client_service, objcfg):
sc = sshcom(fargs, sargs, cargs, ppid, sk, binding_service, client_service, objcfg)
r = sc.hb_tester()

# .get avoids a KeyError when PYTHONHTTPSVERIFY is not set in the environment
print('os.environ[PYTHONHTTPSVERIFY] = ' + str(os.environ.get('PYTHONHTTPSVERIFY')))
print("sc.status = " + str(sc.status))
print("r = " + str(r))
def post_data_sampling_reset(self, request_info, **kwargs):
    """Clear the data sampling state of a data source and re-run sampling.

    Expects a POST call with a JSON payload containing:
      - data_name: name of the data source (required)
      - update_comment: optional comment stored in the audit record,
        defaults to "API update"

    Deletes the sampling record from the KVstore (writing an audit entry),
    then runs the "TrackMe - Data sampling engine for target" saved search
    for that record and reports the resulting sampling status colour. If no
    usable payload is submitted (or describe is true), returns the
    endpoint usage instead.
    """
    import time

    # Declare
    data_name = None
    describe = False

    # Retrieve from data
    try:
        resp_dict = json.loads(str(request_info.raw_args['payload']))
    except Exception:
        resp_dict = None

    if resp_dict is not None:
        try:
            describe = resp_dict['describe']
            if describe in ("true", "True"):
                describe = True
        except Exception:
            describe = False
        if not describe:
            # Robustness: a payload without data_name previously raised an
            # unhandled KeyError; fall back to the usage description instead
            data_name = resp_dict.get('data_name')
            if data_name is None:
                describe = True
    else:
        # body is required in this endpoint, if not submitted describe the usage
        describe = True

    if describe:
        response = "{\"describe\": \"This endpoint clears the data sampling record state and runs the sampling operation for a given data source, it requires a POST call with the following data:\""\
            + ", \"options\" : [ { "\
            + "\"data_name\": \"name of the data source\", "\
            + "\"update_comment\": \"OPTIONAL: a comment for the update, comments are added to the audit record, if unset will be defined to: API update\""\
            + " } ] }"
        return {
            "payload": json.dumps(json.loads(str(response)), indent=1),
            'status': 200  # HTTP status code
        }

    # Update comment is optional and used for audit changes
    try:
        update_comment = resp_dict['update_comment']
    except Exception:
        update_comment = "API update"

    # Define the KV query; json.dumps guarantees proper escaping of the
    # data name (raw string concatenation broke on quotes/backslashes)
    query_string = json.dumps({"data_name": data_name})

    # Get splunkd port
    entity = splunk.entity.getEntity('/server', 'settings', namespace='trackme',
                                     sessionKey=request_info.session_key, owner='-')
    splunkd_port = entity['mgmtHostPort']

    try:
        # Data collection
        collection_name = "kv_trackme_data_sampling"
        service = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection = service.kvstore[collection_name]

        # Audit collection
        collection_name_audit = "kv_trackme_audit_changes"
        service_audit = client.connect(
            owner="nobody",
            app="trackme",
            port=splunkd_port,
            token=request_info.session_key
        )
        collection_audit = service_audit.kvstore[collection_name_audit]

        # Get the current record
        # Notes: the record is returned as an array, as we search for a
        # specific record, we expect one record only
        try:
            record = collection.data.query(query=str(query_string))
            key = record[0].get('_key')
        except Exception:
            key = None

        # Render result
        if key is not None and len(key) > 2:
            # This record exists already
            # Store the record for audit purposes
            record = str(json.dumps(collection.data.query_by_id(key), indent=1))

            # Record an audit change
            current_time = int(round(time.time() * 1000))
            user = request_info.user

            try:
                # Remove the record: delete_by_id targets the exact key,
                # which is the documented SDK call for a single-record delete
                collection.data.delete_by_id(key)

                # Insert the audit record
                collection_audit.data.insert(json.dumps({
                    "time": str(current_time),
                    "user": str(user),
                    "action": "success",
                    "change_type": "data sampling clear state",
                    "object": str(data_name),
                    "object_category": "data_source",
                    "object_attrs": str(record),
                    "result": "N/A",
                    "comment": str(update_comment)
                }))
            except Exception as e:
                return {
                    'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
                }

            # Run and update sampling
            data_sample_status_colour = "unknown"
            import splunklib.results as results
            kwargs_search = {"app": "trackme", "earliest_time": "-7d", "latest_time": "now"}
            searchquery = "| savedsearch \"TrackMe - Data sampling engine for target\" key=\"" + str(key) + "\""

            # spawn the search and get the results
            searchresults = service.jobs.oneshot(searchquery, **kwargs_search)

            # Get the results and display them using the ResultsReader
            try:
                reader = results.ResultsReader(searchresults)
                for item in reader:
                    query_result = item
                    data_sample_status_colour = query_result["data_sample_status_colour"]
            except Exception:
                # best effort: the deletion already succeeded, report an
                # unknown sampling state rather than failing the call
                data_sample_status_colour = "unknown"

            return {
                "payload": "Data sampling state for: " + str(data_name)
                           + " was cleared and sampling operation ran, data sampling state is: "
                           + str(data_sample_status_colour),
                'status': 200  # HTTP status code
            }
        else:
            return {
                "payload": 'Warn: resource not found ' + str(key),
                'status': 404  # HTTP status code
            }

    except Exception as e:
        return {
            'payload': 'Warn: exception encountered: ' + str(e)  # Payload of the request.
        }