def parseSpan(span):
    # maxspan = [<integer> s|m|h|d]
    match = re.search("(\d*)([shdwmqy])", span)
    if match == None:
        si.generateErrorResults(" 'timeunit' argument required, such as s (seconds), h (hours), d (days), w (weeks), y (years). Optionally prefix with a number: 600s (10 minutes), 2w (2 weeks).")
        exit(-1)
    scalar, units = match.groups()
    if len(scalar) == 0:
        scalar = 1
    secs = scalar = int(scalar)
    if units == "s":
        pass
    elif units == "h":
        secs *= 60 * 60
    elif units == "d":
        secs *= 24 * 60 * 60
    elif units == "w":
        secs *= 7 * 24 * 60 * 60
    elif units == "m":
        secs *= 30 * 24 * 60 * 60
    elif units == "q":
        secs *= 365 / 4. * 24 * 60 * 60
    elif units == "y":
        secs *= 365 * 24 * 60 * 60
    else:
        return None, None, None
    return secs, scalar, units
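# Illustrative usage sketch (not part of the original script): parseSpan() returns a
# (seconds, scalar, unit) tuple for a Splunk-style span string, with the scalar
# defaulting to 1 when no number is given. The values below follow directly from the
# arithmetic above.
#
#   parseSpan("2w")  -> (1209600, 2, 'w')   # 2 * 7 * 24 * 60 * 60
#   parseSpan("h")   -> (3600, 1, 'h')      # bare unit, scalar defaults to 1
#   parseSpan("10x") -> error results + exit, since no valid time unit matches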
def main(self):
    results, dummyresults, self.settings = isp.getOrganizedResults()
    self.keywords, self.argvals = isp.getKeywordsAndOptions()
    logger.info('keywords:' + str(self.keywords))

    # in Splunk pre 5.0 we don't get the info, so we just read it from its standard location
    infoPath = self.settings.get('infoPath', '')
    if len(infoPath) == 0:
        infoPath = os.path.join(getDispatchDir(self.settings.get('sid'), self.settings.get('sharedStorage', None)), 'info.csv')
    self.info.readFrom(infoPath)

    self.raiseAll = splunk.util.normalizeBoolean(unquote(self.argvals.get('raiseall', 'f')))
    self.sessionKey = self.settings.get('sessionKey', None)
    self.owner = self.settings.get('owner', None)
    self.namespace = self.settings.get('namespace', None)
    self.krb5_principal = unquote(self.argvals.get('kerberos_principal', '')).strip()
    if len(self.krb5_principal) == 0:
        self.krb5_principal = None
    HadoopEnvManager.init(APP_NAME, 'nobody', self.sessionKey, self.krb5_principal)
    self._main_impl()
def error(msg):
    # for some reason the old style generateErrorResults aren't making their way into the ui.
    # si.generateErrorResults("Usage: searchtxn <transaction_type> <transaction_search>. Ex: searchtxn loginsessions user=bob")
    messages = {}
    si.addErrorMessage(messages, msg)
    si.outputResults([], messages)
    exit(0)
def run(results, fields):
    try:
        values = set()
        for result in results:
            field = None
            for f, v in result.items():
                if f not in ['count', 'percent']:
                    field = f
                    break
            else:
                continue
            value = result[field]
            if value.lower() == "other":
                value = ' '.join(['NOT %s="%s" ' % (field, v.replace('"', '\\"')) for v in values]) + ' %s=*' % field
            elif value.lower() == "null":
                value = 'NOT %s=*' % field
            else:
                values.add(value)
                value = '%s="%s"' % (field, v.replace('"', '\\"'))
            result['_drilldown'] = value
        if '_drilldown' not in fields:
            fields.append('_drilldown')
        si.outputResults(results, {}, fields=fields)
    except Exception, e2:
        stack2 = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
def main(self):
    logger.error("[---] Splunk Debug printing isp Inspect Results: {}".format(inspect.stack()[1]))
    results, dummyresults, self.settings = isp.getOrganizedResults()
    self.keywords, self.argvals = isp.getKeywordsAndOptions()
    logger.error("[---] Splunk Debug splunklib results: {}".format(self._metadata))

    # in Splunk pre 5.0 we don't get the info, so we just read it from its standard location
    infoPath = self.settings.get('infoPath', '')
    logger.error("[---] Splunk Debug printing isp stuff inside hsc: {}".format(isp.getOrganizedResults()))
    logger.error("[---] Splunk Debug printing isp keywords and argvals inside hsc: {}".format(isp.getKeywordsAndOptions()))
    if len(infoPath) == 0:
        infoPath = os.path.join(getDispatchDir(self.settings.get('sid'), self.settings.get('sharedStorage', None)), 'info.csv')
    self.info.readFrom(infoPath)

    self.raiseAll = splunk.util.normalizeBoolean(unquote(self.argvals.get('raiseall', 'f')))
    self.sessionKey = self.settings.get('sessionKey', None)
    self.owner = self.settings.get('owner', None)
    self.namespace = self.settings.get('namespace', None)
    self.krb5_principal = unquote(self.argvals.get('kerberos_principal', '')).strip()
    if len(self.krb5_principal) == 0:
        self.krb5_principal = None
    HadoopEnvManager.init(APP_NAME, 'nobody', self.sessionKey, self.krb5_principal)
    self._main_impl()
def run(spantext, seriesmode, results):
    try:
        secsPerSpan, scalar, unit = parseSpan(spantext)
        maxtime = -1
        # for each result
        time_data = {}
        fields_seen = {}
        span = None
        latest = None
        for result in results:
            if maxtime < 0:
                try:
                    maxtime = int(float(result['info_max_time']))
                except:
                    maxtime = int(time.time())
                maxtime -= 1  # not inclusive
            if '_time' not in result:
                raise Exception("Missing required _time field on data")
            if span == None and '_span' in result:
                span = result['_span']
            mytime = int(float(result['_time']))
            spansago = int((maxtime - mytime) / secsPerSpan)
            new_time = mytime + (spansago * secsPerSpan)
            if new_time not in time_data:
                time_data[new_time] = {'_time': new_time, '_span': span}
            this_row = time_data[new_time]
            spanstart = maxtime - ((spansago + 1) * secsPerSpan) + 1
            series = seriesName(seriesmode, scalar, spansago, unit, spanstart)
            if spansago == 0:
                latest = series
            acount = len(result)
            for k, v in result.items():
                if k not in ['_time', 'info_sid', 'info_max_time', 'info_min_time', 'info_search_time', '_span']:
                    if k == 'count':
                        attr = series
                    else:
                        attr = '%s_%s' % (k, series)
                    this_row[attr] = result[k]
                    fields_seen[attr] = spansago

        field_order = fields_seen.items()
        field_order.sort(lambda x, y: cmp(x[1], y[1]))
        field_order = [f for f, v in field_order]
        field_order.insert(0, '_time')
        field_order.append('_span')

        results = time_data.values()
        results.sort(lambda x, y: cmp(x['_time'], y['_time']))
        si.outputResults(results, {}, fields=field_order)
    except Exception, e2:
        stack2 = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
def execute():
    try:
        keywords, argvals = isp.getKeywordsAndOptions()
        results, dummyresults, settings = isp.getOrganizedResults()
        sessionKey = settings.get('sessionKey')
        if sessionKey == None:
            return vixutils.generateErrorResults("sessionKey not passed to the search command, something's very wrong!")

        # check that the command is being executed by the scheduler
        sid = settings.get('sid')
        if not sid.startswith('scheduler_') and not argvals.get('forcerun', '') == '1':
            return vixutils.generateErrorResults('rollercontroller is supposed to be run by the scheduler, add forcerun=1 to force execution')

        # check if error messaging is disabled
        global ERRMSGS_ENABLED
        ERRMSGS_ENABLED = 'disablemsgs' not in keywords

        providers = erp_launcher.listProviders(sessionKey)
        rollVixes = erp_launcher.listVixes(sessionKey, 'disabled=0 AND vix.output.buckets.from.indexes=*')
        rollProviders = filterRollProviders(rollVixes, providers)

        searchString = genSearchString(rollVixes, rollProviders)

        kwargs = {}
        for k in ['owner', 'namespace', 'sessionKey', 'hostPath']:
            if k in settings:
                kwargs[k] = settings[k]

        if not os.path.exists(vixutils.getAppBinJars()):
            # first time we're copying jars, force bundle replication
            kwargs['force_bundle_replication'] = 1

        prepareSearchExecution()

        numRetries = argvals.get("retries", 1)
        for i in range(0, int(numRetries)):
            logger.info("Dispatching the search: %s" % searchString)
            search = splunk.search.dispatch(searchString, **kwargs)
            try:
                streamSearch(search, sessionKey)
            finally:
                cancelSearch(search)
    except Exception as e:
        import traceback
        splunkio.write([{"stack": traceback.format_exc(), "exception": str(e)}])
    finally:
        sys.stdout.flush()
def main():
    # get config from config file
    config = ConfigParser.ConfigParser()
    if os.path.exists(os.path.join('..', 'local', 'slack.conf')):
        config.readfp(open(os.path.join('..', 'local', 'slack.conf')))
    else:
        config.readfp(open(os.path.join('..', 'default', 'slack.conf')))

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')

    # update args if the user specifies them in the search
    channel = kwargs.get('channel', config.get('config', 'channel'))
    if not channel.startswith('#'):
        channel = '#' + channel
    if config.get('config', 'allow_user_set_slack_url').lower() in TRUE_VALUES:
        url = kwargs.get('url', config.get('config', 'url'))
    else:
        url = config.get('config', 'url')

    # no url specified, don't proceed.
    if not url:
        raise Exception("No slack url specified!")

    # read search results
    results = sis.readResults(None, None, True)

    https_proxy = config.get('config', 'proxy')
    proxyDict = {"https": https_proxy}

    # prepare data to be sent to slack
    data = {
        'text': get_pretty_table(results),
        'username': username,
        'channel': channel,
        'icon_url': icon,
        'mrkdwn': True,
    }

    if https_proxy != "":
        # send data to slack.
        r = requests.post(url, data=json.dumps(data), proxies=proxyDict)
    else:
        r = requests.post(url, data=json.dumps(data))

    if r.status_code == 200:
        sis.outputResults(results)
    else:
        err_msg = "Error sending results to slack, reason: {r}, {t}".format(r=r.reason, t=r.text)
        sis.generateErrorResults(err_msg)
def main():
    bsmProc = BSMProcessor()
    optlist = None
    try:
        optlist, args = getopt.getopt(sys.argv[1:], '?', ['noCache=', 'filter='])
        bsmProc.initFromOptlist(optlist)
    except getopt.error, val:
        print str(val)  # tell them what was wrong
        bsmProc.usage()
        si.generateErrorResults("Incorrect usage...")
def getRanges(options):
    ranges = {}
    for name, startend in options.items():
        if name in ['field', 'default']:
            continue
        try:
            start, end = re.match("(-?\d+)-(-?\d+)", startend).groups()
            ranges[name] = (float(start), float(end))
        except:
            si.generateErrorResults("Invalid range: '%s'. '<start_num>-<end_num>' expected." % startend)
            exit(0)
    return ranges
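# Illustrative usage sketch (not part of the original script): getRanges() turns the
# command's options (minus 'field' and 'default') into numeric (start, end) tuples and
# errors out on anything that does not look like '<start_num>-<end_num>'.
#
#   getRanges({'field': 'count', 'default': 'unknown', 'low': '0-30', 'elevated': '31-100'})
#   -> {'low': (0.0, 30.0), 'elevated': (31.0, 100.0)}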
def main():
    # get config from config file
    config = ConfigParser.ConfigParser()
    config.readfp(open(os.path.join('..', 'default', 'hipchat.conf')))

    # update args if the user specifies them in the search
    room = kwargs.get('room', config.get('default', 'room'))
    color = kwargs.get('color', config.get('default', 'color'))
    notify = kwargs.get('notify', config.get('default', 'notify'))
    msg_fmt = kwargs.get('message_format', config.get('default', 'message_format'))
    if config.get('default', 'allow_users_set_base_url').lower() in TRUE_VALUES:
        base_url = kwargs.get('base_url', config.get('default', 'base_url'))
    else:
        base_url = config.get('default', 'base_url')

    # check if auth token is set properly
    try:
        auth_token = {"auth_token": config.get(room, 'auth_token')}
    except ConfigParser.NoSectionError as e:
        raise Exception("Room not set, please set the room stanza")
    except ConfigParser.NoOptionError as e:
        raise Exception("Auth token not set, please set auth token for room")

    # construct url
    url = base_url + "{s}{r}/notification".format(s='' if base_url.endswith('/') else '/', r=room)

    # read search results
    results = sis.readResults(None, None, True)

    # prepare data to be sent
    data = {
        'message': get_pretty_table(results, msg_fmt),
        'message_format': msg_fmt,
        'color': color,
        'notify': notify.lower() in TRUE_VALUES
    }

    # send data
    headers = {'Content-type': 'application/json'}
    r = requests.post(url, data=json.dumps(data), params=auth_token, headers=headers)
    if r.status_code == 204:
        sis.outputResults(results)
    else:
        err_msg = "Error sending results to hipchat, reason: {r}, {t}".format(r=r.reason, t=r.text)
        sis.generateErrorResults(err_msg)
def main():
    client = actconfig.setup()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    results = []

    if opts and "keywords" not in kwargs:
        kwargs["keywords"] = " ".join(opts)

    results += fact_search(client, **kwargs)

    Intersplunk.outputResults(results)
def main():
    if len(sys.argv) < 3:
        usage()
    tname = sys.argv[1]
    #log("args")
    #for v in sys.argv:
    #    log(v)

    options = ["max_terms", "use_disjunct", "eventsonly"]
    srchargs = []
    log("ARGS: %s" % sys.argv[2:])
    for arg in sys.argv[2:]:
        for option in options:
            if arg.startswith(option):
                break
        else:
            srchargs.append(arg)
    if len(srchargs) == 0:
        usage()

    tsearch = ' '.join(srchargs)
    log("SEARCH: %s" % tsearch)

    results, dummyresults, settings = si.getOrganizedResults()
    results = []  # we don't care about incoming results

    ########TEST#####################
    if 'sessionKey' not in settings:
        settings['owner'] = 'admin'
        settings['password'] = '******'
        settings['namespace'] = 'search'
        settings['sessionKey'] = splunk.auth.getSessionKey('admin', 'changeme')
    ########TEST####################
    kwargs = {}
    for f in ['owner', 'namespace', 'sessionKey', 'hostPath']:
        if f in settings:
            kwargs[f] = settings[f]

    messages = {}
    try:
        maxTerms = int(settings.get("max_terms", MAX_SEARCH_COMPLEXITY))
        if maxTerms > MAX_SEARCH_COMPLEXITY or maxTerms < 1:
            si.addWarnMessage(messages, "max_terms must be between 1 and %s. Using default." % MAX_SEARCH_COMPLEXITY)
            maxTerms = MAX_SEARCH_COMPLEXITY
    except Exception, e:
        maxTerms = MAX_SEARCH_COMPLEXITY
def return_results(module):
    try:
        results, dummy_results, settings = InterSplunk.getOrganizedResults()

        if isinstance(results, list) and len(results) > 0:
            new_results = module.process_iocs(results)
        elif len(sys.argv) > 1:
            new_results = module.process_iocs(None)
    except:
        stack = traceback.format_exc()
        new_results = InterSplunk.generateErrorResults("Error: " + str(stack))

    InterSplunk.outputResults(new_results)
    return
def _do_handle(self):
    self.logger.info("Start of ServiceNow script")
    results = []
    for event in self._get_events():
        if event is None:
            break

        result = self._handle_event(event)
        if result:
            result["_time"] = time.time()
            results.append(result)

    si.outputResults(results)
    self.logger.info("End of ServiceNow script")
def _handle_response(self, response, content):
    if response.status in (200, 201):
        resp = self._get_resp_record(content)
        if resp:
            result = self._get_result(resp)
        else:
            result = {"error": "Failed to create ticket"}
        self.logger.debug(result)
        return result
    else:
        self.logger.error("Failed to create ticket. Return code is %s. "
                          "Reason is %s", response.status, response.reason)
        si.parseError("Failed to create ticket. Return code is {0}. Reason"
                      " is {1}".format(response.status, response.reason))
        return None
def _handle_response(self, response, content):
    if response.status in (200, 201):
        resp = self._get_resp_record(content)
        if resp:
            result = self._get_result(resp)
        else:
            result = {"error": "Failed to create ticket"}
        return result
    else:
        self.logger.error("Failed to create ticket. Return code is %s. "
                          "Reason is %s", response.status, response.reason)
        si.parseError("Failed to create ticket. Return code is {0}. Reason"
                      " is {1}".format(response.status, response.reason))
        return None
def wrap_anomaly_detection(search_results):
    fields = search_results[0].keys()
    detected_fields = list(filter(lambda x: x not in const.FILTER_FIELDS, fields))
    output_count = len(search_results) - algorithm.get_train_count()
    # initialize output
    output_results = [{'_time': search_results[i + algorithm.get_train_count()]['_time']}
                      for i in xrange(output_count)]
    output_fields = ['_time']
    outlier_count = 0
    for cur_field in detected_fields:
        output_fields += ['value_' + cur_field, 'outlier_' + cur_field, 'severity_' + cur_field]
        try:
            cur_data = [float(str(search_results[i][cur_field])) for i in xrange(len(search_results))]
            outlier_indexes, severity_array = algorithm.anomaly_detection(cur_data)
            severity_index = 0
            outlier_count += len(outlier_indexes)
            for i in xrange(output_count):
                search_results_index = i + algorithm.get_train_count()
                output_results[i]['value_' + cur_field] = search_results[search_results_index][cur_field]
                output_results[i]['outlier_' + cur_field] = search_results_index in outlier_indexes
                output_results[i]['severity_' + cur_field] = severity_array[severity_index] if search_results_index in outlier_indexes else 0
                if search_results_index in outlier_indexes:
                    severity_index += 1
        except ValueError:
            intersplunk.parseError('This command only supports numbers. Field %s is not numerical.' % cur_field)

    if outlier_count == 0:
        return [], output_fields
    else:
        return output_results, output_fields
def parse_pstacks():
    results = []
    keywords, options = si.getKeywordsAndOptions()
    separator = options.get('separator', DEFAULT_SEPARATOR)
    fileorderindex = int(options.get('fileorderindex', DEFAULT_FOI))
    timeorderindex = int(options.get('timeorderindex', DEFAULT_TSI))

    if len(keywords) == 0:
        raise Exception("requires path to pstack file(s)")

    gpath = keywords.pop(0)
    gpath = gpath.replace("\\\\", "\\")
    gpath = gpath.replace('\[', '[')
    gpath = gpath.replace('\]', ']')

    # find all files matching
    complete_path = os.path.expanduser(os.path.expandvars(gpath))
    glob_matches = glob.glob(complete_path)
    logger.error("complete path: %s" % complete_path)
    logger.error("glob matches: %s" % glob_matches)

    if len(glob_matches) == 0:
        logger.error("No file matching %s" % complete_path)
        raise Exception("No files matching %s." % complete_path)

    for pfile in glob_matches:
        logger.error("parsing file: %s" % pfile)
        results += parse_pstack_file(pfile, separator, fileorderindex, timeorderindex)

    #return results
    return results
def getArgs():
    fields, argvals = si.getKeywordsAndOptions()
    url = argvals.get("url", "null")
    username = argvals.get("username", "null")
    if url == 'null':
        url = "https://api.github.com/feeds"
    return url, username
def yamuser():
    try:
        # logger = dcu.getLogger()
        # logger.info("Starting the yamuser command")

        # Get configuration values from jira.conf
        splunk_conf = yammercommon.getSplunkConf()

        # logger.root.setLevel(logging.DEBUG)

        local_conf = yammercommon.getLocalConf()
        access_token = local_conf.get('yammercommon', 'access_token')

        # logger.debug("Access Token %s" % access_token)

        yammer = yampy.Yammer(access_token=access_token)

        results, dummyresults, settings = isp.getOrganizedResults()
        keywords, options = isp.getKeywordsAndOptions()

        output_field = options.get('out', 'yammer_user_full_name')
        user_id_fld = options.get('field', 'sender_id')

        #userid = argvals.get("id")

        if results:
            for result in results:
                userid = result[user_id_fld]
                if userid:
                    #user = yammer.users.find(userid)
                    result[str(output_field)] = "test"  #user.full_name
        else:
            result = {}
            #user = yammer.users.find(userid)
            #result[str(user_name)] = user.full_name
            #results.append(result)

        splunk.Intersplunk.outputResults(results)
    except Exception, e:
        import traceback
        stack = traceback.format_exc()
        splunk.Intersplunk.generateErrorResults(str(e))
def getSplunkConf():
    results, dummyresults, settings = isp.getOrganizedResults()

    namespace = settings.get("namespace", None)
    owner = settings.get("owner", None)
    sessionKey = settings.get("sessionKey", None)

    conf = sb.getConf('jira', namespace=namespace, owner=owner, sessionKey=sessionKey)
    stanza = conf.get('jira')

    return stanza
def getSessionKey():
    results, dummyresults, settings = si.getOrganizedResults()
    sessionKey = settings.get("sessionKey", None)

    if len(sessionKey) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. " +
                         "Please enable passAuth in inputs.conf for this " +
                         "script\n")
        exit(2)

    return sessionKey
def initFromOptlist(self, optlist):
    # First read settings in config.ini, if it exists...
    self.readConfig()
    # ...now, for debugging and backward compat, allow command line
    # settings to override...
    self.readOptlist(optlist)
    if self.debug:
        keys = self.__dict__.keys()
        keys.sort()
        for k in keys:
            if k.startswith("_"):
                continue
            print k + "=" + str(self.__dict__[k])

    # check min required args
    if self.prFlags == "":
        self.usage()
        #sys.exit()
        si.generateErrorResults("Too few arguments")
def main():
    try:
        messages = {}

        keywords, options = si.getKeywordsAndOptions()
        DEFAULT_MAX_TYPES = 10
        maxtypes = options.get('max', str(DEFAULT_MAX_TYPES))

        error = None
        if not maxtypes.isdigit():
            error = 'max must be an integer between 1-%s.' % MAXRESULTS
        else:
            maxtypes = int(maxtypes)
            if not (0 < maxtypes <= MAXRESULTS):
                error = 'max must be an integer between 1-%s.' % MAXRESULTS
        if error:
            si.generateErrorResults(error)
            return

        ignore_covered = 'notcovered' in keywords
        useraw = 'useraw' in keywords

        results, dummyresults, settings = si.getOrganizedResults()
        #for r in results:
        #    for attr in r:
        #        print attr, r[attr], len(r[attr])

        if len(results) > MAXRESULTS:
            results = results[:MAXRESULTS]
            si.addWarnMessage(messages, "For performance reasons, the maximum number of results used to discover event types was capped at %s. Consider a more restrictive search." % MAXRESULTS)

        argc = len(sys.argv)
        argv = sys.argv

        sessionKey = settings.get("sessionKey", None)
        owner = settings.get("owner", None)
        namespace = settings.get("namespace", None)

        searchhead = ''
        try:
            searches = sutils.getCommands(settings.get("search", ''), None)
            firstcmd = searches[0][0][0]
            firstarg = searches[0][0][1].strip()
            if firstcmd == 'search' and firstarg != '*':
                searchhead = firstarg
        except Exception, e:
            pass

        results = discover(results, searchhead, maxtypes, ignore_covered, useraw)

        if len(results) == 0:
            si.addWarnMessage(messages, "Unable to isolate useful groups of events.")
def parseArgs(txt):
    m = re.match('\s*(?:(?P<count>-?\d+)\s+)?(?P<variables>.+)', txt)
    if m == None:
        si.parseError(usage())
    md = m.groupdict()
    counttext = md['count']
    count = 1
    if counttext != None:
        count = int(counttext)
    variables = md['variables']

    mapping = []
    matches = re.findall("(?i)\s*(?:(?P<alias>[a-z0-9_]+)\s*[=])?\s*(?P<field>[$a-z0-9_]+)", variables)
    for alias, value in matches:
        if value.startswith('$'):
            value = value[1:]
        elif alias == '':
            alias = value
        mapping.append((alias, value))
    return count, mapping
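# Illustrative usage sketch (not part of the original script): parseArgs() splits an
# optional leading row count off a list of fields, where 'alias=field' renames a field
# and a bare name maps to itself.
#
#   parseArgs("3 user=uid host") -> (3, [('user', 'uid'), ('host', 'host')])
#   parseArgs("source")          -> (1, [('source', 'source')])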
def getArgs():
    badcounts = False
    try:
        maxcount = int(options.get('maxcount', '20'))
        if maxcount <= 0:
            badcounts = True
    except:
        badcounts = True
    if badcounts:
        si.generateErrorResults("Error: invalid required 'maxcount' (1-INF) setting.")
        exit()

    sizefield = options.get('sizefield', 'totalCount')
    pathfield = options.get('pathfield', 'source')
    if sizefield == None or pathfield == None:
        si.generateErrorResults("Error: both pathfield and sizefield must be specified.")
        exit()
    countfield = options.get('countfield', 'count')
    delimiter = options.get('sep', os.sep)
    return maxcount, sizefield, pathfield, countfield, delimiter
def parseArgs(txt):
    m = re.match('\s*(?:(?P<count>-?\d+)\s+)?(?P<variables>.+)', txt)
    if m == None:
        si.parseError(usage())
    md = m.groupdict()
    counttext = md['count']
    count = 1
    if counttext != None:
        count = int(counttext)
    variables = md['variables']

    mapping = []
    matches = re.findall("(?i)\s*(?:(?P<alias>[a-z0-9_.]+)\s*[=])?\s*(?P<field>[$a-z0-9_.]+)", variables)
    for alias, value in matches:
        if value.startswith('$'):
            value = value[1:]
        elif alias == '':
            alias = value
        mapping.append((alias, value))
    return count, mapping
def main():
    client = actconfig.setup()

    # Parse arguments
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    if not opts:
        Intersplunk.generateErrorResults(
            "Usage: | actadd <field1> ... <fieldN> [fact_type=<fact type>] [fact_value=<fact value>]")
        return

    events, _, _ = Intersplunk.getOrganizedResults()

    # Annotate events
    for event in events:
        object_value = []
        for field in opts:
            if event.get(field):
                object_value.append(event[field])

        if not object_value:
            continue

        event.update(fact_search(client, object_value, **kwargs))

    Intersplunk.outputResults(events)
def download_python(version, build_path):
    base_url = simpleRequest("/servicesNS/nobody/pyden-manager/properties/pyden/download/url", sessionKey=session_key)[1]
    try:
        dpr = requests.get(base_url + "{0}/".format(version), proxies=proxies)
    except Exception as ex:
        Intersplunk.generateErrorResults("Exception thrown getting python: ({0}, {1})".format(type(ex), ex))
        sys.exit(1)
    else:
        if dpr.status_code in range(200, 300):
            python_link = [link for link in re.findall("href=\"(.*?)\"", dpr.content) if link.endswith('tgz')][0]
            dpr = requests.get(base_url + "{0}/{1}".format(version, python_link), proxies=proxies)
        else:
            Intersplunk.generateErrorResults(
                "Failed to reach www.python.org. Request returned - Status code: {0}, Response: {1}".format(dpr.status_code, dpr.text))
            sys.exit(1)
    if dpr.status_code in range(200, 300):
        # save
        build_file = os.path.join(build_path, "Python-{0}.tgz".format(version))
        with open(build_file, "w") as download:
            download.write(dpr.content)
    else:
        Intersplunk.generateErrorResults(
            "Failed to download python. Request returned - Status code: {0}, Response: {1}".format(dpr.status_code, dpr.text))
        sys.exit(1)
    return build_file
def activate():
    if sys.argv[-1] == "reloaded":
        reload(os)
        reload(sys)
        return
    sys.argv.append("reloaded")
    from splunk import Intersplunk
    settings = dict()
    Intersplunk.readResults(settings=settings)
    session_key = settings['sessionKey']
    proxies = get_proxies(session_key)
    bin_dir = os.path.dirname(py_exec)
    path = bin_dir + os.pathsep + os.environ["PATH"]
    passed_envs = {
        "PATH": path,
        "SPLUNK_HOME": os.environ['SPLUNK_HOME']
    }
    if proxies:
        passed_envs['HTTP_PROXY'] = proxies['http']
        passed_envs['HTTPS_PROXY'] = proxies['https']
    os.execve(py_exec, ['python'] + sys.argv, passed_envs)
def main():
    """ """
    lookup_path = '/opt/splunk/etc/apps/osweep/lookups'
    file_path = '{}/urlhaus_url_feed.csv'.format(lookup_path)

    if sys.argv[1].lower() == 'feed':
        data_feed = urlhaus.get_feed()
        urlhaus.write_file(data_feed, file_path)
        exit(0)

    try:
        results, dummy_results, settings = InterSplunk.getOrganizedResults()

        if isinstance(results, list) and len(results) > 0:
            new_results = process_master(results)
        elif len(sys.argv) > 1:
            new_results = process_master(None)
    except:
        stack = traceback.format_exc()
        new_results = InterSplunk.generateErrorResults("Error: " + str(stack))

    InterSplunk.outputResults(new_results)
    return
def main():
    if DEBUG:
        logger("main")

    check_port(PROXY_HOST, PROXY_PORT)
    check_port(SNOW_HOST, SNOW_PORT)
    check_port(HEC_HOST, HEC_PORT)
    check_port(SPLUNK_INDEX, SPLUNK_INDEX_PORT)
    verify_log_path(LOG_PATH)

    if DEBUG:
        logger("MAIN: Start of Run")
        print "MAIN: Requests Version", requests.__version__
        print "MAIN: before si call"

    try:
        myresults, dummyresults, settings = si.getOrganizedResults()
    except Exception as ex:
        print datetime.datetime.now(), "SEARCH_RESULTS: ERROR: Call to get Splunk Results failed. Reason:", ex
        logger("ERROR: Call to get Splunk Results failed.")
        if DEBUG:
            print datetime.datetime.now(), "SNOW: Message: ", ex.message
            logger("Message: " + ex.message)
    else:
        for r in myresults:
            if DEBUG:
                print datetime.datetime.now(), "MAIN: r=", r
                #logger("from MAIN: " + str(r))
            SNOW_Event = {}
            SEND_SNOW = True
            for k, v in r.items():
                SNOW_Event[k] = v
                if k == "nosend":
                    print "nosend detected"
                    SEND_SNOW = False
            ### NOTE request to SNOW requires data to be of type STR
            if SEND_SNOW:
                send_to_snow(str(SNOW_Event))
            else:
                print datetime.datetime.now(), "NO Send honored."
                logger("NO send honored")

    if DEBUG:
        logger("MAIN: End of Run")
def run(messages, count, mapping):
    results = si.readResults(None, None, True)
    ORS = []
    seenValues = set()  # dedup rows
    for i, result in enumerate(results):
        if count > 0 and i >= count:
            break
        ANDS = []
        for j, (renamed, attr) in enumerate(mapping):
            val = str(result.get(attr, ''))
            if renamed == None or renamed == '':
                if val != '':
                    ANDS.append(val)
            else:
                ANDS.append('%s="%s"' % (renamed, val))
        andstr = str(ANDS)
        if len(ANDS) > 0 and andstr not in seenValues:
            ORS.append(ANDS)
            seenValues.add(andstr)

    output = ""
    if len(ORS) > 1:
        output += "("
    for i, OR in enumerate(ORS):
        if i > 0:
            output += ") OR ("
        for j, AND in enumerate(OR):
            if j > 0:
                output += " "  # " AND "
            output += AND
    if len(ORS) > 1:
        output += ")"

    si.outputResults([{'search': output}], messages)
def main():
    try:
        search_results, dummy_results, settings = intersplunk.getOrganizedResults()
        if len(search_results) > 0:
            output_results = cal_utilization(search_results)
            intersplunk.outputResults(output_results, fields=output_results[0].keys())
    except:
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
        intersplunk.outputResults(results)
def execute():
    results = []
    try:
        results, dummyresults, settings = si.getOrganizedResults()

        # default values
        args = {"namespace": "search"}
        # get commandline args
        keywords, options = si.getKeywordsAndOptions()
        # override default args with settings from search kernel
        args.update(settings)
        # override default args with commandline args
        args.update(options)

        sessionKey = args.get("sessionKey", None)
        owner = args.get("owner", "admin")
        namespace = args.get("namespace", None)
        if namespace.lower() == "none":
            namespace = None

        messages = {}

        if sessionKey == None:
            # this shouldn't happen, but it's useful for testing.
            try:
                sessionKey = sa.getSessionKey("admin", "changeme")
                si.addWarnMessage(messages, "No session given to 'tune' command. Using default admin account and password.")
            except splunk.AuthenticationFailed, e:
                si.addErrorMessage(messages, "No session given to 'tune' command.")
                return

        if len(keywords) != 1:
            usage()

        # e.g., '/data/inputs/monitor'
        entity = keywords[0]
        logger.info("Entity: %s Args: %s" % (entity, args))

        results = []  # we don't care about incoming results
        try:
            entitys = en.getEntities(entity, sessionKey=sessionKey, owner=owner, namespace=namespace, count=-1)
            for name, entity in entitys.items():
                try:
                    myapp = entity["eai:acl"]["app"]
                    if namespace != None and myapp != namespace:
                        continue
                except:
                    continue  # if no eai:acl/app, filter out
                result = entityToResult(name, entity)
                results.append(result)
        except splunk.ResourceNotFound, e2:
            pass
def raw_pstack():
    results = []
    keywords, options = si.getKeywordsAndOptions()
    separator = options.get('separator', DEFAULT_SEPARATOR)
    fileorderindex = int(options.get('fileorderindex', DEFAULT_FOI))
    thread_id = options.get('threadid', DEFAULT_THREADID)
    reverse = options.get('reverse', DEFAULT_REVERSE)
    timeorderindex = int(options.get('timeorderindex', DEFAULT_TSI))

    if len(keywords) == 0:
        raise Exception("requires path to pstack file(s)")

    gpath = keywords.pop(0).strip()
    logger.error("b4 gpath = %s" % gpath)
    gpath = gpath.replace("\\\\", "\\")
    gpath = gpath.replace("\[", "[")
    gpath = gpath.replace("\]", "]")
    logger.error("gpath = %s" % gpath)

    # find all files matching
    complete_path = os.path.expanduser(os.path.expandvars(gpath))
    glob_matches = glob.glob(complete_path)
    logger.debug("complete path: %s" % complete_path)
    logger.debug("glob matches: %s" % glob_matches)

    if len(glob_matches) == 0:
        logger.error("No file matching %s" % complete_path)
        raise Exception("No files matching %s." % complete_path)

    for pfile in glob_matches:
        logger.error("parsing file: %s" % pfile)
        results += parse_raw_pstack(pfile, thread_id, reverse, separator, fileorderindex, timeorderindex)

    #return results
    return results
        for _t in threads:
            _t.join()

        _iops = sum(results)
        bandwidth = int(blocksize * _iops)
        #print " %sB blocks: %6.1f IO/s, %sB/s (%sbit/s)" % (greek(blocksize), _iops, greek(bandwidth, 1), greek(8*bandwidth, 1, 'si'))
        #print strftime("%Y-%m-%d %H:%M:%S") + " location=%s, capacity=%s, threads=%d, block_size=%s, iops=%s" % (dev, mediasize(dev), num_threads, blocksize, _iops)
        #blocksize *= 2
        runs -= 1

    now = str(int(time.mktime(time.localtime())))

    def hello(results, settings):
        result = {}
        #result['string'] = strftime("%Y-%m-%d %H:%M:%S") + " location=%s, storage_type=%s, file_size_kb=%s, threads=%d, block_size=%s, iops=%s" % (dev, storage_type, file_size_kb, num_threads, blocksize, _iops)
        #results.append({'_time' : now, 'location' : dev, 'run_time_sec' : t, 'storage_type' : storage_type, 'file_size_kb' : file_size_kb, 'threads' : num_threads, 'block_size' : blocksize, 'iops' : _iops})
        results.append({'_time': now, 'location': dev, 'run_time_sec': t, 'threads': num_threads, 'block_size': blocksize, 'iops': _iops})
        return results

    results, dummyresults, settings = si.getOrganizedResults()
    results = hello(results, settings)
    si.outputResults(results)

except IOError, (err_no, err_str):
    raise SystemExit(err_str)

except KeyboardInterrupt:
    print "caught ctrl-c, bye."

# eof.
def usage():
    si.generateErrorResults("Usage: entity <endpoint>")
    exit(0)
        # e.g., '/data/inputs/monitor'
        entity = keywords[0]
        logger.info("Entity: %s Args: %s" % (entity, args))

        results = []  # we don't care about incoming results
        try:
            entitys = en.getEntities(entity, sessionKey=sessionKey, owner=owner, namespace=namespace, count=-1)
            for name, entity in entitys.items():
                try:
                    myapp = entity["eai:acl"]["app"]
                    if namespace != None and myapp != namespace:
                        continue
                except:
                    continue  # if no eai:acl/app, filter out
                result = entityToResult(name, entity)
                results.append(result)
        except splunk.ResourceNotFound, e2:
            pass

        si.outputResults(results, messages)
    except Exception, e:
        import traceback
        stack = traceback.format_exc()
        logger.error(str(e) + ". Traceback: " + str(stack))
        si.generateErrorResults(str(e))


if __name__ == "__main__":
    execute()
import hashlib
import splunk.Intersplunk as si

if __name__ == '__main__':
    try:
        keywords, options = si.getKeywordsAndOptions()
        if len(keywords) == 0:
            si.generateErrorResults('Requires fields list.')
            exit(0)
        search = ' '.join(keywords)

        results, dummyresults, settings = si.getOrganizedResults()
        for result in results:
            eventSignature = '-=XXX=-'.join([result.get(field, '') for field in keywords])
            sigHash = hashlib.md5(eventSignature).hexdigest()
            result['_icon'] = sigHash
        si.outputResults(results)
    except Exception, e:
        import traceback
        stack = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e, stack))
""" out = None err = None if self.locateProcess(proc_name=the_proc_name): p = subprocess.Popen(self._proc_cmds['kill_proc_name'][self.getPlatform()] + [str(the_proc_name)], stdout=subprocess.PIPE) out, err = p.communicate() else: logger.error("Process Name: " + str(the_proc_name) + " not found " + " running on the system!") return [out,err] if __name__ == '__main__': try: results = si.readResults() keywords, options = si.getKeywordsAndOptions() for entry in results: ## PID if "pid" in entry: pid = entry["pid"] else: pid = options.get('pid', None) ## Process Name if 'proc_name' in entry: proc_name = entry['proc_name'] else: proc_name = options.get('proc_name', None)
# Copyright (C) 2005-2011 Splunk Inc. All Rights Reserved. Version 4.0
import splunk.Intersplunk as si
import splunk.mining.FieldLearning as ifl
import re
import splunk.mining.dcutils as dcu

logger = dcu.getLogger()

if __name__ == '__main__':
    try:
        keywords, options = si.getKeywordsAndOptions()
        examples = options.get('examples', None)
        badexamples = options.get('counterexamples', None)
        fromfield = options.get('fromfield', '_raw')
        maxtrainers = options.get('maxtrainers', '100')
        msg = None
        if examples != None and examples.startswith('"') and examples.endswith('"'):
            examples = examples[1:-1]
        if badexamples != None and badexamples.startswith('"') and badexamples.endswith('"'):
            badexamples = badexamples[1:-1]
        logger.error(examples)
        if len(keywords) == 0:
            msg = "A required fieldname is missing"
        elif examples == None:
            msg = "Value for 'examples' is required"
        else:
# send splunk results to slack
import prettytable
import ConfigParser
import requests
import json
import os
import sys
import splunk.Intersplunk as sis

(a, kwargs) = sis.getKeywordsAndOptions()
TRUE_VALUES = ['true', '1', 't', 'y', 'yes']


def get_pretty_table(results):
    if results:
        keys = results[0].keys()
    else:
        return ''
    x = prettytable.PrettyTable(keys, padding_width=4)
    for row in results:
        x.add_row([row[k] for k in keys])
    return "```" + x.get_string() + "```"


def main():
    # get config from config file
    config = ConfigParser.ConfigParser()
    config.readfp(open(os.path.join('..', 'default', 'slack.conf')))

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')
def usage():
    si.generateErrorResults(" 'timeunit' argument required, such as s (seconds), h (hours), d (days), w (weeks), or y (years). Optionally prefix with a number: 600s (10 minutes), 2w (2 weeks). Optionally add another argument to specify the time-range label: series=[short,exact,relative]")
    exit(-1)
        results.sort(lambda x, y: cmp(x['_time'], y['_time']))
        si.outputResults(results, {}, fields=field_order)
    except Exception, e2:
        stack2 = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))


def usage():
    si.generateErrorResults(" 'timeunit' argument required, such as s (seconds), h (hours), d (days), w (weeks), or y (years). Optionally prefix with a number: 600s (10 minutes), 2w (2 weeks). Optionally add another argument to specify the time-range label: series=[short,exact,relative]")
    exit(-1)


if __name__ == '__main__':
    try:
        series_mode = 'relative'
        (isgetinfo, sys.argv) = si.isGetInfo(sys.argv)
        argc = len(sys.argv)
        if argc != 2 and argc != 3:
            usage()
        if argc == 3:
            arg = sys.argv[2]
            match = re.search("(?i)series=(short|exact|relative)", sys.argv[2])
            if match == None:
                usage()
            series_mode = match.group(1)
        if isgetinfo:
            # outputInfo(streaming, generating, retevs, reqsop, preop, timeorder=False, clear_req_fields=False, req_fields=None)
            si.outputInfo(False, False, False, True, "addinfo", timeorder=False)
        results, dummyresults, settings = si.getOrganizedResults()
        run(sys.argv[1], series_mode, results)
    return output

#
# main
#

# merge any passed args
args = DEFAULT_ARGS
for item in sys.argv:
    kv = item.split('=')
    if len(kv) > 1:
        val = item[item.find('=') + 1:]
        try:
            val = int(val)
        except:
            pass
        args[kv[0]] = util.normalizeBoolean(val)

# run generator
try:
    for c in range(args['chunks']):
        if (c > 0 and args['chunkdelay'] > 0):
            time.sleep(args['chunkdelay'])
        results = generateData(c, **args)
        isp.outputStreamResults(results)
except:
    sys.stdout.write("FAILED: \n")
    traceback.print_exc()
        argList.append(notes)
        argList.append("-p")
        argList.append(shape)
        argList.append("-R")
        argList.append(readRole)
        argList.append("-S")
        argList.append(search)
        argList.append("-s")
        argList.append(save)
        argList.append("-t")
        argList.append(term_list)
        argList.append("-u")
        argList.append(uom)
        argList.append("-W")
        argList.append(writeRole)

        settings = saUtils.getSettings(sys.stdin)
        argList.append("-E")
        argList.append(settings['namespace'])
        argList.append("-I")
        argList.append(settings['infoPath'])

        saUtils.runProcess(sys.argv[0], "xsCreateADContext", argList, True)

        if containerName == '':
            containerName = contextName

        (worked, response, content) = saUtils.force_lookup_replication(settings['namespace'], containerName, settings['sessionKey'], None)

    except Exception, e:
        si.generateErrorResults(e)
owner = settings.get("owner", None) namespace = settings.get("namespace", None) searchhead = '' try: searches = sutils.getCommands(settings.get("search", ''), None) firstcmd = searches[0][0][0] firstarg = searches[0][0][1].strip() if firstcmd == 'search' and firstarg != '*': searchhead = firstarg except Exception, e: pass results = discover(results, searchhead, maxtypes, ignore_covered, useraw) if len(results) == 0: si.addWarnMessage(messages, "Unable to isolate useful groups of events.") except: import traceback stack = traceback.format_exc() results = si.generateErrorResults("Error : Traceback: " + str(stack)) si.outputResults( results, messages ) if __name__ == '__main__': #profileMain() main()