def main():
    """Read search results and post them to a Slack channel via webhook.

    Configuration comes from ../local/slack.conf when present, falling
    back to ../default/slack.conf.  The channel (and, when allowed by the
    conf, the webhook url) may be overridden per-search via ``kwargs``.

    Raises:
        Exception: if no webhook url is configured or supplied.
    """
    # Get config from the config file; a local override wins over default.
    config = ConfigParser.ConfigParser()
    local_conf = os.path.join('..', 'local', 'slack.conf')
    default_conf = os.path.join('..', 'default', 'slack.conf')
    conf_path = local_conf if os.path.exists(local_conf) else default_conf
    # Context manager closes the handle (the original leaked the open file).
    with open(conf_path) as conf_file:
        config.readfp(conf_file)

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')

    # update args if user specified them in search
    channel = kwargs.get('channel', config.get('config', 'channel'))
    if not channel.startswith('#'):
        channel = '#' + channel

    # The url may only be overridden when the conf explicitly allows it.
    if config.get('config', 'allow_user_set_slack_url').lower() in TRUE_VALUES:
        url = kwargs.get('url', config.get('config', 'url'))
    else:
        url = config.get('config', 'url')

    # No url specified, don't proceed.  (Message wording fixed.)
    if not url:
        raise Exception("No slack url specified!")

    # read search results
    results = sis.readResults(None, None, True)

    https_proxy = config.get('config', 'proxy')
    proxyDict = {"https": https_proxy}

    # prepare data to be sent to slack
    data = {
        'text': get_pretty_table(results),
        'username': username,
        'channel': channel,
        'icon_url': icon,
        'mrkdwn': True,
    }

    # Send data to slack, routing through the proxy only when one is set.
    if https_proxy != "":
        r = requests.post(url, data=json.dumps(data), proxies=proxyDict)
    else:
        r = requests.post(url, data=json.dumps(data))

    # Slack webhooks answer 200 OK on success.
    if r.status_code == 200:
        sis.outputResults(results)
    else:
        err_msg = ("Error sending results to slack, reason: {r}, {t}".format(
            r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
def main():
    """Read search results and post them to a HipChat room notification.

    Room, color, notify and message_format may be overridden per-search
    via ``kwargs``; the auth token is read from the room's conf stanza.

    Raises:
        Exception: when the room stanza or its auth_token is missing.
    """
    # Get config from config file; close the handle promptly (the
    # original leaked the open file object).
    config = ConfigParser.ConfigParser()
    with open(os.path.join('..', 'default', 'hipchat.conf')) as conf_file:
        config.readfp(conf_file)

    # update args if user specified them in search
    room = kwargs.get('room', config.get('default', 'room'))
    color = kwargs.get('color', config.get('default', 'color'))
    notify = kwargs.get('notify', config.get('default', 'notify'))
    msg_fmt = kwargs.get('message_format',
                         config.get('default', 'message_format'))

    # The base url may only be overridden when the conf allows it.
    if config.get('default', 'allow_users_set_base_url').lower() in TRUE_VALUES:
        base_url = kwargs.get('base_url', config.get('default', 'base_url'))
    else:
        base_url = config.get('default', 'base_url')

    # check if auth token is set properly
    try:
        auth_token = {"auth_token": config.get(room, 'auth_token')}
    except ConfigParser.NoSectionError:
        raise Exception("Room not set, please set the room stanza")
    except ConfigParser.NoOptionError:
        raise Exception("Auth token not set, please set auth token for room")

    # Construct url, avoiding a double slash when base_url already ends in '/'.
    url = base_url + "{s}{r}/notification".format(
        s='' if base_url.endswith('/') else '/', r=room)

    # read search results
    results = sis.readResults(None, None, True)

    # prepare data to be sent
    data = {
        'message': get_pretty_table(results, msg_fmt),
        'message_format': msg_fmt,
        'color': color,
        'notify': notify.lower() in TRUE_VALUES
    }

    # send data
    headers = {'Content-type': 'application/json'}
    r = requests.post(url, data=json.dumps(data), params=auth_token,
                      headers=headers)

    # HipChat's notification API answers 204 No Content on success.
    if r.status_code == 204:
        sis.outputResults(results)
    else:
        # Fixed: the original error message said "slack" in this hipchat
        # handler.
        err_msg = ("Error sending results to hipchat, reason: {r}, {t}".format(
            r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
def main():
    """Post the current search results to a Slack incoming webhook.

    Reads ../local/slack.conf when it exists, otherwise
    ../default/slack.conf.  ``kwargs`` may override the channel, and the
    url too when the conf permits it.

    Raises:
        Exception: if no webhook url is configured or supplied.
    """
    config = ConfigParser.ConfigParser()
    # Prefer the local conf over the shipped default; use a context
    # manager so the file handle is closed (the original leaked it).
    local_conf = os.path.join('..', 'local', 'slack.conf')
    default_conf = os.path.join('..', 'default', 'slack.conf')
    with open(local_conf if os.path.exists(local_conf) else default_conf) as fh:
        config.readfp(fh)

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')

    # update args if user specified them in search
    channel = kwargs.get('channel', config.get('config', 'channel'))
    if not channel.startswith('#'):
        channel = '#' + channel

    if config.get('config', 'allow_user_set_slack_url').lower() in TRUE_VALUES:
        url = kwargs.get('url', config.get('config', 'url'))
    else:
        url = config.get('config', 'url')

    # No url specified, don't proceed.  (Message wording fixed.)
    if not url:
        raise Exception("No slack url specified!")

    # read search results
    results = sis.readResults(None, None, True)

    https_proxy = config.get('config', 'proxy')
    proxyDict = {"https": https_proxy}

    # prepare data to be sent to slack
    data = {
        'text': get_pretty_table(results),
        'username': username,
        'channel': channel,
        'icon_url': icon,
        'mrkdwn': True,
    }

    # Only pass the proxies mapping when a proxy is actually configured.
    if https_proxy != "":
        r = requests.post(url, data=json.dumps(data), proxies=proxyDict)
    else:
        r = requests.post(url, data=json.dumps(data))

    if r.status_code == 200:
        sis.outputResults(results)
    else:
        err_msg = ("Error sending results to slack, reason: {r}, {t}".format(
            r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
def activate():
    """Re-exec this script under the interpreter at ``py_exec``.

    Uses a sentinel "reloaded" argv entry to tell the first run (which
    exec's) from the second (which refreshes module state and returns).
    """
    # Second pass: we were re-exec'd below.  Refresh the cached os/sys
    # modules (their state predates the execve) and stop.
    if sys.argv[-1] == "reloaded":
        reload(os)
        reload(sys)
        return
    # First pass: mark argv so the re-exec'd process takes the branch above.
    sys.argv.append("reloaded")
    from splunk import Intersplunk
    settings = dict()
    # Populates settings (notably 'sessionKey') from the Splunk input stream.
    Intersplunk.readResults(settings=settings)
    session_key = settings['sessionKey']
    proxies = get_proxies(session_key)
    # Put the chosen interpreter's directory first on PATH for the child.
    bin_dir = os.path.dirname(py_exec)
    path = bin_dir + os.pathsep + os.environ["PATH"]
    # Minimal environment for the child process.
    passed_envs = {
        "PATH": path,
        "SPLUNK_HOME": os.environ['SPLUNK_HOME']
    }
    # NOTE(review): assumes get_proxies returns both 'http' and 'https'
    # keys when truthy — confirm against its implementation.
    if proxies:
        passed_envs['HTTP_PROXY'] = proxies['http']
        passed_envs['HTTPS_PROXY'] = proxies['https']
    # Replace the current process image; does not return on success.
    os.execve(py_exec, ['python'] + sys.argv, passed_envs)
def run(messages, count, mapping):
    """Build a disjunctive search string from up to ``count`` results.

    Each result row becomes a group of terms: for every (renamed, attr)
    pair in ``mapping``, emit the raw value when ``renamed`` is empty
    (skipping empty values) or ``renamed="value"`` otherwise.  Duplicate
    groups are dropped; groups are OR'd together, parenthesized only when
    there is more than one.  Emits a single result ``{'search': ...}``.

    Fixes: ``== None`` comparison (PEP 8 E711) and quadratic ``+=``
    string building replaced by ``str.join`` with identical output.
    """
    results = si.readResults(None, None, True)
    ors = []
    seen_values = set()  # dedup rows
    for i, result in enumerate(results):
        if count > 0 and i >= count:
            break
        ands = []
        for renamed, attr in mapping:
            val = str(result.get(attr, ''))
            if renamed is None or renamed == '':
                if val != '':
                    ands.append(val)
            else:
                ands.append('%s="%s"' % (renamed, val))
        # The stringified list serves as the dedup key for the whole row.
        andstr = str(ands)
        if ands and andstr not in seen_values:
            ors.append(ands)
            seen_values.add(andstr)
    # Terms within a group are space-separated (implicit AND); groups are
    # OR'd and parenthesized only when there is more than one.
    groups = [" ".join(terms) for terms in ors]
    if len(groups) > 1:
        output = "(" + ") OR (".join(groups) + ")"
    elif groups:
        output = groups[0]
    else:
        output = ""
    si.outputResults([{'search': output}], messages)
def run(messages, count, mapping):
    """Turn up to ``count`` search results into one OR'd search string.

    ``mapping`` is a list of (renamed, attr) pairs: an empty ``renamed``
    emits the bare value (empty values skipped), otherwise
    ``renamed="value"``.  Duplicate rows are deduplicated; the final
    string is output as a single ``{'search': ...}`` result.

    Fixes: identity comparison with ``None`` (PEP 8 E711) and quadratic
    string concatenation replaced by ``str.join``; output is unchanged.
    """
    results = si.readResults(None, None, True)
    or_groups = []
    seen = set()  # dedup rows
    for idx, result in enumerate(results):
        if count > 0 and idx >= count:
            break
        terms = []
        for renamed, attr in mapping:
            val = str(result.get(attr, ''))
            if renamed is None or renamed == '':
                if val != '':
                    terms.append(val)
            else:
                terms.append('%s="%s"' % (renamed, val))
        # Dedup on the stringified term list, matching the original key.
        key = str(terms)
        if terms and key not in seen:
            or_groups.append(terms)
            seen.add(key)
    # Space-join terms inside a group (implicit AND); OR the groups,
    # adding parentheses only when more than one group exists.
    joined = [" ".join(g) for g in or_groups]
    if len(joined) > 1:
        output = "(" + ") OR (".join(joined) + ")"
    elif joined:
        output = joined[0]
    else:
        output = ""
    si.outputResults([{'search': output}], messages)
def _get_events(self):
    """Return the piped-in search results as a list of field dicts.

    Delegates to splunk.Intersplunk.readResults with has_header=True.
    """
    return si.readResults(None, None, True)
def main():
    """Splunk entry point wrapping duplicity.

    Dispatches on sys.argv[1]:
      * "splunk-last-backups" — report last full/incremental backup times
      * "splunk-file-list"    — list files in the backup at a given time
      * anything else          — treat argv as a duplicity command line
                                 and run a backup.
    Results (or buffered duplicity log output) are emitted via Intersplunk.
    """
    # Buffered duplicity log lines; surfaced as a single _raw event when
    # the invoked action produced no structured results.
    output = []

    def Log(s, verb_level, code=1, extra=None, force_print=False):
        # Replacement for duplicity's log.Log: buffer messages instead of
        # printing so they can be returned to Splunk.
        if verb_level <= log.getverbosity():
            output.extend(s.split("\n"))

    # def PrintCollectionStatus(col_stats, force_print=False):
    #     # raise ValueError(type(col_stats.matched_chain_pair[1]))
    #     output.append({
    #         "num_backup_sets":
    #     })
    # log.PrintCollectionStatus = PrintCollectionStatus

    results = None
    try:
        settings = dict()
        Intersplunk.readResults(None, settings, True)
        dup_time.setcurtime()

        archive_dir = os.path.join(app_dir, "local", "data", "archive")
        try:
            os.makedirs(archive_dir)
        except:
            # Best effort: the directory may already exist.
            pass

        if sys.argv[1] == "splunk-last-backups":
            # Report the end times of the last full/incremental backups.
            ap = argparse.ArgumentParser()
            ap.add_argument("--time", type=int)
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])

            dup_globals.gpg_profile = gpg.GPGProfile()
            # Passphrase is handed over via the environment, never argv.
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]
            backend.import_backends()
            dup_globals.backend = backend.get_backend(args.backend)
            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)
            commandline.set_archive_dir(archive_dir)
            results = []
            time = args.time
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()
            try:
                sig_chain = col_stats.get_backup_chain_at_time(time)
            except dup_collections.CollectionsError:
                # No backup chain at that time: report zeroed timestamps.
                results.append({
                    "last_full_backup_time": 0,
                    "last_incr_backup_time": 0,
                })
            else:
                if sig_chain.incset_list:
                    last_incr_backup_time = max(
                        [incset.end_time for incset in sig_chain.incset_list])
                else:
                    last_incr_backup_time = 0
                results.append({
                    "last_full_backup_time": col_stats.get_last_full_backup_time(),
                    "last_incr_backup_time": last_incr_backup_time
                })
        elif sys.argv[1] == "splunk-file-list":
            # List the files contained in the backup at the given time.
            ap = argparse.ArgumentParser()
            ap.add_argument("--time")
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])
            # --time may arrive with a fractional part; keep whole seconds.
            args.time = int(args.time.split(".")[0])
            dup_time.setcurtime(args.time)
            dup_globals.restore_time = args.time

            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]
            backend.import_backends()
            dup_globals.backend = backend.get_backend(args.backend)
            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)
            commandline.set_archive_dir(archive_dir)
            results = []
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()
            time = args.time
            sig_chain = col_stats.get_signature_chain_at_time(time)
            path_iter = diffdir.get_combined_path_iter(
                sig_chain.get_fileobjs(time))
            for path in path_iter:
                if path.difftype != u"deleted" and path.index:
                    # Render a unix-style rwx permission string from the
                    # low nine mode bits.
                    mode = bin(path.mode)[2:]
                    perms = ""
                    for p, val in enumerate(mode):
                        if p in (0, 3, 6):
                            c = "r"
                        elif p in (1, 4, 7):
                            c = "w"
                        elif p in (2, 5, 8):
                            c = "x"
                        perms += c if int(val) else "-"
                    # Type prefix: d=directory, l=symlink, -=regular.
                    if path.type == "dir":
                        perms = "d" + perms
                    elif path.type == "sym":
                        perms = "l" + perms
                    else:
                        perms = "-" + perms
                    results.append({
                        "perms": perms,
                        "owner": path.stat.st_uid,
                        "group": path.stat.st_gid,
                        "size": path.stat.st_size,
                        "modtime": path.stat.st_mtime,
                        "filename": os.path.join(*path.index),
                    })
        else:
            # Fall through to a regular duplicity invocation (backup),
            # forcing our archive dir and capturing log output via Log.
            args = ["--archive-dir", archive_dir] + sys.argv[1:]
            action = commandline.ProcessCommandLine(args)
            log.Log = Log
            try:
                dup_main.do_backup(action)
            except dup_collections.CollectionsError:
                results = []
    except SystemExit:
        # argparse/duplicity may exit; treat as a normal termination.
        pass
    except Exception as e:
        import traceback
        # sys.stderr.write(traceback.format_exc())
        Intersplunk.generateErrorResults("Traceback: %s" % traceback.format_exc())
        return

    # No structured results but buffered log lines: emit them as one event.
    if output and not results:
        import time
        results = [{"_raw": "\n".join(output), "_time": time.time()}]

    if results:
        try:
            Intersplunk.outputResults(results)
        except Exception:
            import traceback
            sys.stderr.write(traceback.format_exc())
            results = Intersplunk.generateErrorResults("Traceback: %s" %
                                                       traceback.format_exc())
            Intersplunk.outputResults(results)
elif arg.lower().startswith("terms="): eqsign = arg.find('=') term_list = arg[eqsign+1:len(arg)] elif arg.lower().startswith("type="): eqsign = arg.find('=') context_type = arg[eqsign+1:len(arg)] elif arg.lower().startswith("uom="): eqsign = arg.find('=') uom = arg[eqsign+1:len(arg)] else: errString = "xsCreateContext-F-003: Invalid argument: " + arg raise Exception(errString) else: raise Exception("xsCreateContext-F-001: Usage: xsCreateContext name=<string> terms=<conceptlist-option> (type=<contexttype-option>)? (<fuzzyvalues-option>)*") results = si.readResults(None, settings, True) for res in results: if 'avg' in res: avg = res['avg'] if 'count' in res: count = res['count'] if 'max' in res: max = res['max'] if 'median' in res: median = res['median'] if 'min' in res: min = res['min'] if 'stdev' in res: stdev = res['stdev'] if notes == '':
# You need to install Splunk Python SDK into # $SPLUNK_HOME/lib/python2.7/site-packages/splunklib # (C) Bojan Zdrnja, INFIGO IS d.o.o. (http://www.infigo.hr/en) # <*****@*****.**> import splunklib.client as client import splunk.auth as auth import splunk.Intersplunk as si import splunklib.binding as binding import sys import logging import splunk.entity as entity settings = dict() records = si.readResults(settings=settings, has_header=True) logging.debug(settings) sKey = settings['sessionKey'] service = client.Service(token=sKey) results = [] for i in service.indexes: retention = (int)(i.frozenTimePeriodInSecs) / (60 * 60 * 24) if i.coldToFrozenDir is None: ArchiveDir = "Not Defined" currentSize = i.currentDBSizeMB indexPath = i.homePath_expanded
if is_bool: rex = "^(?:t|true|1|yes)$" if (rex is None and arg in argvals) or (arg in argvals and re.match(rex, argvals[arg])): result = True return result if __name__ == '__main__': logger = setup_logging() logger.info('starting..') eStart = time.time() try: results = si.readResults(None, None, False) keywords, argvals = si.getKeywordsAndOptions() validate_args(keywords, argvals) if results is not None and len(results) > 0: argvals = make_arg_sub_based_results(argvals, results) # if api_key argument is passed to command, use it instead of default if arg_on_and_enabled(argvals, "auth_token"): QUANDL_AUTH_TOKEN = argvals["auth_token"] logger.debug('setting QUANDL_AUTH_TOKEN="%s"' % str(QUANDL_AUTH_TOKEN)) if arg_on_and_enabled(argvals, "debug", is_bool=True): logger.setLevel(logging.DEBUG)
""" out = None err = None if self.locateProcess(proc_name=the_proc_name): p = subprocess.Popen(self._proc_cmds['kill_proc_name'][self.getPlatform()] + [str(the_proc_name)], stdout=subprocess.PIPE) out, err = p.communicate() else: logger.error("Process Name: " + str(the_proc_name) + " not found " + " running on the system!") return [out,err] if __name__ == '__main__': try: results = si.readResults() keywords, options = si.getKeywordsAndOptions() for entry in results: ## PID if "pid" in entry: pid = entry["pid"] else: pid = options.get('pid', None) ## Process Name if 'proc_name' in entry: proc_name = entry['proc_name'] else: proc_name = options.get('proc_name', None)
######################## key = '_raw' tableName = '' if len(sys.argv) >= 3: tableName = sys.argv[1] key = sys.argv[2] ######################## #results,dummyresults,settings = isp.getOrganizedResults() results = isp.readResults(None, None, True) rowkey = [] for field in results: if field.get(key, None): rowkey.append(field.get(key)) ############################# # #f = open('/root/input','w') # #f.write(str(rowkey)) # #f.close() #
if self.locateProcess(proc_name=the_proc_name): p = subprocess.Popen( self._proc_cmds['kill_proc_name'][self.getPlatform()] + [str(the_proc_name)], stdout=subprocess.PIPE) out, err = p.communicate() else: logger.error("Process Name: " + str(the_proc_name) + " not found " + " running on the system!") return [out, err] if __name__ == '__main__': try: results = si.readResults() keywords, options = si.getKeywordsAndOptions() for entry in results: ## PID if "pid" in entry: pid = entry["pid"] else: pid = options.get('pid', None) ## Process Name if 'proc_name' in entry: proc_name = entry['proc_name'] else: proc_name = options.get('proc_name', None)
# Entry point for the `strace` search command: on the getinfo pass it
# registers `prestrace <search>` as the required pre-op, otherwise it
# streams the incoming results straight through.
if __name__ == '__main__':
    logger = setup_logging()
    # Set this when debugging
    logger.setLevel(logging.DEBUG)
    logger.debug("entered __main__")
    search = ''
    try:
        # Fixed off-by-one: the original guarded len(sys.argv) > 1 but
        # indexed sys.argv[2], raising IndexError instead of the usage
        # error when exactly one argument was passed.
        if len(sys.argv) > 2:
            search = sys.argv[2]
            logger.debug("search: '" + search + "'")
        else:
            raise Exception("Usage: strace <search>")
        (isgetinfo, sys.argv) = si.isGetInfo(sys.argv)
        if isgetinfo:
            reqsop = True
            preop = "prestrace " + search + ""
            logger.debug("passed to prestrace: '" + preop + "'")
            si.outputInfo(False, False, False, reqsop, preop)  # calls sys.exit()
        # Pass the results through unchanged.
        results = si.readResults(None, None, True)
        si.outputResults(results)
        logger.debug("exited __main__")
    except Exception as e:
        # Surface any failure back to Splunk as an error result.
        si.generateErrorResults(e)
# `strace` search command entry point.  The getinfo pass registers
# `prestrace <search>` as the pre-streaming operation; the streaming
# pass echoes the incoming results unchanged.
if __name__ == '__main__':
    logger = setup_logging()
    # Set this when debugging
    logger.setLevel(logging.DEBUG)
    logger.debug("entered __main__")
    search = ''
    try:
        # Fixed off-by-one: guarding len(sys.argv) > 1 while reading
        # sys.argv[2] raised IndexError for a single argument instead of
        # producing the usage message below.
        if len(sys.argv) > 2:
            search = sys.argv[2]
            logger.debug("search: '" + search + "'")
        else:
            raise Exception("Usage: strace <search>")
        (isgetinfo, sys.argv) = si.isGetInfo(sys.argv)
        if isgetinfo:
            reqsop = True
            preop = "prestrace " + search + ""
            logger.debug("passed to prestrace: '" + preop + "'")
            si.outputInfo(False, False, False, reqsop, preop)  # calls sys.exit()
        # Stream the results through untouched.
        results = si.readResults(None, None, True)
        si.outputResults(results)
        logger.debug("exited __main__")
    except Exception as e:
        # Report failures to Splunk as error results.
        si.generateErrorResults(e)
from splunk.rest import simpleRequest from splunk import Intersplunk import requests import re from splunk_logger import setup_logging from utils import get_proxies if __name__ == "__main__": logger = setup_logging() settings = dict() Intersplunk.readResults(settings=settings) session_key = settings['sessionKey'] proxies = get_proxies(session_key) download_url = simpleRequest( "/servicesNS/nobody/pyden-manager/properties/pyden/download/url", sessionKey=session_key)[1] r = requests.get(download_url, proxies=proxies) version_pattern = r"""<a href\=\"\d(?:\.\d{1,2}){1,2}\/\"\>(?P<version>\d(?:\.\d{1,2}){1,2})""" all_versions = re.findall(version_pattern, r.text) # logger.debug(all_versions) compatible_versions = [ version for version in all_versions if (version.startswith('2') and version > '2.7') or ( version.startswith('3') and version > '3.5') ] # logger.debug(compatible_versions) # sometime there are only pre release or release candidates so we need to check each compatible version for release for version in compatible_versions: url = download_url.rstrip() + "%s/" % version logger.debug(url) r = requests.get(url,
def arg_on_and_enabled(argvals, arg, rex=None, is_bool=False): result = False if is_bool: rex = "^(?:t|true|1|yes)$" if (rex is None and arg in argvals) or (arg in argvals and re.match(rex, argvals[arg])): result = True return result if __name__ == '__main__': logger = setup_logging() logger.info('starting..') eStart = time.time() try: results = si.readResults(None, None, False) keywords, argvals = si.getKeywordsAndOptions() validate_args(keywords, argvals) if arg_on_and_enabled(argvals, "debug", is_bool=True): logger.setLevel(logging.DEBUG) logger.debug("detecting debug argument passed, setting command log_level=DEBUG") output_column_name = "mvmath" if arg_on_and_enabled(argvals, "labelfield"): output_column_name = argvals['labelfield'] if arg_on_and_enabled(argvals, "prefix"): output_column_name = argvals['prefix'] + output_column_name for row in results:
recs.append([rec[n] for rec in records]) elif t=='numeric': recs.append(float(n)) elif t=='string': recs.append(n.lstrip('"').rstrip('"')) return recs if __name__ == '__main__': stdin = None if not os.isatty(0): stdin = sys.stdin settings = dict() records = si.readResults(settings = settings, has_header = True) sessionKey = settings['sessionKey'] logger.debug("sessionKey = %s" % sessionKey) ret = collections.OrderedDict() for i in range(1, len(sys.argv)): func = py.parse_func(sys.argv[i]) logger.debug("func = %s" % func) recs = get_inputs(records, func.arguments) logger.debug("get_inputs = %s" % recs) f = py.find_func(func) f._sessionKey_ = sessionKey try: if len(func.arguments)==0: