def show_non_search_results(
        log_rec,
        code_view=True,
        json_view=False,
        show_message_details=False):
    """show_non_search_results

    Show non-search results for search jobs like:
    ``index="antinex" | stats count``

    :param log_rec: log record from splunk
    :param code_view: show as a normal tail -f <log file> view
    :param json_view: pretty print each log's dictionary
    :param show_message_details: show additional message fields
    """
    log_dict = None
    try:
        log_dict = json.loads(log_rec)
    except Exception:
        log_dict = None
    # end of try/ex

    if not log_dict:
        log.info(ppj(log_rec))
    else:
        log.info(ppj(log_dict))
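
# Example (an illustrative sketch, not part of the library itself):
# feeding a raw record from a ``| stats count`` job through the helper.
# The sample JSON string below is hypothetical.
#
# show_non_search_results(
#     log_rec='{"count": "10"}')
# # logs the pretty-printed dictionary:
# # {
# #     "count": "10"
# # }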
def publish_to_splunk(
        self,
        payload=None,
        shutdown_event=None,
        shutdown_ack_event=None,
        already_done_event=None):
    """publish_to_splunk

    Publish the queued messages to Splunk

    :param payload: optional string log message to send to Splunk
    :param shutdown_event: multiprocessing.Event - shutdown event
    :param shutdown_ack_event: multiprocessing.Event -
                               acknowledge shutdown is in progress
    :param already_done_event: multiprocessing.Event -
                               already shutting down
    """
    self.debug_log('publish_to_splunk - start')
    use_payload = payload
    if not use_payload:
        use_payload = self.log_payload

    if use_payload:
        self.debug_log('payload available for sending')

        url = 'https://{}:{}/services/collector'.format(
            self.host,
            self.port)
        self.debug_log('destination URL={}'.format(url))

        try:
            if self.debug:
                try:
                    msg_dict = json.loads(use_payload)
                    event_data = json.loads(msg_dict['event'])
                    msg_dict['event'] = event_data
                    self.debug_log(
                        'sending payload: {}'.format(ppj(msg_dict)))
                except Exception:
                    self.debug_log(
                        'sending data payload: {}'.format(use_payload))

            if self.num_sent > 100000:
                self.num_sent = 0
            else:
                self.num_sent += 1
            send_to_splunk.send_to_splunk(
                session=self.session,
                url=url,
                data=use_payload,
                headers={'Authorization': 'Splunk {}'.format(
                    self.token)},
                verify=self.verify,
                timeout=self.timeout)

            self.debug_log('payload sent successfully')
        except Exception as e:
            try:
                self.write_log(
                    'Exception in Splunk logging handler: {}'.format(e))
                self.write_log(traceback.format_exc())
            except Exception:
                self.debug_log(
                    'Exception encountered, '
                    'but traceback could not be formatted')

        self.log_payload = ''
    # end of publish handling

    self.debug_log(
        ('publish_to_splunk - done - '
         'self.is_shutting_down={} self.shutdown_now={}').format(
            self.is_shutting_down(shutdown_event=self.shutdown_event),
            self.shutdown_now))

def publish_to_splunk(
        self,
        payload=None):
    """publish_to_splunk

    Build the ``self.log_payload`` from the queued log messages
    and POST it to the Splunk endpoint

    :param payload: string message to send to Splunk
    """
    self.debug_log('publish_to_splunk - start')
    use_payload = payload
    if not use_payload:
        use_payload = self.log_payload

    self.num_sent = 0

    if use_payload:
        url = 'https://{}:{}/services/collector'.format(
            self.host,
            self.port)
        self.debug_log('splunk url={}'.format(url))

        try:
            if self.debug:
                try:
                    msg_dict = json.loads(use_payload)
                    event_data = json.loads(msg_dict['event'])
                    msg_dict['event'] = event_data
                    self.debug_log(
                        'sending payload: {}'.format(ppj(msg_dict)))
                except Exception:
                    self.debug_log(
                        'sending data payload: {}'.format(use_payload))

            if self.num_sent > 100000:
                self.num_sent = 1
            else:
                self.num_sent += 1
            send_to_splunk.send_to_splunk(
                session=self.session,
                url=url,
                data=use_payload,
                headers={'Authorization': 'Splunk {}'.format(
                    self.token)},
                verify=self.verify,
                timeout=self.timeout)

            self.debug_log('payload sent successfully')
        except Exception as e:
            try:
                self.write_log(
                    'Exception in Splunk logging handler: {}'.format(e))
                self.write_log(traceback.format_exc())
            except Exception:
                self.debug_log(
                    'Exception encountered, '
                    'but traceback could not be formatted')

        self.log_payload = ''
    else:
        self.debug_log('no logs to send')

    self.debug_log(
        ('publish_to_splunk - done - '
         'self.is_shutting_down={} self.shutdown_now={}').format(
            self.is_shutting_down(shutdown_event=self.shutdown_event),
            self.shutdown_now))
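
# A minimal standalone sketch of the HEC POST both publish methods
# perform (assumptions: a reachable Splunk HEC on splunkenterprise:8088
# and a valid token exported as SPLUNK_TOKEN; /services/collector is
# the standard HEC collector endpoint):
#
# import json
# import os
# import requests
#
# payload = json.dumps({'event': json.dumps({'message': 'hello'})})
# requests.post(
#     url='https://splunkenterprise:8088/services/collector',
#     headers={'Authorization': 'Splunk {}'.format(
#         os.getenv('SPLUNK_TOKEN'))},
#     data=payload,
#     verify=False,
#     timeout=10.0)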

def build_payload_from_queued_messages(
        self,
        use_queue,
        shutdown_event,
        triggered_by_shutdown=False):
    """build_payload_from_queued_messages

    Empty the queued messages by building a large ``self.log_payload``

    :param use_queue: queue holding the messages
    :param shutdown_event: shutdown event
    :param triggered_by_shutdown: called during shutdown
    """
    self.debug_log('build_payload - start')

    not_done = True
    while not_done:
        if not triggered_by_shutdown and self.is_shutting_down(
                shutdown_event=shutdown_event):
            self.debug_log('build_payload shutting down')
            return True

        self.debug_count += 1
        if self.debug_count > 60:
            self.debug_count = 0
            self.debug_log('build_payload tid={} queue={}'.format(
                self.tid,
                str(use_queue)))
        try:
            msg = use_queue.get(
                block=True,
                timeout=self.sleep_interval)
            self.log_payload = self.log_payload + msg
            if self.debug:
                self.debug_log('{} got={}'.format(
                    self,
                    ppj(msg)))
            not_done = not self.queue_empty(
                use_queue=use_queue)
        except Exception as e:
            if self.is_shutting_down(
                    shutdown_event=shutdown_event):
                self.debug_log(
                    'helper was shut down - '
                    'msgs in the queue may not all '
                    'have been sent')
            if ('No such file or directory' in str(e)
                    or 'Broken pipe' in str(e)):
                raise e
            elif ("object, typeid 'Queue' at" in str(e)
                    and "'__str__()' failed" in str(e)):
                raise e
            not_done = True
        # end of getting log msgs from the queue

        if not triggered_by_shutdown and self.is_shutting_down(
                shutdown_event=shutdown_event):
            self.debug_log('build_payload - already shutting down')
            return True

        # If the payload is getting very long,
        # stop reading and send immediately.
        # Current limit is 512 KB (524288 bytes)
        if (not triggered_by_shutdown and self.is_shutting_down(
                shutdown_event=shutdown_event)
                or len(self.log_payload) >= 524288):
            self.debug_log(
                'payload maximum size exceeded, sending immediately')
            return False
    # end of draining the queue

    self.debug_log('build_payload - done')

    return True
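
# A minimal sketch of the drain-then-flush pattern this method
# implements (assumptions: the standalone queue and the 524288-byte
# threshold mirror the method above; ``queue.Queue`` stands in for the
# multiprocessing queue the handler actually uses):
#
# import queue
#
# q = queue.Queue()
# q.put('{"event": "first log"}')
# payload = ''
# while True:
#     try:
#         payload += q.get(block=True, timeout=1.0)
#     except queue.Empty:
#         break
#     if len(payload) >= 524288:
#         break  # flush immediately once the payload hits 512 KB
# # ``payload`` is now ready to POST to the HEC collector endpoint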

def show_search_results(
        log_rec,
        code_view=True,
        json_view=False,
        show_message_details=False):
    """show_search_results

    Show search results like rsyslog or as pretty-printed
    JSON dictionaries per log for debugging drill-down fields

    :param log_rec: log record from splunk
    :param code_view: show as a normal tail -f <log file> view
    :param json_view: pretty print each log's dictionary
    :param show_message_details: show additional message fields
                                 (dc, env, source and line)
    """
    log_dict = None
    try:
        log_dict = json.loads(log_rec)
    except Exception as e:
        log.error('Failed logging record={} with ex={}'.format(
            log_rec,
            e))
        return
    # end of try/ex

    if not log_dict:
        log.error('Failed to parse log_rec={} as a dictionary'.format(
            log_rec))
        return

    if code_view:
        comp_name = log_dict.get('name', '')
        logger_name = log_dict.get('logger_name', '')
        use_log_name = logger_name
        if not use_log_name:
            use_log_name = comp_name
        prefix_log = '{} {} - {} -'.format(
            log_dict.get('systime',
                         log_dict.get('asctime', '')),
            use_log_name,
            log_dict.get('levelname', ''))
        suffix_log = ''
        if log_dict.get('exc', ''):
            suffix_log = '{} exc={}'.format(
                suffix_log,
                log_dict.get('exc', ''))

        if show_message_details:
            suffix_log = ('dc={} env={} '
                          'source={} line={}').format(
                log_dict.get('dc', ''),
                log_dict.get('env', ''),
                log_dict.get('path', ''),
                log_dict.get('lineno', ''))
        msg = '{} {} {}'.format(
            prefix_log,
            log_dict.get('message', ''),
            suffix_log)

        if log_dict['levelname'] == 'INFO':
            log.info(msg)
        elif log_dict['levelname'] == 'DEBUG':
            log.debug(msg)
        elif log_dict['levelname'] == 'ERROR':
            log.error(msg)
        elif log_dict['levelname'] == 'CRITICAL':
            log.critical(msg)
        elif log_dict['levelname'] == 'WARNING':
            log.warning(msg)
        else:
            log.debug(msg)
    elif json_view:
        if log_dict['levelname'] == 'INFO':
            log.info(ppj(log_dict))
        elif log_dict['levelname'] == 'DEBUG':
            log.debug(ppj(log_dict))
        elif log_dict['levelname'] == 'ERROR':
            log.error(ppj(log_dict))
        elif log_dict['levelname'] == 'CRITICAL':
            log.critical(ppj(log_dict))
        elif log_dict['levelname'] == 'WARNING':
            log.warning(ppj(log_dict))
        else:
            log.debug(ppj(log_dict))
    else:
        log.error('Please use either code_view or json_view '
                  'to view the logs')
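
# Example (illustrative; the record fields below are hypothetical but
# follow the drill-down field names the function reads - systime,
# logger_name, levelname and message):
#
# rec = json.dumps({
#     'systime': '2019-01-01 00:00:00,000',
#     'logger_name': 'helloworld',
#     'levelname': 'INFO',
#     'message': 'testing INFO'})
# show_search_results(log_rec=rec, code_view=True)
# # logs a tail -f style line:
# # 2019-01-01 00:00:00,000 helloworld - INFO - testing INFO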

def run_main():
    """run_main

    Search Splunk
    """

    parser = argparse.ArgumentParser(description='Search Splunk')
    parser.add_argument(
        '-u',
        help='username',
        required=False,
        dest='user')
    parser.add_argument(
        '-p',
        help='user password',
        required=False,
        dest='password')
    parser.add_argument(
        '-f',
        help='splunk-ready request in a json file',
        required=False,
        dest='datafile')
    parser.add_argument(
        '-i',
        help='index to search',
        required=False,
        dest='index_name')
    parser.add_argument(
        '-a',
        help='host address: <fqdn:port>',
        required=False,
        dest='address')
    parser.add_argument(
        '-e',
        help='(Optional) earliest_time minutes back',
        required=False,
        dest='earliest_time_minutes')
    parser.add_argument(
        '-l',
        help='(Optional) latest_time minutes back',
        required=False,
        dest='latest_time_minutes')
    parser.add_argument(
        '-q', '--queryargs',
        nargs='*',
        help=('query string for searching splunk: '
              'search index="antinex" AND levelname="ERROR"'),
        required=False,
        dest='query_args')
    parser.add_argument(
        '-j',
        help='(Optional) view as json dictionary logs',
        required=False,
        dest='json_view',
        action='store_true')
    parser.add_argument(
        '-t',
        help=('(Optional) pre-existing Splunk token '
              'which can be set using export '
              'SPLUNK_TOKEN=<token> - if provided '
              'the user (-u) and password (-p) '
              'arguments are not required'),
        required=False,
        dest='token')
    parser.add_argument(
        '-m',
        help='(Optional) verbose message when getting logs',
        required=False,
        dest='message_details',
        action='store_true')
    parser.add_argument(
        '-v',
        help='(Optional) verify certs - disabled by default',
        required=False,
        dest='verify',
        action='store_true')
    parser.add_argument(
        '-b',
        help='verbose',
        required=False,
        dest='verbose',
        action='store_true')
    args = parser.parse_args()

    user = SPLUNK_USER
    password = SPLUNK_PASSWORD
    token = SPLUNK_TOKEN
    address = SPLUNK_API_ADDRESS
    index_name = SPLUNK_INDEX
    verbose = SPLUNK_VERBOSE
    show_message_details = bool(
        str(ev('MESSAGE_DETAILS', '0')).lower() == '1')
    earliest_time_minutes = None
    latest_time_minutes = None
    verify = False
    code_view = True
    json_view = False
    datafile = None

    if args.user:
        user = args.user
    if args.password:
        password = args.password
    if args.address:
        address = args.address
    if args.datafile:
        datafile = args.datafile
    if args.index_name:
        index_name = args.index_name
    if args.verify:
        verify = args.verify
    if args.earliest_time_minutes:
        earliest_time_minutes = int(args.earliest_time_minutes)
    if args.latest_time_minutes:
        latest_time_minutes = int(args.latest_time_minutes)
    if args.verbose:
        verbose = True
    if args.message_details:
        show_message_details = args.message_details
    if args.token:
        token = args.token
    if args.json_view:
        json_view = True
        code_view = False

    default_search_query = 'index="{}" | head 10 | reverse'.format(
        index_name)
    search_query = ev(
        'SPLUNK_QUERY',
        default_search_query)
    if args.query_args:
        search_query = ' '.join(args.query_args)

    valid = True
    if not user or user == 'user-not-set':
        log.critical('missing user')
        valid = False
    if not password or password == 'password-not-set':
        log.critical('missing password')
        valid = False
    if not index_name:
        log.critical('missing splunk index')
        valid = False
    if token:
        # if the token is present,
        # then the user and the password are not required
        if not valid and index_name:
            valid = True
    if not valid:
        log.critical('Please run with the following arguments:\n')
        log.error('-u <username> -p <password> '
                  '-i <index> -t <token if user and password not set> '
                  '-a <host address as: fqdn:port>')
        log.critical('\n'
                     'Or you can export the following '
                     'environment variables and retry the command: '
                     '\n')
        log.error(
            'export SPLUNK_ADDRESS="splunkenterprise:8088"\n'
            'export SPLUNK_API_ADDRESS="splunkenterprise:8089"\n'
            'export SPLUNK_PASSWORD="******"\n'
            'export SPLUNK_USER="******"\n'
            'export SPLUNK_INDEX="antinex"\n'
            'export SPLUNK_TOKEN="<Optional pre-existing '
            'Splunk token>"\n')
        sys.exit(1)

    if verbose:
        log.info('creating client user={} address={}'.format(
            user,
            address))

    last_msg = ''
    host = ''
    port = -1
    try:
        last_msg = 'Invalid address={}'.format(address)
        address_split = address.split(':')
        last_msg = ('Failed finding host in address={} '
                    '- please use: -a <fqdn:port>').format(address)
        host = address_split[0]
        last_msg = ('Failed finding integer port in address={} '
                    '- please use: -a <fqdn:port>').format(address)
        port = int(address_split[1])
    except Exception as e:
        log.error(('Failed to parse -a {} for the '
                   'splunk host address: {} which threw an '
                   'ex={}').format(
                       address,
                       last_msg,
                       e))
        sys.exit(1)
    # end of try ex

    if verbose:
        log.info('connecting {}@{}:{}'.format(
            user,
            host,
            port))

    req_body = None
    if datafile:
        if verbose:
            log.info('loading request in datafile={}'.format(
                datafile))
        with open(datafile, 'r') as f:
            req_body = json.loads(f.read())

    earliest_time = None
    latest_time = None
    now = datetime.datetime.now()
    if earliest_time_minutes:
        # earliest_time_minutes back from now
        earliest_dt = now - datetime.timedelta(
            minutes=earliest_time_minutes)
        earliest_time = earliest_dt.strftime(
            '%Y-%m-%dT%H:%M:%S.000-00:00')
    if latest_time_minutes:
        latest_dt = now - datetime.timedelta(
            minutes=latest_time_minutes)
        latest_time = latest_dt.strftime(
            '%Y-%m-%dT%H:%M:%S.000-00:00')

    # Step 2: Create a search job
    if not search_query.startswith('search'):
        search_query = 'search {}'.format(search_query)
    search_data = req_body
    if not search_data:
        search_data = {
            'search': search_query
        }
    if earliest_time:
        search_data['earliest_time'] = earliest_time
    if latest_time:
        search_data['latest_time'] = latest_time

    res = sp.search(
        user=user,
        password=password,
        address=address,
        token=token,
        query_dict=search_data,
        verify=verify)

    if res['status'] == SUCCESS:
        result_list = []
        try:
            result_list = res['record'].get(
                'results',
                result_list)
            if len(result_list) == 0:
                log.info(('No matches for search={} '
                          'response={}').format(
                              ppj(search_data),
                              ppj(res['record'])))
        except Exception as e:
            result_list = []
            log.error(('Failed to find results for the query={} '
                       'with ex={}').format(
                           ppj(search_data),
                           e))

        for ridx, log_record in enumerate(result_list):
            log_raw = log_record.get('_raw', None)
            if log_raw:
                show_search_results(
                    log_rec=log_raw,
                    code_view=code_view,
                    json_view=json_view,
                    show_message_details=show_message_details)
            else:
                show_non_search_results(
                    log_rec=log_record,
                    code_view=code_view,
                    json_view=json_view,
                    show_message_details=show_message_details)
            # end of handling log record presentation as a view
        # end for all log records
    else:
        log.error(('Failed searching splunk with status={} and '
                   'error: {}').format(
                       res['status'],
                       res['err']))
    # end of if job_id

    if verbose:
        log.info('done')
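
# Example CLI invocations (illustrative; ``search_splunk.py`` is a
# placeholder for however run_main is exposed as a console script, and
# the index, token, and address values are placeholders too):
#
# search over the REST API with user/password auth:
#   search_splunk.py -u admin -p changeme -a splunkenterprise:8089 \
#       -i antinex -q 'index="antinex" AND levelname="ERROR"'
#
# or with a pre-existing token and JSON output:
#   export SPLUNK_TOKEN="<token>"
#   search_splunk.py -a splunkenterprise:8089 -i antinex -j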

def setup_logging(
        default_level=logging.INFO,
        default_path=None,
        env_key='LOG_CFG',
        handler_name='console',
        handlers_dict=None,
        log_dict=None,
        config_name=None,
        splunk_host=None,
        splunk_port=None,
        splunk_index=None,
        splunk_token=None,
        splunk_verify=False,
        splunk_handler_name='splunk',
        splunk_sleep_interval=-1,
        splunk_debug=False):
    """setup_logging

    Setup logging configuration

    :param default_level: level to log
    :param default_path: path to config (optional)
    :param env_key: path to config in this env var
    :param handler_name: handler name in the config
    :param handlers_dict: handlers dict
    :param log_dict: full log dictionary config
    :param config_name: filename for config
    :param splunk_host: optional splunk host
    :param splunk_port: optional splunk port
    :param splunk_index: optional splunk index
    :param splunk_token: optional splunk token
    :param splunk_verify: optional splunk verify - default to False
    :param splunk_handler_name: optional splunk handler name
    :param splunk_sleep_interval: optional splunk sleep interval
    :param splunk_debug: optional splunk debug - default to False
    """
    if SPLUNK_DEBUG:
        splunk_debug = True

    if not splunk_token:
        if SPLUNK_TOKEN:
            splunk_token = SPLUNK_TOKEN

    config = None
    if os.getenv('LOG_DICT', False):
        try:
            config = json.loads(os.getenv('LOG_DICT', None).strip())
        except Exception as e:
            print('Please confirm the env key LOG_DICT has a valid '
                  'JSON dictionary. Failed json.loads() parsing '
                  '- using default config for '
                  'ex={}'.format(e))
        # try to parse the dict and log it that there was a failure
    elif log_dict:
        config = log_dict
    # end of if passed in set in an environment variable

    if not config and default_path:
        path = default_path
        file_name = default_path.split('/')[-1]
        if config_name:
            file_name = config_name
        path = '{}/{}'.format(
            '/'.join(default_path.split('/')[:-1]),
            file_name)
        value = os.getenv(env_key, None)
        if value:
            path = value
        if os.path.exists(path):
            with open(path, 'rt') as f:
                config = json.load(f)
        else:
            cwd_path = os.getcwd() + '/spylunking/log/{}'.format(
                file_name)
            if os.path.exists(cwd_path):
                with open(cwd_path, 'rt') as f:
                    config = json.load(f)
            else:
                rels_path = os.getcwd() + '/../log/{}'.format(
                    file_name)
                if os.path.exists(rels_path):
                    with open(rels_path, 'rt') as f:
                        config = json.load(f)
                else:
                    repo_config = ('/opt/spylunking/spylunking/log/'
                                   'shared-logging.json')
                    if os.path.exists(repo_config):
                        if splunk_debug:
                            print('checking repo_config={}'.format(
                                repo_config))
                        with open(repo_config, 'rt') as f:
                            config = json.load(f)
        # end of finding a config dictionary
    # end of trying to find a config on disk

    if config:
        if handlers_dict:
            config['handlers'] = handlers_dict

        found_splunk_handler = False
        if handler_name:
            for hidx, h in enumerate(config['handlers']):
                if splunk_debug:
                    print('handler={} name={}'.format(
                        hidx,
                        h))
                if handler_name == h:
                    config['root']['handlers'].append(h)
                # by default splunk_handler_name == 'splunk'
                if splunk_handler_name == h and splunk_token:
                    found_splunk_handler = True

        if found_splunk_handler:
            if splunk_token:
                config['handlers'][splunk_handler_name]['token'] = \
                    splunk_token
                config['handlers'][splunk_handler_name]['verify'] = \
                    splunk_verify
                if splunk_host:
                    config['handlers'][splunk_handler_name]['host'] = \
                        splunk_host
                if splunk_port:
                    config['handlers'][splunk_handler_name]['port'] = \
                        splunk_port
                if splunk_index:
                    config['handlers'][splunk_handler_name]['index'] = \
                        splunk_index
                config['handlers'][splunk_handler_name]['debug'] = \
                    splunk_debug

                if config['handlers'][splunk_handler_name].get(
                        'queue_size',
                        True):
                    key = 'queue_size'
                    config['handlers'][splunk_handler_name][key] = \
                        SPLUNK_QUEUE_SIZE
                # end of checking for queue_size changes

                if SPLUNK_RETRY_COUNT:
                    key = 'retry_count'
                    config['handlers'][splunk_handler_name][key] = \
                        SPLUNK_RETRY_COUNT
                # end of checking for retry_count changes

                if SPLUNK_TIMEOUT:
                    key = 'timeout'
                    config['handlers'][splunk_handler_name][key] = \
                        SPLUNK_TIMEOUT
                # end of checking for splunk_timeout changes

                key = 'sleep_interval'
                if splunk_sleep_interval >= 0:
                    config['handlers'][splunk_handler_name][key] = \
                        splunk_sleep_interval
                else:
                    if SPLUNK_SLEEP_INTERVAL:
                        config['handlers'][splunk_handler_name][key] = \
                            SPLUNK_SLEEP_INTERVAL
                # end of checking for sleep_interval changes

                if found_splunk_handler:
                    config['root']['handlers'].append(
                        splunk_handler_name)
            else:
                if splunk_debug:
                    print('Unable to get a valid splunk token '
                          '- splunk disabled')
                config['handlers'].pop('splunk', None)
                good_handlers = []
                for k in config['root']['handlers']:
                    if k != splunk_handler_name:
                        good_handlers.append(k)
                config['root']['handlers'] = good_handlers
        else:
            if splunk_debug:
                print('splunk disabled')
            config['handlers'].pop(splunk_handler_name, None)
            good_handlers = []
            for k in config['root']['handlers']:
                if k != splunk_handler_name:
                    good_handlers.append(k)
            config['root']['handlers'] = good_handlers

        if len(config['root']['handlers']) == 0:
            print(('Failed to find logging root.handlers={} in log '
                   'config={}').format(
                       config['root']['handlers'],
                       ppj(config)))
        else:
            if splunk_debug:
                print('Using log config={}'.format(ppj(config)))
        logging.config.dictConfig(config)
        return
    else:
        if not splunk_host and not splunk_port:
            if SPLUNK_ADDRESS:
                try:
                    addr_split = SPLUNK_ADDRESS.split(':')
                    if len(addr_split) > 1:
                        splunk_host = addr_split[0]
                        splunk_port = int(addr_split[1])
                except Exception as e:
                    print(('Failed building SPLUNK_ADDRESS={} as '
                           'host:port with ex={}').format(
                               SPLUNK_ADDRESS,
                               e))
        else:
            if not splunk_host:
                if SPLUNK_HOST:
                    splunk_host = SPLUNK_HOST
            if not splunk_port:
                if SPLUNK_PORT:
                    splunk_port = SPLUNK_PORT
        # end of connectivity changes from env vars

        config = {
            'version': 1,
            'disable_existing_loggers': False,
            'formatters': {
                'colors': {
                    '()': 'colorlog.ColoredFormatter',
                    'format': ('%(log_color)s%(asctime)s - %(name)s - '
                               '%(levelname)s - %(message)s%(reset)s')
                },
                'no_date_colors': {
                    '()': 'colorlog.ColoredFormatter',
                    'format': ('%(log_color)s%(name)s - %(levelname)s '
                               '- %(message)s%(reset)s')
                },
                'simple': {
                    '()': 'colorlog.ColoredFormatter',
                    'format': ('%(log_color)s'
                               '%(message)s%(reset)s')
                },
                splunk_handler_name: {
                    '()': 'spylunking.log.setup_logging.SplunkFormatter',
                    'format': ('%(asctime)s - %(name)s - %(levelname)s '
                               '- %(message)s [%(filename)s:%(lineno)s]')
                }
            },
            'handlers': {
                'console': {
                    'class': 'logging.StreamHandler',
                    'level': 'INFO',
                    'formatter': 'colors',
                    'stream': 'ext://sys.stdout'
                },
                'no_date_colors': {
                    'class': 'logging.StreamHandler',
                    'level': 'INFO',
                    'formatter': 'no_date_colors',
                    'stream': 'ext://sys.stdout'
                },
                'simple': {
                    'class': 'logging.StreamHandler',
                    'level': 'INFO',
                    'formatter': 'simple',
                    'stream': 'ext://sys.stdout'
                }
            },
            'loggers': {
                '': {
                    'level': 'INFO',
                    'propagate': True
                }
            },
            'root': {
                'level': 'INFO',
                'propagate': True,
                'handlers': [
                    'console'
                ]
            }
        }
        if splunk_token and splunk_host and splunk_port:
            config['handlers'][splunk_handler_name] = {
                'class': 'spylunking.splunk_publisher.SplunkPublisher',
                'host': splunk_host,
                'port': splunk_port,
                'index': SPLUNK_INDEX,
                'token': splunk_token,
                'formatter': splunk_handler_name,
                'sourcetype': SPLUNK_SOURCETYPE,
                'verify': SPLUNK_VERIFY,
                'timeout': SPLUNK_TIMEOUT,
                'retry_count': SPLUNK_RETRY_COUNT,
                'sleep_interval': SPLUNK_SLEEP_INTERVAL,
                'queue_size': SPLUNK_QUEUE_SIZE,
                'debug': SPLUNK_DEBUG
            }
            config['root']['handlers'].append(splunk_handler_name)
        # only add splunk if the token is set for HEC

        logging.config.dictConfig(config)
    return
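
# Example (a minimal sketch; assumes this module is importable as
# spylunking.log.setup_logging and that SPLUNK_TOKEN/SPLUNK_ADDRESS are
# exported if Splunk forwarding is wanted - otherwise only the console
# handler is attached to the root logger):
#
# import logging
# setup_logging(
#     default_level=logging.INFO,
#     handler_name='console')
# log = logging.getLogger('helloworld')
# log.info('hello from spylunking')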

def search(
        user=None,
        password=None,
        token=None,
        address=None,
        query_dict=None,
        verify=False,
        debug=False):
    """search

    Search Splunk with a pre-built query dictionary and
    wait until it finishes.

    :param user: splunk username
    :param password: splunk password
    :param token: splunk token
    :param address: splunk REST API address: localhost:8089
    :param query_dict: query dictionary to search
    :param verify: ssl verify
    :param debug: debug flag
    """
    response_dict = None
    res = {
        'status': NOT_RUN,
        'err': '',
        'record': response_dict
    }
    try:
        url = 'https://{}'.format(address)
        if not token:
            try:
                token = get_session_key.get_session_key(
                    user=user,
                    password=password,
                    url=url,
                    verify=verify)
            except Exception as f:
                res['err'] = (
                    'Failed to get splunk token for user={} url={} '
                    'ex={}').format(
                        user,
                        url,
                        f)
                res['status'] = ERR
                return res
        # end of trying to login to get a valid token

        auth_header = {
            'Authorization': 'Splunk {}'.format(token)
        }

        search_url = '{}/services/search/jobs'.format(url)
        search_job = requests.post(
            url=search_url,
            headers=auth_header,
            verify=verify,
            data=query_dict)

        job_id = None
        try:
            job_id = parseString(
                search_job.text).getElementsByTagName(
                    'sid')[0].childNodes[0].nodeValue
        except Exception as e:
            log.error((
                'Failed searching splunk response={} for '
                'query={} url={} '
                'ex={}').format(
                    search_job.text,
                    ppj(query_dict),
                    search_url,
                    e))
        # end of try/ex for search

        if job_id:
            # Step 3: Get the search status
            search_status_url = '{}/services/search/jobs/{}/'.format(
                url,
                job_id)
            isnotdone = True
            while isnotdone:
                searchstatus = requests.get(
                    url=search_status_url,
                    verify=verify,
                    headers=auth_header)
                isdonestatus = re.compile('isDone">(0|1)')
                isdonestatus = isdonestatus.search(
                    searchstatus.text).groups()[0]
                if (isdonestatus == '1'):
                    isnotdone = False

            # Step 4: Get the search results
            job_url = (
                '{}/services/search/jobs/{}/'
                'results?output_mode=json&count=0').format(
                    url,
                    job_id)
            search_results = requests.get(
                job_url,
                verify=verify,
                headers=auth_header)
            if debug:
                log.info(
                    'search {}\nresults:\n[{}]'.format(
                        query_dict,
                        search_results.text))
            response_dict = None
            try:
                response_dict = json.loads(search_results.text)
            except Exception as e:
                log.error((
                    'Failed to load json from search_results.text={} '
                    'url={} '
                    'ex={}').format(
                        search_results.text,
                        job_url,
                        e))

            res['status'] = SUCCESS
            res['err'] = ''
            res['record'] = response_dict
    except Exception as e:
        res['err'] = (
            'Failed searching user={} query_dict={} '
            'ex={}').format(
                user,
                query_dict,
                e)
        res['status'] = ERR
        log.error(res['err'])
    # end of try/ex

    return res
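
# Example (illustrative; assumes a Splunk REST API reachable on
# splunkenterprise:8089 and the antinex index - swap in your own
# credentials, address and index):
#
# res = search(
#     user='trex',
#     password='123321',
#     address='splunkenterprise:8089',
#     query_dict={
#         'search': 'search index="antinex" | head 10'
#     })
# if res['status'] == SUCCESS:
#     for rec in res['record'].get('results', []):
#         print(rec.get('_raw', rec))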