def setup():
    cfg = cli.getConfStanza('act', 'config')
    app = cli.getConfStanza('app', 'launcher')

    api_url = cfg.get("api_url")
    user_id = cfg.get("act_userid")
    api_proxy = cfg.get("api_proxy")
    api_http_user = cfg.get("api_http_user")
    api_http_password = cfg.get("api_http_auth")

    requests_opt = {
        "headers": {
            # Include version string in user agent header
            "User-Agent": "act-splunk-{}".format(app.get("version"))
        }
    }

    if api_http_user or api_http_password:
        requests_opt["auth"] = (api_http_user, api_http_password)

    if api_proxy:
        requests_opt["proxies"] = {
            "http": api_proxy,
            "https": api_proxy,
        }

    return act.Act(
        api_url,
        user_id=user_id,
        log_level="warning",
        requests_common_kwargs=requests_opt)
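# A minimal usage sketch for setup() above. The stanza keys mirror the ones the
# function reads from act.conf; the values shown are illustrative placeholders,
# not taken from any real deployment:
#
#   [config]
#   api_url = https://act.example.com/api
#   act_userid = 1
#   api_proxy = http://proxy.example.com:3128
#
# api_client = setup()  # returns an act.Act client ready for API calls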
def query_tie(ioc_value):
    tie_args = cli.getConfStanza('dcso_hunt_setup', 'tie')
    request = urllib.request.Request(
        "{}?value={}".format(str(tie_args['feed_api']), ioc_value))
    request.add_header("X-Authorization", 'bearer {}'.format(str(tie_args['token'])))
    request.add_header("Accept", 'application/json')
    contents = json.loads(urllib.request.urlopen(request).read())
    return contents
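# Example invocation of query_tie() above. The IOC value is a placeholder; the
# feed_api and token keys are read from the [tie] stanza of dcso_hunt_setup.conf:
#
# matches = query_tie("198.51.100.7")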
def get_config_on_memory(self):
    try:
        config = cli.getConfStanza("config", "configuration")
        return config
    except Exception as e:
        self.logger.error("log: Error getting the configuration on memory: %s" % (e))
        raise e
def get_results(bloomfile, sub_id):
    tie_args = cli.getConfStanza('dcso_hunt_setup', 'tie')
    TIE_TOKEN = str(tie_args["token"])
    request = urllib.request.Request(
        "{}/{}/data/latest".format(bloomfile['bloomfilter']['bf-res_api'], sub_id))
    request.add_header("X-Authorization", 'bearer {}'.format(TIE_TOKEN))
    # Write the raw bloom filter bytes straight to disk; redirecting sys.stdout
    # to a text-mode file (as before) leaves stdout hijacked and fails on the
    # bytes that urlopen().read() returns.
    with open(os.path.join(os.path.dirname(__file__), 'splunk.bloom'), 'wb') as out:
        out.write(urllib.request.urlopen(request).read())
def __init__(self, logger=None, conf="redis", stanza="default"):
    self.logger = logger
    if logger:
        logger.debug("reading configs")
    cfg = cli.getConfStanza(conf, stanza)
    self.hostname = cfg.get('hostname')
    self.port = cfg.get('port')
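# For reference, the __init__ above expects a conf file named redis.conf with a
# stanza like the following (hostname and port values are illustrative placeholders):
#
#   [default]
#   hostname = localhost
#   port = 6379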
def get_forwarders(group):
    try:
        host_groups = conf_tools.getConfStanza(FORWARDER_CONF, group)
    except Exception as e:
        print("Unable to find group: %s, reason: %s" % (group, e))
        return None
    return host_groups
def get_config_on_memory(self):
    try:
        self.logger.debug("api: Getting configuration on memory.")
        config = cli.getConfStanza("config", "configuration")
        return config
    except Exception as e:
        self.logger.error("api: Error getting the configuration on memory: %s" % (e))
        return jsonbak.dumps({"error": str(e)})
def current_version(self, **kwargs):
    try:
        app = cli.getConfStanza('package', 'app')
        app_version = app.get('version')
        app_revision = app.get('revision')
        wazuh = cli.getConfStanza('package', 'wazuh')
        wazuh_version = wazuh.get('version')
        my_arr = []
        version_dict = {}
        version_dict['appversion'] = app_version
        version_dict['apprevision'] = app_revision
        version_dict['wazuhversion'] = wazuh_version
        my_arr.append(version_dict)
        data_temp = json.dumps(my_arr)
    except Exception as e:
        return json.dumps({"error": str(e)})
    return data_temp
def getSelfAdminStanza(self):
    """Get the configuration from a stanza."""
    try:
        apikeyconf = cli.getConfStanza('config', 'configuration')
        # parsed_data = jsonbak.dumps(apikeyconf)
    except Exception as e:
        raise e
    return apikeyconf
def isCloudInstance():
    try:
        config = cli.getConfStanza('cloud', 'deployment')
        isCloudInstance = json.loads(config.get('is_cloud_instance').lower())
    except Exception as e:
        logger.exception(e)
        return False
    logger.debug("utils::isCloudInstance:: %s" % isCloudInstance)
    return isCloudInstance
def delete_key_from_event(self, delete_event):
    try:
        cfg = cli.getConfStanza('kvstore_tools', 'settings')
    except BaseException as e:
        eprint("Could not read configuration: " + repr(e))

    # Facility info - prepended to log lines
    facility = os.path.basename(__file__)
    facility = os.path.splitext(facility)[0]
    try:
        logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
    except BaseException as e:
        eprint("Could not create logger: " + repr(e))
        print("Could not create logger: " + repr(e))
        exit(1)

    url_tmpl_delete = '%(server_uri)s/servicesNS/%(owner)s/%(app)s/storage/collections/data/%(collection)s/%(id)s?output_mode=json'

    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }

    for key, value in list(delete_event.items()):
        if key == '_key' and len(value) > 0:
            logger.debug("Found %s (%s) in event" % (key, value))
            try:
                delete_url = url_tmpl_delete % dict(
                    server_uri=self.splunkd_uri,
                    owner='nobody',
                    app=self.app,
                    collection=self.collection,
                    id=urllib.parse.quote(value, safe=''))
                logger.debug("Delete url: " + delete_url)

                try:
                    response, response_code = request('DELETE', delete_url, '', headers)
                    logger.debug('Server response: %s' % response)
                except BaseException as e:
                    # Return here: response_code is unbound if the request raised
                    logger.error('ERROR Failed to delete key: %s', repr(e))
                    delete_event['delete_status'] = "error"
                    return delete_event

                if response_code == 200:
                    logger.debug("Successfully deleted " + key)
                    delete_event['delete_status'] = "success"
                    return delete_event
                else:
                    logger.error("Error %d deleting %s: %s" % (response_code, key, response))
                    delete_event['delete_status'] = "error"
                    return delete_event
            except BaseException as e:
                logger.error("Error deleting %s: %s" % (key, repr(e)))
                delete_event['delete_status'] = "error"
                return delete_event
def app_info(self, **kwargs):
    """Obtain app information from file.

    Parameters
    ----------
    kwargs : dict
        The request's parameters
    """
    try:
        self.logger.debug("manager: Getting app info.")
        stanza = cli.getConfStanza('package', 'app')
        data_temp = stanza
        stanza = cli.getConfStanza('package', 'splunk')
        data_temp['splunk_version'] = stanza['version']
        parsed_data = jsonbak.dumps(data_temp)
    except Exception as e:
        return jsonbak.dumps({'error': str(e)})
    return parsed_data
def get_databricks_configs():
    """
    Get configuration details from ta_databricks_settings.conf.

    :return: dictionary with Databricks fields and values
    """
    _LOGGER.info("Reading configuration file.")
    configs = cli.getConfStanza("ta_databricks_settings", "databricks_credentials")
    return configs
def get_proxy_cfg(self):
    try:
        # Initially look for proxyConfig in securegateway.conf
        proxy_cfg = self.get_config_keys(self.PROXY_CONFIG)
        # Fall back to server.conf for proxyConfig
        if not proxy_cfg:
            proxy_cfg = cli.getConfStanza('server', self.PROXY_CONFIG)
        return proxy_cfg
    except Exception:
        return None
def handleList(self, confInfo):
    self.capabilityRead = 'read_kvst_config'

    try:
        cfg = cli.getConfStanza('kvstore_tools', 'settings')
    except BaseException as e:
        raise Exception("Could not read configuration: " + repr(e))

    # Facility info - prepended to log lines
    facility = os.path.basename(__file__)
    facility = os.path.splitext(facility)[0]
    try:
        logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
    except BaseException as e:
        raise Exception("Could not create logger: " + repr(e))

    logger.debug('KV Store Tools Settings handler started (List)')

    # Check for permissions to read the configuration
    session_key = self.getSessionKey()
    content = rest.simpleRequest('/services/authentication/current-context?output_mode=json',
                                 sessionKey=session_key, method='GET')[1]
    content = json.loads(content)
    current_user = content['entry'][0]['content']['username']
    current_user_capabilities = content['entry'][0]['content']['capabilities']

    if self.capabilityRead in current_user_capabilities:
        logger.debug("User %s is authorized" % current_user)

        confDict = self.readConf("kvstore_tools")
        if confDict is not None:
            for stanza, settings in list(confDict.items()):
                for key, val in list(settings.items()):
                    logger.debug("key: {0}, value: {1}".format(key, val))
                    if key in ['compression']:
                        if str2bool(val):
                            val = '1'
                        else:
                            val = '0'
                    '''
                    if key in ['default_path'] and val in [None, '', 'unset']:
                        val = os.path.join('$SPLUNK_HOME', 'etc', 'apps', 'kvstore_tools', 'backups')
                        # Windows wildcard support (works with $ but this is more clear).
                        if '\\' in val:
                            val = val.replace('$SPLUNK_HOME', '%SPLUNK_HOME%')
                    if key in ['backup_batch_size'] and val in [None, '']:
                        val = '50000'
                    if key in ['retention_days'] and val in [None, '']:
                        val = '0'
                    if key in ['retention_size'] and val in [None, '']:
                        val = '0'
                    '''
                    confInfo[stanza].append(key, val)
    else:
        raise Exception("User %s is unauthorized. Has the read_kvst_config capability been granted?" % current_user)
def current_credentials():
    # Initialize first so the return below cannot hit an unbound name
    # when reading the configuration fails.
    credential_dict = {}
    try:
        app = cli.getConfStanza('config', 'credentials')
        credential_dict['username'] = app.get('username')
        credential_dict['password'] = app.get('password')
    except Exception as e:
        print("Error loading the configuration file: %s" % e)
    return credential_dict
def polling_state(self, **kwargs):
    try:
        app = cli.getConfStanza(
            'inputs',
            'script:///opt/splunk/etc/apps/SplunkAppForWazuh/bin/get_agents_status.py'
        )
        disabled = app.get('disabled')
        polling_dict = {}
        polling_dict['disabled'] = disabled
        data_temp = json.dumps(polling_dict)
    except Exception as e:
        return json.dumps({"error": str(e)})
    return data_temp
def load_conf():
    global FORWARDER_CONF
    try:
        group_stanza = conf_tools.getConfStanza(FORWARDER_CONF, 'groups')
        groups = group_stanza['groupList'].split(',')
        # clean any extra whitespace out of our grouplist
        groups = [x.strip() for x in groups]
    except Exception as e:
        print(e)
def setup_logging(log_name):
    """
    Get a logger object with specified log level.

    :param log_name: (str): name for logger
    :return: logger object
    """
    # Make path till log file
    log_file = make_splunkhome_path(
        ["var", "log", "splunk", "%s.log" % log_name])

    # Get directory in which log file is present
    log_dir = os.path.dirname(log_file)

    # Create directory at the required path to store log file, if not found
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # Read log level from conf file
    cfg = cli.getConfStanza("ta_databricks_settings", "logging")
    log_level = cfg.get("loglevel")

    logger = logging.getLogger(log_name)
    logger.propagate = False

    # Set log level
    try:
        logger.setLevel(log_level)
    except Exception:
        logger.setLevel(DEFAULT_LOG_LEVEL)

    handler_exists = any(
        [True for h in logger.handlers if h.baseFilename == log_file])
    if not handler_exists:
        file_handler = logging.handlers.RotatingFileHandler(
            log_file, mode="a", maxBytes=10485760, backupCount=10)

        # Format logs
        fmt_str = (
            "%(asctime)s %(levelname)s pid=%(process)d tid=%(threadName)s "
            "file=%(filename)s:%(funcName)s:%(lineno)d | %(message)s")
        formatter = logging.Formatter(fmt_str)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

        if log_level:
            try:
                file_handler.setLevel(log_level)
            except Exception:
                file_handler.setLevel(DEFAULT_LOG_LEVEL)

    return logger
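# A minimal usage sketch for setup_logging() above, assuming DEFAULT_LOG_LEVEL is
# defined at module level (e.g. logging.INFO) and that ta_databricks_settings.conf
# carries a [logging] stanza with a loglevel key:
#
# _LOGGER = setup_logging("ta_databricks")
# _LOGGER.info("Logger initialised; writes to var/log/splunk/ta_databricks.log")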
def stream(self, records):
    cfg = cli_common.getConfStanza("appsetup", "app_config")
    bam = BAM(cfg.get("bamip"), cfg.get("username"), cfg.get("password"))
    bam.login()
    for record in records:
        if self.source in record:
            ipaddr = record[self.source]
            ipobj = self.cacheLookup(ipaddr)
            update_record = {}
            obj = {}
            if not ipobj:
                obj = bam.getIP4Address(ipaddr)
                if obj:
                    ipobj = obj.values
                    update_record.update(ipobj)
                    # If no object found returns None
                    hostname = obj.getLinkedHostRecord()
                    if hostname:
                        update_record.update(hostname.values)
                    network = obj.getParent()
                    if network:
                        update_record.update(network.values)
                    if "ip_macAddress" in ipobj:
                        macobj = bam.getMACAddress(ipobj["ip_macAddress"])
                        update_record.update(macobj.values)
                        # Copy attributes from one Tag on MAC address object
                        # MAC address objects are tagged with a user in Identity Bridge
                        user = macobj.getLinkedTag()
                        if user:
                            update_record.update(user.values)
                    self.cacheAdd(update_record)
            else:
                update_record = ipobj
            for key in self.useful_fields:
                if key in update_record:
                    record[key] = update_record[key]
                else:
                    record[key] = ""
        yield record
    bam.logout()
    return
def returnPublicInfo(self, **kwargs):
    cfg = cli.getConfStanza('ta_plaid_settings', 'additional_parameters')
    publickey = cfg.get('public_key')
    enable_development_mode = cfg.get('enable_development_mode_')

    mode = "sandbox"
    if enable_development_mode.isdigit():
        if int(enable_development_mode) == 1:
            mode = "development"

    data = {
        'public_key': publickey,
        'environment': mode
    }
    return json.dumps(data)
def __init__(self):
    cfg = cli.getConfStanza('ssp', 'config')
    self.api_key = cfg.get('api_key')
    self.base_url = cfg.get('base_url')

    check_tls = cfg.get('check_tls')
    if check_tls.lower() in ['no', 'false']:
        self.check_tls = False
    else:
        self.check_tls = True

    self.headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Token ' + self.api_key
    }
    self.next = None
    self.page = 1
def getSelfConfStanza(file, stanza):
    """Get the configuration from a stanza.

    Parameters
    ----------
    file : unicode
        The configuration file to read
    stanza : unicode
        The selected stanza
    """
    try:
        apikeyconf = cli.getConfStanza(file, stanza)
        parsed_data = jsonbak.dumps(apikeyconf)
    except Exception as e:
        raise e
    return parsed_data
def query_subscription(bloomfile):
    if bloomfile['bloomfilter']['subscr_id'] == "":
        tie_args = cli.getConfStanza('dcso_hunt_setup', 'tie')
        TIE_TOKEN = str(tie_args["token"])
        url = '{}/{}'.format(bloomfile['bloomfilter']['bf-sub_api'],
                             bloomfile['bloomfilter']['query_id'])
        values = {"format": "application/bloom"}
        # urlencode() returns str; Request() needs bytes for a POST body
        data = urllib.parse.urlencode(values).encode('utf-8')
        post = urllib.request.Request(url, data)
        post.add_header("Authorization", 'Bearer {}'.format(TIE_TOKEN))
        response = json.loads(urllib.request.urlopen(post).read())
        bloomfile['bloomfilter']['subscr_id'] = response['subscription']['id']
        with open(os.path.join(os.path.dirname(__file__), '../local/bloom.json'), 'w') as bloominput:
            json.dump(bloomfile, bloominput)
    return bloomfile['bloomfilter']['subscr_id']
def create_splunk_service(self, session_key):
    self.logger.debug('splunkd_uri create_splunk_service %s ', session_key)
    try:
        cfg = cli.getConfStanza('web', 'settings')
        managementIP = cfg.get('mgmtHostPort')
        port = managementIP.split(':')[1]
        self.logger.debug('management port: %s', port)
        s = client.connect(token=session_key, port=port)
        return s
    except Exception as e:
        # Format the exception rather than concatenating it to a string,
        # which would raise a TypeError of its own.
        self.logger.error('\n%s %s' % (e, Stacktrace.get()))
        return self.render_json("Read request failed")
def get_proxies(self):
    try:
        proxies = {}
        proxy_cfg = cli.getConfStanza('server', 'proxyConfig')

        # get http_proxy
        http_proxy = proxy_cfg.get('http_proxy')
        if http_proxy:
            proxies['http'] = http_proxy

        # get https_proxy
        https_proxy = proxy_cfg.get('https_proxy')
        if https_proxy:
            proxies['https'] = https_proxy

        return proxies
    except Exception:
        return {}
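# A hedged usage sketch for get_proxies() above: the dict it returns plugs
# straight into the requests library's proxies argument (the URL here is a
# placeholder, not part of the original code):
#
# resp = requests.get("https://api.example.com/v1/status",
#                     proxies=self.get_proxies(), timeout=30)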
def __init__(self):
    cfg = cli.getConfStanza('pdns', 'config')

    api_url = cfg.get("api_url")
    api_key = cfg.get("api_key")
    proxy = cfg.get("proxy")

    if proxy:
        proxy_handler = urllib2.ProxyHandler({'http': proxy})
        opener = urllib2.build_opener(proxy_handler)
    else:
        opener = urllib2.build_opener()

    if api_key:
        opener.addheaders = [('Argus-API-Key', api_key)]

    self.opener = opener
    self.api_url = api_url
    self.api_key = api_key
    self.proxy = proxy
def stream_events(self, inputs, ew):
    # http = httplib2.Http('.cache')
    # overview @ .cache param -- https://github.com/jcgregorio/httplib2/#usage
    # The 'content' is the content retrieved from the URL
    # The 'resp' contains all the response headers
    # A 'source type' determines how Splunk Enterprise formats the data
    # during the indexing process.
    event_data = Event()
    event_data.index = "catchpoint"

    # Create driver class object
    cp_object = CPDrive()

    # Get data from the catchpoint configuration file at stanza
    # [catchpoint_account]. This is configured in the setup UI:
    # http://localhost:8000/en-US/manager/catchpoint_search/apps/local/catchpoint_search/setup?action=edit
    setup_input = cli_common.getConfStanza("catchpoint", "catchpoint_account")
    consumer_key = setup_input['client_key']
    consumer_secret = setup_input['client_secret']
    # Here we should be able to get the access_token that was set on the setup page. IS THIS NECESSARY??
    # access_token = setup_input['access_token']

    # Testing:
    # event_data.data = cp_object.retrieve_rd_wrapper('RY-Rc-jSl18UYU23', '59d65360-9248-410e-a697-28e62b70054e', 81093)
    # consider writing driver retrieve interface to accept variant key / secret / tests. -- update: done.

    for input_name, input_item in inputs.inputs.items():
        test_id = input_item['test_id']
        event_data.stanza = input_name
        content = cp_object.retrieve_rd_wrapper(consumer_key, consumer_secret, test_id)

        # Must convert the Python dictionary to a string for Splunk to write
        # to stdout and ingest the data: json.dumps(content["detail"])
        for index in content["detail"]:
            metric = content["detail"][index]
            element = {'start': content['start'],
                       'end': content['end'],
                       'timezone': content['timezone'],
                       'breakdown_1': metric['breakdown_1'],
                       'breakdown_2': metric['breakdown_2'],
                       'dimension': metric['dimension'],
                       'host_Ip': metric['host_Ip'],
                       'synthetic_metric': metric['synthetic_metrics']}
            event_data.data = json.dumps(element, sort_keys=True)
            # Testing:
            # print event_data.data
            ew.write_event(event_data)
def check_wazuh_version(self, kwargs):
    """Check Wazuh version.

    Parameters
    ----------
    kwargs : dict
        The request's parameters
    """
    try:
        opt_username = kwargs["user"]
        opt_password = kwargs["pass"]
        opt_base_url = kwargs["ip"]
        opt_base_port = kwargs["port"]
        url = opt_base_url + ":" + opt_base_port
        verify = False

        auth = requestsbak.auth.HTTPBasicAuth(opt_username, opt_password)
        wazuh_token = self.wztoken.get_auth_token(url, auth)

        wazuh_version = self.session.get(
            url + '/',
            headers={'Authorization': f'Bearer {wazuh_token}'},
            timeout=20,
            verify=verify).json()
        wazuh_version = wazuh_version['data']['api_version']

        app_version = cli.getConfStanza('package', 'app')
        app_version = app_version['version']

        v_split = wazuh_version.split('.')
        a_split = app_version.split('.')
        wazuh_version = str(v_split[0] + "." + v_split[1])
        app_version = str(a_split[0] + "." + a_split[1])

        if wazuh_version != app_version:
            raise Exception(
                "Unexpected Wazuh version. App version: %s, Wazuh version: %s"
                % (app_version, wazuh_version))
    except Exception as e:
        self.logger.error("Error when checking Wazuh version: %s" % (e))
        raise e
def get_value(conf_name, stanza_name, param_name, default_value, logger=None):
    try:
        stanza = cli_common.getConfStanza(conf_name, stanza_name)
        try:
            v = stanza[param_name]
        except Exception:
            if logger is not None:
                logger.warn(
                    '%s.conf->[%s]->%s not found, using default value (%s=%s)'
                    % (conf_name, stanza_name, param_name, param_name, default_value))
            v = default_value
    except Exception:
        if logger is not None:
            logger.warn(Stacktrace.getn())
        v = default_value
    return v
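# Example call for get_value() above: read an optional setting with a safe
# fallback. The conf name, stanza, and key here are illustrative only:
#
# interval = get_value('myapp', 'settings', 'poll_interval', '60', logger=logger)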
def polling_state(self, **kwargs):
    """Check agent monitoring status.

    Parameters
    ----------
    kwargs : dict
        The request's parameters
    """
    try:
        self.logger.debug("manager: Getting agents polling state.")
        app = cli.getConfStanza(
            'inputs',
            'script:///opt/splunk/etc/apps/SplunkAppForWazuh/bin/get_agents_status.py')
        disabled = app.get('disabled')
        polling_dict = {}
        polling_dict['disabled'] = disabled
        data_temp = jsonbak.dumps(polling_dict)
    except Exception as e:
        return jsonbak.dumps({'error': str(e)})
    return data_temp
def stream(self, events):
    try:
        cfg = cli.getConfStanza('kvstore_tools', 'settings')
    except BaseException as e:
        eprint("Could not read configuration: " + repr(e))

    # Facility info - prepended to log lines
    facility = os.path.basename(__file__)
    facility = os.path.splitext(facility)[0]
    try:
        logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
    except BaseException as e:
        eprint("Could not create logger: " + repr(e))
        print("Could not create logger: " + repr(e))
        exit(1)

    logger.info('Script started by %s' % self._metadata.searchinfo.username)

    if self.app:
        logger.debug('App: %s' % self.app)
    else:
        self.app = self._metadata.searchinfo.app

    if self.collection:
        logger.debug('Collection: %s' % self.collection)
    else:
        logger.critical("No collection specified. Exiting.")
        print("Error: No collection specified.")
        exit(1)

    if self.outputkeyfield:
        logger.debug('Output Key Field: %s' % self.outputkeyfield)
    else:
        self.outputkeyfield = self.collection + "_key"

    if self.outputvalues:
        logger.debug('Output Values: %s' % self.outputvalues)
    else:
        self.outputvalues = ""

    if self.delimiter:
        logger.debug('Delimiter: %s' % self.delimiter)
    else:
        self.delimiter = ","

    if self.groupby:
        logger.debug('Group by field: %s' % self.groupby)
    else:
        self.groupby = None

    opts = {}
    opts["owner"] = "nobody"
    opts["token"] = self._metadata.searchinfo.session_key
    opts["app"] = self.app

    #epoch_time = int(time.time())
    current_user = self._metadata.searchinfo.username

    lookup_output_kvpairs = []
    # Static output fields are literal values that are given within the search command arguments
    # e.g. "lookup_field1=value1"
    static_output_fields = {}
    # Variable output fields are values taken from the events and pushed into the lookup record
    # as events are processed
    # e.g. "lookup_field2=$sourcetype$"
    variable_output_fields = {}
    resolved_variables = {}

    # Check for lockfile from previous invocations for this search ID
    dispatch = self._metadata.searchinfo.dispatch_dir
    static_kvfields_file = os.path.join(dispatch, "kvfields_static")        # dict
    variable_kvfields_file = os.path.join(dispatch, "kvfields_variable")    # dict
    resolved_variables_file = os.path.join(dispatch, "resolved_variables")  # dict

    try:
        if os.path.isfile(static_kvfields_file):
            with open(static_kvfields_file, 'r') as f:
                # Set static kvfields values
                static_output_fields = json.loads(f.read())  # dict
        if os.path.isfile(variable_kvfields_file):
            with open(variable_kvfields_file, 'r') as f:
                # Set variable kvfields values
                variable_output_fields = json.loads(f.read())  # dict

        # Connect to the kv store
        service = connect(**opts)
        if self.collection in service.kvstore:
            obj_collection = service.kvstore[self.collection]
        else:
            logger.critical("KVStore not found: %s" % self.collection)
            print('KVStore not found: %s' % self.collection)
            exit(1)

        # First invocation - build the lists for static and variable values
        if static_output_fields == {} and variable_output_fields == {}:
            # Split the key-value pairs argument into individual key-value pairs
            # Account for quoted string values and delimiters within the quoted value
            kvpair_split_re = r'([^=]+=(?:"[^"\\]*(?:\\.[^"\\]*)*"|[^{}]+))'.format(self.delimiter)
            x = re.findall(kvpair_split_re, self.outputvalues)
            for i in x:
                i = i.strip(self.delimiter).strip()
                lookup_output_kvpairs.append(i)

            for lof in lookup_output_kvpairs:
                k, v = lof.split("=")
                k = k.strip()
                v = v.strip().strip('"').replace('\\"', '"')
                logger.debug("k = %s, v = %s" % (k, v))
                # Replace special values
                v = v.replace("$kv_current_userid$", current_user)
                v = v.replace("$kv_now$", str(time.time()))
                # Value starts and ends with $ - variable field
                if v[0] + v[-1] == '$$':
                    # Add to the list of variable fields
                    variable_output_fields[k] = v.replace("$", "")
                else:
                    # Add to the list of static fields
                    static_output_fields[k] = v
            logger.info("Unpacked %d static and %d variable fields from arguments" % (
                len(list(static_output_fields.keys())),
                len(list(variable_output_fields.keys()))))

            # Write the static payload to the file
            # File doesn't exist. Open/claim it.
            with open(static_kvfields_file, 'w') as f:
                f.write(json.dumps(static_output_fields, ensure_ascii=False))
            with open(variable_kvfields_file, 'w') as f:
                f.write(json.dumps(variable_output_fields, ensure_ascii=False))
    except BaseException as e:
        logger.critical('Error connecting to collection: %s' % repr(e), exc_info=True)
        print('Error connecting to collection: %s' % repr(e))
        exit(1)

    # Read the events, resolve the variables, store them on a per-groupby-fieldvalue basis
    i = 0
    inserts = 0
    for e in events:
        update = False
        # (Re)read the latest data
        if os.path.isfile(resolved_variables_file):
            with open(resolved_variables_file, 'r') as f:
                # Open in non-blocking mode
                fd = f.fileno()
                flag = fcntl.fcntl(fd, fcntl.F_GETFL)
                fcntl.fcntl(fd, fcntl.F_SETFL, flag | os.O_NONBLOCK)
                # Set static kvfields values
                resolved_variables = json.loads(f.read())  # dict[groupby value][field name]

        if self.groupby is not None:
            groupby_value = e[self.groupby]
        else:
            # Make this value the same for every event (no group-by)
            groupby_value = '____placeholder'

        new_kv_record = {}
        if groupby_value in list(resolved_variables.keys()):
            # Set the previously recorded key value for this group-by value within the event
            kvstore_entry_key = resolved_variables[groupby_value]["_key"]
            # We've already resolved the variables for this groupby, but see if any are not populated
            for lookup_field, event_field in list(variable_output_fields.items()):
                if lookup_field not in list(resolved_variables[groupby_value].keys()):
                    if event_field in list(e.keys()):
                        if e[event_field] is not None and e[event_field] != '':
                            resolved_variables[groupby_value][lookup_field] = e[event_field]
                            new_kv_record[lookup_field] = e[event_field]
                            update = True
            if update:
                # Update the collection
                new_kv_record.update(static_output_fields)
                response = obj_collection.data.update(kvstore_entry_key, json.dumps(new_kv_record))
                # Write the data to disk immediately so other threads can benefit
                with open(resolved_variables_file, 'w') as f:
                    f.write(json.dumps(resolved_variables, ensure_ascii=False))
        else:
            # First time we're seeing this groupby value. Resolve variables and write the KV store record.
            # Define the dictionary
            resolved_variables[groupby_value] = {}
            # Update the static values
            new_kv_record = static_output_fields.copy()
            # Resolve the variables
            for lookup_field, event_field in list(variable_output_fields.items()):
                if event_field in list(e.keys()):
                    if e[event_field] is not None:
                        resolved_variables[groupby_value][lookup_field] = e[event_field]
                        new_kv_record[lookup_field] = e[event_field]
            # Write the new kvstore record and get the ID (_key)
            response = obj_collection.data.insert(json.dumps(new_kv_record))
            kvstore_entry_key = response["_key"]
            resolved_variables[groupby_value]["_key"] = kvstore_entry_key
            # Write the data to disk immediately so other threads can benefit
            with open(resolved_variables_file, 'w') as f:
                f.write(json.dumps(resolved_variables, ensure_ascii=False))
            inserts += 1

        # Write the KV store record's _key value to the event
        e[self.outputkeyfield] = kvstore_entry_key
        yield e
        i += 1

    logger.info("Modified %d events and inserted %s new records into %s"
                % (i, inserts, self.collection))
def main():
    # sys.stderr = open('err.txt', 'w+')
    # Redirect error to out, so we can see any errors
    sessionXml = sys.stdin.readline()
    if len(sessionXml) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. "
                         "Please enable passAuth in inputs.conf for this script\n")
        exit(2)

    # parse the xml sessionKey
    start = sessionXml.find('<authToken>') + 11
    stop = sessionXml.find('</authToken>')
    authTok = sessionXml[start:stop]

    # now get tanium credentials - might exit if no creds are available
    username, passwd = getCredentials(authTok)

    sys.stderr = sys.stdout
    configuration_dict = spcli.getConfStanza('tanium', 'taniumserver')
    tanium_server = configuration_dict['taniumhost']

    parser = argparse.ArgumentParser(description='Tanium Splunk Saved Query')
    """
    parser.add_argument('--tanium', metavar='TANIUM', required=True, help='Tanium server')
    parser.add_argument('--user', metavar='USER', required=True, help='user name')
    parser.add_argument('--password', metavar='PASSWORD', required=True, help='user password')
    """
    parser.add_argument('--saved', metavar='SAVED', required=True, help='saved question')
    parser.add_argument('--timeout', metavar='TIMEOUT', required=False, default="3",
                        help='sensor poll timeout')
    parser.add_argument('--splunk', metavar='SPLUNK', required=False,
                        help='Splunk server to TCP to')
    parser.add_argument('--splunk_port', metavar='SPLUNK_PORT', required=False, default="9999",
                        help='Splunk server TCP port')
    args = vars(parser.parse_args())

    tanium = tanium_server
    user = username
    password = passwd
    saved = args['saved']
    timeout = args['timeout']
    splunk = args['splunk']
    splunk_port = int(args['splunk_port'])

    # end processing args, now instantiate the Tanium class
    my_tanium = TaniumQuestion(tanium, user, password)
    # send the question to Tanium
    xml_response = my_tanium.ask_tanium_a_question(saved, timeout)

    # check to make sure the result is good
    if xml_response == "Timeout":
        print("Alert,Suggestion")
        print("The request timed out, Try setting a higher timeout")
    else:
        # translate the results to a user-friendly list
        list_response = my_tanium.xml_from_tanium_to_csv_list(xml_response)
        list_line = ""
        list_count = 0
        head_len = len(list_response[0].split(','))
        print(list_response[0])
        for i in range(1, len(list_response)):
            list_line = list_line + "," + list_response[i]
            list_count = list_count + 1
            if list_count == head_len:
                print(list_line.lstrip(','))
                list_line = ""
                list_count = 0
        # if requested send the data to a TCP indexer BROKEN!
        if splunk is not None:
            syslog_list = my_tanium.csv_list_to_syslog_list(list_response)
            for element in syslog_list:
                tcp_to_splunk(splunk, splunk_port, element)
def main():
    # sys.stderr = open('err.txt', 'w+')
    # Redirect error to out, so we can see any errors
    sessionXml = sys.stdin.readline()
    if len(sessionXml) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. "
                         "Please enable passAuth in inputs.conf for this script\n")
        exit(2)

    # parse the xml sessionKey
    start = sessionXml.find('<authToken>') + 11
    stop = sessionXml.find('</authToken>')
    authTok = sessionXml[start:stop]

    # now get tanium credentials - might exit if no creds are available
    username, passwd = getCredentials(authTok)

    #sys.stderr = sys.stdout
    configuration_dict = spcli.getConfStanza('tanium_customized', 'taniumhost')
    tanium_server = configuration_dict['content']

    parser = argparse.ArgumentParser(description='Tanium Splunk NLP Query')
    """
    parser.add_argument('--tanium', metavar='TANIUM', required=True, help='Tanium server')
    parser.add_argument('--user', metavar='USER', required=True, help='user name')
    parser.add_argument('--password', metavar='PASSWORD', required=True, help='user password')
    """
    parser.add_argument('--question', metavar='QUESTION', required=True, help='nlp question')
    parser.add_argument('--timeout', metavar='TIMEOUT', required=False, default="3",
                        help='sensor poll timeout')
    parser.add_argument('--show_parse', required=False, action="store_true",
                        help='show parsed question on line 1')
    parser.add_argument('--splunk', metavar='SPLUNK', required=False,
                        help='Splunk server to TCP to')
    parser.add_argument('--splunk_port', metavar='SPLUNK_PORT', required=False, default="443",
                        help='Splunk server TCP port')
    parser.add_argument('--clean_key', metavar='(True|False)', required=False, default="True",
                        help='Controls for key cleaning')
    args = vars(parser.parse_args())

    tanium = tanium_server
    user = username
    password = passwd
    question = args['question']
    timeout = args['timeout']
    show_parse = args['show_parse']
    splunk = args['splunk']
    splunk_port = int(args['splunk_port'])
    clean_key = (args['clean_key'].lower() == "true")

    # end processing args, now instantiate the Tanium class
    my_tanium = TaniumQuestion(tanium, user, password)
    # send the question to Tanium
    xml_response = my_tanium.ask_tanium_a_question(question, timeout)

    # check to make sure the result is good
    if xml_response == "Timeout":
        print("Alert,Suggestion")
        print("The request timed out, Try setting a higher timeout")
    else:
        # translate the results to a user-friendly list
        list_response = my_tanium.xml_from_tanium_to_csv_list(xml_response, clean_key)
        list_line = ""
        list_count = 0
        head_len = len(list_response[0].split(','))
        if head_len > 1:
            print(list_response[0])
        else:
            print("No,Results,Returned")
        if show_parse:
            print("Question Parsed As: \"" + my_tanium.verbage + "\"")
        for i in range(1, len(list_response)):
            list_line = list_line + "," + list_response[i]
            list_count = list_count + 1
            if list_count == head_len:
                print(list_line.lstrip(','))
                list_line = ""
                list_count = 0
        # if requested send the data to a TCP indexer BROKEN!
        if splunk is not None:
            syslog_list = my_tanium.csv_list_to_syslog_list(list_response)
            for element in syslog_list:
                tcp_to_splunk(splunk, splunk_port, element)
def main():
    # sys.stderr = open('err.txt', 'w+')
    # Redirect error to out, so we can see any errors
    sessionXml = sys.stdin.readline()
    if len(sessionXml) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. "
                         "Please enable passAuth in inputs.conf for this script\n")
        exit(2)

    # parse the xml sessionKey
    start = sessionXml.find('<authToken>') + 11
    stop = sessionXml.find('</authToken>')
    authTok = sessionXml[start:stop]

    # now get tanium credentials - might exit if no creds are available
    username, passwd = getCredentials(authTok)

    sys.stderr = sys.stdout
    configuration_dict = spcli.getConfStanza('tanium_customized', 'taniumhost')
    tanium_server = configuration_dict['content']

    # parse the cli
    parser = argparse.ArgumentParser(description='Tanium Splunk Save A Question')
    """
    parser.add_argument('--tanium', metavar='TANIUM', required=True, help='Tanium server')
    parser.add_argument('--user', metavar='USER', required=True, help='user name')
    parser.add_argument('--password', metavar='PASSWORD', required=True, help='user password')
    """
    parser.add_argument('--question', metavar='QUESTION', required=True, help='nlp question')
    parser.add_argument('--save_name', metavar='SAVENAME', required=True,
                        help='The name to save the question under')
    args = vars(parser.parse_args())

    tanium = tanium_server
    user = username
    password = passwd
    #tanium = args['tanium']
    #user = args['user']
    #password = args['password']
    question = args['question']
    save_name = args['save_name']

    # end processing args, now instantiate the Tanium class
    my_tanium = TaniumQuestion(tanium, user, password)
    # send the question to Tanium
    saved_resp = my_tanium.ask_tanium_a_question(question, save_name)

    print("Question,NLP Saved as,Saved Name,Tanium ID number")
    print(question + "," + saved_resp[0] + "," + save_name + "," + saved_resp[1])
def main():
    # sys.stderr = open('err.txt', 'w+')
    # Redirect error to out, so we can see any errors
    sessionXml = sys.stdin.readline()
    if len(sessionXml) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. "
                         "Please enable passAuth in inputs.conf for this script\n")
        exit(2)

    # parse the xml sessionKey
    start = sessionXml.find('<authToken>') + 11
    stop = sessionXml.find('</authToken>')
    authTok = sessionXml[start:stop]

    # now get tanium credentials - might exit if no creds are available
    username, passwd = getCredentials(authTok)

    sys.stderr = sys.stdout
    configuration_dict = spcli.getConfStanza('tanium_customized', 'taniumhost')
    tanium_server = configuration_dict['content']

    # cli args are not needed here; the questions to save are hard-coded below
    """
    parser = argparse.ArgumentParser(description='Tanium Splunk Save A Question')
    parser.add_argument('--tanium', metavar='TANIUM', required=True, help='Tanium server')
    parser.add_argument('--user', metavar='USER', required=True, help='user name')
    parser.add_argument('--password', metavar='PASSWORD', required=True, help='user password')
    args = vars(parser.parse_args())
    """

    tanium = tanium_server
    user = username
    password = passwd
    #tanium = args['tanium']
    #user = args['user']
    #password = args['password']

    # end processing args, now instantiate the Tanium class
    my_tanium = TaniumQuestion(tanium, user, password)

    # print the header
    print("Question,NLP Saved as,Saved Name,Tanium ID number")

    # the questions to send to Tanium, paired with the names to save them under
    questions = [
        ("Get Computer Name and Non-Approved Established Connections from all machines "
         "with Non-Approved Established Connections containing \":\"",
         "Splunk Non-Approved Established Connections by Computer"),
        ("Get DNS Resolver Cache Hosts from all machines", "Splunk DNS Cache"),
        ("Get DNS Resolver Misses from all machines", "Splunk DNS Cache Misses"),
        ("Get Installed Applications from all machines", "Splunk Installed Applications"),
        ("Get Installed Java Runtimes from all machines", "Splunk Installed Java Runtimes"),
        ("Get Listen Ports with MD5 Hash from all machines",
         "Splunk Listening Ports with MD5 Hash"),
        ("Get Computer Name and IP Address and Logged In Users from all machines",
         "Splunk Machine User Map"),
        ("Get Running Java Applications from all machines",
         "Splunk Machines Actively Running Vulnerable Java Applications"),
        ("Get Out of Date Managed Applications from all machines",
         "Splunk Machines Running Vulnerable Applications"),
        ("Get Non-Approved Established Connections from all machines "
         "with Non-Approved Established Connections containing \":\"",
         "Splunk Non-Approved Established Connections"),
        ("Get ARP Cache from all machines", "Splunk ARP"),
        ("Get Open Port from all machines", "Splunk Open Ports"),
        ("Get Recently Closed Connections from all machines",
         "Splunk Recently Closed Connections"),
        ("Get Available Patches from all machines with Available Patches containing \"Not Installed\"",
         "Splunk Required Windows Patches"),
        ("Get Running Applications from all machines", "Splunk Running Applications"),
        ("Get Running Processes from all machines", "Splunk Running Processes"),
        ("Get Running Processes with MD5 Hash from all machines",
         "Splunk Running Processes with MD5 Hash"),
        ("Get Running Service from all machines", "Splunk Running Services"),
        ("Get Stopped Service from all machines", "Splunk Stopped Services"),
        ("Get Installed Applications containing \"Is Uninstallable\" from all machines",
         "Splunk Uninstallable Applications"),
        ("Get Unmanaged Assets from all machines with Unmanaged Assets not containing \"not found\"",
         "Splunk Unmanaged Assets"),
        ("Get Computer Name and Computer Serial Number and System Disk Free Space and "
         "CPU Manufacturer and IP Address and Disk Total Space and Free Memory and "
         "Total Memory and Operating System and MAC Address and Domain Name and "
         "Last Logged In User and CPU from all machines",
         "Splunk Asset MGMT Common Data"),
    ]

    # send each question to Tanium and report the result, one CSV row per question
    for question, save_name in questions:
        saved_resp = my_tanium.ask_tanium_a_question(question, save_name)
        print(question + "," + saved_resp[0] + "," + save_name + "," + saved_resp[1])
req_fields=None )

keywords, kvs = splunk.Intersplunk.getKeywordsAndOptions()
#splunk.Intersplunk.parseError(",".join(keywords))

if len(sys.argv) < 2:
    splunk.Intersplunk.parseError("Missing actual R script parameter")
r_snippet = sys.argv[1]

# calculate some paths
bin_dir_path, _ = os.path.split(os.path.abspath(__file__))
scripts_dir_path = os.path.join(os.path.dirname(bin_dir_path), 'local', 'scripts')

# read R library path from configuration
r_path_config = cli.getConfStanza('r', 'paths')
r_path = r_path_config.get('r')
if not os.path.exists(r_path):
    splunk.Intersplunk.outputResults(
        splunk.Intersplunk.generateErrorResults('Cannot find R executable at path \'%s\'' % r_path))
    exit(0)

# read all the input data
input_data = splunk.Intersplunk.readResults()

# collect field names
fieldnames = set()
for result in input_data:
    for key in list(result.keys()):
        if key not in fieldnames:
            fieldnames.add(key)
def main():
    # sys.stderr = open('err.txt', 'w+')
    # Redirect error to out, so we can see any errors
    sessionXml = sys.stdin.readline()
    if len(sessionXml) == 0:
        sys.stderr.write("Did not receive a session key from splunkd. "
                         "Please enable passAuth in inputs.conf for this script\n")
        exit(2)

    # parse the xml sessionKey
    start = sessionXml.find('<authToken>') + 11
    stop = sessionXml.find('</authToken>')
    authTok = sessionXml[start:stop]

    # now get tanium credentials - might exit if no creds are available
    username, passwd = getCredentials(authTok)

    sys.stderr = sys.stdout
    configuration_dict = spcli.getConfStanza('tanium', 'taniumserver')
    tanium_server = configuration_dict['taniumhost']

    parser = argparse.ArgumentParser(description='Tanium Splunk NLP Parser')
    """
    parser.add_argument('--tanium', metavar='TANIUM', required=True, help='Tanium server')
    parser.add_argument('--user', metavar='USER', required=True, help='user name')
    parser.add_argument('--password', metavar='PASSWORD', required=True, help='user password')
    """
    parser.add_argument('--question', metavar='QUESTION', required=True, help='nlp question')
    args = vars(parser.parse_args())

    tanium = tanium_server
    user = username
    password = passwd
    question = args['question']

    # end processing args, now instantiate the Tanium class
    my_tanium = TaniumQuestion(tanium, user, password)
    # send the question to Tanium
    response = my_tanium.ask_tanium_a_question(question)

    print("Possible Parsed Translations")
    for element in response:
        print(element)
from splunk.clilib import cli_common as cli

cfg = cli.getConfStanza('setup', 'mystanza')

MY_SERVER_URL = 'http://' + cfg.get('server') + ':' + cfg.get('port')
LOOKUP_INPUT_FIELD = cfg.get('lookup_input_field')
LOOKUP_OUTPUT_FIELDS = cfg.get('lookup_output_fields')
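# The module-level read above assumes a setup.conf (in the app's default/ or
# local/ directory) with a stanza along these lines; all values are
# illustrative placeholders:
#
#   [mystanza]
#   server = myhost.example.com
#   port = 8080
#   lookup_input_field = src_ip
#   lookup_output_fields = host,owner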