def create_logger(in_splunk):
    """Return a logger for this script.

    When running inside Splunk, return the dcutils logger (which writes to
    python.log). Otherwise configure the stdlib root logger to emit
    timestamped records on stdout and return it.
    """
    if in_splunk:
        # dcutils mirrors the stdlib logging API and logs to python.log
        import splunk.mining.dcutils as logging
        return logging.getLogger()

    import logging
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    root = logging.getLogger()
    root.addHandler(handler)
    return root
def post_chatter_message(msg, settings):
    """Post *msg* to the configured Salesforce Chatter news feed.

    Reads the instance server from the [splatter] stanza of *settings*,
    obtains an OAuth access token, and POSTs the URL-encoded message to the
    authenticated user's feed. The HTTP response is discarded.
    """
    dcu.getLogger().info("Message: %s" % msg)
    instance_server = settings.get('splatter', 'instance_server')
    access_token = get_chatter_access_token(instance_server, settings)
    # BUG FIX: do not log the raw bearer token — it is a live credential.
    # Record only whether one was obtained.
    dcu.getLogger().info("Access token obtained: %s" % bool(access_token))
    encoded_chatter_message = urllib.quote(msg)
    # Post to configured user's feed
    url = 'https://%s.salesforce.com/services/data/v26.0/chatter/feeds/news/me/feed-items?text=%s' % (instance_server, encoded_chatter_message)
    # Post to group's feed
    #url = 'https://%s.salesforce.com/services/data/v26.0/chatter/feeds/record/SFDC_GROUP_ID/feed-items?text=%s' % (instance_server, encoded_chatter_message)
    # Post to another user's feed (not yet)
    # NOTE(review): SSL certificate validation is disabled here — confirm
    # this is intentional for the target environment.
    h = httplib2.Http(disable_ssl_certificate_validation=True)
    headers = {'authorization': 'Bearer %s' % access_token}
    h.request(url, 'POST', headers=headers)
def post_chatter_message(msg, settings):
    """Post *msg* to the configured Salesforce Chatter news feed.

    Reads the instance server from the [splatter] stanza of *settings*,
    obtains an OAuth access token, and POSTs the URL-encoded message to the
    authenticated user's feed. The HTTP response is discarded.
    """
    dcu.getLogger().info("Message: %s" % msg)
    instance_server = settings.get('splatter', 'instance_server')
    access_token = get_chatter_access_token(instance_server, settings)
    # BUG FIX: do not log the raw bearer token — it is a live credential.
    # Record only whether one was obtained.
    dcu.getLogger().info("Access token obtained: %s" % bool(access_token))
    encoded_chatter_message = urllib.quote(msg)
    # Post to configured user's feed
    url = 'https://%s.salesforce.com/services/data/v26.0/chatter/feeds/news/me/feed-items?text=%s' % (
        instance_server, encoded_chatter_message)
    # Post to group's feed
    #url = 'https://%s.salesforce.com/services/data/v26.0/chatter/feeds/record/SFDC_GROUP_ID/feed-items?text=%s' % (instance_server, encoded_chatter_message)
    # Post to another user's feed (not yet)
    # NOTE(review): SSL certificate validation is disabled here — confirm
    # this is intentional for the target environment.
    h = httplib2.Http(disable_ssl_certificate_validation=True)
    headers = {'authorization': 'Bearer %s' % access_token}
    h.request(url, 'POST', headers=headers)
def execute():
    """Splunk search command: run a bundled openssl binary and attach its
    stdout to each pipeline result as the 'openssl' field.

    Command-line arguments after the script name are passed through to
    openssl as-is, except key=value pairs which are parsed as options. The
    special option ``certfield`` names the result field whose value (a
    certificate) is fed to openssl on stdin.
    """
    logger = None
    try:
        logger = dcu.getLogger()
    except Exception as e:
        splunk.Intersplunk.generateErrorResults(str("Unable to initialize logging.") + str(e))

    options = {}
    argv = []
    try:
        argv = splunk.Intersplunk.win32_utf8_argv() or sys.argv
        # argv[0] is the script name; everything after it may be a
        # key=value option.
        pattern = re.compile(r'^\s*([^=]+)=(.*)')
        for arg in argv[1:]:
            match = pattern.match(arg)
            if match:
                options[match.group(1)] = match.group(2)
    except Exception as e:
        splunk.Intersplunk.generateErrorResults(str("Did not receive list of options with openssl command." + str(e)))

    try:
        # get the previous search results
        results, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()
        if len(results) == 0:
            # BUG FIX: the original appended str(e) here, but no exception
            # is in scope on this path (NameError under Python 3).
            splunk.Intersplunk.generateErrorResults(str("Did not receive a list of results in the pipeline."))

        # BUG FIX: build the openssl argument list once. The original
        # called argv.pop(0) inside the per-result loop, shortening argv on
        # every iteration and corrupting the command for all results after
        # the first. It also removed items from `args` while iterating it.
        pathToOpenSSL = os.path.join(os.getcwd(), "..", "..", "..", "..", "bin", "openssl")
        args = [pathToOpenSSL] + argv[1:]
        # Strip the certfield=... option itself from the openssl arguments.
        args = [b for b in args if not re.match(r"certfield=.*", b)]

        for result in results:
            if 'certfield' in options:
                cert = result[options['certfield']]
                p1 = subprocess.Popen(["echo", cert], stdout=subprocess.PIPE)
                p2 = subprocess.Popen(args, stdin=p1.stdout, stdout=subprocess.PIPE)
                # Close our copy so openssl sees EOF / SIGPIPE correctly.
                p1.stdout.close()
            else:
                p2 = subprocess.Popen(args, stdout=subprocess.PIPE)
            # BUG FIX: communicate() instead of wait()+read() — waiting
            # first can deadlock when openssl fills the pipe buffer.
            output, _ = p2.communicate()
            result['openssl'] = output

        # output results
        splunk.Intersplunk.outputResults(results)
    except Exception as e:
        stack = traceback.format_exc()
        splunk.Intersplunk.generateErrorResults(str(e))
        if logger:
            logger.error(str(e) + ". Traceback: " + str(stack))
def init_logging(level=20):
    """Return the script's logger.

    Under Splunk, delegate to splunk.mining.dcutils (writes to python.log).
    On the CLI, configure the stdlib root logger to write timestamped
    records to stdout. ``level`` (default 20, i.e. logging.INFO) applies
    only to the CLI branch.
    """
    if run_by_splunk():
        import splunk.mining.dcutils as dcu
        return dcu.getLogger()

    import logging
    stream = logging.StreamHandler(sys.stdout)
    stream.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    root = logging.getLogger()
    root.setLevel(level)
    root.addHandler(stream)
    return root
import csv import splunk.rest as rest import json import collections import os, re, sys, urllib import splunk.Intersplunk, splunk.mining.dcutils as dcu logger = dcu.getLogger() results, dummyresults, settings = splunk.Intersplunk.getOrganizedResults() #(isgetinfo, sys.argv) = intersplunk.isGetInfo(sys.argv) if len(sys.argv) < 2: splunk.Intersplunk.generateErrorResults( "Please specify a valid incident_id field") #if isgetinfo: # intersplunk.outputInfo(False, False, True, False, None, True) # # outputInfo automatically calls sys.exit() sessionKey = settings.get("sessionKey", None) incident_id_field = sys.argv[1] for row in results: if incident_id_field in row: query = {'incident_id': row[incident_id_field]} uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incident_results?query=%s' % urllib.quote(
# Do NOT modify anything below this line unless you are # certain of the ramifications of the changes ######################################################### import splunk.Intersplunk # so you can interact with Splunk import splunk.entity as entity # for splunk config info import urllib2 # make http requests to PAN firewall import sys # for system params and sys.exit() import re # regular expressions checks in PAN messages import splunk.mining.dcutils as dcu import ConfigParser # to parse out the pa.conf file from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path # to grab the default splunk path import splunk.rest import json logger = dcu.getLogger() # set path of config panconf = make_splunkhome_path( ["etc", "apps", "SA-Mitigation", "default", "pan.conf"]) # read config file config = ConfigParser.RawConfigParser() config.read(panconf) #Assign PA IP PAN = config.get('PAN', 'IP') #Assign BADACTORS group name BADACTORS = config.get('PAN', 'GROUP') ## Major props to Ledion. copying his function, verbatim and then adding comments and traceback and logging ## http://blogs.splunk.com/2011/03/15/storing-encrypted-credentials/
class GenerateMitreCommand(GeneratingCommand):
    """Generating command that downloads the MITRE enterprise ATT&CK
    bundle and rebuilds the 'mitredict' KVStore collection from it.

    For every attack-pattern (technique) object it resolves the tactics
    via kill_chain_phases, and — through the relationship objects — the
    threat groups and software associated with the technique.
    """

    logger = dcu.getLogger()

    def generate(self):
        self.logger.info("SA-RBA genmitrelookup.py Starting")
        info = self.search_results_info  # capture the getinfo context sent by splunkd
        self.logger.info("SA-RBA search_results_info: {}".format(info))
        url = 'https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json'
        self.logger.info(
            "SA-RBA gen_mitre_lookup.py requesting enterprise ATT&CK dict from url:{}"
            .format(url))
        req = six.moves.urllib.request.Request(url)
        search_results = six.moves.urllib.request.urlopen(req)
        jsonData = json.loads(search_results.read())
        self.logger.info(
            "SA-RBA retrieved {} objects from the enterprise ATT&CK dict".
            format(len(jsonData['objects'])))

        # Grab all mitre relationship context from the mitre dict.
        # Techniques will map to these.
        relationships = []
        for r in jsonData["objects"]:
            if r['type'] == 'relationship' and r['source_ref'].startswith(
                    "intrusion-set"):
                relationships.append(
                    relationship(r['id'], r['target_ref'], r['source_ref']))
            if r['type'] == 'relationship' and r['source_ref'].startswith(
                    "malware--"):
                relationships.append(
                    relationship(r['id'], r['target_ref'], r['source_ref']))
            if r['type'] == 'relationship' and r['source_ref'].startswith(
                    "tool--"):
                relationships.append(
                    relationship(r['id'], r['target_ref'], r['source_ref']))
        self.logger.info(
            "SA-RBA retrieved {} relationships from the enterprise ATT&CK dict"
            .format(len(relationships)))

        # The below list will hold every individual (sub)technique stored in
        # the result{} dict; it is dumped into the KVStore collection at the end.
        finalResults = []

        # Grab all mitre software context from the mitre dict.
        # Techniques will map to these via relationships.
        softwares = []
        for s in jsonData["objects"]:
            x_mitre_platforms = "none"
            x_mitre_aliases = "none"
            if s['type'] == 'malware' or s['type'] == 'tool':
                if 'x_mitre_platforms' in s:
                    x_mitre_platforms = ','.join(
                        s['x_mitre_platforms'])  # Convert the list to a string
                if 'x_mitre_aliases' in s:
                    x_mitre_aliases = s['x_mitre_aliases']
                softwares.append(
                    software(s['id'],
                             s['external_references'][0]['external_id'],
                             s['name'], s['labels'], x_mitre_platforms,
                             s['type'], s['external_references'][0]['url'],
                             x_mitre_aliases))
        self.logger.info(
            "SA-RBA retrieved {} software object from the enterprise ATT&CK dict"
            .format(len(softwares)))

        # Grab all mitre threat group context from the mitre dict.
        # Techniques will map to these via relationships.
        groups = []
        for g in jsonData["objects"]:
            aliases = "none"
            description = "none"
            x_mitre_version = "none"
            if g['type'] == 'intrusion-set':
                if 'aliases' in g:
                    aliases = ','.join(
                        g['aliases'])  # Convert the list to a string
                if 'description' in g:
                    description = g['description']
                if 'x_mitre_version' in g:
                    x_mitre_version = g['x_mitre_version']
                groups.append(
                    group(g['id'], g['external_references'][0]['external_id'],
                          g['name'], aliases, description, x_mitre_version,
                          g['external_references'][0]['url']))
        self.logger.info(
            "SA-RBA retrieved {} groups from the enterprise ATT&CK dict".
            format(len(groups)))

        # Build a mapping of mitre tactic names to ids
        tactics = {}
        for i in jsonData["objects"]:
            if i['type'] == 'x-mitre-tactic':
                tactics.update({
                    i['x_mitre_shortname']:
                    i['external_references'][0]['external_id']
                })
        self.logger.info(
            "SA-RBA Tactics discovered in the mitre dict: {}".format(tactics))

        # Grab all mitre technique context from the mitre dict
        for i in jsonData["objects"]:
            if i['type'] == 'attack-pattern':
                tactic_name = []
                tactic_name_id = []
                if 'kill_chain_phases' in i:
                    for x in i['kill_chain_phases']:
                        tactic_name.append(x['phase_name'])
                        tactic_name_id.append(tactics[x['phase_name']])
                result = {}
                result["mitre_technique_id"] = i['external_references'][0][
                    'external_id']
                result["mitre_tactic"] = tactic_name
                result["mitre_tactic_id"] = tactic_name_id
                result["mitre_technique"] = i['name']
                if 'description' in i:
                    result["mitre_description"] = i['description']
                else:
                    result["mitre_description"] = ""
                result["mitre_url"] = i['external_references'][0]['url']
                if "x_mitre_detection" in i:
                    result["mitre_detection"] = i['x_mitre_detection']
                else:
                    result["mitre_detection"] = ""
                if "revoked" in i:
                    if str(i['revoked']).lower() == str("true"):
                        result["mitre_description"] = "revoked"
                        result["mitre_tactic"] = "revoked"

                # lets add threat group and software association based on the technique
                group_name = []
                group_alias = []
                group_url = []
                group_external_id = []
                software_name = []
                software_type = []
                software_platform = []
                software_url = []
                for r in relationships:
                    if r.attack_pattern == i['id'] and r.source_ref.startswith(
                            "intrusion-set"):
                        for g in groups:
                            if r.source_ref == g.intrusion_id:
                                group_name.append(g.name)
                                group_alias.append(g.aliases)
                                group_url.append(g.url)
                                group_external_id.append(g.external_id)
                    if r.attack_pattern == i['id'] and (
                            r.source_ref.startswith("malware--")
                            or r.source_ref.startswith("tool--")):
                        for s in softwares:
                            if r.source_ref == s.software_id:
                                software_name.append(s.name)
                                software_type.append(s.type)
                                software_platform.append(s.platform)
                                software_url.append(s.url)
                result['mitre_threat_group_name'] = group_name
                result['mitre_threat_group_aliases'] = group_alias
                result['mitre_threat_group_url'] = group_url
                result['mitre_threat_group_id'] = group_external_id
                result['mitre_software_name'] = software_name
                result['mitre_software_type'] = software_type
                result['mitre_software_platform'] = software_platform
                result['mitre_software_url'] = software_url
                finalResults.append(result)

        # Get the KVStore ready to roll
        service = client.connect(token=info.auth_token, owner='nobody')
        collection_name = "mitredict"
        collection = service.kvstore[collection_name]
        if collection_name in service.kvstore:
            self.logger.info(
                "SA-RBA KVStore Collection {} Found".format(collection_name))
            self.logger.info(
                "SA-RBA KVStore Deleting all data from collection {}".format(
                    collection_name))
            collection.data.delete()
            # Poll until the collection is empty (an empty query serializes
            # to "[]", i.e. fewer than 5 bytes).
            json_obj = json.dumps(collection.data.query())
            len_obj = len(json_obj.encode("utf-8"))
            while len_obj > 4:
                time.sleep(2)
                # BUG FIX: the original formatted undefined name 'obj_len',
                # raising NameError the first time this loop body ran.
                self.logger.info(
                    "SA-RBA KVStore Deleting. Size of the collection:{}".
                    format(len_obj))
                json_obj = json.dumps(collection.data.query())
                len_obj = len(json_obj.encode("utf-8"))
        else:
            self.logger.info("SA-RBA KVStore Collection {} NOT Found".format(
                collection_name))

        # write it to the KVStore
        for item in finalResults:
            # NOTE(review): 'mitre_threat_group_id' is computed above but is
            # not written to the collection — confirm whether that is intended.
            collection.data.insert(json.dumps({
                "mitre_technique_id": item["mitre_technique_id"],
                "mitre_tactic": item["mitre_tactic"],
                "mitre_tactic_id": item["mitre_tactic_id"],
                "mitre_technique": item["mitre_technique"],
                "mitre_description": item["mitre_description"],
                "mitre_url": item["mitre_url"],
                "mitre_detection": item["mitre_detection"],
                "mitre_threat_group_name": item["mitre_threat_group_name"],
                "mitre_threat_group_aliases": item["mitre_threat_group_aliases"],
                "mitre_threat_group_url": item["mitre_threat_group_url"],
                "mitre_software_name": item["mitre_software_name"],
                "mitre_software_type": item["mitre_software_type"],
                "mitre_software_platform": item["mitre_software_platform"],
                "mitre_software_url": item["mitre_software_url"]}))
        self.logger.info("SA-RBA genmitrelookup.py finished")
        yield {
            '_time': time.time(),
            '_raw': 'SA-RBA genmitrelookup.py finished'
        }
"""Common functions used by all custom searchbar commands""" import os import sys import traceback import json from environment import run_by_splunk '''Import different logging library depending if script was run by Splunk or on cli''' if run_by_splunk(): import splunk.Intersplunk import splunk.mining.dcutils as logging import splunk.entity as entity # for splunk config info from splunk import ResourceNotFound logger = logging.getLogger() else: import logging # python 2.6 doesn't have a null handler, so create it if not hasattr(logging, 'NullHandler'): class NullHandler(logging.Handler): def emit(self, record): pass logging.NullHandler = NullHandler # set logging to nullhandler to prevent exceptions if logging not enabled logging.getLogger().addHandler(logging.NullHandler()) logger = logging.getLogger() ch = logging.StreamHandler(sys.stdout)
# an empty dictionary will be used to hold system values settings = dict() # results contains the data from the search results and settings contains the sessionKey that we can use to talk to splunk results,unused1,settings = splunk.Intersplunk.getOrganizedResults() args, kwargs = splunk.Intersplunk.getKeywordsAndOptions() #logger.debug(settings) #For debugging # get the sessionKey sessionKey = settings['sessionKey'] try: if 'debug' in kwargs: DEBUG = kwargs['debug'] # setup the logger. $SPLUNK_HOME/var/log/splunk/python.log logger = dcu.getLogger().getChild('retrieveNewApps') if DEBUG: logger.setLevel(DEBUG) existing_apps = [] for app in results: existing_apps.append(str(app['app{@name}'])) results = [] logger.debug("Existing apps already known and considered: %s" % (len(existing_apps),)) logger.debug(existing_apps) logger.debug("Getting new Apps from Palo Alto Networks") resp = retrieveNewApps() logger.debug("Apps retrieved")
import splunk.Intersplunk as si from splunk.mining.dcutils import getLogger import os, re, glob logger = getLogger() DEFAULT_SEPARATOR = '.' DEFAULT_FOI = 1 DEFAULT_REVERSE = False DEFAULT_THREADID = 0 messages = {} fields = ['file', 'fileorder', 'threadno', 'threadaddr', 'threadid', 'stack'] def parse_raw_pstack(pstack_file, selected_thread_id=DEFAULT_THREADID, reverse=DEFAULT_REVERSE, separator=DEFAULT_SEPARATOR, fileorderindex=DEFAULT_FOI): global messages output = [] linecount = 0 ''' frame re group 1 (\d+) captures the frame no group 2 (0x[0-9a-f]+) captures frome addr group 3 ([^(]*) captures class::function name group 4 (\([^(]*\)) captures arguments group 5 (from )? is ignore group 6 ([\w\.\/\(\)]*) captures external library name or arguments ''' frame_re = r'#(\d+)\s+(0x[0-9a-f]+) in ([^(]*)(\([^(]*\))\s(from )?([\w\.\/\(\)]*)'
# an empty dictionary will be used to hold system values settings = dict() # results contains the data from the search results and settings contains the sessionKey that we can use to talk to splunk results, unused1, settings = splunk.Intersplunk.getOrganizedResults() args, kwargs = splunk.Intersplunk.getKeywordsAndOptions() #logger.debug(settings) #For debugging # get the sessionKey sessionKey = settings['sessionKey'] try: if 'debug' in kwargs: DEBUG = kwargs['debug'] # setup the logger. $SPLUNK_HOME/var/log/splunk/python.log logger = dcu.getLogger().getChild('retrieveNewApps') if DEBUG: logger.setLevel(DEBUG) existing_apps = [] for app in results: existing_apps.append(str(app['app{@name}'])) results = [] logger.debug("Existing apps already known and considered: %s" % (len(existing_apps), )) logger.debug(existing_apps) logger.debug("Getting new Apps from Palo Alto Networks") resp = retrieveNewApps()
"""Common functions used by all custom searchbar commands""" import os import sys import traceback from environment import run_by_splunk # Import different logging library depending if script was run by Splunk or on cli if run_by_splunk(): import splunk.Intersplunk import splunk.mining.dcutils as logging import splunk.entity as entity # for splunk config info from splunk import ResourceNotFound logger = logging.getLogger() else: import logging # python 2.6 doesn't have a null handler, so create it if not hasattr(logging, 'NullHandler'): class NullHandler(logging.Handler): def emit(self, record): pass logging.NullHandler = NullHandler # set logging to nullhandler to prevent exceptions if logging not enabled logging.getLogger().addHandler(logging.NullHandler()) logger = logging.getLogger() ch = logging.StreamHandler(sys.stdout) logger.addHandler(ch) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
HTTP_PROXY = {} # Default fields that contain IP addresses and should be tagged if they exist IP_FIELDS = ['src_ip', 'dst_ip', 'ip'] # Enable debugging (script is otherwise silent unless there is an error) DEBUG = False ######################################################### # Do NOT modify anything below this line unless you are # certain of the ramifications of the changes ######################################################### import splunk.mining.dcutils as dcu logger = dcu.getLogger().getChild('panTag') logger.setLevel(20) try: import splunk.Intersplunk # so you can interact with Splunk import splunk.entity as entity # for splunk config info import urllib2 # make http requests to PAN firewall import sys # for system params and sys.exit() import os import re # regular expressions checks in PAN messages import traceback libpath = os.path.dirname(os.path.abspath(__file__)) sys.path[:0] = [os.path.join(libpath, 'lib')] import pandevice import pan.xapi