def read_license_status():
    # The license validator code in LicenseAlert.py periodically writes the
    # license status to LicenseStatus.json
    licenseStatusJson = {
        'status': 'valid',
        'message': 'None',
        'timebomb_status_code': '500'
    }

    try:
        with open(
                make_splunkhome_path([
                    'etc', 'apps', 'splunk_app_windows_infrastructure',
                    'bin', 'LicenseStatus.json'
                ]), 'r') as licenseStatusFile:
            licenseStatusJson = json.load(licenseStatusFile)

        with open(
                make_splunkhome_path([
                    'etc', 'apps', 'splunk_app_windows_infrastructure',
                    'bin', 'TimeBombStatus.json'
                ]), 'r') as timeBombStatusFile:
            timeBombStatusJson = json.load(timeBombStatusFile)
            licenseStatusJson['timebomb_status_code'] = timeBombStatusJson['status']
    except Exception:
        # Fall back to the defaults above if either status file is missing
        # or unreadable
        pass

    return licenseStatusJson

def get_driver(self):
    profile = self.get_firefox_profile()
    log_path = make_splunkhome_path(['var', 'log', 'splunk', 'geckodriver.log'])

    if profile is not None:
        driver = webdriver.Firefox(profile, log_path=log_path)
    else:
        driver = webdriver.Firefox(log_path=log_path)

    return driver

def ipinfo(ip_add):
    local_conf = splunk_lib_util.make_splunkhome_path(
        ["etc", "apps", "ipinfo_app", "local", "ip_info_setup.conf"])
    default_conf = splunk_lib_util.make_splunkhome_path(
        ["etc", "apps", "ipinfo_app", "default", "ip_info_setup.conf"])

    config = ConfigParser()
    config.read([default_conf, local_conf])
    url = config.get("ip_info_configuration", "api_url")  # read but superseded by the hard-coded endpoint below
    token = config.get("ip_info_configuration", "api_token")
    enable = config.get("ip_info_configuration", "proxy_enable")
    proxy_url = config.get("ip_info_configuration", "proxy_url")
    disable_ssl = config.get("ip_info_configuration", "disable_ssl")

    cert_path = splunk_lib_util.make_splunkhome_path(
        ["etc", "apps", "ipinfo_app", "appserver", "static", "ipinfo.cert"])
    cert_exists = os.path.exists(cert_path)

    # An empty disable_ssl setting means SSL verification stays enabled
    disable_ssl_request = (disable_ssl == "")

    # When verification is enabled and a bundled certificate exists, hand the
    # certificate path to requests as the CA bundle
    if disable_ssl_request and cert_exists:
        disable_ssl_request = cert_path

    url = "https://ipinfo.io/" + ip_add + "/privacy"
    param = {"token": token}
    response_result = {}

    try:
        if enable == "No":
            response = requests.request("GET", url, verify=disable_ssl_request,
                                        params=param)
        else:
            proxies = {'https': proxy_url}
            response = requests.request("GET", url, verify=disable_ssl_request,
                                        params=param, proxies=proxies)
        response_result = json.loads(response.text)
    except Exception as e:
        logger.info(e)

    result = {}
    result["ip"] = ip_add
    result["vpn"] = response_result.get("vpn", "")
    result["proxy"] = response_result.get("proxy", "")
    result["tor"] = response_result.get("tor", "")
    result["hosting"] = response_result.get("hosting", "")
    return result

def __init__(self):
    """Set up a logger for the REST handler."""
    global loggers
    self.config = self.get_config_on_memory()
    self.debug_enabled = self.config['log.level'] == 'debug'
    logger_key = 'splunk.appserver.%s.controllers.logs' % _APPNAME

    if loggers.get(logger_key):
        self.logger = loggers.get(logger_key)
    else:
        self.logger = logging.getLogger(logger_key)
        try:
            self.logger.propagate = False
            self.logger.setLevel(logging.DEBUG)
            self.file_handler = logging.handlers.RotatingFileHandler(
                make_splunkhome_path(
                    ['var', 'log', 'splunk', 'SplunkAppForWazuh.log']),
                maxBytes=100000000,
                backupCount=50
            )
            self.formatter = logging.Formatter(
                "%(levelname)s: %(asctime)s: '%(message)s'",
                "%Y/%m/%d %H:%M:%S")
            self.file_handler.setFormatter(self.formatter)
            self.logger.addHandler(self.file_handler)
            # Cache the configured logger so the handler is not re-added
            loggers[logger_key] = self.logger
        except Exception as e:
            self.error('[log.py][constructor] %s' % (e))
            raise e

def setupLogger(logger=None,
                log_format='%(asctime)s %(levelname)s [ModInput] %(message)s',
                level=logging.DEBUG, log_name="modinput.log",
                logger_name="modinput"):
    """
    Setup a logger suitable for splunkd consumption
    """
    if logger is None:
        logger = logging.getLogger(logger_name)

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', log_name]),
        maxBytes=2500000, backupCount=5)
    formatter = logging.Formatter(log_format)
    file_handler.setFormatter(formatter)

    logger.handlers = []
    logger.addHandler(file_handler)

    logger.debug("Initialized ModularInput Logger")
    return logger

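# A hypothetical usage sketch for the setupLogger helper above; the log file
# name, logger name, and messages are illustrative assumptions, not values
# taken from any particular app.
log = setupLogger(log_name="my_modinput.log", logger_name="my_modinput")
log.info("modular input started")
log.debug("checkpoint loaded, count=%d", 42)
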
def logger(self):
    """
    A property that returns the logger.
    """
    # Make a logger unless it already exists
    if self._logger is not None:
        return self._logger

    logger = logging.getLogger(self.logger_name)

    # Prevent the log messages from being duplicated in the python.log file:
    logger.propagate = False
    logger.setLevel(self.log_level)

    file_handler = handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', self.logger_name + '.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    self._logger = logger
    return self._logger

def upload(self, **kargs):
    if cherrypy.request.method == 'GET':
        return self.render_template('upload_image:upload.html', {})

    image = kargs.get('image', None)
    tour_name = kargs.get('tourName', None)
    filename = kargs.get('filename', None)

    if image is not None:
        try:
            tempPath = util.make_splunkhome_path([
                'etc', 'apps', 'tour_makr', 'appserver', 'static', 'img',
                tour_name
            ])

            if not os.path.exists(tempPath):
                os.makedirs(tempPath)

            # Reject filenames that could be used for a path traversal
            if filename.find('/') > -1 or filename.find('\\') > -1 \
                    or filename.startswith('.'):
                return 'Filename cannot contain / or \\ character or start with . filename="%s"' % filename

            newPath = os.path.join(tempPath, filename)

            with open(newPath, 'wb') as newFile:
                copyfileobj(image.file, newFile)

            return "Successfully stored %s" % filename
        except Exception as e:
            #raise cherrypy.HTTPError(200, 'Failed to upload the file %s. Exception %s' % (filename, str(e)))
            return 'Failed to upload the file %s. Exception %s' % (
                filename, str(e))

def getBackupDirectory(self, lookup_file, namespace, owner=None,
                       resolved_lookup_path=None):
    """
    Get the backup directory where the lookup should be stored
    """
    if owner is None:
        owner = 'nobody'

    # Identify the current path of the given lookup file
    if resolved_lookup_path is None:
        resolved_lookup_path = lookupfiles.SplunkLookupTableFile.get(
            lookupfiles.SplunkLookupTableFile.build_id(
                lookup_file, namespace, owner)).path

    # Determine what the backup directory should be
    backup_directory = make_splunkhome_path([
        os.path.dirname(resolved_lookup_path), "lookup_file_backups",
        namespace, owner, self.escapeFilename(lookup_file)
    ])

    # Make the backup directory, if necessary
    if not os.path.exists(backup_directory):
        os.makedirs(backup_directory)

    logger.debug("Backup directory is: " + backup_directory)

    return backup_directory

def get_temporary_lookup_file(prefix=None, basedir=None):
    '''Create a temporary file and return the filehandle.
    Exceptions will be passed to caller.

    @param prefix: A prefix for the file (default is "lookup_gen_<date>_<time>_")
    @param basedir: The base directory for the file (default is
        $SPLUNK_HOME/var/run/splunk/lookup_tmp, the staging directory for use
        in creating new lookup table files).
    '''
    if prefix is None:
        prefix = 'lookup_gen_' + time.strftime('%Y%m%d_%H%M%S') + '_'

    if basedir is None:
        basedir = make_splunkhome_path(['var', 'run', 'splunk', 'lookup_tmp'])

    if not os.path.isdir(basedir):
        os.mkdir(basedir)

    if os.path.isdir(basedir):
        return tempfile.NamedTemporaryFile(prefix=prefix, suffix='.txt',
                                           dir=basedir, delete=False)
    else:
        return None

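# A minimal usage sketch for get_temporary_lookup_file above; the field names
# and rows are illustrative assumptions, and Python 2 file semantics are
# assumed, matching the surrounding snippets. The handle is created with
# delete=False, so the caller owns the file afterwards.
tmp = get_temporary_lookup_file(prefix='example_')
if tmp is not None:
    tmp.write('ip,risk\n')          # hypothetical header row
    tmp.write('10.0.0.1,high\n')    # hypothetical data row
    tmp.close()
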
def create_lookup_table(filename, lookup_file, namespace, owner, key):
    '''
    Create a new lookup file.

    @param filename: The full path to the replacement lookup table file.
    @param lookup_file: The lookup FILE name (NOT the stanza name)
    @param namespace: A Splunk namespace to limit the search to.
    @param owner: A Splunk user.
    @param key: A Splunk session key.
    @return: Boolean success status.

    WARNING: "owner" should be "nobody" to update a public lookup table file;
    otherwise the file will be replicated only for the admin user.
    '''
    # Create the temporary location path
    lookup_tmp = make_splunkhome_path(['var', 'run', 'splunk', 'lookup_tmp'])
    destination_lookup_full_path = os.path.join(lookup_tmp, lookup_file)

    # Move the file to the temporary location
    shutil.move(filename, destination_lookup_full_path)

    # Create the URL for the REST call
    url = '/servicesNS/%s/%s/data/lookup-table-files' % (owner, namespace)
    postargs = {
        'output_mode': 'json',
        'eai:data': str(destination_lookup_full_path),
        'name': lookup_file
    }

    # Perform the call
    rest.simpleRequest(
        url, postargs=postargs, sessionKey=key, raiseAllErrors=True)

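# A hedged end-to-end sketch tying the two helpers above together: stage a
# file with get_temporary_lookup_file, then register it through the REST
# endpoint with create_lookup_table. The lookup name, app, contents, and
# session_key are hypothetical and must come from the caller's context.
tmp = get_temporary_lookup_file()
if tmp is not None:
    tmp.write('user,count\n')   # hypothetical contents
    tmp.close()
    create_lookup_table(tmp.name, 'example_lookup.csv',
                        namespace='search', owner='nobody', key=session_key)
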
def setup_rotating_log_file():
    try:
        SPLUNK_HOME_LOG_PATH = make_splunkhome_path(["var", "log", "splunk"])
        LOG_FILENAME = ''

        # check to see if the SPLUNK_HOME based log path exists
        if not os.path.exists(SPLUNK_HOME_LOG_PATH):
            # check to see if the relative path based log path exists
            SPLUNK_BASE = os.path.abspath(
                os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
            SPLUNK_BASE_LOG_PATH = os.path.join(SPLUNK_BASE, 'var', 'log', 'splunk')

            if not os.path.exists(SPLUNK_BASE_LOG_PATH):
                # no usable log path; disable logging with a noop handler so
                # every code path returns a handler
                return logging.NullHandler()
            else:
                LOG_FILENAME = os.path.join(SPLUNK_BASE_LOG_PATH,
                                            'document_builder.log')
        else:
            LOG_FILENAME = os.path.join(SPLUNK_HOME_LOG_PATH,
                                        'document_builder.log')

        # a valid log file path exists; rotate at 10 MB
        file_handler = logging.handlers.RotatingFileHandler(
            LOG_FILENAME, maxBytes=10240000, backupCount=10)
        LOGGING_FORMAT = "%(asctime)s %(levelname)-s\t%(name)s:%(lineno)d - %(message)s"
        file_handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
        return file_handler
    except Exception:
        # disable logging with noop handler
        return logging.NullHandler()

def make_ta_for_indexers(username, password):
    '''
    Splunk_TA_ForIndexers spl generation for ES 4.2.0 and up
    '''
    if not username or not password:
        raise Exception("Splunk username and password must be defined.")

    sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'bin']))
    session_key = auth.getSessionKey(username, password)

    from app_maker.make_index_time_properties import makeIndexTimeProperties

    try:
        archive = makeIndexTimeProperties(
            app_info, session_key, include_indexes=include_indexes,
            imported_apps_only=imported_apps_only, namespace=namespace)
    except TypeError:
        # Some versions removed the imported_apps_only kwarg; older versions
        # still need it, which is why we try with it first and fall back to
        # calling without it here.
        archive = makeIndexTimeProperties(app_info, session_key,
                                          include_indexes=include_indexes,
                                          namespace=namespace)

    print(archive)
    assert archive.startswith(spl_location)

def acquire_lock(lockname=None):
    '''
    Check if another instance of this script is running by checking the pid
    of the old process.

    ARGS:
        lockname: if lockname is None, locks on the script name;
                  otherwise locks on lockname

    Return: pid file name if successful
    Raises: ScriptAlreadyRunningException if another instance is running
    '''
    try:
        pidfile = os.path.split(sys.argv[0])[1] + ".pid"
        if lockname is not None:
            pidfile = lockname.replace("/", "_").replace(":", "_") + "." + pidfile

        pidfile = os.path.join(
            make_splunkhome_path(['var', 'log', 'splunk', pidfile]))

        if os.path.isfile(pidfile):
            pid = get_processpid_formfile(pidfile)
            if check_pid_running(pid):
                logger.warn(
                    "Instance of this script is already running with pid:"
                    + pid + ".. exiting.")
                raise ScriptAlreadyRunningException(
                    "Another instance of scripts is running, with pid:" + pid)

        with open(pidfile, 'w+') as f:
            f.write(str(os.getpid()))

        return pidfile
    except ScriptAlreadyRunningException:
        raise
    except Exception as e:
        print_errinfo_log_stmt("General error acquiring lock")
        logger.exception(e)

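# A hypothetical usage pattern for acquire_lock above: take the lock, run the
# work, and remove the pid file on the way out. The cleanup step is an
# assumption; the helper itself never deletes the file it creates.
import os
import sys

pidfile = None
try:
    pidfile = acquire_lock('my_script')
    # ... do the actual work here ...
except ScriptAlreadyRunningException:
    sys.exit(0)
finally:
    if pidfile and os.path.isfile(pidfile):
        os.remove(pidfile)
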
def setupLogger(logger=None,
                log_format="%(asctime)s %(levelname)s [" + APP_NAME + "] %(message)s",
                level=logging.INFO, log_name=APP_NAME + ".log",
                logger_name=APP_NAME):
    """
    Setup a logger suitable for splunkd consumption
    """
    if logger is None:
        logger = logging.getLogger(logger_name)

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(["var", "log", "splunk", log_name]),
        maxBytes=2500000, backupCount=5)
    formatter = logging.Formatter(log_format)
    file_handler.setFormatter(formatter)

    logger.handlers = []
    logger.addHandler(file_handler)

    return logger

def __init__(self, app_name=None, scheme={}, cim_fields=None):
    try:
        self._splunk_home = None

        if app_name is None:
            raise Exception("App Name not passed to Modular Input")
        self._app_name = app_name

        self._use_cim = False
        self._setup_logging()
        self.log.debug("Splunk App Name set: %s" % self._app_name)

        self.source(app_name)
        self.sourcetype(app_name)
        self.host(app_name)

        if self._splunk_home is None:
            self._splunk_home = make_splunkhome_path([""])
        if self._splunk_home is None:
            raise Exception("SPLUNK HOME UNABLE TO BE SET")
        self.log.debug("Splunk Home set: %s" % self._splunk_home)

        self._app_home = os.path.join(self._splunk_home, "etc", "apps",
                                      self._app_name)
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        myJson = "message=\"{}\" exception_type=\"{}\" exception_arguments=\"{}\" filename=\"{}\" line=\"{}\" input=\"{}\"".format(
            str(e), type(e).__name__, e, fname, exc_tb.tb_lineno,
            self.get_config("name"))
        self.log.error(myJson)
        raise e

def _readFilesFromFolder(self, folderName, datatype):
    """Read data structures from folder. The datatype field is used by the
    host/vm perf view to read host and vm data."""
    folderPath = make_splunkhome_path(
        ['etc', 'apps', 'search_activity', 'local', 'data', folderName])
    try:
        structDict = {}
        for file in os.listdir(folderPath):
            with contextlib.closing(
                    bz2.BZ2File(folderPath + "/" + file, 'rb')) as f:
                dataDict = json.load(f)
                for key, val in dataDict.items():
                    structDict[key] = json.loads(val)
                    logger.debug('Key in %s', key)
                    if key == 'idFieldsHash':
                        idFieldsHash = json.loads(val)
                        logger.debug('idFieldsHash %s', idFieldsHash)
                        structDict['hostHash'] = idFieldsHash['hostHash']
                        structDict['moidHash'] = idFieldsHash['moidHash']

        if datatype == 'host':
            self.hostDataDict = structDict
        elif datatype == 'vm':
            self.vmDataDict = structDict
        else:
            self.datastructDict = structDict
        return True
    except Exception as e:
        logger.error('Could not read files from folder={0}, for datatype={1} due to {2}'.format(folderPath, datatype, e))
        msg = "[SOLNSelector_read_structures] Couldn't read files at " + folderPath
        raise SOLNSelectorError(status="404", message=msg)

def setup_logger(name, level=logging.WARNING, maxBytes=25000000, backupCount=5):
    '''
    Set up a default logger.

    @param name: The log file name.
    @param level: The logging level.
    @param maxBytes: The maximum log file size before rollover.
    @param backupCount: The number of log files to retain.
    '''
    # Strip ".py" from the log file name if auto-generated by a script.
    if '.py' in name:
        name = name.replace(".py", "")

    logfile = make_splunkhome_path(["var", "log", "splunk", name + '.log'])
    logger = logging.getLogger(name)
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.level = level

    # Prevent re-adding handlers to the logger object, which can cause
    # duplicate log lines.
    handler_exists = any([True for h in logger.handlers
                          if h.baseFilename == logfile])

    if not handler_exists:
        file_handler = logging.handlers.RotatingFileHandler(
            logfile, mode='a', maxBytes=maxBytes, backupCount=backupCount)
        formatter = logging.Formatter(
            '%(asctime)s %(levelname)s pid=%(process)d tid=%(threadName)s file=%(filename)s:%(funcName)s:%(lineno)d | %(message)s')
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    return logger

class StageContentCommand(GeneratingCommand):
    ctf_users_staged = Option(require=True)
    ctf_questions_staged = Option(require=True)
    ctf_answers_staged = Option(require=True)
    ctf_hints_staged = Option(require=True)

    LOOKUPS_DIR = make_splunkhome_path(
        ['etc', 'apps', 'SA-ctf_scoreboard_admin', 'lookups'])

    def generate(self):
        urllib.urlretrieve(
            self.ctf_users_staged,
            "%s/%s" % (self.LOOKUPS_DIR, 'ctf_users_staged.csv'))
        yield {'_time': time.time(), '_raw': self.ctf_users_staged}

        urllib.urlretrieve(
            self.ctf_questions_staged,
            "%s/%s" % (self.LOOKUPS_DIR, 'ctf_questions_staged.csv'))
        yield {'_time': time.time(), '_raw': self.ctf_questions_staged}

        urllib.urlretrieve(
            self.ctf_answers_staged,
            "%s/%s" % (self.LOOKUPS_DIR, 'ctf_answers_staged.csv'))
        yield {'_time': time.time(), '_raw': self.ctf_answers_staged}

        urllib.urlretrieve(
            self.ctf_hints_staged,
            "%s/%s" % (self.LOOKUPS_DIR, 'ctf_hints_staged.csv'))
        yield {'_time': time.time(), '_raw': self.ctf_hints_staged}

def __init__(self, app_name=None, configuration={}):
    """Construct an instance of the RESTClient"""
    try:
        self._splunk_home = None

        if app_name is None:
            raise Exception("App Name not sent to RESTClient")
        self._app_name = app_name

        if self._splunk_home is None:
            self._splunk_home = make_splunkhome_path([""])
        if self._splunk_home is None:
            raise Exception("SPLUNK HOME UNABLE TO BE SET")

        self._setup_logging()
        self._useproxy = False
        self._hostname = configuration["hostname"]
    except KeyError as ne:
        self._log.warn(
            "action=failure msg=\"required argument not passed\" argument=\"%s\" " % ne)
        raise ValueError("Required argument not passed: %s" % ne)
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        myJson = {
            "msg": str(e),
            "exception_type": "%s" % type(e).__name__,
            "exception_arguments": "%s" % e,
            "filename": fname,
            "line": exc_tb.tb_lineno
        }
        self.error(self._build_string(myJson), e)

def setup_logging(log_name, level_name="INFO"):
    level_name = level_name.upper() if level_name else "INFO"
    loglevel_map = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARN": logging.WARN,
        "ERROR": logging.ERROR,
    }
    loglevel = loglevel_map.get(level_name, logging.INFO)

    logfile = make_splunkhome_path(["var", "log", "splunk", "%s.log" % log_name])
    logger = logging.getLogger(log_name)
    logger.propagate = False
    logger.setLevel(loglevel)

    # Avoid attaching a second handler for the same file
    handler_exists = any([True for h in logger.handlers
                          if h.baseFilename == logfile])
    if not handler_exists:
        file_handler = logging.handlers.RotatingFileHandler(
            logfile, mode="a", maxBytes=104857600, backupCount=5)
        fmt_str = "%(asctime)s %(levelname)s %(thread)d - %(message)s"
        formatter = logging.Formatter(fmt_str)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    return logger

def get_backup_directory(self, session_key, lookup_file, namespace,
                         owner=None, resolved_lookup_path=None):
    """
    Get the backup directory where the lookup should be stored
    """
    if owner is None:
        owner = 'nobody'

    # Identify the current path of the given lookup file
    if resolved_lookup_path is None:
        resolved_lookup_path = SplunkLookupTableFile.get(
            SplunkLookupTableFile.build_id(lookup_file, namespace, owner),
            sessionKey=session_key).path

    # Determine what the backup directory should be
    backup_directory = make_splunkhome_path([
        os.path.dirname(resolved_lookup_path), "lookup_file_backups",
        namespace, owner, escape_filename(lookup_file)
    ])

    # Make the backup directory, if necessary
    if not os.path.exists(backup_directory):
        os.makedirs(backup_directory)

    return backup_directory

def logger(self):
    # Make a logger unless it already exists
    if self._logger is not None:
        return self._logger

    logger = logging.getLogger(self.logger_name)
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(self.log_level)

    # Setup a file logger if requested
    if self.log_to_file:
        file_handler = handlers.RotatingFileHandler(
            make_splunkhome_path(
                ['var', 'log', 'splunk', self.logger_name + '.log']),
            maxBytes=25000000, backupCount=5)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    else:
        stderr_handler = logging.StreamHandler(sys.stderr)
        formatter = logging.Formatter(' %(levelname)s %(message)s')
        stderr_handler.setFormatter(formatter)
        logger.addHandler(stderr_handler)

    self._logger = logger
    return self._logger

def backup_lookup_file(self, session_key, lookup_file, namespace,
                       resolved_file_path, owner=None, file_save_time=None):
    """
    Make a backup of the lookup file.
    """
    try:
        # Get the backup directory
        backup_directory = self.get_backup_directory(
            session_key, lookup_file, namespace, owner, resolved_file_path)

        # Get the modification time of the existing file so that we put the
        # date as an epoch in the name
        try:
            file_time = os.path.getmtime(resolved_file_path)
        except OSError:
            self.logger.warning(
                'Unable to get the file modification time for the existing lookup file="%s"',
                resolved_file_path)
            file_time = None

        # If we got the time for the backup file, then use that time.
        # This is important because the times ought to be consistent between
        # search heads in a cluster.
        if file_save_time is not None:
            file_time = file_save_time

        # If we couldn't get the time, then just use the current time (the
        # time we are making a backup)
        if file_time is None:
            file_time = time.time()

        # Make the full path for the backup to be stored
        dst = make_splunkhome_path([backup_directory, str(file_time)])

        # Make the backup
        shutil.copyfile(resolved_file_path, dst)

        # Copy the permissions and timestamps
        shutil.copystat(resolved_file_path, dst)

        self.logger.info(
            'A backup of the lookup file was created, namespace=%s, lookup_file="%s", backup_file="%s"',
            namespace, lookup_file, dst)

        # Return the path of the backup in case the caller wants to do
        # something with it
        return dst
    except Exception:
        self.logger.exception(
            "Error when attempting to make a backup; the backup will not be made")
        return None

def make_lookup_filename(lookup_file, namespace="lookup_editor", owner=None):
    """
    Create the file name of a lookup file. That is, derive a path for where
    the file should exist.
    """
    # Strip out invalid characters like ".." so that this cannot be used to
    # conduct a directory traversal
    lookup_file = os.path.basename(lookup_file)
    namespace = os.path.basename(namespace)

    if owner is not None:
        owner = os.path.basename(owner)

    # Get the user lookup
    if owner is not None and owner != 'nobody' and owner.strip() != '':
        return make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups", lookup_file])

    # Get the non-user lookup
    else:
        return make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file])

def is_lookup_in_users_path(lookup_file_path):
    """
    Determine if the lookup is within the user's path as opposed to being
    within the apps path.
    """
    if "etc/users/" in lookup_file_path:
        return True
    else:
        return False

def is_file_name_valid(self, lookup_file):
    """
    Indicate if the lookup file is valid (doesn't contain invalid characters
    such as "..").
    """
    allowed_path = re.compile("^[-A-Z0-9_ ]+([.][-A-Z0-9_ ]+)*$", re.IGNORECASE)

    if not allowed_path.match(lookup_file):
        return False
    else:
        return True

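# Illustrative calls against make_lookup_filename above; the app and file
# names are hypothetical. A user-owned lookup resolves under etc/users, an
# app-level one under etc/apps.
make_lookup_filename("geo.csv", namespace="search", owner="alice")
# -> $SPLUNK_HOME/etc/users/alice/search/lookups/geo.csv
make_lookup_filename("geo.csv", namespace="search")
# -> $SPLUNK_HOME/etc/apps/search/lookups/geo.csv
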
def info(self):
    """
    Provides table of contents for all locally hosted resources
    """
    # gather all of the XML schema files
    schema_dir = util.make_splunkhome_path(
        ['share', 'splunk', 'search_mrsparkle', 'exposed', 'schema'])
    schemaFiles = [x[0:-4] for x in os.listdir(schema_dir) if x.endswith('.rnc')]

    return self.render_template('top/info.html', {'schemaFiles': schemaFiles})

def add_aob_lib_paths():
    import sys
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

    bin_path = make_splunkhome_path(
        ['etc', 'apps', 'splunk_app_addon-builder', 'bin'])
    validation_path = make_splunkhome_path(
        ['etc', 'apps', 'splunk_app_addon-builder', 'bin', 'validation_rules'])
    controller_path = make_splunkhome_path([
        'etc', 'apps', 'splunk_app_addon-builder', 'appserver', 'controllers'
    ])
    res_path = make_splunkhome_path([
        'etc', 'apps', 'splunk_app_addon-builder', 'bin',
        'splunk_app_add_on_builder'
    ])

    for mpath in (bin_path, validation_path, controller_path, res_path):
        if mpath not in sys.path:
            sys.path.insert(1, mpath)

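# A minimal sketch of how a script might call add_aob_lib_paths before
# importing Add-on Builder modules; the sanity check below is illustrative,
# not part of the original helper.
import sys

add_aob_lib_paths()
assert any(p.endswith('validation_rules') for p in sys.path)
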
def makeLookupFilename(self, lookup_file, namespace="lookup_editor", owner=None):
    """
    Create the file name of a lookup file. That is, derive a path for where
    the file should exist.
    """
    # Strip out invalid characters like ".." so that this cannot be used to
    # conduct a directory traversal
    lookup_file = os.path.basename(lookup_file)
    namespace = os.path.basename(namespace)

    if owner is not None:
        owner = os.path.basename(owner)

    # Get the user lookup
    if owner is not None and owner != 'nobody' and owner.strip() != '':
        return make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups", lookup_file])

    # Get the non-user lookup
    else:
        return make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file])

def upload(self, **kargs):
    if cherrypy.request.method == 'GET':
        return self.render_template('upload_image:upload.html', {})

    image = kargs.get('image', None)
    tour_name = kargs.get('tourName', None)
    filename = kargs.get('filename', None)

    # This is hard-coded in order to prevent the upload of files into
    # arbitrary apps
    app = 'tour_makr'

    # Get the file extension
    file_extension = os.path.splitext(filename)[1][1:]

    # Make sure the file is an actual image and one that we accept.
    # Note that files that are not image files at all will return None
    if imghdr.what(image.file) not in UploadController.ALLOWED_FILE_TYPES \
            or file_extension not in UploadController.ALLOWED_FILE_TYPES:
        raise cherrypy.HTTPError(
            403,
            'The type of file is not allowed; must be gif, jpeg, bmp, or png')

    if image is not None:
        try:
            # Verify that the app name doesn't attempt a path traversal attack
            if self.isDirTraversing(app):
                return 'App name cannot contain / or \\ character or start with . app="%s"' % app

            # Verify that the tour name doesn't attempt a path traversal attack
            if self.isDirTraversing(tour_name):
                return 'Tour name cannot contain / or \\ character or start with . tour_name="%s"' % tour_name

            tempPath = util.make_splunkhome_path([
                'etc', 'apps', self.cleanPath(app), 'appserver', 'static',
                'img', self.cleanPath(tour_name)
            ])

            if not os.path.exists(tempPath):
                os.makedirs(tempPath)

            # Verify that the filename doesn't attempt a path traversal attack
            if self.isDirTraversing(filename):
                return 'Filename cannot contain / or \\ character or start with . filename="%s"' % filename

            newPath = os.path.join(tempPath, self.cleanPath(filename))

            with open(newPath, 'wb') as newFile:
                copyfileobj(image.file, newFile)

            return "Successfully stored %s" % filename
        except Exception as e:
            #raise cherrypy.HTTPError(200, 'Failed to upload the file %s. Exception %s' % (filename, str(e)))
            return 'Failed to upload the file %s. Exception %s' % (
                filename, str(e))

def get_file_name(self):
    "Get a file name that can be used for creating a stash file"

    # Sanitize the source name
    source_name = re.sub(r"[^a-zA-Z_0-9]", "_", str(self.source_name))

    # Make the file path
    stash_file = make_splunkhome_path(
        ["var", "spool", "splunk",
         source_name + "_" + str(time.time()) + "_"
         + str(random.randrange(0, 65535, 1)) + self.file_extension])

    return stash_file

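# A hypothetical caller for get_file_name above: write one payload into the
# returned spool path so splunkd picks it up. 'writer' stands in for an
# instance of the class defining get_file_name, and the event text is made up.
stash_file = writer.get_file_name()
with open(stash_file, 'w') as out:
    out.write('example event payload\n')
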
def setup_logger(level):
    """
    Setup a logger for the REST handler.
    """
    logger = logging.getLogger('splunk.appserver.%s.controllers.agents' % _APPNAME)
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', 'agents.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    return logger

def setup_logger(level, filename):
    '''
    Setup a logger for the custom search command.
    '''
    logger = logging.getLogger(
        'splunk.appserver.SA-ctf_scoreboard.customsearch.getanswer.' + filename)
    logger.propagate = False
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'scoreboard', filename]),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)
    return logger

def resolve_lookup_filename(self, lookup_file, namespace="lookup_editor",
                            owner=None, get_default_csv=True, version=None,
                            throw_not_found=True):
    """
    Resolve the lookup filename. This function will handle things such as:

      * Returning the default lookup file if requested
      * Returning the path to a particular version of a file

    Note that the lookup file must have an existing lookup file entry for
    this to return correctly; this shouldn't be used for determining the
    path of a new file.
    """
    # Strip out invalid characters like ".." so that this cannot be used to
    # conduct a directory traversal
    lookup_file = os.path.basename(lookup_file)
    namespace = os.path.basename(namespace)

    if owner is not None:
        owner = os.path.basename(owner)

    # Determine the lookup path by asking Splunk
    try:
        resolved_lookup_path = lookupfiles.SplunkLookupTableFile.get(
            lookupfiles.SplunkLookupTableFile.build_id(
                lookup_file, namespace, owner)).path
    except ResourceNotFound:
        if throw_not_found:
            raise
        else:
            return None

    # Get the backup file for one with an owner
    if version is not None and owner is not None:
        lookup_path = make_splunkhome_path([
            self.getBackupDirectory(lookup_file, namespace, owner,
                                    resolved_lookup_path=resolved_lookup_path),
            version])
        lookup_path_default = make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups",
             lookup_file + ".default"])

    # Get the backup file for one without an owner
    elif version is not None:
        lookup_path = make_splunkhome_path([
            self.getBackupDirectory(lookup_file, namespace, owner,
                                    resolved_lookup_path=resolved_lookup_path),
            version])
        lookup_path_default = make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file + ".default"])

    # Get the user lookup
    elif owner is not None and owner != 'nobody':
        # e.g. $SPLUNK_HOME/etc/users/luke/SA-NetworkProtection/lookups/test.csv
        lookup_path = resolved_lookup_path
        lookup_path_default = make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups",
             lookup_file + ".default"])

    # Get the non-user lookup
    else:
        lookup_path = resolved_lookup_path
        lookup_path_default = make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file + ".default"])

    logger.info('Resolved lookup file, path=%s', lookup_path)

    # Get the file path
    if get_default_csv and not os.path.exists(lookup_path) \
            and os.path.exists(lookup_path_default):
        return lookup_path_default
    else:
        return lookup_path

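# A hedged illustration of calling resolve_lookup_filename above; 'editor'
# stands in for an instance of the class defining it, and the lookup name,
# app, and version stamp are made up.
path = editor.resolve_lookup_filename('geo.csv', namespace='search',
                                      owner='nobody')
# With version set, the same call resolves into the backup directory instead:
old = editor.resolve_lookup_filename('geo.csv', namespace='search',
                                     owner='nobody', version='1500000000.0')
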
def setup_logger():
    logger = logging.getLogger(HANDLER_NAME)
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(logging.DEBUG)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', LOG_FILE_NAME]),
        maxBytes=5000000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    return logger

def backupLookupFile(self, lookup_file, namespace, owner=None,
                     resolved_file_path=None):
    """
    Make a backup of the lookup file
    """
    try:
        # If we don't already know the path of the file, then load it
        if resolved_file_path is None:
            resolved_file_path = self.resolve_lookup_filename(
                lookup_file, namespace, owner, throw_not_found=False)

        # If the file doesn't appear to exist yet, then skip the backup.
        if resolved_file_path is None:
            logger.info("The file doesn't exist yet; the backup will not be made")
            return None

        # Get the backup directory
        backup_directory = self.getBackupDirectory(
            lookup_file, namespace, owner,
            resolved_lookup_path=resolved_file_path)

        # Get the modification time of the existing file so that we put the
        # date as an epoch in the name
        try:
            file_time = os.path.getmtime(resolved_file_path)
        except OSError:
            logger.warning(
                'Unable to get the file modification time for the existing lookup file="%s"',
                resolved_file_path)
            file_time = None

        # If we couldn't get the time, then just use the current time (the
        # time we are making a backup)
        if file_time is None:
            file_time = time.time()

        # Make the full path for the backup to be stored
        dst = make_splunkhome_path([backup_directory, str(file_time)])

        # Make the backup
        shutil.copyfile(resolved_file_path, dst)

        # Copy the permissions and timestamps
        shutil.copystat(resolved_file_path, dst)

        logger.info(
            'A backup of the lookup file was created, namespace=%s, lookup_file="%s", backup_file="%s"',
            namespace, lookup_file, dst)

        # Return the path of the backup in case the caller wants to do
        # something with it
        return dst
    except Exception:
        logger.exception(
            "Error when attempting to make a backup; the backup will not be made")
        return None

def setup_logger():
    logger = logging.getLogger('configure_oauth')
    logger.propagate = False
    logger.setLevel(logging.DEBUG)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', 'configure_oauth.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    return logger

def setup_logger(level):
    """
    Setup a logger for the REST handler.
    """
    logger = logging.getLogger(
        'splunk.appserver.lookup_editor.controllers.LookupEditor')
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(
            ['var', 'log', 'splunk', 'lookup_editor_controller.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    return logger

def setup_logger():
    """
    Setup a logger.
    """
    logger = logging.getLogger('web_availability_modular_input')
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(logging.INFO)

    file_handler = handlers.RotatingFileHandler(
        make_splunkhome_path(
            ['var', 'log', 'splunk', 'cache_size_modular_input.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    return logger

def addUploadAssets(appName):
    appPath = _getAppPath(appName, True)
    if not appPath:
        raise admin.ArgValidationException(
            _("App '%s' does not exist") % appName)

    tempPath = make_splunkhome_path(['var', 'run', 'splunk', 'apptemp'])

    # if the temp path does not exist, there are no assets to move
    if not os.path.exists(tempPath):
        return

    dstPath = os.path.join(appPath, 'appserver', 'static')
    bundle_paths.maybe_makedirs(dstPath)
    comm.mergeDirs(tempPath, dstPath)

    # clean up
    bundle_paths.safe_remove(tempPath)

def setup_logger():
    """
    sets up logger for shutdown command
    """
    logger = logging.getLogger('ta-akamai')

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False
    logger.setLevel(logging.DEBUG)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', 'ta-akamai.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    return logger

def setup_logger():
    """
    Setup a logger.
    """
    logger = logging.getLogger("python_modular_input")
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(logging.DEBUG)

    file_handler = handlers.RotatingFileHandler(
        make_splunkhome_path(["var", "log", "splunk", "python_modular_input.log"]),
        maxBytes=25000000,
        backupCount=5
    )
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    return logger

def logger(self):
    # Make a logger unless it already exists
    if self._logger is not None:
        return self._logger

    logger = logging.getLogger(self.logger_name)
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(logging.INFO)

    file_handler = handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', self.logger_name + '.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    self._logger = logger
    return self._logger

def setup_logger(level):
    """
    Setup a logger for the REST handler.
    """
    logger = logging.getLogger(
        "splunk.appserver.alert_manager.controllers.IncidentWorkflow")
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(
            ["var", "log", "splunk", "alert_manager_settings_controller.log"]),
        maxBytes=25000000,
        backupCount=5,
    )
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    return logger

def setup_logger():
    """
    Setup a logger.

    Note that the modular input base class has a logger too. However, it
    isn't currently used because there are several classmethods that don't
    have access to the logger.
    """
    logger = logging.getLogger('web_input_modular_input')
    logger.propagate = False  # Prevent the log messages from being duplicated in the python.log file
    logger.setLevel(logging.DEBUG)

    file_handler = handlers.RotatingFileHandler(
        make_splunkhome_path(
            ['var', 'log', 'splunk', 'web_input_modular_input.log']),
        maxBytes=25000000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)

    logger.addHandler(file_handler)

    return logger

def setupLogger(logger=None,
                log_format='%(asctime)s %(levelname)s [ReadStructuresService] %(message)s',
                level=logging.DEBUG, log_name="read_structures_service.log",
                logger_name="read_structures_service"):
    """
    Setup a logger suitable for splunkd consumption
    """
    if logger is None:
        logger = logging.getLogger(logger_name)

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False
    logger.setLevel(level)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(['var', 'log', 'splunk', log_name]),
        maxBytes=2500000, backupCount=5)
    formatter = logging.Formatter(log_format)
    file_handler.setFormatter(formatter)

    logger.handlers = []
    logger.addHandler(file_handler)

    logger.debug("init read structures service logger")
    return logger

def getPastSearches(user, sessionKey, namespace):
    bootstrapSearches = []
    try:
        bootsearchlog = make_splunkhome_path(
            ['etc', 'system', 'static', 'bootstrapsearches.txt'])
        lines = utils.readText(bootsearchlog).split('\n')
        bootstrapSearches.extend(lines)
    except Exception:
        logger.warn("Unable to get bootstrap search history")

    userHistory = []
    try:
        # get the user's history of searches, ignoring those that didn't
        # return any results
        q = "|history | head %s | search event_count>0 OR result_count>0 | dedup search | table search" % MAX_HISTORY
        results = se.searchAll(q, sessionKey=sessionKey, namespace=namespace,
                               owner=user, spawn_process=False)
        userHistory = [str(r['search']) for r in results]
        if q in userHistory:
            userHistory.remove(q)
    except Exception as e:
        logger.warn("Unable to get search history: %s" % e)

def setup_logger():
    """
    Sets up a logger for the ProcMitigator.
    """
    logger = logging.getLogger('proc_mitigator')

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False
    logger.setLevel(logging.DEBUG)

    file_handler = logging.handlers.RotatingFileHandler(
        make_splunkhome_path(
            ['etc', 'apps', 'SA-Mitigation-Endpoint', 'logs',
             'proc_mitigator.log']))
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    return logger

def resolve_lookup_filename(self, lookup_file, namespace="lookup_editor",
                            owner=None, get_default_csv=True, version=None,
                            throw_not_found=True):
    """
    Resolve the lookup filename.
    """
    # Strip out invalid characters like ".." so that this cannot be used to
    # conduct a directory traversal
    lookup_file = os.path.basename(lookup_file)
    namespace = os.path.basename(namespace)

    if owner is not None:
        owner = os.path.basename(owner)

    # Determine the lookup path by asking Splunk
    try:
        resolved_lookup_path = lookupfiles.SplunkLookupTableFile.get(
            lookupfiles.SplunkLookupTableFile.build_id(
                lookup_file, namespace, owner)).path
    except ResourceNotFound:
        if throw_not_found:
            raise
        else:
            return None

    if version is not None and owner is not None:
        lookup_path = make_splunkhome_path([
            self.getBackupDirectory(lookup_file, namespace, owner,
                                    resolved_lookup_path=resolved_lookup_path),
            version])
        lookup_path_default = make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups",
             lookup_file + ".default"])
    elif version is not None:
        lookup_path = make_splunkhome_path([
            self.getBackupDirectory(lookup_file, namespace, owner,
                                    resolved_lookup_path=resolved_lookup_path),
            version])
        lookup_path_default = make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file + ".default"])
    elif owner is not None:
        # e.g. $SPLUNK_HOME/etc/users/luke/SA-NetworkProtection/lookups/test.csv
        lookup_path = resolved_lookup_path
        lookup_path_default = make_splunkhome_path(
            ["etc", "users", owner, namespace, "lookups",
             lookup_file + ".default"])
    else:
        lookup_path = resolved_lookup_path
        lookup_path_default = make_splunkhome_path(
            ["etc", "apps", namespace, "lookups", lookup_file + ".default"])

    logger.info('Resolved lookup file, path=%s', lookup_path)

    # Get the file path
    if get_default_csv and not os.path.exists(lookup_path) \
            and os.path.exists(lookup_path_default):
        return lookup_path_default
    else:
        return lookup_path

#########################################################
import splunk.Intersplunk            # so you can interact with Splunk
import splunk.entity as entity       # for splunk config info
import urllib2                       # make http requests to PAN firewall
import sys                           # for system params and sys.exit()
import re                            # regular expression checks in PAN messages
import splunk.mining.dcutils as dcu
import ConfigParser                  # to parse out the pan.conf file
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path  # to grab the default splunk path
import splunk.rest
import json

logger = dcu.getLogger()

# set path of config
panconf = make_splunkhome_path(
    ["etc", "apps", "SA-Mitigation", "default", "pan.conf"])

# read config file
config = ConfigParser.RawConfigParser()
config.read(panconf)

# Assign PA IP
PAN = config.get('PAN', 'IP')

# Assign BADACTORS group name
BADACTORS = config.get('PAN', 'GROUP')

## Major props to Ledion. Copying his function verbatim, then adding comments,
## traceback, and logging
## http://blogs.splunk.com/2011/03/15/storing-encrypted-credentials/
## access the credentials in /servicesNS/nobody/<YourApp>/admin/passwords

import splunk
import splunk.admin as admin
import splunk.clilib.bundle_paths as bundle_paths
import splunk.clilib.cli_common as comm
import splunk.util as util
import traceback
import splunk.appserver.mrsparkle.lib.i18n as i18n
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import os, time, urllib2, string, tarfile, sys, socket

APPS_PATH = bundle_paths.get_base_path()
PACKAGE_PATH = os.path.join(bundle_paths.get_system_bundle_path(),
                            'static', 'app-packages')
TEMPLATES_PATH = make_splunkhome_path(['share', 'splunk', 'app_templates'])

TEXT_EXTENSIONS = ['txt', 'html', 'htm', 'xhtml', 'css', 'py', 'pl', 'ps1',
                   'bat', 'sh', 'conf', 'js', 'xml', 'xsl', 'meta']

TXT_PREFIX = '__'

''' Needed to prevent collisions between mako and appbuilder templates '''
class SafeTemplate(string.Template):
    delimiter = '$$'

''' Returns Splunkd uri '''
def _getSplunkdUri():
    return comm.getMgmtUri().replace('127.0.0.1', socket.gethostname().lower())

''' Returns Splunkweb uri