def clear_cache():
    ''' Deletes all cached minified JS and CSS resources.

    Best-effort sweep: failures to list a directory or unlink a file are
    logged and never raised.
    '''
    # declare tuples of (directory_path, cache_file_prefix) to delete
    cache_paths = [(make_absolute(lib.i18n.CACHE_PATH), MODULE_JS_FILE_PREFIX),
                   (make_absolute(os.path.join(MODULE_STATIC_CACHE_PATH, 'css')), MODULE_CSS_FILE_PREFIX)
                  ]
    logger.info('clearing filechain cache')
    for cache_dir, prefix in cache_paths:
        try:
            files = os.listdir(cache_dir)
        except Exception as e:
            # BUGFIX: was `break`, which aborted the whole sweep when the
            # first directory was unreadable; continue to the next one.
            logger.warning('unable to list cache directory "%s": %s' % (cache_dir, e))
            continue
        for fname in files:
            if fname.startswith(prefix):
                logger.debug('deleting cached resource: %s' % fname)
                try:
                    os.unlink(os.path.join(cache_dir, fname))
                except Exception as e:
                    logger.warning('failed to delete cached resource: %s' % e)
def clear_cache():
    ''' Deletes all cached minified JS and CSS resources.

    Failures are logged and swallowed; cache clearing is best-effort.
    '''
    # declare tuples of (directory_path, cache_file_prefix) to delete
    cache_paths = [
        (make_absolute(lib.i18n.CACHE_PATH), MODULE_JS_FILE_PREFIX),
        (make_absolute(os.path.join(MODULE_STATIC_CACHE_PATH, 'css')), MODULE_CSS_FILE_PREFIX)
    ]
    logger.info('clearing filechain cache')
    for cache_dir, prefix in cache_paths:
        try:
            files = os.listdir(cache_dir)
        except Exception as e:
            # BUGFIX: was `break`, which stopped clearing the remaining
            # cache directories after one unreadable directory.
            logger.warning('unable to list cache directory "%s": %s' % (cache_dir, e))
            continue
        for fname in files:
            if fname.startswith(prefix):
                logger.debug('deleting cached resource: %s' % fname)
                try:
                    os.unlink(os.path.join(cache_dir, fname))
                except Exception as e:
                    logger.warning('failed to delete cached resource: %s' % e)
def mount_static(ctrl, global_cfg, cfg):
    """Configure CherryPy to serve static assets under the configured static
    endpoint, installing custom resolvers that splice i18n translations into
    JavaScript files on the fly."""
    static_endpoint = global_cfg['static_endpoint']
    static_app_dir= make_absolute('etc/apps', '')

    # resolver for static content bundled with applications
    def static_app_resolver(section, branch, dir):
        """ Resolver that pulls application specific assets. """
        parts = branch.split('/')
        subbranch, app, asset = parts[0], parts[1], '/'.join(parts[2:] )
        appstaticdir = os.path.normpath(os.path.join(dir, app, 'appserver', 'static'))
        fn = os.path.normpath(os.path.join(appstaticdir, asset))
        # containment check guards against '..' directory traversal out of
        # the app's static dir
        if fn.startswith(appstaticdir) and fn.startswith(os.path.normpath(dir)) and os.path.exists(fn):
            sp = os.path.splitext(asset)
            # translate app JS unless it is third-party (js/contrib) or the
            # request opted out with ?i18noff
            if sp[1] == '.js' and not asset.startswith('js/contrib') and 'i18noff' not in cherrypy.request.query_string:
                i18n_cache = i18n.translate_js(fn)
                if i18n_cache:
                    return i18n_cache
            return fn
        return False

    def static_resolver(section, branch, dir):
        """resolver that knows how to add translations to javascript files"""
        # chain off to another resolver for statics served from application bundles.
        # overrides the 'dir' param with where applications are stored.
        if branch.startswith('app/'):
            return static_app_resolver(section, branch, static_app_dir)

        sp = os.path.splitext(branch)
        fn = os.path.join(dir, branch)

        if branch == 'js/i18n.js':
            return i18n.dispatch_i18n_js(fn) # send the locale data with the i18n.js system
        elif branch.endswith('common.min.js'):
            return filechain.chain_common_js() # returns the path to a cached file containing the finished cache file
        elif not branch.startswith('js/contrib') and sp[1] == '.js' and os.path.exists(fn) and 'i18noff' not in cherrypy.request.query_string:
            return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map
        return False # fallback to the default handler

    if (global_cfg.get('static_dir','') == '') :
        logger.warn('static endpoint configured, but no static directory. Falling back to ' + FAILSAFE_STATIC_DIR)
    staticdir = make_absolute(global_cfg.get('static_dir', FAILSAFE_STATIC_DIR), '')
    global_cfg['staticdir'] = staticdir

    cfg[static_endpoint] = {
        'tools.sessions.on' : False, # no session required for static resources
        'tools.staticdir.on' : True,
        'tools.staticdir.dir' : staticdir,
        'tools.staticdir.strip_version' : True,
        'tools.staticdir.resolver' : static_resolver,
        'tools.staticdir.content_types' : {'js' : 'application/javascript'} # correct python's application/x-javascript
    }

    # serve the favicon directly rather than via the resolver chain
    ctrl.favicon_ico = cherrypy.tools.staticfile.handler(os.path.join(staticdir, 'img', 'favicon.ico'))
def module_resolver(section, branch, dir):
    """Map a /modules/... URL branch to a file on disk.

    Returns a filesystem path (possibly to a generated i18n/minified cache
    file) or False to fall through to the default static handler.
    """
    from lib.apps import local_apps
    # first part of branch is the module name
    parts = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/')
    locale = i18n.current_lang(True)
    if not parts:
        return False
    module_path = local_apps.getModulePath(parts[0])
    if module_path:
        # this means there is a module named parts[0]
        # SPL-51365 images should load irrespective of css_minification.
        # BUGFIX: guard len(parts) > 1 before reading parts[1] — a branch of
        # a single segment used to raise IndexError here.
        if len(parts) > 1 and parts[0] == parts[1]:
            # ignore repetition of module name
            # happens for image request when minify_css=False
            fn = os.path.join(module_path, *parts[2:])
        else:
            fn = os.path.join(module_path, *parts[1:])
        # verified while fixing SPL-47422
        #pylint: disable=E1103
        if fn.endswith('.js') and os.path.exists(fn):
            return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map
        return fn
    elif parts[0].startswith('modules-') and parts[0].endswith('.js'):
        # modules-<hash>.min.js -> locale-specific minified bundle cache file
        # (renamed from `hash`, which shadowed the builtin)
        bundle_hash = parts[0].replace('modules-', '').replace('.min.js', '')
        return make_absolute(os.path.join(i18n.CACHE_PATH, '%s-%s-%s.cache' % ('modules.min.js', bundle_hash, locale)))
    elif parts[0].startswith('modules-') and parts[0].endswith('.css'):
        return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + parts[0]
    return False
def module_resolver(section, branch, dir):
    """Map a /modules/... URL branch to a file on disk.

    Returns a path (possibly to a generated i18n or minified-bundle cache
    file), or False to fall through to the default static handler.
    """
    from lib.apps import local_apps
    # first part of branch is the module name
    parts = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/')
    locale = i18n.current_lang(True)
    if not parts:
        return False
    module_path = local_apps.getModulePath(parts[0])
    if module_path:
        fn = os.path.join(module_path, *parts[1:])
        if fn.endswith('.js') and os.path.exists(fn):
            return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map
        return fn
    elif parts[0].startswith('modules-') and parts[0].endswith('.js'):
        # renamed from `hash`, which shadowed the builtin
        bundle_hash = parts[0].replace('modules-', '').replace('.min.js', '')
        return make_absolute(os.path.join(i18n.CACHE_PATH, '%s-%s-%s.cache' % ('modules.min.js', bundle_hash, locale)))
    elif parts[0].startswith('modules-') and parts[0].endswith('.css'):
        return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + parts[0]
    return False
def __init__(self, port=8000, **kwargs):
    """Record the file-based session storage path, if one is configured.

    Sets self._initialized only when SplunkWeb integration is enabled AND
    sessions are stored on disk; any failure leaves the instance in the
    uninitialized state without raising (best-effort by design).
    """
    self._initialized = False
    self._storage_path = None
    if not settings.SPLUNK_WEB_INTEGRATED:
        return
    try:
        from lib.util import splunk_to_cherry_cfg, make_absolute
        cherrypy_cfg = splunk_to_cherry_cfg('web', 'settings')
        storage_type = cherrypy_cfg.get('tools.sessions.storage_type')
        if storage_type != 'file':
            # only file-backed session storage has a path to track
            return
        self._storage_path = make_absolute(cherrypy_cfg['tools.sessions.storage_path'])
        self._initialized = True
    except Exception:
        # deliberate swallow: config/import failures must not prevent
        # construction (was `except Exception, e` with unused e + dead pass)
        self._initialized = False
def module_resolver(section, branch, dir):
    """Resolve a /modules/... request branch to a filesystem path.

    Returns a path string (possibly pointing at a generated i18n or
    minified-bundle cache file) or False to defer to the default handler.
    """
    from lib.apps import local_apps
    # normalise the branch into path segments; the leading segment names
    # the module
    segments = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/')
    locale = i18n.current_lang(True)
    if not segments:
        return False
    head = segments[0]
    module_path = local_apps.getModulePath(head)
    if module_path:
        target = os.path.join(module_path, *segments[1:])
        if target.endswith('.js') and os.path.exists(target):
            # cached file holding the original js + json translation map
            return i18n.translate_js(target)
        return target
    is_bundle = head.startswith('modules-')
    if is_bundle and head.endswith('.js'):
        digest = head.replace('modules-', '').replace('.min.js', '')
        cache_name = '%s-%s-%s.cache' % ('modules.min.js', digest, locale)
        return make_absolute(os.path.join(i18n.CACHE_PATH, cache_name))
    if is_bundle and head.endswith('.css'):
        return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + head
    return False
def run(blocking=True):
    """Configure and start the CherryPy application server.

    Reads web.conf ('web'/'settings'), layers command-line overrides on top,
    wires up SSL, logging, static/testing/rss/module endpoints and the i18n
    dispatcher, mounts the controller tree and starts the engine.  When
    `blocking` is True this call does not return until the engine stops
    (the Windows-service entry point passes blocking=False).
    """
    # get confs
    global_cfg = splunk_to_cherry_cfg('web', 'settings')

    # allow command line arguments to override the configuration
    # eg. --httpport=80
    args = util.args_to_dict()

    # debugging can be turned on from the command line with --debug
    if args.get('debug'):
        del args['debug']
        logger.setLevel(logging.DEBUG)
        for lname, litem in logger.manager.loggerDict.items():
            if not isinstance(litem, logging.PlaceHolder):
                logger.debug("Updating logger=%s to level=DEBUG" % lname)
                litem.setLevel(logging.DEBUG)
        # while debugging, route client-side JS logging to the server and
        # disable JS caching
        args['js_logger_mode'] = 'Server'
        args['js_no_cache'] = True
    global_cfg.update(args)

    # support SPLUNK_BINDIP backwards compatibly. -- overrides web.conf
    if os.environ.has_key('SPLUNK_BINDIP'):
        global_cfg['server.socket_host'] = os.environ['SPLUNK_BINDIP'].strip()

    global_cfg['server.socket_port'] = global_cfg['httpport']

    if normalizeBoolean(global_cfg.get('enableSplunkWebSSL', False)):
        logger.info('Enabling SSL')
        priv_key_path = str(global_cfg['privKeyPath'])
        ssl_certificate = str(global_cfg['caCertPath'])
        ssl_ciphers = str(global_cfg['cipherSuite'])

        # relative key/cert paths are resolved against SPLUNK_HOME
        if os.path.isabs(priv_key_path):
            global_cfg['server.ssl_private_key'] = priv_key_path
        else:
            global_cfg['server.ssl_private_key'] = make_splunkhome_path([priv_key_path])

        if os.path.isabs(ssl_certificate):
            global_cfg['server.ssl_certificate'] = ssl_certificate
        else:
            global_cfg['server.ssl_certificate'] = make_splunkhome_path([ssl_certificate])

        if not os.path.exists(global_cfg['server.ssl_private_key']):
            raise ValueError("%s Not Found" % global_cfg['server.ssl_private_key'])

        if not os.path.exists(global_cfg['server.ssl_certificate']):
            raise ValueError("%s Not Found" % global_cfg['server.ssl_certificate'])

        if global_cfg.get('supportSSLV3Only'):
            global_cfg['server.ssl_v3_only'] = True

        if ssl_ciphers:
            global_cfg['server.ssl_ciphers'] = ssl_ciphers
    else:
        # make sure the secure flag is not set on session cookies if we're not serving over SSL
        global_cfg['tools.sessions.secure'] = False

    # setup cherrypy logging infrastructure
    if global_cfg.has_key('log.access_file'):
        filename = make_absolute(global_cfg['log.access_file'], BASE_LOG_PATH)
        maxsize = int(global_cfg.get('log.access_maxsize', 0))
        maxcount = int(global_cfg.get('log.access_maxfiles', 5))
        if maxsize > 0:
            # rotate the access log ourselves instead of letting cherrypy
            # append to one file forever
            cherrypy.log.access_file = ''
            h = logging.handlers.RotatingFileHandler(filename, 'a', maxsize, maxcount)
            h.setLevel(logging.INFO)
            h.setFormatter(_cplogging.logfmt)
            cherrypy.log.access_log.addHandler(h)
            del global_cfg['log.access_file']
        else:
            global_cfg['log.access_file'] = filename

    if global_cfg.has_key('log.error_file'):
        # we've already committed to web_service.log by this point
        del global_cfg['log.error_file']
    cherrypy.log.error_file = ''
    cherrypy.log.error_log.addHandler(splunk_log_handler)
    if global_cfg.has_key('log.error_maxsize'):
        splunk_log_handler.maxBytes = int(global_cfg['log.error_maxsize'])
        splunk_log_handler.backupCount = int(global_cfg.get('log.error_maxfiles', 5))

    # now that we have somewhere to log, test the ssl keys. - SPL-34126
    # Lousy solution, but python's ssl itself hangs with encrypted keys, so avoid hang by
    # bailing with a message
    if global_cfg['enableSplunkWebSSL']:
        for cert_file in (global_cfg['server.ssl_private_key'], global_cfg['server.ssl_certificate']):
            if is_encrypted_cert(cert_file):
                logger.error("""Specified cert '%s' is encrypted with a passphrase. SplunkWeb does not support passphrase-encrypted keys at this time. To resolve the problem, decrypt the keys on disk, generate new passphrase-less keys, or disable ssl for SplunkWeb.""" % cert_file)
                raise Exception("Unsupported encrypted cert file.")

    # set login settings
    if global_cfg.get('tools.sessions.storage_type') == 'file':
        global_cfg['tools.sessions.storage_path'] = make_absolute(global_cfg['tools.sessions.storage_path'])

    # SPL-16963: add port number to session key to allow for sessions for multiple
    # instances to run on a single host, without mutually logging each other out.
    global_cfg['tools.sessions.name'] = "session_id_%s" % global_cfg['httpport']

    # set mako template cache directory
    global_cfg.setdefault('mako_cache_path', MAKO_CACHE_PATH)

    root_name = global_cfg.get('root_endpoint', FAILSAFE_ROOT_ENDPOINT).strip('/')

    ctrl = TopController()
    cfg = {'global' : global_cfg}

    # initialize all of the custom endpoints that are registered in the
    # apps
    ctrl.custom.load_handlers()

    # Serve static files if so configured
    if global_cfg.has_key('static_endpoint'):
        mount_static(ctrl, global_cfg, cfg)

    if global_cfg.has_key('testing_endpoint'):
        # NOTE(review): this checks 'static_dir' but configures 'testing_dir'
        # below — looks like a copy/paste slip; confirm intent before fixing.
        if (global_cfg.get('static_dir','') == '') :
            logger.warn('testing endpoint configured, but no testing directory. Falling back to ' + FAILSAFE_TESTING_DIR)
        staticdir = make_absolute(global_cfg.get('testing_dir', FAILSAFE_TESTING_DIR), '')

        cfg[global_cfg['testing_endpoint']] = {
            'tools.staticdir.on' : True,
            'tools.staticdir.dir' : staticdir,
            'tools.staticdir.strip_version' : True
        }

    if global_cfg.has_key('rss_endpoint'):
        logger.debug('Checking for shared storage location')
        rssdir = get_rss_parent_dir()
        if len(rssdir) > 0:
            logger.debug('Using shared storage location: %s' % rssdir)
        else:
            rssdir = make_absolute(global_cfg.get('rss_dir', FAILSAFE_RSS_DIR), '')
            logger.debug('No shared storage location configured, using: %s' % rssdir)
        cfg[global_cfg['rss_endpoint']] = {
            'tools.staticdir.on' : True,
            'tools.staticdir.dir' : rssdir,
            'tools.staticdir.strip_version' : False,
            'tools.staticdir.default_ext' : 'xml',
            'error_page.404': make_splunkhome_path([FAILSAFE_STATIC_DIR, 'html', 'rss_404.html'])
        }

    # Modules served statically out of /modules or out of an app's modules dir
    def module_resolver(section, branch, dir):
        from lib.apps import local_apps
        # first part of branch is the module name
        parts = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/')
        locale = i18n.current_lang(True)
        if not parts:
            return False
        module_path = local_apps.getModulePath(parts[0])
        if module_path:
            fn = os.path.join(module_path, *parts[1:])
            if fn.endswith('.js') and os.path.exists(fn):
                return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map
            return fn
        elif parts[0].startswith('modules-') and parts[0].endswith('.js'):
            # modules-<hash>.min.js -> locale-specific minified bundle cache
            hash = parts[0].replace('modules-', '').replace('.min.js', '')
            return make_absolute(os.path.join(i18n.CACHE_PATH, '%s-%s-%s.cache' % ('modules.min.js', hash, locale)))
        elif parts[0].startswith('modules-') and parts[0].endswith('.css'):
            return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + parts[0]
        return False

    moddir = make_absolute(global_cfg.get('module_dir', FAILSAFE_MODULE_PATH))
    cfg['/modules'] = {
        'tools.staticdir.strip_version' : True,
        'tools.staticdir.on' : True,
        'tools.staticdir.match' : re.compile(r'.*\.(?!html$|spec$|py$)'), # only files with extensions other than .html, .py and .spec are served
        'tools.staticdir.dir' : moddir,
        'tools.staticdir.resolver' : module_resolver,
        'tools.staticdir.content_types' : {'js' : 'application/javascript'} # correct python's application/x-javascript
    }

    cfg['/'] = {
        'request.dispatch': i18n.I18NDispatcher(),
    }

    # enable gzip + i18n goodness
    if global_cfg.get('enable_gzip', False):
        cfg['/'].update({
            'tools.gzip.on' : True,
            'tools.gzip.mime_types' : ['text/plain', 'text/html', 'text/css', 'application/javascript', 'application/x-javascript'],
        })

    #cfg['/']['tools.gzip.on'] = False

    # Set maximum filesize we can receive (in MB)
    maxsize = global_cfg.get('max_upload_size', DEFAULT_MAX_UPLOAD_SIZE)
    cfg['global']['server.max_request_body_size'] = int(maxsize) * 1024 * 1024

    if global_cfg.get('enable_throttle', False):
        from lib import throttle
        cfg['global'].update({
            'tools.throttle.on' : True,
            'tools.throttle.bandwidth': int(global_cfg.get('throttle_bandwidth', 50)),
            'tools.throttle.latency': int(global_cfg.get('throttle_latency', 100))
        })

    if global_cfg.get('enable_log_runtime', False):
        points = global_cfg.get('enable_log_runtime')
        if points == 'All':
            points = 'on_start_resource,before_request_body,before_handler,before_finalize,on_end_resource,on_end_request'
        if points is True:
            points = 'on_end_resource'
        for point in points.split(','):
            # log_closure binds `point` per-iteration (avoids the late-binding
            # closure pitfall)
            def log_closure(point):
                def log():
                    import time
                    starttime = cherrypy.response.time
                    endtime = time.time()
                    delta = (endtime - starttime) * 1000
                    logger.warn('log_runtime point=%s path="%s" start=%f end=%f delta_ms=%.1f' % (point, cherrypy.request.path_info, starttime, endtime, delta))
                return log
            setattr(cherrypy.tools, 'log_'+point, cherrypy.Tool(point, log_closure(point)))
            cfg['/']['tools.log_%s.on' % point] = True

    if global_cfg.get('storm_enabled'):
        from splunk.appserver.mrsparkle.lib.storm import hook_storm_session
        hook_storm_session()

    # setup handler to create and remove the pidfile
    pid_path = make_absolute(global_cfg.get('pid_path', PID_PATH))
    ProcessID(cherrypy.engine, pid_path).subscribe()

    #
    # process splunkd status information
    #
    startup.initVersionInfo()

    # set start time for restart checking
    cfg['global']['start_time'] = time.time()

    # setup global error handling page
    cfg['global']['error_page.default'] = error.handleError

    #
    # TODO: refactor me into locale stuff
    #
    cfg['global']['DISPATCH_TIME_FORMAT'] = '%s.%Q'
    # END

    # Common splunk paths
    cfg['global']['etc_path'] = make_absolute(SPLUNK_ETC_PATH)
    cfg['global']['site_packages_path'] = make_absolute(SPLUNK_SITE_PACKAGES_PATH)
    cfg['global']['mrsparkle_path'] = make_absolute(SPLUNK_MRSPARKLE_PATH)

    listen_on_ipv6 = global_cfg.get('listenOnIPv6')
    socket_host = global_cfg.get('server.socket_host')
    if not socket_host:
        # pick a default bind address based on the IPv6 preference
        if listen_on_ipv6:
            socket_host = global_cfg['server.socket_host'] = '::'
        else:
            socket_host = global_cfg['server.socket_host'] = '0.0.0.0'
        logger.info("server.socket_host defaulting to %s" % socket_host)

    if ':' in socket_host:
        if not listen_on_ipv6:
            logger.warn('server.socket_host was set to IPv6 address "%s", so ignoring listenOnIPv6 value of "%s"' % (socket_host, listen_on_ipv6))
    else:
        if listen_on_ipv6:
            logger.warn('server.socket_host was to to IPv4 address "%s", so ignoring listenOnIPv6 values of "%s"' % (socket_host, listen_on_ipv6))

    if socket_host == '::':
        # Start a second server to listen to the IPV6 socket
        if isinstance(listen_on_ipv6, bool) or listen_on_ipv6.lower() != 'only':
            # unless listenOnIPv6=only, the primary server binds IPv4 and
            # server2 (below) covers IPv6
            global_cfg['server.socket_host'] = '0.0.0.0'
        from cherrypy import _cpserver
        from cherrypy import _cpwsgi_server
        server2 = _cpserver.Server()
        server2.httpserver = _cpwsgi_server.CPWSGIServer()
        server2.httpserver.bind_addr = ('::', global_cfg['server.socket_port'])
        server2.socket_host = '::'
        server2.socket_port = global_cfg['server.socket_port']
        # mirror the primary server's SSL settings onto the IPv6 server
        for key in ('ssl_private_key', 'ssl_certificate', 'ssl_v3_only', 'ssl_ciphers'):
            if 'server.'+key in global_cfg:
                setattr(server2, key, global_cfg['server.'+key])
                setattr(server2.httpserver, key, global_cfg['server.'+key])
        server2.subscribe()

    if root_name:
        # redirect / to the root endpoint
        cherrypy.tree.mount(RootController(), '/', cfg)

    cherrypy.config.update(cfg)

    if global_cfg.get('enable_profile', False):
        from cherrypy.lib import profiler
        cherrypy.tree.graft(
            profiler.make_app(cherrypy.Application(ctrl, '/' + root_name, cfg),
                path=global_cfg.get('profile_path', '/tmp/profile')),
            '/' + root_name
        )
    else:
        cherrypy.tree.mount(ctrl, '/' + root_name, cfg)
    cherrypy.engine.signal_handler.subscribe()

    # this makes Ctrl-C work when running in nodaemon
    if splunk.clilib.cli_common.isWindows:
        from cherrypy.process import win32
        # NOTE(review): assigns cherrypy.console_control_handler but then
        # subscribes cherrypy.engine.console_control_handler — confirm both
        # names are intended.
        cherrypy.console_control_handler = win32.ConsoleCtrlHandler(cherrypy.engine)
        cherrypy.engine.console_control_handler.subscribe()

    # log active config
    for k in sorted(cherrypy.config):
        logger.info('CONFIG: %s (%s): %s' % (k, type(cherrypy.config[k]).__name__, cherrypy.config[k]))

    cherrypy.engine.start()

    # clean up caches on init
    filechain.clear_cache()
    i18n.init_js_cache()

    if blocking:
        # this routine that starts this as a windows service will not want us to block here.
        cherrypy.engine.block()
def chain_modules_js(files):
    """Concatenate, translate and minify the given module JS files into one
    locale-specific cache file; returns the cache file path.

    The cache is invalidated when the modules root, etc/apps, or any input
    file is newer than the cached bundle.
    """
    # we could create a lock for each hash instead of a global lock here
    # but the savings of potentially generating/checking different module groups concurrently
    # probably isn't worth managing a dictionary of locks
    with _chain_modules_js_lock:
        logger.debug('Chaining and minifying modules JS')
        try:
            locale = lib.i18n.current_lang(True)
            modules = libmodule.moduleMapper.getInstalledModules()
            hash = generate_file_list_hash(files)
            cache_filename = make_absolute(os.path.join(lib.i18n.CACHE_PATH, '%s-%s-%s.cache' % (MODULE_JS_FILE_PREFIX, hash, locale)))
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                cache_mtime = os.path.getmtime(cache_filename)
                # check if root directory was modified (app installed, etc., where indiv. timestamps may be well in the past)
                if cache_mtime < os.path.getmtime(os.path.join(MRSPARKLE, 'modules')) or cache_mtime < os.path.getmtime(make_absolute(os.path.join('etc', 'apps'))):
                    os.unlink(cache_filename)
                else:
                    # check individual files, so if they've been touched we'll poison the cache
                    for input_filename in files:
                        parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                        if len(parts) == 2:
                            # a bare /modules/<File>.js entry lives under MRSPARKLE
                            input_path = os.path.join(MRSPARKLE, *parts)
                        else:
                            module_path = local_apps.getModulePath(parts[1])
                            input_path = os.path.join(module_path, *parts[2:])
                        if cache_mtime < os.path.getmtime(input_path):
                            os.unlink(cache_filename)
                            break
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                # cache survived all freshness checks; reuse it
                return cache_filename
            output_fh = file(cache_filename, 'wb')
            # many duplicate JS translation blocks
            blocks = []
            js = ''
            wrap_try_catch = splunk.util.normalizeBoolean(cherrypy.config.get('trap_module_exceptions', True))
            for input_filename in files:
                parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                if len(parts) == 2:
                    input_path = os.path.join(MRSPARKLE, *parts)
                    # since we don't have the module name from something like /modules/AbstractModule.js, try
                    # to figure it out from the module list
                    for key in modules:
                        if modules[key]['js'].endswith(os.path.join(*parts)):
                            moduleName = key
                            break
                else:
                    module_path = local_apps.getModulePath(parts[1])
                    input_path = os.path.join(module_path, *parts[2:])
                    for key in modules:
                        if modules[key]['js'].endswith(os.path.join(*parts)):
                            moduleName = key
                            break
                # NOTE(review): if no entry in `modules` matches,
                # `moduleName` keeps its value from a previous iteration (or
                # is unbound on the first) — confirm every input always maps
                # to an installed module.
                block = lib.i18n.generate_wrapped_js(input_path, locale)
                if block:
                    if block not in blocks:
                        blocks.append(block)
                    if wrap_try_catch:
                        js += 'try{' + block
                    else:
                        js += block
                input_temp_fh = file(input_path, 'r')
                js += input_temp_fh.read() + ';'
                input_temp_fh.close()
                if wrap_try_catch:
                    js += '}catch(e){var err="The module \'%s\' in the \'%s\' app has thrown an unexpected error and may not function properly. Contact the app author or disable the app to remove this message. ";if(window.console){window.console.log(err);}$(function(){Splunk.Messenger.System.getInstance().send("error","%s",err);});}' % (moduleName, modules[moduleName]['appName'], moduleName)
            minifier = Popen([PATH_TO_JSMIN], stdin = subprocess.PIPE, stderr = subprocess.STDOUT, stdout = subprocess.PIPE, close_fds = True)
            (data, err) = minifier.communicate(js)
            if minifier.returncode != 0:
                # jsmin failed: fall back to unminified assets for the rest of
                # the process lifetime
                logger.error('While minifying modules JavaScript, jsmin (pid %d) returned code %d' % (minifier.pid, minifier.returncode))
                logger.error('Disabling minification of JavaScript and CSS')
                cherrypy.config['minify_js'] = False
                cherrypy.config['minify_css'] = False
            else:
                output_fh.write(data)
            output_fh.close()
            return cache_filename
        except IOError:
            logger.error('While minifying modules JavaScript, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
            logger.error('Disabling minification of JavaScript and CSS')
            cherrypy.config['minify_js'] = False
            cherrypy.config['minify_css'] = False
            # remove a possibly half-written cache file
            try:
                if os.path.exists(cache_filename):
                    os.unlink(cache_filename)
            except:
                pass
        finally:
            # best-effort close of whichever handles were opened
            try:
                input_temp_fh.close()
            except:
                pass
            try:
                output_fh.close()
            except:
                pass
def chain_common_js():
    ''' Add translations to the common JS in share/splunk/search_mrsparkle/exposed/js,
    EXCLUDING anything in contrib/, which does not need translations '''
    with _chain_common_js_lock:
        logger.debug('Chaining and minifying common JS')
        try:
            locale = cherrypy.request.lang # defaults to en_US, handy for precaching in root.py
            js_root = os.path.join(MRSPARKLE, 'exposed', 'js')
            cache_filename = make_absolute(
                os.path.join(
                    lib.i18n.CACHE_PATH, '%s-%s-%s.cache' %
                    ('common.min.js', hashlib.md5('common.min.js').hexdigest(), locale)))
            js_filenames = startup.generateJSManifest(True)
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                cache_mtime = os.path.getmtime(cache_filename)
                # if any .js files were touched, one of js_root or js_root/contrib will have the bumped timestamp
                if cache_mtime < os.path.getmtime(js_root) or cache_mtime < os.path.getmtime(js_root + os.sep + 'contrib'):
                    os.unlink(cache_filename)
                # also invalidate when the manifest generator itself changed
                elif cache_mtime < os.path.getmtime(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'startup.py')):
                    os.unlink(cache_filename)
                else:
                    return cache_filename
            output_fh = file(cache_filename, 'wb')
            # many duplicate JS translation snippets
            blocks = []
            js = ''
            for js_filename in js_filenames:
                if js_filename == 'i18n.js':
                    # i18n.js is generated per-locale rather than read from disk
                    path = lib.i18n.dispatch_i18n_js(os.path.join(js_root, 'i18n.js'))
                    input_temp_fh = file(path, 'rb')
                    js += input_temp_fh.read()
                    input_temp_fh.close()
                else:
                    path = os.path.join(js_root, js_filename)
                    if os.sep + 'contrib' + os.sep not in path:
                        # prepend the translation snippet once per distinct block
                        block = lib.i18n.generate_wrapped_js(path, locale)
                        if block and block not in blocks:
                            blocks.append(block)
                            js += block
                    input_temp_fh = file(path, 'r')
                    js += input_temp_fh.read() + ';'
                    input_temp_fh.close()
            minifier = Popen([PATH_TO_JSMIN], stdin=subprocess.PIPE, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, close_fds=True)
            (data, err) = minifier.communicate(js)
            if minifier.returncode != 0:
                # jsmin failed: disable minification for the rest of the process
                logger.error('While minifying common JavaScript, jsmin (pid %d) returned code %d' % (minifier.pid, minifier.returncode))
                logger.error('Disabling minification of JavaScript and CSS')
                cherrypy.config['minify_js'] = False
                cherrypy.config['minify_css'] = False
            else:
                output_fh.write(data)
            output_fh.close()
            return cache_filename
        except IOError:
            logger.error('While minifying common JavaScript, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
            logger.error('Disabling minification of JavaScript and CSS')
            cherrypy.config['minify_js'] = False
            cherrypy.config['minify_css'] = False
            # remove a possibly half-written cache file
            try:
                if os.path.exists(cache_filename):
                    os.unlink(cache_filename)
            except:
                pass
        finally:
            # best-effort close of whichever handles were opened
            try:
                input_temp_fh.close()
            except:
                pass
            try:
                output_fh.close()
            except:
                pass
def mount_static(ctrl, global_cfg, cfg):
    """Configure CherryPy to serve static assets under the configured static
    endpoint, with i18n-aware resolvers, gzip for text types, and
    product-specific favicon selection."""
    static_endpoint = global_cfg['static_endpoint']
    static_app_dir= make_absolute('etc/apps', '')

    # resolver for static content bundled with applications
    def static_app_resolver(section, branch, dir):
        """ Resolver that pulls application specific assets. """
        parts = branch.split('/')
        subbranch, app, asset = parts[0], parts[1], '/'.join(parts[2:] )
        appstaticdir = os.path.normpath(os.path.join(dir, app, 'appserver', 'static'))
        fn = os.path.normpath(os.path.join(appstaticdir, asset))
        # containment check guards against '..' directory traversal out of
        # the app's static dir
        if fn.startswith(appstaticdir) and fn.startswith(os.path.normpath(dir)) and os.path.exists(fn):
            sp = os.path.splitext(asset)
            # translate app JS unless it is third-party (js/contrib) or the
            # request opted out with ?i18noff
            if sp[1] == '.js' and not asset.startswith('js/contrib') and 'i18noff' not in cherrypy.request.query_string:
                i18n_cache = i18n.translate_js(fn)
                if i18n_cache:
                    return i18n_cache
            return fn
        return False

    def static_resolver(section, branch, dir):
        """resolver that knows how to add translations to javascript files"""
        # chain off to another resolver for statics served from application bundles.
        # overrides the 'dir' param with where applications are stored.
        if branch.startswith('app/'):
            return static_app_resolver(section, branch, static_app_dir)

        sp = os.path.splitext(branch)
        fn = os.path.join(dir, branch)

        if branch == 'js/i18n.js':
            return i18n.dispatch_i18n_js(fn) # send the locale data with the i18n.js system
        elif branch.endswith('common.min.js'):
            return filechain.chain_common_js() # returns the path to a cached file containing the finished cache file
        elif branch.startswith('js/splunkjs'):
            # splunkjs is served untranslated by the default handler
            return False
        elif not branch.startswith('js/contrib') and sp[1] == '.js' and os.path.exists(fn) and 'i18noff' not in cherrypy.request.query_string:
            return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map
        return False # fallback to the default handler

    if (global_cfg.get('static_dir','') == '') :
        logger.warn('static endpoint configured, but no static directory. Falling back to ' + FAILSAFE_STATIC_DIR)
    staticdir = make_absolute(global_cfg.get('static_dir', FAILSAFE_STATIC_DIR), '')
    global_cfg['staticdir'] = staticdir

    cfg[static_endpoint] = {
        'tools.sessions.on' : False, # no session required for static resources
        'tools.staticdir.on' : True,
        'tools.staticdir.dir' : staticdir,
        'tools.staticdir.strip_version' : True,
        'tools.staticdir.resolver' : static_resolver,
        'tools.staticdir.content_types' : {
            'js' : 'application/javascript',
            'css': 'text/css',
            'cache': 'text/javascript', # correct python's application/x-javascript
            'woff': 'application/font-woff'
        },
        'tools.gzip.on' : True,
        'tools.gzip.mime_types' : ['text/plain', 'text/html', 'text/css', 'application/javascript', 'application/x-javascript', 'text/javascript']
    }

    # hunk builds ship their own favicon
    faviconFile = 'favicon.ico'
    if 'product_type' in cherrypy.config:
        if cherrypy.config['product_type'] == 'hunk':
            faviconFile = 'favicon_hunk.ico'
    ctrl.robots_txt = cherrypy.tools.staticfile.handler(os.path.join(staticdir, 'robots.txt'))
    ctrl.favicon_ico = cherrypy.tools.staticfile.handler(os.path.join(staticdir, 'img', faviconFile))
import math
from lib import util, apps
import cPickle as pickle
import json
import logging
import lxml.etree as et
import splunk.util
import re, string
import time

# NOTE(review): this chunk uses os.path and cherrypy below but neither import
# is visible here — presumably imported elsewhere in the full file; confirm.

logger = logging.getLogger('splunk.appserver.lib.i18n')

# splunkd-style ISO-8601 timestamp format (%Q = subsecond component)
ISO8609_MICROTIME = '%Y-%m-%dT%H:%M:%S.%Q%z'

# gettext catalogs live next to this package
LOCALE_PATH = os.path.join(os.path.dirname(__file__), '..', 'locale')

# on-disk cache for generated i18n JS artifacts
CACHE_PATH = util.make_absolute(
    os.path.join('var', 'lib', 'splunk', 'appserver', 'i18n'))

# apps shipped with Splunk itself (as opposed to user-installed apps)
INTERNAL_APPS = [
    'gettingstarted', 'launcher', 'learned', 'legacy', 'sample_app', 'search',
    'splunkdeploymentmonitor', 'splunkforwarder', 'splunklightforwarder',
    'user-prefs'
]


def current_lang(as_string=False):
    """ Return the user's current language/locale
        If as_string==True then returns a string fr, fr_FR, fr_FR.ISO8859-1 etc
        else returns a tuple (lang, locale, encoding)
    """
    if as_string:
        return cherrypy.request.lang
    # NOTE(review): the tuple-returning branch described in the docstring is
    # not visible in this chunk — the function appears truncated here.
def run(blocking=True):
    '''
    Configure and start the CherryPy application server.

    Reads the web.conf [settings] stanza, applies command line overrides,
    sets up SSL and logging, mounts the controller tree and starts the
    engine.  When `blocking` is True (the default) this does not return
    until the server shuts down; the Windows service wrapper passes False
    and drives the engine itself.
    '''
    # get confs
    global_cfg = splunk_to_cherry_cfg('web', 'settings')

    # allow command line arguments to override the configuration
    # eg. --httpport=80
    args = util.args_to_dict()

    # debugging can be turned on from the command line with --debug
    if args.get('debug'):
        del args['debug']
        logger.setLevel(logging.DEBUG)
        for lname, litem in logger.manager.loggerDict.items():
            if not isinstance(litem, logging.PlaceHolder):
                logger.debug("Updating logger=%s to level=DEBUG" % lname)
                litem.setLevel(logging.DEBUG)
        args['js_logger_mode'] = 'Server'
        args['js_no_cache'] = True
    global_cfg.update(args)

    # support SPLUNK_BINDIP backwards compatibly. -- overrides web.conf
    if os.environ.has_key('SPLUNK_BINDIP'):
        global_cfg['server.socket_host'] = os.environ['SPLUNK_BINDIP'].strip()
    global_cfg['server.socket_port'] = global_cfg['httpport']

    if normalizeBoolean(global_cfg.get('enableSplunkWebSSL', False)):
        logger.info('Enabling SSL')
        priv_key_path = str(global_cfg['privKeyPath'])
        ssl_certificate = str(global_cfg['caCertPath'])
        ssl_ciphers = str(global_cfg['cipherSuite'])

        # relative key/cert paths are resolved against SPLUNK_HOME
        if os.path.isabs(priv_key_path):
            global_cfg['server.ssl_private_key'] = priv_key_path
        else:
            global_cfg['server.ssl_private_key'] = make_splunkhome_path([priv_key_path])

        if os.path.isabs(ssl_certificate):
            global_cfg['server.ssl_certificate'] = ssl_certificate
        else:
            global_cfg['server.ssl_certificate'] = make_splunkhome_path([ssl_certificate])

        if not os.path.exists(global_cfg['server.ssl_private_key']):
            raise ValueError("%s Not Found" % global_cfg['server.ssl_private_key'])
        if not os.path.exists(global_cfg['server.ssl_certificate']):
            raise ValueError("%s Not Found" % global_cfg['server.ssl_certificate'])

        if global_cfg.get('supportSSLV3Only'):
            global_cfg['server.ssl_v3_only'] = True
        if ssl_ciphers:
            global_cfg['server.ssl_ciphers'] = ssl_ciphers
    else:
        # make sure the secure flag is not set on session cookies if we're not serving over SSL
        global_cfg['tools.sessions.secure'] = False

    # setup cherrypy logging infrastructure
    if global_cfg.has_key('log.access_file'):
        filename = make_absolute(global_cfg['log.access_file'], BASE_LOG_PATH)
        maxsize = int(global_cfg.get('log.access_maxsize', 0))
        maxcount = int(global_cfg.get('log.access_maxfiles', 5))
        if maxsize > 0:
            # rotate ourselves; blank out cherrypy's own access file so it
            # doesn't also open the log
            cherrypy.log.access_file = ''
            h = logging.handlers.RotatingFileHandler(filename, 'a', maxsize, maxcount)
            h.setLevel(logging.INFO)
            h.setFormatter(_cplogging.logfmt)
            cherrypy.log.access_log.addHandler(h)
            del global_cfg['log.access_file']
        else:
            global_cfg['log.access_file'] = filename

    if global_cfg.has_key('log.error_file'):
        # we've already committed to web_service.log by this point
        del global_cfg['log.error_file']
    cherrypy.log.error_file = ''
    cherrypy.log.error_log.addHandler(splunk_log_handler)
    if global_cfg.has_key('log.error_maxsize'):
        splunk_log_handler.maxBytes = int(global_cfg['log.error_maxsize'])
        splunk_log_handler.backupCount = int(global_cfg.get('log.error_maxfiles', 5))

    # now that we have somewhere to log, test the ssl keys. - SPL-34126
    # Lousy solution, but python's ssl itself hangs with encrypted keys, so avoid hang by
    # bailing with a message
    # NOTE(review): this tests the raw conf value, not the normalizeBoolean()
    # result used above -- relies on splunk_to_cherry_cfg normalizing
    # booleans; confirm.
    if global_cfg['enableSplunkWebSSL']:
        for cert_file in (global_cfg['server.ssl_private_key'], global_cfg['server.ssl_certificate']):
            if is_encrypted_cert(cert_file):
                logger.error("""Specified cert '%s' is encrypted with a passphrase.  SplunkWeb does not support passphrase-encrypted keys at this time.  To resolve the problem, decrypt the keys on disk, generate new passphrase-less keys, or disable ssl for SplunkWeb.""" % cert_file)
                raise Exception("Unsupported encrypted cert file.")

    # set login settings
    if global_cfg.get('tools.sessions.storage_type') == 'file':
        global_cfg['tools.sessions.storage_path'] = make_absolute(global_cfg['tools.sessions.storage_path'])

    # SPL-16963: add port number to session key to allow for sessions for multiple
    # instances to run on a single host, without mutually logging each other out.
    global_cfg['tools.sessions.name'] = "session_id_%s" % global_cfg['httpport']

    # set mako template cache directory
    global_cfg.setdefault('mako_cache_path', MAKO_CACHE_PATH)

    root_name = global_cfg.get('root_endpoint', FAILSAFE_ROOT_ENDPOINT).strip('/')

    ctrl = TopController()
    cfg = {'global': global_cfg}

    # initialize all of the custom endpoints that are registered in the apps
    ctrl.custom.load_handlers()

    # Serve static files if so configured
    if global_cfg.has_key('static_endpoint'):
        mount_static(ctrl, global_cfg, cfg)

    if global_cfg.has_key('testing_endpoint'):
        if (global_cfg.get('static_dir', '') == ''):
            logger.warn('testing endpoint configured, but no testing directory. Falling back to ' + FAILSAFE_TESTING_DIR)
        staticdir = make_absolute(global_cfg.get('testing_dir', FAILSAFE_TESTING_DIR), '')
        cfg[global_cfg['testing_endpoint']] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': staticdir,
            'tools.staticdir.strip_version': True
        }

    if global_cfg.has_key('rss_endpoint'):
        logger.debug('Checking for shared storage location')
        rssdir = get_rss_parent_dir()
        if len(rssdir) > 0:
            logger.debug('Using shared storage location: %s' % rssdir)
        else:
            rssdir = make_absolute(global_cfg.get('rss_dir', FAILSAFE_RSS_DIR), '')
            logger.debug('No shared storage location configured, using: %s' % rssdir)
        cfg[global_cfg['rss_endpoint']] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': rssdir,
            'tools.staticdir.strip_version': False,
            'tools.staticdir.default_ext': 'xml',
            'error_page.404': make_splunkhome_path([FAILSAFE_STATIC_DIR, 'html', 'rss_404.html'])
        }

    # Modules served statically out of /modules or out of an app's modules dir
    def module_resolver(section, branch, dir):
        from lib.apps import local_apps
        # first part of branch is the module name
        parts = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/')
        locale = i18n.current_lang(True)
        if not parts:
            return False
        module_path = local_apps.getModulePath(parts[0])
        if module_path:
            fn = os.path.join(module_path, *parts[1:])
            if fn.endswith('.js') and os.path.exists(fn):
                # returns the path to a cached file containing the original js + json translation map
                return i18n.translate_js(fn)
            return fn
        elif parts[0].startswith('modules-') and parts[0].endswith('.js'):
            # renamed from `hash` so the builtin isn't shadowed
            cache_hash = parts[0].replace('modules-', '').replace('.min.js', '')
            return make_absolute(os.path.join(i18n.CACHE_PATH, '%s-%s-%s.cache' % ('modules.min.js', cache_hash, locale)))
        elif parts[0].startswith('modules-') and parts[0].endswith('.css'):
            return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + parts[0]
        return False

    moddir = make_absolute(global_cfg.get('module_dir', FAILSAFE_MODULE_PATH))
    cfg['/modules'] = {
        'tools.staticdir.strip_version': True,
        'tools.staticdir.on': True,
        # only files with extensions other than .html, .py and .spec are served
        'tools.staticdir.match': re.compile(r'.*\.(?!html$|spec$|py$)'),
        'tools.staticdir.dir': moddir,
        'tools.staticdir.resolver': module_resolver,
        'tools.staticdir.content_types': {'js': 'application/javascript'}  # correct python's application/x-javascript
    }

    cfg['/'] = {
        'request.dispatch': i18n.I18NDispatcher(),
    }

    # enable gzip + i18n goodness
    if global_cfg.get('enable_gzip', False):
        cfg['/'].update({
            'tools.gzip.on': True,
            'tools.gzip.mime_types': ['text/plain', 'text/html', 'text/css', 'application/javascript', 'application/x-javascript'],
        })

    # Set maximum filesize we can receive (in MB)
    maxsize = global_cfg.get('max_upload_size', DEFAULT_MAX_UPLOAD_SIZE)
    cfg['global']['server.max_request_body_size'] = int(maxsize) * 1024 * 1024

    if global_cfg.get('enable_throttle', False):
        from lib import throttle
        cfg['global'].update({
            'tools.throttle.on': True,
            'tools.throttle.bandwidth': int(global_cfg.get('throttle_bandwidth', 50)),
            'tools.throttle.latency': int(global_cfg.get('throttle_latency', 100))
        })

    if global_cfg.get('enable_log_runtime', False):
        points = global_cfg.get('enable_log_runtime')
        if points == 'All':
            points = 'on_start_resource,before_request_body,before_handler,before_finalize,on_end_resource,on_end_request'
        if points is True:
            points = 'on_end_resource'
        for point in points.split(','):
            # closure binds `point` per iteration so each tool logs its own hook
            def log_closure(point):
                def log():
                    import time
                    starttime = cherrypy.response.time
                    endtime = time.time()
                    delta = (endtime - starttime) * 1000
                    logger.warn('log_runtime point=%s path="%s" start=%f end=%f delta_ms=%.1f' % (point, cherrypy.request.path_info, starttime, endtime, delta))
                return log
            setattr(cherrypy.tools, 'log_' + point, cherrypy.Tool(point, log_closure(point)))
            cfg['/']['tools.log_%s.on' % point] = True

    if global_cfg.get('storm_enabled'):
        from splunk.appserver.mrsparkle.lib.storm import hook_storm_session
        hook_storm_session()

    # setup handler to create and remove the pidfile
    pid_path = make_absolute(global_cfg.get('pid_path', PID_PATH))
    ProcessID(cherrypy.engine, pid_path).subscribe()

    #
    # process splunkd status information
    #
    startup.initVersionInfo()

    # set start time for restart checking
    cfg['global']['start_time'] = time.time()

    # setup global error handling page
    cfg['global']['error_page.default'] = error.handleError

    #
    # TODO: refactor me into locale stuff
    #
    cfg['global']['DISPATCH_TIME_FORMAT'] = '%s.%Q'
    # END

    # Common splunk paths
    cfg['global']['etc_path'] = make_absolute(SPLUNK_ETC_PATH)
    cfg['global']['site_packages_path'] = make_absolute(SPLUNK_SITE_PACKAGES_PATH)
    cfg['global']['mrsparkle_path'] = make_absolute(SPLUNK_MRSPARKLE_PATH)

    listen_on_ipv6 = global_cfg.get('listenOnIPv6')
    socket_host = global_cfg.get('server.socket_host')
    if not socket_host:
        if listen_on_ipv6:
            socket_host = global_cfg['server.socket_host'] = '::'
        else:
            socket_host = global_cfg['server.socket_host'] = '0.0.0.0'
        logger.info("server.socket_host defaulting to %s" % socket_host)

    if ':' in socket_host:
        if not listen_on_ipv6:
            logger.warn('server.socket_host was set to IPv6 address "%s", so ignoring listenOnIPv6 value of "%s"' % (socket_host, listen_on_ipv6))
    else:
        if listen_on_ipv6:
            # BUGFIX: message previously read 'was to to ... values of' --
            # corrected to match the IPv6 warning above
            logger.warn('server.socket_host was set to IPv4 address "%s", so ignoring listenOnIPv6 value of "%s"' % (socket_host, listen_on_ipv6))

    if socket_host == '::':
        # Start a second server to listen to the IPV6 socket
        if isinstance(listen_on_ipv6, bool) or listen_on_ipv6.lower() != 'only':
            global_cfg['server.socket_host'] = '0.0.0.0'
            from cherrypy import _cpserver
            from cherrypy import _cpwsgi_server
            server2 = _cpserver.Server()
            server2.httpserver = _cpwsgi_server.CPWSGIServer()
            server2.httpserver.bind_addr = ('::', global_cfg['server.socket_port'])
            server2.socket_host = '::'
            server2.socket_port = global_cfg['server.socket_port']
            # mirror the primary server's SSL settings onto the IPv6 listener
            for key in ('ssl_private_key', 'ssl_certificate', 'ssl_v3_only', 'ssl_ciphers'):
                if 'server.' + key in global_cfg:
                    setattr(server2, key, global_cfg['server.' + key])
                    setattr(server2.httpserver, key, global_cfg['server.' + key])
            server2.subscribe()

    if root_name:
        # redirect / to the root endpoint
        cherrypy.tree.mount(RootController(), '/', cfg)

    cherrypy.config.update(cfg)

    if global_cfg.get('enable_profile', False):
        from cherrypy.lib import profiler
        cherrypy.tree.graft(
            profiler.make_app(cherrypy.Application(ctrl, '/' + root_name, cfg),
                              path=global_cfg.get('profile_path', '/tmp/profile')),
            '/' + root_name)
    else:
        cherrypy.tree.mount(ctrl, '/' + root_name, cfg)

    # this makes Ctrl-C work when running in nodaemon
    cherrypy.engine.signal_handler.subscribe()

    if splunk.clilib.cli_common.isWindows:
        from cherrypy.process import win32
        # NOTE(review): the handler is stored on cherrypy but subscribed via
        # cherrypy.engine.console_control_handler -- relies on cherrypy's own
        # win32 setup having created that attribute; confirm on Windows.
        cherrypy.console_control_handler = win32.ConsoleCtrlHandler(cherrypy.engine)
        cherrypy.engine.console_control_handler.subscribe()

    # log active config
    for k in sorted(cherrypy.config):
        logger.info('CONFIG: %s (%s): %s' % (k, type(cherrypy.config[k]).__name__, cherrypy.config[k]))

    cherrypy.engine.start()

    # clean up caches on init
    filechain.clear_cache()
    i18n.init_js_cache()

    if blocking:
        # this routine that starts this as a windows service will not want us to block here.
        cherrypy.engine.block()
import math from lib import util, apps import cPickle as pickle import json import logging import lxml.etree as et import splunk.util import re, string import time logger = logging.getLogger('splunk.appserver.lib.i18n') ISO8609_MICROTIME='%Y-%m-%dT%H:%M:%S.%Q%z' LOCALE_PATH = os.path.join(os.path.dirname(__file__), '..', 'locale') CACHE_PATH = util.make_absolute(os.path.join('var', 'lib', 'splunk', 'appserver', 'i18n')) INTERNAL_APPS = ['gettingstarted', 'launcher', 'learned', 'legacy', 'sample_app', 'search', 'splunkdeploymentmonitor', 'splunkforwarder', 'splunklightforwarder', 'user-prefs'] def current_lang(as_string=False): """ Return the user's current language/locale If as_string==True then returns a string fr, fr_FR, fr_FR.ISO8859-1 etc else returns a tuple (lang, locale, encoding) """ if as_string: return cherrypy.request.lang return parse_localestr(cherrypy.request.lang) def parse_localestr(locale): """ Parse a locale string such as en, fr_FR, fr_FR.ISO8859-1 into language, locale and encoding
def chain_modules_js(files):
    '''
    Concatenate, translate and minify the JS for the installed modules.

    `files` is the ordered list of module JS paths (either
    "/modules/<Name>.js" or "/modules/<app>/<module>/<file>" style).
    Returns the path to a cached .min.js file keyed on the file-list hash
    and the current locale; on failure minification is disabled globally
    and None is returned (the except branch has no return).
    '''
    # we could create a lock for each hash instead of a global lock here
    # but the savings of potentially generating/checking different module groups concurrently
    # probably isn't worth managing a dictionary of locks
    with _chain_modules_js_lock:
        logger.debug('Chaining and minifying modules JS')
        try:
            locale = lib.i18n.current_lang(True)
            modules = libmodule.moduleMapper.getInstalledModules()
            hash = generate_file_list_hash(files)
            # cache name encodes prefix + file-list hash + locale
            cache_filename = make_absolute(os.path.join(lib.i18n.CACHE_PATH, '%s-%s-%s.cache' % (MODULE_JS_FILE_PREFIX, hash, locale)))
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                cache_mtime = os.path.getmtime(cache_filename)
                # check if root directory was modified (app installed, etc., where indiv. timestamps may be well in the past)
                if cache_mtime < os.path.getmtime(os.path.join(MRSPARKLE, 'modules')) or cache_mtime < os.path.getmtime(make_absolute(os.path.join('etc', 'apps'))):
                    os.unlink(cache_filename)
                else:
                    # check individual files, so if they've been touched we'll poison the cache
                    for input_filename in files:
                        parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                        if len(parts) == 2:
                            # "/modules/Foo.js" -> lives under MRSPARKLE
                            input_path = os.path.join(MRSPARKLE, *parts)
                        else:
                            # app-provided module: resolve via the module map
                            module_path = local_apps.getModulePath(parts[1])
                            input_path = os.path.join(module_path, *parts[2:])
                        if cache_mtime < os.path.getmtime(input_path):
                            os.unlink(cache_filename)
                            break
            # cache still valid (nothing above unlinked it) -> serve it
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                return cache_filename
            output_fh = file(cache_filename, 'wb')
            # many duplicate JS translation blocks; `blocks` records the ones
            # already seen so each translation snippet is only considered once
            blocks = []
            js = ''
            # wrap each module's JS in try/catch so a broken module reports
            # itself instead of killing the whole concatenated script
            wrap_try_catch = splunk.util.normalizeBoolean(cherrypy.config.get('trap_module_exceptions', True))
            for input_filename in files:
                parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                if len(parts) == 2:
                    input_path = os.path.join(MRSPARKLE, *parts)
                    # since we don't have the module name from something like /modules/AbstractModule.js, try
                    # to figure it out from the module list
                    for key in modules:
                        if modules[key]['js'].endswith(os.path.join(*parts)):
                            moduleName = key
                            break
                else:
                    module_path = local_apps.getModulePath(parts[1])
                    input_path = os.path.join(module_path, *parts[2:])
                    for key in modules:
                        if modules[key]['js'].endswith(os.path.join(*parts)):
                            moduleName = key
                            break
                # NOTE(review): if no entry matches, moduleName keeps its value
                # from a previous iteration (or is unbound on the first) --
                # the catch-block text below would then name the wrong module;
                # confirm every chained file appears in getInstalledModules().
                block = lib.i18n.generate_wrapped_js(input_path, locale)
                if block:
                    if block not in blocks:
                        blocks.append(block)
                    if wrap_try_catch:
                        js += 'try{' + block
                    else:
                        js += block
                # NOTE(review): when wrap_try_catch is on but generate_wrapped_js
                # returned nothing, no 'try{' is emitted here yet the '}catch'
                # below is appended unconditionally -- presumably the
                # translation block is always truthy; confirm.
                input_temp_fh = file(input_path, 'r')
                js += input_temp_fh.read() + ';'
                input_temp_fh.close()
                if wrap_try_catch:
                    js += '}catch(e){var err="The module \'%s\' in the \'%s\' app has thrown an unexpected error and may not function properly. Contact the app author or disable the app to remove this message. ";if(window.console){window.console.log(err);}$(function(){Splunk.Messenger.System.getInstance().send("error","%s",err);});}' % (moduleName, modules[moduleName]['appName'], moduleName)
            # pipe the concatenated source through the external jsmin binary
            minifier = Popen([PATH_TO_JSMIN], stdin=subprocess.PIPE, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, close_fds=True)
            (data, err) = minifier.communicate(js)
            if minifier.returncode != 0:
                logger.error('While minifying modules JavaScript, jsmin (pid %d) returned code %d' % (minifier.pid, minifier.returncode))
                # minification is disabled globally for the rest of the process
                logger.error('Disabling minification of JavaScript and CSS')
                cherrypy.config['minify_js'] = False
                cherrypy.config['minify_css'] = False
            else:
                output_fh.write(data)
            output_fh.close()
            return cache_filename
        except IOError:
            logger.error('While minifying modules JavaScript, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
            logger.error('Disabling minification of JavaScript and CSS')
            cherrypy.config['minify_js'] = False
            cherrypy.config['minify_css'] = False
            # best effort: drop the (possibly partial) cache file
            try:
                if os.path.exists(cache_filename):
                    os.unlink(cache_filename)
            except:
                pass
        finally:
            # handles may be unbound if we failed before opening them,
            # hence the blanket excepts
            try:
                input_temp_fh.close()
            except:
                pass
            try:
                output_fh.close()
            except:
                pass
def chain_modules_css(files):
    '''
    Concatenate and minify the CSS for the installed modules.

    `files` is the list of module asset paths; only entries under
    "/modules/" are chained.  Returns the path to a cached .min.css file
    keyed on the file-list hash; on IOError minification is disabled
    globally and None is returned (the except branch has no return).
    '''
    logger.debug('Chaining and minifying modules CSS')
    try:
        # make sure the css cache directory exists
        if not os.path.exists(os.path.join(MODULE_STATIC_CACHE_PATH, 'css')):
            os.makedirs(os.path.join(MODULE_STATIC_CACHE_PATH, 'css'))
        hash = generate_file_list_hash(files)
        cache_filename = make_absolute(os.path.join(MODULE_STATIC_CACHE_PATH, 'css', MODULE_CSS_FILE_PREFIX + hash + '.min.css'))
        if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
            cache_mtime = os.path.getmtime(cache_filename)
            # poison the cache if any source file is newer than it
            for input_filename in files:
                if input_filename.startswith('/modules/'):
                    parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                    if len(parts) == 2:
                        # "/modules/Foo.css" -> lives under MRSPARKLE
                        input_path = os.path.join(MRSPARKLE, *parts)
                    else:
                        # app-provided module: resolve via the module map
                        module_path = local_apps.getModulePath(parts[1])
                        input_path = os.path.join(module_path, *parts[2:])
                    if cache_mtime < os.path.getmtime(input_path):
                        os.unlink(cache_filename)
                        break
        else:
            # cache_filename opened 'wb' below, no need to unlink() here if it's 0 bytes
            pass
        # cache still valid (nothing above unlinked it) -> serve it
        if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
            return cache_filename
        output_fh = file(cache_filename, 'wb')
        for input_filename in files:
            if input_filename.startswith('/modules/'):
                parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
                if len(parts) == 2:
                    input_path = os.path.join(MRSPARKLE, *parts)
                else:
                    module_path = local_apps.getModulePath(parts[1])
                    input_path = os.path.join(module_path, *parts[2:])
                # minify each file individually and append to the cache file
                input_temp_fh = file(input_path, 'rb')
                output_fh.write(cssmin.cssmin(input_temp_fh.read()))
                input_temp_fh.close()
        return cache_filename
    except IOError:
        logger.error('While minifying modules CSS, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
        # minification is disabled globally for the rest of the process
        logger.error('Disabling minification of JavaScript and CSS')
        cherrypy.config['minify_js'] = False
        cherrypy.config['minify_css'] = False
        # best effort: drop the (possibly partial) cache file
        try:
            if os.path.exists(cache_filename):
                os.unlink(cache_filename)
        except:
            pass
    finally:
        # handles may be unbound if we failed before opening them,
        # hence the blanket excepts
        try:
            input_temp_fh.close()
        except:
            pass
        try:
            output_fh.close()
        except:
            pass
splunk.rest.SPLUNKD_CONNECTION_TIMEOUT = defaultSplunkdConnectionTimeout except TypeError, e: logger.error("Exception while trying to get splunkdConnectionTimeout from web.conf e=%s" % e) splunk.rest.SPLUNKD_CONNECTION_TIMEOUT = defaultSplunkdConnectionTimeout finally: logger.info("splunkdConnectionTimeout=%s" % splunk.rest.SPLUNKD_CONNECTION_TIMEOUT) # # TODO: refactor me into locale stuff # cfg['global']['DISPATCH_TIME_FORMAT'] = '%s.%Q' # END # Common splunk paths cfg['global']['etc_path'] = make_absolute(SPLUNK_ETC_PATH) cfg['global']['site_packages_path'] = make_absolute(SPLUNK_SITE_PACKAGES_PATH) cfg['global']['mrsparkle_path'] = make_absolute(SPLUNK_MRSPARKLE_PATH) listen_on_ipv6 = global_cfg.get('listenOnIPv6') socket_host = global_cfg.get('server.socket_host') if not socket_host: if listen_on_ipv6: socket_host = global_cfg['server.socket_host'] = '::' else: socket_host = global_cfg['server.socket_host'] = '0.0.0.0' logger.info("server.socket_host defaulting to %s" % socket_host) if ':' in socket_host: if not listen_on_ipv6: logger.warn('server.socket_host was set to IPv6 address "%s", so ignoring listenOnIPv6 value of "%s"' % (socket_host, listen_on_ipv6))
import lib.i18n import cssmin from lib.util import make_absolute, Popen import splunk.util import logging import startup import splunk.appserver.mrsparkle.lib.module as libmodule from lib.apps import local_apps import cherrypy logger = logging.getLogger('splunk.appserver.lib.i18n') MODULE_STATIC_CACHE_PATH = make_absolute(os.path.join('var', 'lib', 'splunk', 'appserver' ,'modules', 'static')) MRSPARKLE = make_absolute(os.path.join('share', 'splunk', 'search_mrsparkle')) PATH_TO_JSMIN = make_absolute(os.path.join('bin', 'jsmin')) # define the filename prefixes for the cached versions of the concatenated # static resources MODULE_JS_FILE_PREFIX = 'modules.min.js' MODULE_CSS_FILE_PREFIX = 'modules-' ''' This is the logic to minify and chain various sets of CSS and JavaScript. The discrete chunks are the modules JS (anything found in getInstalledModules() in moduleMapper), common JS (anything in exposed/js and exposed JS/contrib) and the modules CSS (again, from getInstalledModules())
def run(blocking=True): # get confs global_cfg = splunk_to_cherry_cfg('web', 'settings') # allow command line arguments to override the configuration # eg. --httpport=80 args = util.args_to_dict() # splunkd proxied mode proxied_arg = args.get('proxied') global_cfg['is_proxied'] = False if proxied_arg: del args['proxied'] proxied_parts = proxied_arg.split(',') if len(proxied_parts) == 2: proxied_ip_addr = proxied_parts[0] proxied_port = int(proxied_parts[1]) logger.info('Proxied mode ip_address=%s port=%s:' % (proxied_ip_addr, proxied_port)) global_cfg['is_proxied'] = True global_cfg['startwebserver'] = 1 global_cfg['httpport'] = proxied_port global_cfg['enableSplunkWebSSL'] = False global_cfg['remoteUser'] = '******' global_cfg['SSOMode'] = 'strict' global_cfg['trustedIP'] = proxied_ip_addr global_cfg['server.socket_host'] = proxied_ip_addr else: logger.warn("Proxied mode flag invalid '%s'. --proxied=' IP_ADDR PORT'" % proxied_arg) # debugging can be turned on from the command line with --debug if args.get('debug'): del args['debug'] logger.setLevel(logging.DEBUG) for lname, litem in logger.manager.loggerDict.items(): if not isinstance(litem, logging.PlaceHolder): logger.debug("Updating logger=%s to level=DEBUG" % lname) litem.setLevel(logging.DEBUG) args['js_logger_mode'] = 'Server' args['js_no_cache'] = True global_cfg.update(args) # support SPLUNK_BINDIP backwards compatibly. 
-- overrides web.conf if (not global_cfg['is_proxied']) and os.environ.has_key('SPLUNK_BINDIP'): global_cfg['server.socket_host'] = os.environ['SPLUNK_BINDIP'].strip() global_cfg['server.socket_port'] = global_cfg['httpport'] if normalizeBoolean(global_cfg.get('enableSplunkWebSSL', False)): logger.info('Enabling SSL') priv_key_path = str(global_cfg['privKeyPath']) ssl_certificate = str(global_cfg['caCertPath']) ssl_ciphers = str(global_cfg['cipherSuite']) if os.path.isabs(priv_key_path): global_cfg['server.ssl_private_key'] = priv_key_path else: global_cfg['server.ssl_private_key'] = make_splunkhome_path([priv_key_path]) if os.path.isabs(ssl_certificate): global_cfg['server.ssl_certificate'] = ssl_certificate else: global_cfg['server.ssl_certificate'] = make_splunkhome_path([ssl_certificate]) if not os.path.exists(global_cfg['server.ssl_private_key']): raise ValueError("%s Not Found" % global_cfg['server.ssl_private_key']) if not os.path.exists(global_cfg['server.ssl_certificate']): raise ValueError("%s Not Found" % global_cfg['server.ssl_certificate']) if global_cfg.get('supportSSLV3Only'): global_cfg['server.ssl_v3_only'] = True if ssl_ciphers: global_cfg['server.ssl_ciphers'] = ssl_ciphers else: # make sure the secure flag is not set on session cookies if we're not serving over SSL global_cfg['tools.sessions.secure'] = False # setup cherrypy logging infrastructure if global_cfg.has_key('log.access_file'): filename = make_absolute(global_cfg['log.access_file'], BASE_LOG_PATH) maxsize = int(global_cfg.get('log.access_maxsize', 0)) maxcount = int(global_cfg.get('log.access_maxfiles', 5)) if maxsize > 0: cherrypy.log.access_file = '' h = logging.handlers.RotatingFileHandler(filename, 'a', maxsize, maxcount) h.setLevel(logging.INFO) h.setFormatter(_cplogging.logfmt) cherrypy.log.access_log.addHandler(h) del global_cfg['log.access_file'] else: global_cfg['log.access_file'] = filename if global_cfg.has_key('log.error_file'): # we've already committed to web_service.log 
by this point del global_cfg['log.error_file'] cherrypy.log.error_file = '' cherrypy.log.error_log.addHandler(splunk_log_handler) if global_cfg.has_key('log.error_maxsize'): splunk_log_handler.maxBytes = int(global_cfg['log.error_maxsize']) splunk_log_handler.backupCount = int(global_cfg.get('log.error_maxfiles', 5)) # now that we have somewhere to log, test the ssl keys. - SPL-34126 # Lousy solution, but python's ssl itself hangs with encrypted keys, so avoid hang by # bailing with a message if global_cfg['enableSplunkWebSSL']: for cert_file in (global_cfg['server.ssl_private_key'], global_cfg['server.ssl_certificate']): if is_encrypted_cert(cert_file): logger.error("""Specified cert '%s' is encrypted with a passphrase. SplunkWeb does not support passphrase-encrypted keys at this time. To resolve the problem, decrypt the keys on disk, generate new passphrase-less keys, or disable ssl for SplunkWeb.""" % cert_file) raise Exception("Unsupported encrypted cert file.") # set login settings if global_cfg.get('tools.sessions.storage_type') == 'file': global_cfg['tools.sessions.storage_path'] = make_absolute(global_cfg['tools.sessions.storage_path']) # SPL-16963: add port number to session key to allow for sessions for multiple # instances to run on a single host, without mutually logging each other out. 
global_cfg['tools.sessions.name'] = "session_id_%s" % global_cfg['httpport'] global_cfg['tools.csrfcookie.name'] = "splunkweb_csrf_token_%s" % global_cfg['httpport'] # set mako template cache directory global_cfg.setdefault('mako_cache_path', MAKO_CACHE_PATH) root_name = global_cfg.get('root_endpoint', FAILSAFE_ROOT_ENDPOINT).strip('/') ctrl = TopController() cfg = {'global' : global_cfg} # initialize all of the custom endpoints that are registered in the # apps ctrl.custom.load_handlers() # Serve static files if so configured if global_cfg.has_key('static_endpoint'): mount_static(ctrl, global_cfg, cfg) if global_cfg.has_key('testing_endpoint'): if (global_cfg.get('static_dir','') == '') : logger.warn('testing endpoint configured, but no testing directory. Falling back to ' + FAILSAFE_TESTING_DIR) staticdir = make_absolute(global_cfg.get('testing_dir', FAILSAFE_TESTING_DIR), '') cfg[global_cfg['testing_endpoint']] = { 'tools.staticdir.on' : True, 'tools.staticdir.dir' : staticdir, 'tools.staticdir.strip_version' : True } if global_cfg.has_key('rss_endpoint'): logger.debug('Checking for shared storage location') rssdir = get_rss_parent_dir() if len(rssdir) > 0: logger.debug('Using shared storage location: %s' % rssdir) else: rssdir = make_absolute(global_cfg.get('rss_dir', FAILSAFE_RSS_DIR), '') logger.debug('No shared storage location configured, using: %s' % rssdir) cfg[global_cfg['rss_endpoint']] = { 'tools.staticdir.on' : True, 'tools.staticdir.dir' : rssdir, 'tools.staticdir.strip_version' : False, 'tools.staticdir.default_ext' : 'xml', 'error_page.404': make_splunkhome_path([FAILSAFE_STATIC_DIR, 'html', 'rss_404.html']) } # Modules served statically out of /modules or out of an app's modules dir def module_resolver(section, branch, dir): from lib.apps import local_apps # first part of branch is the module name parts = os.path.normpath(branch.strip('/')).replace(os.path.sep, '/').split('/') locale = i18n.current_lang(True) if not parts: return False module_path 
= local_apps.getModulePath(parts[0]) if module_path: # this means there is a module named parts[0] # SPL-51365 images should load irrespective of css_minification. if parts[0]==parts[1]: # ignore of repetition of module name # happens for image request when minify_css=False fn = os.path.join(module_path, *parts[2:]) else: fn = os.path.join(module_path, *parts[1:]) #verified while fixing SPL-47422 #pylint: disable=E1103 if fn.endswith('.js') and os.path.exists(fn): return i18n.translate_js(fn) # returns the path to a cached file containing the original js + json translation map return fn elif parts[0].startswith('modules-') and parts[0].endswith('.js'): hash = parts[0].replace('modules-', '').replace('.min.js', '') return make_absolute(os.path.join(i18n.CACHE_PATH, '%s-%s-%s.cache' % ('modules.min.js', hash, locale))) elif parts[0].startswith('modules-') and parts[0].endswith('.css'): return filechain.MODULE_STATIC_CACHE_PATH + os.sep + 'css' + os.sep + parts[0] return False moddir = make_absolute(global_cfg.get('module_dir', FAILSAFE_MODULE_PATH)) cfg['/modules'] = { 'tools.staticdir.strip_version' : True, 'tools.staticdir.on' : True, 'tools.staticdir.match' : re.compile(r'.*\.(?!html$|spec$|py$)'), # only files with extensions other than .html, .py and .spec are served 'tools.staticdir.dir' : moddir, 'tools.staticdir.resolver' : module_resolver, 'tools.staticdir.content_types' : {'js' : 'application/javascript', 'css': 'text/css', 'cache': 'text/javascript'} # correct python's application/x-javascript } cfg['/'] = { 'request.dispatch': i18n.I18NDispatcher(), } # enable gzip + i18n goodness if global_cfg.get('enable_gzip', False): cfg['/'].update({ 'tools.gzip.on' : True, 'tools.gzip.mime_types' : ['text/plain', 'text/html', 'text/css', 'application/javascript', 'application/x-javascript', 'application/json'], }) #cfg['/']['tools.gzip.on'] = False # Set maximum filesize we can receive (in MB) maxsize = global_cfg.get('max_upload_size', DEFAULT_MAX_UPLOAD_SIZE) 
cfg['global']['server.max_request_body_size'] = int(maxsize) * 1024 * 1024 if global_cfg.get('enable_throttle', False): from lib import throttle cfg['global'].update({ 'tools.throttle.on' : True, 'tools.throttle.bandwidth': int(global_cfg.get('throttle_bandwidth', 50)), 'tools.throttle.latency': int(global_cfg.get('throttle_latency', 100)) }) if global_cfg.get('enable_log_runtime', False): points = global_cfg.get('enable_log_runtime') if points == 'All': points = 'on_start_resource,before_request_body,before_handler,before_finalize,on_end_resource,on_end_request' if points is True: points = 'on_end_resource' for point in points.split(','): def log_closure(point): def log(): import time starttime = cherrypy.response.time endtime = time.time() delta = (endtime - starttime) * 1000 logger.warn('log_runtime point=%s path="%s" start=%f end=%f delta_ms=%.1f' % (point, cherrypy.request.path_info, starttime, endtime, delta)) return log setattr(cherrypy.tools, 'log_'+point, cherrypy.Tool(point, log_closure(point))) cfg['/']['tools.log_%s.on' % point] = True if global_cfg.get('storm_enabled'): from splunk.appserver.mrsparkle.lib.storm import hook_storm_session hook_storm_session() if global_cfg.get('override_JSON_MIME_type_with_text_plain', False): import splunk.appserver.mrsparkle splunk.appserver.mrsparkle.MIME_JSON = "text/plain; charset=UTF-8" logger.info("overriding JSON MIME type with '%s'" % splunk.appserver.mrsparkle.MIME_JSON) # setup handler to create and remove the pidfile pid_path = make_absolute(global_cfg.get('pid_path', PID_PATH)) ProcessID(cherrypy.engine, pid_path).subscribe() # # process splunkd status information # startup.initVersionInfo() # set start time for restart checking cfg['global']['start_time'] = time.time() # setup global error handling page cfg['global']['error_page.default'] = error.handleError # set splunkd connection timeout import splunk.rest defaultSplunkdConnectionTimeout = 30 try: splunkdConnectionTimeout = 
int(global_cfg.get('splunkdConnectionTimeout',defaultSplunkdConnectionTimeout)) if splunkdConnectionTimeout < defaultSplunkdConnectionTimeout: splunkdConnectionTimeout = defaultSplunkdConnectionTimeout splunk.rest.SPLUNKD_CONNECTION_TIMEOUT = splunkdConnectionTimeout except ValueError, e: logger.error("Exception while trying to get splunkdConnectionTimeout from web.conf e=%s" % e) splunk.rest.SPLUNKD_CONNECTION_TIMEOUT = defaultSplunkdConnectionTimeout
def chain_common_js():
    '''
    Add translations to the common JS in share/splunk/search_mrsparkle/exposed/js,
    EXCLUDING anything in contrib/, which does not need translations.

    Returns the absolute path of a cached file containing the concatenated,
    translated and jsmin-minified common JS for the current request locale.
    On minifier failure or IOError, minification is disabled process-wide
    (cherrypy.config['minify_js'/'minify_css'] = False) and the function
    falls through without returning a path.
    '''
    # serialize cache generation across threads so two requests don't
    # build/write the same cache file concurrently
    with _chain_common_js_lock:
        logger.debug('Chaining and minifying common JS')
        try:
            locale = cherrypy.request.lang # defaults to en_US, handy for precaching in root.py
            js_root = os.path.join(MRSPARKLE, 'exposed', 'js')
            # cache key combines the bundle name, its md5, and the locale so each
            # locale gets its own cached translation of the common bundle
            cache_filename = make_absolute(os.path.join(lib.i18n.CACHE_PATH, '%s-%s-%s.cache' % ('common.min.js', hashlib.md5('common.min.js').hexdigest(), locale)))
            js_filenames = startup.generateJSManifest(True)
            # cache-validity check: reuse the cached file only if it is non-empty
            # and newer than every input that could change its contents
            if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
                cache_mtime = os.path.getmtime(cache_filename)
                # if any .js files were touched, one of js_root or js_root/contrib will have the bumped timestamp
                if cache_mtime < os.path.getmtime(js_root) or cache_mtime < os.path.getmtime(js_root + os.sep + 'contrib'):
                    os.unlink(cache_filename)
                # also invalidate if startup.py (which defines the JS manifest) changed
                elif cache_mtime < os.path.getmtime(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'startup.py')):
                    os.unlink(cache_filename)
                else:
                    # cache hit: nothing to rebuild
                    return cache_filename
            output_fh = file(cache_filename, 'wb')
            # many duplicate JS translation snippets: track emitted translation
            # blocks so each unique snippet is written only once
            blocks = []
            js = ''
            for js_filename in js_filenames:
                if js_filename == 'i18n.js':
                    # i18n.js is generated with the locale data baked in
                    path = lib.i18n.dispatch_i18n_js(os.path.join(js_root, 'i18n.js'))
                    input_temp_fh = file(path, 'rb')
                    js += input_temp_fh.read()
                    input_temp_fh.close()
                else:
                    path = os.path.join(js_root, js_filename)
                    # contrib/ files are chained in as-is but get no translation block
                    if os.sep + 'contrib' + os.sep not in path:
                        block = lib.i18n.generate_wrapped_js(path, locale)
                        if block and block not in blocks:
                            blocks.append(block)
                            js += block
                    input_temp_fh = file(path, 'r')
                    # trailing ';' guards against files that omit their final semicolon
                    js += input_temp_fh.read() + ';'
                    input_temp_fh.close()
            # pipe the concatenated source through the external jsmin binary;
            # stderr is folded into stdout
            minifier = Popen([PATH_TO_JSMIN], stdin = subprocess.PIPE, stderr = subprocess.STDOUT, stdout = subprocess.PIPE, close_fds = True)
            (data, err) = minifier.communicate(js)
            if minifier.returncode != 0:
                # minifier failed: disable minification for this process rather
                # than serving broken output (no path is returned in this case)
                logger.error('While minifying common JavaScript, jsmin (pid %d) returned code %d' % (minifier.pid, minifier.returncode))
                logger.error('Disabling minification of JavaScript and CSS')
                cherrypy.config['minify_js'] = False
                cherrypy.config['minify_css'] = False
            else:
                output_fh.write(data)
                output_fh.close()
                return cache_filename
        except IOError:
            logger.error('While minifying common JavaScript, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
            logger.error('Disabling minification of JavaScript and CSS')
            cherrypy.config['minify_js'] = False
            cherrypy.config['minify_css'] = False
            # best-effort removal of the (possibly partial) cache file
            try:
                if os.path.exists(cache_filename):
                    os.unlink(cache_filename)
            except:
                pass
        finally:
            # best-effort close of whichever handles were opened before a
            # failure; NameError for never-opened handles is swallowed too
            try:
                input_temp_fh.close()
            except:
                pass
            try:
                output_fh.close()
            except:
                pass
def _module_css_input_path(input_filename):
    '''
    Map a '/modules/...' URL path to the filesystem path of the CSS asset.

    A two-segment path ('modules/<file>') resolves under MRSPARKLE; a longer
    path ('modules/<module>/<asset...>') resolves under the module's own
    directory as reported by local_apps.getModulePath().
    '''
    parts = os.path.normpath(input_filename.strip('/')).replace(os.path.sep, '/').split('/')
    if len(parts) == 2:
        return os.path.join(MRSPARKLE, *parts)
    module_path = local_apps.getModulePath(parts[1])
    return os.path.join(module_path, *parts[2:])

def chain_modules_css(files):
    '''
    Concatenate and cssmin-minify the CSS assets named by the given list of
    '/modules/...' paths into a single cached file.

    Returns the absolute path of the cached .min.css file, or None if an
    IOError occurred (minification is then disabled process-wide via
    cherrypy.config['minify_js'/'minify_css'] = False).
    '''
    logger.debug('Chaining and minifying modules CSS')
    try:
        css_cache_dir = os.path.join(MODULE_STATIC_CACHE_PATH, 'css')
        if not os.path.exists(css_cache_dir):
            os.makedirs(css_cache_dir)
        # cache key is derived from the set of input files, so a different
        # module mix produces a different cache entry
        # (renamed from 'hash', which shadowed the builtin)
        file_list_hash = generate_file_list_hash(files)
        cache_filename = make_absolute(os.path.join(css_cache_dir, MODULE_CSS_FILE_PREFIX + file_list_hash + '.min.css'))

        # invalidate the cache if any input file is newer than the cached copy
        if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
            cache_mtime = os.path.getmtime(cache_filename)
            for input_filename in files:
                if input_filename.startswith('/modules/'):
                    if cache_mtime < os.path.getmtime(_module_css_input_path(input_filename)):
                        os.unlink(cache_filename)
                        break
        # a 0-byte cache file needs no unlink() here: the 'wb' open below
        # truncates it before rewriting

        if os.path.exists(cache_filename) and os.path.getsize(cache_filename) != 0:
            return cache_filename

        output_fh = open(cache_filename, 'wb')
        for input_filename in files:
            if input_filename.startswith('/modules/'):
                input_temp_fh = open(_module_css_input_path(input_filename), 'rb')
                output_fh.write(cssmin.cssmin(input_temp_fh.read()))
                input_temp_fh.close()
        return cache_filename
    except IOError:
        logger.error('While minifying modules CSS, the following exception was thrown: %s Stack: %s' % (traceback.format_exc(), traceback.format_stack()))
        logger.error('Disabling minification of JavaScript and CSS')
        cherrypy.config['minify_js'] = False
        cherrypy.config['minify_css'] = False
        # best-effort removal of the (possibly partial) cache file
        try:
            if os.path.exists(cache_filename):
                os.unlink(cache_filename)
        except:
            pass
    finally:
        # best-effort close of whichever handles were opened before a failure;
        # NameError for never-opened handles is swallowed too
        try:
            input_temp_fh.close()
        except:
            pass
        try:
            output_fh.close()
        except:
            pass
import lib.i18n import cssmin from lib.util import make_absolute, Popen import splunk.util import logging import startup import splunk.appserver.mrsparkle.lib.module as libmodule from lib.apps import local_apps import cherrypy logger = logging.getLogger('splunk.appserver.lib.i18n') MODULE_STATIC_CACHE_PATH = make_absolute( os.path.join('var', 'lib', 'splunk', 'appserver', 'modules', 'static')) MRSPARKLE = make_absolute(os.path.join('share', 'splunk', 'search_mrsparkle')) PATH_TO_JSMIN = make_absolute(os.path.join('bin', 'jsmin')) # define the filename prefixes for the cached versions of the concatenated # static resources MODULE_JS_FILE_PREFIX = 'modules.min.js' MODULE_CSS_FILE_PREFIX = 'modules-' ''' This is the logic to minify and chain various sets of CSS and JavaScript. The discrete chunks are the modules JS (anything found in getInstalledModules() in moduleMapper), common JS (anything in exposed/js and exposed JS/contrib) and the modules CSS (again, from getInstalledModules()) These routines should be called either at startup or at build time, unless i18n can be sped up enough to