def setUpModule():
    """Route girder logging into a scratch directory, then start the test server."""
    scratchDir = tempfile.mkdtemp()
    infoPath = os.path.join(scratchDir, 'config_info.log')
    errorPath = os.path.join(scratchDir, 'config_error.log')
    cfg = config.getConfig()
    cfg['logging'] = {
        'log_root': scratchDir,
        'info_log_file': infoPath,
        'error_log_file': errorPath,
        # Remember the original path so tests may change error_log_file
        # and later restore it.
        'original_error_log_file': errorPath,
    }
    cfg = config.getConfig()
    base.startServer()
def configureLogging(self, logConfig=None, oneFile=False):
    """
    Reconfigure girder's logging for a test.

    :param logConfig: optional dict of extra keys merged into the
        'logging' config section. (A mutable default of ``{}`` would be
        shared across calls, so ``None`` is used instead.)
    :param oneFile: if True, direct error logging into the info log file;
        otherwise restore the original, separate error log file.
    """
    cfg = config.getConfig()
    if oneFile:
        cfg['logging']['error_log_file'] = cfg['logging']['info_log_file']
    else:
        cfg['logging']['error_log_file'] = cfg['logging']['original_error_log_file']
    self.infoFile = cfg['logging']['info_log_file']
    self.errorFile = cfg['logging']['error_log_file']
    # Start each test with empty log files.
    if os.path.exists(self.infoFile):
        os.unlink(self.infoFile)
    if os.path.exists(self.errorFile):
        os.unlink(self.errorFile)
    cfg['logging'].update(logConfig or {})
    cfg = config.getConfig()
    # Rebuild the logger so the new configuration takes effect.
    girder.logger = girder._setupLogger()
def testPlugins(self):
    """Exercise GET and PUT of /system/plugins: listing, input validation,
    and plugin dependency resolution."""
    resp = self.request(path='/system/plugins', user=self.users[0])
    self.assertStatusOk(resp)
    self.assertIn('all', resp.json)
    # Point the server at the test plugin fixture directories.
    pluginRoots = [os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'test_plugins'),
                   os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'test_additional_plugins')]
    conf = config.getConfig()
    conf['plugins'] = {'plugin_directory': ':'.join(pluginRoots)}
    # Malformed (non-JSON-list) input is rejected.
    resp = self.request(
        path='/system/plugins', method='PUT', user=self.users[0],
        params={'plugins': 'not_a_json_list'})
    self.assertStatus(resp, 400)
    # Enabling a plugin also enables its dependencies.
    resp = self.request(
        path='/system/plugins', method='PUT', user=self.users[0],
        params={'plugins': '["has_deps"]'})
    self.assertStatusOk(resp)
    enabled = resp.json['value']
    self.assertEqual(len(enabled), 3)
    self.assertTrue('test_plugin' in enabled)
    self.assertTrue('does_nothing' in enabled)
    # A missing dependency surfaces as a server-side exception (500).
    resp = self.request(
        path='/system/plugins', method='PUT', user=self.users[0],
        params={'plugins': '["has_nonexistent_deps"]'}, exception=True)
    self.assertStatus(resp, 500)
    self.assertEqual(
        resp.json['message'],
        ("Required dependency a_plugin_that_does_not_exist"
         " does not exist."))
def load(self, info):
    """Load the large_image plugin: register its model and REST resources
    and bind all of its event handlers."""
    getPlugin('worker').load(info)
    # Remove any bindings left over from a previous load of this plugin.
    unbindGirderEventsByHandlerName('large_image')
    ModelImporter.registerModel('image_item', ImageItem, 'large_image')
    # Route large_image's own logging through girder's loggers.
    large_image.config.setConfig('logger', girder.logger)
    large_image.config.setConfig('logprint', girder.logprint)
    # Load girder's large_image config
    curConfig = config.getConfig().get('large_image')
    for key, value in six.iteritems(curConfig or {}):
        large_image.config.setConfig(key, value)
    girder_tilesource.loadGirderTileSources()
    TilesItemResource(info['apiRoot'])
    info['apiRoot'].large_image = LargeImageResource()
    Item().exposeFields(level=AccessType.READ, fields='largeImage')

    events.bind('data.process', 'large_image', _postUpload)
    # Keep job state in sync with large_image processing.
    events.bind('jobs.job.update.after', 'large_image', _updateJob)
    events.bind('model.job.save', 'large_image', _updateJob)
    events.bind('model.job.remove', 'large_image', _updateJob)
    # Invalidate the load-model cache whenever access-affecting models change.
    events.bind('model.folder.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.group.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.user.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.collection.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.item.remove', 'large_image', invalidateLoadModelCache)
    # Keep large_image metadata consistent when items are copied.
    events.bind('model.item.copy.prepare', 'large_image', prepareCopyItem)
    events.bind('model.item.copy.after', 'large_image', handleCopyItem)
    events.bind('model.item.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.file.save.after', 'large_image', checkForLargeImageFiles)
    events.bind('model.item.remove', 'large_image.removeThumbnails', removeThumbnails)
    events.bind('server_fuse.unmount', 'large_image', large_image.cache_util.cachesClear)
    events.bind('model.file.remove', 'large_image', handleRemoveFile)
def __init__(self, item_id=None, token=None, name='gaia_result.json', uri='', **kwargs):
    """
    Read and write GeoJSON data to/from Girder.

    :param item_id: Item id to read/write from/to
    :param token: Girder authentication token used by the client
    :param name: filename to use within the item
    :param uri: location of temporary file; if empty, a temp file is created
    :param kwargs: Other keyword arguments passed to the superclass
    """
    self.id = item_id
    self.token = token
    if uri:
        self.uri = uri
    else:
        tmpdir = tempfile.mkdtemp()
        # mkstemp returns an open OS-level file descriptor in addition to
        # the path; close it so the descriptor is not leaked.
        fd, self.uri = tempfile.mkstemp(suffix='.json', dir=tmpdir)
        os.close(fd)
    self.filename = name
    girderPort = config.getConfig()['server.socket_port']
    client = girder_client.GirderClient(port=girderPort)
    client.token = token
    self.client = client
    self.meta = self.client.getItem(item_id)
    super(MinervaVectorIO, self).__init__(uri=self.uri, **kwargs)
def load(self, info): events.bind('geometa.created', 'name', itemAddedToCollection) # Add bind event for last item deleted in geometa collection # This is probably the wrong event # events.bind('model.item.remove', 'name', # itemRemovedFromCollection) info['apiRoot'].collection.route('GET', (':id', 'geobrowser'), singleCollectionHandler) info['apiRoot'].collection.route('GET', ('geobrowser', ), listCollectionHandler) info['apiRoot'].collection.route('GET', ('geobrowser', 'search'), facetedSearchHandler) info['apiRoot'].collection.route('PUT', ('geobrowser', ), forceRecomputeAllHandler) info['apiRoot'].collection.route('DELETE', ('geobrowser', ), forceDeleteAllHandler) frontEndResource = os.path.realpath( resource_filename('geobrowser_plugin', 'external_web_client')) if (os.path.exists(frontEndResource) or config.getConfig()['server']['mode'] != 'development'): info['config']['/geobrowser'] = { 'tools.staticdir.on': True, 'tools.staticdir.dir': frontEndResource, 'tools.staticdir.index': 'index.html' }
def __init__(self):
    """Connect this model to its MongoDB collection and ensure its indices.

    Subclasses set ``self.name``, ``self._indices`` and ``self._textIndex``
    in ``initialize()`` before the collection is resolved here.
    """
    self.name = None
    self._indices = []
    self._textIndex = None

    self.initialize()

    db_cfg = getDbConfig()
    db_connection = getDbConnection()
    dbName = db_cfg['database']
    self.database = db_connection[dbName]
    self.collection = self.database[self.name]

    # Ensure each declared index; an entry may be a plain index spec or a
    # (spec, kwargs) pair.
    for index in self._indices:
        if isinstance(index, (list, tuple)):
            self.collection.ensure_index(index[0], **index[1])
        else:
            self.collection.ensure_index(index)

    # isinstance is the idiomatic type check (was: type(...) is dict),
    # and also accepts dict subclasses such as OrderedDict.
    if isinstance(self._textIndex, dict):
        textIdx = [(k, 'text') for k in self._textIndex.keys()]
        try:
            self.collection.ensure_index(
                textIdx, weights=self._textIndex,
                default_language=self._textLanguage)
        except pymongo.errors.OperationFailure:
            # Text indexes need a mongo build/config that supports them;
            # degrade gracefully when they are unavailable.
            print(
                TerminalColor.warning('WARNING: Text search not enabled.'))
def setUp(self):
    """Create scratch directories and plugin tarballs for installation tests."""
    base.TestCase.setUp(self)

    # Temporary directories for testing installations.
    self.baseDir = tempfile.mkdtemp()
    self.pluginDir = os.path.join(self.baseDir, 'plugin')
    os.mkdir(self.pluginDir)

    # Bundle the sample plugin into tarballs: one with a single plugin and
    # one with two copies under different names.
    source = os.path.join(pluginRoot, 'has_deps')
    self.singlePluginTarball = os.path.join(self.baseDir, 'single.tgz')
    with tarfile.TarFile(name=self.singlePluginTarball, mode='w') as archive:
        archive.add(source, arcname='single')
    self.combinedPluginTarball = os.path.join(self.baseDir, 'multi.tgz')
    with tarfile.TarFile(name=self.combinedPluginTarball, mode='w') as archive:
        archive.add(source, arcname='multi1')
        archive.add(source, arcname='multi2')

    # Point the server at our scratch plugin path.
    conf = config.getConfig()
    conf['plugins'] = {'plugin_directory': self.pluginDir}
def getPluginDir(curConfig=None):
    """
    Returns the /path/to the currently configured plugin directory.

    Resolution order: the config's plugins.plugin_directory, then a
    "plugins" directory next to the girder package (git checkout), then
    one inside the girder package itself (pip install). The directory is
    created if missing; None is returned when creation fails.
    """
    if curConfig is None:
        curConfig = config.getConfig()

    try:
        # An explicitly configured directory always wins.
        pluginsDir = curConfig["plugins"]["plugin_directory"]
    except KeyError:
        repoPluginsDir = os.path.join(ROOT_DIR, "plugins")
        if os.path.isdir(repoPluginsDir):
            pluginsDir = repoPluginsDir
        else:
            pluginsDir = os.path.join(PACKAGE_DIR, "plugins")

    if not os.path.exists(pluginsDir):
        try:
            os.makedirs(pluginsDir)
        except OSError:
            # Re-check: the directory may have appeared concurrently.
            if not os.path.exists(pluginsDir):
                print(TerminalColor.warning("Could not create plugin directory."))
                pluginsDir = None
    return pluginsDir
def setUpModule():
    """Send access and info logging to a scratch file, then start the server."""
    scratchDir = tempfile.mkdtemp()
    logPath = os.path.join(scratchDir, 'filter.log')
    cfg = config.getConfig()
    cfg['log.access_file'] = logPath
    cfg['logging'] = {
        'log_root': scratchDir,
        'info_log_file': logPath,
    }
    base.startServer()
def _setupCache():
    """
    Setup caching based on configuration file.

    Cache backends are forcibly replaced because Girder initially
    configures the regions with the null backends.
    """
    curConfig = config.getConfig()

    if curConfig['cache']['enabled']:
        # Replace existing backend, this is necessary
        # because they're initially configured with the null backend
        cacheConfig = {
            'cache.global.replace_existing_backend': True,
            'cache.request.replace_existing_backend': True
        }
        curConfig['cache'].update(cacheConfig)
        # Both regions are configured from the same 'cache' section, keyed
        # by their respective prefixes.
        cache.configure_from_config(curConfig['cache'], 'cache.global.')
        requestCache.configure_from_config(curConfig['cache'], 'cache.request.')
    else:
        # Reset caches back to null cache (in the case of server teardown)
        cache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
        requestCache.configure(backend='dogpile.cache.null', replace_existing_backend=True)

    # Although the rateLimitBuffer has no pre-existing backend, this method may be called multiple
    # times in testing (where caches were already configured)
    rateLimitBuffer.configure(backend='dogpile.cache.memory', replace_existing_backend=True)
def __init__(self, templatePath=None):
    """Serve the API docs page and track brand/webroot setting changes.

    :param templatePath: path to the mako template to render; defaults to
        the packaged api_docs.mako.
    """
    if not templatePath:
        templatePath = os.path.join(constants.PACKAGE_DIR, 'api', 'api_docs.mako')
    super(ApiDocs, self).__init__(templatePath)

    curConfig = config.getConfig()
    mode = curConfig['server'].get('mode', '')

    self.vars = {
        'apiRoot': '',
        'staticRoot': '',
        'brandName': Setting().get(SettingKey.BRAND_NAME),
        'mode': mode
    }

    # Replace the core webroot setting handlers with this page's own, so
    # relevant setting changes update this template's variables instead.
    events.unbind('model.setting.save.after', CoreEventHandler.WEBROOT_SETTING_CHANGE)
    events.bind('model.setting.save.after',
                CoreEventHandler.WEBROOT_SETTING_CHANGE,
                self._onSettingSave)
    events.unbind('model.setting.remove', CoreEventHandler.WEBROOT_SETTING_CHANGE)
    events.bind('model.setting.remove',
                CoreEventHandler.WEBROOT_SETTING_CHANGE,
                self._onSettingRemove)
def getApiUrl(url=None, preferReferer=False):
    """
    In a request thread, call this to get the path to the root of the
    REST API. The returned path does *not* end in a forward slash.

    :param url: URL from which to extract the base URL. If not specified,
        uses the server root system setting. If that is not specified,
        uses `cherrypy.url()`.
    :param preferReferer: if no url is specified, this is true, and this
        is in a cherrypy request that has a referer header that contains
        the api string, use that referer as the url.
    """
    apiStr = config.getConfig()['server']['api_root']

    if not url:
        headers = cherrypy.request.headers
        if preferReferer and apiStr in headers.get('referer', ''):
            url = headers['referer']
        else:
            serverRoot = Setting().get(SettingKey.SERVER_ROOT)
            if serverRoot:
                return posixpath.join(serverRoot, apiStr.lstrip('/'))
    if not url:
        url = cherrypy.url()

    pos = url.find(apiStr)
    if pos < 0:
        raise GirderException('Could not determine API root in %s.' % url)
    return url[:pos + len(apiStr)]
def configureLogging(self, logConfig=None, oneFile=False):
    """
    Reconfigure girder's logging for a test.

    :param logConfig: optional dict of extra keys to merge into the
        'logging' config section. Defaults to None rather than ``{}``
        because a mutable default would be shared across calls.
    :param oneFile: if True, send error logging to the info log file;
        otherwise restore the original, separate error log file.
    """
    cfg = config.getConfig()
    if oneFile:
        cfg['logging']['error_log_file'] = cfg['logging']['info_log_file']
    else:
        cfg['logging']['error_log_file'] = cfg['logging'][
            'original_error_log_file']
    self.infoFile = cfg['logging']['info_log_file']
    self.errorFile = cfg['logging']['error_log_file']
    # Start each test from empty log files.
    if os.path.exists(self.infoFile):
        os.unlink(self.infoFile)
    if os.path.exists(self.errorFile):
        os.unlink(self.errorFile)
    cfg['logging'].update(logConfig or {})
    cfg = config.getConfig()
    # Rebuild the logger so the new configuration takes effect.
    girder.logger = girder._setupLogger()
def testPlugins(self):
    """Exercise GET and PUT of /system/plugins: listing, input validation,
    and plugin dependency resolution."""
    resp = self.request(path="/system/plugins", user=self.users[0])
    self.assertStatusOk(resp)
    self.assertIn("all", resp.json)
    # Point the server at the test plugin fixture directories.
    pluginRoots = [
        os.path.join(os.path.dirname(os.path.dirname(__file__)), "test_plugins"),
        os.path.join(os.path.dirname(os.path.dirname(__file__)), "test_additional_plugins"),
    ]
    conf = config.getConfig()
    conf["plugins"] = {"plugin_directory": ":".join(pluginRoots)}
    # Malformed (non-JSON-list) input is rejected.
    resp = self.request(
        path="/system/plugins", method="PUT", user=self.users[0],
        params={"plugins": "not_a_json_list"}
    )
    self.assertStatus(resp, 400)
    # Enabling a plugin also enables its dependencies.
    resp = self.request(
        path="/system/plugins", method="PUT", user=self.users[0],
        params={"plugins": '["has_deps"]'}
    )
    self.assertStatusOk(resp)
    enabled = resp.json["value"]
    self.assertEqual(len(enabled), 3)
    self.assertTrue("test_plugin" in enabled)
    self.assertTrue("does_nothing" in enabled)
    # A missing dependency is rejected with a validation error (400).
    resp = self.request(
        path="/system/plugins", method="PUT", user=self.users[0],
        params={"plugins": '["has_nonexistent_deps"]'},
        exception=True,
    )
    self.assertStatus(resp, 400)
    self.assertEqual(resp.json["message"],
                     ("Required plugin a_plugin_that_does_not_exist"
                      " does not exist."))
def getConfig(self):
    """Return the 'large_image' section of girder's config, or an empty
    dict when the config module is unavailable."""
    fallback = {}
    if not config:
        return fallback
    return config.getConfig().get('large_image', fallback)
def getDbConfig():
    """Get the database configuration values from the cherrypy config."""
    # Missing 'database' section means no database-specific settings.
    return config.getConfig().get('database', {})
def getPluginDirs(curConfig=None):
    """Return an ordered list of directories that plugins can live in."""
    if curConfig is None:
        curConfig = config.getConfig()

    try:
        # The configured value is a colon-separated list of directories.
        pluginDirs = curConfig['plugins']['plugin_directory'].split(':')
    except KeyError:
        pluginDirs = [defaultPluginDir()]

    usableDirs = []
    for pluginDir in pluginDirs:
        if not os.path.exists(pluginDir):
            try:
                os.makedirs(pluginDir)
            except OSError:
                # Re-check: the directory may have appeared concurrently.
                if not os.path.exists(pluginDir):
                    print(
                        TerminalColor.warning(
                            'Could not create plugin directory %s.' % pluginDir))
                    continue
        usableDirs.append(pluginDir)
    return usableDirs
def load(info):
    """Load the ythub plugin: attach REST resources and routes, and start a
    periodic heartbeat used to cull idle notebooks."""
    notebook = Notebook()

    info['apiRoot'].ythub = ytHub()
    info['apiRoot'].notebook = notebook
    info['apiRoot'].frontend = Frontend()
    info['apiRoot'].folder.route('GET', (':id', 'listing'), listFolder)
    info['apiRoot'].item.route('GET', (':id', 'listing'), listItem)
    info['apiRoot'].item.route('PUT', (':id', 'check'), checkItem)
    info['apiRoot'].folder.route('GET', (':id', 'rootpath'), folderRootpath)
    info['apiRoot'].folder.route('PUT', (':id', 'check'), checkFolder)
    info['apiRoot'].collection.route('PUT', (':id', 'check'), checkCollection)

    curConfig = config.getConfig()
    if curConfig['server']['mode'] == 'testing':
        # Short heartbeat so tests exercise culling quickly.
        cull_period = 1
    else:
        # Heartbeat period in seconds; non-positive (the default) disables it.
        cull_period = int(curConfig['server'].get('heartbeat', -1))

    if cull_period > 0:
        def _heartbeat():
            events.trigger('heartbeat')

        logger.info('Starting Heartbeat every %i s' % cull_period)
        heartbeat = cherrypy.process.plugins.Monitor(
            cherrypy.engine, _heartbeat, frequency=cull_period,
            name="Heartbeat")
        heartbeat.subscribe()
        # Cull idle notebooks on each heartbeat.
        events.bind('heartbeat', 'ythub', notebook.cullNotebooks)
    events.bind('model.user.save.created', 'ythub', addDefaultFolders)
def getPluginDir(curConfig=None):
    """
    Returns the /path/to the currently configured plugin directory.

    Falls back to the repository 'plugins' directory (git checkout) and
    then to the one inside the girder package (pip install). Attempts to
    create the directory when it is missing; returns None on failure.
    """
    if curConfig is None:
        curConfig = config.getConfig()

    pluginCfg = curConfig.get('plugins', {})
    if 'plugin_directory' in pluginCfg:
        # An explicitly configured directory always wins.
        pluginsDir = pluginCfg['plugin_directory']
    elif os.path.isdir(os.path.join(ROOT_DIR, 'plugins')):
        # Running from a git checkout: use the sibling plugins directory.
        pluginsDir = os.path.join(ROOT_DIR, 'plugins')
    else:
        # Pip-installed: use the plugins directory inside the package.
        pluginsDir = os.path.join(PACKAGE_DIR, 'plugins')

    if not os.path.exists(pluginsDir):
        try:
            os.makedirs(pluginsDir)
        except OSError:
            # Re-check: another process may have created it concurrently.
            if not os.path.exists(pluginsDir):
                print(
                    TerminalColor.warning('Could not create plugin directory.')
                )
                pluginsDir = None
    return pluginsDir
def _setupCache():
    """
    Setup caching based on configuration file.

    Cache backends are forcibly replaced because Girder initially
    configures the regions with the null backends.
    """
    curConfig = config.getConfig()

    if curConfig['cache']['enabled']:
        # Replace existing backend, this is necessary
        # because they're initially configured with the null backend
        cacheConfig = {
            'cache.global.replace_existing_backend': True,
            'cache.request.replace_existing_backend': True
        }
        curConfig['cache'].update(cacheConfig)
        # Both regions are configured from the same 'cache' section, keyed
        # by their respective prefixes.
        cache.configure_from_config(curConfig['cache'], 'cache.global.')
        requestCache.configure_from_config(curConfig['cache'], 'cache.request.')
    else:
        # Reset caches back to null cache (in the case of server teardown)
        cache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
        requestCache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
def getApiUrl(url=None, preferReferer=False):
    """
    In a request thread, call this to get the path to the root of the
    REST API. The returned path does *not* end in a forward slash.

    :param url: URL from which to extract the base URL. If not specified,
        uses the server root system setting. If that is not specified,
        uses `cherrypy.url()`.
    :param preferReferer: if no url is specified, this is true, and this
        is in a cherrypy request that has a referer header that contains
        the api string, use that referer as the url.
    """
    apiStr = config.getConfig()['server']['api_root']

    if not url:
        referer = cherrypy.request.headers.get('referer', '')
        if preferReferer and apiStr in referer:
            url = referer
        else:
            root = Setting().get(SettingKey.SERVER_ROOT)
            if root:
                # A configured server root takes precedence over any
                # URL inference.
                return posixpath.join(root, apiStr.lstrip('/'))

    url = url or cherrypy.url()
    idx = url.find(apiStr)
    if idx < 0:
        raise GirderException('Could not determine API root in %s.' % url)
    return url[:idx + len(apiStr)]
def endpointDecorator(self, *path, **params):
    """Wrap a REST endpoint call: set CORS and caching headers, normalize
    the returned value, and map girder exceptions onto HTTP responses."""
    _setCommonCORSHeaders()
    cherrypy.lib.caching.expires(0)
    # Tag every request with a unique id so failures can be correlated
    # between the response and the server logs.
    cherrypy.request.girderRequestUid = str(uuid.uuid4())
    setResponseHeader('Girder-Request-Uid', cherrypy.request.girderRequestUid)
    try:
        val = fun(self, path, params)

        # If this is a partial response, we set the status appropriately
        if 'Content-Range' in cherrypy.response.headers:
            cherrypy.response.status = 206

        val = _mongoCursorToList(val)

        if callable(val):
            # If the endpoint returned anything callable (function,
            # lambda, functools.partial), we assume it's a generator
            # function for a streaming response.
            cherrypy.response.stream = True
            _logRestRequest(self, path, params)
            return val()

        if isinstance(val, cherrypy.lib.file_generator):
            # Don't do any post-processing of static files
            return val

        if isinstance(val, types.GeneratorType):
            val = list(val)

    except RestException as e:
        val = _handleRestException(e)
    except AccessException as e:
        val = _handleAccessException(e)
    except GirderException as e:
        val = _handleGirderException(e)
    except ValidationException as e:
        val = _handleValidationException(e)
    except cherrypy.HTTPRedirect:
        # Redirects are part of normal control flow; let cherrypy handle them.
        raise
    except Exception:
        # These are unexpected failures; send a 500 status
        logger.exception('500 Error')
        cherrypy.response.status = 500
        val = dict(type='internal', uid=cherrypy.request.girderRequestUid)

        if config.getConfig()['server']['mode'] == 'production':
            # Sanitize errors in production mode
            val['message'] = 'An unexpected error occurred on the server.'
        else:
            # Provide error details in non-production modes
            t, value, tb = sys.exc_info()
            val['message'] = '%s: %s' % (t.__name__, repr(value))
            val['trace'] = traceback.extract_tb(tb)

    resp = _createResponse(val)
    _logRestRequest(self, path, params)

    return resp
def _digest(self, alg, password, salt=None):
    """
    Helper method to perform the password digest.

    :param alg: The hash algorithm to use.
    :type alg: str - 'sha512' | 'bcrypt'
    :param password: The password to digest.
    :type password: str
    :param salt: The salt to use. In the case of bcrypt,
        when storing the password, pass None;
        when testing the password, pass the hashed value.
    :type salt: None or str
    :returns: The hashed value as a string.
    """
    cur_config = config.getConfig()
    if alg == 'sha512':
        return hashlib.sha512((password + salt).encode('utf8')).hexdigest()
    elif alg == 'bcrypt':
        try:
            # Imported lazily so girder can run without bcrypt installed.
            import bcrypt
        except ImportError:
            raise Exception(
                'Bcrypt module is not installed. See girder.local.cfg.')

        password = password.encode('utf8')

        if salt is None:
            # Storing a new password: generate a salt using the configured
            # work factor.
            rounds = int(cur_config['auth']['bcrypt_rounds'])
            return bcrypt.hashpw(password, bcrypt.gensalt(rounds))
        else:
            # Testing a password: hashpw with the stored hash as the salt
            # reproduces the stored hash on a match.
            if isinstance(salt, six.text_type):
                salt = salt.encode('utf8')
            return bcrypt.hashpw(password, salt)
    else:
        raise Exception('Unsupported hash algorithm: %s' % alg)
def testLogRoute(self):
    """Check /system/log access control and byte-range tailing of logs."""
    logRoot = os.path.join(ROOT_DIR, 'tests', 'cases', 'dummylogs')
    config.getConfig()['logging'] = {'log_root': logRoot}
    # Non-admin users may not read logs.
    resp = self.request(path='/system/log', user=self.users[1], params={
        'log': 'error',
        'bytes': 0
    })
    self.assertStatus(resp, 403)
    # bytes=0 returns the whole file.
    resp = self.request(path='/system/log', user=self.users[0], params={
        'log': 'error',
        'bytes': 0
    }, isJson=False)
    self.assertStatusOk(resp)
    self.assertEqual(
        self.getBody(resp),
        '=== Last 12 bytes of %s/error.log: ===\n\nHello world\n' % logRoot)
    # A positive value tails that many bytes from the end of the file.
    resp = self.request(path='/system/log', user=self.users[0], params={
        'log': 'error',
        'bytes': 6
    }, isJson=False)
    self.assertStatusOk(resp)
    self.assertEqual(
        self.getBody(resp),
        '=== Last 6 bytes of %s/error.log: ===\n\nworld\n' % logRoot)
    resp = self.request(path='/system/log', user=self.users[0], params={
        'log': 'error',
        'bytes': 18
    }, isJson=False)
    self.assertStatusOk(resp)
    self.assertEqual(
        self.getBody(resp),
        '=== Last 18 bytes of %s/error.log: ===\n\nmonde\nHello world\n' % logRoot)
    # The info log fixture is empty.
    resp = self.request(path='/system/log', user=self.users[0], params={
        'log': 'info',
        'bytes': 6
    }, isJson=False)
    self.assertStatusOk(resp)
    self.assertEqual(
        self.getBody(resp),
        '=== Last 0 bytes of %s/info.log: ===\n\n' % logRoot)
    # Restore the default logging configuration for later tests.
    del config.getConfig()['logging']
def mountServer(path, database=None, fuseOptions=None, quiet=False, plugins=None):
    """
    Perform the mount.

    :param path: the mount location.
    :param database: a database connection URI, if it contains '://'.
        Otherwise, the default database is used.
    :param fuseOptions: a comma-separated string of options to pass to the
        FUSE mount.  A key without a value is taken as True.  Boolean
        values are case insensitive.  For instance, 'foreground' or
        'foreground=True' will keep this program running until the SIGTERM
        or unmounted.
    :param quiet: if True, suppress Girder logs.
    :param plugins: an optional list of plugins to enable.  If None, use
        the plugins that are configured.
    """
    if quiet:
        # Silence girder's file logging before the handlers are attached.
        curConfig = config.getConfig()
        curConfig.setdefault('logging', {})['log_quiet'] = True
        curConfig.setdefault('logging', {})['log_level'] = 'FATAL'
    girder._attachFileLogHandlers()
    if database and '://' in database:
        cherrypy.config['database']['uri'] = database
    if plugins is not None:
        plugins = plugins.split(',')
    webroot, appconf = configureServer(plugins=plugins)
    girder._setupCache()

    opClass = ServerFuse(stat=os.stat(path))
    options = {
        # By default, we run in the background so the mount command returns
        # immediately.  If we run in the foreground, a SIGTERM will shut it
        # down
        'foreground': False,
        # Cache files if their size and timestamp haven't changed.
        # This lets the OS buffer files efficiently.
        'auto_cache': True,
        # We aren't specifying our own inos
        'use_ino': False,
        # read-only file system
        'ro': True,
    }
    if sys.platform != 'darwin':
        # Automatically unmount when we try to mount again
        options['auto_unmount'] = True
    if fuseOptions:
        for opt in fuseOptions.split(','):
            if '=' in opt:
                key, value = opt.split('=', 1)
                # Coerce 'true'/'false' (any case) to booleans.
                value = (False if value.lower() == 'false' else
                         True if value.lower() == 'true' else value)
            else:
                key, value = opt, True
            # These options are fixed by this mount's semantics and
            # may not be overridden by the caller.
            if key in ('use_ino', 'ro', 'rw') and options.get(key) != value:
                logprint.warning('Ignoring the %s=%r option' % (key, value))
                continue
            options[key] = value
    Setting().set(SettingKey.GIRDER_MOUNT_INFORMATION,
                  {'path': path, 'mounttime': time.time()})
    FUSELogError(opClass, path, **options)
def mountServer(path, database=None, fuseOptions=None, quiet=False, plugins=None):
    """
    Perform the mount.

    :param path: the mount location.
    :param database: a database connection URI, if it contains '://'.
        Otherwise, the default database is used.
    :param fuseOptions: a comma-separated string of options to pass to the
        FUSE mount.  A key without a value is taken as True.  Boolean
        values are case insensitive.  For instance, 'foreground' or
        'foreground=True' will keep this program running until the SIGTERM
        or unmounted.
    :param quiet: if True, suppress Girder logs.
    :param plugins: an optional list of plugins to enable.  If None, use
        the plugins that are configured.
    """
    if quiet:
        # Silence girder's file logging before the logger is set up.
        curConfig = config.getConfig()
        curConfig.setdefault('logging', {})['log_quiet'] = True
        curConfig.setdefault('logging', {})['log_level'] = 'FATAL'
    girder._setupLogger()
    if database and '://' in database:
        cherrypy.config['database']['uri'] = database
    if plugins is not None:
        plugins = plugins.split(',')
    webroot, appconf = configureServer(plugins=plugins)
    girder._setupCache()

    opClass = ServerFuse(stat=os.stat(path))
    options = {
        # By default, we run in the background so the mount command returns
        # immediately.  If we run in the foreground, a SIGTERM will shut it
        # down
        'foreground': False,
        # Cache files if their size and timestamp haven't changed.
        # This lets the OS buffer files efficiently.
        'auto_cache': True,
        # We aren't specifying our own inos
        'use_ino': False,
        # read-only file system
        'ro': True,
    }
    if sys.platform != 'darwin':
        # Automatically unmount when we try to mount again
        options['auto_unmount'] = True
    if fuseOptions:
        for opt in fuseOptions.split(','):
            if '=' in opt:
                key, value = opt.split('=', 1)
                # Coerce 'true'/'false' (any case) to booleans.
                value = (False if value.lower() == 'false' else
                         True if value.lower() == 'true' else value)
            else:
                key, value = opt, True
            # These options are fixed by this mount's semantics and
            # may not be overridden by the caller.
            if key in ('use_ino', 'ro', 'rw') and options.get(key) != value:
                logprint.warning('Ignoring the %s=%r option' % (key, value))
                continue
            options[key] = value
    Setting().set(SettingKey.GIRDER_MOUNT_INFORMATION,
                  {'path': path, 'mounttime': time.time()})
    FUSELogError(opClass, path, **options)
def validate(self, doc):
    """
    Validate the user every time it is stored in the database.

    Normalizes identifying fields, enforces name/login/email constraints
    and uniqueness, and promotes the very first user to admin.
    """
    # Normalize identifying fields before validating them.
    doc['login'] = doc.get('login', '').lower().strip()
    doc['email'] = doc.get('email', '').lower().strip()
    doc['firstName'] = doc.get('firstName', '').strip()
    doc['lastName'] = doc.get('lastName', '').strip()

    cur_config = config.getConfig()

    if 'salt' not in doc:  # pragma: no cover
        # Internal error, this should not happen
        raise Exception('Tried to save user document with no salt.')

    if not doc['firstName']:
        raise ValidationException('First name must not be empty.',
                                  'firstName')

    if not doc['lastName']:
        raise ValidationException('Last name must not be empty.',
                                  'lastName')

    if '@' in doc['login']:
        # Hard-code this constraint so we can always easily distinguish
        # an email address from a login
        raise ValidationException('Login may not contain "@".', 'login')

    if not re.match(cur_config['users']['login_regex'], doc['login']):
        raise ValidationException(
            cur_config['users']['login_description'], 'login')

    if not re.match(cur_config['users']['email_regex'], doc['email']):
        raise ValidationException('Invalid email address.', 'email')

    # Ensure unique logins
    q = {'login': doc['login']}
    if '_id' in doc:
        # Exclude this document itself when checking for duplicates.
        q['_id'] = {'$ne': doc['_id']}
    existing = self.findOne(q)
    if existing is not None:
        raise ValidationException('That login is already registered.',
                                  'login')

    # Ensure unique emails
    q = {'email': doc['email']}
    if '_id' in doc:
        q['_id'] = {'$ne': doc['_id']}
    existing = self.findOne(q)
    if existing is not None:
        raise ValidationException('That email is already registered.',
                                  'email')

    # If this is the first user being created, make it an admin
    existing = self.findOne({})
    if existing is None:
        doc['admin'] = True

    return doc
def validate(self, doc):
    """
    Validate the user every time it is stored in the database.

    Normalizes identifying fields, enforces name/login/email constraints
    and uniqueness, and promotes the very first user to admin. Existence
    checks use ``findOne`` rather than the deprecated
    ``Cursor.count()`` (removed in recent pymongo releases).
    """
    # Normalize identifying fields before validating them.
    doc['login'] = doc.get('login', '').lower().strip()
    doc['email'] = doc.get('email', '').lower().strip()
    doc['firstName'] = doc.get('firstName', '').strip()
    doc['lastName'] = doc.get('lastName', '').strip()

    cur_config = config.getConfig()

    if 'salt' not in doc:  # pragma: no cover
        # Internal error, this should not happen
        raise Exception('Tried to save user document with no salt.')

    if not doc['firstName']:
        raise ValidationException('First name must not be empty.',
                                  'firstName')

    if not doc['lastName']:
        raise ValidationException('Last name must not be empty.',
                                  'lastName')

    if '@' in doc['login']:
        # Hard-code this constraint so we can always easily distinguish
        # an email address from a login
        raise ValidationException('Login may not contain "@".', 'login')

    if not re.match(cur_config['users']['login_regex'], doc['login']):
        raise ValidationException(
            cur_config['users']['login_description'], 'login')

    if not re.match(cur_config['users']['email_regex'], doc['email']):
        raise ValidationException('Invalid email address.', 'email')

    # Ensure unique logins
    q = {'login': doc['login']}
    if '_id' in doc:
        # Exclude this document itself when checking for duplicates.
        q['_id'] = {'$ne': doc['_id']}
    if self.findOne(q) is not None:
        raise ValidationException('That login is already registered.',
                                  'login')

    # Ensure unique emails
    q = {'email': doc['email']}
    if '_id' in doc:
        q['_id'] = {'$ne': doc['_id']}
    if self.findOne(q) is not None:
        raise ValidationException('That email is already registered.',
                                  'email')

    # If this is the first user being created, make it an admin
    if self.findOne({}) is None:
        doc['admin'] = True

    return doc
def __init__(self, templatePath=None):
    """Render the API docs page.

    :param templatePath: path to the mako template; defaults to the
        packaged api_docs.mako.
    """
    templatePath = templatePath or os.path.join(
        constants.PACKAGE_DIR, 'api', 'api_docs.mako')
    super(ApiDocs, self).__init__(templatePath)
    serverCfg = config.getConfig()['server']
    # Expose the server mode (e.g. development/production) to the template.
    self.vars['mode'] = serverCfg.get('mode', '')
def setUp(self):
    """Enable the test_plugin fixture and start the server."""
    testDir = os.path.dirname(os.path.dirname(__file__))
    cfg = config.getConfig()
    cfg['plugins'] = {'plugin_directory': os.path.join(testDir, 'test_plugins')}
    base.enabledPlugins.append('test_plugin')
    base.startServer()
def testRestartWhenNotUsingCherryPyServer(self):
    """A restart request must be forbidden when girder does not own the server."""
    # Pretend girder is running behind an external WSGI server.
    config.getConfig()['server']['cherrypy_server'] = False
    resp = self.request(path='/system/restart', method='PUT',
                        user=self.users[0])
    # Restart should be disallowed
    self.assertStatus(resp, 403)
def setUpModule():
    """Start the server with only the test_plugin fixture enabled."""
    testDir = os.path.dirname(os.path.dirname(__file__))
    conf = config.getConfig()
    conf['plugins'] = {'plugin_directory': os.path.join(testDir, 'test_plugins')}
    # Replace (not extend) the enabled plugin list for this module.
    base.enabledPlugins = ['test_plugin']
    base.startServer()
def setUpModule():
    """Enable the mail_test fixture plugin and start the server."""
    testDir = os.path.dirname(os.path.dirname(__file__))
    conf = config.getConfig()
    conf['plugins'] = {'plugin_directory': os.path.join(testDir, 'test_plugins')}
    base.enabledPlugins.append('mail_test')
    base.startServer()
def _configureStaticRoutes(webroot, plugins, event=None): """ Configures static routes for a given webroot. This function is also run when the route table setting is modified to allow for dynamically changing static routes at runtime. """ # This was triggered by some unrelated setting changing if event is not None and event.info[ 'key'] != constants.SettingKey.ROUTE_TABLE: return routeTable = loadRouteTable() # If the static route is a URL, leave it alone if '://' in routeTable[constants.GIRDER_STATIC_ROUTE_ID]: apiStaticRoot = routeTable[constants.GIRDER_STATIC_ROUTE_ID] staticRoot = routeTable[constants.GIRDER_STATIC_ROUTE_ID] else: # Make the staticRoot relative to the api_root, if possible. The api_root # could be relative or absolute, but it needs to be in an absolute form for # relpath to behave as expected. We always expect the api_root to # contain at least two components, but the reference from static needs to # be from only the first component. apiRootBase = posixpath.split( posixpath.join('/', config.getConfig()['server']['api_root']))[0] apiStaticRoot = posixpath.relpath( routeTable[constants.GIRDER_STATIC_ROUTE_ID], apiRootBase) staticRoot = posixpath.relpath( routeTable[constants.GIRDER_STATIC_ROUTE_ID], routeTable[constants.GIRDER_ROUTE_ID]) webroot.updateHtmlVars({ 'apiRoot': config.getConfig()['server']['api_root'], 'staticRoot': staticRoot, 'plugins': plugins }) webroot.api.v1.updateHtmlVars({ 'apiRoot': config.getConfig()['server']['api_root'], 'staticRoot': apiStaticRoot })
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None, buildDag=True):
    """
    Loads a set of plugins into the application.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :type root: object
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :param apiRoot: The cherrypy api root object.
    :type apiRoot: object or None
    :param curConfig: A girder config object to use.
    :type curConfig: dict or None
    :param buildDag: If the ``plugins`` parameter is already a topo-sorted list
        with all dependencies resolved, set this to False and it will skip
        rebuilding the DAG. Otherwise the dependency resolution and sorting
        will occur within this method.
    :type buildDag: bool
    :returns: A 3-tuple containing the modified root, config, and apiRoot
        objects.
    :rtype tuple:
    """
    if curConfig is None:
        curConfig = _config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        # FIX: this warning previously embedded a raw newline inside a
        # single-quoted string literal, which is a SyntaxError. Use implicit
        # adjacent-literal concatenation instead (same message text).
        print(TerminalColor.warning(
            'Warning: the plugin_directory setting is deprecated. Please use '
            'the `girder-install plugin` command and remove this setting from '
            'your config file.'))

    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        module = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = module
        sys.modules[ROOT_PLUGINS_PACKAGE] = module

    print(TerminalColor.info('Resolving plugin dependencies...'))

    if buildDag:
        plugins = getToposortedPlugins(plugins, curConfig, ignoreMissing=True)

    for plugin in plugins:
        try:
            root, appconf, apiRoot = loadPlugin(
                plugin, root, appconf, apiRoot, curConfig=curConfig)
            print(TerminalColor.success('Loaded plugin "%s"' % plugin))
        except Exception:
            # A single broken plugin should not prevent the rest from loading.
            print(TerminalColor.error(
                'ERROR: Failed to load plugin "%s":' % plugin))
            girder.logger.exception('Plugin load failure: %s' % plugin)
            traceback.print_exc()

    return root, appconf, apiRoot
def getConfigurationOption(self, section, key, params):
    """Return a single value from the server config, raising 404-style errors on misses."""
    sectionDict = config.getConfig().get(section)

    if sectionDict is None:
        raise ResourcePathNotFound('No section with that name exists.')
    if key not in sectionDict:
        raise ResourcePathNotFound('No key with that name exists.')

    return sectionDict.get(key)
def _disableRealDatabaseConnectivity():
    """
    Poison the 'database' config section so that any test which touches the
    database without requesting the "db" fixture fails loudly.
    """
    from girder.utility.config import getConfig

    class _PoisonedDict(dict):
        # Any lookup through .get() aborts the test with an explanatory error.
        def get(self, *args, **kwargs):
            raise Exception('You must use the "db" fixture in tests that connect to the database.')

    with unittest.mock.patch.dict(getConfig(), {'database': _PoisonedDict()}):
        yield
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None): """ Loads a set of plugins into the application. The list passed in should not already contain dependency information; dependent plugins will be loaded automatically. :param plugins: The set of plugins to load, by directory name. :type plugins: list :param root: The root node of the server tree. :param appconf: The server's cherrypy configuration object. :type appconf: dict :returns: A list of plugins that were actually loaded, once dependencies were resolved and topological sort was performed. """ # Register a pseudo-package for the root of all plugins. This must be # present in the system module list in order to avoid import warnings. if curConfig is None: curConfig = config.getConfig() if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']: pluginDir = curConfig['plugins']['plugin_directory'] elif os.path.exists(os.path.join(PACKAGE_DIR, 'plugins')): pluginDir = os.path.join(PACKAGE_DIR, 'plugins') else: pluginDir = os.path.join(ROOT_DIR, 'plugins') if ROOT_PLUGINS_PACKAGE not in sys.modules: sys.modules[ROOT_PLUGINS_PACKAGE] = type( '', (), { '__path__': pluginDir, '__package__': ROOT_PLUGINS_PACKAGE, '__name__': ROOT_PLUGINS_PACKAGE })() print TerminalColor.info('Resolving plugin dependencies...') filteredDepGraph = { pluginName: info['dependencies'] for pluginName, info in findAllPlugins(curConfig).iteritems() if pluginName in plugins } for pset in toposort(filteredDepGraph): for plugin in pset: try: root, appconf, apiRoot = loadPlugin(plugin, root, appconf, apiRoot, curConfig=curConfig) print TerminalColor.success( 'Loaded plugin "{}"'.format(plugin)) except Exception: print TerminalColor.error( 'ERROR: Failed to load plugin "{}":'.format(plugin)) traceback.print_exc() return root, appconf, apiRoot
def setUp(self):
    """Create a scratch plugin directory and point the config at it."""
    base.TestCase.setUp(self)
    self.baseDir = tempfile.mkdtemp()
    self.pluginDir = os.path.join(self.baseDir, 'plugins')
    os.mkdir(self.pluginDir)
    config.getConfig()['plugins'] = {'plugin_directory': self.pluginDir}
def setUp(self):
    """Set up a fresh temporary plugin directory for this test case."""
    base.TestCase.setUp(self)
    self.baseDir = tempfile.mkdtemp()
    pluginPath = os.path.join(self.baseDir, "plugins")
    os.mkdir(pluginPath)
    self.pluginDir = pluginPath
    cfg = config.getConfig()
    cfg["plugins"] = {"plugin_directory": pluginPath}
def __init__(self, templatePath):
    """Read the HTML template from disk; rendering is deferred to the first GET."""
    # This may raise an IOError; there is no sensible recovery, so propagate.
    with open(templatePath) as templateFile:
        self.template = templateFile.read()

    self.indexHtml = None  # rendered lazily on the first GET request
    self.vars = {}
    self.config = config.getConfig()
def setUpModule():
    """Start the server, then put both test plugin trees on the plugin path."""
    base.startServer()
    testsDir = os.path.dirname(os.path.dirname(__file__))
    roots = [
        os.path.join(testsDir, 'test_plugins'),
        os.path.join(testsDir, 'test_additional_plugins'),
    ]
    config.getConfig()['plugins'] = {'plugin_directory': ':'.join(roots)}
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None, buildDag=True):
    """
    Loads a set of plugins into the application.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :type root: object
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :param apiRoot: The cherrypy api root object.
    :type apiRoot: object or None
    :param curConfig: A girder config object to use.
    :type curConfig: dict or None
    :param buildDag: If the ``plugins`` parameter is already a topo-sorted list
        with all dependencies resolved, set this to False and it will skip
        rebuilding the DAG. Otherwise the dependency resolution and sorting
        will occur within this method.
    :type buildDag: bool
    :returns: A 3-tuple containing the modified root, config, and apiRoot
        objects.
    :rtype tuple:
    """
    if curConfig is None:
        curConfig = _config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        logprint.warning(
            'Warning: the plugin_directory setting is deprecated. Please use '
            'the `girder-install plugin` command and remove this setting from '
            'your config file.')

    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        pluginsModule = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = pluginsModule
        sys.modules[ROOT_PLUGINS_PACKAGE] = pluginsModule

    logprint.info('Resolving plugin dependencies...')

    if buildDag:
        plugins = getToposortedPlugins(plugins, curConfig, ignoreMissing=True)

    for pluginName in plugins:
        try:
            root, appconf, apiRoot = loadPlugin(
                pluginName, root, appconf, apiRoot, curConfig=curConfig)
            logprint.success('Loaded plugin "%s"' % pluginName)
        except Exception:
            # Keep loading the remaining plugins even if one fails.
            logprint.exception(
                'ERROR: Failed to load plugin "%s":' % pluginName)

    return root, appconf, apiRoot
def testLogRoute(self):
    """Exercise /system/log: admin-only access and byte-count truncation."""
    logRoot = os.path.join(ROOT_DIR, "tests", "cases", "dummylogs")
    config.getConfig()["logging"] = {"log_root": logRoot}

    # Non-admin users may not read the logs.
    resp = self.request(path="/system/log", user=self.users[1],
                        params={"log": "error", "bytes": 0})
    self.assertStatus(resp, 403)

    # (log name, requested byte count, expected body) for the admin user.
    cases = (
        ("error", 0,
         "=== Last 12 bytes of %s/error.log: ===\n\nHello world\n" % logRoot),
        ("error", 6,
         "=== Last 6 bytes of %s/error.log: ===\n\nworld\n" % logRoot),
        ("info", 6,
         "=== Last 0 bytes of %s/info.log: ===\n\n" % logRoot),
    )
    for logName, byteCount, expectedBody in cases:
        resp = self.request(path="/system/log", user=self.users[0],
                            params={"log": logName, "bytes": byteCount},
                            isJson=False)
        self.assertStatusOk(resp)
        self.assertEqual(self.getBody(resp), expectedBody)

    del config.getConfig()["logging"]
def setUp(self):
    """Start the server with test_plugin enabled, then check plugin import semantics."""
    testPluginDir = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'test_plugins')
    config.getConfig()['plugins'] = {'plugin_directory': testPluginDir}
    base.enabledPlugins.append('test_plugin')
    base.startServer()

    # Plugins should be importable as submodules of girder.plugins.
    from girder.plugins import test_plugin
    self.assertEqual(type(test_plugin), types.ModuleType)
def endpointDecorator(self, *path, **params):
    """
    Wrap a REST endpoint handler: set common CORS headers, disable response
    caching, invoke the handler, convert known Girder exceptions into REST
    error payloads, and log the request.

    NOTE(review): ``fun`` is the wrapped handler captured from the enclosing
    decorator scope (not visible in this chunk).
    """
    _setCommonCORSHeaders()
    cherrypy.lib.caching.expires(0)
    try:
        val = fun(self, path, params)

        # If this is a partial response, we set the status appropriately
        if 'Content-Range' in cherrypy.response.headers:
            cherrypy.response.status = 206

        val = _mongoCursorToList(val)

        if callable(val):
            # If the endpoint returned anything callable (function,
            # lambda, functools.partial), we assume it's a generator
            # function for a streaming response.
            cherrypy.response.stream = True
            _logRestRequest(self, path, params)
            return val()

        if isinstance(val, cherrypy.lib.file_generator):
            # Don't do any post-processing of static files
            return val

        if isinstance(val, types.GeneratorType):
            # Materialize generators so they can be serialized below.
            val = list(val)

    # Each known exception type is translated to a structured error payload;
    # the except order matters since these may share base classes.
    except RestException as e:
        val = _handleRestException(e)
    except AccessException as e:
        val = _handleAccessException(e)
    except GirderException as e:
        val = _handleGirderException(e)
    except ValidationException as e:
        val = _handleValidationException(e)
    except cherrypy.HTTPRedirect:
        # Redirects are normal control flow for cherrypy; re-raise untouched.
        raise
    except Exception:
        # These are unexpected failures; send a 500 status
        logger.exception('500 Error')
        cherrypy.response.status = 500
        t, value, tb = sys.exc_info()
        val = {'message': '%s: %s' % (t.__name__, repr(value)),
               'type': 'internal'}
        curConfig = config.getConfig()
        if curConfig['server']['mode'] != 'production':
            # Unless we are in production mode, send a traceback too
            val['trace'] = traceback.extract_tb(tb)

    resp = _createResponse(val)
    _logRestRequest(self, path, params)
    return resp