def load(self, info):
    """Load the large_image Girder plugin.

    Registers the tile-source model and REST resources and binds the
    event handlers that keep large-image data consistent with item,
    file, and job lifecycle events.
    """
    # Depend on the worker plugin for remote tile-processing jobs.
    getPlugin('worker').load(info)
    # Drop any handlers left over from a previous load of this plugin.
    unbindGirderEventsByHandlerName('large_image')
    ModelImporter.registerModel('image_item', ImageItem, 'large_image')
    # Route large_image's internal logging through Girder's loggers.
    large_image.config.setConfig('logger', girder.logger)
    large_image.config.setConfig('logprint', girder.logprint)
    # Load girder's large_image config
    curConfig = config.getConfig().get('large_image')
    for key, value in six.iteritems(curConfig or {}):
        large_image.config.setConfig(key, value)
    girder_tilesource.loadGirderTileSources()
    # REST endpoints: tile routes on items plus the top-level resource.
    TilesItemResource(info['apiRoot'])
    info['apiRoot'].large_image = LargeImageResource()
    Item().exposeFields(level=AccessType.READ, fields='largeImage')
    events.bind('data.process', 'large_image', _postUpload)
    events.bind('jobs.job.update.after', 'large_image', _updateJob)
    events.bind('model.job.save', 'large_image', _updateJob)
    events.bind('model.job.remove', 'large_image', _updateJob)
    # Invalidate the load-model cache whenever a document that can affect
    # access resolution is saved or removed.
    events.bind('model.folder.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.group.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.user.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.collection.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.item.remove', 'large_image', invalidateLoadModelCache)
    # Copy large-image metadata along with item copies.
    events.bind('model.item.copy.prepare', 'large_image', prepareCopyItem)
    events.bind('model.item.copy.after', 'large_image', handleCopyItem)
    events.bind('model.item.save.after', 'large_image', invalidateLoadModelCache)
    events.bind('model.file.save.after', 'large_image', checkForLargeImageFiles)
    events.bind('model.item.remove', 'large_image.removeThumbnails', removeThumbnails)
    events.bind('server_fuse.unmount', 'large_image', large_image.cache_util.cachesClear)
    events.bind('model.file.remove', 'large_image', handleRemoveFile)
def load(self, info):
    """Load the VIAME Girder plugin.

    Registers the VIAME REST resources, serves the client app at the
    server root (moving stock Girder under /girder), and configures the
    worker plugin from environment variables.
    """
    info["apiRoot"].viame = Viame()
    info["apiRoot"].viame_detection = ViameDetection()

    # Relocate Girder: the client webroot takes over the server root and
    # the original Girder webroot (with its API) is remounted beneath it.
    info["serverRoot"], info["serverRoot"].girder = (
        ClientWebroot(),
        info["serverRoot"],
    )
    info["serverRoot"].api = info["serverRoot"].girder.api

    events.bind(
        "filesystem_assetstore_imported",
        "check_annotations",
        check_existing_annotations,
    )

    # Create dependency on worker
    plugin.getPlugin('worker').load(info)
    # NOTE(review): the credential portions of these default URLs appear
    # redacted; confirm the intended defaults before relying on them.
    Setting().set(
        'worker.api_url',
        os.environ.get('WORKER_API_URL', 'http://*****:*****@rabbit/'),
    )
    Setting().set(
        'worker.backend',
        os.environ.get('WORKER_BACKEND', 'amqp://*****:*****@rabbit/'),
    )
def load(self, info):
    """Load the large_image annotation plugin.

    Registers the annotation model and REST resource and adds a
    ``copyAnnotations`` parameter to the copy endpoints.
    """
    getPlugin('large_image').load(info)
    ModelImporter.registerModel('annotation', Annotation, 'large_image')
    info['apiRoot'].annotation = AnnotationResource()
    # Ask for some models to make sure their singletons are initialized.
    Annotation()

    # add copyAnnotations option to POST resource/copy, POST item/{id}/copy
    # and POST folder/{id}/copy
    info['apiRoot'].resource.copyResources.description.param(
        'copyAnnotations', 'Copy annotations when copying resources (default true)',
        required=False, dataType='boolean')
    info['apiRoot'].item.copyItem.description.param(
        'copyAnnotations', 'Copy annotations when copying item (default true)',
        required=False, dataType='boolean')
    info['apiRoot'].folder.copyFolder.description.param(
        'copyAnnotations', 'Copy annotations when copying folder (default true)',
        required=False, dataType='boolean')
def load(self, info):
    """Load the nlisim plugin: REST resource, job-status handler, and
    extra admin-visible job fields."""
    getPlugin('jobs').load(info)
    info['apiRoot'].nli = NLI()
    events.bind('jobs.job.update.after', 'nlisim', update_status)
    job_model = Job()
    # Expose job args/kwargs to admins only (they may contain internals).
    job_model.exposeFields(level=constants.AccessType.ADMIN, fields={'args', 'kwargs'})
def load(self, info):
    """Load the Globus endpoints plugin.

    Intercepts folder/item/file read endpoints so Globus-backed data can
    be served, saves the OAuth token after authentication, and widens the
    requested Globus auth scopes to include the transfer API.
    """
    plugin.getPlugin('oauth').load(info)
    name = 'globus_endpoints'
    events.bind('rest.get.item.before', name, _globusChildItems)
    events.bind('rest.get.file/:id/download.before', name, _globusFileDownload)
    events.bind('rest.get.folder.before', name, _globusChildFolders)
    events.bind('rest.get.folder/:id.before', name, _globusFolderInfo)
    events.bind('rest.get.folder/:id/details.before', name, _globusFolderDetails)
    events.bind('rest.get.folder/:id/rootpath.before', name, _globusRootPath)
    events.bind('rest.get.item/:id.before', name, _globusItemInfo)
    events.bind('rest.get.item/:id/download.before', name, _globusFileDownload)
    events.bind('rest.get.item/:id/files.before', name, _globusFileList)
    events.bind('rest.get.item/:id/rootpath.before', name, _globusRootPath)
    events.bind('oauth.auth_callback.after', name, _saveGlobusToken)
    # TODO file GET
    Globus._AUTH_SCOPES += [
        'urn:globus:auth:scope:transfer.api.globus.org:all',
        'https://auth.globus.org/scopes/56ceac29-e98a-440a-a594-b41e7a084b62/all'  # petrel RS
    ]
def testPluginLoadOrder(registry, logprint):
    """Dependencies load (and log) before the plugin that requested them."""
    expectedOrder = ['plugin1', 'plugin2', 'plugin0']
    plugin.getPlugin('plugin0').load({})
    assert plugin.loadedPlugins() == expectedOrder
    logprint.success.assert_has_calls(
        [mock.call('Loaded plugin "%s"' % name) for name in expectedOrder])
def testPluginLoadWithDeps(registry, logprint):
    """Loading a plugin also loads its dependency, but nothing unrelated."""
    plugin2 = plugin.getPlugin('plugin2')
    plugin2.load({})
    assert plugin2.loaded is True
    logprint.success.assert_any_call('Loaded plugin "plugin2"')
    # The dependency was loaded as a side effect of loading plugin2...
    assert plugin.getPlugin('plugin1').loaded is True
    logprint.success.assert_any_call('Loaded plugin "plugin1"')
    # ...while an unrelated plugin stays unloaded.
    assert plugin.getPlugin('plugin3').loaded is False
def testLoadPluginWithError(registry, logprint):
    """A failing load() is logged and recorded; each attempt raises a
    distinct exception instance."""
    failing = plugin.getPlugin('throws')
    with pytest.raises(Exception) as firstError:
        failing.load({})
    logprint.exception.assert_called_once_with('Failed to load plugin throws')
    assert 'throws' in plugin.getPluginFailureInfo()

    with pytest.raises(Exception) as secondError:
        failing.load({})
    assert firstError.value is not secondError.value
def load(self, info):
    """Load the thumbnails plugin: REST endpoint, exposed thumbnail
    fields, and cleanup/processing event handlers."""
    getPlugin('jobs').load(info)
    name = 'thumbnails'
    info['apiRoot'].thumbnail = rest.Thumbnail()
    # Every model that can own thumbnails exposes the list and removes
    # its thumbnails when the owning document is deleted.
    for model in (Item(), Collection(), Folder(), User()):
        model.exposeFields(level=AccessType.READ, fields='_thumbnails')
        events.bind('model.%s.remove' % model.name, name, removeThumbnails)
    events.bind('model.file.remove', name, removeThumbnailLink)
    events.bind('data.process', name, _onUpload)
def load(self, info):
    """Load the DIVE server plugin.

    Registers DIVE models and REST resources, exposes extra user/job
    fields, mounts the client webroot over Girder's, wires import and
    email event handlers, and configures the worker plugin from the
    environment.
    """
    ModelImporter.registerModel('annotationItem', AnnotationItem, plugin='dive_server')
    ModelImporter.registerModel('revisionLogItem', RevisionLogItem, plugin='dive_server')
    info["apiRoot"].dive_annotation = AnnotationResource("dive_annotation")
    info["apiRoot"].dive_configuration = ConfigurationResource("dive_configuration")
    info["apiRoot"].dive_dataset = DatasetResource("dive_dataset")
    info["apiRoot"].dive_rpc = RpcResource("dive_rpc")

    # Setup route additions for exsting resources
    info["apiRoot"].user.route("PUT", (":id", "use_private_queue"), use_private_queue)
    User().exposeFields(AccessType.READ, constants.UserPrivateQueueEnabledMarker)

    # Expose Job dataset assocation
    Job().exposeFields(AccessType.READ, constants.JOBCONST_DATASET_ID)

    DIVE_MAIL_TEMPLATES = Path(os.path.realpath(__file__)).parent / 'mail_templates'
    mail_utils.addTemplateDirectory(str(DIVE_MAIL_TEMPLATES))

    # Relocate Girder: serve the client app at the server root and move
    # the stock Girder webroot (and its API) under /girder.
    info["serverRoot"], info["serverRoot"].girder = (
        ClientWebroot(),
        info["serverRoot"],
    )
    info["serverRoot"].api = info["serverRoot"].girder.api

    events.bind(
        "filesystem_assetstore_imported",
        "process_fs_import",
        process_fs_import,
    )
    events.bind(
        "s3_assetstore_imported",
        "process_s3_import",
        process_s3_import,
    )
    events.bind(
        'model.user.save.created',
        'send_new_user_email',
        send_new_user_email,
    )

    # Create dependency on worker
    plugin.getPlugin('worker').load(info)
    Setting().set(
        'worker.api_url',
        os.environ.get('WORKER_API_URL', 'http://girder:8080/api/v1'),
    )
    # The Celery broker is mandatory: fail fast at startup if unset.
    broker_url = os.environ.get('CELERY_BROKER_URL', None)
    if broker_url is None:
        raise RuntimeError('CELERY_BROKER_URL must be set')
    Setting().set('worker.broker', broker_url)
def load(self, info):
    """Bind job-adjustment handlers to every registered Slicer CLI task
    endpoint and to data uploads."""
    # load plugins you depend on
    plugin.getPlugin('slicer_cli_web').load(info)

    from slicer_cli_web.models import DockerImageItem

    # We want to bind to the endpoint for each algorithm we care about;
    # the endpoint might have a Girder ID as part of it, so we enumerate
    # the endpoints for this process.
    for image in DockerImageItem.findAllImages():
        for cli in image.getCLIs():
            path = 'slicer_cli_web/cli/%s/run' % cli._id
            events.bind('rest.post.%s.before' % path, 'slicer_job_adjuster', _onRun)
    events.bind('data.process', 'slicer_job_adjuster', _onUpload)
def load(self, info):
    """Load the queues plugin: REST resource, stale-taskflow cleanup, and
    taskflow status tracking."""
    # Load dependency plugins
    getPlugin('cumulus_plugin').load(info)
    getPlugin('taskflow').load(info)

    info['apiRoot'].queues = Queue()

    # Remove taskflows that are not running anymore from the list of running
    # taskflows stored in the Queue model
    cleanup_failed_taskflows()

    # Listen to changes in the status of the taskflows, and update the Queues
    # if needed
    events.bind('cumulus.taskflow.status_update', 'queues', on_taskflow_status_update)
def load(self, info):
    """Load the stroke plugin.

    Serves the built client, registers study/series REST resources and
    DICOM thumbnail routes, exposes study/series fields, binds upload and
    cleanup handlers, and installs a guest-user authentication fallback.
    """
    getPlugin('worker').load(info)

    # Serve the built client bundle as the plugin webroot.
    dist = os.path.join(os.path.dirname(__file__), 'dist')
    webroot = staticFile(os.path.join(dist, 'index.html'))
    registerPluginWebroot(webroot, 'stroke')

    info['config']['/stroke_static'] = {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': os.path.join(dist, 'stroke_static')
    }
    info['config']['/itk'] = {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': os.path.join(dist, 'itk')
    }

    info['apiRoot'].study = Study()
    info['apiRoot'].series = Series()
    info['apiRoot'].item.route('GET', (':id', 'dicom_thumbnail', ':uid'), _getThumbnail)
    info['apiRoot'].item.route('POST', (':id', 'dicom_thumbnail'), _createThumbnail)

    # Sparse index so non-study folders carry no index cost.
    Folder().ensureIndex(('isStudy', {'sparse': True}))
    Folder().exposeFields(level=AccessType.READ, fields={
        'isStudy', 'nSeries', 'studyDate', 'patientId', 'studyModality'
    })
    Item().exposeFields(level=AccessType.READ, fields={'isSeries'})

    events.bind('model.file.finalizeUpload.after', 'stroke', _handleUpload)
    events.bind('model.item.remove', 'stroke.decrement_series_count',
                _decrementSeriesCount)
    events.bind('model.item.remove', 'stroke.clean_thumbnails',
                lambda e: _removeThumbnails(e.info))

    # Guest user support
    events.bind('model.user.authenticate', 'stroke', _authenticateGuestUser)
    # NOTE(review): the guest credentials appear redacted here. A
    # ValidationException is treated as "user already exists" and ignored.
    try:
        User().createUser(login='******', password='******', firstName='Guest',
                          lastName='User', email='*****@*****.**')
    except ValidationException:
        pass
def load(self, info):
    """Load the DICOM archive plugin: attach archive helpers to the File
    model and register archive listing/download REST routes."""
    from . import dicom

    plugin.getPlugin('dicom_viewer').load(info)

    # Attach archive helpers directly to the File model class.
    File.archiveList = archiveList
    File.archiveOpen = archiveOpen

    route = info['apiRoot'].file
    route.route('GET', (':id', 'archive'), getArchiveList)
    route.route('GET', (':id', 'archive', 'download'), downloadArchiveFile)
    route.route('GET', (':id', 'archive', 'download', ':name'),
                downloadArchiveFileWithName)

    dicom.add_archive_access(info)
def testPluginLoadReturn(registry):
    """load() returns the plugin's value on every call, not just the first."""
    definition = plugin.getPlugin('plugin1')
    # The value should be returned every time load is called
    for _ in range(2):
        assert definition.load(info={}) == 5
def uploadFile(self, params):
    """
    Providing this works around a limitation in phantom that makes us
    unable to upload binary files, or at least ones that contain certain
    byte values. The path parameter should be provided relative to the
    root directory of the repository.

    :param params: must contain ``folderId`` (destination folder id) and
        ``path`` (a path relative to the repository root, or of the form
        ``${plugin_name}/path`` relative to a plugin's package).
    :returns: the uploaded file document.
    :raises Exception: if the plugin named in ``path`` is not registered.
    """
    self.requireParams(('folderId', 'path'), params)

    if params['path'].startswith('${'):  # relative to plugin e.g. ${my_plugin}/path
        end = params['path'].find('}')
        pluginName = params['path'][2:end]
        plugin = getPlugin(pluginName)
        if plugin is None:
            # Bug fix: the name was previously overwritten by getPlugin's
            # None return before formatting, so the message always read
            # "Invalid plugin None."; report the requested name instead.
            raise Exception('Invalid plugin %s.' % pluginName)
        root = os.path.dirname(inspect.getfile(plugin.__class__))
        path = root + params['path'][end + 1:]
    else:  # assume relative to core package
        path = os.path.join(ROOT_DIR, params['path'])

    name = os.path.basename(path)
    folder = Folder().load(params['folderId'], force=True)

    upload = Upload().createUpload(
        user=self.getCurrentUser(), name=name, parentType='folder',
        parent=folder, size=os.path.getsize(path))

    # Stream the file from disk as a single chunk.
    with open(path, 'rb') as fd:
        file = Upload().handleChunk(upload, fd)

    return file
def testPluginMetadata(registry):
    """A dependency-free plugin reports its registered metadata verbatim."""
    nodeps = plugin.getPlugin('nodeps')
    expected = {
        'name': 'nodeps',
        'version': '1.0.0',
        'url': 'url',
        'description': 'description',
    }
    for attr, value in expected.items():
        assert getattr(nodeps, attr) == value
    assert nodeps.npmPackages() == {}
def testPluginWithNPMPackage(registry):
    """npmPackages() reads the package name from the plugin's package.json."""
    with tempfile.NamedTemporaryFile() as packageJson:
        packageJson.write(b'{"name": "@girder/test_plugin"}')
        packageJson.flush()
        pluginDef = plugin.getPlugin('client_plugin')
        # Point resource_filename at the temp file so npmPackages() parses
        # our synthetic package.json instead of a real installed one.
        with mock.patch.object(plugin, 'resource_filename', return_value=packageJson.name):
            assert '@girder/test_plugin' in pluginDef.npmPackages()
def uploadFile(self, params):
    """
    Providing this works around a limitation in phantom that makes us
    unable to upload binary files, or at least ones that contain certain
    byte values. The path parameter should be provided relative to the
    root directory of the repository.

    :param params: must contain ``folderId`` (destination folder id) and
        ``path`` (a path relative to the repository root, or of the form
        ``${plugin_name}/path`` relative to a plugin's package).
    :returns: the uploaded file document.
    :raises Exception: if the plugin named in ``path`` is not registered.
    """
    self.requireParams(('folderId', 'path'), params)

    if params['path'].startswith(
            '${'):  # relative to plugin e.g. ${my_plugin}/path
        end = params['path'].find('}')
        pluginName = params['path'][2:end]
        plugin = getPlugin(pluginName)
        if plugin is None:
            # Bug fix: the name was previously overwritten by getPlugin's
            # None return before formatting, so the message always read
            # "Invalid plugin None."; report the requested name instead.
            raise Exception('Invalid plugin %s.' % pluginName)
        root = os.path.dirname(inspect.getfile(plugin.__class__))
        path = root + params['path'][end + 1:]
    else:  # assume relative to core package
        path = os.path.join(ROOT_DIR, params['path'])

    name = os.path.basename(path)
    folder = Folder().load(params['folderId'], force=True)

    upload = Upload().createUpload(user=self.getCurrentUser(),
                                   name=name,
                                   parentType='folder',
                                   parent=folder,
                                   size=os.path.getsize(path))

    # Stream the file from disk as a single chunk.
    with open(path, 'rb') as fd:
        file = Upload().handleChunk(upload, fd)

    return file
def _pluginNameToResponse(name):
    """Build the REST response document describing the named plugin."""
    found = plugin.getPlugin(name)
    return dict(
        name=found.displayName,
        description=found.description,
        url=found.url,
        version=found.version,
    )
def load(self, info):
    """Load the worker plugin: REST resource, job lifecycle handlers, and
    admin-only Celery bookkeeping fields on jobs."""
    getPlugin('jobs').load(info)
    info['apiRoot'].worker = Worker()

    events.bind('jobs.schedule', 'worker', event_handlers.schedule)
    events.bind('jobs.status.validate', 'worker', event_handlers.validateJobStatus)
    events.bind('jobs.status.validTransitions', 'worker', event_handlers.validTransitions)
    events.bind('jobs.cancel', 'worker', event_handlers.cancel)
    events.bind('model.job.save.after', 'worker', event_handlers.attachJobInfoSpec)
    events.bind('model.job.save', 'worker', event_handlers.attachParentJob)

    # Celery identifiers are only visible to site administrators.
    Job().exposeFields(AccessType.SITE_ADMIN, {'celeryTaskId', 'celeryQueue'})
def load(self, info): getPlugin('jobs').load(info) # load plugins you depend on getPlugin('worker').load(info) # load plugins you depend on DockerImageItem.prepare() # passed in resource name must match the attribute added to info[apiroot] resource = DockerResource('slicer_cli_web') info['apiRoot'].slicer_cli_web = resource ModelImporter.model('job', 'jobs').exposeFields(level=AccessType.READ, fields={'slicerCLIBindings'}) events.bind('jobs.job.update.after', resource.resourceName, resource.addRestEndpoints) events.bind('data.process', 'slicer_cli_web', _onUpload)
def testSinglePluginLoad(registry, logprint):
    """Loading twice is a no-op: the underlying load hook runs only once."""
    plugin1 = plugin.getPlugin('plugin1')
    plugin1.load({})
    assert plugin1.loaded is True
    logprint.success.assert_any_call('Loaded plugin "plugin1"')

    # A second load() call must not re-run the plugin's load hook.
    plugin1.load({})
    plugin1._testLoadMock.assert_called_once()
def load(self, info):
    """Intercept folder/item/file REST reads for cluster filesystems and
    add a cluster filesystem-path endpoint."""
    getPlugin('cumulus_plugin').load(info)

    # Each read-style REST endpoint is intercepted before dispatch so the
    # plugin can answer for cluster-backed paths.
    bindings = (
        ('rest.get.folder.before', _folder_before),
        ('rest.get.folder/:id.before', _folder_id_before),
        ('rest.get.item.before', _item_before),
        ('rest.get.item/:id.before', _item_id_before),
        ('rest.get.item/:id/files.before', _item_files_before),
        ('rest.get.file/:id.before', _file_id_before),
        ('rest.get.file/:id/download.before', _file_download_before),
    )
    for eventName, handler in bindings:
        events.bind(eventName, 'cluster_filesystem', handler)

    info['apiRoot'].clusters.route('GET', (':id', 'filesystem'), _get_path)
def testLoadPluginsSingle(registry, logprint):
    """_loadPlugins with a single name loads exactly that plugin."""
    plugin._loadPlugins(info={}, names=['plugin1'])
    assert set(plugin.loadedPlugins()) == {'plugin1'}

    loadedDefinition = plugin.getPlugin('plugin1')
    assert loadedDefinition is not None
    assert loadedDefinition.loaded is True
    loadedDefinition._testLoadMock.assert_called_once()
    logprint.success.assert_any_call('Loaded plugin "plugin1"')
def load(self, info):
    """Load the interactive thumbnails plugin: cleanup/upload handlers,
    file index, exposed fields, and thumbnail REST routes."""
    getPlugin('worker').load(info)

    events.bind('model.item.remove', __name__, lambda e: _removeThumbnails(e.info))
    events.bind('model.file.finalizeUpload.after', __name__, _handleUpload)

    # Sparse compound index to locate a thumbnail file by uid + parent.
    File().ensureIndex(([('interactive_thumbnails_uid', 1), ('attachedToId', 1)], {
        'sparse': True
    }))
    File().exposeFields(level=AccessType.READ, fields={'interactive_thumbnails_info'})
    Item().exposeFields(level=AccessType.READ, fields={'hasInteractiveThumbnail'})

    info['apiRoot'].item.route('GET', (':id', 'interactive_thumbnail', ':uid'),
                               _getThumbnail)
    info['apiRoot'].item.route('POST', (':id', 'interactive_thumbnail'),
                               _createThumbnail)
def validateCorePluginsEnabled(doc):
    """
    Validate the plugins-enabled setting value.

    Raises ValidationException when the value is not a list, or when any
    listed plugin name is not registered.

    NOTE(review): an earlier docstring claimed invalid names were removed,
    duplicates dropped, and dependencies added; this code does none of
    that — it raises on the first unknown name.
    """
    if not isinstance(doc['value'], list):
        raise ValidationException('Plugins enabled setting must be a list.', 'value')

    for pluginName in doc['value']:
        if getPlugin(pluginName) is None:
            raise ValidationException('Required plugin %s does not exist.' % pluginName)
def testLoadPluginsExclusion(registry):
    # Ignoring installed but not-requested plugins only happens in the testing
    # environment, but is critical functionality
    plugin._loadPlugins(info={}, names=['plugin1'])
    assert set(plugin.loadedPlugins()) == {'plugin1'}

    unrequested = [plugin.getPlugin(name) for name in ('plugin2', 'plugin3')]
    for definition in unrequested:
        assert definition is not None
        assert definition.loaded is False
        definition._testLoadMock.assert_not_called()
def validateCorePluginsEnabled(doc):
    """
    Validate the plugins-enabled setting value.

    Raises ValidationException when the value is not a list, or when any
    listed plugin name is not registered.

    NOTE(review): an earlier docstring claimed invalid names were removed,
    duplicates dropped, and dependencies added; this code does none of
    that — it raises on the first unknown name.
    """
    if not isinstance(doc['value'], list):
        raise ValidationException(
            'Plugins enabled setting must be a list.', 'value')

    for pluginName in doc['value']:
        if getPlugin(pluginName) is None:
            raise ValidationException(
                'Required plugin %s does not exist.' % pluginName)
def testLoadPluginsWithDeps(registry, logprint):
    """Requesting plugin2 loads its dependency plugin1 first."""
    plugin._loadPlugins(info={}, names=['plugin2'])
    assert set(plugin.loadedPlugins()) == {'plugin1', 'plugin2'}

    expectedOrder = ['plugin1', 'plugin2']
    for name in expectedOrder:
        definition = plugin.getPlugin(name)
        assert definition is not None
        assert definition.loaded is True
        definition._testLoadMock.assert_called_once()

    # Since plugin1 is the dependant, it must be loaded first
    logprint.success.assert_has_calls(
        [mock.call('Loaded plugin "%s"' % name) for name in expectedOrder],
        any_order=False)
def testPluginLoad(registry):
    """load() runs the plugin hook once and is a no-op afterwards."""
    definition = plugin.getPlugin('plugin1')
    assert definition is not None
    assert definition.loaded is False

    definition.load(info={})
    assert definition.loaded is True
    definition._testLoadMock.assert_called_once()

    # Attempting to load a second time should do nothing
    definition._testLoadMock.reset_mock()
    definition.load(info={})
    assert definition.loaded is True
    definition._testLoadMock.assert_not_called()
def load(self, info):
    """Load the covalic plugin.

    Loads dependency plugins, registers challenge/phase/submission models
    and REST resources, serves the covalic webroot with any built plugin
    assets, and binds scoring and notification event handlers.
    """
    getPlugin('gravatar').load(info)
    getPlugin('jobs').load(info)
    getPlugin('worker').load(info)
    getPlugin('thumbnails').load(info)

    mail_utils.addTemplateDirectory(os.path.join(_HERE, 'mail_templates'))

    ModelImporter.registerModel('challenge', Challenge, 'covalic')
    ModelImporter.registerModel('phase', Phase, 'covalic')
    ModelImporter.registerModel('submission', Submission, 'covalic')

    resource.allowedSearchTypes.add('challenge.covalic')

    info['apiRoot'].challenge = ChallengeResource()
    info['apiRoot'].challenge_phase = PhaseResource()
    info['apiRoot'].covalic_submission = SubmissionResource()

    # Serve the covalic webroot, injecting css/js only for plugins that
    # actually have built assets on disk.
    webroot = WebrootBase(os.path.join(_HERE, 'webroot.mako'))
    webroot.updateHtmlVars({
        'pluginCss': [
            plugin for plugin in loadedPlugins()
            if os.path.exists(os.path.join(
                STATIC_ROOT_DIR, 'built', 'plugins', plugin, 'plugin.min.css'))
        ],
        'pluginJs': [
            plugin for plugin in loadedPlugins()
            if os.path.exists(os.path.join(
                STATIC_ROOT_DIR, 'built', 'plugins', plugin, 'plugin.min.js'))
        ]
    })
    registerPluginWebroot(webroot, 'covalic')

    events.bind('jobs.job.update', 'covalic', onJobUpdate)
    events.bind('model.setting.validate', 'covalic', validateSettings)
    events.bind('model.challenge_challenge.save.after', 'covalic', challengeSaved)
    events.bind('model.challenge_phase.save.after', 'covalic', onPhaseSave)
    events.bind('model.user.save.after', 'covalic', onUserSave)
def testPluginWithNoLoadMethod(registry, logprint):
    """A plugin lacking load() fails with NotImplementedError and logs it."""
    invalid = plugin.getPlugin('invalid')
    with pytest.raises(NotImplementedError):
        invalid.load({})
    logprint.exception.assert_called_once_with('Failed to load plugin invalid')
def _collectPluginDependencies():
    """Merge the npm package requirements of every registered plugin."""
    packages = {}
    for name in allPlugins():
        packages.update(getPlugin(name).npmPackages())
    return packages
def load(self, info):
    """Load this plugin by delegating to its plugin1 dependency."""
    dependency = getPlugin('plugin1')
    dependency.load(info)
def load(self, info):
    """Load both declared dependencies, then run this plugin's own load."""
    for dependencyName in ('plugin1', 'plugin2'):
        plugin.getPlugin(dependencyName).load(info)
    super(DependsOnPlugin1and2, self).load(info)
def testPluginWithNoLoadMethod(registry):
    """A plugin lacking a load() implementation raises NotImplementedError."""
    invalid = plugin.getPlugin('invalid')
    with pytest.raises(NotImplementedError):
        invalid.load({})
def testPluginWithDisplayName(registry):
    """displayName reflects the registered human-readable name."""
    displayPlugin = plugin.getPlugin('display')
    assert (displayPlugin.name, displayPlugin.displayName) == (
        'display', 'A plugin with a display name')
def testPluginWithNoDisplayName(registry):
    """displayName falls back to the plugin name when none is registered."""
    nodepsPlugin = plugin.getPlugin('nodeps')
    assert (nodepsPlugin.name, nodepsPlugin.displayName) == ('nodeps', 'nodeps')
def load(self, info):
    """Load the item_tasks plugin this plugin depends on."""
    itemTasks = getPlugin('item_tasks')
    itemTasks.load(info)
def load(self, info):
    """Load dependency plugins, then delegate to a module-level ``load``.

    NOTE(review): the trailing ``return load(info)`` resolves to a
    module-level name, not this method; it only works if a function named
    ``load`` is defined or imported in this module — confirm against the
    module's imports.
    """
    getPlugin('jobs').load(info)
    getPlugin('worker').load(info)
    return load(info)