def testFiltering(self): job = self.model("job", "jobs").createJob(title="A job", type="t", user=self.users[1], public=True) job["_some_other_field"] = "foo" job = self.model("job", "jobs").save(job) resp = self.request("/job/{}".format(job["_id"])) self.assertStatusOk(resp) self.assertTrue("created" in resp.json) self.assertTrue("_some_other_field" not in resp.json) self.assertTrue("kwargs" not in resp.json) self.assertTrue("args" not in resp.json) resp = self.request("/job/{}".format(job["_id"]), user=self.users[0]) self.assertTrue("kwargs" in resp.json) self.assertTrue("args" in resp.json) def filterJob(event): event.info["job"]["_some_other_field"] = "bar" event.addResponse({"exposeFields": ["_some_other_field"], "removeFields": ["created"]}) events.bind("jobs.filter", "test", filterJob) resp = self.request("/job/{}".format(job["_id"])) self.assertStatusOk(resp) self.assertEqual(resp.json["_some_other_field"], "bar") self.assertTrue("created" not in resp.json)
def load(self, info):
    events.bind('geometa.created', 'name', itemAddedToCollection)
    # Add bind event for last item deleted in geometa collection
    # This is probably the wrong event
    # events.bind('model.item.remove', 'name',
    #             itemRemovedFromCollection)
    info['apiRoot'].collection.route('GET', (':id', 'geobrowser'),
                                     singleCollectionHandler)
    info['apiRoot'].collection.route('GET', ('geobrowser',),
                                     listCollectionHandler)
    info['apiRoot'].collection.route('GET', ('geobrowser', 'search'),
                                     facetedSearchHandler)
    info['apiRoot'].collection.route('PUT', ('geobrowser',),
                                     forceRecomputeAllHandler)
    info['apiRoot'].collection.route('DELETE', ('geobrowser',),
                                     forceDeleteAllHandler)

    frontEndResource = os.path.realpath(
        resource_filename('geobrowser_plugin', 'external_web_client'))
    if (os.path.exists(frontEndResource)
            or config.getConfig()['server']['mode'] != 'development'):
        info['config']['/geobrowser'] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': frontEndResource,
            'tools.staticdir.index': 'index.html'
        }

def load(info):
    girderRoot = info['serverRoot']
    histomicsRoot = Webroot(_template)
    histomicsRoot.updateHtmlVars(girderRoot.vars)
    histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
    info['serverRoot'].histomicstk = histomicsRoot
    info['serverRoot'].girder = girderRoot

    # create root resource for all REST end points of HistomicsTK
    resource = DockerResource('HistomicsTK')
    setattr(info['apiRoot'], resource.resourceName, resource)

    # load docker images from cache
    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
    dockerCache = dockerImageModel.loadAllImages()

    # generate REST end points for slicer CLIs of each docker image
    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    # auto-ingest annotations into database when a .anot file is uploaded
    events.bind('data.process', 'HistomicsTK', process_annotations)

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)

def load(info):
    events.bind('jobs.schedule', 'worker', schedule)
    events.bind('jobs.status.validate', 'worker', validateJobStatus)

    ModelImporter.model('job', 'jobs').exposeFields(
        AccessType.SITE_ADMIN, {'celeryTaskId', 'celeryQueue'})

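# A note on the recurring pattern: every snippet in this listing binds
# handlers through girder.events. A handler receives a single Event whose
# `info` attribute carries whatever the trigger site passed in, and it can
# publish results back via addResponse(). The following minimal sketch
# shows that contract; the event name, handler name, and handler body are
# invented for illustration and belong to no plugin above.
from girder import events

def _onSketchEvent(event):
    # event.info is the object that was passed to events.trigger()
    if event.info.get('status') == 'error':
        event.addResponse('handled')  # collected on the returned Event

events.bind('_sketch.event', '_sketch_handler', _onSketchEvent)
result = events.trigger('_sketch.event', info={'status': 'error'})
assert result.responses == ['handled']
events.unbind('_sketch.event', '_sketch_handler')
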
def load(info):
    # set the title of the HTML pages
    info['serverRoot'].updateHtmlVars({'title': 'ISIC Archive'})

    # add event listeners
    events.bind('rest.get.describe/:resource.after', 'onDescribeResource',
                onDescribeResource)
    events.bind('model.job.save', 'onJobSave', onJobSave)

    # add custom model searching
    resource.allowedSearchTypes.update({
        'image.isic_archive',
        'featureset.isic_archive',
        'study.isic_archive',
    })

    # register licenses for template usage
    mail_utils.addTemplateDirectory(
        os.path.join(info['pluginRootDir'], 'server', 'license_templates'),
        prepend=True)

    registerPluginWebroot(Webroot(), info['name'])

    # add static file serving
    info['config']['/uda'] = {
        'tools.staticdir.on': 'True',
        'tools.staticdir.dir': os.path.join(info['pluginRootDir'], 'custom')
    }

    # add dynamic root routes
    # root endpoints -> where a user may go and expect a UI
    class Root(object):
        pass

    legacyWebroot = Root()
    legacyWebroot.gallery = staticFile(
        os.path.join(info['pluginRootDir'], 'custom', 'gallery.html'))
    legacyWebroot.segment = staticFile(
        os.path.join(info['pluginRootDir'], 'custom', 'phase1.html'))
    legacyWebroot.annotate = staticFile(
        os.path.join(info['pluginRootDir'], 'custom', 'phase2.html'))
    registerPluginWebroot(legacyWebroot, 'markup')

    # create all necessary users, groups, collections, etc
    provisionDatabase()

    # add api routes
    # remove docs for default Girder API, to simplify page
    clearRouteDocs()

    # TODO: nest these under a "/isic" path?
    info['apiRoot'].annotation = api.AnnotationResource()
    info['apiRoot'].dataset = api.DatasetResource()
    info['apiRoot'].featureset = api.FeaturesetResource()
    info['apiRoot'].image = api.ImageResource()
    info['apiRoot'].segmentation = api.SegmentationResource()
    info['apiRoot'].study = api.StudyResource()
    info['apiRoot'].task = api.TaskResource()
    api.attachUserApi(info['apiRoot'].user)

def load(info):
    registerAccessFlag(constants.ACCESS_FLAG_EXECUTE_TASK,
                       name='Execute analyses', admin=True)
    TokenScope.describeScope(
        constants.TOKEN_SCOPE_EXECUTE_TASK, name='Execute tasks',
        description='Execute item tasks.')
    TokenScope.describeScope(
        constants.TOKEN_SCOPE_AUTO_CREATE_CLI, 'Item task auto-creation',
        'Create new CLIs via automatic introspection.', admin=True)

    ModelImporter.model('item').ensureIndex(
        ['meta.isItemTask', {'sparse': True}])
    ModelImporter.model('item').exposeFields(
        level=AccessType.READ, fields='createdByJob')
    ModelImporter.model('job', 'jobs').exposeFields(
        level=AccessType.READ, fields={'itemTaskId', 'itemTaskBindings'})

    events.bind('jobs.job.update', info['name'], _onJobSave)
    events.bind('data.process', info['name'], _onUpload)

    info['apiRoot'].item_task = ItemTask()

    info['apiRoot'].item.route(
        'POST', (':id', 'item_task_slicer_cli_description'),
        runSlicerCliTasksDescriptionForItem)
    info['apiRoot'].item.route(
        'PUT', (':id', 'item_task_slicer_cli_xml'),
        configureItemTaskFromSlicerCliXml)
    info['apiRoot'].item.route(
        'POST', (':id', 'item_task_json_description'),
        runJsonTasksDescriptionForItem)
    info['apiRoot'].item.route(
        'PUT', (':id', 'item_task_json_specs'),
        configureItemTaskFromJson)
    info['apiRoot'].folder.route(
        'POST', (':id', 'item_task_slicer_cli_description'),
        runSlicerCliTasksDescriptionForFolder)
    info['apiRoot'].folder.route(
        'POST', (':id', 'item_task_slicer_cli_xml'),
        createItemTasksFromSlicerCliXml)
    info['apiRoot'].folder.route(
        'POST', (':id', 'item_task_json_description'),
        runJsonTasksDescriptionForFolder)
    info['apiRoot'].folder.route(
        'POST', (':id', 'item_task_json_specs'),
        createItemTasksFromJson)

def load(self, info):
    getPlugin('jobs').load(info)

    info['apiRoot'].nli = NLI()
    events.bind('jobs.job.update.after', 'nlisim', update_status)

    job_model = Job()
    job_model.exposeFields(level=constants.AccessType.ADMIN,
                           fields={'args', 'kwargs'})

def initialize(self):
    self.name = 'user'
    self.ensureIndices(
        ['login', 'email', 'groupInvites.groupId', 'size', 'created'])
    self.prefixSearchFields = (
        'login', ('firstName', 'i'), ('lastName', 'i'))

    self.ensureTextIndex({
        'login': 1,
        'firstName': 1,
        'lastName': 1
    }, language='none')

    self.exposeFields(level=AccessType.READ, fields=(
        '_id', 'login', 'public', 'firstName', 'lastName', 'admin',
        'created'))
    self.exposeFields(level=AccessType.ADMIN, fields=(
        'size', 'email', 'groups', 'groupInvites', 'status',
        'emailVerified'))

    events.bind('model.user.save.created',
                CoreEventHandler.USER_SELF_ACCESS, self._grantSelfAccess)
    events.bind('model.user.save.created',
                CoreEventHandler.USER_DEFAULT_FOLDERS,
                self._addDefaultFolders)

def load(info): """Initialize the plugin.""" info['apiRoot'].spec = spec.Spec() info['apiRoot'].graph = graph.Graph() ingest() GitHub.addScopes(['user:email', 'public_repo']) events.bind('oauth.auth_callback.after', 'cis', storeToken)
def testAsyncEvents(self):
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    events.bind(failname, handlerName, self._raiseException)
    events.bind(name, handlerName, self._incrementWithResponse)

    def callback(event):
        self.ctr += 1
        self.responses = event.responses

    # Make sure an async handler that fails does not break the event loop
    # and that its callback is not triggered.
    self.assertEqual(events.daemon.eventQueue.qsize(), 0)
    events.daemon.trigger(failname, handlerName, callback)

    # Triggering the event before the daemon starts should do nothing
    self.assertEqual(events.daemon.eventQueue.qsize(), 1)
    events.daemon.trigger(name, {'amount': 2}, callback)
    self.assertEqual(events.daemon.eventQueue.qsize(), 2)
    self.assertEqual(self.ctr, 0)

    # Now run the asynchronous event handler, which should eventually
    # cause our counter to be incremented.
    events.daemon.start()
    time.sleep(0.1)
    self.assertEqual(events.daemon.eventQueue.qsize(), 0)
    self.assertEqual(self.ctr, 3)
    self.assertEqual(self.responses, ['foo'])

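# The async test above exercises girder.events.daemon, a background thread
# that drains a queue of triggered events. A minimal sketch of that flow,
# using invented event and handler names; the sleep is the same crude
# synchronization the test itself relies on.
import time
from girder import events

counter = {'value': 0}

def _increment(event):
    # Runs on the daemon thread once the queued event is processed
    counter['value'] += event.info.get('amount', 1)

events.bind('_sketch.increment', '_sketch_handler', _increment)
events.daemon.start()
events.daemon.trigger('_sketch.increment', {'amount': 2})
time.sleep(0.1)  # give the daemon time to drain its queue
assert counter['value'] == 2
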
def setUp(self):
    base.TestCase.setUp(self)

    user = {
        "email": "*****@*****.**",
        "login": "******",
        "firstName": "First",
        "lastName": "Last",
        "password": "******",
    }
    self.user = self.model("user").createUser(**user)
    folders = self.model("folder").childFolders(
        parent=self.user, parentType="user", user=self.user)
    for folder in folders:
        if folder["public"] is True:
            self.publicFolder = folder
        else:
            self.privateFolder = folder

    secondUser = {
        "email": "*****@*****.**",
        "login": "******",
        "firstName": "Second",
        "lastName": "User",
        "password": "******",
    }
    self.secondUser = self.model("user").createUser(**secondUser)

    self.testForFinalizeUpload = False
    self.finalizeUploadBeforeCalled = False
    self.finalizeUploadAfterCalled = False
    events.bind("model.file.finalizeUpload.before",
                "_testFinalizeUploadBefore", self._testFinalizeUploadBefore)
    events.bind("model.file.finalizeUpload.after",
                "_testFinalizeUploadAfter", self._testFinalizeUploadAfter)

def __init__(self, templatePath=None):
    if not templatePath:
        templatePath = os.path.join(constants.PACKAGE_DIR,
                                    'utility', 'webroot.mako')
    super(Webroot, self).__init__(templatePath)

    settings = Setting()
    self.vars = {
        'plugins': [],
        'apiRoot': '',
        'staticRoot': '',
        # 'title' is deprecated; use brandName instead
        'title': 'Girder',
        'brandName': settings.get(SettingKey.BRAND_NAME),
        'bannerColor': settings.get(SettingKey.BANNER_COLOR),
        'contactEmail': settings.get(SettingKey.CONTACT_EMAIL_ADDRESS),
        'registrationPolicy': settings.get(SettingKey.REGISTRATION_POLICY),
        'enablePasswordLogin': settings.get(SettingKey.ENABLE_PASSWORD_LOGIN)
    }

    events.bind('model.setting.save.after',
                CoreEventHandler.WEBROOT_SETTING_CHANGE, self._onSettingSave)
    events.bind('model.setting.remove',
                CoreEventHandler.WEBROOT_SETTING_CHANGE,
                self._onSettingRemove)

def load(info):
    AssetstoreType.HDFS = 'hdfs'
    events.bind('assetstore.update', 'hdfs_assetstore', updateAssetstore)
    events.bind('rest.post.assetstore.before', 'hdfs_assetstore',
                createAssetstore)

    assetstore_utilities.setAssetstoreAdapter(AssetstoreType.HDFS,
                                              HdfsAssetstoreAdapter)

    (Assetstore.createAssetstore.description
        .param('host', 'The namenode host (for HDFS type).', required=False)
        .param('port', 'The namenode RPC port (for HDFS type).',
               required=False)
        .param('path', 'Absolute path under which new files will be stored '
               '(for HDFS type).', required=False)
        .param('user', 'The effective user to use when calling HDFS RPCs '
               '(for HDFS type). This defaults to whatever system username '
               'the Girder server process is running under.', required=False)
        .param('webHdfsPort', 'WebHDFS port for the namenode. You must '
               'enable WebHDFS on your Hadoop cluster if you want to write '
               'new files to the assetstore (for HDFS type).',
               required=False))

    info['apiRoot'].hdfs_assetstore = HdfsAssetstoreResource()

def addImage(self, name, status):
    """test the put endpoint, name can be a string or a list of strings"""
    event = threading.Event()

    def tempListener(self, girderEvent):
        job = girderEvent.info['job']

        if (job['type'] == 'slicer_cli_web_job' and
                job['status'] in (JobStatus.SUCCESS, JobStatus.ERROR)):
            self.assertEqual(job['status'], status,
                             'The status of the job should match')
            events.unbind('jobs.job.update.after', 'HistomicsTK_add')
            event.set()

    self.addHandler = types.MethodType(tempListener, self)
    events.bind('jobs.job.update.after', 'HistomicsTK_add', self.addHandler)

    resp = self.request(path='/HistomicsTK/HistomicsTK/docker_image',
                        user=self.admin, method='PUT',
                        params={"name": json.dumps(name)}, isJson=False)
    self.assertStatus(resp, 200)

    if not event.wait(TIMEOUT):
        self.fail('adding the docker image is taking '
                  'longer than %d seconds' % TIMEOUT)
    del self.addHandler

def load(info):
    notebook = Notebook()

    info['apiRoot'].ythub = ytHub()
    info['apiRoot'].notebook = notebook
    info['apiRoot'].frontend = Frontend()
    info['apiRoot'].folder.route('GET', (':id', 'listing'), listFolder)
    info['apiRoot'].item.route('GET', (':id', 'listing'), listItem)
    info['apiRoot'].item.route('PUT', (':id', 'check'), checkItem)
    info['apiRoot'].folder.route('GET', (':id', 'rootpath'), folderRootpath)
    info['apiRoot'].folder.route('PUT', (':id', 'check'), checkFolder)
    info['apiRoot'].collection.route('PUT', (':id', 'check'), checkCollection)

    curConfig = config.getConfig()
    if curConfig['server']['mode'] == 'testing':
        cull_period = 1
    else:
        cull_period = int(curConfig['server'].get('heartbeat', -1))

    if cull_period > 0:
        def _heartbeat():
            events.trigger('heartbeat')

        logger.info('Starting Heartbeat every %i s' % cull_period)
        heartbeat = cherrypy.process.plugins.Monitor(
            cherrypy.engine, _heartbeat, frequency=cull_period,
            name="Heartbeat")
        heartbeat.subscribe()
        events.bind('heartbeat', 'ythub', notebook.cullNotebooks)

    events.bind('model.user.save.created', 'ythub', addDefaultFolders)

def initialize(self):
    super(Image, self).initialize()

    self.prefixSearchFields = ['lowerName', 'name']

    events.bind('data.process', 'onSuperpixelsUpload',
                self.onSuperpixelsUpload)

def initialize(self):
    self._writeLock = threading.Lock()
    self.name = 'annotation'
    self.ensureIndices([
        'itemId',
        'created',
        'creatorId',
        ([
            ('itemId', SortDir.ASCENDING),
            ('_active', SortDir.ASCENDING),
        ], {}),
        ([
            ('_annotationId', SortDir.ASCENDING),
            ('_version', SortDir.DESCENDING),
        ], {}),
        'updated',
    ])
    self.ensureTextIndex({
        'annotation.name': 10,
        'annotation.description': 1,
    })

    self.exposeFields(AccessType.READ, (
        'annotation',
        '_version',
        '_elementQuery',
        '_active',
    ) + self.baseFields)
    events.bind('model.item.remove', 'large_image', self._onItemRemove)
    events.bind('model.item.copy.prepare', 'large_image.annotation',
                self._prepareCopyItem)
    events.bind('model.item.copy.after', 'large_image.annotation',
                self._handleCopyItem)

    self._historyEnabled = Setting().get(
        constants.PluginSettings.LARGE_IMAGE_ANNOTATION_HISTORY)
    # Listen for changes to our relevant settings
    events.bind('model.setting.save.after', 'large_image',
                self._onSettingChange)
    events.bind('model.setting.remove', 'large_image', self._onSettingChange)

def initialize(self):
    self.name = 'user'
    self.ensureIndices(
        ['login', 'email', 'groupInvites.groupId', 'size', 'created'])
    self.prefixSearchFields = (
        'login', ('firstName', 'i'), ('lastName', 'i'))

    self.ensureTextIndex({
        'login': 1,
        'firstName': 1,
        'lastName': 1
    }, language='none')

    self.exposeFields(level=AccessType.READ, fields=(
        '_id', 'login', 'public', 'firstName', 'lastName', 'admin',
        'created'))
    self.exposeFields(level=AccessType.ADMIN, fields=(
        'size', 'email', 'groups', 'groupInvites', 'status',
        'emailVerified'))

    # To ensure compatibility with authenticator apps, other defaults
    # shouldn't be changed
    self._TotpFactory = TOTP.using(
        # An application secret could be set here, if it existed
        wallet=None
    )

    events.bind('model.user.save.created',
                CoreEventHandler.USER_SELF_ACCESS, self._grantSelfAccess)
    events.bind('model.user.save.created',
                CoreEventHandler.USER_DEFAULT_FOLDERS,
                self._addDefaultFolders)

def load(info):
    ModelImporter.model('job', 'jobs').exposeFields(
        level=AccessType.ADMIN, fields='processedFiles')
    ModelImporter.model('job', 'jobs').exposeFields(
        level=AccessType.SITE_ADMIN, fields='processedFiles')

    Osumo._cp_config['tools.staticdir.dir'] = os.path.join(
        os.path.relpath(info['pluginRootDir'],
                        info['config']['/']['tools.staticdir.root']),
        'web-external')

    # Move girder app to /girder, serve sumo app from /
    info['apiRoot'].osumo = Osumo()
    (info['serverRoot'], info['serverRoot'].girder) = (
        info['apiRoot'].osumo, info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api

    events.bind('data.process', 'osumo', info['apiRoot'].osumo.dataProcess)

def load(self, info):
    AssetstoreType.NEWT = 'newt'
    setAssetstoreAdapter(AssetstoreType.NEWT, NewtAssetstoreAdapter)
    events.bind('assetstore.update', 'newt', updateAssetstore)

    info['apiRoot'].newt = Newt()
    info['apiRoot'].newt_assetstores = NewtAssetstore()

    if hasattr(girder, '__version__') and girder.__version__[0] == '3':
        # Replace User._validateLogin to accept 3-letter user names
        def _validateNewtLogin(login):
            if '@' in login:
                # Hard-code this constraint so we can always easily
                # distinguish an email address from a login
                raise ValidationException('Login may not contain "@".',
                                          'login')
            # For reference, girder's regex is r'^[a-z][\da-z\-\.]{3,}$'
            if not re.match(r'^[a-z][\da-z_\-\.]{2,}$', login):
                raise ValidationException(
                    'Login must be at least 3 characters, start with a '
                    'letter, and may only contain letters, numbers, '
                    'underscores, dashes, and periods.', 'login')

        User()._validateLogin = _validateNewtLogin

def load(self, info):
    name = 'virtual_folders'

    events.bind('model.folder.validate', name, _validateFolder)
    events.bind('model.item.validate', name, _validateItem)
    events.bind('rest.get.item.before', name, _virtualChildItems)
    events.bind('rest.post.folder.after', name, _folderUpdate)
    events.bind('rest.put.folder/:id.after', name, _folderUpdate)

    Folder().exposeFields(level=AccessType.READ, fields={'isVirtual'})
    Folder().exposeFields(level=AccessType.SITE_ADMIN,
                          fields={'virtualItemsQuery', 'virtualItemsSort'})

    for endpoint in (FolderResource.updateFolder,
                     FolderResource.createFolder):
        (endpoint.description
            .param('isVirtual', 'Whether this is a virtual folder.',
                   required=False, dataType='boolean')
            .param('virtualItemsQuery',
                   'Query to use to do virtual item lookup, as JSON.',
                   required=False)
            .param('virtualItemsSort',
                   'Sort to use during virtual item lookup, as JSON.',
                   required=False))

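# The chained .param() calls above work because Description.param()
# returns the Description itself, so the whole chain can sit inside one
# parenthesized expression that mutates the endpoint's docs in place. A
# stripped-down sketch of the same pattern on a standalone Description
# (the summary text is invented):
from girder.api.describe import Description

description = Description('Create a folder.')
(description
    .param('isVirtual', 'Whether this is a virtual folder.',
           required=False, dataType='boolean')
    .param('virtualItemsQuery',
           'Query to use to do virtual item lookup, as JSON.',
           required=False))
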
def load(info): info["apiRoot"].user.route("GET", (":id", "gravatar"), getGravatar) ModelImporter.model("user").exposeFields(level=AccessType.READ, fields="gravatar_baseUrl") events.bind("model.setting.validate", "gravatar", _validateSettings) events.bind("rest.put.user/:id.before", "gravatar", _userUpdate)
def load(info):
    # Move girder app to /girder, serve minerva app from /
    info['serverRoot'], info['serverRoot'].girder = (CustomAppRoot(),
                                                     info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api

    shapefileREST = shapefile.Shapefile()
    info['apiRoot'].item.route('POST', (':id', 'geojson'),
                               shapefileREST.createGeoJson)
    info['apiRoot'].item.route('GET', (':id', 'geojson'),
                               shapefileREST.findGeoJson)

    # Admin endpoint for initializing the geonames database
    info['apiRoot'].geonames = geocodeREST = geocode.Geonames()
    info['apiRoot'].geonames.route('POST', ('setup',), geocodeREST.setup)
    info['apiRoot'].geonames.route('GET', ('geocode',), geocodeREST.geocode)
    events.bind('model.setting.validate', 'minerva', validate_settings)

    info['apiRoot'].minerva_dataset = dataset.Dataset()
    info['apiRoot'].minerva_analysis = analysis.Analysis()
    info['apiRoot'].minerva_session = session.Session()
    info['apiRoot'].minerva_dataset_s3 = s3_dataset.S3Dataset()
    info['apiRoot'].minerva_source = source.Source()
    info['apiRoot'].minerva_source_wms = wms_source.WmsSource()
    info['apiRoot'].minerva_dataset_wms = wms_dataset.WmsDataset()

def __init__(self, templatePath=None):
    if not templatePath:
        templatePath = os.path.join(constants.PACKAGE_DIR,
                                    'api', 'api_docs.mako')
    super(ApiDocs, self).__init__(templatePath)

    curConfig = config.getConfig()
    mode = curConfig['server'].get('mode', '')

    self.vars = {
        'apiRoot': '',
        'staticRoot': '',
        'brandName': Setting().get(SettingKey.BRAND_NAME),
        'mode': mode
    }

    events.unbind('model.setting.save.after',
                  CoreEventHandler.WEBROOT_SETTING_CHANGE)
    events.bind('model.setting.save.after',
                CoreEventHandler.WEBROOT_SETTING_CHANGE, self._onSettingSave)
    events.unbind('model.setting.remove',
                  CoreEventHandler.WEBROOT_SETTING_CHANGE)
    events.bind('model.setting.remove',
                CoreEventHandler.WEBROOT_SETTING_CHANGE,
                self._onSettingRemove)

def load(info):
    events.bind('imagespace.imagesearch.qparams',
                'adjust_qparams_for_maintype', add_maintype_to_qparams)
    events.bind('imagespace.solr_documents_from_field',
                'upperbase_basename_for_resourcenames',
                uppercase_basename_for_resourcenames)

def load(info):
    AssetstoreType.ESSDIVE = 'essdive'
    setAssetstoreAdapter(AssetstoreType.ESSDIVE, EssDiveAssetstoreAdapter)
    events.bind('assetstore.update', 'essdive', updateAssetstore)

    info['apiRoot'].essdive_assetstores = EssDiveAssetstore()

def testModelSaveHooks(self): """ This tests the general correctness of the model save hooks """ self.ctr = 0 def preSave(event): if '_id' not in event.info: self.ctr += 1 def postSave(event): self.ctr += 2 events.bind('model.user.save', 'test', preSave) user = self.model('user').createUser( login='******', password='******', firstName='A', lastName='A', email='*****@*****.**') self.assertEqual(self.ctr, 1) events.bind('model.user.save.after', 'test', postSave) self.ctr = 0 user = self.model('user').save(user, triggerEvents=False) self.assertEqual(self.ctr, 0) self.model('user').save(user) self.assertEqual(self.ctr, 2) events.unbind('model.user.save', 'test') events.unbind('model.user.save.after', 'test')
def load(self, info):
    # Bind REST events
    events.bind('model.file.download.request', 'download_statistics',
                _onDownloadFileRequest)
    events.bind('model.file.download.complete', 'download_statistics',
                _onDownloadFileComplete)

    # Add download count fields to file model
    File().exposeFields(level=AccessType.READ, fields='downloadStatistics')

def setUp(self):
    super(MetadataExtractorTestCase, self).setUp()

    self.processedCount = 0
    events.bind('data.process', 'metadata_extractor_test', self._postUpload)

    self.password = '******'
    self.user = User().createUser(
        'metadataextractor', self.password, 'Metadata', 'Extractor',
        '*****@*****.**')
    folders = Folder().childFolders(self.user, 'user', user=self.user)
    publicFolders = [folder for folder in folders if folder['public']]
    self.assertIsNotNone(publicFolders)

    self.name = 'Girder_Favicon.png'
    self.mimeType = 'image/png'
    self.item = Item().createItem(self.name, self.user, publicFolders[0])
    self.path = os.path.join(ROOT_DIR, 'clients', 'web', 'static', 'img',
                             self.name)
    upload = Upload().createUpload(
        self.user, self.name, 'item', self.item,
        os.path.getsize(self.path), self.mimeType)
    with open(self.path, 'rb') as fd:
        uploadedFile = Upload().handleChunk(upload, fd)
    self.assertHasKeys(
        uploadedFile,
        ['assetstoreId', 'created', 'creatorId', 'itemId', 'mimeType',
         'name', 'size'])
    self._waitForProcessCount(1)

    self.name2 = 'small.tiff'
    self.item2 = Item().createItem(self.name2, self.user, publicFolders[0])
    self.mimeType2 = 'image/tiff'
    file2 = os.path.join(os.path.dirname(__file__), 'files', 'small.tiff')
    # Open in binary mode and close the handle when the upload finishes
    with open(file2, 'rb') as fd:
        Upload().uploadFromFile(
            fd, os.path.getsize(file2), self.name2, 'item', self.item2,
            self.user)
    self._waitForProcessCount(2)

def testModelSaveHooks(self): """ This tests the general correctness of the model save hooks """ self.ctr = 0 def preSave(event): if "_id" not in event.info: self.ctr += 1 def postSave(event): self.ctr += 2 events.bind("model.user.save", "test", preSave) user = self.model("user").createUser( login="******", password="******", firstName="A", lastName="A", email="*****@*****.**" ) self.assertEqual(self.ctr, 1) events.bind("model.user.save.after", "test", postSave) self.ctr = 0 user = self.model("user").save(user, triggerEvents=False) self.assertEqual(self.ctr, 0) self.model("user").save(user) self.assertEqual(self.ctr, 2) events.unbind("model.user.save", "test") events.unbind("model.user.save.after", "test")
def load(info):
    info['apiRoot'].user.route('GET', (':id', 'gravatar'), getGravatar)
    ModelImporter.model('user').exposeFields(
        level=AccessType.READ, fields='gravatar_baseUrl')
    events.bind('model.user.save', 'gravatar', _userUpdate)

def load(self, info): info["apiRoot"].viame = Viame() info["apiRoot"].viame_detection = ViameDetection() # Relocate Girder info["serverRoot"], info["serverRoot"].girder = ( ClientWebroot(), info["serverRoot"], ) info["serverRoot"].api = info["serverRoot"].girder.api events.bind( "filesystem_assetstore_imported", "check_annotations", check_existing_annotations, ) # Create dependency on worker plugin.getPlugin('worker').load(info) Setting().set( 'worker.api_url', os.environ.get('WORKER_API_URL', 'http://*****:*****@rabbit/'), ) Setting().set( 'worker.backend', os.environ.get('WORKER_BACKEND', 'amqp://*****:*****@rabbit/'), )
def testModelSaveHooks(self): """ This tests the general correctness of the model save hooks """ self.ctr = 0 def preSave(event): if '_id' not in event.info: self.ctr += 1 def postSave(event): self.ctr += 2 events.bind('model.user.save', 'test', preSave) user = self.model('user').createUser(login='******', password='******', firstName='A', lastName='A', email='*****@*****.**') self.assertEqual(self.ctr, 1) events.bind('model.user.save.after', 'test', postSave) self.ctr = 0 user = self.model('user').save(user, triggerEvents=False) self.assertEqual(self.ctr, 0) self.model('user').save(user) self.assertEqual(self.ctr, 2) events.unbind('model.user.save', 'test') events.unbind('model.user.save.after', 'test')
def testFilesystemAssetstoreUpload(self):
    self._testUpload()

    # Test that a delete during an upload still results in one file
    adapter = assetstore_utilities.getAssetstoreAdapter(self.assetstore)
    size = 101
    data = six.BytesIO(b' ' * size)
    files = []
    files.append(self.model('upload').uploadFromFile(
        data, size, 'progress', parentType='folder', parent=self.folder,
        assetstore=self.assetstore))
    fullPath0 = adapter.fullPath(files[0])

    conditionRemoveDone = threading.Condition()
    conditionInEvent = threading.Condition()

    def waitForCondition(*args, **kwargs):
        # Signal that we are in the event and then wait to be told that
        # the delete has occurred before returning.
        with conditionInEvent:
            conditionInEvent.notify()
        with conditionRemoveDone:
            conditionRemoveDone.wait()

    def uploadFileWithWait():
        size = 101
        data = six.BytesIO(b' ' * size)
        files.append(self.model('upload').uploadFromFile(
            data, size, 'progress', parentType='folder', parent=self.folder,
            assetstore=self.assetstore))

    events.bind('model.file.finalizeUpload.before', 'waitForCondition',
                waitForCondition)

    # We create an upload that is bound to an event that waits during the
    # finalizeUpload.before event so that the remove will be executed
    # during this time.
    with conditionInEvent:
        t = threading.Thread(target=uploadFileWithWait)
        t.start()
        conditionInEvent.wait()

    self.assertTrue(os.path.exists(fullPath0))
    self.model('file').remove(files[0])
    # We shouldn't actually remove the file here
    self.assertTrue(os.path.exists(fullPath0))

    with conditionRemoveDone:
        conditionRemoveDone.notify()
    t.join()
    events.unbind('model.file.finalizeUpload.before', 'waitForCondition')

    # The second upload should land at the same path as the first
    fullPath1 = adapter.fullPath(files[1])
    self.assertEqual(fullPath0, fullPath1)
    self.assertTrue(os.path.exists(fullPath1))

def initialize(self):
    super(User, self).initialize()

    # Note, this will not expose this field through the upstream User API
    self.exposeFields(level=AccessType.READ, fields=('acceptTerms',))

    events.bind('model.user.save.created', 'onUserCreated',
                self._onUserCreated)

def load(info):
    ext = ResourceExt(info)
    events.bind("model.setting.save.after", "provenanceMain", ext.bindModels)
    events.bind("provenance.initialize", "provenanceMain", ext.bindModels)
    events.trigger("provenance.initialize", info={})
    events.bind("model.file.save", "provenanceMain", ext.fileSaveHandler)
    events.bind("model.file.save.created", "provenanceMain",
                ext.fileSaveCreatedHandler)
    events.bind("model.file.remove", "provenance", ext.fileRemoveHandler)

def load(info):
    AssetstoreType.SFTP = 'sftp'
    setAssetstoreAdapter(AssetstoreType.SFTP, SftpAssetstoreAdapter)
    events.bind('assetstore.update', 'sftp', updateAssetstore)
    events.bind('assetstore.sftp.credentials.get', 'sftp',
                retrieve_credentials)

    info['apiRoot'].sftp_assetstores = SftpAssetstoreResource()

def pop(self, queue, limit, user):
    queue, popped = self._pop_many(queue, limit, user)

    for task in popped:
        events.bind('cumulus.taskflow.status_update',
                    str(task['taskflowId']),
                    taskflow_status_callback(task['taskflowId'], queue,
                                             user))
        self._start_taskflow(task['taskflowId'], task['start_params'], user)

    return queue

def _testUploadReference(self):
    eventList = []

    def processEvent(event):
        eventList.append(event.info)

    events.bind('model.file.finalizeUpload.after', 'lib_test', processEvent)

    path = os.path.join(self.libTestDir, 'sub0', 'f')
    size = os.path.getsize(path)

    with open(path) as fh:
        self.client.uploadFile(
            self.publicFolder['_id'], fh, name='test1', size=size,
            parentType='folder', reference='test1_reference')

    self.assertEqual(len(eventList), 1)
    self.assertEqual(eventList[0]['upload']['reference'], 'test1_reference')

    self.client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
                                 reference='test2_reference')
    self.assertEqual(len(eventList), 2)
    self.assertEqual(eventList[1]['upload']['reference'], 'test2_reference')
    self.assertNotEqual(eventList[0]['file']['_id'],
                        eventList[1]['file']['_id'])

    with open(path, 'ab') as fh:
        fh.write(b'test')
    size = os.path.getsize(path)

    self.client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
                                 reference='test3_reference')
    self.assertEqual(len(eventList), 3)
    self.assertEqual(eventList[2]['upload']['reference'], 'test3_reference')
    self.assertNotEqual(eventList[0]['file']['_id'],
                        eventList[2]['file']['_id'])
    self.assertEqual(eventList[1]['file']['_id'],
                     eventList[2]['file']['_id'])

    item = self.client.createItem(self.publicFolder['_id'], 'a second item')

    # Test explicit MIME type setting
    file = self.client.uploadFileToItem(item['_id'], path,
                                        mimeType='image/jpeg')
    self.assertEqual(file['mimeType'], 'image/jpeg')

    # Test guessing of MIME type
    testPath = os.path.join(self.libTestDir, 'out.txt')
    with open(testPath, 'w') as fh:
        fh.write('test')
    file = self.client.uploadFileToItem(item['_id'], testPath)
    self.assertEqual(file['mimeType'], 'text/plain')

    # Test uploading to a folder
    self.client.uploadFileToFolder(
        str(self.publicFolder['_id']), path, reference='test4_reference')
    self.assertEqual(len(eventList), 6)
    self.assertEqual(eventList[-1]['upload']['reference'],
                     'test4_reference')
    self.assertNotEqual(eventList[2]['file']['_id'],
                        eventList[-1]['file']['_id'])

def load(self, info):
    User().ensureIndex((
        (('oauth.provider', SortDir.ASCENDING),
         ('oauth.id', SortDir.ASCENDING)), {}))
    User().reconnect()

    events.bind('no_password_login_attempt', 'oauth', checkOauthUser)

    info['apiRoot'].oauth = rest.OAuth()

def testUploadReference(self):
    eventList = []
    client = girder_client.GirderClient(port=os.environ['GIRDER_PORT'])

    # Register a user
    user = client.createResource('user', params={
        'firstName': 'First',
        'lastName': 'Last',
        'login': '******',
        'password': '******',
        'email': '*****@*****.**'
    })
    client.authenticate('mylogin', 'password')

    folders = client.listFolder(
        parentId=user['_id'], parentFolderType='user', name='Public')
    publicFolder = folders[0]

    def processEvent(event):
        eventList.append(event.info)

    events.bind('data.process', 'lib_test', processEvent)

    path = os.path.join(self.libTestDir, 'sub0', 'f')
    size = os.path.getsize(path)
    # Open in binary mode and close the handle when the upload finishes
    with open(path, 'rb') as fh:
        client.uploadFile(publicFolder['_id'], fh, name='test1', size=size,
                          parentType='folder', reference='test1_reference')

    starttime = time.time()
    while (not events.daemon.eventQueue.empty() and
            time.time() - starttime < 5):
        time.sleep(0.05)
    self.assertEqual(len(eventList), 1)
    self.assertEqual(eventList[0]['reference'], 'test1_reference')

    client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
                            reference='test2_reference')
    while (not events.daemon.eventQueue.empty() and
            time.time() - starttime < 5):
        time.sleep(0.05)
    self.assertEqual(len(eventList), 2)
    self.assertEqual(eventList[1]['reference'], 'test2_reference')
    self.assertNotEqual(eventList[0]['file']['_id'],
                        eventList[1]['file']['_id'])

    with open(path, 'ab') as fh:
        fh.write(b'test')
    size = os.path.getsize(path)

    client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
                            reference='test3_reference')
    while (not events.daemon.eventQueue.empty() and
            time.time() - starttime < 5):
        time.sleep(0.05)
    self.assertEqual(len(eventList), 3)
    self.assertEqual(eventList[2]['reference'], 'test3_reference')
    self.assertNotEqual(eventList[0]['file']['_id'],
                        eventList[2]['file']['_id'])
    self.assertEqual(eventList[1]['file']['_id'],
                     eventList[2]['file']['_id'])

def __init__(self):
    # Do the bindings before calling __init__(), in case a derived class
    # wants to change things in initialize()
    events.bind('model.user.remove',
                CoreEventHandler.ACCESS_CONTROL_CLEANUP,
                self._cleanupDeletedEntity)
    events.bind('model.group.remove',
                CoreEventHandler.ACCESS_CONTROL_CLEANUP,
                self._cleanupDeletedEntity)
    super(AccessControlledModel, self).__init__()