def wrapped(*args, **kwargs):
    if not rest.getCurrentToken():
        raise AccessException(
            'You must be logged in or have a valid auth token.')
    if required:
        Token().requireScope(rest.getCurrentToken(), scope)
    return fun(*args, **kwargs)

def loadModel(resource, model, plugin='_core', id=None, allowCookie=False,
              level=None):
    """
    Load a model based on id using the current cherrypy token parameter for
    authentication, caching the results.  This must be called in a cherrypy
    context.

    :param resource: the resource class instance calling the function.  Used
        for access to the current user and model importer.
    :param model: the model name, e.g., 'item'.
    :param plugin: the plugin name when loading a plugin model.
    :param id: a string id of the model to load.
    :param allowCookie: true if the cookie authentication method is allowed.
    :param level: access level desired.
    :returns: the loaded model.
    """
    key = tokenStr = None
    if 'token' in cherrypy.request.params:  # Token as a parameter
        tokenStr = cherrypy.request.params.get('token')
    elif 'Girder-Token' in cherrypy.request.headers:
        tokenStr = cherrypy.request.headers['Girder-Token']
    elif 'girderToken' in cherrypy.request.cookie and allowCookie:
        tokenStr = cherrypy.request.cookie['girderToken'].value
    key = (model, tokenStr, id)
    cacheEntry = LoadModelCache.get(key)
    if cacheEntry and cacheEntry['expiry'] > time.time():
        entry = cacheEntry['result']
        cacheEntry['hits'] += 1
    else:
        # we have to get the token separately from the user if we are using
        # cookies.
        if allowCookie:
            getCurrentToken(allowCookie)
            setattr(cherrypy.request, 'girderAllowCookie', True)
        entry = resource.model(model, plugin).load(
            id=id, level=level, user=resource.getCurrentUser())
        # If the cache becomes too large, just dump it -- this is simpler
        # than dropping the oldest values and avoids having to add locking.
        if len(LoadModelCache) > LoadModelCacheMaxEntries:
            LoadModelCache.clear()
        LoadModelCache[key] = {
            'id': id,
            'model': model,
            'tokenId': tokenStr,
            'expiry': time.time() + LoadModelCacheExpiryDuration,
            'result': entry,
            'hits': 0
        }
    return entry

def loadModel(resource, model, plugin='_core', id=None, allowCookie=False,
              level=None):
    """
    Load a model based on id using the current cherrypy token parameter for
    authentication, caching the results.  This must be called in a cherrypy
    context.

    :param resource: the resource class instance calling the function.  Used
        for access to the current user and model importer.
    :param model: the model name, e.g., 'item'.
    :param plugin: the plugin name when loading a plugin model.
    :param id: a string id of the model to load.
    :param allowCookie: true if the cookie authentication method is allowed.
    :param level: access level desired.
    :returns: the loaded model.
    """
    key = tokenStr = None
    if 'token' in cherrypy.request.params:  # Token as a parameter
        tokenStr = cherrypy.request.params.get('token')
    elif 'Girder-Token' in cherrypy.request.headers:
        tokenStr = cherrypy.request.headers['Girder-Token']
    elif 'girderToken' in cherrypy.request.cookie and allowCookie:
        tokenStr = cherrypy.request.cookie['girderToken'].value
    if tokenStr:
        key = (model, tokenStr, id)
    cacheEntry = LoadModelCache.get(key)
    if cacheEntry and cacheEntry['expiry'] > time.time():
        entry = cacheEntry['result']
        cacheEntry['hits'] += 1
    else:
        # we have to get the token separately from the user if we are using
        # cookies.
        if allowCookie:
            getCurrentToken(allowCookie)
        entry = resource.model(model, plugin).load(
            id=id, level=level, user=resource.getCurrentUser())
        if key:
            # If the cache becomes too large, just dump it -- this is simpler
            # than dropping the oldest values and avoids having to add locking.
            if len(LoadModelCache) > LoadModelCacheMaxEntries:
                LoadModelCache.clear()
            LoadModelCache[key] = {
                'id': id,
                'model': model,
                'tokenId': tokenStr,
                'expiry': time.time() + LoadModelCacheExpiryDuration,
                'result': entry,
                'hits': 0
            }
    return entry

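# A minimal usage sketch (not from the source) of calling loadModel() from a
# Girder REST endpoint so repeated requests with the same token hit the cache.
# The 'CachedItem' resource and its route are hypothetical; 'access',
# 'Resource', and 'AccessType' are standard Girder imports.
from girder.api import access
from girder.api.rest import Resource
from girder.constants import AccessType


class CachedItem(Resource):
    def __init__(self):
        super(CachedItem, self).__init__()
        self.route('GET', (':id',), self.getCachedItem)

    @access.public
    def getCachedItem(self, id, params):
        # loadModel() reads the token from the request (parameter, header, or
        # cookie) and returns the cached document while the entry is unexpired.
        return loadModel(self, 'item', id=id, allowCookie=True,
                         level=AccessType.READ)
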
def _get_path(cluster, path):
    basename = os.path.basename(path)
    token = getCurrentToken()

    with get_connection(token['_id'], cluster) as conn:
        entry = conn.stat(path)

    entry_id = _generate_id(cluster['_id'], path)
    parent_id = _generate_id(cluster['_id'], os.path.dirname(path))

    model = {
        '_id': entry_id,
        'size': entry.st_size,
        'name': basename,
        'created': _mtime_isoformat(entry.st_mtime),
        'updated': _mtime_isoformat(entry.st_mtime)
    }

    if stat.S_ISDIR(entry.st_mode):
        model['_modelType'] = 'folder'
        model['description'] = ''
        model['parentCollection'] = 'folder'
        model['parentId'] = parent_id
        model['public'] = False
        return model
    elif stat.S_ISREG(entry.st_mode):
        model['_modelType'] = 'file'
        model['assetstoreId'] = None
        model['exts'] = [os.path.splitext(basename)[1]]
        model['itemId'] = parent_id
        model['mimeType'] = 'application/octet-stream'
        return model

def nearestNeighborIndex(item, user, descriptorIndex):
    """
    Get the nearest neighbor index from a given item and descriptor index.

    :param item: Item to find the nn index from, usually the item that the
        user is performing the nearest neighbors search on.
    :param user: The owner of the .smqtk folder.
    :param descriptorIndex: The relevant descriptor index.
    """
    folder = ModelImporter.model('folder')

    _GirderDataElement = functools.partial(GirderDataElement,
                                           api_root=getApiUrl(),
                                           token=getCurrentToken()['_id'])

    smqtkFolder = folder.createFolder(folder.load(item['folderId'], user=user),
                                      '.smqtk', reuseExisting=True)

    try:
        meanVecFileId = localSmqtkFileIdFromName(smqtkFolder, 'mean_vec.npy')
        rotationFileId = localSmqtkFileIdFromName(smqtkFolder, 'rotation.npy')
        hash2uuidsFileId = localSmqtkFileIdFromName(smqtkFolder,
                                                    'hash2uuids.pickle')
    except Exception:
        logger.warn('SMQTK files didn\'t exist for performing NN on %s'
                    % item['_id'])
        return None

    # TODO Should these be Girder data elements? Unnecessary HTTP requests.
    functor = ItqFunctor(mean_vec_cache=_GirderDataElement(meanVecFileId),
                         rotation_cache=_GirderDataElement(rotationFileId))
    hash2uuidsKV = MemoryKeyValueStore(_GirderDataElement(hash2uuidsFileId))

    return LSHNearestNeighborIndex(functor, descriptorIndex, hash2uuidsKV,
                                   read_only=True)

def _uploadComplete(event):
    """
    Called after an upload finishes. We check if our current token is a special
    authorized upload token, and if so, delete it.

    TODO we could alternatively keep a reference count inside each token that
    authorized more than a single upload at a time, and just decrement it here.
    """
    token = getCurrentToken()

    if 'authorizedUploadId' in token:
        user = ModelImporter.model('user').load(token['userId'], force=True)
        item = ModelImporter.model('item').load(event.info['file']['itemId'],
                                                force=True)

        # Save the metadata on the item
        item['description'] = token['authorizedUploadDescription']
        item['authorizedUploadEmail'] = token['authorizedUploadEmail']
        ModelImporter.model('item').save(item)

        text = mail_utils.renderTemplate('authorized_upload.uploadFinished.mako', {
            'itemId': item['_id'],
            'itemName': item['name'],
            'itemDescription': item.get('description', '')
        })
        mail_utils.sendEmail(to=user['email'],
                             subject='Authorized upload complete', text=text)

        ModelImporter.model('token').remove(token)

def wrapped(event, **kwargs):
    if 'params' in event.info and key in event.info['params']:
        id = event.info['params'][key]
    elif key in event.info:
        id = event.info[key]
    else:
        # Request is not well formed, delegate to core.
        return

    cluster_id = None
    try:
        decoded_id = urllib.parse.unquote_plus(id)
        (cluster_id, path) = _parse_id(decoded_id)
        # If we have successfully decoded the id, then prevent the default
        event.preventDefault()
    except ValueError:
        pass

    if cluster_id is not None:
        cluster = Cluster().load(cluster_id, user=getCurrentUser())
        token = getCurrentToken()

        with get_connection(token['_id'], cluster) as conn:
            response = func(conn, path, cluster=cluster, encoded_id=id)
            event.addResponse(response)

def nearestNeighborIndex(item, user, descriptorSet):
    """
    Get the nearest neighbor index from a given item and descriptor set.

    :param item: Item to find the nn index from, usually the item that the
        user is performing the nearest neighbors search on.
    :param user: The owner of the .smqtk folder.
    :param descriptorSet: The relevant descriptor set.
    """
    folder = ModelImporter.model('folder')

    _GirderDataElement = functools.partial(GirderDataElement,
                                           api_root=getApiUrl(),
                                           token=getCurrentToken()['_id'])

    smqtkFolder = folder.createFolder(folder.load(item['folderId'], user=user),
                                      '.smqtk', reuseExisting=True)

    try:
        meanVecFileId = localSmqtkFileIdFromName(smqtkFolder, 'mean_vec.npy')
        rotationFileId = localSmqtkFileIdFromName(smqtkFolder, 'rotation.npy')
        hash2uuidsFileId = localSmqtkFileIdFromName(smqtkFolder,
                                                    'hash2uuids.pickle')
    except Exception:
        logger.warn('SMQTK files didn\'t exist for performing NN on %s'
                    % item['_id'])
        return None

    # TODO Should these be Girder data elements? Unnecessary HTTP requests.
    functor = ItqFunctor(mean_vec_cache=_GirderDataElement(meanVecFileId),
                         rotation_cache=_GirderDataElement(rotationFileId))
    hash2uuidsKV = MemoryKeyValueStore(_GirderDataElement(hash2uuidsFileId))

    return LSHNearestNeighborIndex(functor, descriptorSet, hash2uuidsKV,
                                   read_only=True)

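# A short usage sketch (assumed, not from the source): build the LSH index for
# an item and run a nearest-neighbor query with SMQTK. 'queryDescriptor' is a
# hypothetical DescriptorElement already present in the descriptor set.
index = nearestNeighborIndex(item, user, descriptorSet)
if index is not None:
    # Returns the 10 closest descriptors and their distances to the query.
    neighbors, dists = index.nn(queryDescriptor, n=10)
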
def _uploadComplete(event):
    """
    Called after an upload finishes. We check if our current token is a special
    authorized upload token, and if so, delete it.

    TODO we could alternatively keep a reference count inside each token that
    authorized more than a single upload at a time, and just decrement it here.
    """
    token = getCurrentToken()

    if token and 'authorizedUploadId' in token:
        user = User().load(token['userId'], force=True)
        item = Item().load(event.info['file']['itemId'], force=True)

        # Save the metadata on the item
        item['description'] = token['authorizedUploadDescription']
        item['authorizedUploadEmail'] = token['authorizedUploadEmail']
        Item().save(item)

        text = mail_utils.renderTemplate(
            'authorized_upload.uploadFinished.mako', {
                'itemId': item['_id'],
                'itemName': item['name'],
                'itemDescription': item.get('description', '')
            })
        mail_utils.sendEmail(to=user['email'],
                             subject='Authorized upload complete', text=text)

        Token().remove(token)

def _startContainer(self, container):
    settings = Setting()
    psRoot = settings.get(PluginSettings.PRIVATE_STORAGE_PATH)
    restUrl = rest.getApiUrl()
    token = rest.getCurrentToken()['_id']
    sessionId = str(container['sessionId'])

    mountId = efs.mount(sessionId, '/tmp/' + sessionId, psRoot, restUrl, token)

    container['mountId'] = mountId
    container['status'] = 'Running'
    self.save(container)

def _authorizeUploadStep(event):
    """
    Called before any requests dealing with partially completed uploads. Sets
    the request thread user to the authorized upload token creator if the
    requested upload is an authorized upload.
    """
    token = getCurrentToken()
    uploadId = ObjectId(event.info['params'].get('uploadId'))

    if (token and 'authorizedUploadId' in token
            and token['authorizedUploadId'] == uploadId):
        user = ModelImporter.model('user').load(token['userId'], force=True)
        setCurrentUser(user)

def process(self, workingSet, options, params):
    """
    Run the complete processing workflow.
    """
    user = self.getCurrentUser()
    apiUrl = getApiUrl()
    token = getCurrentToken()
    outputFolder = self._outputFolder(workingSet)

    requestInfo = RequestInfo(user=user, apiUrl=apiUrl, token=token)

    workflowManager = DanesfieldWorkflowManager.instance()
    jobId = workflowManager.initJob(requestInfo, workingSet, outputFolder,
                                    options)
    workflowManager.advance(jobId=jobId)

def _authorizeInitUpload(event):
    """
    Called when initializing an upload, prior to the default handler. Checks if
    the user is passing an authorized upload token, and if so, sets the current
    request-thread user to be whoever created the token.
    """
    token = getCurrentToken()
    params = event.info['params']
    tokenModel = ModelImporter.model('token')
    parentType = params.get('parentType')
    parentId = params.get('parentId', '')
    requiredScopes = {TOKEN_SCOPE_AUTHORIZED_UPLOAD,
                      'authorized_upload_folder_%s' % parentId}

    if parentType == 'folder' and tokenModel.hasScope(
            token=token, scope=requiredScopes):
        user = ModelImporter.model('user').load(token['userId'], force=True)
        setCurrentUser(user)

def _storeUploadId(event):
    """
    Called after an upload is first initialized successfully. Sets the
    authorized upload ID in the token, ensuring it can be used for only this
    upload.
    """
    returnVal = event.info['returnVal']
    token = getCurrentToken()
    tokenModel = ModelImporter.model('token')
    isAuthorizedUpload = tokenModel.hasScope(token, TOKEN_SCOPE_AUTHORIZED_UPLOAD)

    if isAuthorizedUpload and returnVal.get('_modelType', 'upload') == 'upload':
        params = event.info['params']
        token['scope'].remove(TOKEN_SCOPE_AUTHORIZED_UPLOAD)
        token['authorizedUploadId'] = returnVal['_id']
        token['authorizedUploadDescription'] = params.get(
            'authorizedUploadDescription', '')
        token['authorizedUploadEmail'] = params.get('authorizedUploadEmail')
        tokenModel.save(token)

def _authorizeUploadStep(event):
    """
    Called before any requests dealing with partially completed uploads. Sets
    the request thread user to the authorized upload token creator if the
    requested upload is an authorized upload.
    """
    token = getCurrentToken()

    try:
        uploadId = ObjectId(event.info['params'].get('uploadId', ''))
    except InvalidId:
        # Take no action, 'uploadId' will be validated again by the endpoint
        return

    if (token and 'authorizedUploadId' in token
            and token['authorizedUploadId'] == uploadId):
        user = User().load(token['userId'], force=True)
        setCurrentUser(user)

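# A sketch of how the authorized-upload handlers above could be bound to REST
# and model events in the plugin's load() function. The exact event names are
# assumptions based on the handlers' docstrings, not taken from the source.
from girder import events


def load(info):
    events.bind('rest.post.file.before', 'authorized_upload', _authorizeInitUpload)
    events.bind('rest.post.file.after', 'authorized_upload', _storeUploadId)
    events.bind('rest.post.file/chunk.before', 'authorized_upload', _authorizeUploadStep)
    events.bind('rest.post.file/completion.before', 'authorized_upload', _authorizeUploadStep)
    events.bind('model.file.finalizeUpload.after', 'authorized_upload', _uploadComplete)
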
def __init__(self, *args, **kwargs):
    gc = kwargs.pop('gc', None)

    try:
        if gc is None:
            # We need to resolve Girder's API URL, but girder_worker can
            # specify a different value than what Girder gets from a rest
            # request.
            # Girder 3
            try:
                from girder_worker.girder_plugin.utils import getWorkerApiUrl
            except ImportError:
                # Girder 2
                try:
                    from girder.plugins.worker.utils import getWorkerApiUrl
                # Fall back if the worker plugin is unavailable
                except ImportError:
                    from girder.api.rest import getApiUrl as getWorkerApiUrl

            self.gc = GirderClient(apiUrl=getWorkerApiUrl())

            from girder.api.rest import getCurrentUser
            if getCurrentUser():
                from girder.constants import TokenScope
                from girder.models.token import Token
                token = Token().createToken(
                    days=7,
                    scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE],
                    user=getCurrentUser(),
                )['_id']
            else:
                from girder.api.rest import getCurrentToken
                token = getCurrentToken()['_id']
            self.gc.token = token
        else:
            self.gc = gc
    except ImportError:
        self.gc = None

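# Hypothetical usage of the constructor above: pass a pre-authenticated
# GirderClient to skip the URL/token resolution entirely. 'RemoteAssetHelper'
# is a placeholder name, since the enclosing class is not shown in the source.
import girder_client

gc = girder_client.GirderClient(apiUrl='https://data.example.com/api/v1')
gc.authenticate(apiKey='example-api-key')
helper = RemoteAssetHelper(gc=gc)

# Without the 'gc' keyword the constructor resolves the worker API URL itself
# and mints or reuses a token for the current user or session.
helper = RemoteAssetHelper()
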
def finalizeUpload(self, upload, assetstore=None):
    """
    This should only be called manually in the case of creating an empty file,
    i.e. one that has no chunks.

    :param upload: The upload document.
    :type upload: dict
    :param assetstore: If known, the containing assetstore for the upload.
    :type assetstore: dict
    :returns: The file object that was created.
    """
    events.trigger('model.upload.finalize', upload)
    if assetstore is None:
        assetstore = self.model('assetstore').load(upload['assetstoreId'])

    if 'fileId' in upload:  # Updating an existing file's contents
        file = self.model('file').load(upload['fileId'], force=True)

        # Delete the previous file contents from the containing assetstore
        assetstore_utilities.getAssetstoreAdapter(
            self.model('assetstore').load(
                file['assetstoreId'])).deleteFile(file)

        item = self.model('item').load(file['itemId'], force=True)
        self.model('file').propagateSizeChange(
            item, upload['size'] - file['size'])

        # Update file info
        file['creatorId'] = upload['userId']
        file['created'] = datetime.datetime.utcnow()
        file['assetstoreId'] = assetstore['_id']
        file['size'] = upload['size']
        # If the file was previously imported, it is no longer.
        if file.get('imported'):
            file['imported'] = False
    else:  # Creating a new file record
        if upload.get('attachParent'):
            item = None
        elif upload['parentType'] == 'folder':
            # Create a new item with the name of the file.
            item = self.model('item').createItem(
                name=upload['name'], creator={'_id': upload['userId']},
                folder={'_id': upload['parentId']})
        elif upload['parentType'] == 'item':
            item = self.model('item').load(id=upload['parentId'], force=True)
        else:
            item = None

        file = self.model('file').createFile(
            item=item, name=upload['name'], size=upload['size'],
            creator={'_id': upload['userId']}, assetstore=assetstore,
            mimeType=upload['mimeType'], saveFile=False)

        if upload.get('attachParent'):
            if upload['parentType'] and upload['parentId']:
                file['attachedToType'] = upload['parentType']
                file['attachedToId'] = upload['parentId']

    adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
    file = adapter.finalizeUpload(upload, file)

    event_document = {'file': file, 'upload': upload}
    events.trigger('model.file.finalizeUpload.before', event_document)
    file = self.model('file').save(file)
    events.trigger('model.file.finalizeUpload.after', event_document)

    self.remove(upload)

    # Add an async event for handlers that wish to process this file.
    eventParams = {
        'file': file,
        'assetstore': assetstore,
        'currentToken': rest.getCurrentToken(),
        'currentUser': rest.getCurrentUser()
    }
    if 'reference' in upload:
        eventParams['reference'] = upload['reference']
    events.daemon.trigger('data.process', eventParams)

    return file

def wrapped(*args, **kwargs):
    if not rest.getCurrentToken():
        raise AccessException(
            'You must be logged in or have a valid auth token.')
    if required:
        Token().requireScope(rest.getCurrentToken(), scope)
    return fun(*args, **kwargs)

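# A sketch of the decorator factory implied by the closure variables 'fun',
# 'scope', and 'required' in the wrapper above. The factory name 'tokenScoped'
# is an assumption; the exception import path varies between Girder releases.
import functools

from girder.api import rest
from girder.exceptions import AccessException
from girder.models.token import Token


def tokenScoped(scope, required=True):
    def decorator(fun):
        @functools.wraps(fun)
        def wrapped(*args, **kwargs):
            if not rest.getCurrentToken():
                raise AccessException(
                    'You must be logged in or have a valid auth token.')
            if required:
                Token().requireScope(rest.getCurrentToken(), scope)
            return fun(*args, **kwargs)
        return wrapped
    return decorator
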
def accessDecorator(*args, **kwargs):
    token = rest.getCurrentToken()

    if not token:
        raise AccessException('You must be logged in or supply a valid '
                              'session token.')

    return fun(*args, **kwargs)

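# Hypothetical application of the decorator that produces accessDecorator
# above; 'loginRequired' is an assumed name for the enclosing decorator
# factory, and the endpoint itself is illustrative only.
@loginRequired
def listMySessions(self, params):
    # Reaching this point implies rest.getCurrentToken() returned a token.
    return self.getCurrentUser()
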
def finalizeUpload(self, upload, assetstore=None):
    """
    This should only be called manually in the case of creating an empty file,
    i.e. one that has no chunks.

    :param upload: The upload document.
    :type upload: dict
    :param assetstore: If known, the containing assetstore for the upload.
    :type assetstore: dict
    :returns: The file object that was created.
    """
    from .assetstore import Assetstore
    from .file import File
    from .item import Item
    from girder.utility import assetstore_utilities

    events.trigger('model.upload.finalize', upload)
    if assetstore is None:
        assetstore = Assetstore().load(upload['assetstoreId'])
    logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:assetstore=' + str(assetstore))

    if 'fileId' in upload:  # Updating an existing file's contents
        file = File().load(upload['fileId'], force=True)

        # Delete the previous file contents from the containing assetstore
        assetstore_utilities.getAssetstoreAdapter(
            Assetstore().load(file['assetstoreId'])).deleteFile(file)

        item = Item().load(file['itemId'], force=True)
        File().propagateSizeChange(item, upload['size'] - file['size'])

        # Update file info
        file['creatorId'] = upload['userId']
        file['created'] = datetime.datetime.utcnow()
        file['assetstoreId'] = assetstore['_id']
        file['size'] = upload['size']
        # If the file was previously imported, it is no longer.
        if file.get('imported'):
            file['imported'] = False
        logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:Updating+' + str(file))
    else:  # Creating a new file record
        if upload.get('attachParent'):
            item = None
        elif upload['parentType'] == 'folder':
            # Create a new item with the name of the file.
            item = Item().createItem(
                name=upload['name'], creator={'_id': upload['userId']},
                folder={'_id': upload['parentId']})

            #####################################################
            ################# Insert annotation start ###########
            #####################################################
            import pymongo
            mongoClient = pymongo.MongoClient('mongodb://localhost:27017/')
            db_girder = mongoClient['girder']
            coll_annotation = db_girder['annotation']
            now = datetime.datetime.utcnow()
            annotation_id = ObjectId()
            annotation = {
                '_id': annotation_id,
                # "itemId" needs to be updated
                'itemId': item['_id'],
                #################################################
                # "updated" : ISODate("2018-12-13T08:59:09.307Z"),
                'groups': [],
                # "created" : ISODate("2018-12-13T08:56:23.083Z"),
                '_version': 1,
                'annotation': {
                    'name': '标注1'  # i.e. 'Annotation 1'
                },
                'access': {
                    'users': [{
                        'flags': [],
                        'id': ObjectId('5b9b7d25d4a48a28c5f8ef84'),
                        'level': 2
                    }],
                    'groups': []
                },
                'created': now,
                'updated': now,
                'creatorId': ObjectId(upload['userId']),
                'public': False,
                'updatedId': ObjectId(upload['userId'])
            }
            result = coll_annotation.insert_one(annotation)
            #####################################################
            ################# Insert annotation END   ###########
            #####################################################
        elif upload['parentType'] == 'item':
            item = Item().load(id=upload['parentId'], force=True)
        else:
            item = None
        logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:item+' + str(item))

        file = File().createFile(
            item=item, name=upload['name'], size=upload['size'],
            creator={'_id': upload['userId']}, assetstore=assetstore,
            mimeType=upload['mimeType'], saveFile=False)

        if upload.get('attachParent'):
            if upload['parentType'] and upload['parentId']:
                file['attachedToType'] = upload['parentType']
                file['attachedToId'] = upload['parentId']
        logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:Creating=' + str(file))

    adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
    file = adapter.finalizeUpload(upload, file)

    event_document = {'file': file, 'upload': upload}
    events.trigger('model.file.finalizeUpload.before', event_document)
    file = File().save(file)
    events.trigger('model.file.finalizeUpload.after', event_document)

    if '_id' in upload:
        self.remove(upload)
        logger.info('Upload complete. Upload=%s File=%s User=%s' % (
            upload['_id'], file['_id'], upload['userId']))

    # Add an async event for handlers that wish to process this file.
    eventParams = {
        'file': file,
        'assetstore': assetstore,
        'currentToken': rest.getCurrentToken(),
        'currentUser': rest.getCurrentUser()
    }
    if 'reference' in upload:
        eventParams['reference'] = upload['reference']
    events.daemon.trigger('data.process', eventParams)
    logger.info('(#####)girder/girder/model/upload.py:finalizeUpload:file3=' + str(file))

    return file

def finalizeUpload(self, upload, assetstore=None):
    """
    This should only be called manually in the case of creating an empty file,
    i.e. one that has no chunks.

    :param upload: The upload document.
    :type upload: dict
    :param assetstore: If known, the containing assetstore for the upload.
    :type assetstore: dict
    :returns: The file object that was created.
    """
    events.trigger('model.upload.finalize', upload)
    if assetstore is None:
        assetstore = self.model('assetstore').load(upload['assetstoreId'])

    if 'fileId' in upload:  # Updating an existing file's contents
        file = self.model('file').load(upload['fileId'], force=True)

        # Delete the previous file contents from the containing assetstore
        assetstore_utilities.getAssetstoreAdapter(
            self.model('assetstore').load(
                file['assetstoreId'])).deleteFile(file)

        item = self.model('item').load(file['itemId'], force=True)
        self.model('file').propagateSizeChange(
            item, upload['size'] - file['size'])

        # Update file info
        file['creatorId'] = upload['userId']
        file['created'] = datetime.datetime.utcnow()
        file['assetstoreId'] = assetstore['_id']
        file['size'] = upload['size']
        # If the file was previously imported, it is no longer.
        if file.get('imported'):
            file['imported'] = False
    else:  # Creating a new file record
        if upload.get('attachParent'):
            item = None
        elif upload['parentType'] == 'folder':
            # Create a new item with the name of the file.
            item = self.model('item').createItem(
                name=upload['name'], creator={'_id': upload['userId']},
                folder={'_id': upload['parentId']})
        elif upload['parentType'] == 'item':
            item = self.model('item').load(
                id=upload['parentId'], force=True)
        else:
            item = None

        file = self.model('file').createFile(
            item=item, name=upload['name'], size=upload['size'],
            creator={'_id': upload['userId']}, assetstore=assetstore,
            mimeType=upload['mimeType'], saveFile=False)

        if upload.get('attachParent'):
            if upload['parentType'] and upload['parentId']:
                file['attachedToType'] = upload['parentType']
                file['attachedToId'] = upload['parentId']

    adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
    file = adapter.finalizeUpload(upload, file)

    event_document = {'file': file, 'upload': upload}
    events.trigger('model.file.finalizeUpload.before', event_document)
    file = self.model('file').save(file)
    events.trigger('model.file.finalizeUpload.after', event_document)

    if '_id' in upload:
        self.remove(upload)

    # Add an async event for handlers that wish to process this file.
    eventParams = {
        'file': file,
        'assetstore': assetstore,
        'currentToken': rest.getCurrentToken(),
        'currentUser': rest.getCurrentUser()
    }
    if 'reference' in upload:
        eventParams['reference'] = upload['reference']
    events.daemon.trigger('data.process', eventParams)

    return file

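# A minimal sketch (assumed, not from the source) of a plugin consuming the
# asynchronous 'data.process' event that finalizeUpload() triggers above.
from girder import events


def _processUploadedFile(event):
    file = event.info['file']
    assetstore = event.info['assetstore']
    # 'reference', 'currentToken', and 'currentUser' may also be present.
    reference = event.info.get('reference')
    # ... post-process the finalized file here ...


def load(info):
    events.bind('data.process', 'my_plugin', _processUploadedFile)
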
def wrapped(*iargs, **ikwargs):
    if not rest.getCurrentToken():
        raise AccessException(
            'You must be logged in or have a valid auth token.')

    return args[0](*iargs, **ikwargs)

def wrapped(*iargs, **ikwargs):
    if not rest.getCurrentToken():
        raise AccessException(
            'You must be logged in or have a valid auth token.')

    return fun(*iargs, **ikwargs)