def makeDicomItem(self, item):
    """
    Try to convert an existing item into a "DICOM item", which contains a
    "dicomMeta" field with DICOM metadata that is common to all DICOM files.
    """
    commonMeta = None
    parsedFiles = []
    for currentFile in Item().childFiles(item):
        meta = _parseFile(currentFile)
        if meta is None:
            # Not parseable as DICOM; skip this file entirely
            continue
        parsedFiles.append(_extractFileData(currentFile, meta))
        # Keep only the metadata shared by every DICOM file seen so far
        if commonMeta is None:
            commonMeta = meta
        else:
            commonMeta = _removeUniqueMetadata(commonMeta, meta)
        setResponseTimeLimit()
    if not parsedFiles:
        # No DICOM files at all: leave the item untouched
        return
    # Sort the dicom files, then persist the result on the item
    parsedFiles.sort(key=_getDicomFileSortKey)
    item['dicom'] = {
        'meta': commonMeta,
        'files': parsedFiles,
    }
    Item().save(item)
def _uploadComplete(event):
    """
    Called after an upload finishes. We check if our current token is a special
    authorized upload token, and if so, delete it.

    TODO we could alternatively keep a reference count inside each token that
    authorized more than a single upload at a time, and just decrement it here.

    :param event: The upload-complete event; ``event.info['file']`` is the
        newly uploaded file document.
    """
    token = getCurrentToken()
    # Only act when the current session was authenticated via an authorized
    # upload token (identified by its 'authorizedUploadId' field)
    if token and 'authorizedUploadId' in token:
        user = User().load(token['userId'], force=True)
        item = Item().load(event.info['file']['itemId'], force=True)

        # Save the metadata on the item
        item['description'] = token['authorizedUploadDescription']
        item['authorizedUploadEmail'] = token['authorizedUploadEmail']
        Item().save(item)

        # Notify the user who created the authorized upload link
        text = mail_utils.renderTemplate('authorized_upload.uploadFinished.mako', {
            'itemId': item['_id'],
            'itemName': item['name'],
            'itemDescription': item.get('description', '')
        })
        mail_utils.sendMail('Authorized upload complete', text, [user['email']])

        # The token is single-use: remove it now that the upload completed
        Token().remove(token)
def testDicomWithBinaryValues(self):
    """Import a pydicom sample file whose metadata parsing raises IOError."""
    # One of the test files in the pydicom module will throw an IOError
    # when parsing metadata. We should work around that and still be able
    # to import the file
    samplePath = os.path.join(os.path.dirname(os.path.abspath(
        pydicom.__file__)), 'data', 'test_files', 'OBXXXX1A.dcm')
    admin, user = self.users

    # Create a collection, folder, and item
    collection = Collection().createCollection('collection5', admin, public=True)
    folder = Folder().createFolder(collection, 'folder5',
                                   parentType='collection', public=True)
    item = Item().createItem('item5', admin, folder)

    # Upload this dicom file
    with open(samplePath, 'rb') as fp, \
            _EventHelper('dicom_viewer.upload.success') as helper:
        dcmFile = Upload().uploadFromFile(
            obj=fp,
            size=os.path.getsize(samplePath),
            name=os.path.basename(samplePath),
            parentType='item',
            parent=item,
            mimeType='application/dicom',
            user=user
        )
        self.assertIsNotNone(dcmFile)
        # Wait for handler success event (the upload handler runs async)
        handled = helper.wait()
        self.assertTrue(handled)

    # Check if the 'dicomItem' is well processed
    dicomItem = Item().load(item['_id'], force=True)
    self.assertIn('dicom', dicomItem)
    self.assertHasKeys(dicomItem['dicom'], ['meta', 'files'])
def testFileProcessHandler(self):
    """Verify the upload handler attaches 'dicom' data only for DICOM files."""
    admin, user = self.users

    # Build the collection/folder/item hierarchy to upload into
    collection = Collection().createCollection('collection1', admin, public=True)
    folder = Folder().createFolder(collection, 'folder1',
                                   parentType='collection', public=True)
    item = Item().createItem('item1', admin, folder)

    # Non-DICOM uploads must not attach a 'dicom' field
    self._uploadNonDicomFiles(item, admin)
    nonDicomItem = Item().load(item['_id'], force=True)
    self.assertIsNone(nonDicomItem.get('dicom'))

    # DICOM uploads should populate and process the 'dicom' field
    self._uploadDicomFiles(item, admin)
    dicomItem = Item().load(item['_id'], force=True)
    self.assertIn('dicom', dicomItem)
    self.assertHasKeys(dicomItem['dicom'], ['meta', 'files'])

    # Each file entry carries its ids and sort keys, and entries are sorted
    for index in range(4):
        entry = dicomItem['dicom']['files'][index]
        self.assertTrue('_id' in entry)
        self.assertTrue('name' in entry)
        self.assertEqual(entry['name'], 'dicomFile{}.dcm'.format(index))
        self.assertTrue('SeriesNumber' in entry['dicom'])
        self.assertTrue('InstanceNumber' in entry['dicom'])
        self.assertTrue('SliceLocation' in entry['dicom'])

    # Check the common metadata
    self.assertIsNotNone(dicomItem['dicom']['meta'])
def updateSize(self, doc):
    """
    Recursively recomputes the size of this folder and its underlying
    folders and fixes the sizes as needed.

    :param doc: The folder.
    :type doc: dict
    :returns: Tuple of (computed size, number of fixed documents).
    """
    from girderformindlogger.models.item import Item

    totalSize = 0
    fixCount = 0

    # Fix each child folder first; their sizes are not included in ours
    for childFolder in self.find({
        'parentId': doc['_id'],
        'parentCollection': 'folder'
    }):
        _, childFixes = self.updateSize(childFolder)
        fixCount += childFixes

    # Sum the (corrected) sizes of the items directly in this folder
    itemModel = Item()
    for childItem in self.childItems(doc):
        itemSize, itemFixes = itemModel.updateSize(childItem)
        totalSize += itemSize
        fixCount += itemFixes

    # Persist the recomputed size only when it differs from the stored one
    if totalSize != doc.get('size'):
        self.update({'_id': doc['_id']}, update={'$set': {'size': totalSize}})
        fixCount += 1

    return totalSize, fixCount
def clean(self, folder, progress=None, **kwargs):
    """
    Delete all contents underneath a folder recursively, but leave the
    folder itself.

    :param folder: The folder document to delete.
    :type folder: dict
    :param progress: A progress context to record progress on.
    :type progress: girderformindlogger.utility.progress.ProgressContext or None.
    """
    from girderformindlogger.models.item import Item

    setResponseTimeLimit()

    # Remove every child item, reporting progress per deletion
    itemModel = Item()
    childItems = itemModel.find({
        'folderId': folder['_id']
    })
    for childItem in childItems:
        setResponseTimeLimit()
        itemModel.remove(childItem, progress=progress, **kwargs)
        if progress:
            progress.update(increment=1,
                            message='Deleted item %s' % childItem['name'])
    # subsequent operations take a long time, so free the cursor's resources
    childItems.close()

    # Recursively remove every child folder
    childFolders = self.find({
        'parentId': folder['_id'],
        'parentCollection': 'folder'
    })
    for childFolder in childFolders:
        self.remove(childFolder, progress=progress, **kwargs)
    childFolders.close()
def updateItemLicense(event):
    """
    REST event handler to update item with license parameter, if provided.

    :param event: The REST event. ``event.info['params']`` may carry a
        'license' value, and ``event.info['returnVal']`` is the item returned
        by the wrapped endpoint. When a license change is applied, the event's
        default response is replaced with the re-saved item.
    :raises ValidationException: If the new license name is not in the
        configured list of licenses.
    """
    params = event.info['params']
    if 'license' not in params:
        return

    itemModel = Item()
    item = itemModel.load(event.info['returnVal']['_id'], force=True, exc=True)
    newLicense = validateString(params['license'])
    # Use .get(): items created before this plugin was enabled may not have a
    # 'license' field yet, and a bare item['license'] would raise KeyError.
    if item.get('license') == newLicense:
        return

    # Ensure that new license name is in configured list of licenses.
    #
    # Enforcing this here, instead of when validating the item, avoids an extra
    # database lookup (for the settings) on every future item save.
    if newLicense:
        licenseSetting = Setting().get(PluginSettings.LICENSES)
        validLicense = any(
            license['name'] == newLicense
            for group in licenseSetting
            for license in group['licenses'])
        if not validLicense:
            raise ValidationException(
                'License name must be in configured list of licenses.',
                'license')

    item['license'] = newLicense
    item = itemModel.save(item)
    event.preventDefault()
    event.addResponse(item)
def testMakeDicomItem(self): admin, user = self.users # create a collection, folder, and item collection = Collection().createCollection('collection2', admin, public=True) folder = Folder().createFolder(collection, 'folder2', parentType='collection', public=True) item = Item().createItem('item2', admin, folder) # Upload files self._uploadDicomFiles(item, admin) # Check the endpoint 'parseDicom' for an admin user dicomItem = Item().load(item['_id'], force=True) dicomItem = self._purgeDicomItem(dicomItem) path = '/item/%s/parseDicom' % dicomItem.get('_id') resp = self.request(path=path, method='POST', user=admin) self.assertStatusOk(resp) dicomItem = Item().load(item['_id'], force=True) self.assertIn('dicom', dicomItem) self.assertHasKeys(dicomItem['dicom'], ['meta', 'files']) # Check the endpoint 'parseDicom' for an non admin user dicomItem = Item().load(item['_id'], force=True) dicomItem = self._purgeDicomItem(dicomItem) path = '/item/%s/parseDicom' % dicomItem.get('_id') resp = self.request(path=path, method='POST', user=user) self.assertStatus(resp, 403)
def copyFolderComponents(self, srcFolder, newFolder, creator, progress,
                         firstFolder=None):
    """
    Copy the items, subfolders, and extended data of a folder that was just
    copied.

    :param srcFolder: the original folder.
    :type srcFolder: dict
    :param newFolder: the new folder.
    :type newFolder: dict
    :param creator: user representing the creator of the new folder.
    :type creator: dict
    :param progress: a progress context to record process on.
    :type progress: girderformindlogger.utility.progress.ProgressContext or None.
    :param firstFolder: if not None, the first folder copied in a tree of
        folders.
    :returns: the new folder document.
    """
    from girderformindlogger.models.item import Item

    # copy metadata and other extension values
    updated = False
    if srcFolder['meta']:
        newFolder['meta'] = copy.deepcopy(srcFolder['meta'])
        updated = True

    # Copy over any source keys that are neither part of the filtered
    # (public-facing) document nor already set on the new folder
    filteredFolder = self.filter(newFolder, creator)
    for key in srcFolder:
        if key not in filteredFolder and key not in newFolder:
            newFolder[key] = copy.deepcopy(srcFolder[key])
            updated = True

    if updated:
        # triggerEvents=False: this is an internal bookkeeping save
        newFolder = self.save(newFolder, triggerEvents=False)

    # Give listeners a chance to change things
    events.trigger('model.folder.copy.prepare', (srcFolder, newFolder))

    # copy items
    itemModel = Item()
    for item in self.childItems(folder=srcFolder):
        setResponseTimeLimit()
        itemModel.copyItem(item, creator, folder=newFolder)
        if progress:
            progress.update(increment=1, message='Copied item ' + item['name'])

    # copy subfolders
    for sub in self.childFolders(parentType='folder', parent=srcFolder,
                                 user=creator):
        # Skip the subtree root that started this copy, to avoid recursing
        # into the copy we are currently building
        if firstFolder and firstFolder['_id'] == sub['_id']:
            continue
        self.copyFolder(sub, parent=newFolder, parentType='folder',
                        creator=creator, progress=progress)

    events.trigger('model.folder.copy.after', newFolder)
    if progress:
        progress.update(increment=1,
                        message='Copied folder ' + newFolder['name'])

    # Reload to get updated size value
    return self.load(newFolder['_id'], force=True)
def testVirtualFolderValidation(self):
    """Check every validation rule around virtual folders."""
    # Can't make folder virtual if it has children
    subfolder = Folder().createFolder(self.f1, 'sub', creator=self.admin)
    self.f1['isVirtual'] = True
    with six.assertRaisesRegex(
            self, ValidationException,
            'Virtual folders may not contain child folders.'):
        Folder().save(self.f1)

    Folder().remove(subfolder)
    item = Item().createItem('i', creator=self.admin, folder=self.f1)
    with six.assertRaisesRegex(
            self, ValidationException,
            'Virtual folders may not contain child items.'):
        Folder().save(self.f1)

    # With no children left, making the folder virtual succeeds
    Item().remove(item)
    Folder().save(self.f1)

    # Can't make subfolders or items under a virtual folder
    with six.assertRaisesRegex(
            self, ValidationException,
            'You may not place items under a virtual folder.'):
        Item().createItem('i', creator=self.admin, folder=self.f1)
    with six.assertRaisesRegex(
            self, ValidationException,
            'You may not place folders under a virtual folder.'):
        Folder().createFolder(self.f1, 'f', creator=self.admin)

    # Can't move an item under a virtual folder
    item = Item().createItem('i', creator=self.admin, folder=self.f2)
    with six.assertRaisesRegex(
            self, ValidationException,
            'You may not place items under a virtual folder.'):
        Item().move(item, self.f1)

    # Ensure JSON for query
    self.f1['virtualItemsQuery'] = 'not JSON'
    with six.assertRaisesRegex(
            self, ValidationException,
            'The virtual items query must be valid JSON.'):
        Folder().save(self.f1)

    # Same for the sort specification
    del self.f1['virtualItemsQuery']
    self.f1['virtualItemsSort'] = 'not JSON'
    with six.assertRaisesRegex(
            self, ValidationException,
            'The virtual items sort must be valid JSON.'):
        Folder().save(self.f1)
def index_folder(folderId):
    """
    Recursively collect the ids of all files under a folder.

    :param folderId: The folder's id (anything ``ObjectId()`` accepts).
    :returns: A list of file ids for every file attached to this folder's
        items and, recursively, to items in all of its subfolders.
    :raises ValueError: If folderId does not reference an existing folder.
    """
    # Instantiate the models once instead of once per loop iteration
    folderModel = Folder()
    itemModel = Item()

    if folderModel.load(folderId, force=True) is None:
        raise ValueError('folderId={} was not a valid folder'.format(folderId))

    files = []
    # Files live on items; fetch only the _id field we actually need
    for item in itemModel.find({'folderId': ObjectId(folderId)}):
        files.extend(
            childFile['_id']
            for childFile in itemModel.childFiles(item, fields={'_id': True}))

    # Recurse into subfolders
    for subfolder in folderModel.find({'parentId': ObjectId(folderId)}):
        files += index_folder(subfolder['_id'])

    return files
def _updateDescendants(self, folderId, updateQuery):
    """
    This helper is used to update all items and folders underneath a
    profile. This is expensive, so think carefully before using it.

    :param folderId: The _id of the profile at the root of the subtree.
    :param updateQuery: The mongo query to apply to all of the children of
        the profile.
    :type updateQuery: dict
    """
    from girderformindlogger.models.item import Item

    # NOTE(review): children are matched on 'appletId' (not 'parentId') with
    # parentCollection 'profile' — presumably profiles store their parent
    # reference under 'appletId'; confirm against the profile schema.
    self.update(query={
        'appletId': folderId,
        'parentCollection': 'profile'
    }, update=updateQuery, multi=True)
    # Items hang off the profile via the usual 'folderId' reference
    Item().update(query={
        'folderId': folderId,
    }, update=updateQuery, multi=True)

    # Recurse into each child profile to cover the whole subtree
    q = {'appletId': folderId, 'parentCollection': 'profile'}
    for child in self.find(q):
        self._updateDescendants(child['_id'], updateQuery)
def isOrphan(self, file):
    """
    Returns True if this file is orphaned (its item or attached entity is
    missing).

    :param file: The file to check.
    :type file: dict
    """
    if not file.get('attachedToId'):
        # Ordinary file: orphaned when its parent item no longer exists
        from girderformindlogger.models.item import Item
        return not Item().load(file.get('itemId'), force=True)

    # Attached file: resolve the model of the entity it is attached to
    attachedToType = file.get('attachedToType')
    if isinstance(attachedToType, six.string_types):
        modelType = ModelImporter.model(attachedToType)
    elif isinstance(attachedToType, list) and len(attachedToType) == 2:
        modelType = ModelImporter.model(*attachedToType)
    else:
        # Invalid 'attachedToType'
        return True

    # Access-controlled models need force=True to bypass permission checks
    if isinstance(modelType,
                  (acl_mixin.AccessControlMixin, AccessControlledModel)):
        attachedDoc = modelType.load(file.get('attachedToId'), force=True)
    else:
        attachedDoc = modelType.load(file.get('attachedToId'))
    return not attachedDoc
def testSearchForDicomItem(self):
    """DICOM search mode matches both full values and substrings."""
    admin, user = self.users

    # Create a collection, folder, and item
    collection = Collection().createCollection('collection3', admin, public=True)
    folder = Folder().createFolder(collection, 'folder3',
                                   parentType='collection', public=True)
    item = Item().createItem('item3', admin, folder)

    # Upload files
    self._uploadDicomFiles(item, admin)

    # Both the full common key/value 'brain research' and a substring of it
    # should find exactly the one DICOM item
    for searchText in ('brain research', 'in resea'):
        resp = self.request(path='/resource/search', params={
            'q': searchText,
            'mode': 'dicom',
            'types': json.dumps(['item'])
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json['item']), 1)
        self.assertEqual(resp.json['item'][0]['name'], 'item3')
def load(self, info):
    """Wire the DICOM viewer plugin into the server at startup."""
    # Expose the computed 'dicom' field on items to any reader
    Item().exposeFields(level=AccessType.READ, fields={'dicom'})
    events.bind('data.process', 'dicom_viewer', _uploadHandler)
    # Add the DICOM search mode only once
    search.addSearchMode('dicom', dicomSubstringSearchHandler)
    resource = DicomItem()
    info['apiRoot'].item.route(
        'POST', (':id', 'parseDicom'), resource.makeDicomItem)
def dicomSubstringSearchHandler(query, types, user=None, level=None,
                                limit=0, offset=0):
    """
    Provide a substring search on both keys and values.

    :param query: The search string; matched case-insensitively against
        DICOM metadata keys and values.
    :param types: Must be exactly ['item']; other resource types are rejected.
    :param user: The user performing the search, for permission filtering.
    :param level: The access level required on returned items.
    :param limit: Result limit.
    :param offset: Result offset.
    :returns: Dict with an 'item' key listing the filtered matching items.
    :raises RestException: If types is not ['item'] or query is not a string.
    """
    if types != ['item']:
        raise RestException('The dicom search is only able to search in Item.')
    if not isinstance(query, six.string_types):
        raise RestException('The search query must be a string.')

    # Server-side JS predicate (mongo $where): true when the query appears,
    # case-insensitively, in any DICOM metadata key or stringified value.
    # The query is embedded via json.dumps, which safely quotes/escapes it.
    jsQuery = """
        function() {
            var queryKey = %(query)s.toLowerCase();
            var queryValue = queryKey;
            var dicomMeta = obj.dicom.meta;
            return Object.keys(dicomMeta).some(
                function(key) {
                    return (key.toLowerCase().indexOf(queryKey) !== -1)
                        || dicomMeta[key].toString().toLowerCase()
                            .indexOf(queryValue) !== -1;
                })
        }
    """ % {
        # This could eventually be a separately-defined key and value
        'query': json.dumps(query)
    }

    # Sort the documents inside MongoDB
    cursor = Item().find({'dicom': {'$exists': True}, '$where': jsQuery})
    # Filter the result
    result = {
        'item': [
            Item().filter(doc, user)
            for doc in Item().filterResultsByPermission(
                cursor, user, level, limit, offset)
        ]
    }

    return result
def _uploadHandler(event):
    """
    Whenever an additional file is uploaded to a "DICOM item", remove any
    DICOM metadata that is no longer common to all DICOM files in the item.
    """
    uploadedFile = event.info['file']
    meta = _parseFile(uploadedFile)
    if meta is None:
        # Not a DICOM file; nothing to record
        return

    item = Item().load(uploadedFile['itemId'], force=True)
    if 'dicom' not in item:
        # In this case the uploaded file is the first of the item
        item['dicom'] = {'meta': meta, 'files': []}
    else:
        # Keep only metadata still shared by every file, including this one
        item['dicom']['meta'] = _removeUniqueMetadata(item['dicom']['meta'], meta)

    item['dicom']['files'].append(_extractFileData(uploadedFile, meta))
    item['dicom']['files'].sort(key=_getDicomFileSortKey)
    Item().save(item)
    events.trigger('dicom_viewer.upload.success')
def _importDataAsItem(self, name, user, folder, path, files,
                      reuseExisting=True, params=None):
    """
    Create (or reuse) an item named *name* under *folder* and import the
    given files from directory *path* into it, honoring the import filter.
    """
    params = params or {}
    item = Item().createItem(
        name=name, creator=user, folder=folder, reuseExisting=reuseExisting)
    # Announce the import so listeners can react to the new item
    events.trigger('filesystem_assetstore_imported',
                   {'id': item['_id'], 'type': 'item', 'importPath': path})
    for filename in files:
        fullPath = os.path.join(path, filename)
        if not self.shouldImportFile(fullPath, params):
            continue
        self.importFile(item, fullPath, user, name=filename)
def _importFileToFolder(self, name, user, parent, parentType, path):
    """
    Import a single file from *path* as a new (or reused) item directly
    under a folder; any other parent type is rejected.
    """
    if parentType != 'folder':
        raise ValidationException(
            'Files cannot be imported directly underneath a %s.' % parentType)

    newItem = Item().createItem(
        name=name, creator=user, folder=parent, reuseExisting=True)
    # Announce the import so listeners can react to the new item
    events.trigger('filesystem_assetstore_imported', {
        'id': newItem['_id'],
        'type': 'item',
        'importPath': path
    })
    self.importFile(newItem, path, user, name=name)
def load(self, info):
    """Attach the thumbnails plugin: REST route, exposed fields, event hooks."""
    getPlugin('jobs').load(info)

    pluginName = 'thumbnails'
    info['apiRoot'].thumbnail = rest.Thumbnail()

    # Every thumbnail-capable model exposes its '_thumbnails' field and
    # cleans up thumbnails when a document is removed
    for coreModel in (Item(), Collection(), Folder(), User()):
        coreModel.exposeFields(level=AccessType.READ, fields='_thumbnails')
        events.bind('model.%s.remove' % coreModel.name, pluginName,
                    removeThumbnails)

    events.bind('model.file.remove', pluginName, removeThumbnailLink)
    events.bind('data.process', pluginName, _onUpload)
def _pruneOrphans(self, progress):
    """Remove every orphaned file, folder, and item; return the removal count."""
    removed = 0
    models = [File(), Folder(), Item()]

    # Total document count across all three models, for progress reporting
    total = sum(model.find().count() for model in models)
    progress.update(total=total, current=0)

    for model in models:
        for document in model.find():
            progress.update(increment=1)
            if not model.isOrphan(document):
                continue
            model.remove(document)
            removed += 1
    return removed
def testVirtualQuery(self):
    """Virtual folders list items by stored query, permission-filtered and sorted."""
    # Ten items alternating between the two real folders, with metadata 0..9
    for i in range(10):
        item = Item().createItem(str(i), creator=self.admin,
                                 folder=(self.f1, self.f2)[i % 2])
        Item().setMetadata(item, {'someVal': i})

    self.virtual['virtualItemsQuery'] = json.dumps(
        {'meta.someVal': {
            '$gt': 5
        }})
    self.virtual = Folder().save(self.virtual)

    def listItems():
        # List the virtual folder's children as the non-admin user
        resp = self.request('/item', user=self.user,
                            params={'folderId': self.virtual['_id']})
        self.assertStatusOk(resp)
        return resp.json

    # No read access on either real folder: nothing is visible
    self.assertEqual(listItems(), [])

    # Grant permission on the first folder
    Folder().setUserAccess(self.f1, self.user, AccessType.READ, save=True)
    self.assertEqual([i['name'] for i in listItems()], ['6', '8'])

    # Grant permission on the second folder
    Folder().setUserAccess(self.f2, self.user, AccessType.READ, save=True)
    self.assertEqual([i['name'] for i in listItems()], ['6', '7', '8', '9'])

    # Add a custom sort
    self.virtual['virtualItemsSort'] = json.dumps(
        [('meta.someVal', SortDir.DESCENDING)])
    self.virtual = Folder().save(self.virtual)
    self.assertEqual([i['name'] for i in listItems()], ['9', '8', '7', '6'])

    # Using childItems on a vfolder should not yield any results
    self.assertEqual(list(Folder().childItems(self.virtual)), [])
def _virtualChildItems(self, event):
    """
    REST event handler that intercepts item listings whose parent folder is
    virtual and answers them from the folder's stored query instead.
    """
    params = event.info['params']
    if 'folderId' not in params:
        return  # This is not a child listing request

    user = self.getCurrentUser()
    folder = Folder().load(params['folderId'], user=user, level=AccessType.READ)
    if not folder.get('isVirtual') or 'virtualItemsQuery' not in folder:
        return  # Parent is not a virtual folder, proceed as normal

    limit, offset, sort = self.getPagingParameters(params, defaultSortField='name')
    query = json_util.loads(folder['virtualItemsQuery'])
    if 'virtualItemsSort' in folder:
        # The folder's stored sort overrides the request's paging sort
        sort = json.loads(folder['virtualItemsSort'])

    itemModel = Item()
    # These items may reside in folders that the user cannot read, so we must filter
    cursor = itemModel.find(query, sort=sort)
    visible = itemModel.filterResultsByPermission(
        cursor, user, level=AccessType.READ, limit=limit, offset=offset)
    event.preventDefault().addResponse(
        [itemModel.filter(doc, user) for doc in visible])
def load(self, info):
    """Register the item_licenses plugin's hooks, fields, and endpoint."""
    # Bind REST events
    restBindings = (
        ('rest.post.item.after', postItemAfter),
        ('rest.post.item/:id/copy.after', postItemCopyAfter),
        ('rest.put.item/:id.after', putItemAfter),
    )
    for eventName, handler in restBindings:
        events.bind(eventName, 'item_licenses', handler)

    # Bind validation events
    events.bind('model.item.validate', 'item_licenses', validateItem)

    # Add license field to item model
    Item().exposeFields(level=AccessType.READ, fields='license')

    # Add endpoint to get list of licenses
    info['apiRoot'].item.route('GET', ('licenses',), getLicenses)
def _list(self, model, document):
    """
    Build directory-listing entries for a document: child folders for
    container types, then child items for folders or child files for items.
    """
    entries = []

    # Collections, users, and folders can all contain child folders
    if model in ('collection', 'user', 'folder'):
        childFolders = Folder().childFolders(
            parent=document, parentType=model, user=self.server.girderUser)
        entries.extend(_stat(childFolder, 'folder')
                       for childFolder in childFolders)

    if model == 'folder':
        entries.extend(_stat(childItem, 'item')
                       for childItem in Folder().childItems(document))
    elif model == 'item':
        entries.extend(_stat(childFile, 'file')
                       for childFile in Item().childFiles(document))
    return entries
def __init__(self):
    """Register the item resource name, backing model, and REST routes."""
    super(Item, self).__init__()
    self.resourceName = 'item'
    self._model = ItemModel()

    # (method, path, handler) table keeps the registrations in one place
    routes = (
        ('DELETE', (':id', ), self.deleteItem),
        ('GET', (), self.find),
        ('GET', (':id', ), self.getItem),
        ('GET', (':id', 'files'), self.getFiles),
        ('GET', (':id', 'download'), self.download),
        ('GET', (':id', 'rootpath'), self.rootpath),
        ('POST', (), self.createItem),
        ('PUT', (':id', ), self.updateItem),
        ('POST', (':id', 'copy'), self.copyItem),
        ('PUT', (':id', 'metadata'), self.setMetadata),
        ('DELETE', (':id', 'metadata'), self.deleteMetadata),
    )
    for method, path, handler in routes:
        self.route(method, path, handler)
def _onUpload(event):
    """
    Thumbnail creation can be requested on file upload by passing a reference
    field that is a JSON object of the following form::

        {"thumbnail": {"width": 123, "height": 123, "crop": True}}

    At least one of ``width`` or ``height`` must be passed. The ``crop``
    parameter is optional.

    :param event: The data.process event; ``event.info['file']`` is the
        uploaded file and ``event.info['reference']`` the client reference.
    """
    file = event.info['file']

    if 'itemId' not in file:
        return

    try:
        ref = json.loads(event.info.get('reference', ''))
    except (ValueError, TypeError):
        # Missing, malformed, or non-string reference: nothing to do.
        # (TypeError covers a None/non-str reference, which json.loads would
        # otherwise raise out of this handler and break the upload.)
        return

    if not isinstance(ref, dict) or not isinstance(ref.get('thumbnail'), dict):
        return

    width = ref['thumbnail'].get('width', 0)
    height = ref['thumbnail'].get('height', 0)

    # Validate types BEFORE doing arithmetic on them; the original code called
    # max() first, which raised an uncaught TypeError on e.g. string values.
    if not isinstance(width, int) or not isinstance(height, int):
        return

    # Negative dimensions are treated as absent
    width = max(0, width)
    height = max(0, height)

    if not width and not height:
        return

    item = Item().load(file['itemId'], force=True)
    crop = bool(ref['thumbnail'].get('crop', True))

    utils.scheduleThumbnailJob(
        file=file, attachToType='item', attachToId=item['_id'],
        user=event.info['currentUser'], width=width, height=height, crop=crop)
def childItems(self, folder, limit=0, offset=0, sort=None, filters=None,
               **kwargs):
    """
    Generator function that yields child items in a folder. Passes any kwargs
    to the find function.

    :param folder: The parent folder.
    :param limit: Result limit.
    :param offset: Result offset.
    :param sort: The sort structure to pass to pymongo.
    :param filters: Additional query operators.
    """
    from girderformindlogger.models.item import Item

    # Base query on the parent folder; extra filters may extend (or even
    # override) it, matching the original update() semantics
    query = {'folderId': folder['_id']}
    if filters:
        query.update(filters)
    return Item().find(query, limit=limit, offset=offset, sort=sort, **kwargs)
def _getBaseResource(self, model, resource):
    """
    Get the base resource for something pertaining to quota policies. If the
    base resource has no quota policy, return (None, None).

    :param model: the initial model type. Could be file, item, folder, user,
        or collection.
    :param resource: the initial resource document.
    :returns: A pair ('model', 'resource'), where 'model' is the base model
        type, either 'user' or 'collection'., and 'resource' is the base
        resource document or the id of that document.
    """
    # Accept an id in place of a document and load it
    if isinstance(resource, six.string_types + (ObjectId, )):
        try:
            resource = ModelImporter.model(model).load(id=resource, force=True)
        except ImportError:
            return None, None
    # Files roll up to their parent item first
    if model == 'file':
        model = 'item'
        resource = Item().load(id=resource['itemId'], force=True)
    if model in ('folder', 'item'):
        # The base parent fields may be absent on a partially-projected
        # document; reload the full document before giving up
        if ('baseParentType' not in resource
                or 'baseParentId' not in resource):
            resource = ModelImporter.model(model).load(id=resource['_id'],
                                                       force=True)
        if ('baseParentType' not in resource
                or 'baseParentId' not in resource):
            return None, None
        model = resource['baseParentType']
        resourceId = resource['baseParentId']
        resource = ModelImporter.model(model).load(id=resourceId, force=True)
    if model in ('user', 'collection') and resource:
        # Ensure the base resource has a quota field so we can use the
        # default quota if appropriate
        if QUOTA_FIELD not in resource:
            resource[QUOTA_FIELD] = {}
    if not resource or QUOTA_FIELD not in resource:
        return None, None
    return model, resource
def _propagateSizeToItem(self, event):
    """
    This callback updates an item's size to include that of a newly-created
    file. This generally should not be called or overridden directly. This
    should not be unregistered, as that would cause item, folder, and
    collection sizes to be inaccurate.
    """
    # This task is not performed in "createFile", in case "saveFile==False".
    # The item size should be updated only when it's certain that the file
    # will actually be saved. It is also possible for "model.file.save" to
    # set "defaultPrevented", which would prevent the item from being saved
    # initially.
    from girderformindlogger.models.item import Item

    fileDoc = event.info
    parentItemId = fileDoc.get('itemId')
    fileSize = fileDoc.get('size')
    if not (parentItemId and fileSize):
        # Detached file or zero/missing size: nothing to propagate
        return
    parentItem = Item().load(parentItemId, force=True)
    self.propagateSizeChange(parentItem, fileSize)