def clean(self, folder, progress=None, **kwargs):
    """
    Delete all contents underneath a folder recursively, but leave the
    folder itself.

    :param folder: The folder document to delete.
    :type folder: dict
    :param progress: A progress context to record progress on.
    :type progress: girder.utility.progress.ProgressContext or None.
    """
    # Import locally to avoid a circular import, matching the other
    # model-access style used in this file (Item() rather than the
    # deprecated self.model('item') lookup).
    from .item import Item

    setResponseTimeLimit()
    itemModel = Item()
    # Delete all child items
    items = itemModel.find({'folderId': folder['_id']})
    for item in items:
        # Extend the response deadline for each item; removals can be slow.
        setResponseTimeLimit()
        itemModel.remove(item, progress=progress, **kwargs)
        if progress:
            progress.update(increment=1, message='Deleted item %s' % item['name'])
    # subsequent operations take a long time, so free the cursor's resources
    items.close()
    # Delete all child folders
    folders = self.find({
        'parentId': folder['_id'],
        'parentCollection': 'folder'
    })
    for subfolder in folders:
        self.remove(subfolder, progress=progress, **kwargs)
    folders.close()
def copyFolderComponents(self, srcFolder, newFolder, creator, progress, firstFolder=None):
    """
    Copy the items, subfolders, and extended data of a folder that was just
    copied.

    :param srcFolder: the original folder.
    :type srcFolder: dict
    :param newFolder: the new folder.
    :type newFolder: dict
    :param creator: user representing the creator of the new folder.
    :type creator: dict
    :param progress: a progress context to record process on.
    :type progress: girder.utility.progress.ProgressContext or None.
    :param firstFolder: if not None, the first folder copied in a tree of
        folders.
    :returns: the new folder document.
    """
    from .item import Item

    # Carry metadata and any extension keys that the filtered view hides
    # over to the copy, then persist without firing save events.
    if 'meta' in srcFolder:
        newFolder['meta'] = copy.deepcopy(srcFolder['meta'])
    visibleKeys = self.filter(newFolder, creator)
    for extraKey in srcFolder:
        if extraKey not in visibleKeys and extraKey not in newFolder:
            newFolder[extraKey] = copy.deepcopy(srcFolder[extraKey])
    newFolder = self.save(newFolder, triggerEvents=False)
    # Give listeners a chance to change things
    events.trigger('model.folder.copy.prepare', (srcFolder, newFolder))
    # Duplicate every child item into the new folder.
    itemModel = Item()
    for childItem in self.childItems(folder=srcFolder):
        setResponseTimeLimit()
        itemModel.copyItem(childItem, creator, folder=newFolder)
        if progress:
            progress.update(increment=1, message='Copied item ' + childItem['name'])
    # Recurse into subfolders, skipping the first folder of the tree copy
    # (it is the one currently being populated).
    for childFolder in self.childFolders(parentType='folder', parent=srcFolder, user=creator):
        if firstFolder and firstFolder['_id'] == childFolder['_id']:
            continue
        self.copyFolder(childFolder, parent=newFolder, parentType='folder',
                        creator=creator, progress=progress)
    events.trigger('model.folder.copy.after', newFolder)
    if progress:
        progress.update(increment=1, message='Copied folder ' + newFolder['name'])
    # Reload to get updated size value
    return self.load(newFolder['_id'], force=True)
def clean(self, folder, progress=None, **kwargs):
    """
    Recursively remove everything contained in a folder while leaving the
    folder document itself in place.

    :param folder: The folder document to delete.
    :type folder: dict
    :param progress: A progress context to record progress on.
    :type progress: girder.utility.progress.ProgressContext or None.
    """
    from .item import Item

    setResponseTimeLimit()
    itemModel = Item()
    # Remove each child item, extending the response deadline per item.
    itemCursor = itemModel.find({'folderId': folder['_id']})
    for child in itemCursor:
        setResponseTimeLimit()
        itemModel.remove(child, progress=progress, **kwargs)
        if progress:
            progress.update(increment=1, message='Deleted item %s' % child['name'])
    # subsequent operations take a long time, so free the cursor's resources
    itemCursor.close()
    # Remove each child folder recursively.
    folderCursor = self.find({
        'parentId': folder['_id'],
        'parentCollection': 'folder'
    })
    for child in folderCursor:
        self.remove(child, progress=progress, **kwargs)
    folderCursor.close()
def updateAnnotation(self, annotation, params):
    """
    Update an annotation, optionally replacing its JSON body and/or moving
    it to a different item.

    :param annotation: the loaded annotation document to update.
    :param params: request parameters; may include ``itemId`` to move the
        annotation to another item.
    :returns: the updated annotation document; the ``elements`` key is
        stripped when the request body did not include elements.
    """
    # Set the response time limit to a very long value
    setResponseTimeLimit(86400)
    user = self.getCurrentUser()
    # The user must have write access to the annotation's current item.
    item = Item().load(annotation.get('itemId'), force=True)
    if item is not None:
        Item().requireAccess(item, user=user, level=AccessType.WRITE)
    # If we have a content length, then we have replacement JSON.  If
    # elements are not included, don't replace them
    returnElements = True
    if cherrypy.request.body.length:
        oldElements = annotation.get('annotation', {}).get('elements')
        annotation['annotation'] = self.getBodyJson()
        if 'elements' not in annotation['annotation'] and oldElements:
            # Keep the previous elements and omit them from the response.
            annotation['annotation']['elements'] = oldElements
            returnElements = False
    if params.get('itemId'):
        # Moving the annotation also requires write access on the target.
        newitem = Item().load(params['itemId'], force=True)
        Item().requireAccess(newitem, user=user, level=AccessType.WRITE)
        annotation['itemId'] = newitem['_id']
    try:
        annotation = Annotation().updateAnnotation(annotation, updateUser=user)
    except ValidationException as exc:
        logger.exception('Failed to validate annotation')
        raise RestException(
            "Validation Error: JSON doesn't follow schema (%r)." % (exc.args, ))
    if not returnElements and 'elements' in annotation['annotation']:
        del annotation['annotation']['elements']
    return annotation
def makeDicomItem(self, item):
    """
    Try to convert an existing item into a "DICOM item", which contains a
    "dicomMeta" field with DICOM metadata that is common to all DICOM
    files.
    """
    commonMeta = None
    parsedFiles = []
    for childFile in Item().childFiles(item):
        fileMeta = _parseFile(childFile)
        if fileMeta is None:
            # Not a DICOM file; skip it.
            continue
        parsedFiles.append(_extractFileData(childFile, fileMeta))
        # Reduce the reference metadata to the values common to all files.
        if commonMeta is None:
            commonMeta = fileMeta
        else:
            commonMeta = _removeUniqueMetadata(commonMeta, fileMeta)
        setResponseTimeLimit()
    if parsedFiles:
        # Sort the dicom files
        parsedFiles.sort(key=_getDicomFileSortKey)
        # Store in the item
        item['dicom'] = {
            'meta': commonMeta,
            'files': parsedFiles
        }
        # Save the item
        Item().save(item)
def makeDicomItem(self, item):
    """
    Try to convert an existing item into a "DICOM item", which contains a
    "dicomMeta" field with DICOM metadata that is common to all DICOM
    files.
    """
    sharedMeta = None
    foundFiles = []
    for entry in Item().childFiles(item):
        parsed = _parseFile(entry)
        if parsed is not None:
            foundFiles.append(_extractFileData(entry, parsed))
            # Keep only the metadata values shared by every file so far.
            sharedMeta = (parsed if sharedMeta is None
                          else _removeUniqueMetadata(sharedMeta, parsed))
            setResponseTimeLimit()
    if not foundFiles:
        return
    # Sort the dicom files
    foundFiles.sort(key=_getDicomFileSortKey)
    # Store in the item
    item['dicom'] = {'meta': sharedMeta, 'files': foundFiles}
    # Save the item
    Item().save(item)
def copyFolder(self, srcFolder, parent=None, name=None, description=None,
               parentType=None, public=None, creator=None, progress=None,
               firstFolder=None):
    """
    Copy a folder, including all child items and child folders.

    :param srcFolder: the folder to copy.
    :type srcFolder: dict
    :param parent: The parent document.  Must be a folder, user, or
        collection.
    :type parent: dict
    :param name: The name of the new folder.  None to copy the original
        name.
    :type name: str
    :param description: Description for the new folder.  None to copy the
        original description.
    :type description: str
    :param parentType: What type the parent is:
        ('folder' | 'user' | 'collection')
    :type parentType: str
    :param public: Public read access flag.  None to inherit from parent,
        'original' to inherit from original folder.
    :type public: bool, None, or 'original'.
    :param creator: user representing the creator of the new folder.
    :type creator: dict
    :param progress: a progress context to record process on.
    :type progress: girder.utility.progress.ProgressContext or None.
    :param firstFolder: if not None, the first folder copied in a tree of
        folders.
    :returns: the new folder document.
    """
    setResponseTimeLimit()
    # Resolve defaults from the source folder where not supplied.
    if parentType is None:
        parentType = srcFolder['parentCollection']
    parentType = parentType.lower()
    if parentType not in ('folder', 'user', 'collection'):
        raise ValidationException(
            'The parentType must be folder, collection, or user.')
    if parent is None:
        parent = self.model(parentType).load(srcFolder['parentId'], force=True)
    if name is None:
        name = srcFolder['name']
    if description is None:
        description = srcFolder['description']
    # A string public value is either the sentinel 'original' or a
    # true/false flag sent as text.
    if public is not None and isinstance(public, six.string_types):
        public = (srcFolder.get('public', None) if public == 'original'
                  else public == 'true')
    newFolder = self.createFolder(
        parentType=parentType, parent=parent, name=name,
        description=description, public=public, creator=creator,
        allowRename=True)
    if firstFolder is None:
        firstFolder = newFolder
    return self.copyFolderComponents(
        srcFolder, newFolder, creator, progress, firstFolder)
def deleteAnnotation(self, annotation, params):
    """Delete an annotation after verifying write access to its item."""
    # Ensure that we have write access to the parent item
    parentItem = Item().load(annotation.get('itemId'), force=True)
    if parentItem is not None:
        Item().requireAccess(
            parentItem, user=self.getCurrentUser(), level=AccessType.WRITE)
    # Removing large annotations can take a long time; allow a full day.
    setResponseTimeLimit(86400)
    Annotation().remove(annotation)
def revertAnnotationHistory(self, id, version):
    """Revert an annotation to a prior history version."""
    setResponseTimeLimit(86400)
    reverted = Annotation().revertVersion(id, version, self.getCurrentUser())
    if not reverted:
        raise RestException('Annotation history version not found.')
    # Don't return the elements -- it can be too verbose
    reverted['annotation'].pop('elements', None)
    return reverted
def copyFolderComponents(self, srcFolder, newFolder, creator, progress, firstFolder=None):
    """
    Copy the items, subfolders, and extended data of a folder that was just
    copied.

    :param srcFolder: the original folder.
    :type srcFolder: dict
    :param newFolder: the new folder.
    :type newFolder: dict
    :param creator: user representing the creator of the new folder.
    :type creator: dict
    :param progress: a progress context to record process on.
    :type progress: girder.utility.progress.ProgressContext or None.
    :param firstFolder: if not None, the first folder copied in a tree of
        folders.
    :returns: the new folder document.
    """
    from .item import Item

    # copy metadata and other extension values
    updated = False
    # Use .get() so a folder that lacks a 'meta' field doesn't raise
    # KeyError (not all folder documents are guaranteed to carry one).
    if srcFolder.get('meta'):
        newFolder['meta'] = copy.deepcopy(srcFolder['meta'])
        updated = True
    filteredFolder = self.filter(newFolder, creator)
    for key in srcFolder:
        if key not in filteredFolder and key not in newFolder:
            newFolder[key] = copy.deepcopy(srcFolder[key])
            updated = True
    # Only save when something actually changed, to avoid a pointless write.
    if updated:
        newFolder = self.save(newFolder, triggerEvents=False)
    # Give listeners a chance to change things
    events.trigger('model.folder.copy.prepare', (srcFolder, newFolder))
    # copy items
    itemModel = Item()
    for item in self.childItems(folder=srcFolder):
        setResponseTimeLimit()
        itemModel.copyItem(item, creator, folder=newFolder)
        if progress:
            progress.update(increment=1, message='Copied item ' + item['name'])
    # copy subfolders
    for sub in self.childFolders(parentType='folder', parent=srcFolder, user=creator):
        # The first folder of a tree copy is the one being populated; skip it.
        if firstFolder and firstFolder['_id'] == sub['_id']:
            continue
        self.copyFolder(sub, parent=newFolder, parentType='folder',
                        creator=creator, progress=progress)
    events.trigger('model.folder.copy.after', newFolder)
    if progress:
        progress.update(increment=1, message='Copied folder ' + newFolder['name'])
    # Reload to get updated size value
    return self.load(newFolder['_id'], force=True)
def deleteItemAnnotations(self, item):
    """Remove every active annotation on an item; return the count removed."""
    setResponseTimeLimit(86400)
    user = self.getCurrentUser()
    query = {'_active': {'$ne': False}, 'itemId': item['_id']}
    removed = 0
    for record in Annotation().find(query, limit=0, sort=[('_id', 1)]):
        # Load without elements; we only need the document to remove it.
        loaded = Annotation().load(record['_id'], user=user, getElements=False)
        if loaded:
            Annotation().remove(loaded)
            removed += 1
    return removed
def remove(self, folder, progress=None, **kwargs):
    """
    Recursively delete a folder, its items, its subfolders, and any
    pending uploads targeting it.

    :param folder: The folder document to delete.
    :type folder: dict
    :param progress: A progress context to record progress on.
    :type progress: girder.utility.progress.ProgressContext or None.
    """
    setResponseTimeLimit()
    itemModel = self.model('item')
    # Remove each child item first.
    childItems = itemModel.find({'folderId': folder['_id']})
    for childItem in childItems:
        setResponseTimeLimit()
        itemModel.remove(childItem, progress=progress, **kwargs)
        if progress:
            progress.update(increment=1,
                            message='Deleted item ' + childItem['name'])
    # subsequent operations take a long time, so free the cursor's resources
    childItems.close()
    # Recurse into each child folder.
    childFolders = self.find({
        'parentId': folder['_id'],
        'parentCollection': 'folder'
    })
    for childFolder in childFolders:
        self.remove(childFolder, progress=progress, **kwargs)
    childFolders.close()
    # Delete pending uploads into this folder
    uploadModel = self.model('upload')
    pendingUploads = uploadModel.find({
        'parentId': folder['_id'],
        'parentType': 'folder'
    })
    for pending in pendingUploads:
        uploadModel.remove(pending, progress=progress, **kwargs)
    pendingUploads.close()
    # Finally remove the folder document itself.
    AccessControlledModel.remove(self, folder, progress=progress, **kwargs)
    if progress:
        progress.update(increment=1,
                        message='Deleted folder ' + folder['name'])
def makeDicomItem(self, item):
    """
    Try to convert an existing item into a "DICOM item", which contains a
    "dicomMeta" field with DICOM metadata that is common to all DICOM
    files.
    """
    metadataReference = None
    dicomFiles = []
    lastFile = None
    for file in Item().childFiles(item):
        lastFile = file
        dicomMeta = _parseFile(file)
        if dicomMeta:
            dicomFiles.append(_extractFileData(file, dicomMeta))
            # Fold this file's metadata into the shared reference, keeping
            # only values common to all files seen so far.
            metadataReference = (dicomMeta if metadataReference is None
                                 else _removeUniqueMetadata(metadataReference, dicomMeta))
            setResponseTimeLimit()
    # If no direct DICOM files were found, fall back to looking inside the
    # last file as an archive, when the File model supports archive listing.
    # NOTE(review): only the *last* child file is probed as an archive —
    # presumably the common single-archive case; confirm against callers.
    if not dicomFiles and lastFile is not None and hasattr(File(), 'archiveList'):
        try:
            for path in File().archiveList(lastFile)['names']:
                dicomMeta = _parseFile(lastFile, path)
                if dicomMeta:
                    dicomFiles.append(_extractFileData(lastFile, dicomMeta, path))
                    metadataReference = (dicomMeta if metadataReference is None
                                         else _removeUniqueMetadata(metadataReference, dicomMeta))
                    setResponseTimeLimit()
        except GirderException:
            # Best-effort: an unreadable archive simply yields no DICOM data.
            pass
    if dicomFiles:
        # Sort the dicom files
        dicomFiles.sort(key=_getDicomFileSortKey)
        # Store in the item
        item['dicom'] = {'meta': metadataReference, 'files': dicomFiles}
        # Save the item
        Item().save(item)
def deleteExpired(self):
    """
    Sweep timelapse folders: delete those older than the retention period
    and send a pending-deletion email for those past the warning age.
    """
    cursor = Folder().find({'isPhotomorph': True})
    now = datetime.datetime.utcnow()
    emailExp = datetime.timedelta(days=DAYS_UNTIL_EMAIL)
    dataExp = datetime.timedelta(days=DAYS_UNTIL_DELETION)
    for folder in cursor:
        setResponseTimeLimit()
        deletionDate = folder['created'] + dataExp
        if deletionDate < now:
            # Past the retention window: remove the folder entirely.
            logger.info('Delete timelapse %s (uid=%s)' % (folder['name'],
                                                          folder['creatorId']))
            Folder().remove(folder)
            continue
        # Skip folders already warned, or not yet old enough to warn.
        if folder.get('timelapseEmailSent') or folder['created'] + emailExp >= now:
            continue
        try:
            user = User().load(folder['creatorId'], force=True, exc=True)
            text = renderTemplate('timelapse.deletePending.mako', params={
                'folder': folder,
                'days': DAYS_UNTIL_DELETION,
                'url': getEmailUrlPrefix() + '#timelapse',
                'deletionDate': deletionDate.strftime(DATE_FMT)
            })
            sendMail(DELETE_SUBJECT, text, [user['email']])
            Folder().update({'_id': folder['_id']},
                            {'$set': {'timelapseEmailSent': True}},
                            multi=False)
        except Exception:
            # Best-effort notification; log and continue with other folders.
            logger.exception('Error sending email for folder: %s' % folder['_id'])
def checkConsistency(self, stage, progress=None):
    """
    Check all of the items and make sure they are valid.  This operates in
    stages, since some actions should be done before other models that
    rely on items and some need to be done after.  The stages are:

    * count - count how many items need to be checked.
    * remove - remove lost items
    * verify - verify and fix existing items

    :param stage: which stage of the check to run.  See above.
    :param progress: an optional progress context to update.
    :returns: numItems: number of items to check or processed,
              numChanged: number of items changed.
    """
    if stage == 'count':
        numItems = self.find(limit=1).count()
        return numItems, 0
    elif stage == 'remove':
        # Check that all items are in existing folders.  Any that are not
        # can be deleted.  Perhaps we should put them in a lost+found
        # instead
        folderIds = self.model('folder').collection.distinct('_id')
        lostItems = self.find({
            '$or': [{'folderId': {'$nin': folderIds}},
                    {'folderId': {'$exists': False}}]})
        numItems = itemsLeft = lostItems.count()
        if numItems:
            if progress is not None:
                progress.update(message='Removing orphaned items')
            for item in lostItems:
                setResponseTimeLimit()
                # Delete directly at the collection level; the item's
                # folder is gone, so the model-level remove isn't needed.
                self.collection.delete_one({'_id': item['_id']})
                if progress is not None:
                    itemsLeft -= 1
                    progress.update(increment=1, message='Removing '
                                    'orphaned items (%d left)' % itemsLeft)
        return numItems, numItems
    elif stage == 'verify':
        # Check items sizes
        items = self.find()
        numItems = itemsLeft = items.count()
        itemsCorrected = 0
        if progress is not None:
            progress.update(message='Checking items')
        for item in items:
            itemCorrected = False
            setResponseTimeLimit()
            # Recompute the stored size; a mismatch counts as a correction.
            oldSize = item.get('size', 0)
            newSize = self.recalculateSize(item)
            if newSize != oldSize:
                itemCorrected = True
            # Verify the cached base-parent pointers against the real
            # ancestry and repair them when they disagree.
            newBaseParent = self.parentsToRoot(item, force=True)[0]
            if item['baseParentType'] != newBaseParent['type'] or \
                    item['baseParentId'] != newBaseParent['object']['_id']:
                self.update(
                    {'_id': item['_id']}, update={'$set': {
                        'baseParentType': newBaseParent['type'],
                        'baseParentId': newBaseParent['object']['_id']
                    }})
                itemCorrected = True
            if itemCorrected:
                itemsCorrected += 1
            if progress is not None:
                itemsLeft -= 1
                progress.update(increment=1, message='Checking items (%d '
                                'left)' % itemsLeft)
        return numItems, itemsCorrected
def getTilesRegion(self, item, params):
    """
    Return an arbitrary region of a tiled image item, either as raw bytes
    or as a streamed file response.

    :param item: the image item.
    :param params: region, output, scale, encoding, and content-disposition
        query parameters (see the parse list below).
    :returns: either the encoded region data or a generator streaming it.
    """
    _adjustParams(params)
    params = self._parseParams(params, True, [
        ('left', float, 'region', 'left'),
        ('top', float, 'region', 'top'),
        ('right', float, 'region', 'right'),
        ('bottom', float, 'region', 'bottom'),
        ('regionWidth', float, 'region', 'width'),
        ('regionHeight', float, 'region', 'height'),
        ('units', str, 'region', 'units'),
        ('unitsWH', str, 'region', 'unitsWH'),
        ('width', int, 'output', 'maxWidth'),
        ('height', int, 'output', 'maxHeight'),
        ('fill', str),
        ('magnification', float, 'scale', 'magnification'),
        ('mm_x', float, 'scale', 'mm_x'),
        ('mm_y', float, 'scale', 'mm_y'),
        ('exact', bool, 'scale', 'exact'),
        ('frame', int),
        ('encoding', str),
        ('jpegQuality', int),
        ('jpegSubsampling', int),
        ('tiffCompression', str),
        ('style', str),
        ('resample', 'boolOrInt'),
        ('contentDisposition', str),
        ('contentDispositionFileName', str)
    ])
    _handleETag('getTilesRegion', item, params)
    # Region extraction can be very slow on large images.
    setResponseTimeLimit(86400)
    try:
        regionData, regionMime = self.imageItemModel.getRegion(
            item, **params)
    except TileGeneralException as e:
        raise RestException(e.args[0])
    except ValueError as e:
        raise RestException('Value Error: %s' % e.args[0])
    subname = str(int(params.get('region')['left'])) + ',' + str(int(params.get('region')['top']))
    # Fix: the parse list stores this value under 'contentDispositionFileName';
    # the previous lookup used 'contentDispositionFilename' (wrong case) and
    # therefore always passed None.
    self._setContentDisposition(
        item, params.get('contentDisposition'), regionMime, subname,
        params.get('contentDispositionFileName'))
    setResponseHeader('Content-Type', regionMime)
    if isinstance(regionData, pathlib.Path):
        BUF_SIZE = 65536

        def stream():
            # Stream the temporary file in chunks, then delete it.
            try:
                with regionData.open('rb') as f:
                    while True:
                        data = f.read(BUF_SIZE)
                        if not data:
                            break
                        yield data
            finally:
                regionData.unlink()
        return stream
    setRawResponse()
    return regionData
def _getAnnotation(self, user, id, params):
    """
    Get a generator function that will yield the json of an annotation.

    :param user: the user that needs read access on the annotation and its
        parent item.
    :param id: the annotation id.
    :param params: paging and region parameters for the annotation.
    :returns: a function that will return a generator.
    """
    # Set the response time limit to a very long value
    setResponseTimeLimit(86400)
    annotation = Annotation().load(
        id, region=params, user=user, level=AccessType.READ, getElements=False)
    if annotation is None:
        raise RestException('Annotation not found', 404)
    # Ensure that we have read access to the parent item.  We could fail
    # faster when there are permissions issues if we didn't load the
    # annotation elements before checking the item access permissions.
    # This had been done via the filtermodel decorator, but that doesn't
    # work with yielding the elements one at a time.
    annotation = Annotation().filter(annotation, self.getCurrentUser())
    # Serialize the annotation with an empty elements list, then split the
    # result at the elements marker so the elements can be streamed into
    # the gap between the two halves.
    annotation['annotation']['elements'] = []
    breakStr = b'"elements": ['
    base = json.dumps(annotation, sort_keys=True, allow_nan=False,
                      cls=JsonEncoder).encode('utf8').split(breakStr)
    centroids = str(params.get('centroids')).lower() == 'true'

    def generateResult():
        info = {}
        idx = 0
        yield base[0]
        yield breakStr
        collect = []
        if centroids:
            # Add a null byte to indicate the start of the binary data
            yield b'\x00'
        for element in Annotationelement().yieldElements(
                annotation, params, info):
            # The json conversion is fastest if we use defaults as much as
            # possible.  The only value in an annotation element that needs
            # special handling is the id, so cast that ourselves and then
            # use a json encoder in the most compact form.
            if isinstance(element, dict):
                element['id'] = str(element['id'])
            else:
                # Centroid mode: pack the element id and coordinates into a
                # fixed-size binary record instead of json.
                element = struct.pack(
                    '>QL', int(element[0][:16], 16),
                    int(element[0][16:24], 16)) + struct.pack(
                    '<fffl', *element[1:])
            # Use ujson; it is much faster.  The standard json library
            # could be used in its most default mode instead like so:
            #   result = json.dumps(element, separators=(',', ':'))
            # Collect multiple elements before emitting them.  This
            # balances using less memory and streaming right away with
            # efficiency in dumping the json.  Experimentally, 100 is
            # significantly faster than 10 and not much slower than 1000.
            collect.append(element)
            if len(collect) >= 100:
                if isinstance(collect[0], dict):
                    yield (b',' if idx else b'') + ujson.dumps(
                        collect).encode('utf8')[1:-1]
                else:
                    yield b''.join(collect)
                idx += 1
                collect = []
        # Flush any remaining buffered elements.
        if len(collect):
            if isinstance(collect[0], dict):
                yield (b',' if idx else b'') + ujson.dumps(
                    collect).encode('utf8')[1:-1]
            else:
                yield b''.join(collect)
        if centroids:
            # Add a final null byte to indicate the end of the binary data
            yield b'\x00'
        # Emit the tail of the base json, appending the element query info
        # before the final closing brace.
        yield base[1].rstrip().rstrip(b'}')
        yield b', "_elementQuery": '
        yield json.dumps(
            info, sort_keys=True, allow_nan=False,
            cls=JsonEncoder).encode('utf8')
        yield b'}'

    if centroids:
        setResponseHeader('Content-Type', 'application/octet-stream')
    else:
        setResponseHeader('Content-Type', 'application/json')
    return generateResult
def deleteOldAnnotations(self, age, versions):
    """Remove old annotation versions; this can be slow, so allow a day."""
    setResponseTimeLimit(86400)
    # True -> actually remove (not a dry run).
    result = Annotation().removeOldAnnotations(True, age, versions)
    return result
def getOldAnnotations(self, age, versions):
    """Report old annotation versions without removing them (dry run)."""
    setResponseTimeLimit(86400)
    # False -> query only; nothing is deleted.
    report = Annotation().removeOldAnnotations(False, age, versions)
    return report