def _checkUploadSize(self, upload):
    """
    Check whether an upload fits within a quota restriction.

    :param upload: an upload document.
    :returns: None if the upload is allowed, otherwise a dictionary of
        information about the quota restriction.
    """
    priorSize = 0
    if 'fileId' in upload:
        # Replacing an existing file: only the size delta counts
        # against the quota.
        existing = File().load(id=upload['fileId'], force=True)
        priorSize = int(existing.get('size', 0))
        model, resource = self._getBaseResource('file', existing)
    else:
        model, resource = self._getBaseResource(
            upload['parentType'], upload['parentId'])
    if resource is None:
        return None
    quota = self._getFileSizeQuota(model, resource)
    if not quota:
        return None
    projected = resource['size'] + upload['size'] - priorSize
    # Always allow replacement with a smaller object.
    if projected <= quota or upload['size'] < priorSize:
        return None
    remaining = max(quota - resource['size'], 0)
    return {
        'fileSizeQuota': quota,
        'sizeNeeded': upload['size'] - priorSize,
        'quotaLeft': remaining,
        'quotaUsed': resource['size'],
    }
def delete(self, id, user):
    """
    Remove a stem image.  If its file was imported into the user's
    import folder, also remove the item that holds that file.

    :param id: the stem image document id.
    :param user: the user performing the deletion (needs write access).
    :returns: the result of removing the stem image document.
    :raises RestException: 404 if the stem image does not exist.
    """
    stem_image = self.load(id, user=user, level=AccessType.WRITE)
    if not stem_image:
        raise RestException('StemImage not found.', 404)
    public = stem_image.get('public', False)
    # Try to load the file and check if it was imported.
    # If it was imported, delete the item containing the file.
    # If loading/removing the file fails, remove the stem image anyways
    # -- this cleanup is deliberately best-effort.
    try:
        f = FileModel().load(stem_image['fileId'], level=AccessType.WRITE,
                             user=user)
        if f.get('imported', False) is True:
            item = ItemModel().load(f['itemId'], level=AccessType.WRITE,
                                    user=user)
            if item['folderId'] == self._get_import_folder(
                    user, public)['_id']:
                ItemModel().remove(item)
    except Exception:
        # Was a bare "except:", which also swallows KeyboardInterrupt
        # and SystemExit; Exception keeps the best-effort behavior
        # without masking interpreter-exit signals.
        pass
    return self.remove(stem_image)
def _getLargeImagePath(self):
    # If self.mayHaveAdjacentFiles is True, we try to use the girder
    # mount where companion files appear next to each other.
    largeImageFileId = self.item['largeImage']['fileId']
    largeImageFile = File().load(largeImageFileId, force=True)
    try:
        largeImagePath = None
        if (self.mayHaveAdjacentFiles(largeImageFile) and
                hasattr(File(), 'getGirderMountFilePath')):
            # First preference: an imported file whose recorded 'path'
            # matches its local assetstore path can be used directly.
            try:
                if (largeImageFile.get('imported') and
                        File().getLocalFilePath(largeImageFile) ==
                        largeImageFile['path']):
                    largeImagePath = largeImageFile['path']
            except Exception:
                pass
            # Second preference: the girder mount path, which exposes
            # companion files next to the large image file.
            if not largeImagePath:
                try:
                    largeImagePath = File().getGirderMountFilePath(
                        largeImageFile)
                except FilePathException:
                    pass
        # Final fallback: the plain local assetstore path.
        if not largeImagePath:
            try:
                largeImagePath = File().getLocalFilePath(largeImageFile)
            except AttributeError as e:
                # The assetstore type has no local path support.
                raise TileSourceException(
                    'No local file path for this file: %s' % e.args[0])
        return largeImagePath
    except (TileSourceAssetstoreException, FilePathException):
        # These carry useful context for the caller; re-raise unchanged.
        raise
    except (KeyError, ValidationException, TileSourceException) as e:
        raise TileSourceException(
            'No large image file in this item: %s' % e.args[0])
def _getLargeImagePath(self):
    """
    GDAL can read directly from http/https/ftp via /vsicurl.  If this
    is a link file, try to use it; otherwise (or on any failure) fall
    back to the default Girder tile-source path resolution.
    """
    try:
        fileId = self.item['largeImage']['fileId']
        fileDoc = File().load(fileId, force=True)
        curlCapable = (StrictVersion(gdal.__version__) >=
                       StrictVersion('2.1.3'))
        linkUrl = fileDoc.get('linkUrl')
        remoteLink = (
            bool(linkUrl) and
            not fileDoc.get('assetstoreId') and
            re.match(r'(http(|s)|ftp)://', linkUrl) is not None)
        if curlCapable and remoteLink:
            vsiPath = '/vsicurl/' + linkUrl
            logger.info('Using %s' % vsiPath)
            return vsiPath
    except Exception:
        # Any failure just means we cannot take the /vsicurl shortcut.
        pass
    return GirderTileSource._getLargeImagePath(self)
def create(self, **kwargs):
    """
    Create a new model document from the declared creation properties.

    File-typed properties are validated (the referenced file must exist
    and be readable) and coerced to ObjectIds; ObjectId-typed
    properties (scalar or list) are coerced; timestamp-typed properties
    are parsed.  After the document is saved, thumbnail jobs are
    spawned for any file property whose mime type is an image.

    :param kwargs: values keyed by the names in ``self.create_props``,
        plus optional ``public`` and ``user`` entries.
    :returns: the saved model document.
    :raises ValidationException: if a referenced file does not exist.
    """
    model = {}
    # Cache the file documents loaded during validation so the
    # thumbnail pass below does not reload each one from the database.
    loaded_files = {}
    for prop in self.create_props:
        prop_value = kwargs.get(prop['name'], prop.get('default'))
        if prop_value is not None:
            if prop.get('type') == 'file':
                file = File().load(prop_value, user=getCurrentUser(),
                                   level=AccessType.READ)
                if file is None:
                    raise ValidationException('File doesn\'t exists: %s'
                                              % prop_value)
                loaded_files[prop['name']] = file
                if not isinstance(prop_value, ObjectId):
                    prop_value = ObjectId(prop_value)
            elif prop.get('type') == ObjectId:
                if isinstance(prop_value, list):
                    prop_value = [ObjectId(x) for x in prop_value]
                else:
                    prop_value = ObjectId(prop_value)
            elif prop.get('type') == 'timestamp':
                prop_value = parseTimestamp(prop_value)
        model[prop['name']] = prop_value
    self.setPublic(model, public=kwargs.get('public', False))
    user = kwargs.get('user')
    self.setUserAccess(model, user=user, level=AccessType.ADMIN)
    model['owner'] = user['_id']
    if edp_group() is not None:
        self.setGroupAccess(model, edp_group(), AccessType.ADMIN)
    saved_model = self.save(model)
    # Now spawn thumbnail jobs if the model contains any image, reusing
    # the file documents loaded above (previously each file was loaded
    # from the database a second time here).
    for prop_name, file in loaded_files.items():
        mime_type = file.get('mimeType', '')
        if mime_type is not None and mime_type.startswith('image/'):
            self._create_thumbnail(file, saved_model, prop_name, user)
    return saved_model
def _getLargeImagePath(self):
    # If self.mayHaveAdjacentFiles is True, we try to use the girder
    # mount where companion files appear next to each other.
    try:
        largeImageFileId = self.item['largeImage']['fileId']
        if not hasattr(self, 'mayHaveAdjacentFiles'):
            # The item has adjacent files if there are any files that
            # are not the large image file or an original file it
            # was derived from.  This is always the case if there are 3
            # or more files.
            fileIds = [
                str(file['_id'])
                for file in Item().childFiles(self.item, limit=3)
            ]
            knownIds = [str(largeImageFileId)]
            if 'originalId' in self.item['largeImage']:
                knownIds.append(str(self.item['largeImage']['originalId']))
            self.mayHaveAdjacentFiles = (
                len(fileIds) >= 3 or
                fileIds[0] not in knownIds or
                fileIds[-1] not in knownIds)
        largeImageFile = File().load(largeImageFileId, force=True)
        # Certain file types are known to ship with companion files, so
        # force the adjacent-files path for them regardless of the
        # file-count heuristic above.
        if (any(ext in KnownExtensionsWithAdjacentFiles
                for ext in largeImageFile['exts']) or
                largeImageFile.get('mimeType') in
                KnownMimeTypesWithAdjacentFiles):
            self.mayHaveAdjacentFiles = True
        largeImagePath = None
        if self.mayHaveAdjacentFiles and hasattr(File(),
                                                 'getGirderMountFilePath'):
            # Prefer the girder mount path so companion files are
            # visible next to the large image file.
            try:
                largeImagePath = File().getGirderMountFilePath(
                    largeImageFile)
            except FilePathException:
                pass
        if not largeImagePath:
            try:
                largeImagePath = File().getLocalFilePath(largeImageFile)
            except AttributeError as e:
                # The assetstore type has no local path support.
                raise TileSourceException(
                    'No local file path for this file: %s' % e.args[0])
        return largeImagePath
    except (TileSourceAssetstoreException, FilePathException):
        # These carry useful context for the caller; re-raise unchanged.
        raise
    except (KeyError, ValidationException, TileSourceException) as e:
        raise TileSourceException(
            'No large image file in this item: %s' % e.args[0])
def update(self, model, model_updates, user=None, parent=None):
    """
    Apply a set of property updates to an existing model document.

    Only properties listed in ``self.mutable_props`` are applied.
    Properties in ``self.file_props`` are validated (the file must
    exist and be readable), coerced to ObjectIds, and queue a thumbnail
    job when the file is an image.

    :param model: the existing model document.
    :param model_updates: mapping of property name to new value.
    :param user: the acting user; defaults to the current user.
    :param parent: the parent document; required when
        ``self.parent_key`` is set.
    :returns: the freshly loaded document when anything changed,
        otherwise the original ``model``.
    :raises ValidationException: if a referenced file does not exist or
        the id matches no document.
    """
    user = getCurrentUser() if user is None else user
    query = {'_id': model['_id']}
    if self.parent_key is not None:
        query[self.parent_key] = parent['_id']
    updates = {}
    for prop, prop_value in model_updates.items():
        if prop not in self.mutable_props:
            continue
        if prop in self.file_props:
            file = File().load(prop_value, user=user,
                               level=AccessType.READ)
            if file is None:
                raise ValidationException('File doesn\'t exists: %s'
                                          % prop_value)
            if not isinstance(prop_value, ObjectId):
                prop_value = ObjectId(prop_value)
            mime_type = file.get('mimeType', '')
            if mime_type is not None and mime_type.startswith('image/'):
                self._create_thumbnail(file, model, prop, user, updates)
        updates.setdefault('$set', {})[prop] = prop_value
    if not updates:
        return model
    update_result = super(Base, self).update(query, update=updates,
                                             multi=False)
    if update_result.matched_count == 0:
        raise ValidationException('Invalid id (%s)' % model['_id'])
    return self.load(model['_id'], user=user, level=AccessType.READ)
def importData(self, parent, parentType, params, progress, user, **kwargs):
    """
    Import a list of tables, each to a file within a distinct item.
    Each table specification in the list is an object which must have a
    'table' key.  It may optionally have other connection information
    such as 'database' and 'schema'.  If there is a 'name' key, the
    name is used for the item and file.  If there is a 'database' key,
    a subfolder is created within the specified parent with that name.
    If a user or collection is specified for the top level and no
    database key is specified, the default database name (from the
    assetstore) is used.  If the specific item and file already exists
    and is from the same assetstore, it is updated.  If the specific
    item already exists and is not from the same assetstore (or not
    marked that it was imported), an error is given.

    :param parent: The parent object to import into.  Must be a folder,
        user, collection, item, or file.
    :param parentType: The model type of the parent object.
    :param params: Additional parameters required for the import
        process:
        tables: a list of tables to add.  If there is already an item
            with an exact table name, it is updated.
        sort: default sort parameter.  Used in plain downloads.
        fields: default fields parameter.  Used in plain downloads.
        filters: default filters parameter.  Used in plain downloads.
        group: default group parameter.  Used in plain downloads.
        format: default format parameter.  Used in plain downloads.
        replace: if False, don't replace an existing file/item with the
            name, but always create new entries.  A parentType of file
            will always replace the existing data of a file.
    :type params: dict
    :param progress: Object on which to record progress if possible.
    :type progress: :py:class:`girder.utility.progress.ProgressContext`
    :param user: The Girder user performing the import.
    :type user: dict or None
    :return: a list of objects, each of which has an item and file
        entry with the items and files that were imported.
    """
    uri = (self.assetstore['database'].get('uri')
           if self.assetstore['database'].get('uri') else params['uri'])
    defaultDatabase = dbs.databaseFromUri(uri)
    response = []
    for table in params['tables']:
        # Reset the created flags for each table.  Previously these
        # were set once before the loop and were sticky, so a failure
        # on a later table could delete pre-existing folders/items/
        # files that an earlier iteration had created.
        createdFolder = createdItem = createdFile = False
        if isinstance(table, six.string_types):
            dbinfo = {'table': table}
        else:
            dbinfo = table.copy()
        if not self.assetstore['database'].get('uri'):
            dbinfo['uri'] = uri
        name = dbinfo.pop('name', dbinfo['table'])
        progress.update(message='Importing %s' % name)
        # Find or create a folder if needed
        if 'database' not in dbinfo and parentType == 'folder':
            folder = parent
        elif parentType not in ('file', 'item'):
            folderName = dbinfo.get('database', defaultDatabase)
            folder = Folder().findOne({
                'parentId': parent['_id'],
                'name': folderName,
                'parentCollection': parentType
            })
            if folder is None:
                folder = Folder().createFolder(parent, folderName,
                                               parentType=parentType,
                                               creator=user)
                createdFolder = True
        if parentType == 'file':
            # for files, we'll create a provisional file below, then
            # delete the original assetstore entry and modify the
            # existing file entry with the updated values before saving.
            item = Item().load(parent['itemId'], force=True)
        elif parentType == 'item':
            item = parent
        else:
            # Create an item if needed
            item = Item().findOne({
                'folderId': folder['_id'],
                'name': name
            })
            if item is None or params.get('replace') is False:
                item = Item().createItem(name=name, creator=user,
                                         folder=folder)
                createdItem = True
        # Create a file if needed
        file = File().findOne({'name': name, 'itemId': item['_id']})
        if (file is None or params.get('replace') is False or
                parentType == 'file'):
            file = File().createFile(creator=user, item=item, name=name,
                                     size=0,
                                     assetstore=self.assetstore,
                                     mimeType=dbFormatList.get(
                                         preferredFormat(
                                             params.get('format'))),
                                     saveFile=False)
            createdFile = True
        if file.get(DB_INFO_KEY) and not file[DB_INFO_KEY].get('imported'):
            raise GirderException(
                'A file for table %s is present but cannot be updated '
                'because it wasn\'t imported.' % name)
        try:
            file = self._importDataFile(file, parent, parentType, dbinfo,
                                        params)
        except GirderException as exc:
            # Only remove entities created for THIS table.
            self._importDataCleanup(file if createdFile else None,
                                    item if createdItem else None,
                                    folder if createdFolder else None)
            raise exc
        response.append({'item': item, 'file': file})
    return response
def importData(self, parent, parentType, params, progress, user, **kwargs):
    """
    Import a list of tables, each to a file within a distinct item.
    Each table specification in the list is an object which must have a
    'table' key.  It may optionally have other connection information
    such as 'database' and 'schema'.  If there is a 'name' key, the
    name is used for the item and file.  If there is a 'database' key,
    a subfolder is created within the specified parent with that name.
    If a user or collection is specified for the top level and no
    database key is specified, the default database name (from the
    assetstore) is used.  If the specific item and file already exists
    and is from the same assetstore, it is updated.  If the specific
    item already exists and is not from the same assetstore (or not
    marked that it was imported), an error is given.

    :param parent: The parent object to import into.  Must be a folder,
        user, collection, item, or file.
    :param parentType: The model type of the parent object.
    :param params: Additional parameters required for the import
        process:
        tables: a list of tables to add.  If there is already an item
            with an exact table name, it is updated.
        sort: default sort parameter.  Used in plain downloads.
        fields: default fields parameter.  Used in plain downloads.
        filters: default filters parameter.  Used in plain downloads.
        group: default group parameter.  Used in plain downloads.
        format: default format parameter.  Used in plain downloads.
        replace: if False, don't replace an existing file/item with the
            name, but always create new entries.  A parentType of file
            will always replace the existing data of a file.
    :type params: dict
    :param progress: Object on which to record progress if possible.
    :type progress: :py:class:`girder.utility.progress.ProgressContext`
    :param user: The Girder user performing the import.
    :type user: dict or None
    :return: a list of objects, each of which has an item and file
        entry with the items and files that were imported.
    """
    uri = (self.assetstore['database'].get('uri')
           if self.assetstore['database'].get('uri') else params['uri'])
    defaultDatabase = dbs.databaseFromUri(uri)
    response = []
    for table in params['tables']:
        # Reset the created flags for each table.  Previously these
        # were set once before the loop and were sticky, so a failure
        # on a later table could delete pre-existing folders/items/
        # files that an earlier iteration had created.
        createdFolder = createdItem = createdFile = False
        if isinstance(table, six.string_types):
            dbinfo = {'table': table}
        else:
            dbinfo = table.copy()
        if not self.assetstore['database'].get('uri'):
            dbinfo['uri'] = uri
        name = dbinfo.pop('name', dbinfo['table'])
        progress.update(message='Importing %s' % name)
        # Find or create a folder if needed
        if 'database' not in dbinfo and parentType == 'folder':
            folder = parent
        elif parentType not in ('file', 'item'):
            folderName = dbinfo.get('database', defaultDatabase)
            folder = Folder().findOne({
                'parentId': parent['_id'],
                'name': folderName,
                'parentCollection': parentType
            })
            if folder is None:
                folder = Folder().createFolder(
                    parent, folderName, parentType=parentType,
                    creator=user)
                createdFolder = True
        if parentType == 'file':
            # for files, we'll create a provisional file below, then
            # delete the original assetstore entry and modify the
            # existing file entry with the updated values before saving.
            item = Item().load(parent['itemId'], force=True)
        elif parentType == 'item':
            item = parent
        else:
            # Create an item if needed
            item = Item().findOne({
                'folderId': folder['_id'],
                'name': name
            })
            if item is None or params.get('replace') is False:
                item = Item().createItem(
                    name=name, creator=user, folder=folder)
                createdItem = True
        # Create a file if needed
        file = File().findOne({
            'name': name,
            'itemId': item['_id']
        })
        if file is None or params.get('replace') is False or parentType == 'file':
            file = File().createFile(
                creator=user, item=item, name=name, size=0,
                assetstore=self.assetstore,
                mimeType=dbFormatList.get(preferredFormat(params.get(
                    'format'))),
                saveFile=False)
            createdFile = True
        if file.get(DB_INFO_KEY) and not file[DB_INFO_KEY].get('imported'):
            raise GirderException(
                'A file for table %s is present but cannot be updated '
                'because it wasn\'t imported.' % name)
        try:
            file = self._importDataFile(file, parent, parentType, dbinfo,
                                        params)
        except GirderException as exc:
            # Only remove entities created for THIS table.
            self._importDataCleanup(
                file if createdFile else None,
                item if createdItem else None,
                folder if createdFolder else None)
            raise exc
        response.append({'item': item, 'file': file})
    return response
def convert_image_job(job):
    """
    Job handler that converts a source image file into a tiled TIFF.

    ``job['kwargs']`` supplies ``itemId``, ``fileId`` and optionally
    ``userId``, ``folderId`` (defaults to the item's folder) and
    ``name``; any remaining kwargs are passed through to
    ``create_tiff``.  The resulting file is uploaded to the folder and
    its id is appended to the job's ``results.file`` list.

    :param job: the job document to run and update.
    """
    # Imports are function-scoped because this runs as a job handler
    # (these names are resolved when the job executes).
    import tempfile
    from girder_jobs.constants import JobStatus
    from girder_jobs.models.job import Job
    from girder.constants import AccessType
    from girder.models.file import File
    from girder.models.folder import Folder
    from girder.models.item import Item
    from girder.models.upload import Upload
    from girder.models.user import User

    kwargs = job['kwargs']
    item = Item().load(kwargs.pop('itemId'), force=True)
    fileObj = File().load(kwargs.pop('fileId'), force=True)
    userId = kwargs.pop('userId', None)
    user = User().load(userId, force=True) if userId else None
    folder = Folder().load(kwargs.pop('folderId', item['folderId']),
                           user=user, level=AccessType.WRITE)
    name = kwargs.pop('name', None)
    job = Job().updateJob(
        job, log='Started large image conversion\n',
        status=JobStatus.RUNNING)
    logger = logging.getLogger('large-image-converter')
    # Mirror converter log output into the job log for this run.
    handler = JobLogger(job=job)
    logger.addHandler(handler)
    # We could increase the default logging level here
    # logger.setLevel(logging.DEBUG)
    try:
        inputPath = None
        if not fileObj.get('imported'):
            # Try the girder mount path first; fall back to the local
            # assetstore path below if it is unavailable.
            try:
                inputPath = File().getGirderMountFilePath(fileObj)
            except Exception:
                pass
        inputPath = inputPath or File().getLocalFilePath(fileObj)
        with tempfile.TemporaryDirectory() as tempdir:
            dest = create_tiff(
                inputFile=inputPath,
                inputName=fileObj['name'],
                outputDir=tempdir,
                **kwargs,
            )
            job = Job().updateJob(job, log='Storing result\n')
            with open(dest, 'rb') as fobj:
                fileObj = Upload().uploadFromFile(
                    fobj,
                    size=os.path.getsize(dest),
                    name=name or os.path.basename(dest),
                    parentType='folder',
                    parent=folder,
                    user=user,
                )
                # Reload the job to avoid clobbering concurrent updates
                # before recording the output file id in its results.
                job = Job().load(job['_id'], force=True)
                job.setdefault('results', {})
                job['results'].setdefault('file', [])
                job['results']['file'].append(fileObj['_id'])
                job = Job().save(job)
    except Exception as exc:
        status = JobStatus.ERROR
        logger.exception('Failed in large image conversion')
        job = Job().updateJob(
            job, log='Failed in large image conversion (%s)\n' % exc,
            status=status)
    else:
        status = JobStatus.SUCCESS
        job = Job().updateJob(
            job, log='Finished large image conversion\n', status=status)
    finally:
        # Always detach the per-job handler so it doesn't leak into
        # later conversions.
        logger.removeHandler(handler)