def fileFromItem(item):
    """
    Return the file contained in an item.

    :param item: the item document to inspect.
    :returns: the single file document contained in the item.
    :raises DanesfieldWorkflowException: if the item doesn't contain exactly
        one file.
    """
    # limit=2 is enough to distinguish the 0, 1, and >1 cases.
    files = Item().childFiles(item, limit=2)
    if files.count() != 1:
        # The original message was garbled ('Item must contain %d files, but
        # should contain only one.'); state the constraint and actual count.
        raise DanesfieldWorkflowException(
            'Item must contain exactly one file, but contains %d.' % files.count())
    return files[0]
def _virtualItemPosition(self, event):
    """
    Report the position (index) of an item within a virtual folder listing.

    Build a query matching every item that sorts strictly before the
    requested item under the listing's sort specification, with ``_id`` as
    the final tie-breaker, and respond with the count of such items.

    :param event: the REST request event; ``event.info['id']`` is the id of
        the item whose position is requested.
    :raises RestException: if the sort specification is empty.
    """
    params = event.info['params']
    response = _virtualChildItemsFind(self, params)
    if response is None:
        return  # This is not a virtual folder child listing request
    q, sort, user, limit, offset = response
    itemId = event.info['id']
    item = Item().load(itemId, user=user, level=AccessType.READ)
    if not len(sort):
        raise RestException('Invalid sort mode.')
    # An item precedes the target under sort fields (f1, f2, ...) if
    # f1 < t1, or (f1 == t1 and f2 < t2), ..., or (all sort fields equal
    # and _id < target _id).  Build one $or clause per prefix length.
    # Renamed from `dir`/`filter` to avoid shadowing the builtins.
    clauses = []
    for idx in range(len(sort) + 1):
        # '$lt' for ascending fields, '$gt' for descending; the trailing
        # _id tie-breaker reuses the direction of the last sort field.
        op = '$lt' if sort[min(idx, len(sort) - 1)][1] == SortDir.ASCENDING else '$gt'
        clause = {}
        for idx2 in range(idx):
            # Fields before position idx must match the target exactly.
            clause[sort[idx2][0]] = item.get(sort[idx2][0])
        if idx < len(sort):
            clause[sort[idx][0]] = {op: item.get(sort[idx][0])}
        else:
            clause['_id'] = {op: item['_id']}
        clauses.append(clause)
    q = {'$and': [q, {'$or': clauses}]}
    items = Item().findWithPermissions(
        q, sort=sort, user=user, level=AccessType.READ, limit=limit, offset=offset)
    event.preventDefault().addResponse(items.count())
def validate(self, doc, **kwargs):
    """
    Ensure the item has valid metadata.

    :param doc: the annotation item document to validate.
    :returns: the validated document.
    :raises ValidationException: if the referenced image does not exist or
        the item does not contain exactly one file.
    """
    super(Aperio, self).validate(doc, **kwargs)
    meta = doc.setdefault('aperio', {})
    imageId = meta.get('image')
    # Girder's Model.load raises for a malformed id but returns None for a
    # missing document, so check the result as well as catching exceptions;
    # the original only caught the exception and let None slip through.
    try:
        image = Item().load(imageId, force=True)
    except Exception:
        image = None
    if image is None:
        raise ValidationException(
            'The item is not associated with a valid image.')
    files = Item().childFiles(doc)
    if files.count() != 1:
        raise ValidationException(
            'The annotation item must have exactly one file.')
    # Default the tag so downstream consumers can rely on the key existing.
    meta.setdefault('tag', None)
    return doc
def _virtualChildItems(self, event):
    """
    Serve the child item listing for a virtual folder.

    When the request targets a virtual folder, replace the default response
    with the items matching the folder's stored query, filtered for the
    current user.
    """
    request_params = event.info['params']
    found = _virtualChildItemsFind(self, request_params)
    if found is None:
        return  # This is not a virtual folder child listing request
    query, sort, user, limit, offset = found
    item_model = Item()
    # These items may reside in folders that the user cannot read, so we must
    # find with permissions
    cursor = item_model.findWithPermissions(
        query, sort=sort, user=user, level=AccessType.READ,
        limit=limit, offset=offset)
    # We have to add this here, as we can't use filtermodel since we return the
    # results in addResponse.
    count = getattr(cursor, 'count', None)
    if callable(count):
        cherrypy.response.headers['Girder-Total-Count'] = count()
    filtered = [item_model.filter(doc, user) for doc in cursor]
    event.preventDefault().addResponse(filtered)
def createThumbnailsJob(job):
    """
    Create thumbnails for all of the large image items.

    :param job: the job object including kwargs which contains:
        spec: an array, each entry of which is the parameter dictionary for
            the model getThumbnail function.
        logInterval: the time in seconds between log messages.  This also
            controls the granularity of cancelling the job.
        concurrent: the number of threads to use.  0 for the number of cpus.
    """
    job = Job().updateJob(
        job, log='Started creating large image thumbnails\n',
        status=JobStatus.RUNNING)
    concurrency = int(job['kwargs'].get('concurrent', 0))
    # Fall back to the logical cpu count when no positive value was given.
    concurrency = psutil.cpu_count(
        logical=True) if concurrency < 1 else concurrency
    # Running totals reported in the periodic job log.
    status = {
        'checked': 0,
        'created': 0,
        'failed': 0,
    }
    spec = job['kwargs']['spec']
    logInterval = float(job['kwargs'].get('logInterval', 10))
    job = Job().updateJob(job, log='Creating thumbnails (%d concurrent)\n' % concurrency)
    nextLogTime = time.time() + logInterval
    tasks = []
    # This could be switched from ThreadPoolExecutor to ProcessPoolExecutor
    # without any other changes.  Doing so would probably improve parallel
    # performance, but may not work reliably under Python 2.x.
    pool = concurrent.futures.ThreadPoolExecutor(max_workers=concurrency)
    try:
        # Get a cursor with the list of images
        items = Item().find({'largeImage.fileId': {'$exists': True}})
        if hasattr(items, 'count'):
            status['items'] = items.count()
        status['specs'] = len(spec)
        nextitem = cursorNextOrNone(items)
        while len(tasks) or nextitem is not None:
            # Create more tasks than we strictly need so if one finishes before
            # we check another will be ready.  This is balanced with not
            # creating too many to avoid excessive memory use.  As such, we
            # can't do a simple iteration over the database cursor, as it will
            # be exhausted before we are done.
            while len(tasks) < concurrency * 4 and nextitem is not None:
                tasks.append(
                    pool.submit(createThumbnailsJobTask, nextitem, spec))
                nextitem = cursorNextOrNone(items)
            # Wait a short time or until the oldest task is complete
            try:
                tasks[0].result(0.1)
            except concurrent.futures.TimeoutError:
                pass
            # Remove completed tasks from our list, adding their results to the
            # status.  Iterate backwards so deletions don't shift the indices
            # still to be visited.
            for pos in range(len(tasks) - 1, -1, -1):
                if tasks[pos].done():
                    r = tasks[pos].result()
                    status['created'] += r['created']
                    status['checked'] += r['checked']
                    status['failed'] += r['failed']
                    status['lastFailed'] = r.get(
                        'lastFailed', status.get('lastFailed'))
                    tasks[pos:pos + 1] = []
            # Periodically, log the state of the job and check if it was
            # deleted or canceled.
            if time.time() > nextLogTime:
                job, msg = createThumbnailsJobLog(job, status)
                # Check if the job was deleted or canceled; if so, quit
                job = Job().load(id=job['_id'], force=True)
                if not job or job['status'] in (JobStatus.CANCELED, JobStatus.ERROR):
                    cause = {
                        None: 'deleted',
                        JobStatus.CANCELED: 'canceled',
                        JobStatus.ERROR: 'stopped due to error',
                    }[None if not job else job.get('status')]
                    msg = 'Large image thumbnails job %s' % cause
                    logger.info(msg)
                    # Cancel any outstanding tasks.  If they haven't started,
                    # they are discarded.  Those that have started will still
                    # run, though.
                    for task in tasks:
                        task.cancel()
                    return
                nextLogTime = time.time() + logInterval
    except Exception:
        logger.exception('Error with large image create thumbnails job')
        Job().updateJob(job, log='Error creating large image thumbnails\n',
                        status=JobStatus.ERROR)
        return
    finally:
        # Clean up the task pool asynchronously
        pool.shutdown(False)
    job, msg = createThumbnailsJobLog(
        job, status, 'Finished: ', JobStatus.SUCCESS)
    logger.info(msg)
def createOrUpdateExtension(self, app_id, os, arch, baseName, repository_type,
                            repository_url, revision, app_revision, packagetype,
                            codebase, description, release, icon_url,
                            development_status, category, enabled, homepage,
                            screenshots, contributors):
    """
    Upload an extension package in the database, in a specific release with
    providing ``release_id``. Or by default in the **'Nightly'** folder.

    :param app_id: The ID of the application.
    :param os: The operation system used for the extension.
    :param arch: The architecture compatible with the extension.
    :param baseName: The base name of the extension.
    :param repository_type: The type of repository (github, gitlab, ...).
    :param repository_url: The Url of the repository.
    :param revision: The revision of the extension.
    :param app_revision: The revision of the application.
    :param packagetype: Type of the extension.
    :param codebase: The codebase baseName.
    :param description: The description of the extension.
    :return: The status of the upload.
    """
    creator = self.getCurrentUser()
    application = self._model.load(app_id, user=creator)
    release_folder = None
    # Find the release by metadata revision
    releases = self._model.childFolders(application, 'Folder', user=creator)
    for folder in releases:
        # Use .get() so release folders without a 'revision' key are skipped
        # instead of raising a KeyError.
        if 'meta' in folder and folder['meta'].get('revision') == app_revision:
            release_folder = folder
            break
    if not release_folder:
        # Only the nightly folder in the list
        release_folder = list(
            self._model.childFolders(
                application, 'Folder', user=creator,
                filters={'name': constants.NIGHTLY_RELEASE_NAME}))
        if not release_folder:
            raise Exception('The %s folder not found.'
                            % constants.NIGHTLY_RELEASE_NAME)
        release_folder = release_folder[0]
    params = {
        'app_id': app_id,
        'baseName': baseName,
        'os': os,
        'arch': arch,
        'repository_type': repository_type,
        'repository_url': repository_url,
        'revision': revision,
        'app_revision': app_revision,
        'packagetype': packagetype,
        'codebase': codebase,
        'description': description
    }
    # Optional metadata is only stored when provided.
    if release:
        params['release'] = release
    if icon_url:
        params['icon_url'] = icon_url
    if development_status:
        params['development_status'] = development_status
    if category:
        params['category'] = category
    if enabled:
        params['enabled'] = enabled
    if homepage:
        params['homepage'] = homepage
    if screenshots:
        params['screenshots'] = screenshots
    if contributors:
        params['contributors'] = contributors
    name = application['meta']['extensionNameTemplate'].format(**params)
    filters = {'name': name}
    # Only one extensions should be in this list
    extensions = list(ExtensionModel().get(release_folder, filters=filters))
    if not len(extensions):
        # The extension doesn't exist yet:
        extension = ExtensionModel().createExtension(
            name, creator, release_folder, params)
    elif len(extensions) == 1:
        extension = extensions[0]
    else:
        raise Exception(
            'Too many extensions found for the same name :"%s"' % name)
    # Check the file inside the extension Item
    files = Item().childFiles(extension)
    if files.count() == 1:
        old_file = files.next()
        # Remove the superseded file once the new upload succeeds.  The
        # original code called File().remove(old_file) immediately and bound
        # its return value (None, not a callable) as the handler, which
        # deleted the old file before the upload finished; bind a real
        # callable so removal happens when the event fires.
        events.bind('model.file.finalizeUpload.after', 'application',
                    lambda event, f=old_file: File().remove(f))
    elif not files.count():
        # Extension new or empty
        pass
    else:
        raise Exception("More than 1 binary file in the extension.")
    # Compare the identifying metadata to decide whether a new binary upload
    # is actually needed.
    old_meta = {
        'baseName': extension['meta']['baseName'],
        'os': extension['meta']['os'],
        'arch': extension['meta']['arch'],
        'revision': extension['meta']['revision'],
        'app_revision': extension['meta']['app_revision']
    }
    identifier_meta = {
        'baseName': baseName,
        'os': os,
        'arch': arch,
        'revision': revision,
        'app_revision': app_revision
    }
    if identifier_meta == old_meta and len(extensions):
        # The revision is the same than these before, no need to upload
        extension = ExtensionModel().setMetadata(extension, params)
        # No upload will occur, so drop the pending removal handler.
        events.unbind('model.file.finalizeUpload.after', 'application')
    # Ready to upload the binary file
    return extension