def _createLargeImageJob(self, item, fileObj, user, token):
    import large_image_tasks.tasks
    from girder_worker_utils.transforms.girder_io import GirderUploadToItem
    from girder_worker_utils.transforms.contrib.girder_io import GirderFileIdAllowDirect
    from girder_worker_utils.transforms.common import TemporaryDirectory

    # Pick an output name that does not collide with the source file's name.
    outputName = os.path.splitext(fileObj['name'])[0] + '.tiff'
    if outputName == fileObj['name']:
        outputName = (os.path.splitext(fileObj['name'])[0] + '.' +
                      time.strftime('%Y%m%d-%H%M%S') + '.tiff')
    # Use a direct filesystem path when the assetstore exposes one.
    try:
        localPath = File().getLocalFilePath(fileObj)
    except (FilePathException, AttributeError):
        localPath = None
    job = large_image_tasks.tasks.create_tiff.delay(
        girder_job_title=u'TIFF Conversion: %s' % fileObj['name'],
        girder_job_other_fields={
            'meta': {
                'creator': 'large_image',
                'itemId': str(item['_id']),
                'task': 'createImageItem',
            }
        },
        inputFile=GirderFileIdAllowDirect(str(fileObj['_id']), fileObj['name'], localPath),
        outputName=outputName,
        outputDir=TemporaryDirectory(),
        girder_result_hooks=[
            # Upload the converted image back to the source item.
            GirderUploadToItem(str(item['_id']), False),
        ])
    return job.job
def _createLargeImageJob(self, item, fileObj, user, token, **kwargs):
    import large_image_tasks.tasks
    from girder_worker_utils.transforms.common import TemporaryDirectory
    from girder_worker_utils.transforms.contrib.girder_io import GirderFileIdAllowDirect
    from girder_worker_utils.transforms.girder_io import GirderUploadToItem

    # Use a direct filesystem path when the assetstore exposes one.
    try:
        localPath = File().getLocalFilePath(fileObj)
    except (FilePathException, AttributeError):
        localPath = None
    # Queue the conversion, honoring an optional celery countdown from kwargs.
    job = large_image_tasks.tasks.create_tiff.apply_async(kwargs=dict(
        girder_job_title='TIFF Conversion: %s' % fileObj['name'],
        girder_job_other_fields={'meta': {
            'creator': 'large_image',
            'itemId': str(item['_id']),
            'task': 'createImageItem',
        }},
        inputFile=GirderFileIdAllowDirect(str(fileObj['_id']), fileObj['name'], localPath),
        inputName=fileObj['name'],
        outputDir=TemporaryDirectory(),
        girder_result_hooks=[
            GirderUploadToItem(str(item['_id']), False),
        ],
        **kwargs,
    ), countdown=int(kwargs['countdown']) if kwargs.get('countdown') else None)
    return job.job
def asr(
    self, treeFileId, tableFileId, selectedColumn, resultSummaryItemId, plotItemId
):
    # Dispatch the asr task; each result hook uploads one output
    # (summary, plot) to its destination item.
    result = asr.delay(
        GirderFileId(treeFileId),
        GirderFileId(tableFileId),
        selectedColumn,
        girder_result_hooks=[
            GirderUploadToItem(resultSummaryItemId),
            GirderUploadToItem(plotItemId)
        ])
    return result.job
def run_clip_task(self, item, geometry, name, folder):
    # Clip the item's first file to the given geometry and upload the
    # result into a new item in the target folder.
    girderFile = [i for i in Item().childFiles(item, limit=1)][0]
    output = Item().createItem(name, creator=self.getCurrentUser(), folder=folder)
    result = clip_task.delay(
        GirderFileId(str(girderFile['_id'])), geometry, name,
        girder_result_hooks=[GirderUploadToItem(str(output['_id']))])
    return result.job
def pgls(
    self, treeFileId, tableFileId, correlation, independentVariable,
    dependentVariable, modelFitSummaryItemId, plotItemId
):
    result = pgls.delay(
        GirderFileId(treeFileId),
        GirderFileId(tableFileId),
        correlation,
        independentVariable,
        dependentVariable,
        girder_result_hooks=[
            GirderUploadToItem(modelFitSummaryItemId),
            GirderUploadToItem(plotItemId)
        ])
    return result.job
def createHistogramJob(self, item, file_, user=None, token=None, notify=False,
                       bins=None, label=False, bitmask=False):
    if bins is None:
        bins = Setting().get(PluginSettings.DEFAULT_BINS)
    if file_['itemId'] != item['_id']:
        raise ValueError('The file must be in the item.')
    girder_job_title = 'Histogram computation for item %s' % item['_id']
    girder_job_type = 'histogram'
    # A fake id links the job metadata, the uploaded file's reference, and the
    # placeholder histogram record saved below.
    fakeId = uuid.uuid4().hex
    other_fields = {
        'meta': {
            'creator': 'histogram',
            'task': 'createHistogram',
            'fakeId': fakeId,
        }
    }
    reference = json.dumps({'isHistogram': True, 'fakeId': fakeId})
    result = histogramExecutor.delay(
        GirderFileId(str(file_['_id'])), label, bins, bitmask,
        girder_job_title=girder_job_title,
        girder_job_type=girder_job_type,
        girder_job_other_fields=other_fields,
        girder_result_hooks=[
            GirderUploadToItem(str(item['_id']), delete_file=True,
                               upload_kwargs={'reference': reference})
        ])
    histogram = {
        'expected': True,
        'notify': notify,
        'itemId': item['_id'],
        'bins': bins,
        'label': label,
        'bitmask': bitmask,
        # 'jobId': result.job['_id'],
        'fakeId': fakeId,
    }
    self.save(histogram)
    return histogram
def crop_task(self, item, by_item, name, folder):
    # Crop the target item's file by the first file of by_item and upload
    # the result into a new item in the target folder.
    target_file = [i for i in Item().childFiles(item, limit=1)][0]
    by_file = [i for i in Item().childFiles(by_item, limit=1)][0]
    output = Item().createItem(name, creator=self.getCurrentUser(), folder=folder)
    driver = item.get('geometa', {}).get('driver', None)
    if not driver:
        raise ValidationException('Unsupported target dataset')
    result = crop_task.delay(
        GirderFileId(str(target_file['_id'])), driver,
        GirderFileId(str(by_file['_id'])), name,
        girder_result_hooks=[GirderUploadToItem(str(output['_id']))])
    return result.job
def generatePCAPlot(self, file):
    file_id = str(file['_id'])
    item_id = str(file['itemId'])
    if file['mimeType'] != 'text/csv':
        raise RestException("File must be of type 'text/csv'", code=422)
    # Run PCA on the worker; the hooks attach metadata to the item and
    # upload the generated plot back into it.
    result = gw_pca.delay(
        file['name'], GirderFileId(file_id),
        girder_result_hooks=[
            GirderItemMetadata(item_id),
            GirderUploadToItem(item_id)
        ])
    return result.job
def csv_column_append(self, fileId, itemId):
    # Append a column to the CSV on the worker and upload the modified
    # file back to the item.
    result = column_append.delay(
        GirderFileId(fileId),
        girder_result_hooks=[GirderUploadToItem(itemId)])
    return result.job