def createData(admin, user):
    """Build a fixture hierarchy for tests.

    Creates a collection with a private folder, a subfolder under the
    admin's public folder, five items spread across those folders, and
    five uploaded files (each file's content is this source file itself).

    :param admin: admin user who creates and owns everything.
    :param user: unused here; kept for interface compatibility.
    :returns: dict of the created documents plus the test file length.
    """
    collection = Collection().createCollection('Test Collection', admin)
    collPrivateFolder = Folder().createFolder(
        collection, 'Private', parentType='collection', public=False,
        creator=admin)
    adminPublicFolder = path_utils.lookUpPath(
        '/user/admin/Public', filter=False, force=True)['document']
    adminSubFolder = Folder().createFolder(
        adminPublicFolder, 'Folder 1', creator=admin)
    item1 = Item().createItem('Item 1', admin, adminPublicFolder)
    item2 = Item().createItem('Item 2', admin, adminPublicFolder)
    item3 = Item().createItem('It\\em/3', admin, adminSubFolder)
    item4 = Item().createItem('Item 4', admin, collPrivateFolder)
    item5 = Item().createItem('Item 5', admin, collPrivateFolder)
    # just use this file itself as a test file
    filepath = os.path.realpath(__file__)
    filelen = os.path.getsize(filepath)

    def _upload(name, parentItem):
        # Open under a context manager so each handle is closed; the
        # original leaked one file descriptor per uploaded file.
        with open(filepath, 'rb') as fp:
            return Upload().uploadFromFile(
                fp, filelen, name, parentType='item', parent=parentItem,
                user=admin)

    file1 = _upload('File 1', item1)
    file2 = _upload('File 2', item1)
    file3 = _upload('File 3', item2)
    file4 = _upload('File 4', item3)
    file5 = _upload('File 5', item4)
    return {
        'collection': collection,
        'collPrivateFolder': collPrivateFolder,
        'adminPublicFolder': adminPublicFolder,
        'adminSubFolder': adminSubFolder,
        'items': [item1, item2, item3, item4, item5],
        'files': [file1, file2, file3, file4, file5],
        'filelen': filelen
    }
def createNotebooks(event):
    """Copy the bundled example notebooks into the new user's space.

    Ensures a Private/oc/notebooks folder chain exists for the user from
    ``event.info`` and uploads every ``*.ipynb`` found next to this module.
    """
    user = event.info
    folders = Folder()
    private_folder = lookUpPath(
        'user/%s/Private' % user['login'], force=True)['document']
    oc = folders.createFolder(
        private_folder, 'oc', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    target = folders.createFolder(
        oc, 'notebooks', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    source_dir = os.path.join(os.path.dirname(__file__), 'notebooks')
    uploader = Upload()
    for path in glob.glob('%s/*.ipynb' % source_dir):
        size = os.path.getsize(path)
        name = os.path.basename(path)
        with open(path, 'rb') as stream:
            uploader.uploadFromFile(
                stream, size=size, name=name, parentType='folder',
                parent={'_id': ObjectId(target['_id'])}, user=user,
                mimeType='application/x-ipynb+json')
def getPartialUploads(self, uploadId, userId, parentId, assetstoreId,
                      minimumAge, includeUntracked, limit, offset, sort):
    """List unfinished uploads matching the given criteria.

    ``None`` criteria are ignored.  When ``includeUntracked`` is set and
    the page is not already full, untracked uploads reported by the
    assetstore are appended (all of them when ``limit`` is 0, otherwise
    only enough to fill the page).
    """
    criteria = {key: value for key, value in (
        ('uploadId', uploadId),
        ('userId', userId),
        ('assetstoreId', assetstoreId),
        ('parentId', parentId),
        ('minimumAge', minimumAge),
    ) if value is not None}
    uploads = list(Upload().list(
        filters=criteria, limit=limit, offset=offset, sort=sort))
    if includeUntracked and (limit == 0 or len(uploads) < limit):
        untracked = Upload().untrackedUploads('list', assetstoreId)
        if limit == 0:
            uploads += untracked
        else:
            uploads += untracked[:limit - len(uploads)]
    return uploads
def discardPartialUploads(self, uploadId, userId, parentId, assetstoreId,
                          minimumAge, includeUntracked):
    """Cancel every unfinished upload matching the given criteria.

    Returns the list of upload documents that were targeted, optionally
    extended with untracked uploads deleted by the assetstore.
    """
    criteria = {}
    for key, value in (('uploadId', uploadId),
                       ('userId', userId),
                       ('assetstoreId', assetstoreId),
                       ('parentId', parentId),
                       ('minimumAge', minimumAge)):
        if value is not None:
            criteria[key] = value
    # Materialize the cursor up front so we are not holding it open while
    # deleting the very records it iterates over.
    uploads = list(Upload().list(filters=criteria))
    for upload in uploads:
        try:
            Upload().cancelUpload(upload)
        except OSError as exc:
            if exc.errno == errno.EACCES:
                raise GirderException(
                    'Failed to delete upload.',
                    'girder.api.v1.system.delete-upload-failed')
            raise
    if includeUntracked:
        uploads += Upload().untrackedUploads('delete', assetstoreId)
    return uploads
def uploadFile(self, params):
    """
    Providing this works around a limitation in phantom that makes us
    unable to upload binary files, or at least ones that contain certain
    byte values. The path parameter should be provided relative to the
    root directory of the repository.
    """
    self.requireParams(('folderId', 'path'), params)
    if params['path'].startswith('${'):
        # relative to plugin e.g. ${my_plugin}/path
        end = params['path'].find('}')
        pluginName = params['path'][2:end]
        plugin = getPlugin(pluginName)
        if plugin is None:
            # Fix: report the requested plugin name; the original formatted
            # the (None) lookup result into the message instead.
            raise Exception('Invalid plugin %s.' % pluginName)
        root = os.path.dirname(inspect.getfile(plugin.__class__))
        path = root + params['path'][end + 1:]
    else:
        # assume relative to core package
        path = os.path.join(ROOT_DIR, params['path'])
    name = os.path.basename(path)
    folder = Folder().load(params['folderId'], force=True)
    upload = Upload().createUpload(
        user=self.getCurrentUser(), name=name, parentType='folder',
        parent=folder, size=os.path.getsize(path))
    with open(path, 'rb') as fd:
        file = Upload().handleChunk(upload, fd)
    return file
def testData(server, user, fsAssetstore, waitForProcessing):
    """Fixture: upload a PNG and a TIFF into the user's public folder,
    wait for both to be processed, and yield descriptors of the PNG upload
    for dependent tests."""
    folders = Folder().childFolders(user, 'user', user=user)
    publicFolders = [folder for folder in folders if folder['public']]
    assert publicFolders is not None
    name = 'Girder_Favicon.png'
    mimeType = 'image/png'
    item = Item().createItem(name, user, publicFolders[0])
    path = os.path.join(os.path.dirname(__file__), 'files', name)
    upload = Upload().createUpload(
        user, name, 'item', item, os.path.getsize(path), mimeType)
    with open(path, 'rb') as fd:
        uploadedFile = Upload().handleChunk(upload, fd)
    for key in ['assetstoreId', 'created', 'creatorId', 'itemId',
                'mimeType', 'name', 'size']:
        assert key in uploadedFile
    waitForProcessing(1)
    name2 = 'small.tiff'
    item2 = Item().createItem(name2, user, publicFolders[0])
    file2 = os.path.join(os.path.dirname(__file__), 'files', 'small.tiff')
    # Fix: open the TIFF in binary mode under a context manager; the
    # original opened it in text mode and never closed the handle.
    with open(file2, 'rb') as fp:
        Upload().uploadFromFile(
            fp, os.path.getsize(file2), name2, 'item', item2, user)
    waitForProcessing(2)
    yield {
        'item': item,
        'name': name,
        'mimeType': mimeType,
        'path': path
    }
def testProvenanceFileWithoutItem(self):
    """A file uploaded with no parent has no itemId and can still be
    renamed, saved, and removed."""
    payload = b'this is a test'
    file = Upload().uploadFromFile(
        obj=six.BytesIO(payload),
        size=len(payload),
        name='test',
        parentType=None,
        parent=None,
        user=self.admin)
    self.assertIsNone(file.get('itemId'))
    file['name'] = 'test2'
    file = File().save(file)
    File().remove(file)
def initUpload(self, parentType, parentId, name, size, mimeType, linkUrl,
               reference, assetstoreId):
    """
    Before any bytes of the actual file are sent, a request should be made to
    initialize the upload. This creates the temporary record of the
    forthcoming upload that will be passed in chunks to the readChunk method.
    If you pass a "linkUrl" parameter, it will make a link file in the
    designated parent.
    """
    user = self.getCurrentUser()
    parent = ModelImporter.model(parentType).load(
        id=parentId, user=user, level=AccessType.WRITE, exc=True)
    if linkUrl is not None:
        # Link files carry no payload; create and return the record directly.
        return self._model.filter(
            self._model.createLinkFile(
                url=linkUrl, parent=parent, name=name, parentType=parentType,
                creator=user, size=size, mimeType=mimeType), user)
    else:
        self.requireParams({'size': size})
        assetstore = None
        if assetstoreId:
            self.requireAdmin(
                user, message='You must be an admin to select a destination assetstore.')
            assetstore = Assetstore().load(assetstoreId)
        chunk = None
        if size > 0 and cherrypy.request.headers.get('Content-Length'):
            ct = cherrypy.request.body.content_type.value
            # Only treat the request body as the first chunk when cherrypy
            # has no registered processor for its content type (i.e. it is
            # raw data rather than a form/JSON body cherrypy would consume).
            if (ct not in cherrypy.request.body.processors
                    and ct.split('/', 1)[0] not in cherrypy.request.body.processors):
                chunk = RequestBodyStream(cherrypy.request.body)
        if chunk is not None and chunk.getSize() <= 0:
            chunk = None
        try:
            # TODO: This can be made more efficient by adding
            #    save=chunk is None
            # to the createUpload call parameters. However, since this is
            # a breaking change, that should be deferred until a major
            # version upgrade.
            upload = Upload().createUpload(
                user=user, name=name, parentType=parentType, parent=parent,
                size=size, mimeType=mimeType, reference=reference,
                assetstore=assetstore)
        except OSError as exc:
            if exc.errno == errno.EACCES:
                raise GirderException(
                    'Failed to create upload.',
                    'girder.api.v1.file.create-upload-failed')
            raise
        if upload['size'] > 0:
            if chunk:
                # First chunk arrived inline with the init request.
                return Upload().handleChunk(upload, chunk, filter=True, user=user)
            return upload
        else:
            # Zero-byte upload: nothing further to send, finalize now.
            return self._model.filter(Upload().finalizeUpload(upload), user)
def testProvenanceFileWithoutItem(self):
    """Uploading with parent=None yields an item-less file document that
    remains editable and removable."""
    contents = b'this is a test'
    stream = six.BytesIO(contents)
    file = Upload().uploadFromFile(
        obj=stream, size=len(contents), name='test',
        parentType=None, parent=None, user=self.admin)
    self.assertIsNone(file.get('itemId'))
    file['name'] = 'test2'
    file = File().save(file)
    File().remove(file)
def csv_detection_file(folder, detection_item, user):
    """
    Ensures that the detection item has a file which is a csv.
    Returns the file document.
    """
    file = Item().childFiles(detection_item)[0]
    if "csv" in file["exts"]:
        return file
    # NOTE(review): only the first dotted component of the name is kept,
    # so "a.b.json" becomes "a.csv" (not "a.b.csv") — confirm intended.
    filename = ".".join([file["name"].split(".")[:-1][0], "csv"])
    # Download the JSON track file fully into memory and parse it.
    track_dict = json.loads(b"".join(
        list(File().download(file, headers=False)())).decode())
    foldermeta = folder.get('meta', {})
    fps = None
    imageFiles = None
    source_type = foldermeta.get('type', None)
    if source_type == VideoType:
        fps = foldermeta.get('fps', None)
    elif source_type == ImageSequenceType:
        # Ordered frame-image names, used to map frame numbers to names.
        imageFiles = [
            f['name'] for f in Folder().childItems(folder, filters={
                "lowerName": {
                    "$regex": safeImageRegex
                }
            }).sort("lowerName")
        ]
    thresholds = folder.get("meta", {}).get("confidenceFilters", {})
    csv_string = "".join((line for line in viame.export_tracks_as_csv(
        track_dict,
        excludeBelowThreshold=True,
        thresholds=thresholds,
        filenames=imageFiles,
        fps=fps,
    )))
    csv_bytes = csv_string.encode()
    assetstore = Assetstore().findOne({"_id": file["assetstoreId"]})
    # Reuse an existing file with the same name if present; otherwise
    # create a new one in the same assetstore as the source file.
    new_file = File().findOne({"name": filename}) or File().createFile(
        user, detection_item, filename, len(csv_bytes), assetstore)
    upload = Upload().createUploadToFile(new_file, user, len(csv_bytes))
    new_file = Upload().handleChunk(upload, csv_bytes)
    return new_file
def _make_girder_file(assetstore, user, name, contents=b''):
    """Create a file named *name* in the user's Public folder with the
    given byte contents, appending the result to the enclosing-scope
    ``files`` list for later cleanup."""
    folder = Folder().find({
        'parentId': user['_id'],
        'name': 'Public'
    })[0]
    file = Upload().uploadFromFile(
        six.BytesIO(contents), size=len(contents), name=name,
        parentType='folder', parent=folder, user=user,
        assetstore=assetstore)
    if not contents:
        # NOTE(review): finalizeUpload normally takes the *upload* document;
        # passing the file doc here may rely on the zero-byte upload
        # short-circuit — confirm against Upload.finalizeUpload.
        file = Upload().finalizeUpload(file, assetstore)
    # `files` is captured from the enclosing scope.
    files.append(file)
    return file
def requestOffset(self, upload):
    """
    This should be called when resuming an interrupted upload. It will
    report the offset into the upload that should be used to resume.

    :param upload: the temp upload document being resumed.
    :returns: The offset in bytes that the client should use.
    """
    offset = Upload().requestOffset(upload)
    if isinstance(offset, six.integer_types):
        # Persist the verified offset so subsequent chunk requests are
        # validated against it.
        upload['received'] = offset
        Upload().save(upload)
        return {'offset': offset}
    else:
        # Non-integer results are error/instruction documents produced by
        # the assetstore adapter; pass them through unchanged.
        return offset
def testDicomWithBinaryValues(self):
    """A DICOM file whose metadata parsing raises should still upload and
    be post-processed into item['dicom']."""
    # One of the test files in the pydicom module will throw an IOError
    # when parsing metadata. We should work around that and still be able
    # to import the file
    samplePath = os.path.join(os.path.dirname(os.path.abspath(
        pydicom.__file__)), 'data', 'test_files', 'OBXXXX1A.dcm')
    admin, user = self.users
    # Create a collection, folder, and item
    collection = Collection().createCollection('collection5', admin, public=True)
    folder = Folder().createFolder(collection, 'folder5',
                                   parentType='collection', public=True)
    item = Item().createItem('item5', admin, folder)
    # Upload this dicom file
    with open(samplePath, 'rb') as fp, \
            _EventHelper('dicom_viewer.upload.success') as helper:
        dcmFile = Upload().uploadFromFile(
            obj=fp,
            size=os.path.getsize(samplePath),
            name=os.path.basename(samplePath),
            parentType='item',
            parent=item,
            mimeType='application/dicom',
            user=user
        )
        self.assertIsNotNone(dcmFile)
        # Wait for handler success event
        handled = helper.wait()
        self.assertTrue(handled)
    # Check if the 'dicomItem' is well processed
    dicomItem = Item().load(item['_id'], force=True)
    self.assertIn('dicom', dicomItem)
    self.assertHasKeys(dicomItem['dicom'], ['meta', 'files'])
def testDownloadLogging(server, recordModel, freshLog, admin, fsAssetstore): recordModel.collection.remove({}) # Clear existing records folder = Folder().find({'parentId': admin['_id'], 'name': 'Public'})[0] file = Upload().uploadFromFile(six.BytesIO(b'hello'), size=5, name='test', parentType='folder', parent=folder, user=admin, assetstore=fsAssetstore) recordModel.collection.remove({}) # Clear existing records File().download(file, headers=False, offset=2, endByte=4) records = recordModel.find() assert records.count() == 1 record = records[0] assert record['ip'] == '127.0.0.1' assert record['type'] == 'file.download' assert record['details']['fileId'] == file['_id'] assert record['details']['startByte'] == 2 assert record['details']['endByte'] == 4 assert isinstance(record['when'], datetime.datetime)
def test_archiveEndpoints(server, admin, fsAssetstore):
    """Exercise the archive plugin endpoints: list entries inside an
    uploaded tar.bz2 and download a single member, checking its length."""
    # See comments above
    assert 'girder_archive_access' in loadedPlugins()
    testDir = os.path.dirname(os.path.realpath(__file__))
    filePath = os.path.join(testDir, 'TCIA.image.tar.bz2')
    name = os.path.basename(filePath)
    publicFolder = Folder().find({
        'parentId': admin['_id'],
        'name': 'Public',
    })[0]
    # Fix: close the upload source when done; the original leaked the
    # open file handle.
    with open(filePath, 'rb') as fp:
        file = Upload().uploadFromFile(
            fp, os.path.getsize(filePath), name, parentType='folder',
            parent=publicFolder, user=admin, assetstore=fsAssetstore)
    resp = server.request(path='/file/%s/archive' % file['_id'], user=admin)
    assert len(resp.json['names']) == 10
    resp = server.request(path='/file/%s/archive/download' % file['_id'],
                          params={'path': '002.dcm'}, user=admin,
                          isJson=False)
    datalen = 0
    for chunk in resp.body:
        if not isinstance(chunk, six.binary_type):
            chunk = chunk.encode('utf8')
        datalen += len(chunk)
    assert datalen == 527192
def saveSuperpixelMarkup(self, annotation, featureId, superpixelValues):
    """Encode per-superpixel markup values as a PNG mask, attach it as a
    file to the annotation, and record the file id and presence flag
    under ``annotation['markups'][featureId]``."""
    image = Image().load(annotation['imageId'], force=True, exc=True)
    annotator = User().load(annotation['userId'], force=True, exc=True)
    markupMask = self._superpixelsToMaskMarkup(superpixelValues, image)
    markupMaskEncodedStream = ScikitSegmentationHelper.writeImage(
        markupMask, 'png')
    markupFile = Upload().uploadFromFile(
        obj=markupMaskEncodedStream,
        size=len(markupMaskEncodedStream.getvalue()),
        name='annotation_%s_%s.png' % (
            annotation['_id'],
            # Rename features to ensure the file is downloadable on Windows
            featureId.replace(' : ', ' ; ').replace('/', ',')),
        # TODO: change this once a bug in upstream Girder is fixed
        parentType='annotation', parent=annotation, attachParent=True,
        user=annotator, mimeType='image/png')
    # Store the raw values alongside the rendered mask for later retrieval.
    markupFile['superpixels'] = superpixelValues
    # TODO: remove this once a bug in upstream Girder is fixed
    markupFile['attachedToType'] = ['annotation', 'isic_archive']
    markupFile = File().save(markupFile)
    annotation['markups'][featureId] = {
        'fileId': markupFile['_id'],
        'present': bool(markupMask.any())
    }
    return Annotation().save(annotation)
def wrapper(self, event):
    """Route-event wrapper: resolve the virtual-object path referenced by
    the request and invoke ``func`` when the path is rooted in a mapped
    folder.

    ``level`` and ``func`` are captured from the enclosing decorator scope.
    """
    params = event.info.get("params", {})
    if "uploadId" in params:
        # Chunk/finalize requests only carry the upload id; recover the
        # parent from the stored upload record.
        upload = Upload().load(params["uploadId"])
        parent_id = str(upload["parentId"])
        parent_type = upload["parentType"]
    else:
        parent_id = params.get("parentId")
        parent_type = params.get("parentType") or "folder"
    obj_id = event.info.get("id")
    folder_id = params.get("folderId")
    item_id = params.get("itemId")
    any_parent_id = parent_id or folder_id or item_id
    path = None
    # Resolve the path from whichever identifier the request supplied,
    # in priority order: direct object id, any "wtlocal:"-prefixed parent,
    # then a plain folder parent.
    if obj_id:
        path, root_id = VirtualObject.path_from_id(obj_id)
    elif any_parent_id and any_parent_id.startswith("wtlocal:"):
        path, root_id = VirtualObject.path_from_id(any_parent_id)
    elif (parent_id and parent_type == "folder") or folder_id:
        path, root_id = VirtualObject.path_from_id(parent_id or folder_id)
    if path:
        path = pathlib.Path(path)
        if path.is_absolute():
            user = self.getCurrentUser()
            root = Folder().load(root_id, level=level, user=user, exc=True)
            func(self, event, path, root, user=user)
def saveElementAsFile(self, annotation, entries):
    """
    If an element has a large points or values array, save that array to
    an attached file.

    :param annotation: the parent annotation.
    :param entries: the database entries document. Modified.
    """
    item = Item().load(annotation['itemId'], force=True)
    # Copy the element so the caller's original document is not mutated.
    element = entries[0]['element'].copy()
    entries[0]['element'] = element
    key = 'points' if 'points' in element else 'values'
    # Use the highest protocol supported by all python versions we support
    data = pickle.dumps(element.pop(key), protocol=4)
    # NOTE(review): the payload is pickled binary, not JSON, despite the
    # declared mimeType — confirm downstream readers expect pickle.
    elementFile = Upload().uploadFromFile(
        io.BytesIO(data), size=len(data), name='_annotationElementData',
        parentType='item', parent=item, user=None,
        mimeType='application/json', attachParent=True)
    entries[0]['datafile'] = {
        'key': key,
        'fileId': elementFile['_id'],
    }
def move_to_curated_assetstore(event):
    """Move every file referenced by an approved dataset (image, structure,
    reconstruction, and projection files) into the curated assetstore.

    ``event.info`` must contain 'dataset' and 'approver'.
    """
    info = event.info
    dataset = info['dataset']
    approver = info['approver']
    to_move = [dataset['imageFileId']]

    def _add(key, doc):
        # Collect the file id only when the document actually has one.
        if key in doc:
            to_move.append(doc[key])

    for s in Structure().find(dataset['_id']):
        for k in ['cjsonFileId', 'xyzFileId', 'cmlFileId']:
            _add(k, s)
    for r in Reconstruction().find(dataset['_id']):
        _add('emdFileId', r)
    for p in Projection().find(dataset['_id']):
        # Fix: the original referenced the reconstruction loop variable `r`
        # here, so projection EMD files were never collected.
        _add('emdFileId', p)
    assetstore = get_currated_assetstore()
    for file_id in to_move:
        file = File().load(file_id, force=True)
        Upload().moveFileToAssetstore(file, approver, assetstore)
def read_chunk(self, event, path, root, user=None):
    """Accept one chunk of an upload targeted at a virtual object, store it
    via the local chunk handler, and short-circuit the default response.

    :param event: the REST event; params carry uploadId/offset/chunk.
    :param path: resolved filesystem path of the virtual object.
    :param root: the mapped root folder document.
    :param user: acting user; resolved from the session when omitted.
    """
    params = event.info["params"]
    if "chunk" in params:
        chunk = params["chunk"]
        if isinstance(chunk, cherrypy._cpreqbody.Part):
            # Seek is the only obvious way to get the length of the part
            chunk.file.seek(0, os.SEEK_END)
            size = chunk.file.tell()
            chunk.file.seek(0, os.SEEK_SET)
            chunk = RequestBodyStream(chunk.file, size=size)
    else:
        chunk = RequestBodyStream(cherrypy.request.body)
    if not user:
        user = self.getCurrentUser()
    offset = int(params.get("offset", 0))
    upload = Upload().load(params["uploadId"])
    if upload["userId"] != user["_id"]:
        raise AccessException("You did not initiate this upload.")
    if upload["received"] != offset:
        # Client and server disagree on how many bytes have arrived.
        raise RestException(
            "Server has received %s bytes, but client sent offset %s."
            % (upload["received"], offset)
        )
    try:
        fobj = self._handle_chunk(upload, chunk, filter=True, user=user)
        event.preventDefault().addResponse(fobj)
    except IOError as exc:
        if exc.errno == errno.EACCES:
            raise Exception("Failed to store upload.")
        raise
def testAutoComputeHashes(self):
    """Enabling AUTO_COMPUTE must asynchronously add a digest for every
    supported algorithm to newly uploaded files."""
    with self.assertRaises(ValidationException):
        Setting().set(hashsum_download.PluginSettings.AUTO_COMPUTE, 'bad')
    # Temporarily restrict the supported algorithms so the test stays fast.
    old = hashsum_download.SUPPORTED_ALGORITHMS
    hashsum_download.SUPPORTED_ALGORITHMS = {'sha512', 'sha256'}
    Setting().set(hashsum_download.PluginSettings.AUTO_COMPUTE, True)
    file = Upload().uploadFromFile(
        obj=io.BytesIO(self.userData), size=len(self.userData),
        name='Another file', parentType='folder',
        parent=self.privateFolder, user=self.user)
    # Poll up to 15 seconds for the async hash job to annotate the file.
    start = time.time()
    while time.time() < start + 15:
        file = File().load(file['_id'], force=True)
        if 'sha256' in file:
            break
        time.sleep(0.2)
    expected = hashlib.sha256()
    expected.update(self.userData)
    self.assertIn('sha256', file)
    self.assertEqual(file['sha256'], expected.hexdigest())
    expected = hashlib.sha512()
    expected.update(self.userData)
    self.assertIn('sha512', file)
    self.assertEqual(file['sha512'], expected.hexdigest())
    # Restore the module-level algorithm set for other tests.
    hashsum_download.SUPPORTED_ALGORITHMS = old
def updateFileContents(self, file, size, reference, assetstoreId):
    """Start an upload that replaces the contents of an existing file.

    Selecting a destination assetstore requires admin privileges.  A
    zero-byte replacement finalizes immediately; otherwise the upload
    record is returned so the client can send chunks.
    """
    user = self.getCurrentUser()
    assetstore = None
    if assetstoreId:
        self.requireAdmin(
            user,
            message='You must be an admin to select a destination assetstore.')
        assetstore = Assetstore().load(assetstoreId)
    # Create a new upload record into the existing file
    upload = Upload().createUploadToFile(
        file=file, user=user, size=size, reference=reference,
        assetstore=assetstore)
    if upload['size'] > 0:
        return upload
    # Nothing further to send for an empty file, finalize now.
    return self._model.filter(Upload().finalizeUpload(upload), user)
def moveFileToAssetstore(self, file, assetstore, progress):
    """Move a file's data into another assetstore, reporting progress to
    the current user via a progress context."""
    user = self.getCurrentUser()
    title = 'Moving file "%s" to assetstore "%s"' % (
        file['name'], assetstore['name'])
    with ProgressContext(
            progress, user=user, title=title,
            total=file['size']) as progressCtx:
        return Upload().moveFileToAssetstore(
            file=file, user=user, assetstore=assetstore,
            progress=progressCtx)
def readChunk(self, upload, offset, params):
    """
    After the temporary upload record has been created (see initUpload),
    the bytes themselves should be passed up in ordered chunks. The user
    must remain logged in when passing each chunk, to authenticate that
    the writer of the chunk is the same as the person who initiated the
    upload. The passed offset is a verification mechanism for ensuring the
    server and client agree on the number of bytes sent/received.
    """
    if 'chunk' in params:
        # If we see the undocumented "chunk" query string parameter, then we
        # abort trying to read the body, use the query string value as
        # chunk, and pass it through to Upload().handleChunk. This case is
        # used by the direct S3 upload process.
        chunk = params['chunk']
    else:
        chunk = RequestBodyStream(cherrypy.request.body)
    user = self.getCurrentUser()
    if upload['userId'] != user['_id']:
        raise AccessException('You did not initiate this upload.')
    if upload['received'] != offset:
        # Client and server disagree on how many bytes have arrived.
        raise RestException(
            'Server has received %s bytes, but client sent offset %s.' % (
                upload['received'], offset))
    try:
        return Upload().handleChunk(upload, chunk, filter=True, user=user)
    except IOError as exc:
        if exc.errno == errno.EACCES:
            raise Exception('Failed to store upload.')
        raise
def _savePDF(event):
    """
    Extract PDF from submission ZIP file and save to a subfolder of the
    submission folder. Event info should contain the following fields:
    - submission: The submission document.
    - folder: The submission folder document.
    - file: The submission ZIP file document.
    """
    submission = event.info['submission']
    folder = event.info['folder']
    file = event.info['file']
    # Read submission ZIP file data into an in-memory buffer.
    # Reading into memory avoids managing temporary files and directories.
    zipData = _readFile(file)
    # Parse ZIP data to get PDF file name and data
    try:
        with zipfile.ZipFile(zipData) as zipFile:
            pdfItems = [
                zipItem for zipItem in zipFile.infolist()
                if _isPDF(zipItem)
            ]
            # Fix: the original logged "multiple PDF files" even when the
            # archive contained none; report the two cases distinctly.
            if not pdfItems:
                logger.warning(
                    'Submission ZIP file contains no PDF files (FileId=%s)'
                    % file['_id'])
                return
            if len(pdfItems) > 1:
                logger.warning(
                    'Submission ZIP file contains multiple PDF files (FileId=%s)'
                    % file['_id'])
                return
            pdfItem = pdfItems[0]
            pdfFileName = os.path.basename(pdfItem.filename)
            pdfData = zipFile.read(pdfItem)
            if not pdfData:
                logger.warning(
                    'Submission ZIP file contains empty PDF file (FileId=%s)'
                    % file['_id'])
                return
    except zipfile.BadZipfile:
        logger.warning('Failed to process submission ZIP file (FileId=%s)'
                       % file['_id'])
        return
    # Save PDF file to a subfolder of the submission folder
    user = User().load(submission['creatorId'], force=True)
    abstractFolder = Folder().createFolder(parent=folder, name='Abstract',
                                           creator=user)
    abstractFile = Upload().uploadFromFile(
        obj=io.BytesIO(pdfData), size=len(pdfData), name=pdfFileName,
        parentType='folder', parent=abstractFolder, user=user,
        mimeType='application/pdf')
    # Set submission documentation URL
    submission['documentationUrl'] = \
        'https://challenge.kitware.com/api/v1/file/%s/download?contentDisposition=inline' % \
        abstractFile['_id']
    ModelImporter.model('submission', 'covalic').save(submission)
def createImage(self, imageDataStream, imageDataSize, originalName,
                parentFolder, creator, dataset, batch):
    """Create a new ISIC image item from a raw image data stream, upload
    the original file, generate the large-image and superpixel artifacts,
    and record pixel dimensions in the image metadata.

    Returns the saved image document, or None for an empty stream.
    """
    if not imageDataSize:
        # Upload().uploadFromFile will do nothing if the image is empty
        # See: https://github.com/girder/girder/issues/2773
        return
    # Allocate the next sequential ISIC id and persist the new maximum.
    newIsicId = Setting().get(PluginSettings.MAX_ISIC_ID) + 1
    image = self.createItem(
        name='ISIC_%07d' % newIsicId,
        creator=creator,
        folder=parentFolder,
        description=''
    )
    Setting().set(PluginSettings.MAX_ISIC_ID, newIsicId)
    image['privateMeta'] = {
        'originalFilename': originalName
    }
    image['meta'] = {
        'acquisition': {},
        'clinical': {},
        'unstructured': {},
        'unstructuredExif': {},
        'tags': [],
        'datasetId': dataset['_id'],
        'batchId': batch['_id']
    }
    image = Image().save(image)
    originalFile = Upload().uploadFromFile(
        obj=imageDataStream,
        size=imageDataSize,
        name='%s%s' % (
            image['name'],
            os.path.splitext(originalName)[1].lower()
        ),
        parentType='item',
        parent=image,
        user=creator,
        mimeType=mimetypes.guess_type(originalName)[0],
    )
    # reload image, since its 'size' has changed in the database
    image = self.load(image['_id'], force=True, exc=True)
    # this synchronously adds image['largeImage']['originalId'] and allows
    # the subsequent use of Image().originalFile and Image().imageData
    self._generateLargeimage(image, originalFile)
    self._generateSuperpixels(image)
    # TODO: copy license from dataset to image
    imageData = self.imageData(image)
    image['meta']['acquisition']['pixelsY'] = imageData.shape[0]
    image['meta']['acquisition']['pixelsX'] = imageData.shape[1]
    image = self.save(image)
    return image
def createNotebooks(event):
    """On user creation, copy the bundled example notebooks into the new
    user's Private/oc/notebooks folder."""
    # If there is no current asset store, just return
    try:
        Assetstore().getCurrent()
    except GirderException:
        print(
            TerminalColor.warning('WARNING: no current asset store. '
                                  'Notebook will not be created.'))
        return
    user = event.info
    folder_model = Folder()
    result = lookUpPath('user/%s/Private' % user['login'], force=True)
    private_folder = result['document']
    oc_folder = folder_model.createFolder(private_folder, 'oc',
                                          parentType='folder', creator=user,
                                          public=True, reuseExisting=True)
    notebook_folder = folder_model.createFolder(
        oc_folder, 'notebooks', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    # Upload every bundled .ipynb located alongside this module.
    notebooks_dir = os.path.join(os.path.dirname(__file__), 'notebooks')
    upload_model = Upload()
    for file in glob.glob('%s/*.ipynb' % notebooks_dir):
        size = os.path.getsize(file)
        name = os.path.basename(file)
        with open(file, 'rb') as fp:
            upload_model.uploadFromFile(
                fp, size=size, name=name, parentType='folder',
                parent={'_id': ObjectId(notebook_folder['_id'])},
                user=user, mimeType='application/x-ipynb+json')
def finalizeUpload(self, upload):
    """Complete an upload once all expected bytes have arrived.

    Only the user who started the upload may finalize it.  A short upload
    is rejected unless the record declares an alternate 'behavior'.
    """
    user = self.getCurrentUser()
    if upload['userId'] != user['_id']:
        raise AccessException('You did not initiate this upload.')
    incomplete = upload['received'] != upload['size']
    if incomplete and 'behavior' not in upload:
        raise RestException(
            'Server has only received %s bytes, but the file should be %s bytes.' %
            (upload['received'], upload['size']))
    file = Upload().finalizeUpload(upload)
    extraKeys = file.get('additionalFinalizeKeys', ())
    return self._model.filter(file, user, additionalKeys=extraKeys)
def cancelUpload(self, upload):
    """Abort an in-progress upload; admins may cancel any user's upload."""
    user = self.getCurrentUser()
    isOwner = upload['userId'] == user['_id']
    if not (isOwner or user['admin']):
        raise AccessException('You did not initiate this upload.')
    Upload().cancelUpload(upload)
    return {'message': 'Upload canceled.'}
def finalizeUpload(self, upload):
    """Finish an upload whose bytes have all been received.

    Raises AccessException for any user other than the initiator, and
    RestException when fewer bytes arrived than promised (unless the
    upload declares an alternate 'behavior').
    """
    user = self.getCurrentUser()
    if upload['userId'] != user['_id']:
        raise AccessException('You did not initiate this upload.')
    if upload['received'] != upload['size'] and 'behavior' not in upload:
        raise RestException(
            'Server has only received %s bytes, but the file should be %s bytes.' %
            (upload['received'], upload['size']))
    file = Upload().finalizeUpload(upload)
    return self._model.filter(
        file, user, additionalKeys=file.get('additionalFinalizeKeys', ()))
def uploadFile(filePath, user, assetstore, folderName='Public', name=None):
    """Upload a local file into the named folder of *user*.

    :param filePath: path of the file to upload.
    :param user: the owner of the destination folder and of the upload.
    :param assetstore: assetstore to store the data in.
    :param folderName: name of the user's folder to upload into.
    :param name: name for the new file; defaults to the file's basename.
    :returns: the created file document.
    """
    if name is None:
        name = os.path.basename(filePath)
    folder = namedFolder(user, folderName)
    # Fix: open via a context manager so the descriptor is closed even if
    # the upload fails (the original leaked the handle).
    with open(filePath, 'rb') as fp:
        file = Upload().uploadFromFile(
            fp, os.path.getsize(filePath), name, parentType='folder',
            parent=folder, user=user, assetstore=assetstore)
    return file
def get_sample_data(adminUser, collName='Sample Images', folderName='Images'):
    """
    As needed, download sample data.

    :param adminUser: a user to create and modify collections and folders.
    :param collName: the collection name where the data will be added.
    :param folderName: the folder name where the data will be added.
    :returns: the folder where the sample data is located.
    """
    try:
        import girder_client
    except ImportError:
        logger.error('girder_client is unavailable. Cannot get sample data.')
        return
    from girder.models.item import Item
    from girder.models.upload import Upload
    from girder_large_image.models.image_item import ImageItem

    folder = get_collection_folder(adminUser, collName, folderName)
    remote = girder_client.GirderClient(
        apiUrl='https://data.kitware.com/api/v1')
    remoteFolder = remote.resourceLookup(
        '/collection/HistomicsTK/Deployment test images')
    sampleItems = []
    for remoteItem in remote.listItem(remoteFolder['_id']):
        item = Item().findOne({
            'folderId': folder['_id'],
            'name': remoteItem['name']
        })
        if item and len(list(Item().childFiles(item, limit=1))):
            # Already present with at least one file; nothing to download.
            sampleItems.append(item)
            continue
        if not item:
            item = Item().createItem(remoteItem['name'], creator=adminUser,
                                     folder=folder)
        for remoteFile in remote.listFile(remoteItem['_id']):
            # Fix: download into a named temporary path, upload it, then
            # always remove it.  The original closed the NamedTemporaryFile
            # inside its own context (deleting it), let the download
            # re-create the file without any cleanup, and leaked the open
            # read handle.
            tf = tempfile.NamedTemporaryFile(delete=False)
            fileName = tf.name
            tf.close()
            try:
                logger.info('Downloading %s', remoteFile['name'])
                remote.downloadFile(remoteFile['_id'], fileName)
                with open(fileName, 'rb') as fp:
                    Upload().uploadFromFile(
                        fp, os.path.getsize(fileName),
                        name=remoteItem['name'], parentType='item',
                        parent=item, user=adminUser)
            finally:
                os.unlink(fileName)
        sampleItems.append(item)
    for item in sampleItems:
        if 'largeImage' not in item:
            logger.info('Making large_item %s', item['name'])
            try:
                ImageItem().createImageItem(item, createJob=False)
            except Exception:
                pass
    logger.info('done')
    return folder
def override(event):
    # Override thumbnail creation -- just grab the first 4 bytes
    # (`self` is captured from the enclosing test-method scope.)
    self.assertIn('file', event.info)
    streamFn = event.info['streamFn']
    stream = streamFn()
    contents = b''.join(stream())
    uploadModel = Upload()
    # Create a parentless 4-byte file holding the truncated content.
    upload = uploadModel.createUpload(
        user=self.admin, name='magic', parentType=None, parent=None, size=4)
    thumbnail = uploadModel.handleChunk(upload, contents[:4])
    # Respond with the truncated file and suppress the default thumbnailer.
    event.addResponse({
        'file': thumbnail
    })
    event.preventDefault()
def __init__(self):
    """Set up the 'label' REST resource and its model accessors."""
    super().__init__()
    self.resourceName = 'label'
    # Model singletons used by the route handlers.
    self.coll_m = Collection()
    self.file_m = File()
    self.folder_m = Folder()
    self.item_m = Item()
    self.upload_m = Upload()
    self.asset_m = Assetstore()
    self.setupRoutes()
class LabelResource(Resource):
    """REST resource for creating, reading, and updating JSON label files."""

    def __init__(self):
        super().__init__()
        self.resourceName = 'label'
        # Girder model singletons used by the route handlers.
        self.coll_m = Collection()
        self.file_m = File()
        self.folder_m = Folder()
        self.item_m = Item()
        self.upload_m = Upload()
        self.asset_m = Assetstore()
        self.setupRoutes()

    def setupRoutes(self):
        """Register the REST routes handled by this resource."""
        self.route('GET', (), handler=self.getLabelList)
        self.route('GET', (':label_id',), self.getLabel)
        self.route('GET', ('meta',), self.getLabelMeta)
        self.route('GET', ('create',), self.createLabelFile)
        self.route('GET', ('by_name',), self.getLabelByName)
        self.route('POST', (), self.postLabel)

    def createNewFile(self, folder, file_name):
        """Create an empty JSON label file (with its backing item) in *folder*.

        :param folder: the parent folder document.
        :param file_name: name for both the new item and the new file.
        :returns: the created file document (size 0).
        """
        item = self.item_m.createItem(file_name,
                                      creator=self.getCurrentUser(),
                                      folder=folder,
                                      description='label file',
                                      reuseExisting=False)
        file = self.file_m.createFile(size=0, item=item, name=file_name,
                                      creator=self.getCurrentUser(),
                                      assetstore=self.asset_m.getCurrent(),
                                      mimeType="application/json")
        return file

    def copy(self, srcFile, destFile):
        """Copy the contents of *srcFile* into *destFile* via an upload.

        :returns: the finished upload document.
        """
        upload = self.upload_m.createUploadToFile(
            destFile, self.getCurrentUser(), srcFile['size'])
        # NOTE(review): the chunk stream is sized with destFile['size'],
        # which is 0 for a file just produced by createNewFile, while the
        # upload above was sized with srcFile['size'] -- confirm this
        # mismatch is intentional before changing it.
        self.upload_m.handleChunk(
            upload=upload,
            chunk=RequestBodyStream(self.file_m.open(srcFile),
                                    size=destFile['size']),
            user=self.getCurrentUser())
        return upload

    @access.public
    @autoDescribeRoute(
        Description('Get label list'))
    @rest.rawResponse
    def getLabelList(self):
        """Return, as JSON, all JSON files in the first visible collection."""
        printOk('getLabelsList() was called!')
        try:
            collection = list(self.coll_m.list(user=self.getCurrentUser(),
                                               offset=0, limit=1))[0]
            files = self.coll_m.fileList(collection,
                                         user=self.getCurrentUser(),
                                         data=False, includeMetadata=True,
                                         mimeFilter=['application/json'])
            files = list(files)
            cherrypy.response.headers["Content-Type"] = "application/json"
            return dumps(files)
        except Exception:
            # Narrowed from a bare except; pass the formatted traceback text
            # instead of the traceback.print_exc function object.
            printFail(traceback.format_exc())

    @staticmethod
    def getOwnerId(folder):
        """Return the id of the first ADMIN user on *folder*, or None."""
        aclList = Folder().getFullAccessList(folder)
        for acl in aclList['users']:
            if acl['level'] == AccessType.ADMIN:
                return str(acl['id'])
        return None

    def getConfigFolder(self, label_folder_id):
        """Load the config folder referenced by a label folder's metadata."""
        label_folder = Folder().load(label_folder_id,
                                     user=self.getCurrentUser(),
                                     level=AccessType.READ)
        ownerId = self.getOwnerId(label_folder)
        # assumes the label folder's metadata maps the owner id to the
        # config folder id -- raises if that key is absent; callers catch.
        config_folder = self.folder_m.load(label_folder['meta'][ownerId],
                                           level=AccessType.READ,
                                           user=self.getCurrentUser())
        return config_folder

    def findConfig(self, folder_id):
        """Return the 'config.json' file for *folder_id*, or None."""
        folder = self.getConfigFolder(folder_id)
        printOk2("Config folder {}".format(folder))
        files = self.folder_m.fileList(folder, self.getCurrentUser(),
                                       data=False)
        for file_path, file in files:
            printOk(file)
            if file['name'] == "config.json":
                return file

    def __findFile(self, folder, file_name):
        """Return the first file of the item named *file_name*, or None."""
        item = list(self.item_m.find({'folderId': folder['_id'],
                                      'name': file_name}).limit(1))
        if not item:
            return None
        item = item[0]
        file = list(self.file_m.find({'itemId': item['_id']}).limit(1))
        if not file:
            return None
        return file[0]

    @access.public
    @autoDescribeRoute(
        Description('Create a new label file if it doesnt exist')
        .param('file_name', 'label file name')
        .param('folder_id', 'the parent folder id'))
    @rest.rawResponse
    def createLabelFile(self, file_name, folder_id):
        """Return the label file id, creating it from config.json if needed."""
        try:
            folder = self.folder_m.load(folder_id,
                                        user=self.getCurrentUser(),
                                        level=AccessType.WRITE)
            file = self.__findFile(folder, file_name)
            if not file:
                file = self.createNewFile(folder, file_name)
                config_file = self.findConfig(folder_id)
                if not config_file:
                    printFail("No config file found")
                    return errorMessage("No config file found")
                else:
                    res = self.copy(config_file, file)
                    return dumps({"label_id": res['fileId']})
            return dumps({"label_id": file['_id']})
        except Exception:
            printFail(traceback.format_exc())
            cherrypy.response.status = 500

    @access.public
    @autoDescribeRoute(
        Description('Get labels by file_name')
        .param('file_name', 'label file name')
        .param('folder_id', 'the parent folder id'))
    @rest.rawResponse
    def getLabelByName(self, file_name, folder_id):
        """Stream the label file matching *file_name*, or '{}' if absent."""
        try:
            folder = self.folder_m.load(folder_id,
                                        user=self.getCurrentUser(),
                                        level=AccessType.READ)
            file = self.__findFile(folder, file_name)
            cherrypy.response.headers["Content-Type"] = "application/json"
            if file:
                return self.file_m.download(file)
            else:
                return dumps({})
        except Exception:
            printFail(traceback.format_exc())
            cherrypy.response.status = 500

    @access.public
    @autoDescribeRoute(
        Description('Get label by id')
        .param('label_id', 'label file id'))
    @rest.rawResponse
    def getLabel(self, label_id):
        """Stream the label file's contents by file id."""
        try:
            file = self.file_m.load(label_id, level=AccessType.READ,
                                    user=self.getCurrentUser())
            printOk2(file)
            cherrypy.response.headers["Content-Type"] = "application/json"
            return self.file_m.download(file)
        except Exception:
            # Unknown slug
            printFail(traceback.format_exc())
            cherrypy.response.status = 404

    @access.public
    @autoDescribeRoute(
        Description('Get label by id')
        .param('label_id', 'label file id'))
    def getLabelMeta(self, label_id):
        """Return the label file's document (metadata) as JSON."""
        try:
            file = self.file_m.load(label_id, level=AccessType.READ,
                                    user=self.getCurrentUser())
            cherrypy.response.headers["Content-Type"] = "application/json"
            return dumps(file)
        except Exception:
            # Unknown slug
            printFail(traceback.format_exc())
            cherrypy.response.status = 404

    @access.public
    @autoDescribeRoute(
        Description('Post label by id')
        .param('label_id', 'label file id'))
    @rest.rawResponse
    def postLabel(self, label_id, params):
        """Overwrite the label file's contents with the posted params."""
        try:
            file = self.file_m.load(label_id, level=AccessType.WRITE,
                                    user=self.getCurrentUser())
            cherrypy.response.headers["Content-Type"] = "application/json"
            params['labels'] = json.loads(params['labels'])
            data = json.dumps(params, indent=2, sort_keys=True)
            upload = writeData(self.getCurrentUser(), file, data)
            printOk2(file)
            printOk(upload)
            return dumps(upload)
        except Exception:
            # Unknown slug
            printFail(traceback.format_exc())
            cherrypy.response.status = 404

    @access.public
    @autoDescribeRoute(
        Description('Post label by id')
        .param('label_id', 'label file id'))
    @rest.rawResponse
    def strokeToOutline(self, strokes):
        # TODO: not implemented and not registered in setupRoutes().
        pass