def adjustDBUri(cls, uri, *args, **kwargs):
    """
    Adjust a uri to match the form sqlite requires.  This can convert a
    Girder resource path to an appropriate physical file reference.

    :param uri: the uri to adjust.
    :returns: the adjusted uri
    """
    uri = super(SqliteSAConnector, cls).adjustDBUri(uri, *args, **kwargs)
    if '://' in uri:
        # SQLAlchemy's sqlite dialect expects four slashes for an absolute
        # file path (sqlite:////abs/path), so normalize to that form.
        uri = uri.split('://', 1)[0] + ':////' + uri.split('://', 1)[1].lstrip('/')
        # NOTE(review): the base adjustDBUri is invoked a second time on the
        # rewritten uri -- confirm the base implementation is idempotent.
        uri = super(SqliteSAConnector, cls).adjustDBUri(uri, *args, **kwargs)
    # If we have a Girder resource path, convert it.  If this looks like a
    # file but doesn't exist, check if it is a resource path.  If this is
    # not a resource path to a file that we can read directly, treat this
    # the same as a missing file.
    if (':///' in uri and not os.path.exists(uri.split(':///', 1)[1])):
        resourcepath = path_util.lookUpPath(
            uri.split(':///', 1)[1], test=True, filter=False, force=True)
        if resourcepath and resourcepath['model'] == 'file':
            file = resourcepath['document']
            adapter = File().getAssetstoreAdapter(file)
            # Only assetstore adapters that expose a direct filesystem path
            # (fullPath) can back a sqlite database.
            if hasattr(adapter, 'fullPath'):
                filepath = adapter.fullPath(file)
                if os.path.exists(filepath):
                    uri = uri.split(':///', 1)[0] + ':///' + filepath
                    log.debug('Using Girder file for SQLite database')
    return uri
def open(self, path, flags, attr):
    """Open the Girder file at ``path`` and return an SFTP file handle."""
    resource = lookUpPath(path, filter=False, user=self.server.girderUser)
    # Only file resources can be opened; anything else is reported missing.
    if resource['model'] == 'file':
        return _FileHandle(resource['document'])
    return paramiko.SFTP_NO_SUCH_FILE
def createData(admin, user):
    """
    Build a small collection/folder/item/file hierarchy for tests.

    Uses this source file itself as the upload payload.

    :param admin: admin user document that owns the created resources.
    :param user: ordinary user document (not used directly here).
    :returns: dict with the created collection, folders, items, files, and
        the byte length of the uploaded test file.
    """
    collection = Collection().createCollection('Test Collection', admin)
    collPrivateFolder = Folder().createFolder(
        collection, 'Private', parentType='collection', public=False,
        creator=admin)
    adminPublicFolder = path_utils.lookUpPath(
        '/user/admin/Public', filter=False, force=True)['document']
    adminSubFolder = Folder().createFolder(
        adminPublicFolder, 'Folder 1', creator=admin)
    item1 = Item().createItem('Item 1', admin, adminPublicFolder)
    item2 = Item().createItem('Item 2', admin, adminPublicFolder)
    item3 = Item().createItem('It\\em/3', admin, adminSubFolder)
    item4 = Item().createItem('Item 4', admin, collPrivateFolder)
    item5 = Item().createItem('Item 5', admin, collPrivateFolder)
    # just use this file itself as a test file
    filepath = os.path.realpath(__file__)
    filelen = os.path.getsize(filepath)

    def _upload(name, item):
        # Context manager guarantees the source handle is closed even if the
        # upload raises; the original version leaked all five handles.
        with open(filepath, 'rb') as fp:
            return Upload().uploadFromFile(
                fp, filelen, name, parentType='item', parent=item, user=admin)

    file1 = _upload('File 1', item1)
    file2 = _upload('File 2', item1)
    file3 = _upload('File 3', item2)
    file4 = _upload('File 4', item3)
    file5 = _upload('File 5', item4)
    return {
        'collection': collection,
        'collPrivateFolder': collPrivateFolder,
        'adminPublicFolder': adminPublicFolder,
        'adminSubFolder': adminSubFolder,
        'items': [item1, item2, item3, item4, item5],
        'files': [file1, file2, file3, file4, file5],
        'filelen': filelen
    }
def createNotebooks(event):
    """Seed a new user's Private/oc/notebooks folder with bundled notebooks."""
    user = event.info
    folders = Folder()
    private_folder = lookUpPath(
        'user/%s/Private' % user['login'], force=True)['document']
    oc_folder = folders.createFolder(
        private_folder, 'oc', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    notebook_folder = folders.createFolder(
        oc_folder, 'notebooks', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    # Notebooks ship next to this module in a 'notebooks' directory.
    notebooks_dir = os.path.join(os.path.dirname(__file__), 'notebooks')
    uploads = Upload()
    for nb_path in glob.glob('%s/*.ipynb' % notebooks_dir):
        with open(nb_path, 'rb') as fp:
            uploads.uploadFromFile(
                fp, size=os.path.getsize(nb_path),
                name=os.path.basename(nb_path), parentType='folder',
                parent={'_id': ObjectId(notebook_folder['_id'])},
                user=user, mimeType='application/x-ipynb+json')
def pathDownload(self, path):
    """
    Download the resource at a Girder path.

    A file, or a resource containing exactly one file, is streamed directly
    (honoring the first Range header entry).  Anything else is streamed as a
    zip archive of its file list.
    """
    user = self.getCurrentUser()
    path = '/'.join(path)
    resource = path_util.lookUpPath(path, user)['document']
    if resource['_modelType'] == 'file':
        singleFile = resource
    else:
        model = self._getResourceModel(resource['_modelType'], 'fileList')
        # Scan the file list; finding a second file flips the sentinel to
        # False, meaning "serve as a zip, not a single download".
        singleFile = None
        for _path, file in model.fileList(doc=resource, user=user,
                                          subpath=True, data=False):
            if singleFile is None:
                singleFile = file
            else:
                singleFile = False
                break
    if singleFile is not False and singleFile is not None:
        offset, endByte = 0, None
        rangeHeader = cherrypy.lib.httputil.get_ranges(
            cherrypy.request.headers.get('Range'), singleFile.get('size', 0))
        # Only the first range of a multi-range request is honored.
        if rangeHeader and len(rangeHeader):
            offset, endByte = rangeHeader[0]
        singleFile = File().load(singleFile['_id'], user=user,
                                 level=AccessType.READ)
        return File().download(singleFile, offset, endByte=endByte)
    setResponseHeader('Content-Type', 'application/zip')
    setContentDisposition(resource.get('name', 'Resources') + '.zip')

    def stream():
        zip = ziputil.ZipGenerator()
        for (path, file) in model.fileList(
                doc=resource, user=user, subpath=True):
            for data in zip.addFile(file, path):
                yield data
        yield zip.footer()
    # Returning the generator function makes the response stream.
    return stream
def _getPath(self, path):
    """
    Resolve a fuse path to the Girder resource it names.

    :param path: path within the fuse.
    :returns: a Girder resource dictionary ({model, document}).
    """
    trimmed = path.rstrip('/')
    # Entries directly under the root ('', '/user', '/collection') are the
    # callers' responsibility; report them as nonexistent here.
    if '/' not in trimmed[1:]:
        raise fuse.FuseOSError(errno.ENOENT)
    try:
        # Unfiltered lookup: filtering would strip files' assetstore
        # information and users' size information.
        return path_util.lookUpPath(trimmed, filter=False, force=True)
    except (path_util.NotFoundException, AccessException):
        raise fuse.FuseOSError(errno.ENOENT)
    except ValidationException:
        raise fuse.FuseOSError(errno.EROFS)
    except Exception:
        # Never let an unexpected error escape into the fuse layer.
        logger.exception('ServerFuse server internal error')
        raise fuse.FuseOSError(errno.EROFS)
def _getPath(self, path): """ Given a fuse path, return the associated resource. :param path: path within the fuse. :returns: a Girder resource dictionary. """ # If asked about a file in top level directory or the top directory, # return that it doesn't exist. Other methods should handle '', # '/user', and 'collection' before calling this method. if '/' not in path.rstrip('/')[1:]: raise fuse.FuseOSError(errno.ENOENT) try: # We can't filter the resource, since that removes files' # assetstore information and users' size information. resource = path_util.lookUpPath(path.rstrip('/'), filter=False, force=True) except (path_util.NotFoundException, AccessException): raise fuse.FuseOSError(errno.ENOENT) except ValidationException: raise fuse.FuseOSError(errno.EROFS) except Exception: logger.exception('ServerFuse server internal error') raise fuse.FuseOSError(errno.EROFS) return resource # {model, document}
def purge_leaf_folder(path):
    """Remove the folder at ``path`` if empty, then try its parent."""
    folder = path_utils.lookUpPath(path, user=admin)['document']
    # Keep the folder if it still holds items; checked first so the child-
    # folder query is skipped when not needed (same order as before).
    if Item().find({'folderId': folder['_id']}).count() > 0:
        return
    if list(Folder().childFolders(folder, 'folder', user=admin)):
        return
    logger.info("Removing empty folder %s" % path)
    Folder().remove(folder)
    # Recurse upward: removing this folder may have emptied the parent.
    purge_leaf_folder(os.path.dirname(path))
def value_from_resource(value, adminUser):
    """
    Resolve ``resource:`` / ``resourceid:`` strings to Girder resources.

    A string starting with ``resourceid:`` names a path to an existing
    resource and resolves to the string form of that resource's ``_id``;
    ``resource:`` resolves to the resource document itself.  The special
    suffix ``admin`` maps to the supplied admin user.

    :param value: a value to resolve.
    :param adminUser: admin user document used for the ``admin`` forms.
    :returns: the original value if it is not a resource reference,
        otherwise the resolved id string or document.
    """
    text = str(value)
    if text == 'resourceid:admin':
        return str(adminUser['_id'])
    if text.startswith('resourceid:'):
        doc = path_util.lookUpPath(value.split(':', 1)[1],
                                   force=True)['document']
        return str(doc['_id'])
    if text == 'resource:admin':
        return adminUser
    if text.startswith('resource:'):
        return path_util.lookUpPath(value.split(':', 1)[1],
                                    force=True)['document']
    return value
def importData(self, parentId, parentType, public, copyToHome, dataMap, params):
    """
    Register external resources (DataONE or HTTP) under a parent, optionally
    copying them into the user's /Data workspace afterwards.

    :param parentId: id of the destination; falls back to the catalog root
        folder when absent or of the wrong type.
    :param parentType: 'folder' or 'item'.
    :param public: not referenced in this body -- TODO confirm intent.
    :param copyToHome: when truthy, copy registered resources to the user's
        Data folder.
    :param dataMap: iterable of dicts with 'repository', 'dataId', 'name'.
    :param params: additional request parameters (unused here).
    """
    user = self.getCurrentUser()
    if not parentId or parentType not in ('folder', 'item'):
        # Default destination: the shared catalog root folder.
        parent = getOrCreateRootFolder(CATALOG_NAME)
        parentType = 'folder'
    else:
        parent = self.model(parentType).load(parentId, user=user,
                                             level=AccessType.WRITE, exc=True)
    progress = True
    importedData = dict(folder=[], item=[])
    with ProgressContext(progress, user=user,
                         title='Registering resources') as ctx:
        for data in dataMap:
            # DataONE registration yields folders; HTTP registration items.
            if data['repository'] == 'DataONE':
                importedData['folder'].append(
                    register_DataONE_resource(parent, parentType, ctx, user,
                                              data['dataId'],
                                              name=data['name']))
            elif data['repository'] == 'HTTP':
                importedData['item'].append(
                    register_http_resource(parent, parentType, ctx, user,
                                           data['dataId'], data['name']))
    if copyToHome:
        with ProgressContext(progress, user=user,
                             title='Copying to workspace') as ctx:
            userDataFolder = path_util.lookUpPath(
                '/user/%s/Data' % user['login'], user)
            for folder in importedData['folder']:
                self.model('folder').copyFolder(
                    folder, creator=user, name=folder['name'],
                    parentType='folder', parent=userDataFolder['document'],
                    description=folder['description'],
                    public=folder['public'], progress=ctx)
            for item in importedData['item']:
                self.model('item').copyItem(
                    item, creator=user, name=item['name'],
                    folder=userDataFolder['document'],
                    description=item['description'])
def migrate(user, apiUrl): print("\nMigrating data for user %s" % user['login']) # Get or create API key for migration apiKey = ApiKey().createApiKey(user, 'migration') # Look up the "Home" directory homeDir = lookUpPath("/user/%s/Home" % user['login'], user=user, test=True)["document"] print("Found homeDir %s" % homeDir) # Remove old Home, create new (needed for wt_home_dirs) print("Removing homeDir") Folder().remove(homeDir) print("Creating new homeDir") newHomeDir = Folder().createFolder(parent=user, name="Home", creator=user, parentType="user") # Mount home dir via webdav tmpDir = "/tmp/migrate/%s/" % user['login'] os.makedirs(tmpDir, exist_ok=True) print("Created tmpDir %s" % tmpDir) mount(apiKey['key'], tmpDir, newHomeDir['_id'], apiUrl) # Move Data directory, if present print("Moving files") try: if os.path.exists('/user/%s/Data' % user['login']): shutil.move('/user/%s/Data' % user['login'], tmpDir) if os.path.exists('/user/%s/Workspace' % user['login']): shutil.move('/user/%s/Workspace' % user['login'], tmpDir) if os.path.exists('/user/%s/Home' % user['login']): for file in glob.glob('/user/%s/Home/*' % user['login']): print(file) shutil.move(file, tmpDir) os.rmdir('/user/%s/Home' % user['login']) except Exception as e: print("Error moving files: %s" % str(e)) time.sleep(60) # Unmount unmount(tmpDir) # Remove tmp folder shutil.rmtree(tmpDir) # Remove the API key ApiKey().remove(apiKey)
def stat(self, path):
    """Return SFTP attributes for ``path``, including the virtual roots."""
    path = path.rstrip('/')
    # The root and the two top-level virtual directories are synthesized.
    if path in ('', '/user', '/collection'):
        info = paramiko.SFTPAttributes()
        info.st_size = 0
        info.st_mode = 0o777 | stat.S_IFDIR
        info.filename = '/' if path == '' else path[1:]
        return info
    obj = lookUpPath(path, filter=False, user=self.server.girderUser)
    return _stat(obj['document'], obj['model'])
def createNotebooks(event):
    """Seed a new user's Private/oc/notebooks folder with bundled notebooks."""
    # If there is no current asset store, the uploads below would fail;
    # warn and bail out early.
    try:
        Assetstore().getCurrent()
    except GirderException:
        print(
            TerminalColor.warning('WARNING: no current asset store. '
                                  'Notebook will not be created.'))
        return
    user = event.info
    folders = Folder()
    private_folder = lookUpPath(
        'user/%s/Private' % user['login'], force=True)['document']
    oc_folder = folders.createFolder(
        private_folder, 'oc', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    notebook_folder = folders.createFolder(
        oc_folder, 'notebooks', parentType='folder', creator=user,
        public=True, reuseExisting=True)
    # Notebooks ship next to this module in a 'notebooks' directory.
    notebooks_dir = os.path.join(os.path.dirname(__file__), 'notebooks')
    uploads = Upload()
    for nb_path in glob.glob('%s/*.ipynb' % notebooks_dir):
        with open(nb_path, 'rb') as fp:
            uploads.uploadFromFile(
                fp, size=os.path.getsize(nb_path),
                name=os.path.basename(nb_path), parentType='folder',
                parent={'_id': ObjectId(notebook_folder['_id'])},
                user=user, mimeType='application/x-ipynb+json')
def copyDatasetToHome(self, id, dataset, params):
    """
    Copy a dataset (folder or item) into the user's /Data folder.

    :param id: route id parameter; the loaded ``dataset`` document is used.
    :param dataset: dataset document with 'modelType' and '_id'.
    :param params: additional request parameters (unused).
    :returns: the dataset, unchanged.
    """
    user = self.getCurrentUser()
    modelType = dataset['modelType']
    user_target_resource = path_util.lookUpPath(
        '/user/%s/Data' % user['login'], user)
    user_folder = user_target_resource['document']
    # Re-load the source object so READ access is actually enforced.
    source_object = self.model(modelType).load(
        dataset['_id'], user=user, level=AccessType.READ, exc=True)
    if modelType == 'folder':
        self.model('folder').copyFolder(
            source_object, parent=user_folder, parentType='folder',
            public='original', creator=user)
    elif modelType == 'item':
        self.model('item').copyItem(source_object, user, folder=user_folder)
    return dataset  # Wrong, but I cannot set it to 204, or 201
def pathRedirect(self, path): user = self.getCurrentUser() # Find the longest path that is a valid resource used = len(path) while used: try: resource = path_util.lookUpPath('/'.join(path[:used]), user)['document'] break except path_util.ResourcePathNotFound: if used == 1: raise used -= 1 path = [resource['_modelType'], str(resource['_id'])] + list(path[used:]) path_info = ('/'.join(cherrypy.request.path_info.split('/')[:2] + path)) # This locates the redirected handler cherrypy.request.get_resource(path_info) result = cherrypy.request.handler() setRawResponse() return result
def filesResponse(self, path):
    """
    Serve a Girder resource path as a web directory listing or file.

    Handles the virtual 'user' and 'collection' roots, streams single files
    (honoring Range requests), and renders directory listings for
    collections, users, folders, and multi-file items.
    """
    user = self.getCurrentUser()
    path = '/'.join(path)
    # Handle /collection and /user specially
    if path in ('user', 'collection'):
        if not cherrypy.request.path_info.endswith('/'):
            # Directory urls must end with '/'; forward otherwise.
            return self._forward()
        model = User() if path == 'user' else Collection()
        return self._listDirectory(path, model.list(
            user=user, sort=[('user' if path == 'user' else 'name',
                              SortDir.ASCENDING)]))
    resource = path_util.lookUpPath(path, user)['document']
    singleFile = None
    if resource['_modelType'] == 'file':
        singleFile = resource
    elif resource['_modelType'] == 'item':
        # An item with exactly one file is served directly as that file;
        # a second file flips the sentinel to False (serve a listing).
        singleFile = None
        for _path, file in Item().fileList(doc=resource, user=user,
                                           subpath=True, data=False):
            if singleFile is None:
                singleFile = file
            else:
                singleFile = False
                break
    if singleFile is not False and singleFile is not None:
        offset, endByte = 0, None
        rangeHeader = cherrypy.lib.httputil.get_ranges(
            cherrypy.request.headers.get('Range'), singleFile.get('size', 0))
        # Only the first range of a multi-range request is honored.
        if rangeHeader and len(rangeHeader):
            offset, endByte = rangeHeader[0]
        singleFile = File().load(singleFile['_id'], user=user,
                                 level=AccessType.READ)
        return File().download(singleFile, offset, endByte=endByte)
    if not cherrypy.request.path_info.endswith('/'):
        return self._forward()
    # Build the directory listing: subfolders, then items, then files.
    children = []
    if resource['_modelType'] != 'item':
        children.extend(Folder().childFolders(
            parentType=resource['_modelType'], parent=resource, user=user,
            sort=[('name', SortDir.ASCENDING)]))
    if resource['_modelType'] == 'folder':
        children.extend(Folder().childItems(
            resource, sort=[('name', SortDir.ASCENDING)]))
    if resource['_modelType'] == 'item':
        children.extend(Item().childFiles(
            resource, sort=[('name', SortDir.ASCENDING)]))
    return self._listDirectory(path, children)
def list_folder(self, path):
    """List SFTP directory entries for ``path``, including virtual roots."""
    path = path.rstrip('/')
    if path == '':
        # The root shows the two virtual top-level directories.
        entries = []
        for name in ('collection', 'user'):
            info = paramiko.SFTPAttributes()
            info.st_size = 0
            info.st_mode = 0o777 | stat.S_IFDIR
            info.filename = name.encode('utf8')
            entries.append(info)
        return entries
    if path in ('/user', '/collection'):
        model = path[1:]
        return [_stat(doc, model) for doc in
                ModelImporter.model(model).list(user=self.server.girderUser)]
    obj = lookUpPath(path, filter=False, user=self.server.girderUser)
    return self._list(obj['model'], obj['document'])
def createInstance(self, taleId, imageId, name, params):
    """
    Create a running instance for a tale.

    :param taleId: id of an existing tale to instantiate.
    :param imageId: id of an image; used to find or create a scratch tale
        when no taleId is given.
    :param name: optional name for the instance.
    :param params: additional request parameters (unused).
    :raises RestException: if neither taleId nor imageId is provided.
    """
    if taleId is None and imageId is None:
        raise RestException('You need to provide "imageId" or "taleId".')
    user = self.getCurrentUser()
    token = self.getCurrentToken()
    taleModel = self.model('tale', 'wholetale')
    if taleId:
        tale = taleModel.load(taleId, user=user, level=AccessType.READ)
    elif imageId:
        image = self.model('image', 'wholetale').load(
            imageId, user=user, level=AccessType.READ)
        # Scratch tales built from an image use the user's /Data folder.
        userDataFolder = path_util.lookUpPath(
            '/user/%s/Data' % user['login'], user)
        folder = userDataFolder['document']
        data = [{'type': 'folder', 'id': folder['_id']}]
        try:
            # Check if it already exists
            tale = next(
                taleModel.list(user=None, data=data, image=image,
                               currentUser=user))
        except StopIteration:
            title = 'Testing %s' % image['fullName']
            tale = taleModel.createTale(image, data, creator=user, save=True,
                                        title=title, description=None,
                                        public=False)
    instanceModel = self.model('instance', 'wholetale')
    return instanceModel.createInstance(tale, user, token, name=name,
                                        save=True)
def extract(path):
    """
    Create geometa records for the files of the selected resources.

    NOTE(review): ``path`` appears to be a list of resource paths (the final
    branch iterates it and feeds each element to ``lookUpPath``); the
    membership tests then look for the literal entries '/', '/collection',
    and '/user' in that list -- confirm against callers.
    """
    resources = []
    if '/' in path:
        # Root selected: process every collection and every user.
        resources += list(Collection().find())
        resources += list(User().find())
    elif '/collection' in path:
        resources += list(Collection().find())
    elif '/user' in path:
        resources += list(User().find())
    else:
        resources += [lookUpPath(p)['document'] for p in path]
    for resource in resources:
        # baseParentId matches all descendant items of the resource.
        items = list(Item().find({'baseParentId': resource['_id']}))
        for item in items:
            files = list(File().find({'itemId': item['_id']}))
            for file in files:
                create_geometa(item, file)
def populate(ids, paths, data):
    """
    Apply metadata from a JSON file to target collections.

    :param ids: collection ids to update; missing ids produce a warning.
    :param paths: resource paths; anything past the collection level is
        truncated, and missing collections are created.
    :param data: path of a JSON file containing the metadata to set.
    """
    if not (ids or paths):
        click.echo('Error: No destination specified')
        return
    # Close the metadata file promptly (the original leaked the handle).
    with open(data, 'r') as fp:
        data = json.load(fp)
    totalTargets = len(ids) + len(paths)
    success = 0
    for collectionId in ids:
        collection = Collection().findOne({'_id': ObjectId(collectionId)})
        if (collection):
            Collection().setMetadata(collection=collection, metadata=data)
            success += 1
        else:
            click.echo('Warning: No collection found with ID: ' + collectionId)
    for path in paths:
        # Truncates anything past the collection level
        path = '/'.join(split(path.lstrip('/'))[0:2])
        try:
            doc = lookUpPath(path, force=True)
            if doc['model'] != 'collection':
                click.echo('Warning: Ignoring non-collection path: ' + path)
                continue
            doc = doc['document']
        except (ResourcePathNotFound):
            # Missing collection: create it by name (idempotently).
            name = split(path)[1]
            doc = Collection().createCollection(name, reuseExisting=True)
        Collection().setMetadata(collection=doc, metadata=data)
        success += 1
    click.echo('Successfully set metadata on ' + str(success) + '/' +
               str(totalTargets) + ' targets')
def lookup(self, params):
    """Resolve a resource path from request params to its document."""
    self.requireParams('path', params)
    testFlag = self.boolParam('test', params, default=False)
    result = path_util.lookUpPath(params['path'], self.getCurrentUser(),
                                  testFlag)
    return result['document']
def lookup(self, path, test):
    """Return the document at the given Girder resource path."""
    current_user = self.getCurrentUser()
    return path_util.lookUpPath(path, current_user, test)['document']
def testMoveBetweenAssetstores(self):
    """
    Exercise moving files between a filesystem and a GridFS assetstore:
    uploads, replacement, repeated moves, zero-length and multi-chunk
    files, event-based move prevention, progress notifications, and moving
    an imported file.
    """
    folder = six.next(Folder().childFolders(self.admin, parentType='user',
                                            force=True,
                                            filters={'name': 'Public'}))
    resp = self.request(path='/assetstore', method='GET', user=self.admin)
    self.assertStatusOk(resp)
    fs_assetstore = resp.json[0]
    # Clear any old DB data
    base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
    params = {
        'name': 'New Name',
        'type': AssetstoreType.GRIDFS,
        'db': 'girder_test_assetstore_move_assetstore'
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    gridfs_assetstore = resp.json
    # Upload a file - it should go to the fs assetstore
    uploadData = 'helloworld'
    params = {
        'parentType': 'folder',
        'parentId': folder['_id'],
        'name': 'sample1',
        'size': len(uploadData),
        'mimeType': 'text/plain'
    }
    resp = self.request(path='/file', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(path='/file/chunk', method='POST', user=self.admin,
                        body=uploadData, params={'uploadId': upload['_id']},
                        type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles = [resp.json]
    # Upload it again targetting a different assetstore
    params['assetstoreId'] = gridfs_assetstore['_id']
    resp = self.request(path='/file', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(path='/file/chunk', method='POST', user=self.admin,
                        body=uploadData, params={'uploadId': upload['_id']},
                        type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles.append(resp.json)
    # Replace the first file, directing the replacement to a different
    # assetstore
    replaceParams = {
        'size': len(uploadData),
        'assetstoreId': gridfs_assetstore['_id'],
    }
    resp = self.request(path='/file/%s/contents' % uploadedFiles[0]['_id'],
                        method='PUT', user=self.admin, params=replaceParams)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(path='/file/chunk', method='POST', user=self.admin,
                        body=uploadData, params={'uploadId': upload['_id']},
                        type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Move a file from the gridfs assetstore to the filesystem assetstore
    resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                        method='PUT', user=self.admin,
                        params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Doing it again shouldn't change it.
    resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                        method='PUT', user=self.admin,
                        params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # We should be able to move it back
    resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                        method='PUT', user=self.admin,
                        params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Test moving a file of zero length
    params['size'] = 0
    resp = self.request(path='/file', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    uploadedFiles.append(resp.json)
    resp = self.request(path='/file/%s/move' % uploadedFiles[2]['_id'],
                        method='PUT', user=self.admin,
                        params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[2] = resp.json

    # Test preventing the move via an event
    def stopMove(event):
        event.preventDefault()
    events.bind('model.upload.movefile', 'assetstore_test', stopMove)
    try:
        resp = self.request(path='/file/%s/move' % uploadedFiles[0]['_id'],
                            method='PUT', user=self.admin,
                            params={'assetstoreId': fs_assetstore['_id']},
                            isJson=False)
        self.assertFalse('Move should have been prevented')
    except AssertionError as exc:
        self.assertIn('could not be moved to assetstore', str(exc))
    events.unbind('model.upload.movefile', 'assetstore_test')
    # Test files big enough to be multi-chunk
    chunkSize = Upload()._getChunkSize()
    data = io.BytesIO(b' ' * chunkSize * 2)
    uploadedFiles.append(Upload().uploadFromFile(
        data, chunkSize * 2, 'sample', parentType='folder', parent=folder,
        assetstore=fs_assetstore))
    resp = self.request(path='/file/%s/move' % uploadedFiles[3]['_id'],
                        method='PUT', user=self.admin,
                        params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[3] = resp.json
    # Test progress
    size = chunkSize * 2
    data = io.BytesIO(b' ' * size)
    upload = Upload().uploadFromFile(data, size, 'progress',
                                     parentType='folder', parent=folder,
                                     assetstore=fs_assetstore)
    params = {'assetstoreId': gridfs_assetstore['_id'], 'progress': True}
    resp = self.request(path='/file/%s/move' % upload['_id'], method='PUT',
                        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    resp = self.request(path='/notification/stream', method='GET',
                        user=self.admin, isJson=False,
                        params={'timeout': 1})
    messages = self.getSseMessages(resp)
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]['type'], 'progress')
    self.assertEqual(messages[0]['data']['current'], size)
    # Test moving imported file
    # Create assetstore to import file into
    params = {
        'name': 'ImportTest',
        'type': AssetstoreType.FILESYSTEM,
        'root': os.path.join(fs_assetstore['root'], 'import')
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    import_assetstore = resp.json
    # Import file
    params = {
        'importPath': os.path.join(ROOT_DIR, 'tests', 'cases', 'py_client',
                                   'testdata', 'world.txt'),
        'destinationType': 'folder',
    }
    Assetstore().importData(import_assetstore, parent=folder,
                            parentType='folder', params=params,
                            progress=ProgressContext(False), user=self.admin,
                            leafFoldersAsItems=False)
    file = path_util.lookUpPath('/user/admin/Public/world.txt/world.txt',
                                self.admin)['document']
    # Move file
    params = {
        'assetstoreId': fs_assetstore['_id'],
    }
    resp = self.request(path='/file/%s/move' % file['_id'], method='PUT',
                        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    # Check that we can still download the file
    resp = self.request(path='/file/%s/download' % file['_id'],
                        user=self.admin, isJson=False)
    self.assertStatusOk(resp)
def testMoveBetweenAssetstores(self):
    """
    Exercise moving files between a filesystem and a GridFS assetstore
    (legacy multipart-upload variant): uploads, replacement, repeated
    moves, zero-length and multi-chunk files, event-based move prevention,
    progress notifications, and moving an imported file.
    """
    folder = six.next(self.model('folder').childFolders(
        self.admin, parentType='user', force=True, filters={
            'name': 'Public'
        }))
    resp = self.request(path='/assetstore', method='GET', user=self.admin)
    self.assertStatusOk(resp)
    fs_assetstore = resp.json[0]
    # Clear any old DB data
    base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
    params = {
        'name': 'New Name',
        'type': AssetstoreType.GRIDFS,
        'db': 'girder_test_assetstore_move_assetstore'
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    gridfs_assetstore = resp.json
    # Upload a file - it should go to the fs assetstore
    uploadData = 'helloworld'
    params = {
        'parentType': 'folder',
        'parentId': folder['_id'],
        'name': 'sample1',
        'size': len(uploadData),
        'mimeType': 'text/plain'
    }
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    fields = [('offset', 0), ('uploadId', upload['_id'])]
    files = [('chunk', 'helloWorld.txt', uploadData)]
    resp = self.multipartRequest(
        path='/file/chunk', user=self.admin, fields=fields, files=files)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles = [resp.json]
    # Upload it again targetting a different assetstore
    params['assetstoreId'] = gridfs_assetstore['_id']
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    fields = [('offset', 0), ('uploadId', upload['_id'])]
    files = [('chunk', 'helloWorld.txt', uploadData)]
    resp = self.multipartRequest(
        path='/file/chunk', user=self.admin, fields=fields, files=files)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles.append(resp.json)
    # Replace the first file, directing the replacement to a different
    # assetstore
    replaceParams = {
        'size': len(uploadData),
        'assetstoreId': gridfs_assetstore['_id'],
    }
    resp = self.request(
        path='/file/%s/contents' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params=replaceParams)
    self.assertStatusOk(resp)
    upload = resp.json
    fields = [('offset', 0), ('uploadId', upload['_id'])]
    resp = self.multipartRequest(
        path='/file/chunk', user=self.admin, fields=fields, files=files)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Move a file from the gridfs assetstore to the filesystem assetstore
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Doing it again shouldn't change it.
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # We should be able to move it back
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json
    # Test moving a file of zero length
    params['size'] = 0
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    uploadedFiles.append(resp.json)
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[2]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[2] = resp.json

    # Test preventing the move via an event
    def stopMove(event):
        event.preventDefault()
    events.bind('model.upload.movefile', 'assetstore_test', stopMove)
    try:
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']},
            isJson=False)
        self.assertFalse('Move should have been prevented')
    except AssertionError as exc:
        self.assertIn('could not be moved to assetstore', str(exc))
    events.unbind('model.upload.movefile', 'assetstore_test')
    # Test files big enough to be multi-chunk
    chunkSize = self.model('upload')._getChunkSize()
    data = six.BytesIO(b' ' * chunkSize * 2)
    uploadedFiles.append(self.model('upload').uploadFromFile(
        data, chunkSize * 2, 'sample', parentType='folder', parent=folder,
        assetstore=fs_assetstore))
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[3]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[3] = resp.json
    # Test progress
    size = chunkSize * 2
    # NOTE(review): redundant reassignment -- same value as above.
    chunkSize = self.model('upload')._getChunkSize()
    data = six.BytesIO(b' ' * size)
    upload = self.model('upload').uploadFromFile(
        data, size, 'progress', parentType='folder', parent=folder,
        assetstore=fs_assetstore)
    params = {
        'assetstoreId': gridfs_assetstore['_id'],
        'progress': True
    }
    resp = self.request(
        path='/file/%s/move' % upload['_id'], method='PUT',
        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    resp = self.request(
        path='/notification/stream', method='GET', user=self.admin,
        isJson=False, params={'timeout': 1})
    messages = self.getSseMessages(resp)
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]['type'], 'progress')
    self.assertEqual(messages[0]['data']['current'], size)
    # Test moving imported file
    # Create assetstore to import file into
    params = {
        'name': 'ImportTest',
        'type': AssetstoreType.FILESYSTEM,
        'root': os.path.join(fs_assetstore['root'], 'import')
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    import_assetstore = resp.json
    # Import file
    params = {
        'importPath': os.path.join(ROOT_DIR, 'tests', 'cases', 'py_client',
                                   'testdata', 'world.txt'),
        'destinationType': 'folder',
    }
    self.model('assetstore').importData(
        import_assetstore, parent=folder, parentType='folder',
        params=params, progress=ProgressContext(False), user=self.admin,
        leafFoldersAsItems=False)
    file = path_util.lookUpPath('/user/admin/Public/world.txt/world.txt',
                                self.admin, False)['document']
    # Move file
    params = {
        'assetstoreId': fs_assetstore['_id'],
    }
    resp = self.request(
        path='/file/%s/move' % file['_id'], method='PUT',
        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    # Check that we can still download the file
    resp = self.request(
        path='/file/%s/download' % file['_id'], user=self.admin,
        isJson=False)
    self.assertStatusOk(resp)
def setUp(self):
    """Provision fixture users, the mock WholeTale catalog, and two Tales.

    Side effects: creates three user accounts, rebuilds the catalog
    collection/folder tree from ``manifest_mock_catalog.json`` via REST
    calls, flips the module-level ``catalog_ready`` flag, and stores the
    created documents on ``self`` for the individual tests to use.
    """
    super(ManifestTestCase, self).setUp()
    global catalog_ready

    # Fixture accounts: one admin plus two regular users.
    self.users = (
        {
            "email": "*****@*****.**",
            "login": "******",
            "firstName": "Root",
            "lastName": "van Klompf",
            "password": "******",
            "admin": True,
        },
        {
            "email": "*****@*****.**",
            "login": "******",
            "firstName": "Joe",
            "lastName": "Regular",
            "password": "******",
        },
        {
            "email": "*****@*****.**",
            "login": "******",
            "firstName": "Henry",
            "lastName": "CoolLast",
            "password": "******",
        },
    )
    createdUsers = [self.model("user").createUser(**spec) for spec in self.users]
    self.admin, self.user, self.userHenry = createdUsers

    # Author records pointing at the freshly created accounts.
    self.new_authors = [
        {
            "firstName": self.admin["firstName"],
            "lastName": self.admin["lastName"],
            "orcid": "https://orcid.org/1234",
        },
        {
            "firstName": self.user["firstName"],
            "lastName": self.user["lastName"],
            "orcid": "https://orcid.org/9876",
        },
    ]

    # Root collection and folder that will hold the mock catalog.
    wtCollection = self.model("collection").createCollection(
        "WholeTale Catalog", public=True, reuseExisting=True)
    wtCatalog = self.model("folder").createFolder(
        wtCollection,
        "WholeTale Catalog",
        parentType="collection",
        public=True,
        reuseExisting=True,
    )

    def restore_catalog(parent, node):
        # Recursively recreate the folders, items, and link-files described
        # by the mock-catalog JSON underneath the given parent folder.
        for childFolder in node["folders"]:
            reply = self.request(
                path="/folder",
                method="POST",
                user=self.admin,
                params={
                    "parentId": parent["_id"],
                    "name": childFolder["name"],
                    "metadata": json.dumps(childFolder["meta"]),
                },
            )
            restore_catalog(reply.json, childFolder)
        for record in node["files"]:
            reply = self.request(
                path="/item",
                method="POST",
                user=self.admin,
                params={
                    "folderId": parent["_id"],
                    "name": record["name"],
                    "metadata": json.dumps(record["meta"]),
                },
            )
            newItem = reply.json
            # Each catalog file is registered as a link (linkUrl), not an upload.
            self.request(
                path="/file",
                method="POST",
                user=self.admin,
                params={
                    "parentType": "item",
                    "parentId": newItem["_id"],
                    "name": record["name"],
                    "size": record["size"],
                    "mimeType": record["mimeType"],
                    "linkUrl": record["linkUrl"],
                },
            )

    with open(os.path.join(DATA_PATH, "manifest_mock_catalog.json"), "r") as fh:
        mockCatalog = json.load(fh)
    restore_catalog(wtCatalog, mockCatalog)
    catalog_ready = True

    # Resolve a representative mix of catalog paths (DataONE, Globus, HTTP;
    # both folders and files) and build the Tale's dataSet from them.
    catalogPaths = [
        "Humans and Hydrology at High Latitudes: Water Use Information",  # D1 folder
        "Humans and Hydrology at High Latitudes: Water Use Information/usco2005.xls",  # D1 file
        "Twin-mediated Crystal Growth: an Enigma Resolved/data/D_whites_darks_AJS.hdf",  # Globus file
        "A Machine Learning Approach for Engineering Bulk Metallic Glass Alloys/data/Dmax",  # Globus folder
        "www.gw-openscience.org/s/events/BBH_events_v3.json",  # HTTP file
        "www.gw-openscience.org/s/events/GW170104",  # HTTP folder
    ]
    root = "/collection/WholeTale Catalog/WholeTale Catalog"
    dataSet = [
        {
            "itemId": hit["document"]["_id"],
            "mountPath": hit["document"]["name"],
            "_modelType": hit["model"],
        }
        for hit in (lookUpPath(os.path.join(root, rel)) for rel in catalogPaths)
    ]

    self.tale_info = {
        "_id": ObjectId(),
        "name": "Main Tale",
        "description": "Tale Desc",
        "authors": self.new_authors,
        "creator": self.user,
        "public": True,
        "data": dataSet,
        "illustration": "linkToImage",
    }
    self.tale = self.model("tale", "wholetale").createTale(
        {"_id": self.tale_info["_id"]},
        data=self.tale_info["data"],
        creator=self.tale_info["creator"],
        title=self.tale_info["name"],
        public=self.tale_info["public"],
        description=self.tale_info["description"],
        authors=self.tale_info["authors"],
    )
    # Pretend the first Tale's image has already been built successfully.
    self.tale["imageInfo"] = {
        "digest": ("registry.local.wholetale.org/5c8fe826da39aa00013e9609/1552934951@"
                   "sha256:4f604e6fab47f79e28251657347ca20ee89b737b4b1048c18ea5cf2fe9a9f098"),
        "jobId": ObjectId("5c9009deda39aa0001d702b7"),
        "last_build": 1552943449,
        "repo2docker_version": "craigwillis/repo2docker:latest",
        "status": 3,
    }
    self.model('tale', 'wholetale').save(self.tale)
    # Second Tale shares the metadata but carries an empty data set.
    self.tale2 = self.model("tale", "wholetale").createTale(
        {"_id": self.tale_info["_id"]},
        data=[],
        creator=self.tale_info["creator"],
        title=self.tale_info["name"],
        public=self.tale_info["public"],
        description=self.tale_info["description"],
        authors=self.tale_info["authors"],
    )