def test_package_fetch(self):
    """ Package.fetch() on nested, relative keys """
    src_pkg = Package().set_dir('/', DATA_DIR / 'nested')
    out_dir = 'output'
    fetched_pkg = src_pkg.fetch(out_dir)

    expected = {'one.txt': '1', 'two.txt': '2', 'three.txt': '3'}

    # Walk everything fetch() wrote and check each file's name and contents.
    file_count = 0
    for dirpath, _, filenames in os.walk(out_dir):
        for name in filenames:
            file_count += 1
            assert name in expected, 'unexpected file: {}'.format(name)
            with open(os.path.join(dirpath, name)) as fobj:
                contents = fobj.read().strip()
            assert contents == expected[name], \
                'unexpected contents in {}: {}'.format(name, contents)

    # Exactly the expected files, no extras and no omissions.
    assert file_count == len(expected), \
        'fetch wrote {} files; expected: {}'.format(file_count, expected)

    # test that package re-rooting works as expected
    out_uri = pathlib.Path(out_dir).resolve().as_uri()
    for _, entry in fetched_pkg.walk():
        assert entry.physical_keys[0].startswith(out_uri)
def test_brackets(self):
    """Exercise Package indexing: strings, slash paths, tuples, and bad keys."""
    pkg = Package()
    for logical_key in ('asdf/jkl', 'asdf/qwer', 'qwer/asdf'):
        pkg.set(logical_key, LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'qwer'}

    sub_pkg = pkg['asdf']
    assert set(sub_pkg.keys()) == {'jkl', 'qwer'}

    assert pkg['asdf']['qwer'].get() == LOCAL_MANIFEST.as_uri()
    # Chained, slash-delimited, and tuple lookups are all equivalent.
    assert pkg['asdf']['qwer'] == pkg['asdf/qwer'] == pkg[('asdf', 'qwer')]
    # An empty key path resolves to the package itself.
    assert pkg[[]] == pkg

    pkg = Package().set('foo', DATA_DIR / 'foo.txt', {'foo': 'blah'})
    pkg['foo'].meta['target'] = 'unicode'
    pkg.build("Quilt/Test")
    # Entries can be deserialized explicitly or by calling the entry.
    assert pkg['foo'].deserialize() == '123\n'
    assert pkg['foo']() == '123\n'

    # Missing keys raise KeyError; non-str/tuple/list keys raise TypeError.
    with pytest.raises(KeyError):
        pkg['baz']
    with pytest.raises(TypeError):
        pkg[b'asdf']
    with pytest.raises(TypeError):
        pkg[0]
def test_validate(self):
    """validate() calls each field validator once and returns data_to_store."""
    pkg_name = 'test/name'
    msg = 'test message'
    meta = {'some': 'meta'}
    pkg = Package()
    pkg.set_meta(meta)
    workflow_validator = self.get_workflow_validator(
        data_to_store=mock.sentinel.data_to_store)

    # Replace every per-field validator with a mock in one patch call.
    patched_methods = dict.fromkeys(
        ('validate_name', 'validate_message', 'validate_metadata', 'validate_entries'),
        mock.DEFAULT,
    )
    with mock.patch.multiple(workflows.WorkflowValidator, **patched_methods) as mocks:
        result = workflow_validator.validate(name=pkg_name, pkg=pkg, message=msg)
        assert result is mock.sentinel.data_to_store
        mocks['validate_name'].assert_called_once_with(pkg_name)
        mocks['validate_message'].assert_called_once_with(msg)
        mocks['validate_metadata'].assert_called_once_with(meta)
        mocks['validate_entries'].assert_called_once_with(pkg)
def test_local_package_delete_overlapping(self):
    """
    Verify local package delete works when multiple packages reference the
    same tophash.
    """
    top_hash1 = Package().build("Quilt/Test1").top_hash
    top_hash = Package().build("Quilt/Test2").top_hash
    # Both builds are of empty packages, so they must share one manifest.
    # (Previously the first hash was silently overwritten without checking
    # the test's premise.)
    assert top_hash1 == top_hash

    assert 'Quilt/Test1' in quilt3.list_packages()
    assert top_hash in [
        p.name for p in (LOCAL_REGISTRY / '.quilt/packages').iterdir()
    ]

    quilt3.delete_package('Quilt/Test1')
    # The manifest is still referenced by Quilt/Test2, so it must survive.
    assert 'Quilt/Test1' not in quilt3.list_packages()
    assert top_hash in [
        p.name for p in (LOCAL_REGISTRY / '.quilt/packages').iterdir()
    ]

    quilt3.delete_package('Quilt/Test2')
    # Last reference removed: the shared manifest should be gone too.
    assert 'Quilt/Test2' not in quilt3.list_packages()
    assert top_hash not in [
        p.name for p in (LOCAL_REGISTRY / '.quilt/packages').iterdir()
    ]
def test_default_registry(self):
    """Build into the default local registry and verify the manifest layout."""
    new_pkg = Package()

    # Create a dummy file to add to the package.
    test_file_name = 'bar'
    with open(test_file_name, "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)

    # Build a new package into the local registry.
    new_pkg = new_pkg.set('foo', test_file_name)
    top_hash = new_pkg.build("Quilt/Test").top_hash

    # The manifest must be registered under its top hash.
    out_path = LOCAL_REGISTRY / ".quilt/packages" / top_hash
    with open(out_path) as fd:
        pkg = Package.load(fd)
    assert pkg['foo'].physical_keys[0] == test_file.resolve().as_uri()

    # The 'latest' pointer must contain the new top hash.
    named_pointer_path = LOCAL_REGISTRY / ".quilt/named_packages/Quilt/Test/latest"
    with open(named_pointer_path) as fd:
        assert fd.read().replace('\n', '') == top_hash

    # Build a second manifest under the same name; it registers by its own hash.
    new_pkg = Package().set('bar', test_file_name)
    top_hash = new_pkg.build("Quilt/Test").top_hash
    out_path = LOCAL_REGISTRY / ".quilt/packages" / top_hash
    with open(out_path) as fd:
        pkg = Package.load(fd)
    assert pkg['bar'].physical_keys[0] == test_file.resolve().as_uri()
def test(self):
    """Entries for validation come back sorted by logical key with sizes."""
    entries_data = {
        'b/a': bytes(1),
        'a/b': bytes(2),
        'c': bytes(3),
    }
    pkg = Package()
    for logical_key, payload in entries_data.items():
        pkg.set(logical_key, payload)
    workflow_validator = self.get_workflow_validator()

    expected_entries = [
        {'logical_key': 'a/b', 'size': 2},
        {'logical_key': 'b/a', 'size': 1},
        {'logical_key': 'c', 'size': 3},
    ]
    assert workflow_validator.get_pkg_entries_for_validation(pkg) == expected_entries
def test_load_into_quilt(self):
    """ Verify loading local manifest and data into S3. """
    # Known top hash of the pushed manifest; the manifest and pointer
    # objects below are addressed by this value.
    top_hash = '5333a204bbc6e21607c2bc842f4a77d2e21aa6147cf2bf493dbf6282188d01ca'

    # The stubber enforces call order, so the four put_object stubs are
    # queued in the order push() issues them:
    # 1) the package data object, with its (empty dict) entry metadata
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v1'},
        expected_params={
            'Body': ANY,
            'Bucket': 'my_test_bucket',
            'Key': 'Quilt/package/foo',
            'Metadata': {'helium': '{}'}
        })
    # 2) the manifest stored under .quilt/packages/<top_hash>
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v2'},
        expected_params={
            'Body': ANY,
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/packages/' + top_hash,
            'Metadata': {'helium': 'null'}
        })
    # 3) the timestamped named-package pointer (time.time patched below)
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v3'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/1234567890',
            'Metadata': {'helium': 'null'}
        })
    # 4) the 'latest' pointer for the package name
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v4'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/latest',
            'Metadata': {'helium': 'null'}
        })

    new_pkg = Package()
    # Create a dummy file to add to the package.
    contents = 'blah'
    test_file = Path('bar')
    test_file.write_text(contents)
    new_pkg = new_pkg.set('foo', test_file)

    # Freeze time so the timestamped pointer key matches stub #3.
    with patch('time.time', return_value=1234567890):
        new_pkg.push('Quilt/package', 's3://my_test_bucket/')
def test_default_install_location(self):
    """Verify that pushes to the default local install location work as expected"""
    with patch('quilt3.Package._materialize') as materialize_mock:
        pkg_name = 'Quilt/nice-name'
        Package.install(pkg_name, registry='s3://my-test-bucket')
        # install() without dest materializes under the configured install
        # location, suffixed with the package name.
        expected_dest = quilt3.util.get_install_location().rstrip('/') + '/' + pkg_name
        materialize_mock.assert_called_once_with(expected_dest)
def test_remote_repr(self):
    """A package containing any remote entry reprs as '(remote Package)'."""
    with patch('quilt3.packages.get_size_and_meta', return_value=(0, dict(), '0')):
        pkg = Package()
        pkg.set('asdf', 's3://my-bucket/asdf')
        assert repr(pkg) == ("(remote Package)\n"
                             " └─asdf\n")

        # Mixing in a local entry keeps the remote prefix.
        pkg = Package()
        pkg.set('asdf', 's3://my-bucket/asdf')
        pkg.set('qwer', LOCAL_MANIFEST)
        assert repr(pkg) == ("(remote Package)\n"
                             " └─asdf\n"
                             " └─qwer\n")
def exec_module(cls, module):
    """
    Module executor for the virtual `quilt3.data[.<namespace>]` modules.
    """
    name_parts = module.__name__.split('.')
    registry = get_from_config('default_local_registry')
    if module.__name__ == 'quilt3.data':
        # __path__ must be set even if the package is virtual. Since __path__ will be
        # scanned by all other finders preceding this one in sys.meta_path order, make sure
        # it points to someplace lacking importable objects
        module.__path__ = MODULE_PATH
        return module
    elif len(name_parts) == 3:  # e.g. module.__name__ == quilt3.data.foo
        namespace = name_parts[2]

        # we do not know the name the user will ask for, so populate all valid names
        for pkg in list_packages():
            pkg_user, pkg_name = pkg.split('/')
            if pkg_user == namespace:
                module.__dict__[pkg_name] = Package.browse(pkg, registry=registry)

        module.__path__ = MODULE_PATH
        return module
    else:
        # This loader only claims 'quilt3.data' and its direct children, so
        # any other depth is a bug in the finder. Raise explicitly instead of
        # `assert False`, which is stripped under `python -O` and would
        # silently fall through returning None.
        raise ImportError('unexpected module name: {!r}'.format(module.__name__))
def download_test_resources(args: Args):
    """Download the aicsimageio test-resource package into tests/resources.

    Exits the process with status 1 on any failure.
    """
    # Try running the download pipeline
    try:
        # Get test resources dir
        resources_dir = (
            Path(__file__).parent.parent / "aicsimageio" / "tests" / "resources"
        ).resolve()
        resources_dir.mkdir(exist_ok=True)

        # Get quilt package
        package = Package.browse(
            "aicsimageio/test_resources",
            "s3://aics-modeling-packages-test-resources",
            top_hash=args.top_hash,
        )

        # Download
        package["resources"].fetch(resources_dir)

        # Plain string — no placeholders (was a needless f-string).
        log.info("Completed package download.")

    # Catch any exception
    except Exception as e:
        log.error("=============================================")
        if args.debug:
            log.error("\n\n" + traceback.format_exc())
            log.error("=============================================")
        log.error("\n\n" + str(e) + "\n")
        log.error("=============================================")
        sys.exit(1)
def test_fetch(self):
    """ Verify fetching a package entry. """
    pkg = (
        Package()
        .set('foo', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
        .set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
    )
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    # Sanity-check the fixture's contents before fetching.
    with open(DATA_DIR / 'foo.txt') as fd:
        assert fd.read().replace('\n', '') == '123'

    # Fetch foo's data into data/bar.txt and verify the copy.
    pkg['foo'].fetch('data/bar.txt')
    with open('data/bar.txt') as fd:
        assert fd.read().replace('\n', '') == '123'

    # Fetching an entry onto its own source file must fail.
    with pytest.raises(shutil.SameFileError):
        pkg.set('foo', DATA_DIR / 'foo.txt')['foo'].fetch(DATA_DIR / 'foo.txt')

    # The returned entry is re-rooted at the fetch destination.
    pkg = quilt3.Package().set('foo', DATA_DIR / 'foo.txt')
    fetched_entry = pkg['foo'].fetch('bar.txt')
    assert fetched_entry.physical_keys[0] == pathlib.Path("bar.txt").resolve().as_uri()
def test_local_package_delete(self):
    """Verify local package delete works."""
    # Build a throwaway package; its top hash is not needed here, so the
    # previously-unused `top_hash` local has been dropped.
    Package().build("Quilt/Test")
    assert 'Quilt/Test' in quilt3.list_packages()

    quilt3.delete_package('Quilt/Test')
    assert 'Quilt/Test' not in quilt3.list_packages()
def test_package_entry_meta(self):
    """User metadata is stored under _meta['user_meta'] and surfaced via .meta."""
    pkg = (
        Package()
        .set('foo', DATA_DIR / 'foo.txt', {'value': 'blah'})
        .set('bar', DATA_DIR / 'foo.txt', {'value': 'blah2'})
    )
    pkg['foo']._meta['target'] = 'unicode'
    pkg['bar']._meta['target'] = 'unicode'

    # .meta exposes only the user-supplied metadata.
    assert pkg['foo'].meta == {'value': 'blah'}
    assert pkg['bar'].meta == {'value': 'blah2'}

    # The raw _meta dict carries both the target and the user metadata.
    assert pkg['foo']._meta == {'target': 'unicode', 'user_meta': {'value': 'blah'}}
    assert pkg['bar']._meta == {'target': 'unicode', 'user_meta': {'value': 'blah2'}}

    # set_meta() replaces the user metadata but leaves 'target' untouched.
    pkg['foo'].set_meta({'value': 'other value'})
    assert pkg['foo'].meta == {'value': 'other value'}
    assert pkg['foo']._meta == {'target': 'unicode', 'user_meta': {'value': 'other value'}}
def test_fetch_default_dest(tmpdir):
    """Verify fetching a package entry to a default destination."""
    with patch('quilt3.packages.copy_file') as copy_mock:
        foo_path = os.path.join(os.path.dirname(__file__), 'data', 'foo.txt')
        # fetch() with no argument must still copy from the entry's source.
        Package().set('foo', foo_path)['foo'].fetch()
        copy_mock.assert_called_once_with(fix_url(foo_path), ANY)
def test_list_local_packages(self):
    """Verify that list returns packages in the appdirs directory."""
    # Build a new package into the local registry.
    Package().build("Quilt/Foo")
    Package().build("Quilt/Bar")
    Package().build("Quilt/Test")

    # Verify packages are returned.
    pkgs = list(quilt3.list_packages())
    assert len(pkgs) == 3
    assert "Quilt/Foo" in pkgs
    assert "Quilt/Bar" in pkgs
    # Previously unchecked: the third built package must be listed too.
    assert "Quilt/Test" in pkgs

    # Verify specifying a local path explicitly works as expected.
    # (pkgs is already a list; no need to re-wrap it.)
    assert pkgs == list(quilt3.list_packages(LOCAL_REGISTRY.as_posix()))
def test_validate_pkg_entries(self, get_pkg_entries_for_validation_mock):
    """A passing entries validator receives the package under validation."""
    pkg = Package()
    validator = self.get_workflow_validator(
        entries_validator=self.JSON_SCHEMA_VALIDATOR_CLS(True))
    validator.validate_entries(pkg)
    get_pkg_entries_for_validation_mock.assert_called_once_with(pkg)
def test_push_restrictions(self):
    """push() rejects invalid registry/dest combinations with QuiltException."""
    p = Package()

    # disallow pushing anywhere other than the top level of a remote S3 registry
    with pytest.raises(QuiltException):
        p.push('Quilt/Test', 's3://test-bucket/foo/bar')

    # disallow pushing to the local filesystem (use install instead)
    with pytest.raises(QuiltException):
        p.push('Quilt/Test', './')

    # disallow pushing the package manifest to remote but package data to local
    with pytest.raises(QuiltException):
        p.push('Quilt/Test', 's3://test-bucket', dest='./')

    # disallow pushing the package manifest to remote but package data to a different remote
    with pytest.raises(QuiltException):
        p.push('Quilt/Test', 's3://test-bucket', dest='s3://other-test-bucket')
def test_top_hash_stable(self):
    """Ensure that top_hash() never changes for a given manifest"""
    registry = DATA_DIR.as_posix()
    top_hash = '20de5433549a4db332a11d8d64b934a82bdea8f144b4aecd901e7d4134f8e733'

    pkg = Package.browse('foo/bar', registry=registry, top_hash=top_hash)

    # Failure message fixed: it used to point at a nonexistent
    # '<registry>/packages/.quilt/packages/<hash>' path; manifests live at
    # '<registry>/.quilt/packages/<hash>'.
    assert pkg.top_hash == top_hash, \
        "Unexpected top_hash for {}/.quilt/packages/{}".format(registry, top_hash)
def test_set_package_entry(self):
    """ Set the physical key for a PackageEntry"""
    pkg = (
        Package()
        .set('foo', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
        .set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
    )
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    # Build a dummy file to add to the map.
    with open('bar.txt', "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)

    # Re-point the existing 'bar' entry at the new file.
    pkg['bar'].set('bar.txt')
    assert pkg['bar'].physical_keys[0] == test_file.resolve().as_uri()

    # Test shortcut codepath: set() with only a path derives the logical key.
    pkg = Package().set('bar.txt')
    assert pkg['bar.txt'].physical_keys[0] == test_file.resolve().as_uri()
def test_validate_pkg_entries_fail(self, get_pkg_entries_for_validation_mock):
    """A failing entries validator raises WorkflowValidationError."""
    pkg = Package()
    validator = self.get_workflow_validator(
        entries_validator=self.JSON_SCHEMA_VALIDATOR_CLS(False))
    with pytest.raises(workflows.WorkflowValidationError):
        validator.validate_entries(pkg)
    # Entries were still gathered before validation failed.
    get_pkg_entries_for_validation_mock.assert_called_once_with(pkg)
def test_local_repr(self):
    """repr() of a local package renders one tree line per key, with
    directories suffixed '/'."""
    TEST_REPR = ("(local Package)\n"
                 " └─asdf\n"
                 " └─path1/\n"
                 " └─asdf\n"
                 " └─qwer\n"
                 " └─path2/\n"
                 " └─first/\n"
                 " └─asdf\n"
                 " └─second/\n"
                 " └─asdf\n"
                 " └─qwer\n")
    pkg = Package()
    # Entries set in mixed nesting; the expected repr interleaves directory
    # lines with the entries they contain.
    pkg.set('asdf', LOCAL_MANIFEST)
    pkg.set('qwer', LOCAL_MANIFEST)
    pkg.set('path1/asdf', LOCAL_MANIFEST)
    pkg.set('path1/qwer', LOCAL_MANIFEST)
    pkg.set('path2/first/asdf', LOCAL_MANIFEST)
    pkg.set('path2/second/asdf', LOCAL_MANIFEST)
    assert repr(pkg) == TEST_REPR
def test_browse_package_from_registry(self):
    """ Verify loading manifest locally and from s3 """
    with patch('quilt3.Package._from_path') as pkgmock:
        registry = LOCAL_REGISTRY.resolve().as_uri()
        pkg = Package()
        pkgmock.return_value = pkg
        top_hash = pkg.top_hash

        # With an explicit top hash, browse() loads the manifest path directly.
        pkg = Package.browse('Quilt/nice-name', top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Without a top hash, browse() first resolves the 'latest' pointer.
        with patch('quilt3.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name')
            assert registry + '/.quilt/named_packages/Quilt/nice-name/latest' \
                == dl_mock.call_args_list[0][0][0]

        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        remote_registry = 's3://asdf/foo'
        # remote load
        pkg = Package.browse('Quilt/nice-name',
                             registry=remote_registry,
                             top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Same call with keyword order swapped must behave identically.
        pkg = Package.browse('Quilt/nice-name',
                             top_hash=top_hash,
                             registry=remote_registry)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Remote registry with pointer resolution (no top hash supplied).
        with patch('quilt3.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry=remote_registry)
            assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
                in [x[0][0] for x in pkgmock.call_args_list]

        # registry failure case: a directory with no registry data in it.
        with patch('quilt3.packages.get_from_config',
                   return_value=fix_url(os.path.dirname(__file__))):
            with pytest.raises(FileNotFoundError):
                Package.browse('Quilt/nice-name')
def test_remote_install(self):
    """Verify that installing from a local package works as expected."""
    # Point both default registries at the current directory so install()
    # resolves everything through the local filesystem.
    remote_registry = Path('.').resolve().as_uri()
    quilt3.config(default_local_registry=remote_registry,
                  default_remote_registry=remote_registry)

    # NOTE(review): push_mock is never asserted; presumably the patch only
    # guards against an accidental real push during build — confirm.
    with patch('quilt3.Package.push') as push_mock:
        pkg = Package()
        pkg.build('Quilt/nice-name')

        with patch('quilt3.Package._materialize') as materialize_mock, \
                patch('quilt3.Package.build') as build_mock:
            materialize_mock.return_value = pkg
            dest_registry = quilt3.util.get_from_config('default_local_registry')
            quilt3.Package.install('Quilt/nice-name', dest='./')

            # install() materializes into dest, then registers the result
            # into the default local registry with no message.
            materialize_mock.assert_called_once_with(fix_url('./'))
            build_mock.assert_called_once_with('Quilt/nice-name',
                                               message=None,
                                               registry=dest_registry)
def test_s3_set_dir(self):
    """ Verify building a package from an S3 directory. """
    with patch('quilt3.packages.list_object_versions') as list_object_versions_mock:
        pkg = Package()

        list_object_versions_mock.return_value = (
            [
                dict(Key='foo/a.txt', VersionId='xyz', IsLatest=True, Size=10),
                dict(Key='foo/x/y.txt', VersionId='null', IsLatest=True, Size=10),
                dict(Key='foo/z.txt', VersionId='123', IsLatest=False, Size=10),
            ],
            [],
        )

        # Set the package root from the prefix (trailing-slash form).
        pkg.set_dir('', 's3://bucket/foo/', meta='test_meta')

        assert pkg['a.txt'].physical_keys[0] == 's3://bucket/foo/a.txt?versionId=xyz'
        # A 'null' VersionId yields a physical key with no version query string.
        assert pkg['x']['y.txt'].physical_keys[0] == 's3://bucket/foo/x/y.txt'
        assert pkg.meta == "test_meta"
        assert pkg['x']['y.txt'].size == 10  # GH368
        list_object_versions_mock.assert_called_with('bucket', 'foo/')

        list_object_versions_mock.reset_mock()

        # Same prefix without the trailing slash, placed under 'bar'.
        pkg.set_dir('bar', 's3://bucket/foo')

        assert pkg['bar']['a.txt'].physical_keys[0] == 's3://bucket/foo/a.txt?versionId=xyz'
        assert pkg['bar']['x']['y.txt'].physical_keys[0] == 's3://bucket/foo/x/y.txt'
        assert pkg['bar']['a.txt'].size == 10  # GH368
        list_object_versions_mock.assert_called_with('bucket', 'foo/')
def test_commit_message_on_push(self):
    """ Verify commit messages populate correctly on push."""
    # Stub the raw S3 API and intercept materialize/_build so push() runs
    # without any network access.
    with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call), \
            patch('quilt3.Package._materialize') as materialize_mock, \
            patch('quilt3.Package._build') as build_mock:
        with open(REMOTE_MANIFEST) as fd:
            pkg = Package.load(fd)

        materialize_mock.return_value = pkg
        pkg.push('Quilt/test_pkg_name', 's3://test-bucket', message='test_message')
        # The message must be forwarded verbatim to the registry build step.
        build_mock.assert_called_once_with(
            'Quilt/test_pkg_name',
            registry='s3://test-bucket',
            message='test_message'
        )
def download_test_resources(args: Args):
    """Download the aicsimageio test-resource package into
    napari_aicsimageio/tests/resources.

    Exits the process with status 1 on any failure.
    """
    # Try running the download pipeline
    try:
        # Get test resources dir
        resources_dir = (
            Path(__file__).parent.parent / "napari_aicsimageio" / "tests" / "resources"
        ).resolve()
        resources_dir.mkdir(exist_ok=True)

        # Use or read top hash
        if args.top_hash is None:
            with open(Path(__file__).parent / "TEST_RESOURCES_HASH.txt", "r") as f:
                top_hash = f.readline().rstrip()
        else:
            top_hash = args.top_hash

        log.info(f"Downloading test resources using top hash: {top_hash}")

        # Get quilt package
        Package.install(
            "aicsimageio/test_resources",
            "s3://aics-modeling-packages-test-resources",
            dest=resources_dir,
            top_hash=top_hash,
            path="resources",
        )

        # Plain string — no placeholders (was a needless f-string).
        log.info("Completed package download.")

    # Catch any exception
    except Exception as e:
        log.error("=============================================")
        if args.debug:
            log.error("\n\n" + traceback.format_exc())
            log.error("=============================================")
        log.error("\n\n" + str(e) + "\n")
        log.error("=============================================")
        sys.exit(1)
def test_list_local_packages(self):
    """Verify that list returns packages in the appdirs directory."""
    # Build a new package into the local registry, with time frozen so the
    # timestamped version below is deterministic.
    with patch('time.time', return_value=1234567890):
        Package().build("Quilt/Foo")
        Package().build("Quilt/Bar")
        Package().build("Quilt/Test")

    # Verify packages are returned.
    pkgs = list(quilt3.list_packages())
    assert len(pkgs) == 3
    assert "Quilt/Foo" in pkgs
    assert "Quilt/Bar" in pkgs
    # Previously unchecked: the third built package must be listed too.
    assert "Quilt/Test" in pkgs

    versions = set(quilt3.list_package_versions('Quilt/Foo'))
    assert versions == {
        ('latest', '2a5a67156ca9238c14d12042db51c5b52260fdd5511b61ea89b58929d6e1769b'),
        ('1234567890', '2a5a67156ca9238c14d12042db51c5b52260fdd5511b61ea89b58929d6e1769b'),
    }

    # Verify specifying a local path explicitly works as expected.
    # (pkgs is already a list; no need to re-wrap it.)
    assert pkgs == list(quilt3.list_packages(LOCAL_REGISTRY.as_posix()))
def test_map(self):
    """map() visits every entry, and directory nodes when requested."""
    pkg = Package()
    pkg.set('as/df', LOCAL_MANIFEST)
    pkg.set('as/qw', LOCAL_MANIFEST)
    assert set(pkg.map(lambda lk, entry: lk)) == {'as/df', 'as/qw'}

    pkg['as'].set_meta({'foo': 'bar'})
    # With include_directories=True the directory key 'as/' is visited too.
    mapped_keys = pkg.map(lambda lk, entry: lk, include_directories=True)
    assert set(mapped_keys) == {'as/df', 'as/qw', 'as/'}
def test_iter(self):
    """Iterating a package yields its top-level logical keys."""
    pkg = Package()
    # An empty package is falsy.
    assert not pkg

    pkg.set('asdf', LOCAL_MANIFEST)
    assert list(pkg) == ['asdf']

    pkg.set('jkl;', REMOTE_MANIFEST)
    assert set(pkg) == {'asdf', 'jkl;'}