def test_build(tmpdir):
    """Verify that build dumps the manifest to appdirs directory."""
    new_pkg = Package()
    # Create a dummy file to add to the package.
    test_file_name = 'bar'
    with open(test_file_name, "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)

    # Build a new package into the local registry.
    new_pkg = new_pkg.set('foo', test_file_name)
    top_hash = new_pkg.build("Quilt/Test")

    # Verify manifest is registered by hash.
    out_path = Path(BASE_PATH, ".quilt/packages", top_hash)
    with open(out_path) as fd:
        pkg = Package.load(fd)
    assert test_file.resolve().as_uri() == pkg['foo'].physical_keys[0]

    # Verify latest points to the new location.
    named_pointer_path = Path(BASE_PATH, ".quilt/named_packages/Quilt/Test/latest")
    with open(named_pointer_path) as fd:
        assert fd.read().replace('\n', '') == top_hash

    # Test unnamed packages.
    new_pkg = Package()
    new_pkg = new_pkg.set('bar', test_file_name)
    top_hash = new_pkg.build()
    out_path = Path(BASE_PATH, ".quilt/packages", top_hash)
    with open(out_path) as fd:
        pkg = Package.load(fd)
    assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]
def test_list_local_packages(tmpdir):
    """Verify that list returns packages in the appdirs directory."""
    temp_local_registry = Path(os.path.join(tmpdir, 'test_registry')).as_uri()
    # Point both modules' registry lookups at a throwaway directory.
    with patch('t4.packages.get_package_registry', lambda path: temp_local_registry), \
         patch('t4.api.get_package_registry', lambda path: temp_local_registry):
        # Build a new package into the local registry.
        Package().build("Quilt/Foo")
        Package().build("Quilt/Bar")
        Package().build("Quilt/Test")

        # Verify packages are returned.
        pkgs = t4.list_packages()
        assert len(pkgs) == 3
        assert "Quilt/Foo" in pkgs
        assert "Quilt/Bar" in pkgs

        # Test unnamed packages are not added.
        Package().build()
        pkgs = t4.list_packages()
        assert len(pkgs) == 3

        # Verify manifest is registered by hash when local path given
        pkgs = t4.list_packages("/")
        assert "Quilt/Foo" in pkgs
        assert "Quilt/Bar" in pkgs
def test_brackets(self):
    """Exercise dict-style access into a package tree (class-based variant)."""
    pkg = Package()
    pkg.set('asdf/jkl', LOCAL_MANIFEST)
    pkg.set('asdf/qwer', LOCAL_MANIFEST)
    pkg.set('qwer/asdf', LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'qwer'}

    pkg2 = pkg['asdf']
    assert set(pkg2.keys()) == {'jkl', 'qwer'}

    # Nested indexing, slash paths, and tuple keys are all equivalent;
    # the empty list indexes the package itself.
    assert pkg['asdf']['qwer'].get() == LOCAL_MANIFEST.as_uri()
    assert pkg['asdf']['qwer'] == pkg['asdf/qwer'] == pkg[('asdf', 'qwer')]
    assert pkg[[]] == pkg

    pkg = (Package().set('foo', DATA_DIR / 'foo.txt', {'foo': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo'].deserialize() == '123\n'
    assert pkg['foo']() == '123\n'

    # Missing keys raise KeyError; non-str/tuple/list keys raise TypeError.
    with pytest.raises(KeyError):
        pkg['baz']
    with pytest.raises(TypeError):
        pkg[b'asdf']
    with pytest.raises(TypeError):
        pkg[0]
def test_brackets():
    """Dict-style access into a package tree: nesting, path strings, tuples."""
    pkg = Package()
    for logical_key in ('asdf/jkl', 'asdf/qwer', 'qwer/asdf'):
        pkg.set(logical_key, LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'qwer'}

    subtree = pkg['asdf']
    assert set(subtree.keys()) == {'jkl', 'qwer'}

    # Nested indexing, slash paths, and tuple keys are all equivalent.
    assert pkg['asdf']['qwer'].get() == pathlib.Path(LOCAL_MANIFEST).as_uri()
    assert pkg['asdf']['qwer'] == pkg['asdf/qwer'] == pkg[('asdf', 'qwer')]
    assert pkg[[]] == pkg

    foo_path = os.path.join(os.path.dirname(__file__), 'data', 'foo.txt')
    pkg = Package().set('foo', foo_path, {'foo': 'blah'})
    pkg['foo'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo'].deserialize() == '123\n'
    assert pkg['foo']() == '123\n'

    # Missing keys raise KeyError; unsupported key types raise TypeError.
    with pytest.raises(KeyError):
        pkg['baz']
    with pytest.raises(TypeError):
        pkg[b'asdf']
    with pytest.raises(TypeError):
        pkg[0]
def test_default_registry(self): new_pkg = Package() # Create a dummy file to add to the package. test_file_name = 'bar' with open(test_file_name, "w") as fd: fd.write('test_file_content_string') test_file = Path(fd.name) # Build a new package into the local registry. new_pkg = new_pkg.set('foo', test_file_name) top_hash = new_pkg.build("Quilt/Test").top_hash # Verify manifest is registered by hash. out_path = Path(BASE_PATH, ".quilt/packages", top_hash) with open(out_path) as fd: pkg = Package.load(fd) assert test_file.resolve().as_uri() == pkg['foo'].physical_keys[0] # Verify latest points to the new location. named_pointer_path = Path(BASE_PATH, ".quilt/named_packages/Quilt/Test/latest") with open(named_pointer_path) as fd: assert fd.read().replace('\n', '') == top_hash # Test unnamed packages. new_pkg = Package() new_pkg = new_pkg.set('bar', test_file_name) top_hash = new_pkg.build().top_hash out_path = Path(BASE_PATH, ".quilt/packages", top_hash) with open(out_path) as fd: pkg = Package.load(fd) assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0] new_base_path = Path(BASE_PATH, ".quilttest") with patch('t4.packages.get_from_config') as mock_config: mock_config.return_value = new_base_path top_hash = new_pkg.build("Quilt/Test").top_hash out_path = Path(new_base_path, ".quilt/packages", top_hash).resolve() with open(out_path) as fd: pkg = Package.load(fd) assert test_file.resolve().as_uri( ) == pkg['bar'].physical_keys[0] with patch('t4.packages.get_from_config') as mock_config: mock_config.return_value = new_base_path new_pkg.push("Quilt/Test") with open(out_path) as fd: pkg = Package.load(fd) assert pkg['bar'].physical_keys[0].endswith( '.quilttest/Quilt/Test/bar')
def test_remote_repr(self):
    """A package containing an S3 entry renders with the remote header line."""
    # Stub out remote metadata lookup so no network call is made.
    with patch('t4.packages.get_size_and_meta', return_value=(0, dict(), '0')):
        TEST_REPR = ("(remote Package)\n"
                     " └─asdf\n")
        pkg = Package()
        pkg.set('asdf', 's3://my-bucket/asdf')
        assert repr(pkg) == TEST_REPR

        # A mix of remote and local entries still renders as remote.
        TEST_REPR = ("(remote Package)\n"
                     " └─asdf\n"
                     " └─qwer\n")
        pkg = Package()
        pkg.set('asdf', 's3://my-bucket/asdf')
        pkg.set('qwer', LOCAL_MANIFEST)
        assert repr(pkg) == TEST_REPR
def test_long_repr():
    """repr() output is capped at 20 lines and ends with an ellipsis marker."""
    pkg = Package()
    for idx in range(30):
        pkg.set('path{}/asdf'.format(idx), LOCAL_MANIFEST)
    rendered = repr(pkg)
    assert rendered.count('\n') == 20
    assert rendered.endswith('...\n')

    # Exactly 20 entries: still 20 lines, and the last prefix is visible.
    pkg = Package()
    for idx in range(10):
        pkg.set('path{}/asdf'.format(idx), LOCAL_MANIFEST)
        pkg.set('path{}/qwer'.format(idx), LOCAL_MANIFEST)
    rendered = repr(pkg)
    assert rendered.count('\n') == 20
    assert rendered.find('path9/') > 0
def test_load_into_t4(tmpdir):
    """ Verify loading local manifest and data into S3. """
    with patch('t4.packages.put_bytes') as bytes_mock, \
         patch('t4.data_transfer._upload_file') as file_mock, \
         patch('t4.packages.get_remote_registry') as config_mock:
        config_mock.return_value = 's3://my_test_bucket'
        new_pkg = Package()
        # Create a dummy file to add to the package.
        contents = 'blah'
        test_file = pathlib.Path(tmpdir) / 'bar'
        test_file.write_text(contents)
        new_pkg = new_pkg.set('foo', test_file)
        new_pkg.push('Quilt/package', 's3://my_test_bucket/')

        # Manifest copied: both the 'latest' pointer and the by-hash manifest.
        top_hash = new_pkg.top_hash()
        bytes_mock.assert_any_call(
            top_hash.encode(),
            's3://my_test_bucket/.quilt/named_packages/Quilt/package/latest')
        bytes_mock.assert_any_call(
            ANY, 's3://my_test_bucket/.quilt/packages/' + top_hash)

        # Data copied
        file_mock.assert_called_once_with(ANY, len(contents), str(test_file),
                                          'my_test_bucket', 'Quilt/package/foo', {})
def test_package_entry_meta(self):
    """Entry .meta exposes user metadata; ._meta holds the full internal record."""
    pkg = (Package()
           .set('foo', DATA_DIR / 'foo.txt', {'value': 'blah'})
           .set('bar', DATA_DIR / 'foo.txt', {'value': 'blah2'}))
    pkg['foo']._meta['target'] = 'unicode'
    pkg['bar']._meta['target'] = 'unicode'
    assert pkg['foo'].meta == {'value': 'blah'}
    assert pkg['bar'].meta == {'value': 'blah2'}
    # _meta nests the user metadata under 'user_meta' alongside system keys.
    assert pkg['foo']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah'}
    }
    assert pkg['bar']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah2'}
    }
    # set_meta replaces only the user metadata, leaving system keys intact.
    pkg['foo'].set_meta({'value': 'other value'})
    assert pkg['foo'].meta == {'value': 'other value'}
    assert pkg['foo']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'other value'}
    }
def test_fetch(self):
    """ Verify fetching a package entry. """
    pkg = (Package()
           .set('foo', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
           .set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    with open(DATA_DIR / 'foo.txt') as fd:
        assert fd.read().replace('\n', '') == '123'
    # Copy foo.text to bar.txt
    pkg['foo'].fetch('data/bar.txt')
    with open('data/bar.txt') as fd:
        assert fd.read().replace('\n', '') == '123'

    # Raise an error if you copy to yourself.
    with pytest.raises(shutil.SameFileError):
        pkg.set('foo', DATA_DIR / 'foo.txt')['foo'].fetch(DATA_DIR / 'foo.txt')

    # The key gets re-rooted correctly.
    pkg = t4.Package().set('foo', DATA_DIR / 'foo.txt')
    new_pkg_entry = pkg['foo'].fetch('bar.txt')
    out_abs_path = f'file://{pathlib.Path(".").absolute().as_posix()}/bar.txt'
    assert new_pkg_entry.physical_keys[0] == out_abs_path
def test_package_fetch(self):
    """ Package.fetch() on nested, relative keys """
    package_ = Package().set_dir('/', DATA_DIR / 'nested')
    out_dir = 'output'
    new_package_ = package_.fetch(out_dir)

    # Every expected file must appear exactly once with the right contents.
    expected = {'one.txt': '1', 'two.txt': '2', 'three.txt': '3'}
    file_count = 0
    for dirpath, _, files in os.walk(out_dir):
        for name in files:
            file_count += 1
            with open(os.path.join(dirpath, name)) as file_:
                assert name in expected, 'unexpected file: {}'.format(file_)
                contents = file_.read().strip()
                assert contents == expected[name], \
                    'unexpected contents in {}: {}'.format(name, contents)
    assert file_count == len(expected), \
        'fetch wrote {} files; expected: {}'.format(file_count, expected)

    # test that package re-rooting works as expected
    out_dir_abs_path = f'file://{pathlib.Path(out_dir).absolute().as_posix()}'
    assert all(entry.physical_keys[0].startswith(out_dir_abs_path)
               for _, entry in new_package_.walk())
def test_local_push(tmpdir):
    """ Verify loading local manifest and data into S3. """
    with patch('t4.packages.put_bytes') as bytes_mock, \
         patch('t4.data_transfer._copy_local_file') as file_mock, \
         patch('t4.packages.get_remote_registry') as config_mock:
        config_mock.return_value = tmpdir / 'package_contents'
        new_pkg = Package()
        contents = 'blah'
        test_file = pathlib.Path(tmpdir) / 'bar'
        test_file.write_text(contents)
        new_pkg = new_pkg.set('foo', test_file)
        new_pkg.push('Quilt/package', tmpdir / 'package_contents')
        push_uri = pathlib.Path(tmpdir, 'package_contents').as_uri()

        # Manifest copied: the 'latest' pointer and the by-hash manifest.
        top_hash = new_pkg.top_hash()
        bytes_mock.assert_any_call(
            top_hash.encode(),
            push_uri + '/.quilt/named_packages/Quilt/package/latest')
        bytes_mock.assert_any_call(ANY, push_uri + '/.quilt/packages/' + top_hash)

        # Data copied
        file_mock.assert_called_once_with(
            ANY, len(contents), str(test_file),
            str(tmpdir / 'package_contents/Quilt/package/foo'), {})
def test_s3_set_dir(tmpdir):
    """ Verify building a package from an S3 directory. """
    with patch('t4.packages.list_object_versions') as list_object_versions_mock:
        pkg = Package()

        list_object_versions_mock.return_value = ([
            dict(Key='foo/a.txt', VersionId='xyz', IsLatest=True),
            dict(Key='foo/x/y.txt', VersionId='null', IsLatest=True),
            dict(Key='foo/z.txt', VersionId='123', IsLatest=False),
        ], [])

        pkg.set_dir('', 's3://bucket/foo/')

        # Real version ids become a versionId query param; 'null' adds none.
        assert pkg['a.txt'].physical_keys[
            0] == 's3://bucket/foo/a.txt?versionId=xyz'
        assert pkg['x']['y.txt'].physical_keys[0] == 's3://bucket/foo/x/y.txt'
        list_object_versions_mock.assert_called_with('bucket', 'foo/')

        list_object_versions_mock.reset_mock()

        # Source path without a trailing slash should behave identically.
        pkg.set_dir('bar', 's3://bucket/foo')

        assert pkg['bar']['a.txt'].physical_keys[
            0] == 's3://bucket/foo/a.txt?versionId=xyz'
        assert pkg['bar']['x']['y.txt'].physical_keys[
            0] == 's3://bucket/foo/x/y.txt'
        list_object_versions_mock.assert_called_with('bucket', 'foo/')
def test_invalid_key(self):
    """Package.set rejects empty keys, directory-like values, and dot components."""
    pkg = Package()
    with pytest.raises(QuiltException):
        pkg.set('', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('foo/', LOCAL_MANIFEST)
    # Directories are not valid entry values.
    with pytest.raises(QuiltException):
        pkg.set('foo', './')
    with pytest.raises(QuiltException):
        pkg.set('foo', os.path.dirname(__file__))

    # we do not allow '.' or '..' files or filename separators
    with pytest.raises(QuiltException):
        pkg.set('.', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('..', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('./foo', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('../foo', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('foo/.', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('foo/..', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('foo/./bar', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('foo/../bar', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('s3://foo/.', LOCAL_MANIFEST)
    with pytest.raises(QuiltException):
        pkg.set('s3://foo/..', LOCAL_MANIFEST)
def test_remote_package_delete(tmpdir):
    """Verify remote package delete removes the manifest and all named pointers."""
    def list_packages_mock(*args, **kwargs):
        return ['Quilt/Test']

    def _tophashes_with_packages_mock(*args, **kwargs):
        # Only 'Quilt/Test' references tophash '101'.
        return {'101': {'Quilt/Test'}}

    def list_objects_mock(*args):
        return [
            {'Key': '.quilt/named_packages/Quilt/Test/0'},
            {'Key': '.quilt/named_packages/Quilt/Test/latest'},
        ]

    def get_bytes_mock(*args):
        return b'101', None

    with patch('t4.Package.push', new=no_op_mock), \
         patch('t4.api.list_packages', new=list_packages_mock), \
         patch('t4.api._tophashes_with_packages', new=_tophashes_with_packages_mock), \
         patch('t4.api.list_objects', new=list_objects_mock), \
         patch('t4.api.get_bytes', new=get_bytes_mock), \
         patch('t4.api.delete_object') as delete_mock:
        # Push is mocked to a no-op; its return value was unused, so don't bind it.
        Package().push('Quilt/Test', 's3://test-bucket')
        t4.delete_package('Quilt/Test', registry='s3://test-bucket')

        # Manifest and both named-package pointers must be deleted.
        delete_mock.assert_any_call('test-bucket', '.quilt/packages/101')
        delete_mock.assert_any_call('test-bucket',
                                    '.quilt/named_packages/Quilt/Test/0')
        delete_mock.assert_any_call('test-bucket',
                                    '.quilt/named_packages/Quilt/Test/latest')
def test_package_entry_meta():
    """get_user_meta() exposes user metadata; .meta holds the full record."""
    pkg = (Package()
           .set('foo', os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
                {'value': 'blah'})
           .set('bar', os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
                {'value': 'blah2'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'
    assert pkg['foo'].get_user_meta() == {'value': 'blah'}
    assert pkg['bar'].get_user_meta() == {'value': 'blah2'}
    # .meta nests the user metadata under 'user_meta' alongside system keys.
    assert pkg['foo'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah'}
    }
    assert pkg['bar'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah2'}
    }
    # set_user_meta replaces only the user metadata portion.
    pkg['foo'].set_user_meta({'value': 'other value'})
    assert pkg['foo'].get_user_meta() == {'value': 'other value'}
    assert pkg['foo'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'other value'}
    }
def test_dir_meta(tmpdir):
    """Directory-level metadata survives set/get and a dump/load round trip."""
    test_meta = {'test': 'meta'}
    pkg = Package()
    pkg.set('asdf/jkl', LOCAL_MANIFEST)
    pkg.set('asdf/qwer', LOCAL_MANIFEST)
    pkg.set('qwer/asdf', LOCAL_MANIFEST)
    pkg.set('qwer/as/df', LOCAL_MANIFEST)
    pkg.build()
    # Metadata defaults to empty at every tree level.
    assert pkg['asdf'].get_meta() == {}
    assert pkg.get_meta() == {}
    assert pkg['qwer']['as'].get_meta() == {}

    pkg['asdf'].set_meta(test_meta)
    assert pkg['asdf'].get_meta() == test_meta
    pkg['qwer']['as'].set_meta(test_meta)
    assert pkg['qwer']['as'].get_meta() == test_meta
    pkg.set_meta(test_meta)
    assert pkg.get_meta() == test_meta

    # Dump to a manifest file and reload; metadata must round-trip.
    dump_path = os.path.join(tmpdir, 'test_meta')
    with open(dump_path, 'w') as f:
        pkg.dump(f)
    with open(dump_path) as f:
        pkg2 = Package.load(f)
    assert pkg2['asdf'].get_meta() == test_meta
    assert pkg2['qwer']['as'].get_meta() == test_meta
    assert pkg2.get_meta() == test_meta
def test_updates(tmpdir):
    """ Verify building a package from a directory. """
    pkg = (Package()
           .set('foo', os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
                {'foo_meta': 'blah'})
           .set('bar', os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
                {'bar_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo']() == '123\n'
    assert pkg['bar']() == '123\n'

    # Build a dummy file to add to the map.
    with open('bar.txt', "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)
    # update() replaces the existing 'bar' entry; 'foo' is untouched.
    pkg = pkg.update({'bar': 'bar.txt'})
    assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]
    assert pkg['foo']() == '123\n'

    # Build a dummy file to add to the map with a prefix.
    with open('baz.txt', "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)
    pkg = pkg.update({'baz': 'baz.txt'}, prefix='prefix/')
    assert test_file.resolve().as_uri() == pkg['prefix/baz'].physical_keys[0]
    assert pkg['foo']() == '123\n'
def test_load_into_t4(self):
    """ Verify loading local manifest and data into S3. """
    top_hash = '5333a204bbc6e21607c2bc842f4a77d2e21aa6147cf2bf493dbf6282188d01ca'
    # Expected S3 writes, in order: the data object, the manifest by hash,
    # the timestamped named-package pointer, and the 'latest' pointer.
    self.s3_stubber.add_response(method='put_object',
                                 service_response={'VersionId': 'v1'},
                                 expected_params={
                                     'Body': ANY,
                                     'Bucket': 'my_test_bucket',
                                     'Key': 'Quilt/package/foo',
                                     'Metadata': {'helium': '{}'}
                                 })
    self.s3_stubber.add_response(method='put_object',
                                 service_response={'VersionId': 'v2'},
                                 expected_params={
                                     'Body': ANY,
                                     'Bucket': 'my_test_bucket',
                                     'Key': '.quilt/packages/' + top_hash,
                                     'Metadata': {'helium': 'null'}
                                 })
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v3'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/1234567890',
            'Metadata': {'helium': 'null'}
        })
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v4'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/latest',
            'Metadata': {'helium': 'null'}
        })

    new_pkg = Package()
    # Create a dummy file to add to the package.
    contents = 'blah'
    test_file = Path('bar')
    test_file.write_text(contents)
    new_pkg = new_pkg.set('foo', test_file)

    # Freeze time so the timestamped pointer key is deterministic.
    with patch('time.time', return_value=1234567890):
        new_pkg.push('Quilt/package', 's3://my_test_bucket/')
def test_map():
    """Package.map visits leaf entries, and directories when requested."""
    pkg = Package()
    pkg.set('as/df', LOCAL_MANIFEST)
    pkg.set('as/qw', LOCAL_MANIFEST)
    visited = pkg.map(lambda lk, entry: lk)
    assert set(visited) == {'as/df', 'as/qw'}

    # With include_directories=True the 'as/' directory node is visited too.
    pkg['as'].set_meta({'foo': 'bar'})
    visited = pkg.map(lambda lk, entry: lk, include_directories=True)
    assert set(visited) == {'as/df', 'as/qw', 'as/'}
def test_manifest():
    """A package browsed by its top hash yields the same manifest as the built one."""
    pkg = Package()
    pkg.set('as/df', LOCAL_MANIFEST)
    pkg.set('as/qw', LOCAL_MANIFEST)
    top_hash = pkg.build()

    # The unused intermediate `manifest` local was dropped; the assertion
    # re-lists pkg.manifest directly for the comparison.
    pkg2 = Package.browse(pkg_hash=top_hash)
    assert list(pkg.manifest) == list(pkg2.manifest)
def test_local_package_delete(tmpdir):
    """Verify local package delete works."""
    top_hash = Package().build("Quilt/Test")
    t4.delete_package('Quilt/Test', registry=BASE_PATH)

    # Both the named package and its manifest must be gone.
    assert 'Quilt/Test' not in t4.list_packages()
    remaining = {entry.name for entry in Path(BASE_PATH, '.quilt/packages').iterdir()}
    assert top_hash not in remaining
def test_iter():
    """Iterating a package yields its top-level keys; empty packages are falsy."""
    pkg = Package()
    assert not pkg

    pkg.set('asdf', LOCAL_MANIFEST)
    assert list(pkg) == ['asdf']

    pkg.set('jkl;', REMOTE_MANIFEST)
    assert set(pkg) == {'asdf', 'jkl;'}
def test_browse_package_from_registry(self):
    """ Verify loading manifest locally and from s3 """
    with patch('t4.Package._from_path') as pkgmock:
        registry = BASE_PATH.as_uri()
        pkg = Package()
        pkgmock.return_value = pkg
        top_hash = pkg.top_hash

        # local registry load
        pkg = Package.browse(registry='local', top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse('Quilt/nice-name', registry='local', top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Without a hash, the 'latest' pointer is resolved first.
        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry='local')
            assert registry + '/.quilt/named_packages/Quilt/nice-name/latest' \
                == dl_mock.call_args_list[0][0][0]
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        remote_registry = 's3://asdf/foo'
        # remote load
        pkg = Package.browse('Quilt/nice-name', registry=remote_registry,
                             top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse(top_hash=top_hash, registry=remote_registry)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry=remote_registry)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        # default remote registry failure case
        with patch('t4.packages.get_from_config', return_value=None):
            with pytest.raises(QuiltException):
                Package.browse('Quilt/nice-name')
def test_manifest(self):
    """A package browsed by top hash yields the same manifest as the built one."""
    pkg = Package()
    pkg.set('as/df', LOCAL_MANIFEST)
    pkg.set('as/qw', LOCAL_MANIFEST)
    top_hash = pkg.build().top_hash

    # The unused intermediate `manifest` local was dropped; the assertion
    # re-lists pkg.manifest directly for the comparison.
    pkg2 = Package.browse(top_hash=top_hash, registry='local')
    assert list(pkg.manifest) == list(pkg2.manifest)
def test_fetch_default_dest(tmpdir):
    """Verify fetching a package entry to a default destination."""
    with patch('t4.packages.copy_file') as copy_mock:
        (Package()
         .set('foo',
              os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'))
         ['foo'].fetch())
        # The source must be the normalized file:// URL of the entry;
        # destination and metadata arguments are not pinned here.
        filepath = fix_url(
            os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'))
        copy_mock.assert_called_once_with(filepath, ANY, ANY)
def test_local_package_delete_overlapping(tmpdir):
    """
    Verify local package delete works when multiple packages
    reference the same tophash.
    """
    top_hash = Package().build("Quilt/Test1")
    top_hash = Package().build("Quilt/Test2")
    t4.delete_package('Quilt/Test1', registry=BASE_PATH)

    # The second package still references the tophash, so the manifest stays.
    assert 'Quilt/Test1' not in t4.list_packages()
    stored = [entry.name for entry in Path(BASE_PATH, '.quilt/packages').iterdir()]
    assert top_hash in stored

    t4.delete_package('Quilt/Test2', registry=BASE_PATH)

    # No references remain; the manifest must now be gone.
    assert 'Quilt/Test2' not in t4.list_packages()
    stored = [entry.name for entry in Path(BASE_PATH, '.quilt/packages').iterdir()]
    assert top_hash not in stored
def test_set_package_entry(self):
    """ Set the physical key for a PackageEntry"""
    pkg = (Package()
           .set('foo', DATA_DIR / 'foo.txt', {'user_meta': 'blah'})
           .set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    # Build a dummy file to add to the map.
    with open('bar.txt', "w") as fd:
        fd.write('test_file_content_string')
    test_file = Path(fd.name)
    # Repoint the existing 'bar' entry at the new file.
    pkg['bar'].set('bar.txt')
    assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]

    # Test shortcut codepath: set() with only a path derives the logical key.
    pkg = Package().set('bar.txt')
    assert test_file.resolve().as_uri() == pkg['bar.txt'].physical_keys[0]
def test_remote_package_delete_overlapping(tmpdir):
    """
    Verify remote package delete works when multiple packages
    reference the same tophash.
    """
    def list_packages_mock(*args, **kwargs):
        return ['Quilt/Test1', 'Quilt/Test2']

    def _tophashes_with_packages_mock(*args, **kwargs):
        # Both named packages share the single tophash '101'.
        return {'101': {'Quilt/Test1', 'Quilt/Test2'}}

    def list_objects_mock(*args):
        return [
            {'Key': '.quilt/named_packages/Quilt/Test1/0'},
            {'Key': '.quilt/named_packages/Quilt/Test1/latest'},
            {'Key': '.quilt/named_packages/Quilt/Test2/0'},
            {'Key': '.quilt/named_packages/Quilt/Test2/latest'},
        ]

    def get_bytes_mock(*args):
        return b'101', None

    with patch('t4.Package.push', new=no_op_mock), \
         patch('t4.api.list_packages', new=list_packages_mock), \
         patch('t4.api._tophashes_with_packages', new=_tophashes_with_packages_mock), \
         patch('t4.api.list_objects', new=list_objects_mock), \
         patch('t4.api.get_bytes', new=get_bytes_mock), \
         patch('t4.api.delete_object') as delete_mock:
        # Push is mocked to a no-op; its return values were unused, so don't bind them.
        Package().push('Quilt/Test1', 's3://test-bucket')
        Package().push('Quilt/Test2', 's3://test-bucket')
        t4.delete_package('Quilt/Test1', registry='s3://test-bucket')

        # the reference count for the tophash 101 is still one, so it should still exist
        assert call('test-bucket',
                    '.quilt/packages/101') not in delete_mock.call_args_list
        delete_mock.assert_any_call('test-bucket',
                                    '.quilt/named_packages/Quilt/Test1/0')
        delete_mock.assert_any_call(
            'test-bucket', '.quilt/named_packages/Quilt/Test1/latest')
def test_keys():
    """keys() reflects set() and delete() operations on the package."""
    pkg = Package()
    assert not pkg.keys()

    pkg.set('asdf', LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf'}

    pkg.set('jkl;', REMOTE_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'jkl;'}

    pkg.delete('asdf')
    assert set(pkg.keys()) == {'jkl;'}