def test_brackets(self):
    """Exercise Package key access: nested keys, path forms, and error cases."""
    pkg = Package()
    pkg.set('asdf/jkl', LOCAL_MANIFEST)
    pkg.set('asdf/qwer', LOCAL_MANIFEST)
    pkg.set('qwer/asdf', LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'qwer'}

    pkg2 = pkg['asdf']
    assert set(pkg2.keys()) == {'jkl', 'qwer'}

    # Chained brackets, a slash-delimited string, and a tuple path are equivalent.
    assert pkg['asdf']['qwer'].get() == LOCAL_MANIFEST.as_uri()
    assert pkg['asdf']['qwer'] == pkg['asdf/qwer'] == pkg[('asdf', 'qwer')]
    # An empty path returns the package itself.
    assert pkg[[]] == pkg

    pkg = (Package().set('foo', DATA_DIR / 'foo.txt', {'foo': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo'].deserialize() == '123\n'
    assert pkg['foo']() == '123\n'

    # Missing key -> KeyError; unsupported key types -> TypeError.
    with pytest.raises(KeyError):
        pkg['baz']
    with pytest.raises(TypeError):
        pkg[b'asdf']
    with pytest.raises(TypeError):
        pkg[0]
def test_package_fetch(self):
    """ Package.fetch() on nested, relative keys """
    package_ = Package().set_dir('/', DATA_DIR / 'nested')
    out_dir = 'output'
    new_package_ = package_.fetch(out_dir)

    expected = {'one.txt': '1', 'two.txt': '2', 'three.txt': '3'}
    file_count = 0
    for dirpath, _, files in os.walk(out_dir):
        for name in files:
            file_count += 1
            with open(os.path.join(dirpath, name)) as file_:
                # Fixed: interpolate the unexpected file's *name*; the original
                # formatted the file object, producing an unreadable message.
                assert name in expected, 'unexpected file: {}'.format(name)
                contents = file_.read().strip()
                assert contents == expected[name], \
                    'unexpected contents in {}: {}'.format(name, contents)
    assert file_count == len(expected), \
        'fetch wrote {} files; expected: {}'.format(file_count, expected)

    # test that package re-rooting works as expected
    out_dir_abs_path = f'file://{pathlib.Path(out_dir).absolute().as_posix()}'
    assert all(entry.physical_keys[0].startswith(out_dir_abs_path)
               for _, entry in new_package_.walk())
def test_build(tmpdir):
    """Verify that build dumps the manifest to appdirs directory."""
    new_pkg = Package()
    # Create a dummy file to add to the package.
    test_file_name = 'bar'
    with open(test_file_name, "w") as fd:
        fd.write('test_file_content_string')
        test_file = Path(fd.name)
    # Build a new package into the local registry.
    new_pkg = new_pkg.set('foo', test_file_name)
    top_hash = new_pkg.build("Quilt/Test")
    # Verify manifest is registered by hash.
    out_path = Path(BASE_PATH, ".quilt/packages", top_hash)
    with open(out_path) as fd:
        pkg = Package.load(fd)
        assert test_file.resolve().as_uri() == pkg['foo'].physical_keys[0]
    # Verify latest points to the new location.
    named_pointer_path = Path(BASE_PATH, ".quilt/named_packages/Quilt/Test/latest")
    with open(named_pointer_path) as fd:
        assert fd.read().replace('\n', '') == top_hash
    # Test unnamed packages.
    new_pkg = Package()
    new_pkg = new_pkg.set('bar', test_file_name)
    top_hash = new_pkg.build()
    out_path = Path(BASE_PATH, ".quilt/packages", top_hash)
    with open(out_path) as fd:
        pkg = Package.load(fd)
        assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]
def test_list_local_packages(tmpdir):
    """Verify that list returns packages in the appdirs directory."""
    temp_local_registry = Path(os.path.join(tmpdir, 'test_registry')).as_uri()
    with patch('t4.packages.get_package_registry', lambda path: temp_local_registry), \
            patch('t4.api.get_package_registry', lambda path: temp_local_registry):
        # Build a new package into the local registry.
        Package().build("Quilt/Foo")
        Package().build("Quilt/Bar")
        Package().build("Quilt/Test")
        # Verify packages are returned.
        pkgs = t4.list_packages()
        assert len(pkgs) == 3
        assert "Quilt/Foo" in pkgs
        assert "Quilt/Bar" in pkgs
        # Test unnamed packages are not added.
        Package().build()
        pkgs = t4.list_packages()
        assert len(pkgs) == 3
        # Verify manifest is registered by hash when local path given
        pkgs = t4.list_packages("/")
        assert "Quilt/Foo" in pkgs
        assert "Quilt/Bar" in pkgs
def test_brackets():
    """Exercise Package key access: nested keys, path forms, and error cases."""
    pkg = Package()
    pkg.set('asdf/jkl', LOCAL_MANIFEST)
    pkg.set('asdf/qwer', LOCAL_MANIFEST)
    pkg.set('qwer/asdf', LOCAL_MANIFEST)
    assert set(pkg.keys()) == {'asdf', 'qwer'}

    pkg2 = pkg['asdf']
    assert set(pkg2.keys()) == {'jkl', 'qwer'}

    # Chained brackets, a slash-delimited string, and a tuple path are equivalent.
    assert pkg['asdf']['qwer'].get() == pathlib.Path(LOCAL_MANIFEST).as_uri()
    assert pkg['asdf']['qwer'] == pkg['asdf/qwer'] == pkg[('asdf', 'qwer')]
    # An empty path returns the package itself.
    assert pkg[[]] == pkg

    pkg = (Package().set(
        'foo',
        os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
        {'foo': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo'].deserialize() == '123\n'
    assert pkg['foo']() == '123\n'

    # Missing key -> KeyError; unsupported key types -> TypeError.
    with pytest.raises(KeyError):
        pkg['baz']
    with pytest.raises(TypeError):
        pkg[b'asdf']
    with pytest.raises(TypeError):
        pkg[0]
def test_load_into_t4(self):
    """
    Verify loading local manifest and data into S3.
    """
    top_hash = '5333a204bbc6e21607c2bc842f4a77d2e21aa6147cf2bf493dbf6282188d01ca'
    # Expected S3 writes, in order: 1) the data object itself.
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v1'},
        expected_params={
            'Body': ANY,
            'Bucket': 'my_test_bucket',
            'Key': 'Quilt/package/foo',
            'Metadata': {'helium': '{}'}
        })
    # 2) the manifest, stored under its top hash.
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v2'},
        expected_params={
            'Body': ANY,
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/packages/' + top_hash,
            'Metadata': {'helium': 'null'}
        })
    # 3) a timestamped named-package pointer (time.time is patched below).
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v3'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/1234567890',
            'Metadata': {'helium': 'null'}
        })
    # 4) the 'latest' pointer.
    self.s3_stubber.add_response(
        method='put_object',
        service_response={'VersionId': 'v4'},
        expected_params={
            'Body': top_hash.encode(),
            'Bucket': 'my_test_bucket',
            'Key': '.quilt/named_packages/Quilt/package/latest',
            'Metadata': {'helium': 'null'}
        })

    new_pkg = Package()
    # Create a dummy file to add to the package.
    contents = 'blah'
    test_file = Path('bar')
    test_file.write_text(contents)
    new_pkg = new_pkg.set('foo', test_file)

    with patch('time.time', return_value=1234567890):
        new_pkg.push('Quilt/package', 's3://my_test_bucket/')
def test_default_install_location(self):
    """Verify that pushes to the default local install location work as expected"""
    with patch('t4.Package.push') as mocked_push:
        Package.install('Quilt/nice-name', registry='s3://my-test-bucket')
        # install() must delegate to push() aimed at the configured
        # default install location.
        expected_dest = t4.util.get_install_location()
        mocked_push.assert_called_once_with(
            name='Quilt/nice-name',
            dest=expected_dest,
            registry=ANY,
        )
def test_browse_package_from_registry(self):
    """ Verify loading manifest locally and from s3 """
    with patch('t4.Package._from_path') as pkgmock:
        registry = BASE_PATH.as_uri()
        pkg = Package()
        pkgmock.return_value = pkg
        top_hash = pkg.top_hash

        # local registry load
        pkg = Package.browse(registry='local', top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse('Quilt/nice-name', registry='local', top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Without an explicit top_hash, browse must first resolve the
        # 'latest' pointer, then load the manifest it names.
        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry='local')
            assert registry + '/.quilt/named_packages/Quilt/nice-name/latest' \
                == dl_mock.call_args_list[0][0][0]

        assert '{}/.quilt/packages/{}'.format(registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        remote_registry = 's3://asdf/foo'
        # remote load
        pkg = Package.browse('Quilt/nice-name', registry=remote_registry,
                             top_hash=top_hash)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse(top_hash=top_hash, registry=remote_registry)
        assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (top_hash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry=remote_registry)
            assert '{}/.quilt/packages/{}'.format(remote_registry, top_hash) \
                in [x[0][0] for x in pkgmock.call_args_list]

        # default remote registry failure case
        with patch('t4.packages.get_from_config', return_value=None):
            with pytest.raises(QuiltException):
                Package.browse('Quilt/nice-name')
def test_remote_install(self):
    """Verify that installing from a local package works as expected."""
    fake_registry = '.'
    with patch('t4.packages.get_from_config') as config_mock, \
            patch('t4.Package.push') as mocked_push:
        config_mock.return_value = fake_registry

        # Register a package locally so install has something to resolve.
        built = Package()
        built.build('Quilt/nice-name')

        t4.Package.install('Quilt/nice-name', dest='./')

        # install() must delegate to push() with the configured registry.
        mocked_push.assert_called_once_with(
            name='Quilt/nice-name', dest='./', registry=fake_registry)
def test_remote_repr(self):
    """repr() labels a package containing any S3-backed entry as remote."""
    with patch('t4.packages.get_size_and_meta', return_value=(0, dict(), '0')):
        # A single remote entry.
        expected = ("(remote Package)\n"
                    " └─asdf\n")
        remote_pkg = Package()
        remote_pkg.set('asdf', 's3://my-bucket/asdf')
        assert repr(remote_pkg) == expected

        # Mixing in a local entry keeps the package rendered as remote.
        expected = ("(remote Package)\n"
                    " └─asdf\n"
                    " └─qwer\n")
        mixed_pkg = Package()
        mixed_pkg.set('asdf', 's3://my-bucket/asdf')
        mixed_pkg.set('qwer', LOCAL_MANIFEST)
        assert repr(mixed_pkg) == expected
def test_package_entry_meta(self):
    """User metadata is exposed via .meta and nested under 'user_meta' in _meta."""
    pkg = (Package().set('foo', DATA_DIR / 'foo.txt', {
        'value': 'blah'
    }).set('bar', DATA_DIR / 'foo.txt', {'value': 'blah2'}))
    pkg['foo']._meta['target'] = 'unicode'
    pkg['bar']._meta['target'] = 'unicode'

    # .meta exposes only the user-supplied metadata.
    assert pkg['foo'].meta == {'value': 'blah'}
    assert pkg['bar'].meta == {'value': 'blah2'}

    # _meta holds the serialization target plus the user metadata.
    assert pkg['foo']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah'}
    }
    assert pkg['bar']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah2'}
    }

    # set_meta replaces the user metadata but preserves 'target'.
    pkg['foo'].set_meta({'value': 'other value'})
    assert pkg['foo'].meta == {'value': 'other value'}
    assert pkg['foo']._meta == {
        'target': 'unicode',
        'user_meta': {'value': 'other value'}
    }
def test_fetch(self):
    """ Verify fetching a package entry. """
    pkg = (Package().set('foo', DATA_DIR / 'foo.txt', {
        'user_meta': 'blah'
    }).set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    with open(DATA_DIR / 'foo.txt') as fd:
        assert fd.read().replace('\n', '') == '123'
    # Copy foo.text to bar.txt
    pkg['foo'].fetch('data/bar.txt')
    with open('data/bar.txt') as fd:
        assert fd.read().replace('\n', '') == '123'

    # Raise an error if you copy to yourself.
    with pytest.raises(shutil.SameFileError):
        pkg.set('foo', DATA_DIR / 'foo.txt')['foo'].fetch(DATA_DIR / 'foo.txt')

    # The key gets re-rooted correctly.
    pkg = t4.Package().set('foo', DATA_DIR / 'foo.txt')
    new_pkg_entry = pkg['foo'].fetch('bar.txt')
    out_abs_path = f'file://{pathlib.Path(".").absolute().as_posix()}/bar.txt'
    assert new_pkg_entry.physical_keys[0] == out_abs_path
def exec_module(cls, module):
    """
    Module executor.

    Populates the virtual 't4.data' package hierarchy: 't4.data' itself is
    a namespace-only module, while 't4.data.<user>' is filled with one
    attribute per package owned by that user in the default local registry.
    """
    name_parts = module.__name__.split('.')
    registry = get_from_config('default_local_registry')
    if module.__name__ == 't4.data':
        # __path__ must be set even if the package is virtual. Since __path__ will be
        # scanned by all other finders preceding this one in sys.meta_path order, make sure
        # it points to someplace lacking importable objects
        module.__path__ = MODULE_PATH
        return module
    elif len(name_parts) == 3:  # e.g. module.__name__ == t4.data.foo
        namespace = name_parts[2]
        # we do not know the name the user will ask for, so populate all valid names
        for pkg in list_packages():
            pkg_user, pkg_name = pkg.split('/')
            if pkg_user == namespace:
                module.__dict__[pkg_name] = Package.browse(pkg, registry=registry)
        module.__path__ = MODULE_PATH
        return module
    else:
        # Unreachable for the module names this loader is registered for.
        assert False
def test_updates(tmpdir):
    """ Verify building a package from a directory. """
    pkg = (Package().set(
        'foo',
        os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
        {'foo_meta': 'blah'}
    ).set('bar',
          os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
          {'bar_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'
    pkg.build()
    assert pkg['foo']() == '123\n'
    assert pkg['bar']() == '123\n'

    # Build a dummy file to add to the map.
    with open('bar.txt', "w") as fd:
        fd.write('test_file_content_string')
        test_file = Path(fd.name)
    # update() re-points 'bar' at the new file; 'foo' is untouched.
    pkg = pkg.update({'bar': 'bar.txt'})
    assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]
    assert pkg['foo']() == '123\n'

    # Build a dummy file to add to the map with a prefix.
    with open('baz.txt', "w") as fd:
        fd.write('test_file_content_string')
        test_file = Path(fd.name)
    pkg = pkg.update({'baz': 'baz.txt'}, prefix='prefix/')
    assert test_file.resolve().as_uri() == pkg['prefix/baz'].physical_keys[0]
    assert pkg['foo']() == '123\n'
def test_remote_package_delete(tmpdir):
    """Verify remote package delete works."""
    # Stand-ins for the S3-backed registry APIs.
    def list_packages_mock(*args, **kwargs):
        return ['Quilt/Test']

    def _tophashes_with_packages_mock(*args, **kwargs):
        return {'101': {'Quilt/Test'}}

    def list_objects_mock(*args):
        return [{
            'Key': '.quilt/named_packages/Quilt/Test/0'
        }, {
            'Key': '.quilt/named_packages/Quilt/Test/latest'
        }]

    def get_bytes_mock(*args):
        return b'101', None

    with patch('t4.Package.push', new=no_op_mock), \
            patch('t4.api.list_packages', new=list_packages_mock), \
            patch('t4.api._tophashes_with_packages', new=_tophashes_with_packages_mock), \
            patch('t4.api.list_objects', new=list_objects_mock), \
            patch('t4.api.get_bytes', new=get_bytes_mock), \
            patch('t4.api.delete_object') as delete_mock:
        top_hash = Package().push('Quilt/Test', 's3://test-bucket')
        t4.delete_package('Quilt/Test', registry='s3://test-bucket')

        # The manifest and both named-package pointers must be deleted.
        delete_mock.assert_any_call('test-bucket', '.quilt/packages/101')
        delete_mock.assert_any_call('test-bucket', '.quilt/named_packages/Quilt/Test/0')
        delete_mock.assert_any_call('test-bucket', '.quilt/named_packages/Quilt/Test/latest')
def test_package_entry_meta():
    """User metadata round-trips via get_user_meta/set_user_meta; .meta shows all."""
    pkg = (Package().set(
        'foo',
        os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
        {'value': 'blah'}
    ).set('bar',
          os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'),
          {'value': 'blah2'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    # get_user_meta returns only the user-supplied metadata.
    assert pkg['foo'].get_user_meta() == {'value': 'blah'}
    assert pkg['bar'].get_user_meta() == {'value': 'blah2'}

    # .meta holds the serialization target plus the user metadata.
    assert pkg['foo'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah'}
    }
    assert pkg['bar'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'blah2'}
    }

    # set_user_meta replaces the user metadata but preserves 'target'.
    pkg['foo'].set_user_meta({'value': 'other value'})
    assert pkg['foo'].get_user_meta() == {'value': 'other value'}
    assert pkg['foo'].meta == {
        'target': 'unicode',
        'user_meta': {'value': 'other value'}
    }
def test_local_push(self):
    """ Verify loading local manifest and data into a local dir. """
    expected_hash = '5333a204bbc6e21607c2bc842f4a77d2e21aa6147cf2bf493dbf6282188d01ca'
    src_file = Path('bar')
    src_file.write_text('blah')

    pkg = Package().set('foo', src_file)
    pkg.push('Quilt/package', 'package_contents')

    dest = Path('package_contents')
    # The 'latest' pointer records the manifest hash ...
    assert (dest / '.quilt/named_packages/Quilt/package/latest'
            ).read_text() == expected_hash
    # ... the manifest itself is stored under that hash ...
    assert (dest / ('.quilt/packages/' + expected_hash)).exists()
    # ... and the data lands under the package name.
    assert (dest / 'Quilt/package/foo').read_text() == 'blah'
def test_local_package_delete(tmpdir):
    """Verify local package delete works."""
    built_hash = Package().build("Quilt/Test")

    t4.delete_package('Quilt/Test', registry=BASE_PATH)

    # Both the named pointer and the manifest must be gone.
    assert 'Quilt/Test' not in t4.list_packages()
    remaining = {entry.name for entry in Path(BASE_PATH, '.quilt/packages').iterdir()}
    assert built_hash not in remaining
def test_browse_package_from_registry():
    """ Verify loading manifest locally and from s3 """
    with patch('t4.Package._from_path') as pkgmock:
        registry = BASE_PATH.as_uri()
        pkg = Package()
        pkgmock.return_value = pkg
        pkghash = pkg.top_hash()

        # default registry load
        pkg = Package.browse(pkg_hash=pkghash)
        assert '{}/.quilt/packages/{}'.format(registry, pkghash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse('Quilt/nice-name', pkg_hash=pkghash)
        assert '{}/.quilt/packages/{}'.format(registry, pkghash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        # Without an explicit pkg_hash, browse must first resolve the
        # 'latest' pointer, then load the manifest it names.
        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (pkghash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name')
            assert registry + '/.quilt/named_packages/Quilt/nice-name/latest' \
                == dl_mock.call_args_list[0][0][0]

        assert '{}/.quilt/packages/{}'.format(registry, pkghash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        remote_registry = 's3://asdf/foo'
        # remote load
        pkg = Package.browse('Quilt/nice-name', registry=remote_registry,
                             pkg_hash=pkghash)
        assert '{}/.quilt/packages/{}'.format(remote_registry, pkghash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        pkg = Package.browse(pkg_hash=pkghash, registry=remote_registry)
        assert '{}/.quilt/packages/{}'.format(remote_registry, pkghash) \
            in [x[0][0] for x in pkgmock.call_args_list]

        pkgmock.reset_mock()

        with patch('t4.packages.get_bytes') as dl_mock:
            dl_mock.return_value = (pkghash.encode('utf-8'), None)
            pkg = Package.browse('Quilt/nice-name', registry=remote_registry)
            assert '{}/.quilt/packages/{}'.format(remote_registry, pkghash) \
                in [x[0][0] for x in pkgmock.call_args_list]
def test_set_package_entry(self):
    """ Set the physical key for a PackageEntry"""
    pkg = (Package().set('foo', DATA_DIR / 'foo.txt', {
        'user_meta': 'blah'
    }).set('bar', DATA_DIR / 'foo.txt', {'user_meta': 'blah'}))
    pkg['foo'].meta['target'] = 'unicode'
    pkg['bar'].meta['target'] = 'unicode'

    # Build a dummy file to add to the map.
    with open('bar.txt', "w") as fd:
        fd.write('test_file_content_string')
        test_file = Path(fd.name)

    # Re-point an existing entry at the new file.
    pkg['bar'].set('bar.txt')
    assert test_file.resolve().as_uri() == pkg['bar'].physical_keys[0]

    # Test shortcut codepath: set() with only a path also uses it as the key.
    pkg = Package().set('bar.txt')
    assert test_file.resolve().as_uri() == pkg['bar.txt'].physical_keys[0]
def test_top_hash_stable(self):
    """Ensure that top_hash() never changes for a given manifest"""
    registry = DATA_DIR
    # Golden hash for the checked-in manifest; must never drift.
    expected = '20de5433549a4db332a11d8d64b934a82bdea8f144b4aecd901e7d4134f8e733'

    pkg = Package.browse(registry=registry, top_hash=expected)

    assert pkg.top_hash == expected, \
        "Unexpected top_hash for {}/.quilt/packages/{}".format(registry, expected)
def test_repr():
    """Package repr renders its keys as an indented tree."""
    expected = ("asdf\n"
                "path1/\n"
                " asdf\n"
                " qwer\n"
                "path2/\n"
                " first/\n"
                " asdf\n"
                " second/\n"
                " asdf\n"
                "qwer\n")
    pkg = Package()
    # Insertion order matches the original test exactly.
    for key in ('asdf', 'qwer', 'path1/asdf', 'path1/qwer',
                'path2/first/asdf', 'path2/second/asdf'):
        pkg.set(key, LOCAL_MANIFEST)
    assert repr(pkg) == expected
def test_local_package_delete_overlapping(tmpdir):
    """
    Verify local package delete works when multiple packages reference the
    same tophash.
    """
    def stored_hashes():
        # Manifest hashes currently present in the local registry.
        return {p.name for p in Path(BASE_PATH, '.quilt/packages').iterdir()}

    shared_hash = Package().build("Quilt/Test1")
    shared_hash = Package().build("Quilt/Test2")

    # Deleting one referrer must keep the shared manifest alive.
    t4.delete_package('Quilt/Test1', registry=BASE_PATH)
    assert 'Quilt/Test1' not in t4.list_packages()
    assert shared_hash in stored_hashes()

    # Deleting the last referrer removes the manifest too.
    t4.delete_package('Quilt/Test2', registry=BASE_PATH)
    assert 'Quilt/Test2' not in t4.list_packages()
    assert shared_hash not in stored_hashes()
def test_top_hash_stable():
    """Ensure that top_hash() never changes for a given manifest"""
    registry = Path(__file__).parent / 'data'
    # Golden hash for the checked-in manifest; must never drift.
    known_hash = '20de5433549a4db332a11d8d64b934a82bdea8f144b4aecd901e7d4134f8e733'

    pkg = Package.browse(registry=registry, pkg_hash=known_hash)

    assert pkg.top_hash() == known_hash, \
        "Unexpected top_hash for {}/.quilt/packages/{}".format(registry, known_hash)
def test_fetch_default_dest(tmpdir):
    """Verify fetching a package entry to a default destination."""
    with patch('t4.packages.copy_file') as copy_mock:
        # fetch() with no destination argument.
        (Package().set(
            'foo',
            os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'))['foo'].fetch())
        # The source must be the entry's physical key; destination is defaulted.
        filepath = fix_url(
            os.path.join(os.path.dirname(__file__), 'data', 'foo.txt'))
        copy_mock.assert_called_once_with(filepath, ANY, ANY)
def test_remote_package_delete_overlapping(tmpdir):
    """
    Verify remote package delete works when multiple packages reference the
    same tophash.
    """
    # Stand-ins for the S3-backed registry APIs; both packages share hash '101'.
    def list_packages_mock(*args, **kwargs):
        return ['Quilt/Test1', 'Quilt/Test2']

    def _tophashes_with_packages_mock(*args, **kwargs):
        return {'101': {'Quilt/Test1', 'Quilt/Test2'}}

    def list_objects_mock(*args):
        return [{
            'Key': '.quilt/named_packages/Quilt/Test1/0'
        }, {
            'Key': '.quilt/named_packages/Quilt/Test1/latest'
        }, {
            'Key': '.quilt/named_packages/Quilt/Test2/0'
        }, {
            'Key': '.quilt/named_packages/Quilt/Test2/latest'
        }]

    def get_bytes_mock(*args):
        return b'101', None

    with patch('t4.Package.push', new=no_op_mock), \
            patch('t4.api.list_packages', new=list_packages_mock), \
            patch('t4.api._tophashes_with_packages', new=_tophashes_with_packages_mock), \
            patch('t4.api.list_objects', new=list_objects_mock), \
            patch('t4.api.get_bytes', new=get_bytes_mock), \
            patch('t4.api.delete_object') as delete_mock:
        top_hash = Package().push('Quilt/Test1', 's3://test-bucket')
        top_hash = Package().push('Quilt/Test2', 's3://test-bucket')
        t4.delete_package('Quilt/Test1', registry='s3://test-bucket')

        # the reference count for the tophash 101 is still one, so it should still exist
        assert call('test-bucket', '.quilt/packages/101') not in delete_mock.call_args_list
        # Only Test1's named pointers are removed.
        delete_mock.assert_any_call('test-bucket', '.quilt/named_packages/Quilt/Test1/0')
        delete_mock.assert_any_call(
            'test-bucket', '.quilt/named_packages/Quilt/Test1/latest')
def test_materialize_from_remote(tmpdir):
    """ Verify loading data and manifest transforms from S3. """
    with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
        with open(REMOTE_MANIFEST) as fd:
            pkg = Package.load(fd)
        # Stub out the actual download/build so only the push plumbing runs.
        with patch('t4.data_transfer._download_file'), \
                patch('t4.Package.build', new=no_op_mock), \
                patch('t4.packages.get_remote_registry') as config_mock:
            config_mock.return_value = tmpdir
            mat_pkg = pkg.push('Quilt/test_pkg_name', tmpdir / 'pkg')
def test_local_repr(self):
    # Expected tree rendering for a package whose entries are all local.
    TEST_REPR = ("(local Package)\n"
                 " └─asdf\n"
                 " └─path1/\n"
                 " └─asdf\n"
                 " └─qwer\n"
                 " └─path2/\n"
                 " └─first/\n"
                 " └─asdf\n"
                 " └─second/\n"
                 " └─asdf\n"
                 " └─qwer\n")
    pkg = Package()
    pkg.set('asdf', LOCAL_MANIFEST)
    pkg.set('qwer', LOCAL_MANIFEST)
    pkg.set('path1/asdf', LOCAL_MANIFEST)
    pkg.set('path1/qwer', LOCAL_MANIFEST)
    pkg.set('path2/first/asdf', LOCAL_MANIFEST)
    pkg.set('path2/second/asdf', LOCAL_MANIFEST)
    assert repr(pkg) == TEST_REPR
def test_commit_message_on_push(self):
    """ Verify commit messages populate correctly on push."""
    with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
        with open(REMOTE_MANIFEST) as manifest_file:
            pkg = Package.load(manifest_file)

        # A string message is stored on the package's internal metadata.
        pkg.push('Quilt/test_pkg_name', 'pkg', message='test_message')
        assert pkg._meta['message'] == 'test_message'

        # ensure messages are strings
        with pytest.raises(ValueError):
            pkg.push('Quilt/test_pkg_name', 'pkg', message={})
def test_package_fetch(tmpdir):
    """ Package.fetch() on nested, relative keys """
    input_dir = os.path.dirname(__file__)
    package_ = Package().set_dir('/', os.path.join(input_dir, 'data', 'nested'))
    out_dir = os.path.join(tmpdir, 'output')
    package_.fetch(out_dir)

    expected = {'one.txt': '1', 'two.txt': '2', 'three.txt': '3'}
    file_count = 0
    for dirpath, _, files in os.walk(out_dir):
        for name in files:
            file_count += 1
            # Fixed: dirpath yielded by os.walk(out_dir) already contains
            # out_dir; prepending it again only worked by accident because
            # dirpath was absolute (join discards earlier absolute parts).
            with open(os.path.join(dirpath, name)) as file_:
                # Fixed: interpolate the unexpected file's *name*; the
                # original formatted the file object instead.
                assert name in expected, 'unexpected file: {}'.format(name)
                contents = file_.read().strip()
                assert contents == expected[name], \
                    'unexpected contents in {}: {}'.format(name, contents)
    assert file_count == len(expected), \
        'fetch wrote {} files; expected: {}'.format(file_count, expected)