def test_export_rev(script, tmpdir):
    """Test that a Bazaar branch can be exported, specifying a rev."""
    branch_dir = tmpdir / 'test-source'
    branch_dir.mkdir()
    # Build a branch with two revisions touching the same file:
    # the initial add, then an edit committed on top of it.
    create_file(branch_dir / 'test_file', 'something initial')
    _vcs_add(script, str(branch_dir), vcs='bazaar')
    create_file(branch_dir / 'test_file', 'something new')
    script.run(
        'bzr', 'commit', '-q',
        '--author', 'pip <*****@*****.**>',
        '-m', 'change test file',
        cwd=branch_dir,
    )
    target_dir = tmpdir / 'export'
    # The '@1' suffix pins the export to the first revision.
    rev_url = hide_url('bzr+' + _test_path_to_file_url(branch_dir) + '@1')
    Bazaar().export(str(target_dir), url=rev_url)
    # Revision 1 predates the edit, so the original content must come back.
    with open(target_dir / 'test_file', 'r') as fp:
        assert fp.read() == 'something initial'
def test_export(script, tmpdir):
    """Test that a Bazaar branch can be exported."""
    branch_dir = tmpdir / 'test-source'
    branch_dir.mkdir()
    # One committed file is enough to give the export observable content.
    create_file(branch_dir / 'test_file', 'something')
    _vcs_add(script, str(branch_dir), vcs='bazaar')
    target = str(tmpdir / 'export')
    bzr_url = hide_url('bzr+' + _test_path_to_file_url(branch_dir))
    Bazaar().export(target, url=bzr_url)
    # The export should contain exactly the committed file and nothing else.
    assert os.listdir(target) == ['test_file']
def test_export(script, tmpdir):
    """Test that a Bazaar branch can be exported."""
    branch_dir = tmpdir / 'test-source'
    branch_dir.mkdir()
    # A single committed file gives the export observable content.
    create_file(branch_dir / 'test_file', 'something')
    _vcs_add(script, str(branch_dir), vcs='bazaar')
    target = str(tmpdir / 'export')
    bzr_url = 'bzr+' + _test_path_to_file_url(branch_dir)
    Bazaar().export(target, url=bzr_url)
    # Exactly the committed file should appear in the export directory.
    assert os.listdir(target) == ['test_file']
def test_compressed_listing(tmpdir):
    """compress_for_output_listing collapses whole directories to globs."""

    def in_tmpdir(paths):
        # Anchor each relative path under tmpdir, normalising separators
        # and case so the comparison works on every platform.
        return [
            str(os.path.normcase(
                os.path.join(tmpdir, path.replace("/", os.path.sep))))
            for path in paths
        ]

    sample = in_tmpdir([
        "lib/mypkg.dist-info/METADATA",
        "lib/mypkg.dist-info/PKG-INFO",
        "lib/mypkg/would_be_removed.txt",
        "lib/mypkg/would_be_skipped.skip.txt",
        "lib/mypkg/__init__.py",
        "lib/mypkg/my_awesome_code.py",
        "lib/mypkg/__pycache__/my_awesome_code-magic.pyc",
        "lib/mypkg/support/support_file.py",
        "lib/mypkg/support/more_support.py",
        "lib/mypkg/support/would_be_skipped.skip.py",
        "lib/mypkg/support/__pycache__/support_file-magic.pyc",
        "lib/random_other_place/file_without_a_dot_pyc",
        "bin/mybin",
    ])

    # Materialise every sample file on disk.
    for fname in sample:
        create_file(fname, "random blub")

    # Paths containing ".skip." are deliberately excluded from the input,
    # so they should surface in the "skipped" listing instead.
    sample = [path for path in sample if ".skip." not in path]

    expected_remove = in_tmpdir([
        "bin/mybin",
        "lib/mypkg.dist-info/*",
        "lib/mypkg/*",
        "lib/random_other_place/file_without_a_dot_pyc",
    ])
    expected_skip = in_tmpdir([
        "lib/mypkg/would_be_skipped.skip.txt",
        "lib/mypkg/support/would_be_skipped.skip.py",
    ])

    removed, skipped = compress_for_output_listing(sample)
    assert sorted(expected_skip) == sorted(compact(skipped))
    assert sorted(expected_remove) == sorted(compact(removed))
def test_compressed_listing(tmpdir):
    """compress_for_output_listing collapses whole directories to globs."""

    def in_tmpdir(paths):
        # Anchor each relative path under tmpdir, converting the "/" used
        # in the literals to the platform's path separator.
        return [
            str(os.path.join(tmpdir, path.replace("/", os.path.sep)))
            for path in paths
        ]

    sample = in_tmpdir([
        "lib/mypkg.dist-info/METADATA",
        "lib/mypkg.dist-info/PKG-INFO",
        "lib/mypkg/would_be_removed.txt",
        "lib/mypkg/would_be_skipped.skip.txt",
        "lib/mypkg/__init__.py",
        "lib/mypkg/my_awesome_code.py",
        "lib/mypkg/__pycache__/my_awesome_code-magic.pyc",
        "lib/mypkg/support/support_file.py",
        "lib/mypkg/support/more_support.py",
        "lib/mypkg/support/would_be_skipped.skip.py",
        "lib/mypkg/support/__pycache__/support_file-magic.pyc",
        "lib/random_other_place/file_without_a_dot_pyc",
        "bin/mybin",
    ])

    # Materialise every sample file on disk.
    for fname in sample:
        create_file(fname, "random blub")

    # Paths containing ".skip." are deliberately excluded from the input,
    # so they should surface in the "skipped" listing instead.
    sample = [path for path in sample if ".skip." not in path]

    expected_remove = in_tmpdir([
        "bin/mybin",
        "lib/mypkg.dist-info/*",
        "lib/mypkg/*",
        "lib/random_other_place/file_without_a_dot_pyc",
    ])
    expected_skip = in_tmpdir([
        "lib/mypkg/would_be_skipped.skip.txt",
        "lib/mypkg/support/would_be_skipped.skip.py",
    ])

    removed, skipped = compress_for_output_listing(sample)
    assert sorted(expected_skip) == sorted(compact(skipped))
    assert sorted(expected_remove) == sorted(compact(removed))
def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    payload = b'downloaded'
    digest = hashlib.new('sha1', payload)
    link = Link(base_url + '#sha1=' + digest.hexdigest())

    # Fake HTTP session serving the correct payload on GET.
    session = Mock()
    session.get = Mock()
    resp = session.get.return_value = MockResponse(payload)
    resp.headers = {'content-type': 'application/x-tar'}
    resp.url = base_url
    downloader = Downloader(session, progress_bar="on")

    download_dir = mkdtemp()
    try:
        cached_file = os.path.join(download_dir, 'somepackage.tgz')
        # Seed the download cache with content that does NOT match the hash.
        create_file(cached_file, 'some contents')

        unpack_http_url(
            link,
            'location',
            downloader=downloader,
            download_dir=download_dir,
            hashes=Hashes({'sha1': [digest.hexdigest()]})
        )

        # The stale cached file must trigger a fresh network fetch...
        session.get.assert_called_once_with(
            'http://www.example.com/somepackage.tgz',
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        # ...and the cache entry must be overwritten with the new payload.
        with open(cached_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(download_dir)
def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    payload = b'downloaded'
    digest = hashlib.new('sha1', payload)
    link = Link(base_url + '#sha1=' + digest.hexdigest())

    # Fake HTTP session serving the correct payload on GET.
    session = Mock()
    session.get = Mock()
    resp = session.get.return_value = MockResponse(payload)
    resp.headers = {'content-type': 'application/x-tar'}
    resp.url = base_url

    download_dir = mkdtemp()
    try:
        cached_file = os.path.join(download_dir, 'somepackage.tgz')
        # Seed the download cache with content that does NOT match the hash.
        create_file(cached_file, 'some contents')

        unpack_http_url(
            link,
            'location',
            download_dir=download_dir,
            session=session,
            hashes=Hashes({'sha1': [digest.hexdigest()]})
        )

        # The stale cached file must trigger a fresh network fetch...
        session.get.assert_called_once_with(
            'http://www.example.com/somepackage.tgz',
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        # ...and the cache entry must be overwritten with the new payload.
        with open(cached_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(download_dir)
def test_export_rev(script, tmpdir):
    """Test that a Bazaar branch can be exported, specifying a rev."""
    branch_dir = tmpdir / 'test-source'
    branch_dir.mkdir()
    # Build a branch with two revisions touching the same file:
    # the initial add, then an edit committed on top of it.
    create_file(branch_dir / 'test_file', 'something initial')
    _vcs_add(script, str(branch_dir), vcs='bazaar')
    create_file(branch_dir / 'test_file', 'something new')
    script.run(
        'bzr', 'commit', '-q',
        '--author', 'pip <*****@*****.**>',
        '-m', 'change test file',
        cwd=branch_dir,
    )
    target_dir = tmpdir / 'export'
    # The '@1' suffix pins the export to the first revision.
    rev_url = 'bzr+' + _test_path_to_file_url(branch_dir) + '@1'
    Bazaar().export(str(target_dir), url=rev_url)
    # Revision 1 predates the edit, so the original content must come back.
    with open(target_dir / 'test_file', 'r') as fp:
        assert fp.read() == 'something initial'