def test_sign(self):
    # Verify with C git that a tag signed by dulwich is accepted.
    commits = build_commit_graph(
        self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
    )
    self.repo.refs[b"HEAD"] = commits[2].id
    cfg = self.repo.get_config()
    cfg.set(("user",), "signingKey", PorcelainGpgTestCase.DEFAULT_KEY_ID)
    self.import_default_key()
    porcelain.tag_create(
        self.repo.path,
        b"tryme",
        b"foo <*****@*****.**>",
        b"bar",
        annotated=True,
        sign=True,
    )
    git_dir_arg = "--git-dir={}".format(self.repo.controldir())
    run_git_or_fail(
        [git_dir_arg, "tag", "-v", "tryme"],
        env={"GNUPGHOME": os.environ["GNUPGHOME"]},
    )
def setUp(self):
    # Spawn a git-daemon serving self.gitroot for the duration of the test.
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    if check_for_daemon(limit=1):
        raise SkipTest(
            "git-daemon was already running on port %s" % protocol.TCP_GIT_PORT
        )
    fd, self.pidfile = tempfile.mkstemp(
        prefix="dulwich-test-git-client", suffix=".pid"
    )
    os.fdopen(fd).close()
    daemon_args = [
        "daemon",
        "--verbose",
        "--export-all",
        "--pid-file=%s" % self.pidfile,
        "--base-path=%s" % self.gitroot,
        "--detach",
        "--reuseaddr",
        "--enable=receive-pack",
        "--enable=upload-archive",
        "--listen=localhost",
        self.gitroot,
    ]
    run_git_or_fail(daemon_args, cwd=self.gitroot)
    if not check_for_daemon():
        raise SkipTest("git-daemon failed to start")
def test_send_pack_from_shallow_clone(self):
    client = self._client()
    server_new_path = os.path.join(self.gitroot, "server_new.export")
    for key in ("http.uploadpack", "http.receivepack"):
        run_git_or_fail(["config", key, "true"], cwd=server_new_path)
    remote_path = self._build_path("/server_new.export")
    with repo.Repo(self.dest) as local:
        # Shallow-fetch the remote, then layer two commits on top of it.
        result = client.fetch(remote_path, local, depth=1)
        for name, value in result.refs.items():
            local.refs.set_if_equals(name, None, value)
        tree_id = local[local.head()].tree
        for filename, contents in [
            ("bar", "bar contents"),
            ("zop", "zop contents"),
        ]:
            tree_id = self._add_file(local, tree_id, filename, contents)
            commit_id = local.do_commit(
                message=b"add " + filename.encode("utf-8"),
                committer=b"Joe Example <*****@*****.**>",
                tree=tree_id,
            )
        sendrefs = dict(local.get_refs())
        del sendrefs[b"HEAD"]
        # Push the new commits back to the full (non-shallow) server repo.
        client.send_pack(remote_path, lambda _: sendrefs, local.generate_pack_data)
    with repo.Repo(server_new_path) as remote:
        self.assertEqual(remote.head(), commit_id)
def test_new_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo("server_new.export")
    self._stub_repo = _StubRepo("shallow")
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Mirror-clone at depth 1 using stock git.
    clone_cmd = [
        "clone",
        "--mirror",
        "--depth=1",
        "--no-single-branch",
        self.url(port),
        self._stub_repo.path,
    ]
    run_git_or_fail(clone_cmd)
    clone = self._stub_repo = Repo(self._stub_repo.path)
    self.assertEqual(
        [
            b"35e0b59e187dd72a0af294aedffc213eaa4d03ff",
            b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9",
        ],
        _get_shallow(clone),
    )
    self.assertReposNotEqual(clone, self._source_repo)
def disable_ff_and_make_dummy_commit(self):
    # Forbid non-fast-forward pushes on the server side, then create a
    # commit that would require one.
    dest = repo.Repo(os.path.join(self.gitroot, 'dest'))
    run_git_or_fail(
        ['config', 'receive.denyNonFastForwards', 'true'], cwd=dest.path)
    return dest, self.make_dummy_commit(dest)
def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo("server_new.export")
    self._stub_repo = _StubRepo("shallow")
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Mirror-clone at depth 2.
    clone_cmd = [
        "clone",
        "--mirror",
        "--depth=2",
        "--no-single-branch",
        self.url(port),
        self._stub_repo.path,
    ]
    run_git_or_fail(clone_cmd)
    clone = self._stub_repo = Repo(self._stub_repo.path)
    # A second fetch at the identical depth must change nothing.
    run_git_or_fail(
        ["fetch", "--depth=2", self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path,
    )
    expected = [
        b"94de09a530df27ac3bb613aaecdd539e0a0655e1",
        b"da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d",
    ]
    self.assertEqual(expected, _get_shallow(clone))
    self.assertReposNotEqual(clone, self._source_repo)
def test_push_to_dulwich(self):
    self.import_repos()
    self.assertReposNotEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    # Push everything into the dulwich-served repo, then both must match.
    push_cmd = ["push", self.url(port)] + self.branch_args()
    run_git_or_fail(push_cmd, cwd=self._new_repo.path)
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_verify(self):
    # A tag signed via C git must verify cleanly through dulwich.
    commits = build_commit_graph(
        self.repo.object_store, [[1], [2, 1], [3, 1, 2]]
    )
    self.repo.refs[b"HEAD"] = commits[2].id
    self.import_default_key()
    env = {
        'GNUPGHOME': os.environ['GNUPGHOME'],
        'GIT_COMMITTER_NAME': 'Joe Example',
        'GIT_COMMITTER_EMAIL': '*****@*****.**',
    }
    run_git_or_fail(
        [
            "--git-dir={}".format(self.repo.controldir()),
            "tag",
            "-u",
            PorcelainGpgTestCase.DEFAULT_KEY_ID,
            "-m",
            "foo",
            "verifyme",
        ],
        env=env,
    )
    tag = self.repo[b"refs/tags/verifyme"]
    self.assertNotEqual(tag.signature, None)
    tag.verify()
def setUp(self):
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    # Serve self.gitroot over HTTP on an ephemeral port.
    self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
    self.addCleanup(self._httpd.shutdown)
    threading.Thread(target=self._httpd.serve_forever).start()
    for option in ("http.uploadpack", "http.receivepack"):
        run_git_or_fail(["config", option, "true"], cwd=self.dest)
def test_push_to_dulwich(self):
    self.import_repos()
    self.assertReposNotEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    run_git_or_fail(
        ['push', self.url(port)] + self.branch_args(),
        cwd=self._new_repo.path)
    # After the push both repositories must contain the same objects.
    self.assertReposEqual(self._old_repo, self._new_repo)
def disable_ff_and_make_dummy_commit(self):
    # disable non-fast-forward pushes to the server, then stage a dummy
    # blob+tree that a follow-up commit can use.
    dest = repo.Repo(os.path.join(self.gitroot, 'dest'))
    run_git_or_fail(['config', 'receive.denyNonFastForwards', 'true'],
                    cwd=dest.path)
    # Blob contents and tree paths must be bytes; the legacy octal
    # literal 0100644 is a SyntaxError on Python 3, so use 0o100644.
    b = objects.Blob.from_string(b'hi')
    dest.object_store.add_object(b)
    t = index.commit_tree(dest.object_store, [(b'hi', b.id, 0o100644)])
def setUp(self):
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    # Serve the git root over HTTP on an OS-assigned port.
    self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
    self.addCleanup(self._httpd.shutdown)
    threading.Thread(target=self._httpd.serve_forever).start()
    for setting in ('http.uploadpack', 'http.receivepack'):
        run_git_or_fail(['config', setting, 'true'], cwd=self.dest)
def test_fetch_from_dulwich(self):
    self.import_repos()
    self.assertReposNotEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._new_repo)
    fetch_cmd = ["fetch", self.url(port)] + self.branch_args()
    run_git_or_fail(fetch_cmd, cwd=self._old_repo.path)
    # Drop the cached pack list so freshly fetched packs are noticed.
    self._old_repo.object_store._pack_cache = None
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_push_to_dulwich_no_op(self):
    self._old_repo = import_repo('server_old.export')
    self._new_repo = import_repo('server_old.export')
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    # Both sides already agree, so this push must change nothing.
    cmd = ['push', self.url(port)] + self.branch_args()
    run_git_or_fail(cmd, cwd=self._new_repo.path)
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_push_to_dulwich_no_op(self):
    self._old_repo = self.import_repo('server_old.export')
    self._new_repo = self.import_repo('server_old.export')
    # assertEqual on two distinct Repo instances compares object
    # identity (Repo defines no __eq__) and cannot meaningfully hold;
    # use the repo-aware helper like the sibling no-op tests do.
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    run_git_or_fail(['push', self.url(port)] + self.branch_args(),
                    cwd=self._new_repo.path)
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_push_to_dulwich_issue_88_standard(self):
    # Mirror of the issue-88 fetch test with the roles swapped:
    # the client pushes into the dulwich-served repository.
    self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export")
    self._client_repo = self.import_repo("issue88_expect_ack_nak_server.export")
    port = self._start_server(self._source_repo)
    run_git_or_fail(
        ["push", self.url(port), "master"], cwd=self._client_repo.path
    )
    self.assertReposEqual(self._source_repo, self._client_repo)
def test_push_to_dulwich_remove_branch(self):
    self._old_repo = self.import_repo("server_old.export")
    self._new_repo = self.import_repo("server_old.export")
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    # Pushing ":master" deletes that branch on the receiving side.
    run_git_or_fail(["push", self.url(port), ":master"],
                    cwd=self._new_repo.path)
    remaining = list(self._old_repo.get_refs().keys())
    self.assertEqual(remaining, [b"refs/heads/branch"])
def test_push_to_dulwich_issue_88_standard(self):
    # Same scenario as the fetch variant, with server/client roles
    # reversed and a push instead of a fetch.
    self._source_repo = self.import_repo('issue88_expect_ack_nak_client.export')
    self._client_repo = self.import_repo('issue88_expect_ack_nak_server.export')
    port = self._start_server(self._source_repo)
    run_git_or_fail(['push', self.url(port), 'master'],
                    cwd=self._client_repo.path)
    self.assertReposEqual(self._source_repo, self._client_repo)
def test_fetch_from_dulwich(self):
    self.import_repos()
    self.assertReposNotEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._new_repo)
    cmd = ['fetch', self.url(port)] + self.branch_args()
    run_git_or_fail(cmd, cwd=self._old_repo.path)
    # Zero the cache timestamp so newly arrived packs get re-scanned.
    self._old_repo.object_store._pack_cache_time = 0
    self.assertReposEqual(self._old_repo, self._new_repo)
def setUp(self):
    # urllib3 is unavailable for pypy on debian, so bail out early.
    if '__pypy__' in sys.modules:
        self.skipTest('urllib3 not available for pypy in debian')
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    self._httpd = HTTPGitServer(("localhost", 0), self.gitroot)
    self.addCleanup(self._httpd.shutdown)
    threading.Thread(target=self._httpd.serve_forever).start()
    for opt in ('http.uploadpack', 'http.receivepack'):
        run_git_or_fail(['config', opt, 'true'], cwd=self.dest)
def test_push_to_dulwich_no_op(self):
    self._old_repo = import_repo("server_old.export")
    self.addCleanup(tear_down_repo, self._old_repo)
    self._new_repo = import_repo("server_old.export")
    self.addCleanup(tear_down_repo, self._new_repo)
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    # Both repos already agree, so this push must be a no-op.
    cmd = ["push", self.url(port)] + self.branch_args()
    run_git_or_fail(cmd, cwd=self._new_repo.path)
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_push_to_dulwich_remove_branch(self):
    self._old_repo = import_repo('server_old.export')
    self._new_repo = import_repo('server_old.export')
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    run_git_or_fail(['push', self.url(port), ":master"],
                    cwd=self._new_repo.path)
    # assertEquals is a deprecated alias of assertEqual; also wrap
    # keys() in list() so the comparison works on Python 3, where
    # keys() returns a view rather than a list.
    self.assertEqual(
        list(self._old_repo.get_refs().keys()), ["refs/heads/branch"])
def test_fetch_from_dulwich_no_op(self):
    self._old_repo = self.import_repo('server_old.export')
    self._new_repo = self.import_repo('server_old.export')
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._new_repo)
    run_git_or_fail(
        ['fetch', self.url(port)] + self.branch_args(),
        cwd=self._old_repo.path)
    # Invalidate the pack cache so any new packs are picked up.
    self._old_repo.object_store._pack_cache_time = 0
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_fetch_from_dulwich_issue_88_standard(self):
    # Integration check that ACK/NAK negotiation works when both
    # repositories share a common head.
    self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export")
    self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export")
    port = self._start_server(self._source_repo)
    run_git_or_fail(
        ["fetch", self.url(port), "master"], cwd=self._client_repo.path
    )
    self.assertObjectStoreEqual(
        self._source_repo.object_store,
        self._client_repo.object_store,
    )
def test_fetch_from_dulwich_issue_88_alternative(self):
    # Same as the standard case, except the repos share no common parent.
    self._source_repo = self.import_repo('issue88_expect_ack_nak_other.export')
    self._client_repo = self.import_repo('issue88_expect_ack_nak_client.export')
    port = self._start_server(self._source_repo)
    missing_sha = b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323'
    self.assertRaises(KeyError, self._client_repo.get_object, missing_sha)
    run_git_or_fail(['fetch', self.url(port), 'master'],
                    cwd=self._client_repo.path)
    fetched = self._client_repo.get_object(missing_sha)
    self.assertEqual(b'commit', fetched.type_name)
def test_fetch_from_dulwich_no_op(self):
    self._old_repo = import_repo("server_old.export")
    self.addCleanup(tear_down_repo, self._old_repo)
    self._new_repo = import_repo("server_old.export")
    self.addCleanup(tear_down_repo, self._new_repo)
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._new_repo)
    cmd = ["fetch", self.url(port)] + self.branch_args()
    run_git_or_fail(cmd, cwd=self._old_repo.path)
    # Drop the cached pack list so any new packs are noticed.
    self._old_repo.object_store._pack_cache = None
    self.assertReposEqual(self._old_repo, self._new_repo)
def create_new_worktree(self, repo_dir, branch):
    """Create a new worktree using git-worktree.

    :param repo_dir: The directory of the main working tree.
    :param branch: The branch or commit to checkout in the new worktree.
    :returns: The path to the new working tree.
    """
    temp_dir = tempfile.mkdtemp()
    # Register the cleanup before invoking git, so the temporary
    # directory is removed even if `git worktree add` fails.
    self.addCleanup(rmtree_ro, temp_dir)
    run_git_or_fail(['worktree', 'add', temp_dir, branch], cwd=repo_dir)
    return temp_dir
def test_fetch_from_dulwich(self):
    self.import_repos()
    with self._old_repo as old_repo:
        with self._new_repo as new_repo:
            self.assertReposNotEqual(old_repo, new_repo)
            port = self._start_server(new_repo)
            fetch_cmd = ['fetch', self.url(port)] + self.branch_args()
            run_git_or_fail(fetch_cmd, cwd=old_repo.path)
            # Close the object store so freshly fetched packs are re-read.
            old_repo.object_store.close()
            self.assertReposEqual(old_repo, new_repo)
def test_fetch_from_dulwich_issue_88_alternative(self):
    # Like the standard issue-88 case, but the two repositories have no
    # common parent at all.
    self._source_repo = self.import_repo('issue88_expect_ack_nak_other.export')
    self._client_repo = self.import_repo('issue88_expect_ack_nak_client.export')
    port = self._start_server(self._source_repo)
    wanted = b'02a14da1fc1fc13389bbf32f0af7d8899f2b2323'
    # Before fetching, the object must be absent from the client.
    self.assertRaises(KeyError, self._client_repo.get_object, wanted)
    run_git_or_fail(['fetch', self.url(port), 'master'],
                    cwd=self._client_repo.path)
    self.assertEqual(
        b'commit', self._client_repo.get_object(wanted).type_name)
def test_fetch_from_dulwich_issue_88_standard(self):
    # Exercises ACK/NAK generation for repos with a common head.
    self._source_repo = self.import_repo('issue88_expect_ack_nak_server.export')
    self._client_repo = self.import_repo('issue88_expect_ack_nak_client.export')
    port = self._start_server(self._source_repo)
    run_git_or_fail(['fetch', self.url(port), 'master'],
                    cwd=self._client_repo.path)
    self.assertObjectStoreEqual(
        self._source_repo.object_store, self._client_repo.object_store)
def test_clone_from_dulwich_empty(self):
    old_repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, old_repo_dir)
    self._old_repo = Repo.init_bare(old_repo_dir)
    port = self._start_server(self._old_repo)
    base_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, base_dir)
    target = os.path.join(base_dir, "empty_new")
    # Cloning an empty repository should succeed and yield an equal repo.
    run_git_or_fail(["clone", self.url(port), target], cwd=base_dir)
    self.assertReposEqual(self._old_repo, Repo(target))
def test_git_worktree_list(self):
    # 'git worktree list' was only introduced in git 2.7.0; skip on
    # older versions instead of failing (as the sibling variants of
    # this test already do).
    require_git_version((2, 7, 0))
    output = run_git_or_fail(['worktree', 'list'], cwd=self._repo.path)
    worktrees = self._parse_worktree_list(output)
    self.assertEqual(len(worktrees), self._number_of_working_tree)
    self.assertEqual(worktrees[0][1], '(bare)')
    self.assertEqual(worktrees[0][0], self._mainworktree_repo.path)
    output = run_git_or_fail(
        ['worktree', 'list'], cwd=self._mainworktree_repo.path)
    worktrees = self._parse_worktree_list(output)
    self.assertEqual(len(worktrees), self._number_of_working_tree)
    self.assertEqual(worktrees[0][1], '(bare)')
    self.assertEqual(worktrees[0][0], self._mainworktree_repo.path)
def create_new_worktree(self, repo_dir, branch):
    """Check out *branch* into a freshly created worktree.

    :param repo_dir: The directory of the main working tree.
    :param branch: The branch or commit to checkout in the new worktree.
    :returns: The path to the new working tree.
    """
    worktree_dir = tempfile.mkdtemp()
    run_git_or_fail(['worktree', 'add', worktree_dir, branch],
                    cwd=repo_dir)
    self.addCleanup(rmtree_ro, worktree_dir)
    return worktree_dir
def test_new_shallow_clone_from_dulwich(self):
    self._source_repo = import_repo("server_new.export")
    self.addCleanup(tear_down_repo, self._source_repo)
    self._stub_repo = _StubRepo("shallow")
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Mirror-clone with depth 1.
    run_git_or_fail([
        "clone", "--mirror", "--depth=1", "--no-single-branch",
        self.url(port), self._stub_repo.path,
    ])
    clone = self._stub_repo = Repo(self._stub_repo.path)
    expected_shallow = [
        "94de09a530df27ac3bb613aaecdd539e0a0655e1",
        "da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d",
    ]
    self.assertEqual(expected_shallow, _get_shallow(clone))
    self.assertReposNotEqual(clone, self._source_repo)
def test_fetch_from_dulwich_no_op(self):
    self._old_repo = import_repo('server_old.export')
    self.addCleanup(tear_down_repo, self._old_repo)
    self._new_repo = import_repo('server_old.export')
    self.addCleanup(tear_down_repo, self._new_repo)
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._new_repo)
    fetch_cmd = ['fetch', self.url(port)] + self.branch_args()
    run_git_or_fail(fetch_cmd, cwd=self._old_repo.path)
    # Reset the pack cache timestamp so new packs get picked up.
    self._old_repo.object_store._pack_cache_time = 0
    self.assertReposEqual(self._old_repo, self._new_repo)
def test_push_to_dulwich_remove_branch(self):
    self._old_repo = import_repo('server_old.export')
    self.addCleanup(tear_down_repo, self._old_repo)
    self._new_repo = import_repo('server_old.export')
    self.addCleanup(tear_down_repo, self._new_repo)
    self.assertReposEqual(self._old_repo, self._new_repo)
    port = self._start_server(self._old_repo)
    run_git_or_fail(['push', self.url(port), ":master"],
                    cwd=self._new_repo.path)
    # assertEquals is a deprecated alias of assertEqual; also wrap
    # keys() in list() so the comparison works on Python 3 views.
    self.assertEqual(
        list(self._old_repo.get_refs().keys()), ["refs/heads/branch"])
def test_clone_from_dulwich_empty(self):
    old_repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, old_repo_dir)
    self._old_repo = Repo.init_bare(old_repo_dir)
    port = self._start_server(self._old_repo)
    clone_base = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, clone_base)
    clone_dir = os.path.join(clone_base, 'empty_new')
    # git should be able to clone the empty dulwich-served repository.
    run_git_or_fail(['clone', self.url(port), clone_dir], cwd=clone_base)
    self.assertReposEqual(self._old_repo, Repo(clone_dir))
def test_git_worktree_list(self):
    # 'git worktree list' was introduced in 2.7.0
    require_git_version((2, 7, 0))
    # The listing must look identical whether queried from the extra
    # worktree or from the main one.
    for cwd in (self._repo.path, self._mainworktree_repo.path):
        output = run_git_or_fail(["worktree", "list"], cwd=cwd)
        worktrees = self._parse_worktree_list(output)
        self.assertEqual(len(worktrees), self._number_of_working_tree)
        self.assertEqual(worktrees[0][1], "(bare)")
        self.assertTrue(
            os.path.samefile(worktrees[0][0], self._mainworktree_repo.path)
        )
def test_new_shallow_clone_from_dulwich(self):
    self._source_repo = import_repo('server_new.export')
    self.addCleanup(tear_down_repo, self._source_repo)
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Depth-1 mirror clone via stock git.
    clone_cmd = ['clone', '--mirror', '--depth=1', '--no-single-branch',
                 self.url(port), self._stub_repo.path]
    run_git_or_fail(clone_cmd)
    clone = self._stub_repo = Repo(self._stub_repo.path)
    self.assertEqual(
        ['94de09a530df27ac3bb613aaecdd539e0a0655e1',
         'da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d'],
        _get_shallow(clone))
    self.assertReposNotEqual(clone, self._source_repo)
def test_new_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Fetch at depth 1
    run_git_or_fail(['clone', '--mirror', '--depth=1',
                     '--no-single-branch', self.url(port),
                     self._stub_repo.path])
    clone = self._stub_repo = Repo(self._stub_repo.path)
    expected_shallow = [b'35e0b59e187dd72a0af294aedffc213eaa4d03ff',
                        b'514dc6d3fbfe77361bcaef320c4d21b72bc10be9']
    self.assertEqual(expected_shallow, _get_shallow(clone))
    # assertNotEqual on two distinct Repo objects compares identity and
    # always passes; use the repo-aware helper like the sibling tests.
    self.assertReposNotEqual(clone, self._source_repo)
def test_new_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Depth-1 mirror clone using stock git.
    run_git_or_fail(
        ['clone', '--mirror', '--depth=1', '--no-single-branch',
         self.url(port), self._stub_repo.path])
    clone = self._stub_repo = Repo(self._stub_repo.path)
    expected = [b'94de09a530df27ac3bb613aaecdd539e0a0655e1',
                b'da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d']
    self.assertEqual(expected, _get_shallow(clone))
    self.assertReposNotEqual(clone, self._source_repo)
def test_delta_medium_object(self):
    # Build a pack whose delta needs a copy operation of 2**20 bytes.
    with self.get_pack(pack1_sha) as orig_pack:
        base = orig_pack[a_sha]
        blob_a = Blob()
        blob_a.data = base.data + (b'x' * 2 ** 20)
        blob_b = Blob()
        blob_b.data = blob_a.data + b'y'
        all_to_pack = list(orig_pack.pack_tuples()) + [
            (blob_a, None), (blob_b, None)]
        pack_path = os.path.join(self._tempdir, b'pack_with_deltas')
        write_pack(pack_path, all_to_pack, deltify=True)
        output = run_git_or_fail(['verify-pack', '-v', pack_path])
        self.assertEqual(set(x[0].id for x in all_to_pack),
                         _git_verify_pack_object_list(output))
        # blob_a should delta against a_sha, leaving exactly 3 full
        # (non-delta) objects in the pack.
        got_non_delta = int(
            _NON_DELTA_RE.search(output).group('non_delta'))
        self.assertEqual(
            3, got_non_delta,
            'Expected 3 non-delta objects, got %d' % got_non_delta)
        # blob_b deltas against blob_a, giving a chain of length two.
        self.assertIn(b'chain length = 2', output)
def test_delta_large_object(self):
    # Would test a copy operation of 2**25 bytes — large enough to need
    # two copy operations in git's binary delta format.
    raise SkipTest('skipping slow, large test')
    with self.get_pack(pack1_sha) as source_pack:
        base = source_pack[a_sha]
        big = Blob()
        big.data = 'big blob' + ('x' * 2 ** 25)
        big_plus = Blob()
        big_plus.data = big.data + 'y'
        all_to_pack = list(source_pack.pack_tuples()) + [
            (big, None), (big_plus, None)]
        pack_path = os.path.join(self._tempdir, "pack_with_deltas")
        write_pack(pack_path, all_to_pack, deltify=True)
        output = run_git_or_fail(['verify-pack', '-v', pack_path])
        self.assertEqual(set(x[0].id for x in all_to_pack),
                         _git_verify_pack_object_list(output))
        # The new blob deltas against a_sha, so only 4 objects should
        # remain non-delta.
        got_non_delta = int(
            _NON_DELTA_RE.search(output).group('non_delta'))
        self.assertEqual(
            4, got_non_delta,
            'Expected 4 non-delta objects, got %d' % got_non_delta)
def test_delta_large_object(self):
    # This tests an object set that will have a copy operation
    # 2**25 in size. This is a copy large enough that it requires
    # two copy operations in git's binary delta format.
    raise SkipTest('skipping slow, large test')
    # Open the pack with a context manager so the file handle is closed
    # even on failure, matching the other delta tests in this file.
    with self.get_pack(pack1_sha) as orig_pack:
        orig_blob = orig_pack[a_sha]
        new_blob = Blob()
        new_blob.data = 'big blob' + ('x' * 2**25)
        new_blob_2 = Blob()
        new_blob_2.data = new_blob.data + 'y'
        all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None),
                                                       (new_blob_2, None)]
        pack_path = os.path.join(self._tempdir, "pack_with_deltas")
        write_pack(pack_path, all_to_pack, deltify=True)
        output = run_git_or_fail(['verify-pack', '-v', pack_path])
        self.assertEqual(set(x[0].id for x in all_to_pack),
                         _git_verify_pack_object_list(output))
        # We specifically made a new blob that should be a delta
        # against the blob a_sha, so make sure we really got only 4
        # non-delta objects:
        got_non_delta = int(
            _NON_DELTA_RE.search(output).group('non_delta'))
        self.assertEqual(
            4, got_non_delta,
            'Expected 4 non-delta objects, got %d' % got_non_delta)
def test_delta_medium_object(self):
    # Exercises a delta containing a copy operation of 2**20 bytes.
    with self.get_pack(pack1_sha) as source_pack:
        base_blob = source_pack[a_sha]
        grown = Blob()
        grown.data = base_blob.data + (b'x' * 2**20)
        grown_plus = Blob()
        grown_plus.data = grown.data + b'y'
        all_to_pack = list(source_pack.pack_tuples()) + [
            (grown, None), (grown_plus, None)]
        pack_path = os.path.join(self._tempdir, 'pack_with_deltas')
        write_pack(pack_path, all_to_pack, deltify=True)
        output = run_git_or_fail(['verify-pack', '-v', pack_path])
        self.assertEqual(set(x[0].id for x in all_to_pack),
                         _git_verify_pack_object_list(output))
        # "grown" deltas against a_sha, so only 3 objects stay non-delta.
        got_non_delta = int(
            _NON_DELTA_RE.search(output).group('non_delta'))
        self.assertEqual(
            3, got_non_delta,
            'Expected 3 non-delta objects, got %d' % got_non_delta)
        # "grown_plus" deltas against "grown": a chain of length two.
        self.assertIn(b'chain length = 2', output)
def test_delta_large_object(self):
    # An object set whose copy operation is 2**25 bytes — big enough to
    # require two copy operations in git's binary delta format.
    raise SkipTest("skipping slow, large test")
    with self.get_pack(pack1_sha) as orig_pack:
        first = Blob()
        first.data = "big blob" + ("x" * 2**25)
        second = Blob()
        second.data = first.data + "y"
        all_to_pack = list(orig_pack.pack_tuples()) + [
            (first, None),
            (second, None),
        ]
        pack_path = os.path.join(self._tempdir, "pack_with_deltas")
        write_pack(pack_path, all_to_pack, deltify=True)
        output = run_git_or_fail(["verify-pack", "-v", pack_path])
        self.assertEqual(
            {x[0].id for x in all_to_pack},
            _git_verify_pack_object_list(output),
        )
        # The new blob deltas against a_sha, leaving only 4 full objects.
        got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
        self.assertEqual(
            4,
            got_non_delta,
            "Expected 4 non-delta objects, got %d" % got_non_delta,
        )
def setUp(self):
    # Launch a git-daemon for the tests; skip if one is already bound.
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    if check_for_daemon(limit=1):
        raise SkipTest('git-daemon was already running on port %s' %
                       protocol.TCP_GIT_PORT)
    fd, self.pidfile = tempfile.mkstemp(
        prefix='dulwich-test-git-client', suffix=".pid")
    os.fdopen(fd).close()
    daemon_cmd = [
        'daemon', '--verbose', '--export-all',
        '--pid-file=%s' % self.pidfile,
        '--base-path=%s' % self.gitroot,
        '--detach', '--reuseaddr',
        '--enable=receive-pack', '--enable=upload-archive',
        '--listen=localhost', self.gitroot,
    ]
    run_git_or_fail(daemon_cmd, cwd=self.gitroot)
    if not check_for_daemon():
        raise SkipTest('git-daemon failed to start')
def setUp(self):
    # Start a git-daemon; bail out if one already occupies the port.
    CompatTestCase.setUp(self)
    DulwichClientTestBase.setUp(self)
    if check_for_daemon(limit=1):
        raise SkipTest('git-daemon was already running on port %s' %
                       protocol.TCP_GIT_PORT)
    fd, self.pidfile = tempfile.mkstemp(
        prefix='dulwich-test-git-client', suffix=".pid")
    os.fdopen(fd).close()
    args = [
        'daemon', '--verbose', '--export-all',
        '--pid-file=%s' % self.pidfile,
        '--base-path=%s' % self.gitroot,
        '--detach', '--reuseaddr',
        '--enable=receive-pack',
        '--listen=localhost', self.gitroot,
    ]
    run_git_or_fail(args, cwd=self.gitroot)
    if not check_for_daemon():
        raise SkipTest('git-daemon failed to start')
def test_git_worktree_list(self):
    # 'git worktree list' was introduced in 2.7.0
    require_git_version((2, 7, 0))
    main_path = os.path.normcase(self._mainworktree_repo.path)
    # Output must be the same from either working tree.
    for cwd in (self._repo.path, self._mainworktree_repo.path):
        output = run_git_or_fail(['worktree', 'list'], cwd=cwd)
        worktrees = self._parse_worktree_list(output)
        self.assertEqual(len(worktrees), self._number_of_working_tree)
        self.assertEqual(worktrees[0][1], '(bare)')
        self.assertEqual(os.path.normcase(worktrees[0][0]), main_path)
def test_copy(self):
    # Round-trip a pack through write_pack and have C git verify it.
    with self.get_pack(pack1_sha) as origpack:
        self.assertSucceeds(origpack.index.check)
        pack_path = os.path.join(self._tempdir, "Elch")
        write_pack(pack_path, origpack.pack_tuples())
        output = run_git_or_fail(['verify-pack', '-v', pack_path])
        expected_shas = set(o.id for o in origpack.iterobjects())
        self.assertEqual(expected_shas,
                         _git_verify_pack_object_list(output))
def test_git_worktree_list(self):
    # 'git worktree list' was introduced in 2.7.0
    require_git_version((2, 7, 0))
    expected_main = os.path.normcase(self._mainworktree_repo.path)
    for query_dir in (self._repo.path, self._mainworktree_repo.path):
        listing = run_git_or_fail(['worktree', 'list'], cwd=query_dir)
        worktrees = self._parse_worktree_list(listing)
        self.assertEqual(len(worktrees), self._number_of_working_tree)
        # The first entry is always the (bare) main working tree.
        self.assertEqual(worktrees[0][1], '(bare)')
        self.assertEqual(
            os.path.normcase(worktrees[0][0]), expected_main)
def test_clone_from_dulwich_empty(self):
    old_repo_dir = os.path.join(tempfile.mkdtemp(), 'empty_old')
    run_git_or_fail(['init', '--quiet', '--bare', old_repo_dir])
    self._old_repo = Repo(old_repo_dir)
    port = self._start_server(self._old_repo)
    new_repo_base_dir = tempfile.mkdtemp()
    try:
        new_repo_dir = os.path.join(new_repo_base_dir, 'empty_new')
        run_git_or_fail(['clone', self.url(port), new_repo_dir],
                        cwd=new_repo_base_dir)
        self.assertReposEqual(self._old_repo, Repo(new_repo_dir))
    finally:
        # The clone target repo is only opened after the risky steps, so
        # clean the base dir here rather than relying on tearDown.
        shutil.rmtree(new_repo_base_dir)
def test_clone_from_dulwich_empty(self):
    empty_dir = os.path.join(tempfile.mkdtemp(), 'empty_old')
    run_git_or_fail(['init', '--quiet', '--bare', empty_dir])
    self._old_repo = Repo(empty_dir)
    port = self._start_server(self._old_repo)
    clone_base = tempfile.mkdtemp()
    try:
        clone_dir = os.path.join(clone_base, 'empty_new')
        run_git_or_fail(['clone', self.url(port), clone_dir],
                        cwd=clone_base)
        clone = Repo(clone_dir)
        self.assertReposEqual(self._old_repo, clone)
    finally:
        # Errors may occur before the clone repo exists, so remove the
        # base directory ourselves instead of depending on tearDown.
        shutil.rmtree(clone_base)
def test_copy(self):
    # Write the pack back out and let C git's verify-pack validate it.
    with self.get_pack(pack1_sha) as origpack:
        self.assertSucceeds(origpack.index.check)
        copy_path = os.path.join(self._tempdir, b'Elch')
        write_pack(copy_path, origpack.pack_tuples())
        output = run_git_or_fail(['verify-pack', '-v', copy_path])
        original_shas = set(o.id for o in origpack.iterobjects())
        self.assertEqual(original_shas,
                         _git_verify_pack_object_list(output))
def test_shallow_clone_from_git_is_identical(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo_git = _StubRepo('shallow-git')
    self.addCleanup(tear_down_repo, self._stub_repo_git)
    self._stub_repo_dw = _StubRepo('shallow-dw')
    self.addCleanup(tear_down_repo, self._stub_repo_dw)
    # shallow clone using stock git, then using dulwich
    run_git_or_fail(['clone', '--mirror', '--depth=1',
                     '--no-single-branch',
                     'file://' + self._source_repo.path,
                     self._stub_repo_git.path])
    port = self._start_server(self._source_repo)
    run_git_or_fail(['clone', '--mirror', '--depth=1',
                     '--no-single-branch', self.url(port),
                     self._stub_repo_dw.path])
    # assertEqual on two distinct Repo instances compares identity and
    # would always fail; use the repo-aware comparison helper, as the
    # sibling version of this test does.
    self.assertReposEqual(Repo(self._stub_repo_git.path),
                          Repo(self._stub_repo_dw.path))
def test_fetch_same_depth_into_shallow_clone_from_dulwich(self):
    self._source_repo = import_repo('server_new.export')
    self.addCleanup(tear_down_repo, self._source_repo)
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Depth-1 mirror clone.
    run_git_or_fail(
        ['clone', '--mirror', '--depth=1', '--no-single-branch',
         self.url(port), self._stub_repo.path])
    clone = self._stub_repo = Repo(self._stub_repo.path)
    # Fetching again at the same depth should change nothing.
    run_git_or_fail(
        ['fetch', '--depth=1', self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path)
    expected = ['94de09a530df27ac3bb613aaecdd539e0a0655e1',
                'da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d']
    self.assertEqual(expected, _get_shallow(clone))
    self.assertReposNotEqual(clone, self._source_repo)
def test_shallow_clone_from_git_is_identical(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo_git = _StubRepo('shallow-git')
    self.addCleanup(tear_down_repo, self._stub_repo_git)
    self._stub_repo_dw = _StubRepo('shallow-dw')
    self.addCleanup(tear_down_repo, self._stub_repo_dw)
    # Shallow clone twice: once via stock git, once via dulwich's server.
    shallow_args = ['clone', '--mirror', '--depth=1', '--no-single-branch']
    run_git_or_fail(
        shallow_args + ['file://' + self._source_repo.path,
                        self._stub_repo_git.path])
    port = self._start_server(self._source_repo)
    run_git_or_fail(
        shallow_args + [self.url(port), self._stub_repo_dw.path])
    # The two clones must come out identical.
    self.assertReposEqual(Repo(self._stub_repo_git.path),
                          Repo(self._stub_repo_dw.path))
def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)
    # Start from a depth-1 mirror clone.
    run_git_or_fail(
        ['clone', '--mirror', '--depth=1', '--no-single-branch',
         self.url(port), self._stub_repo.path])
    clone = self._stub_repo = Repo(self._stub_repo.path)
    # Re-fetching at the same depth is a no-op.
    run_git_or_fail(
        ['fetch', '--depth=1', self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path)
    # Depth 3 covers the whole history, so the clone un-shallows and
    # must now equal server_new.
    run_git_or_fail(
        ['fetch', '--depth=3', self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path)
    self.assertEqual([], _get_shallow(clone))
    self.assertReposEqual(clone, self._source_repo)
def test_deltas_work(self):
    orig_pack = self.get_pack(pack1_sha)
    orig_blob = orig_pack[a_sha]
    new_blob = Blob()
    # Blob data is bytes; appending a str literal raises TypeError on
    # Python 3 (the sibling delta tests already use bytes literals).
    new_blob.data = orig_blob.data + b"x"
    all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None)]
    pack_path = os.path.join(self._tempdir, "pack_with_deltas")
    write_pack(pack_path, all_to_pack, deltify=True)
    output = run_git_or_fail(["verify-pack", "-v", pack_path])
    self.assertEqual(set(x[0].id for x in all_to_pack),
                     _git_verify_pack_object_list(output))
    # We specifically made a new blob that should be a delta
    # against the blob a_sha, so make sure we really got only 3
    # non-delta objects:
    got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
    self.assertEqual(
        3, got_non_delta,
        "Expected 3 non-delta objects, got %d" % got_non_delta)