def test_working_tree(self):
    temp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, temp_dir)
    worktree_temp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, worktree_temp_dir)
    r = Repo.init(temp_dir)
    self.addCleanup(r.close)
    root_sha = r.do_commit(
        b'empty commit',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12345, commit_timezone=0,
        author_timestamp=12345, author_timezone=0)
    r.refs[b'refs/heads/master'] = root_sha
    w = Repo._init_new_working_directory(worktree_temp_dir, r)
    self.addCleanup(w.close)
    new_sha = w.do_commit(
        b'new commit',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12345, commit_timezone=0,
        author_timestamp=12345, author_timezone=0)
    w.refs[b'HEAD'] = new_sha
    self.assertEqual(os.path.abspath(r.controldir()),
                     os.path.abspath(w.commondir()))
    self.assertEqual(r.refs.keys(), w.refs.keys())
    self.assertNotEqual(r.head(), w.head())
def test_clone_from_dulwich_empty(self):
    old_repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, old_repo_dir)
    self._old_repo = Repo.init_bare(old_repo_dir)
    port = self._start_server(self._old_repo)

    new_repo_base_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, new_repo_base_dir)
    new_repo_dir = os.path.join(new_repo_base_dir, 'empty_new')
    run_git_or_fail(['clone', self.url(port), new_repo_dir],
                    cwd=new_repo_base_dir)
    new_repo = Repo(new_repo_dir)
    self.assertReposEqual(self._old_repo, new_repo)
def init(path=".", bare=False):
    """Create a new git repository.

    :param path: Path to repository.
    :param bare: Whether to create a bare repository.
    :return: A Repo instance
    """
    if not os.path.exists(path):
        os.mkdir(path)
    if bare:
        return Repo.init_bare(path)
    else:
        return Repo.init(path)
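# A minimal usage sketch for init() above. The temporary directories are
# illustrative only and cleanup is left to the caller; this is a sketch, not
# part of the function's own module.
import tempfile

work_repo = init(tempfile.mkdtemp())              # non-bare repo with a .git/ dir
bare_repo = init(tempfile.mkdtemp(), bare=True)   # bare repo, no working tree
work_repo.close()
bare_repo.close()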
def test_remove_packed_without_peeled(self):
    refs_file = os.path.join(self._repo.path, 'packed-refs')
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    f = GitFile(refs_file, 'wb')
    f.write(b'\n'.join(l for l in refs_data.split(b'\n')
                       if not l or l[0] not in b'#^'))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        b'refs/heads/packed',
        b'42d06bd4b77fed026b154d16493e5deab78f02ec'))
def test_common_revisions(self):
    """
    This test demonstrates that ``find_common_revisions()`` actually
    returns common heads, not revisions; dulwich already uses
    ``find_common_revisions()`` in such a manner (see
    ``Repo.fetch_objects()``).
    """
    expected_shas = set([b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e'])

    # Source for objects.
    r_base = self.open_repo('simple_merge.git')

    # Re-create each-side of the merge in simple_merge.git.
    #
    # Since the trees and blobs are missing, the repository created is
    # corrupted, but we're only checking for commits for the purpose of
    # this test, so it's immaterial.
    r1_dir = self.mkdtemp()
    self.addCleanup(shutil.rmtree, r1_dir)
    r1_commits = [
        b'ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd',  # HEAD
        b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
        b'0d89f20333fbb1d2f3a94da77f4981373d8f4310',
    ]

    r2_dir = self.mkdtemp()
    self.addCleanup(shutil.rmtree, r2_dir)
    r2_commits = [
        b'4cffe90e0a41ad3f5190079d7c8f036bde29cbe6',  # HEAD
        b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e',
        b'0d89f20333fbb1d2f3a94da77f4981373d8f4310',
    ]

    r1 = Repo.init_bare(r1_dir)
    for c in r1_commits:
        r1.object_store.add_object(r_base.get_object(c))
    r1.refs[b'HEAD'] = r1_commits[0]

    r2 = Repo.init_bare(r2_dir)
    for c in r2_commits:
        r2.object_store.add_object(r_base.get_object(c))
    r2.refs[b'HEAD'] = r2_commits[0]

    # Finally, the 'real' testing!
    shas = r2.object_store.find_common_revisions(r1.get_graph_walker())
    self.assertEqual(set(shas), expected_shas)

    shas = r1.object_store.find_common_revisions(r2.get_graph_walker())
    self.assertEqual(set(shas), expected_shas)
def test_init_with_info_grafts(self):
    r = self._repo
    r._put_named_file(os.path.join('info', 'grafts'),
                      self._shas[-1] + b' ' + self._shas[0])
    r = Repo(self._repo_dir)
    self.assertEqual({self._shas[-1]: [self._shas[0]]}, r._graftpoints)
def read_submodule_head(path):
    """Read the head commit of a submodule.

    :param path: path to the submodule
    :return: HEAD sha, None if not a valid head/repository
    """
    from my_dulwich.errors import NotGitRepository
    from my_dulwich.repo import Repo
    try:
        repo = Repo(path)
    except NotGitRepository:
        return None
    try:
        return repo.head()
    except KeyError:
        return None
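# A minimal sketch of calling read_submodule_head() above; the submodule path
# is hypothetical. The function returns None both for paths that are not git
# repositories and for repositories whose HEAD cannot be resolved.
head_sha = read_submodule_head('vendor/somelib')  # hypothetical path
if head_sha is None:
    print('not a submodule checkout (or HEAD is unborn)')
else:
    print('submodule HEAD is', head_sha)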
def test_get_unstaged_changes(self):
    """Unit test for get_unstaged_changes."""
    repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, repo_dir)
    with Repo.init(repo_dir) as repo:
        # Commit a dummy file then modify it
        foo1_fullpath = os.path.join(repo_dir, 'foo1')
        with open(foo1_fullpath, 'wb') as f:
            f.write(b'origstuff')

        foo2_fullpath = os.path.join(repo_dir, 'foo2')
        with open(foo2_fullpath, 'wb') as f:
            f.write(b'origstuff')

        repo.stage(['foo1', 'foo2'])
        repo.do_commit(b'test status', author=b'author <email>',
                       committer=b'committer <email>')

        with open(foo1_fullpath, 'wb') as f:
            f.write(b'newstuff')

        # modify access and modify time of path
        os.utime(foo1_fullpath, (0, 0))

        changes = get_unstaged_changes(repo.open_index(), repo_dir)

        self.assertEqual(list(changes), [b'foo1'])
def test_symlink(self):
    repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, repo_dir)
    with Repo.init(repo_dir) as repo:
        # Populate repo
        filed = Blob.from_string(b'file d')
        filee = Blob.from_string(b'd')

        tree = Tree()
        tree[b'c/d'] = (stat.S_IFREG | 0o644, filed.id)
        tree[b'c/e'] = (stat.S_IFLNK, filee.id)  # symlink

        repo.object_store.add_objects([(o, None)
                                       for o in [filed, filee, tree]])

        build_index_from_tree(repo.path, repo.index_path(),
                              repo.object_store, tree.id)

        # Verify index entries
        index = repo.open_index()

        # symlink to d
        epath = os.path.join(repo.path, 'c', 'e')
        self.assertTrue(os.path.exists(epath))
        self.assertReasonableIndexEntry(
            index[b'c/e'], stat.S_IFLNK,
            0 if sys.platform == 'win32' else 1,
            filee.id)
        self.assertFileContents(epath, 'd', symlink=True)
def test_git_dir(self):
    repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, repo_dir)
    with Repo.init(repo_dir) as repo:
        # Populate repo
        filea = Blob.from_string(b'file a')
        filee = Blob.from_string(b'd')

        tree = Tree()
        tree[b'.git/a'] = (stat.S_IFREG | 0o644, filea.id)
        tree[b'c/e'] = (stat.S_IFREG | 0o644, filee.id)

        repo.object_store.add_objects([(o, None)
                                       for o in [filea, filee, tree]])

        build_index_from_tree(repo.path, repo.index_path(),
                              repo.object_store, tree.id)

        # Verify index entries
        index = repo.open_index()
        self.assertEqual(len(index), 1)

        # filea
        apath = os.path.join(repo.path, '.git', 'a')
        self.assertFalse(os.path.exists(apath))

        # filee
        epath = os.path.join(repo.path, 'c', 'e')
        self.assertTrue(os.path.exists(epath))
        self.assertReasonableIndexEntry(index[b'c/e'],
                                        stat.S_IFREG | 0o644, 1, filee.id)
        self.assertFileContents(epath, b'd')
def test_create_disk_non_bare_mkdir(self):
    tmp_dir = tempfile.mkdtemp()
    target_dir = os.path.join(tmp_dir, "target")
    self.addCleanup(shutil.rmtree, tmp_dir)
    repo = Repo.init(target_dir, mkdir=True)
    self.assertEqual(os.path.join(target_dir, '.git'), repo._controldir)
    self._check_repo_contents(repo, False)
def main(argv=sys.argv):
    """Entry point for starting an HTTP git server."""
    import optparse
    parser = optparse.OptionParser()
    parser.add_option("-l", "--listen_address", dest="listen_address",
                      default="localhost",
                      help="Binding IP address.")
    parser.add_option("-p", "--port", dest="port", type=int,
                      default=8000,
                      help="Port to listen on.")
    options, args = parser.parse_args(argv)

    if len(args) > 1:
        gitdir = args[1]
    else:
        gitdir = os.getcwd()

    log_utils.default_logging_config()
    backend = DictBackend({'/': Repo(gitdir)})
    app = make_wsgi_chain(backend)
    server = make_server(options.listen_address, options.port, app,
                         handler_class=WSGIRequestHandlerLogger,
                         server_class=WSGIServerLogger)
    logger.info('Listening for HTTP connections on %s:%d',
                options.listen_address, options.port)
    server.serve_forever()
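# Hedged usage sketch for main() above: serve one repository over HTTP. The
# repository path is illustrative, and the first list element stands in for
# the program name, following the sys.argv convention the parser expects.
if __name__ == '__main__':
    main(['dulwich-web', '/srv/git/project.git'])  # blocks in serve_forever()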
def setUpClass(cls):
    cls.projdir = tempfile.mkdtemp()  # temporary project directory
    cls.repo = Repo.init(cls.projdir)  # test repo
    obj_store = cls.repo.object_store  # test repo object store

    # commit 1 ('2017-01-19T01:06:43')
    cls.c1 = make_commit(
        id=cls.tag_test_data[cls.test_tags[0]][1],
        commit_time=cls.tag_test_data[cls.test_tags[0]][0],
        message=b'unannotated tag',
        author=cls.committer)
    obj_store.add_object(cls.c1)

    # tag 1: unannotated
    cls.t1 = cls.test_tags[0]
    cls.repo[b'refs/tags/' + cls.t1] = cls.c1.id  # add unannotated tag

    # commit 2 ('2017-01-19T01:11:54')
    cls.c2 = make_commit(
        id=cls.tag_test_data[cls.test_tags[1]][1],
        commit_time=cls.tag_test_data[cls.test_tags[1]][0],
        message=b'annotated tag',
        parents=[cls.c1.id],
        author=cls.committer)
    obj_store.add_object(cls.c2)

    # tag 2: annotated ('2017-01-19T01:13:21')
    cls.t2 = make_tag(
        cls.c2,
        id=cls.tag_test_data[cls.test_tags[1]][2][1],
        name=cls.test_tags[1],
        tag_time=cls.tag_test_data[cls.test_tags[1]][2][0])
    obj_store.add_object(cls.t2)

    cls.repo[b'refs/heads/master'] = cls.c2.id
    cls.repo[b'refs/tags/' + cls.t2.name] = cls.t2.id  # add annotated tag
def test_fetch_full_depth_into_shallow_clone_from_dulwich(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo = _StubRepo('shallow')
    self.addCleanup(tear_down_repo, self._stub_repo)
    port = self._start_server(self._source_repo)

    # Fetch at depth 2
    run_git_or_fail(
        ['clone', '--mirror', '--depth=2', '--no-single-branch',
         self.url(port), self._stub_repo.path])
    clone = self._stub_repo = Repo(self._stub_repo.path)

    # Fetching at the same depth is a no-op.
    run_git_or_fail(
        ['fetch', '--depth=2', self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path)

    # The whole repo only has depth 4, so it should equal server_new.
    run_git_or_fail(
        ['fetch', '--depth=4', self.url(port)] + self.branch_args(),
        cwd=self._stub_repo.path)
    self.assertEqual([], _get_shallow(clone))
    self.assertReposEqual(clone, self._source_repo)
def test_load_ignore(self):
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, tmp_dir)
    repo = Repo.init(tmp_dir)
    with open(os.path.join(repo.path, '.gitignore'), 'wb') as f:
        f.write(b'/foo/bar\n')
        f.write(b'/dir2\n')
        f.write(b'/dir3/\n')
    os.mkdir(os.path.join(repo.path, 'dir'))
    with open(os.path.join(repo.path, 'dir', '.gitignore'), 'wb') as f:
        f.write(b'/blie\n')
    with open(os.path.join(repo.path, 'dir', 'blie'), 'wb') as f:
        f.write(b'IGNORED')
    p = os.path.join(repo.controldir(), 'info', 'exclude')
    with open(p, 'wb') as f:
        f.write(b'/excluded\n')
    m = IgnoreFilterManager.from_repo(repo)
    self.assertTrue(m.is_ignored('dir/blie'))
    self.assertIs(None, m.is_ignored(os.path.join('dir', 'bloe')))
    self.assertIs(None, m.is_ignored('dir'))
    self.assertTrue(m.is_ignored(os.path.join('foo', 'bar')))
    self.assertTrue(m.is_ignored(os.path.join('excluded')))
    self.assertTrue(m.is_ignored(os.path.join('dir2', 'fileinignoreddir')))
    self.assertFalse(m.is_ignored('dir3'))
    self.assertTrue(m.is_ignored('dir3/'))
    self.assertTrue(m.is_ignored('dir3/bla'))
def open_repo_closing(path_or_repo):
    """Open an argument that can be a repository or a path for a repository.

    Returns a context manager that will close the repo on exit if the
    argument is a path, else does nothing if the argument is a repo.
    """
    if isinstance(path_or_repo, BaseRepo):
        return _noop_context_manager(path_or_repo)
    return closing(Repo(path_or_repo))
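# A minimal sketch of how open_repo_closing() above is meant to be used: pass
# either a path or an already-open Repo; only the path case is closed on exit.
# The path below is illustrative.
with open_repo_closing('/path/to/repo') as r:
    print(r.head())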
def test_send_pack_with_changes(self):
    local = open_repo('a.git')
    self.addCleanup(tear_down_repo, local)
    target_path = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, target_path)
    with Repo.init_bare(target_path) as target:
        self.send_and_verify(b"master", local, target)
def setUp(self):
    super(FileSystemBackendTests, self).setUp()
    self.path = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, self.path)
    self.repo = Repo.init(self.path)
    if sys.platform == 'win32':
        self.backend = FileSystemBackend(self.path[0] + ':' + os.sep)
    else:
        self.backend = FileSystemBackend()
def open_repository(self, path):
    logger.debug('opening repository at %s', path)
    abspath = os.path.abspath(os.path.join(self.root, path)) + os.sep
    normcase_abspath = os.path.normcase(abspath)
    normcase_root = os.path.normcase(self.root)
    if not normcase_abspath.startswith(normcase_root):
        raise NotGitRepository(
            "Path %r not inside root %r" % (path, self.root))
    return Repo(abspath)
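# Hedged sketch of the containment check above, assuming the enclosing class
# is FileSystemBackend (which the setUp earlier in this section constructs
# with an optional root path). The root and repository paths are illustrative;
# a path that escapes the root raises NotGitRepository.
backend = FileSystemBackend('/srv/git')
repo = backend.open_repository('project.git')  # resolved under /srv/git
repo.close()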
def setUp(self):
    super(WorkingTreeTestCase, self).setUp()
    self._worktree_path = self.create_new_worktree(self._repo.path, 'branch')
    self._worktree_repo = Repo(self._worktree_path)
    self.addCleanup(self._worktree_repo.close)
    self._mainworktree_repo = self._repo
    self._number_of_working_tree = 2
    self._repo = self._worktree_repo
def setUp(self):
    super(InitNewWorkingDirectoryTestCase, self).setUp()
    self._other_worktree = self._repo
    worktree_repo_path = tempfile.mkdtemp()
    self.addCleanup(rmtree_ro, worktree_repo_path)
    self._repo = Repo._init_new_working_directory(
        worktree_repo_path, self._mainworktree_repo)
    self.addCleanup(self._repo.close)
    self._number_of_working_tree = 3
def test_clone_no_head(self):
    temp_dir = self.mkdtemp()
    self.addCleanup(shutil.rmtree, temp_dir)
    repo_dir = os.path.join(os.path.dirname(__file__), 'data', 'repos')
    dest_dir = os.path.join(temp_dir, 'a.git')
    shutil.copytree(os.path.join(repo_dir, 'a.git'), dest_dir,
                    symlinks=True)
    r = Repo(dest_dir)
    del r.refs[b"refs/heads/master"]
    del r.refs[b"HEAD"]
    t = r.clone(os.path.join(temp_dir, 'b.git'), mkdir=True)
    self.assertEqual(
        {
            b'refs/tags/mytag':
                b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a',
            b'refs/tags/mytag-packed':
                b'b0931cadc54336e78a1d980420e3268903b57a50',
        },
        t.refs.as_dict())
def test_submodule(self):
    temp_dir = self.mkdtemp()
    self.addCleanup(shutil.rmtree, temp_dir)
    repo_dir = os.path.join(os.path.dirname(__file__), 'data', 'repos')
    shutil.copytree(os.path.join(repo_dir, 'a.git'),
                    os.path.join(temp_dir, 'a.git'), symlinks=True)
    rel = os.path.relpath(os.path.join(repo_dir, 'submodule'), temp_dir)
    os.symlink(os.path.join(rel, 'dotgit'), os.path.join(temp_dir, '.git'))
    with Repo(temp_dir) as r:
        self.assertEqual(r.head(),
                         b'a90fa2d900a17e99b433217e988c4eb4a2e9a097')
def test_nonempty(self):
    repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, repo_dir)
    with Repo.init(repo_dir) as repo:
        # Populate repo
        filea = Blob.from_string(b'file a')
        fileb = Blob.from_string(b'file b')
        filed = Blob.from_string(b'file d')

        tree = Tree()
        tree[b'a'] = (stat.S_IFREG | 0o644, filea.id)
        tree[b'b'] = (stat.S_IFREG | 0o644, fileb.id)
        tree[b'c/d'] = (stat.S_IFREG | 0o644, filed.id)

        repo.object_store.add_objects([
            (o, None) for o in [filea, fileb, filed, tree]
        ])

        build_index_from_tree(repo.path, repo.index_path(),
                              repo.object_store, tree.id)

        # Verify index entries
        index = repo.open_index()
        self.assertEqual(len(index), 3)

        # filea
        apath = os.path.join(repo.path, 'a')
        self.assertTrue(os.path.exists(apath))
        self.assertReasonableIndexEntry(index[b'a'],
                                        stat.S_IFREG | 0o644, 6, filea.id)
        self.assertFileContents(apath, b'file a')

        # fileb
        bpath = os.path.join(repo.path, 'b')
        self.assertTrue(os.path.exists(bpath))
        self.assertReasonableIndexEntry(index[b'b'],
                                        stat.S_IFREG | 0o644, 6, fileb.id)
        self.assertFileContents(bpath, b'file b')

        # filed
        dpath = os.path.join(repo.path, 'c', 'd')
        self.assertTrue(os.path.exists(dpath))
        self.assertReasonableIndexEntry(index[b'c/d'],
                                        stat.S_IFREG | 0o644, 6, filed.id)
        self.assertFileContents(dpath, b'file d')

        # Verify no extra files
        self.assertEqual(['.git', 'a', 'b', 'c'],
                         sorted(os.listdir(repo.path)))
        self.assertEqual(['d'],
                         sorted(os.listdir(os.path.join(repo.path, 'c'))))
def test_ignored_contents(self):
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, tmp_dir)
    repo = Repo.init(tmp_dir)
    with open(os.path.join(repo.path, '.gitignore'), 'wb') as f:
        f.write(b'a/*\n')
        f.write(b'!a/*.txt\n')
    m = IgnoreFilterManager.from_repo(repo)
    os.mkdir(os.path.join(repo.path, 'a'))
    self.assertIs(None, m.is_ignored('a'))
    self.assertIs(None, m.is_ignored('a/'))
    self.assertFalse(m.is_ignored('a/b.txt'))
    self.assertTrue(m.is_ignored('a/c.dat'))
def test_load_ignore_ignorecase(self):
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, tmp_dir)
    repo = Repo.init(tmp_dir)
    config = repo.get_config()
    config.set(b'core', b'ignorecase', True)
    config.write_to_path()
    with open(os.path.join(repo.path, '.gitignore'), 'wb') as f:
        f.write(b'/foo/bar\n')
        f.write(b'/dir\n')
    m = IgnoreFilterManager.from_repo(repo)
    self.assertTrue(m.is_ignored(os.path.join('dir', 'blie')))
    self.assertTrue(m.is_ignored(os.path.join('DIR', 'blie')))
def test_shallow_clone_from_git_is_identical(self):
    require_git_version(self.min_single_branch_version)
    self._source_repo = self.import_repo('server_new.export')
    self._stub_repo_git = _StubRepo('shallow-git')
    self.addCleanup(tear_down_repo, self._stub_repo_git)
    self._stub_repo_dw = _StubRepo('shallow-dw')
    self.addCleanup(tear_down_repo, self._stub_repo_dw)

    # shallow clone using stock git, then using dulwich
    run_git_or_fail(
        ['clone', '--mirror', '--depth=1', '--no-single-branch',
         'file://' + self._source_repo.path, self._stub_repo_git.path])

    port = self._start_server(self._source_repo)
    run_git_or_fail(
        ['clone', '--mirror', '--depth=1', '--no-single-branch',
         self.url(port), self._stub_repo_dw.path])

    # compare the two clones; they should be equal
    self.assertReposEqual(Repo(self._stub_repo_git.path),
                          Repo(self._stub_repo_dw.path))
def test_empty(self):
    repo_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, repo_dir)
    with Repo.init(repo_dir) as repo:
        tree = Tree()
        repo.object_store.add_object(tree)

        build_index_from_tree(repo.path, repo.index_path(),
                              repo.object_store, tree.id)

        # Verify index entries
        index = repo.open_index()
        self.assertEqual(len(index), 0)

        # Verify no files
        self.assertEqual(['.git'], os.listdir(repo.path))
def get_recent_tags(projdir=PROJDIR):
    """Get list of tags in order from newest to oldest and their datetimes.

    :param projdir: path to ``.git``
    :returns: list of tags sorted by commit time from newest to oldest

    Each tag in the list contains the tag name, commit time, commit id, author
    and any tag meta. If a tag isn't annotated, then its tag meta is ``None``.
    Otherwise the tag meta is a tuple containing the tag time, tag id and tag
    name. Time is in UTC.
    """
    with Repo(projdir) as project:  # dulwich repository object
        refs = project.get_refs()  # dictionary of refs and their SHA-1 values
        tags = {}  # empty dictionary to hold tags, commits and datetimes
        # iterate over refs in repository
        for key, value in refs.items():
            key = key.decode('utf-8')  # compatible with Python-3
            obj = project.get_object(value)  # dulwich object from SHA-1
            # don't just check if object is "tag" b/c it could be a "commit"
            # instead check if "tags" is in the ref-name
            if u'tags' not in key:
                # skip ref if not a tag
                continue
            # strip the leading text from refs to get "tag name"
            _, tag = key.rsplit(u'/', 1)
            # check if tag object is "commit" or "tag" pointing to a "commit"
            try:
                commit = obj.object  # a tuple (commit class, commit id)
            except AttributeError:
                commit = obj
                tag_meta = None
            else:
                tag_meta = (
                    datetime.datetime(*time.gmtime(obj.tag_time)[:6]),
                    obj.id.decode('utf-8'),
                    obj.name.decode('utf-8')
                )  # compatible with Python-3
                commit = project.get_object(commit[1])  # commit object
            # get tag commit datetime, but dulwich returns seconds since
            # beginning of epoch, so use Python time module to convert it to
            # timetuple then convert to datetime
            tags[tag] = [
                datetime.datetime(*time.gmtime(commit.commit_time)[:6]),
                commit.id.decode('utf-8'),
                commit.author.decode('utf-8'),
                tag_meta
            ]  # compatible with Python-3

    # return list of tags sorted by their datetimes from newest to oldest
    return sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True)
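# Hedged usage sketch for get_recent_tags() above; the project path is
# illustrative. Per the docstring, each entry is
# (tag_name, [commit_datetime, commit_id, author, tag_meta]), with tag_meta
# None for unannotated tags.
for tag, (commit_dt, commit_id, author, tag_meta) in get_recent_tags('.'):
    kind = 'annotated' if tag_meta is not None else 'unannotated'
    print(tag, commit_dt.isoformat(), kind)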
def import_repo(self, name):
    """Import a repo from a fast-export file in a temporary directory.

    :param name: The name of the repository export file, relative to
        dulwich/tests/data/repos.
    :returns: An initialized Repo object that lives in a temporary
        directory.
    """
    path = import_repo_to_dir(name)
    repo = Repo(path)

    def cleanup():
        repo.close()
        rmtree_ro(os.path.dirname(path.rstrip(os.sep)))

    self.addCleanup(cleanup)
    return repo
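# Hedged sketch of using the import_repo() helper above inside a test method;
# the test name is hypothetical, while 'server_new.export' is an export file
# referenced elsewhere in this suite. Cleanup is registered by the helper.
def test_imported_repo_has_head(self):
    repo = self.import_repo('server_new.export')
    self.assertTrue(repo.head())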