def test_add_thin_pack_empty(self):
    """Adding an empty thin pack is a no-op that must not fail."""
    store = DiskObjectStore(self.store_dir)
    buf = BytesIO()
    pack_entries = build_pack(buf, [], store=store)
    self.assertEqual([], pack_entries)
    store.add_thin_pack(buf.read, None)
def test_loose_compression_level(self):
    """A store built with a loose_compression_level still round-trips objects.

    The original test added the object but asserted nothing; verify the
    object can actually be read back from the compressed loose store.
    """
    alternate_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
    b2 = make_object(Blob, data=b"yummy data")
    alternate_store.add_object(b2)
    # Read-back check: compression level must not affect retrievability.
    self.assertEqual(b2, alternate_store[b2.id])
def test_add_thin_pack(self):
    """A thin pack holding a REF_DELTA is resolved against the store's blob."""
    store = DiskObjectStore(self.store_dir)
    try:
        base = make_object(Blob, data=b"yummy data")
        store.add_object(base)
        buf = BytesIO()
        entries = build_pack(
            buf,
            [(REF_DELTA, (base.id, b"more yummy data"))],
            store=store,
        )
        with store.add_thin_pack(buf.read, None) as pack:
            delta_sha = sha_to_hex(entries[0][3])
            pack.check_length_and_checksum()
            self.assertEqual(sorted([base.id, delta_sha]), list(pack))
            self.assertTrue(store.contains_packed(delta_sha))
            self.assertTrue(store.contains_packed(base.id))
            self.assertEqual(
                (Blob.type_num, b"more yummy data"),
                store.get_raw(delta_sha),
            )
    finally:
        store.close()
def test_add_thin_pack_empty(self):
    """Adding an empty thin pack must succeed and produce no entries."""
    o = DiskObjectStore(self.store_dir)
    f = BytesIO()
    entries = build_pack(f, [], store=o)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual([], entries)
    o.add_thin_pack(f.read, None)
def _init_maybe_bare(cls, path, bare):
    """Create the on-disk control layout at *path* and return the repo."""
    # Standard control-directory skeleton first, then the object store.
    for segments in BASE_DIRECTORIES:
        os.mkdir(os.path.join(path, *segments))
    DiskObjectStore.init(os.path.join(path, OBJECTDIR))
    repo = cls(path)
    # Fresh repositories start with HEAD as a symref to the default branch.
    repo.refs.set_symbolic_ref(b'HEAD', DEFAULT_REF)
    repo._init_files(bare)
    return repo
def create(worktree: Path, store: Path):
    """Initialise a fresh repository with *store* as its control directory.

    NOTE(review): uses functional combinators (``List``/``__``, amino/tek
    style) — presumably equivalent to a plain mkdir loop over
    BASE_DIRECTORIES; confirm against the combinator library's semantics.
    """
    # Create every base directory under *store* (parents, no error if exists).
    List.wrap(BASE_DIRECTORIES)\
        .smap(store.joinpath) %\
        __.mkdir(parents=True, exist_ok=True)
    DiskObjectStore.init(str(store / OBJECTDIR))
    repo = DulwichRepo(store, worktree)
    # Point HEAD at the configured master ref, as `git init` does.
    repo.refs.set_symbolic_ref(b'HEAD', _master_ref.encode())
    return repo
def _init_maybe_bare(cls, path, bare):
    """Lay out a new repository at *path* (bare or not) and return it."""
    for segments in BASE_DIRECTORIES:
        os.mkdir(os.path.join(path, *segments))
    DiskObjectStore.init(os.path.join(path, OBJECTDIR))
    repo = cls(path)
    # New repositories begin with HEAD as a symref to master.
    repo.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
    repo._init_files(bare)
    return repo
def _init_maybe_bare(cls, path, bare):
    """Create the directory skeleton and initial files for a repo at *path*."""
    # Base layout, then the object store inside it.
    for segments in BASE_DIRECTORIES:
        os.mkdir(os.path.join(path, *segments))
    DiskObjectStore.init(os.path.join(path, OBJECTDIR))
    repo = cls(path)
    repo.refs.set_symbolic_ref("HEAD", "refs/heads/master")
    repo._init_files(bare)
    return repo
def test_add_pack(self):
    """add_pack() hands back (file, commit, abort); commit finalizes it."""
    store = DiskObjectStore(self.store_dir)
    f, commit, abort = store.add_pack()
    try:
        blob = make_object(Blob, data=b"more yummy data")
        write_pack_objects(f, [(blob, None)])
    except BaseException:
        # On any failure, discard the partial pack before re-raising.
        abort()
        raise
    else:
        commit()
def test_add_pack(self):
    """add_pack() returns (file, commit, abort); commit finalizes the pack."""
    o = DiskObjectStore(self.store_dir)
    f, commit, abort = o.add_pack()
    try:
        b = make_object(Blob, data=b"more yummy data")
        write_pack_objects(f, [(b, None)])
    except BaseException:
        # Name BaseException explicitly instead of a bare `except:` — same
        # semantics (abort-and-reraise on anything), but explicit intent.
        abort()
        raise
    else:
        commit()
def test_add_alternate_path(self):
    """Alternate paths accumulate, preserving insertion order."""
    store = DiskObjectStore(self.store_dir)
    expected = []
    self.assertEqual(expected, list(store._read_alternate_paths()))
    # Add two alternates and verify the file grows in order each time.
    for alternate in (b'/foo/path', b'/bar/path'):
        store.add_alternate_path(alternate)
        expected.append(alternate)
        self.assertEqual(expected, list(store._read_alternate_paths()))
def test_add_alternate_path(self):
    """Each added alternate path appears, in order, in the alternates file."""
    store = DiskObjectStore(self.store_dir)
    expected = []
    self.assertEqual(expected, list(store._read_alternate_paths()))
    for alternate in ("/foo/path", "/bar/path"):
        store.add_alternate_path(alternate)
        expected.append(alternate)
        self.assertEqual(expected, list(store._read_alternate_paths()))
def _init_maybe_bare(cls, path, bare):
    """Lay out a new repository at *path*, which may be str or bytes."""
    # Filesystem operations use a bytes path for a consistent encoding.
    if isinstance(path, bytes):
        path_bytes = path
    else:
        path_bytes = path.encode(sys.getfilesystemencoding())
    for segments in BASE_DIRECTORIES:
        os.mkdir(os.path.join(path_bytes, *segments))
    DiskObjectStore.init(os.path.join(path_bytes, OBJECTDIR))
    # The repo object itself is constructed with the caller's original path.
    repo = cls(path)
    repo.refs.set_symbolic_ref(b'HEAD', b"refs/heads/master")
    repo._init_files(bare)
    return repo
def __init__(self, root):
    """Open an existing repository at *root*.

    Detects either a non-bare checkout (``root/.git/objects`` exists) or a
    bare repository (*root* itself holds ``objects`` and ``refs``).

    :param root: Path to the repository root.
    :raise NotGitRepository: If neither layout is present at *root*.
    """
    if os.path.isdir(os.path.join(root, ".git", OBJECTDIR)):
        self.bare = False
        self._controldir = os.path.join(root, ".git")
    elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
          os.path.isdir(os.path.join(root, REFSDIR))):
        self.bare = True
        self._controldir = root
    else:
        raise NotGitRepository(root)
    self.path = root
    # Both refs and objects live under the detected control directory.
    self.refs = DiskRefsContainer(self.controldir())
    self.object_store = DiskObjectStore(
        os.path.join(self.controldir(), OBJECTDIR))
def test_add_thin_pack(self):
    """A REF_DELTA thin pack is completed against an existing blob."""
    o = DiskObjectStore(self.store_dir)
    blob = make_object(Blob, data='yummy data')
    o.add_object(blob)
    f = StringIO()
    entries = build_pack(f, [
        (REF_DELTA, (blob.id, 'more yummy data')),
        ], store=o)
    pack = o.add_thin_pack(f.read, None)
    try:
        # entries[0][3] is the binary SHA of the resolved delta object
        # (hence the sha_to_hex conversion).
        packed_blob_sha = sha_to_hex(entries[0][3])
        pack.check_length_and_checksum()
        self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
        self.assertTrue(o.contains_packed(packed_blob_sha))
        self.assertTrue(o.contains_packed(blob.id))
        self.assertEqual((Blob.type_num, 'more yummy data'),
                         o.get_raw(packed_blob_sha))
    finally:
        # FIXME: DiskObjectStore should have close() which do the following:
        # close every cached pack so file handles are released before the
        # temp directory is removed.
        for p in o._pack_cache or []:
            p.close()
        pack.close()
def init_bare(cls, path, mkdir=True):
    """Create a new bare repository layout at *path*.

    NOTE(review): *mkdir* is accepted but never used — *path* must already
    exist; confirm whether it should create the directory.
    NOTE(review): the written config says ``bare = false`` even though this
    is ``init_bare`` — looks inconsistent; verify against callers.
    """
    for d in BASE_DIRECTORIES:
        os.mkdir(os.path.join(path, *d))
    DiskObjectStore.init(os.path.join(path, OBJECTDIR))
    ret = cls(path)
    ret.refs.set_symbolic_ref("HEAD", "refs/heads/master")
    ret._put_named_file('description', "Unnamed repository")
    ret._put_named_file('config', """[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
""")
    ret._put_named_file(os.path.join('info', 'excludes'), '')
    return ret
def setUp(self):
    """Create a fresh object store in a self-cleaning temp dir (bytes path)."""
    TestCase.setUp(self)
    store_dir = tempfile.mkdtemp()
    # The store is exercised with a bytes path on this code path.
    if not isinstance(store_dir, bytes):
        store_dir = store_dir.encode(sys.getfilesystemencoding())
    self.store_dir = store_dir
    self.addCleanup(shutil.rmtree, self.store_dir)
    self.store = DiskObjectStore.init(self.store_dir)
def __init__(self, root):
    """Open a git repository at *root*.

    Handles three layouts: a working tree with a ``.git`` directory, a bare
    repository (*root* is the control dir), and a ``.git`` *file* containing
    a ``gitdir: <path>`` pointer (submodules / separate git dirs).

    :param root: Path to the working tree or bare repository.
    :raise NotGitRepository: If no repository layout is found at *root*.
    """
    if os.path.isdir(os.path.join(root, ".git", OBJECTDIR)):
        self.bare = False
        self._controldir = os.path.join(root, ".git")
    elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
          os.path.isdir(os.path.join(root, REFSDIR))):
        self.bare = True
        self._controldir = root
    elif os.path.isfile(os.path.join(root, ".git")):
        # ".git" is a pointer file of the form "gitdir: <path>".
        # Import kept local, as in the original, since the common directory
        # layouts never need it.
        import re
        with open(os.path.join(root, ".git"), 'r') as f:
            _, path = re.match('(gitdir: )(.+$)', f.read()).groups()
        self.bare = False
        self._controldir = os.path.join(root, path)
    else:
        raise NotGitRepository("No git repository was found at %(path)s" %
                               dict(path=root))
    self.path = root
    object_store = DiskObjectStore(
        os.path.join(self.controldir(), OBJECTDIR))
    refs = DiskRefsContainer(self.controldir())
    BaseRepo.__init__(self, object_store, refs)
    graft_file = self.get_named_file(os.path.join("info", "grafts"))
    if graft_file:
        # Close the file object once parsed (the original leaked it).
        with graft_file:
            self._graftpoints = parse_graftpoints(graft_file)
    self.hooks['pre-commit'] = PreCommitShellHook(self.controldir())
    self.hooks['commit-msg'] = CommitMsgShellHook(self.controldir())
    self.hooks['post-commit'] = PostCommitShellHook(self.controldir())
def test_add_alternate_path(self):
    """Alternate paths accumulate in insertion order."""
    store = DiskObjectStore(self.store_dir)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual([], store._read_alternate_paths())
    store.add_alternate_path("/foo/path")
    self.assertEqual(["/foo/path"], store._read_alternate_paths())
    store.add_alternate_path("/bar/path")
    self.assertEqual(
        ["/foo/path", "/bar/path"], store._read_alternate_paths())
def test_add_alternate_path(self):
    """Registered alternates show up, in order, via _read_alternate_paths."""
    store = DiskObjectStore(self.store_dir)
    seen_so_far = []
    self.assertEqual(seen_so_far, list(store._read_alternate_paths()))
    for new_path in (b'/foo/path', b'/bar/path'):
        store.add_alternate_path(new_path)
        seen_so_far.append(new_path)
        self.assertEqual(seen_so_far, list(store._read_alternate_paths()))
def test_alternates(self):
    """Objects from an alternate store become visible once registered."""
    store = DiskObjectStore(self.store_dir)
    alternate_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir)
    blob = make_object(Blob, data="yummy data")
    alternate_store.add_object(blob)
    # Not reachable until the alternate path is registered.
    self.assertRaises(KeyError, store.__getitem__, blob.id)
    store.add_alternate_path(alternate_dir)
    self.assertEqual(blob, store[blob.id])
def test_rel_alternative_path(self):
    """A relative alternate path is resolved against the store directory."""
    alternate_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir)
    blob = make_object(Blob, data=b"yummy data")
    alternate_store.add_object(blob)
    store = DiskObjectStore(self.store_dir)
    # Unreachable before the alternate is registered.
    self.assertRaises(KeyError, store.__getitem__, blob.id)
    relative = os.path.relpath(alternate_dir, self.store_dir)
    store.add_alternate_path(relative)
    self.assertEqual(list(alternate_store), list(store.alternates[0]))
    self.assertIn(blob.id, store)
    self.assertEqual(blob, store[blob.id])
def test_alternates(self):
    """Objects in a (bytes-path) alternate store become reachable."""
    alternate_dir = tempfile.mkdtemp()
    # This variant exercises the bytes-path code path.
    if not isinstance(alternate_dir, bytes):
        alternate_dir = alternate_dir.encode(sys.getfilesystemencoding())
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir)
    blob = make_object(Blob, data=b"yummy data")
    alternate_store.add_object(blob)
    store = DiskObjectStore(self.store_dir)
    self.assertRaises(KeyError, store.__getitem__, blob.id)
    store.add_alternate_path(alternate_dir)
    self.assertIn(blob.id, store)
    self.assertEqual(blob, store[blob.id])
def __init__(self, root):
    """Open a git repository at *root*.

    Supports a working tree (``root/<CONTROLDIR>`` directory), a bare
    repository, and a gitfile pointer resolved via ``read_gitfile``; also
    honours a ``commondir`` file (worktrees sharing one object store).

    :param root: Path of the repository root.
    :raise NotGitRepository: If no git layout is found at *root*.
    """
    hidden_path = os.path.join(root, CONTROLDIR)
    if os.path.isdir(os.path.join(hidden_path, OBJECTDIR)):
        self.bare = False
        self._controldir = hidden_path
    elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
          os.path.isdir(os.path.join(root, REFSDIR))):
        self.bare = True
        self._controldir = root
    elif os.path.isfile(hidden_path):
        self.bare = False
        with open(hidden_path, 'r') as f:
            path = read_gitfile(f)
        # NOTE(review): self.bare is assigned False twice in this branch —
        # the second assignment below is redundant.
        self.bare = False
        self._controldir = os.path.join(root, path)
    else:
        raise NotGitRepository(
            "No git repository was found at %(path)s" % dict(path=root)
        )
    # The common directory may differ from the control dir for worktrees.
    commondir = self.get_named_file(COMMONDIR)
    if commondir is not None:
        with commondir:
            self._commondir = os.path.join(
                self.controldir(),
                os.fsdecode(commondir.read().rstrip(b"\r\n")))
    else:
        self._commondir = self._controldir
    self.path = root
    config = self.get_config()
    # Object store settings (e.g. compression) come from the repo config.
    object_store = DiskObjectStore.from_config(
        os.path.join(self.commondir(), OBJECTDIR), config)
    refs = DiskRefsContainer(self.commondir(), self._controldir,
                             logger=self._write_reflog)
    BaseRepo.__init__(self, object_store, refs)
    # Graft points come from both info/grafts and the shallow file.
    self._graftpoints = {}
    graft_file = self.get_named_file(os.path.join("info", "grafts"),
                                     basedir=self.commondir())
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    graft_file = self.get_named_file("shallow", basedir=self.commondir())
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    self.hooks['pre-commit'] = PreCommitShellHook(self.controldir())
    self.hooks['commit-msg'] = CommitMsgShellHook(self.controldir())
    self.hooks['post-commit'] = PostCommitShellHook(self.controldir())
    self.hooks['post-receive'] = PostReceiveShellHook(self.controldir())
def test_alternates(self):
    """Objects in an alternate store are visible after registration."""
    alternate_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir)
    b2 = make_object(Blob, data="yummy data")
    alternate_store.add_object(b2)
    store = DiskObjectStore(self.store_dir)
    self.assertRaises(KeyError, store.__getitem__, b2.id)
    store.add_alternate_path(alternate_dir)
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(b2, store[b2.id])
def test_add_thin_pack(self):
    """A REF_DELTA thin pack is completed against the store's base blob."""
    store = DiskObjectStore(self.store_dir)
    base = make_object(Blob, data='yummy data')
    store.add_object(base)
    buf = BytesIO()
    entries = build_pack(buf, [
        (REF_DELTA, (base.id, 'more yummy data')),
        ], store=store)
    pack = store.add_thin_pack(buf.read, None)
    try:
        resolved_sha = sha_to_hex(entries[0][3])
        pack.check_length_and_checksum()
        self.assertEqual(sorted([base.id, resolved_sha]), list(pack))
        self.assertTrue(store.contains_packed(resolved_sha))
        self.assertTrue(store.contains_packed(base.id))
        self.assertEqual((Blob.type_num, 'more yummy data'),
                         store.get_raw(resolved_sha))
    finally:
        # Release file handles held by the store and the new pack.
        store.close()
        pack.close()
def test_rel_alternative_path(self):
    """Relative alternate paths resolve relative to the store directory."""
    alternate_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, alternate_dir)
    alternate_store = DiskObjectStore(alternate_dir)
    blob = make_object(Blob, data="yummy data")
    alternate_store.add_object(blob)
    store = DiskObjectStore(self.store_dir)
    # The object is invisible until the alternate is registered.
    self.assertRaises(KeyError, store.__getitem__, blob.id)
    relative = os.path.relpath(alternate_dir, self.store_dir)
    store.add_alternate_path(relative)
    self.assertEqual(list(alternate_store), list(store.alternates[0]))
    self.assertIn(blob.id, store)
    self.assertEqual(blob, store[blob.id])
def test_add_thin_pack(self):
    """A REF_DELTA thin pack is completed against an existing blob."""
    o = DiskObjectStore(self.store_dir)
    blob = make_object(Blob, data='yummy data')
    o.add_object(blob)
    f = StringIO()
    entries = build_pack(f, [
        (REF_DELTA, (blob.id, 'more yummy data')),
        ], store=o)
    pack = o.add_thin_pack(f.read, None)
    try:
        packed_blob_sha = sha_to_hex(entries[0][3])
        pack.check_length_and_checksum()
        self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
        self.assertTrue(o.contains_packed(packed_blob_sha))
        self.assertTrue(o.contains_packed(blob.id))
        self.assertEqual((Blob.type_num, 'more yummy data'),
                         o.get_raw(packed_blob_sha))
    finally:
        # Close the returned pack so its file handles are released before
        # the temp store directory is removed (the original never closed
        # it; the other variants of this test in the suite do).
        pack.close()
def __init__(self, path):
    """Open a repository at *path*; filesystem work uses a bytes path.

    Supports a ``.git`` directory, a bare layout, and a ``.git`` pointer
    file containing ``gitdir: <path>``.

    :param path: Repository root, str or bytes.
    :raise NotGitRepository: If no git layout is found at *path*.
    """
    self.path = path
    # Normalise to bytes once so every os.path call below is consistent.
    if not isinstance(path, bytes):
        self._path_bytes = path.encode(sys.getfilesystemencoding())
    else:
        self._path_bytes = path
    if os.path.isdir(os.path.join(self._path_bytes, b'.git', OBJECTDIR)):
        self.bare = False
        self._controldir = os.path.join(self._path_bytes, b'.git')
    elif (os.path.isdir(os.path.join(self._path_bytes, OBJECTDIR)) and
          os.path.isdir(os.path.join(self._path_bytes, REFSDIR))):
        self.bare = True
        self._controldir = self._path_bytes
    elif (os.path.isfile(os.path.join(self._path_bytes, b'.git'))):
        # ".git" is a pointer file: b"gitdir: <path>".
        import re
        with open(os.path.join(self._path_bytes, b'.git'), 'rb') as f:
            _, gitdir = re.match(b'(gitdir: )(.+$)', f.read()).groups()
        self.bare = False
        self._controldir = os.path.join(self._path_bytes, gitdir)
    else:
        raise NotGitRepository("No git repository was found at %(path)s" %
                               dict(path=path))
    object_store = DiskObjectStore(
        os.path.join(self.controldir(), OBJECTDIR))
    refs = DiskRefsContainer(self.controldir())
    BaseRepo.__init__(self, object_store, refs)
    # Graft points come from both info/grafts and the shallow file.
    self._graftpoints = {}
    graft_file = self.get_named_file(os.path.join(b'info', b'grafts'))
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    graft_file = self.get_named_file(b'shallow')
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    self.hooks['pre-commit'] = PreCommitShellHook(self.controldir())
    self.hooks['commit-msg'] = CommitMsgShellHook(self.controldir())
    self.hooks['post-commit'] = PostCommitShellHook(self.controldir())
def __init__(self, root):
    """Open a repository at *root*.

    Supports a working tree with a ``CONTROLDIR`` directory, a bare
    repository, and a gitfile pointer resolved via ``read_gitfile``.

    :param root: Path of the repository root.
    :raise NotGitRepository: If no git layout is found at *root*.
    """
    hidden_path = os.path.join(root, CONTROLDIR)
    if os.path.isdir(os.path.join(hidden_path, OBJECTDIR)):
        self.bare = False
        self._controldir = hidden_path
    elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
          os.path.isdir(os.path.join(root, REFSDIR))):
        self.bare = True
        self._controldir = root
    elif os.path.isfile(hidden_path):
        # Gitfile pointer: resolve the real control directory.
        # (The original assigned self.bare = False twice in this branch.)
        self.bare = False
        with open(hidden_path, 'r') as f:
            path = read_gitfile(f)
        self._controldir = os.path.join(root, path)
    else:
        raise NotGitRepository(
            "No git repository was found at %(path)s" % dict(path=root)
        )
    self.path = root
    object_store = DiskObjectStore(os.path.join(self.controldir(),
                                                OBJECTDIR))
    refs = DiskRefsContainer(self.controldir())
    BaseRepo.__init__(self, object_store, refs)
    # Graft points come from both info/grafts and the shallow file.
    self._graftpoints = {}
    graft_file = self.get_named_file(os.path.join("info", "grafts"))
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    graft_file = self.get_named_file("shallow")
    if graft_file:
        with graft_file:
            self._graftpoints.update(parse_graftpoints(graft_file))
    self.hooks['pre-commit'] = PreCommitShellHook(self.controldir())
    self.hooks['commit-msg'] = CommitMsgShellHook(self.controldir())
    self.hooks['post-commit'] = PostCommitShellHook(self.controldir())
def setUp(self):
    """Create a temporary store directory, removed again on teardown."""
    TestCase.setUp(self)
    self.store_dir = tempfile.mkdtemp()
    # Remove the temp directory after each test so runs don't accumulate
    # directories (mirrors the other setUp variants in this suite).
    self.addCleanup(shutil.rmtree, self.store_dir)
    self.store = DiskObjectStore.init(self.store_dir)
class Repo(object):
    """A local git repository.

    :ivar refs: Dictionary with the refs in this repository
    :ivar object_store: Dictionary-like object for accessing the objects
    """

    def __init__(self, root):
        # Detect layout: working tree with a ".git" dir, or a bare repo.
        if os.path.isdir(os.path.join(root, ".git", OBJECTDIR)):
            self.bare = False
            self._controldir = os.path.join(root, ".git")
        elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
              os.path.isdir(os.path.join(root, REFSDIR))):
            self.bare = True
            self._controldir = root
        else:
            raise NotGitRepository(root)
        self.path = root
        self.refs = DiskRefsContainer(self.controldir())
        self.object_store = DiskObjectStore(
            os.path.join(self.controldir(), OBJECTDIR))

    def controldir(self):
        """Return the path of the control directory."""
        return self._controldir

    def index_path(self):
        """Return path to the index file."""
        return os.path.join(self.controldir(), INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        return os.path.exists(self.index_path())

    def fetch_objects(self, determine_wants, graph_walker, progress):
        """Fetch the missing objects required for a set of revisions.

        :param determine_wants: Function that takes a dictionary with heads
            and returns the list of heads to fetch.
        :param graph_walker: Object that can iterate over the list of
            revisions to fetch and has an "ack" method that will be called
            to acknowledge that a revision is present.
        :param progress: Simple progress function that will be called with
            updated progress strings.
        :return: iterator over objects, with __len__ implemented
        """
        wants = determine_wants(self.get_refs())
        haves = self.object_store.find_common_revisions(graph_walker)
        return self.object_store.iter_shas(
            self.object_store.find_missing_objects(haves, wants, progress))

    def get_graph_walker(self, heads=None):
        """Return a graph walker; defaults to all local branch heads."""
        if heads is None:
            heads = self.refs.as_dict('refs/heads').values()
        return self.object_store.get_graph_walker(heads)

    def ref(self, name):
        """Return the SHA1 a ref is pointing to."""
        try:
            return self.refs.follow(name)
        except KeyError:
            # Fall back to packed-refs for refs not stored loose.
            return self.get_packed_refs()[name]

    def get_refs(self):
        """Get dictionary with all refs."""
        ret = {}
        try:
            if self.head():
                ret['HEAD'] = self.head()
        except KeyError:
            # Unborn HEAD (e.g. fresh repo) is simply omitted.
            pass
        ret.update(self.refs.as_dict())
        ret.update(self.get_packed_refs())
        return ret

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s
        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        path = os.path.join(self.controldir(), 'packed-refs')
        if not os.path.exists(path):
            return {}
        ret = {}
        f = open(path, 'rb')
        try:
            # Entries are (sha, ref-name) pairs; map name -> sha.
            for entry in read_packed_refs(f):
                ret[entry[1]] = entry[0]
            return ret
        finally:
            f.close()

    def head(self):
        """Return the SHA1 pointed at by HEAD."""
        return self.refs.follow('HEAD')

    def _get_object(self, sha, cls):
        """Return the object for *sha*, enforcing that it has type *cls*."""
        # Accepts binary (20 byte) or hex (40 char) SHAs.
        assert len(sha) in (20, 40)
        ret = self.get_object(sha)
        if ret._type != cls._type:
            if cls is Commit:
                raise NotCommitError(ret)
            elif cls is Blob:
                raise NotBlobError(ret)
            elif cls is Tree:
                raise NotTreeError(ret)
            else:
                raise Exception("Type invalid: %r != %r" % (
                    ret._type, cls._type))
        return ret

    def get_object(self, sha):
        """Return the object with SHA *sha* from the object store."""
        return self.object_store[sha]

    def get_parents(self, sha):
        """Return the parent SHAs of the commit identified by *sha*."""
        return self.commit(sha).parents

    def commit(self, sha):
        """Return the Commit object for *sha*."""
        return self._get_object(sha, Commit)

    def tree(self, sha):
        """Return the Tree object for *sha*."""
        return self._get_object(sha, Tree)

    def tag(self, sha):
        """Return the Tag object for *sha*."""
        return self._get_object(sha, Tag)

    def get_blob(self, sha):
        """Return the Blob object for *sha*."""
        return self._get_object(sha, Blob)

    def revision_history(self, head):
        """Returns a list of the commits reachable from head.

        Returns a list of commit objects. the first of which will be the
        commit of head, then following theat will be the parents.

        Raises NotCommitError if any no commits are referenced, including
        if the head parameter isn't the sha of a commit.

        XXX: work out how to handle merges.
        """
        # We build the list backwards, as parents are more likely to be older
        # than children
        pending_commits = [head]
        history = []
        while pending_commits != []:
            head = pending_commits.pop(0)
            try:
                commit = self.commit(head)
            except KeyError:
                raise MissingCommitError(head)
            if commit in history:
                continue
            # Insert sorted by commit_time (older first, before the final
            # reverse below).
            i = 0
            for known_commit in history:
                if known_commit.commit_time > commit.commit_time:
                    break
                i += 1
            history.insert(i, commit)
            parents = commit.parents
            pending_commits += parents
        history.reverse()
        return history

    def __repr__(self):
        return "<Repo at %r>" % self.path

    def __getitem__(self, name):
        # A 20/40-character name is treated as a SHA; otherwise as a ref.
        if len(name) in (20, 40):
            return self.object_store[name]
        return self.object_store[self.refs[name]]

    def __setitem__(self, name, value):
        if name.startswith("refs/") or name == "HEAD":
            if isinstance(value, ShaFile):
                self.refs[name] = value.id
            elif isinstance(value, str):
                self.refs[name] = value
            else:
                raise TypeError(value)
        # NOTE(review): this raise is unconditional, so even a successful
        # ref update ends with ValueError — looks like a bug; confirm.
        raise ValueError(name)

    def __delitem__(self, name):
        if name.startswith("refs") or name == "HEAD":
            del self.refs[name]
        # NOTE(review): unconditional raise, same issue as __setitem__.
        raise ValueError(name)

    @classmethod
    def init(cls, path, mkdir=True):
        """Create a non-bare repository: make *path*/.git and initialise it."""
        controldir = os.path.join(path, ".git")
        os.mkdir(controldir)
        cls.init_bare(controldir)
        return cls(path)

    @classmethod
    def init_bare(cls, path, mkdir=True):
        """Create a bare repository layout at *path*.

        NOTE(review): *mkdir* is accepted but unused here.
        """
        for d in [[OBJECTDIR],
                  [OBJECTDIR, "info"],
                  [OBJECTDIR, "pack"],
                  ["branches"],
                  [REFSDIR],
                  [REFSDIR, REFSDIR_TAGS],
                  [REFSDIR, REFSDIR_HEADS],
                  ["hooks"],
                  ["info"]]:
            os.mkdir(os.path.join(path, *d))
        ret = cls(path)
        ret.refs.set_ref("HEAD", "refs/heads/master")
        # NOTE(review): these open() handles are never explicitly closed;
        # CPython closes them at GC time.
        open(os.path.join(path, 'description'), 'wb').write("Unnamed repository")
        open(os.path.join(path, 'info', 'excludes'), 'wb').write("")
        return ret

    # Alias kept for callers that use Repo.create(...).
    create = init_bare
def test_pack_dir(self):
    """pack_dir points at the "pack" subdirectory of the store."""
    store = DiskObjectStore(self.store_dir)
    expected = os.path.join(self.store_dir, "pack")
    self.assertEqual(expected, store.pack_dir)
def test_pack_dir(self):
    """pack_dir is the "pack" subdirectory of the store path."""
    o = DiskObjectStore("foo")
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(os.path.join("foo", "pack"), o.pack_dir)
def setUp(self):
    """Initialise a DiskObjectStore in a self-cleaning temp directory."""
    TestCase.setUp(self)
    store_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, store_dir)
    self.store_dir = store_dir
    self.store = DiskObjectStore.init(store_dir)
def test_add_pack(self):
    """Writing objects through add_pack() and committing succeeds."""
    store = DiskObjectStore(self.store_dir)
    # Older API: add_pack() returns (file, commit) with no abort callback.
    f, commit = store.add_pack()
    blob = make_object(Blob, data="more yummy data")
    write_pack_objects(f, [(blob, None)])
    commit()
def test_empty_packs(self):
    """A store with no pack files reports an empty pack list."""
    o = DiskObjectStore("foo")
    # assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual([], o.packs)
def setUp(self):
    """(Re)create the fixed "foo" store directory used by these tests."""
    TestCase.setUp(self)
    # Start from a clean slate in case a previous run left "foo" behind.
    if os.path.exists("foo"):
        shutil.rmtree("foo")
    os.makedirs(os.path.join("foo", "pack"))
    # Also remove the directory on teardown, so a test run does not leave
    # "foo" in the working directory.
    self.addCleanup(shutil.rmtree, "foo")
    self.store = DiskObjectStore("foo")
class Repo(object):
    """A local git repository.

    :ivar refs: Dictionary with the refs in this repository
    :ivar object_store: Dictionary-like object for accessing the objects
    """

    def __init__(self, root):
        # Detect layout: working tree with a ".git" dir, or a bare repo.
        if os.path.isdir(os.path.join(root, ".git", OBJECTDIR)):
            self.bare = False
            self._controldir = os.path.join(root, ".git")
        elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
              os.path.isdir(os.path.join(root, REFSDIR))):
            self.bare = True
            self._controldir = root
        else:
            raise NotGitRepository(root)
        self.path = root
        self.refs = DiskRefsContainer(self.controldir())
        self.object_store = DiskObjectStore(
            os.path.join(self.controldir(), OBJECTDIR))

    def controldir(self):
        """Return the path of the control directory."""
        return self._controldir

    def index_path(self):
        """Return path to the index file."""
        return os.path.join(self.controldir(), INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        return os.path.exists(self.index_path())

    def fetch(self, target, determine_wants=None, progress=None):
        """Fetch objects into another repository.

        :param target: The target repository
        :param determine_wants: Optional function to determine what refs
            to fetch.
        :param progress: Optional progress function
        """
        target.object_store.add_objects(
            self.fetch_objects(determine_wants, target.get_graph_walker(),
                progress))
        return self.get_refs()

    def fetch_objects(self, determine_wants, graph_walker, progress):
        """Fetch the missing objects required for a set of revisions.

        :param determine_wants: Function that takes a dictionary with heads
            and returns the list of heads to fetch.
        :param graph_walker: Object that can iterate over the list of
            revisions to fetch and has an "ack" method that will be called
            to acknowledge that a revision is present.
        :param progress: Simple progress function that will be called with
            updated progress strings.
        :return: iterator over objects, with __len__ implemented
        """
        wants = determine_wants(self.get_refs())
        haves = self.object_store.find_common_revisions(graph_walker)
        return self.object_store.iter_shas(
            self.object_store.find_missing_objects(haves, wants, progress))

    def get_graph_walker(self, heads=None):
        """Return a graph walker; defaults to all local branch heads."""
        if heads is None:
            heads = self.refs.as_dict('refs/heads').values()
        return self.object_store.get_graph_walker(heads)

    def ref(self, name):
        """Return the SHA1 a ref is pointing to."""
        try:
            return self.refs.follow(name)
        except KeyError:
            # Fall back to packed-refs for refs not stored loose.
            return self.get_packed_refs()[name]

    def get_refs(self):
        """Get dictionary with all refs."""
        ret = {}
        try:
            if self.head():
                ret['HEAD'] = self.head()
        except KeyError:
            # Unborn HEAD (e.g. fresh repo) is simply omitted.
            pass
        ret.update(self.refs.as_dict())
        ret.update(self.get_packed_refs())
        return ret

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s
        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        path = os.path.join(self.controldir(), 'packed-refs')
        if not os.path.exists(path):
            return {}
        ret = {}
        f = open(path, 'rb')
        try:
            # Entries are (sha, ref-name) pairs; map name -> sha.
            for entry in read_packed_refs(f):
                ret[entry[1]] = entry[0]
            return ret
        finally:
            f.close()

    def head(self):
        """Return the SHA1 pointed at by HEAD."""
        return self.refs.follow('HEAD')

    def _get_object(self, sha, cls):
        """Return the object for *sha*, enforcing that it has type *cls*."""
        # Accepts binary (20 byte) or hex (40 char) SHAs.
        assert len(sha) in (20, 40)
        ret = self.get_object(sha)
        if ret._type != cls._type:
            if cls is Commit:
                raise NotCommitError(ret)
            elif cls is Blob:
                raise NotBlobError(ret)
            elif cls is Tree:
                raise NotTreeError(ret)
            else:
                raise Exception("Type invalid: %r != %r" % (
                    ret._type, cls._type))
        return ret

    def get_object(self, sha):
        """Return the object with SHA *sha* from the object store."""
        return self.object_store[sha]

    def get_parents(self, sha):
        """Return the parent SHAs of the commit identified by *sha*."""
        return self.commit(sha).parents

    def commit(self, sha):
        """Return the Commit object for *sha*."""
        return self._get_object(sha, Commit)

    def tree(self, sha):
        """Return the Tree object for *sha*."""
        return self._get_object(sha, Tree)

    def tag(self, sha):
        """Return the Tag object for *sha*."""
        return self._get_object(sha, Tag)

    def get_blob(self, sha):
        """Return the Blob object for *sha*."""
        return self._get_object(sha, Blob)

    def revision_history(self, head):
        """Returns a list of the commits reachable from head.

        Returns a list of commit objects. the first of which will be the
        commit of head, then following theat will be the parents.

        Raises NotCommitError if any no commits are referenced, including
        if the head parameter isn't the sha of a commit.

        XXX: work out how to handle merges.
        """
        # We build the list backwards, as parents are more likely to be older
        # than children
        pending_commits = [head]
        history = []
        while pending_commits != []:
            head = pending_commits.pop(0)
            try:
                commit = self.commit(head)
            except KeyError:
                raise MissingCommitError(head)
            if commit in history:
                continue
            # Insert sorted by commit_time (older first, before the final
            # reverse below).
            i = 0
            for known_commit in history:
                if known_commit.commit_time > commit.commit_time:
                    break
                i += 1
            history.insert(i, commit)
            parents = commit.parents
            pending_commits += parents
        history.reverse()
        return history

    def __repr__(self):
        return "<Repo at %r>" % self.path

    def __getitem__(self, name):
        # A 20/40-character name is treated as a SHA; otherwise as a ref.
        if len(name) in (20, 40):
            return self.object_store[name]
        return self.object_store[self.refs[name]]

    def __setitem__(self, name, value):
        if name.startswith("refs/") or name == "HEAD":
            if isinstance(value, ShaFile):
                self.refs[name] = value.id
            elif isinstance(value, str):
                self.refs[name] = value
            else:
                raise TypeError(value)
        # NOTE(review): this raise is unconditional, so even a successful
        # ref update ends with ValueError — looks like a bug; confirm.
        raise ValueError(name)

    def __delitem__(self, name):
        if name.startswith("refs") or name == "HEAD":
            del self.refs[name]
        # NOTE(review): unconditional raise, same issue as __setitem__.
        raise ValueError(name)

    def do_commit(self, committer, message, author=None,
                  commit_timestamp=None, commit_timezone=None,
                  author_timestamp=None, author_timezone=None, tree=None):
        """Create a new commit.

        :param committer: Committer fullname
        :param message: Commit message
        :param author: Author fullname (defaults to committer)
        :param commit_timestamp: Commit timestamp (defaults to now)
        :param commit_timezone: Commit timestamp timezone (defaults to GMT)
        :param author_timestamp: Author timestamp (defaults to commit
            timestamp)
        :param author_timezone: Author timestamp timezone (defaults to
            commit timestamp timezone)
        :param tree: SHA1 of the tree root to use (if not specified the
            current index will be committed).
        :return: New commit SHA1
        """
        from dulwich.index import commit_index
        import time
        index = self.open_index()
        c = Commit()
        if tree is None:
            # Commit whatever is currently staged in the index.
            c.tree = commit_index(self.object_store, index)
        else:
            c.tree = tree
        c.committer = committer
        if commit_timestamp is None:
            commit_timestamp = time.time()
        c.commit_time = int(commit_timestamp)
        if commit_timezone is None:
            commit_timezone = 0
        c.commit_timezone = commit_timezone
        if author is None:
            author = committer
        c.author = author
        if author_timestamp is None:
            author_timestamp = commit_timestamp
        c.author_time = int(author_timestamp)
        if author_timezone is None:
            author_timezone = commit_timezone
        c.author_timezone = author_timezone
        c.message = message
        self.object_store.add_object(c)
        # Advance HEAD to the new commit.
        self.refs["HEAD"] = c.id
        return c.id

    @classmethod
    def init(cls, path, mkdir=True):
        """Create a non-bare repository: make *path*/.git and initialise it."""
        controldir = os.path.join(path, ".git")
        os.mkdir(controldir)
        cls.init_bare(controldir)
        return cls(path)

    @classmethod
    def init_bare(cls, path, mkdir=True):
        """Create a bare repository layout at *path*.

        NOTE(review): *mkdir* is accepted but unused here.
        """
        for d in [[OBJECTDIR],
                  [OBJECTDIR, "info"],
                  [OBJECTDIR, "pack"],
                  ["branches"],
                  [REFSDIR],
                  [REFSDIR, REFSDIR_TAGS],
                  [REFSDIR, REFSDIR_HEADS],
                  ["hooks"],
                  ["info"]]:
            os.mkdir(os.path.join(path, *d))
        ret = cls(path)
        ret.refs.set_ref("HEAD", "refs/heads/master")
        # NOTE(review): these open() handles are never explicitly closed;
        # CPython closes them at GC time.
        open(os.path.join(path, 'description'), 'wb').write("Unnamed repository")
        open(os.path.join(path, 'info', 'excludes'), 'wb').write("")
        return ret

    # Alias kept for callers that use Repo.create(...).
    create = init_bare
def test_read_alternate_paths(self):
    """_read_alternate_paths handles absolute, relative and comment lines."""
    store = DiskObjectStore(self.store_dir)
    abs_path = os.path.abspath(os.path.normpath("/abspath"))
    # ensures in particular existence of the alternates file
    store.add_alternate_path(abs_path)
    self.assertEqual({abs_path}, set(store._read_alternate_paths()))
    store.add_alternate_path("relative-path")
    self.assertIn(
        os.path.join(store.path, "relative-path"),
        set(store._read_alternate_paths()),
    )
    # arguably, add_alternate_path() could strip comments.
    # Meanwhile it's more convenient to use it than to import INFODIR
    store.add_alternate_path("# comment")
    for parsed in store._read_alternate_paths():
        self.assertNotIn("#", parsed)