def set_symbolic_ref(self, name, other, committer=None, timestamp=None,
                     timezone=None, message=None):
    """Make a ref point at another ref.

    :param name: Name of the ref to set
    :param other: Name of the ref to point at
    :param committer: Optional committer to record in the reflog
    :param timestamp: Optional timestamp for the reflog entry
    :param timezone: Optional timezone for the reflog entry
    :param message: Optional message to describe the change
    """
    self._check_refname(name)
    self._check_refname(other)
    filename = self.refpath(name)
    f = GitFile(filename, 'wb')
    try:
        f.write(SYMREF + other + b'\n')
        sha = self.follow(name)[-1]
        self._log(name, sha, sha, committer=committer,
                  timestamp=timestamp, timezone=timezone,
                  message=message)
    except BaseException:
        f.abort()
        raise
    else:
        f.close()
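A minimal usage sketch of the method above, assuming `repo` is a dulwich Repo whose refs container exposes set_symbolic_ref; the ref names are illustrative byte strings.

from dulwich.repo import Repo

repo = Repo('.')  # assumption: the current directory is a Git repository
# Point HEAD at the master branch; dulwich ref names are byte strings.
repo.refs.set_symbolic_ref(b'HEAD', b'refs/heads/master')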
def add_if_new(self, name, ref):
    """Add a new reference only if it does not already exist.

    This method follows symrefs, and only ensures that the last ref in the
    chain does not exist.

    :param name: The refname to set.
    :param ref: The new sha the refname will refer to.
    :return: True if the add was successful, False otherwise.
    """
    try:
        realname, contents = self._follow(name)
        if contents is not None:
            return False
    except KeyError:
        realname = name
    self._check_refname(realname)
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if os.path.exists(filename) or name in self.get_packed_refs():
            f.abort()
            return False
        try:
            f.write(ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        f.close()
    return True
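A hedged sketch of calling add_if_new through a Repo's refs container; `refs/heads/feature` and the source branch are illustrative names, not part of the snippet above.

from dulwich.repo import Repo

repo = Repo('.')  # assumption: an existing repository with a master branch
sha = repo.refs[b'refs/heads/master']
# Only creates the ref if refs/heads/feature does not exist yet.
created = repo.refs.add_if_new(b'refs/heads/feature', sha)
print('created' if created else 'already exists')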
def add_alternate_path(self, path):
    """Add an alternate path to this object store."""
    try:
        os.mkdir(os.path.join(self.path, "info"))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    alternates_path = os.path.join(self.path, "info/alternates")
    f = GitFile(alternates_path, 'wb')
    try:
        try:
            orig_f = open(alternates_path, 'rb')
        except (OSError, IOError) as e:
            if e.errno != errno.ENOENT:
                raise
        else:
            try:
                f.write(orig_f.read())
            finally:
                orig_f.close()
        f.write("%s\n" % path)
    finally:
        f.close()
    if not os.path.isabs(path):
        path = os.path.join(self.path, path)
    self.alternates.append(DiskObjectStore(path))
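A minimal sketch of registering an alternate object directory, assuming both object directories already exist on disk; the paths are hypothetical examples.

from dulwich.object_store import DiskObjectStore

store = DiskObjectStore('.git/objects')             # hypothetical path
store.add_alternate_path('../other/.git/objects')   # hypothetical path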
def _put_named_file(self, path, contents):
    """Write a file in the control dir with a specific name and contents."""
    f = GitFile(os.path.join(self.controldir(), path), 'wb')
    try:
        f.write(contents)
    finally:
        f.close()
def test_open_twice(self):
    foo = self.path('foo')
    f1 = GitFile(foo, 'wb')
    f1.write('new')
    try:
        f2 = GitFile(foo, 'wb')
        self.fail()
    except OSError as e:
        self.assertEqual(errno.EEXIST, e.errno)
def test_remove_packed_without_peeled(self):
    refs_file = os.path.join(self._repo.path, "packed-refs")
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    f = GitFile(refs_file, "wb")
    f.write("\n".join(l for l in refs_data.split("\n")
                      if not l or l[0] not in "#^"))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        "refs/heads/packed", "42d06bd4b77fed026b154d16493e5deab78f02ec"))
def _put_named_file(self, path, contents):
    """Write a file to the control dir with the given name and contents.

    :param path: The path to the file, relative to the control dir.
    :param contents: A string to write to the file.
    """
    path = path.lstrip(os.path.sep)
    f = GitFile(os.path.join(self.controldir(), path), 'wb')
    try:
        f.write(contents)
    finally:
        f.close()
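_put_named_file is internal Repo API; this is only a sketch of how it might be exercised, assuming a modern dulwich where contents are bytes. The 'description' file name is just an example.

from dulwich.repo import Repo

repo = Repo('.')
# Atomically writes <controldir>/description through GitFile.
repo._put_named_file('description', b'An example repository\n')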
def fetch_refs(remote_name='origin', local='.'):
    """
    Fetch references from a Git remote repository

    :param remote_name: <str> git name of remote repository, _default='origin'_
    :param local: <str> full path to local repository, _default='.'_
    :return entries: <TreeEntry> named tuples
    """
    # **Fetch refs from remote**
    # create a dulwich Repo object from path to local repo
    r = Repo(local)                                   # local repository
    objsto = r.object_store                           # an ObjectStore for the local repo
    determine_wants = objsto.determine_wants_all      # built-in dulwich function
    gitdir = os.path.join(local, r.controldir())      # the git folder
    cnf_file = os.path.join(gitdir, 'config')         # path to config
    cnf = ConfigFile.from_path(cnf_file)              # config
    remote = cnf.get(('remote', remote_name), 'url')  # url of remote
    # correctly parse host path and create dulwich Client object from it
    client, host_path = get_transport_and_path(remote)
    remote_refs = client.fetch(host_path, r, determine_wants, sys.stdout.write)

    # **Store refs fetched by dulwich**
    dulwich_refs = os.path.join(gitdir, DULWICH_REFS)
    with open(dulwich_refs, 'wb') as refs_file:
        writer = csv.writer(refs_file, delimiter=' ')
        for key, value in remote_refs.items():
            writer.writerow([key, value])

    # **save remote refs shas for future checkout**
    remote_dir = os.path.join(gitdir, 'refs', 'remotes', remote_name)  # .git/refs/remotes
    ensure_dir_exists(remote_dir)  # built-in dulwich function
    headref = 0                    # head branch ref
    if 'HEAD' in remote_refs:
        headref = remote_refs.pop('HEAD')             # sha of HEAD
        i_head = remote_refs.values().index(headref)  # index of head ref
        head_branch = remote_refs.keys()[i_head]      # name of head branch
        branch_key = head_branch.rsplit('/', 1)[-1]   # branch
        head_file = os.path.join(remote_dir, 'HEAD')  # path to branch shas file
        head_ref = '/'.join(['refs', 'remotes', remote_name, branch_key])
        with open(head_file, 'wb') as head_out:
            head_out.write('ref: ' + head_ref + '\n')
    # remote branch refs
    for key, value in remote_refs.items():
        key = key.rsplit('/', 1)[-1]             # get just the remote's branch
        reffile = os.path.join(remote_dir, key)  # path to branch shas file
        with open(reffile, 'wb') as ref_out:
            ref_out.write(value + '\n')
    if headref:
        remote_refs['HEAD'] = headref  # restore HEAD sha
    return remote_refs
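A sketch of calling the helper defined above, assuming the current directory is a Git repository with an 'origin' remote configured.

refs = fetch_refs(remote_name='origin', local='.')
for name, sha in refs.items():
    print('%s %s' % (name, sha))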
def test_remove_packed_without_peeled(self):
    refs_file = os.path.join(self._repo.path, 'packed-refs')
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    f = GitFile(refs_file, 'wb')
    f.write(b'\n'.join(l for l in refs_data.split(b'\n')
                       if not l or l[0] not in b'#^'))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        b'refs/heads/packed',
        b'42d06bd4b77fed026b154d16493e5deab78f02ec'))
def test_abort(self):
    foo = self.path('foo')
    foo_lock = '%s.lock' % foo
    with open(foo, 'rb') as orig_f:
        self.assertEqual(orig_f.read(), b'foo contents')
    f = GitFile(foo, 'wb')
    f.write(b'new contents')
    f.abort()
    self.assertTrue(f.closed)
    self.assertFalse(os.path.exists(foo_lock))
    with open(foo, 'rb') as new_orig_f:
        self.assertEqual(new_orig_f.read(), b'foo contents')
def test_open_twice(self):
    foo = self.path('foo')
    f1 = GitFile(foo, 'wb')
    f1.write(b'new')
    try:
        f2 = GitFile(foo, 'wb')
        self.fail()
    except OSError as e:
        self.assertEqual(errno.EEXIST, e.errno)
    f1.write(b' contents')
    f1.close()
    # Ensure trying to open twice doesn't affect original.
    with open(foo, 'rb') as f:
        self.assertEqual(b'new contents', f.read())
def keep(self, msg=None):
    """Add a .keep file for the pack, preventing git from garbage collecting it.

    :param msg: A message written inside the .keep file; can be used later to
        determine whether or not a .keep file is obsolete.
    :return: The path of the .keep file, as a string.
    """
    keepfile_name = '%s.keep' % self._basename
    keepfile = GitFile(keepfile_name, 'wb')
    try:
        if msg:
            keepfile.write(msg)
            keepfile.write('\n')
    finally:
        keepfile.close()
    return keepfile_name
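A sketch of pinning every pack in a repository with the keep() method shown above; it assumes `repo.object_store.packs` yields Pack objects that expose keep().

from dulwich.repo import Repo

repo = Repo('.')
for pack in repo.object_store.packs:
    # Creates a .keep file next to each pack file and prints its path.
    print(pack.keep())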
def test_remove_packed_without_peeled(self):
    refs_file = os.path.join(self._repo.path, "packed-refs")
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    f = GitFile(refs_file, "wb")
    f.write(b"\n".join(line for line in refs_data.split(b"\n")
                       if not line or line[0] not in b"#^"))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(
        refs.remove_if_equals(
            b"refs/heads/packed",
            b"42d06bd4b77fed026b154d16493e5deab78f02ec",
        ))
def add_object(self, obj):
    """Add a single object to this object store.

    :param obj: Object to add
    """
    dir = os.path.join(self.path, obj.id[:2])
    if not os.path.isdir(dir):
        os.mkdir(dir)
    path = os.path.join(dir, obj.id[2:])
    if os.path.exists(path):
        return  # Already there, no need to write again
    f = GitFile(path, 'wb')
    try:
        f.write(obj.as_legacy_object())
    finally:
        f.close()
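A minimal sketch of adding a loose object through a Repo's object store, which exposes add_object as above; the blob contents are illustrative.

from dulwich.objects import Blob
from dulwich.repo import Repo

repo = Repo('.')
blob = Blob.from_string(b'hello, git\n')
repo.object_store.add_object(blob)  # writes the loose object file
print(blob.id)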
def test_abort(self):
    foo = self.path("foo")
    foo_lock = "%s.lock" % foo
    orig_f = open(foo, "rb")
    self.assertEqual(orig_f.read(), b"foo contents")
    orig_f.close()
    f = GitFile(foo, "wb")
    f.write(b"new contents")
    f.abort()
    self.assertTrue(f.closed)
    self.assertFalse(os.path.exists(foo_lock))
    new_orig_f = open(foo, "rb")
    self.assertEqual(new_orig_f.read(), b"foo contents")
    new_orig_f.close()
def test_abort(self):
    foo = self.path(b'foo')
    foo_lock = foo + b'.lock'
    orig_f = open(foo, 'rb')
    self.assertEqual(orig_f.read(), b'foo contents')
    orig_f.close()
    f = GitFile(foo, 'wb')
    f.write(b'new contents')
    f.abort()
    self.assertTrue(f.closed)
    self.assertFalse(os.path.exists(foo_lock))
    new_orig_f = open(foo, 'rb')
    self.assertEqual(new_orig_f.read(), b'foo contents')
    new_orig_f.close()
def test_abort(self):
    foo = self.path('foo')
    foo_lock = '%s.lock' % foo
    orig_f = open(foo, 'rb')
    self.assertEqual(orig_f.read(), b'foo contents')
    orig_f.close()
    f = GitFile(foo, 'wb')
    f.write(b'new contents')
    f.abort()
    self.assertTrue(f.closed)
    self.assertFalse(os.path.exists(foo_lock))
    new_orig_f = open(foo, 'rb')
    self.assertEqual(new_orig_f.read(), b'foo contents')
    new_orig_f.close()
def test_open_twice(self):
    foo = self.path('foo')
    f1 = GitFile(foo, 'wb')
    f1.write(b'new')
    try:
        f2 = GitFile(foo, 'wb')
        self.fail()
    except OSError as e:
        self.assertEqual(errno.EEXIST, e.errno)
    else:
        f2.close()
    f1.write(b' contents')
    f1.close()
    # Ensure trying to open twice doesn't affect original.
    f = open(foo, 'rb')
    self.assertEqual(b'new contents', f.read())
    f.close()
def add_if_new(self, name, ref):
    """Add a new reference only if it does not already exist."""
    self._check_refname(name)
    filename = self.refpath(name)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if os.path.exists(filename) or name in self.get_packed_refs():
            f.abort()
            return False
        try:
            f.write(ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        f.close()
    return True
def test_open_twice(self):
    foo = self.path("foo")
    f1 = GitFile(foo, "wb")
    f1.write(b"new")
    try:
        f2 = GitFile(foo, "wb")
        self.fail()
    except OSError as e:
        self.assertEqual(errno.EEXIST, e.errno)
    else:
        f2.close()
    f1.write(b" contents")
    f1.close()
    # Ensure trying to open twice doesn't affect original.
    f = open(foo, "rb")
    self.assertEqual(b"new contents", f.read())
    f.close()
def set_symbolic_ref(self, name, other):
    """Make a ref point at another ref.

    :param name: Name of the ref to set
    :param other: Name of the ref to point at
    """
    self._check_refname(name)
    self._check_refname(other)
    filename = self.refpath(name)
    try:
        f = GitFile(filename, 'wb')
        try:
            f.write(SYMREF + other + b'\n')
        except (IOError, OSError):
            f.abort()
            raise
    finally:
        f.close()
def test_open_twice(self):
    foo = self.path('foo')
    f1 = GitFile(foo, 'wb')
    f1.write(b'new')
    try:
        f2 = GitFile(foo, 'wb')
        self.fail()
    except FileLocked:
        pass
    else:
        f2.close()
    f1.write(b' contents')
    f1.close()
    # Ensure trying to open twice doesn't affect original.
    f = open(foo, 'rb')
    self.assertEqual(b'new contents', f.read())
    f.close()
def test_open_twice(self):
    foo = self.path("foo")
    f1 = GitFile(foo, "wb")
    f1.write(b"new")
    try:
        f2 = GitFile(foo, "wb")
        self.fail()
    except FileLocked:
        pass
    else:
        f2.close()
    f1.write(b" contents")
    f1.close()
    # Ensure trying to open twice doesn't affect original.
    f = open(foo, "rb")
    self.assertEqual(b"new contents", f.read())
    f.close()
def add_object(self, obj):
    """Add a single object to this object store.

    :param obj: Object to add
    """
    dir = os.path.join(self.path, obj.id[:2])
    try:
        os.mkdir(dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    path = os.path.join(dir, obj.id[2:])
    if os.path.exists(path):
        return  # Already there, no need to write again
    f = GitFile(path, 'wb')
    try:
        f.write(obj.as_legacy_object())
    finally:
        f.close()
def set_if_equals(self, name, old_ref, new_ref):
    """Set a refname to new_ref only if it currently equals old_ref.

    This method follows all symbolic references, and can be used to perform
    an atomic compare-and-swap operation.

    :param name: The refname to set.
    :param old_ref: The old sha the refname must refer to, or None to set
        unconditionally.
    :param new_ref: The new sha the refname will refer to.
    :return: True if the set was successful, False otherwise.
    """
    self._check_refname(name)
    try:
        realname, _ = self._follow(name)
    except KeyError:
        realname = name
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            try:
                # read again while holding the lock
                orig_ref = self.read_loose_ref(realname)
                if orig_ref is None:
                    orig_ref = self.get_packed_refs().get(realname, None)
                if orig_ref != old_ref:
                    f.abort()
                    return False
            except (OSError, IOError):
                f.abort()
                raise
        try:
            f.write(new_ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        f.close()
    return True
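A sketch of the compare-and-swap above through a Repo's refs container; `new_sha` is a placeholder for the commit the branch should actually move to.

from dulwich.repo import Repo

repo = Repo('.')
old_sha = repo.refs[b'refs/heads/master']
new_sha = old_sha  # placeholder: the commit to move the branch to
if repo.refs.set_if_equals(b'refs/heads/master', old_sha, new_sha):
    print('branch updated')
else:
    print('branch changed underneath us; retry')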
def test_write(self):
    foo = self.path('foo')
    foo_lock = '%s.lock' % foo
    with open(foo, 'rb') as orig_f:
        self.assertEqual(orig_f.read(), b'foo contents')
    self.assertFalse(os.path.exists(foo_lock))
    f = GitFile(foo, 'wb')
    self.assertFalse(f.closed)
    self.assertRaises(AttributeError, getattr, f, 'not_a_file_property')
    self.assertTrue(os.path.exists(foo_lock))
    f.write(b'new stuff')
    f.seek(4)
    f.write(b'contents')
    f.close()
    self.assertFalse(os.path.exists(foo_lock))
    with open(foo, 'rb') as new_f:
        self.assertEqual(b'new contents', new_f.read())
def test_write(self):
    foo = self.path("foo")
    foo_lock = "%s.lock" % foo
    orig_f = open(foo, "rb")
    self.assertEqual(orig_f.read(), b"foo contents")
    orig_f.close()
    self.assertFalse(os.path.exists(foo_lock))
    f = GitFile(foo, "wb")
    self.assertFalse(f.closed)
    self.assertRaises(AttributeError, getattr, f, "not_a_file_property")
    self.assertTrue(os.path.exists(foo_lock))
    f.write(b"new stuff")
    f.seek(4)
    f.write(b"contents")
    f.close()
    self.assertFalse(os.path.exists(foo_lock))
    new_f = open(foo, "rb")
    self.assertEqual(b"new contents", new_f.read())
    new_f.close()
def test_write(self):
    foo = self.path('foo')
    foo_lock = '%s.lock' % foo
    orig_f = open(foo, 'rb')
    self.assertEqual(orig_f.read(), b'foo contents')
    orig_f.close()
    self.assertFalse(os.path.exists(foo_lock))
    f = GitFile(foo, 'wb')
    self.assertFalse(f.closed)
    self.assertRaises(AttributeError, getattr, f, 'not_a_file_property')
    self.assertTrue(os.path.exists(foo_lock))
    f.write(b'new stuff')
    f.seek(4)
    f.write(b'contents')
    f.close()
    self.assertFalse(os.path.exists(foo_lock))
    new_f = open(foo, 'rb')
    self.assertEqual(b'new contents', new_f.read())
    new_f.close()
def checkout(repo_path='.', co_ref='HEAD'):
    """
    Checkout a reference from a Git repository

    :param repo_path: <str> path of repository
    :param co_ref: <str> name of checkout reference
    :return entries: <TreeEntry> named tuples
    """
    # TODO: try using index.build_index_from_tree
    repo = Repo(repo_path)
    obj_sto = repo.object_store
    # TODO: catch not a reference
    tree_id = repo[co_ref].tree
    # TODO: error out if unstaged or uncommitted files
    entries = []
    for entry in obj_sto.iter_tree_contents(tree_id):
        entry_in_path = entry.in_path(repo.path)
        path = os.path.split(entry_in_path.path)
        ensure_dir_exists(path[0])
        path = os.path.join(*path)
        with open(path, 'wb') as out_file:
            out_file.write(repo[entry_in_path.sha].data)
        os.chmod(path, entry_in_path.mode)
        entries.append(entry)
    return entries
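A sketch of calling the helper defined above, assuming the current directory is a Git repository with a committed HEAD.

entries = checkout(repo_path='.', co_ref='HEAD')
print('%d entries written to the working tree' % len(entries))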
def write_pack_index_v1(filename, entries, pack_checksum):
    """Write a new pack index file.

    :param filename: The filename of the new pack index file.
    :param entries: List of tuples with object name (sha), offset_in_pack,
        and crc32_checksum.
    :param pack_checksum: Checksum of the pack file.
    """
    f = GitFile(filename, 'wb')
    f = SHA1Writer(f)
    fan_out_table = defaultdict(lambda: 0)
    for (name, offset, entry_checksum) in entries:
        fan_out_table[ord(name[0])] += 1
    # Fan-out table
    for i in range(0x100):
        f.write(struct.pack(">L", fan_out_table[i]))
        fan_out_table[i + 1] += fan_out_table[i]
    for (name, offset, entry_checksum) in entries:
        f.write(struct.pack(">L20s", offset, name))
    assert len(pack_checksum) == 20
    f.write(pack_checksum)
    f.close()
    try:
        os.mkdir(os.path.join(self.path, "info"))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    alternates_path = os.path.join(self.path, "info/alternates")
    f = GitFile(alternates_path, 'wb')
    try:
        try:
            orig_f = open(alternates_path, 'rb')
        except (OSError, IOError) as e:
            if e.errno != errno.ENOENT:
                raise
        else:
            try:
                f.write(orig_f.read())
            finally:
                orig_f.close()
        f.write("%s\n" % path)
    finally:
        f.close()
    self.alternates.append(DiskObjectStore(path))

def _load_packs(self):
    pack_files = []
    try:
        self._pack_cache_time = os.stat(self.pack_dir).st_mtime
        pack_dir_contents = os.listdir(self.pack_dir)
        for name in pack_dir_contents:
            # TODO: verify that idx exists first
            if name.startswith("pack-") and name.endswith(".pack"):
    try:
        os.mkdir(os.path.join(self.path, "info"))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    alternates_path = os.path.join(self.path, "info/alternates")
    f = GitFile(alternates_path, 'wb')
    try:
        try:
            orig_f = open(alternates_path, 'rb')
        except (OSError, IOError) as e:
            if e.errno != errno.ENOENT:
                raise
        else:
            try:
                f.write(orig_f.read())
            finally:
                orig_f.close()
        f.write("%s\n" % path)
    finally:
        f.close()
    if not os.path.isabs(path):
        path = os.path.join(self.path, path)
    self.alternates.append(DiskObjectStore(path))

def _load_packs(self):
    pack_files = []
    try:
        self._pack_cache_time = os.stat(self.pack_dir).st_mtime
        pack_dir_contents = os.listdir(self.pack_dir)
def write_pack_index_v2(filename, entries, pack_checksum):
    """Write a new pack index file.

    :param filename: The filename of the new pack index file.
    :param entries: List of tuples with object name (sha), offset_in_pack,
        and crc32_checksum.
    :param pack_checksum: Checksum of the pack file.
    """
    f = GitFile(filename, 'wb')
    f = SHA1Writer(f)
    f.write('\377tOc')  # Magic!
    f.write(struct.pack(">L", 2))
    fan_out_table = defaultdict(lambda: 0)
    for (name, offset, entry_checksum) in entries:
        fan_out_table[ord(name[0])] += 1
    # Fan-out table
    for i in range(0x100):
        f.write(struct.pack(">L", fan_out_table[i]))
        fan_out_table[i + 1] += fan_out_table[i]
    for (name, offset, entry_checksum) in entries:
        f.write(name)
    for (name, offset, entry_checksum) in entries:
        f.write(struct.pack(">L", entry_checksum))
    for (name, offset, entry_checksum) in entries:
        # FIXME: handle if MSBit is set in offset
        f.write(struct.pack(">L", offset))
    # FIXME: handle table for pack files > 8 Gb
    assert len(pack_checksum) == 20
    f.write(pack_checksum)
    f.close()
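A sketch of feeding the writer above from an existing pack, assuming a dulwich PackData object supplies the (sha, offset, crc32) entries; the pack path is hypothetical.

from dulwich.pack import PackData

data = PackData('objects/pack/pack-1234.pack')  # hypothetical pack path
entries = sorted(data.iterentries())            # (sha, offset, crc32) sorted by sha
write_pack_index_v2('objects/pack/pack-1234.idx',
                    entries, data.get_stored_checksum())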
"""Add a single object to this object store. :param obj: Object to add """ dir = os.path.join(self.path, obj.id[:2]) try: os.mkdir(dir) except OSError, e: if e.errno != errno.EEXIST: raise path = os.path.join(dir, obj.id[2:]) if os.path.exists(path): return # Already there, no need to write again f = GitFile(path, 'wb') try: f.write(obj.as_legacy_object()) finally: f.close() @classmethod def init(cls, path): try: os.mkdir(path) except OSError, e: if e.errno != errno.EEXIST: raise os.mkdir(os.path.join(path, "info")) os.mkdir(os.path.join(path, PACKDIR)) return cls(path)