def remove_if_equals(self, name, old_ref):
    """Remove a refname only if it currently equals old_ref.

    This method does not follow symbolic references. It can be used to
    perform an atomic compare-and-delete operation.

    :param name: The refname to delete.
    :param old_ref: The old sha the refname must refer to, or None to
        delete unconditionally.
    :return: True if the delete was successful, False otherwise.
    """
    self._check_refname(name)
    filename = self.refpath(name)
    ensure_dir_exists(os.path.dirname(filename))
    # Taking the GitFile lock guards the compare-and-delete against
    # concurrent writers of the same ref.
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            # Check loose first; fall back to packed-refs if absent.
            orig_ref = self.read_loose_ref(name)
            if orig_ref is None:
                orig_ref = self.get_packed_refs().get(name, None)
            if orig_ref != old_ref:
                return False
        # The loose file may not exist — the ref may only be packed.
        try:
            os.remove(filename)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
        self._remove_packed_ref(name)
    finally:
        # never write, we just wanted the lock
        f.abort()
    return True
def read_loose_ref(self, name):
    """Read a reference file and return its contents.

    If the reference file a symbolic reference, only read the first line of
    the file. Otherwise, only read the first 40 bytes.

    :param name: the refname to read, relative to refpath
    :return: The contents of the ref file, or None if the file does not
        exist.
    :raises IOError: if any other error occurs
    """
    filename = self.refpath(name)
    try:
        f = GitFile(filename, 'rb')
        try:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                # (py3 fix: next() builtin instead of .next(), bytes rstrip)
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))
        finally:
            f.close()
    except IOError as e:
        # A missing loose ref is not an error; anything else propagates.
        if e.errno == errno.ENOENT:
            return None
        raise
def get_packed_refs(self):
    """Get contents of the packed-refs file.

    :return: Dictionary mapping ref names to SHA1s
    :note: Will return an empty dictionary when no packed-refs file is
        present.
    """
    # TODO: invalidate the cache on repacking
    if self._packed_refs is None:
        self._packed_refs = {}
        path = os.path.join(self.path, 'packed-refs')
        try:
            f = GitFile(path, 'rb')
        except IOError as e:
            if e.errno == errno.ENOENT:
                return {}
            raise
        try:
            first_line = next(iter(f)).rstrip()
            if (first_line.startswith("# pack-refs") and " peeled" in
                    first_line):
                self._peeled_refs = {}
                for sha, name, peeled in read_packed_refs_with_peeled(f):
                    self._packed_refs[name] = sha
                    if peeled:
                        self._peeled_refs[name] = peeled
            else:
                # No header: rewind and parse as a plain packed-refs file.
                f.seek(0)
                for sha, name in read_packed_refs(f):
                    self._packed_refs[name] = sha
        finally:
            f.close()
    # Bug fix: the cached dictionary was never returned (the function
    # fell off the end and returned None after populating the cache).
    return self._packed_refs
def get_packed_refs(self):
    """Get contents of the packed-refs file.

    :return: Dictionary mapping ref names to SHA1s
    :note: Will return an empty dictionary when no packed-refs file is
        present.
    """
    # TODO: invalidate the cache on repacking
    if self._packed_refs is not None:
        return self._packed_refs
    # set both to empty because we want _peeled_refs to be
    # None if and only if _packed_refs is also None.
    self._packed_refs = {}
    self._peeled_refs = {}
    path = os.path.join(self.path, b'packed-refs')
    try:
        f = GitFile(path, 'rb')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return {}
    with f:
        first_line = next(iter(f)).rstrip()
        has_peeled = (first_line.startswith(b'# pack-refs')
                      and b' peeled' in first_line)
        if has_peeled:
            for sha, name, peeled in read_packed_refs_with_peeled(f):
                self._packed_refs[name] = sha
                if peeled:
                    self._peeled_refs[name] = peeled
        else:
            f.seek(0)
            for sha, name in read_packed_refs(f):
                self._packed_refs[name] = sha
    return self._packed_refs
def remove_if_equals(self, name, old_ref, committer=None, timestamp=None,
                     timezone=None, message=None):
    """Remove a refname only if it currently equals old_ref.

    This method does not follow symbolic references. It can be used to
    perform an atomic compare-and-delete operation.

    :param name: The refname to delete.
    :param old_ref: The old sha the refname must refer to, or None to
        delete unconditionally.
    :param committer: Optional committer identity, forwarded to self._log
    :param timestamp: Optional timestamp, forwarded to self._log
    :param timezone: Optional timezone, forwarded to self._log
    :param message: Optional message
    :return: True if the delete was successful, False otherwise.
    """
    self._check_refname(name)
    filename = self.refpath(name)
    ensure_dir_exists(os.path.dirname(filename))
    # Hold the lock file while comparing and deleting.
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            orig_ref = self.read_loose_ref(name)
            if orig_ref is None:
                # Not loose: consult packed-refs, defaulting to ZERO_SHA.
                orig_ref = self.get_packed_refs().get(name, ZERO_SHA)
            if orig_ref != old_ref:
                return False
        # remove the reference file itself
        try:
            os.remove(filename)
        except OSError as e:
            if e.errno != errno.ENOENT:  # may only be packed
                raise
        self._remove_packed_ref(name)
        self._log(name, old_ref, None, committer=committer,
                  timestamp=timestamp, timezone=timezone, message=message)
    finally:
        # never write, we just wanted the lock
        f.abort()
    # outside of the lock, clean-up any parent directory that might now
    # be empty. this ensures that re-creating a reference of the same
    # name of what was previously a directory works as expected
    parent = name
    while True:
        try:
            parent, _ = parent.rsplit(b'/', 1)
        except ValueError:
            # No more '/' separators: reached the top of the ref namespace.
            break
        parent_filename = self.refpath(parent)
        try:
            os.rmdir(parent_filename)
        except OSError as e:
            # this can be caused by the parent directory being
            # removed by another process, being not empty, etc.
            # in any case, this is non fatal because we already
            # removed the reference, just ignore it
            break
    return True
def writeIndex(self, filename, entries, pack_checksum):
    """Write a pack index for *entries* to *filename* using self._write_fn."""
    # FIXME: Write to StringIO instead rather than hitting disk ?
    index_file = GitFile(filename, "wb")
    try:
        self._write_fn(index_file, entries, pack_checksum)
    finally:
        index_file.close()
def _put_named_file(self, path, contents):
    """Write a file from the control dir with a specific name and contents."""
    target = os.path.join(self.controldir(), path)
    out = GitFile(target, 'wb')
    try:
        out.write(contents)
    finally:
        out.close()
def write(self):
    """Write current contents of index to disk.

    The lock file is wrapped in a SHA1Writer so the trailing checksum is
    emitted when the writer is closed.
    """
    out = GitFile(self._filename, 'wb')
    try:
        out = SHA1Writer(out)
        write_index_dict(out, self._byname)
    finally:
        out.close()
def write_to_path(self, path=None):
    """Write configuration to a file on disk.

    Defaults to this config's own path when *path* is not given.
    """
    target = self.path if path is None else path
    out = GitFile(target, 'wb')
    try:
        self.write_to_file(out)
    finally:
        out.close()
def from_path(cls, path):
    """Read configuration from a file on disk."""
    config_file = GitFile(path, 'rb')
    try:
        config = cls.from_file(config_file)
        config.path = path
        return config
    finally:
        config_file.close()
def test_open_twice(self):
    """Opening an already-locked path for writing must fail with EEXIST.

    Fixes the Python-2-only ``except OSError, e`` syntax, writes bytes
    (the file is opened in 'wb' mode), and uses the non-deprecated
    assertEqual.
    """
    foo = self.path('foo')
    f1 = GitFile(foo, 'wb')
    f1.write(b'new')
    try:
        f2 = GitFile(foo, 'wb')
        self.fail()
    except OSError as e:
        self.assertEqual(errno.EEXIST, e.errno)
def test_abort_close_removed(self):
    """abort() must succeed even when the lock file already disappeared."""
    foo = self.path('foo')
    locked = GitFile(foo, 'wb')
    locked._file.close()
    os.remove(foo + ".lock")
    locked.abort()
    self.assertTrue(locked._closed)
def _complete_thin_pack(self, f, path, copier, indexer):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param f: Open file object for the pack.
    :param path: Path to the pack file.
    :param copier: A PackStreamCopier to use for writing pack data.
    :param indexer: A PackIndexer for indexing the pack.
    """
    entries = list(indexer)
    # Update the header with the new number of objects.
    f.seek(0)
    write_pack_header(f, len(entries) + len(indexer.ext_refs()))
    # Must flush before reading (http://bugs.python.org/issue3207)
    f.flush()
    # Rescan the rest of the pack, computing the SHA with the new header.
    new_sha = compute_file_sha(f, end_ofs=-20)
    # Must reposition before writing (http://bugs.python.org/issue3207)
    f.seek(0, os.SEEK_CUR)
    # Complete the pack: append every external (thin) object inline.
    for ext_sha in indexer.ext_refs():
        assert len(ext_sha) == 20
        type_num, data = self.get_raw(ext_sha)
        offset = f.tell()
        crc32 = write_pack_object(f, type_num, data, sha=new_sha)
        entries.append((ext_sha, offset, crc32))
    pack_sha = new_sha.digest()
    f.write(pack_sha)
    f.close()
    # Move the pack in.
    entries.sort()
    pack_base_name = os.path.join(
        self.pack_dir, 'pack-' + iter_sha1(e[0] for e in entries))
    os.rename(path, pack_base_name + '.pack')
    # Write the index. abort() after a successful close() is a no-op, so
    # the finally only discards the .idx lock if writing failed midway.
    index_file = GitFile(pack_base_name + '.idx', 'wb')
    try:
        write_pack_index_v2(index_file, entries, pack_sha)
        index_file.close()
    finally:
        index_file.abort()
    # Add the pack to the store and return it.
    final_pack = Pack(pack_base_name)
    final_pack.check_length_and_checksum()
    self._add_known_pack(final_pack)
    return final_pack
def from_file(cls, filename):
    """Get the contents of a SHA file on disk.

    The file is mmapped read-only and handed to the class parser.
    """
    size = os.path.getsize(filename)
    f = GitFile(filename, 'rb')
    try:
        contents = mmap.mmap(f.fileno(), size, access=mmap.ACCESS_READ)
        return cls._parse_file(contents)
    finally:
        f.close()
def _parse_file(self):
    """Parse the on-disk object, dispatching on its two-byte magic."""
    source = GitFile(self._filename, 'rb')
    try:
        magic = source.read(2)
        if not self._is_legacy_object(magic):
            self._parse_object(source)
        else:
            self._parse_legacy_object(source)
    finally:
        source.close()
def add_if_new(self, name, ref):
    """Add a new reference only if it does not already exist.

    This method follows symrefs, and only ensures that the last ref in the
    chain does not exist.

    :param name: The refname to set.
    :param ref: The new sha the refname will refer to.
    :return: True if the add was successful, False otherwise.
    """
    try:
        realname, contents = self._follow(name)
        if contents is not None:
            # Target of the symref chain already exists.
            return False
    except KeyError:
        realname = name
    self._check_refname(realname)
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    # Take the lock, then re-check existence while holding it.
    f = GitFile(filename, 'wb')
    try:
        if os.path.exists(filename) or name in self.get_packed_refs():
            f.abort()
            return False
        try:
            f.write(ref+"\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        # close() commits the lock file unless abort() already ran.
        f.close()
    return True
def from_path(cls, path):
    """Read an object from *path*, recording its path and hex sha.

    Cached file handle and magic are cleared so later access re-reads
    from disk.
    """
    source = GitFile(path, 'rb')
    try:
        obj = cls.from_file(source)
    finally:
        source.close()
    obj._path = path
    obj._sha = FixedSha(filename_to_hex(path))
    obj._file = None
    obj._magic = None
    return obj
def load_pack_index(path):
    """Load an index file by path.

    :param path: Path to the index file
    :return: A PackIndex loaded from the given path
    """
    index_fp = GitFile(path, 'rb')
    try:
        return load_pack_index_file(path, index_fp)
    finally:
        index_fp.close()
def from_path(cls, path):
    """Open a SHA file from disk.

    Records the path and the sha derived from the filename; clears cached
    handles so future parsing re-opens the file.
    """
    source = GitFile(path, "rb")
    try:
        obj = cls.from_file(source)
    finally:
        source.close()
    obj._path = path
    obj._sha = FixedSha(filename_to_hex(path))
    obj._file = None
    obj._magic = None
    return obj
def _put_named_file(self, path, contents):
    """Write a file to the control dir with the given name and contents.

    :param path: The path to the file, relative to the control dir.
    :param contents: A string to write to the file.
    """
    relpath = path.lstrip(os.path.sep)
    out = GitFile(os.path.join(self.controldir(), relpath), 'wb')
    try:
        out.write(contents)
    finally:
        out.close()
def fetch_refs(remote_name='origin', local='.'):
    """
    Fetch references from a Git remote repository

    :param remote_name: <str> git name of remote repository, _default='origin'_
    :param local: <str> full path to local repository, _default='.'_
    :return entries: <TreeEntry> named tuples

    Fixes: py2-only ``dict.has_key``; ``values().index()``/``keys()[i]``
    (dict views are not indexable on py3); shadowing of the imported
    ``GitFile`` name by ``with ... as GitFile``.
    """
    # **Fetch refs from remote**
    r = Repo(local)  # local repository
    objsto = r.object_store  # ObjectStore for the local repo
    determine_wants = objsto.determine_wants_all  # built in dulwich function
    gitdir = os.path.join(local, r.controldir())  # the git folder
    cnf = ConfigFile.from_path(os.path.join(gitdir, 'config'))  # config
    remote = cnf.get(('remote', remote_name), 'url')  # url of remote
    # correctly parse host path and create dulwich Client object from it
    client, host_path = get_transport_and_path(remote)
    remote_refs = client.fetch(host_path, r, determine_wants, sys.stdout.write)

    # **Store refs fetched by dulwich**
    dulwich_refs = os.path.join(gitdir, DULWICH_REFS)
    with open(dulwich_refs, 'wb') as refs_out:
        writer = csv.writer(refs_out, delimiter=' ')
        for key, value in remote_refs.items():
            writer.writerow([key, value])

    # **save remote refs shas for future checkout**
    remote_dir = os.path.join(gitdir, 'refs', 'remotes', remote_name)
    ensure_dir_exists(remote_dir)  # built in dulwich function
    headref = 0  # head branch ref
    if 'HEAD' in remote_refs:
        headref = remote_refs.pop('HEAD')  # sha of HEAD
        # name of the branch whose sha matches HEAD
        head_branch = next(k for k, v in remote_refs.items() if v == headref)
        branch_key = head_branch.rsplit('/', 1)[-1]  # branch
        head_file = os.path.join(remote_dir, 'HEAD')
        head_ref = '/'.join(['refs', 'remotes', remote_name, branch_key])
        with open(head_file, 'wb') as head_out:
            head_out.write('ref: ' + head_ref + '\n')
    # remote branch refs
    for key, value in remote_refs.items():
        branch = key.rsplit('/', 1)[-1]  # get just the remote's branch
        reffile = os.path.join(remote_dir, branch)  # path to branch shas file
        with open(reffile, 'wb') as ref_out:
            ref_out.write(value + '\n')
    if headref:
        remote_refs['HEAD'] = headref  # restore HEAD sha
    return remote_refs
def create_index_v2(self, filename, progress=None):
    """Create a version 2 index file for this data file.

    :param filename: Index filename.
    :param progress: Progress report function
    :return: Checksum of index file
    """
    entries = self.sorted_entries(progress=progress)
    index_out = GitFile(filename, 'wb')
    try:
        checksum = self.calculate_checksum()
        return write_pack_index_v2(index_out, entries, checksum)
    finally:
        index_out.close()
def write_pack(filename, objects, num_objects):
    """Write a new pack data file.

    :param filename: Path to the new pack file (without .pack extension)
    :param objects: Iterable over (object, path) tuples to write
    :param num_objects: Number of objects to write
    """
    pack_out = GitFile(filename + ".pack", 'wb')
    try:
        entries, data_sum = write_pack_data(pack_out, objects, num_objects)
    finally:
        pack_out.close()
    entries.sort()
    write_pack_index_v2(filename + ".idx", entries, data_sum)
def from_file(cls, filename):
    """Get the contents of a SHA file on disk.

    Only the header is parsed eagerly; body parsing and serialization are
    deferred (the _needs_* flags are set).

    :raises ObjectFormatException: if the object header is malformed.
    """
    f = GitFile(filename, 'rb')
    try:
        try:
            obj = cls._parse_file_header(f)
            obj._sha = FixedSha(filename_to_hex(filename))
            obj._needs_parsing = True
            obj._needs_serialization = True
            return obj
        # py3 fix: 'except (...), e' is Python-2-only syntax; the binding
        # was unused, so it is dropped entirely.
        except (IndexError, ValueError):
            raise ObjectFormatException("invalid object header")
    finally:
        f.close()
def test_remove_packed_without_peeled(self):
    """remove_if_equals must delete a packed ref when packed-refs lacks
    peeled ('^') entries.

    Fix: default-mode GitFile reads bytes (see the test_default_mode
    expectations elsewhere in this suite), so the filtering and the refs
    API calls must use bytes literals, not str.
    """
    refs_file = os.path.join(self._repo.path, "packed-refs")
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    f = GitFile(refs_file, "wb")
    # Drop comment ('#') and peeled ('^') lines from packed-refs.
    f.write(b"\n".join(line for line in refs_data.split(b"\n")
                       if not line or line[0] not in b"#^"))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        b"refs/heads/packed",
        b"42d06bd4b77fed026b154d16493e5deab78f02ec"))
def test_readonly(self):
    """A read-mode GitFile behaves like an ordinary binary file object."""
    reader = GitFile(self.path("foo"), "rb")
    self.assertTrue(isinstance(reader, io.IOBase))
    self.assertEqual(b"foo contents", reader.read())
    self.assertEqual(b"", reader.read())  # already at EOF
    reader.seek(4)
    self.assertEqual(b"contents", reader.read())
    reader.close()
def test_readonly(self):
    """Read-only GitFile supports read/seek like a plain binary file."""
    fp = GitFile(self.path('foo'), 'rb')
    self.assertTrue(isinstance(fp, io.IOBase))
    self.assertEqual(b'foo contents', fp.read())
    self.assertEqual(b'', fp.read())  # second read hits EOF
    fp.seek(4)
    self.assertEqual(b'contents', fp.read())
    fp.close()
def test_abort_close(self):
    """close() after abort(), and abort() after close(), must both be safe."""
    foo = self.path('foo')
    gf = GitFile(foo, 'wb')
    gf.abort()
    try:
        gf.close()
    except (IOError, OSError):
        self.fail()
    gf = GitFile(foo, 'wb')
    gf.close()
    try:
        gf.abort()
    except (IOError, OSError):
        self.fail()
def test_abort(self):
    """abort() discards written data, removes the lock, keeps the original."""
    foo = self.path('foo')
    foo_lock = foo + '.lock'
    with open(foo, 'rb') as before:
        self.assertEqual(before.read(), b'foo contents')
    locked = GitFile(foo, 'wb')
    locked.write(b'new contents')
    locked.abort()
    self.assertTrue(locked.closed)
    self.assertFalse(os.path.exists(foo_lock))
    with open(foo, 'rb') as after:
        self.assertEqual(after.read(), b'foo contents')
def __init__(self, filename, file=None, size=None):
    """Create a pack index object.

    Provide it with the name of the index file to consider, and it will
    map it whenever required.

    :param filename: Path of the index file on disk.
    :param file: Optional already-open file object to use instead of
        opening *filename*.
    :param size: Optional size of the index data; measured via fstat when
        omitted and the file has a real descriptor.
    """
    self._filename = filename
    # Take the size now, so it can be checked each time we map the file to
    # ensure that it hasn't changed.
    if file is None:
        self._file = GitFile(filename, 'rb')
    else:
        self._file = file
    fileno = getattr(self._file, 'fileno', None)
    if fileno is not None:
        # Real OS-level file: mmap it read-only for zero-copy access.
        fd = self._file.fileno()
        if size is None:
            self._size = os.fstat(fd).st_size
        else:
            self._size = size
        self._contents = mmap.mmap(fd, self._size, access=mmap.ACCESS_READ)
    else:
        # File-like object with no descriptor (e.g. an in-memory buffer):
        # fall back to reading everything into memory.
        self._file.seek(0)
        self._contents = self._file.read()
        self._size = len(self._contents)
            # NOTE(review): the lines above this def are the tail of a
            # method whose start is outside this view; left untouched.
            else:
                ret.append(os.path.join(self.path, l))
        return ret
    finally:
        f.close()

def add_alternate_path(self, path):
    """Add an alternate path to this object store.

    Appends *path* to info/alternates, preserving any existing entries.
    """
    try:
        os.mkdir(os.path.join(self.path, "info"))
    except OSError, e:
        # Directory already existing is fine; anything else propagates.
        if e.errno != errno.EEXIST:
            raise
    alternates_path = os.path.join(self.path, "info/alternates")
    # Lock the alternates file, copy the previous contents (if any),
    # then append the new path.
    f = GitFile(alternates_path, 'wb')
    try:
        try:
            orig_f = open(alternates_path, 'rb')
        except (OSError, IOError), e:
            if e.errno != errno.ENOENT:
                raise
        else:
            try:
                f.write(orig_f.read())
            finally:
                orig_f.close()
        f.write("%s\n" % path)
    finally:
        f.close()
def from_path(cls, path):
    """Open a SHA file from disk."""
    sha_file = GitFile(path, "rb")
    try:
        return cls.from_file(sha_file)
    finally:
        sha_file.close()
def test_default_mode(self):
    """With no mode argument, GitFile opens for binary reading."""
    reader = GitFile(self.path('foo'))
    self.assertEqual(b'foo contents', reader.read())
    reader.close()
def set_if_equals(self, name, old_ref, new_ref, committer=None,
                  timestamp=None, timezone=None, message=None):
    """Set a refname to new_ref only if it currently equals old_ref.

    This method follows all symbolic references, and can be used to
    perform an atomic compare-and-swap operation.

    :param name: The refname to set.
    :param old_ref: The old sha the refname must refer to, or None to set
        unconditionally.
    :param new_ref: The new sha the refname will refer to.
    :param committer: Optional committer identity, forwarded to self._log
    :param timestamp: Optional timestamp, forwarded to self._log
    :param timezone: Optional timezone, forwarded to self._log
    :param message: Set message for reflog
    :return: True if the set was successful, False otherwise.
    """
    self._check_refname(name)
    try:
        # Resolve the symref chain; the last element is the real target.
        realnames, _ = self.follow(name)
        realname = realnames[-1]
    except (KeyError, IndexError):
        realname = name
    filename = self.refpath(realname)

    # make sure none of the ancestor folders is in packed refs
    probe_ref = os.path.dirname(realname)
    packed_refs = self.get_packed_refs()
    while probe_ref:
        if packed_refs.get(probe_ref, None) is not None:
            raise OSError(errno.ENOTDIR,
                          'Not a directory: {}'.format(filename))
        probe_ref = os.path.dirname(probe_ref)

    ensure_dir_exists(os.path.dirname(filename))
    with GitFile(filename, 'wb') as f:
        if old_ref is not None:
            try:
                # read again while holding the lock
                orig_ref = self.read_loose_ref(realname)
                if orig_ref is None:
                    orig_ref = self.get_packed_refs().get(
                        realname, ZERO_SHA)
                if orig_ref != old_ref:
                    f.abort()
                    return False
            except (OSError, IOError):
                f.abort()
                raise
        try:
            f.write(new_ref + b'\n')
        except (OSError, IOError):
            f.abort()
            raise
        self._log(realname, old_ref, new_ref, committer=committer,
                  timestamp=timestamp, timezone=timezone,
                  message=message)
    return True
def test_remove_packed_without_peeled(self):
    """Packed refs without peeled ('^') lines can still be removed."""
    refs_file = os.path.join(self._repo.path, "packed-refs")
    f = GitFile(refs_file)
    refs_data = f.read()
    f.close()
    # Keep only real ref lines: drop '#' comments and '^' peeled entries.
    kept = [line for line in refs_data.split(b"\n")
            if not line or line[0] not in b"#^"]
    f = GitFile(refs_file, "wb")
    f.write(b"\n".join(kept))
    f.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        b"refs/heads/packed",
        b"42d06bd4b77fed026b154d16493e5deab78f02ec"))
def test_open_twice(self):
    """A second writer raises FileLocked; the first writer is unaffected."""
    foo = self.path("foo")
    first = GitFile(foo, "wb")
    first.write(b"new")
    try:
        second = GitFile(foo, "wb")
        self.fail()
    except FileLocked:
        pass
    else:
        second.close()
    first.write(b" contents")
    first.close()
    # Ensure trying to open twice doesn't affect original.
    with open(foo, "rb") as check:
        self.assertEqual(b"new contents", check.read())
def write_to_path(self, path=None):
    """Write configuration to a file on disk.

    Uses this config's own path when *path* is omitted.
    """
    target = self.path if path is None else path
    with GitFile(target, 'wb') as out:
        self.write_to_file(out)
def _parse_path(self):
    """Open self._path read-only and feed it to the file parser."""
    with GitFile(self._path, 'rb') as source:
        self._parse_file(source)
def from_path(cls, path):
    """Read configuration from a file on disk."""
    with GitFile(path, 'rb') as config_fp:
        config = cls.from_file(config_fp)
    config.path = path
    return config
def writeIndex(self, filename, entries, pack_checksum):
    """Write a pack index for *entries* to *filename* via self._write_fn."""
    # FIXME: Write to BytesIO instead rather than hitting disk ?
    with GitFile(filename, "wb") as index_out:
        self._write_fn(index_out, entries, pack_checksum)
def remove_if_equals(self, name, old_ref, committer=None, timestamp=None,
                     timezone=None, message=None):
    """Remove a refname only if it currently equals old_ref.

    This method does not follow symbolic references. It can be used to
    perform an atomic compare-and-delete operation.

    :param name: The refname to delete.
    :param old_ref: The old sha the refname must refer to, or None to
        delete unconditionally.
    :param committer: Optional committer identity, forwarded to self._log
    :param timestamp: Optional timestamp, forwarded to self._log
    :param timezone: Optional timezone, forwarded to self._log
    :param message: Optional message
    :return: True if the delete was successful, False otherwise.
    """
    self._check_refname(name)
    filename = self.refpath(name)
    ensure_dir_exists(os.path.dirname(filename))
    # Hold the lock file while comparing and deleting.
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            orig_ref = self.read_loose_ref(name)
            if orig_ref is None:
                # Not loose: consult packed-refs, defaulting to ZERO_SHA.
                orig_ref = self.get_packed_refs().get(name, ZERO_SHA)
            if orig_ref != old_ref:
                return False
        # remove the reference file itself
        try:
            os.remove(filename)
        except OSError as e:
            if e.errno != errno.ENOENT:  # may only be packed
                raise
        self._remove_packed_ref(name)
        self._log(name, old_ref, None, committer=committer,
                  timestamp=timestamp, timezone=timezone, message=message)
    finally:
        # never write, we just wanted the lock
        f.abort()
    # outside of the lock, clean-up any parent directory that might now
    # be empty. this ensures that re-creating a reference of the same
    # name of what was previously a directory works as expected
    parent = name
    while True:
        try:
            parent, _ = parent.rsplit(b'/', 1)
        except ValueError:
            # No more '/' separators: reached the top of the ref namespace.
            break
        parent_filename = self.refpath(parent)
        try:
            os.rmdir(parent_filename)
        except OSError:
            # this can be caused by the parent directory being
            # removed by another process, being not empty, etc.
            # in any case, this is non fatal because we already
            # removed the reference, just ignore it
            break
    return True
def __enter__(self):
    # Lock the index path first (GitFile takes the .lock file), then load
    # the current index contents for the caller to work with. Statement
    # order matters: the lock must exist before the index is read.
    self._file = GitFile(self._path, 'wb')
    self._index = Index(self._path)
    return self._index
def test_open_twice(self):
    """Re-opening a locked path raises FileLocked without corrupting it."""
    foo = self.path('foo')
    writer = GitFile(foo, 'wb')
    writer.write(b'new')
    try:
        duplicate = GitFile(foo, 'wb')
        self.fail()
    except FileLocked:
        pass
    else:
        duplicate.close()
    writer.write(b' contents')
    writer.close()
    # Ensure trying to open twice doesn't affect original.
    with open(foo, 'rb') as check:
        self.assertEqual(b'new contents', check.read())
def _parse_path(self):
    """Read self._path and hand the open file to the parser."""
    source = GitFile(self._path, 'rb')
    try:
        self._parse_file(source)
    finally:
        source.close()
def stashes(self):
    """Return stash reflog entries, most recent first; empty if no reflog."""
    try:
        with GitFile(self._reflog_path, "rb") as reflog:
            entries = list(read_reflog(reflog))
    except FileNotFoundError:
        return []
    return reversed(entries)
def test_remove_packed_without_peeled(self):
    """remove_if_equals handles packed-refs files with no peeled lines."""
    refs_file = os.path.join(self._repo.path, 'packed-refs')
    reader = GitFile(refs_file)
    refs_data = reader.read()
    reader.close()
    writer = GitFile(refs_file, 'wb')
    # Strip '#' header and '^' peeled lines, keeping blank lines.
    writer.write(b'\n'.join(line for line in refs_data.split(b'\n')
                            if not line or line[0] not in b'#^'))
    writer.close()
    self._repo = Repo(self._repo.path)
    refs = self._repo.refs
    self.assertTrue(refs.remove_if_equals(
        b'refs/heads/packed',
        b'42d06bd4b77fed026b154d16493e5deab78f02ec'))
def set_if_equals(self, name, old_ref, new_ref):
    """Set a refname to new_ref only if it currently equals old_ref.

    This method follows all symbolic references, and can be used to
    perform an atomic compare-and-swap operation.

    :param name: The refname to set.
    :param old_ref: The old sha the refname must refer to, or None to set
        unconditionally.
    :param new_ref: The new sha the refname will refer to.
    :return: True if the set was successful, False otherwise.
    """
    self._check_refname(name)
    try:
        # Resolve symrefs so we write to the final target.
        realname, _ = self._follow(name)
    except KeyError:
        realname = name
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            try:
                # read again while holding the lock
                orig_ref = self.read_loose_ref(realname)
                if orig_ref is None:
                    orig_ref = self.get_packed_refs().get(realname, None)
                if orig_ref != old_ref:
                    f.abort()
                    return False
            except (OSError, IOError):
                f.abort()
                raise
        try:
            f.write(new_ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        # close() commits the lock file unless abort() already ran.
        f.close()
    return True
def _complete_thin_pack(self, f, path, copier, indexer):
    """Move a specific file containing a pack into the pack directory.

    Note: The file should be on the same file system as the packs
        directory.

    Args:
      f: Open file object for the pack.
      path: Path to the pack file.
      copier: A PackStreamCopier to use for writing pack data.
      indexer: A PackIndexer for indexing the pack.
    """
    entries = list(indexer)
    # Update the header with the new number of objects.
    f.seek(0)
    write_pack_header(f, len(entries) + len(indexer.ext_refs()))
    # Must flush before reading (http://bugs.python.org/issue3207)
    f.flush()
    # Rescan the rest of the pack, computing the SHA with the new header.
    new_sha = compute_file_sha(f, end_ofs=-20)
    # Must reposition before writing (http://bugs.python.org/issue3207)
    f.seek(0, os.SEEK_CUR)
    # Complete the pack: append each external (thin) object inline.
    for ext_sha in indexer.ext_refs():
        assert len(ext_sha) == 20
        type_num, data = self.get_raw(ext_sha)
        offset = f.tell()
        crc32 = write_pack_object(
            f, type_num, data, sha=new_sha,
            compression_level=self.pack_compression_level)
        entries.append((ext_sha, offset, crc32))
    pack_sha = new_sha.digest()
    f.write(pack_sha)
    f.close()
    # Move the pack in.
    entries.sort()
    pack_base_name = self._get_pack_basepath(entries)
    target_pack = pack_base_name + '.pack'
    if sys.platform == 'win32':
        # Windows might have the target pack file lingering. Attempt
        # removal, silently passing if the target does not exist.
        try:
            os.remove(target_pack)
        except (IOError, OSError) as e:
            if e.errno != errno.ENOENT:
                raise
    os.rename(path, target_pack)
    # Write the index. abort() after a successful close() is a no-op, so
    # the finally only discards the .idx lock if writing failed midway.
    index_file = GitFile(pack_base_name + '.idx', 'wb')
    try:
        write_pack_index_v2(index_file, entries, pack_sha)
        index_file.close()
    finally:
        index_file.abort()
    # Add the pack to the store and return it.
    final_pack = Pack(pack_base_name)
    final_pack.check_length_and_checksum()
    self._add_cached_pack(pack_base_name, final_pack)
    return final_pack
def test_default_mode(self):
    """Omitting the mode argument opens the file for binary reading."""
    reader = GitFile(self.path("foo"))
    self.assertEqual(b"foo contents", reader.read())
    reader.close()
def from_path(cls, path) -> "ConfigFile":
    """Read configuration from a file on disk."""
    with GitFile(path, "rb") as config_fp:
        instance = cls.from_file(config_fp)
    instance.path = path
    return instance
def _complete_thin_pack(self, f, path, copier, indexer):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param f: Open file object for the pack.
    :param path: Path to the pack file.
    :param copier: A PackStreamCopier to use for writing pack data.
    :param indexer: A PackIndexer for indexing the pack.
    """
    entries = list(indexer)
    # Update the header with the new number of objects.
    f.seek(0)
    write_pack_header(f, len(entries) + len(indexer.ext_refs()))
    # Must flush before reading (http://bugs.python.org/issue3207)
    f.flush()
    # Rescan the rest of the pack, computing the SHA with the new header.
    new_sha = compute_file_sha(f, end_ofs=-20)
    # Must reposition before writing (http://bugs.python.org/issue3207)
    f.seek(0, os.SEEK_CUR)
    # Complete the pack: append each external (thin) object inline.
    for ext_sha in indexer.ext_refs():
        assert len(ext_sha) == 20
        type_num, data = self.get_raw(ext_sha)
        offset = f.tell()
        crc32 = write_pack_object(f, type_num, data, sha=new_sha)
        entries.append((ext_sha, offset, crc32))
    pack_sha = new_sha.digest()
    f.write(pack_sha)
    f.close()
    # Move the pack in.
    entries.sort()
    pack_base_name = self._get_pack_basepath(entries)
    if sys.platform == 'win32':
        # Windows cannot rename over an existing file: retry after
        # removing any lingering target pack.
        try:
            os.rename(path, pack_base_name + '.pack')
        except WindowsError:
            os.remove(pack_base_name + '.pack')
            os.rename(path, pack_base_name + '.pack')
    else:
        os.rename(path, pack_base_name + '.pack')
    # Write the index. abort() after a successful close() is a no-op, so
    # the finally only discards the .idx lock if writing failed midway.
    index_file = GitFile(pack_base_name + '.idx', 'wb')
    try:
        write_pack_index_v2(index_file, entries, pack_sha)
        index_file.close()
    finally:
        index_file.abort()
    # Add the pack to the store and return it.
    final_pack = Pack(pack_base_name)
    final_pack.check_length_and_checksum()
    self._add_known_pack(pack_base_name, final_pack)
    return final_pack
def __enter__(self):
    # TODO(jelmer): This can raise dulwich.file.FileLocked;
    # https://github.com/jelmer/xandikos/issues/66
    # Acquire the lock first, then read the index; order matters.
    self._file = GitFile(self._path, 'wb')
    self._index = Index(self._path)
    return self._index