def get_packed_refs(self):
    """Get contents of the packed-refs file.

    :return: Dictionary mapping ref names to SHA1s

    :note: Will return an empty dictionary when no packed-refs file is
        present.
    """
    # TODO: invalidate the cache on repacking
    if self._packed_refs is None:
        # set both to empty because we want _peeled_refs to be
        # None if and only if _packed_refs is also None.
        self._packed_refs = {}
        self._peeled_refs = {}
        path = os.path.join(self.path, 'packed-refs')
        try:
            f = GitFile(path, 'rb')
        except IOError, e:
            if e.errno == errno.ENOENT:
                return {}
            raise
        try:
            first_line = iter(f).next().rstrip()
            if (first_line.startswith("# pack-refs") and
                    " peeled" in first_line):
                for sha, name, peeled in read_packed_refs_with_peeled(f):
                    self._packed_refs[name] = sha
                    if peeled:
                        self._peeled_refs[name] = peeled
            else:
                f.seek(0)
                for sha, name in read_packed_refs(f):
                    self._packed_refs[name] = sha
        finally:
            f.close()
    return self._packed_refs
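# Illustrative sketch (not from the original source): the packed-refs file
# parsed above looks roughly like this --
#
#   # pack-refs with: peeled
#   <40 hex chars> refs/heads/master
#   <40 hex chars> refs/tags/v1.0
#   ^<40 hex chars>
#
# Each plain line maps a refname to a SHA and ends up in self._packed_refs;
# a following "^" line gives the peeled (dereferenced tag) SHA, which is
# stored in self._peeled_refs under the same refname.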
def add_if_new(self, name, ref):
    """Add a new reference only if it does not already exist.

    This method follows symrefs, and only ensures that the last ref in the
    chain does not exist.

    :param name: The refname to set.
    :param ref: The new sha the refname will refer to.
    :return: True if the add was successful, False otherwise.
    """
    try:
        realname, contents = self._follow(name)
        if contents is not None:
            return False
    except KeyError:
        realname = name
    self._check_refname(realname)
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if os.path.exists(filename) or name in self.get_packed_refs():
            f.abort()
            return False
        try:
            f.write(ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        f.close()
    return True
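# Minimal usage sketch (hypothetical refs container instance and SHA; the
# names are illustrative, not from the original source):
#
#   created = refs.add_if_new('refs/heads/topic', 'a' * 40)
#   if not created:
#       # the ref (after following any symrefs) already existed, either
#       # loose on disk or in packed-refs; nothing was written
#       pass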
def read_loose_ref(self, name):
    """Read a reference file and return its contents.

    If the reference file is a symbolic reference, only read the first line
    of the file. Otherwise, only read the first 40 bytes.

    :param name: the refname to read, relative to refpath
    :return: The contents of the ref file, or None if the file does not
        exist.
    :raises IOError: if any other error occurs
    """
    filename = self.refpath(name)
    try:
        f = GitFile(filename, 'rb')
        try:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                return header + iter(f).next().rstrip("\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))
        finally:
            f.close()
    except IOError, e:
        if e.errno == errno.ENOENT:
            return None
        raise
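# Illustrative examples of loose ref contents (assuming SYMREF == 'ref: ',
# as in dulwich):
#
#   file contains "ref: refs/heads/master\n"
#       -> read_loose_ref() returns "ref: refs/heads/master"
#   file contains "<40 hex chars>\n"
#       -> read_loose_ref() returns the 40 hex characters
#   file missing
#       -> read_loose_ref() returns None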
def write(self):
    """Write current contents of index to disk."""
    f = GitFile(self._filename, 'wb')
    try:
        f = SHA1Writer(f)
        write_index_dict(f, self._byname)
    finally:
        f.close()
def load_pack_index(path):
    """Load an index file by path.

    :param path: Path to the index file
    :return: A PackIndex loaded from the given path
    """
    f = GitFile(path, 'rb')
    try:
        return load_pack_index_file(path, f)
    finally:
        f.close()
@classmethod
def from_path(cls, path):
    """Open a SHA file from disk."""
    f = GitFile(path, 'rb')
    try:
        obj = cls.from_file(f)
        obj._path = path
        obj._sha = FixedSha(filename_to_hex(path))
        obj._file = None
        obj._magic = None
        return obj
    finally:
        f.close()
def _put_named_file(self, path, contents):
    """Write a file to the control dir with the given name and contents.

    :param path: The path to the file, relative to the control dir.
    :param contents: A string to write to the file.
    """
    path = path.lstrip(os.path.sep)
    f = GitFile(os.path.join(self.controldir(), path), 'wb')
    try:
        f.write(contents)
    finally:
        f.close()
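# Usage sketch (hypothetical repo object; the file name is only an example):
#
#   repo._put_named_file('description', 'unnamed repository\n')
#   # writes <controldir>/description through GitFile, so the content only
#   # appears under its final name once the write completes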
def create_index_v2(self, filename, progress=None):
    """Create a version 2 index file for this data file.

    :param filename: Index filename.
    :param progress: Progress report function
    :return: Checksum of index file
    """
    entries = self.sorted_entries(progress=progress)
    f = GitFile(filename, 'wb')
    try:
        return write_pack_index_v2(f, entries, self.calculate_checksum())
    finally:
        f.close()
def remove_if_equals(self, name, old_ref):
    """Remove a refname only if it currently equals old_ref.

    This method does not follow symbolic references. It can be used to
    perform an atomic compare-and-delete operation.

    :param name: The refname to delete.
    :param old_ref: The old sha the refname must refer to, or None to
        delete unconditionally.
    :return: True if the delete was successful, False otherwise.
    """
    self._check_refname(name)
    filename = self.refpath(name)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            orig_ref = self.read_loose_ref(name)
            if orig_ref is None:
                orig_ref = self.get_packed_refs().get(name, None)
            if orig_ref != old_ref:
                return False
        # may only be packed
        try:
            os.remove(filename)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
        self._remove_packed_ref(name)
    finally:
        # never write to the lock file; we only wanted the lock
        f.abort()
    return True
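# Compare-and-delete usage sketch (hypothetical refs container and SHA, for
# illustration only):
#
#   old = refs.read_loose_ref('refs/heads/topic')
#   if refs.remove_if_equals('refs/heads/topic', old):
#       pass  # deleted atomically
#   else:
#       pass  # the ref changed since `old` was read; nothing was deleted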
def read(self):
    """Read current contents of index from disk."""
    if not os.path.exists(self._filename):
        return
    f = GitFile(self._filename, 'rb')
    try:
        f = SHA1Reader(f)
        for x in read_index(f):
            self[x[0]] = tuple(x[1:])
        # FIXME: Additional data?
        f.read(os.path.getsize(self._filename) - f.tell() - 20)
        f.check_sha()
    finally:
        f.close()
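# Usage sketch for the read()/write() pair above (a hedged example assuming
# they are methods of a dulwich-style Index object keyed by path):
#
#   idx.read()      # load entries from disk, verifying the trailing SHA-1
#   # ... mutate idx[path] entries ...
#   idx.write()     # serialize entries back out through a SHA1Writer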
def _remove_packed_ref(self, name):
    if self._packed_refs is None:
        return
    filename = os.path.join(self.path, 'packed-refs')
    # reread cached refs from disk, while holding the lock
    f = GitFile(filename, 'wb')
    try:
        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return
        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        write_packed_refs(f, self._packed_refs, self._peeled_refs)
        f.close()
    finally:
        # abort() is a no-op once close() has committed the file; it only
        # discards the lock on an early return or an exception
        f.abort()
def move_in_pack(self, path):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the
        packs directory.

    :param path: Path to the pack file.
    """
    p = PackData(path)
    entries = p.sorted_entries()
    basename = os.path.join(self.pack_dir,
        "pack-%s" % iter_sha1(entry[0] for entry in entries))
    f = GitFile(basename + ".idx", "wb")
    try:
        write_pack_index_v2(f, entries, p.get_stored_checksum())
    finally:
        f.close()
    p.close()
    os.rename(path, basename + ".pack")
    final_pack = Pack(basename)
    self._add_known_pack(final_pack)
    return final_pack
def set_symbolic_ref(self, name, other):
    """Make a ref point at another ref.

    :param name: Name of the ref to set
    :param other: Name of the ref to point at
    """
    self._check_refname(name)
    self._check_refname(other)
    filename = self.refpath(name)
    try:
        f = GitFile(filename, 'wb')
        try:
            f.write(SYMREF + other + '\n')
        except (IOError, OSError):
            f.abort()
            raise
    finally:
        f.close()
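# Example (illustrative): set_symbolic_ref('HEAD', 'refs/heads/develop')
# rewrites the HEAD file so that it contains "ref: refs/heads/develop\n"
# (assuming SYMREF == 'ref: ', as elsewhere in this code).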
def write_pack(filename, objects, num_objects):
    """Write a new pack data file.

    :param filename: Path to the new pack file (without .pack extension)
    :param objects: Iterable over (object, path) tuples to write
    :param num_objects: Number of objects to write
    :return: Tuple with checksum of pack file and index file
    """
    f = GitFile(filename + ".pack", 'wb')
    try:
        entries, data_sum = write_pack_data(f, objects, num_objects)
    finally:
        f.close()
    entries.sort()
    f = GitFile(filename + ".idx", 'wb')
    try:
        return data_sum, write_pack_index_v2(f, entries, data_sum)
    finally:
        f.close()
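# Usage sketch (the object list, (object, path) pairs and basename are
# illustrative placeholders, not from the original source):
#
#   objs = [(blob, None), (tree, None), (commit, None)]
#   pack_sum, idx_sum = write_pack('/tmp/pack-incoming', objs, len(objs))
#   # produces /tmp/pack-incoming.pack and /tmp/pack-incoming.idx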
class PackData(object):
    """The data contained in a packfile.

    Pack files can be accessed both sequentially for exploding a pack, and
    directly with the help of an index to retrieve a specific object.

    The objects within are either complete or a delta against another.

    The header is variable length. If the MSB of each byte is set then it
    indicates that the subsequent byte is still part of the header.
    For the first byte the next MS bits are the type, which tells you the type
    of object, and whether it is a delta. The LS byte is the lowest bits of the
    size. For each subsequent byte the LS 7 bits are the next MS bits of the
    size, i.e. the last byte of the header contains the MS bits of the size.

    For the complete objects the data is stored as zlib deflated data.
    The size in the header is the uncompressed object size, so to uncompress
    you need to just keep feeding data to zlib until you get an object back,
    or it errors on bad data. This is done here by just giving the complete
    buffer from the start of the deflated object on. This is bad, but until I
    get mmap sorted out it will have to do.

    Currently there are no integrity checks done. Also no attempt is made to
    try and detect the delta case, or a request for an object at the wrong
    position. It will all just throw a zlib or KeyError.
    """

    def __init__(self, filename, file=None, size=None):
        """Create a PackData object representing the pack in the given
        filename.

        The file must exist and stay readable until the object is disposed of.
        It must also stay the same size. It will be mapped whenever needed.

        Currently there is a restriction on the size of the pack as the python
        mmap implementation is flawed.
        """
        self._filename = filename
        self._size = size
        self._header_size = 12
        if file is None:
            self._file = GitFile(self._filename, 'rb')
        else:
            self._file = file
        (version, self._num_objects) = read_pack_header(self._file.read)
        self._offset_cache = LRUSizeCache(1024*1024*20,
            compute_size=_compute_object_size)
        self.pack = None

    @classmethod
    def from_file(cls, file, size):
        return cls(str(file), file=file, size=size)

    @classmethod
    def from_path(cls, path):
        return cls(filename=path)

    def close(self):
        self._file.close()

    def _get_size(self):
        if self._size is not None:
            return self._size
        self._size = os.path.getsize(self._filename)
        if self._size < self._header_size:
            errmsg = ("%s is too small for a packfile (%d < %d)" %
                      (self._filename, self._size, self._header_size))
            raise AssertionError(errmsg)
        return self._size

    def __len__(self):
        """Returns the number of objects in this pack."""
        return self._num_objects

    def calculate_checksum(self):
        """Calculate the checksum for this pack.

        :return: 20-byte binary SHA1 digest
        """
        s = make_sha()
        self._file.seek(0)
        todo = self._get_size() - 20
        while todo > 0:
            x = self._file.read(min(todo, 1 << 16))
            s.update(x)
            todo -= len(x)
        return s.digest()

    def get_ref(self, sha):
        """Get the object for a ref SHA, only looking in this pack."""
        # TODO: cache these results
        if self.pack is None:
            raise KeyError(sha)
        offset = self.pack.index.object_index(sha)
        if not offset:
            raise KeyError(sha)
        type, obj = self.get_object_at(offset)
        return offset, type, obj

    def resolve_object(self, offset, type, obj, get_ref=None):
        """Resolve an object, possibly resolving deltas when necessary.

        :return: Tuple with object type and contents.
        """
        if type not in DELTA_TYPES:
            return type, obj
        if get_ref is None:
            get_ref = self.get_ref
        if type == OFS_DELTA:
            (delta_offset, delta) = obj
            # TODO: clean up asserts and replace with nicer error messages
            assert isinstance(offset, int)
            assert isinstance(delta_offset, int)
            base_offset = offset - delta_offset
            type, base_obj = self.get_object_at(base_offset)
            assert isinstance(type, int)
        elif type == REF_DELTA:
            (basename, delta) = obj
            assert isinstance(basename, str) and len(basename) == 20
            base_offset, type, base_obj = get_ref(basename)
            assert isinstance(type, int)
        type, base_chunks = self.resolve_object(base_offset, type, base_obj)
        chunks = apply_delta(base_chunks, delta)
        # TODO(dborowitz): This can result in poor performance if large base
        # objects are separated from deltas in the pack. We should reorganize
        # so that we apply deltas to all objects in a chain one after the
        # other to optimize cache performance.
        if offset is not None:
            self._offset_cache[offset] = type, chunks
        return type, chunks

    def iterobjects(self, progress=None):
        return PackObjectIterator(self, progress)

    def iterentries(self, progress=None):
        """Yield entries summarizing the contents of this pack.

        :param progress: Progress function, called with current and total
            object count.
        :return: iterator of tuples with (sha, offset, crc32)
        """
        for offset, type, obj, crc32 in self.iterobjects(progress=progress):
            assert isinstance(offset, int)
            assert isinstance(type, int)
            assert isinstance(obj, list) or isinstance(obj, tuple)
            type, obj = self.resolve_object(offset, type, obj)
            yield obj_sha(type, obj), offset, crc32

    def sorted_entries(self, progress=None):
        """Return entries in this pack, sorted by SHA.

        :param progress: Progress function, called with current and total
            object count
        :return: List of tuples with (sha, offset, crc32)
        """
        ret = list(self.iterentries(progress=progress))
        ret.sort()
        return ret

    def create_index_v1(self, filename, progress=None):
        """Create a version 1 index file for this data file.

        :param filename: Index filename.
        :param progress: Progress report function
        :return: Checksum of index file
        """
        entries = self.sorted_entries(progress=progress)
        f = GitFile(filename, 'wb')
        try:
            return write_pack_index_v1(f, entries, self.calculate_checksum())
        finally:
            f.close()

    def create_index_v2(self, filename, progress=None):
        """Create a version 2 index file for this data file.

        :param filename: Index filename.
        :param progress: Progress report function
        :return: Checksum of index file
        """
        entries = self.sorted_entries(progress=progress)
        f = GitFile(filename, 'wb')
        try:
            return write_pack_index_v2(f, entries, self.calculate_checksum())
        finally:
            f.close()

    def create_index(self, filename, progress=None, version=2):
        """Create an index file for this data file.

        :param filename: Index filename.
        :param progress: Progress report function
        :return: Checksum of index file
        """
        if version == 1:
            return self.create_index_v1(filename, progress)
        elif version == 2:
            return self.create_index_v2(filename, progress)
        else:
            raise ValueError("unknown index format %d" % version)

    def get_stored_checksum(self):
        """Return the expected checksum stored in this pack."""
        self._file.seek(self._get_size() - 20)
        return self._file.read(20)

    def check(self):
        """Check the consistency of this pack."""
        actual = self.calculate_checksum()
        stored = self.get_stored_checksum()
        if actual != stored:
            raise ChecksumMismatch(stored, actual)

    def get_object_at(self, offset):
        """Given an offset into the packfile return the object that is there.

        Using the associated index the location of an object can be looked up,
        and then the packfile can be asked directly for that object using this
        function.
        """
        if offset in self._offset_cache:
            return self._offset_cache[offset]
        assert isinstance(offset, long) or isinstance(offset, int), \
            "offset was %r" % offset
        assert offset >= self._header_size
        self._file.seek(offset)
        return unpack_object(self._file.read)[:2]
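# The variable-length object header described in the PackData docstring can
# be decoded as sketched below. This is an illustrative helper, not dulwich's
# own unpack_object(); `data` is assumed to be a byte string positioned at
# the start of an object header inside a pack.
def _decode_pack_object_header(data):
    byte = ord(data[0])
    obj_type = (byte >> 4) & 0x07      # 3-bit object type (bits 6-4 of byte 0)
    size = byte & 0x0f                 # low 4 bits of the uncompressed size
    shift = 4
    i = 1
    while byte & 0x80:                 # MSB set: another header byte follows
        byte = ord(data[i])
        size |= (byte & 0x7f) << shift # each byte adds 7 more significant bits
        shift += 7
        i += 1
    return obj_type, size, i           # i == number of header bytes consumed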
def add_object(self, obj):
    """Add a single object to this object store.

    :param obj: Object to add
    """
    dir = os.path.join(self.path, obj.id[:2])
    try:
        os.mkdir(dir)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
    path = os.path.join(dir, obj.id[2:])
    if os.path.exists(path):
        return  # Already there, no need to write again
    f = GitFile(path, 'wb')
    try:
        f.write(obj.as_legacy_object())
    finally:
        f.close()

@classmethod
def init(cls, path):
    try:
        os.mkdir(path)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
    os.mkdir(os.path.join(path, "info"))
    os.mkdir(os.path.join(path, PACKDIR))
    return cls(path)
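# Illustrative example of the loose-object layout used by add_object() above
# (the SHA is made up): an object whose id is "ab3f0d..." (40 hex chars) is
# written, in the legacy zlib-compressed form returned by as_legacy_object(),
# to <store path>/ab/3f0d... -- the first two hex characters of the id become
# the fan-out directory name.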
def _parse_path(self):
    f = GitFile(self._path, 'rb')
    try:
        self._parse_file(f)
    finally:
        f.close()
def set_if_equals(self, name, old_ref, new_ref):
    """Set a refname to new_ref only if it currently equals old_ref.

    This method follows all symbolic references, and can be used to perform
    an atomic compare-and-swap operation.

    :param name: The refname to set.
    :param old_ref: The old sha the refname must refer to, or None to set
        unconditionally.
    :param new_ref: The new sha the refname will refer to.
    :return: True if the set was successful, False otherwise.
    """
    try:
        realname, _ = self._follow(name)
    except KeyError:
        realname = name
    filename = self.refpath(realname)
    ensure_dir_exists(os.path.dirname(filename))
    f = GitFile(filename, 'wb')
    try:
        if old_ref is not None:
            try:
                # read again while holding the lock
                orig_ref = self.read_loose_ref(realname)
                if orig_ref is None:
                    orig_ref = self.get_packed_refs().get(realname, None)
                if orig_ref != old_ref:
                    f.abort()
                    return False
            except (OSError, IOError):
                f.abort()
                raise
        try:
            f.write(new_ref + "\n")
        except (OSError, IOError):
            f.abort()
            raise
    finally:
        f.close()
    return True
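# Compare-and-swap usage sketch (hypothetical refs container and SHAs, for
# illustration only):
#
#   old = refs.read_loose_ref('refs/heads/master')
#   if not refs.set_if_equals('refs/heads/master', old, new_sha):
#       pass  # lost the race: the ref moved since `old` was read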
class FilePackIndex(PackIndex):
    """Pack index that is based on a file.

    To do the loop it opens the file, and indexes first 256 4 byte groups
    with the first byte of the sha id. The value in the four byte group
    indexed is the end of the group that shares the same starting byte.
    Subtract one from the starting byte and index again to find the start of
    the group. The values are sorted by sha id within the group, so do the
    math to find the start and end offset and then bisect in to find if the
    value is present.
    """

    def __init__(self, filename, file=None, contents=None, size=None):
        """Create a pack index object.

        Provide it with the name of the index file to consider, and it will
        map it whenever required.
        """
        self._filename = filename
        # Take the size now, so it can be checked each time we map the file to
        # ensure that it hasn't changed.
        if file is None:
            self._file = GitFile(filename, 'rb')
        else:
            self._file = file
        if contents is None:
            self._contents, self._size = _load_file_contents(self._file, size)
        else:
            self._contents, self._size = (contents, size)

    def __eq__(self, other):
        # Quick optimization:
        if (isinstance(other, FilePackIndex) and
                self._fan_out_table != other._fan_out_table):
            return False
        return super(FilePackIndex, self).__eq__(other)

    def close(self):
        self._file.close()
        if getattr(self._contents, "close", None) is not None:
            self._contents.close()

    def __len__(self):
        """Return the number of entries in this pack index."""
        return self._fan_out_table[-1]

    def _unpack_entry(self, i):
        """Unpack the i-th entry in the index file.

        :return: Tuple with object name (SHA), offset in pack file and CRC32
            checksum (if known).
        """
        raise NotImplementedError(self._unpack_entry)

    def _unpack_name(self, i):
        """Unpack the i-th name from the index file."""
        raise NotImplementedError(self._unpack_name)

    def _unpack_offset(self, i):
        """Unpack the i-th object offset from the index file."""
        raise NotImplementedError(self._unpack_offset)

    def _unpack_crc32_checksum(self, i):
        """Unpack the crc32 checksum for the i-th object from the index file."""
        raise NotImplementedError(self._unpack_crc32_checksum)

    def _itersha(self):
        for i in range(len(self)):
            yield self._unpack_name(i)

    def iterentries(self):
        """Iterate over the entries in this pack index.

        :return: iterator over tuples with object name, offset in packfile and
            crc32 checksum.
        """
        for i in range(len(self)):
            yield self._unpack_entry(i)

    def _read_fan_out_table(self, start_offset):
        ret = []
        for i in range(0x100):
            fanout_entry = self._contents[start_offset+i*4:start_offset+(i+1)*4]
            ret.append(struct.unpack(">L", fanout_entry)[0])
        return ret

    def check(self):
        """Check that the stored checksum matches the actual checksum."""
        actual = self.calculate_checksum()
        stored = self.get_stored_checksum()
        if actual != stored:
            raise ChecksumMismatch(stored, actual)

    def calculate_checksum(self):
        """Calculate the SHA1 checksum over this pack index.

        :return: This is a 20-byte binary digest
        """
        return make_sha(self._contents[:-20]).digest()

    def get_pack_checksum(self):
        """Return the SHA1 checksum stored for the corresponding packfile.

        :return: 20-byte binary digest
        """
        return str(self._contents[-40:-20])

    def get_stored_checksum(self):
        """Return the SHA1 checksum stored for this index.

        :return: 20-byte binary digest
        """
        return str(self._contents[-20:])

    def _object_index(self, sha):
        """See object_index.

        :param sha: A *binary* SHA string (20 characters long).
        """
        assert len(sha) == 20
        idx = ord(sha[0])
        if idx == 0:
            start = 0
        else:
            start = self._fan_out_table[idx-1]
        end = self._fan_out_table[idx]
        i = bisect_find_sha(start, end, sha, self._unpack_name)
        if i is None:
            raise KeyError(sha)
        return self._unpack_offset(i)
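# Worked example of the fan-out lookup in _object_index() (the numbers are
# made up for illustration): if fan_out_table[0x3e] == 17 and
# fan_out_table[0x3f] == 21, then exactly 4 entries have 0x3f as their first
# SHA byte, stored sorted at positions 17..20 of the index, and
# bisect_find_sha() only has to binary-search that 4-entry range to locate
# the requested object's pack offset.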