def data(self):
    """The pack data object being used."""
    if self._data is None:
        self._data = PackData(self._data_path)
        assert len(self.index) == len(self._data)
        idx_stored_checksum = self.index.get_pack_checksum()
        data_stored_checksum = self._data.get_stored_checksum()
        if idx_stored_checksum != data_stored_checksum:
            raise ChecksumMismatch(sha_to_hex(idx_stored_checksum),
                                   sha_to_hex(data_stored_checksum))
    return self._data
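# --- Usage sketch (not part of the original code) ---
# A minimal, hedged example of how the lazy `data` accessor above is exercised:
# touching it loads the PackData and compares the checksum recorded in the .idx
# file against the trailer of the .pack file, raising ChecksumMismatch on
# disagreement. Assumes dulwich's Pack class; the pack basename is illustrative,
# and depending on the dulwich version `data` may be a property or a method.
from dulwich.errors import ChecksumMismatch
from dulwich.pack import Pack


def verify_pack_pair(basename):
    pack = Pack(basename)
    try:
        pack.data          # use pack.data() if `data` is not a property here
    except ChecksumMismatch as e:
        # the index and data files belong to different packs, or one is corrupt
        print("pack %s is inconsistent: %s" % (basename, e))
        raise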
def send_pack(self, path, determine_wants, generate_pack_contents):
    """Upload a pack to a remote repository.

    :param path: Repository path
    :param determine_wants: Function that takes the remote refs and returns
        the refs to set on the remote.
    :param generate_pack_contents: Function that can return the shas of the
        objects to upload.
    """
    old_refs, server_capabilities = self.read_refs()
    new_refs = determine_wants(old_refs)
    if not new_refs:
        self.proto.write_pkt_line(None)
        return {}
    want = []
    have = [x for x in old_refs.values() if not x == "0" * 40]
    sent_capabilities = False
    for refname in set(new_refs.keys() + old_refs.keys()):
        old_sha1 = old_refs.get(refname, "0" * 40)
        new_sha1 = new_refs.get(refname, "0" * 40)
        if old_sha1 != new_sha1:
            if sent_capabilities:
                self.proto.write_pkt_line("%s %s %s" % (old_sha1, new_sha1,
                                                        refname))
            else:
                self.proto.write_pkt_line(
                    "%s %s %s\0%s" % (old_sha1, new_sha1, refname,
                                      self.capabilities()))
                sent_capabilities = True
        if new_sha1 not in have and new_sha1 != "0" * 40:
            want.append(new_sha1)
    self.proto.write_pkt_line(None)
    if not want:
        return new_refs
    objects = generate_pack_contents(have, want)
    (entries, sha) = write_pack_data(self.proto.write_file(), objects,
                                     len(objects))
    # read the final confirmation sha
    client_sha = self.proto.read(20)
    if client_sha not in (None, "", sha):
        raise ChecksumMismatch(sha, client_sha)
    return new_refs
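# --- Usage sketch (not part of the original code) ---
# A hedged example of driving send_pack from a local repository. The client
# class, host, remote path and branch name are illustrative; the object store's
# generate_pack_contents(have, want) method is assumed to be available and is
# passed as the pack generator, matching the callback signature expected above.
from dulwich.client import TCPGitClient
from dulwich.repo import Repo


def push_master(remote_host, remote_path):
    local = Repo(".")
    client = TCPGitClient(remote_host)

    def determine_wants(old_refs):
        # keep the remote refs as-is and point master at our local HEAD
        refs = dict(old_refs)
        refs["refs/heads/master"] = local.head()
        return refs

    return client.send_pack(remote_path, determine_wants,
                            local.object_store.generate_pack_contents)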
def check(self):
    """Check this object for internal consistency.

    :raise ObjectFormatException: if the object is malformed in some way
    :raise ChecksumMismatch: if the object was created with a SHA that does
        not match its contents
    """
    # TODO: if we find that error-checking during object parsing is a
    # performance bottleneck, those checks should be moved to the class's
    # check() method during optimization so we can still check the object
    # when necessary.
    old_sha = self.id
    try:
        self._deserialize(self.as_raw_chunks())
        self._sha = None
        new_sha = self.id
    except Exception as e:
        raise ObjectFormatException(e)
    if old_sha != new_sha:
        raise ChecksumMismatch(new_sha, old_sha)
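# --- Usage sketch (not part of the original code) ---
# A hedged example of calling check() on a loose object read from disk.
# ShaFile.from_file is assumed to accept an open file object (API details vary
# across dulwich versions); the path and helper name are illustrative.
from dulwich.errors import ChecksumMismatch, ObjectFormatException
from dulwich.objects import ShaFile


def verify_loose_object(path):
    f = open(path, "rb")
    try:
        obj = ShaFile.from_file(f)
    finally:
        f.close()
    try:
        obj.check()    # re-deserializes and compares the recomputed SHA
    except ObjectFormatException as e:
        print("malformed object in %s: %s" % (path, e))
    except ChecksumMismatch as e:
        print("SHA mismatch in %s: %s" % (path, e))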
def _header(self):
    return object_header(self.type, self.raw_length())

def raw_length(self):
    """Returns the length of the raw string of this object."""
    ret = 0
    for chunk in self.as_raw_chunks():
        ret += len(chunk)
    return ret

def _make_sha(self):
    ret = make_sha()
    ret.update(self._header())
    for chunk in self.as_raw_chunks():
        ret.update(chunk)
    return ret
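# --- Illustration (not part of the original code) ---
# What _make_sha computes, spelled out with hashlib directly: a git object SHA
# is the SHA-1 of "<type> <length>\0" followed by the raw content, i.e. exactly
# the header plus the raw chunks hashed above. The blob content is illustrative.
import hashlib

content = b"hello world\n"
header = ("blob %d\0" % len(content)).encode("ascii")
print(hashlib.sha1(header + content).hexdigest())   # matches `git hash-object` for a file with this content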
def check_sha(self):
    """Check that the trailing SHA-1 stored in the file matches the digest computed so far."""
    stored = self.f.read(20)
    if stored != self.sha1.digest():
        raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored))
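# --- Usage sketch (not part of the original code) ---
# The same trailer check as check_sha above, written as a standalone function
# over a whole pack (or index) file: everything except the last 20 bytes is
# hashed, and the digest must equal those stored 20 bytes. The file path is
# illustrative; ChecksumMismatch is reused with the (expected, got) arguments
# seen in the snippets above.
import binascii
import hashlib

from dulwich.errors import ChecksumMismatch


def check_file_trailer(path):
    f = open(path, "rb")
    try:
        data = f.read()
    finally:
        f.close()
    stored = data[-20:]
    computed = hashlib.sha1(data[:-20]).digest()
    if stored != computed:
        raise ChecksumMismatch(binascii.hexlify(computed),
                               binascii.hexlify(stored))
    return binascii.hexlify(computed)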