def test_deflated_smaller_window_buffer(self):
    # zlib on some systems uses smaller buffers,
    # resulting in a different header.
    # See https://github.com/libgit2/libgit2/pull/464
    sf = ShaFile.from_file(BytesIO(small_buffer_zlib_object))
    self.assertEqual(sf.type_name, "tag")
    self.assertEqual(sf.tagger, " <@localhost>")
def test_deflated_smaller_window_buffer(self):
    # zlib on some systems uses smaller buffers,
    # resulting in a different header.
    # See https://github.com/libgit2/libgit2/pull/464
    sf = ShaFile.from_file(BytesIO(small_buffer_zlib_object))
    self.assertEqual(sf.type_name, b'tag')
    self.assertEqual(sf.tagger, b' <@localhost>')
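# A minimal, hedged sketch of what the two tests above exercise:
# ShaFile.from_file() parses a zlib-compressed loose object, i.e. the
# "<type> <length>\0<payload>" format git writes under .git/objects/.
# The literal below is illustrative only; it is not the
# small_buffer_zlib_object fixture used by the tests.
import zlib
from io import BytesIO
from dulwich.objects import ShaFile

raw = b"blob 6\x00hello\n"  # loose-object header followed by the payload
sf = ShaFile.from_file(BytesIO(zlib.compress(raw)))
assert sf.type_name == b"blob"
assert sf.data == b"hello\n"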
def _get_loose_object(self, sha):
    path = osutils.joinpath(self._split_loose_object(sha))
    try:
        with self.transport.get(urlutils.quote_from_bytes(path)) as f:
            return ShaFile.from_file(f)
    except NoSuchFile:
        return None
def _get_loose_object(self, sha):
    path = self._get_shafile_path(sha)
    try:
        return ShaFile.from_path(path)
    except (OSError, IOError) as e:
        if e.errno == errno.ENOENT:
            return None
        raise
def _get_loose_object(self, sha):
    path = self._get_shafile_path(sha)
    try:
        return ShaFile.from_path(path)
    except (OSError, IOError), e:
        if e.errno == errno.ENOENT:
            return None
        raise
def iterobjects(self, get_raw=None):
    """Iterate over the objects in this pack."""
    if get_raw is None:
        get_raw = self.get_raw
    for offset, type, obj, crc32 in self.data.iterobjects():
        assert isinstance(offset, int)
        yield ShaFile.from_raw_string(
            *self.data.resolve_object(offset, type, obj, get_raw))
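# A hedged usage sketch for the iterobjects() implementation above, assuming
# dulwich's Pack class and a hypothetical pack pair at /tmp/example.pack and
# /tmp/example.idx (Pack() takes the basename without the suffix).
from dulwich.pack import Pack

pack = Pack("/tmp/example")
try:
    for obj in pack.iterobjects():  # yields reconstructed ShaFile objects
        print(obj.id, obj.type_name)
finally:
    pack.close()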
def _get_git_object(self, sha: bytes) -> ShaFile:
    # try to get the object from a pack file first to avoid flooding
    # git server with numerous HTTP requests
    for pack in list(self.packs):
        try:
            if sha in pack:
                return pack[sha]
        except (NotGitRepository, struct.error):
            # missing (dulwich http client raises NotGitRepository on 404)
            # or invalid pack index/content, remove it from global packs list
            logger.debug("A pack file is missing or its content is invalid")
            self.packs.remove(pack)
    # fetch it from objects/ directory otherwise
    sha_hex = sha.decode()
    object_path = f"objects/{sha_hex[:2]}/{sha_hex[2:]}"
    return ShaFile.from_file(self._http_get(object_path))
def iterentries(self, ext_resolve_ref=None, progress=None):
    """Yield entries summarizing the contents of this pack.

    :param ext_resolve_ref: Optional function to resolve base objects
        (in case this is a thin pack)
    :param progress: Progress function, called with current and total
        object count.

    This will yield tuples with (sha, offset, crc32)
    """
    found = {}
    postponed = defaultdict(list)

    class Postpone(Exception):
        """Raised to postpone delta resolving."""

    def get_ref_text(sha):
        assert len(sha) == 20
        if sha in found:
            return self.get_object_at(found[sha])
        if ext_resolve_ref:
            try:
                return ext_resolve_ref(sha)
            except KeyError:
                pass
        raise Postpone, (sha, )

    extra = []
    todo = chain(self.iterobjects(progress=progress), extra)
    for (offset, type, obj, crc32) in todo:
        assert isinstance(offset, int)
        assert isinstance(type, int)
        assert isinstance(obj, tuple) or isinstance(obj, str)
        try:
            type, obj = self.resolve_object(offset, type, obj, get_ref_text)
        except Postpone, (sha, ):
            postponed[sha].append((offset, type, obj))
        else:
            shafile = ShaFile.from_raw_string(type, obj)
            sha = shafile.sha().digest()
            found[sha] = offset
            yield sha, offset, crc32
            extra.extend(postponed.get(sha, []))
def store(self, istream):
    # Build a dulwich object from the stream's type and raw bytes, add it to
    # the repository's object store, and record the binary SHA on the stream.
    obj = ShaFile.from_raw_string(type_to_type_id_map[istream.type],
                                  istream.read())
    self._dw_repo.object_store.add_object(obj)
    istream.binsha = obj.sha().digest()
    return istream
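# A minimal sketch of the add_object() call used by store() above, assuming an
# in-memory dulwich object store; the payload bytes are illustrative only.
from dulwich.object_store import MemoryObjectStore
from dulwich.objects import Blob, ShaFile

store = MemoryObjectStore()
obj = ShaFile.from_raw_string(Blob.type_num, b"payload\n")
store.add_object(obj)
assert obj.id in store  # hex SHA1 of the stored blob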
def _do_test_count_blocks_chunks(self, count_blocks):
    blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
    self.assertEqual({hash("a\n"): 4, hash("b\n"): 2}, count_blocks(blob))
def _do_test_count_blocks_chunks(self, count_blocks):
    blob = ShaFile.from_raw_chunks(Blob.type_num, ['a\nb', '\na\n'])
    self.assertEqual({hash('a\n'): 4, hash('b\n'): 2}, _count_blocks(blob))
def _get_shafile(self, sha):
    path = self._get_shafile_path(sha)
    if os.path.exists(path):
        return ShaFile.from_file(path)
    return None
def __getitem__(self, sha):
    stream = self._vf.get_record_stream([(sha,)], 'unordered', True)
    entry = next(stream)
    if entry.storage_kind == 'absent':
        raise KeyError(sha)
    return ShaFile._parse_legacy_object(entry.get_bytes_as('fulltext'))
def _do_test_count_blocks_chunks(self, count_blocks):
    blob = ShaFile.from_raw_chunks(Blob.type_num, [b'a\nb', b'\na\n'])
    self.assertBlockCountEqual({b'a\n': 4, b'b\n': 2}, _count_blocks(blob))
def iterobjects(self):
    """Iterate over the objects in this pack."""
    for offset, type, obj, crc32 in self.data.iterobjects():
        assert isinstance(offset, int)
        yield ShaFile.from_raw_chunks(
            *self.data.resolve_object(offset, type, obj))
def __getitem__(self, sha):
    """Obtain an object by SHA1."""
    type_num, uncomp = self.get_raw(sha)
    return ShaFile.from_raw_string(type_num, uncomp)
def __getitem__(self, sha):
    """Obtain an object by SHA1."""
    type_num, uncomp = self.get_raw(sha)
    return ShaFile.from_raw_string(type_num, uncomp, sha=sha)
def __getitem__(self, sha1):
    """Retrieve the specified SHA1."""
    type, uncomp = self.get_raw(sha1)
    return ShaFile.from_raw_string(type, uncomp)
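# A minimal round-trip sketch for the __getitem__ implementations above:
# from_raw_string() rebuilds an object from its type number and raw payload,
# so serializing and re-parsing a Blob yields the same SHA1. The content
# below is an assumption for illustration.
from dulwich.objects import Blob, ShaFile

blob = Blob.from_string(b"hello\n")
rebuilt = ShaFile.from_raw_string(blob.type_num, blob.as_raw_string())
assert rebuilt.id == blob.id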
def _get_loose_object(self, sha):
    path = self._get_shafile_path(sha)
    try:
        return ShaFile.from_path(path)
    except FileNotFoundError:
        return None
def _do_test_count_blocks_chunks(self, count_blocks):
    blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
    self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob))
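# A minimal sketch relating from_raw_chunks() (used by the tests above) to
# from_raw_string(): the chunks are concatenated, so both constructors
# produce the same object and the same SHA1 for equivalent input.
from dulwich.objects import Blob, ShaFile

chunked = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"])
joined = ShaFile.from_raw_string(Blob.type_num, b"a\nb\na\n")
assert chunked.id == joined.id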