def test_check(self):
    t = Tree
    sha = hex_to_sha(a_sha)

    # filenames
    self.assertCheckSucceeds(t, '100644 .a\0%s' % sha)
    self.assertCheckFails(t, '100644 \0%s' % sha)
    self.assertCheckFails(t, '100644 .\0%s' % sha)
    self.assertCheckFails(t, '100644 a/a\0%s' % sha)
    self.assertCheckFails(t, '100644 ..\0%s' % sha)

    # modes
    self.assertCheckSucceeds(t, '100644 a\0%s' % sha)
    self.assertCheckSucceeds(t, '100755 a\0%s' % sha)
    self.assertCheckSucceeds(t, '160000 a\0%s' % sha)
    # TODO more whitelisted modes
    self.assertCheckFails(t, '123456 a\0%s' % sha)
    self.assertCheckFails(t, '123abc a\0%s' % sha)
    # should fail check, but parses ok
    self.assertCheckFails(t, '0100644 foo\0' + sha)

    # shas
    self.assertCheckFails(t, '100644 a\0%s' % ('x' * 5))
    self.assertCheckFails(t, '100644 a\0%s' % ('x' * 18 + '\0'))
    self.assertCheckFails(t, '100644 a\0%s\n100644 b\0%s' % ('x' * 21, sha))

    # ordering
    sha2 = hex_to_sha(b_sha)
    self.assertCheckSucceeds(t, '100644 a\0%s\n100644 b\0%s' % (sha, sha))
    self.assertCheckSucceeds(t, '100644 a\0%s\n100644 b\0%s' % (sha, sha2))
    self.assertCheckFails(t, '100644 a\0%s\n100755 a\0%s' % (sha, sha2))
    self.assertCheckFails(t, '100644 b\0%s\n100644 a\0%s' % (sha2, sha))
def read_packed_refs_with_peeled(f):
    """Read a packed refs file including peeled refs.

    Assumes the "# pack-refs with: peeled" line was already read. Yields
    tuples with ref names, SHA1s, and peeled SHA1s (or None).

    :param f: file-like object to read from, seek'ed to the second line
    """
    last = None
    for l in f:
        if l[0] == "#":
            continue
        l = l.rstrip("\r\n")
        if l[0] == "^":
            if not last:
                raise PackedRefsException("unexpected peeled ref line")
            try:
                hex_to_sha(l[1:])
            except (AssertionError, TypeError), e:
                raise PackedRefsException(e)
            sha, name = _split_ref_line(last)
            last = None
            yield (sha, name, l[1:])
        else:
            if last:
                sha, name = _split_ref_line(last)
                yield (sha, name, None)
            last = l
    # A final ref that is not followed by a peeled line still needs to be
    # emitted.
    if last:
        sha, name = _split_ref_line(last)
        yield (sha, name, None)
def _split_proto_line(line, allowed):
    """Split a line read from the wire.

    :param line: The line read from the wire.
    :param allowed: An iterable of command names that should be allowed.
        Command names not listed below as possible return values will be
        ignored.  If None, any commands from the possible return values are
        allowed.
    :return: a tuple having one of the following forms:
        ('want', obj_id)
        ('have', obj_id)
        ('done', None)
        (None, None)  (for a flush-pkt)

    :raise UnexpectedCommandError: if the line cannot be parsed into one of the
        allowed return values.
    """
    if not line:
        fields = [None]
    else:
        fields = line.rstrip('\n').split(' ', 1)
    command = fields[0]
    if allowed is not None and command not in allowed:
        raise UnexpectedCommandError(command)
    try:
        if len(fields) == 1 and command in ('done', None):
            return (command, None)
        elif len(fields) == 2 and command in ('want', 'have'):
            hex_to_sha(fields[1])
            return tuple(fields)
    except (TypeError, AssertionError), e:
        raise GitProtocolError(e)
    # Anything else is a malformed line.
    raise GitProtocolError('Received invalid line from client: %s' % line)
def test_object_sha1(self):
    """Tests that the correct object offset is returned from the index."""
    p = self.get_pack_index(pack1_sha)
    self.assertRaises(KeyError, p.object_sha1, 876)
    self.assertEqual(p.object_sha1(178), hex_to_sha(a_sha))
    self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha))
    self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha))
def test_simple(self):
    myhexsha = b'd80c186a03f423a81b39df39dc87fd269736ca86'
    x = Tree()
    x[b'myname'] = (0o100755, myhexsha)
    self.assertEqual(b'100755 myname\0' + hex_to_sha(myhexsha),
                     x.as_raw_string())
    self.assertEqual(b'100755 myname\0' + hex_to_sha(myhexsha), bytes(x))
def _parse_refs(self, output):
    refs = {}
    for line in BytesIO(output):
        fields = line.rstrip(b"\n").split(b" ")
        self.assertEqual(3, len(fields))
        refname, type_name, sha = fields
        check_ref_format(refname[5:])
        hex_to_sha(sha)
        refs[refname] = (type_name, sha)
    return refs
def _split_proto_line(self, line):
    fields = line.rstrip('\n').split(' ', 1)
    if len(fields) == 1 and fields[0] == 'done':
        return ('done', None)
    elif len(fields) == 2 and fields[0] in ('want', 'have'):
        try:
            hex_to_sha(fields[1])
            return tuple(fields)
        except (TypeError, AssertionError), e:
            raise GitProtocolError(e)
def _split_ref_line(line):
    """Split a single ref line into a tuple of SHA1 and name."""
    fields = line.rstrip("\n").split(" ")
    if len(fields) != 2:
        raise PackedRefsException("invalid ref line '%s'" % line)
    sha, name = fields
    try:
        hex_to_sha(sha)
    except (AssertionError, TypeError), e:
        raise PackedRefsException(e)
    return (sha, name)
def test_multiple_ext_refs(self):
    b1, b2 = self.store_blobs(['foo', 'bar'])
    f = BytesIO()
    entries = build_pack(f, [
        (REF_DELTA, (b1.id, 'foo1')),
        (REF_DELTA, (b2.id, 'bar2')),
    ], store=self.store)
    pack_iter = self.make_pack_iter(f)
    self.assertEntriesMatch([0, 1], entries, pack_iter)
    self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)],
                     pack_iter.ext_refs())
def _get_shallow(repo):
    shallow_file = repo.get_named_file('shallow')
    if not shallow_file:
        return []
    shallows = []
    with shallow_file:
        for line in shallow_file:
            sha = line.strip()
            if not sha:
                continue
            hex_to_sha(sha)
            shallows.append(sha)
    return shallows
def _split_ref_line(line):
    """Split a single ref line into a tuple of SHA1 and name."""
    fields = line.rstrip(b'\n').split(b' ')
    if len(fields) != 2:
        raise PackedRefsException("invalid ref line %r" % line)
    sha, name = fields
    try:
        hex_to_sha(sha)
    except (AssertionError, TypeError) as e:
        raise PackedRefsException(e)
    if not check_ref_format(name):
        raise PackedRefsException("invalid ref name %r" % name)
    return (sha, name)
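# Illustrative usage of _split_ref_line above (not part of dulwich itself).
# It assumes the function is in scope together with dulwich's real helpers:
# hex_to_sha from dulwich.objects, check_ref_format from dulwich.refs, and
# PackedRefsException from dulwich.errors. A packed-refs line is a 40-char
# hex SHA1, a space, and a ref name.
line = b'd80c186a03f423a81b39df39dc87fd269736ca86 refs/heads/master\n'
sha, name = _split_ref_line(line)
assert sha == b'd80c186a03f423a81b39df39dc87fd269736ca86'
assert name == b'refs/heads/master'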
def _get_shallow(repo):
    shallow_file = repo.get_named_file("shallow")
    if not shallow_file:
        return []
    shallows = []
    try:
        for line in shallow_file:
            sha = line.strip()
            if not sha:
                continue
            hex_to_sha(sha)
            shallows.append(sha)
    finally:
        shallow_file.close()
    return shallows
def __init__(self, pack_store, pack_checksum=None):
    """Create a new MemoryPackIndex.

    :param pack_store: Pack store record whose index entries are loaded
        from the datastore
    :param pack_checksum: Optional pack checksum
    """
    self._by_sha = {}
    self._entries = []
    q = db.Query(PackStoreIndex)
    q.filter("packref =", pack_store)
    for obj in q:
        sha = hex_to_sha(obj.sha)
        self._by_sha[sha] = obj.offset
        self._entries.append([sha, obj.offset, obj.crc32])
    self._pack_checksum = hex_to_sha(pack_store.checksum)
def update_hg_bookmarks(self, remote_name):
    try:
        bms = bookmarks.parse(self.repo)
        if remote_name:
            heads = self.git.remote_refs(remote_name)
        else:
            branches = self.bookbranch.split(',')
            heads = dict((i, self.git.ref(i.strip())) for i in branches)
        if remote_name:
            base_name = (remote_name + '/')
        else:
            base_name = ''
        for head, sha in heads.iteritems():
            if not sha:
                self.ui.warn(_("Could not resolve head %s.\n") % head)
                continue
            hgsha = hex_to_sha(self.map_hg_get(sha))
            if not head == 'HEAD':
                bms[base_name + head] = hgsha
        if heads:
            bookmarks.write(self.repo, bms)
    except AttributeError:
        self.ui.warn(_('creating bookmarks failed, do you have'
                       ' bookmarks enabled?\n'))
def test_add(self):
    myhexsha = "d80c186a03f423a81b39df39dc87fd269736ca86"
    x = Tree()
    x.add("myname", 0o100755, myhexsha)
    self.assertEqual(x["myname"], (0o100755, myhexsha))
    self.assertEqual('100755 myname\0' + hex_to_sha(myhexsha),
                     x.as_raw_string())
def test_add(self):
    myhexsha = b'd80c186a03f423a81b39df39dc87fd269736ca86'
    x = Tree()
    x.add(b'myname', 0o100755, myhexsha)
    self.assertEqual(x[b'myname'], (0o100755, myhexsha))
    self.assertEqual(b'100755 myname\0' + hex_to_sha(myhexsha),
                     x.as_raw_string())
def get_raw(self, name):
    """Obtain the raw text for an object.

    :param name: sha for the object.
    :return: tuple with numeric type and object contents.
    """
    if len(name) == 40:
        sha = hex_to_sha(name)
        hexsha = name
    elif len(name) == 20:
        sha = name
        hexsha = None
    else:
        raise AssertionError("Invalid object name %r" % name)
    for pack in self.packs:
        try:
            return pack.get_raw(sha)
        except KeyError:
            pass
    if hexsha is None:
        hexsha = sha_to_hex(name)
    ret = self._get_loose_object(hexsha)
    if ret is not None:
        return ret.type_num, ret.as_raw_string()
    for alternate in self.alternates:
        try:
            return alternate.get_raw(hexsha)
        except KeyError:
            pass
    raise KeyError(hexsha)
def get_raw(self, name):
    """Obtain the raw text for an object.

    :param name: sha for the object.
    :return: tuple with object type and object contents.
    """
    if len(name) == 40:
        sha = hex_to_sha(name)
        hexsha = name
    elif len(name) == 20:
        sha = name
        hexsha = None
    else:
        raise AssertionError
    for pack in self.packs:
        try:
            return pack.get_raw(sha)
        except KeyError:
            pass
    if hexsha is None:
        hexsha = sha_to_hex(name)
    ret = self._get_shafile(hexsha)
    if ret is not None:
        return ret.type, ret.as_raw_string()
    raise KeyError(hexsha)
def test_add(self):
    myhexsha = "d80c186a03f423a81b39df39dc87fd269736ca86"
    x = Tree()
    x.add("myname", 0100755, myhexsha)
    self.assertEquals(x["myname"], (0100755, myhexsha))
    self.assertEquals('100755 myname\0' + hex_to_sha(myhexsha),
                      x.as_raw_string())
def lookup_git_sha(self, sha):
    """Lookup a Git sha in the database.

    :param sha: Git object sha
    :return: (type, type_data) with type_data:
        commit: revid, tree sha
        blob: fileid, revid
        tree: fileid, revid
    """
    if len(sha) == 40:
        sha = hex_to_sha(sha)
    value = self.db[b"git\0" + sha]
    for data in value.splitlines():
        data = data.split(b"\0")
        type_name = data[0].decode('ascii')
        if type_name == "commit":
            if len(data) == 3:
                yield (type_name, (data[1], data[2], {}))
            else:
                yield (type_name, (data[1], data[2],
                                   {"testament3-sha1": data[3]}))
        elif type_name in ("tree", "blob"):
            yield (type_name, tuple(data[1:]))
        else:
            raise AssertionError("unknown type %r" % type_name)
def __setstate__(self, state):
    for key, value in state.items():
        setattr(self, key, value)
    if '_sha' in state and hasattr(self, 'sha'):
        self.sha()
    if '_sha' in state and hasattr(self, '_hexsha'):
        self._sha = hex_to_sha(self._hexsha)
def test_ext_ref(self):
    blob, = self.store_blobs(['blob'])
    f = BytesIO()
    entries = build_pack(f, [(REF_DELTA, (blob.id, 'blob1'))],
                         store=self.store)
    pack_iter = self.make_pack_iter(f)
    self.assertEntriesMatch([0], entries, pack_iter)
    self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
def test_ext_ref_multiple_times(self):
    blob, = self.store_blobs(['blob'])
    f = StringIO()
    entries = build_pack(f, [
        (REF_DELTA, (blob.id, 'blob1')),
        (REF_DELTA, (blob.id, 'blob2')),
    ], store=self.store)
    pack_iter = self.make_pack_iter(f)
    self.assertEntriesMatch([0, 1], entries, pack_iter)
    self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
def test_add_old_order(self):
    myhexsha = "d80c186a03f423a81b39df39dc87fd269736ca86"
    x = Tree()
    warnings.simplefilter("ignore", DeprecationWarning)
    try:
        x.add(0100755, "myname", myhexsha)
    finally:
        warnings.resetwarnings()
    self.assertEqual(x["myname"], (0100755, myhexsha))
    self.assertEqual("100755 myname\0" + hex_to_sha(myhexsha),
                     x.as_raw_string())
def object_index(self, sha):
    """Return the index in to the corresponding packfile for the object.

    Given the name of an object it will return the offset that object
    lives at within the corresponding pack file. If the pack file doesn't
    have the object then None will be returned.
    """
    if len(sha) == 40:
        sha = hex_to_sha(sha)
    return self._object_index(sha)
def update_hg_bookmarks(self, remote_name):
    try:
        bms = bookmarks.parse(self.repo)
        for head, sha in self.git.remote_refs(remote_name).iteritems():
            hgsha = hex_to_sha(self.map_hg_get(sha))
            if not head == 'HEAD':
                bms[remote_name + '/' + head] = hgsha
        bookmarks.write(self.repo, bms)
    except AttributeError:
        self.ui.warn(_('creating bookmarks failed, do you have'
                       ' bookmarks enabled?\n'))
def test_add_old_order(self):
    myhexsha = "d80c186a03f423a81b39df39dc87fd269736ca86"
    x = Tree()
    warnings.simplefilter("ignore", DeprecationWarning)
    try:
        x.add(0o100755, "myname", myhexsha)
    finally:
        warnings.resetwarnings()
    self.assertEqual(x["myname"], (0o100755, myhexsha))
    self.assertEqual('100755 myname\0' + hex_to_sha(myhexsha),
                     x.as_raw_string())
def _do_test_parse_tree(self, parse_tree):
    dir = os.path.join(os.path.dirname(__file__), "data", "trees")
    o = Tree.from_path(hex_to_filename(dir, tree_sha))
    self.assertEqual([("a", 0100644, a_sha), ("b", 0100644, b_sha)],
                     list(parse_tree(o.as_raw_string())))
    # test a broken tree that has a leading 0 on the file mode
    broken_tree = "0100644 foo\0" + hex_to_sha(a_sha)

    def eval_parse_tree(*args, **kwargs):
        return list(parse_tree(*args, **kwargs))

    self.assertEqual([("foo", 0100644, a_sha)],
                     eval_parse_tree(broken_tree))
    self.assertRaises(ObjectFormatException,
                      eval_parse_tree, broken_tree, strict=True)
def test_large(self):
    entry1_sha = hex_to_sha('4e6388232ec39792661e2e75db8fb117fc869ce6')
    entry2_sha = hex_to_sha('e98f071751bd77f59967bfa671cd2caebdccc9a2')
    entries = [(entry1_sha, 0xf2972d0830529b87, 24),
               (entry2_sha, (~0xf2972d0830529b87) & (2 ** 64 - 1), 92)]
    if not self._supports_large:
        self.assertRaises(TypeError, self.index, 'single.idx',
                          entries, pack_checksum)
        return
    idx = self.index('single.idx', entries, pack_checksum)
    self.assertEqual(idx.get_pack_checksum(), pack_checksum)
    self.assertEqual(2, len(idx))
    actual_entries = list(idx.iterentries())
    self.assertEqual(len(entries), len(actual_entries))
    for mine, actual in zip(entries, actual_entries):
        my_sha, my_offset, my_crc = mine
        actual_sha, actual_offset, actual_crc = actual
        self.assertEqual(my_sha, actual_sha)
        self.assertEqual(my_offset, actual_offset)
        if self._has_crc32_checksum:
            self.assertEqual(my_crc, actual_crc)
        else:
            self.assertTrue(actual_crc is None)
def test_ext_ref_multiple_times(self):
    (blob, ) = self.store_blobs([b"blob"])
    f = BytesIO()
    entries = build_pack(
        f,
        [
            (REF_DELTA, (blob.id, b"blob1")),
            (REF_DELTA, (blob.id, b"blob2")),
        ],
        store=self.store,
    )
    pack_iter = self.make_pack_iter(f)
    self.assertEntriesMatch([0, 1], entries, pack_iter)
    self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
def _do_test_parse_tree(self, parse_tree):
    dir = os.path.join(os.path.join(os.path.sep, 'tests'), 'data', 'trees')
    o = Tree.from_path(hex_to_filename(dir, tree_sha))
    self.assertEquals([('a', 0100644, a_sha), ('b', 0100644, b_sha)],
                      list(parse_tree(o.as_raw_string())))
    # test a broken tree that has a leading 0 on the file mode
    broken_tree = '0100644 foo\0' + hex_to_sha(a_sha)

    def eval_parse_tree(*args, **kwargs):
        return list(parse_tree(*args, **kwargs))

    self.assertEquals([('foo', 0100644, a_sha)],
                      eval_parse_tree(broken_tree))
    self.assertRaises(ObjectFormatException,
                      eval_parse_tree, broken_tree, strict=True)
def test_check(self):
    t = Tree
    sha = hex_to_sha(a_sha)

    # filenames
    self.assertCheckSucceeds(t, b'100644 .a\0' + sha)
    self.assertCheckFails(t, b'100644 \0' + sha)
    self.assertCheckFails(t, b'100644 .\0' + sha)
    self.assertCheckFails(t, b'100644 a/a\0' + sha)
    self.assertCheckFails(t, b'100644 ..\0' + sha)
    self.assertCheckFails(t, b'100644 .git\0' + sha)

    # modes
    self.assertCheckSucceeds(t, b'100644 a\0' + sha)
    self.assertCheckSucceeds(t, b'100755 a\0' + sha)
    self.assertCheckSucceeds(t, b'160000 a\0' + sha)
    # TODO more whitelisted modes
    self.assertCheckFails(t, b'123456 a\0' + sha)
    self.assertCheckFails(t, b'123abc a\0' + sha)
    # should fail check, but parses ok
    self.assertCheckFails(t, b'0100644 foo\0' + sha)

    # shas
    self.assertCheckFails(t, b'100644 a\0' + (b'x' * 5))
    self.assertCheckFails(t, b'100644 a\0' + (b'x' * 18) + b'\0')
    self.assertCheckFails(
        t, b'100644 a\0' + (b'x' * 21) + b'\n100644 b\0' + sha)

    # ordering
    sha2 = hex_to_sha(b_sha)
    self.assertCheckSucceeds(
        t, b'100644 a\0' + sha + b'\n100644 b\0' + sha)
    self.assertCheckSucceeds(
        t, b'100644 a\0' + sha + b'\n100644 b\0' + sha2)
    self.assertCheckFails(t, b'100644 a\0' + sha + b'\n100755 a\0' + sha2)
    self.assertCheckFails(t, b'100644 b\0' + sha2 + b'\n100644 a\0' + sha)
def _split_proto_line(line, allowed):
    """Split a line read from the wire.

    :param line: The line read from the wire.
    :param allowed: An iterable of command names that should be allowed.
        Command names not listed below as possible return values will be
        ignored.  If None, any commands from the possible return values are
        allowed.
    :return: a tuple having one of the following forms:
        ('want', obj_id)
        ('have', obj_id)
        ('done', None)
        (None, None)  (for a flush-pkt)

    :raise UnexpectedCommandError: if the line cannot be parsed into one of the
        allowed return values.
    """
    if not line:
        fields = [None]
    else:
        fields = line.rstrip('\n').split(' ', 1)
    command = fields[0]
    if allowed is not None and command not in allowed:
        raise UnexpectedCommandError(command)
    try:
        if len(fields) == 1 and command in ('done', None):
            return (command, None)
        elif len(fields) == 2:
            if command in ('want', 'have', 'shallow', 'unshallow'):
                hex_to_sha(fields[1])
                return tuple(fields)
            elif command == 'deepen':
                return command, int(fields[1])
    except (TypeError, AssertionError) as e:
        raise GitProtocolError(e)
    raise GitProtocolError('Received invalid line from client: %s' % line)
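# Illustrative call to _split_proto_line above (not taken from dulwich's
# test suite): it assumes the function is in scope together with dulwich's
# hex_to_sha, GitProtocolError and UnexpectedCommandError.
command, obj_id = _split_proto_line(
    'want d80c186a03f423a81b39df39dc87fd269736ca86\n',
    allowed=('want', 'have', 'done', None))
assert command == 'want'
assert obj_id == 'd80c186a03f423a81b39df39dc87fd269736ca86'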
def _do_test_parse_tree(self, parse_tree):
    dir = os.path.join(os.path.dirname(__file__), 'data', 'trees')
    o = Tree.from_path(hex_to_filename(dir, tree_sha))
    self.assertEqual([('a', 0o100644, a_sha), ('b', 0o100644, b_sha)],
                     list(parse_tree(o.as_raw_string())))
    # test a broken tree that has a leading 0 on the file mode
    broken_tree = '0100644 foo\0' + hex_to_sha(a_sha)

    def eval_parse_tree(*args, **kwargs):
        return list(parse_tree(*args, **kwargs))

    self.assertEqual([('foo', 0o100644, a_sha)],
                     eval_parse_tree(broken_tree))
    self.assertRaises(ObjectFormatException,
                      eval_parse_tree, broken_tree, strict=True)
def test_ext_ref_chain_degenerate(self):
    # Test a degenerate case where the sender is sending a REF_DELTA
    # object that expands to an object already in the repository.
    blob, = self.store_blobs(['blob'])
    blob2, = self.store_blobs(['blob2'])
    assert blob.id < blob2.id

    f = BytesIO()
    entries = build_pack(f, [
        (REF_DELTA, (blob.id, 'blob2')),
        (REF_DELTA, (0, 'blob3')),
    ], store=self.store)
    pack_iter = self.make_pack_iter(f)
    self.assertEntriesMatch([0, 1], entries, pack_iter)
    self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs())
def write_cache_entry(f, entry):
    """Write an index entry to a file.

    :param f: File object
    :param entry: Entry to write, tuple with:
        (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags)
    """
    beginoffset = f.tell()
    (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = entry
    write_cache_time(f, ctime)
    write_cache_time(f, mtime)
    flags = len(name) | (flags & ~0x0fff)
    f.write(struct.pack(">LLLLLL20sH", dev, ino, mode, uid, gid, size,
                        hex_to_sha(sha), flags))
    f.write(name)
    real_size = ((f.tell() - beginoffset + 8) & ~7)
    f.write("\0" * ((beginoffset + real_size) - f.tell()))
def test_single(self):
    entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8")
    my_entries = [(entry_sha, 178, 42)]
    idx = self.index("single.idx", my_entries, pack_checksum)
    self.assertEquals(idx.get_pack_checksum(), pack_checksum)
    self.assertEquals(1, len(idx))
    actual_entries = list(idx.iterentries())
    self.assertEquals(len(my_entries), len(actual_entries))
    for mine, actual in zip(my_entries, actual_entries):
        my_sha, my_offset, my_crc = mine
        actual_sha, actual_offset, actual_crc = actual
        self.assertEquals(my_sha, actual_sha)
        self.assertEquals(my_offset, actual_offset)
        if self._has_crc32_checksum:
            self.assertEquals(my_crc, actual_crc)
        else:
            self.assertTrue(actual_crc is None)
def test_single(self):
    entry_sha = hex_to_sha('6f670c0fb53f9463760b7295fbb814e965fb20c8')
    my_entries = [(entry_sha, 178, 42)]
    idx = self.index('single.idx', my_entries, pack_checksum)
    self.assertEqual(idx.get_pack_checksum(), pack_checksum)
    self.assertEqual(1, len(idx))
    actual_entries = list(idx.iterentries())
    self.assertEqual(len(my_entries), len(actual_entries))
    for mine, actual in zip(my_entries, actual_entries):
        my_sha, my_offset, my_crc = mine
        actual_sha, actual_offset, actual_crc = actual
        self.assertEqual(my_sha, actual_sha)
        self.assertEqual(my_offset, actual_offset)
        if self._has_crc32_checksum:
            self.assertEqual(my_crc, actual_crc)
        else:
            self.assertTrue(actual_crc is None)
def test_iterobjects(self):
    p = self.get_pack_data(pack1_sha)
    commit_data = ('tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n'
                   'author James Westby <*****@*****.**> '
                   '1174945067 +0100\n'
                   'committer James Westby <*****@*****.**> '
                   '1174945067 +0100\n'
                   '\n'
                   'Test commit\n')
    blob_sha = '6f670c0fb53f9463760b7295fbb814e965fb20c8'
    tree_data = '100644 a\0%s' % hex_to_sha(blob_sha)
    actual = []
    for offset, type_num, chunks, crc32 in p.iterobjects():
        actual.append((offset, type_num, ''.join(chunks), crc32))
    self.assertEqual([(12, 1, commit_data, 3775879613L),
                      (138, 2, tree_data, 912998690L),
                      (178, 3, 'test 1\n', 1373561701L)],
                     actual)
def write_cache_entry(f, entry):
    """Write an index entry to a file.

    :param f: File object
    :param entry: Entry to write, tuple with:
        (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags)
    """
    beginoffset = f.tell()
    (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = entry
    write_cache_time(f, ctime)
    write_cache_time(f, mtime)
    flags = len(name) | (flags & ~0x0fff)
    f.write(
        struct.pack(">LLLLLL20sH", dev, ino, mode, uid, gid, size,
                    hex_to_sha(sha), flags))
    f.write(name)
    real_size = ((f.tell() - beginoffset + 8) & ~7)
    f.write("\0" * ((beginoffset + real_size) - f.tell()))
def add_object(self, obj, bzr_key_data, path):
    if isinstance(obj, tuple):
        (type_name, hexsha) = obj
        sha = hex_to_sha(hexsha)
    else:
        type_name = obj.type_name.decode('ascii')
        sha = obj.sha().digest()
    if type_name == "commit":
        self.db[b"commit\0" + self.revid] = b"\0".join((sha, obj.tree))
        if type(bzr_key_data) is not dict:
            raise TypeError(bzr_key_data)
        type_data = (self.revid, obj.tree)
        try:
            type_data += (bzr_key_data["testament3-sha1"],)
        except KeyError:
            pass
        self._commit = obj
    elif type_name == "blob":
        if bzr_key_data is None:
            return
        self.db[b"\0".join(
            (b"blob", bzr_key_data[0], bzr_key_data[1]))] = sha
        type_data = bzr_key_data
    elif type_name == "tree":
        if bzr_key_data is None:
            return
        type_data = bzr_key_data
    else:
        raise AssertionError
    entry = b"\0".join((type_name.encode('ascii'), ) + type_data) + b"\n"
    key = b"git\0" + sha
    try:
        oldval = self.db[key]
    except KeyError:
        self.db[key] = entry
    else:
        if not oldval.endswith(b'\n'):
            self.db[key] = b"".join([oldval, b"\n", entry])
        else:
            self.db[key] = b"".join([oldval, entry])
def get_raw(self, name):
    """Obtain the raw fulltext for an object.

    Args:
      name: sha for the object.
    Returns: tuple with numeric type and object contents.
    """
    if name == ZERO_SHA:
        raise KeyError(name)
    if len(name) == 40:
        sha = hex_to_sha(name)
        hexsha = name
    elif len(name) == 20:
        sha = name
        hexsha = None
    else:
        raise AssertionError("Invalid object name %r" % (name,))
    for pack in self._iter_cached_packs():
        try:
            return pack.get_raw(sha)
        except (KeyError, PackFileDisappeared):
            pass
    if hexsha is None:
        hexsha = sha_to_hex(name)
    ret = self._get_loose_object(hexsha)
    if ret is not None:
        return ret.type_num, ret.as_raw_string()
    # Maybe something else has added a pack with the object
    # in the mean time?
    for pack in self._update_pack_cache():
        try:
            return pack.get_raw(sha)
        except KeyError:
            pass
    for alternate in self.alternates:
        try:
            return alternate.get_raw(hexsha)
        except KeyError:
            pass
    raise KeyError(hexsha)
def test_iterobjects(self):
    with self.get_pack_data(pack1_sha) as p:
        commit_data = (
            b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n"
            b"author James Westby <*****@*****.**> "
            b"1174945067 +0100\n"
            b"committer James Westby <*****@*****.**> "
            b"1174945067 +0100\n"
            b"\n"
            b"Test commit\n")
        blob_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
        tree_data = b"100644 a\0" + hex_to_sha(blob_sha)
        actual = []
        for offset, type_num, chunks, crc32 in p.iterobjects():
            actual.append((offset, type_num, b"".join(chunks), crc32))
        self.assertEqual(
            [
                (12, 1, commit_data, 3775879613),
                (138, 2, tree_data, 912998690),
                (178, 3, b"test 1\n", 1373561701),
            ],
            actual,
        )
def test_simple(self):
    myhexsha = "d80c186a03f423a81b39df39dc87fd269736ca86"
    x = Tree()
    x["myname"] = (0100755, myhexsha)
    self.assertEquals('100755 myname\0' + hex_to_sha(myhexsha),
                      x.as_raw_string())
def test_simple(self):
    self.assertEqual(b'\xab\xcd' * 10, hex_to_sha(b'abcd' * 10))
def test_simple(self):
    self.assertEqual("\xab\xcd" * 10, hex_to_sha("abcd" * 10))
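# For context: a minimal sketch of the conversion exercised throughout these
# examples. This is an assumption-level re-implementation for illustration,
# not necessarily dulwich's exact code; dulwich defines hex_to_sha and
# sha_to_hex in dulwich.objects.
import binascii


def hex_to_sha_sketch(hex):
    """Take a 40-character hex SHA1 and return its 20-byte binary form."""
    assert len(hex) == 40, "Incorrect length of hexsha: %r" % (hex,)
    return binascii.unhexlify(hex)


def sha_to_hex_sketch(sha):
    """Take a 20-byte binary SHA1 digest and return its hex representation."""
    hexsha = binascii.hexlify(sha)
    assert len(hexsha) == 40, "Incorrect length of sha string: %r" % (sha,)
    return hexsha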
def test_head(self):
    output = self._run_git(["rev-parse", "HEAD"])
    head_sha = output.rstrip(b"\n")
    hex_to_sha(head_sha)
    self.assertEqual(head_sha, self._repo.refs[b"HEAD"])
        self.assertEqual(crc32, unpacked.crc32)
        self.assertEqual('x', unused)

    def test_write_pack_object_sha(self):
        f = BytesIO()
        f.write('header')
        offset = f.tell()
        sha_a = sha1('foo')
        sha_b = sha_a.copy()
        write_pack_object(f, Blob.type_num, 'blob', sha=sha_a)
        self.assertNotEqual(sha_a.digest(), sha_b.digest())
        sha_b.update(f.getvalue()[offset:])
        self.assertEqual(sha_a.digest(), sha_b.digest())


pack_checksum = hex_to_sha('721980e866af9a5f93ad674144e1459b8ba3e7b7')


class BaseTestPackIndexWriting(object):

    def assertSucceeds(self, func, *args, **kwargs):
        try:
            func(*args, **kwargs)
        except ChecksumMismatch as e:
            self.fail(e)

    def index(self, filename, entries, pack_checksum):
        raise NotImplementedError(self.index)

    def test_empty(self):
        idx = self.index('empty.idx', [], pack_checksum)
def test_head(self):
    output = self._run_git(['rev-parse', 'HEAD'])
    head_sha = output.rstrip(b'\n')
    hex_to_sha(head_sha)
    self.assertEqual(head_sha, self._repo.refs[b'HEAD'])