Example #1
    def test_loose_correctness(self):
        """based on the pack(s) of our packed object DB, we will just copy and verify all objects in the back
        into the loose object db (memory).
        This should help finding dormant issues like this one https://github.com/gitpython-developers/GitPython/issues/220
        faster
        :note: It doesn't seem this test can find the issue unless the given pack contains highly compressed
        data files, like archives."""
        from gitdb.util import bin_to_hex
        pdb = GitDB(os.path.join(self.gitrepopath, 'objects'))
        mdb = MemoryDB()
        for c, sha in enumerate(pdb.sha_iter()):
            ostream = pdb.stream(sha)
            # the issue only showed on larger files which are hardly compressible ...
            if ostream.type != str_blob_type:
                continue
            istream = IStream(ostream.type, ostream.size, ostream.stream)
            mdb.store(istream)
            assert istream.binsha == sha, "Failed on object %s" % bin_to_hex(
                sha).decode('ascii')
            # this can fail ... sometimes, so the pack's dataset should be huge
            assert len(mdb.stream(sha).read()) == ostream.size

            if c and c % 1000 == 0:
                print(
                    "Verified %i loose object compression/decompression cycles"
                    % c,
                    file=sys.stderr)
            mdb._cache.clear()
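
The snippet above is a test method lifted out of its class, so its imports and fixtures are not shown. Below is a minimal, self-contained sketch of the same pack-to-memory round-trip, assuming gitdb's public API (GitDB, MemoryDB, IStream, str_blob_type, bin_to_hex); repo_objects_path is a hypothetical placeholder for a repository's .git/objects directory.

import sys

from gitdb import GitDB, IStream, MemoryDB
from gitdb.typ import str_blob_type
from gitdb.util import bin_to_hex


def verify_pack_round_trip(repo_objects_path):
    """Copy every blob from the packed DB into an in-memory loose DB and verify it.
    repo_objects_path is a placeholder for a repository's .git/objects directory."""
    pdb = GitDB(repo_objects_path)   # reads both packed and loose objects
    mdb = MemoryDB()                 # stores objects loose, in memory
    for count, sha in enumerate(pdb.sha_iter()):
        ostream = pdb.stream(sha)
        if ostream.type != str_blob_type:
            continue                 # the original issue only showed on blobs
        istream = IStream(ostream.type, ostream.size, ostream.stream)
        mdb.store(istream)           # re-compresses the data and computes its SHA-1
        assert istream.binsha == sha, "Failed on object %s" % bin_to_hex(sha).decode('ascii')
        assert len(mdb.stream(sha).read()) == ostream.size
        if count and count % 1000 == 0:
            print("Verified %i compression/decompression cycles" % count, file=sys.stderr)
        mdb._cache.clear()           # keep memory usage bounded between objects

Clearing mdb._cache after each object keeps peak memory roughly constant even when the pack is large, which is why the original test does the same.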
Example #2
    def test_decompress_reader_special_case(self):
        odb = LooseObjectDB(fixture_path('objects'))
        mdb = MemoryDB()
        for sha in (b'888401851f15db0eed60eb1bc29dec5ddcace911',
                    b'7bb839852ed5e3a069966281bb08d50012fb309b',):
            ostream = odb.stream(hex_to_bin(sha))

            # if there is a bug, we will be missing exactly one byte!
            data = ostream.read()
            assert len(data) == ostream.size

            # Putting it back in should yield nothing new - after all, we have the same data
            dump = mdb.store(IStream(ostream.type, ostream.size, BytesIO(data)))
            assert dump.hexsha == sha
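
The two fixture SHAs above exercise a decompression corner case: a buggy reader would return exactly one byte less than ostream.size. Here is a standalone sketch of the same round-trip, assuming gitdb's LooseObjectDB, MemoryDB, IStream, and hex_to_bin; objects_dir and hexsha are placeholders (hexsha as ASCII bytes, as in the test above).

from io import BytesIO

from gitdb import IStream, LooseObjectDB, MemoryDB
from gitdb.util import hex_to_bin


def round_trip_loose_object(objects_dir, hexsha):
    """Read one loose object fully, then store it again; both paths must agree.
    objects_dir and hexsha are placeholders; hexsha is expected as ASCII bytes."""
    odb = LooseObjectDB(objects_dir)
    mdb = MemoryDB()
    ostream = odb.stream(hex_to_bin(hexsha))
    data = ostream.read()
    # A buggy decompressing reader would return one byte less than declared.
    assert len(data) == ostream.size
    # Writing the bytes back must reproduce the very same SHA-1.
    dump = mdb.store(IStream(ostream.type, ostream.size, BytesIO(data)))
    assert dump.hexsha == hexsha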