def test_base(self):
    """Walk every loose object in the fixture .git/objects database, verify
    info/stream consistency, store a new blob, then exercise the async
    streaming interface.
    """
    ldb = LooseObjectDB(fixture_path("../../../.git/objects"))

    for sha1 in ldb.sha_iter():
        oinfo = ldb.info(sha1)
        ostream = ldb.stream(sha1)
        # info and stream tuples share their first three fields
        assert oinfo[:3] == ostream[:3]

        # stream must yield exactly the number of bytes it advertises
        assert len(ostream.read()) == ostream.size
        assert ldb.has_object(oinfo.binsha)
    # END for each sha in database

    # assure we close all files held by the last iteration's stream/info
    try:
        del(ostream)
        del(oinfo)
    except UnboundLocalError:
        pass
    # END ignore exception if there are no loose objects

    data = "my data"
    istream = IStream("blob", len(data), StringIO(data))

    # the object does not yet have a sha
    assert istream.binsha is None
    ldb.store(istream)
    # now the sha is set (20-byte binary sha1)
    assert len(istream.binsha) == 20
    assert ldb.has_object(istream.binsha)

    # async operation
    # Create a reader from an iterator
    reader = IteratorReader(ldb.sha_iter())

    # get reader for object streams
    info_reader = ldb.stream_async(reader)

    # read one
    info = info_reader.read(1)[0]

    # read all the rest until depletion
    ostreams = info_reader.read()

    # set the pool to use two threads
    pool.set_size(2)

    # synchronize the mode of operation
    # NOTE(review): size 0 presumably switches the pool to serial mode — confirm
    # against the pool implementation; `pool` is a module-level object defined
    # elsewhere in this file's imports.
    pool.set_size(0)
def test_base(self):
    """Iterate all loose objects of the repository database, checking that
    info and stream agree, then store a fresh blob and confirm it is
    retrievable by its computed sha.
    """
    ldb = LooseObjectDB(os.path.join(self.gitrepopath, 'objects'))

    for binsha in ldb.sha_iter():
        info = ldb.info(binsha)
        stream = ldb.stream(binsha)
        # the first three fields (sha, type, size) must match between views
        assert info[:3] == stream[:3]
        # the stream delivers exactly as many bytes as it declares
        assert len(stream.read()) == stream.size
        assert ldb.has_object(info.binsha)
    # END for each sha in database

    # drop the last stream/info so any open file handles are released
    try:
        del stream
        del info
    except UnboundLocalError:
        # database contained no loose objects at all — nothing to release
        pass
    # END ignore exception if there are no loose objects

    payload = "my data".encode("ascii")
    new_blob = IStream("blob", len(payload), BytesIO(payload))

    # before storing, no sha has been computed yet
    assert new_blob.binsha is None
    ldb.store(new_blob)
    # storing fills in the 20-byte binary sha
    assert len(new_blob.binsha) == 20
    assert ldb.has_object(new_blob.binsha)
def test_base(self):
    """Verify loose-object iteration (info/stream agreement, full reads,
    membership) and that storing a new blob assigns it a binary sha.
    """
    ldb = LooseObjectDB(os.path.join(self.gitrepopath, "objects"))

    for sha1 in ldb.sha_iter():
        oinfo = ldb.info(sha1)
        ostream = ldb.stream(sha1)
        # info and stream share their first three fields (sha, type, size)
        assert oinfo[:3] == ostream[:3]

        # declared size must equal the actual number of readable bytes
        assert len(ostream.read()) == ostream.size
        assert ldb.has_object(oinfo.binsha)
    # END for each sha in database

    # assure we close all files from the final loop iteration
    try:
        del (ostream)
        del (oinfo)
    except UnboundLocalError:
        pass
    # END ignore exception if there are no loose objects

    data = "my data".encode("ascii")
    istream = IStream("blob", len(data), BytesIO(data))

    # the object does not yet have a sha
    assert istream.binsha is None
    ldb.store(istream)
    # now the sha is set (20 raw bytes)
    assert len(istream.binsha) == 20
    assert ldb.has_object(istream.binsha)
def test_basics(self, path):
    """Exercise a LooseObjectDB rooted at *path*: object writing, sha
    iteration/size accounting, and short-sha resolution.

    :param path: directory to use as the database root (test fixture)
    :raise AssertionError: if any database invariant is violated
    """
    ldb = LooseObjectDB(path)

    # write data
    self._assert_object_writing(ldb)

    # verify sha iteration and size
    shas = list(ldb.sha_iter())
    assert shas and len(shas[0]) == 20  # binary shas are 20 bytes
    assert len(shas) == ldb.size()

    # verify find short object: both a long and a very short prefix must
    # resolve back to the same full sha
    long_sha = bin_to_hex(shas[-1])
    for short_sha in (long_sha[:20], long_sha[:5]):
        assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
    # END for each sha

    # an unknown prefix must raise; assertRaises replaces the deprecated
    # failUnlessRaises alias, which was removed in Python 3.12
    self.assertRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
def test_basics(self, path):
    """Exercise a LooseObjectDB rooted at *path*: synchronous and async
    object writing, sha iteration/size accounting, and short-sha resolution.

    :param path: directory to use as the database root (test fixture)
    :raise AssertionError: if any database invariant is violated
    """
    ldb = LooseObjectDB(path)

    # write data
    self._assert_object_writing(ldb)
    self._assert_object_writing_async(ldb)

    # verify sha iteration and size
    shas = list(ldb.sha_iter())
    assert shas and len(shas[0]) == 20  # binary shas are 20 bytes
    assert len(shas) == ldb.size()

    # verify find short object: both a long and a very short prefix must
    # resolve back to the same full sha
    long_sha = bin_to_hex(shas[-1])
    for short_sha in (long_sha[:20], long_sha[:5]):
        assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
    # END for each sha

    # an unknown prefix must raise; assertRaises replaces the deprecated
    # failUnlessRaises alias, which was removed in Python 3.12
    self.assertRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')