Example #1
0
	def test_decompress_reader_special_case(self):
		"""Regression check: decompressing this specific loose object must
		yield exactly ``size`` bytes."""
		sha_hex = '7bb839852ed5e3a069966281bb08d50012fb309b'
		db = PureLooseObjectODB(fixture_path('objects'))
		obj_stream = db.stream(hex_to_bin(sha_hex))
		
		# a faulty decompress reader would drop exactly one byte
		payload = obj_stream.read()
		assert len(payload) == obj_stream.size
Example #2
0
    def test_decompress_reader_special_case(self):
        """Regression check: reading this particular loose object must
        return exactly ``size`` bytes."""
        database = PureLooseObjectODB(fixture_path('objects'))
        binsha = hex_to_bin('7bb839852ed5e3a069966281bb08d50012fb309b')
        obj_stream = database.stream(binsha)

        # a buggy decompress reader drops exactly one byte
        data = obj_stream.read()
        assert len(data) == obj_stream.size
Example #3
0
 def test_base(self):
     """Exercise the loose-object database end to end: per-object info and
     stream reads, storing a new blob, and async stream retrieval."""
     db = PureLooseObjectODB(fixture_path("../../../.git/objects"))
     
     for binsha in db.sha_iter():
         obj_info = db.info(binsha)
         obj_stream = db.stream(binsha)
         # info and stream agree on the leading (sha, type, size) triple
         assert obj_info[:3] == obj_stream[:3]
         
         assert len(obj_stream.read()) == obj_stream.size
         assert db.has_object(obj_info.binsha)
     # END for each sha in database
     # assure we close all files; the loop variables are unbound if the
     # database happened to contain no loose objects
     try:
         del obj_stream
         del obj_info
     except UnboundLocalError:
         pass
     # END ignore exception if there are no loose objects
     
     payload = "my data"
     istream = IStream("blob", len(payload), StringIO(payload))
     
     # the object does not yet have a sha
     assert istream.binsha is None
     db.store(istream)
     # storing computed and assigned the 20-byte binary sha
     assert len(istream.binsha) == 20
     assert db.has_object(istream.binsha)
     
     # async operation: create a reader from an iterator of shas
     sha_reader = IteratorReader(db.sha_iter())
     
     # get reader for object streams
     stream_reader = db.stream_async(sha_reader)
     
     # read one
     first = stream_reader.read(1)[0]
     
     # read all the rest until depletion
     rest = stream_reader.read()
     
     # set the pool to use two threads
     pool.set_size(2)
     
     # synchronize the mode of operation
     pool.set_size(0)
Example #4
0
    def test_basics(self, path):
        """Verify object writing, sha iteration/size, and partial-sha
        resolution against a loose object database rooted at *path*."""
        ldb = PureLooseObjectODB(path)

        # write data
        self._assert_object_writing(ldb)
        self._assert_object_writing_async(ldb)

        # verify sha iteration and size
        shas = list(ldb.sha_iter())
        # binary shas are always 20 bytes long
        assert shas and len(shas[0]) == 20

        assert len(shas) == ldb.size()

        # verify find short object
        long_sha = bin_to_hex(shas[-1])
        for short_sha in (long_sha[:20], long_sha[:5]):
            assert bin_to_hex(ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
        # END for each sha

        # an unknown prefix must raise BadObject; use assertRaises instead of
        # the deprecated failUnlessRaises alias (removed in Python 3.12)
        self.assertRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
Example #5
0
    def test_basics(self, path):
        """Verify object writing, sha iteration/size, and partial-sha
        resolution against a loose object database rooted at *path*."""
        ldb = PureLooseObjectODB(path)

        # write data
        self._assert_object_writing(ldb)
        self._assert_object_writing_async(ldb)

        # verify sha iteration and size
        shas = list(ldb.sha_iter())
        # binary shas are always 20 bytes long
        assert shas and len(shas[0]) == 20

        assert len(shas) == ldb.size()

        # verify find short object
        long_sha = bin_to_hex(shas[-1])
        for short_sha in (long_sha[:20], long_sha[:5]):
            assert bin_to_hex(
                ldb.partial_to_complete_sha_hex(short_sha)) == long_sha
        # END for each sha

        # an unknown prefix must raise BadObject; use assertRaises instead of
        # the deprecated failUnlessRaises alias (removed in Python 3.12)
        self.assertRaises(BadObject, ldb.partial_to_complete_sha_hex, '0000')
Example #6
0
 def test_writing(self, path):
     """Check simple writes to an in-memory database and stream-copying
     its contents into a loose object database at *path*."""
     memdb = PureMemoryDB()
     
     # write data
     self._assert_object_writing_simple(memdb)
     
     # test stream copy: the loose db starts empty and receives every object
     loose_db = PureLooseObjectODB(path)
     assert loose_db.size() == 0
     copied = memdb.stream_copy(memdb.sha_iter(), loose_db)
     assert copied == memdb.size()
     
     # every object must now exist in both databases with identical contents
     assert loose_db.size() == memdb.size()
     for binsha in memdb.sha_iter():
         assert loose_db.has_object(binsha)
         assert loose_db.stream(binsha).read() == memdb.stream(binsha).read()
Example #7
0
    def test_base(self):
        """Exercise the loose-object database end to end: per-object info
        and stream reads, storing a new blob, and async stream retrieval."""
        db = PureLooseObjectODB(fixture_path("../../../.git/objects"))

        for binsha in db.sha_iter():
            obj_info = db.info(binsha)
            obj_stream = db.stream(binsha)
            # info and stream agree on the leading (sha, type, size) triple
            assert obj_info[:3] == obj_stream[:3]

            assert len(obj_stream.read()) == obj_stream.size
            assert db.has_object(obj_info.binsha)
        # END for each sha in database
        # assure we close all files; the loop variables are unbound if the
        # database happened to contain no loose objects
        try:
            del obj_stream
            del obj_info
        except UnboundLocalError:
            pass
        # END ignore exception if there are no loose objects

        payload = "my data"
        istream = IStream("blob", len(payload), StringIO(payload))

        # the object does not yet have a sha
        assert istream.binsha is None
        db.store(istream)
        # storing computed and assigned the 20-byte binary sha
        assert len(istream.binsha) == 20
        assert db.has_object(istream.binsha)

        # async operation: create a reader from an iterator of shas
        sha_reader = IteratorReader(db.sha_iter())

        # get reader for object streams
        stream_reader = db.stream_async(sha_reader)

        # read one
        first = stream_reader.read(1)[0]

        # read all the rest until depletion
        rest = stream_reader.read()

        # set the pool to use two threads
        pool.set_size(2)

        # synchronize the mode of operation
        pool.set_size(0)