Example #1
class TestMerkleLeaf(unittest.TestCase):
    """ Test MerkleLeaf functionality. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific SHA type. """

        check_hashtype(hashtype)
        # pylint: disable=redefined-variable-type
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()
        else:
            raise NotImplementedError

        file_name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = MerkleLeaf(file_name, hashtype, hash0)
        self.assertEqual(file_name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        file_name2 = file_name
        while file_name2 == file_name:
            file_name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = MerkleLeaf(file_name2, hashtype, hash1)
        self.assertEqual(file_name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertTrue(leaf0.equal(leaf0))
        self.assertFalse(leaf0.equal(leaf1))

        # XXX USE NLHTree instead
        # pair0    = leaf0.toPair()
        # leaf0bis = MerkleLeaf.createFromPair(pair0)
        # self.assertEqual(leaf0bis, leaf0)

        # pair1    = leaf1.toPair()
        # leaf1bis = MerkleLeaf.createFromPair(pair1)
        # self.assertEqual(leaf1bis, leaf1)

    def test_simple_constructor(self):
        """ Test constructor for various SHA types. """
        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype=hashtype)
Example #2
class TestPKCS7Padding(unittest.TestCase):
    """ test PKCS7 padding """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_padding(self, length):
        """ length is in bytes. """

        data_ = bytearray(length)
        self.rng.next_bytes(data_)
        data = bytes(data_)

        padder = padding.PKCS7(AES_BLOCK_BITS).padder()
        padded_data = padder.update(data) + padder.finalize()

        # round up to the next higher number of whole blocks
        if length % AES_BLOCK_BYTES == 0:
            expected_len = length + AES_BLOCK_BYTES
        else:
            expected_len = ((length + AES_BLOCK_BYTES - 1) // AES_BLOCK_BYTES)\
                * AES_BLOCK_BYTES
        delta = expected_len - length  # number of bytes of padding
        self.assertEqual(padded_data[-1], delta)

        self.assertEqual(len(padded_data), expected_len)

        unpadder = padding.PKCS7(AES_BLOCK_BITS).unpadder()
        data_out = unpadder.update(padded_data) + unpadder.finalize()
        self.assertEqual(data_out, data)

    def test_padding(self):
        """ test PKCS7 padding """

        self.do_test_padding(7)
        self.do_test_padding(15)
        self.do_test_padding(16)
        self.do_test_padding(17)
        self.do_test_padding(31)
        self.do_test_padding(32)
        self.do_test_padding(33)
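
The expected-length arithmetic above captures the PKCS#7 rule: the pad is always 1 to AES_BLOCK_BYTES bytes, every pad byte holds the pad length, and an already-aligned input gains a full extra block. A minimal sketch of the same rule, independent of the cryptography package (the 16-byte block size mirrors AES_BLOCK_BYTES; the helper names are illustrative):

def pkcs7_pad(data, block_size=16):
    """Append PKCS#7 padding: 1..block_size bytes, each equal to the pad length."""
    pad_len = block_size - (len(data) % block_size)   # never 0; full block if aligned
    return data + bytes([pad_len]) * pad_len

def pkcs7_unpad(padded):
    """Strip PKCS#7 padding (no consistency checks in this sketch)."""
    return padded[:-padded[-1]]

for length in (7, 15, 16, 17, 31, 32, 33):
    padded = pkcs7_pad(bytes(length))
    assert len(padded) % 16 == 0                   # whole blocks only
    assert padded[-1] == len(padded) - length      # last byte == pad byte count
    assert pkcs7_unpad(padded) == bytes(length)    # round trip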
Example #3
class TestTimestamp(unittest.TestCase):
    """ Ostensibly tests BuildList timestamp.  (Why?) """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_sha1_file(self):
        """
        Verify functioning of xlu.file_sha1hex().
        """

        blk_count = 1 + self.rng.next_int16(3)     # so 1 to 3
        # last block will usually be only partially populated
        byte_count = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)

        data = bytearray(byte_count)        # that many null bytes
        self.rng.next_bytes(data)           # fill with random data
        d_val = hashlib.new('sha1')
        d_val.update(data)
        hash_ = d_val.hexdigest()

        # make a unique test file name
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        file_hash = file_sha1hex(path_to_file)

        self.assertEqual(hash_, file_hash)
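
file_sha1hex() comes from xlu and is not shown here; the test only depends on it returning the hex SHA-1 of the file's contents. A stand-in with the same contract might look like the sketch below (the name file_sha1hex_sketch and the 64 KiB read size are assumptions, not the xlu implementation):

import hashlib

def file_sha1hex_sketch(path_to_file, block_size=64 * 1024):
    """Return the hex SHA-1 digest of a file, reading it in blocks."""
    sha = hashlib.sha1()
    with open(path_to_file, 'rb') as file:
        for block in iter(lambda: file.read(block_size), b''):
            sha.update(block)
    return sha.hexdigest()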
Example #4
class TestNLHLeaf(unittest.TestCase):
    """ Test NLHLeaf-related functions. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific hash. """

        check_hashtype(hashtype)
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError

        name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = NLHLeaf(name, hash0, hashtype)
        self.assertEqual(name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        name2 = name
        while name2 == name:
            name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = NLHLeaf(name2, hash1, hashtype)
        self.assertEqual(name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertEqual(leaf0, leaf0)
        self.assertEqual(leaf1, leaf1)
        self.assertFalse(leaf0 == leaf1)

        leaf0c = leaf0.clone()
        self.assertEqual(leaf0c, leaf0)

        leaf1c = leaf1.clone()
        self.assertEqual(leaf1c, leaf1)

    def test_simplest_constructor(self):
        """ Test simple constructor for various hashes. """

        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype)
Example #5
class TestRandomDir(unittest.TestCase):
    """ Test building quasi-random data files and directory structures. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################

    def do_test_random_dir(self, hashtype):
        """ Test building random directories with specific SHA hash type. """
        check_hashtype(hashtype)
        depth = 1 + self.rng.next_int16(3)       # so 1 to 3
        width = 1 + self.rng.next_int16(16)      # so 1 to 16

        blk_count = 1 + self.rng.next_int16(3)     # so 1 to 3
        # last block will usually be only partially populated
        max_len = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)
        min_len = 1

        # we want the directory name to be unique
        path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(path_to_dir):
            path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))

        self.rng.next_data_dir(path_to_dir, depth, width, max_len, min_len)

        data = bytearray(max_len)            # that many null bytes
        self.rng.next_bytes(data)            # fill with random data
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint:disable=no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.hexdigest()
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        if hashtype == HashTypes.SHA1:
            file_hash = file_sha1hex(path_to_file)
        elif hashtype == HashTypes.SHA2:
            file_hash = file_sha2hex(path_to_file)
        elif hashtype == HashTypes.SHA3:
            file_hash = file_sha3hex(path_to_file)
        elif hashtype == HashTypes.BLAKE2B:
            file_hash = file_blake2b_hex(path_to_file)
        else:
            raise NotImplementedError
        self.assertEqual(hash_, file_hash)

    def test_random_dir(self):
        """ Test building random directories with supported SHA hash types. """
        for hashtype in HashTypes:
            self.do_test_random_dir(hashtype)
Example #6
class TestFieldImpl(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())

#       data = StringIO(ZOGGERY_PROTO_SPEC)
#       p = StringProtoSpecParser(data)    # data should be file-like
#       self.str_obj_model = p.parse()     # object model from string serialization
#       self.proto_name = self.str_obj_model.name  # dotted name of the protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def make_registries(self, protocol):
        node_reg = R.NodeReg()
        proto_reg = R.ProtoReg(protocol, node_reg)
        msg_reg = R.MsgReg(proto_reg)
        return (node_reg, proto_reg, msg_reg)

    def le_msg_values(self):
        """ returns a list """
        timestamp = int(time.time())
        node_id = [0] * 20
        key = [0] * 20
        length = self.rng.next_int32(256 * 256)
        # let's have some random bytes
        self.rng.next_bytes(node_id)
        self.rng.next_bytes(key)
        by_ = 'who is responsible'
        path = '/home/jdd/tarballs/something.tar.gz'
        return [timestamp, node_id, key, length, by_, path]

    def lil_big_msg_values(self):
        """
        This returns a list of random-ish values in order by field type
        so that values[_F_FLOAT], for example, is a random float value.
        """
        values = []

        # 2016-03-30 This is NOT in sync with littleBigTest.py,
        #   because I have added a None for lMsg at _L_MSG

        values.append(self.rng.next_boolean())       # vBoolReqField         0
        values.append(self.rng.next_int16())         # vEnumReqField         1

        values.append(self.rng.next_int32())         # vInt32ReqField        2
        values.append(self.rng.next_int64())         # vInt64ReqField        3

        values.append(self.rng.next_int32())         # vuInt32ReqField       4
        values.append(self.rng.next_int32())         # vuInt64ReqField       5
        values.append(self.rng.next_int64())         # vsInt32ReqField       6
        values.append(self.rng.next_int64())         # vsInt64ReqField       7

        values.append(self.rng.next_int32())         # fsInt32ReqField       8
        values.append(self.rng.next_int32())         # fuInt32ReqField       9
        values.append(self.rng.next_real())          # fFloatReqField        10

        values.append(self.rng.next_int64())         # fsInt64ReqField       11
        values.append(self.rng.next_int64())         # fuInt64ReqField       12

        values.append(self.rng.next_real())          # fDoubleReqField       13

        # lStringReqField       14
        values.append(self.rng.next_file_name(16))

        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))               # lBytesReqField        15

        values.append(None)                         # <-------- for lMsg    16

        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))               # fBytes16ReqField      17

        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))               # fBytes20ReqField      18

        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))               # fBytes32ReqField      19

        return values

    # actual unit tests #############################################

    def check_field_impl_against_spec(self,
                                      proto_name, msg_name,   # not actually tested
                                      field_spec, value):    # significant for tests

        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)         # a class
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("  %-20s %s" % (exc, cls.__dict__[exc]))

        self.assertIsNotNone(cls)
        file = cls(value)                                      # an instance
        self.assertIsNotNone(file)
        self.assertTrue(isinstance(file, cls))

        # instance attributes -----------------------------
        # we verify that the properties work correctly

        self.assertEqual(field_spec.name, file._name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)          # not an elegant test

        # instance attribute ------------------------------
        # we can read back the value assigned to the instance

        self.assertEqual(value, file.value)

        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for file
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))

    def test_field_impl(self):

        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)
        values = self.lil_big_msg_values()

        # DEBUG
        print("testFieldImpl: there are %d values" % len(values))
        # END

        # There are 20 values, one per field type (indices 0 through
        # F_BYTES32); _L_MSG is skipped below.

        for tstamp in range(FieldTypes.F_BYTES32 + 1):
            # DEBUG
            print("testFieldImpl: t = %d" % tstamp)
            # END
            if tstamp == FieldTypes.L_MSG:
                continue

            # default quantifier is Q_REQUIRED, default is None

            field_name = 'field%d' % tstamp
            field_spec = M.FieldSpec(
                msg_reg, field_name, tstamp, field_nbr=tstamp + 100)

            self.check_field_impl_against_spec(
                PROTOCOL_UNDER_TEST, MSG_UNDER_TEST,
                field_spec, values[tstamp])

    # TEST FIELD SPEC -----------------------------------------------

    def do_field_spec_test(self, name, field_type, quantifier=M.Q_REQUIRED,
                           field_nbr=0, default=None):

        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)

        # XXX Defaults are ignored for now.
        file = M.FieldSpec(
            msg_reg,
            name,
            field_type,
            quantifier,
            field_nbr,
            default)

        self.assertEqual(name, file.name)
        self.assertEqual(field_type, file.field_type_ndx)
        self.assertEqual(quantifier, file.quantifier)
        self.assertEqual(field_nbr, file.field_nbr)
        if default is not None:
            self.assertEqual(default, file.default)

        expected_repr = "%s %s%s @%d \n" % (
            name, file.field_type_name, M.q_name(quantifier), field_nbr)
        # DEFAULTS NOT SUPPORTED
        self.assertEqual(expected_repr, file.__repr__())

    def test_quantifiers(self):
        q_name = M.q_name
        self.assertEqual('', q_name(M.Q_REQUIRED))
        self.assertEqual('?', q_name(M.Q_OPTIONAL))
        self.assertEqual('*', q_name(M.Q_STAR))
        self.assertEqual('+', q_name(M.Q_PLUS))

    def test_field_spec(self):
        # default is not implemented yet
        self.do_field_spec_test('foo', FieldTypes.V_UINT32, M.Q_REQUIRED, 9)
        self.do_field_spec_test('bar', FieldTypes.V_SINT32, M.Q_STAR, 17)
        self.do_field_spec_test(
            'node_id',
            FieldTypes.F_BYTES20,
            M.Q_OPTIONAL,
            92)
        self.do_field_spec_test('tix', FieldTypes.V_BOOL, M.Q_PLUS, 147)
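
For reference, the __repr__ layout that do_field_spec_test expects is "name type quantifier @number" followed by a newline. A tiny illustration of that layout (the helper name and the 'vuint32'/'vsint32' type strings are hypothetical; only the format string comes from the test above):

def field_spec_repr(name, field_type_name, quant_mark, field_nbr):
    # same format string as expected_repr in do_field_spec_test
    return "%s %s%s @%d \n" % (name, field_type_name, quant_mark, field_nbr)

assert field_spec_repr('foo', 'vuint32', '', 9) == "foo vuint32 @9 \n"
assert field_spec_repr('bar', 'vsint32', '*', 17) == "bar vsint32* @17 \n"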
Example #7
class TestMerkleLeaf(unittest.TestCase):
    """ Test MerkleLeaf functionality. """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific SHA type. """

        check_hashtype(hashtype)
        if hashtype == HashTypes.SHA1:
            sha = XLSHA1()
        elif hashtype == HashTypes.SHA2:
            sha = XLSHA2()
        elif hashtype == HashTypes.SHA3:
            sha = XLSHA3()
        elif hashtype == HashTypes.BLAKE2B:
            sha = XLBLAKE2B_256()
        else:
            raise NotImplementedError

        file_name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = MerkleLeaf(file_name, hashtype, hash0)
        self.assertEqual(file_name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        file_name2 = file_name
        while file_name2 == file_name:
            file_name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = MerkleLeaf(file_name2, hashtype, hash1)
        self.assertEqual(file_name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertTrue(leaf0 == leaf0)
        self.assertFalse(leaf0 == leaf1)

        # XXX USE NLHTree instead
        # pair0    = leaf0.toPair()
        # leaf0bis = MerkleLeaf.createFromPair(pair0)
        # self.assertEqual(leaf0bis, leaf0)

        # pair1    = leaf1.toPair()
        # leaf1bis = MerkleLeaf.createFromPair(pair1)
        # self.assertEqual(leaf1bis, leaf1)

    def test_simple_constructor(self):
        """ Test constructor for various hash types. """
        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype=hashtype)
Example #8
class TestLittleBig(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())
        data = StringIO(LITTLE_BIG_PROTO_SPEC)
        ppp = StringProtoSpecParser(data)   # data should be file-like
        self.str_obj_model = ppp.parse()     # object model from string serialization
        self.proto_name = self.str_obj_model.name  # the dotted name of the protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def lil_big_msg_values(self):
        values = []
        # XXX these MUST be kept in sync with littleBigTest.py
        values.append(self.rng.next_boolean())       # vBoolReqField
        values.append(self.rng.next_int16())         # vEnumReqField

        values.append(self.rng.next_int32())         # vuInt32ReqField
        values.append(self.rng.next_int32())         # vuInt64ReqField
        values.append(self.rng.next_int64())         # vsInt32ReqField
        values.append(self.rng.next_int64())         # vsInt64ReqField

        # #vuInt32ReqField
        # #vuInt64ReqField

        values.append(self.rng.next_int32())         # fsInt32ReqField
        values.append(self.rng.next_int32())         # fuInt32ReqField
        values.append(self.rng.next_real())          # fFloatReqField

        values.append(self.rng.next_int64())         # fsInt64ReqField
        values.append(self.rng.next_int64())         # fuInt64ReqField
        values.append(self.rng.next_real())          # fDoubleReqField

        values.append(self.rng.next_file_name(16))    # lStringReqField

        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))               # lBytesReqField

        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))               # fBytes16ReqField

        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))               # fBytes20ReqField

        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))               # fBytes32ReqField

        return values

    # actual unit tests #############################################
    def check_field_impl_against_spec(
            self, proto_name, msg_name, field_spec, value):
        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("%-20s %s" % (exc, cls.__dict__[exc]))

        self.assertIsNotNone(cls)
        file = cls(value)
        self.assertIsNotNone(file)

        # class attributes --------------------------------
        self.assertEqual(field_spec.name, file.name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)          # not an elegant test

        # instance attribute ------------------------------
        self.assertEqual(value, file.value)

        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for file
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))     # GEEP

    def test_field_impl(self):
        msg_spec = self.str_obj_model.msgs[0]

        # the fields in this imaginary logEntry
        values = self.lil_big_msg_values()

        for i in range(len(msg_spec)):
            print(
                "\nDEBUG: field %u ------------------------------------------------------" %
                i)
            field_spec = msg_spec[i]
            self.check_field_impl_against_spec(
                self.proto_name, msg_spec.name, field_spec, values[i])

    def test_caching(self):
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        # XXX A HACK WHILE WE CHANGE INTERFACE ------------
        msg_spec = self.str_obj_model.msgs[0]
        name = msg_spec.name

        cls0 = make_msg_class(self.str_obj_model, name)
        # DEBUG
        print("Constructed Clz0 name is '%s'" % cls0.name)
        # END
        self.assertEqual(name, cls0.name)
        cls1 = make_msg_class(self.str_obj_model, name)
        self.assertEqual(name, cls1.name)

        # END HACK ----------------------------------------
        # we cache classes, so the two should be the same
        self.assertEqual(id(cls0), id(cls1))

        # chan    = Channel(BUFSIZE)
        values = self.lil_big_msg_values()
        lil_big_msg0 = cls0(values)
        lil_big_msg1 = cls0(values)
        # we don't cache instances, so these will differ
        self.assertNotEqual(id(lil_big_msg0), id(lil_big_msg1))

        field_spec = msg_spec[0]
        dotted_name = "%s.%s" % (self.proto_name, msg_spec.name)
        f0cls = make_field_class(dotted_name, field_spec)
        f1cls = make_field_class(dotted_name, field_spec)
        self.assertEqual(id(f0cls), id(f1cls))

    def test_little_big(self):
        self.assertIsNotNone(self.str_obj_model)
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        self.assertEqual('org.xlattice.fieldz.test.littleBigProto',
                         self.str_obj_model.name)

        self.assertEqual(0, len(self.str_obj_model.enums))
        self.assertEqual(1, len(self.str_obj_model.msgs))
        self.assertEqual(0, len(self.str_obj_model.seqs))

        msg_spec = self.str_obj_model.msgs[0]

        # Create a channel ------------------------------------------
        # its buffer will be used for both serializing the instance
        # data and, by deserializing it, for creating a second instance.
        chan = Channel(BUFSIZE)
        buf = chan.buffer
        self.assertEqual(BUFSIZE, len(buf))

        # create the LittleBigMsg class ------------------------------
        little_big_msg_cls = make_msg_class(self.str_obj_model, msg_spec.name)

        # -------------------------------------------------------------
        # XXX the following fails because field 2 is seen as a property
        # instead of a list
        if False:        # DEBUGGING
            print('\nLittleBigMsg CLASS DICTIONARY')
            for (ndx, key) in enumerate(little_big_msg_cls.__dict__.keys()):
                print(
                    "%3u: %-20s %s" %
                    (ndx, key, little_big_msg_cls.__dict__[key]))
        # -------------------------------------------------------------

        # create a message instance ---------------------------------
        values = self.lil_big_msg_values()            # quasi-random values
        lil_big_msg = little_big_msg_cls(values)

        # __setattr__ in MetaMsg raises exception on any attempt
        # to add new attributes.  This works at the class level but
        # NOT at the instance level
        #
        if True:
            try:
                lil_big_msg.foo = 42
                self.fail(
                    "ERROR: attempt to assign new instance attribute succeeded")
            except AttributeError as a_exc:
                # DEBUG
                print(
                    "ATTR ERROR ATTEMPTING TO SET lilBigMsg.foo: " +
                    str(a_exc))
                # END
                pass

        if '__dict__' in dir(lil_big_msg):
            print('\nlilBigMsg INSTANCE DICTIONARY')
            for exc in list(lil_big_msg.__dict__.keys()):
                print("%-20s %s" % (exc, lil_big_msg.__dict__[exc]))

        # lilBigMsg.name is a property
        try:
            lil_big_msg.name = 'boo'
            self.fail("ERROR: attempt to change message name succeeded")
        except AttributeError:
            pass

        self.assertEqual(msg_spec.name, lil_big_msg.name)
        # we don't have any nested enums or messages
        self.assertEqual(0, len(lil_big_msg.enums))
        self.assertEqual(0, len(lil_big_msg.msgs))

        self.assertEqual(17, len(lil_big_msg.field_classes))
        # number of fields in instance
        self.assertEqual(17, len(lil_big_msg))
        for i in range(len(lil_big_msg)):
            self.assertEqual(values[i], lil_big_msg[i].value)

        # serialize the object to the channel -----------------------
        print("\nDEBUG: PHASE A ######################################")
        nnn = lil_big_msg.write_stand_alone(chan)

        old_position = chan.position
        chan.flip()
        self.assertEqual(old_position, chan.limit)
        self.assertEqual(0, chan.position)

        # deserialize the channel, making a clone of the message ----
        (read_back, nn2) = little_big_msg_cls.read(
            chan, self.str_obj_model)  # sOM is protoSpec
        self.assertIsNotNone(read_back)
        self.assertEqual(nnn, nn2)

        # verify that the messages are identical --------------------
        self.assertTrue(lil_big_msg.__eq__(read_back))

        print("\nDEBUG: PHASE B ######################################")
        # produce another message from the same values --------------
        lil_big_msg2 = little_big_msg_cls(values)
        chan2 = Channel(BUFSIZE)
        nnn = lil_big_msg2.write_stand_alone(chan2)
        chan2.flip()
        (copy2, nn3) = little_big_msg_cls.read(chan2, self.str_obj_model)
        self.assertIsNotNone(copy2)
        self.assertEqual(nnn, nn3)
        self.assertTrue(lil_big_msg.__eq__(copy2))
        self.assertTrue(lil_big_msg2.__eq__(copy2))

        # test clear()
        chan2.position = 97
        chan2.limit = 107
        chan2.clear()
        self.assertEqual(0, chan2.limit)
        self.assertEqual(0, chan2.position)
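
The position/limit assertions at the end of test_little_big pin down the Channel's buffer bookkeeping: flip() behaves like java.nio's ByteBuffer.flip() (limit takes the old write position, position returns to zero), while clear() here zeroes both fields. A minimal sketch of just that bookkeeping, assuming a Channel-like object (this is not the fieldz Channel itself):

class ChannelSketch(object):
    """Position/limit bookkeeping only; the real Channel also owns the I/O buffer."""

    def __init__(self, capacity):
        self.buffer = bytearray(capacity)
        self.capacity = capacity
        self.position = 0       # next index to write (or, after flip, to read)
        self.limit = capacity   # one past the last usable index

    def flip(self):
        # switch from writing to reading: limit marks how much was written
        self.limit = self.position
        self.position = 0

    def clear(self):
        # as asserted above, clear() zeroes both position and limit
        self.position = 0
        self.limit = 0

chan = ChannelSketch(16)
chan.position = 5               # pretend 5 bytes were written
chan.flip()
assert (chan.position, chan.limit) == (0, 5)
chan.clear()
assert (chan.position, chan.limit) == (0, 0)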
Example #9
class TestTFWriter(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def dump_buffer(self, buf):
        for i in range(16):
            print("0x%02x " % buf[i], end=' ')
        print()

    # actual unit tests #############################################

    # these two methods are all that's left of testTFBuffer.py
    def test_buffer_ctor(self):
        buffer = [0] * BUFSIZE
        tf_buf = TFBuffer(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    def test_buffer_creator(self):
        BUFSIZE = 1024
        tf_buf = TFBuffer.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_buf, TFBuffer))
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    # and these two methods are all that's left of testTFReader.py
    def test_reader_ctor(self):
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)
        self.assertEqual(BUFSIZE, len(tf_reader.buffer))

    def test_reader_creator(self):
        BUFSIZE = 1024
        tf_reader = TFReader.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_reader, TFReader))
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)

    # next two are specific to TFWriter
    def test_writer_ctor(self):
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_writer = TFWriter(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def test_writer_creator(self):
        BUFSIZE = 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_writer, TFWriter))
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def do_round_trip_field(self, writer, reader, idx, field_type, value):
        writer.put_next(idx, value)
#       # DEBUG
#       tfBuf   = writer.buffer
#       print "after put buffer is " ,
#       self.dumpBuffer(tfBuf)
#       # END
        reader.get_next()
        self.assertEqual(idx, reader.field_nbr)
        # XXX THIS SHOULD WORK:
        # self.assertEqual(field_type, reader.field_type)
        self.assertEqual(value, reader.value)
        return idx + 1

    def test_writing_and_reading(self):
        BUFSIZE = 16 * 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        tf_buf = tf_writer.buffer       # we share the buffer
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, tf_buf)

        idx = 0                           # 0-based field number

        # field types encoded as varints (8) ========================
        # These are tested in greater detail in testVarint.py; the
        # tests here are to exercise their use in a heterogeneous
        # buffer

        # field 0: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x1f)
        self.assertEqual(1, idx)         # DEBUG XXX

        # field 1: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)

        # field 2:  _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0x12345678abcdef3e)

        # field 3: vsInt32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', 192)

        # field 4: vsInt32
        # _V_SINT32 (zig-zag encoded, optimal for small values near zero)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', -192)

        # field 5: _V_SINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint64', -193)  # GEEP

        # field 6: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)
        # field 7: _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0xffffffff172f3e4d)

        # _V_BOOL
        # XXX NOT IMPLEMENTED, NOT TESTED

        # _V_ENUM
        # XXX NOT IMPLEMENTED, NOT TESTED

        # encoded as fixed length 32 bit fields =====================
        # field 8: _F_INT32
        idx = self.do_round_trip_field(tf_writer, tf_reader, idx, 'fint32',
                                       0x172f3e4d)
        # _F_FLOAT
        # XXX STUB XXX not implemented

        # encoded as fixed length 64 bit fields =====================
        # field 9: _F_INT64
        idx = self.do_round_trip_field(tf_writer, tf_reader, idx, 'fint64',
                                       0xffffffff172f3e4d)
        # _F_DOUBLE
        # XXX STUB XXX not implemented

        # encoded as varint len followed by byte[len] ===============
        # field 10: _L_STRING
        string = self.rng.next_file_name(16)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lstring', string)

        # field 11: _L_BYTES
        b_val = bytearray(8 + self.rng.next_int16(16))
        self.rng.next_bytes(b_val)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lbytes', b_val)

        # _L_MSG
        # XXX STUB XXX not implemented

        # fixed length byte sequences, byte[N] ======================
        # field 12: _F_BYTES16
        self.rng.next_bytes(B128)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes16', B128)

        # field 13: _F_BYTES20
        self.rng.next_bytes(B160)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes20', B160)

        # may want to introduce eg fNodeID20 and fSha1Key types
        # field 14: _F_BYTES32
        self.rng.next_bytes(B256)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes32', B256)
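
Fields 3 through 5 above exercise the signed varint types, which the comments describe as zig-zag encoded. A short sketch of the standard zig-zag mapping (the same scheme Protocol Buffers use for sint32/sint64) shows why small negatives such as -192 and -193 stay cheap as varints:

def zigzag_encode(n, bits=32):
    """Map a signed value to an unsigned one: 0, -1, 1, -2, 2 -> 0, 1, 2, 3, 4."""
    return (n << 1) ^ (n >> (bits - 1))

def zigzag_decode(z):
    """Inverse of zigzag_encode."""
    return (z >> 1) ^ -(z & 1)

assert zigzag_encode(192) == 384       # small positives stay small
assert zigzag_encode(-192) == 383      # small negatives stay small too
assert zigzag_decode(zigzag_encode(-193)) == -193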