class TestLenPlus(unittest.TestCase):
    """ Test length-preceded fields and data types. """

    def setUp(self):
        # fresh time-seeded RNG per test
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def round_trip(self, string):
        """
        Verify that a unicode string converted to wire format and
        then back again is the same string.

        This tests writing and reading a string of bytes as the first and
        only field in a buffer.
        """
        chan = Channel(LEN_BUFF)

        # -- write the bytearray ------------------------------------
        # field number is random in [1..1024]
        field_nbr = 1 + self.rng.next_int16(1024)
        write_len_plus_field(chan, string, field_nbr)
        chan.flip()

        # # DEBUG
        # print("buffer after writing lenPlus field: " + str(chan.buffer))
        # # END

        # -- read the value written ---------------------------------
        # first the header (which is a varint) ------------
        (field_type, field_nbr2,) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.LEN_PLUS, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        # after reading the header, the channel position must equal
        # the varint-encoded length of that header
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.LEN_PLUS)),
            offset2)

        # then the actual value written -------------------
        # NOTE(review): 'tstamp' is a misleading name; this is the
        # byte string just written, not a timestamp
        tstamp = read_raw_len_plus(chan)
        offset3 = chan.position
        self.assertEqual(string, tstamp)
        # final position = header + varint(len) + the bytes themselves
        self.assertEqual(
            offset2 + length_as_varint(len(string)) + len(string), offset3)

    def test_encode_decode(self):
        """ Test round tripping utf-8 strings. """
        self.round_trip(''.encode('utf8'))      # empty string edge case
        self.round_trip('ndx_'.encode('utf8'))
        self.round_trip('should be a random string of bytes'.encode('utf8'))
def test_random_value(self):
    """
    Check that XLSHA3 and hashlib.sha3_256 produce identical hex
    digests for a handful of quasi-random byte strings.
    """
    rng = SimpleRNG()
    for _ in range(4):
        # 16 to 63 random bytes per trial
        blob = rng.some_bytes(16 + rng.next_int16(48))
        self.assertEqual(
            XLSHA3(blob).hexdigest(),
            hashlib.sha3_256(blob).hexdigest())
class TestAddingFunctions(unittest.TestCase):
    """ Test adding functions to an existing class. """

    def setUp(self):
        # time-seeded RNG used to pick arbitrary operands
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_adding_funcs(self):
        """
        Demonstrate that functions can be added to instances dynamically
        and that 'self' within the added functions is interpreted
        correctly.
        """
        obj = SimpleClass()
        x_val = self.rng.next_int16()
        y_val = self.rng.next_int16()
        q_val = self.rng.next_int16()
        r_val = self.rng.next_int16()

        # Test adding a function to an instance; this syntax is
        # specific to Python3.  The descriptor protocol is
        # func.__get__(instance, owner): the instance comes FIRST,
        # the owning class second.  (The original code passed them
        # reversed, so the bound 'self' was the class object.)
        # pylint: disable=no-member
        obj.adder = simple_adder.__get__(obj, SimpleClass)
        self.assertEqual(obj.adder(x_val, y_val), x_val + y_val)

        # Confirm that 'self' is interpreted correctly in the added
        # functions.
        # pylint: disable=no-member
        obj.plus42 = add42.__get__(obj, SimpleClass)
        self.assertEqual(obj.plus42(q_val), q_val + 42)

        # Newly added methods interacting with methods defined in the
        # class.
        self.assertEqual(
            obj.subtractor(obj.plus42(q_val), obj.adder(x_val, r_val)),
            (q_val + 42) - (x_val + r_val) + 42)
class TestTimestamp(unittest.TestCase):
    """ Ostensibly tests BuildList timestamp. (Why?) """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_sha1_file(self):
        """
        Verify functioning of xlu.file_sha1hex().

        Writes 1-3 blocks of random data to a uniquely named file under
        tmp/ and checks that file_sha1hex() agrees with a SHA-1 digest
        computed in memory.
        """

        blk_count = 1 + self.rng.next_int16(3)      # so 1 to 3
        # last block will usually be only partially populated
        byte_count = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)

        data = bytearray(byte_count)        # that many null bytes
        self.rng.next_bytes(data)           # fill with random data
        d_val = hashlib.new('sha1')
        d_val.update(data)
        hash_ = d_val.hexdigest()

        # FIX: ensure the scratch directory exists before writing into
        # it; the other test classes in this file create it the same
        # way, but this test previously relied on them having run first.
        os.makedirs('tmp', mode=0o755, exist_ok=True)

        # make a unique test file name
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        file_hash = file_sha1hex(path_to_file)
        self.assertEqual(hash_, file_hash)
def test_random_value(self):
    """
    Verify that pyblake2.blake2b and hashlib.blake2b return the same
    digest for a few quasi-random values.

    This test only makes sense for more recent versions of hashlib
    which support blake2, so it is skipped before Python 3.6.
    """
    # guard clause instead of wrapping the whole body in an 'if'
    if sys.version_info < (3, 6):
        return
    rng = SimpleRNG()
    for _ in range(4):
        # 16 to 63 random bytes per trial
        blob = rng.some_bytes(16 + rng.next_int16(48))
        self.assertEqual(
            XLBLAKE2B_256(blob).hexdigest(),
            pyblake2.blake2b(blob, digest_size=32).hexdigest())
class TestCrypto(unittest.TestCase):
    """ Test "crypto" functionality (line of spaces cache) for xlattic_py. """

    def setUp(self):
        self.rng = SimpleRNG()

    def test_spaces(self):
        """ Exercise line-of-spaces caching a few times over. """
        for _ in range(4):
            width = self.rng.next_int16(32)
            line = SP.get_spaces(width)
            # the cached line must have exactly the requested length ...
            self.assertEqual(len(line), width)
            # ... and consist solely of space characters
            for one_char in line:
                self.assertEqual(one_char, ' ')
class TestDropFromU(unittest.TestCase):
    """ Test the drop_from_u_dir functionality. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def populate_tree(self, tree, data_path, u_dir, hashtype):
        """
        Generate nnn unique quasi-random values, where nnn is at least 16.

        Each value is written three ways: as a file under data_path, as
        a leaf inserted into the NLHTree, and as content-keyed data in
        u_dir.  Returns (values, hashes): the raw byte strings and their
        binary content hashes, in insertion order.
        """
        nnn = 16 + self.rng.next_int16(16)      # 16..31 values
        # DEBUG
        # print("nnn = %d" % nnn)
        # EnnnD
        values = []
        hashes = []
        for count in range(nnn):
            # generate datum ------------------------------
            # 32 to 63 random bytes
            datum = self.rng.some_bytes(32 + self.rng.next_int16(32))
            values.append(datum)

            # generate hash = bin_key ----------------------
            # select the digest implementation matching hashtype
            if hashtype == HashTypes.SHA1:
                sha = hashlib.sha1()
            elif hashtype == HashTypes.SHA2:
                sha = hashlib.sha256()
            elif hashtype == HashTypes.SHA3:
                sha = hashlib.sha3_256()
            elif hashtype == HashTypes.BLAKE2B:
                sha = hashlib.blake2b(digest_size=32)
            else:
                raise NotImplementedError
            sha.update(datum)
            bin_key = sha.digest()
            hex_key = sha.hexdigest()
            hashes.append(bin_key)

            # write data file -----------------------------
            file_name = 'value%04d' % count
            path_to_file = os.path.join(data_path, file_name)
            with open(path_to_file, 'wb') as file:
                # DEBUG
                # print("writing %s to %s" % (hex_key, path_to_file))
                # END
                file.write(datum)

            # insert leaf into tree -----------------------
            # path_from_top = os.path.join(top_name, file_name)
            leaf = NLHLeaf(file_name, bin_key, hashtype)
            tree.insert(leaf)

            # DEBUG
            # print("  inserting <%s %s>" % (leaf.name, leaf.hex_hash))
            # END

            # write data into uDir ------------------------
            u_dir.put_data(datum, hex_key)
        return values, hashes

    def generate_udt(self, struc, hashtype):
        """
        Generate under ./tmp a data directory with random content, a
        uDir containing the same data, and an NLHTree that matches.

        uDir has the directory structure (DIR_FLAT, DIR16x16,
        DIR256x256, etc) requested.  Hashes are SHA1 if using SHA1 is
        True, SHA256 otherwise.

        values is a list of binary values, each the content of a file
        under dataDir.  Each value contains a non-zero number of bytes.

        hashes is a list of the SHA hashes of the values.  Each hash
        is a binary value.  If using SHA1 it consists of 20 bytes.

        return uPath, data_path, tree, hashes, values
        """
        # make a unique U directory under ./tmp/
        os.makedirs('tmp', mode=0o755, exist_ok=True)
        u_root_name = self.rng.next_file_name(8)
        u_path = os.path.join('tmp', u_root_name)
        while os.path.exists(u_path):
            # regenerate until the name is unused
            u_root_name = self.rng.next_file_name(8)
            u_path = os.path.join('tmp', u_root_name)

        # DEBUG
        # print("u_root_name = %s" % u_root_name)
        # END

        # create uDir and the NLHTree
        u_dir = UDir(u_path, struc, hashtype)
        self.assertTrue(os.path.exists(u_path))

        # make a unique data directory under tmp/
        data_tmp = self.rng.next_file_name(8)
        tmp_path = os.path.join('tmp', data_tmp)
        while os.path.exists(tmp_path):
            data_tmp = self.rng.next_file_name(8)
            tmp_path = os.path.join('tmp', data_tmp)

        # dataDir must have same base name as NLHTree
        top_name = self.rng.next_file_name(8)
        data_path = os.path.join(tmp_path, top_name)
        os.makedirs(data_path, mode=0o755)

        # DEBUG
        # print("data_tmp = %s" % data_tmp)
        # print("top_name = %s" % top_name)
        # print('data_path = %s' % data_path)
        # END

        tree = NLHTree(top_name, hashtype)
        values, hashes = self.populate_tree(
            tree, data_path, u_dir, hashtype)
        return u_path, data_path, tree, hashes, values

    # ---------------------------------------------------------------

    def do_test_with_ephemeral_tree(self, struc, hashtype):
        """
        Generate a tmp/ subdirectory containing a quasi-random data
        directory and corresponding uDir and NLHTree serialization.

        We use the directory strucure (struc) and hash type (hashtype)
        indicated, running various consistency tests on the three.
        """
        u_path, data_path, tree, hashes, values = self.generate_udt(
            struc, hashtype)

        # DEBUG
        # print("TREE:\n%s" % tree)
        # END
        # verify that the dataDir matches the nlhTree
        tree2 = NLHTree.create_from_file_system(data_path, hashtype)
        # DEBUG
        # print("TREE2:\n%s" % tree2)
        # END
        self.assertEqual(tree2, tree)

        nnn = len(values)           # number of values present
        hex_hashes = []
        for count in range(nnn):
            hex_hashes.append(hexlify(hashes[count]).decode('ascii'))

        ndxes = [ndx for ndx in range(nnn)]     # indexes into lists
        self.rng.shuffle(ndxes)                 # shuffled

        kkk = self.rng.next_int16(nnn)  # we will drop this many indexes

        # DEBUG
        # print("dropping %d from %d elements" % (kkk, nnn))
        # END

        drop_me = ndxes[0:kkk]      # indexes of values to drop
        keep_me = ndxes[kkk:]       # of those which should still be present

        # construct an NLHTree containing values to be dropped from uDir
        clone = tree.clone()
        for count in keep_me:
            name = 'value%04d' % count
            clone.delete(name)      # the parameter is a glob !

        # these values should be absent from q: they won't be dropped
        # from uDir
        for count in keep_me:
            name = 'value%04d' % count
            xxx = clone.find(name)
            self.assertEqual(len(xxx), 0)

        # these values shd still be present in clone: they'll be dropped
        # from UDir
        for count in drop_me:
            name = 'value%04d' % count
            xxx = clone.find(name)
            self.assertEqual(len(xxx), 1)

        # the clone subtree contains those elements which will be dropped
        # from uDir
        unmatched = clone.drop_from_u_dir(u_path)   # was unmatched

        # DEBUG
        # for x in unmatched:  # (relPath, hash)
        #     print("unmatched: %s %s" % (x[0], x[1]))
        # END
        # every entry in the clone should have been matched and dropped
        self.assertEqual(len(unmatched), 0)

        # reopen the U directory to check its contents
        u_dir = UDir(u_path, struc, hashtype)
        self.assertTrue(os.path.exists(u_path))

        # these values should still be present in uDir
        for count in keep_me:
            hex_hash = hex_hashes[count]
            self.assertTrue(u_dir.exists(hex_hash))

        # these values should NOT be present in UDir
        for count in drop_me:
            hex_hash = hex_hashes[count]
            self.assertFalse(u_dir.exists(hex_hash))

    def test_with_ephemeral_tree(self):
        """
        Generate tmp/ subdirectories containing a quasi-random data
        directory and corresponding uDir and NLHTree serialization,
        using various directory structures and hash types.
        """
        for struc in DirStruc:
            for hashtype in HashTypes:
                self.do_test_with_ephemeral_tree(struc, hashtype)
class TestTFWriter(unittest.TestCase):
    """ Exercise TFBuffer, TFReader and TFWriter round trips. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def dump_buffer(self, buf):
        """ Print the first bytes of buf in hex, for debugging. """
        # FIX: bound the dump by the buffer's actual length instead of
        # assuming at least 16 bytes
        for i in range(min(16, len(buf))):
            print("0x%02x " % buf[i], end=' ')
        print()

    # actual unit tests #############################################

    # these two methods are all that's left of testTFBuffer.py
    def test_buffer_ctor(self):
        """ A directly constructed TFBuffer starts empty at full capacity. """
        buffer = [0] * BUFSIZE
        tf_buf = TFBuffer(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    def test_buffer_creator(self):
        """ TFBuffer.create() yields an empty buffer of the right size. """
        BUFSIZE = 1024
        tf_buf = TFBuffer.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_buf, TFBuffer))
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    # and these two methods are all that's left of testTFReader.py
    def test_reader_ctor(self):
        """ A directly constructed TFReader wraps the supplied buffer. """
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)
        self.assertEqual(BUFSIZE, len(tf_reader.buffer))

    def test_reader_creator(self):
        """ TFReader.create() yields an empty reader of the right size. """
        BUFSIZE = 1024
        tf_reader = TFReader.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_reader, TFReader))
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)

    # next two are specific to TFWriter
    def test_writer_ctor(self):
        """ A directly constructed TFWriter wraps the supplied buffer. """
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_writer = TFWriter(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def test_writer_creator(self):
        """ TFWriter.create() yields an empty writer of the right size. """
        BUFSIZE = 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_writer, TFWriter))
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def do_round_trip_field(self, writer, reader, idx, field_type, value):
        """
        Write one field, read it back, verify field number and value
        survive the round trip; return the next field index.
        """
        writer.put_next(idx, value)
        # # DEBUG
        # tfBuf = writer.buffer
        # print "after put buffer is "
        # self.dumpBuffer(tfBuf)
        # # END
        reader.get_next()
        self.assertEqual(idx, reader.field_nbr)
        # XXX THIS SHOULD WORK:
        # self.assertEqual( fType, reader.fType )
        self.assertEqual(value, reader.value)
        return idx + 1

    def test_writing_and_reading(self):
        """ Round-trip one field of each supported wire type. """
        BUFSIZE = 16 * 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        tf_buf = tf_writer.buffer
        # we share the buffer between writer and reader
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, tf_buf)

        idx = 0     # 0-based field number

        # field types encoded as varints (8) ========================
        # These are tested in greater detail in testVarint.py; the
        # tests here are to exercise their use in a heterogeneous
        # buffer

        # field 0: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x1f)
        self.assertEqual(1, idx)        # DEBUG XXX

        # field 1: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)

        # field 2: _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0x12345678abcdef3e)

        # field 3: vsInt32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', 192)

        # field 4: vsInt32
        # _V_SINT32 (zig-zag encoded, optimal for small values near zero)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', -192)

        # field 5: _V_SINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint64', -193)     # GEEP

        # field 6: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)

        # field 7: _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0xffffffff172f3e4d)

        # _V_BOOL
        # XXX NOT IMPLEMENTED, NOT TESTED

        # _V_ENUM
        # XXX NOT IMPLEMENTED, NOT TESTED

        # encoded as fixed length 32 bit fields =====================

        # field 8: _F_INT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fint32', 0x172f3e4d)

        # _F_FLOAT
        # XXX STUB XXX not implemented

        # encoded as fixed length 64 bit fields =====================

        # field 9: _F_INT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fint64', 0xffffffff172f3e4d)

        # _F_DOUBLE
        # XXX STUB XXX not implemented

        # encoded as varint len followed by byte[len] ===============

        # field 10: _L_STRING
        # FIX: was self.rng.next_file_NAME(16); SimpleRNG's method is
        # next_file_name, as used everywhere else in this file
        string = self.rng.next_file_name(16)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lstring', string)

        # field 11: _L_BYTES
        b_val = bytearray(8 + self.rng.next_int16(16))
        self.rng.next_bytes(b_val)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lbytes', b_val)

        # _L_MSG
        # XXX STUB XXX not implemented

        # fixed length byte sequences, byte[N} ======================

        # field 12: _F_BYTES16
        self.rng.next_bytes(B128)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes16', B128)

        # field 13: _F_BYTES20
        self.rng.next_bytes(B160)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes20', B160)

        # may want to introduce eg fNodeID20 and fSha1Key types

        # field 14: _F_BYTES32
        self.rng.next_bytes(B256)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes32', B256)
class TestRSA(unittest.TestCase):
    """ Test RSA crypto routines. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_rsa_serialization(self):
        """
        Exercise basic RSA functions.

        These include key generation, public key extraction,
        serialization/deserialization for pem and der formats, and
        digital signing and verification.
        """
        # ignore warning about renaming internal to cryptography
        warnings.filterwarnings("ignore", category=PendingDeprecationWarning)

        tmp_dir = 'tmp'
        os.makedirs(tmp_dir, exist_ok=True, mode=0o755)
        # find an unused subdirectory name under tmp/
        while True:
            sub_dir = self.rng.next_file_name(12)
            node_dir = os.path.join(tmp_dir, sub_dir)
            if not os.path.exists(node_dir):
                break
        # DEBUG
        print("node_dir is %s" % node_dir)
        # END
        os.mkdir(node_dir, mode=0o755)

        # RSA PRIVATE KEY GENERATION -----------------------------

        sk_priv = rsa.generate_private_key(
            public_exponent=65537,
            key_size=1024,  # cheap key for testing
            backend=default_backend())
        sk_ = sk_priv.public_key()

        self.assertEqual(sk_priv.key_size, 1024)

        # PEM FORMAT RSA PRIVATE KEY ROUND-TRIPPED ------------------

        pem = sk_priv.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption())

        key_file = os.path.join(node_dir, 'skPriv.pem')
        with open(key_file, 'wb') as file:      # written as bytes
            file.write(pem)

        self.assertTrue(os.path.exists(key_file))
        with open(key_file, 'rb') as file:
            sk2_priv = serialization.load_pem_private_key(
                file.read(),
                password=None,
                backend=default_backend())

        # NUMBERS AND KEY EQUALITY ----------------------------------

        # get the public part of the key
        sk2_ = sk2_priv.public_key()

        # __eq__() for public part of RSA keys -------------

        # FAILS because __eq__() has not been defined
        # self.assertEqual(sk2_, sk_)

        def check_equal_rsa_pub_key(sk2_, sk_):
            """ __eq__ functionality for RSA public keys. """
            pub_n = sk_.public_numbers()
            pub_n2 = sk2_.public_numbers()

            # public keys are equal iff exponent and modulus match
            self.assertEqual(pub_n2.e, pub_n.e)
            self.assertEqual(pub_n2.n, pub_n.n)

        check_equal_rsa_pub_key(sk2_, sk_)

        def check_equal_rsa_priv_key(sk2_priv, sk_priv):
            """ __eq__ functionality for RSA private keys. """
            pri_n = sk_priv.private_numbers()
            pri_n2 = sk2_priv.private_numbers()

            # the library guarantees this: p is the larger factor
            self.assertTrue(pri_n.p > pri_n.q)

            # compare all CRT components of the two private keys
            self.assertTrue(
                pri_n2.p == pri_n.p and
                pri_n2.q == pri_n.q and
                pri_n2.d == pri_n.d and
                pri_n2.dmp1 == pri_n.dmp1 and
                pri_n2.dmq1 == pri_n.dmq1 and
                pri_n2.iqmp == pri_n.iqmp)

        check_equal_rsa_priv_key(sk2_priv, sk_priv)

        # DER DE/SERIALIZATION ROUND-TRIPPED ------------------------

        der = sk_priv.private_bytes(
            encoding=serialization.Encoding.DER,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption())

        der_key_file = os.path.join(node_dir, 'skPriv.der')
        with open(der_key_file, 'wb') as file:      # written as bytes
            file.write(der)

        self.assertTrue(os.path.exists(der_key_file))
        with open(der_key_file, 'rb') as file:
            sk3_priv = serialization.load_der_private_key(
                file.read(),
                password=None,
                backend=default_backend())

        check_equal_rsa_priv_key(sk3_priv, sk_priv)

        # OpenSSH PUBLIC KEY DE/SERIALIZATION ROUND-TRIPPED ---------

        ssh_bytes = sk_.public_bytes(
            encoding=serialization.Encoding.OpenSSH,
            format=serialization.PublicFormat.OpenSSH)

        ssh_key_file = os.path.join(node_dir, 'sk.ssh')
        with open(ssh_key_file, 'wb') as file:      # written as bytes
            file.write(ssh_bytes)

        self.assertTrue(os.path.exists(ssh_key_file))
        with open(ssh_key_file, 'rb') as file:
            sk4_ = serialization.load_ssh_public_key(
                file.read(),
                backend=default_backend())

        check_equal_rsa_pub_key(sk4_, sk_)  # GEEP 175

        # PEM FORMAT RSA PUBLIC KEY ROUND-TRIPPED -------------------

        pem = sk_.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.PKCS1)

        key_file = os.path.join(node_dir, 'sk.pem')
        with open(key_file, 'wb') as file:      # written as bytes
            file.write(pem)

        self.assertTrue(os.path.exists(key_file))
        with open(key_file, 'rb') as file:
            sk5_ = serialization.load_pem_public_key(
                file.read(),
                backend=default_backend())  # GEEP 193

        check_equal_rsa_pub_key(sk5_, sk_)

    def test_dig_sig(self):
        """ Test digital signatures using a range of hash types. """
        for using in [HashTypes.SHA1, HashTypes.SHA2, ]:
            self.do_test_dig_sig(using)

    def do_test_dig_sig(self, hashtype):
        """
        Verify calculation of digital signature using specific hash type.
        """
        # NOTE(review): if hashtype is anything other than SHA1/SHA2,
        # 'sha' is left unbound and the signer call below raises
        # NameError -- confirm callers only pass these two values
        if hashtype == HashTypes.SHA1:
            sha = hashes.SHA1
        elif hashtype == HashTypes.SHA2:
            sha = hashes.SHA256
        sk_priv = rsa.generate_private_key(
            public_exponent=65537,
            key_size=1024,  # cheap key for testing
            backend=default_backend())
        sk_ = sk_priv.public_key()

        print("WARNING: cannot use hashlib's sha code with pyca cryptography")
        print("WARNING: pyca cryptography does not support sha3/keccak")

        # NOTE(review): signer()/verifier() are the old pyca/cryptography
        # streaming API, deprecated in favor of sign()/verify() -- this
        # code pins an older library version; verify before upgrading
        signer = sk_priv.signer(
            padding.PSS(
                mgf=padding.MGF1(sha()),
                salt_length=padding.PSS.MAX_LENGTH),
            sha())
        count = 64 + self.rng.next_int16(192)       # [64..256)
        data = bytes(self.rng.some_bytes(count))
        signer.update(data)
        signature = signer.finalize()       # a binary value; bytes

        # BEGIN interlude: conversion to/from base64, w/ 76-byte lines
        b64sig = base64.encodebytes(signature).decode('utf-8')
        sig2 = base64.decodebytes(b64sig.encode('utf-8'))
        self.assertEqual(sig2, signature)
        # END interlude ---------------------------------------------

        verifier = sk_.verifier(
            signature,
            padding.PSS(
                mgf=padding.MGF1(sha()),
                salt_length=padding.PSS.MAX_LENGTH),
            sha())
        verifier.update(data)

        try:
            verifier.verify()
            # digital signature verification succeeded
        except InvalidSignature:
            self.fail("dig sig verification unexpectedly failed")

        # twiddle a random byte in data array to make verification fail
        data2 = bytearray(data)
        which = self.rng.next_int16(count)
        data2[which] = 0xff & ~data2[which]     # flip all bits of one byte
        data3 = bytes(data2)

        verifier = sk_.verifier(
            signature,  # same digital signature
            padding.PSS(
                mgf=padding.MGF1(sha()),
                salt_length=padding.PSS.MAX_LENGTH),
            sha())
        verifier.update(data3)

        try:
            verifier.verify()
            self.fail("expected verification of modified message to fail")
        except InvalidSignature:
            pass    # digital signature verification failed
class TestOptionz(unittest.TestCase):
    """ Test the basic Optionz classes. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################

    def test_bare_optionz(self):
        """ Create an Optionz instance, check for expected attributes. """
        # minimal constructor: desc and epilog default to None
        my_optz = Z('fred')
        self.assertEqual(my_optz.name, 'fred')
        self.assertEqual(my_optz.desc, None)
        self.assertEqual(my_optz.epilog, None)
        self.assertEqual(len(my_optz), 0)

        # full constructor: all three attributes supplied
        my_optz = Z('frank', 'frivolous', 'fabulous')
        self.assertEqual(my_optz.name, 'frank')
        self.assertEqual(my_optz.desc, 'frivolous')
        self.assertEqual(my_optz.epilog, 'fabulous')
        self.assertEqual(len(my_optz), 0)

    def test_z_option(self):
        """ Populate an Optionz object, check for expected attr. """
        z_name = self.rng.next_file_name(8)
        z_desc = self.rng.next_file_name(64)
        z_epilog = self.rng.next_file_name(64)

        my_optz = Z(z_name, z_desc, z_epilog)

        self.assertEqual(my_optz.name, z_name)
        self.assertEqual(my_optz.desc, z_desc)
        self.assertEqual(my_optz.epilog, z_epilog)
        self.assertEqual(len(my_optz), 0)

        # booleans --------------------------------------------------
        b_dflt_val = True
        b_desc = "I'm small"
        bool_opt = BoolOption('bO', default=b_dflt_val, desc=b_desc)
        self.assertEqual(bool_opt.name, 'bO')
        self.assertEqual(bool_opt.default, b_dflt_val)
        self.assertEqual(bool_opt.desc, b_desc)

        # adding an equivalent option to the container must yield an
        # object equal to the directly constructed one
        #                            name  valType       default     desc
        b_check = my_optz.add_option('bO', ValType.BOOL, b_dflt_val, b_desc)
        self.assertEqual(len(my_optz), 1)
        self.assertEqual(bool_opt, b_check)

        # choice lists ----------------------------------------------

        # NOTE We should probably require that list elements be of
        # compatible types.  For the moment we just assume that elements
        # are all strings.

        # succeeds if default in list of choices ----------
        my_size = 2 + self.rng.next_int16(4)    # so in [2..5]
        choice = self.rng.next_file_name(8)
        choices = [choice]
        # grow the list to my_size distinct names
        while len(choices) < my_size:
            if choice not in choices:
                choices.append(choice)
            choice = self.rng.next_file_name(8)
        c_dflt_val = choices[self.rng.next_int16(my_size)]
        c_desc = 'a list'
        choice_opt = ChoiceOption('cO', choices, c_dflt_val, c_desc)
        self.assertEqual(choice_opt.name, 'cO')
        self.assertEqual(choice_opt.choices, choices)
        self.assertEqual(choice_opt.default, c_dflt_val)
        self.assertEqual(choice_opt.desc, "a list")

        # fails if default is NOT in list of choices ------
        # NOTE(review): b_choices is built here but never used below;
        # the negative test runs against 'choices' -- looks like dead
        # code or a half-finished variant; confirm intent
        my_size = 2 + self.rng.next_int16(4)    # so in [2..5]
        choice = self.rng.next_file_name(8)
        b_choices = [choice]
        while len(b_choices) < my_size:
            if choice not in b_choices:
                b_choices.append(choice)
            choice = self.rng.next_file_name(8)
        # pick a default guaranteed NOT to be in choices
        dflt_val = self.rng.next_file_name(8)
        while dflt_val in choices:
            dflt_val = self.rng.next_file_name(8)
        try:
            ChoiceOption('bC', choices, default=dflt_val, desc="a list")
            self.fail('added default value not in list of choices')
        except BaseException:
            pass

        c_check = my_optz.add_choice_option('cO', choices, c_dflt_val, c_desc)
        self.assertEqual(len(my_optz), 2)
        self.assertEqual(choice_opt, c_check)

        # floats ----------------------------------------------------
        f_dflt_val = self.rng.next_real()
        f_desc = 'bubbly'
        float_opt = FloatOption('fO', default=f_dflt_val, desc=f_desc)
        self.assertEqual(float_opt.name, 'fO')
        self.assertEqual(float_opt.default, f_dflt_val)
        self.assertEqual(float_opt.desc, f_desc)

        #                            name  valType        default     desc
        f_check = my_optz.add_option('fO', ValType.FLOAT, f_dflt_val, f_desc)
        self.assertEqual(len(my_optz), 3)
        self.assertEqual(float_opt, f_check)

        # ints ------------------------------------------------------
        i_dflt_val = self.rng.next_int32()
        i_desc = 'discrete'
        int_opt = IntOption('iO', default=i_dflt_val, desc=i_desc)
        self.assertEqual(int_opt.name, 'iO')
        self.assertEqual(int_opt.default, i_dflt_val)
        self.assertEqual(int_opt.desc, i_desc)

        #                            name  valType      default     desc
        i_check = my_optz.add_option('iO', ValType.INT, i_dflt_val, i_desc)
        self.assertEqual(len(my_optz), 4)
        self.assertEqual(int_opt, i_check)

        # lists -----------------------------------------------------
        size_val = self.rng.next_int16()
        # select polarity of size randomly
        if self.rng.next_boolean():
            size_val = - size_val
        l_desc = "chunky"
        list_opt = ListOption('lO', default=size_val, desc=l_desc)
        self.assertEqual(list_opt.name, 'lO')
        # for ListOption the default doubles as the (signed) size
        self.assertEqual(list_opt.default, size_val)
        self.assertEqual(list_opt.size, size_val)
        self.assertEqual(list_opt.desc, l_desc)

        # zero size denotes a variable-length list
        zero_val = 0
        var_list_opt = ListOption('zO', default=zero_val, desc="skinny")
        self.assertEqual(var_list_opt.name, 'zO')
        self.assertEqual(var_list_opt.default, zero_val)
        self.assertEqual(var_list_opt.desc, "skinny")

        #                            name  valType       default   desc
        l_check = my_optz.add_option('lO', ValType.LIST, size_val, l_desc)
        self.assertEqual(len(my_optz), 5)
        self.assertEqual(list_opt, l_check)

        # strings ---------------------------------------------------
        s_dflt_val = self.rng.next_file_name(12)
        s_desc = "wiggly"
        str_opt = StrOption('sO', default=s_dflt_val, desc=s_desc)
        self.assertEqual(str_opt.name, 'sO')
        self.assertEqual(str_opt.default, s_dflt_val)
        self.assertEqual(str_opt.desc, s_desc)

        #                            name  valType      default     desc
        s_check = my_optz.add_option('sO', ValType.STR, s_dflt_val, s_desc)
        self.assertEqual(len(my_optz), 6)
        self.assertEqual(str_opt, s_check)
class TestNodeID(unittest.TestCase):
    """ Verify that an XLNodeID behaves like one. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_valid_node_id(self):
        """ Tests the function of that name. """
        # tests that should fail
        self.assertFalse(XLNodeID.is_valid_node_id(None))
        self.assertFalse(XLNodeID.is_valid_node_id('foo'))   # not bytes-like
        self.assertFalse(XLNodeID.is_valid_node_id(b'bar'))  # wrong length
        self.assertFalse(XLNodeID.is_valid_node_id(42))      # an int

        # tests that should succeed: all-zero byte strings of the
        # three supported digest lengths
        val = bytes(SHA1_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))
        val = bytes(SHA2_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))
        val = bytes(SHA3_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))

    def expect_failure(self, val):
        """ Expect object construction to fail. """
        try:
            XLNodeID(val)
            self.fail("XLNodeID constructed with bad ID")
        except XLNodeIDError:
            pass

    def expect_success(self, val):
        """ Try to build a NodeID with val (a bytes value). """
        try:
            nodeid = XLNodeID(val)
            # succeeded
        except XLNodeIDError:
            self.fail("ctor raised with good ID")
        val2 = nodeid.value
        self.assertIsNotNone(val2)
        self.assertFalse(val2 is val)   # not the same object
        self.assertEqual(val2, val)     # but a valid deep copy

    def test_ctor(self):
        """ Tests the XLNodeID constructor. """
        # tests that should fail
        self.expect_failure(None)       # id may not be None
        self.expect_failure('foo')      # not bytes-like
        self.expect_failure(b'bar')     # wrong length
        self.expect_failure(42)         # an int
        # off-by-one lengths around each supported digest size
        self.expect_failure(bytes(SHA1_BIN_LEN - 1))
        self.expect_failure(bytes(SHA1_BIN_LEN + 1))
        self.expect_failure(bytes(SHA2_BIN_LEN - 1))
        self.expect_failure(bytes(SHA2_BIN_LEN + 1))
        self.expect_failure(bytes(SHA3_BIN_LEN - 1))
        self.expect_failure(bytes(SHA3_BIN_LEN + 1))

        # tests that should succeed
        val = self.rng.some_bytes(SHA1_BIN_LEN)
        self.expect_success(val)
        val = self.rng.some_bytes(SHA2_BIN_LEN)
        self.expect_success(val)
        val = self.rng.some_bytes(SHA3_BIN_LEN)
        self.expect_success(val)

    def do_test_cloning(self, length):
        """ Verify that cloning works for a given number of bytes. """
        val = self.rng.some_bytes(length)
        id1 = XLNodeID(val)
        id2 = id1.clone()
        self.assertTrue(id1 is not id2)         # distinct objects
        self.assertEqual(id1.value, id2.value)  # equal contents

    def test_cloning(self):
        """ Test cloning for bytes-like objects of a given number of bytes. """
        for length in [SHA1_BIN_LEN, SHA2_BIN_LEN, SHA3_BIN_LEN]:
            self.do_test_cloning(length)

    def do_test_comparison(self, length):
        """
        For a quasi-random byte sequence of a given length, verify that
        comparison operators work.
        """
        val = self.rng.some_bytes(length)   # a byte array of that length

        # pick a random index into that byte array
        ndx = 1 + self.rng.next_int16(length - 1)
        # clamp the byte away from 0/255 so +1/-1 below can't wrap
        if val[ndx] == 0:
            val[ndx] = 1
        if val[ndx] == 255:
            val[ndx] = 254

        # make a couple of clones of the byte array
        v_bigger = deepcopy(val)
        v_bigger[ndx] += 1
        v_smaller = deepcopy(val)
        v_smaller[ndx] -= 1

        # sanity-check ordering on the raw byte arrays first
        self.assertTrue(v_bigger > val)
        self.assertTrue(v_smaller < val)

        # use these values to make NodeIDs
        n_bigger = XLNodeID(v_bigger)
        n_middle = XLNodeID(val)
        n_smaller = XLNodeID(v_smaller)

        # compare them
        self.assertTrue(n_bigger > n_middle)
        self.assertTrue(n_smaller < n_middle)

        # equality checks
        self.assertEqual(n_middle, n_middle)
        self.assertFalse(n_bigger == n_middle)
        self.assertFalse(n_middle == n_smaller)

    # NOTE(review): method name is a typo for test_comparison; left
    # unchanged because unittest discovers tests by name
    def test_comparion(self):
        """ Test comparison operators for IDs of standard lengths. """
        for length in [SHA1_BIN_LEN, SHA2_BIN_LEN, SHA3_BIN_LEN]:
            self.do_test_comparison(length)
class TestLittleBig(unittest.TestCase):
    """
    Exercise generated message and field classes for the littleBig
    protocol: field-by-field spec conformance, class caching, and a
    full serialize/deserialize round trip through a Channel.
    """

    def setUp(self):
        # parse the protocol spec once per test from its string form
        self.rng = SimpleRNG(time.time())
        data = StringIO(LITTLE_BIG_PROTO_SPEC)
        ppp = StringProtoSpecParser(data)   # data should be file-like
        self.str_obj_model = ppp.parse()    # object model from string serialization
        self.proto_name = self.str_obj_model.name  # the dotted name of the protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def lil_big_msg_values(self):
        """
        Return a list of quasi-random values, one per field of the
        littleBig message, in field order.
        """
        values = []
        # XXX these MUST be kept in sync with littleBigTest.py
        values.append(self.rng.next_boolean())      # vBoolReqField
        values.append(self.rng.next_int16())        # vEnumReqField
        values.append(self.rng.next_int32())        # vuInt32ReqField
        values.append(self.rng.next_int32())        # vuInt64ReqField
        values.append(self.rng.next_int64())        # vsInt32ReqField
        values.append(self.rng.next_int64())        # vsInt64ReqField
        # #vuInt32ReqField
        # #vuInt64ReqField
        values.append(self.rng.next_int32())        # fsInt32ReqField
        values.append(self.rng.next_int32())        # fuInt32ReqField
        values.append(self.rng.next_real())         # fFloatReqField
        values.append(self.rng.next_int64())        # fsInt64ReqField
        values.append(self.rng.next_int64())        # fuInt64ReqField
        values.append(self.rng.next_real())         # fDoubleReqField
        values.append(self.rng.next_file_name(16))  # lStringReqField
        # variable-length byte field: 16 .. 64 random bytes
        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))              # lBytesReqField
        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))              # fBytes16ReqField
        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))              # fBytes20ReqField
        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))              # fBytes32ReqField
        return values

    # actual unit tests #############################################

    def check_field_impl_against_spec(
            self, proto_name, msg_name, field_spec, value):
        """
        Generate a field class from a FieldSpec and verify that an
        instance holding `value` reports the spec's attributes.
        """
        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("%-20s %s" % (exc, cls.__dict__[exc]))
        self.assertIsNotNone(cls)
        file = cls(value)
        self.assertIsNotNone(file)
        # class attributes --------------------------------
        self.assertEqual(field_spec.name, file.name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)          # not an elegant test
        # instance attribute ------------------------------
        self.assertEqual(value, file.value)
        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for f
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))  # GEEP

    def test_field_impl(self):
        """ Check every field of the message spec against its impl. """
        msg_spec = self.str_obj_model.msgs[0]
        # the fields in this imaginary logEntry
        values = self.lil_big_msg_values()
        for i in range(len(msg_spec)):
            print(
                "\nDEBUG: field %u ------------------------------------------------------" % i)
            field_spec = msg_spec[i]
            self.check_field_impl_against_spec(
                self.proto_name, msg_spec.name, field_spec, values[i])

    def test_caching(self):
        """ Message and field classes are cached; instances are not. """
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        # XXX A HACK WHILE WE CHANGE INTERFACE ------------
        msg_spec = self.str_obj_model.msgs[0]
        name = msg_spec.name
        cls0 = make_msg_class(self.str_obj_model, name)
        # DEBUG
        print("Constructed Clz0 name is '%s'" % cls0.name)
        # END
        self.assertEqual(name, cls0.name)
        cls1 = make_msg_class(self.str_obj_model, name)
        self.assertEqual(name, cls1.name)
        # END HACK ----------------------------------------
        # we cache classes, so the two should be the same
        self.assertEqual(id(cls0), id(cls1))
        # chan = Channel(BUFSIZE)
        values = self.lil_big_msg_values()
        lil_big_msg0 = cls0(values)
        lil_big_msg1 = cls0(values)
        # we don't cache instances, so these will differ
        self.assertNotEqual(id(lil_big_msg0), id(lil_big_msg1))
        field_spec = msg_spec[0]
        dotted_name = "%s.%s" % (self.proto_name, msg_spec.name)
        f0cls = make_field_class(dotted_name, field_spec)
        f1cls = make_field_class(dotted_name, field_spec)
        self.assertEqual(id(f0cls), id(f1cls))

    def test_little_big(self):
        """
        Full round trip: build a message from random values, write it
        to a channel, read it back, and verify the clone is equal.
        """
        self.assertIsNotNone(self.str_obj_model)
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        self.assertEqual('org.xlattice.fieldz.test.littleBigProto',
                         self.str_obj_model.name)
        # littleBig defines no enums or seqs, exactly one message
        self.assertEqual(0, len(self.str_obj_model.enums))
        self.assertEqual(1, len(self.str_obj_model.msgs))
        self.assertEqual(0, len(self.str_obj_model.seqs))
        msg_spec = self.str_obj_model.msgs[0]

        # Create a channel ------------------------------------------
        # its buffer will be used for both serializing the instance
        # data and, by deserializing it, for creating a second instance.
        chan = Channel(BUFSIZE)
        buf = chan.buffer
        self.assertEqual(BUFSIZE, len(buf))

        # create the LittleBigMsg class ------------------------------
        little_big_msg_cls = make_msg_class(self.str_obj_model, msg_spec.name)

        # -------------------------------------------------------------
        # XXX the following fails because field 2 is seen as a property
        # instead of a list
        if False:   # DEBUGGING
            print('\nLittleBigMsg CLASS DICTIONARY')
            for (ndx, key) in enumerate(little_big_msg_cls.__dict__.keys()):
                print(
                    "%3u: %-20s %s" %
                    (ndx, key, little_big_msg_cls.__dict__[key]))
        # -------------------------------------------------------------

        # create a message instance ---------------------------------
        values = self.lil_big_msg_values()      # quasi-random values
        lil_big_msg = little_big_msg_cls(values)

        # __setattr__ in MetaMsg raises exception on any attempt
        # to add new attributes.  This works at the class level but
        # NOT at the instance level
        # if True:
        try:
            lil_big_msg.foo = 42
            self.fail(
                "ERROR: attempt to assign new instance attribute succeeded")
        except AttributeError as a_exc:
            # DEBUG
            print(
                "ATTR ERROR ATTEMPTING TO SET lilBigMsg.foo: " + str(a_exc))
            # END
            pass
        if '__dict__' in dir(lil_big_msg):
            print('\nlilBigMsg INSTANCE DICTIONARY')
            for exc in list(lil_big_msg.__dict__.keys()):
                print("%-20s %s" % (exc, lil_big_msg.__dict__[exc]))

        # lilBigMsg.name is a property
        try:
            lil_big_msg.name = 'boo'
            self.fail("ERROR: attempt to change message name succeeded")
        except AttributeError:
            pass
        self.assertEqual(msg_spec.name, lil_big_msg.name)
        # we don't have any nested enums or messages
        self.assertEqual(0, len(lil_big_msg.enums))
        self.assertEqual(0, len(lil_big_msg.msgs))
        self.assertEqual(17, len(lil_big_msg.field_classes))
        # number of fields in instance
        self.assertEqual(17, len(lil_big_msg))
        for i in range(len(lil_big_msg)):
            self.assertEqual(values[i], lil_big_msg[i].value)

        # serialize the object to the channel -----------------------
        print("\nDEBUG: PHASE A ######################################")
        nnn = lil_big_msg.write_stand_alone(chan)
        old_position = chan.position
        chan.flip()
        # after flip, limit marks the end of the serialized data
        self.assertEqual(old_position, chan.limit)
        self.assertEqual(0, chan.position)

        # deserialize the channel, making a clone of the message ----
        (read_back, nn2) = little_big_msg_cls.read(
            chan, self.str_obj_model)           # sOM is protoSpec
        self.assertIsNotNone(read_back)
        self.assertEqual(nnn, nn2)

        # verify that the messages are identical --------------------
        self.assertTrue(lil_big_msg.__eq__(read_back))
        print("\nDEBUG: PHASE B ######################################")

        # produce another message from the same values --------------
        lil_big_msg2 = little_big_msg_cls(values)
        chan2 = Channel(BUFSIZE)
        nnn = lil_big_msg2.write_stand_alone(chan2)
        chan2.flip()
        (copy2, nn3) = little_big_msg_cls.read(chan2, self.str_obj_model)
        self.assertIsNotNone(copy2)
        self.assertEqual(nnn, nn3)
        self.assertTrue(lil_big_msg.__eq__(copy2))
        self.assertTrue(lil_big_msg2.__eq__(copy2))

        # test clear(): resets both position and limit to zero
        chan2.position = 97
        chan2.limit = 107
        chan2.clear()
        self.assertEqual(0, chan2.limit)
        self.assertEqual(0, chan2.position)
class TestFixedLen(unittest.TestCase):
    """
    Test encoding and decoding fixed-length data types, particularly
    at boundary values.
    """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def round_trip32(self, nnn):
        """
        Write a 32-bit integer as the first and only field in a buffer,
        then read it back, checking the value and channel offsets.
        """
        chan = Channel(LEN_BUFF)

        # -- write the 32-bit value, preceded by its field header ----
        field_nbr = 1 + self.rng.next_int16(1024)
        write_b32_field(chan, nnn, field_nbr)
        chan.flip()

        # -- read back the header (itself a varint) ------------------
        field_type, field_nbr2 = read_field_hdr(chan)
        hdr_end = chan.position
        self.assertEqual(PrimTypes.B32, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.B32)),
            hdr_end)

        # -- then the fixed-length value itself ----------------------
        decoded = read_raw_b32(chan)
        value_end = chan.position
        self.assertEqual(nnn, decoded)
        # a B32 body is always exactly four bytes
        self.assertEqual(hdr_end + 4, value_end)

    def round_trip64(self, nnn):
        """
        Write a 64-bit integer as the first and only field in a buffer,
        then read it back, checking the value and channel offsets.
        """
        chan = Channel(LEN_BUFF)

        # -- write the 64-bit value, preceded by its field header ----
        field_nbr = 1 + self.rng.next_int16(1024)
        write_b64_field(chan, nnn, field_nbr)
        chan.flip()

        # -- read back the header (itself a varint) ------------------
        field_type, field_nbr2 = read_field_hdr(chan)
        hdr_end = chan.position
        self.assertEqual(PrimTypes.B64, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.B64)),
            hdr_end)

        # -- then the fixed-length value itself ----------------------
        decoded = read_raw_b64(chan)
        value_end = chan.position
        self.assertEqual(nnn, decoded)
        # a B64 body is always exactly eight bytes
        self.assertEqual(hdr_end + 8, value_end)

    def test_encode_decode(self):
        """ Round-trip the usual boundary values (plus 42). """
        for value in (0, 42, 0x7f, 0x80, 0x3fff, 0x4000,
                      0x1fffff, 0x200000, 0xfffffff, 0x10000000,
                      0xffffffff):
            self.round_trip32(value)
        for value in (0, 42, 0x7f, 0x80, 0x3fff, 0x4000,
                      0x1fffff, 0x200000, 0xfffffff, 0x10000000,
                      0x7ffffffff, 0x800000000,
                      0x3ffffffffff, 0x40000000000,
                      0x1ffffffffffff, 0x2000000000000,
                      0xffffffffffffff, 0x100000000000000,
                      0x7fffffffffffffff, 0x8000000000000000,
                      0xffffffffffffffff):
            self.round_trip64(value)
class TestRandomDir(unittest.TestCase):
    """ Test building quasi-random data files and directory structures. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################

    def do_test_random_dir(self, hashtype):
        """ Test building random directories with a specific SHA hash type. """
        check_hashtype(hashtype)
        rng = self.rng

        depth = 1 + rng.next_int16(3)       # so 1 to 3
        width = 1 + rng.next_int16(16)      # so 1 to 16
        blk_count = 1 + rng.next_int16(3)   # so 1 to 3
        # last block will usually be only partially populated
        max_len = BuildList.BLOCK_SIZE * (blk_count - 1) + \
            rng.next_int16(BuildList.BLOCK_SIZE)
        min_len = 1

        # we want the directory name to be unique
        path_to_dir = os.path.join('tmp', rng.next_file_name(8))
        while os.path.exists(path_to_dir):
            path_to_dir = os.path.join('tmp', rng.next_file_name(8))
        rng.next_data_dir(path_to_dir, depth, width, max_len, min_len)

        data = bytearray(max_len)   # that many null bytes
        rng.next_bytes(data)        # fill with random data

        # digest the data in memory; the lambdas keep attribute lookups
        # (e.g. hashlib.sha3_256) lazy, exactly as the if/elif chain did
        # pylint: disable=no-member
        digesters = {
            HashTypes.SHA1: lambda: hashlib.sha1(),
            HashTypes.SHA2: lambda: hashlib.sha256(),
            HashTypes.SHA3: lambda: hashlib.sha3_256(),
            HashTypes.BLAKE2B: lambda: hashlib.blake2b(digest_size=32),
        }
        if hashtype not in digesters:
            raise NotImplementedError
        sha = digesters[hashtype]()
        sha.update(data)
        hash_ = sha.hexdigest()

        # write the same data to a uniquely named file under tmp/
        file_name = rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)
        with open(path_to_file, 'wb') as file:
            file.write(data)

        # hash the file on disk and compare with the in-memory digest
        hex_funcs = {
            HashTypes.SHA1: file_sha1hex,
            HashTypes.SHA2: file_sha2hex,
            HashTypes.SHA3: file_sha3hex,
            HashTypes.BLAKE2B: file_blake2b_hex,
        }
        if hashtype not in hex_funcs:
            raise NotImplementedError
        file_hash = hex_funcs[hashtype](path_to_file)
        self.assertEqual(hash_, file_hash)

    def test_random_dir(self):
        """ Test building random directories with supported SHA hash types. """
        for hashtype in HashTypes:
            self.do_test_random_dir(hashtype)
class TestFieldTypes(unittest.TestCase):
    """
    Actually tests the method used for instantiating and importing
    an instance of the FieldTypes class.
    """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_new_fieldtypes(self):
        """ Test the new definition of FieldTypes introduced 2017-01-30. """
        # members are numbered 0 .. F_BYTES32 with no gaps
        self.assertEqual(len(FieldTypes), FieldTypes.F_BYTES32.value + 1)
        for ndx, _ in enumerate(FieldTypes):
            self.assertEqual(_.value, ndx)
            # round trip member to sym and back to member
            self.assertEqual(FieldTypes.from_sym(_.sym), _)

    def test_constants(self):
        """
        Verify that our constants are immutable and conversion between
        string and integer forms works as expected.
        """
        self.assertEqual(len(FieldTypes), 18)
        # pylint: disable=unsubscriptable-object
        self.assertEqual(FieldTypes.V_BOOL.value, 0)
        self.assertEqual(FieldTypes.V_BOOL.sym, 'vbool')
        self.assertEqual(FieldTypes.F_BYTES32.value, len(FieldTypes) - 1)
        self.assertEqual(FieldTypes.F_BYTES32.sym, 'fbytes32')

    def test_len_funcs(self):
        """
        Verify that varint length functions return correct values.

        Tests are performed using randomly selected field numbers
        (in the range 0 .. (2^16)-1) and integer values in the same
        range.
        """
        ndx = self.rng.next_int16()     # random field number
        value = self.rng.next_int16()   # random integer value

        # == varint types ===========================================
        # ERROR because field_hdr_len 2nd param should be PrimType
        # ********************************************************
        # a bool body is always exactly one byte
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_BOOL)
        self.assertEqual(len_ + 1, typed.vbool_len(True, ndx))
        self.assertEqual(len_ + 1, typed.vbool_len(False, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_ENUM)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.venum_len(value, ndx))
        # self.assertEqual( x, typed.vEnumLen(-x, n) )

        value = self.rng.next_int32()
        self.assertTrue(value >= 0)
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_UINT32)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.vuint32_len(value, ndx))

        # signed 32: shift the random value into the negative range,
        # then check against the zig-zag encoded length
        value = self.rng.next_int32()
        self.assertTrue(value >= 0)
        value = value - 0x80000000
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_SINT32)
        ppp = typed.encode_sint32(value)
        zzz = len_ + raw.length_as_varint(ppp)
        self.assertEqual(zzz, typed.vsint32_len(value, ndx))

        value = self.rng.next_int64()
        self.assertTrue(value >= 0)
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_UINT64)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.vuint64_len(value, ndx))

        # signed 64: same pattern as signed 32 above
        value = self.rng.next_int64()
        self.assertTrue(value >= 0)
        value = value - 0x8000000000000000
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_SINT64)
        ppp = typed.encode_sint64(value)
        zzz = len_ + raw.length_as_varint(ppp)
        self.assertEqual(zzz, typed.vsint64_len(value, ndx))

        # == fixed length 4 byte ====================================
        value = self.rng.next_int64()   # value should be ignored
        self.assertTrue(value >= 0)
        value = value - 0x8000000000000000
        # x is a signed 64 bit value whose value should be irrelevant
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_UINT32)
        self.assertEqual(len_ + 4, typed.fuint32_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_SINT32)
        self.assertEqual(len_ + 4, typed.fsint32_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_FLOAT)
        self.assertEqual(len_ + 4, typed.ffloat_len(value, ndx))

        # == fixed length 8 byte ====================================
        # n is that signed 64 bit value whose value should be irrelevant
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_UINT64)
        self.assertEqual(len_ + 8, typed.fuint64_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_SINT64)
        self.assertEqual(len_ + 8, typed.fsint64_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_DOUBLE)
        self.assertEqual(len_ + 8, typed.fdouble_len(value, ndx))

        # == LEN PLUS types =========================================
        def do_len_plus_test(length, ndx):
            """
            Verify that fields of interesting lengths have expected
            raw encodings.
            """
            string = [0] * length
            k = len(string)
            len_ = raw.field_hdr_len(ndx, FieldTypes.L_BYTES)
            # header + length-prefix varint + payload bytes
            expected_len = len_ + raw.length_as_varint(k) + k
            self.assertEqual(expected_len, typed.lbytes_len(string, ndx))

        # -- lString ---------------------------------------
        string = self.rng.next_file_name(256)
        len_ = raw.field_hdr_len(ndx, FieldTypes.L_STRING)
        k = len(string)
        expected_len = len_ + raw.length_as_varint(k) + k
        self.assertEqual(expected_len, typed.l_string_len(string, ndx))

        # -- lBytes ----------------------------------------
        # boundary lengths where the length-prefix varint grows
        do_len_plus_test(0x7f, ndx)
        do_len_plus_test(0x80, ndx)
        do_len_plus_test(0x3fff, ndx)
        do_len_plus_test(0x4000, ndx)

        # -- lMsg ------------------------------------------
        # XXX STUB

        # -- fixed length byte arrays -------------------------------
        buf = [0] * 512     # length functions should ignore actual size

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES16)
        self.assertEqual(len_ + 16, typed.fbytes16_len(buf, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES20)
        self.assertEqual(len_ + 20, typed.fbytes20_len(buf, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES32)
        self.assertEqual(len_ + 32, typed.fbytes32_len(buf, ndx))
class TestFieldImpl(unittest.TestCase):
    """
    Test generated field classes and FieldSpec behavior against the
    protocol/message registries.
    """

    def setUp(self):
        self.rng = SimpleRNG(time.time())
        # data = StringIO(ZOGGERY_PROTO_SPEC)
        # p = StringProtoSpecParser(data)   # data should be file-like
        # self.str_obj_model = p.parse()     # object model from string serialization
        # self.proto_name = self.str_obj_model.name  # the dotted name of the
        # protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def make_registries(self, protocol):
        """ Build and return a (node, proto, msg) registry triple. """
        node_reg = R.NodeReg()
        proto_reg = R.ProtoReg(protocol, node_reg)
        msg_reg = R.MsgReg(proto_reg)
        return (node_reg, proto_reg, msg_reg)

    def le_msg_values(self):
        """ returns a list """
        timestamp = int(time.time())
        node_id = [0] * 20
        key = [0] * 20
        length = self.rng.next_int32(256 * 256)
        # let's have some random bytes
        self.rng.next_bytes(node_id)
        self.rng.next_bytes(key)
        by_ = 'who is responsible'
        path = '/home/jdd/tarballs/something.tar.gz'
        return [timestamp, node_id, key, length, by_, path]

    def lil_big_msg_values(self):
        """
        This returns a list of random-ish values in order by field type
        so that values[_F_FLOAT], for example, is a random float value.
        """
        values = []
        # 2016-03-30 This is NOT in sync with littleBigTest.py,
        # because I have added a None for lMsg at _L_MSG
        values.append(self.rng.next_boolean())      # vBoolReqField      0
        values.append(self.rng.next_int16())        # vEnumReqField      1
        values.append(self.rng.next_int32())        # vInt32ReqField     2
        values.append(self.rng.next_int64())        # vInt64ReqField     3
        values.append(self.rng.next_int32())        # vuInt32ReqField    4
        values.append(self.rng.next_int32())        # vuInt64ReqField    5
        values.append(self.rng.next_int64())        # vsInt32ReqField    6
        values.append(self.rng.next_int64())        # vsInt64ReqField    7
        values.append(self.rng.next_int32())        # fsInt32ReqField    8
        values.append(self.rng.next_int32())        # fuInt32ReqField    9
        values.append(self.rng.next_real())         # fFloatReqField    10
        values.append(self.rng.next_int64())        # fsInt64ReqField   11
        values.append(self.rng.next_int64())        # fuInt64ReqField   12
        values.append(self.rng.next_real())         # fDoubleReqField   13
        # lStringReqField 14
        values.append(self.rng.next_file_name(16))
        # variable-length byte field: 16 .. 64 random bytes
        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))              # lBytesReqField    15
        values.append(None)                         # <-- for lMsg      16
        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))              # fBytes16ReqField  17
        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))              # fBytes20ReqField  18
        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))              # fBytes32ReqField  19
        return values

    # actual unit tests #############################################

    def check_field_impl_against_spec(self,
                                      proto_name, msg_name,  # not actually tested
                                      field_spec, value):    # significant for tests
        """
        Generate a field class from a FieldSpec and verify that an
        instance holding `value` reports the spec's attributes.
        """
        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)     # a class
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("    %-20s %s" % (exc, cls.__dict__[exc]))
        self.assertIsNotNone(cls)
        file = cls(value)                                   # an instance
        self.assertIsNotNone(file)
        self.assertTrue(isinstance(file, cls))
        # instance attributes -----------------------------
        # we verify that the properties work correctly
        self.assertEqual(field_spec.name, file._name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)         # not an elegant test
        # instance attribute ------------------------------
        # we can read back the value assigned to the instance
        self.assertEqual(value, file.value)
        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for f
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))

    def test_field_impl(self):
        """ Check a generated field class for every field type. """
        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)
        values = self.lil_big_msg_values()
        # DEBUG
        print("testFieldImpl: there are %d values" % len(values))
        # END
        # There are 18 values corresponding to the 18 field types;
        # _L_MSG should be skipped
        for tstamp in range(FieldTypes.F_BYTES32 + 1):
            # DEBUG
            print("testFieldImpl: t = %d" % tstamp)
            # END
            if tstamp == FieldTypes.L_MSG:
                continue
            # default quantifier is Q_REQ_, default is None
            field_name = 'field%d' % tstamp
            field_spec = M.FieldSpec(
                msg_reg, field_name, tstamp, field_nbr=tstamp + 100)
            self.check_field_impl_against_spec(
                PROTOCOL_UNDER_TEST, MSG_UNDER_TEST,
                field_spec, values[tstamp])

    # TEST FIELD SPEC -----------------------------------------------

    def do_field_spec_test(self, name, field_type, quantifier=M.Q_REQUIRED,
                           field_nbr=0, default=None):
        """
        Build a FieldSpec with the given attributes and verify they
        are reported back, including the repr form.
        """
        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)
        # XXX Defaults are ignored for now.
        file = M.FieldSpec(
            msg_reg, name, field_type, quantifier, field_nbr, default)
        self.assertEqual(name, file.name)
        self.assertEqual(field_type, file.field_type_ndx)
        self.assertEqual(quantifier, file.quantifier)
        self.assertEqual(field_nbr, file.field_nbr)
        if default is not None:
            self.assertEqual(default, file.default)
        expected_repr = "%s %s%s @%d \n" % (
            name, file.field_type_name, M.q_name(quantifier), field_nbr)
        # DEFAULTS NOT SUPPORTED
        self.assertEqual(expected_repr, file.__repr__())

    def test_quantifiers(self):
        """ Verify the printable form of each quantifier. """
        q_name = M.q_name
        self.assertEqual('', q_name(M.Q_REQUIRED))
        self.assertEqual('?', q_name(M.Q_OPTIONAL))
        self.assertEqual('*', q_name(M.Q_STAR))
        self.assertEqual('+', q_name(M.Q_PLUS))

    def test_field_spec(self):
        """ Exercise FieldSpec with a few type/quantifier combinations. """
        # default is not implemented yet
        self.do_field_spec_test('foo', FieldTypes.V_UINT32, M.Q_REQUIRED, 9)
        self.do_field_spec_test('bar', FieldTypes.V_SINT32, M.Q_STAR, 17)
        self.do_field_spec_test(
            'node_id', FieldTypes.F_BYTES20, M.Q_OPTIONAL, 92)
        self.do_field_spec_test('tix', FieldTypes.V_BOOL, M.Q_PLUS, 147)
class TestVarint(unittest.TestCase):
    """ Test reading and writing low-level data types. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # actual unit tests #############################################

    def test_length_as_varint(self):
        """
        Verify the length in bytes of various hex values is as expected.

        A varint carries 7 payload bits per byte, so the byte count
        steps up at each power of 2**7.
        """
        len_ = length_as_varint
        self.assertEqual(1, len_(0))
        self.assertEqual(1, len_(0x7f))
        self.assertEqual(2, len_(0x80))
        self.assertEqual(2, len_(0x3fff))
        self.assertEqual(3, len_(0x4000))
        self.assertEqual(3, len_(0x1fffff))
        self.assertEqual(4, len_(0x200000))
        self.assertEqual(4, len_(0xfffffff))
        self.assertEqual(5, len_(0x10000000))
        self.assertEqual(5, len_(0x7ffffffff))
        self.assertEqual(6, len_(0x800000000))
        self.assertEqual(6, len_(0x3ffffffffff))
        self.assertEqual(7, len_(0x40000000000))
        self.assertEqual(7, len_(0x1ffffffffffff))
        self.assertEqual(8, len_(0x2000000000000))
        self.assertEqual(8, len_(0xffffffffffffff))
        self.assertEqual(9, len_(0x100000000000000))
        self.assertEqual(9, len_(0x7fffffffffffffff))
        self.assertEqual(10, len_(0x8000000000000000))
        # FIX: was '(self.rng.next_int64() > 1)', a boolean comparison
        # yielding 0 or 1.  The original comment ("parenthesize the
        # shift term or convert >1 to /2") shows a right shift was
        # intended, adding a random 63-bit offset to the 68-bit base.
        # Either way the encoded length stays 10 bytes, so the
        # assertion is unchanged; the test now covers random values.
        big_number = 0x80000000000000000 + (self.rng.next_int64() >> 1)
        self.assertEqual(10, len_(big_number))

    # MAKE SURE THIS WORKS WITH SIGNED NUMBERS

    def round_trip(self, nnn):
        """
        Test writing and reading a varint as the first and only field
        in a buffer.
        """
        # -- write varint -------------------------------------------
        field_nbr = 1 + self.rng.next_int16(1024)
        chan = Channel(LEN_BUFFER)
        write_varint_field(chan, nnn, field_nbr)
        chan.flip()

        # -- read varint --------------------------------------------
        # first the header (which is a varint) ------------
        (prim_type, field_nbr2) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.VARINT, prim_type)
        self.assertEqual(field_nbr, field_nbr2)
        # the header is the field number shifted over the 3 type bits
        self.assertEqual(length_as_varint(field_nbr << 3), offset2)

        # then the varint proper --------------------------
        varint_ = read_raw_varint(chan)
        chan.flip()
        offset3 = chan.limit
        self.assertEqual(nnn, varint_)
        self.assertEqual(offset2 + length_as_varint(nnn), offset3)

    def test_encode_decode(self):
        """
        Test converting certain values to varint and back again.

        All varints are handled as 64 bit unsigned ints.  WE MAY
        SOMETIMES WANT TO RESTRICT THEM TO uint32s.  Other than 42,
        these are the usual border values.
        """
        self.round_trip(0)
        self.round_trip(42)
        self.round_trip(0x7f)
        self.round_trip(0x80)
        self.round_trip(0x3fff)
        self.round_trip(0x4000)
        self.round_trip(0x1fffff)
        self.round_trip(0x200000)
        self.round_trip(0xfffffff)
        self.round_trip(0x10000000)
        self.round_trip(0x7ffffffff)
        self.round_trip(0x800000000)
        self.round_trip(0x3ffffffffff)
        self.round_trip(0x40000000000)
        self.round_trip(0x1ffffffffffff)
        self.round_trip(0x2000000000000)
        self.round_trip(0xffffffffffffff)
        self.round_trip(0x100000000000000)
        self.round_trip(0x7fffffffffffffff)
        self.round_trip(0x8000000000000000)
        self.round_trip(0xffffffffffffffff)