Example #1
class TestBuildList(unittest.TestCase):
    """ Test BuildList.listgen functionality. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_listgen_test(self, title, hashtype, dirstruc):
        """
        Test buildlist functionality for specific hash type and DirStruc.
        """

        # MAJOR ERROR: This code logs to .dvcz/buildlist, the actual
        # project log!  Fix is:
        dvcz_dir = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(dvcz_dir):
            dvcz_dir = os.path.join('tmp', self.rng.next_file_name(8))
        os.mkdir(dvcz_dir, 0o744)

        # create the BuildList from what's in DATA_DIR
        # -- RESTRUCTURE and just do this once for each hashtype -- in
        #    other words, this should be in a higher level function, one
        #    which runs a test for each dirstruc
        BuildList.list_gen(
            title=title,
            data_dir=DATA_DIR,
            dvcz_dir=dvcz_dir,  # THE FIX
            # list_file=        # lastBuildList
            logging=True,
            u_path=os.path.join('tmp', str(hashtype.value), dirstruc.name),
            hashtype=hashtype,
            using_indir=True
        )

        # THE SAME BUILDLIST IS USED FOR EACH OF THE THREE DIRSTRUCS
        # UNFINISHED

        # Compare the BuildList with
        # UNFINISHED

    def test_build_list(self):
        """ Test listgen functionality for suppored hash types. """

        # DEBUG
        # print("DATA_DIR is '%s'" % DATA_DIR)
        # END
        self.assertTrue(os.path.exists(DATA_DIR))
        self.assertTrue(os.path.exists(RSA_FILE))

        for hashtype in HashTypes:
            for dirstruc in DirStruc:
                self.do_listgen_test('SHA test', hashtype, dirstruc)
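The RESTRUCTURE comment above suggests creating the scratch dvcz directory and calling list_gen once per hash type from a higher-level method that loops over DirStruc. A rough sketch of such a method, assuming the same module-level names as the test above (os, BuildList, DirStruc, DATA_DIR); it is illustrative only, not the project's actual fix:

    def do_listgen_tests_for_hashtype(self, title, hashtype):
        """ Hypothetical driver: run listgen once per DirStruc for one hash type. """
        # use a scratch dvcz directory so the real project log is untouched
        dvcz_dir = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(dvcz_dir):
            dvcz_dir = os.path.join('tmp', self.rng.next_file_name(8))
        os.mkdir(dvcz_dir, 0o744)

        for dirstruc in DirStruc:
            BuildList.list_gen(
                title=title,
                data_dir=DATA_DIR,
                dvcz_dir=dvcz_dir,
                logging=True,
                u_path=os.path.join('tmp', str(hashtype.value), dirstruc.name),
                hashtype=hashtype,
                using_indir=True)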
Example #2
 def test_random_value(self):
     """
     Verify that hashlib.sha2 returns the same digest for a few
     quasi-random values.
     """
     rng = SimpleRNG()
     for _ in range(4):
         count = 16 + rng.next_int16(48)
         data = rng.some_bytes(count)
         my_hex = XLSHA3(data).hexdigest()
         expected = hashlib.sha3_256(data).hexdigest()
         self.assertEqual(my_hex, expected)
Example #3
 def test_random_value(self):
     """
     Verify that XLBLAKE2B_256 and pyblake2.blake2b return the same
     digest for a few quasi-random values.  This test only makes sense
     on Python 3.6 or later, whose hashlib also supports blake2.
     """
     if sys.version_info >= (3, 6):
         rng = SimpleRNG()
         for _ in range(4):
             count = 16 + rng.next_int16(48)
             data = rng.some_bytes(count)
             my_hex = XLBLAKE2B_256(data).hexdigest()
             expected = pyblake2.blake2b(data, digest_size=32).hexdigest()
             self.assertEqual(my_hex, expected)
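For reference, hashlib itself provides blake2b from Python 3.6 on, so the same comparison can be made against the standard library as well. A minimal self-contained sketch, independent of XLBLAKE2B_256 (pyblake2 assumed installed, as in the test above):

import hashlib

import pyblake2  # assumed available, as in the test above

data = b'a few quasi-random bytes'
assert hashlib.blake2b(data, digest_size=32).hexdigest() == \
    pyblake2.blake2b(data, digest_size=32).hexdigest()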
Example #4
class TestPKCS7Padding(unittest.TestCase):
    """ test PKCS7 padding """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_padding(self, length):
        """ length is in bytes. """

        data_ = bytearray(length)
        self.rng.next_bytes(data_)
        data = bytes(data_)

        padder = padding.PKCS7(AES_BLOCK_BITS).padder()
        padded_data = padder.update(data) + padder.finalize()

        # round up to the next higher number of whole blocks
        if length % AES_BLOCK_BYTES == 0:
            expected_len = length + AES_BLOCK_BYTES
        else:
            expected_len = ((length + AES_BLOCK_BYTES - 1) // AES_BLOCK_BYTES)\
                * AES_BLOCK_BYTES
        delta = expected_len - length  # number of bytes of padding
        self.assertEqual(padded_data[-1], delta)

        self.assertEqual(len(padded_data), expected_len)

        unpadder = padding.PKCS7(AES_BLOCK_BITS).unpadder()
        data_out = unpadder.update(padded_data) + unpadder.finalize()
        self.assertEqual(data_out, data)

    def test_padding(self):
        """ test PKCS7 padding """

        self.do_test_padding(7)
        self.do_test_padding(15)
        self.do_test_padding(16)
        self.do_test_padding(17)
        self.do_test_padding(31)
        self.do_test_padding(32)
        self.do_test_padding(33)
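The expected-length arithmetic above encodes the PKCS#7 rule: pad up to the next whole block, and add a full extra block when the input is already block-aligned. A small worked sketch of that rule, assuming 16-byte AES blocks (AES_BLOCK_BYTES == 16):

AES_BLOCK_BYTES = 16   # assumption; matches the usual AES block size

def padded_len(length):
    """ Length of a PKCS#7-padded message, per the rule used in the test. """
    if length % AES_BLOCK_BYTES == 0:
        return length + AES_BLOCK_BYTES
    return ((length + AES_BLOCK_BYTES - 1) // AES_BLOCK_BYTES) * AES_BLOCK_BYTES

assert padded_len(15) == 16    # 1 byte of padding, each byte 0x01
assert padded_len(16) == 32    # a whole extra block, each byte 0x10
assert padded_len(17) == 32    # 15 bytes of padding, each byte 0x0f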
Example #5
class TestLenPlus(unittest.TestCase):
    """ Test length-preceded fields and data types. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def round_trip(self, string):
        """
        Verify that a unicode string converted to wire format and then
        back again is the same string.

        This tests writing and reading a string of bytes as the first and
        only field in a buffer.
        """
        chan = Channel(LEN_BUFF)

        # -- write the bytearray ------------------------------------
        field_nbr = 1 + self.rng.next_int16(1024)
        write_len_plus_field(chan, string, field_nbr)
        chan.flip()

#       # DEBUG
#       print("buffer after writing lenPlus field: " + str(chan.buffer))
#       # END

        # -- read the value written ---------------------------------
        # first the header (which is a varint) ------------
        (field_type, field_nbr2,) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.LEN_PLUS, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.LEN_PLUS)),
            offset2)

        # then the actual value written -------------------
        tstamp = read_raw_len_plus(chan)
        offset3 = chan.position
        self.assertEqual(string, tstamp)
        self.assertEqual(
            offset2 +
            length_as_varint(
                len(string)) +
            len(string),
            offset3)

    def test_encode_decode(self):
        """ Test round tripping utf-8 strings. """
        self.round_trip(''.encode('utf8'))
        self.round_trip('ndx_'.encode('utf8'))
        self.round_trip('should be a random string of bytes'.encode('utf8'))
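The header assertions above depend on the field header being written as a varint. As a rough illustration of why a field number up to 1024 needs only one or two header bytes, here is the generic LEB128/protobuf-style length rule (seven payload bits per byte); this is not necessarily how fieldz's length_as_varint is implemented:

def varint_len(value):
    """ Byte length of an unsigned LEB128-style varint (illustrative only). """
    nbytes = 1
    while value >= 0x80:
        value >>= 7
        nbytes += 1
    return nbytes

assert varint_len(0x7f) == 1
assert varint_len(0x80) == 2
assert varint_len(0x3fff) == 2
assert varint_len(0x4000) == 3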
Example #6
class TestAddingFunctions(unittest.TestCase):
    """ Test adding functions to an existing class. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_adding_funcs(self):
        """
        Demonstrate that functions can be added to instances dynamically
        and that 'self' within the added functions is interpreted
        correctly.
        """

        obj = SimpleClass()
        x_val = self.rng.next_int16()
        y_val = self.rng.next_int16()
        q_val = self.rng.next_int16()
        r_val = self.rng.next_int16()

        # Test adding a function to an instance; this syntax is
        # specific to Python3.
        #           'object'           'instance'  'owner'
        # pylint: disable=no-member
        obj.adder = simple_adder.__get__(SimpleClass, obj)
        self.assertEqual(obj.adder(x_val, y_val), x_val + y_val)

        # Confirm that 'self' is interpreted correctly in the added
        # functions.
        # pylint: disable=no-member
        obj.plus42 = add42.__get__(SimpleClass, obj)
        self.assertEqual(obj.plus42(q_val), q_val + 42)

        # Newly added methods interacting with methods defined in the
        # class.
        self.assertEqual(
            obj.subtractor(obj.plus42(q_val), obj.adder(x_val, r_val)),
            (q_val + 42) - (x_val + r_val) + 42)
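For comparison, the standard-library route for attaching a bound method to a single instance is types.MethodType, which makes the self binding explicit. A self-contained sketch (Demo and adder below are stand-ins, not the SimpleClass or simple_adder used in the test):

import types

class Demo:
    """ Stand-in class, unrelated to the SimpleClass under test. """
    pass

def adder(self, aaa, bbb):
    """ Ignores self; simply adds its two arguments. """
    return aaa + bbb

obj = Demo()
obj.adder = types.MethodType(adder, obj)    # binds self to obj
assert obj.adder(2, 3) == 5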
Example #7
class TestMerkleLeaf(unittest.TestCase):
    """ Test MerkleLeaf functionality. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific SHA type. """

        check_hashtype(hashtype)
        # pylint: disable=redefined-variable-type
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()

        file_name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = MerkleLeaf(file_name, hashtype, hash0)
        self.assertEqual(file_name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        file_name2 = file_name
        while file_name2 == file_name:
            file_name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = MerkleLeaf(file_name2, hashtype, hash1)
        self.assertEqual(file_name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertTrue(leaf0.equal(leaf0))
        self.assertFalse(leaf0.equal(leaf1))

        # XXX USE NLHTree instead
        #pair0    = leaf0.toPair()
        #leaf0bis = MerkleLeaf.createFromPair(pair0)
        #self.assertEqual(leaf0bis, leaf0)

        #pair1    = leaf1.toPair()
        #leaf1bis = MerkleLeaf.createFromPair(pair1)
        #self.assertEqual(leaf1bis, leaf1)

    def test_simple_constructor(self):
        """ Test constructor for various SHA types. """
        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype=hashtype)
Example #8
class TestPBKDF2(unittest.TestCase):
    """ Test key derivation function pbkdf2 """
    def setUp(self):
        self.rng = SimpleRNG()

    def test_pbkdf2(self):
        """ Do a simple test of line-of-spaces caching. """

        for hashtype in [HashTypes.SHA1, HashTypes.SHA2]:
            salt = self.rng.some_bytes(8)
            key = pbkdf2('foo', salt, hashtype)
            # for now, that's good enough
            _ = key
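The pbkdf2 function exercised here belongs to the package under test; the standard library exposes the same derivation as hashlib.pbkdf2_hmac, which can serve as a reference point. A minimal sketch (the iteration count is an arbitrary illustrative choice):

import hashlib
import os

salt = os.urandom(8)
key = hashlib.pbkdf2_hmac('sha256', b'foo', salt, 100000, dklen=32)
assert len(key) == 32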
Example #9
class TestMerkleTree2(unittest.TestCase):
    """ Test MerkleTree behavior with deeper directories. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_deepish_trees(self, hashtype):
        """
        Build a directory of random data, then its MerkleTree, then
        round trip to a serialization and back.
        """

        tree_top = os.path.join('tmp', self.rng.next_file_name(MAX_NAME_LEN))
        while os.path.exists(tree_top):
            tree_top = os.path.join(
                'tmp', self.rng.next_file_name(MAX_NAME_LEN))

        # Generate a quasi-random data directory, 7 deep, up to 5 files/dir
        self.rng.next_data_dir(tree_top, depth=7, width=5, max_len=4096)

        # Build a MerkleTree specifying the directory.
        tree = MerkleTree.create_from_file_system(tree_top, hashtype)

        # ROUND TRIP 1 ----------------------------------------------

        # Serialize it.
        ser = tree.__str__()

        # Deserialize to make another MerkleTree.
        tree2 = MerkleTree.create_from_serialization(ser, hashtype)

        self.assertTrue(tree2.__eq__(tree))
        self.assertEqual(tree2, tree)           # identical test

        # ROUND TRIP 2 ----------------------------------------------
        strings = ser.split('\n')
        strings = strings[:-1]
        tree3 = MerkleTree.create_from_string_array(strings, hashtype)
        self.assertEqual(tree3, tree)

        # ROUND TRIP 3 ----------------------------------------------
        filename = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(filename):
            filename = os.path.join('tmp', self.rng.next_file_name(8))
        with open(filename, 'w') as file:
            file.write(ser)

        tree4 = MerkleTree.create_from_file(filename, hashtype)
        self.assertEqual(tree4, tree)

    def test_deepish_trees(self):
        """ Test behavior of deeper trees using various SHA hash types. """

        for hashtype in HashTypes:
            self.do_test_deepish_trees(hashtype)
Example #10
class TestCrypto(unittest.TestCase):
    """ Test "crypto" functionality (line of spaces cache) for xlattic_py. """
    def setUp(self):
        self.rng = SimpleRNG()

    def test_spaces(self):
        """ Do a simple test of line-of-spaces caching. """

        for _ in range(4):
            count = self.rng.next_int16(32)
            spaces = SP.get_spaces(count)
            self.assertEqual(len(spaces), count)
            for ch_ in spaces:
                self.assertEqual(ch_, ' ')
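SP.get_spaces is a cache of strings of spaces keyed by length. One simple way such a cache could be written (an illustrative sketch, not the xlattice implementation) is to memoize on the requested count:

from functools import lru_cache

@lru_cache(maxsize=None)
def get_spaces(count):
    """ Return a string of `count` spaces, caching each length seen. """
    return ' ' * count

assert get_spaces(4) == '    '
assert len(get_spaces(32)) == 32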
Example #11
class TestMerkleTree2(unittest.TestCase):
    """ Test MerkleTree behavior with deeper directories. """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_deepish_trees(self, hashtype):
        """
        Build a directory of random data, then its MerkleTree, then
        round trip to a serialization and back.
        """

        tree_top = os.path.join('tmp', self.rng.next_file_name(MAX_NAME_LEN))
        while os.path.exists(tree_top):
            tree_top = os.path.join('tmp',
                                    self.rng.next_file_name(MAX_NAME_LEN))

        # Generate a quasi-random data directory, 7 deep, up to 5 files/dir
        self.rng.next_data_dir(tree_top, depth=7, width=5, max_len=4096)

        # Build a MerkleTree specifying the directory.
        tree = MerkleTree.create_from_file_system(tree_top, hashtype)

        # ROUND TRIP 1 ----------------------------------------------

        # Serialize it.
        ser = tree.__str__()

        # Deserialize to make another MerkleTree.
        tree2 = MerkleTree.create_from_serialization(ser, hashtype)

        self.assertTrue(tree2.__eq__(tree))
        self.assertEqual(tree2, tree)  # identical test

        # ROUND TRIP 2 ----------------------------------------------
        strings = ser.split('\n')
        strings = strings[:-1]
        tree3 = MerkleTree.create_from_string_array(strings, hashtype)
        self.assertEqual(tree3, tree)

        # ROUND TRIP 3 ----------------------------------------------
        filename = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(filename):
            filename = os.path.join('tmp', self.rng.next_file_name(8))
        with open(filename, 'w') as file:
            file.write(ser)

        tree4 = MerkleTree.create_from_file(filename, hashtype)
        self.assertEqual(tree4, tree)

    def test_deepish_trees(self):
        """ Test behavior of deeper trees using various SHA hash types. """

        for hashtype in HashTypes:
            self.do_test_deepish_trees(hashtype)
Example #12
class TestNLHBase(unittest.TestCase):
    """ Test basic NLHTree functions. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_constructor(self, hashtype):
        """ Check functionality of NLHBase constructor for specifc hash. """

        name = self.rng.next_file_name(8)
        base = NLHBase(name, hashtype)
        self.assertEqual(base.name, name)
        self.assertEqual(base.hashtype, hashtype)
        root = base.root
        curt = base.cur_tree
        self.assertEqual(root.name, curt.name)

    def test_constructor(self):
        """ Check functionality of NLHBase constructor.  """

        for hashtype in HashTypes:
            self.do_test_constructor(hashtype)

    def do_test_with_simple_tree(self, hashtype):
        """ XXX STUB: test simple tree with specific hash. """

        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint:disable=no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError

        assert sha          # suppress warning

    def test_simple_tree(self):
        """ XXX STUB: test building simple tree. """
        for hashtype in HashTypes:
            self.do_test_with_simple_tree(hashtype)
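The if/elif chain above, mapping a HashTypes member to a hash constructor, recurs throughout these tests. A table-driven alternative keeps the mapping in one place; this sketch uses hashlib only and a stand-in enum, not the xlattice HashTypes:

import hashlib
from enum import Enum

class HashTypes(Enum):
    """ Stand-in for the xlattice HashTypes enum. """
    SHA1 = 1
    SHA2 = 2
    SHA3 = 3
    BLAKE2B = 4

_HASH_FACTORIES = {
    HashTypes.SHA1: hashlib.sha1,
    HashTypes.SHA2: hashlib.sha256,
    HashTypes.SHA3: hashlib.sha3_256,
    HashTypes.BLAKE2B: lambda: hashlib.blake2b(digest_size=32),
}

def new_hash(hashtype):
    """ Return a fresh hash object for the given hash type. """
    try:
        return _HASH_FACTORIES[hashtype]()
    except KeyError:
        raise NotImplementedError(hashtype)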
Example #13
    def rand_test(self, count=1000):
        """ Repeath a suite of tests N times. """

        rng = SimpleRNG(time.time())
        self.do_test(count, rng.random, ())
        self.do_test(count, rng.normalvariate, (0.0, 1.0))
        self.do_test(count, rng.lognormvariate, (0.0, 1.0))
        self.do_test(count, rng.vonmisesvariate, (0.0, 1.0))
        self.do_test(count, rng.gammavariate, (0.01, 1.0))
        self.do_test(count, rng.gammavariate, (0.1, 1.0))
        self.do_test(count, rng.gammavariate, (0.1, 2.0))
        self.do_test(count, rng.gammavariate, (0.5, 1.0))
        self.do_test(count, rng.gammavariate, (0.9, 1.0))
        self.do_test(count, rng.gammavariate, (1.0, 1.0))
        self.do_test(count, rng.gammavariate, (2.0, 1.0))
        self.do_test(count, rng.gammavariate, (20.0, 1.0))
        self.do_test(count, rng.gammavariate, (200.0, 1.0))
        self.do_test(count, rng.gauss, (0.0, 1.0))
        self.do_test(count, rng.betavariate, (3.0, 3.0))
        self.do_test(count, rng.triangular, (0.0, 1.0, 1.0 / 3.0))
Example #14
class TestPKCS7Padding(unittest.TestCase):
    """ test PKCS7 padding """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def do_test_padding(self, block_bytes, data_bytes):
        """ Both block size and data length are in bytes. """

        data = self.rng.some_bytes(data_bytes)
        extra = pkcs7_padding(data, block_bytes)
        padded = data + extra
        extra_bytes = len(extra)

        # verify that each padding byte's value equals the number of
        # padding bytes
        for extra_byte in extra:
            self.assertEqual(extra_byte, extra_bytes)  # byte contains length

        # verify that padded data structure is a whole number of blocks
        self.assertEqual(len(padded) % block_bytes, 0)

        # stripping off the padding should return the original value
        unpadded = strip_pkcs7_padding(padded, block_bytes)
        self.assertEqual(unpadded, data)

    def test_padding(self):
        """ test PKCS7 padding """

        self.do_test_padding(AES_BLOCK_BYTES, 7)
        self.do_test_padding(AES_BLOCK_BYTES, 8)
        self.do_test_padding(AES_BLOCK_BYTES, 9)

        self.do_test_padding(AES_BLOCK_BYTES, 15)
        self.do_test_padding(AES_BLOCK_BYTES, 16)
        self.do_test_padding(AES_BLOCK_BYTES, 17)

        self.do_test_padding(AES_BLOCK_BYTES, 63)
        self.do_test_padding(AES_BLOCK_BYTES, 64)
        self.do_test_padding(AES_BLOCK_BYTES, 65)
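The helpers exercised above follow the PKCS#7 (RFC 5652) rule: the pad length is block_bytes minus len(data) modulo block_bytes (a full block when the data is already aligned), and every pad byte carries that length. A minimal sketch of the rule, not necessarily the xlcrypto implementation:

def pkcs7_padding(data, block_bytes):
    """ Return the PKCS#7 pad bytes for data (illustrative sketch). """
    pad_len = block_bytes - (len(data) % block_bytes)
    return bytes([pad_len]) * pad_len

def strip_pkcs7_padding(padded, block_bytes):
    """ Remove PKCS#7 padding, checking that it is well formed. """
    pad_len = padded[-1]
    if pad_len < 1 or pad_len > block_bytes:
        raise ValueError("corrupt PKCS#7 padding")
    if padded[-pad_len:] != bytes([pad_len]) * pad_len:
        raise ValueError("corrupt PKCS#7 padding")
    return padded[:-pad_len]

assert pkcs7_padding(b'abc', 16) == b'\x0d' * 13
assert strip_pkcs7_padding(b'abc' + b'\x0d' * 13, 16) == b'abc'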
Example #15
class TestTimestamp(unittest.TestCase):
    """ Ostensibly tests BuildList timestamp.  (Why?) """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_sha1_file(self):
        """
        Verify functioning of xlu.file_sha1hex().
        """

        blk_count = 1 + self.rng.next_int16(3)     # so 1 to 3
        # last block will usually be only partially populated
        byte_count = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)

        data = bytearray(byte_count)        # that many null bytes
        self.rng.next_bytes(data)           # fill with random data
        d_val = hashlib.new('sha1')
        d_val.update(data)
        hash_ = d_val.hexdigest()

        # make a unique test file name
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        file_hash = file_sha1hex(path_to_file)

        self.assertEqual(hash_, file_hash)
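xlu.file_sha1hex itself is not shown here; an equivalent hex digest can be produced with hashlib alone, reading the file block by block rather than all at once. An illustrative sketch (the block size is an arbitrary choice):

import hashlib

def sha1_hex_of_file(path, block_size=2 ** 18):
    """ Hex SHA-1 of a file, read block by block (illustrative only). """
    sha = hashlib.sha1()
    with open(path, 'rb') as file:
        while True:
            block = file.read(block_size)
            if not block:
                break
            sha.update(block)
    return sha.hexdigest()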
Example #16
class TestFixedLen(unittest.TestCase):
    """"
    Test encoding and decoding fixed length data types, particularly at
    boundary values.
    """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def round_trip32(self, nnn):
        """
        Test writing and reading a 32-bit integer as the first and
        only field in a buffer.
        """
        chan = Channel(LEN_BUFF)

        # -- write 32-bit value -------------------------------------
        field_nbr = 1 + self.rng.next_int16(1024)
        write_b32_field(chan, nnn, field_nbr)
        chan.flip()

        # -- read 32-bit value --------------------------------------
        # first the header (which is a varint) ------------
        (field_type, field_nbr2) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.B32, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.B32)), offset2)

        # then the varint proper --------------------------
        varint_ = read_raw_b32(chan)
        offset3 = chan.position
        self.assertEqual(nnn, varint_)
        self.assertEqual(offset2 + 4, offset3)

    def round_trip64(self, nnn):
        """
        Test writing and reading a 64-bit integer as the first and
        only field in a buffer.
        """
        chan = Channel(LEN_BUFF)

        # -- write 64-bit value -------------------------------------
        field_nbr = 1 + self.rng.next_int16(1024)
        write_b64_field(chan, nnn, field_nbr)
        chan.flip()

        #       # DEBUG
        #       buf = chan.buffer
        #       print "buffer after writing varint field: ",
        #       dumpBuffer(buf)
        #       # END

        # -- read 64-bit value --------------------------------------
        # first the header (which is a varint) ------------
        (field_type, field_nbr2) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.B64, field_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(
            length_as_varint(field_hdr_val(field_nbr, PrimTypes.B64)), offset2)

        # then the varint proper --------------------------
        varint_ = read_raw_b64(chan)
        offset3 = chan.position
        self.assertEqual(nnn, varint_)
        self.assertEqual(offset2 + 8, offset3)

    def test_encode_decode(self):
        """ Test encoding and decoding boundary values. """

        self.round_trip32(0)
        self.round_trip32(42)
        self.round_trip32(0x7f)
        self.round_trip32(0x80)
        self.round_trip32(0x3fff)
        self.round_trip32(0x4000)
        self.round_trip32(0x1fffff)
        self.round_trip32(0x200000)
        self.round_trip32(0xfffffff)
        self.round_trip32(0x10000000)
        self.round_trip32(0xffffffff)

        self.round_trip64(0)
        self.round_trip64(42)
        self.round_trip64(0x7f)
        self.round_trip64(0x80)
        self.round_trip64(0x3fff)
        self.round_trip64(0x4000)
        self.round_trip64(0x1fffff)
        self.round_trip64(0x200000)
        self.round_trip64(0xfffffff)
        self.round_trip64(0x10000000)
        self.round_trip64(0x7ffffffff)
        self.round_trip64(0x800000000)
        self.round_trip64(0x3ffffffffff)
        self.round_trip64(0x40000000000)
        self.round_trip64(0x1ffffffffffff)
        self.round_trip64(0x2000000000000)
        self.round_trip64(0xffffffffffffff)
        self.round_trip64(0x100000000000000)
        self.round_trip64(0x7fffffffffffffff)
        self.round_trip64(0x8000000000000000)
        self.round_trip64(0xffffffffffffffff)
Example #17
class TestMerkleDoc(unittest.TestCase):
    """ Test Merkletree functionality at the Document level. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def get_two_unique_directory_names(self):
        """ Generate two quasi-random directory names. """

        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """
        Create a randomly named directory under tmp/, removing any
        existing directory of that name.
        """

        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            if os.path.isfile(dir_path):
                os.unlink(dir_path)
            elif os.path.isdir(dir_path):
                shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """
        Generate two different names, using them to create subdirectories
        of tmp/.
        """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_hash(self, node, path_to_file, hashtype):
        """
        Verify that a MerkleLeaf correctly describes a file, given a hash type.
        """
        check_hashtype(hashtype)
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        if hashtype == HashTypes.SHA1:
            sha = XLSHA1()
        elif hashtype == HashTypes.SHA2:
            sha = XLSHA2()
        elif hashtype == HashTypes.SHA3:
            # pylint: disable=no-member
            sha = XLSHA3()
        elif hashtype == HashTypes.BLAKE2B_256:
            # pylint: disable=no-member
            sha = XLBLAKE2B_256()
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_hash(self, node, path_to_tree, hashtype):
        """
        Given a MerkleTree, verify that it correctly describes the
        directory whose path is passed.
        """
        # we assume that the node is a MerkleTree
        check_hashtype(hashtype)
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            if hashtype == HashTypes.SHA1:
                sha = XLSHA1()
            elif hashtype == HashTypes.SHA2:
                sha = XLSHA2()
            elif hashtype == HashTypes.SHA3:
                # pylint: disable=no-member
                sha = XLSHA3()
            elif hashtype == HashTypes.BLAKE2B_256:
                sha = XLBLAKE2B_256()
            else:
                raise NotImplementedError
            for node_ in node.nodes:
                path_to_node = os.path.join(path_to_tree, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_hash(node_, path_to_node, hashtype)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_hash(node_, path_to_node, hashtype)
                else:
                    print("DEBUG: unknown node type!")
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # actual unit tests #############################################

    def test_bound_flat_dirs(self):
        """test directory is single level, with four data files"""
        for hashtype in HashTypes:
            self.do_test_bound_flat_dirs(hashtype)

    def do_test_bound_flat_dirs(self, hashtype):
        """ Test two flat directories with the specified hash type. """

        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(ONE, FOUR)

        doc1 = MerkleDoc.create_from_file_system(dir_path1, hashtype)
        tree1 = doc1.tree
        self.assertTrue(isinstance(tree1, MerkleTree))
        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_hash(tree1, dir_path1, hashtype)

        doc2 = MerkleDoc.create_from_file_system(dir_path2, hashtype)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_hash(tree2, dir_path2, hashtype)

        self.assertEqual(tree1, tree1)
        self.assertFalse(tree1 == tree2)
        self.assertFalse(tree1 is None)

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str, hashtype)
        # DEBUG
        # print("flat doc:\n" + doc1Str)
        # print("rebuilt flat doc:\n" + doc1Rebuilt.toString())
        # END
        self.assertTrue(doc1 == doc1_rebuilt)

    def test_bound_needle_dirs(self):
        """test directories four deep with one data file at the lowest level"""
        for hashtype in HashTypes:
            self.do_test_bound_needle_dirs(hashtype)

    def do_test_bound_needle_dirs(self, hashtype):
        """ Run tests on two deeper directories. """
        check_hashtype(hashtype)
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        doc1 = MerkleDoc.create_from_file_system(dir_path1, hashtype)
        tree1 = doc1.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_hash(tree1, dir_path1, hashtype)

        doc2 = MerkleDoc.create_from_file_system(dir_path2, hashtype)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_hash(tree2, dir_path2, hashtype)

        self.assertTrue(doc1 == doc1)
        self.assertFalse(doc1 == doc2)

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str, hashtype)
#       # DEBUG
#       print "needle doc:\n" + doc1Str
#       print "rebuilt needle doc:\n" + doc1Rebuilt.toString()
#       # END
        self.assertTrue(doc1 == doc1_rebuilt)
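verify_tree_hash above encodes the Merkle rule these tests rely on: a tree's binary hash is the hash of its children's binary hashes concatenated in order, and a tree with no hashed children has a hash of None. The same rule restated compactly, using hashlib.sha256 purely for illustration:

import hashlib

def parent_hash_sha2(child_hashes):
    """ Combine child binary hashes as verify_tree_hash does (SHA-256 case). """
    child_hashes = [chash for chash in child_hashes if chash is not None]
    if not child_hashes:
        return None
    sha = hashlib.sha256()
    for chash in child_hashes:
        sha.update(chash)
    return sha.digest()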
Example #18
 def setUp(self):
     self.rng = SimpleRNG()
Example #19
class TestOptionz(unittest.TestCase):
    """ Test the basic Optionz classes. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################

    def test_bare_optionz(self):
        """ Create an Optionz instance, check for expected attibutes. """

        my_optz = Z('fred')
        self.assertEqual(my_optz.name, 'fred')
        self.assertEqual(my_optz.desc, None)
        self.assertEqual(my_optz.epilog, None)
        self.assertEqual(len(my_optz), 0)

        my_optz = Z('frank', 'frivolous', 'fabulous')
        self.assertEqual(my_optz.name, 'frank')
        self.assertEqual(my_optz.desc, 'frivolous')
        self.assertEqual(my_optz.epilog, 'fabulous')
        self.assertEqual(len(my_optz), 0)

    def test_z_option(self):
        """ Populate an Optionz object, check for expected attr. """

        z_name = self.rng.next_file_name(8)
        z_desc = self.rng.next_file_name(64)
        z_epilog = self.rng.next_file_name(64)

        my_optz = Z(z_name, z_desc, z_epilog)

        self.assertEqual(my_optz.name, z_name)
        self.assertEqual(my_optz.desc, z_desc)
        self.assertEqual(my_optz.epilog, z_epilog)
        self.assertEqual(len(my_optz), 0)

        # booleans --------------------------------------------------

        b_dflt_val = True
        b_desc = "I'm small"
        bool_opt = BoolOption('bO', default=b_dflt_val, desc=b_desc)
        self.assertEqual(bool_opt.name, 'bO')
        self.assertEqual(bool_opt.default, b_dflt_val)
        self.assertEqual(bool_opt.desc, b_desc)

        #                        name    valType     default    desc
        b_check = my_optz.add_option('bO', ValType.BOOL, b_dflt_val, b_desc)
        self.assertEqual(len(my_optz), 1)
        self.assertEqual(bool_opt, b_check)

        # choice lists ----------------------------------------------

        # NOTE We should probably require that list elements be of
        # compatible types.  For the moment we just assume that elements
        # are all strings.

        # succeeds if default in list of choices ----------
        my_size = 2 + self.rng.next_int16(4)     # so in [2..5]
        choice = self.rng.next_file_name(8)
        choices = [choice]

        while len(choices) < my_size:
            if choice not in choices:
                choices.append(choice)
            choice = self.rng.next_file_name(8)

        c_dflt_val = choices[self.rng.next_int16(my_size)]
        c_desc = 'a list'
        choice_opt = ChoiceOption('cO', choices, c_dflt_val, c_desc)
        self.assertEqual(choice_opt.name, 'cO')
        self.assertEqual(choice_opt.choices, choices)
        self.assertEqual(choice_opt.default, c_dflt_val)
        self.assertEqual(choice_opt.desc, "a list")

        # fails if default is NOT in list of choices ------
        my_size = 2 + self.rng.next_int16(4)     # so in [2..5]
        choice = self.rng.next_file_name(8)
        b_choices = [choice]

        while len(b_choices) < my_size:
            if choice not in b_choices:
                b_choices.append(choice)
            choice = self.rng.next_file_name(8)

        dflt_val = self.rng.next_file_name(8)
        while dflt_val in choices:
            dflt_val = self.rng.next_file_name(8)

        try:
            ChoiceOption('bC', choices, default=dflt_val, desc="a list")
            self.fail('accepted a default value not in the list of choices')
        except BaseException:
            pass

        c_check = my_optz.add_choice_option('cO', choices, c_dflt_val, c_desc)
        self.assertEqual(len(my_optz), 2)
        self.assertEqual(choice_opt, c_check)

        # floats ----------------------------------------------------

        f_dflt_val = self.rng.next_real()
        f_desc = 'bubbly'
        float_opt = FloatOption('fO', default=f_dflt_val, desc=f_desc)
        self.assertEqual(float_opt.name, 'fO')
        self.assertEqual(float_opt.default, f_dflt_val)
        self.assertEqual(float_opt.desc, f_desc)

        #                        name    valType     default    desc
        f_check = my_optz.add_option('fO', ValType.FLOAT, f_dflt_val, f_desc)
        self.assertEqual(len(my_optz), 3)
        self.assertEqual(float_opt, f_check)

        # ints ------------------------------------------------------

        i_dflt_val = self.rng.next_int32()
        i_desc = 'discrete'
        int_opt = IntOption('iO', default=i_dflt_val, desc=i_desc)
        self.assertEqual(int_opt.name, 'iO')
        self.assertEqual(int_opt.default, i_dflt_val)
        self.assertEqual(int_opt.desc, i_desc)

        #                        name    valType     default    desc
        i_check = my_optz.add_option('iO', ValType.INT, i_dflt_val, i_desc)
        self.assertEqual(len(my_optz), 4)
        self.assertEqual(int_opt, i_check)

        # lists -----------------------------------------------------

        size_val = self.rng.next_int16()
        # select polarity of size randomly
        if self.rng.next_boolean():
            size_val = - size_val
        l_desc = "chunky"

        list_opt = ListOption('lO', default=size_val, desc=l_desc)
        self.assertEqual(list_opt.name, 'lO')
        self.assertEqual(list_opt.default, size_val)
        self.assertEqual(list_opt.size, size_val)
        self.assertEqual(list_opt.desc, l_desc)

        zero_val = 0
        var_list_opt = ListOption('zO', default=zero_val, desc="skinny")
        self.assertEqual(var_list_opt.name, 'zO')
        self.assertEqual(var_list_opt.default, zero_val)
        self.assertEqual(var_list_opt.desc, "skinny")

        #                        name    valType     default    desc
        l_check = my_optz.add_option('lO', ValType.LIST, size_val, l_desc)
        self.assertEqual(len(my_optz), 5)
        self.assertEqual(list_opt, l_check)

        # strings ---------------------------------------------------

        s_dflt_val = self.rng.next_file_name(12)
        s_desc = "wiggly"

        str_opt = StrOption('sO', default=s_dflt_val, desc=s_desc)
        self.assertEqual(str_opt.name, 'sO')
        self.assertEqual(str_opt.default, s_dflt_val)
        self.assertEqual(str_opt.desc, s_desc)

        #                        name    valType     default    desc
        s_check = my_optz.add_option('sO', ValType.STR, s_dflt_val, s_desc)
        self.assertEqual(len(my_optz), 6)
        self.assertEqual(str_opt, s_check)
Example #20
 def setUp(self):
     self.rng = SimpleRNG(time.time())
Example #21
class TestMerkleTree(unittest.TestCase):
    """ Test package functionality at the Tree level. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions ---------------------------------------------

    def get_two_unique_directory_names(self):
        """ Make two different quasi-random directory names."""
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """ Make a directory tree with a specific name, depth and width."""
        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            if os.path.isfile(dir_path):
                os.unlink(dir_path)
            elif os.path.isdir(dir_path):
                shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """ Create two test directories with different names. """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_sha(self, node, path_to_file, hashtype):
        """
        Verify a leaf node is hashed correctly, using a specific SHA hash type.
        """
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        if hashtype == HashTypes.SHA1:
            sha = XLSHA1()
        elif hashtype == HashTypes.SHA2:
            sha = XLSHA2()
        elif hashtype == HashTypes.SHA3:
            sha = XLSHA3()
        elif hashtype == HashTypes.BLAKE2B:
            sha = XLBLAKE2B_256()
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_sha(self, node, path_to_node, hashtype):
        """
        Verify tree elements are hashed correctly, assuming that the node
        is a MerkleTree, using a specific SHA hash type.
        """
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            if hashtype == HashTypes.SHA1:
                sha = XLSHA1()
            elif hashtype == HashTypes.SHA2:
                sha = XLSHA2()
            elif hashtype == HashTypes.SHA3:
                sha = XLSHA3()
            elif hashtype == HashTypes.BLAKE2B:
                sha = XLBLAKE2B_256()
            else:
                raise NotImplementedError
            for node_ in node.nodes:
                path_to_file = os.path.join(path_to_node, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_sha(node_, path_to_file, hashtype)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_sha(node_, path_to_file, hashtype)
                else:
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            # take care to compare values of the same type;
            # node.binHash is binary, node.hexHash is hex
            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # unit tests ----------------------------------------------------

    def test_pathless_unbound(self):
        """
        Test basic characteristics of very simple MerkleTrees created
        using our standard SHA hash types.
        """
        for using in [HashTypes.SHA1, HashTypes.SHA2,
                      HashTypes.SHA3, HashTypes.BLAKE2B]:
            self.do_test_pathless_unbound(using)

    def do_test_pathless_unbound(self, hashtype):
        """
        Test basic characteristics of very simple MerkleTrees created
        using a specific SHA hash type.
        """
        (dir_name1, dir_name2) = self.get_two_unique_directory_names()

        check_hashtype(hashtype)
        tree1 = MerkleTree(dir_name1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        if hashtype == HashTypes.SHA1:
            self.assertEqual(SHA1_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.SHA2:
            self.assertEqual(SHA2_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.SHA3:
            self.assertEqual(SHA3_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.BLAKE2B:
            self.assertEqual(BLAKE2B_256_HEX_NONE, tree1.hex_hash)
        else:
            raise NotImplementedError
        tree2 = MerkleTree(dir_name2, hashtype)
        self.assertEqual(dir_name2, tree2.name)

        # these tests remain skimpy
        self.assertFalse(tree1 is None)
        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)

        # there should be no indent on the first line
        self.assertFalse(tree1_str[0] == ' ')

        # no extra lines should be added
        lines = tree1_str.split('\n')
        # this split generates an extra blank line, because the serialization
        # ends with CR-LF
        if lines[-1] == '':
            lines = lines[:-1]
        self.assertEqual(1, len(lines))

        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
        self.assertTrue(tree1 == tree1_rebuilt)

    def test_bound_flat_dirs(self):
        """
        Test handling of flat directories with a few data files
        using various SHA hash types.
        """
        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_bound_flat_dirs(using)

    def do_test_bound_flat_dirs(self, hashtype):
        """test directory is single level, with four data files"""

        check_hashtype(hashtype)
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(ONE, FOUR)
        tree1 = MerkleTree.create_from_file_system(dir_path1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        tree2 = MerkleTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertFalse(tree1 is None)
        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)
        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
        self.assertTrue(tree1 == tree1_rebuilt)

    def test_bound_needle_dirs(self):
        """
        Test directories four deep with various SHA hash types.
        """
        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_bound_needle_dirs(using)

    def do_test_bound_needle_dirs(self, hashtype):
        """test directories four deep with one data file at the lowest level"""
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        tree1 = MerkleTree.create_from_file_system(dir_path1, hashtype)

        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        tree2 = MerkleTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)
        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
#       # DEBUG
#       print "NEEDLEDIR TREE1:\n" + tree1Str
#       print "REBUILT TREE1:\n" + tree1Rebuilt.toString("")
#       # END
        self.assertTrue(tree1 == tree1_rebuilt)

    # tests of bugs previously found --------------------------------

    def test_gray_boxes_bug1(self):
        """
        Verify that bug #1 in handling serialization of grayboxes website
        has been corrected.
        """
        serialization =\
            '721a08022dd26e7be98b723f26131786fd2c0dc3 grayboxes.com/\n' +\
            ' fcd3973c66230b9078a86a5642b4c359fe72d7da images/\n' +\
            '  15e47f4eb55197e1bfffae897e9d5ce4cba49623 grayboxes.gif\n' +\
            ' 2477b9ea649f3f30c6ed0aebacfa32cb8250f3df index.html\n'

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(4, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA1)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(
            serialization, HashTypes.SHA1)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        self.assertTrue(tree1 == tree2)

        # 2014-06-26 tagged this on here to test firstLineRE_1()
        first_line = string[0]
        match_ = MerkleTree.first_line_re_1().match(first_line)
        self.assertTrue(match_ is not None)
        self.assertEqual(match_.group(1), '')               # indent
        tree_hash = match_.group(2)
        dir_name = match_.group(3)
        self.assertEqual(tree_hash + ' ' + dir_name, first_line)

    def test_xlattice_bug1(self):
        """
        this test relies on dat.xlattice.org being locally present
        and an internally consistent merkleization
        """
        with open('tests/test_data/dat.xlattice.org', 'rb') as file:
            serialization = str(file.read(), 'utf-8')

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(
            serialization, HashTypes.SHA1)

#       # DEBUG
#       print "tree1 has %d nodes" % len(tree1.nodes)
#       with open('junk.tree1', 'w') as t:
#           t.write( tree1.toString(0) )
#       # END

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(2511, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA1)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        self.assertTrue(tree1 == tree2)

    def test_gray_boxes_bug3(self):
        """ Test solution to bug in handling grayboxes website. """

        serialization =\
            '088d0e391e1a4872329e0f7ac5d45b2025363e26c199a7' + \
            '4ea39901d109afd6ba grayboxes.com/\n' +\
            ' 24652ddc14687866e6b1251589aee7e1e3079a87f80cd' + \
            '7775214f6d837612a90 images/\n' +\
            '  1eb774eef9be1e696f69a2f95711be37915aac283bb4' + \
            'b34dcbaf7d032233e090 grayboxes.gif\n' +\
            ' 6eacebda9fd55b59c0d2e48e2ed59ce9fd683379592f8' + \
            'e662b1de88e041f53c9 index.html\n'

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(4, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA2)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(
            serialization, HashTypes.SHA2)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        self.assertTrue(tree1 == tree2)

        # 2014-06-26 tagged this on here to test firstLineRE_2()
        first_line = string[0]
        match_ = MerkleTree.first_line_re_2().match(first_line)
        self.assertTrue(match_ is not None)
        self.assertEqual(match_.group(1), '')               # indent
        tree_hash = match_.group(2)
        dir_name = match_.group(3)
        self.assertEqual(tree_hash + ' ' + dir_name, first_line)

    def test_xlattice_bug3(self):
        """
        this test relies on dat2.xlattice.org being locally present
        and an internally consistent merkleization
        """
        with open('tests/test_data/dat2.xlattice.org', 'rb') as file:
            serialization = str(file.read(), 'utf-8')

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(
            serialization, HashTypes.SHA2)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(2511, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA2)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        self.assertTrue(tree1 == tree2)
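The grayboxes serializations above show the line format the first-line regexes depend on: an indent of one space per level, a hex hash (40 digits for SHA-1, 64 for SHA-256), a single space, and the file or directory name, with directory names ending in '/'. An illustrative parse of one SHA-1 line (this pattern is a sketch, not the library's first_line_re_1):

import re

LINE_RE_SHA1 = re.compile(r'^( *)([0-9a-f]{40}) (.+)$')

line = '721a08022dd26e7be98b723f26131786fd2c0dc3 grayboxes.com/'
match = LINE_RE_SHA1.match(line)
indent, tree_hash, dir_name = match.group(1), match.group(2), match.group(3)
assert indent == ''
assert dir_name == 'grayboxes.com/'
assert tree_hash + ' ' + dir_name == line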
Example #22
class TestRSA(unittest.TestCase):
    """ Test RSA crypto routines.  """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_rsa_serialization(self):
        """
        Exercise basic RSA functions.

        These include key generation, public key extraction,
        serialization/deserialization for pem and der formats, and
        digital signing and verification.
        """

        # ignore warning about renaming internal to cryptography
        warnings.filterwarnings("ignore", category=PendingDeprecationWarning)

        tmp_dir = 'tmp'
        os.makedirs(tmp_dir, exist_ok=True, mode=0o755)
        while True:
            sub_dir = self.rng.next_file_name(12)
            node_dir = os.path.join(tmp_dir, sub_dir)
            if not os.path.exists(node_dir):
                break
        # DEBUG
        print("node_dir is %s" % node_dir)
        # END
        os.mkdir(node_dir, mode=0o755)

        # RSA PRIVATE KEY GENERATION -----------------------------

        sk_priv = rsa.generate_private_key(
            public_exponent=65537,
            key_size=1024,  # cheap key for testing
            backend=default_backend())
        sk_ = sk_priv.public_key()

        self.assertEqual(sk_priv.key_size, 1024)

        # PEM FORMAT RSA PRIVATE KEY ROUND-TRIPPED ------------------

        pem = sk_priv.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption())

        key_file = os.path.join(node_dir, 'skPriv.pem')
        with open(key_file, 'wb') as file:
            # written as bytes
            file.write(pem)

        self.assertTrue(os.path.exists(key_file))
        with open(key_file, 'rb') as file:
            sk2_priv = serialization.load_pem_private_key(
                file.read(), password=None, backend=default_backend())

        # NUMBERS AND KEY EQUALITY ----------------------------------

        # get the public part of the key
        sk2_ = sk2_priv.public_key()

        # __eq__() for public part of RSA keys -------------

        # FAILS because __eq__()  has not been defined
        # self.assertEqual(sk2_, sk_)

        def check_equal_rsa_pub_key(sk2_, sk_):
            """  __eq__ functionalitiy for RSA public keys. """
            pub_n = sk_.public_numbers()
            pub_n2 = sk2_.public_numbers()

            self.assertEqual(pub_n2.e, pub_n.e)
            self.assertEqual(pub_n2.n, pub_n.n)

        check_equal_rsa_pub_key(sk2_, sk_)

        def check_equal_rsa_priv_key(sk2_priv, sk_priv):
            """  __eq__ functionalitiy for RSA private keys. """
            pri_n = sk_priv.private_numbers()
            pri_n2 = sk2_priv.private_numbers()

            # the library guarantees this: p is the larger factor
            self.assertTrue(pri_n.p > pri_n.q)

            self.assertTrue(pri_n2.p == pri_n.p and pri_n2.q == pri_n.q
                            and pri_n2.d == pri_n.d
                            and pri_n2.dmp1 == pri_n.dmp1
                            and pri_n2.dmq1 == pri_n.dmq1
                            and pri_n2.iqmp == pri_n.iqmp)

        check_equal_rsa_priv_key(sk2_priv, sk_priv)

        # DER DE/SERIALIZATION ROUND-TRIPPED ------------------------

        der = sk_priv.private_bytes(
            encoding=serialization.Encoding.DER,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption())

        der_key_file = os.path.join(node_dir, 'skPriv.der')
        with open(der_key_file, 'wb') as file:
            # written as bytes
            file.write(der)

        self.assertTrue(os.path.exists(der_key_file))
        with open(der_key_file, 'rb') as file:
            sk3_priv = serialization.load_der_private_key(
                file.read(), password=None, backend=default_backend())

        check_equal_rsa_priv_key(sk3_priv, sk_priv)

        # OpenSSH PUBLIC KEY DE/SERIALIZATION ROUND-TRIPPED ---------

        ssh_bytes = sk_.public_bytes(encoding=serialization.Encoding.OpenSSH,
                                     format=serialization.PublicFormat.OpenSSH)

        ssh_key_file = os.path.join(node_dir, 'sk.ssh')
        with open(ssh_key_file, 'wb') as file:
            # written as bytes
            file.write(ssh_bytes)

        self.assertTrue(os.path.exists(ssh_key_file))
        with open(ssh_key_file, 'rb') as file:
            sk4_ = serialization.load_ssh_public_key(file.read(),
                                                     backend=default_backend())

        check_equal_rsa_pub_key(sk4_, sk_)  # GEEP 175

        # PEM FORMAT RSA PUBLIC KEY ROUND-TRIPPED -------------------

        pem = sk_.public_bytes(encoding=serialization.Encoding.PEM,
                               format=serialization.PublicFormat.PKCS1)

        key_file = os.path.join(node_dir, 'sk.pem')
        with open(key_file, 'wb') as file:
            # written as bytes
            file.write(pem)

        self.assertTrue(os.path.exists(key_file))
        with open(key_file, 'rb') as file:
            sk5_ = serialization.load_pem_public_key(
                file.read(), backend=default_backend())  # GEEP 193

        check_equal_rsa_pub_key(sk5_, sk_)

    def test_dig_sig(self):
        """ Test digital signatures using a range of hash types. """

        for using in [
                HashTypes.SHA1,
                HashTypes.SHA2,
        ]:
            self.do_test_dig_sig(using)

    def do_test_dig_sig(self, hashtype):
        """"
        Verify calculation of digital signature using speciic hash type.
        """

        if hashtype == HashTypes.SHA1:
            sha = hashes.SHA1
        elif hashtype == HashTypes.SHA2:
            sha = hashes.SHA256
        sk_priv = rsa.generate_private_key(
            public_exponent=65537,
            key_size=1024,  # cheap key for testing
            backend=default_backend())
        sk_ = sk_priv.public_key()

        print("WARNING: cannot use hashlib's sha code with pyca cryptography")
        print("WARNING: pyca cryptography does not support sha3/keccak")

        signer = sk_priv.signer(
            padding.PSS(mgf=padding.MGF1(sha()),
                        salt_length=padding.PSS.MAX_LENGTH), sha())

        count = 64 + self.rng.next_int16(192)  # [64..256)
        data = bytes(self.rng.some_bytes(count))

        signer.update(data)
        signature = signer.finalize()  # a binary value; bytes

        # BEGIN interlude: conversion to/from base64, w/ 76-byte lines
        b64sig = base64.encodebytes(signature).decode('utf-8')
        sig2 = base64.decodebytes(b64sig.encode('utf-8'))
        self.assertEqual(sig2, signature)
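        # (encodebytes() wraps its output in lines of at most 76 characters,
        #  which is what the 76-byte note above refers to; a quick sanity
        #  check, assuming that documented behaviour:)
        for line in b64sig.splitlines():
            self.assertTrue(len(line) <= 76)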
        # END interlude ---------------------------------------------

        verifier = sk_.verifier(
            signature,
            padding.PSS(mgf=padding.MGF1(sha()),
                        salt_length=padding.PSS.MAX_LENGTH), sha())
        verifier.update(data)

        try:
            verifier.verify()
            # digital signature verification succeeded
        except InvalidSignature:
            self.fail("dig sig verification unexpectedly failed")

        # twiddle a random byte in data array to make verification fail
        data2 = bytearray(data)
        which = self.rng.next_int16(count)
        data2[which] = 0xff & ~data2[which]
        data3 = bytes(data2)

        verifier = sk_.verifier(
            signature,  # same digital signature
            padding.PSS(mgf=padding.MGF1(sha()),
                        salt_length=padding.PSS.MAX_LENGTH),
            sha())
        verifier.update(data3)

        try:
            verifier.verify()
            self.fail("expected verification of modified message to fail")

        except InvalidSignature:
            pass  # digital signature verification failed
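        # Hedged aside: newer releases of pyca/cryptography drop the
        # signer()/verifier() interface used above in favour of one-shot
        # sign()/verify() calls.  A minimal sketch of the equivalent round
        # trip, guarded so it only runs where that interface exists:
        if hasattr(sk_priv, 'sign'):
            pss = padding.PSS(mgf=padding.MGF1(sha()),
                              salt_length=padding.PSS.MAX_LENGTH)
            sig_b = sk_priv.sign(data, pss, sha())
            try:
                sk_.verify(sig_b, data, pss, sha())
            except InvalidSignature:
                self.fail("sign()/verify() round trip unexpectedly failed")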
예제 #23
0
    def setUp(self):
        self.rng = SimpleRNG(time.time())
        data = StringIO(LITTLE_BIG_PROTO_SPEC)
        ppp = StringProtoSpecParser(data)   # data should be file-like
        self.str_obj_model = ppp.parse()     # object model from string serialization
        self.proto_name = self.str_obj_model.name  # the dotted name of the protocol
예제 #24
0
class TestLittleBig(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())
        data = StringIO(LITTLE_BIG_PROTO_SPEC)
        ppp = StringProtoSpecParser(data)   # data should be file-like
        self.str_obj_model = ppp.parse()     # object model from string serialization
        self.proto_name = self.str_obj_model.name  # the dotted name of the protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def lil_big_msg_values(self):
        values = []
        # XXX these MUST be kept in sync with littleBigTest.py
        values.append(self.rng.next_boolean())       # vBoolReqField
        values.append(self.rng.next_int16())         # vEnumReqField

        values.append(self.rng.next_int32())         # vuInt32ReqField
        values.append(self.rng.next_int32())         # vuInt64ReqField
        values.append(self.rng.next_int64())         # vsInt32ReqField
        values.append(self.rng.next_int64())         # vsInt64ReqField

        # #vuInt32ReqField
        # #vuInt64ReqField

        values.append(self.rng.next_int32())         # fsInt32ReqField
        values.append(self.rng.next_int32())         # fuInt32ReqField
        values.append(self.rng.next_real())          # fFloatReqField

        values.append(self.rng.next_int64())         # fsInt64ReqField
        values.append(self.rng.next_int64())         # fuInt64ReqField
        values.append(self.rng.next_real())          # fDoubleReqField

        values.append(self.rng.next_file_name(16))    # lStringReqField

        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))               # lBytesReqField

        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))               # fBytes16ReqField

        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))               # fBytes20ReqField

        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))               # fBytes32ReqField

        return values

    # actual unit tests #############################################
    def check_field_impl_against_spec(
            self, proto_name, msg_name, field_spec, value):
        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("%-20s %s" % (exc, cls.__dict__[exc]))

        self.assertIsNotNone(cls)
        file = cls(value)
        self.assertIsNotNone(file)

        # class attributes --------------------------------
        self.assertEqual(field_spec.name, file.name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)          # not an elegant test

        # instance attribute ------------------------------
        self.assertEqual(value, file.value)

        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for the instance
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))     # GEEP

    def test_field_impl(self):
        msg_spec = self.str_obj_model.msgs[0]

        # the fields in this imaginary logEntry
        values = self.lil_big_msg_values()

        for i in range(len(msg_spec)):
            print(
                "\nDEBUG: field %u ------------------------------------------------------" %
                i)
            field_spec = msg_spec[i]
            self.check_field_impl_against_spec(
                self.proto_name, msg_spec.name, field_spec, values[i])

    def test_caching(self):
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        # XXX A HACK WHILE WE CHANGE INTERFACE ------------
        msg_spec = self.str_obj_model.msgs[0]
        name = msg_spec.name

        cls0 = make_msg_class(self.str_obj_model, name)
        # DEBUG
        print("Constructed Clz0 name is '%s'" % cls0.name)
        # END
        self.assertEqual(name, cls0.name)
        cls1 = make_msg_class(self.str_obj_model, name)
        self.assertEqual(name, cls1.name)

        # END HACK ----------------------------------------
        # we cache classes, so the two should be the same
        self.assertEqual(id(cls0), id(cls1))

        # chan    = Channel(BUFSIZE)
        values = self.lil_big_msg_values()
        lil_big_msg0 = cls0(values)
        lil_big_msg1 = cls0(values)
        # we don't cache instances, so these will differ
        self.assertNotEqual(id(lil_big_msg0), id(lil_big_msg1))

        field_spec = msg_spec[0]
        dotted_name = "%s.%s" % (self.proto_name, msg_spec.name)
        f0cls = make_field_class(dotted_name, field_spec)
        f1cls = make_field_class(dotted_name, field_spec)
        self.assertEqual(id(f0cls), id(f1cls))

    def test_little_big(self):
        self.assertIsNotNone(self.str_obj_model)
        self.assertTrue(isinstance(self.str_obj_model, M.ProtoSpec))
        self.assertEqual('org.xlattice.fieldz.test.littleBigProto',
                         self.str_obj_model.name)

        self.assertEqual(0, len(self.str_obj_model.enums))
        self.assertEqual(1, len(self.str_obj_model.msgs))
        self.assertEqual(0, len(self.str_obj_model.seqs))

        msg_spec = self.str_obj_model.msgs[0]

        # Create a channel ------------------------------------------
        # its buffer will be used for both serializing the instance
        # data and, by deserializing it, for creating a second instance.
        chan = Channel(BUFSIZE)
        buf = chan.buffer
        self.assertEqual(BUFSIZE, len(buf))

        # create the LittleBigMsg class ------------------------------
        little_big_msg_cls = make_msg_class(self.str_obj_model, msg_spec.name)

        # -------------------------------------------------------------
        # XXX the following fails because field 2 is seen as a property
        # instead of a list
        if False:        # DEBUGGING
            print('\nLittleBigMsg CLASS DICTIONARY')
            for (ndx, key) in enumerate(little_big_msg_cls.__dict__.keys()):
                print(
                    "%3u: %-20s %s" %
                    (ndx, key, little_big_msg_cls.__dict__[key]))
        # -------------------------------------------------------------

        # create a message instance ---------------------------------
        values = self.lil_big_msg_values()            # quasi-random values
        lil_big_msg = little_big_msg_cls(values)

        # __setattr__ in MetaMsg raises exception on any attempt
        # to add new attributes.  This works at the class level but
        # NOT at the instance level
        #
        if True:
            try:
                lil_big_msg.foo = 42
                self.fail(
                    "ERROR: attempt to assign new instance attribute succeeded")
            except AttributeError as a_exc:
                # DEBUG
                print(
                    "ATTR ERROR ATTEMPTING TO SET lilBigMsg.foo: " +
                    str(a_exc))
                # END
                pass

        if '__dict__' in dir(lil_big_msg):
            print('\nlilBigMsg INSTANCE DICTIONARY')
            for exc in list(lil_big_msg.__dict__.keys()):
                print("%-20s %s" % (exc, lil_big_msg.__dict__[exc]))

        # lilBigMsg.name is a property
        try:
            lil_big_msg.name = 'boo'
            self.fail("ERROR: attempt to change message name succeeded")
        except AttributeError:
            pass

        self.assertEqual(msg_spec.name, lil_big_msg.name)
        # we don't have any nested enums or messages
        self.assertEqual(0, len(lil_big_msg.enums))
        self.assertEqual(0, len(lil_big_msg.msgs))

        self.assertEqual(17, len(lil_big_msg.field_classes))
        # number of fields in instance
        self.assertEqual(17, len(lil_big_msg))
        for i in range(len(lil_big_msg)):
            self.assertEqual(values[i], lil_big_msg[i].value)

        # serialize the object to the channel -----------------------
        print("\nDEBUG: PHASE A ######################################")
        nnn = lil_big_msg.write_stand_alone(chan)

        old_position = chan.position
        chan.flip()
        self.assertEqual(old_position, chan.limit)
        self.assertEqual(0, chan.position)
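        # (flip() behaves like Java's ByteBuffer.flip(): the limit becomes
        #  the old position and the position is reset to zero, which is
        #  exactly what the two assertions above check)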

        # deserialize the channel, making a clone of the message ----
        (read_back, nn2) = little_big_msg_cls.read(
            chan, self.str_obj_model)  # sOM is protoSpec
        self.assertIsNotNone(read_back)
        self.assertEqual(nnn, nn2)

        # verify that the messages are identical --------------------
        self.assertTrue(lil_big_msg.__eq__(read_back))

        print("\nDEBUG: PHASE B ######################################")
        # produce another message from the same values --------------
        lil_big_msg2 = little_big_msg_cls(values)
        chan2 = Channel(BUFSIZE)
        nnn = lil_big_msg2.write_stand_alone(chan2)
        chan2.flip()
        (copy2, nn3) = little_big_msg_cls.read(chan2, self.str_obj_model)
        self.assertIsNotNone(copy2)
        self.assertEqual(nnn, nn3)
        self.assertTrue(lil_big_msg.__eq__(copy2))
        self.assertTrue(lil_big_msg2.__eq__(copy2))

        # test clear()
        chan2.position = 97
        chan2.limit = 107
        chan2.clear()
        self.assertEqual(0, chan2.limit)
        self.assertEqual(0, chan2.position)
예제 #25
0
class TestMerkleDoc(unittest.TestCase):
    """ Test MerkleTree functionality at the document level. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def get_two_unique_directory_names(self):
        """
        Get two candidate directory names, making sure that they differ.
        """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """
        Create a test directory with the name, depth, and width specified.
        The directory is under tmp/ ; subdirectories have random names
        and contents.
        """
        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """
        Create two test directories under tmp/ with distinct names but the
        depth and width specified.
        """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_sha256(self, node, path_to_file):
        """
        Verify that the content keys of the named file match the SHA
        hash of its contents.
        """
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        sha = XLSHA2()
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_sha256(self, node, path_to_tree):
        """
        Verify that the names (content keys) of files below the node
        (a Merkletree) have correct content keys, matching the SHA
        hash of the files.
        """
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            sha = XLSHA2()
            for node_ in node.nodes:
                path_to_node = os.path.join(path_to_tree, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_sha256(node_, path_to_node)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_sha256(node_, path_to_node)
                else:
                    print("DEBUG: unknown node type!")
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # actual unit tests #############################################

    def test_bound_flat_dirs(self):
        """test directory is single level, with four data files"""

        dir_name1, dir_path1, dir_name2, dir_path2 = \
            self.make_two_test_directories(ONE, FOUR)
        doc1 = MerkleDoc.create_from_file_system(dir_path1)
        # pylint: disable=no-member
        tree1 = doc1.tree
        self.assertTrue(isinstance(tree1, MerkleTree))

        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_sha256(tree1, dir_path1)

        doc2 = MerkleDoc.create_from_file_system(dir_path2)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_sha256(tree2, dir_path2)

        # pylint: disable=no-member
        self.assertTrue(tree1 == tree1)
        # pylint: disable=no-member
        self.assertFalse(tree1 == tree2)
        # pylint: disable=no-member
        self.assertFalse(tree1 is None)

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str)
        self.assertTrue(doc1 == doc1_rebuilt)

    def test_bound_needle_dirs(self):
        """test directories four deep with one data file at the lowest level"""
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        doc1 = MerkleDoc.create_from_file_system(dir_path1)
        tree1 = doc1.tree
        self.assertTrue(isinstance(tree1, MerkleTree))

        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_sha256(tree1, dir_path1)

        doc2 = MerkleDoc.create_from_file_system(dir_path2)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_sha256(tree2, dir_path2)

        self.assertTrue(doc1 == doc1)
        self.assertFalse(doc1 == doc2)

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str)
#       # DEBUG
#       print "needle doc:\n" + doc1Str
#       print "rebuilt needle doc:\n" + doc1Rebuilt.toString()
#       # END
        self.assertTrue(doc1 == doc1_rebuilt)       # FOO
예제 #26
0
class TestNLHTree(unittest.TestCase):
    """ Test NLHTree-related functions. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def make_leaf(self, names_so_far, hashtype):
        """ Build a leaf with random name and data using specific hash. """

        while True:
            name = self.rng.next_file_name(8)
            if name not in names_so_far:
                names_so_far.add(name)
                break
        nnn = self.rng.some_bytes(8)        # 8 quasi-random bytes
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()
        sha.update(nnn)
        return NLHLeaf(name, sha.digest(), hashtype)

    # actual unit tests #############################################
    def test_simple_constructor(self):
        """ Build a tree with random name and data using various hashes. """

        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_simple_constructor(using)

    def do_test_simple_constructor(self, hashtype):
        """
        Build a tree with random name and data using specific hash type.
        """

        name = self.rng.next_file_name(8)
        tree = NLHTree(name, hashtype)
        self.assertEqual(tree.name, name)
        self.assertEqual(tree.hashtype, hashtype)
        self.assertEqual(len(tree.nodes), 0)

    def do_test_insert_4_leafs(self, hashtype):
        """
        Create 4 leaf nodes with random but unique names.  Insert
        them into a tree, verifying that the resulting sort is correct.
        """
        check_hashtype(hashtype)
        name = self.rng.next_file_name(8)
        tree = NLHTree(name, hashtype)
        leaf_names = set()
        a_leaf = self.make_leaf(leaf_names, hashtype)
        b_leaf = self.make_leaf(leaf_names, hashtype)
        c_leaf = self.make_leaf(leaf_names, hashtype)
        d_leaf = self.make_leaf(leaf_names, hashtype)
        self.assertEqual(len(tree.nodes), 0)
        tree.insert(a_leaf)
        self.assertEqual(len(tree.nodes), 1)
        tree.insert(b_leaf)
        self.assertEqual(len(tree.nodes), 2)
        tree.insert(c_leaf)
        self.assertEqual(len(tree.nodes), 3)
        tree.insert(d_leaf)
        self.assertEqual(len(tree.nodes), 4)
        # we expect the nodes to be sorted
        for ndx in range(3):
            self.assertTrue(tree.nodes[ndx].name < tree.nodes[ndx + 1].name)

        matches = tree.list('*')
        for ndx, qqq in enumerate(tree.nodes):
            self.assertEqual(matches[ndx], '  ' + qqq.name)

        self.assertEqual(tree, tree)
        tree2 = tree.clone()
        self.assertEqual(tree2, tree)

    def test_insert_4_leafs(self):
        """
        Test inserting 4 leafs into a tree using various hash types.
        """
        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_insert_4_leafs(using)
예제 #27
0
class TestPopulateDataDir(unittest.TestCase):
    """
    Test using a BuildList and existing content-keyed store to populate
    a data directory.
    """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    def make_unique(self, below):
        """ create a unique subdirectory of the directory named """

        dir_path = os.path.join(below, self.rng.next_file_name(8))
        while os.path.exists(dir_path):
            dir_path = os.path.join(below, self.rng.next_file_name(8))
        os.makedirs(dir_path, mode=0o755)
        return dir_path

    # actual unit tests #############################################

    def do_pop_test(self, hashtype):
        """ Test populating a data directory for a specific hashtype. """

        check_hashtype(hashtype)
        # DEBUG
        # print("do_pop_test: %s" % hashtype)
        # END

        sk_priv = RSA.generate(1024)
        sk_ = sk_priv.publickey()

        if hashtype == HashTypes.SHA1:
            original_data = os.path.join('example1', 'dataDir')
            original_u = os.path.join('example1', 'uDir')
        elif hashtype == HashTypes.SHA2:
            original_data = os.path.join('example2', 'dataDir')
            original_u = os.path.join('example2', 'uDir')
        elif hashtype == HashTypes.SHA3:
            original_data = os.path.join('example3', 'data_dir')
            original_u = os.path.join('example3', 'uDir')

        blist = BuildList.create_from_file_system(
            'name_of_the_list', original_data, sk_, hashtype=hashtype)

        # should return an empty list: a basic sanity check
        unmatched = blist.check_in_data_dir(original_data)
        # DEBUG
        # print("UNMATCHED IN DATA DIR: ", unmatched)
        # if len(unmatched) > 0:
        #   print("BL:\n%s" % blist.__str__())
        #   print("in the buildlist, but not in uData:")
        #   for un in unmatched:
        #       print("    %s %s" % (un[1], un[0]))
        # END
        self.assertEqual(len(unmatched), 0)

        # should return an empty list: a basic sanity check
        unmatched = blist.check_in_u_dir(original_u)
        # DEBUG
        # print("UNMATCHED IN U DIR: ", unmatched)
        if unmatched:
            print("BL:\n%s" % blist.__str__())
            print("in the buildlist, but not in u_dir:")
            for unm in unmatched:
                print("    %s %s" % (unm[1], unm[0]))
        # END
        self.assertEqual(len(unmatched), 0)

        self.assertEqual(blist.title, 'name_of_the_list')
        self.assertEqual(blist.public_key, sk_)
        self.assertEqual(blist.timestamp, timestamp(0))
        self.assertEqual(blist.hashtype, hashtype)

        self.assertEqual(blist, blist)
        self.assertFalse(blist.verify())   # not signed yet

        blist.sign(sk_priv)
        sig = blist.dig_sig                 # this is the base64-encoded value
        self.assertTrue(sig is not None)
        self.assertTrue(blist.verify())    # it has been signed

        self.assertEqual(blist, blist)

        # BL2: we build testDir and the new dataDir and u_dir --------
        string = blist.to_string()
        bl2 = BuildList.parse(string, hashtype)     # round-tripped build list
        # DEBUG
        # print("\nFIRST BUILD LIST:\n%s" % blist)
        # print("\nSECOND BUILD LIST:\n%s" % bl2)
        # END

        # string2 = bl2.__str__()
        # self.assertEqual(string, string2)
        # same list, but signed now
        self.assertEqual(blist, blist)
        # self.assertEqual(bl, bl2)               # timestamps may differ

        # create empty test directories -------------------
        test_path = self.make_unique('tmp')
        u_path = os.path.join(test_path, 'uDir')
        UDir.discover(
            u_path, hashtype=hashtype)  # creates empty UDir
        dvcz_path = os.path.join(test_path, 'dvcz')
        os.mkdir(dvcz_path)

        data_path = os.path.join(test_path, blist.tree.name)
        # DEBUG
        # print("DATA_PATH: %s" % data_path)
        # print("DVCZ_DIR:  %s" % dvczPath)
        # print("U_PATH:    %s" % u_path)
        # END

        # populate the new dataDir and then the new u_dir --
        # bl2.populateDataDir(originalU, data_path)
        blist.populate_data_dir(original_u, data_path)
        self.assertEqual(len(bl2.check_in_data_dir(data_path)), 0)

        bl2.tree.save_to_u_dir(data_path, u_path, hashtype)
        self.assertEqual(len(bl2.check_in_u_dir(u_path)), 0)

        # BL3:

        # this writes the buildlist to dvczPath/lastBuildList:
        blist3 = BuildList.list_gen("title", data_path, dvcz_path,
                                    u_path=u_path, hashtype=hashtype)
        path_to_list = os.path.join(dvcz_path, 'lastBuildList')
        with open(path_to_list, 'r') as file:
            ser4 = file.read()
        bl4 = BuildList.parse(ser4, hashtype)
        # ser41 = bl4.to_string()
        bl4.to_string()
        # self.assertEqual(ser41, ser4) # FAILS: ser41 is signed, ser4 isn't

        # DEBUG
        # print("recovered from disk:\n%s" % ser4)
        # print("\nserialized from BuildList:\n%s" % ser41)
        # END

        self.assertEqual(blist.tree, blist.tree)    # check __eq__
        self.assertEqual(bl2.tree, blist.tree)
        self.assertEqual(blist3.tree, blist.tree)
        self.assertEqual(bl4.tree, blist.tree)

    def test_populate_data_dir(self):
        """
        Test populate_data_dir for the supported hashtypes.
        """
        for hashtype in [HashTypes.SHA1, HashTypes.SHA2]:
            self.do_pop_test(hashtype)

    def test_name_space(self):
        """
        Verify that ArgumentParser works as expected, specifically
        that assignment adds a key to the Namespace.
        """
        parser = ArgumentParser(description='oh hello')
        args = parser.parse_args()
        args._ = 'trash'
        self.assertEqual(args._, 'trash')
예제 #28
0
class TestNodeID(unittest.TestCase):
    """ Verify that an XLNodeID behaves like one. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_valid_node_id(self):
        """
        Tests XLNodeID.is_valid_node_id().
        """

        # tests that should fail
        self.assertFalse(XLNodeID.is_valid_node_id(None))
        self.assertFalse(XLNodeID.is_valid_node_id('foo'))  # not bytes-like
        self.assertFalse(XLNodeID.is_valid_node_id(b'bar'))  # wrong length
        self.assertFalse(XLNodeID.is_valid_node_id(42))     # an int

        # tests that should succeed
        val = bytes(SHA1_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))
        val = bytes(SHA2_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))
        val = bytes(SHA3_BIN_LEN)
        self.assertTrue(XLNodeID.is_valid_node_id(val))

    def expect_failure(self, val):
        """ Expect object construction to fail. """
        try:
            XLNodeID(val)
            self.fail("XLNodeID constructed with bad ID")
        except XLNodeIDError:
            pass

    def expect_success(self, val):
        """
        Try to build a NodeID with val (a bytes value).
        """
        try:
            nodeid = XLNodeID(val)
            # succeeded
        except XLNodeIDError:
            self.fail("ctor raised with good ID")

        val2 = nodeid.value
        self.assertIsNotNone(val2)
        self.assertFalse(val2 is val)   # not the same object
        self.assertEqual(val2, val)     # but a valid deep copy

    def test_ctor(self):
        """
        Tests the XLNodeID constructor.
        """

        # tests that should fail
        self.expect_failure(None)    # id may not be None
        self.expect_failure('foo')   # not bytes-like
        self.expect_failure(b'bar')  # wrong length
        self.expect_failure(42)      # an int

        self.expect_failure(bytes(SHA1_BIN_LEN - 1))
        self.expect_failure(bytes(SHA1_BIN_LEN + 1))
        self.expect_failure(bytes(SHA2_BIN_LEN - 1))
        self.expect_failure(bytes(SHA2_BIN_LEN + 1))
        self.expect_failure(bytes(SHA3_BIN_LEN - 1))
        self.expect_failure(bytes(SHA3_BIN_LEN + 1))

        # tests that should succeed
        val = self.rng.some_bytes(SHA1_BIN_LEN)
        self.expect_success(val)
        val = self.rng.some_bytes(SHA2_BIN_LEN)
        self.expect_success(val)
        val = self.rng.some_bytes(SHA3_BIN_LEN)
        self.expect_success(val)

    def do_test_cloning(self, length):
        """ Verify that cloning works for a given number of bytes. """
        val = self.rng.some_bytes(length)
        id1 = XLNodeID(val)
        id2 = id1.clone()
        self.assertTrue(id1 is not id2)
        self.assertEqual(id1.value, id2.value)

    def test_cloning(self):
        """ Test cloning for bytes-like objects of a given number of bytes. """
        for length in [SHA1_BIN_LEN, SHA2_BIN_LEN, SHA3_BIN_LEN]:
            self.do_test_cloning(length)

    def do_test_comparison(self, length):
        """
        For a quasi-random byte sequence of a given length, verify that
        comparison operators work.
        """
        val = self.rng.some_bytes(length)       # a byte array of that length

        # pick a random index into that byte array
        ndx = 1 + self.rng.next_int16(length - 1)
        if val[ndx] == 0:
            val[ndx] = 1
        if val[ndx] == 255:
            val[ndx] = 254

        # make a couple of clones of the byte array
        v_bigger = deepcopy(val)
        v_bigger[ndx] += 1
        v_smaller = deepcopy(val)
        v_smaller[ndx] -= 1

        self.assertTrue(v_bigger > val)
        self.assertTrue(v_smaller < val)

        # use these values to make NodeIDs
        n_bigger = XLNodeID(v_bigger)
        n_middle = XLNodeID(val)
        n_smaller = XLNodeID(v_smaller)

        # compare them
        self.assertTrue(n_bigger > n_middle)
        self.assertTrue(n_smaller < n_middle)

        # equality checks
        self.assertEqual(n_middle, n_middle)
        self.assertFalse(n_bigger == n_middle)
        self.assertFalse(n_middle == n_smaller)

    def test_comparison(self):
        """ Test comparison operators for IDs of standard lengths. """
        for length in [SHA1_BIN_LEN, SHA2_BIN_LEN, SHA3_BIN_LEN]:
            self.do_test_comparison(length)
예제 #29
0
class TestDropFromU(unittest.TestCase):
    """ Test the drop_from_u_dir functionality. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def populate_tree(self, tree, data_path, u_dir, hashtype):
        """
        Generate nnn quasi-random values and their hashes, where nnn is at least 16.
        """
        nnn = 16 + self.rng.next_int16(16)
        # DEBUG
        # print("nnn = %d" % nnn)
        # END

        values = []
        hashes = []
        for count in range(nnn):
            # generate datum ------------------------------
            datum = self.rng.some_bytes(32 + self.rng.next_int16(32))
            values.append(datum)

            # generate hash = bin_key ----------------------
            if hashtype == HashTypes.SHA1:
                sha = hashlib.sha1()
            elif hashtype == HashTypes.SHA2:
                sha = hashlib.sha256()
            elif hashtype == HashTypes.SHA3:
                sha = hashlib.sha3_256()
            elif hashtype == HashTypes.BLAKE2B:
                sha = hashlib.blake2b(digest_size=32)
            else:
                raise NotImplementedError
            sha.update(datum)
            bin_key = sha.digest()
            hex_key = sha.hexdigest()
            hashes.append(bin_key)

            # write data file -----------------------------
            file_name = 'value%04d' % count
            path_to_file = os.path.join(data_path, file_name)
            with open(path_to_file, 'wb') as file:
                # DEBUG
                # print("writing %s to %s" % (hex_key, path_to_file))
                # END
                file.write(datum)

            # insert leaf into tree -----------------------
            # path_from_top = os.path.join(top_name, file_name)
            leaf = NLHLeaf(file_name, bin_key, hashtype)
            tree.insert(leaf)

            # DEBUG
            # print("  inserting <%s %s>" % (leaf.name, leaf.hex_hash))
            # END

            # write data into uDir ------------------------
            u_dir.put_data(datum, hex_key)
        return values, hashes

    def generate_udt(self, struc, hashtype):
        """
        Generate under ./tmp a data directory with random content,
        a uDir containing the same data, and an NLHTree that matches.

        uDir has the directory structure requested (DIR_FLAT, DIR16x16,
        DIR256x256, etc).  Hashes are of the hash type requested.

        values is a list of binary values, each the content of a file
        under dataDir.  Each value contains a non-zero number of bytes.

        hashes is a list of the SHA hashes of the values.  Each hash
        is a binary value.  If using SHA1 it consists of 20 bytes.

        return uPath, data_path, tree, hashes, values
        """

        # make a unique U directory under ./tmp/
        os.makedirs('tmp', mode=0o755, exist_ok=True)
        u_root_name = self.rng.next_file_name(8)
        u_path = os.path.join('tmp', u_root_name)
        while os.path.exists(u_path):
            u_root_name = self.rng.next_file_name(8)
            u_path = os.path.join('tmp', u_root_name)

        # DEBUG
        # print("u_root_name = %s" % u_root_name)
        # END

        # create uDir and the NLHTree
        u_dir = UDir(u_path, struc, hashtype)
        self.assertTrue(os.path.exists(u_path))

        # make a unique data directory under tmp/
        data_tmp = self.rng.next_file_name(8)
        tmp_path = os.path.join('tmp', data_tmp)
        while os.path.exists(tmp_path):
            data_tmp = self.rng.next_file_name(8)
            tmp_path = os.path.join('tmp', data_tmp)

        # dataDir must have same base name as NLHTree
        top_name = self.rng.next_file_name(8)
        data_path = os.path.join(tmp_path, top_name)
        os.makedirs(data_path, mode=0o755)

        # DEBUG
        # print("data_tmp = %s" % data_tmp)
        # print("top_name = %s" % top_name)
        # print('data_path = %s' % data_path)
        # END

        tree = NLHTree(top_name, hashtype)
        values, hashes = self.populate_tree(tree, data_path, u_dir, hashtype)
        return u_path, data_path, tree, hashes, values

    # ---------------------------------------------------------------

    def do_test_with_ephemeral_tree(self, struc, hashtype):
        """
        Generate a tmp/ subdirectory containing a quasi-random data
        directory and corresponding uDir and NLHTree serialization.

        We use the directory strucure (struc) and hash type (hashtype)
        indicated, running various consistency tests on the three.
        """

        u_path, data_path, tree, hashes, values = self.generate_udt(
            struc, hashtype)

        # DEBUG
        # print("TREE:\n%s" % tree)
        # END
        # verify that the dataDir matches the nlhTree
        tree2 = NLHTree.create_from_file_system(data_path, hashtype)
        # DEBUG
        # print("TREE2:\n%s" % tree2)
        # END
        self.assertEqual(tree2, tree)

        nnn = len(values)             # number of values present
        hex_hashes = []
        for count in range(nnn):
            hex_hashes.append(hexlify(hashes[count]).decode('ascii'))

        ndxes = [ndx for ndx in range(nnn)]  # indexes into lists
        self.rng.shuffle(ndxes)         # shuffled

        kkk = self.rng.next_int16(nnn)   # we will drop this many indexes

        # DEBUG
        # print("dropping %d from %d elements" % (kkk, nnn))
        # END

        drop_me = ndxes[0:kkk]        # indexes of values to drop
        keep_me = ndxes[kkk:]         # of those which should still be present

        # construct an NLHTree containing values to be dropped from uDir
        clone = tree.clone()
        for count in keep_me:
            name = 'value%04d' % count
            clone.delete(name)     # the parameter is a glob !

        # these values should be absent from the clone: they won't be
        # dropped from uDir
        for count in keep_me:
            name = 'value%04d' % count
            xxx = clone.find(name)
            self.assertEqual(len(xxx), 0)

        # these values shd still be present in clone: they'll be dropped from
        # UDir
        for count in drop_me:
            name = 'value%04d' % count
            xxx = clone.find(name)
            self.assertEqual(len(xxx), 1)

        # the clone subtree contains those elements which will be dropped
        # from uDir
        unmatched = clone.drop_from_u_dir(u_path)               # was unmatched

        # DEBUG
        # for x in unmatched:  # (relPath, hash)
        #    print("unmatched: %s %s" % (x[0], x[1]))
        # END
        self.assertEqual(len(unmatched), 0)

        u_dir = UDir(u_path, struc, hashtype)
        self.assertTrue(os.path.exists(u_path))

        # these values should still be present in uDir
        for count in keep_me:
            hex_hash = hex_hashes[count]
            self.assertTrue(u_dir.exists(hex_hash))

        # these values should NOT be present in UDir
        for count in drop_me:
            hex_hash = hex_hashes[count]
            self.assertFalse(u_dir.exists(hex_hash))

    def test_with_ephemeral_tree(self):
        """
        Generate tmp/ subdirectories containing a quasi-random data
        directory and corresponding uDir and NLHTree serialization,
        using various directory structures and hash types.
        """
        for struc in DirStruc:
            for hashtype in HashTypes:
                self.do_test_with_ephemeral_tree(struc, hashtype)
예제 #30
0
class TestTFWriter(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def dump_buffer(self, buf):
        for i in range(16):
            print("0x%02x " % buf[i], end=' ')
        print()

    # actual unit tests #############################################

    # these two methods are all that's left of testTFBuffer.py
    def test_buffer_ctor(self):
        buffer = [0] * BUFSIZE
        tf_buf = TFBuffer(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    def test_buffer_creator(self):
        BUFSIZE = 1024
        tf_buf = TFBuffer.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_buf, TFBuffer))
        self.assertEqual(0, tf_buf.position)
        self.assertEqual(BUFSIZE, tf_buf.capacity)

    # and these two methods are all that's left of testTFReader.py
    def test_reader_ctor(self):
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)
        self.assertEqual(BUFSIZE, len(tf_reader.buffer))

    def test_reader_creator(self):
        BUFSIZE = 1024
        tf_reader = TFReader.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_reader, TFReader))
        self.assertEqual(0, tf_reader.position)
        self.assertEqual(BUFSIZE, tf_reader.capacity)

    # next two are specific to TFWriter
    def test_writer_ctor(self):
        BUFSIZE = 1024
        buffer = bytearray(BUFSIZE)
        tf_writer = TFWriter(TEST_MSG_SPEC, BUFSIZE, buffer)
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def test_writer_creator(self):
        BUFSIZE = 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        self.assertTrue(isinstance(tf_writer, TFWriter))
        self.assertEqual(0, tf_writer.position)
        self.assertEqual(BUFSIZE, tf_writer.capacity)

    def do_round_trip_field(self, writer, reader, idx, field_type, value):
        writer.put_next(idx, value)
#       # DEBUG
#       tfBuf   = writer.buffer
#       print "after put buffer is " ,
#       self.dumpBuffer(tfBuf)
#       # END
        reader.get_next()
        self.assertEqual(idx, reader.field_nbr)
        # XXX THIS SHOULD WORK:
        # self.assertEqual( fType, reader.fType    )
        self.assertEqual(value, reader.value)
        return idx + 1

    def test_writing_and_reading(self):
        BUFSIZE = 16 * 1024
        tf_writer = TFWriter.create(TEST_MSG_SPEC, BUFSIZE)
        tf_buf = tf_writer.buffer       # we share the buffer
        tf_reader = TFReader(TEST_MSG_SPEC, BUFSIZE, tf_buf)

        idx = 0                           # 0-based field number

        # field types encoded as varints (8) ========================
        # These are tested in greater detail in testVarint.py; the
        # tests here are to exercise their use in a heterogeneous
        # buffer

        # field 0: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x1f)
        self.assertEqual(1, idx)         # DEBUG XXX

        # field 1: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)

        # field 2:  _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0x12345678abcdef3e)

        # field 3: vsInt32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', 192)

        # field 4: vsInt32
        # _V_SINT32 (zig-zag encoded, optimal for small values near zero)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint32', -192)

        # field 5: _V_SINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vsint64', -193)  # GEEP

        # field 6: _V_UINT32
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint32', 0x172f3e4d)
        # field 7: _V_UINT64
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'vuint64', 0xffffffff172f3e4d)

        # _V_BOOL
        # XXX NOT IMPLEMENTED, NOT TESTED

        # _V_ENUM
        # XXX NOT IMPLEMENTED, NOT TESTED

        # encoded as fixed length 32 bit fields =====================
        # field 8: _F_INT32
        idx = self.do_round_trip_field(tf_writer, tf_reader, idx, 'fint32',
                                       0x172f3e4d)
        # _F_FLOAT
        # XXX STUB XXX not implemented

        # encoded as fixed length 64 bit fields =====================
        # field 9: _F_INT64
        idx = self.do_round_trip_field(tf_writer, tf_reader, idx, 'fint64',
                                       0xffffffff172f3e4d)
        # _F_DOUBLE
        # XXX STUB XXX not implemented

        # encoded as varint len followed by byte[len] ===============
        # field 10: _L_STRING
        string = self.rng.next_file_name(16)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lstring', string)

        # field 11: _L_BYTES
        b_val = bytearray(8 + self.rng.next_int16(16))
        self.rng.next_bytes(b_val)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'lbytes', b_val)

        # _L_MSG
        # XXX STUB XXX not implemented

        # fixed length byte sequences, byte[N] ======================
        # field 12: _F_BYTES16
        self.rng.next_bytes(B128)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes16', B128)

        # field 13: _F_BYTES20
        self.rng.next_bytes(B160)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes20', B160)

        # may want to introduce eg fNodeID20 and fSha1Key types
        # field 14: _F_BYTES32
        self.rng.next_bytes(B256)
        idx = self.do_round_trip_field(
            tf_writer, tf_reader, idx, 'fbytes32', B256)
예제 #31
0
class TestMerkleDoc(unittest.TestCase):
    """ Test MerkleTree functionality at the document level. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def get_two_unique_directory_names(self):
        """
        Get two candidate directory names, making sure that they differ.
        """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """
        Create a test directory with the name, depth, and width specified.
        The directory is under tmp/ ; subdirectories have random names
        and contents.
        """
        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """
        Create two test directories under tmp/ with distinct names but the
        depth and width specified.
        """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_sha256(self, node, path_to_file):
        """
        Verify that the content keys of the named file match the SHA
        hash of its contents.
        """
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        sha = hashlib.sha256()
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_sha256(self, node, path_to_tree):
        """
        Verify that the names (content keys) of files below the node
        (a Merkletree) have correct content keys, matching the SHA
        hash of the files.
        """
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            sha = hashlib.sha256()
            for node_ in node.nodes:
                path_to_node = os.path.join(path_to_tree, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_sha256(node_, path_to_node)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_sha256(node_, path_to_node)
                else:
                    print("DEBUG: unknown node type!")
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # actual unit tests #############################################

    def test_bound_flat_dirs(self):
        """test directory is single level, with four data files"""

        dir_name1, dir_path1, dir_name2, dir_path2 = \
            self.make_two_test_directories(ONE, FOUR)
        doc1 = MerkleDoc.create_from_file_system(dir_path1)
        # pylint: disable=no-member
        tree1 = doc1.tree
        # XXX This succeeds BUT pylint doesn't get this right: it sees
        # doc1.tree as a function
        self.assertTrue(isinstance(tree1, MerkleTree))

        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_sha256(tree1, dir_path1)

        doc2 = MerkleDoc.create_from_file_system(dir_path2)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_sha256(tree2, dir_path2)

        # pylint: disable=no-member
        self.assertTrue(tree1.equal(tree1))
        # pylint: disable=no-member
        self.assertFalse(tree1.equal(tree2))
        # pylint: disable=no-member
        self.assertFalse(tree1.equal(None))

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str)
        self.assertTrue(doc1.equal(doc1_rebuilt))  # MANGO

    def test_bound_needle_dirs(self):
        """test directories four deep with one data file at the lowest level"""
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        doc1 = MerkleDoc.create_from_file_system(dir_path1)
        tree1 = doc1.tree
        # XXX This succeeds BUT pylint doesn't get this right: it sees
        # doc1.tree as a function
        self.assertTrue(isinstance(tree1, MerkleTree))

        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_sha256(tree1, dir_path1)

        doc2 = MerkleDoc.create_from_file_system(dir_path2)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_sha256(tree2, dir_path2)

        self.assertTrue(doc1.equal(doc1))
        self.assertFalse(doc1.equal(doc2))

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str)
#       # DEBUG
#       print "needle doc:\n" + doc1Str
#       print "rebuilt needle doc:\n" + doc1Rebuilt.toString()
#       # END
        self.assertTrue(doc1.equal(doc1_rebuilt))       # FOO
예제 #32
0
class TestVarint(unittest.TestCase):
    """ Test reading and writing low-level data types. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # actual unit tests #############################################
    def test_length_as_varint(self):
        """ Verify the length in bytes of various hex values is as expected."""

        len_ = length_as_varint
        self.assertEqual(1, len_(0))
        self.assertEqual(1, len_(0x7f))
        self.assertEqual(2, len_(0x80))
        self.assertEqual(2, len_(0x3fff))
        self.assertEqual(3, len_(0x4000))
        self.assertEqual(3, len_(0x1fffff))
        self.assertEqual(4, len_(0x200000))
        self.assertEqual(4, len_(0xfffffff))
        self.assertEqual(5, len_(0x10000000))
        self.assertEqual(5, len_(0x7ffffffff))
        self.assertEqual(6, len_(0x800000000))
        self.assertEqual(6, len_(0x3ffffffffff))
        self.assertEqual(7, len_(0x40000000000))
        self.assertEqual(7, len_(0x1ffffffffffff))
        self.assertEqual(8, len_(0x2000000000000))
        self.assertEqual(8, len_(0xffffffffffffff))
        self.assertEqual(9, len_(0x100000000000000))
        self.assertEqual(9, len_(0x7fffffffffffffff))
        self.assertEqual(10, len_(0x8000000000000000))
        # the next test fails if the shift term is not parenthesized or
        # if >>1 is not converted to //2
        big_number = 0x80000000000000000 + (self.rng.next_int64() >> 1)
        self.assertEqual(10, len_(big_number))

        # MAKE SURE THIS WORKS WITH SIGNED NUMBERS

    def round_trip(self, nnn):
        """
        Test writing and reading a varint as the first and
        only field in a buffer.
        """
        # -- write varint -------------------------------------------
        field_nbr = 1 + self.rng.next_int16(1024)
        chan = Channel(LEN_BUFFER)
        write_varint_field(chan, nnn, field_nbr)
        chan.flip()

        # -- read varint --------------------------------------------
        # first the header (which is a varint) ------------
        (prim_type, field_nbr2) = read_field_hdr(chan)
        offset2 = chan.position
        self.assertEqual(PrimTypes.VARINT, prim_type)
        self.assertEqual(field_nbr, field_nbr2)
        self.assertEqual(length_as_varint(field_nbr << 3), offset2)

        # then the varint proper --------------------------
        varint_ = read_raw_varint(chan)
        chan.flip()
        offset3 = chan.limit
        self.assertEqual(nnn, varint_)
        self.assertEqual(offset2 + length_as_varint(nnn), offset3)

    def test_encode_decode(self):
        """
        Test converting certain values to varint and back again.

        All varints are handled as 64 bit unsigned ints.  WE MAY SOMETIMES
        WANT TO RESTRICT THEM TO uint32s.  Other than 42, these are the
        usual border values.
        """
        self.round_trip(0)
        self.round_trip(42)
        self.round_trip(0x7f)
        self.round_trip(0x80)
        self.round_trip(0x3fff)
        self.round_trip(0x4000)
        self.round_trip(0x1fffff)
        self.round_trip(0x200000)
        self.round_trip(0xfffffff)
        self.round_trip(0x10000000)
        self.round_trip(0x7ffffffff)
        self.round_trip(0x800000000)
        self.round_trip(0x3ffffffffff)
        self.round_trip(0x40000000000)
        self.round_trip(0x1ffffffffffff)
        self.round_trip(0x2000000000000)
        self.round_trip(0xffffffffffffff)
        self.round_trip(0x100000000000000)
        self.round_trip(0x7fffffffffffffff)
        self.round_trip(0x8000000000000000)
        self.round_trip(0xffffffffffffffff)
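
# A minimal, self-contained sketch of the rule the boundary values above
# exercise (an illustration only, not the library's length_as_varint): a
# varint carries 7 payload bits per byte, so the encoded length is the number
# of 7-bit groups needed, with a one-byte minimum for zero.
def varint_len_sketch(value):
    """Bytes a protobuf-style varint would need for a non-negative int."""
    length = 1
    while value > 0x7f:
        value >>= 7
        length += 1
    return length

assert varint_len_sketch(0) == 1
assert varint_len_sketch(0x7f) == 1
assert varint_len_sketch(0x80) == 2
assert varint_len_sketch(0x8000000000000000) == 10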
예제 #33
0
class TestNLHLeaf(unittest.TestCase):
    """ Test NLHLeaf-related functions. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific hash. """

        check_hashtype(hashtype)
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError

        name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = NLHLeaf(name, hash0, hashtype)
        self.assertEqual(name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        name2 = name
        while name2 == name:
            name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = NLHLeaf(name2, hash1, hashtype)
        self.assertEqual(name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertEqual(leaf0, leaf0)
        self.assertEqual(leaf1, leaf1)
        self.assertFalse(leaf0 == leaf1)

        leaf0c = leaf0.clone()
        self.assertEqual(leaf0c, leaf0)

        leaf1c = leaf1.clone()
        self.assertEqual(leaf1c, leaf1)

    def test_simplest_constructor(self):
        """ Test simple constructor for various hashes. """

        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype)
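
# A hedged usage sketch for the constructor tested above: an NLHLeaf simply
# pairs a file name with the binary digest of the file's content.  NLHLeaf
# and HashTypes are the names already used in the test; their imports are
# omitted there and are assumed here as well.
import hashlib

def sha2_leaf_sketch(file_name, content):
    """Build an NLHLeaf for the given content using a SHA-256 digest."""
    digest = hashlib.sha256(content).digest()
    return NLHLeaf(file_name, digest, HashTypes.SHA2)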
예제 #34
0
class TestMerkleTree(unittest.TestCase):
    """ Test package functionality at the Tree level. """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions ---------------------------------------------

    def get_two_unique_directory_names(self):
        """ Make two different quasi-random directory names."""
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """ Make a directory tree with a specific name, depth and width."""
        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            if os.path.isfile(dir_path):
                os.unlink(dir_path)
            elif os.path.isdir(dir_path):
                shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """ Create two test directories with different names. """
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_sha(self, node, path_to_file, hashtype):
        """
        Verify a leaf node is hashed correctly, using a specific SHA hash type.
        """
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        if hashtype == HashTypes.SHA1:
            sha = XLSHA1()
        elif hashtype == HashTypes.SHA2:
            sha = XLSHA2()
        elif hashtype == HashTypes.SHA3:
            sha = XLSHA3()
        elif hashtype == HashTypes.BLAKE2B:
            sha = XLBLAKE2B_256()
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_sha(self, node, path_to_node, hashtype):
        """
        Verify tree elements are hashed correctly, assuming that the node
        is a MerkleTree, using a specific SHA hash type.
        """
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            if hashtype == HashTypes.SHA1:
                sha = XLSHA1()
            elif hashtype == HashTypes.SHA2:
                sha = XLSHA2()
            elif hashtype == HashTypes.SHA3:
                sha = XLSHA3()
            elif hashtype == HashTypes.BLAKE2B:
                sha = XLBLAKE2B_256()
            else:
                raise NotImplementedError
            for node_ in node.nodes:
                path_to_file = os.path.join(path_to_node, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_sha(node_, path_to_file, hashtype)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_sha(node_, path_to_file, hashtype)
                else:
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            # take care to compare values of the same type;
            # node.binHash is binary, node.hexHash is hex
            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # unit tests ----------------------------------------------------

    def test_pathless_unbound(self):
        """
        Test basic characteristics of very simple MerkleTrees created
        using our standard SHA hash types.
        """
        for using in [
                HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3,
                HashTypes.BLAKE2B
        ]:
            self.do_test_pathless_unbound(using)

    def do_test_pathless_unbound(self, hashtype):
        """
        Test basic characteristics of very simple MerkleTrees created
        using a specific SHA hash type.
        """
        (dir_name1, dir_name2) = self.get_two_unique_directory_names()

        check_hashtype(hashtype)
        tree1 = MerkleTree(dir_name1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        if hashtype == HashTypes.SHA1:
            self.assertEqual(SHA1_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.SHA2:
            self.assertEqual(SHA2_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.SHA3:
            self.assertEqual(SHA3_HEX_NONE, tree1.hex_hash)
        elif hashtype == HashTypes.BLAKE2B:
            self.assertEqual(BLAKE2B_256_HEX_NONE, tree1.hex_hash)
        else:
            raise NotImplementedError
        tree2 = MerkleTree(dir_name2, hashtype)
        self.assertEqual(dir_name2, tree2.name)

        # these tests remain skimpy
        self.assertFalse(tree1 is None)
        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)

        # there should be no indent on the first line
        self.assertFalse(tree1_str[0] == ' ')

        # no extra lines should be added
        lines = tree1_str.split('\n')
        # this split generates an extra blank line, because the serialization
        # ends with CR-LF
        if lines[-1] == '':
            lines = lines[:-1]
        self.assertEqual(1, len(lines))

        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
        self.assertTrue(tree1 == tree1_rebuilt)

    def test_bound_flat_dirs(self):
        """
        Test handling of flat directories with a few data files
        using various SHA hash types.
        """
        for using in [
                HashTypes.SHA1,
                HashTypes.SHA2,
                HashTypes.SHA3,
        ]:
            self.do_test_bound_flat_dirs(using)

    def do_test_bound_flat_dirs(self, hashtype):
        """test directory is single level, with four data files"""

        check_hashtype(hashtype)
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(ONE, FOUR)
        tree1 = MerkleTree.create_from_file_system(dir_path1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        tree2 = MerkleTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertFalse(tree1 is None)
        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)
        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
        self.assertTrue(tree1 == tree1_rebuilt)

    def test_bound_needle_dirs(self):
        """
        Test directories four deep with various SHA hash types.
        """
        for using in [
                HashTypes.SHA1,
                HashTypes.SHA2,
                HashTypes.SHA3,
        ]:
            self.do_test_bound_needle_dirs(using)

    def do_test_bound_needle_dirs(self, hashtype):
        """test directories four deep with one data file at the lowest level"""
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        tree1 = MerkleTree.create_from_file_system(dir_path1, hashtype)

        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        tree2 = MerkleTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1_str = tree1.to_string(0)
        tree1_rebuilt = MerkleTree.create_from_serialization(
            tree1_str, hashtype)
        #       # DEBUG
        #       print "NEEDLEDIR TREE1:\n" + tree1Str
        #       print "REBUILT TREE1:\n" + tree1Rebuilt.toString("")
        #       # END
        self.assertTrue(tree1 == tree1_rebuilt)

    # tests of bugs previously found --------------------------------

    def test_gray_boxes_bug1(self):
        """
        Verify that bug #1 in handling serialization of grayboxes website
        has been corrected.
        """
        serialization =\
            '721a08022dd26e7be98b723f26131786fd2c0dc3 grayboxes.com/\n' +\
            ' fcd3973c66230b9078a86a5642b4c359fe72d7da images/\n' +\
            '  15e47f4eb55197e1bfffae897e9d5ce4cba49623 grayboxes.gif\n' +\
            ' 2477b9ea649f3f30c6ed0aebacfa32cb8250f3df index.html\n'

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(4, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA1)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(serialization,
                                                     HashTypes.SHA1)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        self.assertTrue(tree1 == tree2)

        # 2014-06-26 tagged this on here to test firstLineRE_1()
        first_line = string[0]
        match_ = MerkleTree.first_line_re_1().match(first_line)
        self.assertTrue(match_ is not None)
        self.assertEqual(match_.group(1), '')  # indent
        tree_hash = match_.group(2)
        dir_name = match_.group(3)
        self.assertEqual(tree_hash + ' ' + dir_name, first_line)

    def test_xlattice_bug1(self):
        """
        this test relies on dat.xlattice.org being locally present
        and an internally consistent merkleization
        """
        with open('tests/test_data/dat.xlattice.org', 'rb') as file:
            serialization = str(file.read(), 'utf-8')

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(serialization,
                                                     HashTypes.SHA1)

        #       # DEBUG
        #       print "tree1 has %d nodes" % len(tree1.nodes)
        #       with open('junk.tree1', 'w') as t:
        #           t.write( tree1.toString(0) )
        #       # END

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(2511, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA1)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        self.assertTrue(tree1 == tree2)

    def test_gray_boxes_bug3(self):
        """ Test solution to bug in handling grayboxes website. """

        serialization =\
            '088d0e391e1a4872329e0f7ac5d45b2025363e26c199a7' + \
            '4ea39901d109afd6ba grayboxes.com/\n' +\
            ' 24652ddc14687866e6b1251589aee7e1e3079a87f80cd' + \
            '7775214f6d837612a90 images/\n' +\
            '  1eb774eef9be1e696f69a2f95711be37915aac283bb4' + \
            'b34dcbaf7d032233e090 grayboxes.gif\n' +\
            ' 6eacebda9fd55b59c0d2e48e2ed59ce9fd683379592f8' + \
            'e662b1de88e041f53c9 index.html\n'

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(4, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA2)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(serialization,
                                                     HashTypes.SHA2)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        self.assertTrue(tree1 == tree2)

        # 2014-06-26 tagged this on here to test firstLineRE_2()
        first_line = string[0]
        match_ = MerkleTree.first_line_re_2().match(first_line)
        self.assertTrue(match_ is not None)
        self.assertEqual(match_.group(1), '')  # indent
        tree_hash = match_.group(2)
        dir_name = match_.group(3)
        self.assertEqual(tree_hash + ' ' + dir_name, first_line)

    def test_xlattice_bug3(self):
        """
        this test relies on dat2.xlattice.org being locally present
        and an internally consistent merkleization
        """
        with open('tests/test_data/dat2.xlattice.org', 'rb') as file:
            serialization = str(file.read(), 'utf-8')

        # create from serialization ---------------------------------
        tree1 = MerkleTree.create_from_serialization(serialization,
                                                     HashTypes.SHA2)

        ser1 = tree1.to_string(0)
        self.assertEqual(serialization, ser1)

        # create from string array ----------------------------------
        string = serialization.split('\n')
        string = string[:-1]
        self.assertEqual(2511, len(string))

        tree2 = MerkleTree.create_from_string_array(string, HashTypes.SHA2)

        ser2 = tree2.to_string(0)
        self.assertEqual(serialization, ser2)

        self.assertTrue(tree1 == tree2)
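
# A minimal sketch of the hashing rule verify_tree_sha() checks above (an
# illustration only, not the MerkleTree implementation): a directory node's
# hash is the digest of its children's binary hashes, fed to the hasher in
# order.  hashlib.sha256 stands in for the XLSHA2 wrapper used in the tests.
import hashlib

def parent_hash_sketch(child_hashes):
    """Combine child binary hashes into a directory-node hash."""
    sha = hashlib.sha256()
    for child in child_hashes:
        sha.update(child)
    return sha.digest()

leaf_a = hashlib.sha256(b'contents of file a').digest()
leaf_b = hashlib.sha256(b'contents of file b').digest()
assert len(parent_hash_sketch([leaf_a, leaf_b])) == 32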
예제 #35
0
class TestFieldImpl(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())

#       data = StringIO(ZOGGERY_PROTO_SPEC)
#       p = StringProtoSpecParser(data)   # data should be file-like
#       self.str_obj_model = p.parse()     # object model from string serialization
# self.proto_name = self.str_obj_model.name  # the dotted name of the
# protocol

    def tearDown(self):
        pass

    # utility functions #############################################

    def make_registries(self, protocol):
        node_reg = R.NodeReg()
        proto_reg = R.ProtoReg(protocol, node_reg)
        msg_reg = R.MsgReg(proto_reg)
        return (node_reg, proto_reg, msg_reg)

    def le_msg_values(self):
        """ returns a list """
        timestamp = int(time.time())
        node_id = [0] * 20
        key = [0] * 20
        length = self.rng.next_int32(256 * 256)
        # let's have some random bytes
        self.rng.next_bytes(node_id)
        self.rng.next_bytes(key)
        by_ = 'who is responsible'
        path = '/home/jdd/tarballs/something.tar.gz'
        return [timestamp, node_id, key, length, by_, path]

    def lil_big_msg_values(self):
        """
        This returns a list of random-ish values in order by field type
        so that values[_F_FLOAT], for example, is a random float value.
        """
        values = []

        # 2016-03-30 This is NOT in sync with littleBigTest.py,
        #   because I have added a None for lMsg at _L_MSG

        values.append(self.rng.next_boolean())       # vBoolReqField         0
        values.append(self.rng.next_int16())         # vEnumReqField         1

        values.append(self.rng.next_int32())         # vInt32ReqField        2
        values.append(self.rng.next_int64())         # vInt64ReqField        3

        values.append(self.rng.next_int32())         # vuInt32ReqField       4
        values.append(self.rng.next_int32())         # vuInt64ReqField       5
        values.append(self.rng.next_int64())         # vsInt32ReqField       6
        values.append(self.rng.next_int64())         # vsInt64ReqField       7

        values.append(self.rng.next_int32())         # fsInt32ReqField       8
        values.append(self.rng.next_int32())         # fuInt32ReqField       9
        values.append(self.rng.next_real())          # fFloatReqField        10

        values.append(self.rng.next_int64())         # fsInt64ReqField       11
        values.append(self.rng.next_int64())         # fuInt64ReqField       12

        values.append(self.rng.next_real())          # fDoubleReqField       13

        # lStringReqField       14
        values.append(self.rng.next_file_name(16))

        rnd_len = 16 + self.rng.next_int16(49)
        byte_buf = bytearray(rnd_len)
        self.rng.next_bytes(byte_buf)
        values.append(bytes(byte_buf))               # lBytesReqField        15

        values.append(None)                         # <-------- for lMsg    16

        b128_buf = bytearray(16)
        self.rng.next_bytes(b128_buf)
        values.append(bytes(b128_buf))               # fBytes16ReqField      17

        b160_buf = bytearray(20)
        self.rng.next_bytes(b160_buf)
        values.append(bytes(b160_buf))               # fBytes20ReqField      18

        b256_buf = bytearray(32)
        self.rng.next_bytes(b256_buf)
        values.append(bytes(b256_buf))               # fBytes32ReqField      19

        return values

    # actual unit tests #############################################

    def check_field_impl_against_spec(self,
                                      proto_name, msg_name,   # not actually tested
                                      field_spec, value):    # significant for tests

        self.assertIsNotNone(field_spec)
        dotted_name = "%s.%s" % (proto_name, msg_name)
        cls = make_field_class(dotted_name, field_spec)         # a class
        if '__dict__' in dir(cls):
            print('\nGENERATED FieldImpl CLASS DICTIONARY')
            for exc in list(cls.__dict__.keys()):
                print("  %-20s %s" % (exc, cls.__dict__[exc]))

        self.assertIsNotNone(cls)
        file = cls(value)                                      # an instance
        self.assertIsNotNone(file)
        self.assertTrue(isinstance(file, cls))

        # instance attributes -----------------------------
        # we verify that the properties work correctly

        self.assertEqual(field_spec.name, file._name)
        self.assertEqual(field_spec.field_type_ndx, file.field_type)
        self.assertEqual(field_spec.quantifier, file.quantifier)
        self.assertEqual(field_spec.field_nbr, file.field_nbr)
        self.assertIsNone(file.default)          # not an elegant test

        # instance attribute ------------------------------
        # we can read back the value assigned to the instance

        self.assertEqual(value, file.value)

        # with slots enabled, this is never seen ----------
        # because __dict__ is not in the list of valid
        # attributes for the instance
        if '__dict__' in dir(file):
            print('\nGENERATED FieldImpl INSTANCE DICTIONARY')
            for item in list(file.__dict__.keys()):
                print("%-20s %s" % (item, file.__dict__[item]))

    def test_field_impl(self):

        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)
        values = self.lil_big_msg_values()

        # DEBUG
        print("testFieldImpl: there are %d values" % len(values))
        # END

        # There are 18 values corresponding to the 18 field types;
        # _L_MSG should be skipped

        for tstamp in range(FieldTypes.F_BYTES32 + 1):
            # DEBUG
            print("testFieldImpl: t = %d" % tstamp)
            # END
            if tstamp == FieldTypes.L_MSG:
                continue

            # default quantifier is Q_REQ_, default is None

            field_name = 'field%d' % tstamp
            field_spec = M.FieldSpec(
                msg_reg, field_name, tstamp, field_nbr=tstamp + 100)

            self.check_field_impl_against_spec(
                PROTOCOL_UNDER_TEST, MSG_UNDER_TEST,
                field_spec, values[tstamp])

    # TEST FIELD SPEC -----------------------------------------------

    def do_field_spec_test(self, name, field_type, quantifier=M.Q_REQUIRED,
                           field_nbr=0, default=None):

        node_reg, proto_reg, msg_reg = self.make_registries(
            PROTOCOL_UNDER_TEST)

        # XXX Defaults are ignored for now.
        file = M.FieldSpec(
            msg_reg,
            name,
            field_type,
            quantifier,
            field_nbr,
            default)

        self.assertEqual(name, file.name)
        self.assertEqual(field_type, file.field_type_ndx)
        self.assertEqual(quantifier, file.quantifier)
        self.assertEqual(field_nbr, file.field_nbr)
        if default is not None:
            self.assertEqual(default, file.default)

        expected_repr = "%s %s%s @%d \n" % (
            name, file.field_type_name, M.q_name(quantifier), field_nbr)
        # DEFAULTS NOT SUPPORTED
        self.assertEqual(expected_repr, file.__repr__())

    def test_quantifiers(self):
        q_name = M.q_name
        self.assertEqual('', q_name(M.Q_REQUIRED))
        self.assertEqual('?', q_name(M.Q_OPTIONAL))
        self.assertEqual('*', q_name(M.Q_STAR))
        self.assertEqual('+', q_name(M.Q_PLUS))

    def test_field_spec(self):
        # default is not implemented yet
        self.do_field_spec_test('foo', FieldTypes.V_UINT32, M.Q_REQUIRED, 9)
        self.do_field_spec_test('bar', FieldTypes.V_SINT32, M.Q_STAR, 17)
        self.do_field_spec_test(
            'node_id',
            FieldTypes.F_BYTES20,
            M.Q_OPTIONAL,
            92)
        self.do_field_spec_test('tix', FieldTypes.V_BOOL, M.Q_PLUS, 147)
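
# A small sketch of the textual form do_field_spec_test() asserts above:
# "<name> <type-sym><quantifier> @<field-number> " plus a newline, with the
# quantifier symbols taken from test_quantifiers ('' required, '?' optional,
# '*' star, '+' plus).  The 'vuint32' symbol below is an assumption based on
# the sym values shown in the next example.
Q_SYMS = {'required': '', 'optional': '?', 'star': '*', 'plus': '+'}

def field_spec_line_sketch(name, type_sym, quantifier, field_nbr):
    """Render a field spec line in the format the test expects."""
    return "%s %s%s @%d \n" % (name, type_sym, Q_SYMS[quantifier], field_nbr)

assert field_spec_line_sketch('foo', 'vuint32', 'required', 9) == "foo vuint32 @9 \n"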
예제 #36
0
class TestFieldTypes(unittest.TestCase):
    """
    Actually tests the method used for instantiating and importing
    an instance of the FieldTypes class.
    """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    def test_new_fieldtypes(self):
        """
        Test the new definition of FieldTypes introduced 2017-01-30.
        """
        self.assertEqual(len(FieldTypes), FieldTypes.F_BYTES32.value + 1)
        for ndx, _ in enumerate(FieldTypes):
            self.assertEqual(_.value, ndx)
            # round trip member to sym and back to member
            self.assertEqual(FieldTypes.from_sym(_.sym), _)

    def test_constants(self):
        """
        Verify that our constants are immutable and conversion between
        string and integer forms works as expected.
        """
        self.assertEqual(len(FieldTypes), 18)

        # pylint: disable=unsubscriptable-object
        self.assertEqual(FieldTypes.V_BOOL.value, 0)
        self.assertEqual(FieldTypes.V_BOOL.sym, 'vbool')

        self.assertEqual(FieldTypes.F_BYTES32.value, len(FieldTypes) - 1)
        self.assertEqual(FieldTypes.F_BYTES32.sym, 'fbytes32')

    def test_len_funcs(self):
        """
        Verify that varint length functions return correct values.

        Tests are performed using randomly selected field numbers
        (in the range 0 .. (2^16)-1) and integer values in the same
        range.
        """
        ndx = self.rng.next_int16()  # random field number
        value = self.rng.next_int16()  # random integer value

        # == varint types ===========================================
        # ERROR because field_hdr_len 2nd param should be PrimType
        # ********************************************************
        len_ = raw.field_hdr_len(ndx, FieldTypes.V_BOOL)
        self.assertEqual(len_ + 1, typed.vbool_len(True, ndx))
        self.assertEqual(len_ + 1, typed.vbool_len(False, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_ENUM)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.venum_len(value, ndx))
        # self.assertEqual( x, typed.vEnumLen(-x, n) )

        value = self.rng.next_int32()
        self.assertTrue(value >= 0)

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_UINT32)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.vuint32_len(value, ndx))

        value = self.rng.next_int32()
        self.assertTrue(value >= 0)
        value = value - 0x80000000

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_SINT32)
        ppp = typed.encode_sint32(value)
        zzz = len_ + raw.length_as_varint(ppp)
        self.assertEqual(zzz, typed.vsint32_len(value, ndx))

        value = self.rng.next_int64()
        self.assertTrue(value >= 0)

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_UINT64)
        zzz = len_ + raw.length_as_varint(value)
        self.assertEqual(zzz, typed.vuint64_len(value, ndx))

        value = self.rng.next_int64()
        self.assertTrue(value >= 0)
        value = value - 0x8000000000000000

        len_ = raw.field_hdr_len(ndx, FieldTypes.V_SINT64)
        ppp = typed.encode_sint64(value)
        zzz = len_ + raw.length_as_varint(ppp)
        self.assertEqual(zzz, typed.vsint64_len(value, ndx))

        # == fixed length 4 byte ====================================
        value = self.rng.next_int64()  # value should be ignored

        self.assertTrue(value >= 0)
        value = value - 0x8000000000000000

        # value is a signed 64-bit number whose magnitude should be irrelevant
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_UINT32)
        self.assertEqual(len_ + 4, typed.fuint32_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_SINT32)
        self.assertEqual(len_ + 4, typed.fsint32_len(value, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_FLOAT)
        self.assertEqual(len_ + 4, typed.ffloat_len(value, ndx))

        # == fixed length 8 byte ====================================
        # value is that same signed 64-bit number; its magnitude should be irrelevant
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_UINT64)
        self.assertEqual(len_ + 8, typed.fuint64_len(value, ndx))
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_SINT64)
        self.assertEqual(len_ + 8, typed.fsint64_len(value, ndx))
        len_ = raw.field_hdr_len(ndx, FieldTypes.F_DOUBLE)
        self.assertEqual(len_ + 8, typed.fdouble_len(value, ndx))

        # == LEN PLUS types =========================================
        def do_len_plus_test(length, ndx):
            """
            Verify that fields of interesting lengths have expected
            raw encodings.
            """
            string = [0] * length
            k = len(string)
            len_ = raw.field_hdr_len(ndx, FieldTypes.L_BYTES)
            expected_len = len_ + raw.length_as_varint(k) + k
            self.assertEqual(expected_len, typed.lbytes_len(string, ndx))

        # -- lString ---------------------------------------
        string = self.rng.next_file_name(256)
        len_ = raw.field_hdr_len(ndx, FieldTypes.L_STRING)
        k = len(string)
        expected_len = len_ + raw.length_as_varint(k) + k
        self.assertEqual(expected_len, typed.l_string_len(string, ndx))

        # -- lBytes ----------------------------------------
        do_len_plus_test(0x7f, ndx)
        do_len_plus_test(0x80, ndx)
        do_len_plus_test(0x3fff, ndx)
        do_len_plus_test(0x4000, ndx)

        # -- lMsg ------------------------------------------
        # XXX STUB

        # -- fixed length byte arrays -------------------------------
        buf = [0] * 512  # length functions should ignore actual size

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES16)
        self.assertEqual(len_ + 16, typed.fbytes16_len(buf, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES20)
        self.assertEqual(len_ + 20, typed.fbytes20_len(buf, ndx))

        len_ = raw.field_hdr_len(ndx, FieldTypes.F_BYTES32)
        self.assertEqual(len_ + 32, typed.fbytes32_len(buf, ndx))
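
# Hedged sketches of two of the length rules exercised above; these mirror
# the arithmetic in test_len_funcs, not the library's own code.  Signed
# varints are zigzag-encoded before their varint length is taken, and a
# length-delimited ("LEN PLUS") field costs header + varint(len) + len bytes.
def zigzag32_sketch(value):
    """Map a signed 32-bit int to an unsigned one, protobuf-style."""
    return ((value << 1) ^ (value >> 31)) & 0xffffffff

def varint_len_sketch(value):
    """Bytes needed to encode a non-negative int as a varint."""
    length = 1
    while value > 0x7f:
        value >>= 7
        length += 1
    return length

def lbytes_len_sketch(hdr_len, payload):
    """Total size of a length-delimited field, given its header length."""
    return hdr_len + varint_len_sketch(len(payload)) + len(payload)

assert zigzag32_sketch(-1) == 1 and zigzag32_sketch(1) == 2
assert lbytes_len_sketch(2, b'\x00' * 0x80) == 2 + 2 + 0x80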
예제 #37
0
def setUp(self):
    self.rng = SimpleRNG(time.time())
예제 #38
0
class TestNLHTree2(unittest.TestCase):
    """ Test trees derived from various quasi-random directory structures. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions ---------------------------------------------

    def get_two_unique_directory_names(self):
        """ Make two unique directory names. """

        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        """
        Create a test directory below tmp/ with specified characteristics.
        """

        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        """ Make two distinct quasi-random test directories below tmp/. """

        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    # unit tests ----------------------------------------------------

    def test_pathless_unbound(self):
        """ Test the constructor using various hash types. """

        for hashtype in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_pathless_unbound(hashtype)

    def do_test_pathless_unbound(self, hashtype):
        """
        Test constructor using two directories and a specific hash type.
        """

        (dir_name1, dir_name2) = self.get_two_unique_directory_names()

        check_hashtype(hashtype)
        tree1 = NLHTree(dir_name1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        self.assertEqual(tree1.hashtype, hashtype)

        tree2 = NLHTree(dir_name2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        self.assertEqual(tree2.hashtype, hashtype)

        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)
        self.assertFalse(tree1 is None)

        tree1c = tree1.clone()
        self.assertEqual(tree1c, tree1)

    def test_bound_flat_dirs(self):
        """
        Test directory is single level, with four data files, using
        various hash types.
        """
        for hashtype in HashTypes:
            self.do_test_bound_flat_dirs(hashtype)

    def do_test_bound_flat_dirs(self, hashtype):
        """
        Test directory is single level, with four data files, using
        specific hash type.
        """

        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(ONE, FOUR)
        tree1 = NLHTree.create_from_file_system(dir_path1, hashtype)
        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))

        tree2 = NLHTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))

        self.assertEqual(tree1, tree1)
        self.assertFalse(tree1 == tree2)
        self.assertFalse(tree1 is None)

        tree1c = tree1.clone()
        self.assertEqual(tree1c, tree1)

    def test_bound_needle_dirs1(self):
        """
        Test directories four deep with one data file at the lowest level
        using various hash types.
        """
        for hashtype in HashTypes:
            self.do_test_bound_needle_dirs(hashtype)

    def do_test_bound_needle_dirs(self, hashtype):
        """
        Test directories four deep with one data file at the lowest level
        using specific hash type.
        """
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        tree1 = NLHTree.create_from_file_system(dir_path1, hashtype)

        self.assertEqual(dir_name1, tree1.name)
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))

        tree2 = NLHTree.create_from_file_system(dir_path2, hashtype)
        self.assertEqual(dir_name2, tree2.name)
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))

        self.assertTrue(tree1 == tree1)
        self.assertFalse(tree1 == tree2)

        tree1c = tree1.clone()
        self.assertEqual(tree1c, tree1)
예제 #39
0
#!/usr/bin/env python3
# xlcrypto_py/test_nibble_counters.py
""" Test nibble counters used in CountingBlooms. """

import time
import unittest
# from hashlib import sha1, sha256 as sha2

from rnglib import SimpleRNG
from xlcrypto import XLFilterError
from xlcrypto.filters import BloomSHA, NibbleCounters

RNG = SimpleRNG(time.time())


class TestNibbleCounters(unittest.TestCase):
    """
    Tests the counters associated with Bloom filters for sets whose members
    are 20- or 32-byte SHA digests.
    """
    def do_nibble_test_bit(self, counters, filter_bit):
        """ Count up through all possible values and beyond. """
        value = 0

        # DEBUG
        # print("do_nibble_test_bit: filter_bit %6d" % filter_bit)
        # END

        for i in range(18):
            # DEBUG
            # print("  up %2d" % i)
예제 #40
0
class TestRandomDir(unittest.TestCase):
    """ Test building quasi-random data files and directory structures. """

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################

    def do_test_random_dir(self, hashtype):
        """ Test building random directories with specific SHA hash type. """
        check_hashtype(hashtype)
        depth = 1 + self.rng.next_int16(3)       # so 1 to 3
        width = 1 + self.rng.next_int16(16)      # so 1 to 16

        blk_count = 1 + self.rng.next_int16(3)     # so 1 to 3
        # last block will usually be only partially populated
        max_len = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)
        min_len = 1

        # we want the directory name to be unique
        path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(path_to_dir):
            path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))

        self.rng.next_data_dir(path_to_dir, depth, width, max_len, min_len)

        data = bytearray(max_len)            # that many null bytes
        self.rng.next_bytes(data)            # fill with random data
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint:disable=no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.hexdigest()
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        if hashtype == HashTypes.SHA1:
            file_hash = file_sha1hex(path_to_file)
        elif hashtype == HashTypes.SHA2:
            file_hash = file_sha2hex(path_to_file)
        elif hashtype == HashTypes.SHA3:
            file_hash = file_sha3hex(path_to_file)
        elif hashtype == HashTypes.BLAKE2B:
            file_hash = file_blake2b_hex(path_to_file)
        else:
            raise NotImplementedError
        self.assertEqual(hash_, file_hash)

    def test_random_dir(self):
        """ Test building random directories with supported SHA hash types. """
        for hashtype in HashTypes:
            self.do_test_random_dir(hashtype)
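
# A hedged sketch of what the file_sha2hex()-style helpers used above are
# presumed to do: stream the file through hashlib in blocks and return the
# hex digest, which then matches hashing the same bytes in memory.
import hashlib

def file_sha2hex_sketch(path, block_size=65536):
    """Hex SHA-256 digest of a file, read in fixed-size blocks."""
    sha = hashlib.sha256()
    with open(path, 'rb') as file:
        while True:
            block = file.read(block_size)
            if not block:
                break
            sha.update(block)
    return sha.hexdigest()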
예제 #41
0
class TestMerkleLeaf(unittest.TestCase):
    """ Test MerkleLeaf functionality. """
    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################

    # actual unit tests #############################################
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific SHA type. """

        check_hashtype(hashtype)
        if hashtype == HashTypes.SHA1:
            sha = XLSHA1()
        elif hashtype == HashTypes.SHA2:
            sha = XLSHA2()
        elif hashtype == HashTypes.SHA3:
            sha = XLSHA3()
        elif hashtype == HashTypes.BLAKE2B:
            sha = XLBLAKE2B_256()
        else:
            raise NotImplementedError

        file_name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = MerkleLeaf(file_name, hashtype, hash0)
        self.assertEqual(file_name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        file_name2 = file_name
        while file_name2 == file_name:
            file_name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = MerkleLeaf(file_name2, hashtype, hash1)
        self.assertEqual(file_name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertTrue(leaf0 == leaf0)
        self.assertFalse(leaf0 == leaf1)

        # XXX USE NLHTree instead
        # pair0    = leaf0.toPair()
        # leaf0bis = MerkleLeaf.createFromPair(pair0)
        # self.assertEqual(leaf0bis, leaf0)

        # pair1    = leaf1.toPair()
        # leaf1bis = MerkleLeaf.createFromPair(pair1)
        # self.assertEqual(leaf1bis, leaf1)

    def test_simple_constructor(self):
        """ Test constructor for various hash types. """
        for hashtype in HashTypes:
            self.do_test_simple_constructor(hashtype=hashtype)
예제 #42
0
class TestMerkleDoc(unittest.TestCase):

    def setUp(self):
        self.rng = SimpleRNG(time.time())

    def tearDown(self):
        pass

    # utility functions #############################################
    def get_two_unique_directory_names(self):
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        self.assertTrue(len(dir_name1) > 0)
        self.assertTrue(len(dir_name2) > 0)
        self.assertTrue(dir_name1 != dir_name2)
        return (dir_name1, dir_name2)

    def make_one_named_test_directory(self, name, depth, width):
        dir_path = "tmp/%s" % name
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)
        self.rng.next_data_dir(dir_path, depth, width, 32)
        return dir_path

    def make_two_test_directories(self, depth, width):
        dir_name1 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path1 = self.make_one_named_test_directory(dir_name1, depth, width)

        dir_name2 = dir_name1
        while dir_name2 == dir_name1:
            dir_name2 = self.rng.next_file_name(MAX_NAME_LEN)
        dir_path2 = self.make_one_named_test_directory(dir_name2, depth, width)

        return (dir_name1, dir_path1, dir_name2, dir_path2)

    def verify_leaf_sha(self, node, path_to_file, hashtype):
        check_hashtype(hashtype)
        self.assertTrue(os.path.exists(path_to_file))
        with open(path_to_file, "rb") as file:
            data = file.read()
        self.assertFalse(data is None)
        # pylint: disable=redefined-variable-type
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint: disable=no-member
            sha = hashlib.sha3_256()
        sha.update(data)
        hash_ = sha.digest()
        self.assertEqual(hash_, node.bin_hash)

    def verify_tree_sha(self, node, path_to_tree, hashtype):
        # we assume that the node is a MerkleTree
        check_hashtype(hashtype)
        if node.nodes is None:
            self.assertEqual(None, node.bin_hash)
        else:
            hash_count = 0
            # pylint: disable=redefined-variable-type
            if hashtype == HashTypes.SHA1:
                sha = hashlib.sha1()
            elif hashtype == HashTypes.SHA2:
                sha = hashlib.sha256()
            elif hashtype == HashTypes.SHA3:
                # pylint: disable=no-member
                sha = hashlib.sha3_256()
            for node_ in node.nodes:
                path_to_node = os.path.join(path_to_tree, node_.name)
                if isinstance(node_, MerkleLeaf):
                    self.verify_leaf_sha(node_, path_to_node, hashtype)
                elif isinstance(node_, MerkleTree):
                    self.verify_tree_sha(node_, path_to_node, hashtype)
                else:
                    print("DEBUG: unknown node type!")
                    self.fail("unknown node type!")
                if node_.bin_hash is not None:
                    hash_count += 1
                    sha.update(node_.bin_hash)

            if hash_count == 0:
                self.assertEqual(None, node.bin_hash)
            else:
                self.assertEqual(sha.digest(), node.bin_hash)

    # actual unit tests #############################################

    def test_bound_flat_dirs(self):
        """test directory is single level, with four data files"""
        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_bound_flat_dirs(using)

    def do_test_bound_flat_dirs(self, hashtype):

        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(ONE, FOUR)

        doc1 = MerkleDoc.create_from_file_system(dir_path1, hashtype)
        tree1 = doc1.tree
        self.assertTrue(isinstance(tree1, MerkleTree))
        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(FOUR, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        doc2 = MerkleDoc.create_from_file_system(dir_path2, hashtype)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(FOUR, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertEqual(tree1, tree1)
        self.assertFalse(tree1 == tree2)
        self.assertFalse(tree1 is None)

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str, hashtype)
        # DEBUG
        #print("flat doc:\n" + doc1Str)
        #print("rebuilt flat doc:\n" + doc1Rebuilt.toString())
        # END
        self.assertTrue(doc1.equal(doc1_rebuilt))  # MANGO

    def test_bound_needle_dirs(self):
        """test directories four deep with one data file at the lowest level"""
        for using in [HashTypes.SHA1, HashTypes.SHA2, HashTypes.SHA3, ]:
            self.do_test_bound_needle_dirs(using)

    def do_test_bound_needle_dirs(self, hashtype):
        check_hashtype(hashtype)
        (dir_name1, dir_path1, dir_name2, dir_path2) =\
            self.make_two_test_directories(FOUR, ONE)
        doc1 = MerkleDoc.create_from_file_system(dir_path1, hashtype)
        tree1 = doc1.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name1, tree1.name)
        self.assertTrue(doc1.bound)
        self.assertEqual(("tmp/%s" % dir_name1), dir_path1)
        # pylint: disable=no-member
        nodes1 = tree1.nodes
        self.assertTrue(nodes1 is not None)
        self.assertEqual(ONE, len(nodes1))
        self.verify_tree_sha(tree1, dir_path1, hashtype)

        doc2 = MerkleDoc.create_from_file_system(dir_path2, hashtype)
        tree2 = doc2.tree
        # pylint: disable=no-member
        self.assertEqual(dir_name2, tree2.name)
        self.assertTrue(doc2.bound)
        self.assertEqual(("tmp/%s" % dir_name2), dir_path2)
        # pylint: disable=no-member
        nodes2 = tree2.nodes
        self.assertTrue(nodes2 is not None)
        self.assertEqual(ONE, len(nodes2))
        self.verify_tree_sha(tree2, dir_path2, hashtype)

        self.assertTrue(doc1.equal(doc1))
        self.assertFalse(doc1.equal(doc2))

        doc1_str = doc1.to_string()
        doc1_rebuilt = MerkleDoc.create_from_serialization(doc1_str, hashtype)
#       # DEBUG
#       print "needle doc:\n" + doc1Str
#       print "rebuilt needle doc:\n" + doc1Rebuilt.toString()
#       # END
        self.assertTrue(doc1.equal(doc1_rebuilt))       # FOO