def test_build_seed(self):
    """build_seed() must reproduce the fixture hashes for heights 0 and 3."""
    bucket = Builder(addresses["alpha"], my_shard_size, my_max_size,
                     my_min_free_size)
    seed_fix = fixtures["test_build_seed"]
    self.assertEqual(bucket.build_seed(0), seed_fix["hash0"])
    self.assertEqual(bucket.build_seed(3), seed_fix["hash3"])
 def test_use_folder_tree_cleanup(self):
     """With use_folder_tree enabled, a cleanup build must leave no files.

     Bug fix: the original passed a callback to os.walk() in the removed
     os.path.walk() style.  os.walk() returns a generator and was never
     iterated, so the callback — and therefore the assertion — never ran.
     Iterate the generator instead.
     """
     bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                      use_folder_tree=True)
     bucket.build(self.store_path, cleanup=True)

     # every directory left under store_path must contain no files
     for _dirpath, _dirnames, files in os.walk(self.store_path):
         self.assertEqual(len(files), 0)
    def test_builder_skips_existing(self):
        """A second build regenerates only the shard that was removed.

        Verified via file modification times: only the regenerated shard
        may be newer than the timestamp taken between the two builds.
        """
        # generate shards for testing
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
        generated = bucket.build(self.store_path, False, False)

        timestamp = time.time()

        # remove one of the files
        remove_file = random.choice(list(generated.keys()))
        os.remove(os.path.join(self.store_path, remove_file))

        # generate only missing shard for testing
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
        generated = bucket.build(self.store_path, False, False)

        # verify last access times
        for seed, file_hash in generated.items():
            path = os.path.join(self.store_path, seed)
            last_access = os.path.getmtime(path)  # modification time, despite the name
            if seed == remove_file:
                self.assertTrue(last_access > timestamp)
            else:
                self.assertTrue(last_access < timestamp)

        # make sure all files are there
        self.assertTrue(bucket.checkup(self.store_path))
 def test_build_seed(self):
     """Seed hashes for heights 0 and 3 match the fixtures (debug build)."""
     bucket = Builder(addresses["alpha"], my_shard_size, my_max_size,
                      debug=True)
     expected = fixtures["test_build_seed"]
     self.assertEqual(bucket.build_seed(0), expected["hash0"])
     self.assertEqual(bucket.build_seed(3), expected["hash3"])
# Example #5
    def test_builder_build(self):
        """build() creates every shard file; cleanup=True removes them again."""
        # build and verify that every seed file exists
        bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        for seed in bucket.build_seeds(height):
            shard_path = os.path.join(self.store_path, seed)
            print("PATH", shard_path)
            self.assertTrue(os.path.exists(shard_path))

        bucket.clean(self.store_path)

        # build with cleanup=True and verify every seed file is gone
        bucket = Builder(addresses["gamma"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path, cleanup=True)

        for seed in bucket.build_seeds(height):
            shard_path = os.path.join(self.store_path, seed)
            self.assertFalse(os.path.exists(shard_path))
# Example #6
    def test_use_folder_tree_cleanup(self):
        """A cleanup build with use_folder_tree must leave no files behind.

        Bug fix: os.walk(top, callback, None) is the signature of the
        removed os.path.walk(); in Python 3, os.walk() is a generator and
        the un-iterated call was a no-op — the assertion never executed.
        Iterate the walk instead so every directory is actually checked.
        """
        bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                         my_min_free_size,
                         use_folder_tree=True)
        bucket.build(self.store_path, cleanup=True)

        # walk the whole tree; no directory may still contain files
        for _dirpath, _dirnames, files in os.walk(self.store_path):
            self.assertEqual(len(files), 0)
# Example #7
    def test_builder_rebuilds(self):
        """build(rebuild=True) overwrites pre-existing empty shard files."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                         my_min_free_size)

        # lay down a zero-byte placeholder for every seed
        for seed in bucket.build_seeds(height):
            placeholder = os.path.join(self.store_path, seed)
            with open(placeholder, 'a'):
                os.utime(placeholder, None)

        # rebuild must regenerate all of them
        bucket.build(self.store_path, rebuild=True)
    def test_builder_rebuilds(self):
        """Rebuilding regenerates shards even when empty files already exist."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                         my_min_free_size)

        # touch an empty file at each seed location
        targets = [os.path.join(self.store_path, s)
                   for s in bucket.build_seeds(height)]
        for target in targets:
            with open(target, 'a'):
                os.utime(target, None)

        # rebuild every shard over the placeholders
        bucket.build(self.store_path, rebuild=True)
    def test_builder_checkup(self):
        """checkup() passes on a full store and fails once a shard is gone."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
        built = bucket.build(self.store_path)

        # intact store passes
        self.assertTrue(bucket.checkup(self.store_path))

        # delete one randomly chosen generated shard
        victim = random.choice(list(built.keys()))
        os.remove(os.path.join(self.store_path, victim))

        # missing shard makes checkup fail
        self.assertFalse(bucket.checkup(self.store_path))
    def test_build_rebuild_modify(self):
        """build(rebuild=True) must restore a shard whose contents changed.

        A plain build() skips files that already exist (even if corrupted);
        only rebuild=True rewrites them.
        """
        # generate shards for testing
        bucket = Builder(addresses["epsilon"],
                         my_shard_size,
                         my_max_size,
                         debug=True)
        bucket.build(self.store_path)

        # modify one of the files
        # hard-coded seed — presumably a known seed for addresses["epsilon"];
        # TODO confirm against the fixture data
        o = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
        path = os.path.join(self.store_path, o)
        sha256_org_file = partialhash.compute(path)

        # append some bad data to corrupt the shard
        with open(path, "a") as f:
            f.write("bad data is bad\n")

        # hash must now differ from the original
        sha256_mod_file = partialhash.compute(path)
        self.assertNotEqual(sha256_org_file, sha256_mod_file)

        # a plain build must NOT restore the corrupted (still existing) file
        bucket.build(self.store_path)
        sha256_mod_file = partialhash.compute(path)
        self.assertNotEqual(sha256_org_file, sha256_mod_file)

        # a rebuild must restore the original contents
        bucket.build(self.store_path, rebuild=True)
        sha256_mod_file = partialhash.compute(path)
        self.assertEqual(sha256_org_file, sha256_mod_file)
# Example #11
    def test_builder_checkup(self):
        """checkup() reflects store integrity before and after a deletion."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
        built = bucket.build(self.store_path)

        # complete store -> checkup passes
        self.assertTrue(bucket.checkup(self.store_path))

        # remove one arbitrary shard
        victim = random.choice(list(built.keys()))
        os.remove(os.path.join(self.store_path, victim))

        # incomplete store -> checkup fails
        self.assertFalse(bucket.checkup(self.store_path))
    def test_builder_audit(self):
        """audit() and full_audit() must reproduce the fixture results."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
        bucket.build(self.store_path)

        # spot audit: compare the first two results against fixtures
        results = bucket.audit(b"storj", self.store_path, height)
        audit_fix = fixtures["test_builder_audit"]
        self.assertEqual(results[0], _to_bytes(audit_fix["result0"]))
        self.assertEqual(results[1], _to_bytes(audit_fix["result1"]))

        # full audit must equal the expected fixture value
        full_result = bucket.full_audit(b"storj", self.store_path,
                                        height)
        self.assertEqual(full_result, audit_fix["expected"])
    def test_builder_rebuilds(self):
        """Empty placeholder shards are regenerated and pass a full audit."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)

        # create zero-byte files where the shards should live
        for num in range(height):
            target = os.path.join(self.store_path, bucket.build_seed(num))
            with open(target, 'a'):
                os.utime(target, None)

        # regenerate every shard
        bucket.build(self.store_path, rebuild=True)

        # rebuilt store must match the full-audit fixture
        expected = fixtures["test_builder_audit"]["expected"]
        actual = bucket.full_audit(b"storj", self.store_path,
                                   height)
        self.assertEqual(actual, expected)
# Example #14
    def test_build_rebuild_modify(self):
        """Corrupted shard data survives a plain build but not rebuild=True."""
        # generate shards for testing
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                         debug=True)
        bucket.build(self.store_path)

        # modify one of the files
        # hard-coded seed — presumably a known seed for addresses["epsilon"];
        # TODO confirm against the fixture data
        o = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
        path = os.path.join(self.store_path, o)
        sha256_org_file = partialhash.compute(path)

        # append junk to corrupt the shard
        with open(path, "a") as f:
            f.write("bad data is bad\n")

        # hash must now differ from the original
        sha256_mod_file = partialhash.compute(path)
        self.assertNotEqual(sha256_org_file, sha256_mod_file)

        # a plain build skips the existing (corrupted) file — hash still differs
        bucket.build(self.store_path)
        sha256_mod_file = partialhash.compute(path)
        self.assertNotEqual(sha256_org_file, sha256_mod_file)

        # rebuild=True rewrites the shard — hash restored
        bucket.build(self.store_path, rebuild=True)
        sha256_mod_file = partialhash.compute(path)
        self.assertEqual(sha256_org_file, sha256_mod_file)
# Example #15
    def test_builder_skips_existing(self):
        """Only the deleted shard is regenerated by the second build.

        Uses mtimes relative to a timestamp taken between the two builds:
        the regenerated shard must be newer, all others older.
        """
        # generate shards for testing
        bucket = Builder(addresses["epsilon"],
                         my_shard_size,
                         my_max_size,
                         debug=True)
        generated = bucket.build(self.store_path)

        timestamp = time.time()

        # remove one of the files
        remove_file = random.choice(list(generated.keys()))
        os.remove(os.path.join(self.store_path, remove_file))

        # generate only missing shard for testing
        bucket = Builder(addresses["epsilon"],
                         my_shard_size,
                         my_max_size,
                         debug=True)
        generated = bucket.build(self.store_path)

        # verify last access times
        for seed, file_hash in generated.items():
            path = os.path.join(self.store_path, seed)
            last_access = os.path.getmtime(path)  # modification time, despite the name
            if seed == remove_file:
                self.assertTrue(last_access > timestamp)
            else:
                self.assertTrue(last_access < timestamp)

        # make sure all files are there
        self.assertTrue(bucket.checkup(self.store_path))
# Example #16
    def test_on_KeyboardInterrupt(self):
        """build() must survive a KeyboardInterrupt raised by the callback.

        The callback raises on every non-final call, so only the first
        shard gets written; build() still returns a truthy result and no
        partial files are left for later shards.
        """
        def _raise(height, last):
            if not last: # only raise 1 of 2 calls
                raise KeyboardInterrupt()

        # generate 1 file with KeyboardInterrupt
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                         my_min_free_size, on_generate_shard=_raise)
        self.assertTrue(bucket.build(store_path=self.store_path))
        
        # 1 of 2 files exists and no bad files
        for shard_num in range(height):
            path = os.path.join(self.store_path, bucket.build_seed(shard_num))
            if shard_num <= 0:
                self.assertTrue(os.path.exists(path) 
                                and os.path.getsize(path) == my_shard_size)
            else:
                self.assertFalse(os.path.exists(path))
# Example #17
    def test_on_KeyboardInterrupt(self):
        """An interrupt from the shard callback stops generation cleanly.

        Only shard 0 should exist afterwards, with exactly my_shard_size
        bytes; no partially-written later shards may remain.
        """
        def _raise(height, last):
            if not last: # only raise 1 of 2 calls
                raise KeyboardInterrupt()

        # generate 1 file with KeyboardInterrupt
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
                         my_min_free_size, on_generate_shard=_raise)
        self.assertTrue(bucket.build(store_path=self.store_path))
        
        # 1 of 2 files exists and no bad files
        for shard_num in range(height):
            path = os.path.join(self.store_path, bucket.build_seed(shard_num))
            if shard_num <= 0:
                self.assertTrue(os.path.exists(path) 
                                and os.path.getsize(path) == my_shard_size)
            else:
                self.assertFalse(os.path.exists(path))
    def test_on_generate_shard_callback(self):
        """build() invokes on_generate_shard once per shard, heights in order."""
        recorded = []

        def record(*args):
            recorded.append(args)

        # generate shards, recording every callback invocation
        bucket = Builder(addresses["omega"], my_shard_size, my_max_size,
                         on_generate_shard=record, debug=True)
        bucket.build(self.store_path)

        # exactly one call per shard
        call_count = len(recorded)
        self.assertEqual(int(my_max_size / my_shard_size), call_count)

        # first callback argument is the 1-based height, ascending
        for index in range(call_count):
            self.assertEqual(index + 1, recorded[index][0])
    def test_builder_build(self):
        """Positional build args: (path, True, False) creates the shards,
        (path, True, True) cleans them up afterwards."""
        bucket = Builder(addresses["beta"], my_shard_size, my_max_size)
        bucket.build(self.store_path, True, False)

        # every shard file must exist
        for shard_num in range(height):
            seed_path = os.path.join(self.store_path,
                                     bucket.build_seed(shard_num))
            self.assertTrue(os.path.exists(seed_path))

        bucket.clean(self.store_path)

        # cleanup build leaves nothing behind
        bucket = Builder(addresses["gamma"], my_shard_size, my_max_size)
        bucket.build(self.store_path, True, True)

        for shard_num in range(height):
            seed_path = os.path.join(self.store_path,
                                     bucket.build_seed(shard_num))
            self.assertFalse(os.path.exists(seed_path))
# Example #20
    def test_on_generate_shard_callback(self):
        """Callback fires once per shard plus a final call for the last height."""
        recorded = []

        def record(*args):
            recorded.append(args)

        # build while recording every callback invocation
        bucket = Builder(addresses["omega"], my_shard_size, my_max_size,
                         my_min_free_size,
                         on_generate_shard=record)
        bucket.build(self.store_path)

        # one call per shard, plus one extra for the last height
        call_count = len(recorded)
        self.assertEqual(int(my_max_size / my_shard_size) + 1, call_count)

        # heights are reported in ascending 1-based order
        for index in range(call_count - 1):
            self.assertEqual(index + 1, recorded[index][0])
# Example #21
    def test_build_cont(self):
        """Continuing a build: skipping existing shards is fastest, repairing
        ~10% missing shards is in between, a fresh build is slowest."""
        max_size1 = 1024 * 1024 * 384
        max_size2 = 1024 * 1024 * 128

        # generate shards for testing (cold build, slowest)
        start_time = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size1,
                         my_min_free_size)
        bucket.build(self.store_path)
        end_delta = datetime.utcnow() - start_time

        # should skip all shards and be faster
        start_time2 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
                         my_min_free_size)
        bucket.build(self.store_path)
        end_delta2 = datetime.utcnow() - start_time2

        self.assertTrue(end_delta2 < end_delta)

        # delete 10% random files
        # NOTE(review): my_height is computed but never used — the
        # build_seeds(height) call below possibly should be
        # build_seeds(my_height); confirm the intended range.
        my_height = int(max_size2 / my_shard_size)
        seeds = bucket.build_seeds(height)
        for seed in seeds:
            path = os.path.join(self.store_path, seed)
            if randint(0,9)==0:
                os.remove(path)

        # should rebuild missing shards and be slower as skip all 
        # but faster as new build
        start_time3 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
                         my_min_free_size)
        bucket.build(self.store_path, repair=True)
        end_delta3 = datetime.utcnow() - start_time3

        self.assertTrue(end_delta3 < end_delta) # faster than new build
        self.assertTrue(end_delta3 > end_delta2) # slower than skip all
# Example #22
    def test_build_rebuild(self):
        """A deleted shard fails checkup until build(rebuild=True) restores it."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
        bucket.build(self.store_path)

        # drop one known shard file
        missing = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
        os.remove(os.path.join(self.store_path, missing))

        # store is now incomplete
        self.assertFalse(bucket.checkup(self.store_path))

        # rebuilding restores the missing shard
        bucket.build(self.store_path, rebuild=True)
        self.assertTrue(bucket.checkup(self.store_path))
# Example #23
    def test_build_cont(self):
        """A second build over existing shards skips them and finishes faster."""
        size_big = 1024 * 1024 * 384
        size_small = 1024 * 1024 * 128

        # cold build from scratch
        first_start = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, size_big)
        bucket.build(self.store_path)
        first_elapsed = datetime.utcnow() - first_start

        # shards already exist, so this run should be quicker
        second_start = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, size_small)
        bucket.build(self.store_path)
        second_elapsed = datetime.utcnow() - second_start

        self.assertTrue(second_elapsed < first_elapsed)
# Example #24
    def test_builder_clean(self):
        """clean() removes every shard file that build() created."""
        bucket = Builder(addresses["delta"], my_shard_size, my_max_size)
        bucket.build(self.store_path)

        # all shards present after the build
        for num in range(height):
            seed_path = os.path.join(self.store_path, bucket.build_seed(num))
            self.assertTrue(os.path.exists(seed_path))

        # clean command
        bucket.clean(self.store_path)

        # all shards gone after clean()
        for num in range(height):
            seed_path = os.path.join(self.store_path, bucket.build_seed(num))
            self.assertFalse(os.path.exists(seed_path))
    def test_builder_clean(self):
        """clean() deletes everything that build() produced."""
        bucket = Builder(addresses["delta"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # every seed file must exist after the build
        for seed in bucket.build_seeds(height):
            self.assertTrue(
                os.path.exists(os.path.join(self.store_path, seed)))

        # clean command
        bucket.clean(self.store_path)

        # and every seed file must be gone after clean()
        for seed in bucket.build_seeds(height):
            self.assertFalse(
                os.path.exists(os.path.join(self.store_path, seed)))
    def test_build_rebuild(self):
        """checkup() fails after deleting a shard and passes after a rebuild."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size, debug=True)
        bucket.build(self.store_path)

        # remove a known shard file
        remove_file = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
        os.remove(os.path.join(self.store_path, remove_file))

        # incomplete store fails the checkup
        self.assertFalse(bucket.checkup(self.store_path))

        # regenerate and re-verify
        bucket.build(self.store_path, rebuild=True)
        self.assertTrue(bucket.checkup(self.store_path))
# Example #27
    def test_builder_audit(self):
        """audit()/full_audit() results must match the recorded fixtures."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)
        bucket.build(self.store_path)

        # single audit pass against the fixture values
        results = bucket.audit(b"storj", self.store_path, height)
        audit_fix = fixtures["test_builder_audit"]
        self.assertEqual(results[0], _to_bytes(audit_fix["result0"]))
        self.assertEqual(results[1], _to_bytes(audit_fix["result1"]))

        # full audit against the expected fixture
        full_result = bucket.full_audit(b"storj", self.store_path, height)
        self.assertEqual(full_result, audit_fix["expected"])
# Example #28
    def test_builder_rebuilds(self):
        """rebuild=True regenerates empty shard files; full audit then matches."""
        bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size)

        # create an empty placeholder for every shard
        for num in range(height):
            placeholder = os.path.join(self.store_path, bucket.build_seed(num))
            with open(placeholder, 'a'):
                os.utime(placeholder, None)

        # rebuild everything
        bucket.build(self.store_path, rebuild=True)

        # rebuilt data must pass the full audit fixture
        expected = fixtures["test_builder_audit"]["expected"]
        self.assertEqual(
            bucket.full_audit(b"storj", self.store_path, height), expected)
# Example #29
    def test_builder_clean(self):
        """After clean(), none of the built shard files remain on disk."""
        bucket = Builder(addresses["delta"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # all seed files must be present after the build
        built_paths = [os.path.join(self.store_path, s)
                       for s in bucket.build_seeds(height)]
        for built_path in built_paths:
            self.assertTrue(os.path.exists(built_path))

        # clean command
        bucket.clean(self.store_path)

        # recompute the same seeds; every file must be gone
        cleaned_paths = [os.path.join(self.store_path, s)
                         for s in bucket.build_seeds(height)]
        for cleaned_path in cleaned_paths:
            self.assertFalse(os.path.exists(cleaned_path))
    def test_build_cont(self):
        """Rebuilding over an existing store skips shards and is faster."""
        size_big = 1024*1024*384
        size_small = 1024*1024*128

        # cold build
        t0 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, size_big, debug=True)
        bucket.build(self.store_path)
        cold = datetime.utcnow() - t0

        # warm build: all shards already present, should be quicker
        t1 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, size_small, debug=True)
        bucket.build(self.store_path)
        warm = datetime.utcnow() - t1

        self.assertTrue(warm < cold)
 def test_build_seed(self):
     """Seed hashes depend only on the address; zero sizes still work."""
     bucket = Builder(addresses["alpha"], 0, 0)  # empty bucket
     hash0 = fixtures["test_build_seed"]["hash0"]
     hash3 = fixtures["test_build_seed"]["hash3"]
     self.assertEqual(bucket.build_seed(0), hash0)
     self.assertEqual(bucket.build_seed(3), hash3)
 def test_sha256(self):
     """Builder.sha256('storj') matches the fixture; other input does not."""
     digest_fixture = fixtures["test_sha256"]["expected"]
     self.assertEqual(Builder.sha256("storj"), digest_fixture)
     self.assertNotEqual(Builder.sha256("not storj"), digest_fixture)
    def test_builder_audit(self):
        """End-to-end audit keyed on the last confirmed bitcoin block.

        Builds just enough shards to cover the audit block derived from the
        btc block number, then verifies audit() succeeds, detects copied,
        corrupted and missing shards, and succeeds again after a rebuild.

        Idiom fixes: assertTrue(x == False) -> assertFalse(x) and
        assertFalse(a == b) -> assertNotEqual(a, b); no behavior change for
        the boolean/hash values involved.
        """
        bucket = Builder(addresses["epsilon"], my_shard_size, 0,
                         my_min_free_size)

        # check last confirmed bitcoin hash
        btc_block = bucket.btc_last_confirmed_block()

        self.assertTrue(
            btc_block['confirmations'] >= common.DEFAULT_MIN_CONFIRMATIONS)
        self.assertFalse(btc_block['is_orphan'])

        index = btc_block['block_no']

        block_pos = index % common.DEFAULT_FULL_AUDIT
        block_size = common.DEFAULT_BLOCK_SIZE

        # create empty files to skip to btc_index
        seeds = bucket.build_seeds(block_pos * block_size)
        for seed in seeds:
            path = os.path.join(self.store_path, seed)
            open(path, 'w').close()

        # generate shards for audit
        shard_size = my_shard_size * (block_pos + 1) * block_size
        bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # audit possible
        good_hash = bucket.audit(self.store_path, btc_block['block_no'],
                                 btc_block['blockhash'])
        self.assertTrue(good_hash)

        seeds = bucket.build_seeds((block_pos + 1) * block_size)

        # copy a bad file for a bad audit
        path1 = os.path.join(self.store_path, seeds[-2])
        path2 = os.path.join(self.store_path, seeds[-1])
        shutil.copyfile(path1, path2)
        bad_hash = bucket.audit(self.store_path, btc_block['block_no'],
                                btc_block['blockhash'])

        self.assertNotEqual(good_hash, bad_hash)

        # write some bad data
        with open(path2, "a") as f:
            f.write("bad data is bad\n")

        # audit failed because last shard has bad data
        self.assertFalse(
            bucket.audit(self.store_path, btc_block['block_no'],
                         btc_block['blockhash']))

        # remove last shard
        os.remove(path2)

        # audit failed because last shard missing
        self.assertFalse(
            bucket.audit(self.store_path, btc_block['block_no'],
                         btc_block['blockhash']))

        # build last shard again
        bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # audit possible
        good_hash = bucket.audit(self.store_path, btc_block['block_no'],
                                 btc_block['blockhash'])
        self.assertTrue(good_hash)

        # remove first shard of that block
        path1 = os.path.join(self.store_path, seeds[-80])
        os.remove(path1)

        # audit failed because first shard missing
        self.assertFalse(
            bucket.audit(self.store_path, btc_block['block_no'],
                         btc_block['blockhash']))
# Example #34
 def test_sha256(self):
     """sha256 of 'storj' equals the stored fixture digest, other input differs."""
     want = fixtures["test_sha256"]["expected"]
     self.assertEqual(Builder.sha256("storj"), want)
     self.assertNotEqual(Builder.sha256("not storj"), want)
    def test_builder_build(self):
        """build() materialises all shards; cleanup=True deletes them after."""
        # first pass: build, then check that all shard files exist
        bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        for seed in bucket.build_seeds(height):
            p = os.path.join(self.store_path, seed)
            print("PATH", p)
            self.assertTrue(os.path.exists(p))

        bucket.clean(self.store_path)

        # second pass: build with cleanup, then check all shards are gone
        bucket = Builder(addresses["gamma"], my_shard_size, my_max_size,
                         my_min_free_size)
        bucket.build(self.store_path, cleanup=True)

        for seed in bucket.build_seeds(height):
            p = os.path.join(self.store_path, seed)
            self.assertFalse(os.path.exists(p))
    def test_build_cont(self):
        """Timing comparison: skip-all < repair (~10% missing) < cold build."""
        max_size1 = 1024 * 1024 * 384
        max_size2 = 1024 * 1024 * 128

        # generate shards for testing (cold build, slowest)
        start_time = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size1,
                         my_min_free_size)
        bucket.build(self.store_path)
        end_delta = datetime.utcnow() - start_time

        # should skip all shards and be faster
        start_time2 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
                         my_min_free_size)
        bucket.build(self.store_path)
        end_delta2 = datetime.utcnow() - start_time2

        self.assertTrue(end_delta2 < end_delta)

        # delete 10% random files
        # NOTE(review): my_height is computed but never used — the
        # build_seeds(height) call below possibly should be
        # build_seeds(my_height); confirm the intended range.
        my_height = int(max_size2 / my_shard_size)
        seeds = bucket.build_seeds(height)
        for seed in seeds:
            path = os.path.join(self.store_path, seed)
            if randint(0, 9) == 0:
                os.remove(path)

        # should rebuild missing shards and be slower as skip all
        # but faster as new build
        start_time3 = datetime.utcnow()
        bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
                         my_min_free_size)
        bucket.build(self.store_path, repair=True)
        end_delta3 = datetime.utcnow() - start_time3

        self.assertTrue(end_delta3 < end_delta)  # faster than new build
        self.assertTrue(end_delta3 > end_delta2)  # slower than skip all
# Example #37
    def test_builder_audit(self):
        """End-to-end audit keyed on the last confirmed bitcoin block.

        Builds just enough shards to cover the audit block derived from the
        btc block number, then verifies audit() succeeds, detects copied,
        corrupted and missing shards, and succeeds again after a rebuild.
        Requires network access via btc_last_confirmed_block().
        """
        bucket = Builder(addresses["epsilon"], my_shard_size, 0,
                         my_min_free_size)

        # check last confirmed bitcoin hash
        btc_block = bucket.btc_last_confirmed_block()

        self.assertTrue(btc_block['confirmations']>=
                        common.DEFAULT_MIN_CONFIRMATIONS)
        # NOTE(review): assertTrue(x == False) would read better as assertFalse(x)
        self.assertTrue(btc_block['is_orphan']==False)

        index = btc_block['block_no']

        block_pos = index % common.DEFAULT_FULL_AUDIT
        block_size = common.DEFAULT_BLOCK_SIZE

        # create empty files to skip to btc_index
        seeds = bucket.build_seeds(block_pos * block_size)
        for seed in seeds:
            path = os.path.join(self.store_path, seed)
            open(path,'w').close()

        # generate shards for audit
        shard_size = my_shard_size * (block_pos + 1) * block_size
        bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # audit possible
        good_hash = bucket.audit(self.store_path,
                                 btc_block['block_no'],
                                 btc_block['blockhash'])
        self.assertTrue(good_hash)

        seeds = bucket.build_seeds((block_pos + 1) * block_size)

        # copy a bad file for a bad audit
        path1 = os.path.join(self.store_path, seeds[-2])
        path2 = os.path.join(self.store_path, seeds[-1])
        shutil.copyfile(path1, path2)
        bad_hash = bucket.audit(self.store_path,
                                btc_block['block_no'],
                                btc_block['blockhash'])

        # NOTE(review): assertFalse(a == b) would read better as assertNotEqual
        self.assertFalse(good_hash==bad_hash)

        # write some bad data
        with open(path2, "a") as f:
            f.write("bad data is bad\n")

        # audit failed because last shard has bad data
        self.assertFalse(bucket.audit(self.store_path,
                                      btc_block['block_no'],
                                      btc_block['blockhash']))

        # remove last shard
        os.remove(path2)

        # audit failed because last shard missing
        self.assertFalse(bucket.audit(self.store_path,
                                      btc_block['block_no'],
                                      btc_block['blockhash']))

        # build last shard again
        bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
                         my_min_free_size)
        bucket.build(self.store_path)

        # audit possible
        good_hash = bucket.audit(self.store_path,
                                 btc_block['block_no'],
                                 btc_block['blockhash'])
        self.assertTrue(good_hash)

        # remove first shard of that block
        path1 = os.path.join(self.store_path, seeds[-80])
        os.remove(path1)

        # audit failed because first shard missing
        self.assertFalse(bucket.audit(self.store_path,
                                      btc_block['block_no'],
                                      btc_block['blockhash']))
    def test_builder_build(self):
        """Shard files appear after build() and vanish after a cleanup build."""
        bucket = Builder(addresses["beta"],
                         my_shard_size,
                         my_max_size,
                         debug=True)
        bucket.build(self.store_path)

        # every shard file must exist
        for num in range(height):
            shard = os.path.join(self.store_path, bucket.build_seed(num))
            print("PATH", shard)
            self.assertTrue(os.path.exists(shard))

        bucket.clean(self.store_path)

        # cleanup build leaves nothing behind
        bucket = Builder(addresses["gamma"],
                         my_shard_size,
                         my_max_size,
                         debug=True)
        bucket.build(self.store_path, cleanup=True)

        for num in range(height):
            shard = os.path.join(self.store_path, bucket.build_seed(num))
            self.assertFalse(os.path.exists(shard))
# Example #39
 def test_build_seed(self):
     """Seed hashes are derived from the address alone, not bucket sizes."""
     bucket = Builder(addresses["alpha"], 0, 0)  # empty bucket: zero sizes
     seed_fixtures = fixtures["test_build_seed"]
     self.assertEqual(bucket.build_seed(0), seed_fixtures["hash0"])
     self.assertEqual(bucket.build_seed(3), seed_fixtures["hash3"])