def test_2049_bytes_file(self):
    """Hash a 2049-byte file: check the 1024-byte first-block sum, the
    full-file sum, the intermediate 2048-byte part sum, and the three
    corresponding partial-hash calls."""
    with TempDir() as tmpdir:
        path = os.path.join(tmpdir.name, 'input')
        write_binary(2049, path)
        expected = (
            '5b00669c480d5cffbdfa8bdba99561160f2d1b77',   # first 1024 bytes
            '170751534f1a95fd80a7a25787ecad2b60368e0a',   # whole 2049 bytes
            {2048: 'f10ccfde60c17db26e7d85d35665c7661dbbeb2c'},  # part hashes
            False,
        )
        self.assertEqual(
            get_sha1sums(path, os.path.getsize(path), 1024), expected)
        self.assertEqual(get_partial_sha1(path, 0, 1024),
                         '5b00669c480d5cffbdfa8bdba99561160f2d1b77')
        self.assertEqual(get_partial_sha1(path, 0, 2048),
                         'f10ccfde60c17db26e7d85d35665c7661dbbeb2c')
        self.assertEqual(get_partial_sha1(path, 0, 2049),
                         '170751534f1a95fd80a7a25787ecad2b60368e0a')
def small_is_truncated_of_large(large, small):
    """Return True if *small*'s content is a prefix (truncation) of *large*.

    First walks the cached part hashes at doubling sizes (2048, 4096, ...)
    and bails out on the first mismatch.  When every part hash available in
    both tables agrees, confirms precisely by hashing the first
    ``small.size`` bytes of *large*'s file and comparing against *small*'s
    full-content SHA-1.
    """
    large_hashs = large.get_part_hashs()
    small_hashs = small.get_part_hashs()

    # Cheap screen: compare part hashes at doubling sizes.  Running off
    # the end of either table means all comparable part hashes agreed.
    # (Original code set a 0/1 flag and also did a dead `size /= 2` after
    # break; the early return is equivalent and clearer.)
    size = 2048
    while size in large_hashs and size in small_hashs:
        if large_hashs[size] != small_hashs[size]:
            return False
        size *= 2

    # Precise check: hash of large's first small.size bytes must equal
    # small's full-content hash.
    larger_hash_till_size_of_small = get_partial_sha1(
        large.files[0].path, 0, small.size)
    return small.fullsha1 == larger_hash_till_size_of_small