コード例 #1
0
ファイル: test_numpy.py プロジェクト: thedrow/fpylll
def test_dump_r(nrows=10):
    """Check that ``dump_r`` fills a preallocated NumPy array with the
    squared Gram-Schmidt norms ``r_(i,i)`` of ``M``.

    :param nrows: dimension of the random NTRU-like test lattice.
    """
    # Skip before any expensive setup when NumPy support is unavailable
    # (the original built the matrix and GSO object first, wastefully).
    if not have_numpy:
        return

    A = IntegerMatrix(nrows, nrows)
    A.randomize("ntrulike", bits=10, q=1023)
    M = GSO.Mat(A)

    r = numpy.ndarray(dtype='double', shape=nrows)

    M.update_gso()
    dump_r(r, M, 0, nrows)

    # Each dumped entry must match the GSO object's own r value
    # up to a small floating-point tolerance.
    for i in range(nrows):
        assert abs(M.get_r(i, i) - r[i]) < 0.001
コード例 #2
0
ファイル: test_pruner.py プロジェクト: blowfish880/fpylll
def test_pruner_vec(n=20, m=20):
    """Exercise ``prune`` on squared GS-norm vectors extracted with
    ``dump_r`` from ``m`` prepared GSO objects of dimension ``n``.

    :param n: lattice dimension
    :param m: number of GSO objects to average over
    """
    # Without NumPy, ``dump_r`` is unavailable and ``vec`` would never be
    # defined — the original then crashed with a NameError at ``prune``.
    # Skip early instead, matching the other numpy-dependent tests.
    if not have_numpy:
        return

    M = prepare(n, m)
    vec = []
    # Loop variable renamed: the original used ``m`` here, shadowing the
    # ``m`` parameter.
    for gso in M:
        vec.append(tuple(dump_r(gso, 0, n)))

    # Average first GS norm over all bases serves as the enumeration radius.
    radius = sum([mat.get_r(0, 0) for mat in M])/len(M)
    pruning = prune(radius, 0, 0.9, vec)
    assert pruning.probability >= 0.89
コード例 #3
0
ファイル: test_numpy.py プロジェクト: zhli271828/fpylll
def test_dump_r(nrows=10):
    """Check that ``dump_r`` returns the squared Gram-Schmidt norms
    ``r_(i,i)`` of ``M``.

    :param nrows: dimension of the random NTRU-like test lattice.
    """
    # Skip before any expensive setup when NumPy support is unavailable
    # (the original built the matrix and GSO object first, wastefully).
    if not have_numpy:
        return

    A = IntegerMatrix(nrows, nrows)
    A.randomize("ntrulike", bits=10, q=1023)
    M = GSO.Mat(A)

    M.update_gso()
    r = dump_r(M, 0, nrows)

    # Each returned entry must match the GSO object's own r value
    # up to a small floating-point tolerance.
    for i in range(nrows):
        assert abs(M.get_r(i, i) - r[i]) < 0.001
コード例 #4
0
ファイル: test_numpy.py プロジェクト: blowfish880/fpylll
def test_dump_r(nrows=10):
    """Check that ``dump_r`` returns the squared Gram-Schmidt norms
    ``r_(i,i)`` of ``M``.

    :param nrows: dimension of the random NTRU-like test lattice.
    """
    # Skip before any expensive setup when NumPy support is unavailable
    # (the original built the matrix and GSO object first, wastefully).
    if not have_numpy:
        return

    A = IntegerMatrix(nrows, nrows)
    A.randomize("ntrulike", bits=10, q=1023)
    M = GSO.Mat(A)

    M.update_gso()
    r = dump_r(M, 0, nrows)

    # Each returned entry must match the GSO object's own r value
    # up to a small floating-point tolerance.
    for i in range(nrows):
        assert abs(M.get_r(i, i) - r[i]) < 0.001
コード例 #5
0
ファイル: test_util.py プロジェクト: gbonnoron/fpylll
def test_gh():
    """Cross-check ``adjust_radius_to_gh_bound`` against
    ``gaussian_heuristic`` applied to the dumped GS norms."""
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        M = GSO.Mat(A, float_type="ld")
        M.discover_all_rows()
        M.update_gso()

        first_r = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(
            2000*first_r, 0, n, root_det, 1.0)
        gh1 = gh_radius * 2**ge

        gh2 = gaussian_heuristic(dump_r(M, 0, n))
        # The two estimates should agree to within 1%.
        assert abs(gh1/gh2 - 1) < 0.01
コード例 #6
0
ファイル: test_util.py プロジェクト: isuruf/fpylll
def test_gh():
    """Cross-check ``adjust_radius_to_gh_bound`` against
    ``gaussian_heuristic`` applied to the dumped GS norms."""
    for n in dimensions:
        set_random_seed(n)
        M = GSO.Mat(make_integer_matrix(n), float_type="ld")
        M.discover_all_rows()
        M.update_gso()

        gh_radius, ge = adjust_radius_to_gh_bound(
            2000 * M.get_r(0, 0), 0, n, M.get_root_det(0, n), 1.0)
        gh1 = gh_radius * 2**ge

        gh2 = gaussian_heuristic(dump_r(M, 0, n))
        # The two estimates should agree to within 1%.
        assert abs(gh1 / gh2 - 1) < 0.01
コード例 #7
0
def test_gh():
    """Cross-check ``adjust_radius_to_gh_bound`` against
    ``gaussian_heuristic`` applied to the dumped GS norms."""
    try:
        from fpylll.numpy import dump_r
    except ImportError:
        # NumPy support was not compiled in; nothing to test.
        return

    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        try:
            M = GSO.Mat(A, float_type="ld")
        except ValueError:
            # "long double" is unavailable on this platform; fall back
            # to plain double precision.
            M = GSO.Mat(A, float_type="d")
        M.discover_all_rows()
        M.update_gso()

        first_r = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(
            2000*first_r, 0, n, root_det, 1.0)
        gh1 = gh_radius * 2**ge

        gh2 = gaussian_heuristic(dump_r(M, 0, n))
        # The two estimates should agree to within 1%.
        assert abs(gh1/gh2 - 1) < 0.01
コード例 #8
0
ファイル: rbkz.py プロジェクト: malb/yolo
    def recycled_svp_reduction(self, kappa, block_size, param, stats):
        """SVP-reduce the projected block ``[kappa, kappa + block_size)``,
        recycling auxiliary enumeration solutions, and retry with
        increasingly strong preprocessing until the accumulated success
        probability reaches ``param.min_success_probability``.

        :param kappa: first row of the block
        :param block_size: number of rows in the block
        :param param: BKZ parameters; reads ``min_success_probability``,
            ``flags`` and ``gh_factor``
        :param stats: statistics collector; when ``None`` a ``DummyStats``
            is substituted

        :returns: ``True`` if the first Gram-Schmidt norm of the block was
            not improved (block already reduced), ``False`` otherwise.
        """
        if stats is None:
            stats = DummyStats(self)

        # Refresh the GSO and LLL-reduce the block before measuring the
        # initial first vector.
        self.M.update_gso()
        self.lll_obj.size_reduction(0, kappa + 1)
        self.lll_obj(kappa, kappa, kappa + block_size)

        # Remember |b*_kappa|^2 as (mantissa, exponent) to decide "clean"
        # at the end.
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        # NOTE(review): ``rerandomize`` is assigned but never read in this
        # method — confirm whether it can be dropped.
        remaining_probability, rerandomize = 1.0, False
        print " - ",

        preproc_block_size = PREPROC_BLOCK_SIZE_INIT
        # Loop until the chance that every attempt missed the shortest
        # vector drops below 1 - min_success_probability.
        while remaining_probability > 1. - param.min_success_probability:
            # Each retry preprocesses with a larger block size.
            preproc_block_size += PREPROC_BLOCK_SIZE_INCR

            start_preproc = time()
            with stats.context("preproc"):
                rec_clean = self.recycled_svp_preprocessing(
                    kappa, block_size, param, stats, preproc_block_size)
            time_preproc = time() - start_preproc

            radius, expo = self.M.get_r_exp(kappa, kappa)

            # Optionally cap the enumeration radius by the Gaussian
            # heuristic for this block.
            if param.flags & BKZ.GH_BND:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = gaussian_heuristic(radius, expo, block_size,
                                                  root_det, param.gh_factor)

            # Presumably NODE_PER_SEC converts preprocessing wall time into
            # an equivalent enumeration-node count so the pruner can
            # amortize it — TODO confirm against the pruner API.
            overhead = NODE_PER_SEC * time_preproc

            with stats.context("postproc"):
                self.M.update_gso()
                # Squared GS norms of the block for the pruner.
                R = dump_r(self.M, kappa, block_size)
                # print R
                # Per-attempt target probability chosen so the overall
                # success probability still reaches the requested minimum
                # (1.01 adds a small safety margin).
                goal_proba = 1.01 * ((param.min_success_probability - 1) /
                                     remaining_probability + 1)
                pruning = prune(radius * 2**expo,
                                overhead,
                                goal_proba, [R],
                                descent_method="gradient",
                                precision=53)

                print goal_proba, pruning.probability
            try:
                enum_obj = Enumeration(self.M, self.recycling_pool_max_size)
                aux_sols = []
                with stats.context("svp", E=enum_obj):
                    # Heuristic slack: enlarge the radius by 5% and loosen
                    # the tail of the pruning coefficients by the same
                    # factor.
                    K = [x for x in pruning.coefficients]
                    radius *= 1.05
                    for i in range(5, preproc_block_size):
                        K[i] /= 1.05

                    solution, max_dist = enum_obj.enumerate(kappa,
                                                            kappa + block_size,
                                                            radius,
                                                            expo,
                                                            pruning=K,
                                                            aux_sols=aux_sols)
                    # Insert up to 10 recycled auxiliary solutions into
                    # the basis.
                    V = [v for (v, _) in aux_sols[:10]]
                    self.multi_insert(V, kappa, block_size, stats)

            except EnumerationError:
                # No vector found within the pruned radius; fall through,
                # lower the remaining probability and retry with heavier
                # preprocessing.
                print 0,
                pass

            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        # "Clean" iff the first GS norm did not strictly decrease.
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean


# def to_canonical(A, v, kappa, block_size):
#     v = kappa*[0] + [x for x in v] + (A.nrows - (kappa + block_size)) * [0]
#     v = IntegerMatrix.from_iterable(1, A.nrows, map(lambda x: int(round(x)), v))
#     v = tuple((v*A)[0])
#     return v

# def multi_insert_from_canonical(M, V, kappa, block_size):
#     d = M.d
#     s = d
#     l = len(V)
#     for v in V:
#         w = M.babai(v)
#         for i in range(kappa+block_size, d):
#             assert w[i] == 0
#         M.create_row()
#         with self.M.row_ops(s, s+1):
#             for i in range(kappa + block_size):
#                 self.M.row_addmul(s, i, w[i])
#         s += 1

#     for i in reversed(range(l)):
#         self.M.move_row(kappa, d+i)

#     with stats.context("lll"):
#         self.lll_obj(kappa, kappa, kappa + block_size + 1)

#     for i in range(l):
#         self.M.move_row(kappa + block_size + i, s)

#     for i in range(l):
#         self.M.remove_last_row()