Example #1
File: bkz.py Project: blowfish880/fpylll
    def svp_call(self, kappa, block_size, params, stats=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param stats: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = gaussian_heuristic(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            E = Enumeration(self.M)
            solution, max_dist = E.enumerate(kappa, kappa + block_size, max_dist, expo)
            stats.current_tour["enum nodes"] += E.get_nodes()
        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist:
            return None
        else:
            return solution
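All of these snippets assume a GSO object (self.M above) that has already been set up and updated. For reference, a minimal stand-alone enumeration in the style of the test examples further down might look as follows; the lattice parameters here are illustrative and not taken from any of the examples:

from fpylll import IntegerMatrix, LLL, GSO, Enumeration

A = IntegerMatrix.random(40, "qary", k=20, q=7681)   # illustrative q-ary lattice
LLL.reduction(A)
M = GSO.Mat(A)
M.update_gso()
# enumerate over the whole basis, using the first Gram-Schmidt norm as the radius
dist, coords = Enumeration(M).enumerate(0, M.d, M.get_r(0, 0), 0)[0]
v = A.multiply_left(coords)   # convert coordinates into a lattice vector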
Example #2
File: bkz3.py Project: malb/yolo
    def svp_call(self, kappa, block_size, radius, pruning, nr_hints=0, tracer=dummy_tracer):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        solutions = []
        try:
            enum_obj = Enumeration(self.M, nr_hints+1, EvaluatorStrategy.OPPORTUNISTIC_N_SOLUTIONS)
            if pruning is None:
                with tracer.context("enumeration", enum_obj=enum_obj, probability=1., full=block_size==self.params.bkz_param.block_size):
                    solutions = enum_obj.enumerate(kappa, kappa + block_size, radius, 0)
            else:
                with tracer.context("enumeration", enum_obj=enum_obj, probability=pruning.expectation, full=block_size==self.params.bkz_param.block_size):
                    solutions = enum_obj.enumerate(kappa, kappa + block_size, radius, 0, pruning=pruning.coefficients)
            return solutions
        except EnumerationError:
            return []
Example #3
def enum_trial(bkz_obj, preproc_cost, gh_factor=1.1):
    n = bkz_obj.A.nrows

    r = [bkz_obj.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)
    radius = max(r[0] * .99, gh * gh_factor)
    PRUNE_START = time()
    pruning = prune(radius,
                    NPS * preproc_cost, [r],
                    10,
                    metric="solutions",
                    float_type="dd",
                    flags=Pruning.GRADIENT)
    PRUNE_TIME = time() - PRUNE_START
    print "Pruning time %.4f" % PRUNE_TIME
    ENUM_START = time()
    enum_obj = Enumeration(bkz_obj.M, sub_solutions=True)

    try:
        print "Enum  ... (Expecting %.5f solutions)" % (pruning.expectation),
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
    except EnumerationError:
        pass

    ENUM_TIME = time() - ENUM_START
    print " \t\t\t\t\t\t TIME = %.2f" % ENUM_TIME

    zeros = 0
    print "subsolutions : r[i]/gh",
    for (a, b) in enum_obj.sub_solutions[:20]:
        print "%.3f" % abs(a / gh),

    insert_sub_solutions(bkz_obj, enum_obj.sub_solutions[:n // 4])
    return
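The pattern above (Gaussian-heuristic radius, pruning coefficients, then enumeration) can also be written against the Pruning.run interface shown in Example #11 below. A rough sketch, assuming an up-to-date GSO object M; the enumeration budget 2**30 and the target of 10 expected solutions are illustrative placeholders:

from fpylll import Enumeration, EnumerationError, Pruning
from fpylll.util import gaussian_heuristic

r = [M.get_r(i, i) for i in range(M.d)]
radius = max(r[0] * 0.99, gaussian_heuristic(r) * 1.1)   # cf. gh_factor above
pruning = Pruning.run(radius, 2**30, r, 10,
                      metric="solutions", flags=Pruning.GRADIENT)
try:
    dist, coords = Enumeration(M).enumerate(0, M.d, radius, 0,
                                            pruning=pruning.coefficients)[0]
except EnumerationError:   # a pruned enumeration may find nothing below the radius
    dist, coords = None, None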
Example #4
def svp_enum(bkz, params, goal):
    n = bkz.M.d
    r = [bkz.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)

    rerandomize = False
    while bkz.M.get_r(0, 0) > goal:
        if rerandomize:
            bkz.randomize_block(0, n)
        bkz.svp_preprocessing(0, n, params)

        strategy = params.strategies[n]
        radius = goal
        pruning = strategy.get_pruning(goal, gh)

        try:
            enum_obj = Enumeration(bkz.M)
            max_dist, solution = enum_obj.enumerate(
                0, n, radius, 0, pruning=pruning.coefficients)[0]
            bkz.svp_postprocessing(0, n, solution, tracer=dummy_tracer)
            rerandomize = False
        except EnumerationError:
            rerandomize = True

        bkz.lll_obj()

    return
Example #5
def test_multisol():
    A = make_integer_matrix()
    m = GSO.Mat(A)
    lll_obj = LLL.Reduction(m)
    lll_obj()

    solutions = []
    solutions = Enumeration(m, nr_solutions=200).enumerate(0, 27, 48.5, 0)
    assert len(solutions) == 126 // 2
    for _, sol in solutions:
        sol = IntegerMatrix.from_iterable(1, A.nrows,
                                          map(lambda x: int(round(x)), sol))
        sol = tuple((sol * A)[0])
        dist = sum([x**2 for x in sol])
        assert dist == 48

    solutions = []
    solutions = Enumeration(m, nr_solutions=126 // 2).enumerate(0, 27, 100., 0)
    assert len(solutions) == 126 // 2
    for _, sol in solutions:
        sol = IntegerMatrix.from_iterable(1, A.nrows,
                                          map(lambda x: int(round(x)), sol))
        sol = tuple((sol * A)[0])
        dist = sum([x**2 for x in sol])
        assert dist == 48
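The coordinate-to-vector conversion in this test can also be done with A.multiply_left, as the SVP test further down does. An equivalent sketch of the first check, under the same setup:

# same length check as above, using multiply_left instead of a 1-row IntegerMatrix
for dist, coords in Enumeration(m, nr_solutions=200).enumerate(0, 27, 48.5, 0):
    v = A.multiply_left([int(round(c)) for c in coords])
    assert sum(x * x for x in v) == 48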
Example #6
def enum_trial(bkz, preproc_cost, radius):
    n = bkz.A.nrows

    r = [bkz.M.get_r(i, i) for i in range(0, n)]       
    gh = gaussian_heuristic(r)

    PRUNE_START = time()
    NPS = 2**24
    pruning = prune(radius, NPS * preproc_cost, [r], 10, 
                    metric="solutions", float_type="dd",
                    flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
    PRUNE_TIME = time() - PRUNE_START    
    ENUM_START = time()
    enum_obj = Enumeration(bkz.M, sub_solutions=True)
    success = False
    try:        
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        success = True
    except EnumerationError:
        pass
    print ("# [Prune] time %.4f"%PRUNE_TIME)

    ENUM_TIME = time() - ENUM_START
    print ("# [Enum]  (Expecting %.5f solutions)"%(pruning.expectation)),
    print (", TIME = %.2f"%ENUM_TIME)
    """
    print ("# subsolutions : r[i]/gh"),
    for (a, b) in enum_obj.sub_solutions:
        print ("%.3f"%abs(a/gh)),
    print 
    """
    insert_sub_solutions(bkz, enum_obj.sub_solutions)    
    return success
Example #7
    def parallel_svp_reduction_worker(self, kappa, block_size, params,
                                      rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self,
                               verbosity=params.flags & BKZ.VERBOSE,
                               root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1,
                        kappa + block_size,
                        density=params.rerandomization_density,
                        tracer=tracer)
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size,
                                                     root_det,
                                                     params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=pruning.expectation,
                                full=block_size == params.block_size):
                max_dist, solution = enum_obj.enumerate(
                    kappa,
                    kappa + block_size,
                    radius,
                    expo,
                    pruning=pruning.coefficients)[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution, max_dist = None, None

        return solution, max_dist, tracer.trace, pruning.expectation
Example #8
def test_enum_enum():
    for int_type in int_types:
        A = make_integer_matrix(20, 20, int_type=int_type)
        LLL.reduction(A)
        for float_type in float_types:
            M = GSO.Mat(copy(A), float_type=float_type)
            M.update_gso()
            enum_obj = Enumeration(M)
            enum_obj.enumerate(0, M.d, M.get_r(0, 0), 0)
Example #9
File: bkz2.py Project: fplll/fpylll
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        self.lll_obj.size_reduction(0, kappa+1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(kappa+1, kappa+block_size,
                                             density=param.rerandomization_density, tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, block_size, param, tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.probability,
                                    full=block_size==param.block_size):
                    solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, radius, expo,
                                                            pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution, tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa+1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Example #10
    def svp_reduction_mpi_trial_enum(self, bkz_sub, preproc_cost, radius, kappa, block_size):
        verbose = 0
        bkz_sub.M.update_gso()
        r = [bkz_sub.M.get_r(i, i) for i in range(kappa, kappa+block_size)]
        r_old = r[0]
        gh = gaussian_heuristic(r)
        PRUNE_START = time()
        try:
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                                metric="solutions", float_type="mpfr",
                                flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
            """
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 0.0001,
                                metric="probability", float_type="mpfr",
                                flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)            
            """
        except:
            return False, -1, 0, 0, 0
        PRUNE_TIME = time() - PRUNE_START
        ENUM_START = time()
        enum_obj = Enumeration(bkz_sub.M, sub_solutions=True)
        success = False
        length = -1
        #print radius, pruning.coefficients
        estimate_cost = sum(pruning.detailed_cost) / NPS[block_size]
        try:        
            enum_obj.enumerate(kappa, kappa+block_size, radius, 0, pruning=pruning.coefficients)
            length = enum_obj.sub_solutions[0][0]
            success = True
        except EnumerationError:
            pass

        ENUM_TIME = time() - ENUM_START
        if (verbose):
            print ("# [Prune] time %.4f"%PRUNE_TIME)
            print ("# [Enum]  (Expecting %.5f solutions)"%(pruning.expectation)),
            print (", TIME = %.2f"%ENUM_TIME)
        """
        for (a, b) in enum_obj.sub_solutions[:20]:
            print "%.3f"%abs(a/gh),
        print 
        """
        bkz_sub.M.update_gso()
        #A_old = deepcopy(bkz_sub.A)
        bkz_sub.insert_sub_solutions(kappa, block_size, enum_obj.sub_solutions[:1 + block_size // 4])
        #print self.compare(A_old, bkz_sub.A)
        bkz_sub.M.update_gso()
        r_new = bkz_sub.M.get_r(kappa, kappa)
        if (r_new < r_old):
            success = True
            length = r_new
        
        return success, length, PRUNE_TIME, ENUM_TIME, estimate_cost
Example #11
def test_pruner():

    # A dummy Pruning.run to load tabulated values
    Pruning.run(5, 50, 10*[1.], .5)

    for (n, overhead) in dim_oh:

        print(" \n ~~~~ Dim %d \n" % n)

        M = prepare(n)
        r = [M.get_r(i, i) for i in range(n)]

        print(" \n GREEDY")
        radius = gaussian_heuristic(r) * 1.6
        print("pre-greedy radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.ZEALOUS, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GRADIENT \n")

        print("radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.GRADIENT, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n HYBRID \n")

        print("radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.ZEALOUS, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2
Example #12
File: yolobkz.py Project: gbonnoron/yolo
 def enum(self, k, b, radius, pruning, for_hints=False):
     solutions = []
     try:
         if self.recycle:
             enum_obj = Enumeration(self.M,
                                    b / 2,
                                    always_update_radius=True)
         else:
             enum_obj = Enumeration(self.M, 1, always_update_radius=True)
         if pruning is None:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=1.):
                 enum_obj.enumerate(k, k + b, radius, 0, aux_sols=solutions)
         else:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=pruning.probability):
                 enum_obj.enumerate(k,
                                    k + b,
                                    radius,
                                    0,
                                    pruning=pruning.coefficients,
                                    aux_sols=solutions)
         return solutions[0][0], [sol for (sol, _) in solutions[1:]]
     except EnumerationError:
         return None, []
Example #13
def test_svp():
    for m, n in dimensions:
        A = make_integer_matrix(m, n)
        A = LLL.reduction(A)
        M = GSO.Mat(A)
        M.update_gso()
        E = Enumeration(M)
        _, v1 = E.enumerate(0, M.d, M.get_r(0, 0), 0)[0]
        v1 = A.multiply_left(v1)
        nv1 = sum([v_**2 for v_ in v1])

        v0 = SVP.shortest_vector(A)
        nv0 = sum([v_**2 for v_ in v0])

        assert nv0 == nv1
Example #14
 def enumeration_cvp(self, lattice, gso, pruning, radius, target):
     self.log("Start Enumeration(CVP).")
     try:
         E = Enumeration(gso)
         enum = E.enumerate(0,
                            lattice.nrows,
                            radius,
                            0,
                            gso.from_canonical(target),
                            pruning=pruning.coefficients)
         _, v1 = enum[0]
         return self.vector_from_coeffs(v1, lattice)
     except EnumerationError:
         self.log("No solution.")
         return None
Example #15
def test_cvp():
    for m, n in dimensions:
        A = make_integer_matrix(m, n)
        A = LLL.reduction(A)
        M = GSO.Mat(A)
        M.update_gso()
        t = list(make_integer_matrix(n, n)[0])
        v0 = CVP.closest_vector(A, t)

        E = Enumeration(M)
        v1, _ = E.enumerate(0, A.nrows, 2, 40, M.from_canonical(t))
        v1 = IntegerMatrix.from_iterable(1, A.nrows, map(lambda x: int(round(x)), v1))
        v1 = tuple((v1*A)[0])

        assert v0 == v1
Example #16
File: pbkz.py Project: fplll/fpylll
    def parallel_svp_reduction_worker(self, kappa, block_size, params, rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self, verbosity=params.flags & BKZ.VERBOSE, root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1, kappa + block_size, density=params.rerandomization_density, tracer=tracer
                    )
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context(
                "enumeration", enum_obj=enum_obj, probability=pruning.probability, full=block_size == params.block_size
            ):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, radius, expo, pruning=pruning.coefficients
                )[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution = None

        return solution, tracer.trace, pruning.probability
Example #17
def test_enum_init():
    for int_type in int_types:
        A = make_integer_matrix(20, 20, int_type=int_type)
        for float_type in float_types:
            M = GSO.Mat(copy(A), float_type=float_type)
            enum_obj = Enumeration(M)
            del enum_obj
Example #18
def test_cvp():
    for m, n in dimensions:
        A = make_integer_matrix(m, n)
        A = LLL.reduction(A)
        M = GSO.Mat(A)
        M.update_gso()
        t = list(make_integer_matrix(n, n)[0])
        v0 = CVP.closest_vector(A, t)

        E = Enumeration(M)
        v1, _ = E.enumerate(0, A.nrows, 2, 40, M.from_canonical(t))[0]
        v1 = IntegerMatrix.from_iterable(1, A.nrows,
                                         map(lambda x: int(round(x)), v1))
        v1 = tuple((v1 * A)[0])

        assert v0 == v1
Example #19
File: bkz.py Project: MatthiasMi/fpylll
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo,
                                                       block_size, root_det,
                                                       params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=1.0):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1 << expo):
            return None
        else:
            return solution
Example #20
    def svp_reduction(self, kappa, block_size):
        """Call the SVP oracle and insert found vector into basis.

        :param kappa: row index
        :param block_size: an integer > 2

        """
        clean = True

        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        max_dist, expo = self.m.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enumeration(self.m).enumerate(kappa,
                                                           kappa + block_size,
                                                           max_dist,
                                                           expo,
                                                           pruning=None)[0]

        if max_dist >= delta_max_dist * (1 << expo):
            return clean

        nonzero_vectors = len([x for x in solution if x])

        if nonzero_vectors == 1:
            first_nonzero_vector = None
            for i in range(block_size):
                if abs(solution[i]) == 1:
                    first_nonzero_vector = i
                    break

            self.m.move_row(kappa + first_nonzero_vector, kappa)
            self.lll_obj.size_reduction(kappa,
                                        kappa + first_nonzero_vector + 1)

        else:
            d = self.m.d
            self.m.create_row()

            with self.m.row_ops(d, d + 1):
                for i in range(block_size):
                    self.m.row_addmul(d, kappa + i, solution[i])

            self.m.move_row(d, kappa)
            self.lll_obj(kappa, kappa, kappa + block_size + 1)
            self.m.move_row(kappa + block_size, d)

            self.m.remove_last_row()

        return False
Example #21
def test_callback_enum(d=40):

    FPLLL.set_random_seed(0x1337)
    A = LLL.reduction(IntegerMatrix.random(100, "qary", k=50, q=7681))
    M = GSO.Mat(A)
    M.update_gso()

    # we are not imposing a constraint
    enum_obj = Enumeration(M)
    solutions = enum_obj.enumerate(0, d, 0.99*M.get_r(0, 0), 0)
    max_dist, sol = solutions[0]
    assert(A.multiply_left(sol)[0] != 2)

    # now we do
    def callback(new_sol_coord):
        if A.multiply_left(new_sol_coord)[0] == 2:
            return True
        else:
            return False

    enum_obj = Enumeration(M, callbackf=callback)
    solutions = enum_obj.enumerate(0, d, 0.99*M.get_r(0, 0), 0)
    max_dist, sol = solutions[0]

    assert(A.multiply_left(sol)[0] == 2)
Example #22
def test_enum_gram_coherence():
    """
        Test if the enumeration algorithm is consistent with the Gram matrices
        The vectors returned by the enumeration should be the same whether a
        lattice is given by its basis or by its Gram matrix
    """

    dimensions = ((3, 3), (10, 10), (20, 20), (25, 25))

    for m, n in dimensions:
        for int_type in int_types:
            A = make_integer_matrix(m, n, int_type=int_type)
            LLL.reduction(A)
            G = tools.compute_gram(A)
            for float_type in float_types:
                M_A = GSO.Mat(copy(A), float_type=float_type, gram=False)
                M_G = GSO.Mat(copy(G), float_type=float_type, gram=True)

                M_A.update_gso()
                M_G.update_gso()

                enum_obj_a = Enumeration(M_A, nr_solutions=min(m, 5))
                shortest_vectors_a = enum_obj_a.enumerate(
                    0, M_A.d, M_A.get_r(0, 0), 0)

                enum_obj_g = Enumeration(M_G, nr_solutions=min(m, 5))
                shortest_vectors_g = enum_obj_g.enumerate(
                    0, M_G.d, M_G.get_r(0, 0), 0)

                for i in range(len(shortest_vectors_a)):
                    assert shortest_vectors_a[i] == shortest_vectors_g[i]
Example #23
File: bkz.py Project: gbonnoron/fpylll
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration", enum_obj=enum_obj, probability=1.0):
                solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1<<expo):
            return None
        else:
            return solution
Example #24
    def svp_reduction(self, kappa, block_size):
        """Call the SVP oracle and insert found vector into basis.

        :param kappa: row index
        :param block_size: an integer > 2

        """
        clean = True

        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        max_dist, expo = self.m.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enum.enumerate(self.m, max_dist, expo, kappa, kappa + block_size, None)

        if max_dist >= delta_max_dist:
            return clean

        nonzero_vectors = len([x for x in solution if x])

        if nonzero_vectors == 1:
            first_nonzero_vector = None
            for i in range(block_size):
                if abs(solution[i]) == 1:
                    first_nonzero_vector = i
                    break

            self.m.move_row(kappa + first_nonzero_vector, kappa)
            self.lll_obj.size_reduction(kappa, kappa + 1)

        else:
            d = self.m.d
            self.m.create_row()

            with self.m.row_ops(d, d+1):
                for i in range(block_size):
                    self.m.row_addmul(d, kappa + i, solution[i])

            self.m.move_row(d, kappa)
            self.lll_obj(kappa, kappa, kappa + block_size + 1)
            self.m.move_row(kappa + block_size, d)

            self.m.remove_last_row()

        return False
Example #25
    def dsvp_reduction(self, kappa, block_size):
        """FIXME! briefly describe function

        :param kappa:
        :param block_size:
        :returns:
        :rtype:

        """
        clean = True

        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        max_dist, expo = self.m.get_r_exp(kappa + block_size - 1,
                                          kappa + block_size - 1)
        max_dist = 1.0 / max_dist
        expo *= -1.0
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enumeration(self.m).enumerate(kappa,
                                                           kappa + block_size,
                                                           max_dist,
                                                           expo,
                                                           pruning=None,
                                                           dual=True)[0]
        if max_dist >= delta_max_dist:
            return clean

        with self.m.row_ops(kappa, kappa + block_size):
            pairs = list(enumerate(solution, start=kappa))
            [self.m.negate_row(pair[0]) for pair in pairs if pair[1] < 0]
            pairs = map(lambda x: (x[0], abs(x[1])), pairs)
            # GCD should be tree based but for proof of concept implementation, this will do
            row, x = reduce(self.euclid, pairs)
            if x != 1:
                raise RuntimeError("Euclid failed!")
            self.m.move_row(row, kappa + block_size - 1)
        self.lll_obj(kappa, kappa, kappa + block_size)

        return False
Example #26
    def dsvp_reduction(self, kappa, block_size):
        """FIXME! briefly describe function

        :param kappa:
        :param block_size:
        :returns:
        :rtype:

        """
        clean = True

        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        max_dist, expo = self.m.get_r_exp(kappa + block_size - 1, kappa + block_size - 1)
        max_dist = 1.0/max_dist
        expo *= -1.0
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enum.enumerate(self.m, max_dist, expo, kappa, kappa + block_size, None, dual=True)
        if max_dist >= delta_max_dist:
            return clean

        with self.m.row_ops(kappa, kappa+block_size):
            pairs = list(enumerate(solution, start=kappa))
            [self.m.negate_row(pair[0]) for pair in pairs if pair[1] < 0]
            pairs = map(lambda x: (x[0], abs(x[1])), pairs)
            # GCD should be tree based but for proof of concept implementation, this will do
            row, x = reduce(self.euclid, pairs)
            if x != 1:
                raise RuntimeError("Euclid failed!")
            self.m.move_row(row, kappa + block_size - 1)
        self.lll_obj(kappa, kappa, kappa + block_size)

        return False
Example #27
 def enum(self, k, b, radius, pruning, for_hints=False):
     solutions = []
     try:
         if self.recycle:
             enum_obj = Enumeration(self.M, b / 2)
         else:
             enum_obj = Enumeration(self.M, 1)
         if pruning is None:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=1.):
                 solutions = enum_obj.enumerate(k, k + b, radius, 0)
         else:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=pruning.expectation):
                 solutions = enum_obj.enumerate(
                     k, k + b, radius, 0, pruning=pruning.coefficients)
         return [sol for (sol, _) in solutions[0:]]
     except EnumerationError:
         return None, []
Example #28
    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - params.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=params.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           params,
                                           tracer=tracer)

            with tracer.context("pruner"):
                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Example #29
File: yolosvp.py Project: malb/yolo
def yolo_hsvp(n, A, gh_factor, core=0):
    timer = Timer()
    ybkz = YoloBKZ(A, tuners=tuners)

    start_from = None
    start_from_rec = None

    first_len = ybkz.M.get_r(0, 0)
    root_det = ybkz.M.get_root_det(0, n)

    gh_radius, ge = gaussian_heuristic(first_len, 0, n, root_det, 1.)
    gh_radius = abs(gh_radius * 2**ge)
    radius = gh_factor * gh_radius

    target_prob = (1. / gh_factor)**(n / 2)

    trial = 0
    count = 0
    restarted = 0
    ybkz.randomize(0, n, density=1)

    while True:
        timer.reset()
        max_efficiency = 0.
        for b in range(8, n // 2, 4):
            ybkz.tour(b, target_prob=.50)

        restarted += 1
        for b in range(n // 2, n - 10, 2):
            count += 1
            ybkz.tour(b, target_prob=.10)
            overhead = NODE_PER_SEC * timer.elapsed()
            R = tuple([ybkz.M.get_r(i, i) for i in range(0, n)])

            title = "c=%d r=%d b=%d t=%.1fs" % (core, restarted, b,
                                                timer.elapsed())
            print(title)

            pruning = prune(radius,
                            overhead,
                            target_prob, [R],
                            descent_method="hybrid",
                            precision=53,
                            start_from=start_from)
            start_from = pruning.coefficients
            print "c=%d  pruning approximated  t=%.1fs" % (core,
                                                           timer.elapsed())

            pruning = prune(radius,
                            overhead,
                            target_prob, [R],
                            descent_method="gradient",
                            precision=YOLO_PRUNER_PREC,
                            start_from=start_from)
            title = "c=%d r=%d b=%d t=%.1fs p=%1.2e e=%.1fs" % (
                core, restarted, b, timer.elapsed(),
                pruning.probability / target_prob,
                (target_prob * timer.elapsed()) / pruning.probability)
            print(title)

            plot_and_save([log(x / gh_radius) / log(2.) for x in R], title,
                          '%d/c%ds%d.png' % (n, core, count))

            start_from = pruning.coefficients
            try:
                enum_obj = Enumeration(ybkz.M)
                solution, _ = enum_obj.enumerate(0,
                                                 n,
                                                 radius,
                                                 0,
                                                 pruning=pruning.coefficients)
                ybkz.insert(0, n, solution)
                print()
                print(list(A[0]))
                return
            except EnumerationError:
                print "c=%d Enum failed  t=%.1fs" % (core, timer.elapsed())
                pass

            efficiency = (pruning.probability / timer.elapsed())

            #  RECYCLING
            r_start = count % 10
            recycling_radius = ybkz.M.get_r(r_start, r_start) * .99
            pruning = prune(recycling_radius,
                            overhead,
                            target_prob, [R[r_start:]],
                            descent_method="hybrid",
                            precision=53)
            title = "REC c=%d r=%d b=%d t=%.1fs p=%1.2e e=%.1fs" % (
                core, restarted, b, timer.elapsed(),
                pruning.probability / target_prob,
                (target_prob * timer.elapsed()) / pruning.probability)
            print(title)

            try:
                hints = []
                enum_obj = Enumeration(ybkz.M, n // 2)
                solution, _ = enum_obj.enumerate(r_start,
                                                 n,
                                                 recycling_radius,
                                                 r_start,
                                                 pruning=pruning.coefficients,
                                                 aux_sols=hints)
                hints = [sol for (sol, _) in hints[1:]]
                ybkz.insert(r_start, n, solution, hints=hints)
                print "c=%d Recycled %d t=%.1fs" % (core, len(hints) + 1,
                                                    timer.elapsed())
                break
            except EnumerationError:
                pass
            start_from_rec = pruning.coefficients
            #  END OF RECYCLING

            if 2 * efficiency < max_efficiency:
                ybkz.randomize(0, n, density=1)
                ybkz.lll_obj(0, 0, n)
                break
            max_efficiency = max(efficiency, max_efficiency)
            timer.reset()
Example #30
    def svp_reduction_single(self,
                             kappa,
                             block_size,
                             params,
                             tracer=dummy_tracer):
        """
        :param kappa:
        :param block_size:
        :param params:
        :param tracer:
        """
        print(self.lll_obj.delta)
        verbose = 0

        if (params.flags & BKZ.DUMP_GSO):
            verbose = 1

        if (verbose):
            start_time = time()
        self.M.update_gso()
        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_length = gaussian_heuristic(r)
        kappa_length = self.M.get_r(kappa, kappa)
        goal = min(kappa_length, gh_length)

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)
        remaining_probability = 1.0
        rerandomize = False
        trials = 0
        sub_solutions = block_size > SUBSOL_BLOCKSIZE

        # copy old lattice
        if (params.flags & BKZ.DUMP_GSO):
            A_backup = self.copy_to_IntegerMatrix_long(self.A)
            v_old = A_backup[kappa]
            r_old = [
                log(self.M.get_r(i, i))
                for i in range(kappa, kappa + block_size)
            ]

        # main loop
        while remaining_probability > 1. - params.min_success_probability:

            # 1. preprocessing
            preproc_start = time()

            with tracer.context("preprocessing"):
                #self.M.update_gso()
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       trials,
                                       tracer=tracer)
            preproc_cost = time() - preproc_start

            with tracer.context("pruner"):
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)

                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            # 2. enum
            enum_obj = Enumeration(self.M, sub_solutions=sub_solutions)
            try:
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]

                # 3. post processing
                with tracer.context("postprocessing"):
                    # Include post_processing time as the part of the next pre_processing
                    preproc_start = time()

                    if not sub_solutions:
                        self.svp_postprocessing(kappa,
                                                block_size,
                                                solution,
                                                tracer=tracer)
                    if sub_solutions:
                        self.insert_sub_solutions(
                            kappa, block_size,
                            enum_obj.sub_solutions[:1 + block_size // 4])
                    self.M.update_gso()

            except EnumerationError:
                preproc_start = time()

            remaining_probability *= (1 - pruning.expectation)

            trials += 1

        # recover basis
        if (params.flags & BKZ.DUMP_GSO):
            r_new = [
                self.M.get_r(i, i) for i in range(kappa, kappa + block_size)
            ]
            current = self.copy_to_vector_long(self.A[kappa])
            # update
            self.copy_from_IntegerMatrix_long(A_backup)
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.lll_obj = LLL.Reduction(self.M, flags=LLL.DEFAULT)
            self.insert_in_IntegerMatrix(self.A, current, kappa, block_size)
            # update again for safe
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.lll_obj = LLL.Reduction(self.M, flags=LLL.DEFAULT)
            if (not self.check_compare(A_backup, self.A, kappa, block_size)):
                print "# error exit"
                sys.exit(1)

        self.M.update_gso()
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)

        if (params.flags & BKZ.DUMP_GSO):
            global stat_update_gh
            r_new = self.M.get_r(kappa, kappa)
            r_newlog = [
                log(self.M.get_r(i, i))
                for i in range(kappa, kappa + block_size)
            ]
            stat_update_gh[stat_tours - 1].append(
                float(sqrt(r_new / gh_length)))

        if (verbose):
            if (rank == 0):
                kappa_length = r_new
                print "# [rank %d] kappa %d, bs %d, r %d (gh %d), time %s, trials %s " % \
                  (rank, kappa, block_size, kappa_length, goal, time()-start_time, trials)

                det_n = float(sum(r_old) / block_size)
                normalized_old = [(r_old[i] - det_n)
                                  for i in range(0, block_size)]
                normalized_new = [(r_newlog[i] - det_n)
                                  for i in range(0, block_size)]
                global stat_old_norm
                global stat_new_norm

                if (block_size == params.block_size):
                    for i in range(block_size):
                        stat_old_norm[i] = stat_old_norm[i] + normalized_old[i]
                        stat_new_norm[i] = stat_new_norm[i] + normalized_new[i]

        return clean
Example #31
    def svp_reduction_single_nosub(self,
                                   kappa,
                                   block_size,
                                   params,
                                   tracer=dummy_tracer):
        """
        :param kappa:
        :param block_size:
        :param params:
        :param tracer:
        """

        if (block_size == 80):
            start_time = clock()

        #if block_size < 30:
        #    return BKZBase.svp_reduction(self, kappa, block_size, params, tracer=tracer)
        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize, trials = 1.0, False, 0

        while remaining_probability > 1. - params.min_success_probability:
            preproc_start = clock()
            with tracer.context("preprocessing"):
                if False:  # ((trials%5)==4):
                    self.randomize_block(kappa + 1,
                                         kappa + block_size,
                                         density=1,
                                         tracer=tracer)
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       tracer=tracer,
                                       trials=trials)
            preproc_cost = clock() - preproc_start

            with tracer.context("pruner"):
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)
                target = min(target, .5)
                # target = params.min_success_probability
                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            if (block_size == 90):
                print " single target is ", target, ", expectation ", pruning.expectation

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    rerandomize = True
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)

            except EnumerationError:
                rerandomize = False

            remaining_probability *= (1 - pruning.expectation)
            trials += 1

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        if (block_size == 90):
            print " time bs 90 is ", clock(
            ) - start_time, ", trials = ", trials, kappa

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Example #32
    def svp_reduction_single(self,
                             kappa,
                             block_size,
                             params,
                             tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        verbose = 0
        if (verbose):
            start_time = time()
            self.M.update_gso()
            r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
            #gh_length = gaussian_heuristic(r) * params.gh_factor
            gh_length = gaussian_heuristic(r) * 1.1
            kappa_length = self.M.get_r(kappa, kappa)
            goal = min(kappa_length, gh_length)

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)
        remaining_probability = 1.0
        rerandomize = False
        trials = 0
        sub_solutions = block_size > SUBSOL_BLOCKSIZE
        while remaining_probability > 1. - params.min_success_probability:

            # 1. preprocessing
            preproc_start = time()
            with tracer.context("preprocessing"):
                self.M.update_gso()
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       trials,
                                       tracer=tracer)
            preproc_cost = time() - preproc_start

            with tracer.context("pruner"):
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)

                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            # 2. enum

            enum_obj = Enumeration(self.M, sub_solutions=sub_solutions)
            try:
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]

                # 3. post processing
                with tracer.context("postprocessing"):
                    # Include post_processing time as the part of the next pre_processing
                    preproc_start = time()

                    if not sub_solutions:
                        self.svp_postprocessing(kappa,
                                                block_size,
                                                solution,
                                                tracer=tracer)
                    if sub_solutions:
                        self.insert_sub_solutions(
                            kappa, block_size,
                            enum_obj.sub_solutions[:1 + block_size // 4])
                    self.M.update_gso()

            except EnumerationError:
                preproc_start = time()

            remaining_probability *= (1 - pruning.expectation)
            trials += 1

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        if (verbose):
            if (rank == 0):
                kappa_length = self.M.get_r(kappa, kappa)
                print "# [rank %d] kappa %d, bs %d, r %d (gh %d), time %s, trials %s " % \
                  (rank, kappa, block_size, kappa_length, goal, time()-start_time, trials)
                print "gh_factor: ", params.gh_factor

        return clean
Example #33
    def svp_reduction(self,
                      kappa,
                      block_size,
                      param,
                      tracer=dummy_tracer,
                      top_level=False):
        if top_level:
            # do a full LLL up to kappa + block_size
            with tracer.context("lll"):
                self.lll_obj(0, kappa, kappa + block_size, 0)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        # make a copy of the local block to restore in case rerandomisation decreases quality
                        self.copy_block(kappa, block_size)
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           param,
                                           tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer,
                                            top_level=top_level)
                    if rerandomize:
                        self.delete_copy_block(kappa,
                                               block_size,
                                               restore=False)
                rerandomize = False

            except EnumerationError:
                with tracer.context("postprocessing"):
                    if rerandomize:
                        # restore the saved block; TODO: don't do this unconditionally
                        self.delete_copy_block(kappa, block_size, restore=True)
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)
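The loop above terminates once the accumulated failure probability drops to 1 - min_success_probability or below. A small back-of-the-envelope check of how many enumeration trials that implies, assuming (for illustration) a constant per-trial success probability:

from math import ceil, log

p = 0.3                        # per-trial success probability (pruning.expectation), illustrative
min_success_probability = 0.5  # as in the BKZ parameters
# remaining_probability after t trials is (1 - p)**t; solve (1 - p)**t <= 1 - min_success_probability
trials = ceil(log(1 - min_success_probability) / log(1 - p))
print(trials)                  # -> 2, since 0.7**2 = 0.49 <= 0.5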
Example #34
0
def test_pruner():

    # A dummy prune to load tabulated values
    prune(5, 50, .5, 10*[1.])

    for (n, overhead) in dim_oh:

        print(" \n ~~~~ Dim %d \n" % n)

        M = prepare(n)
        r = [M.get_r(i, i) for i in range(n)]

        print(" \n GREEDY")
        radius = gaussian_heuristic(r) * 1.6
        print("pre-greedy radius %.4e" % radius)
        tt = clock()
        (radius, pruning) = prune(radius, overhead, 200, r,
                                  descent_method="greedy", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GREEDY \n")
        print("pre-greedy radius %.4e" % radius)
        tt = clock()
        (radius, pruning) = prune(radius, overhead, 200, r, descent_method="greedy", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GRADIENT \n")

        print("radius %.4e" % radius)
        tt = clock()
        pruning = prune(radius, overhead, 200, r, descent_method="gradient", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n HYBRID \n")

        print("radius %.4e" % radius)
        tt = clock()
        pruning = prune(radius, overhead, 200, r, descent_method="hybrid", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2
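The helpers dim_oh and prepare(n) are not shown in this excerpt. A plausible stand-in (an assumption, not the original test code) pairs each test dimension with an enumeration-node budget and returns an LLL-reduced GSO object of a random q-ary lattice:

from fpylll import FPLLL, GSO, IntegerMatrix, LLL

dim_oh = ((60, 2**25), (70, 2**28))      # (dimension, enumeration overhead in nodes); values illustrative

def prepare(n):
    FPLLL.set_random_seed(n)
    A = IntegerMatrix.random(n, "qary", k=n // 2, bits=30)
    M = GSO.Mat(A)
    LLL.Reduction(M)()                   # LLL-reduce so the pruner gets a sensible basis profile
    return M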
Example #35
0
    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        # NOTE: In the tail we might have less than (1+c)β space
        end = min(ceil(kappa + (1 + self.c) * block_size), self.M.d)

        while remaining_probability > 1.0 - params.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=params.rerandomization_density,
                            tracer=tracer,
                        )
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           end,
                                           block_size,
                                           params,
                                           tracer=tracer)
            with tracer.context("pruner"):
                radius, exp, pruning = self.get_pruning(
                    kappa, block_size, params, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context(
                        "enumeration",
                        enum_obj=enum_obj,
                        probability=pruning.expectation,
                        full=True,
                ):  # HACK: we want to record all enumeration costs.
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        exp,
                        pruning=pruning.coefficients,
                    )[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= 1 - pruning.expectation

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
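A worked example of the clamped preprocessing window computed above: with c = 0.25 the preprocessing context ideally extends 25% past the enumeration block, but near the end of the basis it is cut off at the dimension d (the values below are illustrative):

from math import ceil

c, kappa, block_size, d = 0.25, 40, 60, 80
end = min(ceil(kappa + (1 + c) * block_size), d)   # ideal window ends at 115, but only 80 rows exist
print(end)                                         # -> 80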
Example #36
0
    def __call__(cls,
                 M,
                 predicate,
                 squared_target_norm,
                 invalidate_cache=lambda: None,
                 target_prob=None,
                 preproc_offset=20,
                 ph=0,
                 threads=1,
                 **kwds):
        preproc_time = None
        ntests = 0

        if target_prob is None:
            target_prob = cls.DEFAULT_TARGET_PROB

        bkz_res = usvp_pred_bkz_enum_solve(M,
                                           predicate,
                                           block_size=min(
                                               STRATEGIES_MAX_DIM, M.d),
                                           invalidate_cache=invalidate_cache,
                                           threads=threads)

        if bkz_res.success:  # this might be enough
            return bkz_res

        FPLLL.set_threads(threads)

        M.update_gso()
        bkz = BKZ2(M)
        tracer = BKZTreeTracer(bkz, root_label="enum_pred", start_clocks=True)

        remaining_probability, rerandomize, found, solution = (1.0, False,
                                                               False, None)

        while remaining_probability > 1.0 - target_prob:
            invalidate_cache()

            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        bkz.randomize_block(0, M.d, tracer=tracer, density=3)
                with tracer.context("reduction"):
                    with tracer.context("lll"):
                        bkz.lll_obj()
                    for _ in range(4):
                        bkz.tour(
                            BKZ.EasyParam(min(max(M.d - preproc_offset, 2),
                                              STRATEGIES_MAX_DIM),
                                          flags=BKZ.GH_BND),
                            tracer=tracer,
                        )

            if preproc_time is None:
                preproc_time = float(
                    tracer.trace.child("preprocessing")["cputime"])

            with tracer.context("check"):
                for v in M.B:
                    ntests += 1
                    if predicate(v, standard_basis=True):
                        found = True
                        solution = tuple([int(v_) for v_ in v])
                        break

            if found:
                break

            with tracer.context("pruner"):
                preproc_cost = threads * preproc_time * 2 * 10**9 / 100  # 100 cycles per node
                with SuppressStream():
                    r = []
                    for i in range(M.d):
                        r_, exp = M.get_r_exp(i, i)
                        r.append(r_ * 2**(exp - ph))
                    (cost, prob), coeffs = cls.pruning_coefficients(
                        squared_target_norm / 2**ph,
                        r,
                        preproc_cost,
                        target_prob=target_prob)

            def callbackf(v):
                nonlocal ntests
                ntests += 1
                return predicate(v, standard_basis=False)

            enum_obj = Enumeration(M, callbackf=callbackf)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=prob,
                                full=True):
                try:
                    solutions = enum_obj.enumerate(0,
                                                   M.d,
                                                   squared_target_norm / 2**ph,
                                                   ph,
                                                   pruning=coeffs)
                    _, v = solutions[0]
                    found = True
                    solution = tuple([int(v_) for v_ in M.B.multiply_left(v)])
                    break
                except EnumerationError:
                    pass

            rerandomize = True
            remaining_probability *= 1 - prob

        tracer.exit()
        FPLLL.set_threads(1)

        b0, b0e = bkz.M.get_r_exp(0, 0)

        return USVPPredSolverResults(
            success=found,
            solution=solution,
            ntests=ntests + bkz_res.ntests,
            b0=b0**(0.5) * 2**(b0e / 2.0),
            cputime=tracer.trace.data["cputime"] + bkz_res.cputime,
            walltime=tracer.trace.data["walltime"] + bkz_res.walltime,
            data=tracer.trace,
        )
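The enumeration-node budget handed to the pruner above appears to convert preprocessing time into nodes by assuming roughly 2*10**9 CPU cycles per second and about 100 cycles per enumeration node; a small worked example with illustrative numbers:

threads = 4
preproc_time = 12.5                     # seconds of preprocessing measured by the tracer
cycles_per_second = 2 * 10**9           # assumption: ~2 GHz
cycles_per_node = 100                   # as in the comment above
preproc_cost = threads * preproc_time * cycles_per_second / cycles_per_node
print("%.2e nodes" % preproc_cost)      # -> 1.00e+09 nodes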
Example #37
0
File: bkz2.py Project: zhli271828/fpylll
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           param,
                                           tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
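A minimal usage sketch for an svp_reduction of this shape, assuming the surrounding class mirrors fpylll's stock BKZ2 reduction (class and parameter names below follow the public fpylll API, not necessarily this fork):

from fpylll import BKZ, FPLLL, IntegerMatrix, LLL
from fpylll.algorithms.bkz2 import BKZReduction

FPLLL.set_random_seed(1337)
A = IntegerMatrix.random(80, "qary", k=40, bits=30)
LLL.reduction(A)

bkz = BKZReduction(A)
param = BKZ.Param(block_size=30,
                  strategies=BKZ.DEFAULT_STRATEGY,   # provides pruning/preprocessing strategies
                  flags=BKZ.GH_BND,
                  min_success_probability=0.5)
clean = bkz.svp_reduction(0, 30, param)              # SVP-reduce the first block
print(clean)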