Example #1
0
File: bkz3.py  Project: malb/yolo
    def svp_call(self, kappa, block_size, radius, pruning, nr_hints=0, tracer=dummy_tracer):
        """Call SVP oracle.

        :param kappa: current index
        :param block_size: block size
        :param radius: squared enumeration radius
        :param pruning: pruning object with ``expectation``/``coefficients``, or ``None``
        :param nr_hints: number of additional solutions to collect besides the best one
        :param tracer: object for maintaining statistics

        :returns: list of ``(solution, dist)`` pairs, empty if enumeration failed

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        solutions = []
        try:
            # ask for up to nr_hints extra vectors in addition to the best solution
            enum_obj = Enumeration(self.M, nr_hints+1, EvaluatorStrategy.OPPORTUNISTIC_N_SOLUTIONS)
            if pruning is None:
                with tracer.context("enumeration", enum_obj=enum_obj, probability=1., full=block_size==self.params.bkz_param.block_size):
                    solutions = enum_obj.enumerate(kappa, kappa + block_size, radius, 0)
            else:
                with tracer.context("enumeration", enum_obj=enum_obj, probability=pruning.expectation, full=block_size==self.params.bkz_param.block_size):
                    solutions = enum_obj.enumerate(kappa, kappa + block_size, radius, 0, pruning=pruning.coefficients)
            return solutions
        except EnumerationError:
            # no vector found within the (pruned) radius
            return []
Пример #2
0
def enum_trial(bkz, preproc_cost, radius):
    """Run one pruned enumeration trial over the full basis.

    Pruning is optimized for the "solutions" metric, enumeration records
    sub-solutions, and all sub-solutions found are inserted into the basis.

    :param bkz: BKZ-like object exposing ``A`` (basis) and ``M`` (GSO)
    :param preproc_cost: preprocessing cost used to balance pruning effort
    :param radius: squared enumeration radius
    :returns: ``True`` if enumeration found a vector within the radius
    """
    n = bkz.A.nrows

    r = [bkz.M.get_r(i, i) for i in range(0, n)]
    # NOTE(review): gh is only referenced by the disabled debug block below
    gh = gaussian_heuristic(r)

    PRUNE_START = time()
    NPS = 2**24  # nodes per second, used to convert preproc_cost into a node budget
    pruning = prune(radius, NPS * preproc_cost, [r], 10,
                    metric="solutions", float_type="dd",
                    flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
    PRUNE_TIME = time() - PRUNE_START
    ENUM_START = time()
    enum_obj = Enumeration(bkz.M, sub_solutions=True)
    success = False
    try:
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        success = True
    except EnumerationError:
        # nothing within the pruned radius; sub-solutions may still be useful
        pass
    print ("# [Prune] time %.4f"%PRUNE_TIME)

    ENUM_TIME = time() - ENUM_START
    # NOTE(review): the trailing comma is a Python 2 print idiom (suppress
    # newline); under Python 3 it builds a throwaway tuple — confirm interpreter
    print ("# [Enum]  (Expecting %.5f solutions)"%(pruning.expectation)),
    print (", TIME = %.2f"%ENUM_TIME)
    """
    print ("# subsolutions : r[i]/gh"),
    for (a, b) in enum_obj.sub_solutions:
        print ("%.3f"%abs(a/gh)),
    print 
    """
    insert_sub_solutions(bkz, enum_obj.sub_solutions)
    return success
Пример #3
0
def enum_trial(bkz_obj, preproc_cost, gh_factor=1.1):
    """Run one pruned enumeration trial over the full basis (Python 2 code).

    Any sub-solutions found are inserted back into the basis (best quarter).

    :param bkz_obj: BKZ-like object exposing ``A`` (basis) and ``M`` (GSO)
    :param preproc_cost: preprocessing cost used to balance pruning effort
    :param gh_factor: multiple of the Gaussian heuristic used for the radius
    """
    n = bkz_obj.A.nrows

    r = [bkz_obj.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)
    # NOTE(review): max(...) takes the *larger* of .99*r_0 and gh_factor*gh;
    # a tighter radius would use min() — confirm this is intentional
    radius = max(r[0] * .99, gh * gh_factor)
    PRUNE_START = time()
    # NPS (nodes per second) is expected to be defined at module level
    pruning = prune(radius,
                    NPS * preproc_cost, [r],
                    10,
                    metric="solutions",
                    float_type="dd",
                    flags=Pruning.GRADIENT)
    PRUNE_TIME = time() - PRUNE_START
    print "Pruning time %.4f" % PRUNE_TIME
    ENUM_START = time()
    enum_obj = Enumeration(bkz_obj.M, sub_solutions=True)

    try:
        print "Enum  ... (Expecting %.5f solutions)" % (pruning.expectation),
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
    except EnumerationError:
        # nothing within the pruned radius; sub-solutions may still exist
        pass

    ENUM_TIME = time() - ENUM_START
    print " \t\t\t\t\t\t TIME = %.2f" % ENUM_TIME

    zeros = 0  # NOTE(review): unused
    print "subsolutions : r[i]/gh",
    for (a, b) in enum_obj.sub_solutions[:20]:
        print "%.3f" % abs(a / gh),

    # insert the best quarter of sub-solutions (n / 4 is int division in Python 2)
    insert_sub_solutions(bkz_obj, enum_obj.sub_solutions[:n / 4])
    return
Пример #4
0
def test_callback_enum(d=40):
    """Enumerate with and without a callback constraint on accepted solutions."""
    FPLLL.set_random_seed(0x1337)
    basis = LLL.reduction(IntegerMatrix.random(100, "qary", k=50, q=7681))
    gso = GSO.Mat(basis)
    gso.update_gso()

    # without a constraint, the shortest vector's first entry is not 2
    _, coords = Enumeration(gso).enumerate(0, d, 0.99 * gso.get_r(0, 0), 0)[0]
    assert basis.multiply_left(coords)[0] != 2

    # now only accept solutions whose first canonical coordinate equals 2
    def accept(candidate_coords):
        return basis.multiply_left(candidate_coords)[0] == 2

    constrained = Enumeration(gso, callbackf=accept)
    _, coords = constrained.enumerate(0, d, 0.99 * gso.get_r(0, 0), 0)[0]

    assert basis.multiply_left(coords)[0] == 2
Пример #5
0
 def enum(self, k, b, radius, pruning, for_hints=False):
     """Run the enumeration oracle on the projected block ``[k, k+b)``.

     :param k: start index of the block
     :param b: block size
     :param radius: squared enumeration radius
     :param pruning: pruning object with ``probability``/``coefficients``, or ``None``
     :param for_hints: unused here; kept for interface compatibility
     :returns: ``(solutions[0][0], [sol, ...])`` — the leading entry of the best
         solution plus the remaining solution vectors, or ``(None, [])`` on failure
     """
     solutions = []
     try:
         # when recycling, keep up to b/2 candidate solutions for later reuse
         # NOTE(review): b / 2 is float division under Python 3 — presumably
         # Python 2 code; confirm the target interpreter
         if self.recycle:
             enum_obj = Enumeration(self.M,
                                    b / 2,
                                    always_update_radius=True)
         else:
             enum_obj = Enumeration(self.M, 1, always_update_radius=True)
         if pruning is None:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=1.):
                 enum_obj.enumerate(k, k + b, radius, 0, aux_sols=solutions)
         else:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=pruning.probability):
                 enum_obj.enumerate(k,
                                    k + b,
                                    radius,
                                    0,
                                    pruning=pruning.coefficients,
                                    aux_sols=solutions)
         # first entry is the best solution; the rest are returned as hints
         return solutions[0][0], [sol for (sol, _) in solutions[1:]]
     except EnumerationError:
         return None, []
Пример #6
0
def test_enum_gram_coherence():
    """Basis- and Gram-given lattices must enumerate to identical vectors.

    A lattice can be handed to the GSO object either as a basis or as its
    Gram matrix; the shortest vectors found by enumeration should be the
    same whichever representation is used.
    """
    dims = ((3, 3), (10, 10), (20, 20), (25, 25))

    for nrows, ncols in dims:
        for int_type in int_types:
            basis = make_integer_matrix(nrows, ncols, int_type=int_type)
            LLL.reduction(basis)
            gram = tools.compute_gram(basis)
            for float_type in float_types:
                gso_basis = GSO.Mat(copy(basis), float_type=float_type, gram=False)
                gso_gram = GSO.Mat(copy(gram), float_type=float_type, gram=True)

                gso_basis.update_gso()
                gso_gram.update_gso()

                sols_basis = Enumeration(gso_basis, nr_solutions=min(nrows, 5)).enumerate(
                    0, gso_basis.d, gso_basis.get_r(0, 0), 0)
                sols_gram = Enumeration(gso_gram, nr_solutions=min(nrows, 5)).enumerate(
                    0, gso_gram.d, gso_gram.get_r(0, 0), 0)

                for idx, sol in enumerate(sols_basis):
                    assert sol == sols_gram[idx]
Пример #7
0
def test_enum_enum():
    """Smoke-test enumeration across all integer and float types."""
    for int_type in int_types:
        basis = make_integer_matrix(20, 20, int_type=int_type)
        LLL.reduction(basis)
        for float_type in float_types:
            gso = GSO.Mat(copy(basis), float_type=float_type)
            gso.update_gso()
            Enumeration(gso).enumerate(0, gso.d, gso.get_r(0, 0), 0)
    def svp_reduction_mpi_trial_enum (self, bkz_sub, preproc_cost, radius, kappa, block_size):
        """Run one pruned enumeration trial on the block ``[kappa, kappa+block_size)``.

        Pruning is optimized for the "solutions" metric, enumeration records
        sub-solutions, and up to ``1 + block_size // 4`` of them are inserted
        back into the basis.

        :param bkz_sub: BKZ-like object exposing ``M`` (GSO) and
            ``insert_sub_solutions``
        :param preproc_cost: preprocessing cost used to balance pruning effort
        :param radius: squared enumeration radius
        :param kappa: start index of the block
        :param block_size: block size
        :returns: ``(success, length, PRUNE_TIME, ENUM_TIME, estimate_cost)``
        """
        verbose = 0
        bkz_sub.M.update_gso()
        r = [bkz_sub.M.get_r(i, i) for i in range(kappa, kappa+block_size)]
        r_old = r[0]
        PRUNE_START = time()
        try:
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                                metric="solutions", float_type="mpfr",
                                flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
        except Exception:
            # pruner failed to converge; report failure to the caller
            # (was a bare ``except:``, which also swallowed KeyboardInterrupt)
            return False, -1, 0, 0, 0
        PRUNE_TIME = time() - PRUNE_START
        ENUM_START = time()
        enum_obj = Enumeration(bkz_sub.M, sub_solutions=True)
        success = False
        length = -1
        # expected enumeration cost expressed in units of preprocessing cost
        estimate_cost = sum(pruning.detailed_cost) / NPS[block_size]
        try:
            enum_obj.enumerate(kappa, kappa+block_size, radius, 0, pruning=pruning.coefficients)
            length = enum_obj.sub_solutions[0][0]
            success = True
        except EnumerationError:
            # nothing within the pruned radius; sub-solutions may still help below
            pass

        ENUM_TIME = time() - ENUM_START
        if (verbose):
            print ("# [Prune] time %.4f"%PRUNE_TIME)
            print ("# [Enum]  (Expecting %.5f solutions)"%(pruning.expectation)),
            print (", TIME = %.2f"%ENUM_TIME)
        bkz_sub.M.update_gso()
        # ``//`` keeps the slice bound an int on Python 3 as well (was ``/``)
        bkz_sub.insert_sub_solutions(kappa, block_size, enum_obj.sub_solutions[:1+block_size//4])
        bkz_sub.M.update_gso()
        r_new = bkz_sub.M.get_r(kappa, kappa)
        if (r_new < r_old):
            # inserting sub-solutions shortened the first vector of the block
            success = True
            length = r_new

        return success, length, PRUNE_TIME, ENUM_TIME, estimate_cost
Пример #9
0
    def svp_call(self, kappa, block_size, params, stats=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param stats: object for maintaining statistics, or ``None``

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            # cap the enumeration radius using the Gaussian heuristic
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = gaussian_heuristic(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            E = Enumeration(self.M)
            solution, max_dist = E.enumerate(kappa, kappa + block_size, max_dist, expo)
            if stats is not None:
                # ``stats`` defaults to None; the original dereferenced it
                # unconditionally and raised AttributeError without a stats object
                stats.current_tour["enum nodes"] += E.get_nodes()
        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                # failing inside the GH bound is expected — no solution this time
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist:
            # not shorter than delta * ||b_kappa*||^2: nothing worth inserting
            return None
        else:
            return solution
Пример #10
0
def svp_enum(bkz, params, goal):
    """Preprocess and enumerate repeatedly until ``r_0`` drops to ``goal``."""
    dim = bkz.M.d
    profile = [bkz.M.get_r(i, i) for i in range(dim)]
    gh = gaussian_heuristic(profile)

    needs_rerandomization = False
    while bkz.M.get_r(0, 0) > goal:
        if needs_rerandomization:
            bkz.randomize_block(0, dim)
        bkz.svp_preprocessing(0, dim, params)

        # pruning parameters come from the strategy for the full dimension
        pruning = params.strategies[dim].get_pruning(goal, gh)

        try:
            _, coords = Enumeration(bkz.M).enumerate(
                0, dim, goal, 0, pruning=pruning.coefficients)[0]
            bkz.svp_postprocessing(0, dim, coords, tracer=dummy_tracer)
            needs_rerandomization = False
        except EnumerationError:
            # nothing found within the pruned radius: rerandomize and retry
            needs_rerandomization = True

        bkz.lll_obj()

    return
Пример #11
0
    def parallel_svp_reduction_worker(self, kappa, block_size, params,
                                      rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: whether to rerandomize the block before preprocessing
        :returns: ``(solution, max_dist, trace, expectation)`` where ``solution``
            is in canonical-basis coordinates, or ``(None, None, trace,
            expectation)`` if enumeration failed

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self,
                               verbosity=params.flags & BKZ.VERBOSE,
                               root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    # keep row kappa fixed, rerandomize the rest of the block
                    self.randomize_block(
                        kappa + 1,
                        kappa + block_size,
                        density=params.rerandomization_density,
                        tracer=tracer)
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        # for larger blocks, cap the radius at the Gaussian heuristic bound
        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size,
                                                     root_det,
                                                     params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=pruning.expectation,
                                full=block_size == params.block_size):
                max_dist, solution = enum_obj.enumerate(
                    kappa,
                    kappa + block_size,
                    radius,
                    expo,
                    pruning=pruning.coefficients)[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution, max_dist = None, None

        return solution, max_dist, tracer.trace, pruning.expectation
Пример #12
0
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """Run SVP reduction on ``[kappa, kappa+block_size)``, retrying until the
        accumulated success probability reaches ``param.min_success_probability``.

        :param kappa: current index
        :param block_size: block size
        :param param: BKZ parameters
        :param tracer: object for maintaining statistics
        :returns: ``True`` if the first block vector was not improved ("clean")

        """

        self.lll_obj.size_reduction(0, kappa+1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        # retry with rerandomization until the cumulative failure probability
        # drops below 1 - min_success_probability
        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        # keep row kappa fixed, rerandomize the rest of the block
                        self.randomize_block(kappa+1, kappa+block_size,
                                             density=param.rerandomization_density, tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, block_size, param, tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            # for larger blocks, cap the radius at the Gaussian heuristic bound
            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.probability,
                                    full=block_size==param.block_size):
                    solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, radius, expo,
                                                            pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution, tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            # each (failed or successful) trial covers pruning.probability of the space
            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa+1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        # "clean" iff the first block vector did not get shorter
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Пример #13
0
 def enum(self, k, b, radius, pruning, for_hints=False):
     """Run the enumeration oracle on the projected block ``[k, k+b)``.

     :param k: start index of the block
     :param b: block size
     :param radius: squared enumeration radius
     :param pruning: pruning object with ``expectation``/``coefficients``, or ``None``
     :param for_hints: unused here; kept for interface compatibility
     :returns: list of solution coordinate vectors on success

     .. note::

         NOTE(review): on success this returns a *list*, but on
         ``EnumerationError`` it returns the *tuple* ``(None, [])`` —
         callers must handle both shapes; consider unifying.
     """
     solutions = []
     try:
         # when recycling, keep up to b/2 candidate solutions for reuse
         # NOTE(review): b / 2 is float division under Python 3 — presumably
         # Python 2 code; confirm the target interpreter
         if self.recycle:
             enum_obj = Enumeration(self.M, b / 2)
         else:
             enum_obj = Enumeration(self.M, 1)
         if pruning is None:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=1.):
                 solutions = enum_obj.enumerate(k, k + b, radius, 0)
         else:
             with self.tracer.context("enumeration",
                                      enum_obj=enum_obj,
                                      probability=pruning.expectation):
                 solutions = enum_obj.enumerate(
                     k, k + b, radius, 0, pruning=pruning.coefficients)
         # drop the norms, keep only the coordinate vectors
         return [sol for (sol, _) in solutions[0:]]
     except EnumerationError:
         return None, []
Пример #14
0
def test_svp():
    """Enumeration and ``SVP.shortest_vector`` agree on the shortest norm."""
    for m, n in dimensions:
        basis = LLL.reduction(make_integer_matrix(m, n))
        gso = GSO.Mat(basis)
        gso.update_gso()

        _, coords = Enumeration(gso).enumerate(0, gso.d, gso.get_r(0, 0), 0)[0]
        enum_vec = basis.multiply_left(coords)
        enum_norm = sum(c ** 2 for c in enum_vec)

        svp_vec = SVP.shortest_vector(basis)
        svp_norm = sum(c ** 2 for c in svp_vec)

        assert svp_norm == enum_norm
Пример #15
0
 def enumeration_cvp(self, lattice, gso, pruning, radius, target):
     """Solve CVP for ``target`` by pruned enumeration.

     :param lattice: lattice basis (rows)
     :param gso: GSO object for the basis
     :param pruning: pruning object providing ``coefficients``
     :param radius: squared enumeration radius
     :param target: target vector in canonical coordinates
     :returns: the lattice vector reconstructed from the found coefficients,
         or ``None`` if enumeration fails
     """
     self.log("Start Enumeration(CVP).")
     try:
         E = Enumeration(gso)
         # supplying the GSO coordinates of the target turns SVP enumeration into CVP
         enum = E.enumerate(0,
                            lattice.nrows,
                            radius,
                            0,
                            gso.from_canonical(target),
                            pruning=pruning.coefficients)
         _, v1 = enum[0]
         return self.vector_from_coeffs(v1, lattice)
     except EnumerationError:
         self.log("No solution.")
         return None
Пример #16
0
def test_cvp():
    """Enumeration with a target must agree with ``CVP.closest_vector``."""
    for m, n in dimensions:
        A = make_integer_matrix(m, n)
        A = LLL.reduction(A)
        M = GSO.Mat(A)
        M.update_gso()
        t = list(make_integer_matrix(n, n)[0])
        v0 = CVP.closest_vector(A, t)

        E = Enumeration(M)
        # NOTE(review): no ``[0]`` indexing on enumerate's result here, unlike the
        # other CVP test in this collection — presumably written against an older
        # fpylll API that returned a single pair; confirm against the installed version
        v1, _ = E.enumerate(0, A.nrows, 2, 40, M.from_canonical(t))
        # round the fractional coefficients and map back to a lattice vector
        v1 = IntegerMatrix.from_iterable(1, A.nrows, map(lambda x: int(round(x)), v1))
        v1 = tuple((v1*A)[0])

        assert v0 == v1
Пример #17
0
    def svp_reduction(self, kappa, block_size):
        """Call the SVP oracle and insert found vector into basis.

        :param kappa: row index
        :param block_size: an integer > 2
        :returns: ``True`` if the basis was left unchanged, ``False`` otherwise

        """
        clean = True

        # LLL-reduce the block first; any swap means the basis already changed
        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        max_dist, expo = self.m.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enum.enumerate(self.m, max_dist, expo, kappa, kappa + block_size, None)

        # no vector shorter than delta * ||b_kappa*||^2: nothing to insert
        if max_dist >= delta_max_dist:
            return clean

        nonzero_vectors = len([x for x in solution if x])

        if nonzero_vectors == 1:
            # the solution is (up to sign) an existing basis vector: move it up
            first_nonzero_vector = None
            for i in range(block_size):
                if abs(solution[i]) == 1:
                    first_nonzero_vector = i
                    break

            self.m.move_row(kappa + first_nonzero_vector, kappa)
            self.lll_obj.size_reduction(kappa, kappa + 1)

        else:
            # general case: build the linear combination in a fresh row,
            # insert it at kappa, then let LLL remove the linear dependency
            d = self.m.d
            self.m.create_row()

            with self.m.row_ops(d, d+1):
                for i in range(block_size):
                    self.m.row_addmul(d, kappa + i, solution[i])

            self.m.move_row(d, kappa)
            self.lll_obj(kappa, kappa, kappa + block_size + 1)
            self.m.move_row(kappa + block_size, d)

            self.m.remove_last_row()

        return False
Пример #18
0
    def parallel_svp_reduction_worker(self, kappa, block_size, params, rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: whether to rerandomize the block before preprocessing
        :returns: ``(solution, trace, probability)`` where ``solution`` is in
            canonical-basis coordinates or ``None`` if enumeration failed

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self, verbosity=params.flags & BKZ.VERBOSE, root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    # keep row kappa fixed, rerandomize the rest of the block
                    self.randomize_block(
                        kappa + 1, kappa + block_size, density=params.rerandomization_density, tracer=tracer
                    )
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        # for larger blocks, cap the radius at the Gaussian heuristic bound
        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context(
                "enumeration", enum_obj=enum_obj, probability=pruning.probability, full=block_size == params.block_size
            ):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, radius, expo, pruning=pruning.coefficients
                )[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution = None

        return solution, tracer.trace, pruning.probability
Пример #19
0
def test_cvp():
    """Enumeration with a target must agree with ``CVP.closest_vector``."""
    for m, n in dimensions:
        basis = LLL.reduction(make_integer_matrix(m, n))
        gso = GSO.Mat(basis)
        gso.update_gso()
        target = list(make_integer_matrix(n, n)[0])
        expected = CVP.closest_vector(basis, target)

        coords, _ = Enumeration(gso).enumerate(
            0, basis.nrows, 2, 40, gso.from_canonical(target))[0]
        # round the fractional coefficients and map back to a lattice vector
        coeffs = IntegerMatrix.from_iterable(
            1, basis.nrows, [int(round(c)) for c in coords])
        found = tuple((coeffs * basis)[0])

        assert expected == found
Пример #20
0
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics, or ``None``

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        from contextlib import nullcontext

        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            # cap the enumeration radius using the Gaussian heuristic
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo,
                                                       block_size, root_det,
                                                       params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            # ``tracer`` defaults to None; the original dereferenced it
            # unconditionally and raised AttributeError without a tracer
            ctx = (tracer.context("enumeration", enum_obj=enum_obj, probability=1.0)
                   if tracer is not None else nullcontext())
            with ctx:
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                # failing inside the GH bound is expected — no solution this time
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1 << expo):
            # not shorter than delta * ||b_kappa*||^2: nothing worth inserting
            return None
        else:
            return solution
Пример #21
0
    def dsvp_reduction(self, kappa, block_size):
        """Dual-SVP reduce the block ``[kappa, kappa + block_size)``.

        Enumerates the dual of the projected block and uses the found dual
        coordinates to move a suitable vector to the end of the block.

        :param kappa: row index
        :param block_size: block size
        :returns: ``True`` if the basis was left unchanged, ``False`` otherwise

        """
        clean = True

        # LLL-reduce the block first; any swap means the basis already changed
        self.lll_obj(0, kappa, kappa + block_size)
        if self.lll_obj.nswaps > 0:
            clean = False

        # dual radius: invert the last GS norm of the block and negate the exponent
        max_dist, expo = self.m.get_r_exp(kappa + block_size - 1, kappa + block_size - 1)
        max_dist = 1.0/max_dist
        expo *= -1.0
        delta_max_dist = self.lll_obj.delta * max_dist

        solution, max_dist = Enum.enumerate(self.m, max_dist, expo, kappa, kappa + block_size, None, dual=True)
        if max_dist >= delta_max_dist:
            return clean

        with self.m.row_ops(kappa, kappa+block_size):
            # make all solution coefficients non-negative by negating rows
            pairs = list(enumerate(solution, start=kappa))
            [self.m.negate_row(pair[0]) for pair in pairs if pair[1] < 0]
            pairs = map(lambda x: (x[0], abs(x[1])), pairs)
            # GCD should be tree based but for proof of concept implementation, this will do
            # NOTE(review): relies on ``reduce`` as a builtin — Python 2; under
            # Python 3 it must come from functools; confirm the target interpreter
            row, x = reduce(self.euclid, pairs)
            if x != 1:
                raise RuntimeError("Euclid failed!")
            self.m.move_row(row, kappa + block_size - 1)
        self.lll_obj(kappa, kappa, kappa + block_size)

        return False
Пример #22
0
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics, or ``None``

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        from contextlib import nullcontext

        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            # cap the enumeration radius using the Gaussian heuristic
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            # ``tracer`` defaults to None; the original dereferenced it
            # unconditionally and raised AttributeError without a tracer
            ctx = (tracer.context("enumeration", enum_obj=enum_obj, probability=1.0)
                   if tracer is not None else nullcontext())
            with ctx:
                solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                # failing inside the GH bound is expected — no solution this time
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1<<expo):
            # not shorter than delta * ||b_kappa*||^2: nothing worth inserting
            return None
        else:
            return solution
Пример #23
0
def yolo_hsvp(n, A, gh_factor, core=0):
    """Search for a vector of norm at most ``gh_factor`` times the Gaussian
    heuristic, alternating BKZ tours, pruned enumeration and a recycling
    enumeration that reuses sub-solutions (Python 2 code).

    :param n: lattice dimension
    :param A: basis matrix (modified in place via ``YoloBKZ``)
    :param gh_factor: target radius as a multiple of the Gaussian heuristic
    :param core: worker id, only used for log messages and plot file names
    """
    timer = Timer()
    ybkz = YoloBKZ(A, tuners=tuners)

    start_from = None
    start_from_rec = None  # NOTE(review): assigned below but never read

    first_len = ybkz.M.get_r(0, 0)
    root_det = ybkz.M.get_root_det(0, n)

    gh_radius, ge = gaussian_heuristic(first_len, 0, n, root_det, 1.)
    gh_radius = abs(gh_radius * 2**ge)
    radius = gh_factor * gh_radius

    # success probability target for the pruner
    target_prob = (1. / gh_factor)**(n / 2)

    trial = 0  # NOTE(review): unused
    count = 0
    restarted = 0
    ybkz.randomize(0, n, density=1)

    while True:
        timer.reset()
        max_efficiency = 0.
        # cheap warm-up tours with increasing block size
        for b in range(8, n / 2, 4):
            ybkz.tour(b, target_prob=.50)

        restarted += 1
        # main loop: stronger tours, then try enumeration after each
        for b in range(n / 2, n - 10, 2):
            count += 1
            ybkz.tour(b, target_prob=.10)
            # enumeration budget equivalent to the time spent so far
            overhead = NODE_PER_SEC * timer.elapsed()
            R = tuple([ybkz.M.get_r(i, i) for i in range(0, n)])

            title = "c=%d r=%d b=%d t=%.1fs" % (core, restarted, b,
                                                timer.elapsed())
            print title

            # cheap first pass to approximate pruning coefficients ...
            pruning = prune(radius,
                            overhead,
                            target_prob, [R],
                            descent_method="hybrid",
                            precision=53,
                            start_from=start_from)
            start_from = pruning.coefficients
            print "c=%d  pruning approximated  t=%.1fs" % (core,
                                                           timer.elapsed())

            # ... then refine them at higher precision
            pruning = prune(radius,
                            overhead,
                            target_prob, [R],
                            descent_method="gradient",
                            precision=YOLO_PRUNER_PREC,
                            start_from=start_from)
            title = "c=%d r=%d b=%d t=%.1fs p=%1.2e e=%.1fs" % (
                core, restarted, b, timer.elapsed(),
                pruning.probability / target_prob,
                (target_prob * timer.elapsed()) / pruning.probability)
            print title

            plot_and_save([log(x / gh_radius) / log(2.) for x in R], title,
                          '%d/c%ds%d.png' % (n, core, count))

            start_from = pruning.coefficients
            try:
                enum_obj = Enumeration(ybkz.M)
                solution, _ = enum_obj.enumerate(0,
                                                 n,
                                                 radius,
                                                 0,
                                                 pruning=pruning.coefficients)
                # success: insert the short vector and stop this worker
                ybkz.insert(0, n, solution)
                print
                print list(A[0])
                return
            except EnumerationError:
                print "c=%d Enum failed  t=%.1fs" % (core, timer.elapsed())
                pass

            efficiency = (pruning.probability / timer.elapsed())

            #  RECYCLING
            # retry from a rotating start index with a slightly smaller radius,
            # collecting extra solutions (hints) to reinsert
            r_start = count % 10
            recycling_radius = ybkz.M.get_r(r_start, r_start) * .99
            pruning = prune(recycling_radius,
                            overhead,
                            target_prob, [R[r_start:]],
                            descent_method="hybrid",
                            precision=53)
            title = "REC c=%d r=%d b=%d t=%.1fs p=%1.2e e=%.1fs" % (
                core, restarted, b, timer.elapsed(),
                pruning.probability / target_prob,
                (target_prob * timer.elapsed()) / pruning.probability)
            print title

            try:
                hints = []
                enum_obj = Enumeration(ybkz.M, n / 2)
                solution, _ = enum_obj.enumerate(r_start,
                                                 n,
                                                 recycling_radius,
                                                 r_start,
                                                 pruning=pruning.coefficients,
                                                 aux_sols=hints)
                hints = [sol for (sol, _) in hints[1:]]
                ybkz.insert(r_start, n, solution, hints=hints)
                print "c=%d Recycled %d t=%.1fs" % (core, len(hints) + 1,
                                                    timer.elapsed())
                break
            except EnumerationError:
                pass
            start_from_rec = pruning.coefficients
            #  END OF RECYCLING

            # restart from scratch when progress per unit time has halved
            if 2 * efficiency < max_efficiency:
                ybkz.randomize(0, n, density=1)
                ybkz.lll_obj(0, 0, n)
                break
            max_efficiency = max(efficiency, max_efficiency)
            timer.reset()
Example #24
0
File: rbkz.py  Project: malb/yolo
    def recycled_svp_reduction(self, kappa, block_size, param, stats):
        """Run SVP reduction on the block ``[kappa, kappa + block_size)``,
        recycling several enumeration solutions via a multi-insert.

        The preprocessing block size is increased on every retry, and the
        pruning is re-optimised so that the accumulated success probability
        eventually reaches ``param.min_success_probability``.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param param: BKZ parameters (``min_success_probability``, ``flags``,
            ``gh_factor``, ...)
        :param stats: statistics object; ``None`` means a ``DummyStats`` is
            used instead

        :returns: ``True`` if the first Gram-Schmidt norm of the block did
            not improve ("clean"), ``False`` otherwise.
        """
        if stats is None:
            stats = DummyStats(self)

        self.M.update_gso()
        self.lll_obj.size_reduction(0, kappa + 1)
        self.lll_obj(kappa, kappa, kappa + block_size)

        # remember |b*_kappa|^2 as (mantissa, exponent) to decide "clean" below
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False
        print " - ",

        preproc_block_size = PREPROC_BLOCK_SIZE_INIT
        while remaining_probability > 1. - param.min_success_probability:
            # harder preprocessing on each new attempt
            preproc_block_size += PREPROC_BLOCK_SIZE_INCR

            start_preproc = time()
            with stats.context("preproc"):
                rec_clean = self.recycled_svp_preprocessing(
                    kappa, block_size, param, stats, preproc_block_size)
            time_preproc = time() - start_preproc

            radius, expo = self.M.get_r_exp(kappa, kappa)

            if param.flags & BKZ.GH_BND:
                # cap the enumeration radius using the Gaussian heuristic
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = gaussian_heuristic(radius, expo, block_size,
                                                  root_det, param.gh_factor)

            # convert preprocessing time into an enumeration-node budget
            overhead = NODE_PER_SEC * time_preproc

            with stats.context("postproc"):
                self.M.update_gso()
                R = dump_r(self.M, kappa, block_size)
                # print R
                # success probability still required from this single call so
                # that the overall target is met
                goal_proba = 1.01 * ((param.min_success_probability - 1) /
                                     remaining_probability + 1)
                pruning = prune(radius * 2**expo,
                                overhead,
                                goal_proba, [R],
                                descent_method="gradient",
                                precision=53)

                print goal_proba, pruning.probability
            try:
                enum_obj = Enumeration(self.M, self.recycling_pool_max_size)
                aux_sols = []
                with stats.context("svp", E=enum_obj):
                    K = [x for x in pruning.coefficients]
                    # relax the radius by 5% and shrink the pruning
                    # coefficients at indices 5..preproc_block_size by the
                    # same factor
                    radius *= 1.05
                    for i in range(5, preproc_block_size):
                        K[i] /= 1.05

                    solution, max_dist = enum_obj.enumerate(kappa,
                                                            kappa + block_size,
                                                            radius,
                                                            expo,
                                                            pruning=K,
                                                            aux_sols=aux_sols)
                    # insert up to 10 auxiliary solutions alongside the main one
                    V = [v for (v, _) in aux_sols[:10]]
                    self.multi_insert(V, kappa, block_size, stats)

            except EnumerationError:
                print 0,
                pass

            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean


# def to_canonical(A, v, kappa, block_size):
#     v = kappa*[0] + [x for x in v] + (A.nrows - (kappa + block_size)) * [0]
#     v = IntegerMatrix.from_iterable(1, A.nrows, map(lambda x: int(round(x)), v))
#     v = tuple((v*A)[0])
#     return v

# def multi_insert_from_canonical(M, V, kappa, block_size):
#     d = M.d
#     s = d
#     l = len(V)
#     for v in V:
#         w = M.babai(v)
#         for i in range(kappa+block_size, d):
#             assert w[i] == 0
#         M.create_row()
#         with self.M.row_ops(s, s+1):
#             for i in range(kappa + block_size):
#                 self.M.row_addmul(s, i, w[i])
#         s += 1

#     for i in reversed(range(l)):
#         self.M.move_row(kappa, d+i)

#     with stats.context("lll"):
#         self.lll_obj(kappa, kappa, kappa + block_size + 1)

#     for i in range(l):
#         self.M.move_row(kappa + block_size + i, s)

#     for i in range(l):
#         self.M.remove_last_row()
Пример #25
0
def approx_svp_time(seed, params, return_queue=None, progressive=False):
    """Run Approx-SVP_{1.05} reduction on an SVP challenge matrix using
    ``params`` and return the resulting trace.

    :param seed: random seed for matrix creation
    :param params: BKZ preprocessing parameters, preprocessing block size is ignored
        (it is replaced by a curve-fitted value, see below)
    :param return_queue: if not ``None``, the result is put on this queue.
    :param progressive: run Progressive-BKZ (shrink the target radius step by
        step instead of aiming at the final norm directly)

    :returns: the tracer's trace (only when ``return_queue`` is falsy)
    """
    from chal import load_svp_challenge
    from fpylll.algorithms.bkz import BKZReduction as BKZBase

    FPLLL.set_random_seed(seed)
    A = load_svp_challenge(params.block_size, seed=seed)
    M = GSO.Mat(A)
    M.update_gso()

    # target: (1.05 * gh)^2, i.e. squared norms throughout
    gh = gaussian_heuristic(M.r())
    target_norm = 1.05**2 * gh

    # cost model: ~2 GHz at ~100 cycles per enumeration node -- TODO confirm
    nodes_per_second = 2.0 * 10**9 / 100.0

    self = BKZ2(M)
    tracer = BKZTreeTracer(self, start_clocks=True)

    rerandomize = False
    preproc_cost = None
    with tracer.context(("tour", 0)):
        # keep retrying until the first basis vector is short enough
        while M.get_r(0, 0) > target_norm:
            with tracer.context("preprocessing"):
                if rerandomize:
                    # previous enumeration failed: rerandomize before retrying
                    self.randomize_block(
                        1,
                        params.block_size,
                        density=params.rerandomization_density,
                        tracer=tracer)
                with tracer.context("reduction"):
                    BKZBase.svp_preprocessing(self, 0, params.block_size,
                                              params, tracer)  # LLL
                    preproc = round(0.9878 * params.block_size -
                                    24.12)  # curve fitted to chal.py output
                    prepar = params.__class__(block_size=preproc,
                                              strategies=params.strategies,
                                              flags=BKZ.GH_BND)
                    self.tour(prepar, 0, params.block_size, tracer=tracer)

            if preproc_cost is None:
                # estimate the node budget once, from the first preprocessing
                preproc_cost = float(
                    tracer.trace.find("preprocessing")["walltime"])
                preproc_cost *= nodes_per_second

            with tracer.context("pruner"):
                # progressive mode only aims 1% below the current first norm
                step_target = M.get_r(0,
                                      0) * 0.99 if progressive else target_norm
                pruner = Pruning.Pruner(step_target,
                                        preproc_cost, [M.r()],
                                        target=1,
                                        metric=Pruning.EXPECTED_SOLUTIONS)
                coefficients = pruner.optimize_coefficients([1.] * M.d)
            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    full=True):
                    max_dist, solution = enum_obj.enumerate(
                        0,
                        params.block_size,
                        target_norm,
                        0,
                        pruning=coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(0,
                                            params.block_size,
                                            solution,
                                            tracer=tracer)
                rerandomize = False
            except EnumerationError:
                rerandomize = True

            self.M.update_gso()
            logger.debug("r_0: %7.2f, target: %7.2f, preproc: %3d" %
                         (log(M.get_r(0, 0), 2), log(target_norm, 2), preproc))

    tracer.exit()
    # record the final shortest-vector norm and the block size actually used
    tracer.trace.data["|A_0|"] = A[0].norm()
    tracer.trace.data["preprocessing_block_size"] = preproc

    if return_queue:
        return_queue.put(tracer.trace)
    else:
        return tracer.trace
Пример #26
0
    def svp_reduction_single_nosub(self,
                                   kappa,
                                   block_size,
                                   params,
                                   tracer=dummy_tracer):
        """
        :param kappa:
        :param block_size:
        :param params:
        :param tracer:
        """

        if (block_size == 80):
            start_time = clock()

        #if block_size < 30:
        #    return BKZBase.svp_reduction(self, kappa, block_size, params, tracer=tracer)
        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize, trials = 1.0, False, 0

        while remaining_probability > 1. - params.min_success_probability:
            preproc_start = clock()
            with tracer.context("preprocessing"):
                if False:  # ((trials%5)==4):
                    self.randomize_block(kappa + 1,
                                         kappa + block_size,
                                         density=1,
                                         tracer=tracer)
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       tracer=tracer,
                                       trials=trials)
            preproc_cost = clock() - preproc_start

            with tracer.context("pruner"):
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)
                target = min(target, .5)
                # target = params.min_success_probability
                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            if (block_size == 90):
                print " single target is ", target, ", expectation ", pruning.expectation

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    rerandomize = True
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)

            except EnumerationError:
                rerandomize = False

            remaining_probability *= (1 - pruning.expectation)
            trials += 1

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        if (block_size == 90):
            print " time bs 90 is ", clock(
            ) - start_time, ", trials = ", trials, kappa

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Пример #27
0
    def svp_reduction_single(self,
                             kappa,
                             block_size,
                             params,
                             tracer=dummy_tracer):
        """Run the SVP reduction loop on a single block, recycling
        sub-solutions for large block sizes.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first Gram-Schmidt norm of the block did
            not improve ("clean"), ``False`` otherwise.
        """

        verbose = 0
        if (verbose):
            # record the starting state so the summary below can report
            # progress towards min(|b*_kappa|^2, 1.1 * gh)
            start_time = time()
            self.M.update_gso()
            r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
            #gh_length = gaussian_heuristic(r) * params.gh_factor
            gh_length = gaussian_heuristic(r) * 1.1
            kappa_length = self.M.get_r(kappa, kappa)
            goal = min(kappa_length, gh_length)

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)
        remaining_probability = 1.0
        rerandomize = False
        trials = 0
        # only collect sub-solutions for blocks above this threshold
        sub_solutions = block_size > SUBSOL_BLOCKSIZE
        while remaining_probability > 1. - params.min_success_probability:

            # 1. preprocessing
            preproc_start = time()
            with tracer.context("preprocessing"):
                self.M.update_gso()
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       trials,
                                       tracer=tracer)
            preproc_cost = time() - preproc_start

            with tracer.context("pruner"):
                # success probability still required from this single call
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)

                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            # 2. enum

            enum_obj = Enumeration(self.M, sub_solutions=sub_solutions)
            try:
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]

                # 3. post processing
                with tracer.context("postprocessing"):
                    preproc_start = time(
                    )  # Include post_processing time as the part of the next pre_processing

                    if not sub_solutions:
                        self.svp_postprocessing(kappa,
                                                block_size,
                                                solution,
                                                tracer=tracer)
                    if sub_solutions:
                        # insert the best sub-solutions (about a quarter of
                        # the block size) instead of only one solution
                        self.insert_sub_solutions(
                            kappa, block_size,
                            enum_obj.sub_solutions[:1 + block_size / 4])
                    self.M.update_gso()

            except EnumerationError:
                preproc_start = time()

            remaining_probability *= (1 - pruning.expectation)
            trials += 1

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        if (verbose):
            # ``rank`` is presumably an MPI rank defined at module level --
            # TODO confirm against the enclosing file
            if (rank == 0):
                kappa_length = self.M.get_r(kappa, kappa)
                print "# [rank %d] kappa %d, bs %d, r %d (gh %d), time %s, trials %s " % \
                  (rank, kappa, block_size, kappa_length, goal, time()-start_time, trials)
                print "gh_factor: ", params.gh_factor

        return clean
Пример #28
0
    def svp_reduction(self,
                      kappa,
                      block_size,
                      param,
                      tracer=dummy_tracer,
                      top_level=False):
        """Run SVP reduction on block ``[kappa, kappa + block_size)``,
        keeping a backup copy of the block so that a rerandomisation that did
        not pay off can be rolled back.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param param: BKZ parameters
        :param tracer: object for maintaining statistics
        :param top_level: if ``True``, run a full LLL up to
            ``kappa + block_size`` first
        """
        if top_level:
            # do a full LLL up to kappa + block_size
            with tracer.context("lll"):
                self.lll_obj(0, kappa, kappa + block_size, 0)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        # make a copy of the local block to restore in case rerandomisation decreases quality
                        self.copy_block(kappa, block_size)
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           param,
                                           tracer=tracer)

            # enumeration radius: |b*_kappa|^2 scaled down by the LLL delta
            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                # cap the radius by the Gaussian-heuristic bound
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer,
                                            top_level=top_level)
                    if rerandomize:
                        # enumeration succeeded: drop the backup copy
                        self.delete_copy_block(kappa,
                                               block_size,
                                               restore=False)
                rerandomize = False

            except EnumerationError:
                with tracer.context("postprocessing"):
                    if rerandomize:
                        # restore block, TODO don't do this unconditionally
                        self.delete_copy_block(kappa, block_size, restore=True)
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)
Пример #29
0
    def svp_reduction(self, kappa, block_size, param, stats):
        """Run SVP reduction on block ``[kappa, kappa + block_size)``, timing
        the preprocessing / pruner / enumeration phases and feeding the
        observed probability-per-second back to the preprocessing decider.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param param: BKZ parameters
        :param stats: statistics object; ``None`` means a ``DummyStats`` is
            used instead

        :returns: ``True`` if the first Gram-Schmidt norm of the block did
            not improve ("clean"), ``False`` otherwise.
        """
        if stats is None:
            stats = DummyStats(self)

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False
        # separate wall-clock timers for the efficiency feedback below
        preproc_timer = LocalTimer()
        enum_timer = LocalTimer()
        pruner_timer = LocalTimer()
        preproc_block_size = 0

        while remaining_probability > 1. - param.min_success_probability:

            with LocalTimerContext(preproc_timer):
                with stats.context("preproc"):
                    if rerandomize:
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            stats=stats)
                    # the preprocessing chooses (and returns) its block size
                    preproc_block_size = self.svp_preprocessing(
                        kappa, block_size, param, stats)
                    self.lll_obj(kappa, kappa, kappa + block_size)

            # success probability still required from this single call
            target_probability = 1.01 * (
                (param.min_success_probability - 1) / remaining_probability +
                1.)

            with LocalTimerContext(pruner_timer):
                radius, pruning = self.decide_enumeration(
                    kappa,
                    block_size,
                    param,
                    stats=stats,
                    preproc_time=preproc_timer.val,
                    target_probability=target_probability)
            try:
                enum_obj = Enumeration(self.M)
                with LocalTimerContext(enum_timer):
                    with stats.context("svp", E=enum_obj):
                        solution, max_dist = enum_obj.enumerate(
                            kappa,
                            kappa + block_size,
                            radius,
                            0,
                            pruning=pruning.coefficients)
                self.svp_postprocessing(kappa, block_size, solution, stats)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            if block_size >= AUTO_MIN_BLOCK_SIZE:
                # report success probability per second so the decider can
                # tune future preprocessing block sizes
                probability_per_second = pruning.probability / (
                    preproc_timer.val + enum_timer.val + pruner_timer.val)
                self.preproc_decider.feedback(block_size, preproc_block_size,
                                              probability_per_second)

            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Пример #30
0
    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """Approx-SVP reduce the projected block starting at ``kappa``.

        Attempts (preprocess, prune, enumerate, post-process) are repeated,
        rerandomising the block after every failed enumeration, until the
        cumulative success probability reaches
        ``params.min_success_probability``.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first Gram-Schmidt norm of the block was
            not improved ("clean"), ``False`` otherwise.
        """
        self.lll_obj.size_reduction(0, kappa + 1)
        first_before, first_before_expo = self.M.get_r_exp(kappa, kappa)

        missed, shake = 1.0, False

        # NOTE: In the tail we might have less than (1+c)β space
        stop = min(ceil(kappa + (1 + self.c) * block_size), self.M.d)

        while missed > 1.0 - params.min_success_probability:
            with tracer.context("preprocessing"):
                if shake:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=params.rerandomization_density,
                            tracer=tracer,
                        )
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, stop, block_size, params,
                                           tracer=tracer)

            with tracer.context("pruner"):
                radius, exp, pruning = self.get_pruning(kappa, block_size,
                                                        params, tracer)

            try:
                enum_obj = Enumeration(self.M)
                # HACK: full=True so that *all* enumeration costs get recorded
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=True):
                    max_dist, solution = enum_obj.enumerate(
                        kappa, kappa + block_size, radius, exp,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution,
                                            tracer=tracer)
                shake = False
            except EnumerationError:
                shake = True

            missed *= 1 - pruning.expectation

        self.lll_obj.size_reduction(0, kappa + 1)
        first_after, first_after_expo = self.M.get_r_exp(kappa, kappa)

        return first_before <= first_after * 2**(first_after_expo -
                                                 first_before_expo)
Пример #31
0
    def __call__(cls,
                 M,
                 predicate,
                 squared_target_norm,
                 invalidate_cache=lambda: None,
                 target_prob=None,
                 preproc_offset=20,
                 ph=0,
                 threads=1,
                 **kwds):
        """Solve a uSVP instance with a predicate: first via BKZ-with-check,
        then by repeated (preprocess, check basis, prune, enumerate) rounds
        where every enumeration candidate is tested with ``predicate``.

        :param M: GSO matrix to reduce
        :param predicate: callable deciding whether a vector is the wanted
            solution
        :param squared_target_norm: squared norm bound for the enumeration
        :param invalidate_cache: callback run before each round
        :param target_prob: overall success probability to aim for
            (``cls.DEFAULT_TARGET_PROB`` when ``None``)
        :param preproc_offset: preprocessing block size is
            ``M.d - preproc_offset`` (clamped)
        :param ph: exponent used to rescale norms, i.e. work with
            ``r / 2**ph``
        :param threads: number of FPLLL threads to use
        :param kwds: ignored here -- presumably consumed by subclasses; verify
            against callers

        :returns: a ``USVPPredSolverResults`` instance
        """
        preproc_time = None
        ntests = 0

        if target_prob is None:
            target_prob = cls.DEFAULT_TARGET_PROB

        # cheap first attempt: plain BKZ with the predicate check
        bkz_res = usvp_pred_bkz_enum_solve(M,
                                           predicate,
                                           block_size=min(
                                               STRATEGIES_MAX_DIM, M.d),
                                           invalidate_cache=invalidate_cache,
                                           threads=threads)

        if bkz_res.success:  # this might be enough
            return bkz_res

        FPLLL.set_threads(threads)

        M.update_gso()
        bkz = BKZ2(M)
        tracer = BKZTreeTracer(bkz, root_label="enum_pred", start_clocks=True)

        remaining_probability, rerandomize, found, solution = (1.0, False,
                                                               False, None)

        while remaining_probability > 1.0 - target_prob:
            invalidate_cache()

            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        bkz.randomize_block(0, M.d, tracer=tracer, density=3)
                with tracer.context("reduction"):
                    with tracer.context("lll"):
                        bkz.lll_obj()
                    # a few BKZ tours at block size d - preproc_offset
                    for _ in range(4):
                        bkz.tour(
                            BKZ.EasyParam(min(max(M.d - preproc_offset, 2),
                                              STRATEGIES_MAX_DIM),
                                          flags=BKZ.GH_BND),
                            tracer=tracer,
                        )

            if preproc_time is None:
                preproc_time = float(
                    tracer.trace.child("preprocessing")["cputime"])

            # maybe preprocessing already surfaced the solution in the basis
            with tracer.context("check"):
                for v in M.B:
                    ntests += 1
                    if predicate(v, standard_basis=True):
                        found = True
                        solution = tuple([int(v_) for v_ in v])
                        break

            if found:
                break

            with tracer.context("pruner"):
                preproc_cost = threads * preproc_time * 2 * 10**9 / 100  # 100 cycles per node
                with SuppressStream():
                    # rescale the profile by 2**ph before pruning
                    r = []
                    for i in range(M.d):
                        r_, exp = M.get_r_exp(i, i)
                        r.append(r_ * 2**(exp - ph))
                    (cost, prob), coeffs = cls.pruning_coefficients(
                        squared_target_norm / 2**ph,
                        r,
                        preproc_cost,
                        target_prob=target_prob)

            # test every enumeration candidate against the predicate
            def callbackf(v):
                nonlocal ntests
                ntests += 1
                return predicate(v, standard_basis=False)

            enum_obj = Enumeration(M, callbackf=callbackf)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=prob,
                                full=True):
                try:
                    solutions = enum_obj.enumerate(0,
                                                   M.d,
                                                   squared_target_norm / 2**ph,
                                                   ph,
                                                   pruning=coeffs)
                    _, v = solutions[0]
                    found = True
                    # map the coefficient vector back to the standard basis
                    solution = tuple([int(v_) for v_ in M.B.multiply_left(v)])
                    break
                except EnumerationError:
                    pass

            rerandomize = True
            remaining_probability *= 1 - prob

        tracer.exit()
        FPLLL.set_threads(1)

        b0, b0e = bkz.M.get_r_exp(0, 0)

        return USVPPredSolverResults(
            success=found,
            solution=solution,
            ntests=ntests + bkz_res.ntests,
            b0=b0**(0.5) * 2**(b0e / 2.0),
            cputime=tracer.trace.data["cputime"] + bkz_res.cputime,
            walltime=tracer.trace.data["walltime"] + bkz_res.walltime,
            data=tracer.trace,
        )
0
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """One SVP reduction of the block ``[kappa, kappa + block_size)``.

        The loop preprocesses, picks a pruned enumeration radius (optionally
        capped by the Gaussian-heuristic bound), enumerates and inserts the
        result, rerandomising after failures, until the accumulated success
        probability is at least ``param.min_success_probability``.

        :param kappa: start index of the block
        :param block_size: dimension of the local SVP instance
        :param param: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first Gram-Schmidt norm of the block did
            not improve ("clean"), ``False`` otherwise.
        """
        self.lll_obj.size_reduction(0, kappa + 1)
        prev_norm, prev_expo = self.M.get_r_exp(kappa, kappa)

        to_go, needs_rerandomization = 1.0, False

        while to_go > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if needs_rerandomization:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, block_size, param,
                                           tracer=tracer)

            # enumerate within |b*_kappa|^2 scaled by the LLL delta ...
            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            # ... unless the Gaussian-heuristic bound is tighter (GH_BND flag)
            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa, kappa + block_size, radius, expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution,
                                            tracer=tracer)
                needs_rerandomization = False
            except EnumerationError:
                needs_rerandomization = True

            to_go *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa + 1)
        next_norm, next_expo = self.M.get_r_exp(kappa, kappa)

        return prev_norm <= next_norm * 2**(next_expo - prev_expo)
Пример #33
0
    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """Run SVP reduction on the projected block
        ``[kappa, kappa + block_size)``.

        Preprocessing, pruned enumeration and post-processing are repeated
        (re-randomizing the block after each failed enumeration) until the
        accumulated success probability reaches
        ``params.min_success_probability``.

        :param kappa: index of the first row of the block
        :param block_size: dimension of the block
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first basis vector of the block was not
            improved ("clean"), ``False`` otherwise.
        """
        self.lll_obj.size_reduction(0, kappa + 1)
        first_before, first_before_expo = self.M.get_r_exp(kappa, kappa)

        # Probability that no shortest vector has been found so far.
        failure_prob = 1.0
        needs_rerandomization = False
        failure_target = 1. - params.min_success_probability

        while failure_prob > failure_target:
            with tracer.context("preprocessing"):
                if needs_rerandomization:
                    # Previous enumeration failed: perturb the block
                    # (first vector excluded) before trying again.
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=params.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(
                        kappa, block_size, params, tracer=tracer)

            with tracer.context("pruner"):
                radius, pruning = self.get_pruning(
                    kappa, block_size, params, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    # Only the first (best) solution of the enumeration
                    # is used.
                    max_dist, solution = enum_obj.enumerate(
                        kappa, kappa + block_size, radius, 0,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(
                        kappa, block_size, solution, tracer=tracer)
                needs_rerandomization = False

            except EnumerationError:
                # Nothing found within the pruned radius; retry with
                # a re-randomized block.
                needs_rerandomization = True

            failure_prob *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa + 1)
        first_after, first_after_expo = self.M.get_r_exp(kappa, kappa)

        # "clean" iff the squared norm of b_kappa did not decrease.
        clean = first_before <= first_after * 2**(first_after_expo -
                                                  first_before_expo)
        return clean
    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """Run SVP reduction on the projected block
        ``[kappa, kappa + block_size)``, retrying with fresh preprocessing
        until the accumulated success probability reaches
        ``params.min_success_probability``.

        For blocks larger than ``SUBSOL_BLOCKSIZE`` the enumeration also
        collects sub-solutions, which are inserted in place of the single
        SVP post-processing step.

        :param kappa: index of the first row of the block
        :param block_size: dimension of the block
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first basis vector of the block was not
            improved ("clean"), ``False`` otherwise.
        """
        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        # `remaining_probability` is the probability that a shortest vector
        # has not been found yet; `trials` counts preprocessing rounds.
        # (The original also carried a `rerandomize` flag that was never
        # read in this method; it has been removed.)
        remaining_probability, trials = 1.0, 0

        sub_solutions = block_size > SUBSOL_BLOCKSIZE
        preproc_start = time()

        while remaining_probability > 1. - params.min_success_probability:
            with tracer.context("preprocessing"):
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       trials,
                                       tracer=tracer)
            preproc_cost = time() - preproc_start

            with tracer.context("pruner"):
                # Success probability still required from this trial so
                # that the overall target probability is met.
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)
                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            enum_obj = Enumeration(self.M, sub_solutions=sub_solutions)
            try:
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    # Include post-processing time as part of the next
                    # preprocessing round.
                    preproc_start = time()
                    if sub_solutions:
                        # `//` keeps the slice bound an int on Python 3 as
                        # well (was `/`, i.e. float division there).
                        self.insert_sub_solutions(
                            kappa, block_size,
                            enum_obj.sub_solutions[:1 + block_size // 4])
                    else:
                        self.svp_postprocessing(kappa,
                                                block_size,
                                                solution,
                                                tracer=tracer)

            except EnumerationError:
                # Nothing found within the pruned radius; retry.
                preproc_start = time()

            remaining_probability *= (1 - pruning.expectation)
            trials += 1

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        # "clean" iff the squared norm of b_kappa did not decrease.
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Example #35
0
    def svp_reduction_single(self,
                             kappa,
                             block_size,
                             params,
                             tracer=dummy_tracer):
        """Run SVP reduction on the block ``[kappa, kappa + block_size)``.

        Variant of ``svp_reduction`` that additionally (when
        ``BKZ.DUMP_GSO`` is set) backs up the basis before the loop,
        restores it afterwards with only the found vector inserted, and
        records per-tour statistics in module-level globals.

        :param kappa: index of the first row of the block
        :param block_size: dimension of the block
        :param params: BKZ parameters
        :param tracer: object for maintaining statistics

        :returns: ``True`` if the first basis vector of the block was not
            improved ("clean"), ``False`` otherwise.

        NOTE: Python 2 code (``print`` statements, integer ``/``).
        """
        print self.lll_obj.delta
        verbose = 0

        # Verbose reporting is tied to the DUMP_GSO flag.
        if (params.flags & BKZ.DUMP_GSO):
            verbose = 1

        if (verbose):
            start_time = time()
        self.M.update_gso()
        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_length = gaussian_heuristic(r)
        kappa_length = self.M.get_r(kappa, kappa)
        # Target squared length: current first vector or the Gaussian
        # heuristic for the block, whichever is smaller.
        goal = min(kappa_length, gh_length)

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)
        remaining_probability = 1.0
        # NOTE(review): `rerandomize` is assigned here but never read in
        # this method.
        rerandomize = False
        trials = 0
        # Collect enumeration sub-solutions only for large blocks.
        sub_solutions = block_size > SUBSOL_BLOCKSIZE

        # copy old lattice
        if (params.flags & BKZ.DUMP_GSO):
            A_backup = self.copy_to_IntegerMatrix_long(self.A)
            # NOTE(review): `v_old` appears unused below — confirm.
            v_old = A_backup[kappa]
            r_old = [
                log(self.M.get_r(i, i))
                for i in range(kappa, kappa + block_size)
            ]

        # main loop: retry until the accumulated success probability
        # reaches params.min_success_probability
        while remaining_probability > 1. - params.min_success_probability:

            # 1. preprocessing
            preproc_start = time()

            with tracer.context("preprocessing"):
                #self.M.update_gso()
                self.svp_preprocessing(kappa,
                                       block_size,
                                       params,
                                       trials,
                                       tracer=tracer)
            preproc_cost = time() - preproc_start

            with tracer.context("pruner"):
                # Success probability still required from this trial so
                # that the overall target probability is met.
                target = 1 - ((1. - params.min_success_probability) /
                              remaining_probability)

                radius, pruning = self.get_pruning(kappa, block_size, params,
                                                   target * 1.01, preproc_cost,
                                                   tracer)

            # 2. enum
            enum_obj = Enumeration(self.M, sub_solutions=sub_solutions)
            try:
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == params.block_size):
                    # Only the first (best) solution is used.
                    max_dist, solution = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        0,
                        pruning=pruning.coefficients)[0]

                # 3. post processing
                with tracer.context("postprocessing"):
                    preproc_start = time(
                    )  # Include post_processing time as the part of the next pre_processing

                    if not sub_solutions:
                        self.svp_postprocessing(kappa,
                                                block_size,
                                                solution,
                                                tracer=tracer)
                    if sub_solutions:
                        # NOTE(review): Python 2 integer division; a
                        # Python 3 port would need `//` for this slice.
                        self.insert_sub_solutions(
                            kappa, block_size,
                            enum_obj.sub_solutions[:1 + block_size / 4])
                    self.M.update_gso()

            except EnumerationError:
                # Nothing found within the pruned radius; retry.
                preproc_start = time()

            remaining_probability *= (1 - pruning.expectation)

            trials += 1

        # recover basis: restore the backup, then re-insert only the
        # vector found for row kappa
        if (params.flags & BKZ.DUMP_GSO):
            r_new = [
                self.M.get_r(i, i) for i in range(kappa, kappa + block_size)
            ]
            current = self.copy_to_vector_long(self.A[kappa])
            # update
            self.copy_from_IntegerMatrix_long(A_backup)
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.lll_obj = LLL.Reduction(self.M, flags=LLL.DEFAULT)
            self.insert_in_IntegerMatrix(self.A, current, kappa, block_size)
            # update again for safe
            # NOTE(review): the GSO object is rebuilt three times in a row
            # here; the repetitions look redundant — confirm intent.
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M = GSO.Mat(self.A, float_type=TYPE)
            self.M.update_gso()
            self.lll_obj = LLL.Reduction(self.M, flags=LLL.DEFAULT)
            if (not self.check_compare(A_backup, self.A, kappa, block_size)):
                print "# error exit"
                sys.exit(1)

        self.M.update_gso()
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)
        # "clean" iff the squared norm of b_kappa did not decrease.
        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)

        if (params.flags & BKZ.DUMP_GSO):
            # Record sqrt(r_kappa / gh) for this tour; `stat_update_gh` and
            # `stat_tours` are module-level globals.
            global stat_update_gh
            r_new = self.M.get_r(kappa, kappa)
            r_newlog = [
                log(self.M.get_r(i, i))
                for i in range(kappa, kappa + block_size)
            ]
            stat_update_gh[stat_tours - 1].append(
                float(sqrt(r_new / gh_length)))

        if (verbose):
            # `rank` is presumably an MPI rank defined at module level —
            # TODO confirm.
            if (rank == 0):
                kappa_length = r_new
                print "# [rank %d] kappa %d, bs %d, r %d (gh %d), time %s, trials %s " % \
                  (rank, kappa, block_size, kappa_length, goal, time()-start_time, trials)

                # Log-profile of the block, normalized by its mean,
                # before and after reduction.
                det_n = float(sum(r_old) / block_size)
                normalized_old = [(r_old[i] - det_n)
                                  for i in range(0, block_size)]
                normalized_new = [(r_newlog[i] - det_n)
                                  for i in range(0, block_size)]
                global stat_old_norm
                global stat_new_norm

                # Accumulate profile statistics only for full-size blocks.
                if (block_size == params.block_size):
                    for i in range(block_size):
                        stat_old_norm[i] = stat_old_norm[i] + normalized_old[i]
                        stat_new_norm[i] = stat_new_norm[i] + normalized_new[i]

        return clean