Example no. 1
def print_basis_stats(M, n):
    r = [M.get_r(i, i) for i in range(n)]
    gh = gaussian_heuristic(r)
    # log-ratio of the Gaussian heuristics of the two halves of the basis
    lhvr = log(gaussian_heuristic(r[:n // 2])) - log(gaussian_heuristic(r[n // 2:]))
    print("lhvr = %.4f, r[i]/gh:" % lhvr, end=" ")
    for i in range(20):
        print("{:.3f}".format(r[i] / gh), end=" ")
    print()
    return
Example no. 2
def print_basis_stats(M, n):
    r = [M.get_r(i, i) for i in range(n)]
    gh = gaussian_heuristic(r)
    lhvr = log(gaussian_heuristic(r[:n // 2])) - log(
        gaussian_heuristic(r[n // 2:]))
    print("lhvr = %.4f, r[i]/gh" % lhvr, end=" ")
    for i in range(20):
        print("%.3f" % (r[i] / gh), end=" ")
    print()
    return
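Both variants of print_basis_stats above expect a MatGSO object M with up-to-date GSO data. A minimal usage sketch, assuming a recent fpylll; the qary lattice parameters are arbitrary illustration, not taken from the examples:

from math import log
from fpylll import GSO, IntegerMatrix, LLL
from fpylll.util import gaussian_heuristic

n = 60
A = IntegerMatrix.random(n, "qary", bits=30, k=n // 2)
LLL.reduction(A)      # reduce first so the ratios r[i]/gh are meaningful
M = GSO.Mat(A)
M.update_gso()
print_basis_stats(M, n)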
Example no. 3
    def get_pruning(self,
                    kappa,
                    block_size,
                    params,
                    target,
                    preproc_cost,
                    tracer=dummy_tracer):

        # small block size
        if (block_size <= BOUND_SINGLE):
            strategy = params.strategies[block_size]
            radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
            r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
            gh_radius = gaussian_heuristic(r)
            if (params.flags & BKZ.GH_BND and block_size > 30):
                radius = min(radius, gh_radius * params.gh_factor)
            return radius, strategy.get_pruning(radius, gh_radius)

        # large block size
        else:
            radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
            r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
            gh_radius = gaussian_heuristic(r)
            radius = min(radius, gh_radius * params.gh_factor)
            preproc_cost += .001
            if block_size <= GRADIENT_BLOCKSIZE:
                pruning = prune(radius,
                                NPS[block_size] * preproc_cost, [r],
                                target,
                                flags=0)
            else:
                try:
                    #pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target)
                    #pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                    #                    metric="solutions", float_type="dd",
                    #                    flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
                    pruning = Pruning.run(radius,
                                          NPS[block_size] * preproc_cost, [r],
                                          0.1,
                                          flags=Pruning.NELDER_MEAD
                                          | Pruning.GRADIENT,
                                          float_type="double")
                except Exception:
                    pruning = prune(radius,
                                    NPS[block_size] * preproc_cost, [r],
                                    target,
                                    flags=0)
            return radius, pruning
Example no. 4
    def svp_call(self, kappa, block_size, params, stats=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param stats: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = gaussian_heuristic(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            E = Enumeration(self.M)
            solution, max_dist = E.enumerate(kappa, kappa + block_size, max_dist, expo)
            stats.current_tour["enum nodes"] += E.get_nodes()
        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist:
            return None
        else:
            return solution
Example no. 5
    def enum(self, M, k, target_prob, preproc_time):
        b = self.b

        r = [M.get_r(i, i) for i in range(k, k + b)]
        radius = r[0] * .99
        gh_radius = gaussian_heuristic(r)
        if b > 30:
            radius = min(radius, 1.1 * gh_radius)

        if b < YOLO_PRUNER_MIN_BLOCK_SIZE:
            return radius, self.strategy.get_pruning(radius, gh_radius)

        R = tuple([M.get_r(i, i) for i in range(k, k + b)])
        overhead = (preproc_time + RESTART_PENALTY) * NODE_PER_SEC
        start_from = self.last_prunings
        pruning = prune(radius,
                        overhead,
                        target_prob, [R],
                        descent_method="gradient",
                        precision=53,
                        start_from=start_from)
        self.last_prunings = pruning.coefficients
        self.proba = (self.proba * YOLO_MEMORY_LENGTH) + pruning.expectation
        self.proba /= YOLO_MEMORY_LENGTH + 1
        return radius, pruning
Example no. 6
    def enum(self, M, k, target_prob, preproc_time):
        b = self.b

        radius = M.get_r(k, k) * .99
        root_det = M.get_root_det(k, k + b - 1)
        gh_radius, ge = gaussian_heuristic(radius, 0, b, root_det, 1.)
        if b > 30:
            radius = min(radius, 1.21 * gh_radius * 2**ge)

        if b < YOLO_PRUNER_MIN_BLOCK_SIZE:
            return radius, self.strategy.get_pruning(radius, gh_radius * 2**ge)

        R = tuple([M.get_r(i, i) for i in range(k, k + b)])
        overhead = (preproc_time + RESTART_PENALTY) * NODE_PER_SEC
        start_from = self.last_prunings
        pruning = prune(radius,
                        overhead,
                        target_prob, [R],
                        descent_method="gradient",
                        precision=53,
                        start_from=start_from)
        self.last_prunings = pruning.coefficients
        self.proba = (self.proba * YOLO_MEMORY_LENGTH) + pruning.probability
        self.proba /= YOLO_MEMORY_LENGTH + 1
        return radius, pruning
Example no. 7
    def decide_enumeration(self,
                           kappa,
                           block_size,
                           param,
                           stats=None,
                           preproc_time=0.1,
                           target_probability=.5):

        radius = self.M.get_r(kappa, kappa)
        root_det = self.M.get_root_det(kappa, kappa + block_size)
        gh_radius, ge = gaussian_heuristic(radius, 0, block_size, root_det,
                                           1.0)

        if block_size < AUTO_MIN_BLOCK_SIZE:
            strategy = param.strategies[block_size]
            return radius, strategy.get_pruning(radius, gh_radius * 2**ge)
        else:
            with stats.context("pruner"):
                R = [
                    self.M.get_r(i, i)
                    for i in range(kappa, kappa + block_size)
                ]
                overhead = preproc_time * AUTO_NODE_PER_SEC
                start_from = self.last_pruning[block_size]
                pruning = prune(radius,
                                overhead,
                                target_probability, [R],
                                descent_method="gradient",
                                precision=53,
                                start_from=start_from)
                self.last_pruning[block_size] = pruning.coefficients
                return radius, pruning
Example no. 8
    def get_pruning(self,
                    kappa,
                    block_size,
                    params,
                    target,
                    preproc_cost,
                    tracer=dummy_tracer):
        radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
        if block_size < 30:
            return radius, PruningParams(4., ())

        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_radius = gaussian_heuristic(r)
        if (params.flags & BKZ.GH_BND and block_size > 30):
            radius = min(radius, gh_radius * params.gh_factor)

        if block_size <= GRADIENT_BLOCKSIZE:
            pruning = prune(radius,
                            NPS[block_size] * preproc_cost, [r],
                            target,
                            flags=0)
        else:
            while True:
                try:
                    pruning = prune(radius, NPS[block_size] * preproc_cost,
                                    [r], target)
                    break
                except Exception:
                    # pruning failed: inflate the assumed preprocessing cost
                    # and retry with a larger enumeration budget
                    preproc_cost = 2 * preproc_cost + .01

        return radius, pruning
Example no. 9
def enum_trial(bkz_obj, preproc_cost, gh_factor=1.1):
    n = bkz_obj.A.nrows

    r = [bkz_obj.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)
    radius = max(r[0] * .99, gh * gh_factor)
    PRUNE_START = time()
    pruning = prune(radius,
                    NPS * preproc_cost, [r],
                    10,
                    metric="solutions",
                    float_type="dd",
                    flags=Pruning.GRADIENT)
    PRUNE_TIME = time() - PRUNE_START
    print "Pruning time %.4f" % PRUNE_TIME
    ENUM_START = time()
    enum_obj = Enumeration(bkz_obj.M, sub_solutions=True)

    try:
        print "Enum  ... (Expecting %.5f solutions)" % (pruning.expectation),
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
    except EnumerationError:
        pass

    ENUM_TIME = time() - ENUM_START
    print " \t\t\t\t\t\t TIME = %.2f" % ENUM_TIME

    zeros = 0
    print "subsolutions : r[i]/gh",
    for (a, b) in enum_obj.sub_solutions[:20]:
        print "%.3f" % abs(a / gh),

    insert_sub_solutions(bkz_obj, enum_obj.sub_solutions[:n / 4])
    return
Example no. 10
def svp_enum(bkz, params, goal):
    n = bkz.M.d
    r = [bkz.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)

    rerandomize = False
    while bkz.M.get_r(0, 0) > goal:
        if rerandomize:
            bkz.randomize_block(0, n)
        bkz.svp_preprocessing(0, n, params)

        strategy = params.strategies[n]
        radius = goal
        pruning = strategy.get_pruning(goal, gh)

        try:
            enum_obj = Enumeration(bkz.M)
            max_dist, solution = enum_obj.enumerate(
                0, n, radius, 0, pruning=pruning.coefficients)[0]
            bkz.svp_postprocessing(0, n, solution, tracer=dummy_tracer)
            rerandomize = False
        except EnumerationError:
            rerandomize = True

        bkz.lll_obj()

    return
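A hedged driver sketch for svp_enum above; the 1.05**2 * gh goal follows the convention of the SVP-challenge examples below, and the parameter choices (DEFAULT_STRATEGY, min_success_probability) are assumptions rather than part of the original snippet:

from fpylll import BKZ as fplll_bkz
from fpylll import IntegerMatrix
from fpylll.algorithms.bkz2 import BKZReduction
from fpylll.util import gaussian_heuristic

n = 50
A = IntegerMatrix.random(n, "qary", bits=30, k=n // 2)
bkz = BKZReduction(A)
bkz.lll_obj()
r = [bkz.M.get_r(i, i) for i in range(n)]
goal = 1.05**2 * gaussian_heuristic(r)   # aim slightly above the Gaussian heuristic
params = fplll_bkz.Param(block_size=n,
                         strategies=fplll_bkz.DEFAULT_STRATEGY,
                         min_success_probability=.01)
svp_enum(bkz, params, goal)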
Example no. 11
def svp_kernel_trial(arg0, params=None, seed=None, goal_r0=None):
    # Pool.map only supports a single parameter
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0

    params = copy.copy(params)
    dim4free_dec = params.pop("workout/dim4free_dec")
    pump_params = pop_prefixed_params("pump", params)
    challenge_seed = params.pop("challenge_seed")

    A, _ = load_svpchallenge_and_randomize(n, s=challenge_seed, seed=seed)
    g6k = Siever(A, params, seed=seed)
    tracer = SieveTreeTracer(g6k,
                             root_label=("svp-challenge", n),
                             start_clocks=True)

    gh = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(n)])
    ds = list(range(0, n - 40, dim4free_dec))[::-1] + 10 * [0]

    if goal_r0 is None:
        goal_r0 = 1.1 * gh

    for d in ds:
        workout(g6k,
                tracer,
                0,
                n,
                dim4free_dec=dim4free_dec,
                goal_r0=goal_r0 * 1.001,
                pump_params=pump_params)

    tracer.exit()
    return int(g6k.M.get_r(0, 0)), gh
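The arg0 unpacking at the top of svp_kernel_trial is the usual workaround for Pool.map accepting only one argument per job. A self-contained toy sketch of the same pattern (all names here are illustrative):

from multiprocessing import Pool

def kernel(arg0, params=None, seed=None):
    # Pool.map passes a single object per job, so (n, params, seed) is packed
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0
    return n, params["block_size"], seed

if __name__ == "__main__":
    jobs = [(n, {"block_size": 20}, seed) for n, seed in [(40, 0), (42, 1)]]
    with Pool(2) as pool:
        print(pool.map(kernel, jobs))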
Example no. 12
def usvp_pred_solve(A,
                    predicate,
                    squared_target_norm,
                    invalidate_cache=lambda: None,
                    solver=None,
                    **kwds):
    """
    Solve uSVP with predicate.

    Given a uSVP instance ``A`` with ``predicate`` and a target of ``squared_target_norm``, solve
    this instance using ``solver``.

    :param A: An ``IntegerMatrix`` or a ``MatGSO`` object
    :param predicate: a predicate (this will inject ``M`` into its global namespace)
    :param squared_target_norm: the squared norm of the target
    :param invalidate_cache: a callable to invalidate caches for the predicate.
    :param solver: uSVP with predicate solver to use.

    """
    from g6k import Siever

    try:
        solver = solvers[solver]
    except KeyError:
        pass

    try:
        A.update_gso()
        M = A
    except AttributeError:
        M = Siever.MatGSO(A)
        M.update_gso()

    predicate.__globals__["M"] = M

    if solver is None:
        cost, block_size = usvp_pred_bkz_enum_solve.estimate(
            M, squared_target_norm)
        if cost:  # HACK
            if block_size >= 70:
                solver_name = "bkz-sieve"
            else:
                solver_name = "bkz-enum"
        else:
            gh = gaussian_heuristic(M.r())
            if M.d < 40 or squared_target_norm / gh > 4 / 3.0:
                solver_name = "enum_pred"
            else:
                solver_name = "sieve_pred"
        solver = solvers[solver_name]
        logging.debug(
            "% solving with {solver_name}".format(solver_name=solver_name))

    aux_kwds = kwds
    kwds = solver.parametersf(M, squared_target_norm)
    kwds.update(aux_kwds)

    logging.debug("% solving with {kwds}".format(kwds=kwds))

    return solver(M, predicate, invalidate_cache=invalidate_cache, **kwds)
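A hedged call sketch for usvp_pred_solve, exercising only the parameters its docstring documents. The predicate body and its standard_basis keyword are placeholders (assumptions, not confirmed API), and the target norm is taken from the current first basis vector purely so the call is well-typed:

from fpylll import IntegerMatrix

A = IntegerMatrix.random(40, "qary", bits=20, k=20)

def predicate(v, standard_basis=True):   # assumed signature; accepts everything
    return True

squared_target_norm = sum(x * x for x in A[0])
res = usvp_pred_solve(A, predicate, squared_target_norm)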
Example no. 13
def svp_improve_trial(filename, bs):
    A, _ = pickle.load(open(filename, 'rb'))
    n = A.nrows
    bkz = BKZReduction(A)

    BKZ_START = time()
    bkz.lll_obj()
    r = [bkz.M.get_r(i, i) for i in range(n)]
    print "*********************************************************"
    print "# Run with BS = %d" % bs
    print "# [ File", filename, "]", "before BKZ",
    print_basis_stats(bkz.M, n)
    gh = gaussian_heuristic(r)

    llbs = []
    for lbs in range(30, bs - 10, 2) + [bs]:
        llbs.append(lbs)
        params = fplll_bkz.Param(block_size=lbs, max_loops=1,
                                 min_success_probability=.01)
        bkz(params=params)
        bkz.lll_obj()
    print "# progressive block_sizes = ", llbs

    r = [bkz.M.get_r(i, i) for i in range(n)]

    BKZ_TIME = time() - BKZ_START
    print "# [ File", filename, "]", "after BKZ",
    print ("BKZ-[%d .. %d]  ... \t\t "%(30, bs)),
    print_basis_stats(bkz.M, n)
    print ("# [BKZ] TIME = %.2f"%BKZ_TIME)
    success = enum_trial(bkz, BKZ_TIME, r[0]*.99)
    print
    pickle.dump((A, success), open(filename, 'wb'))
    return success
Example no. 14
def enum_trial(bkz, preproc_cost, radius):
    n = bkz.A.nrows

    r = [bkz.M.get_r(i, i) for i in range(0, n)]       
    gh = gaussian_heuristic(r)

    PRUNE_START = time()
    NPS = 2**24
    pruning = prune(radius, NPS * preproc_cost, [r], 10, 
                    metric="solutions", float_type="dd",
                    flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
    PRUNE_TIME = time() - PRUNE_START    
    ENUM_START = time()
    enum_obj = Enumeration(bkz.M, sub_solutions=True)
    success = False
    try:        
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        success = True
    except EnumerationError:
        pass
    print ("# [Prune] time %.4f"%PRUNE_TIME)

    ENUM_TIME = time() - ENUM_START
    print ("# [Enum]  (Expecting %.5f solutions)"%(pruning.expectation)),
    print (", TIME = %.2f"%ENUM_TIME)
    """
    print ("# subsolutions : r[i]/gh"),
    for (a, b) in enum_obj.sub_solutions:
        print ("%.3f"%abs(a/gh)),
    print 
    """
    insert_sub_solutions(bkz, enum_obj.sub_solutions)    
    return success
Example no. 15
    def get_pruning(self, kappa, block_size, param, stats=None):
        strategy = param.strategies[block_size]

        radius, re = self.M.get_r_exp(kappa, kappa)
        root_det = self.M.get_root_det(kappa, kappa + block_size)
        gh_radius, ge = gaussian_heuristic(radius, re, block_size, root_det, 1.0)
        return strategy.get_pruning(radius * 2**re, gh_radius * 2**ge)
Example no. 16
def asvp_kernel(arg0, params=None, seed=None):
    logger = logging.getLogger('asvp')

    # Pool.map only supports a single parameter
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0

    params = copy.copy(params)

    load_matrix = params.pop("load_matrix")
    pump_params = pop_prefixed_params("pump", params)
    workout_params = pop_prefixed_params("workout", params)
    verbose = params.pop("verbose")
    if verbose:
        workout_params["verbose"] = True
    challenge_seed = params.pop("challenge_seed")

    if load_matrix is None:
        A, _ = load_svpchallenge_and_randomize(n, s=challenge_seed, seed=seed)
        if verbose:
            print("Loaded challenge dim %d" % n)
    else:
        A, _ = load_matrix_file(load_matrix)
        if verbose:
            print("Loaded file '%s'" % load_matrix)

    g6k = Siever(A, params, seed=seed)
    tracer = SieveTreeTracer(g6k, root_label=("svp-challenge", n), start_clocks=True)

    gh = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(n)])

    flast = workout(g6k, tracer, 0, n, pump_params=pump_params, **workout_params)

    tracer.exit()
    stat = tracer.trace

    f = workout_params["dim4free_min"]
    gh2 = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(f, n)])
    quality = (gh * (n - f)) / (gh2 * n)

    stat.data["quality"] = quality

    print(g6k.M.B, file=sys.stderr)

    return tracer.trace
Example no. 17
def asvp_kernel(arg0, params=None, seed=None):
    logger = logging.getLogger('asvp')

    # Pool.map only supports a single parameter
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0

    params = copy.copy(params)

    load_matrix = params.pop("load_matrix")
    pump_params = pop_prefixed_params("pump", params)
    workout_params = pop_prefixed_params("workout", params)
    verbose = params.pop("verbose")
    if verbose:
        workout_params["verbose"] = True
    challenge_seed = params.pop("challenge_seed")

    if load_matrix is None:
        A, _ = load_svpchallenge_and_randomize(n, s=challenge_seed, seed=seed)
        if verbose:
            print(("Loaded challenge dim %d" % n))
    else:
        A, _ = load_matrix_file(load_matrix)
        if verbose:
            print(("Loaded file '%s'" % load_matrix))

    g6k = Siever(A, params, seed=seed)
    tracer = SieveTreeTracer(g6k,
                             root_label=("svp-challenge", n),
                             start_clocks=True)

    gh = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(n)])
    goal_r0 = (1.05**2) * gh
    if verbose:
        print(("gh = %f, goal_r0/gh = %f, r0/gh = %f" %
               (gh, goal_r0 / gh, sum([x * x for x in A[0]]) / gh)))

    flast = workout(g6k,
                    tracer,
                    0,
                    n,
                    goal_r0=goal_r0,
                    pump_params=pump_params,
                    **workout_params)

    tracer.exit()
    stat = tracer.trace
    stat.data["flast"] = flast

    if verbose:
        logger.info("sol %d, %s" % (n, A[0]))

    norm = sum([x * x for x in A[0]])
    if verbose:
        logger.info("norm %.1f ,hf %.5f" % (norm**.5, (norm / gh)**.5))

    return tracer.trace
Example no. 18
def asvp(AA, max_bs, gh_factor):
    n = AA.nrows
    A = IntegerMatrix_to_long(AA)
    bkz = BKZReduction(A)
    bkz.lll_obj()
    bkz.randomize_block(0, n, density=n // 4)
    bkz.lll_obj()
    r = [bkz.M.get_r(i, i) for i in range(n)]
    gh = gaussian_heuristic(r)

    max_bs -= 2 * randint(0, 4)
    bs = max_bs - 20

    trials = 0
    while r[0] > gh * gh_factor:
        r = [bkz.M.get_r(i, i) for i in range(n)]

        print()
        BKZ_START = time()
        # print_basis_stats(bkz.M, n)
        for lbs in list(range(30, bs - 10, 2)) + [bs]:
            params = fplll_bkz.Param(
                block_size=lbs, max_loops=1,
                min_success_probability=.01)  #, flags=fplll_bkz.BOUNDED_LLL)
            bkz(params=params)
            bkz.lll_obj()
        r = [bkz.M.get_r(i, i) for i in range(n)]
        BKZ_TIME = time() - BKZ_START
        print("BKZ-[%d .. %d]  ... \t\t " % (30, bs), end=" ")
        print("  \t\t\t\t\t\t\t TIME = %.2f" % BKZ_TIME)
        print_basis_stats(bkz.M, n)

        enum_trial(bkz, BKZ_TIME, gh_factor=gh_factor)
        print()
        r = [bkz.M.get_r(i, i) for i in range(n)]
        gh = gaussian_heuristic(r)
        trials += 1
        bs = min(bs + 2, max_bs)

    print("Finished !")
    print_basis_stats(bkz.M, n)
    print("\n\n SOLUTION %d:" % n)
    print(A[0])

    return trials
Example no. 19
    def get_pruning(self, kappa, block_size, params, tracer=dummy_tracer):
        strategy = params.strategies[block_size]
        radius = 2 ** self.r[kappa]
        gh_radius = gaussian_heuristic([2 ** r_ for r_ in self.r[kappa : kappa + block_size]])

        if params.flags & BKZ.GH_BND and block_size > 30:
            radius = min(radius, gh_radius)  # HACK

        return radius, strategy.get_pruning(radius, gh_radius)
Example no. 20
    def get_pruning(self, kappa, block_size, params, tracer=dummy_tracer):
        strategy = params.strategies[block_size]
        radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_radius = gaussian_heuristic(r)

        if (params.flags & BKZ.GH_BND and block_size > 30):
            radius = min(radius, gh_radius * params.gh_factor)

        return radius, strategy.get_pruning(radius, gh_radius)
Example no. 21
File: bkz2.py  Project: fplll/fpylll
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """

        :param kappa:
        :param block_size:
        :param params:
        :param tracer:

        """

        self.lll_obj.size_reduction(0, kappa+1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(kappa+1, kappa+block_size,
                                             density=param.rerandomization_density, tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, block_size, param, tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.probability,
                                    full=block_size==param.block_size):
                    solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, radius, expo,
                                                            pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution, tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.probability)

        self.lll_obj.size_reduction(0, kappa+1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
Example no. 22
def basis_quality(M):
    r"""
    Return a dictionary with various expressions of quality of the basis corresponding to ``M``.

    Let `|b_i^*|` be the norm of the `i`-th Gram-Schmidt vector.  Let `Λ` be the lattice spanned by
    the basis of dimension `d`.

        - ``r_0`` - `|b_0|^2`

        - ``/`` - the slope of `\log(|b_i^*|)`

        - ``rhf`` - the root-Hermite factor `(|b_0|/\Vol(Λ)^{1/d})^{1/d}`, also written as
          `\delta_0`

        - ``hv/hv`` - the d-th root of the ratio of the first and second half-volumes, i.e. the
          d-th root of `∏_{i=0}^{d/2-1} |b_i^*| / ∏_{i=d/2}^{d-1} |b_i^*|`.  If `d` is odd, the
          length `|b_{d//2}^*|` is ignored.

        - ``r_0/gh`` - `|b_0|^2/GH^2` where `GH = Γ(d/2+1)^{1/d}/π^{1/2} ⋅ \Vol(Λ)^{1/d}` is the
          Gaussian Heuristic for the length of the shortest vector.

    :param M: A MatGSO object.

    :example:

        >>> from fpylll import IntegerMatrix, GSO, LLL, set_random_seed
        >>> set_random_seed(1337)
        >>> A = IntegerMatrix.random(100, "qary", bits=30, k=50)
        >>> _ = LLL.reduction(A)
        >>> M = GSO.Mat(A); _ = M.update_gso()

        >>> from fpylll.tools.quality import basis_quality
        >>> from fpylll.tools.bkz_stats import pretty_dict
        >>> str(pretty_dict(basis_quality(M)))
        '{"r_0":   2^34.0,  "r_0/gh": 9.389811,  "rhf": 1.020530,  "/": -0.08550,  "hv/hv": 2.940943}'

    """

    d = M.d

    ret = OrderedDict()

    r = [M.get_r(i, i) for i in range(d)]

    log_volume = sum(log(r_) / 2 for r_ in r)

    lhs = sum(log(r_) / 2 for r_ in r[:d // 2])
    rhs = sum(log(r_) / 2 for r_ in r[d // 2 + (d % 2):])

    ret["r_0"] = r[0]
    ret["r_0/gh"] = r[0] / gaussian_heuristic(r)
    ret["rhf"] = exp((log(r[0]) / 2.0 - log_volume / d) / d)
    ret['/'] = M.get_current_slope(0, d)
    ret["hv/hv"] = exp((lhs - rhs) / d)

    return ret
Example no. 23

    def svp_reduction_mpi_trial_enum(self, bkz_sub, preproc_cost, radius, kappa, block_size):
        verbose = False
        bkz_sub.M.update_gso()
        r = [bkz_sub.M.get_r(i, i) for i in range(kappa, kappa+block_size)]
        r_old = r[0]
        gh = gaussian_heuristic(r)
        PRUNE_START = time()
        try:
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                                metric="solutions", float_type="mpfr",
                                flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)
            """
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 0.0001,
                                metric="probability", float_type="mpfr",
                                flags=Pruning.GRADIENT|Pruning.NELDER_MEAD)            
            """
        except:
            return False, -1, 0, 0, 0
        PRUNE_TIME = time() - PRUNE_START
        ENUM_START = time()
        enum_obj = Enumeration(bkz_sub.M, sub_solutions=True)
        success = False
        length = -1
        #print radius, pruning.coefficients
        estimate_cost = sum(pruning.detailed_cost) / NPS[block_size]
        try:        
            enum_obj.enumerate(kappa, kappa+block_size, radius, 0, pruning=pruning.coefficients)
            length = enum_obj.sub_solutions[0][0]
            success = True
        except EnumerationError:
            pass

        ENUM_TIME = time() - ENUM_START
        if verbose:
            print("# [Prune] time %.4f" % PRUNE_TIME)
            print("# [Enum]  (Expecting %.5f solutions)" % pruning.expectation, end="")
            print(", TIME = %.2f" % ENUM_TIME)
        # for (a, b) in enum_obj.sub_solutions[:20]:
        #     print("%.3f" % abs(a / gh), end=" ")
        # print()
        bkz_sub.M.update_gso()
        # A_old = deepcopy(bkz_sub.A)
        bkz_sub.insert_sub_solutions(kappa, block_size,
                                     enum_obj.sub_solutions[:1 + block_size // 4])
        #print self.compare(A_old, bkz_sub.A)
        bkz_sub.M.update_gso()
        r_new = bkz_sub.M.get_r(kappa, kappa)
        if (r_new < r_old):
            success = True
            length = r_new
        
        return success, length, PRUNE_TIME, ENUM_TIME, estimate_cost
Example no. 24
    def get_pruning(self, kappa, block_size, params, tracer=dummy_tracer):
        strategy = params.strategies[block_size]
        radius, re = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta
        r = [self.M.get_r_exp(i, i) for i in range(kappa, kappa + block_size)]
        gh_radius = gaussian_heuristic([x for x, _ in r])
        ge = float(sum([y for _, y in r])) / len(r)

        if (params.flags & BKZ.GH_BND and block_size > 30):
            radius = min(radius, gh_radius * 2**(ge - re) * params.gh_factor)

        return radius, re, strategy.get_pruning(radius,
                                                gh_radius * 2**(ge - re))
Example no. 25
def test_pruner():

    # A dummy Pruning.run to load tabulated values
    Pruning.run(5, 50, 10*[1.], .5)

    for (n, overhead) in dim_oh:

        print(" \n ~~~~ Dim %d \n" % n)

        M = prepare(n)
        r = [M.get_r(i, i) for i in range(n)]

        print(" \n GREEDY")
        radius = gaussian_heuristic(r) * 1.6
        print("pre-greedy radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.ZEALOUS, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GRADIENT \n")

        print("radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.GRADIENT, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n HYBRID \n")

        print("radius %.4e" % radius)
        tt = process_time()
        pruning = Pruning.run(radius, overhead, r, 200, flags=Pruning.ZEALOUS, metric="solutions")
        print("Time %.4e"%(process_time() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2
Example no. 26
def cost_kernel(arg0,
                preproc=None,
                strategies=None,
                costs=None,
                float_type=None):
    """
    Compute pruning coefficients after preprocessing and return estimated cost.

    :param arg0: either a tuple containing all arguments or r (squared Gram-Schmidt vectors)
    :param preproc: preprocessing parameters
    :param strategies: reduction strategies
    :param costs: precomputed costs for smaller dimensions
    :param float_type: float type to use in pruner

    :returns: cost and strategy

    ..  note :: the unusual arrangement with ``arg0`` is to support ``Pool.map`` which only
        supports one input parameter.
    """
    from cost import preprocess

    if (preproc is None and strategies is None and costs is None
            and float_type is None):
        r, preproc, strategies, costs, float_type = arg0
    else:
        r = arg0

    d = len(r)

    r, preproc_cost = preprocess(r, preproc, strategies, costs, max_loops=1)

    gh = gaussian_heuristic(r)
    target_norm = 1.05**2 * gh

    pruner = Pruning.Pruner(
        target_norm,
        preproc_cost,
        [r],
        target=1,
        metric=Pruning.EXPECTED_SOLUTIONS,
        float_type=float_type,
    )
    coefficients = pruner.optimize_coefficients([1.0] * d)
    cost = {
        "total cost": preproc_cost + pruner.repeated_enum_cost(coefficients),
        "single enum": pruner.single_enum_cost(coefficients),
        "preprocessing block size": preproc,
        "preprocessing": preproc_cost,
        "probability": svp_probability(coefficients, float_type=float_type),
    }
    return cost
Example no. 27
    def __init__(self, n, d, gso):
        self.n = n
        self.lib = ctypes.cdll.LoadLibrary("./SubSieveLib.so")
        self.d = d
        self.r = [gso.get_r(i, i) for i in range(self.n)]
        self.gh = gaussian_heuristic(self.r[d:])
        self.gs = zeros((self.n, self.n), dtype=float64)
        for i in range(self.n):
            self.gs[i][i] = gso.get_r(i, i)
            for j in range(i):
                self.gs[i][j] = gso.get_mu(i, j)

        self.lib.initialize(self.n, self.d, c_double_ptr(self.gs),
                            ctypes.c_double(self.gh))
Example no. 28
File: pbkz.py  Project: fplll/fpylll
    def parallel_svp_reduction_worker(self, kappa, block_size, params, rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: if ``True``, rerandomize the block before preprocessing

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self, verbosity=params.flags & BKZ.VERBOSE, root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1, kappa + block_size, density=params.rerandomization_density, tracer=tracer
                    )
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context(
                "enumeration", enum_obj=enum_obj, probability=pruning.probability, full=block_size == params.block_size
            ):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, radius, expo, pruning=pruning.coefficients
                )[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution = None

        return solution, tracer.trace, pruning.probability
Example no. 29
def find_norm_kernel_trial(arg0, params=None, seed=None, goal_r0=None):
    # Pool.map only supports a single parameter
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0

    params = copy.copy(params)
    dim4free_dec = params.pop("workout/dim4free_dec")
    pump_params = pop_prefixed_params("pump", params)
    load_matrix = params.pop("load_matrix")
    verbose = params.pop("verbose")

    A, _ = load_matrix_file(load_matrix,
                            randomize=True,
                            seed=None,
                            float_type="double")

    if A.nrows != n:
        raise ValueError(
            f"wrong dimension:: Expected dim(A) = {A.nrows}, got n = {n}")

    g6k = Siever(A, params, seed=seed)
    tracer = SieveTreeTracer(g6k,
                             root_label=("svp-challenge", n),
                             start_clocks=True)

    gh = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(n)])
    ds = list(range(0, n - 40, dim4free_dec))[::-1] + 10 * [0]

    if goal_r0 is None:
        goal_r0 = 1.1 * gh

    if verbose and n < 90:
        verbose = False

    for d in ds:
        workout(g6k,
                tracer,
                0,
                n,
                dim4free_dec=dim4free_dec,
                goal_r0=goal_r0 * 1.001,
                pump_params=pump_params,
                verbose=verbose)

    tracer.exit()
    return int(g6k.M.get_r(0, 0)), gh
Example no. 30
def test_gh():
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        M = GSO.Mat(A, float_type="ld")
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(2000*radius, 0, n, root_det, 1.0)

        gh1 = gh_radius * 2**ge

        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        assert abs(gh1/gh2 -1) < 0.01
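The two quantities compared by test_gh are the Gaussian heuristic computed via two fpylll code paths. For reference, the heuristic is GH(Λ) = Γ(d/2+1)^{1/d}/√π · Vol(Λ)^{1/d}; below is a minimal sketch evaluating its square directly from the squared Gram-Schmidt norms, as an illustration of the definition rather than fpylll's implementation:

from math import exp, lgamma, log, pi

def gh_squared(r):
    # r holds squared Gram-Schmidt norms, so sum(log(r_i))/2 = log Vol(Λ)
    d = len(r)
    log_vol = sum(log(r_) for r_ in r) / 2.0
    return exp(2.0 * (lgamma(d / 2.0 + 1) / d - log(pi) / 2 + log_vol / d))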
Example no. 31
def svpchallenge_test():
    dim = 60
    A_pre = IntegerMatrix.from_file("svpchallenge/svpchallengedim%dseed0.txt" % dim)
    print("# input dim: ", dim)
    print("# nrows: ", A_pre.nrows)
    ASVP_START = time()
    LLL.reduction(A_pre)
    A = IntegerMatrix.from_matrix(A_pre, int_type="long")
    bkz = BKZReduction(A)
    bkz.lll_obj()
    r = [bkz.M.get_r(i, i) for i in range(dim)]
    goal = (1.05)**2 * gaussian_heuristic(r)
    params = fplll_bkz.Param(block_size=20, max_loops=1,
                             min_success_probability=.01)
    bkz(params=params)
    print("# BKZ done")
Example no. 32
    def select_index(self, block_size, min_row, max_row):
        self.M.update_gso()
        r = self.M.r()
        maxv, maxi = -1, 0  # default to the first index if no block exceeds the bound
        for k in range(min_row, max_row - block_size - 1):
            # current vector
            k_length = self.M.get_r(k, k)
            gh = gaussian_heuristic(r[k:k + block_size]) * 1.1
            v = sqrt(k_length) / sqrt(gh)
            if v > maxv:
                maxv, maxi = v, k

        if (maxv < 1):
            maxi = 0

        return maxi
Example no. 33
def svpchallenge_par3(bs_diff=10, cores=2, start_dim=80, end_dim=80 + 2, BS_RANDOM_RANGE=10):
    for dim in range(start_dim, start_dim + 2, 2):
        A_pre = IntegerMatrix.from_file("svpchallenge/svpchallengedim%dseed0.txt" % dim)
        print("# input dim: ", dim)
        print("# nrows: ", A_pre.nrows)
        ASVP_START = time()
        LLL.reduction(A_pre)
        A = IntegerMatrix.from_matrix(A_pre, int_type="long")
        bkz = BKZReduction(A)
        bkz.lll_obj()
        r = [bkz.M.get_r(i, i) for i in range(dim)]
        goal = (1.05)**2 * gaussian_heuristic(r)
        bs_ulim = dim - bs_diff
        interacting_parrallel_asvp(A, bs_ulim, goal, cores, BS_RANDOM_RANGE)
        ASVP_TIME = time() - ASVP_START

        print("\nSUMMARY", {"input dim": dim, "bs_range": (bs_ulim - BS_RANDOM_RANGE, bs_ulim), "time": ASVP_TIME})
Example no. 34
def test_gh():
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        M = GSO.Mat(A, float_type="ld")
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(2000 * radius, 0, n,
                                                  root_det, 1.0)

        gh1 = gh_radius * 2**ge

        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        assert abs(gh1 / gh2 - 1) < 0.01
Example no. 35
def asvp_kernel(arg0, params=None, seed=None):
    # Pool.map only supports a single parameter
    if params is None and seed is None:
        n, params, seed = arg0
    else:
        n = arg0

    params = copy.copy(params)

    load_matrix = params.pop("load_matrix")
    goal_r0__gh = params.pop('goal_r0__gh')
    pump_params = pop_prefixed_params("pump", params)
    workout_params = pop_prefixed_params("workout", params)
    verbose = params.pop("verbose")
    if verbose:
        workout_params["verbose"] = True

    A, _ = load_matrix_file(load_matrix, randomize=False, seed=None, float_type="double")
    if verbose:
        print("Loaded file '%s'" % load_matrix)

    g6k = Siever(A, params, seed=seed)
    tracer = SieveTreeTracer(g6k, root_label=("svp-challenge", n), start_clocks=True)

    gh = gaussian_heuristic([g6k.M.get_r(i, i) for i in range(n)])
    goal_r0 = (goal_r0__gh**2) * gh
    if verbose:
        print("gh = %f, goal_r0/gh = %f, r0/gh = %f" %
              (gh, goal_r0 / gh, sum([x * x for x in A[0]]) / gh))

    flast = workout(g6k, tracer, 0, n, goal_r0=goal_r0, **workout_params)

    tracer.exit()
    stat = tracer.trace
    sol = tuple(A[0])
    stat.data["flast"] = flast
    tracer.trace.data['res'] = A

    if verbose:
        print(f"svp: sol {sol}")

    norm = sum([x*x for x in sol])
    if verbose:
        print("svp: norm %.1f ,hf %.5f" % (norm**.5, (norm/gh)**.5))

    return tracer.trace
Example no. 36
File: bkz.py  Project: fplll/fpylll
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = gaussian_heuristic(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration", enum_obj=enum_obj, probability=1.0):
                solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1<<expo):
            return None
        else:
            return solution
Example no. 37
def test_pruner():

    # A dummy prune to load tabulated values
    prune(5, 50, .5, 10*[1.])

    for (n, overhead) in dim_oh:

        print(" \n ~~~~ Dim %d \n" % n)

        M = prepare(n)
        r = [M.get_r(i, i) for i in range(n)]

        print(" \n GREEDY")
        radius = gaussian_heuristic(r) * 1.6
        print("pre-greedy radius %.4e" % radius)
        tt = clock()
        (radius, pruning) = prune(radius, overhead, 200, r,
                                  descent_method="greedy", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GREEDY \n")
        print("pre-greedy radius %.4e" % radius)
        tt = clock()
        (radius, pruning) = prune(radius, overhead, 200, r, descent_method="greedy", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print("post-greedy radius %.4e" % radius)
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n GRADIENT \n")

        print("radius %.4e" % radius)
        tt = clock()
        pruning = prune(radius, overhead, 200, r, descent_method="gradient", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2

        print(" \n HYBRID \n")

        print("radius %.4e" % radius)
        tt = clock()
        pruning = prune(radius, overhead, 200, r, descent_method="hybrid", metric="solutions")
        print("Time %.4e"%(clock() - tt))
        print(pruning)
        print("cost %.4e" % sum(pruning.detailed_cost))
        solutions = Enumeration(M, nr_solutions=10000).enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        print(len(solutions))
        assert len(solutions)/pruning.expectation < 2
        assert len(solutions)/pruning.expectation > .2