def get_pruning(self, kappa, block_size, params, target, preproc_cost, tracer=dummy_tracer):
    radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
    if block_size < 30:
        return radius, PruningParams(4., ())

    r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
    gh_radius = gaussian_heuristic(r)
    if (params.flags & BKZ.GH_BND) and block_size > 30:
        radius = min(radius, gh_radius * params.gh_factor)

    if not (block_size > GRADIENT_BLOCKSIZE):
        pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
    else:
        # The pruner can fail for very small cost budgets; retry with an
        # inflated preprocessing cost until it succeeds.
        while True:
            try:
                pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target)
                break
            except Exception:
                preproc_cost = 2 * preproc_cost + .01
    return radius, pruning
def get_pruning(self, kappa, block_size, params, target, preproc_cost, tracer=dummy_tracer):
    # small block size: use the precomputed strategy
    if block_size <= BOUND_SINGLE:
        strategy = params.strategies[block_size]
        radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_radius = gaussian_heuristic(r)
        if (params.flags & BKZ.GH_BND) and block_size > 30:
            radius = min(radius, gh_radius * params.gh_factor)
        return radius, strategy.get_pruning(radius, gh_radius)
    # large block size: optimise pruning coefficients on the fly
    else:
        radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
        r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
        gh_radius = gaussian_heuristic(r)
        radius = min(radius, gh_radius * params.gh_factor)
        preproc_cost += .001
        if not (block_size > GRADIENT_BLOCKSIZE):
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
        else:
            try:
                # pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target)
                # pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                #                 metric="solutions", float_type="dd",
                #                 flags=Pruning.GRADIENT | Pruning.NELDER_MEAD)
                pruning = Pruning.run(radius, NPS[block_size] * preproc_cost, [r], 0.1,
                                      flags=Pruning.NELDER_MEAD | Pruning.GRADIENT,
                                      float_type="double")
            except Exception:
                pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
        return radius, pruning
def enum_trial(bkz_obj, preproc_cost, gh_factor=1.1):
    n = bkz_obj.A.nrows
    r = [bkz_obj.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)
    radius = max(r[0] * .99, gh * gh_factor)

    PRUNE_START = time()
    pruning = prune(radius, NPS * preproc_cost, [r], 10,
                    metric="solutions", float_type="dd", flags=Pruning.GRADIENT)
    PRUNE_TIME = time() - PRUNE_START
    print("Pruning time %.4f" % PRUNE_TIME)

    ENUM_START = time()
    enum_obj = Enumeration(bkz_obj.M, sub_solutions=True)
    try:
        print("Enum ... (Expecting %.5f solutions)" % pruning.expectation, end="")
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
    except EnumerationError:
        pass
    ENUM_TIME = time() - ENUM_START
    print(" \t\t\t\t\t\t TIME = %.2f" % ENUM_TIME)

    print("subsolutions : r[i]/gh", end=" ")
    for (a, b) in enum_obj.sub_solutions[:20]:
        print("%.3f" % abs(a / gh), end=" ")
    print()

    insert_sub_solutions(bkz_obj, enum_obj.sub_solutions[:n // 4])
    return
def enum_trial(bkz, preproc_cost, radius):
    n = bkz.A.nrows
    r = [bkz.M.get_r(i, i) for i in range(0, n)]
    gh = gaussian_heuristic(r)

    PRUNE_START = time()
    NPS = 2**24
    pruning = prune(radius, NPS * preproc_cost, [r], 10,
                    metric="solutions", float_type="dd",
                    flags=Pruning.GRADIENT | Pruning.NELDER_MEAD)
    PRUNE_TIME = time() - PRUNE_START

    ENUM_START = time()
    enum_obj = Enumeration(bkz.M, sub_solutions=True)
    success = False
    try:
        enum_obj.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
        success = True
    except EnumerationError:
        pass
    ENUM_TIME = time() - ENUM_START

    print("# [Prune] time %.4f" % PRUNE_TIME)
    print("# [Enum]  (Expecting %.5f solutions)" % pruning.expectation, end="")
    print(", TIME = %.2f" % ENUM_TIME)

    # print("# subsolutions : r[i]/gh", end=" ")
    # for (a, b) in enum_obj.sub_solutions:
    #     print("%.3f" % abs(a / gh), end=" ")
    # print()

    insert_sub_solutions(bkz, enum_obj.sub_solutions)
    return success
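# Hypothetical driver for enum_trial above (not part of the original code). It assumes
# this module already imports prune, Pruning, Enumeration, EnumerationError and time,
# and defines insert_sub_solutions; the dimension, bit size and preproc_cost below are
# illustrative values only.
from fpylll import IntegerMatrix, LLL
from fpylll.algorithms.bkz2 import BKZReduction as BKZ2
from fpylll.util import gaussian_heuristic

A = IntegerMatrix.random(60, "qary", bits=30, k=30)
LLL.reduction(A)
bkz = BKZ2(A)
bkz.M.update_gso()
r = [bkz.M.get_r(i, i) for i in range(A.nrows)]
radius = 1.1 * gaussian_heuristic(r)
found = enum_trial(bkz, preproc_cost=2.0, radius=radius)
print("enumeration found a vector within the radius:", found)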
def svp_reduction_mpi_trial_enum(self, bkz_sub, preproc_cost, radius, kappa, block_size):
    verbose = 0
    bkz_sub.M.update_gso()
    r = [bkz_sub.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
    r_old = r[0]
    gh = gaussian_heuristic(r)

    PRUNE_START = time()
    try:
        pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 10,
                        metric="solutions", float_type="mpfr",
                        flags=Pruning.GRADIENT | Pruning.NELDER_MEAD)
        # alternative: optimise for success probability instead of number of solutions
        # pruning = prune(radius, NPS[block_size] * preproc_cost, [r], 0.0001,
        #                 metric="probability", float_type="mpfr",
        #                 flags=Pruning.GRADIENT | Pruning.NELDER_MEAD)
    except Exception:
        return False, -1, 0, 0, 0
    PRUNE_TIME = time() - PRUNE_START

    ENUM_START = time()
    enum_obj = Enumeration(bkz_sub.M, sub_solutions=True)
    success = False
    length = -1
    # print(radius, pruning.coefficients)
    estimate_cost = sum(pruning.detailed_cost) / NPS[block_size]
    try:
        enum_obj.enumerate(kappa, kappa + block_size, radius, 0, pruning=pruning.coefficients)
        length = enum_obj.sub_solutions[0][0]
        success = True
    except EnumerationError:
        pass
    ENUM_TIME = time() - ENUM_START

    if verbose:
        print("# [Prune] time %.4f" % PRUNE_TIME)
        print("# [Enum]  (Expecting %.5f solutions)" % pruning.expectation, end="")
        print(", TIME = %.2f" % ENUM_TIME)
        # for (a, b) in enum_obj.sub_solutions[:20]:
        #     print("%.3f" % abs(a / gh), end=" ")
        # print()

    bkz_sub.M.update_gso()
    # A_old = deepcopy(bkz_sub.A)
    bkz_sub.insert_sub_solutions(kappa, block_size, enum_obj.sub_solutions[:1 + block_size // 4])
    # print(self.compare(A_old, bkz_sub.A))
    bkz_sub.M.update_gso()

    r_new = bkz_sub.M.get_r(kappa, kappa)
    if r_new < r_old:
        success = True
        length = r_new
    return success, length, PRUNE_TIME, ENUM_TIME, estimate_cost
def get_pruning(self, kappa, block_size, params, target, preproc_cost, tracer=dummy_tracer):
    radius = self.M.get_r(kappa, kappa) * self.lll_obj.delta
    r = [self.M.get_r(i, i) for i in range(kappa, kappa + block_size)]
    gh_radius = gaussian_heuristic(r)
    if (params.flags & BKZ.GH_BND) and block_size > 30:
        radius = min(radius, gh_radius * params.gh_factor)
    preproc_cost += .001

    if not (block_size > GRADIENT_BLOCKSIZE):
        pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
    else:
        try:
            pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target)
        except Exception:
            # fall back to Nelder-Mead only, then to flags=0 if the pruner keeps failing
            try:
                pruning = Pruning.run(radius, NPS[block_size] * preproc_cost, [r], 0.1,
                                      flags=Pruning.NELDER_MEAD, float_type="double")
            except Exception:
                pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
    return radius, pruning
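# Sketch only (an assumption mirroring fpylll's BKZ2.svp_reduction pattern, not code from
# this file) of how the (radius, pruning) pair returned by the get_pruning variants above
# is typically consumed; svp_postprocessing is fpylll's own helper.
def svp_reduction_sketch(self, kappa, block_size, params, target, preproc_cost, tracer=dummy_tracer):
    radius, pruning = self.get_pruning(kappa, block_size, params, target,
                                       preproc_cost, tracer=tracer)
    enum_obj = Enumeration(self.M)
    try:
        # enumerate returns a list of (squared length, coefficient vector) pairs
        max_dist, solution = enum_obj.enumerate(kappa, kappa + block_size, radius, 0,
                                                pruning=pruning.coefficients)[0]
        self.svp_postprocessing(kappa, block_size, solution, tracer=tracer)
        return True
    except EnumerationError:
        return False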
def bench_enumeration(n):
    """Return number of nodes visited and wall time for enumeration in dimension `n`.

    :param n: dimension
    :returns: nodes, wall time
    """
    A = IntegerMatrix.random(n, "qary", bits=5 * n, k=1)
    M = MatGSO(A)
    L = LLLReduction(M)
    L(0, 0, n)

    radius = M.get_r(0, 0) * .999
    pruning = prune(radius, 2**30, 0.9, M)

    enum = Enumeration(M)
    t = time()
    enum.enumerate(0, n, radius, 0, pruning=pruning.coefficients)
    t = time() - t
    cost = enum.get_nodes()
    return cost, t
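# Hypothetical driver for bench_enumeration above: sweep a few small dimensions and
# report the enumeration rate. The dimension range is illustrative, not from the source.
if __name__ == "__main__":
    for n in range(40, 52, 2):
        nodes, wall = bench_enumeration(n)
        print("n=%3d  nodes=%12d  time=%6.3fs  rate=%.2e nodes/s" % (n, nodes, wall, nodes / wall))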
radius = M.get_r(kappa, kappa) * 0.99
r = [M.get_r(i, i) for i in range(kappa, kappa + block_size)]
gh_radius = gaussian_heuristic(r)
radius = min(radius, gh_radius * params.gh_factor)
# pruning = strategy.get_pruning(radius, gh_radius)

preproc_cost = 2**20
total_cost_all = 0.0

# keep re-randomising until the accumulated success probability reaches the target
while remaining_probability > (1. - params.min_success_probability):
    # while True:
    target = 1 - ((1. - params.min_success_probability) / remaining_probability)
    print(target)
    pruning = prune(radius, NPS[block_size] * preproc_cost, [r], target, flags=0)
    # print(radius, pruning.coefficients)
    print(pruning.detailed_cost)
    # estimate_cost = sum(pruning.detailed_cost) / NPS[block_size]
    estimate_cost = sum(pruning.detailed_cost)
    print(estimate_cost)
    probability = pruning.expectation
    total_cost = (estimate_cost + preproc_cost) / probability
    print(preproc_cost, estimate_cost, total_cost, probability)
    total_cost_all += total_cost
    remaining_probability *= (1 - pruning.expectation)
    print(remaining_probability, params.min_success_probability)

print(total_cost_all)
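# Hedged sanity check (added, not from the script above): with a fixed per-trial success
# probability p, the re-randomisation loop runs until (1 - p)**k <= 1 - p_min, i.e. for
# roughly k = ceil(log(1 - p_min) / log(1 - p)) trials.
from math import ceil, log

def expected_rounds(p, p_min):
    """Number of trials needed to push the overall success probability to p_min."""
    return int(ceil(log(1 - p_min) / log(1 - p)))

print(expected_rounds(0.1, 0.99))   # ~44 trials at 10% success per trial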