Example #1
    def __init__(self, A, tuners=None, recycle=True):
        """Construct a new instance of the BKZ algorithm.

        :param A: an integer matrix, a GSO object or an LLL object

        """
        self.recycle = recycle
        if isinstance(A, LLL.Reduction):
            L, M, B = A, A.M, A.M.B
        elif isinstance(A, GSO.Mat):
            L, M, B = None, A, A.B
        elif isinstance(A, IntegerMatrix):
            L, M, B = None, None, A
        else:
            raise TypeError(
                "type of A must be in {IntegerMatrix, GSO.Mat, LLL.Reduction}, but got type '%s'"
                % type(A))

        if M is None and L is None:
            wrapper = LLL.Wrapper(B)
            wrapper()
        if M is None:
            M = GSO.Mat(B, flags=GSO.ROW_EXPO)
        if L is None:
            L = LLL.Reduction(M, flags=LLL.DEFAULT)

        self.lll_obj, self.M, self.A = L, M, B
        self.lll_obj()

        if tuners is None:
            self.tuners = [Tuner(b) for b in range(YOLO_MAX_BLOCK_SIZE)]
        else:
            self.tuners = tuners
        self.tracer = BKZTreeTracer(self, verbosity=True)
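
A minimal usage sketch for this constructor (assuming the surrounding YoloBKZ class from Example #7 and its dependencies are importable, and that fpylll is installed); it shows the two most common ways of passing `A`:

from fpylll import IntegerMatrix, GSO

A = IntegerMatrix.random(40, "qary", bits=20, k=20)   # random q-ary lattice basis
bkz = YoloBKZ(A)                     # IntegerMatrix: GSO and LLL objects are built internally

M = GSO.Mat(A, flags=GSO.ROW_EXPO)
bkz = YoloBKZ(M)                     # GSO.Mat: only the LLL.Reduction object is built internally
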
Example #2
File: bkz3.py Project: malb/yolo
    def __call__(self, params, min_row=0, max_row=-1):
        """Run the BKZ algorithm with parameters `param`.

        :param params: BKZ parameters
        :param min_row: start processing in this row
        :param max_row: stop processing in this row (exclusive)

        """
        self.ith_block = 0
        tracer = BKZTreeTracer(self, verbosity=params.bkz_param.flags & BKZ.VERBOSE, start_clocks=True)
        self.params = params

        # ``etas`` is assumed to be a module-level sequence of 20 LLL eta values (not shown in this snippet)
        self.lll_objs = 20*[None]
        for i in range(20):
            eta = etas[i]
            self.lll_objs[i] = LLL.Reduction(self.M, flags=LLL.DEFAULT, eta=eta)

        auto_abort = BKZ.AutoAbort(self.M, self.A.nrows)  # needed for the BKZ.AUTO_ABORT check below
        cputime_start = time.process_time()

        self.M.discover_all_rows()
        with tracer.context("lll"):
            for i in range(20):
                self.lll_objs[i]()

        if params.rampup:
            with tracer.context("rampup", -1):
                self.preprocessing(params.bkz_param.block_size, min_row, max_row, start=10, step=1, tracer=tracer)

        i = 0
        self.ith_tour = 0
        while True:
            with tracer.context("tour", i):
                self.ith_block = 0
                self.ith_tour += 1
                clean = self.tour(params.bkz_param, min_row, max_row, tracer=tracer, top_level=True)
            print "proba %.4f" % self.tuners[params.bkz_param.block_size].proba
            # for x in sorted(self.tuners[params.bkz_param.block_size].data.keys()):
            #    try:
            #        print x, "\t %d \t %.2f " % (self.tuners[params.bkz_param.block_size].counts[x], self.tuners[params.bkz_param.block_size].data[x])
            #    except:
            #        pass
            print()
            i += 1
            if (not clean) or params.bkz_param.block_size >= self.A.nrows:
                break
            if (params.bkz_param.flags & BKZ.AUTO_ABORT) and auto_abort.test_abort():
                break
            if (params.bkz_param.flags & BKZ.MAX_LOOPS) and i >= params.bkz_param.max_loops:
                break
            if (params.bkz_param.flags & BKZ.MAX_TIME) and time.process_time() - cputime_start >= params.bkz_param.max_time:
                break

        self.trace = tracer.trace
        return clean
Example #3
    def parallel_svp_reduction_worker(self, kappa, block_size, params,
                                      rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: if ``True``, rerandomize the block before preprocessing

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self,
                               verbosity=params.flags & BKZ.VERBOSE,
                               root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1,
                        kappa + block_size,
                        density=params.rerandomization_density,
                        tracer=tracer)
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size,
                                                     root_det,
                                                     params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=pruning.expectation,
                                full=block_size == params.block_size):
                solution, max_dist = enum_obj.enumerate(
                    kappa,
                    kappa + block_size,
                    radius,
                    expo,
                    pruning=pruning.coefficients)[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution, max_dist = None, None

        return solution, max_dist, tracer.trace, pruning.expectation
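
The Gaussian-heuristic clamping of the enumeration radius used above can be reproduced in isolation with the public fpylll helpers; a small sketch (the matrix, indices and the gh_factor of 1.1 are made up for illustration):

from fpylll import IntegerMatrix, GSO
from fpylll.util import adjust_radius_to_gh_bound

A = IntegerMatrix.random(60, "qary", bits=30, k=30)
M = GSO.Mat(A, flags=GSO.ROW_EXPO)
M.update_gso()

kappa, block_size = 0, 40
radius, expo = M.get_r_exp(kappa, kappa)              # squared length of b*_kappa, with exponent
root_det = M.get_root_det(kappa, kappa + block_size)
# clamp the radius to gh_factor * GH(block), as the worker does when BKZ.GH_BND is set
radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size, root_det, 1.1)
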
Example #4
    def __call__(self, params, min_row=0, max_row=-1):
        """Run the BKZ algorithm with parameters `param`.

        :param params: BKZ parameters
        :param min_row: start processing in this row
        :param max_row: stop processing in this row (exclusive)

        """
        tracer = BKZTreeTracer(self,
                               verbosity=params.flags & BKZ.VERBOSE,
                               start_clocks=True)

        auto_abort = BKZ.AutoAbort(self.M, self.A.nrows)
        cputime_start = time.process_time()

        with tracer.context("lll"):
            self.lll_obj()

        i = 0
        while True:
            with tracer.context("tour", i):
                self.tour(params, min_row, max_row, tracer, top_level=True)
            i += 1
            if params.block_size >= self.A.nrows:
                break
            if auto_abort.test_abort():
                break
            if (params.flags & BKZ.MAX_LOOPS) and i >= params.max_loops:
                break
            if (params.flags & BKZ.MAX_TIME) and time.process_time() - cputime_start >= params.max_time:
                break

        tracer.exit()
        self.trace = tracer.trace
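
A minimal usage sketch for this variant, where `Reducer` stands for whichever class defines this __call__ together with the constructor from Example #1 (the name is a placeholder, not part of the listed project), and fpylll is assumed to be installed:

from fpylll import IntegerMatrix, BKZ

A = IntegerMatrix.random(80, "qary", bits=30, k=40)
reducer = Reducer(A)                     # placeholder class name, see above
params = BKZ.Param(block_size=30, max_loops=8,
                   flags=BKZ.VERBOSE | BKZ.AUTO_ABORT | BKZ.MAX_LOOPS)
reducer(params)
print(reducer.trace)                     # the tracer's timing tree is stored on self.trace
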
Example #5
def test_bkz_postprocessing():
    A = IntegerMatrix.random(20, "qary", bits=20, k=10, int_type="long")
    LLL.reduction(A)

    bkz = BKZ(A)
    bkz.M.update_gso()
    tracer = BKZTreeTracer(bkz)

    solution = (2, 2, 0, 3, 4, 5, 7)

    v = A.multiply_left(solution, 3)
    bkz.svp_postprocessing(3, len(solution), solution, tracer)
    w = tuple(A[3])
    assert v == w

    solution = (2, 1, 0, 3, 4, 5, 7)

    v = A.multiply_left(solution, 3)
    bkz.svp_postprocessing(3, len(solution), solution, tracer)
    w = tuple(A[3])
    assert v == w
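
The assertions above hinge on what `multiply_left` computes: the integer combination of basis rows starting at the given offset, which `svp_postprocessing` then inserts at that row. A small standalone check of just that helper (fpylll only, no BKZ object involved):

from fpylll import IntegerMatrix

A = IntegerMatrix.random(20, "qary", bits=20, k=10, int_type="long")
coeffs = (2, 2, 0, 3, 4, 5, 7)

# multiply_left(x, start) returns x[0]*A[start] + x[1]*A[start+1] + ...
v = A.multiply_left(coeffs, 3)

rows = [list(A[3 + i]) for i in range(len(coeffs))]
w = [sum(c * row[j] for c, row in zip(coeffs, rows)) for j in range(A.ncols)]
assert list(v) == w
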
Example #6
    def parallel_svp_reduction_worker(self, kappa, block_size, params, rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: if ``True``, rerandomize the block before preprocessing

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self, verbosity=params.flags & BKZ.VERBOSE, root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1, kappa + block_size, density=params.rerandomization_density, tracer=tracer
                    )
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = gaussian_heuristic(radius, expo, block_size, root_det, params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context(
                "enumeration", enum_obj=enum_obj, probability=pruning.probability, full=block_size == params.block_size
            ):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, radius, expo, pruning=pruning.coefficients
                )[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution = None

        return solution, tracer.trace, pruning.probability
Example #7
class YoloBKZ(object):
    def __init__(self, A, tuners=None, recycle=True):
        """Construct a new instance of the BKZ algorithm.

        :param A: an integer matrix, a GSO object or an LLL object

        """
        self.recycle = recycle
        if isinstance(A, LLL.Reduction):
            L, M, B = A, A.M, A.M.B
        elif isinstance(A, GSO.Mat):
            L, M, B = None, A, A.B
        elif isinstance(A, IntegerMatrix):
            L, M, B = None, None, A
        else:
            raise TypeError(
                "type of A must be in {IntegerMatrix, GSO.Mat, LLL.Reduction}, but got type '%s'"
                % type(A))

        if M is None and L is None:
            wrapper = LLL.Wrapper(B)
            wrapper()
        if M is None:
            M = GSO.Mat(B, flags=GSO.ROW_EXPO)
        if L is None:
            L = LLL.Reduction(M, flags=LLL.DEFAULT)

        self.lll_obj, self.M, self.A = L, M, B
        self.lll_obj()

        if tuners is None:
            self.tuners = [Tuner(b) for b in range(YOLO_MAX_BLOCK_SIZE)]
        else:
            self.tuners = tuners
        self.tracer = BKZTreeTracer(self, verbosity=True)

    def tour(self, b, target_prob=0.5, begin=0, end=None):
        if end is None:
            end = self.M.d

        for k in range(begin, end - 2):
            tmp_b = min(b, end - k)
            self.svp_reduce(k, tmp_b, target_prob)

    def preprocess(self, k, b, preprocessing):
        begin = k
        end = k + b
        with self.tracer.context("lll"):
            self.lll_obj.size_reduction(k, k + b, k)
            self.lll_obj(k, k, k + b)
        for preproc_b in preprocessing:
            if preproc_b > 2:
                self.tour(preproc_b, .5, begin, end)

    def filter_hints(self, hints):
        return [v for v in hints if sum([x * x for x in v]) > 1.5]

    def insert(self, k, b, solution, hints=[]):
        M = self.M

        if (solution is not None) and len(hints) == 0:
            nonzero_vectors = len([x for x in solution if x])
            if nonzero_vectors == 1:
                first_nonzero_vector = None
                for i in range(b):
                    if abs(solution[i]) == 1:
                        first_nonzero_vector = i
                        break

                M.move_row(k + first_nonzero_vector, k)
                with self.tracer.context("lll"):
                    self.lll_obj.size_reduction(k,
                                                k + first_nonzero_vector + 1)
                return 1

        if solution is not None:
            vectors = [solution] + hints
        else:
            if len(hints) == 0:
                return 0
            vectors = hints
        l = len(vectors)

        for vector in vectors:
            M.create_row()
            with M.row_ops(M.d - 1, M.d):
                for i in range(b):
                    M.row_addmul(M.d - 1, k + i, vector[i])

        for i in reversed(range(l)):
            M.move_row(M.d - 1, k)

        with self.tracer.context("postproc"):
            self.lll_obj(k, k, k + b + l)

        for i in range(l):
            M.move_row(k + b, M.d - 1)
            M.remove_last_row()

        return l

    def randomize(self, min_row, max_row, density=0):
        """Randomize basis between from ``min_row`` and ``max_row`` (exclusive)

            1. permute rows

            2. apply lower triangular matrix with coefficients in -1,0,1

            3. LLL reduce result

        :param min_row: start in this row
        :param max_row: stop at this row (exclusive)
        :param density: number of non-zero coefficients in the lower triangular transformation matrix
        """
        if max_row - min_row < 2:
            return  # there is nothing to do

        # 1. permute rows
        niter = 4 * (max_row - min_row)  # some guestimate
        with self.M.row_ops(min_row, max_row):
            for i in range(niter):
                b = a = randint(min_row, max_row - 1)
                while b == a:
                    b = randint(min_row, max_row - 1)
                self.M.move_row(b, a)

        # 2. triangular transformation matrix with coefficients in -1,0,1
        with self.M.row_ops(min_row, max_row):
            for a in range(min_row, max_row - 2):
                for i in range(density):
                    b = randint(a + 1, max_row - 1)
                    s = randint(0, 1)
                    self.M.row_addmul(a, b, 2 * s - 1)

        return

    def enum(self, k, b, radius, pruning, for_hints=False):
        solutions = []
        try:
            if self.recycle:
                enum_obj = Enumeration(self.M, b // 2)
            else:
                enum_obj = Enumeration(self.M, 1)
            if pruning is None:
                with self.tracer.context("enumeration",
                                         enum_obj=enum_obj,
                                         probability=1.):
                    solutions = enum_obj.enumerate(k, k + b, radius, 0)
            else:
                with self.tracer.context("enumeration",
                                         enum_obj=enum_obj,
                                         probability=pruning.expectation):
                    solutions = enum_obj.enumerate(
                        k, k + b, radius, 0, pruning=pruning.coefficients)
            return [sol for (sol, _) in solutions[0:]]
        except EnumerationError:
            return None, []

    def svp_reduce(self, k, b, target_prob, stop_at_gh=None):

        timer = Timer()
        rem_prob, inserted = 1.0, 1
        M = self.M

        while rem_prob > 1. - target_prob:
            tmp_target_prob = 1.01 * (target_prob - 1) / rem_prob + 1.01

            # if inserted == 0:
            #     with self.tracer.context("randomize"):
            #         self.randomize(k+1, k+b)

            with self.tracer.context("preprocessing"):
                preprocessing = self.tuners[b].preprocess()
                self.preprocess(k, b, preprocessing)

            with self.tracer.context("pruner"):
                radius, pruning = self.tuners[b].enum(M, k, tmp_target_prob,
                                                      timer.elapsed())
            solutions = self.enum(k, b, radius, pruning)
            solution = solutions[0]
            if solution is None:
                hints = []
            else:
                hints = solutions[1:]

            if pruning is None:
                rem_prob = 0
            else:
                rem_prob *= (1 - pruning.expectation)

            # radius, pruning = self.tuner.enum_for_hints(M, k, b, timer.elapsed())
            # if radius>0:
            #     hints += self.enum(k, b, radius, pruning, for_hints=False)
            # hints = self.filter_hints(hints)[:b/2]

            self.tuners[b].feedback(preprocessing, pruning, timer.elapsed())
            timer.reset()
            with self.tracer.context("postprocessing"):
                inserted = self.insert(k, b, solution, hints)

    def __call__(self, b, tours=8):
        self.M.discover_all_rows()

        for i in range(tours):
            print()
            with self.tracer.context("tour", i):
                self.tour(b)
            print "proba %.4f" % self.tuners[b].proba,
            i += 1
            # best = max(self.tuners[b].data, key=self.tuners[b].data.get)
            for x in sorted(self.tuners[b].data.keys()):
                try:
                    print x, "\t %d \t %.2f " % (self.tuners[b].counts[x],
                                                 self.tuners[b].data[x])
                except:
                    pass
            print()
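
Finally, a hedged end-to-end sketch of driving this class (assuming the module defining YoloBKZ, Tuner and YOLO_MAX_BLOCK_SIZE is importable, fpylll is installed, and the block size stays below YOLO_MAX_BLOCK_SIZE):

from fpylll import IntegerMatrix

A = IntegerMatrix.random(100, "qary", bits=30, k=50)
yolo = YoloBKZ(A)          # constructor shown at the top of this class (and in Example #1)
yolo(b=40, tours=4)        # four tours at block size 40; per-tour statistics are printed
print(yolo.tracer.trace)   # timing tree collected by the BKZTreeTracer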