Example #1
File: bkz2.py  Project: gbonnoron/fpylll
    def get_pruning(self, kappa, block_size, param, tracer=dummy_tracer):
        strategy = param.strategies[block_size]

        radius, re = self.M.get_r_exp(kappa, kappa)
        root_det = self.M.get_root_det(kappa, kappa + block_size)
        gh_radius, ge = adjust_radius_to_gh_bound(radius, re, block_size, root_det, 1.0)
        return strategy.get_pruning(radius * 2**re, gh_radius * 2**ge)
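The two values handed to strategy.get_pruning above are the current enumeration radius and the Gaussian-heuristic radius; the 2**re and 2**ge factors undo the exponents returned by get_r_exp and adjust_radius_to_gh_bound. Below is a minimal standalone sketch of the same query; the random q-ary basis and the block size are illustrative choices, not taken from the examples above.

from fpylll import IntegerMatrix, LLL, GSO, BKZ
from fpylll.util import adjust_radius_to_gh_bound

A = IntegerMatrix.random(60, "qary", k=30, bits=20)
LLL.reduction(A)
M = GSO.Mat(A)
M.update_gso()

# giving a strategy file to BKZ.Param loads a list of per-block-size strategies
param = BKZ.Param(block_size=40, strategies=BKZ.DEFAULT_STRATEGY)
kappa, block_size = 0, 40

radius, re = M.get_r_exp(kappa, kappa)
root_det = M.get_root_det(kappa, kappa + block_size)
gh_radius, ge = adjust_radius_to_gh_bound(radius, re, block_size, root_det, 1.0)

strategy = param.strategies[block_size]
pruning = strategy.get_pruning(radius * 2**re, gh_radius * 2**ge)
print(pruning.expectation)   # success probability of a pruned enumeration run
print(pruning.coefficients)  # bounding function later handed to Enumeration.enumerate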
Example #2
    def parallel_svp_reduction_worker(self, kappa, block_size, params,
                                      rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: if ``True``, rerandomize the block before preprocessing

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self,
                               verbosity=params.flags & BKZ.VERBOSE,
                               root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(
                        kappa + 1,
                        kappa + block_size,
                        density=params.rerandomization_density,
                        tracer=tracer)
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size,
                                                     root_det,
                                                     params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=pruning.expectation,
                                full=block_size == params.block_size):
                max_dist, solution = enum_obj.enumerate(
                    kappa,
                    kappa + block_size,
                    radius,
                    expo,
                    pruning=pruning.coefficients)[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution, max_dist = None, None

        return solution, max_dist, tracer.trace, pruning.expectation
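The postprocessing step above maps the enumeration output, which is given as coordinates with respect to the block vectors A[kappa], ..., A[kappa + block_size - 1], to a row vector in the canonical basis. A tiny illustration of multiply_left with made-up numbers:

from fpylll import IntegerMatrix

A = IntegerMatrix.from_matrix([[2, 0, 0],
                               [0, 3, 0],
                               [0, 0, 5]])
v = (1, -1, 2)
# multiply_left computes sum_i v[i] * A[start + i]
print(A.multiply_left(v, start=0))  # (2, -3, 10)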
Example #3
File: bkz2.py  Project: zhli271828/fpylll
    def get_pruning(self, kappa, block_size, param, tracer=dummy_tracer):
        strategy = param.strategies[block_size]

        radius, re = self.M.get_r_exp(kappa, kappa)
        root_det = self.M.get_root_det(kappa, kappa + block_size)
        gh_radius, ge = adjust_radius_to_gh_bound(radius, re, block_size,
                                                  root_det, 1.0)
        return strategy.get_pruning(radius * 2**re, gh_radius * 2**ge)
Example #4
File: bkz2.py  Project: gbonnoron/fpylll
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """

        :param kappa: current index
        :param block_size: block size
        :param param: BKZ parameters
        :param tracer: object for maintaining statistics

        """

        self.lll_obj.size_reduction(0, kappa+1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(kappa+1, kappa+block_size,
                                             density=param.rerandomization_density, tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa, block_size, param, tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size==param.block_size):
                    solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, radius, expo,
                                                            pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa, block_size, solution, tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa+1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean
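svp_reduction is not usually called by hand; the BKZ 2.0 tour loop invokes it once per block. A minimal driver sketch, using fpylll's BKZReduction class from fpylll.algorithms.bkz2 and an illustrative random q-ary basis:

from fpylll import IntegerMatrix, LLL, BKZ
from fpylll.algorithms.bkz2 import BKZReduction

A = IntegerMatrix.random(80, "qary", k=40, bits=20)
LLL.reduction(A)

params = BKZ.Param(block_size=30,
                   strategies=BKZ.DEFAULT_STRATEGY,
                   flags=BKZ.GH_BND | BKZ.AUTO_ABORT)
BKZReduction(A)(params)  # each tour calls svp_reduction(kappa, block_size, params, tracer) per block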
Example #5
def test_gh():
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        M = GSO.Mat(A, float_type="ld")
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(2000*radius, 0, n, root_det, 1.0)

        gh1 = gh_radius * 2**ge

        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        assert abs(gh1/gh2 - 1) < 0.01
Example #6
File: pbkz.py  Project: gbonnoron/fpylll
    def parallel_svp_reduction_worker(self, kappa, block_size, params, rerandomize):
        """
        One SVP reduction, typically called in a worker process after forking.

        :param kappa: current index
        :param block_size: block size
        :param params: BKZ parameters
        :param rerandomize: if ``True``, rerandomize the block before preprocessing

        """
        # we create a new tracer object to report back our timings to the calling process
        tracer = BKZTreeTracer(self, verbosity=params.flags & BKZ.VERBOSE, root_label="svp")

        with tracer.context("preprocessing"):
            if rerandomize:
                with tracer.context("randomization"):
                    self.randomize_block(kappa+1, kappa+block_size,
                                         density=params.rerandomization_density, tracer=tracer)
            with tracer.context("reduction"):
                self.svp_preprocessing(kappa, block_size, params, tracer)

        radius, expo = self.M.get_r_exp(kappa, kappa)
        radius *= self.lll_obj.delta

        if params.flags & BKZ.GH_BND and block_size > 30:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            radius, expo = adjust_radius_to_gh_bound(radius, expo, block_size, root_det, params.gh_factor)

        pruning = self.get_pruning(kappa, block_size, params, tracer)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=pruning.expectation,
                                full=block_size==params.block_size):
                solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, radius, expo,
                                                        pruning=pruning.coefficients)[0]
            with tracer.context("postprocessing"):
                # we translate our solution to the canonical basis because our basis is not
                # necessarily the basis of the calling process at this point
                solution = self.A.multiply_left(solution, start=kappa)

        except EnumerationError:
            solution, max_dist = None, None

        return solution, max_dist, tracer.trace, pruning.expectation
Example #7
File: test_util.py  Project: isuruf/fpylll
def test_gh():
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        M = GSO.Mat(A, float_type="ld")
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(2000 * radius, 0, n,
                                                  root_det, 1.0)

        gh1 = gh_radius * 2**ge

        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        assert abs(gh1 / gh2 - 1) < 0.01
Example #8
File: bkz.py  Project: MatthiasMi/fpylll
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa + block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo,
                                                       block_size, root_det,
                                                       params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration",
                                enum_obj=enum_obj,
                                probability=1.0):
                solution, max_dist = enum_obj.enumerate(
                    kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1 << expo):
            return None
        else:
            return solution
Example #9
File: bkz.py  Project: gbonnoron/fpylll
    def svp_call(self, kappa, block_size, params, tracer=None):
        """Call SVP oracle

        :param kappa: current index
        :param params: BKZ parameters
        :param block_size: block size
        :param tracer: object for maintaining statistics

        :returns: Coordinates of SVP solution or ``None`` if none was found.

        ..  note::

            ``block_size`` may be smaller than ``params.block_size`` for the last blocks.
        """
        max_dist, expo = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = self.lll_obj.delta * max_dist

        if params.flags & BKZ.GH_BND:
            root_det = self.M.get_root_det(kappa, kappa+block_size)
            max_dist, expo = adjust_radius_to_gh_bound(max_dist, expo, block_size, root_det, params.gh_factor)

        try:
            enum_obj = Enumeration(self.M)
            with tracer.context("enumeration", enum_obj=enum_obj, probability=1.0):
                solution, max_dist = enum_obj.enumerate(kappa, kappa + block_size, max_dist, expo)[0]

        except EnumerationError as msg:
            if params.flags & BKZ.GH_BND:
                return None
            else:
                raise EnumerationError(msg)

        if max_dist >= delta_max_dist * (1<<expo):
            return None
        else:
            return solution
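In this plain BKZ variant the oracle above is wrapped by an svp_reduction step that preprocesses the block, calls svp_call and inserts the result. A rough sketch of such a wrapper, following the structure of the other examples on this page (not the repository's exact code, and assuming svp_postprocessing reports whether the block changed):

    def svp_reduction_sketch(self, kappa, block_size, params, tracer=dummy_tracer):
        with tracer.context("preprocessing"):
            self.svp_preprocessing(kappa, block_size, params, tracer)

        solution = self.svp_call(kappa, block_size, params, tracer)

        with tracer.context("postprocessing"):
            # insert `solution` into the basis; svp_call returns None when the
            # block already contains a short enough vector
            clean = self.svp_postprocessing(kappa, block_size, solution, tracer)

        return clean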
Example #10
def test_gh():
    try:
        from fpylll.numpy import dump_r
    except ImportError:
        return

    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        try:
            M = GSO.Mat(A, float_type="ld")
        except ValueError:
            M = GSO.Mat(A, float_type="d")
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        gh_radius, ge = adjust_radius_to_gh_bound(2000*radius, 0, n, root_det, 1.0)

        gh1 = gh_radius * 2**ge

        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        assert abs(gh1/gh2 - 1) < 0.01
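Here r holds the squared Gram-Schmidt norms, so gaussian_heuristic(r) returns the squared Gaussian-heuristic radius gh^2 = (prod_i r_i)^(1/n) * Gamma(n/2 + 1)^(2/n) / pi, which is what the assertion compares against. A small cross-check sketch of that closed form (gh_squared is a made-up helper name):

from math import exp, lgamma, log, pi

def gh_squared(r):
    # r: squared Gram-Schmidt norms ||b_i*||^2, e.g. dump_r(M, 0, n) from above
    n = len(r)
    log_det = sum(log(x) for x in r)                      # log(det(L)^2)
    log_ball = (2.0 / n) * lgamma(n / 2.0 + 1) - log(pi)  # -(2/n) * log(Vol of unit n-ball)
    return exp(log_det / n + log_ball)

# gh_squared(r) should agree with gaussian_heuristic(r) up to floating-point error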
Example #11
    def svp_reduction(self,
                      kappa,
                      block_size,
                      param,
                      tracer=dummy_tracer,
                      top_level=False):
        if top_level:
            # do a full LLL up to kappa + block_size
            with tracer.context("lll"):
                self.lll_obj(0, kappa, kappa + block_size, 0)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        # make a copy of the local block to restore in case rerandomisation decreases quality
                        self.copy_block(kappa, block_size)
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           param,
                                           tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer,
                                            top_level=top_level)
                    if rerandomize:
                        self.delete_copy_block(kappa,
                                               block_size,
                                               restore=False)
                rerandomize = False

            except EnumerationError:
                with tracer.context("postprocessing"):
                    if rerandomize:
                        # restore block, TODO don't do this unconditionally
                        self.delete_copy_block(kappa, block_size, restore=True)
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)
Example #12
File: bkz2.py  Project: zhli271828/fpylll
    def svp_reduction(self, kappa, block_size, param, tracer=dummy_tracer):
        """

        :param kappa: current index
        :param block_size: block size
        :param param: BKZ parameters
        :param tracer: object for maintaining statistics

        """

        self.lll_obj.size_reduction(0, kappa + 1)
        old_first, old_first_expo = self.M.get_r_exp(kappa, kappa)

        remaining_probability, rerandomize = 1.0, False

        while remaining_probability > 1. - param.min_success_probability:
            with tracer.context("preprocessing"):
                if rerandomize:
                    with tracer.context("randomization"):
                        self.randomize_block(
                            kappa + 1,
                            kappa + block_size,
                            density=param.rerandomization_density,
                            tracer=tracer)
                with tracer.context("reduction"):
                    self.svp_preprocessing(kappa,
                                           block_size,
                                           param,
                                           tracer=tracer)

            radius, expo = self.M.get_r_exp(kappa, kappa)
            radius *= self.lll_obj.delta

            if param.flags & BKZ.GH_BND and block_size > 30:
                root_det = self.M.get_root_det(kappa, kappa + block_size)
                radius, expo = adjust_radius_to_gh_bound(
                    radius, expo, block_size, root_det, param.gh_factor)

            pruning = self.get_pruning(kappa, block_size, param, tracer)

            try:
                enum_obj = Enumeration(self.M)
                with tracer.context("enumeration",
                                    enum_obj=enum_obj,
                                    probability=pruning.expectation,
                                    full=block_size == param.block_size):
                    solution, max_dist = enum_obj.enumerate(
                        kappa,
                        kappa + block_size,
                        radius,
                        expo,
                        pruning=pruning.coefficients)[0]
                with tracer.context("postprocessing"):
                    self.svp_postprocessing(kappa,
                                            block_size,
                                            solution,
                                            tracer=tracer)
                rerandomize = False

            except EnumerationError:
                rerandomize = True

            remaining_probability *= (1 - pruning.expectation)

        self.lll_obj.size_reduction(0, kappa + 1)
        new_first, new_first_expo = self.M.get_r_exp(kappa, kappa)

        clean = old_first <= new_first * 2**(new_first_expo - old_first_expo)
        return clean