Example #1
def make_knl():

    target = NumbaTarget()

    # build individual kernels
    osc = model.Kuramoto()
    osc.dt = 1.0
    osc.const['omega'] = 10.0 * 2.0 * np.pi / 1e3
    osc_knl = osc.kernel(target)

    cfun = coupling.Kuramoto(osc)
    cfun.param['a'] = pm.parse('a')
    net = network.Network(osc, cfun)
    net_knl = net.kernel(target)

    scm = scheme.EulerStep(osc.dt)
    scm_knl = scm.kernel(target)
    scm_knl = lp.fix_parameters(scm_knl, nsvar=len(osc.state_sym))

    # fuse kernels
    knls = osc_knl, net_knl, scm_knl
    data_flow = [('input', 1, 0), ('diffs', 0, 2), ('drift', 0, 2),
                 ('state', 2, 0)]
    knl = lp.fuse_kernels(knls, data_flow=data_flow)

    # and time step
    knl = lp.to_batched(knl, 'nstep', [], 'i_step', sequential=True)
    knl = lp.fix_parameters(knl,
                            i_time=pm.parse('(i_step + i_step_0) % ntime'))
    knl.args.append(lp.ValueArg('i_step_0', np.uintc))
    knl = lp.add_dtypes(knl, {'i_step_0': np.uintc})

    return knl, osc
Example #2
    def __init__(self, decomp, num_bins, dtype, **kwargs):
        from pymbolic import parse
        import pymbolic.functions as pf

        max_f, min_f = parse("max_f, min_f")
        max_log_f, min_log_f = parse("max_log_f, min_log_f")

        halo_shape = kwargs.pop("halo_shape", 0)
        f = Field("f", offset=halo_shape)

        def clip(expr):
            _min, _max = parse("min, max")
            return _max(_min(expr, num_bins - 1), 0)

        linear_bin = (f - min_f) / (max_f - min_f)
        log_bin = (pf.log(pf.fabs(f)) - min_log_f) / (max_log_f - min_log_f)
        histograms = {
            "linear": (clip(linear_bin * num_bins), 1),
            "log": (clip(log_bin * num_bins), 1)
        }

        super().__init__(decomp, histograms, num_bins, dtype, **kwargs)

        reducers = {}
        reducers["max_f"] = [(f, "max")]
        reducers["min_f"] = [(f, "min")]
        reducers["max_log_f"] = [(pf.log(pf.fabs(f)), "max")]
        reducers["min_log_f"] = [(pf.log(pf.fabs(f)), "min")]

        self.get_min_max = Reduction(decomp,
                                     reducers,
                                     halo_shape=halo_shape,
                                     **kwargs)
Example #3
def test_diff():
    pytest.importorskip("pexpect")

    from pymbolic.interop.maxima import diff
    from pymbolic import parse

    diff(parse("sqrt(x**2+y**2)"), parse("x"))
Example #4
 def assert_parsed_same_as_python(expr_str):
     # make sure the input has only one line
     expr_str, = expr_str.split('\n')
     from pymbolic.interop.ast import ASTToPymbolic
     import ast
     ast2p = ASTToPymbolic()
     try:
         expr_parsed_by_python = ast2p(ast.parse(expr_str).body[0].value)
     except SyntaxError:
         with pytest.raises(ParseError):
             parse(expr_str)
     else:
         expr_parsed_by_pymbolic = parse(expr_str)
         assert expr_parsed_by_python == expr_parsed_by_pymbolic
Example #5
 def floatify(val):
     if not (isinstance(val, float) or isinstance(val, int)):
         ss = next(
             (s for s in self.string_strides if s.search(str(val))),
             None)
         assert ss is not None, 'malformed strides'
         from pymbolic import parse
         # we're interested in the number of conditions we can test;
         # hence, we substitute '1' for the problem size and divide the
         # array size by the # of
         val = parse(str(val).replace(p_size.name, '1'))
         val = parse(str(val).replace(w_size.name, '1'))
         assert isinstance(val, (float, int)), (arry.name, val)
     return val
Example #6
def network_time_step(
        model: model.BaseKernel,
        coupling: coupling.BaseCoupling,
        scheme: scheme.TimeStepScheme,
        target: lp.target.TargetBase=None,
        ):
    target = target or utils.default_target()
    # fuse kernels
    kernels = [
        model.kernel(target),
        network.Network(model, coupling).kernel(target),
        lp.fix_parameters(scheme.kernel(target), nsvar=len(model.state_sym)),
    ]
    data_flow = [
        ('input', 1, 0),
        ('diffs', 0, 2),
        ('drift', 0, 2),
        ('state', 2, 0)
    ]
    knl = lp.fuse_kernels(kernels, data_flow=data_flow)
    # time step
    knl = lp.to_batched(knl, 'nstep', [], 'i_step', sequential=True)
    new_i_time = pm.parse('(i_step + i_step_0) % ntime')
    knl = lp.fix_parameters(knl, i_time=new_i_time)
    knl.args.append(lp.ValueArg('i_step_0', np.uintc))
    knl = lp.add_dtypes(knl, {'i_step_0': np.uintc})
    return knl
Example #7
 def tangent(self):
     self.arguments["tangent"] = \
             lp.GlobalArg("tangent", self.geometry_dtype,
                     shape=("ntargets", self.dim), order="C")
     from pytools.obj_array import make_obj_array
     return make_obj_array(
         [parse("tangent[itgt, %d]" % i) for i in range(self.dim)])
Example #8
def test_strict_round_trip(knl):
    from pymbolic import parse
    from pymbolic.primitives import Quotient

    exprs = [2j, parse("x**y"), Quotient(1, 2), parse("exp(x)")]
    for expr in exprs:
        result = knl.eval_expr(expr)
        round_trips_correctly = result == expr
        if not round_trips_correctly:
            print("ORIGINAL:")
            print("")
            print(expr)
            print("")
            print("POST-MAXIMA:")
            print("")
            print(result)
        assert round_trips_correctly
Example #9
    def _parse_expr(self, expr):
        from pymbolic import parse, substitute
        parsed = parse(expr)

        # substitute in global constants
        parsed = substitute(parsed, self.constants)

        return parsed
Example #10
 def tangent(self):
     self.arguments["tangent"] = \
             lp.GlobalArg("tangent", self.geometry_dtype,
                     shape=("ntargets", self.dim), order="C")
     from pytools.obj_array import make_obj_array
     return make_obj_array([
         parse("tangent[itgt, %d]" % i)
         for i in range(self.dim)])
Example #11
def test_int_max_min_c_target(ctx_factory, which):
    from numpy.random import default_rng
    from pymbolic import parse
    rng = default_rng()

    n = 100
    arr1 = rng.integers(-100, 100, n)
    arr2 = rng.integers(-100, 100, n)
    np_func = getattr(np, f"{which}imum")

    knl = lp.make_kernel(
        "{[i]: 0<=i<100}",
        [lp.Assignment(parse("out[i]"), parse(f"{which}(arr1[i], arr2[i])"))],
        target=lp.ExecutableCTarget())

    _, (out, ) = knl(arr1=arr1, arr2=arr2)
    np.testing.assert_allclose(np_func(arr1, arr2), out)
Example #12
def assert_parse_roundtrip(expr):
    from pymbolic.mapper.stringifier import StringifyMapper
    strified = StringifyMapper()(expr)
    from pymbolic import parse
    parsed_expr = parse(strified)
    print(expr)
    print(parsed_expr)
    assert expr == parsed_expr
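
A minimal usage sketch of the helper above (the expressions are illustrative and assume the helper is in scope):

from pymbolic import parse

assert_parse_roundtrip(parse("a + b*c"))
assert_parse_roundtrip(parse("f(x, y)"))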
Example #13
 def src_derivative_dir(self):
     self.arguments["src_derivative_dir"] = \
             lp.GlobalArg("src_derivative_dir",
                     self.geometry_dtype, shape=("ntargets", self.dim),
                     order="C")
     from pytools.obj_array import make_obj_array
     return make_obj_array([
         parse("src_derivative_dir[itgt, %d]" % i)
         for i in range(self.dim)])
Example #14
 def src_derivative_dir(self):
     self.arguments["src_derivative_dir"] = \
             lp.GlobalArg("src_derivative_dir",
                     self.geometry_dtype, shape=("ntargets", self.dim),
                     order="C")
     from pytools.obj_array import make_obj_array
     return make_obj_array([
         parse("src_derivative_dir[itgt, %d]" % i) for i in range(self.dim)
     ])
Example #15
def test_compile():
    from pymbolic import parse, compile
    code = compile(parse("x ** y"), ["x", "y"])
    assert code(2, 5) == 32

    # Test pickling of compiled code.
    import pickle
    code = pickle.loads(pickle.dumps(code))
    assert code(3, 3) == 27
Example #16
def test_compile():
    from pymbolic import parse, compile
    code = compile(parse("x ** y"), ["x", "y"])
    assert code(2, 5) == 32

    # Test pickling of compiled code.
    import pickle
    code = pickle.loads(pickle.dumps(code))
    assert code(3, 3) == 27
Example #17
def test_no_comparison():
    from pymbolic import parse

    x = parse("17+3*x")
    y = parse("12-5*y")

    def expect_typeerror(f):
        try:
            f()
        except TypeError:
            pass
        else:
            assert False

    expect_typeerror(lambda: x < y)
    expect_typeerror(lambda: x <= y)
    expect_typeerror(lambda: x > y)
    expect_typeerror(lambda: x >= y)
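
Ordering comparisons on pymbolic expressions raise TypeError, so a comparison has to be built as an explicit node; a minimal sketch using pymbolic.primitives.Comparison:

from pymbolic import parse
from pymbolic.primitives import Comparison

x = parse("17+3*x")
y = parse("12-5*y")
# Build the comparison expression explicitly instead of using <.
cond = Comparison(x, "<", y)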
Example #18
def test_no_comparison():
    from pymbolic import parse

    x = parse("17+3*x")
    y = parse("12-5*y")

    def expect_typeerror(f):
        try:
            f()
        except TypeError:
            pass
        else:
            assert False

    expect_typeerror(lambda: x < y)
    expect_typeerror(lambda: x <= y)
    expect_typeerror(lambda: x > y)
    expect_typeerror(lambda: x >= y)
Example #19
    def cumsum(self, arg):
        """
        Registers a substitution rule that cumulatively sums the
        elements of array ``arg`` along ``axis``. Mimics :func:`numpy.cumsum`.

        :return: An instance of :class:`numloopy.ArraySymbol` which is
            registered as the cumulative-sum substitution rule.
        """
        # Note: this can remain as a substitution but loopy does not have
        # support for translating inames for substitutions to the kernel
        # domains
        assert len(arg.shape) == 1
        i_iname = self.name_generator(based_on="i")
        j_iname = self.name_generator(based_on="i")

        space = isl.Space.create_from_names(isl.DEFAULT_CONTEXT,
                                            [i_iname, j_iname])
        domain = isl.BasicSet.universe(space)
        arg_name = self.name_generator(based_on="arr")
        subst_name = self.name_generator(based_on="subst")
        domain = domain & make_slab(space, i_iname, 0, arg.shape[0])
        domain = domain.add_constraint(
            isl.Constraint.ineq_from_names(space, {j_iname: 1}))
        domain = domain.add_constraint(
            isl.Constraint.ineq_from_names(space, {
                j_iname: -1,
                i_iname: 1,
                1: -1
            }))
        cumsummed_arg = ArraySymbol(stack=self,
                                    name=arg_name,
                                    shape=arg.shape,
                                    dtype=arg.dtype)
        cumsummed_subst = ArraySymbol(stack=self,
                                      name=subst_name,
                                      shape=arg.shape,
                                      dtype=arg.dtype)
        subst_iname = self.name_generator(based_on="i")
        rule = lp.SubstitutionRule(
            subst_name, (subst_iname, ),
            Subscript(Variable(arg_name), (Variable(subst_iname), )))

        from loopy.library.reduction import SumReductionOperation

        insn = lp.Assignment(assignee=Subscript(Variable(arg_name),
                                                (Variable(i_iname), )),
                             expression=lp.Reduction(
                                 SumReductionOperation(), (j_iname, ),
                                 parse('{}({})'.format(arg.name, j_iname))))
        self.data.append(cumsummed_arg)
        self.substs_to_arrays[subst_name] = arg_name
        self.register_implicit_assignment(insn)
        self.domains.append(domain)

        self.register_substitution(rule)
        return cumsummed_subst
Example #20
def parse_sympy(s):
    if isinstance(s, unicode):
        # Sympy is not spectacularly happy with unicode function names
        s = s.encode()

    from pymbolic import parse
    from pymbolic.sympy_interface import PymbolicToSympyMapper

    # use pymbolic because it has a semi-secure parser
    return PymbolicToSympyMapper()(parse(s))
Example #21
def test_graphviz():
    from pymbolic import parse
    expr = parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
            "-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
            "+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")

    from pymbolic.mapper.graphviz import GraphvizMapper
    gvm = GraphvizMapper()
    gvm(expr)
    print(gvm.get_dot_code())
Example #22
    def eval(expr, source, center1, center2, target):
        from pymbolic import parse, evaluate
        context = {
                "s": source,
                "c1": center1,
                "c2": center2,
                "t": target,
                "norm": la.norm}

        return evaluate(parse(expr), context)
Example #23
    def eval(expr, source, center1, center2, target):
        from pymbolic import parse, evaluate
        context = {
                "s": source,
                "c1": center1,
                "c2": center2,
                "t": target,
                "norm": la.norm}

        return evaluate(parse(expr), context)
Example #24
def test_graphviz():
    from pymbolic import parse
    expr = parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
            "-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
            "+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")

    from pymbolic.mapper.graphviz import GraphvizMapper
    gvm = GraphvizMapper()
    gvm(expr)
    print(gvm.get_dot_code())
Example #25
def parse_sympy(s):
    if isinstance(s, unicode):
        # Sympy is not spectacularly happy with unicode function names
        s = s.encode()

    from pymbolic import parse
    from pymbolic.sympy_interface import PymbolicToSympyMapper

    # use pymbolic because it has a semi-secure parser
    return PymbolicToSympyMapper()(parse(s))
Example #26
    def get_expr_dataset(self, expression, description=None, unit=None):
        """Prepare a time-series dataset for a given expression.

        @arg expression: A C{pymbolic} expression that may involve
          the time-series variables and the constants in this :class:`LogManager`.
          If there is data from multiple ranks for a quantity occurring in
          this expression, an aggregator may have to be specified.
        @return: C{(description, unit, table)}, where C{table}
          is a list of tuples C{(tick_nbr, value)}.

        Aggregators are specified as follows:
        - C{qty.min}, C{qty.max}, C{qty.avg}, C{qty.sum}, C{qty.norm2}
        - C{qty[rank_nbr]}
        - C{qty.loc}
        """

        parsed = self._parse_expr(expression)
        parsed, dep_data = self._get_expr_dep_data(parsed)

        # aggregate table data
        for dd in dep_data:
            table = self.get_table(dd.name)
            table.sort(["step"])
            dd.table = table.aggregated(["step"], "value", dd.agg_func).data

        # evaluate unit and description, if necessary
        if unit is None:
            from pymbolic import substitute, parse

            unit_dict = dict((dd.varname, dd.qdat.unit) for dd in dep_data)
            from pytools import all
            if all(v is not None for v in six.itervalues(unit_dict)):
                unit_dict = dict(
                    (k, parse(v)) for k, v in six.iteritems(unit_dict))
                unit = substitute(parsed, unit_dict)
            else:
                unit = None

        if description is None:
            description = expression

        # compile and evaluate
        from pymbolic import compile
        compiled = compile(parsed, [dd.varname for dd in dep_data])

        data = []

        for key, values in _join_by_first_of_tuple(dd.table
                                                   for dd in dep_data):
            try:
                data.append((key, compiled(*values)))
            except ZeroDivisionError:
                pass

        return (description, unit, data)
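
A hedged usage sketch of the method above, assuming a populated LogManager instance named logmgr with logged quantities t_step and n_flops (all three names are illustrative):

# Evaluate a derived quantity per tick; aggregators pick one value per rank.
description, unit, table = logmgr.get_expr_dataset("n_flops.sum / t_step.max")
for tick, value in table:
    print(tick, value)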
Example #27
def test_sympy_interop(proc_shape):
    if proc_shape != (1, 1, 1):
        pytest.skip("test field only on one rank")

    from pystella.field.sympy import pymbolic_to_sympy, sympy_to_pymbolic
    import sympy as sym

    f = ps.Field("f", offset="h")
    g = ps.Field("g", offset="h")

    expr = f[0]**2 * g + 2 * g[1] * f
    sympy_expr = pymbolic_to_sympy(expr)
    new_expr = sympy_to_pymbolic(sympy_expr)
    sympy_expr_2 = pymbolic_to_sympy(new_expr)
    assert sym.simplify(sympy_expr - sympy_expr_2) == 0, \
        "sympy <-> pymbolic conversion not invertible"

    expr = f + shift_fields(f, (1, 2, 3))
    sympy_expr = pymbolic_to_sympy(expr)
    new_expr = sympy_to_pymbolic(sympy_expr)
    sympy_expr_2 = pymbolic_to_sympy(new_expr)
    assert sym.simplify(sympy_expr - sympy_expr_2) == 0, \
        "sympy <-> pymbolic conversion not invertible with shifted indices"

    # from pymbolic.functions import fabs, exp, exmp1
    fabs = parse("math.fabs")
    exp = parse("math.exp")
    expm1 = parse("math.expm1")
    x = sym.Symbol("x")

    expr = sym.Abs(x)
    assert sympy_to_pymbolic(expr) == fabs(var("x"))

    expr = sym.exp(x)
    assert sympy_to_pymbolic(expr) == exp(var("x"))

    expr = sym.Function("expm1")(x)  # pylint: disable=E1102
    assert sympy_to_pymbolic(expr) == expm1(var("x"))

    expr = sym.Function("aaa")(x)  # pylint: disable=E1102
    from pymbolic.primitives import Call, Variable
    assert sympy_to_pymbolic(expr) == Call(Variable("aaa"), (Variable("x"), ))
Example #28
def parse_sympy(s):
    if six.PY2:
        if isinstance(s, unicode):  # noqa -- has Py2/3 guard
            # Sympy is not spectacularly happy with unicode function names
            s = s.encode()

    from pymbolic import parse
    from pymbolic.interop.sympy import PymbolicToSympyMapper

    # use pymbolic because it has a semi-secure parser
    return PymbolicToSympyMapper()(parse(s))
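
A minimal standalone sketch of the same conversion outside the helper, assuming sympy is installed (the expression is illustrative):

from pymbolic import parse
from pymbolic.interop.sympy import PymbolicToSympyMapper

# Convert a parsed pymbolic expression into the equivalent sympy expression.
sympy_expr = PymbolicToSympyMapper()(parse("x**2 + 3*x*y"))
print(sympy_expr)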
Example #29
def test_np_bool_handling(ctx_factory):
    import pymbolic.primitives as p
    from loopy.symbolic import parse
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "{:}", [lp.Assignment(parse("y"), p.LogicalNot(np.bool_(False)))],
        [lp.GlobalArg("y", dtype=np.bool_, shape=lp.auto)])
    evt, (out, ) = knl(queue)
    assert out.get().item() is True
Example #30
def make_knl():
    # choose network model parts
    osc = model.Kuramoto()
    osc.dt = 1.0
    osc.const['omega'] = 10.0 * 2.0 * np.pi / 1e3
    cfun = coupling.Kuramoto(osc)
    cfun.param['a'] = pm.parse('a')
    scm = scheme.EulerStep(osc.dt)
    # create kernel
    knl = transforms.network_time_step(osc, cfun, scm)
    return knl, osc
Example #31
 def kernel_data(self) -> List[str]:
     "Return arguments / data to kernel."
     # normalize wrt. key set like ['n,out', 'foo,bar']
     csk = ','.join(self.kernel_dtypes().keys())
     data = [key for key in csk.split(',')]
     if hasattr(self, 'extra_data_shape'):
         for name, shape in self.extra_data_shape.items():
             shape = tuple(pm.parse(_) for _ in shape.split(','))
             arg = lp.GlobalArg(name, shape=shape)
             data[data.index(name)] = arg
     return data
Example #32
    def get_expr_dataset(self, expression, description=None, unit=None):
        """Prepare a time-series dataset for a given expression.

        @arg expression: A C{pymbolic} expression that may involve
          the time-series variables and the constants in this :class:`LogManager`.
          If there is data from multiple ranks for a quantity occurring in
          this expression, an aggregator may have to be specified.
        @return: C{(description, unit, table)}, where C{table}
          is a list of tuples C{(tick_nbr, value)}.

        Aggregators are specified as follows:
        - C{qty.min}, C{qty.max}, C{qty.avg}, C{qty.sum}, C{qty.norm2}
        - C{qty[rank_nbr]}
        - C{qty.loc}
        """

        parsed = self._parse_expr(expression)
        parsed, dep_data = self._get_expr_dep_data(parsed)

        # aggregate table data
        for dd in dep_data:
            table = self.get_table(dd.name)
            table.sort(["step"])
            dd.table = table.aggregated(["step"], "value", dd.agg_func).data

        # evaluate unit and description, if necessary
        if unit is None:
            from pymbolic import substitute, parse

            unit_dict = dict((dd.varname, dd.qdat.unit) for dd in dep_data)
            from pytools import all
            if all(v is not None for v in six.itervalues(unit_dict)):
                unit_dict = dict((k, parse(v)) for k, v in six.iteritems(unit_dict))
                unit = substitute(parsed, unit_dict)
            else:
                unit = None

        if description is None:
            description = expression

        # compile and evaluate
        from pymbolic import compile
        compiled = compile(parsed, [dd.varname for dd in dep_data])

        data = []

        for key, values in _join_by_first_of_tuple(dd.table for dd in dep_data):
            try:
                data.append((key, compiled(*values)))
            except ZeroDivisionError:
                pass

        return (description, unit, data)
Example #33
    def sum(self, arg, axis=None):
        """
        Registers a substitution rule that sums the elements of array
        ``arg`` along ``axis``.

        :return: An instance of :class:`numloopy.ArraySymbol` which is
            registered as the sum substitution rule.
        """
        if isinstance(axis, int):
            axis = (axis, )

        if not axis:
            axis = tuple(range(len(arg.shape)))

        inames = [self.name_generator(based_on="i") for _ in arg.shape]

        space = isl.Space.create_from_names(isl.DEFAULT_CONTEXT, inames)
        domain = isl.BasicSet.universe(space)
        for axis_len, iname in zip(arg.shape, inames):
            domain &= make_slab(space, iname, 0, axis_len)

        self.domains.append(domain)

        reduction_inames = tuple(iname for i, iname in enumerate(inames)
                                 if i in axis)
        left_inames = tuple(iname for i, iname in enumerate(inames)
                            if i not in axis)

        def _one_if_empty(t):
            if t:
                return t
            else:
                return (1, )

        subst_name = self.name_generator(based_on="subst")

        summed_arg = ArraySymbol(
            stack=self,
            name=subst_name,
            shape=_one_if_empty(
                tuple(axis_len for i, axis_len in enumerate(arg.shape)
                      if i not in axis)),
            dtype=arg.dtype)

        from loopy.library.reduction import SumReductionOperation

        rule = lp.SubstitutionRule(
            subst_name, left_inames,
            lp.Reduction(SumReductionOperation(), reduction_inames,
                         parse('{}({})'.format(arg.name, ', '.join(inames)))))
        self.register_substitution(rule)

        return summed_arg
Example #34
def test_strict_round_trip(knl):
    from pymbolic import parse
    from pymbolic.primitives import Quotient

    exprs = [
            2j,
            parse("x**y"),
            Quotient(1, 2),
            parse("exp(x)")
            ]
    for expr in exprs:
        result = knl.eval_expr(expr)
        round_trips_correctly = result == expr
        if not round_trips_correctly:
            print("ORIGINAL:")
            print("")
            print(expr)
            print("")
            print("POST-MAXIMA:")
            print("")
            print(result)
        assert round_trips_correctly
Example #35
 def _insn_cfun(self, k, pre, post):
     "Generates an instruction for a single coupling function."
     # TODO add loopy hints to make more efficient
     # substitute pre_syn and post_syn for obsrv data
     pre_expr = subst_vars(
         expr=pre,
         #                                                     k is var idx
         pre_syn=pm.parse('obsrv[i_time - delays[j_node], col[j_node], k]'),
         post_syn=pm.parse('obsrv[i_time, i_node, k]'),
     )
     # build weighted sum over nodes
     sum = subst_vars(
         expr=pm.parse('sum(j_node, weights[j_node] * pre_expr)'),
         pre_expr=pre_expr,
     )
     # mean used by some cfuns
     mean = sum / pm.var('nnode')
     # subst mean / sum through post cfun
     post_expr = subst_vars(post, sum=sum, mean=mean)
     # generate store instruction for post expr, with params
     post_expr = subst_vars(post_expr, k=k, **self.cfun.param)
     return 'input[i_node, %d] = %s' % (k, post_expr)
Example #36
def exprs(sexprs):
    """
    Build an array of symbolic expressions from a sequence of strings.

    """
    exprs = []
    for expr in sexprs:
        if isinstance(expr, (int, float)):
            exprs.append(expr)
        else:
            try:
                exprs.append(pm.parse(expr))
            except Exception as exc:
                raise Exception(repr(expr)) from exc
    return np.array(exprs)
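
A usage sketch of the helper above (the inputs are illustrative):

symbolic = exprs(['a + b', 'sin(x)', 1.5])
# -> a numpy object array holding two pymbolic expressions and the float 1.5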
Example #37
    def get_expr_dataset(self, expression, description=None, unit=None):
        """Prepare a time-series dataset for a given expression.

        @arg expression: A C{pymbolic} expression that may involve
          the time-series variables and the constants in this L{LogManager}.
          If there is data from multiple ranks for a quantity occurring in
          this expression, an aggregator may have to be specified.
        @return: C{(description, unit, table)}, where C{table} 
          is a list of tuples C{(tick_nbr, value)}.

        Aggregators are specified as follows:
        - C{qty.min}, C{qty.max}, C{qty.avg}, C{qty.sum}, C{qty.norm2}
        - C{qty[rank_nbr]}
        """

        parsed = self._parse_expr(expression)
        parsed, dep_data = self._get_expr_dep_data(parsed)

        # aggregate table data
        for dd in dep_data:
            table = self.get_table(dd.name)
            table.sort(["step"])
            dd.table = table.aggregated(["step"], "value", dd.agg_func).data

        # evaluate unit and description, if necessary
        if unit is None:
            from pymbolic import substitute, parse

            unit = substitute(parsed,
                    dict((dd.varname, parse(dd.qdat.unit)) for dd in dep_data)
                    )

        if description is None:
            description = expression

        # compile and evaluate
        from pymbolic import compile
        compiled = compile(parsed, [dd.varname for dd in dep_data])

        return (description,
                unit,
                [(key, compiled(*values))
                    for key, values in _join_by_first_of_tuple(
                        dd.table for dd in dep_data)
                    ])
Example #38
def test_pymbolic_sexprs():
    def check_round_trip(expr):
        assert sexpr_to_pymbolic(pymbolic_to_sexpr(expr)) == expr

    from pymbolic.primitives import Variable, Sum, Product, Power
    check_round_trip(Variable("x"))
    check_round_trip(1)
    check_round_trip(-11)
    check_round_trip(1.1)
    check_round_trip(1.1e-2)
    check_round_trip(Sum((7, )))
    check_round_trip(Sum((1, 2, 3)))
    check_round_trip(
        Sum((1, Product((2, 3, Power(1, Sum((Product((-1, 2)), 2))))), 3)))
    check_round_trip(Product((1, 2, 3)))
    check_round_trip(Power(1, Variable("x")))
    check_round_trip(Power(Power(1, 2), 3))
    check_round_trip(Power(1, Power(2, 3)))
    check_round_trip(Power(Power(Sum((1, 2)), 3), 3.5))

    from pymbolic import parse
    check_round_trip(
        parse("c_m2l * (40 * ((p_fmm + 1)**2)"
              "** 1.5 * (p_qbx + 1) ** 0.5 + 1)"))

    def check_error(expr):
        with pytest.raises(ParserError):
            sexpr_to_pymbolic(expr)

    check_error("")
    check_error("(")
    check_error(")")
    check_error("()")
    check_error("1 2 3")
    check_error("(Var ''')")
    check_error("(Power (Var 'x'")
    check_error("(Product 1 2) (Sum 1 2)")
    check_error("(Sum (Sum 1 2) 3")
    check_error("(Error)")
    check_error("Sum")
Example #39
    def make_spectra_knl(self, is_real, rank_shape):
        from pymbolic import var, parse
        indices = i, j, k = parse("i, j, k")
        momenta = [var("momenta_"+xx) for xx in ("x", "y", "z")]
        ksq = sum((dk_i * mom[ii])**2
                  for mom, dk_i, ii in zip(momenta, self.dk, indices))
        kmag = var("sqrt")(ksq)
        bin_expr = var("round")(kmag / self.bin_width)

        if is_real:
            from pymbolic.primitives import If, Comparison, LogicalAnd
            nyq = self.grid_shape[-1] / 2
            condition = LogicalAnd((Comparison(momenta[2][k], ">", 0),
                                    Comparison(momenta[2][k], "<", nyq)))
            count = If(condition, 2, 1)
        else:
            count = 1

        fk = var("fk")[i, j, k]
        weight_expr = count * kmag**(var("k_power")) * var("abs")(fk)**2

        histograms = {"spectrum": (bin_expr, weight_expr)}

        args = [
            lp.GlobalArg("fk", self.cdtype, shape=("Nx", "Ny", "Nz"),
                         offset=lp.auto),
            lp.GlobalArg("momenta_x", self.rdtype, shape=("Nx",)),
            lp.GlobalArg("momenta_y", self.rdtype, shape=("Ny",)),
            lp.GlobalArg("momenta_z", self.rdtype, shape=("Nz",)),
            lp.ValueArg("k_power", self.rdtype),
            ...
        ]

        from pystella.histogram import Histogrammer
        return Histogrammer(self.decomp, histograms, self.num_bins,
                            self.rdtype, args=args, rank_shape=rank_shape)
Example #40
def test_dynamic_field(proc_shape):
    if proc_shape != (1, 1, 1):
        pytest.skip("test field only on one rank")

    y = ps.DynamicField("y", offset="h")

    result = ps.index_fields(y)
    assert result == parse("y[i + h, j + h, k + h]"), result

    result = ps.index_fields(y.lap)
    assert result == parse("lap_y[i, j, k]"), result

    result = ps.index_fields(y.dot)
    assert result == parse("dydt[i + h, j + h, k + h]"), result

    result = ps.index_fields(y.pd[var("x")])
    assert result == parse("dydx[x, i, j, k]"), result

    result = ps.index_fields(y.d(1, 0))
    assert result == parse("dydt[1, i + h, j + h, k + h]"), result

    result = ps.index_fields(y.d(1, 1))
    assert result == parse("dydx[1, 0, i, j, k]"), result
Example #41
 def density_prime(self):
     prime_var_name = self.density_var_name+"_prime"
     self.arguments[prime_var_name] = \
             lp.GlobalArg(prime_var_name, self.density_dtype,
                     shape="ntargets", order="C")
     return parse("%s[itgt]" % prime_var_name)
Example #42
def extract_subst(kernel, subst_name, template, parameters=()):
    """
    :arg subst_name: The name of the substitution rule to be created.
    :arg template: Unification template expression.
    :arg parameters: An iterable of parameters used in
        *template*, or a comma-separated string of the same.

    All targeted subexpressions must match ('unify with') *template*.
    The template may contain '*' wildcards that will have to match
    exactly across all unifications.
    """

    if isinstance(template, str):
        from pymbolic import parse
        template = parse(template)

    if isinstance(parameters, str):
        parameters = tuple(
                s.strip() for s in parameters.split(","))

    var_name_gen = kernel.get_var_name_generator()

    # {{{ replace any wildcards in template with new variables

    def get_unique_var_name():
        based_on = subst_name+"_wc"

        result = var_name_gen(based_on)
        return result

    from loopy.symbolic import WildcardToUniqueVariableMapper
    wc_map = WildcardToUniqueVariableMapper(get_unique_var_name)
    template = wc_map(template)

    # }}}

    # {{{ deal with iname deps of template that are not independent_inames

    # (We call these 'matching_vars', because they have to match exactly in
    # every CSE. As above, they might need to be renamed to make them unique
    # within the kernel.)

    matching_vars = []
    old_to_new = {}

    for iname in (get_dependencies(template)
            - set(parameters)
            - kernel.non_iname_variable_names()):
        if iname in kernel.all_inames():
            # need to rename to be unique
            new_iname = var_name_gen(iname)
            old_to_new[iname] = var(new_iname)
            matching_vars.append(new_iname)
        else:
            matching_vars.append(iname)

    if old_to_new:
        template = (
                SubstitutionMapper(make_subst_func(old_to_new))
                (template))

    # }}}

    # {{{ gather up expressions

    expr_descriptors = []

    from loopy.symbolic import UnidirectionalUnifier
    unif = UnidirectionalUnifier(
            lhs_mapping_candidates=set(parameters) | set(matching_vars))

    def gather_exprs(expr, mapper):
        urecs = unif(template, expr)

        if urecs:
            if len(urecs) > 1:
                raise RuntimeError("ambiguous unification of '%s' with template '%s'"
                        % (expr, template))

            urec, = urecs

            expr_descriptors.append(
                    ExprDescriptor(
                        insn=insn,
                        expr=expr,
                        unif_var_dict=dict((lhs.name, rhs)
                            for lhs, rhs in urec.equations)))
        else:
            mapper.fallback_mapper(expr)
            # can't nest, don't recurse

    from loopy.symbolic import (
            CallbackMapper, WalkMapper, IdentityMapper)
    dfmapper = CallbackMapper(gather_exprs, WalkMapper())

    for insn in kernel.instructions:
        dfmapper(insn.assignees)
        dfmapper(insn.expression)

    for sr in six.itervalues(kernel.substitutions):
        dfmapper(sr.expression)

    # }}}

    if not expr_descriptors:
        raise RuntimeError("no expressions matching '%s'" % template)

    # {{{ substitute rule into instructions

    def replace_exprs(expr, mapper):
        found = False
        for exprd in expr_descriptors:
            if expr is exprd.expr:
                found = True
                break

        if not found:
            return mapper.fallback_mapper(expr)

        args = [exprd.unif_var_dict[arg_name]
                for arg_name in parameters]

        result = var(subst_name)
        if args:
            result = result(*args)

        return result
        # can't nest, don't recurse

    cbmapper = CallbackMapper(replace_exprs, IdentityMapper())

    new_insns = []

    for insn in kernel.instructions:
        new_insns.append(insn.with_transformed_expressions(cbmapper))

    from loopy.kernel.data import SubstitutionRule
    new_substs = {
            subst_name: SubstitutionRule(
                name=subst_name,
                arguments=tuple(parameters),
                expression=template,
                )}

    for subst in six.itervalues(kernel.substitutions):
        new_substs[subst.name] = subst.copy(
                expression=cbmapper(subst.expression))

    # }}}

    return kernel.copy(
            instructions=new_insns,
            substitutions=new_substs)
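
A rough usage sketch of the documented parameters, assuming knl is an existing loopy kernel whose instructions contain subexpressions of the form alpha*b[i]**2 (the rule and parameter names are illustrative):

# Pull matching subexpressions out into a substitution rule named "bsquare",
# parameterized over the coefficient alpha.
knl = extract_subst(knl, "bsquare", "alpha * b[i]**2", parameters="alpha")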
Example #43
def test_diff():
    from pymbolic.interop.maxima import diff
    from pymbolic import parse
    diff(parse("sqrt(x**2+y**2)"), parse("x"))
Example #44
 def side(self):
     self.arguments["side"] = \
             lp.GlobalArg("side", self.geometry_dtype, shape="ntargets")
     return parse("side[itgt]")
Example #45
def test_elliptic():
    """Test various properties of elliptic operators."""

    from hedge.tools import unit_vector

    def matrix_rep(op):
        h, w = op.shape
        mat = numpy.zeros(op.shape)
        for j in range(w):
            mat[:, j] = op(unit_vector(w, j))
        return mat

    def check_grad_mat():
        import pyublas

        if not pyublas.has_sparse_wrappers():
            return

        grad_mat = op.grad_matrix()

        # print len(discr), grad_mat.nnz, type(grad_mat)
        for i in range(10):
            u = numpy.random.randn(len(discr))

            mat_result = grad_mat * u
            op_result = numpy.hstack(op.grad(u))

            err = la.norm(mat_result - op_result) * la.norm(op_result)
            assert err < 1e-5

    def check_matrix_tgt():
        big = num.zeros((20, 20), flavor=num.SparseBuildMatrix)
        small = num.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        print(small)
        from hedge._internal import MatrixTarget

        tgt = MatrixTarget(big, 4, 4)
        tgt.begin(small.shape[0], small.shape[1])
        print("YO")
        tgt.add_coefficients(4, 4, small)
        print("DUDE")
        tgt.finalize()
        print(big)

    import pymbolic

    v_x = pymbolic.var("x")
    truesol = pymbolic.parse("math.sin(x[0]**2*x[1]**2)")
    truesol_c = pymbolic.compile(truesol, variables=["x"])
    rhs = pymbolic.simplify(pymbolic.laplace(truesol, [v_x[0], v_x[1]]))
    rhs_c = pymbolic.compile(rhs, variables=["x", "el"])

    from hedge.mesh import TAG_ALL, TAG_NONE
    from hedge.mesh.generator import make_disk_mesh

    mesh = make_disk_mesh(r=0.5, max_area=0.1, faces=20)
    mesh = mesh.reordered_by("cuthill")

    from hedge.backends import CPURunContext

    rcon = CPURunContext()

    from hedge.tools import EOCRecorder

    eocrec = EOCRecorder()
    for order in [1, 2, 3, 4, 5]:
        for flux in ["ldg", "ip"]:
            from hedge.discretization.local import TriangleDiscretization

            discr = rcon.make_discretization(
                mesh, TriangleDiscretization(order), debug=discr_class.noninteractive_debug_flags()
            )

            from hedge.data import GivenFunction
            from hedge.models.poisson import PoissonOperator

            op = PoissonOperator(
                discr.dimensions,
                dirichlet_tag=TAG_ALL,
                dirichlet_bc=GivenFunction(lambda x, el: truesol_c(x)),
                neumann_tag=TAG_NONE,
            )

            bound_op = op.bind(discr)

            if order <= 3:
                mat = matrix_rep(bound_op)
                sym_err = la.norm(mat - mat.T)
                # print sym_err
                assert sym_err < 1e-12
                # check_grad_mat()

            from hedge.iterative import parallel_cg

            truesol_v = discr.interpolate_volume_function(lambda x, el: truesol_c(x))
            sol_v = -parallel_cg(
                rcon,
                -bound_op,
                bound_op.prepare_rhs(discr.interpolate_volume_function(rhs_c)),
                tol=1e-10,
                max_iterations=40000,
            )

            eocrec.add_data_point(order, discr.norm(sol_v - truesol_v))

    # print eocrec.pretty_print()
    assert eocrec.estimate_order_of_convergence()[0, 1] > 8
Example #46
 def mean_curvature(self):
     self.arguments["mean_curvature"] = \
             lp.GlobalArg("mean_curvature",
                     self.geometry_dtype, shape="ntargets",
                     order="C")
     return parse("mean_curvature[itgt]")
Example #47
 def density(self):
     self.arguments[self.density_var_name] = \
             lp.GlobalArg(self.density_var_name, self.density_dtype,
                     shape="ntargets", order="C")
     return parse("%s[itgt]" % self.density_var_name)
Example #48
def test_stringifier_preserve_shift_order():
    for expr in [
            parse("(a << b) >> 2"),
            parse("a << (b >> 2)")
            ]:
        assert parse(str(expr)) == expr
Example #49
def test_latex_mapper():
    from pymbolic import parse
    from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper

    tm = LaTeXMapper()
    sm = StringifyMapper()

    equations = []

    def add(expr):
        # Add an equation to the list of tests.
        equations.append(r"\[%s\] %% from: %s" % (tm(expr), sm(expr)))

    add(parse("a * b + c"))
    add(parse("f(a,b,c)"))
    add(parse("a ** b ** c"))
    add(parse("(a | b) ^ ~c"))
    add(parse("a << b"))
    add(parse("a >> b"))
    add(parse("a[i,j,k]"))
    add(parse("a[1:3]"))
    add(parse("a // b"))
    add(parse("not (a or b) and c"))
    add(parse("(a % b) % c"))
    add(parse("(a >= b) or (b <= c)"))
    add(prim.Min((1,)) + prim.Max((1, 2)))
    add(prim.Substitution(prim.Variable("x") ** 2, ("x",), (2,)))
    add(prim.Derivative(parse("x**2"), ("x",)))

    # Run LaTeX and ensure the file compiles.
    import os
    import tempfile
    import subprocess
    import shutil

    latex_dir = tempfile.mkdtemp("pymbolic")

    try:
        tex_file_path = os.path.join(latex_dir, "input.tex")

        with open(tex_file_path, "w") as tex_file:
            contents = LATEX_TEMPLATE % "\n".join(equations)
            tex_file.write(contents)

        try:
            subprocess.check_output(
                    ["latex",
                     "-interaction=nonstopmode",
                     "-output-directory=%s" % latex_dir,
                     tex_file_path],
                    universal_newlines=True)
        except FileNotFoundError:
            pytest.skip("latex command not found")
        except subprocess.CalledProcessError as err:
            assert False, str(err.output)

    finally:
        shutil.rmtree(latex_dir)
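
A minimal standalone sketch of the two mappers used above, without the LaTeX compilation step (the expression is illustrative):

from pymbolic import parse
from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper

expr = parse("a[i, j] + b**2")
print(StringifyMapper()(expr))  # plain-text form
print(LaTeXMapper()(expr))      # LaTeX source for the same expression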
Example #50
def test_parser():
    from pymbolic import parse
    parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
            "-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
            "+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
    print(repr(parse("d4knl0")))
    print(repr(parse("0.")))
    print(repr(parse("0.e1")))
    assert parse("0.e1") == 0
    assert parse("1e-12") == 1e-12
    print(repr(parse("a >= 1")))
    print(repr(parse("a <= 1")))

    print(repr(parse(":")))
    print(repr(parse("1:")))
    print(repr(parse(":2")))
    print(repr(parse("1:2")))
    print(repr(parse("::")))
    print(repr(parse("1::")))
    print(repr(parse(":1:")))
    print(repr(parse("::1")))
    print(repr(parse("3::1")))
    print(repr(parse(":5:1")))
    print(repr(parse("3:5:1")))
    print(repr(parse("g[i,k]+2.0*h[i,k]")))
    print(repr(parse("g[i,k]+(+2.0)*h[i,k]")))
    print(repr(parse("a - b - c")))
    print(repr(parse("-a - -b - -c")))
    print(repr(parse("- - - a - - - - b - - - - - c")))

    print(repr(parse("~(a ^ b)")))
    print(repr(parse("(a | b) | ~(~a & ~b)")))

    print(repr(parse("3 << 1")))
    print(repr(parse("1 >> 3")))

    print(parse("3::1"))

    assert parse("e1") == prim.Variable("e1")
    assert parse("d1") == prim.Variable("d1")

    from pymbolic import variables
    f, x, y, z = variables("f x y z")
    assert parse("f((x,y),z)") == f((x, y), z)
    assert parse("f((x,),z)") == f((x,), z)
    assert parse("f(x,(y,z),z)") == f(x, (y, z), z)

    assert parse("f(x,(y,z),z, name=15)") == f(x, (y, z), z, name=15)
    assert parse("f(x,(y,z),z, name=15, name2=17)") == f(
            x, (y, z), z, name=15, name2=17)
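
Parsed expressions are ordinary pymbolic primitives, so they compose with Python operators and with var(); a small sketch (names are illustrative):

from pymbolic import parse, var

e = parse("x**2 + 1")
# Arithmetic on a parsed expression builds a larger expression tree.
bigger = 3*e - var("y")
print(bigger)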
Example #51
from pymbolic import parse, var
from pymbolic.mapper.dependency import DependencyMapper

x = var("x")
y = var("y")

expr2 = 3*x+5-y
expr = parse("3*x+5-y")

print(expr)
print(expr2)

dm = DependencyMapper()
print(dm(expr))
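
A sketch of inspecting the result, assuming DependencyMapper returns a set of Variable nodes for this expression:

names = sorted(dep.name for dep in dm(expr))
print(names)  # expected: ['x', 'y']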
Example #52
        if stats_callback is not None:
            stats_callback(size,  self,
                    kernel_rec.kernel.prepared_timed_call(vectors[0]._grid, results[0]._block, *args))
        else:
            kernel_rec.kernel.prepared_async_call(vectors[0]._grid, results[0]._block, self.stream, *args)

        return results




if __name__ == "__main__":
    test_dtype = numpy.float32

    import pycuda.autoinit
    from pymbolic import parse
    expr = parse("2*x+3*y+4*z")
    print(expr)
    cexpr = CompiledVectorExpression(expr,
            lambda expr: (True, test_dtype),
            test_dtype)

    from pymbolic import var
    ctx = {
        var("x"): gpuarray.arange(5, dtype=test_dtype),
        var("y"): gpuarray.arange(5, dtype=test_dtype),
        var("z"): gpuarray.arange(5, dtype=test_dtype),
        }

    print(cexpr(lambda expr: ctx[expr]))
Example #53
def test_substitute():
    from pymbolic import parse, substitute, evaluate
    u = parse("5+x.min**2")
    xmin = parse("x.min")
    assert evaluate(substitute(u, {xmin: 25})) == 630
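
A slightly larger sketch of the same parse/substitute/evaluate flow, assuming string keys in the substitution dict are resolved to variable names:

from pymbolic import parse, substitute, evaluate

expr = parse("a*x**2 + b*x + c")
# Bind the coefficient symbols, leaving x free.
bound = substitute(expr, {"a": 1, "b": 2, "c": 3})
print(evaluate(bound, {"x": 4}))  # 1*16 + 2*4 + 3 = 27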
Example #54
 def mean_curvature(self):
     self.arguments["mean_curvature"] = \
             lp.GlobalArg("mean_curvature",
                     self.geometry_dtype, shape="ntargets",
                     order="C")
     return parse("mean_curvature[itgt]")