Example #1
    def test_conditional_dimension(self):
        """Test that different ConditionalDimensions have different hash value."""
        d0 = Dimension(name='d')
        s0 = Scalar(name='s')
        d1 = Dimension(name='d', spacing=s0)

        cd0 = ConditionalDimension(name='cd', parent=d0, factor=4)
        cd1 = ConditionalDimension(name='cd', parent=d0, factor=5)
        assert cd0 is not cd1
        assert hash(cd0) != hash(cd1)

        cd2 = ConditionalDimension(name='cd',
                                   parent=d0,
                                   factor=4,
                                   indirect=True)
        assert hash(cd0) != hash(cd2)

        cd3 = ConditionalDimension(name='cd', parent=d1, factor=4)
        assert hash(cd0) != hash(cd3)

        s1 = Scalar(name='s', dtype=np.int32)
        cd4 = ConditionalDimension(name='cd',
                                   parent=d0,
                                   factor=4,
                                   condition=s0 > 3)
        assert hash(cd0) != hash(cd4)

        cd5 = ConditionalDimension(name='cd',
                                   parent=d0,
                                   factor=4,
                                   condition=s1 > 3)
        assert hash(cd0) != hash(cd5)
        assert hash(cd4) != hash(cd5)
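
For context, the snippet above relies on the usual Devito test-suite imports. A minimal sketch of the setup it assumes (treating the `devito.types` path for `Scalar` as an assumption, mirroring how the Devito test suite imports it):

import numpy as np

from devito import ConditionalDimension, Dimension
from devito.types import Scalar  # assumed import path for the internal Scalar type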
Example #2
    def _extract_time_invariants(self,
                                 cluster,
                                 template,
                                 with_cse=True,
                                 costmodel=None,
                                 **kwargs):
        """
        Extract time-invariant subexpressions, and assign them to temporaries.
        """

        # Extract time invariants
        make = lambda i: Scalar(name=template(i)).indexify()
        rule = iq_timeinvariant(cluster.trace)
        costmodel = costmodel or (lambda e: estimate_cost(e) > 0)
        processed, found = xreplace_constrained(cluster.exprs, make, rule,
                                                costmodel)

        if with_cse:
            leaves = [i for i in processed if i not in found]

            # Search for common sub-expressions amongst them (and only them)
            make = lambda i: Scalar(name=template(i + len(found))).indexify()
            found = common_subexprs_elimination(found, make)

            # Some temporaries may be droppable at this point
            processed = compact_temporaries(found, leaves)

        return cluster.rebuild(processed)
Example #3
def test_shared_data():
    s = Scalar(name='s')
    a = Scalar(name='a')

    sdata = SharedData(name='sdata',
                       npthreads=2,
                       fields=[s],
                       dynamic_fields=[a])

    pkl_sdata = pickle.dumps(sdata)
    new_sdata = pickle.loads(pkl_sdata)

    assert sdata.name == new_sdata.name
    assert sdata.size == new_sdata.size
    assert sdata.fields == new_sdata.fields
    assert sdata.pfields == new_sdata.pfields
    assert sdata.dynamic_fields == new_sdata.dynamic_fields

    ffp = FieldFromPointer(sdata._field_flag, sdata.symbolic_base)

    pkl_ffp = pickle.dumps(ffp)
    new_ffp = pickle.loads(pkl_ffp)

    assert ffp == new_ffp

    indexed = sdata[0]

    pkl_indexed = pickle.dumps(indexed)
    new_indexed = pickle.loads(pkl_indexed)

    assert indexed.name == new_indexed.name
    assert indexed.shape == new_indexed.shape
Example #4
def test_collect_aliases(fc, fd, exprs, expected):
    grid = Grid(shape=(4, 4))
    x, y = grid.dimensions  # noqa
    xi, yi = grid.interior.dimensions  # noqa

    t0 = Scalar(name='t0')  # noqa
    t1 = Scalar(name='t1')  # noqa
    t2 = Scalar(name='t2')  # noqa
    t3 = Scalar(name='t3')  # noqa
    fa = Function(name='fa', grid=grid, shape=(4, ), dimensions=(x, ))  # noqa
    fb = Function(name='fb', grid=grid, shape=(4, ), dimensions=(x, ))  # noqa
    fc = Function(name='fc', grid=grid)  # noqa
    fd = Function(name='fd', grid=grid)  # noqa

    # List/dict comprehension would need explicit locals/globals mappings to eval
    for i, e in enumerate(list(exprs)):
        exprs[i] = eval(e)
    for k, v in list(expected.items()):
        expected[eval(k)] = eval(v)

    aliases = collect(exprs)

    assert len(aliases) > 0

    for k, v in aliases.items():
        assert ((len(v.aliased) == 1 and expected[k] is None)
                or v.anti_stencil == expected[k])
Example #5
def test_cse(exprs, expected):
    """Test common subexpressions elimination."""
    grid = Grid((3, 3, 3))
    dims = grid.dimensions

    tu = TimeFunction(name="tu", grid=grid, space_order=2)  # noqa
    tv = TimeFunction(name="tv", grid=grid, space_order=2)  # noqa
    tw = TimeFunction(name="tw", grid=grid, space_order=2)  # noqa
    tz = TimeFunction(name="tz", grid=grid, space_order=2)  # noqa
    ti0 = Array(name='ti0', shape=(3, 5, 7),
                dimensions=dims).indexify()  # noqa
    ti1 = Array(name='ti1', shape=(3, 5, 7),
                dimensions=dims).indexify()  # noqa
    t0 = Scalar(name='t0')  # noqa
    t1 = Scalar(name='t1')  # noqa
    t2 = Scalar(name='t2')  # noqa

    # List comprehension would need explicit locals/globals mappings to eval
    for i, e in enumerate(list(exprs)):
        exprs[i] = DummyEq(indexify(eval(e).evaluate))

    counter = generator()
    make = lambda: Scalar(name='r%d' % counter()).indexify()
    processed = _cse(exprs, make)
    assert len(processed) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(processed, expected))
Example #6
def test_estimate_cost(expr, expected, estimate):
    # Note: integer arithmetic isn't counted
    grid = Grid(shape=(4, 4))
    x, y = grid.dimensions  # noqa

    t0 = Scalar(name='t0')  # noqa
    t1 = Scalar(name='t1')  # noqa
    t2 = Scalar(name='t2')  # noqa
    fa = Function(name='fa', grid=grid, shape=(4, ), dimensions=(x, ))  # noqa
    fb = Function(name='fb', grid=grid, shape=(4, ), dimensions=(x, ))  # noqa
    fc = Function(name='fc', grid=grid)  # noqa

    assert estimate_cost(eval(expr), estimate) == expected
Example #7
    def test_dimension(self):
        """Test that different Dimensions have different hash value."""
        d0 = Dimension(name='d')
        s0 = Scalar(name='s')
        d1 = Dimension(name='d', spacing=s0)
        assert hash(d0) != hash(d1)

        s1 = Scalar(name='s', dtype=np.int32)
        d2 = Dimension(name='d', spacing=s1)
        assert hash(d1) != hash(d2)

        d3 = Dimension(name='d', spacing=Constant(name='s1'))
        assert hash(d3) != hash(d0)
        assert hash(d3) != hash(d1)
Example #8
    def test_scalar(self):
        """
        Test that Scalars with same name but different attributes do not alias to
        the same Scalar. Conversely, if the name and the attributes are the same,
        they must alias to the same Scalar.
        """
        s0 = Scalar(name='s0')
        s1 = Scalar(name='s0')
        assert s0 is s1

        s2 = Scalar(name='s0', dtype=np.int32)
        assert s2 is not s1

        s3 = Scalar(name='s0', is_const=True)
        assert s3 is not s1
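
The aliasing contract tested above can also be checked directly; a minimal sketch, assuming the same `devito.types.Scalar` import as elsewhere in these examples:

import numpy as np
from devito.types import Scalar  # assumed import path

a = Scalar(name='x')
b = Scalar(name='x')                  # same name and attributes -> the cached object
c = Scalar(name='x', dtype=np.int32)  # same name, different dtype -> a distinct Scalar
assert a is b
assert c is not a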
Example #9
    def extract(cls, n, context, min_cost, cluster, sregistry):
        make = lambda: Scalar(name=sregistry.make_name(), dtype=cluster.dtype
                              ).indexify()

        exclude = {
            i.source.indexed
            for i in cluster.scope.d_flow.independent()
        }
        rule0 = lambda e: not e.free_symbols & exclude
        rule1 = make_is_time_invariant(context)
        rule2 = lambda e: estimate_cost(e, True) >= min_cost
        rule = lambda e: rule0(e) and rule1(e) and rule2(e)

        extracted = []
        mapper = OrderedDict()
        for e in cluster.exprs:
            for i in search(e, rule, 'all', 'dfs_first_hit'):
                if i not in mapper:
                    symbol = make()
                    mapper[i] = symbol
                    extracted.append(e.func(symbol, i))

        processed = [uxreplace(e, mapper) for e in cluster.exprs]

        return extracted + processed, extracted
Example #10
def test_common_subexprs_elimination(tu, tv, tw, ti0, ti1, t0, t1, exprs,
                                     expected):
    make = lambda i: Scalar(name='r%d' % i).indexify()
    processed = common_subexprs_elimination(
        EVAL(exprs, tu, tv, tw, ti0, ti1, t0, t1), make)
    assert len(processed) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(processed, expected))
Example #11
    def visit_Iteration(self, o, subs, offsets=defaultdict(set)):
        nodes, subs = self.visit(o.children, subs, offsets=offsets)
        if o.dim.is_Stepping:
            # For a SteppingDimension insert the explicit definition
            # of buffered variables, e.g. t+1 => t1
            init = []
            for i, off in enumerate(filter_ordered(offsets[o.dim])):
                vname = Scalar(name="%s%d" % (o.dim.name, i), dtype=np.int32)
                value = (o.dim.parent + off) % o.dim.modulo
                init.append(UnboundedIndex(vname, value, value))
                subs[o.dim + off] = LoweredDimension(vname.name, o.dim, off)
            # Always lower to symbol
            subs[o.dim.parent] = Scalar(name=o.dim.parent.name, dtype=np.int32)
            return o._rebuild(index=o.dim.parent.name, uindices=init), subs
        else:
            return o._rebuild(*nodes), subs
Example #12
    def test_iterations_ompized(self, exprs, expected):
        grid = Grid(shape=(4, 4))
        x, y = grid.dimensions  # noqa

        fa = Function(name='fa', grid=grid, dimensions=(x, ),
                      shape=(4, ))  # noqa
        fb = Function(name='fb', grid=grid, dimensions=(x, ),
                      shape=(4, ))  # noqa
        fc = Function(name='fc', grid=grid)  # noqa
        fd = Function(name='fd', grid=grid)  # noqa
        t0 = Scalar(name='t0')  # noqa

        eqns = []
        for e in exprs:
            eqns.append(eval(e))

        op = Operator(eqns, dle='openmp')

        iterations = FindNodes(Iteration).visit(op)
        assert len(iterations) == len(expected)

        # Check for presence of pragma omp
        for i, j in zip(iterations, expected):
            pragmas = i.pragmas
            if j is True:
                assert len(pragmas) == 1
                pragma = pragmas[0]
                assert 'omp for' in pragma.value
            else:
                for k in pragmas:
                    assert 'omp for' not in k.value
Example #13
    def test_conditional_dimension(self):
        """
        Test that ConditionalDimensions with same name but different attributes do not
        alias to the same ConditionalDimension. Conversely, if the name and the attributes
        are the same, they must alias to the same ConditionalDimension.
        """
        i = Dimension(name='i')
        ci0 = ConditionalDimension(name='ci', parent=i, factor=4)
        ci1 = ConditionalDimension(name='ci', parent=i, factor=4)
        assert ci0 is ci1

        ci2 = ConditionalDimension(name='ci', parent=i, factor=8)
        assert ci2 is not ci1

        ci3 = ConditionalDimension(name='ci',
                                   parent=i,
                                   factor=4,
                                   indirect=True)
        assert ci3 is not ci1

        s = Scalar(name='s')
        ci4 = ConditionalDimension(name='ci',
                                   parent=i,
                                   factor=4,
                                   condition=s > 3)
        assert ci4 is not ci1
        ci5 = ConditionalDimension(name='ci',
                                   parent=i,
                                   factor=4,
                                   condition=s > 3)
        assert ci5 is ci4
Example #14
    def v_literal(self, x, y):
        vx = Vector(x)
        vxy = Vector(x, y)
        vx1y = Vector(x + 1, y)
        s = Scalar(name='s', nonnegative=True)
        vs3 = Vector(s + 3, smart=True)
        return vx, vxy, vx1y, vs3
Example #15
def cse(cluster, template, *args):
    """
    Common sub-expressions elimination (CSE).
    """
    make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
    processed = _cse(cluster.exprs, make)

    return cluster.rebuild(processed)
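
The `template` argument above is a callable producing fresh temporary names. A minimal sketch of the pattern with a hypothetical name generator (the `r0, r1, ...` naming convention matches the tests in these examples):

import numpy as np
from devito.types import Scalar  # assumed import path

names = ('r%d' % i for i in range(1000))  # hypothetical source of fresh names
template = lambda: next(names)
make = lambda: Scalar(name=template(), dtype=np.float32).indexify()
r0 = make()  # a fresh temporary for the CSE pass to assign a subexpression to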
Example #16
def test_yreplace_time_invariants(exprs, expected):
    grid = Grid((3, 3, 3))
    dims = grid.dimensions
    tu = TimeFunction(name="tu", grid=grid, space_order=4).indexify()
    tv = TimeFunction(name="tv", grid=grid, space_order=4).indexify()
    tw = TimeFunction(name="tw", grid=grid, space_order=4).indexify()
    ti0 = Array(name='ti0', shape=(3, 5, 7), dimensions=dims).indexify()
    ti1 = Array(name='ti1', shape=(3, 5, 7), dimensions=dims).indexify()
    t0 = Scalar(name='t0').indexify()
    t1 = Scalar(name='t1').indexify()
    exprs = EVAL(exprs, tu, tv, tw, ti0, ti1, t0, t1)
    counter = generator()
    make = lambda: Scalar(name='r%d' % counter()).indexify()
    processed, found = yreplace(exprs, make, make_is_time_invariant(exprs),
                                lambda i: estimate_cost(i) > 0)
    assert len(found) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(found, expected))
Example #17
    def extract(cls, n, context, min_cost, max_alias, cluster, sregistry):
        make = lambda: Scalar(name=sregistry.make_name(), dtype=cluster.dtype
                              ).indexify()

        # The `depth` determines "how big" the extracted sum-of-products will be.
        # We observe that in typical FD codes:
        #   add(mul, mul, ...) -> stems from first order derivative
        #   add(mul(add(mul, mul, ...), ...), ...) -> stems from second order derivative
        # To search the muls in the former case, we need `depth=0`; to search the outer
        # muls in the latter case, we need `depth=2`
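        # Illustrative only (hypothetical expressions):
        #   c0*u[x] + c1*u[x+1] + ...         -> depth=0 reaches the inner muls
        #   c0*(a*u[x] + b*u[x+1]) + ...      -> depth=2 reaches the outer muls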
        depth = n

        exclude = {
            i.source.indexed
            for i in cluster.scope.d_flow.independent()
        }
        rule0 = lambda e: not e.free_symbols & exclude
        rule1 = lambda e: e.is_Mul and q_terminalop(e, depth)
        rule = lambda e: rule0(e) and rule1(e)

        extracted = OrderedDict()
        mapper = {}
        for e in cluster.exprs:
            for i in search(e, rule, 'all', 'bfs_first_hit'):
                if i in mapper:
                    continue

                key = lambda a: a.is_Add
                terms, others = split(list(i.args), key)

                if max_alias:
                    # Treat `e` as an FD expression and pull out the derivative
                    # coefficient from `i`
                    # Note: typically derivative coefficients are numbers, but
                    # sometimes they could be provided in symbolic form through an
                    # arbitrary Function.  In the latter case, we rely on the
                    # heuristic that such Function's basically never span the whole
                    # grid, but rather a single Grid dimension (e.g., `c[z, n]` for a
                    # stencil of diameter `n` along `z`)
                    if e.grid is not None and terms:
                        key = partial(maybe_coeff_key, e.grid)
                        others, more_terms = split(others, key)
                        terms.extend(more_terms)

                if terms:
                    k = i.func(*terms)
                    try:
                        symbol, _ = extracted[k]
                    except KeyError:
                        symbol, _ = extracted.setdefault(k, (make(), e))
                    mapper[i] = i.func(symbol, *others)

        if mapper:
            extracted = [e.func(v, k) for k, (v, e) in extracted.items()]
            processed = [uxreplace(e, mapper) for e in cluster.exprs]
            return extracted + processed, extracted
        else:
            return cluster.exprs, []
Example #18
    def test_clear_cache_with_alive_symbols(self,
                                            operate_on_empty_cache,
                                            nx=1000,
                                            ny=1000):
        """
        Test that `clear_cache` doesn't affect caching if an object is still alive.
        """
        grid = Grid(shape=(nx, ny), dtype=np.float64)

        f0 = Function(name='f', grid=grid, space_order=2)
        f1 = Function(name='f', grid=grid, space_order=2)

        # Obviously:
        assert f0 is not f1

        # And clearly, both still alive after a `clear_cache`
        clear_cache()
        assert f0 is not f1
        assert f0.grid.dimensions[0] is grid.dimensions[0]

        # Now we try with symbols
        s0 = Scalar(name='s')
        s1 = Scalar(name='s')

        # Clearly:
        assert s1 is s0

        clear_cache()
        s2 = Scalar(name='s')

        # s2 must still be s1/s0, even after a clear_cache, as s0/s1 are both alive!
        assert s2 is s1

        del s0
        del s1
        s3 = Scalar(name='s')

        # And obviously, still:
        assert s3 is s2

        cache_size = len(_SymbolCache)
        del s2
        del s3
        clear_cache()
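        # `s2` and `s3` aliased the same cached object, so exactly one entry is freed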
        assert len(_SymbolCache) == cache_size - 1
Example #19
    def _eliminate_intra_stencil_redundancies(self, cluster, template, **kwargs):
        """
        Perform common subexpression elimination, bypassing the tensor expressions
        extracted in previous passes.
        """
        make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
        processed = common_subexprs_elimination(cluster.exprs, make)

        return cluster.rebuild(processed)
Example #20
def test_xreplace_constrained_time_varying(tu, tv, tw, ti0, ti1, t0, t1, exprs,
                                           expected):
    exprs = EVAL(exprs, tu, tv, tw, ti0, ti1, t0, t1)
    make = lambda i: Scalar(name='r%d' % i).indexify()
    processed, found = xreplace_constrained(
        exprs, make, iq_timevarying(TemporariesGraph(exprs)),
        lambda i: estimate_cost(i) > 0)
    assert len(found) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(found, expected))
Example #21
def test_yreplace_time_invariants(tu, tv, tw, ti0, ti1, t0, t1, exprs, expected):
    exprs = EVAL(exprs, tu, tv, tw, ti0, ti1, t0, t1)
    counter = generator()
    make = lambda: Scalar(name='r%d' % counter()).indexify()
    processed, found = yreplace(exprs, make,
                                make_is_time_invariant(exprs),
                                lambda i: estimate_cost(i) > 0)
    assert len(found) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(found, expected))
Example #22
    def test_index_mode_detection(self, indexed, expected):
        """
        Test detection of IterationInstance access modes (AFFINE vs IRREGULAR).

        Proper detection of access mode is a prerequisite to any sort of
        data dependence analysis.
        """
        grid = Grid(shape=(4, 4, 4))
        x, y, z = grid.dimensions  # noqa

        sx = SubDimension.middle('sx', x, 1, 1)  # noqa

        u = Function(name='u', grid=grid)  # noqa
        c = Constant(name='c')  # noqa
        sc = Scalar(name='sc', is_const=True)  # noqa
        s = Scalar(name='s')  # noqa

        ii = IterationInstance(eval(indexed))
        assert ii.index_mode == expected
Example #23
    def _extract_time_invariants(self, cluster, template, **kwargs):
        """
        Extract time-invariant subexpressions, and assign them to temporaries.
        """
        make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
        rule = make_is_time_invariant(cluster.exprs)
        costmodel = lambda e: estimate_cost(e, True) >= self.MIN_COST_ALIAS_INV
        processed, found = yreplace(cluster.exprs, make, rule, costmodel, eager=True)

        return cluster.rebuild(processed)
Example #24
def test_internal_symbols():
    s = dSymbol(name='s', dtype=np.float32)
    pkl_s = pickle.dumps(s)
    new_s = pickle.loads(pkl_s)
    assert new_s.name == s.name
    assert new_s.dtype is np.float32

    s = Scalar(name='s', dtype=np.int32, is_const=True)
    pkl_s = pickle.dumps(s)
    new_s = pickle.loads(pkl_s)
    assert new_s.name == s.name
    assert new_s.dtype is np.int32
    assert new_s.is_const is True

    s = Scalar(name='s', nonnegative=True)
    pkl_s = pickle.dumps(s)
    new_s = pickle.loads(pkl_s)
    assert new_s.name == s.name
    assert new_s.assumptions0['nonnegative'] is True
Example #25
    def _extract_sum_of_products(self, cluster, template, **kwargs):
        """
        Extract sub-expressions in sum-of-product form, and assign them to temporaries.
        """
        make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
        rule = q_sum_of_product
        costmodel = lambda e: not (q_leaf(e) or q_terminalop(e))
        processed, _ = yreplace(cluster.exprs, make, rule, costmodel)

        return cluster.rebuild(processed)
Example #26
def extract(cluster, rule1, model, template):
    make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()

    # Rule out symbols inducing Dimension-independent data dependences
    exclude = {i.source.indexed for i in cluster.scope.d_flow.independent()}
    rule0 = lambda e: not e.free_symbols & exclude

    # Composite extraction rule -- correctness(0) + logic(1)
    rule = lambda e: rule0(e) and rule1(e)

    return yreplace(cluster.exprs, make, rule, model, eager=True)
Example #27
    def test_sub_dimension(self):
        """Test that different SubDimensions have different hash value."""
        d0 = Dimension(name='d')
        d1 = Dimension(name='d', spacing=Scalar(name='s'))

        di0 = SubDimension.middle('di', d0, 1, 1)
        di1 = SubDimension.middle('di', d1, 1, 1)
        assert hash(di0) != hash(d0)
        assert hash(di0) != hash(di1)

        dl0 = SubDimension.left('dl', d0, 2)
        assert hash(dl0) != hash(di0)
Example #28
def test_conditional_dimension():
    d = Dimension(name='d')
    s = Scalar(name='s')
    cd = ConditionalDimension(name='ci', parent=d, factor=4, condition=s > 3)

    pkl_cd = pickle.dumps(cd)
    new_cd = pickle.loads(pkl_cd)

    assert cd.name == new_cd.name
    assert cd.parent == new_cd.parent
    assert cd.factor == new_cd.factor
    assert cd.condition == new_cd.condition
Example #29
def test_dimension_cache():
    """
    Test that :class:`Dimension`s with same name but different attributes do not
    alias to the same Dimension.
    """
    d0 = Dimension(name='d')
    d1 = Dimension(name='d')
    assert d0 is d1

    s0 = Scalar(name='s0')
    s1 = Scalar(name='s1')

    d2 = Dimension(name='d', spacing=s0)
    d3 = Dimension(name='d', spacing=s1)
    assert d2 is not d3

    d4 = Dimension(name='d', spacing=s1)
    assert d3 is d4

    d5 = Dimension(name='d', spacing=Constant(name='s1'))
    assert d2 is not d5
Example #30
def test_incr_dimension():
    s = Scalar(name='s')
    d = Dimension(name='d')
    dd = IncrDimension(d, s, 5, 2, name='dd')

    pkl_dd = pickle.dumps(dd)
    new_dd = pickle.loads(pkl_dd)

    assert dd.name == new_dd.name
    assert dd.parent == new_dd.parent
    assert dd.symbolic_min == new_dd.symbolic_min
    assert dd.symbolic_max == new_dd.symbolic_max
    assert dd.step == new_dd.step