Example #1
def dimension_sort(expr):
    """
    Topologically sort the Dimensions in ``expr``, based on the order in which they
    appear within Indexeds.
    """
    def handle_indexed(indexed):
        relation = []
        for i in indexed.indices:
            try:
                maybe_dim = split_affine(i).var
                if isinstance(maybe_dim, Dimension):
                    relation.append(maybe_dim)
            except ValueError:
                # Maybe there are some nested Indexeds (e.g., the situation is A[B[i]])
                nested = flatten(
                    handle_indexed(n) for n in retrieve_indexed(i))
                if nested:
                    relation.extend(nested)
                else:
                    # Fallback: Just insert all the Dimensions we find, regardless of
                    # what the user is attempting to do
                    relation.extend([
                        d for d in filter_sorted(i.free_symbols)
                        if isinstance(d, Dimension)
                    ])
        return tuple(relation)

    relations = {handle_indexed(i) for i in retrieve_indexed(expr)}

    # Add in any implicit dimension (typical of scalar temporaries, or Step)
    relations.add(expr.implicit_dims)

    # Add in leftover free dimensions (not an Indexed's index)
    extra = set([i for i in expr.free_symbols if isinstance(i, Dimension)])

    # Add in pure data dimensions (e.g., those accessed only via explicit values,
    # such as A[3])
    indexeds = retrieve_indexed(expr, deep=True)
    extra.update(set().union(*[set(i.function.dimensions) for i in indexeds]))

    # Enforce determinism
    extra = filter_sorted(extra, key=attrgetter('name'))

    # Add in implicit relations for parent dimensions
    # -----------------------------------------------
    # 1) Note that (d.parent, d) is what we want, while (d, d.parent) would be
    # wrong; for example, in `((t, time), (t, x, y), (x, y))`, `x` could now
    # precede `time`, while `t`, and therefore `time`, *must* appear before `x`,
    # as indicated by the second relation
    implicit_relations = {(d.parent, d) for d in extra if d.is_Derived}
    # 2) To handle cases such as `((time, xi), (x,))`, where `xi` is a SubDimension
    # of `x`, besides `(x, xi)`, we also have to add `(time, x)` so that we
    # obtain the desired ordering `(time, x, xi)`. W/o `(time, x)`, the ordering
    # `(x, time, xi)` might be returned instead, which would be nonsense
    implicit_relations.update({tuple(d.root for d in i) for i in relations})

    ordering = PartialOrderTuple(extra,
                                 relations=(relations | implicit_relations))

    return ordering
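A minimal usage sketch of the kind of input the function above consumes, assuming a recent Devito build (`retrieve_indexed` has moved between modules across versions, so the import path may need adjusting):

from devito import Grid, TimeFunction
from devito.symbolics import retrieve_indexed  # path may differ in older versions

grid = Grid(shape=(4, 4))
u = TimeFunction(name='u', grid=grid)

# Indexification turns `u` into an Indexed access such as u[t, x, y];
# the `indices` of each Indexed induce the relations used by the sort
expr = u.indexify() + 1
for i in retrieve_indexed(expr):
    print(i, i.indices)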
Example #2
def dimension_sort(expr):
    """
    Topologically sort the :class:`Dimension`s in ``expr``, based on the order
    in which they appear within :class:`Indexed`s.
    """

    def handle_indexed(indexed):
        relation = []
        for i in indexed.indices:
            try:
                maybe_dim = split_affine(i).var
                if isinstance(maybe_dim, Dimension):
                    relation.append(maybe_dim)
            except ValueError:
                # Maybe there are some nested Indexeds (e.g., the situation is A[B[i]])
                nested = flatten(handle_indexed(n) for n in retrieve_indexed(i))
                if nested:
                    relation.extend(nested)
                else:
                    # Fallback: Just insert all the Dimensions we find, regardless of
                    # what the user is attempting to do
                    relation.extend([d for d in filter_sorted(i.free_symbols)
                                     if isinstance(d, Dimension)])
        return tuple(relation)

    relations = {handle_indexed(i) for i in retrieve_indexed(expr, mode='all')}

    # Add in any implicit dimension (typical of scalar temporaries, or Step)
    relations.add(expr.implicit_dims)

    # Add in leftover free dimensions (not an Indexed's index)
    extra = set([i for i in expr.free_symbols if isinstance(i, Dimension)])

    # Add in pure data dimensions (e.g., those accessed only via explicit values,
    # such as A[3])
    indexeds = retrieve_indexed(expr, deep=True)
    extra.update(set().union(*[set(i.function.indices) for i in indexeds]))

    # Enforce determinism
    extra = filter_sorted(extra, key=attrgetter('name'))

    # Add in implicit relations for parent dimensions
    # -----------------------------------------------
    # 1) Note that (d.parent, d) is what we want, while (d, d.parent) would be
    # wrong; for example, in `((t, time), (t, x, y), (x, y))`, `x` could now
    # precede `time`, while `t`, and therefore `time`, *must* appear before `x`,
    # as indicated by the second relation
    implicit_relations = {(d.parent, d) for d in extra if d.is_Derived}
    # 2) To handle cases such as `((time, xi), (x,))`, where `xi` is a SubDimension
    # of `x`, besides `(x, xi)`, we also have to add `(time, x)` so that we
    # obtain the desired ordering `(time, x, xi)`. W/o `(time, x)`, the ordering
    # `(x, time, xi)` might be returned instead, which would be nonsense
    implicit_relations.update({tuple(d.root for d in i) for i in relations})

    ordering = PartialOrderTuple(extra, relations=(relations | implicit_relations))

    return ordering
Example #3
def dimension_sort(expr, key=None):
    """
    Topologically sort the :class:`Dimension`s in ``expr``, based on the order
    in which they appear within :class:`Indexed`s.

    :param expr: The :class:`devito.Eq` from which the :class:`Dimension`s are
                 extracted.
    :param key: A callable used as key to enforce a final ordering.
    """

    def handle_indexed(indexed):
        constraint = []
        for i in indexed.indices:
            try:
                maybe_dim = split_affine(i).var
                if isinstance(maybe_dim, Dimension):
                    constraint.append(maybe_dim)
            except ValueError:
                # Maybe there are some nested Indexeds (e.g., the situation is A[B[i]])
                nested = flatten(handle_indexed(n) for n in retrieve_indexed(i))
                if nested:
                    constraint.extend(nested)
                else:
                    # Fallback: Just insert all the Dimensions we find, regardless of
                    # what the user is attempting to do
                    constraint.extend([d for d in filter_sorted(i.free_symbols)
                                       if isinstance(d, Dimension)])
        return constraint

    constraints = [handle_indexed(i) for i in retrieve_indexed(expr, mode='all')]

    ordering = toposort(constraints)

    # Add in leftover free dimensions (not an Indexed's index)
    extra = set([i for i in expr.free_symbols if isinstance(i, Dimension)])

    # Add in pure data dimensions (e.g., those accessed only via explicit values,
    # such as A[3])
    indexeds = retrieve_indexed(expr, deep=True)
    if indexeds:
        extra.update(set.union(*[set(i.function.indices) for i in indexeds]))

    # Enforce determinism
    extra = filter_sorted(extra, key=attrgetter('name'))

    ordering.extend([i for i in extra if i not in ordering])

    # Add in parent dimensions
    for i in list(ordering):
        if i.is_Derived and i.parent not in ordering:
            ordering.insert(ordering.index(i), i.parent)

    return sorted(ordering, key=key)
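For intuition, the constraints above feed a plain topological sort (`toposort` here is a Devito helper); a toy stand-in using the standard library reproduces the idea:

from graphlib import TopologicalSorter  # Python >= 3.9

# Constraints as handle_indexed would produce them, e.g. for accesses
# u[t, x, y] and v[t, y]
constraints = [('t', 'x', 'y'), ('t', 'y')]

ts = TopologicalSorter()
for c in constraints:
    for before, after in zip(c, c[1:]):
        ts.add(after, before)  # `after` must follow `before`
print(list(ts.static_order()))  # ['t', 'x', 'y']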
Example #4
def detect_flow_directions(exprs):
    """Return a mapper from :class:`Dimension`s to iterables of
    :class:`IterationDirection`s representing the theoretically necessary
    directions to evaluate ``exprs`` so that the information "naturally
    flows" from an iteration to another."""
    exprs = as_tuple(exprs)

    writes = [Access(i.lhs, 'W') for i in exprs]
    reads = flatten(retrieve_indexed(i.rhs, mode='all') for i in exprs)
    reads = [Access(i, 'R') for i in reads]

    # Determine indexed-wise direction by looking at the distance vector
    mapper = defaultdict(set)
    for w in writes:
        for r in reads:
            if r.name != w.name:
                continue
            dimensions = [d for d in w.aindices if d is not None]
            for d in dimensions:
                try:
                    if w.distance(r, d) > 0:
                        mapper[d].add(Forward)
                        break
                    elif w.distance(r, d) < 0:
                        mapper[d].add(Backward)
                        break
                    else:
                        mapper[d].add(Any)
                except TypeError:
                    # Nothing can be deduced
                    mapper[d].add(Any)
                    break
            # Remainder
            for d in dimensions[dimensions.index(d) + 1:]:
                mapper[d].add(Any)

    # Add in any encountered Dimension
    mapper.update({
        d: {Any}
        for d in flatten(i.aindices for i in reads + writes)
        if d is not None and d not in mapper
    })

    # Add in stepping dimensions (just in case they haven't been detected yet)
    # note: stepping dimensions may force a direction on the parent
    assert all(v == {Any} or mapper.get(k.parent, v) in [v, {Any}]
               for k, v in mapper.items() if k.is_Stepping)
    mapper.update({
        k.parent: set(v)
        for k, v in mapper.items()
        if k.is_Stepping and mapper.get(k.parent) == {Any}
    })

    # Add in derived dimensions parents
    mapper.update({
        k.parent: set(v)
        for k, v in mapper.items() if k.is_Derived and k.parent not in mapper
    })

    return mapper
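The sign convention is the crux of the loop above: a positive write-to-read distance along a Dimension means a value produced in one iteration is consumed in a later one, hence Forward. A toy illustration in plain Python (no Devito types):

def direction(distance):
    # Sign of the write-to-read distance along a Dimension
    if distance > 0:
        return 'Forward'
    elif distance < 0:
        return 'Backward'
    return 'Any'

# u[t+1, x] = f(u[t, x]) has distance +1 along t, so t flows Forward
print(direction(1), direction(-1), direction(0))  # Forward Backward Any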
Example #5
def detect_accesses(expr):
    """
    Return a mapper ``M : F -> S``, where F are Functions appearing
    in ``expr`` and S are Stencils. ``M[f]`` represents all data accesses
    to ``f`` within ``expr``. Also map ``M[None]`` to all Dimensions used in
    ``expr`` as plain symbols, rather than as array indices.
    """
    # Compute M : F -> S
    mapper = defaultdict(Stencil)
    for e in retrieve_indexed(expr, mode='all', deep=True):
        f = e.function
        for a in e.indices:
            if isinstance(a, Dimension):
                mapper[f][a].update([0])
            d = None
            off = []
            for i in a.args:
                if isinstance(i, Dimension):
                    d = i
                elif i.is_integer:
                    off += [int(i)]
            if d is not None:
                mapper[f][d].update(off or [0])

    # Compute M[None]
    other_dims = [i for i in retrieve_terminals(expr) if isinstance(i, Dimension)]
    other_dims.extend(list(expr.implicit_dims))
    mapper[None] = Stencil([(i, 0) for i in other_dims])

    return mapper
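The inner loop splits each affine index into a Dimension and an integer offset; a self-contained SymPy rendition of that split, with a dict of sets standing in for `Stencil`:

from collections import defaultdict
import sympy

x = sympy.Symbol('x')
u = sympy.IndexedBase('u')
expr = u[x - 1] + u[x] + u[x + 1]

mapper = defaultdict(lambda: defaultdict(set))
for e in expr.atoms(sympy.Indexed):
    for a in e.indices:
        d, off = None, []
        for i in sympy.Add.make_args(a):
            if isinstance(i, sympy.Symbol):  # stands in for isinstance(i, Dimension)
                d = i
            elif i.is_integer:
                off.append(int(i))
        if d is not None:
            mapper[e.base][d].update(off or [0])

print(dict(mapper[u]))  # {x: {0, 1, -1}}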
Example #6
def detect_accesses(expr):
    """
    Return a mapper ``M : F -> S``, where F are Functions appearing
    in ``expr`` and S are Stencils. ``M[f]`` represents all data accesses
    to ``f`` within ``expr``. Also map ``M[None]`` to all Dimensions used in
    ``expr`` as plain symbols, rather than as array indices.
    """
    # Compute M : F -> S
    mapper = defaultdict(Stencil)
    for e in retrieve_indexed(expr, deep=True):
        f = e.function
        for a in e.indices:
            if isinstance(a, Dimension):
                mapper[f][a].update([0])
            d = None
            off = []
            for i in a.args:
                if isinstance(i, Dimension):
                    d = i
                elif i.is_integer:
                    off += [int(i)]
            if d is not None:
                mapper[f][d].update(off or [0])

    # Compute M[None]
    other_dims = [i for i in retrieve_terminals(expr) if isinstance(i, Dimension)]
    other_dims.extend(list(expr.implicit_dims))
    mapper[None] = Stencil([(i, 0) for i in other_dims])

    return mapper
Example #7
def compute_intervals(expr):
    """Return an iterable of :class:`Interval`s representing the data items
    accessed by the :class:`sympy.Eq` ``expr``."""
    # Detect the indexeds' offsets along each dimension
    stencil = Stencil()
    for e in retrieve_indexed(expr, mode='all', deep=True):
        for a in e.indices:
            if isinstance(a, Dimension):
                stencil[a].update([0])
            d = None
            off = [0]
            for i in a.args:
                if isinstance(i, Dimension):
                    d = i
                elif i.is_integer:
                    off += [int(i)]
            if d is not None:
                stencil[d].update(off)

    # Determine intervals and their iterators
    iterators = OrderedDict()
    for i in stencil.dimensions:
        if i.is_NonlinearDerived:
            iterators.setdefault(i.parent, []).append(stencil.entry(i))
        else:
            iterators.setdefault(i, [])
    intervals = []
    for k, v in iterators.items():
        offs = set.union(set(stencil.get(k)), *[i.ofs for i in v])
        intervals.append(Interval(k, min(offs), max(offs)))

    return intervals, iterators
Example #8
    def _lower_exprs(cls, expressions, **kwargs):
        """
        Expression lowering:

            * Form and gather any required implicit expressions;
            * Evaluate derivatives;
            * Flatten vectorial equations;
            * Indexify Functions;
            * Apply substitution rules;
            * Specialize (e.g., index shifting)
        """
        # Add in implicit expressions, e.g., induced by SubDomains
        expressions = cls._add_implicit(expressions)

        # Unfold laziness
        expressions = flatten([i.evaluate for i in expressions])

        # Scalarize tensor expressions
        expressions = [j for i in expressions for j in i._flatten]

        # Indexification
        # E.g., f(x - 2*h_x, y) -> f[xi + 2, yi + 4]  (assuming halo_size=4)
        processed = []
        for expr in expressions:
            if expr.subdomain:
                dimension_map = expr.subdomain.dimension_map
            else:
                dimension_map = {}

            # Handle Functions (typical case)
            mapper = {
                f: f.indexify(lshift=True, subs=dimension_map)
                for f in retrieve_functions(expr)
            }

            # Handle Indexeds (from index notation)
            for i in retrieve_indexed(expr):
                f = i.function

                # Introduce shifting to align with the computational domain
                indices = [(a + o)
                           for a, o in zip(i.indices, f._size_nodomain.left)]

                # Apply substitutions, if necessary
                if dimension_map:
                    indices = [j.xreplace(dimension_map) for j in indices]

                mapper[i] = f.indexed[indices]

            subs = kwargs.get('subs')
            if subs:
                # Include the user-supplied substitutions, and use
                # `xreplace` for constant folding
                processed.append(expr.xreplace({**mapper, **subs}))
            else:
                processed.append(uxreplace(expr, mapper))

        processed = cls._specialize_exprs(processed)

        return processed
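A quick check of the index-shifting arithmetic mentioned in the comment above: with a left halo of 4 points per Dimension (a hypothetical value for `f._size_nodomain.left`), the access `f(x - 2*h_x, y)` indexifies to `(x - 2, y)` and then shifts as claimed:

import sympy

x, y = sympy.symbols('x y')
indices = [x - 2, y]  # indexified form of f(x - 2*h_x, y)
left = (4, 4)         # hypothetical f._size_nodomain.left
print([a + o for a, o in zip(indices, left)])  # [x + 2, y + 4]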
Example #9
 def make_grid_gets(expr):
     mapper = {}
     indexeds = retrieve_indexed(expr)
     data_carriers = [i for i in indexeds if i.base.function.from_YASK]
     for i in data_carriers:
         args = [ListInitializer([INT(make_grid_gets(j)) for j in i.indices])]
         mapper[i] = make_sharedptr_funcall(namespace['code-grid-get'], args,
                                            yk_grid_objs[i.base.function.name])
     return expr.xreplace(mapper)
Example #10
def lower_exprs(expressions, **kwargs):
    """
    Lowering an expression consists of the following passes:

        * Indexify functions;
        * Align Indexeds with the computational domain;
        * Apply user-provided substitution;

    Examples
    --------
    f(x - 2*h_x, y) -> f[xi + 2, yi + 4]  (assuming halo_size=4)
    """

    processed = []
    for expr in as_tuple(expressions):
        try:
            dimension_map = expr.subdomain.dimension_map
        except AttributeError:
            # Some Relationals may be pure SymPy objects, thus lacking the subdomain
            dimension_map = {}

        # Handle Functions (typical case)
        mapper = {
            f: f.indexify(lshift=True, subs=dimension_map)
            for f in retrieve_functions(expr)
        }

        # Handle Indexeds (from index notation)
        for i in retrieve_indexed(expr):
            f = i.function

            # Introduce shifting to align with the computational domain
            indices = [(lower_exprs(a) + o)
                       for a, o in zip(i.indices, f._size_nodomain.left)]

            # Apply substitutions, if necessary
            if dimension_map:
                indices = [j.xreplace(dimension_map) for j in indices]

            mapper[i] = f.indexed[indices]

        subs = kwargs.get('subs')
        # Add dimensions map to the mapper in case dimensions are used
        # as an expression, i.e. Eq(u, x, subdomain=xleft)
        mapper.update(dimension_map)
        if subs:
            # Include the user-supplied substitutions, and use
            # `xreplace` for constant folding
            processed.append(expr.xreplace({**mapper, **subs}))
        else:
            processed.append(uxreplace(expr, mapper))

    if isinstance(expressions, Iterable):
        return processed
    else:
        assert len(processed) == 1
        return processed.pop()
Example #11
 def make_grid_gets(expr):
     mapper = {}
     indexeds = retrieve_indexed(expr)
     data_carriers = [i for i in indexeds if i.base.function.from_YASK]
     for i in data_carriers:
         name = namespace['code-grid-name'](i.base.function.name)
         args = [ListInitializer([INT(make_grid_gets(j)) for j in i.indices])]
         mapper[i] = make_sharedptr_funcall(namespace['code-grid-get'], args, name)
     return expr.xreplace(mapper)
Example #12
 def test_fd_indices(self, so):
     """
     Test that shifted derivatives have an Integer offset after indexification.
     """
     grid = Grid((10, ))
     x = grid.dimensions[0]
     x0 = x + .5 * x.spacing
     u = Function(name="u", grid=grid, space_order=so)
     dx = indexify(u.dx(x0=x0).evaluate)
     for f in retrieve_indexed(dx):
         assert len(f.indices[0].atoms(Float)) == 0
Example #13
    def interpolate(self,
                    expr,
                    offset=0,
                    u_t=None,
                    p_t=None,
                    cummulative=False):
        """Creates a :class:`sympy.Eq` equation for the interpolation
        of an expression onto this sparse point collection.

        :param expr: The expression to interpolate.
        :param offset: Additional offset from the boundary for
                       absorbing boundary conditions.
        :param u_t: (Optional) time index to use for indexing into
                    field data in `expr`.
        :param p_t: (Optional) time index to use for indexing into
                    the sparse point data.
        :param cummulative: (Optional) If True, perform an increment rather
                            than an assignment. Defaults to False.
        """
        expr = indexify(expr)

        # Apply optional time symbol substitutions to expr
        if u_t is not None:
            time = self.grid.time_dim
            t = self.grid.stepping_dim
            expr = expr.subs(t, u_t).subs(time, u_t)

        variables = list(retrieve_indexed(expr))
        # List of indirection indices for all adjacent grid points
        index_matrix = [
            tuple(idx + ii + offset
                  for ii, idx in zip(inc, self.coordinate_indices))
            for inc in self.point_increments
        ]
        # Generate index substitutions for all grid variables
        idx_subs = []
        for i, idx in enumerate(index_matrix):
            v_subs = [(v, v.base[v.indices[:-self.grid.dim] + idx])
                      for v in variables]
            idx_subs += [OrderedDict(v_subs)]
        # Substitute coordinate base symbols into the coefficients
        subs = OrderedDict(zip(self.point_symbols, self.coordinate_bases))
        rhs = sum([
            expr.subs(vsub) * b.subs(subs)
            for b, vsub in zip(self.coefficients, idx_subs)
        ])
        # Apply optional time symbol substitutions to lhs of assignment
        lhs = self if p_t is None else self.subs(self.indices[0], p_t)

        rhs = rhs + lhs if cummulative is True else rhs

        return [Eq(lhs, rhs)]
Example #14
 def is_index(self, key):
     """
     Return True if ``key`` is used as an array index in an expression of the
     FlowGraph, False otherwise.
     """
     if key not in self:
         return False
     match = key.base.label if self[key].is_Tensor else key
     for i in self.extract(key, readby=True):
         for e in retrieve_indexed(i):
             if any(match in idx.free_symbols for idx in e.indices):
                 return True
     return False
Example #15
 def tensors(self):
     """
     Return all occurrences of the tensors in ``self`` keyed by function.
     """
     mapper = {}
     for v in self.values():
         handle = retrieve_indexed(v)
         for i in handle:
             found = mapper.setdefault(i.function, [])
             if i not in found:
                 # Not using sets to preserve order
                 found.append(i)
     return mapper
Example #16
def align_accesses(expr, key=lambda i: False):
    """
    ``expr -> expr'``, with ``expr'`` semantically equivalent to ``expr``, but
    with data accesses aligned to the domain if ``key(function)`` gives True.
    """
    mapper = {}
    for indexed in retrieve_indexed(expr):
        f = indexed.function
        if not key(f):
            continue
        subs = {i: i + j for i, j in zip(indexed.indices, f._size_nodomain.left)}
        mapper[indexed] = indexed.xreplace(subs)
    return expr.xreplace(mapper)
Example #17
 def tensors(self):
     """
     Return all occurrences of the tensors in ``self`` keyed by function.
     """
     mapper = {}
     for v in self.values():
         handle = retrieve_indexed(v)
         for i in handle:
             found = mapper.setdefault(i.base.function, [])
             if i not in found:
                 # Not using sets to preserve order
                 found.append(i)
     return mapper
Example #18
 def is_index(self, key):
     """
     Return True if ``key`` is used as an array index in an expression of the
     FlowGraph, False otherwise.
     """
     if key not in self:
         return False
     match = key.base.label if self[key].is_tensor else key
     for i in self.extract(key, readby=True):
         for e in retrieve_indexed(i):
             if any(match in idx.free_symbols for idx in e.indices):
                 return True
     return False
Example #19
def align_accesses(expr, key=lambda i: False):
    """
    ``expr -> expr'``, with ``expr'`` semantically equivalent to ``expr``, but
    with data accesses aligned to the computational domain if ``key(function)``
    gives True.
    """
    mapper = {}
    for indexed in retrieve_indexed(expr):
        f = indexed.function
        if not key(f):
            continue
        subs = {i: i + j for i, j in zip(indexed.indices, f._size_halo.left)}
        mapper[indexed] = indexed.xreplace(subs)
    return expr.xreplace(mapper)
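The substitution built above is a plain per-Dimension shift; a SymPy-only sketch with a hypothetical left halo of 2 points per side:

import sympy

x, y = sympy.symbols('x y')
f = sympy.IndexedBase('f')
indexed = f[x, y + 1]

left_halo = (2, 2)  # stands in for f._size_halo.left
subs = {i: i + j for i, j in zip(indexed.indices, left_halo)}
print(indexed.xreplace(subs))  # f[x + 2, y + 3]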
Example #20
    def extract(cls, expr):
        """
        Compute the stencil of ``expr``.
        """
        indexeds = retrieve_indexed(expr, mode='all')
        indexeds += flatten([retrieve_indexed(i) for i in e.indices]
                            for e in indexeds)

        stencil = Stencil()
        for e in indexeds:
            for a in e.indices:
                if isinstance(a, Dimension):
                    stencil[a].update([0])
                d = None
                off = [0]
                for i in a.args:
                    if isinstance(i, Dimension):
                        d = i
                    elif i.is_integer:
                        off += [int(i)]
                if d is not None:
                    stencil[d].update(off)

        return stencil
Example #21
def test_canonical_ordering(expr, expected):
    """
    Test that the `expr.args` are stored in canonical ordering.
    """
    grid = Grid(shape=(10, ))
    x, = grid.dimensions  # noqa

    f = Function(name='f', grid=grid)  # noqa
    g = Function(name='g', grid=grid)  # noqa

    expr = eval(expr)
    for n, i in enumerate(list(expected)):
        expected[n] = eval(i)

    assert retrieve_indexed(expr) == expected
Example #22
    def extract(cls, expr):
        """
        Compute the stencil of ``expr``.
        """
        assert expr.is_Equality

        # Collect all indexed objects appearing in /expr/
        terminals = retrieve_terminals(expr, mode='all')
        indexeds = [i for i in terminals if i.is_Indexed]
        indexeds += flatten([retrieve_indexed(i) for i in e.indices]
                            for e in indexeds)

        # Enforce deterministic dimension ordering...
        dims = OrderedDict()
        for e in terminals:
            if isinstance(e, Dimension):
                dims[(e, )] = e
            elif e.is_Indexed:
                d = []
                for a in e.indices:
                    found = [
                        i for i in a.free_symbols if isinstance(i, Dimension)
                    ]
                    d.extend([i for i in found if i not in d])
                dims[tuple(d)] = e
        # ... giving higher priority to TimeFunction objects; time always goes first
        dims = sorted(
            list(dims),
            key=lambda i: not (isinstance(dims[i], Dimension) or dims[i].base.
                               function.is_TimeFunction))
        stencil = Stencil([(i, set()) for i in partial_order(dims)])

        # Determine the points accessed along each dimension
        for e in indexeds:
            for a in e.indices:
                if isinstance(a, Dimension):
                    stencil[a].update([0])
                d = None
                off = [0]
                for i in a.args:
                    if isinstance(i, Dimension):
                        d = i
                    elif i.is_integer:
                        off += [i]
                if d is not None:
                    stencil[d].update(off)

        return stencil
Example #23
def analyze(expr):
    """
    Determine whether ``expr`` is a potential Alias and collect relevant metadata.

    A necessary condition is that all Indexeds in ``expr`` are affine in the
    access Dimensions so that the access offsets (or "strides") can be derived.
    For example, given the following Indexeds: ::

        A[i, j, k], B[i, j+2, k+3], C[i-1, j+4]

    All of the access functions are affine in ``i, j, k``, and the offsets are: ::

        (0, 0, 0), (0, 2, 3), (-1, 4)
    """
    # No way if writing to a tensor or an increment
    if expr.lhs.is_Indexed or expr.is_Increment:
        return

    indexeds = retrieve_indexed(expr.rhs)
    if not indexeds:
        return

    bases = []
    offsets = []
    for i in indexeds:
        ii = IterationInstance(i)

        # There must not be irregular accesses, otherwise we won't be able to
        # calculate the offsets
        if ii.is_irregular:
            return

        # Since `ii` is regular (and therefore affine), it is guaranteed that `ai`
        # below won't be None
        base = []
        offset = []
        for e, ai in zip(ii, ii.aindices):
            if e.is_Number:
                base.append(e)
            else:
                base.append(ai)
                offset.append((ai, e - ai))
        bases.append(tuple(base))
        offsets.append(LabeledVector(offset))

    return Candidate(expr.rhs, indexeds, bases, offsets)
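The offsets quoted in the docstring can be reproduced with plain SymPy: `as_coeff_Add` peels the constant part off each affine index, much like the `e - ai` computation above:

import sympy

i, j, k = sympy.symbols('i j k')
A, B, C = (sympy.IndexedBase(n) for n in 'ABC')

for indexed in (A[i, j, k], B[i, j + 2, k + 3], C[i - 1, j + 4]):
    offs = tuple(int(e.as_coeff_Add()[0]) for e in indexed.indices)
    print(indexed, offs)
# -> (0, 0, 0), (0, 2, 3), (-1, 4), as claimed in the docstring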
Example #24
    def _extract_nonaffine_indices(self, cluster, template, **kwargs):
        """
        Extract non-affine array indices, and assign them to temporaries.
        """
        make = lambda: Scalar(name=template(), dtype=np.int32).indexify()

        mapper = OrderedDict()
        for e in cluster.exprs:
            for indexed in retrieve_indexed(e):
                for i, d in zip(indexed.indices, indexed.base.function.indices):
                    if not (q_affine(i, d) or i.is_Number):
                        mapper[i] = make()

        processed = [Eq(v, k) for k, v in mapper.items()]
        processed.extend([e.xreplace(mapper) for e in cluster.exprs])

        return cluster.rebuild(processed)
Example #25
    def inject(self, field, expr, offset=0, **kwargs):
        """Symbol for injection of an expression onto a grid

        :param field: The grid field into which we inject.
        :param expr: The expression to inject.
        :param offset: Additional offset from the boundary for
                       absorbing boundary conditions.
        :param u_t: (Optional) time index to use for indexing into `field`.
        :param p_t: (Optional) time index to use for indexing into `expr`.
        """
        u_t = kwargs.get('u_t', None)
        p_t = kwargs.get('p_t', None)

        expr = indexify(expr)
        field = indexify(field)
        variables = list(retrieve_indexed(expr)) + [field]

        # Apply optional time symbol substitutions to field and expr
        if u_t is not None:
            field = field.subs(field.indices[0], u_t)
        if p_t is not None:
            expr = expr.subs(self.indices[0], p_t)

        # List of indirection indices for all adjacent grid points
        index_matrix = [
            tuple(idx + ii + offset
                  for ii, idx in zip(inc, self.coordinate_indices))
            for inc in self.point_increments
        ]

        # Generate index substitutions for all grid variables except
        # the sparse `SparseFunction` types
        idx_subs = []
        for i, idx in enumerate(index_matrix):
            v_subs = [(v, v.base[v.indices[:-self.grid.dim] + idx])
                      for v in variables
                      if not v.base.function.is_SparseFunction]
            idx_subs += [OrderedDict(v_subs)]

        # Substitute coordinate base symbols into the coefficients
        subs = OrderedDict(zip(self.point_symbols, self.coordinate_bases))
        return [
            Inc(field.subs(vsub),
                field.subs(vsub) + expr.subs(subs).subs(vsub) * b.subs(subs))
            for b, vsub in zip(self.coefficients, idx_subs)
        ]
Example #26
 def handle_indexed(indexed):
     relation = []
     for i in indexed.indices:
         try:
             maybe_dim = split_affine(i).var
             if isinstance(maybe_dim, Dimension):
                 relation.append(maybe_dim)
         except ValueError:
             # Maybe there are some nested Indexeds (e.g., the situation is A[B[i]])
             nested = flatten(handle_indexed(n) for n in retrieve_indexed(i))
             if nested:
                 relation.extend(nested)
             else:
                 # Fallback: Just insert all the Dimensions we find, regardless of
                 # what the user is attempting to do
                 relation.extend([d for d in filter_sorted(i.free_symbols)
                                  if isinstance(d, Dimension)])
     return tuple(relation)
Example #27
def create_alias(expr, offsets):
    """
    Create an aliasing expression of ``expr`` by replacing the offsets of each
    indexed object in ``expr`` with the new values in ``offsets``. ``offsets``
    is an ordered sequence of tuples with as many elements as the number of
    indexed objects in ``expr``.
    """
    indexeds = retrieve_indexed(expr, mode='all')
    assert len(indexeds) == len(offsets)

    mapper = {}
    for indexed, ofs in zip(indexeds, offsets):
        base = indexed.base
        dimensions = base.function.dimensions
        assert len(dimensions) == len(ofs)
        mapper[indexed] = indexed.func(base, *[sum(i) for i in zip(dimensions, ofs)])

    return expr.xreplace(mapper)
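Rebuilding an Indexed with fresh offsets via `indexed.func(base, ...)` can be exercised in plain SymPy; here the function's dimensions are assumed to be just `(x,)`:

import sympy

x = sympy.Symbol('x')
a = sympy.IndexedBase('a')
indexed = a[x + 1]

new_ofs = (0,)  # replacement offsets, one per dimension
rebuilt = indexed.func(indexed.base, *[sum(p) for p in zip((x,), new_ofs)])
print(rebuilt)  # a[x]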
Example #28
 def handle_indexed(indexed):
     relation = []
     for i in indexed.indices:
         try:
             maybe_dim = split_affine(i).var
             if isinstance(maybe_dim, Dimension):
                 relation.append(maybe_dim)
         except ValueError:
             # Maybe there are some nested Indexeds (e.g., the situation is A[B[i]])
             nested = flatten(handle_indexed(n) for n in retrieve_indexed(i))
             if nested:
                 relation.extend(nested)
             else:
                 # Fallback: Just insert all the Dimensions we find, regardless of
                 # what the user is attempting to do
                 relation.extend([d for d in filter_sorted(i.free_symbols)
                                  if isinstance(d, Dimension)])
     return tuple(relation)
Example #29
def create_alias(expr, offsets):
    """
    Create an aliasing expression of ``expr`` by replacing the offsets of each
    indexed object in ``expr`` with the new values in ``offsets``. ``offsets``
    is an ordered sequence of tuples with as many elements as the number of
    indexed objects in ``expr``.
    """
    indexeds = retrieve_indexed(expr, mode='all')
    assert len(indexeds) == len(offsets)

    mapper = {}
    for indexed, ofs in zip(indexeds, offsets):
        base = indexed.base
        dimensions = base.function.indices
        assert len(dimensions) == len(ofs)
        mapper[indexed] = indexed.func(base, *[sum(i) for i in zip(dimensions, ofs)])

    return expr.xreplace(mapper)
Example #30
def get_accesses(expr):
    mapper = defaultdict(set)
    for e in retrieve_indexed(expr, mode='all', deep=True):
        f = e.function
        accesses = []
        for a in e.indices:
            dim = a
            offset = None
            for i in a.args:
                if i.is_integer:
                    offset = i
                else:
                    dim = i

            accesses.append((dim, offset or 0))

        mapper[f].add(tuple(accesses))

    return mapper
Example #31
def align_accesses(expr, reverse=False):
    """
    ``expr -> expr'``, with ``expr'`` semantically equivalent to ``expr``, but
    with data accesses aligned to the computational domain. If the optional flag
    ``reverse`` is passed as True (defaults to False), then the reverse operation
    takes place; that is, assuming ``expr`` was aligned to the computational
    domain, ``expr'`` gets aligned back to the first allocated entry.
    """
    shift = lambda i: (-i if reverse is True else i)
    mapper = {}
    for indexed in retrieve_indexed(expr):
        f = indexed.base.function
        if not f.is_TensorFunction:
            continue
        subs = {
            i: i + shift(j.left)
            for i, j in zip(indexed.indices, f._offset_domain)
        }
        mapper[indexed] = indexed.xreplace(subs)
    return expr.xreplace(mapper)
Example #32
    def _extract_nonaffine_indices(self, cluster, template, **kwargs):
        """
        Extract non-affine array indices, and assign them to temporaries.
        """
        make = lambda: Scalar(name=template(), dtype=np.int32).indexify()

        mapper = OrderedDict()
        for e in cluster.exprs:
            # Note: using mode='all' and then checking for presence in the mapper
            # (a few lines below), rather than retrieving unique indexeds only (a set),
            # is the key to deterministic code generation
            for indexed in retrieve_indexed(e, mode='all'):
                for i, d in zip(indexed.indices, indexed.function.indices):
                    if q_affine(i, d) or q_scalar(i):
                        continue
                    elif i not in mapper:
                        mapper[i] = make()

        processed = [Eq(v, k) for k, v in mapper.items()]
        processed.extend([e.xreplace(mapper) for e in cluster.exprs])

        return cluster.rebuild(processed)
Example #33
def detect_accesses(expr):
    """
    Return a mapper ``M : F -> S``, where F are :class:`Function`s appearing
    in ``expr`` and S are :class:`Stencil`s. ``M[f]`` represents all data accesses
    to ``f`` within ``expr``.
    """
    mapper = defaultdict(Stencil)
    for e in retrieve_indexed(expr, mode='all', deep=True):
        f = e.base.function
        for a in e.indices:
            if isinstance(a, Dimension):
                mapper[f][a].update([0])
            d = None
            off = []
            for i in a.args:
                if isinstance(i, Dimension):
                    d = i
                elif i.is_integer:
                    off += [int(i)]
            if d is not None:
                mapper[f][d].update(off or [0])
    return mapper
Example #34
    def __init__(self, exprs):
        """
        Initialize a Scope, which represents a group of :class:`Access` objects
        extracted from some expressions ``exprs``. The expressions are to be
        provided as they appear in program order.
        """
        exprs = as_tuple(exprs)
        assert all(isinstance(i, Eq) for i in exprs)

        self.reads = {}
        self.writes = {}
        for i, e in enumerate(exprs):
            # reads
            for j in retrieve_indexed(e.rhs):
                v = self.reads.setdefault(j.base.function, [])
                mode = 'R' if not q_inc(e) else 'RI'
                v.append(TimedAccess(j, mode, i))
            # write
            if e.lhs.is_Indexed:
                v = self.writes.setdefault(e.lhs.base.function, [])
                mode = 'W' if not q_inc(e) else 'WI'
                v.append(TimedAccess(e.lhs, mode, i))
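A stripped-down rendition of the read/write classification above, with plain SymPy objects in place of `TimedAccess` and `q_inc`:

import sympy

x = sympy.Symbol('x')
a, b = sympy.IndexedBase('a'), sympy.IndexedBase('b')
eq = sympy.Eq(a[x], b[x - 1] + b[x + 1])

reads = sorted(eq.rhs.atoms(sympy.Indexed), key=sympy.default_sort_key)
write = eq.lhs if isinstance(eq.lhs, sympy.Indexed) else None
print(reads, write)  # [b[x - 1], b[x + 1]] a[x]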
Example #35
    def _extract_nonaffine_indices(self, cluster, template, **kwargs):
        """
        Extract non-affine array indices, and assign them to temporaries.
        """
        make = lambda: Scalar(name=template(), dtype=np.int32).indexify()

        mapper = OrderedDict()
        for e in cluster.exprs:
            # Note: using mode='all' and then checking for presence in the mapper
            # (a few lines below), rather than retrieving unique indexeds only (a set),
            # is the key to deterministic code generation
            for indexed in retrieve_indexed(e, mode='all'):
                for i, d in zip(indexed.indices, indexed.function.indices):
                    if q_affine(i, d) or q_scalar(i):
                        continue
                    elif i not in mapper:
                        mapper[i] = make()

        processed = [Eq(v, k) for k, v in mapper.items()]
        processed.extend([e.xreplace(mapper) for e in cluster.exprs])

        return cluster.rebuild(processed)
Example #36
    def test_multiple_steppers(self, expr, exp_uindices, exp_mods):
        """Tests generation of multiple, mixed time stepping indices."""
        grid = Grid(shape=(3, 3, 3))
        x, y, z = grid.dimensions

        u = TimeFunction(name='u', grid=grid)  # noqa
        v = TimeFunction(name='v', grid=grid, time_order=4)  # noqa

        op = Operator(eval(expr), dle='noop')

        iters = FindNodes(Iteration).visit(op)
        time_iter = [i for i in iters if i.dim.is_Time]
        assert len(time_iter) == 1
        time_iter = time_iter[0]

        # Check uindices in Iteration header
        signatures = [i._properties for i in time_iter.uindices]
        assert len(signatures) == len(exp_uindices)
        assert all(i in signatures for i in exp_uindices)

        # Check uindices within each TimeFunction
        exprs = [i.expr for i in FindNodes(Expression).visit(op)]
        assert all(i.indices[i.function._time_position].modulo == exp_mods[i.function.name]
                   for i in flatten(retrieve_indexed(i) for i in exprs))
Example #37
def detect_flow_directions(exprs):
    """
    Return a mapper from Dimensions to Iterables of IterationDirections
    representing the theoretically necessary directions to evaluate ``exprs``
    so that the information "naturally flows" from one iteration to the next.
    """
    exprs = as_tuple(exprs)

    writes = [Access(i.lhs, 'W') for i in exprs]
    reads = flatten(retrieve_indexed(i.rhs, mode='all') for i in exprs)
    reads = [Access(i, 'R') for i in reads]

    # Determine indexed-wise direction by looking at the distance vector
    mapper = defaultdict(set)
    for w in writes:
        for r in reads:
            if r.name != w.name:
                continue
            dimensions = [d for d in w.aindices if d is not None]
            if not dimensions:
                continue
            for d in dimensions:
                distance = None
                for i in d._defines:
                    try:
                        distance = w.distance(r, i, view=i)
                    except TypeError:
                        pass
                try:
                    if distance > 0:
                        mapper[d].add(Forward)
                        break
                    elif distance < 0:
                        mapper[d].add(Backward)
                        break
                    else:
                        mapper[d].add(Any)
                except TypeError:
                    # Nothing can be deduced
                    mapper[d].add(Any)
                    break
            # Remainder
            for d in dimensions[dimensions.index(d) + 1:]:
                mapper[d].add(Any)

    # Add in any encountered Dimension
    mapper.update({d: {Any} for d in flatten(i.aindices for i in reads + writes)
                   if d is not None and d not in mapper})

    # Add in derived-dimensions parents, in case they haven't been detected yet
    mapper.update({k.parent: set(v) for k, v in mapper.items()
                   if k.is_Derived and mapper.get(k.parent, {Any}) == {Any}})

    # Add in:
    # - free Dimensions, ie Dimensions used as symbols rather than as array indices
    # - implicit Dimensions, ie Dimensions that do not explicitly appear in `exprs`
    #   (typically used for inline temporaries)
    for i in exprs:
        candidates = {s for s in i.free_symbols if isinstance(s, Dimension)}
        candidates.update(set(i.implicit_dims))
        mapper.update({d: {Any} for d in candidates if d not in mapper})

    return mapper
Example #38
def detect_flow_directions(exprs):
    """
    Return a mapper from Dimensions to Iterables of IterationDirections
    representing the theoretically necessary directions to evaluate ``exprs``
    so that the information "naturally flows" from one iteration to the next.
    """
    exprs = as_tuple(exprs)

    writes = [Access(i.lhs, 'W') for i in exprs]
    reads = flatten(retrieve_indexed(i.rhs) for i in exprs)
    reads = [Access(i, 'R') for i in reads]

    # Determine indexed-wise direction by looking at the distance vector
    mapper = defaultdict(set)
    for w in writes:
        for r in reads:
            if r.name != w.name:
                continue
            dimensions = [d for d in w.aindices if d is not None]
            if not dimensions:
                continue
            for d in dimensions:
                distance = None
                for i in d._defines:
                    try:
                        distance = w.distance(r, i, view=i)
                    except TypeError:
                        pass
                try:
                    if distance > 0:
                        mapper[d].add(Forward)
                        break
                    elif distance < 0:
                        mapper[d].add(Backward)
                        break
                    else:
                        mapper[d].add(Any)
                except TypeError:
                    # Nothing can be deduced
                    mapper[d].add(Any)
                    break
            # Remainder
            for d in dimensions[dimensions.index(d) + 1:]:
                mapper[d].add(Any)

    # Add in any encountered Dimension
    mapper.update({d: {Any} for d in flatten(i.aindices for i in reads + writes)
                   if d is not None and d not in mapper})

    # Add in derived-dimensions parents, in case they haven't been detected yet
    mapper.update({k.parent: set(v) for k, v in mapper.items()
                   if k.is_Derived and mapper.get(k.parent, {Any}) == {Any}})

    # Add in:
    # - free Dimensions, ie Dimensions used as symbols rather than as array indices
    # - implicit Dimensions, ie Dimensions that do not explicitly appear in `exprs`
    #   (typically used for inline temporaries)
    for i in exprs:
        candidates = {s for s in i.free_symbols if isinstance(s, Dimension)}
        candidates.update(set(i.implicit_dims))
        mapper.update({d: {Any} for d in candidates if d not in mapper})

    return mapper
Example #39
def collect(exprs):
    """
    Determine groups of aliasing expressions in ``exprs``.

    An expression A aliases an expression B if both A and B apply the same
    operations to the same input operands, with the possibility for indexed objects
    to index into locations at a fixed constant offset in each dimension.

    For example: ::

        exprs = (a[i+1] + b[i+1], a[i+1] + b[j+1], a[i] + c[i],
                 a[i+2] - b[i+2], a[i+2] + b[i], a[i-1] + b[i-1])

    The following expressions in ``exprs`` alias to ``a[i] + b[i]``: ::

        a[i+1] + b[i+1] : same operands and operations, distance along i = 1
        a[i-1] + b[i-1] : same operands and operations, distance along i = -1

    Whereas the following do not: ::

        a[i+1] + b[j+1] : because at least one index differs
        a[i] + c[i] : because at least one of the operands differs
        a[i+2] - b[i+2] : because at least one operation differs
        a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    ExprData = namedtuple('ExprData', 'dimensions offsets')

    # Discard expressions:
    # - that surely won't alias to anything
    # - that are non-scalar
    candidates = OrderedDict()
    for expr in exprs:
        if expr.lhs.is_Indexed:
            continue
        indexeds = retrieve_indexed(expr.rhs, mode='all')
        if indexeds and not any(q_indirect(i) for i in indexeds):
            handle = calculate_offsets(indexeds)
            if handle:
                candidates[expr.rhs] = ExprData(*handle)

    aliases = OrderedDict()
    mapper = OrderedDict()
    unseen = list(candidates)
    while unseen:
        # Find aliasing expressions
        handle = unseen.pop(0)
        group = [handle]
        for e in list(unseen):
            if compare(handle, e) and\
                    is_translated(candidates[handle].offsets, candidates[e].offsets):
                group.append(e)
                unseen.remove(e)

        # Try creating a basis for the aliasing expressions' offsets
        offsets = [tuple(candidates[e].offsets) for e in group]
        try:
            COM, distances = calculate_COM(offsets)
        except DSEException:
            # Ignore these potential aliases and move on
            continue

        alias = create_alias(handle, COM)

        # An alias has been created, so I can now update the expression mapper
        mapper.update([(i, group) for i in group])

        # In circumstances in which an expression has repeated coefficients, e.g.
        # ... + 0.025*a[...] + 0.025*b[...],
        # We may have found a common basis (i.e., same COM, same alias) at this point
        v = aliases.setdefault(alias,
                               Alias(alias, candidates[handle].dimensions))
        v.extend(group, distances)

    # Heuristically attempt to relax the aliases' offsets
    # to maximize the likelihood of loop fusion
    grouped = OrderedDict()
    for i in aliases.values():
        grouped.setdefault(i.dimensions, []).append(i)
    for dimensions, group in grouped.items():
        ideal_anti_stencil = Stencil.union(*[i.anti_stencil for i in group])
        for i in group:
            if i.anti_stencil.subtract(ideal_anti_stencil).empty:
                aliases[i.alias] = i.relax(ideal_anti_stencil)

    return mapper, aliases
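The `is_translated` test reduces to checking that all pairwise offset distances coincide; in plain SymPy, for the docstring's aliasing pair:

import sympy

i = sympy.Symbol('i')
a, b = sympy.IndexedBase('a'), sympy.IndexedBase('b')

def offsets(expr):
    idxs = sorted(expr.atoms(sympy.Indexed), key=sympy.default_sort_key)
    return [int(e.indices[0].as_coeff_Add()[0]) for e in idxs]

o1 = offsets(a[i + 1] + b[i + 1])
o2 = offsets(a[i - 1] + b[i - 1])
print({p - q for p, q in zip(o1, o2)})  # {2}: a single distance => translated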
Example #40
def collect(exprs, min_storage, ignore_collected):
    """
    Find groups of aliasing expressions.

    We shall introduce the following (loose) terminology:

        * A ``terminal`` is the leaf of a mathematical operation. Terminals
          can be numbers (n), literals (l), or Indexeds (I).
        * ``R`` is the relaxation operator := ``R(n) = n``, ``R(l) = l``,
          ``R(I) = J``, where ``J`` has the same base as ``I`` but with all
          offsets stripped away. For example, ``R(a[i+2,j-1]) = a[i,j]``.
        * A ``relaxed expression`` is an expression in which all of the
          terminals are relaxed.

    Now we define the concept of aliasing. We say that an expression A
    aliases an expression B if:

        * ``R(A) == R(B)``
        * all pairwise Indexeds in A and B access memory locations at a
          fixed constant distance along each Dimension.

    For example, consider the following expressions:

        * a[i+1] + b[i+1]
        * a[i+1] + b[j+1]
        * a[i] + c[i]
        * a[i+2] - b[i+2]
        * a[i+2] + b[i]
        * a[i-1] + b[i-1]

    Out of the expressions above, the following alias to `a[i] + b[i]`:

        * a[i+1] + b[i+1] : same operands and operations, distance along i: 1
        * a[i-1] + b[i-1] : same operands and operations, distance along i: -1

    Whereas the following do not:

        * a[i+1] + b[j+1] : because at least one index differs
        * a[i] + c[i] : because at least one of the operands differs
        * a[i+2] - b[i+2] : because at least one operation differs
        * a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    # Find the potential aliases
    found = []
    for expr in exprs:
        if expr.lhs.is_Indexed or expr.is_Increment:
            continue

        indexeds = retrieve_indexed(expr.rhs)

        bases = []
        offsets = []
        for i in indexeds:
            ii = IterationInstance(i)
            if ii.is_irregular:
                break

            base = []
            offset = []
            for e, ai in zip(ii, ii.aindices):
                if q_constant(e):
                    base.append(e)
                else:
                    base.append(ai)
                    offset.append((ai, e - ai))
            bases.append(tuple(base))
            offsets.append(LabeledVector(offset))

        if indexeds and len(bases) == len(indexeds):
            found.append(Candidate(expr, indexeds, bases, offsets))

    # Create groups of aliasing expressions
    mapper = OrderedDict()
    unseen = list(found)
    while unseen:
        c = unseen.pop(0)
        group = [c]
        for u in list(unseen):
            # Is the arithmetic structure of `c` and `u` equivalent ?
            if not compare_ops(c.expr, u.expr):
                continue

            # Is `c` translated w.r.t. `u` ?
            if not c.translated(u):
                continue

            group.append(u)
            unseen.remove(u)
        group = Group(group)

        # Apply callback to heuristically discard groups
        if ignore_collected(group):
            continue

        if min_storage:
            k = group.dimensions_translated
        else:
            k = group.dimensions
        mapper.setdefault(k, []).append(group)

    aliases = Aliases()
    for _groups in list(mapper.values()):
        groups = list(_groups)

        while groups:
            # For each Dimension, determine the Minimum Intervals (MI) spanning
            # all of the Groups' diameters
            # Example: x's largest_diameter=2  => [x[-2,0], x[-1,1], x[0,2]]
            # Note: Groups that cannot evaluate their diameter are dropped
            mapper = defaultdict(int)
            for g in list(groups):
                try:
                    mapper.update({d: max(mapper[d], v) for d, v in g.diameter.items()})
                except ValueError:
                    groups.remove(g)
            intervalss = {d: make_rotations_table(d, v) for d, v in mapper.items()}

            # For each Group, find a rotation that is compatible with a given MI
            mapper = {}
            for d, intervals in intervalss.items():
                for interval in list(intervals):
                    found = {g: g.find_rotation_distance(d, interval) for g in groups}
                    if all(distance is not None for distance in found.values()):
                        # `interval` is OK !
                        mapper[interval] = found
                        break

            if len(mapper) == len(intervalss):
                break

            # Try again with fewer groups
            smallest = len(min(groups, key=len))
            groups = [g for g in groups if len(g) > smallest]

        for g in groups:
            c = g.pivot
            distances = defaultdict(int, [(i.dim, v[g]) for i, v in mapper.items()])

            # Create the basis alias
            offsets = [LabeledVector([(l, v[l] + distances[l]) for l in v.labels])
                       for v in c.offsets]
            subs = {i: i.function[[l + v.fromlabel(l, 0) for l in b]]
                    for i, b, v in zip(c.indexeds, c.bases, offsets)}
            alias = uxreplace(c.expr, subs)

            # All aliased expressions
            aliaseds = [i.expr for i in g]

            # Distance of each aliased expression from the basis alias
            distances = []
            for i in g:
                distance = [o.distance(v) for o, v in zip(i.offsets, offsets)]
                distance = [(d, set(v)) for d, v in LabeledVector.transpose(*distance)]
                distances.append(LabeledVector([(d, v.pop()) for d, v in distance]))

            aliases.add(alias, list(mapper), aliaseds, distances)

    return aliases
Example #41
def collect(exprs):
    """
    Determine groups of aliasing expressions in ``exprs``.

    An expression A aliases an expression B if both A and B apply the same
    operations to the same input operands, with the possibility for indexed objects
    to index into locations at a fixed constant offset in each dimension.

    For example: ::

        exprs = (a[i+1] + b[i+1], a[i+1] + b[j+1], a[i] + c[i],
                 a[i+2] - b[i+2], a[i+2] + b[i], a[i-1] + b[i-1])

    The following expressions in ``exprs`` alias to ``a[i] + b[i]``: ::

        a[i+1] + b[i+1] : same operands and operations, distance along i = 1
        a[i-1] + b[i-1] : same operands and operations, distance along i = -1

    Whereas the following do not: ::

        a[i+1] + b[j+1] : because at least one index differs
        a[i] + c[i] : because at least one of the operands differs
        a[i+2] - b[i+2] : because at least one operation differs
        a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    ExprData = namedtuple('ExprData', 'dimensions offsets')

    # Discard expressions:
    # - that surely won't alias to anything
    # - that are non-scalar
    candidates = OrderedDict()
    for expr in exprs:
        if expr.lhs.is_Indexed:
            continue
        indexeds = retrieve_indexed(expr.rhs, mode='all')
        if indexeds and not any(q_indirect(i) for i in indexeds):
            handle = calculate_offsets(indexeds)
            if handle:
                candidates[expr.rhs] = ExprData(*handle)

    aliases = OrderedDict()
    mapper = OrderedDict()
    unseen = list(candidates)
    while unseen:
        # Find aliasing expressions
        handle = unseen.pop(0)
        group = [handle]
        for e in list(unseen):
            if compare(handle, e) and\
                    is_translated(candidates[handle].offsets, candidates[e].offsets):
                group.append(e)
                unseen.remove(e)

        # Try creating a basis for the aliasing expressions' offsets
        offsets = [tuple(candidates[e].offsets) for e in group]
        try:
            COM, distances = calculate_COM(offsets)
        except DSEException:
            # Ignore these potential aliases and move on
            continue

        alias = create_alias(handle, COM)

        # An alias has been created, so I can now update the expression mapper
        mapper.update([(i, group) for i in group])

        # In circumstances in which an expression has repeated coefficients, e.g.
        # ... + 0.025*a[...] + 0.025*b[...],
        # we may have found a common basis (i.e., same COM, same alias) at this point
        v = aliases.setdefault(alias, Alias(alias, candidates[handle].dimensions))
        v.extend(group, distances)

    # Heuristically attempt to relax the aliases' offsets
    # to maximize the likelihood of loop fusion
    groups = OrderedDict()
    for i in aliases.values():
        groups.setdefault(i.dimensions, []).append(i)
    for group in groups.values():
        ideal_anti_stencil = Stencil.union(*[i.anti_stencil for i in group])
        for i in group:
            if i.anti_stencil.subtract(ideal_anti_stencil).empty:
                aliases[i.alias] = i.relax(ideal_anti_stencil)

    return mapper, aliases