Example #1
0
def clusterize(exprs, stencils, atomics=None):
    """
    Derive :class:`Cluster` objects from an iterable of expressions. A stencil
    must be provided for each expression; optionally, a list of atomic dimensions
    (see the description in Cluster.__doc__) may also be supplied.
    """
    assert len(exprs) == len(stencils)

    exprs, stencils = aggregate(exprs, stencils)

    Info = namedtuple('Info', 'trace stencil')

    # Build a dependence graph and associate each node with its Stencil
    mapper = OrderedDict()
    g = TemporariesGraph(exprs)
    for (k, v), j in zip(g.items(), stencils):
        if v.is_tensor:
            trace = g.trace(k)
            trace += tuple(i for i in g.trace(k, readby=True) if i not in trace)
            mapper[k] = Info(trace, j)

    # A cluster's stencil is determined iteratively: first the "local" stencil is
    # computed, then the stencils of all other clusters depending on it are merged
    # in. The stencil information is propagated until a fixed point is reached
    # (i.e., no more updates occur).
    queue = list(mapper)
    while queue:
        target = queue.pop(0)

        info = mapper[target]
        strict_trace = [i.lhs for i in info.trace if i.lhs != target]

        stencil = Stencil(info.stencil.entries)
        for i in strict_trace:
            if i in mapper:
                stencil = stencil.add(mapper[i].stencil)

        mapper[target] = Info(info.trace, stencil)

        if stencil != info.stencil:
            # Something has changed, need to propagate the update
            queue.extend([i for i in strict_trace if i not in queue])

    clusters = []
    for target, info in mapper.items():
        # Drop all non-output tensors, as computed by other clusters
        exprs = [i for i in info.trace if i.lhs.is_Symbol or i.lhs == target]

        # Create and track the cluster
        clusters.append(Cluster(exprs, info.stencil.frozen, atomics))

    return merge(clusters)
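The fixed-point propagation above can be illustrated without Devito's internal classes. The sketch below is a simplified, push-based variant of the same idea: plain dicts of per-dimension offset sets stand in for Stencil objects, and the hypothetical `readby` mapping lists which targets consume each tensor.

from collections import OrderedDict

# Hypothetical stand-ins: each "stencil" maps a dimension name to a set of offsets;
# readby[k] lists the targets whose expressions read the tensor written by k
stencils = OrderedDict([('u', {'x': {-1, 0, 1}}),
                        ('v', {'x': {0}}),
                        ('w', {'x': {0, 2}})])
readby = {'u': ['v'], 'v': ['w'], 'w': []}


def union(s0, s1):
    """Merge two per-dimension offset mappings."""
    out = {d: set(offs) for d, offs in s0.items()}
    for d, offs in s1.items():
        out.setdefault(d, set()).update(offs)
    return out


# Propagate stencil information until no more updates occur (fixed point)
queue = list(stencils)
while queue:
    target = queue.pop(0)
    for consumer in readby[target]:
        merged = union(stencils[consumer], stencils[target])
        if merged != stencils[consumer]:
            # Something has changed, need to propagate the update
            stencils[consumer] = merged
            queue.append(consumer)

print(stencils['w'])  # {'x': {-1, 0, 1, 2}} (set ordering may vary)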
Example #2
0
    def anti_stencil(self):
        handle = Stencil()
        # Collect, per dimension, the offsets appearing across all distance vectors
        for d, i in zip(self.dimensions, zip(*self.distances)):
            handle[d].update(set(i))
        # Also include the ghost offsets recorded for this alias
        for d, i in zip(self.dimensions, zip(*self._ghost_offsets)):
            handle[d].update(set(i))
        return handle
Example #3
0
    def anti_stencil(self):
        ret = Stencil()
        # Transposed distances: per-dimension columns of the distance vectors
        for k, v in self.Tdistances:
            ret[k].update(set(v))
        # Also include the ghost offsets recorded for this alias
        for k, v in self.ghost_offsets.items():
            ret[k].update(v)
        return ret
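In both versions the anti-stencil simply gathers, per dimension, every offset appearing across the recorded distance vectors, plus the ghost offsets. The zip(*distances) transposition can be seen in isolation below; this is a hedged sketch where a defaultdict of sets stands in for Stencil and all values are made up.

from collections import defaultdict

dimensions = ('x', 'y')
distances = [(1, 0), (2, -1), (0, 0)]   # one (x, y) distance vector per aliased expression
ghost_offsets = {'x': {-1}, 'y': {1}}   # hypothetical extra halo points

handle = defaultdict(set)               # stand-in for Stencil()
for d, offs in zip(dimensions, zip(*distances)):
    handle[d].update(offs)              # per-dimension column of the distance matrix
for d, offs in ghost_offsets.items():
    handle[d].update(offs)

print(dict(handle))  # {'x': {-1, 0, 1, 2}, 'y': {-1, 0, 1}} (set ordering may vary)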
Example #4
0
    def _store_argument_offsets(self, stencils):
        offs = Stencil.union(*stencils)
        # Per-dimension diameter of the union of all stencils
        arg_offs = {d: v for d, v in offs.diameter.items()}
        # Stepping dimensions propagate their value to the parent dimension
        arg_offs.update(
            {d.parent: v
             for d, v in arg_offs.items() if d.is_Stepping})
        self.argument_offsets = {d.end_name: v for d, v in arg_offs.items()}
Example #5
0
    def _retrieve_stencils(self, expressions):
        """Determine the :class:`Stencil` of each provided expression."""
        stencils = [Stencil(i) for i in expressions]
        dimensions = set.union(*[set(i.dimensions) for i in stencils])

        # Filter out aliasing stepping dimensions
        mapper = {d.parent: d for d in dimensions if d.is_Stepping}
        return [i.replace(mapper) for i in stencils]
Example #6
0
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = IntervalGroup(intervals, relations=ordering.relations)
        ispace = IterationSpace(intervals.zero(), iterators, directions)

        # The data space is relative to the computational domain. Note that we
        # deliberately drop the intervals ordering (by turning `intervals` into a
        # list), as it is irrelevant (indeed, dangerous) for data spaces
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering
                      if i not in ispace.dimensions + conditionals]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)

        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims

        return expr
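Conceptually, the interval construction above takes, per dimension, the minimum and maximum access offsets (the union of the stencils) and then zeroes them for the iteration space; only out-of-bounds dimensions keep their full extent in the data space. Below is a minimal standalone sketch of that idea, ignoring the oobs filtering and using plain dicts in place of Stencil/Interval objects.

# Offsets per dimension as they might be collected from, e.g.,
#   u[t+1, x] = u[t, x-2] + u[t, x+3]
accesses = {'t': {0, 1}, 'x': {-2, 0, 3}}

# Data-space-like intervals: full extent of the accessed offsets per dimension
dintervals = {d: (min(offs), max(offs)) for d, offs in accesses.items()}

# Iteration-space-like intervals: same dimensions, but zeroed (offsets do not
# widen the loop bounds)
iintervals = {d: (0, 0) for d in accesses}

print(dintervals)  # {'t': (0, 1), 'x': (-2, 3)}
print(iintervals)  # {'t': (0, 0), 'x': (0, 0)}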
Example #7
0
def retrieve_offsets(stencils):
    """
    Return a mapper from :class:`Dimension`s to the min/max integer offsets
    within ``stencils``.
    """
    offs = Stencil.union(*stencils)
    mapper = {d: v for d, v in offs.diameter.items()}
    mapper.update({d.parent: v for d, v in mapper.items() if d.is_Stepping})
    return mapper
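Here the diameter of the union stencil is, per dimension, the spread between its extreme offsets (that is the assumption made in the sketch below), and stepping dimensions forward their value to their parent. `Dim` is a hypothetical stand-in for a Dimension.

from collections import namedtuple

# Hypothetical stand-in for a Dimension; parent is set only for stepping dimensions
Dim = namedtuple('Dim', 'name is_Stepping parent')
time = Dim('time', False, None)
t = Dim('t', True, time)          # stepping dimension aliasing `time`
x = Dim('x', False, None)

# Union of the stencils' offsets, per dimension
offsets = {t: {-1, 0, 1}, x: {-2, 0, 2}}

# Diameter: spread between the extreme offsets of each dimension (assumed definition)
mapper = {d: max(offs) - min(offs) for d, offs in offsets.items()}

# Stepping dimensions propagate their value to the parent dimension
mapper.update({d.parent: v for d, v in mapper.items() if d.is_Stepping})

print({d.name: v for d, v in mapper.items()})  # {'t': 2, 'x': 4, 'time': 2}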
Example #8
0
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(sympy.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = IntervalGroup(intervals, relations=ordering.relations)
        ispace = IterationSpace(intervals.zero(), iterators, directions)

        # The data space is relative to the computational domain. Note that we
        # deliberately drop the intervals ordering (by turning `intervals` into a
        # list), as it is irrelevant (indeed, dangerous) for data spaces
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering
                      if i not in ispace.dimensions + conditionals]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)
        expr._is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        return expr
Example #9
0
    def __new__(cls, input_expr, subs=None):
        # Sanity check
        assert type(input_expr) != LoweredEq
        assert isinstance(input_expr, Eq)

        # Indexification
        expr = indexify(input_expr)

        # Apply caller-provided substitution
        if subs is not None:
            expr = expr.xreplace(subs)

        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)

        # Get the accessed data points
        stencil = Stencil(expr)

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Split actual Intervals (the data spaces) from the "derived" iterators,
        # to build an IterationSpace
        iterators = OrderedDict()
        for i in ordering:
            if i.is_Derived:
                iterators.setdefault(i.parent, []).append(stencil.entry(i))
            else:
                iterators.setdefault(i, [])
        intervals = []
        for k, v in iterators.items():
            offs = set.union(set(stencil.get(k)), *[i.ofs for i in v])
            intervals.append(Interval(k, min(offs), max(offs)).negate())
        expr.ispace = IterationSpace(intervals, iterators)

        return expr
Example #10
0
def make_stencils(expressions):
    """
    Create a :class:`Stencil` for each of the provided expressions. The following
    rules apply: ::

        * The parent ``d.parent`` of a :class:`SteppingDimension` ``d`` is replaced
          by ``d`` itself.
    """
    stencils = [Stencil(i) for i in expressions]
    dimensions = set.union(*[set(i.dimensions) for i in stencils])

    # Filter out aliasing stepping dimensions
    mapper = {d.parent: d for d in dimensions if d.is_Stepping}
    return [i.replace(mapper) for i in stencils]
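The replacement folds any stencil entry keyed by the parent of a stepping dimension into the stepping dimension's own entry, so that e.g. `time` and its stepping alias `t` never coexist within a stencil. A minimal sketch with plain dicts (all names hypothetical):

# Two stencils; one is keyed by the stepping dimension 't', the other by its parent 'time'
stencils = [{'t': {0, 1}, 'x': {-1, 0, 1}},
            {'time': {0}, 'x': {0}}]

# Map the parent of each stepping dimension to the stepping dimension itself
mapper = {'time': 't'}


def replace(stencil, mapper):
    """Fold entries keyed by a parent dimension into the stepping dimension's entry."""
    out = {}
    for d, offs in stencil.items():
        out.setdefault(mapper.get(d, d), set()).update(offs)
    return out


print([replace(s, mapper) for s in stencils])
# [{'t': {0, 1}, 'x': {-1, 0, 1}}, {'t': {0}, 'x': {0}}]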
Example #11
0
    def _retrieve_stencils(self, expressions):
        """Determine the :class:`Stencil` of each provided expression."""
        stencils = [Stencil(i) for i in expressions]
        dimensions = set.union(*[set(i.dimensions) for i in stencils])

        # Filter out aliasing stepping dimensions
        mapper = {d.parent: d for d in dimensions if d.is_Stepping}
        for i in list(stencils):
            for d in i.dimensions:
                if d in mapper:
                    i[mapper[d]] = i.pop(d).union(i.get(mapper[d], set()))

        return stencils
Example #12
0
    def __init__(self,
                 alias,
                 aliased=None,
                 distances=None,
                 ghost_offsets=None):
        self.alias = alias
        self.aliased = aliased or []
        self.distances = distances or []
        self.ghost_offsets = ghost_offsets or Stencil()

        assert len(self.aliased) == len(self.distances)

        # Transposed distances
        self.Tdistances = LabeledVector.transpose(*self.distances)
Example #13
0
def merge(clusters):
    """
    Given an ordered collection of :class:`Cluster` objects, return a
    (potentially) smaller sequence in which clusters with identical stencils
    (and atomics) have been merged into a single :class:`Cluster`.
    """
    mapper = OrderedDict()
    for c in clusters:
        mapper.setdefault((c.stencil.entries, c.atomics), []).append(c)

    processed = []
    for (entries, atomics), clusters in mapper.items():
        # Eliminate redundant temporaries
        temporaries = OrderedDict()
        for c in clusters:
            for k, v in c.trace.items():
                if k not in temporaries:
                    temporaries[k] = v
        # Squash the clusters together
        processed.append(Cluster(temporaries.values(), Stencil(entries), atomics))

    return processed
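The merge step groups clusters by their (stencil entries, atomics) key and keeps only the first definition of each temporary. The grouping logic can be sketched with a drastically simplified Cluster, here just a hashable stencil key plus an ordered lhs-to-expression mapping; all names below are illustrative.

from collections import OrderedDict, namedtuple

# Drastically simplified Cluster: a hashable stencil key and an ordered trace
Cluster = namedtuple('Cluster', 'stencil trace')

clusters = [Cluster('S0', OrderedDict([('r0', 'a + b')])),
            Cluster('S0', OrderedDict([('r0', 'a + b'), ('r1', 'r0*c')])),
            Cluster('S1', OrderedDict([('r2', 'd[x]')]))]

mapper = OrderedDict()
for c in clusters:
    mapper.setdefault(c.stencil, []).append(c)

processed = []
for stencil, group in mapper.items():
    # Eliminate redundant temporaries: the first definition of each lhs wins
    temporaries = OrderedDict()
    for c in group:
        for k, v in c.trace.items():
            temporaries.setdefault(k, v)
    # Squash the clusters together
    processed.append(Cluster(stencil, temporaries))

print(len(processed))            # 2: the two 'S0' clusters have been merged
print(list(processed[0].trace))  # ['r0', 'r1']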
Example #14
0
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i,
                        kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        accesses = detect_accesses(expr)
        dimensions = Stencil.union(*accesses.values())

        # Separate out the SubIterators from the main iteration Dimensions, that
        # is those which define an actual iteration space
        iterators = {}
        for d in dimensions:
            if d.is_SubIterator:
                iterators.setdefault(d.root, set()).add(d)
            elif d.is_Conditional:
                # Use `parent`, and `root`, because a ConditionalDimension may
                # have a SubDimension as parent
                iterators.setdefault(d.parent, set())
            else:
                iterators.setdefault(d, set())

        # Construct the IterationSpace
        intervals = IntervalGroup([Interval(d, 0, 0) for d in iterators],
                                  relations=ordering.relations)
        ispace = IterationSpace(intervals, iterators)

        # Construct the conditionals and replace the ConditionalDimensions in `expr`
        conditionals = {}
        for d in ordering:
            if not d.is_Conditional:
                continue
            if d.condition is None:
                conditionals[d] = GuardFactor(d)
            else:
                conditionals[d] = diff2sympy(lower_exprs(d.condition))
            if d.factor is not None:
                expr = uxreplace(expr, {d: IntDiv(d.index, d.factor)})
        conditionals = frozendict(conditionals)

        # Lower all Differentiable operations into SymPy operations
        rhs = diff2sympy(expr.rhs)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             rhs,
                                             evaluate=False)

        expr._ispace = ispace
        expr._conditionals = conditionals
        expr._reads, expr._writes = detect_io(expr)

        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims

        return expr
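The ConditionalDimension lowering in the version above replaces the dimension with IntDiv(d.index, d.factor) and guards execution with a factor condition. The subsampling semantics can be sketched in plain Python; guard_factor and lowered_index are hypothetical stand-ins for GuardFactor and IntDiv.

factor = 4  # a ConditionalDimension with factor 4 subsamples its parent dimension


def guard_factor(i):
    """Stand-in for GuardFactor(d): the guarded code runs only every `factor` steps."""
    return i % factor == 0


def lowered_index(i):
    """Stand-in for IntDiv(d.index, d.factor): index into the subsampled array."""
    return i // factor


samples = [i for i in range(10) if guard_factor(i)]
print(samples)                               # [0, 4, 8]
print([lowered_index(i) for i in samples])   # [0, 1, 2]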
Example #15
0
    def __new__(cls, *args, **kwargs):
        if len(args) == 1:
            # origin: LoweredEq(expr)
            expr = input_expr = args[0]
            assert not isinstance(expr, LoweredEq) and isinstance(expr, Eq)
        elif len(args) == 2:
            # origin: LoweredEq(lhs, rhs, stamp=...)
            stamp = kwargs.pop('stamp')
            expr = Eq.__new__(cls, *args, evaluate=False)
            assert isinstance(stamp, Eq)
            expr.is_Increment = stamp.is_Increment
            expr._ispace, expr._dspace = stamp.ispace, stamp.dspace
            expr.reads, expr.writes = stamp.reads, stamp.writes
            return expr
        elif len(args) == 5:
            # origin: LoweredEq(expr, ispace, dspace, reads, writes)
            input_expr, ispace, dspace, reads, writes = args
            assert isinstance(ispace, IterationSpace) and isinstance(dspace, DataSpace)
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            expr.is_Increment = input_expr.is_Increment
            expr._ispace, expr._dspace = ispace, dspace
            expr.reads, expr.writes = reads, writes
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {i: SubDimension.middle("%si" % i, i, 1, 1)
                      for i in ordering if i.is_Space}
            expr = expr.xreplace(mapper)
            for k, v in mapper.items():
                ordering.insert(ordering.index(k) + 1, v)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = sorted(intervals, key=lambda i: ordering.index(i.dim))
        ispace = IterationSpace([i.zero() for i in intervals], iterators, directions)

        # The data space is relative to the computational domain
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering if i not in ispace.dimensions]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr.reads, expr.writes = detect_io(expr)

        return expr
Example #16
0
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i,
                        kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # Construct Intervals for IterationSpace and DataSpace
        intervals = build_intervals(Stencil.union(*mapper.values()))
        iintervals = []  # iteration Intervals
        dintervals = []  # data Intervals
        for i in intervals:
            d = i.dim
            if d in oobs:
                iintervals.append(i.zero())
                dintervals.append(i)
            else:
                iintervals.append(i.zero())
                dintervals.append(i.zero())

        # Construct the IterationSpace
        iintervals = IntervalGroup(iintervals, relations=ordering.relations)
        iterators = build_iterators(mapper)
        ispace = IterationSpace(iintervals, iterators)

        # Construct the DataSpace
        dintervals.extend([
            Interval(i, 0, 0) for i in ordering
            if i not in ispace.dimensions + conditionals
        ])
        parts = {
            k: IntervalGroup(build_intervals(v)).add(iintervals)
            for k, v in mapper.items() if k
        }
        dspace = DataSpace(dintervals, parts)

        # Lower all Differentiable operations into SymPy operations
        rhs = diff2sympy(expr.rhs)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             rhs,
                                             evaluate=False)

        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims

        return expr
Example #17
0
def collect(exprs):
    """
    Determine groups of aliasing expressions in ``exprs``.

    An expression A aliases an expression B if both A and B apply the same
    operations to the same input operands, with the possibility for indexed objects
    to index into locations at a fixed constant offset in each dimension.

    For example: ::

        exprs = (a[i+1] + b[i+1], a[i+1] + b[j+1], a[i] + c[i],
                 a[i+2] - b[i+2], a[i+2] + b[i], a[i-1] + b[i-1])

    The following expressions in ``exprs`` alias to ``a[i] + b[i]``: ::

        a[i+1] + b[i+1] : same operands and operations, distance along i = 1
        a[i-1] + b[i-1] : same operands and operations, distance along i = -1

    Whereas the following do not: ::

        a[i+1] + b[j+1] : because at least one index differs
        a[i] + c[i] : because at least one of the operands differs
        a[i+2] - b[i+2] : because at least one operation differs
        a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    ExprData = namedtuple('ExprData', 'dimensions offsets')

    # Discard expressions:
    # - that surely won't alias to anything
    # - that are non-scalar
    candidates = OrderedDict()
    for expr in exprs:
        if expr.lhs.is_Indexed:
            continue
        indexeds = retrieve_indexed(expr.rhs, mode='all')
        if indexeds and not any(q_indirect(i) for i in indexeds):
            handle = calculate_offsets(indexeds)
            if handle:
                candidates[expr.rhs] = ExprData(*handle)

    aliases = OrderedDict()
    mapper = OrderedDict()
    unseen = list(candidates)
    while unseen:
        # Find aliasing expressions
        handle = unseen.pop(0)
        group = [handle]
        for e in list(unseen):
            if compare(handle, e) and\
                    is_translated(candidates[handle].offsets, candidates[e].offsets):
                group.append(e)
                unseen.remove(e)

        # Try creating a basis for the aliasing expressions' offsets
        offsets = [tuple(candidates[e].offsets) for e in group]
        try:
            COM, distances = calculate_COM(offsets)
        except DSEException:
            # Ignore these potential aliases and move on
            continue

        alias = create_alias(handle, COM)

        # An alias has been created, so I can now update the expression mapper
        mapper.update([(i, group) for i in group])

        # In circumstances in which an expression has repeated coefficients, e.g.
        # ... + 0.025*a[...] + 0.025*b[...],
        # we may have found a common basis (i.e., same COM, same alias) at this point
        v = aliases.setdefault(alias, Alias(alias, candidates[handle].dimensions))
        v.extend(group, distances)

    # Heuristically attempt to relax the aliases' offsets
    # to maximize the likelihood of loop fusion
    groups = OrderedDict()
    for i in aliases.values():
        groups.setdefault(i.dimensions, []).append(i)
    for group in groups.values():
        ideal_anti_stencil = Stencil.union(*[i.anti_stencil for i in group])
        for i in group:
            if i.anti_stencil.subtract(ideal_anti_stencil).empty:
                aliases[i.alias] = i.relax(ideal_anti_stencil)

    return mapper, aliases
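The is_translated check boils down to verifying that the two candidate expressions' accesses differ by one and the same constant shift per dimension; compare/compare_ops has already ensured the operations and operands match. Below is a standalone approximation of that check, exercised on the same cases as in the docstring.

def is_translated(offsets_a, offsets_b):
    """
    True if every access of B is obtained from the corresponding access of A
    by one and the same constant per-dimension shift (simplified sketch).
    """
    shifts = {tuple(b - a for a, b in zip(va, vb))
              for va, vb in zip(offsets_a, offsets_b)}
    return len(shifts) == 1


# a[i+1] + b[i+1] vs a[i-1] + b[i-1]: both accesses shifted by -2 -> aliases
print(is_translated([(1,), (1,)], [(-1,), (-1,)]))   # True

# a[i+2] + b[i+2] vs a[i+2] + b[i]: shifts differ (0 vs -2) -> no alias
print(is_translated([(2,), (2,)], [(2,), (0,)]))     # False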
Example #18
0
def collect(exprs):
    """
    Determine groups of aliasing expressions in ``exprs``.

    An expression A aliases an expression B if both A and B apply the same
    operations to the same input operands, with the possibility for indexed objects
    to index into locations at a fixed constant offset in each dimension.

    For example: ::

        exprs = (a[i+1] + b[i+1], a[i+1] + b[j+1], a[i] + c[i],
                 a[i+2] - b[i+2], a[i+2] + b[i], a[i-1] + b[i-1])

    The following expressions in ``exprs`` alias to ``a[i] + b[i]``: ::

        a[i+1] + b[i+1] : same operands and operations, distance along i = 1
        a[i-1] + b[i-1] : same operands and operations, distance along i = -1

    Whereas the following do not: ::

        a[i+1] + b[j+1] : because at least one index differs
        a[i] + c[i] : because at least one of the operands differs
        a[i+2] - b[i+2] : because at least one operation differs
        a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    ExprData = namedtuple('ExprData', 'dimensions offsets')

    # Discard expressions:
    # - that surely won't alias to anything
    # - that are non-scalar
    candidates = OrderedDict()
    for expr in exprs:
        if expr.lhs.is_Indexed:
            continue
        indexeds = retrieve_indexed(expr.rhs, mode='all')
        if indexeds and not any(q_indirect(i) for i in indexeds):
            handle = calculate_offsets(indexeds)
            if handle:
                candidates[expr.rhs] = ExprData(*handle)

    aliases = OrderedDict()
    mapper = OrderedDict()
    unseen = list(candidates)
    while unseen:
        # Find aliasing expressions
        handle = unseen.pop(0)
        group = [handle]
        for e in list(unseen):
            if compare(handle, e) and\
                    is_translated(candidates[handle].offsets, candidates[e].offsets):
                group.append(e)
                unseen.remove(e)

        # Try creating a basis for the aliasing expressions' offsets
        offsets = [tuple(candidates[e].offsets) for e in group]
        try:
            COM, distances = calculate_COM(offsets)
        except DSEException:
            # Ignore these potential aliases and move on
            continue

        alias = create_alias(handle, COM)

        # An alias has been created, so I can now update the expression mapper
        mapper.update([(i, group) for i in group])

        # In circumstances in which an expression has repeated coefficients, e.g.
        # ... + 0.025*a[...] + 0.025*b[...],
        # we may have found a common basis (i.e., same COM, same alias) at this point
        v = aliases.setdefault(alias,
                               Alias(alias, candidates[handle].dimensions))
        v.extend(group, distances)

    # Heuristically attempt to relax the aliases' offsets
    # to maximize the likelihood of loop fusion
    grouped = OrderedDict()
    for i in aliases.values():
        grouped.setdefault(i.dimensions, []).append(i)
    for dimensions, group in grouped.items():
        ideal_anti_stencil = Stencil.union(*[i.anti_stencil for i in group])
        for i in group:
            if i.anti_stencil.subtract(ideal_anti_stencil).empty:
                aliases[i.alias] = i.relax(ideal_anti_stencil)

    return mapper, aliases
Example #19
0
    def stencil(self):
        """Compute the stencil of the expression."""
        return Stencil(self.expr)
Example #20
0
    def __new__(cls, *args, **kwargs):
        # Parse input
        if len(args) == 1:
            input_expr = args[0]
            assert type(input_expr) != LoweredEq
            assert isinstance(input_expr, Eq)
        elif len(args) == 2:
            # Reconstructing from existing Eq. E.g., we end up here after xreplace
            expr = super(Eq, cls).__new__(cls, *args, evaluate=False)
            stamp = kwargs.get('stamp')
            assert isinstance(stamp, Eq)
            expr.is_Increment = stamp.is_Increment
            expr.dspace = stamp.dspace
            expr.ispace = stamp.ispace
            return expr
        else:
            raise ValueError("Cannot construct Eq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Indexification
        expr = indexify(input_expr)

        # Apply caller-provided substitution
        subs = kwargs.get('subs')
        if subs is not None:
            expr = expr.xreplace(subs)

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension("%si" % i, i, 1, -1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            ordering = [mapper.get(i, i) for i in ordering]

        # Get the accessed data points
        stencil = Stencil(expr)

        # Split actual Intervals (the data spaces) from the "derived" iterators,
        # to build an IterationSpace
        iterators = OrderedDict()
        for i in ordering:
            if i.is_Stepping:
                iterators.setdefault(i.parent, []).append(stencil.entry(i))
            else:
                iterators.setdefault(i, [])
        intervals = []
        for k, v in iterators.items():
            offs = set.union(set(stencil.get(k)), *[i.ofs for i in v])
            intervals.append(Interval(k, min(offs), max(offs)))

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)
        expr.dspace = DataSpace(intervals)
        expr.ispace = IterationSpace([i.negate() for i in intervals],
                                     iterators)

        return expr
Example #21
0
def collect(exprs):
    """
    Determine groups of aliasing expressions in ``exprs``.

    An expression A aliases an expression B if both A and B perform the same
    arithmetic operations over the same input operands, with the possibility for
    Indexeds to access locations at a fixed constant offset in each Dimension.

    For example: ::

        exprs = (a[i+1] + b[i+1],
                 a[i+1] + b[j+1],
                 a[i] + c[i],
                 a[i+2] - b[i+2],
                 a[i+2] + b[i],
                 a[i-1] + b[i-1])

    The following expressions in ``exprs`` alias to ``a[i] + b[i]``: ::

        a[i+1] + b[i+1] : same operands and operations, distance along i = 1
        a[i-1] + b[i-1] : same operands and operations, distance along i = -1

    Whereas the following do not: ::

        a[i+1] + b[j+1] : because at least one index differs
        a[i] + c[i] : because at least one of the operands differs
        a[i+2] - b[i+2] : because at least one operation differs
        a[i+2] + b[i] : because the distances along ``i`` differ (+2 and +0)
    """
    # Determine the potential aliases
    candidates = []
    for expr in exprs:
        candidate = analyze(expr)
        if candidate is not None:
            candidates.append(candidate)

    # Group together the aliasing expressions (ultimately build an Alias for each
    # group of aliasing expressions)
    aliases = Aliases()
    unseen = list(candidates)
    while unseen:
        c = unseen.pop(0)

        # Find aliasing expressions
        group = [c]
        for i in list(unseen):
            if compare_ops(c.expr, i.expr) and is_translated(c, i):
                group.append(i)
                unseen.remove(i)

        # Try creating a basis spanning the aliasing expressions' iteration vectors
        try:
            COM, distances = calculate_COM(group)
        except ValueError:
            # Ignore these aliasing expressions and move on
            continue

        # Create an alias expression centering `c`'s Indexeds at the COM
        subs = {
            i: i.function[[x + v.fromlabel(x, 0) for x in b]]
            for i, b, v in zip(c.indexeds, c.bases, COM)
        }
        alias = c.expr.xreplace(subs)
        aliased = [i.expr for i in group]

        aliases[alias] = Alias(alias, aliased, distances)

    # Attempt to drop composite aliases to minimize the working set.
    # For example:
    #
    # a[i+1]*b[i+1]             a[i+1]
    # a[i+1]           ---->    b[i+1]
    # b[i+1]          becomes
    #
    # Note:
    # "attempt" because this is a very hard problem, which depends/relies on:
    # - the input format, eg [r0 = a, r1 = b] rather than [r0 = a*b],
    # - the observation that the COMs are often identical across different Aliases
    #   eg {A[i+1] = ..., A[i+1]*B[i+1]} (here A is always centered at [i+1])
    # Note:
    # This approach is very naive. Ideally, one would want to set up and solve a
    # proper minimization problem
    for origin, alias in list(aliases.items()):
        try:
            impacted = [aliases[i] for i in origin.args]
        except KeyError:
            continue
        for aliased, distance in alias.with_distance:
            assert len(impacted) == len(aliased.args)
            for i, a in zip(impacted, aliased.args):
                aliases[i.alias] = i.add(a, distance)
        aliases.pop(origin)

    # Heuristically attempt to relax the Aliases' offsets to maximize the
    # likelihood of loop fusion
    groups = OrderedDict()
    for i in aliases.values():
        groups.setdefault(i.dimensions, []).append(i)
    for group in groups.values():
        ideal_anti_stencil = Stencil.union(*[i.anti_stencil for i in group])
        for i in group:
            if i.anti_stencil.subtract(ideal_anti_stencil).empty:
                aliases[i.alias] = i.relax(ideal_anti_stencil)

    return aliases
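calculate_COM picks a common center for the group's access offsets and records each expression's distance from it. In the sketch below the center is simply the component-wise minimum; the actual choice of center in the source may differ, so treat this only as an illustration of the COM/distances relationship.

def calculate_com(offsets):
    """
    Given one offset vector per aliasing expression, pick a common center and
    return it together with each expression's distance from it (sketch only;
    here the center is the component-wise minimum).
    """
    com = tuple(min(col) for col in zip(*offsets))
    distances = [tuple(o - c for o, c in zip(off, com)) for off in offsets]
    return com, distances


# Offsets along i for a[i+1]+b[i+1], a[i-1]+b[i-1], a[i]+b[i]
com, distances = calculate_com([(1,), (-1,), (0,)])
print(com)        # (-1,)
print(distances)  # [(2,), (0,), (1,)]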