Example #1
def clusterize(exprs):
    """
    Group a sequence of :class:`ir.Eq`s into one or more :class:`Cluster`s.
    """
    clusters = ClusterGroup()
    flowmap = detect_flow_directions(exprs)
    prev = None
    for idx, e in enumerate(exprs):
        if e.is_Tensor:
            scalars = [i for i in exprs[prev:idx] if i.is_Scalar]
            # Iteration space
            ispace = IterationSpace.merge(e.ispace, *[i.ispace for i in scalars])
            # Enforce iteration directions
            fdirs, _ = force_directions(flowmap, lambda d: ispace.directions.get(d))
            ispace = IterationSpace(ispace.intervals, ispace.sub_iterators, fdirs)
            # Data space
            dspace = DataSpace.merge(e.dspace, *[i.dspace for i in scalars])
            # Prepare for next range
            prev = idx

            clusters.append(PartialCluster(scalars + [e], ispace, dspace))

    # Group PartialClusters together where possible
    clusters = groupby(clusters)

    # Introduce conditional PartialClusters
    clusters = guard(clusters)

    return clusters.finalize()
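A minimal, dependency-free sketch of the grouping pattern `clusterize` relies on: scalar temporaries are attached to the first tensor expression that follows them. The `Expr` namedtuple and `is_scalar` flag below are illustrative stand-ins, not Devito types.

from collections import namedtuple

Expr = namedtuple('Expr', 'name is_scalar')

def group_exprs(exprs):
    """Attach each run of scalar expressions to the tensor expression that follows."""
    groups, pending = [], []
    for e in exprs:
        if e.is_scalar:
            pending.append(e)
        else:
            groups.append((tuple(pending), e))
            pending = []
    return groups

exprs = [Expr('t0', True), Expr('t1', True), Expr('u', False), Expr('v', False)]
for scalars, tensor in group_exprs(exprs):
    print([s.name for s in scalars], '->', tensor.name)
# ['t0', 't1'] -> u
# [] -> v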
Example #2
    def from_clusters(cls, *clusters):
        """
        Build a new Cluster from a sequence of pre-existing Clusters with
        compatible IterationSpace.
        """
        assert len(clusters) > 0
        root = clusters[0]
        if not all(root.ispace.is_compatible(c.ispace) for c in clusters):
            raise ValueError("Cannot build a Cluster from Clusters with "
                             "incompatible IterationSpace")
        if not all(root.guards == c.guards for c in clusters):
            raise ValueError("Cannot build a Cluster from Clusters with "
                             "non-homogeneous guards")

        exprs = chain(*[c.exprs for c in clusters])
        ispace = IterationSpace.union(*[c.ispace for c in clusters])
        dspace = DataSpace.union(*[c.dspace for c in clusters])

        guards = root.guards

        properties = {}
        for c in clusters:
            for d, v in c.properties.items():
                properties[d] = normalize_properties(properties.get(d, v), v)

        try:
            syncs = normalize_syncs(*[c.syncs for c in clusters])
        except ValueError:
            raise ValueError("Cannot build a Cluster from Clusters with "
                             "non-compatible synchronization operations")

        return Cluster(exprs, ispace, dspace, guards, properties, syncs)
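The per-Dimension properties fold in the middle of `from_clusters` can be tried in isolation. The `normalize_properties` below is a stand-in that simply intersects property sets; the real Devito helper applies more nuanced rules.

def normalize_properties(a, b):
    # Stand-in: keep only the properties common to both operands
    return a & b

clusters_properties = [
    {'x': {'parallel', 'affine'}, 'y': {'parallel'}},
    {'x': {'parallel'}},
]

properties = {}
for props in clusters_properties:
    for d, v in props.items():
        properties[d] = normalize_properties(properties.get(d, v), v)

print(properties)  # {'x': {'parallel'}, 'y': {'parallel'}}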
Example #3
 def squash(self, other):
     """Concatenate the expressions in ``other`` to those in ``self``.
     ``self`` and ``other`` must have same ``ispace``. Duplicate
     expressions are dropped. The :class:`DataSpace` is updated
     accordingly."""
     assert self.ispace.is_compatible(other.ispace)
     self.exprs.extend([i for i in other.exprs if i not in self.exprs])
     self.dspace = DataSpace.merge(self.dspace, other.dspace)
     self.ispace = IterationSpace.merge(self.ispace, other.ispace)
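A trivial illustration of the dedup-append step performed by `squash`; the strings below stand in for expression objects.

self_exprs = ['eq0', 'eq1']
other_exprs = ['eq1', 'eq2']
self_exprs.extend([i for i in other_exprs if i not in self_exprs])
print(self_exprs)  # ['eq0', 'eq1', 'eq2']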
Example #4
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(sympy.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = IntervalGroup(intervals, relations=ordering.relations)
        ispace = IterationSpace(intervals.zero(), iterators, directions)

        # The data space is relative to the computational domain. Note that we
        # deliberately drop the intervals ordering (by turning `intervals` into a
        # list), as it is irrelevant, and indeed potentially harmful, for data spaces
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering
                      if i not in ispace.dimensions + conditionals]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)
        expr._is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        return expr
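The branch structure of `__new__` is easier to see on a toy subclass: a `sympy.Eq` whose constructor dispatches on its inputs and carries the metadata listed in `_state` across rebuilds. The `Meta` class and its single `note` field below are purely illustrative, not part of Devito.

import sympy


class Meta(sympy.Eq):

    _state = ('note',)

    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], Meta):
            # Rebuild from an existing Meta, optionally overriding metadata
            expr = sympy.Eq.__new__(cls, *args[0].args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(args[0], i))
            return expr
        elif len(args) == 2:
            # Fresh construction from lhs/rhs plus explicit metadata
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct Meta from args=%s" % str(args))

    @property
    def note(self):
        return self._note


x, y = sympy.symbols('x y')
eq = Meta(x, y + 1, note='lowered')
print(eq.note)        # lowered
print(Meta(eq).note)  # metadata carried over the rebuild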
Example #5
 def squash(self, other):
     """
     Append the expressions in ``other`` to those in ``self``.
     ``self`` and ``other`` must have the same ``ispace``. Duplicate expressions
     are dropped. The DataSpace is updated accordingly.
     """
     assert self.ispace.is_compatible(other.ispace)
     self.exprs.extend([i for i in other.exprs
                        if i not in self.exprs or i.is_Increment])
     self.dspace = DataSpace.merge(self.dspace, other.dspace)
     self.ispace = IterationSpace.merge(self.ispace, other.ispace)
Example #6
 def from_clusters(cls, *clusters):
     """
     Build a new Cluster from a sequence of pre-existing Clusters with
     compatible IterationSpace.
     """
     assert len(clusters) > 0
     root = clusters[0]
     assert all(root.ispace.is_compatible(c.ispace) for c in clusters)
     exprs = chain(*[c.exprs for c in clusters])
     ispace = IterationSpace.union(*[c.ispace for c in clusters])
     dspace = DataSpace.union(*[c.dspace for c in clusters])
     return Cluster(exprs, ispace, dspace)
Example #7
 def from_clusters(cls, *clusters):
     """
     Build a new Cluster from a sequence of pre-existing Clusters with
     compatible IterationSpace.
     """
     assert len(clusters) > 0
     root = clusters[0]
     if not all(root.ispace.is_compatible(c.ispace) for c in clusters):
         raise ValueError("Cannot build a Cluster from Clusters with "
                          "incompatible IterationSpace")
     if not all(root.properties == c.properties for c in clusters):
         raise ValueError("Cannot build a Cluster from Clusters with "
                          "non-homogeneous properties")
     exprs = chain(*[c.exprs for c in clusters])
     ispace = IterationSpace.union(*[c.ispace for c in clusters])
     dspace = DataSpace.union(*[c.dspace for c in clusters])
     return Cluster(exprs, ispace, dspace, properties=root.properties)
Example #8
    def __new__(cls, *args, **kwargs):
        # Parse input
        if len(args) == 1:
            input_expr = args[0]
            assert type(input_expr) != LoweredEq
            assert isinstance(input_expr, Eq)
        elif len(args) == 2:
            # Reconstructing from existing Eq. E.g., we end up here after xreplace
            expr = super(Eq, cls).__new__(cls, *args, evaluate=False)
            stamp = kwargs.get('stamp')
            assert isinstance(stamp, Eq)
            expr.is_Increment = stamp.is_Increment
            expr.dspace = stamp.dspace
            expr.ispace = stamp.ispace
            return expr
        else:
            raise ValueError("Cannot construct Eq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Indexification
        expr = indexify(input_expr)

        # Apply caller-provided substitution
        subs = kwargs.get('subs')
        if subs is not None:
            expr = expr.xreplace(subs)

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension("%si" % i, i, 1, -1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            ordering = [mapper.get(i, i) for i in ordering]

        # Get the accessed data points
        stencil = Stencil(expr)

        # Split actual Intervals (the data spaces) from the "derived" iterators,
        # to build an IterationSpace
        iterators = OrderedDict()
        for i in ordering:
            if i.is_Stepping:
                iterators.setdefault(i.parent, []).append(stencil.entry(i))
            else:
                iterators.setdefault(i, [])
        intervals = []
        for k, v in iterators.items():
            offs = set.union(set(stencil.get(k)), *[i.ofs for i in v])
            intervals.append(Interval(k, min(offs), max(offs)))

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)
        expr.dspace = DataSpace(intervals)
        expr.ispace = IterationSpace([i.negate() for i in intervals],
                                     iterators)

        return expr
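The interval-building loop above folds stepping dimensions (e.g. t0, t1) into their parent's extent. A toy reconstruction of that step, with plain dicts standing in for Stencil and Dimension objects:

stencil = {'t0': {0}, 't1': {1}, 'x': {-1, 0, 1}}
parent = {'t0': 't', 't1': 't'}  # stepping dimension -> parent dimension

iterators = {}
for d, offs in stencil.items():
    iterators.setdefault(parent.get(d, d), set()).update(offs)

intervals = {d: (min(offs), max(offs)) for d, offs in iterators.items()}
print(intervals)  # {'t': (0, 1), 'x': (-1, 1)}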
Example #9
 def dspace(self):
     """Return the DataSpace of this ClusterGroup."""
     return DataSpace.union(*[i.dspace.reset() for i in self])
Example #10
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i,
                        kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # Construct Intervals for IterationSpace and DataSpace
        intervals = build_intervals(Stencil.union(*mapper.values()))
        iintervals = []  # iteration Intervals
        dintervals = []  # data Intervals
        for i in intervals:
            d = i.dim
            if d in oobs:
                iintervals.append(i.zero())
                dintervals.append(i)
            else:
                iintervals.append(i.zero())
                dintervals.append(i.zero())

        # Construct the IterationSpace
        iintervals = IntervalGroup(iintervals, relations=ordering.relations)
        iterators = build_iterators(mapper)
        ispace = IterationSpace(iintervals, iterators)

        # Construct the DataSpace
        dintervals.extend([
            Interval(i, 0, 0) for i in ordering
            if i not in ispace.dimensions + conditionals
        ])
        parts = {
            k: IntervalGroup(build_intervals(v)).add(iintervals)
            for k, v in mapper.items() if k
        }
        dspace = DataSpace(dintervals, parts)

        # Lower all Differentiable operations into SymPy operations
        rhs = diff2sympy(expr.rhs)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             rhs,
                                             evaluate=False)

        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims

        return expr
Example #11
    def __new__(cls, *args, **kwargs):
        if len(args) == 1:
            # origin: LoweredEq(expr)
            expr = input_expr = args[0]
            assert not isinstance(expr, LoweredEq) and isinstance(expr, Eq)
        elif len(args) == 2:
            # origin: LoweredEq(lhs, rhs, stamp=...)
            stamp = kwargs.pop('stamp')
            expr = Eq.__new__(cls, *args, evaluate=False)
            assert isinstance(stamp, Eq)
            expr.is_Increment = stamp.is_Increment
            expr._ispace, expr._dspace = stamp.ispace, stamp.dspace
            expr.reads, expr.writes = stamp.reads, stamp.writes
            return expr
        elif len(args) == 5:
            # origin: LoweredEq(expr, ispace, dspace, reads, writes)
            input_expr, ispace, dspace, reads, writes = args
            assert isinstance(ispace, IterationSpace) and isinstance(
                dspace, DataSpace)
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            expr.is_Increment = input_expr.is_Increment
            expr._ispace, expr._dspace = ispace, dspace
            expr.reads, expr.writes = reads, writes
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension("%si" % i, i, 1, -1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            ordering = [mapper.get(i, i) for i in ordering]

        # Analyze data accesses
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr),
                                         lambda i: Any)
        intervals, iterators = build_intervals(mapper)
        intervals = sorted(intervals, key=lambda i: ordering.index(i.dim))
        ispace = IterationSpace([i.zero() for i in intervals], iterators,
                                directions)

        # The data space is relative to the computational domain
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [
            Interval(i, 0, 0) for i in ordering if i not in ispace.dimensions
        ]
        parts = {
            k:
            IntervalGroup(Interval(i, min(j), max(j)) for i, j in v.items())
            for k, v in mapper.items()
        }
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr.reads, expr.writes = detect_io(expr)

        return expr
Example #12
 def dspace(self):
     """Return the DataSpace of this ClusterGroup."""
     return DataSpace.merge(*[i.dspace for i in self])
Example #13
 def dspace(self):
     """Return the cumulative :class:`DataSpace` of this ClusterGroup."""
     return DataSpace.merge(*[i.dspace for i in self])
Example #14
    def dspace(self):
        """
        Derive the DataSpace of the Cluster from its expressions, IterationSpace,
        and Guards.
        """
        accesses = detect_accesses(self.exprs)

        # Construct the `parts` of the DataSpace, that is a projection of the data
        # space for each Function appearing in `self.exprs`
        parts = {}
        for f, v in accesses.items():
            if f is None:
                continue

            intervals = [
                Interval(d, min(offs), max(offs)) for d, offs in v.items()
            ]
            intervals = IntervalGroup(intervals)

            # Factor in the IterationSpace -- if the min/max points aren't zero,
            # then the data intervals need to shrink/expand accordingly
            intervals = intervals.promote(lambda d: d.is_Block)
            shift = self.ispace.intervals.promote(lambda d: d.is_Block)
            intervals = intervals.add(shift)

            # Map SubIterators to the corresponding data space Dimension
            # E.g., `xs -> x -> x0_blk0 -> x` or `t0 -> t -> time`
            intervals = intervals.promote(lambda d: d.is_SubIterator)

            # If the bound of a Dimension is explicitly guarded, then we should
            # shrink the `parts` accordingly
            for d, v in self.guards.items():
                ret = v.find(BaseGuardBoundNext)
                assert len(ret) <= 1
                if len(ret) != 1:
                    continue
                if ret.pop().direction is Forward:
                    intervals = intervals.translate(d, v1=-1)
                else:
                    intervals = intervals.translate(d, 1)

            # Special case: if the factor of a ConditionalDimension has value 1,
            # then we can safely resort to the parent's Interval
            intervals = intervals.promote(
                lambda d: d.is_Conditional and d.factor == 1)

            parts[f] = intervals

        # Determine the Dimensions requiring shifted min/max points to avoid
        # OOB accesses
        oobs = set()
        for f, v in parts.items():
            for i in v:
                if i.dim.is_Sub:
                    d = i.dim.parent
                else:
                    d = i.dim
                try:
                    if i.lower < 0 or \
                       i.upper > f._size_nodomain[d].left + f._size_halo[d].right:
                        # It'd mean trying to access a point before the
                        # left halo (test0) or after the right halo (test1)
                        oobs.update(d._defines)
                except (KeyError, TypeError):
                    # Unable to detect presence of OOB accesses (e.g., `d` not in
                    # `f._size_halo`, that is typical of indirect accesses `A[B[i]]`)
                    pass

        # Construct the `intervals` of the DataSpace, that is a global,
        # Dimension-centric view of the data space
        intervals = IntervalGroup.generate('union', *parts.values())
        # E.g., `db0 -> time`, but `xi NOT-> x`
        intervals = intervals.promote(lambda d: not d.is_Sub)
        intervals = intervals.zero(set(intervals.dimensions) - oobs)

        return DataSpace(intervals, parts)
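The heart of the derivation above, stripped of Devito types: compute per-function, per-Dimension extents from the accessed offsets, then take their Dimension-wise union as the global view. The `accesses` dict below is made-up data, not the output of `detect_accesses`.

accesses = {
    'u': {'x': {-1, 0, 1}, 'y': {0, 2}},
    'v': {'x': {0}},
}

# Per-function projections of the data space (the "parts")
parts = {f: {d: (min(offs), max(offs)) for d, offs in v.items()}
         for f, v in accesses.items()}

# Global, Dimension-centric view: union of the per-function extents
intervals = {}
for v in parts.values():
    for d, (lo, hi) in v.items():
        plo, phi = intervals.get(d, (lo, hi))
        intervals[d] = (min(plo, lo), max(phi, hi))

print(parts)      # {'u': {'x': (-1, 1), 'y': (0, 2)}, 'v': {'x': (0, 0)}}
print(intervals)  # {'x': (-1, 1), 'y': (0, 2)}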
Example #15
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i,
                        kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(sympy.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension.middle("%si" % i, i, 1, 1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            for k, v in mapper.items():
                ordering.insert(ordering.index(k) + 1, v)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)

        # The iteration space is constructed so that information always flows
        # from one iteration to the next (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr),
                                         lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = sorted(intervals, key=lambda i: ordering.index(i.dim))
        ispace = IterationSpace([i.zero() for i in intervals], iterators,
                                directions)

        # The data space is relative to the computational domain
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [
            Interval(i, 0, 0) for i in ordering if i not in ispace.dimensions
        ]
        parts = {
            k: IntervalGroup(build_intervals(v))
            for k, v in mapper.items() if k
        }
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr._is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._reads, expr._writes = detect_io(expr)

        return expr
Example #16
 def dspace(self):
     """Return the DataSpace of this ClusterGroup."""
     return DataSpace.merge(*[i.dspace for i in self])