Example #1
def clusterize(exprs):
    """
    Group a sequence of :class:`ir.Eq`s into one or more :class:`Cluster`s.
    """
    clusters = ClusterGroup()
    flowmap = detect_flow_directions(exprs)
    prev = None
    for idx, e in enumerate(exprs):
        if e.is_Tensor:
            scalars = [i for i in exprs[prev:idx] if i.is_Scalar]
            # Iteration space
            ispace = IterationSpace.merge(e.ispace, *[i.ispace for i in scalars])
            # Enforce iteration directions
            fdirs, _ = force_directions(flowmap, lambda d: ispace.directions.get(d))
            ispace = IterationSpace(ispace.intervals, ispace.sub_iterators, fdirs)
            # Data space
            dspace = DataSpace.merge(e.dspace, *[i.dspace for i in scalars])
            # Prepare for next range
            prev = idx

            clusters.append(PartialCluster(scalars + [e], ispace, dspace))

    # Group PartialClusters together where possible
    clusters = groupby(clusters)

    # Introduce conditional PartialClusters
    clusters = guard(clusters)

    return clusters.finalize()
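A minimal usage sketch of the function above, for orientation. It assumes the rest of the lowering pipeline has already produced LoweredEq objects; the variable names are illustrative, not taken from the snippet:

# Hypothetical usage: lower plain Eq objects first, then group them into Clusters.
lowered = [LoweredEq(e) for e in expressions]  # expressions: devito.Eq instances
clusters = clusterize(lowered)                 # ClusterGroup of finalized Clusters
for c in clusters:
    print(c.ispace, c.dspace)                  # each Cluster carries its iteration and data space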
Example #2
File: equation.py Project: opesci/devito
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = sympy.Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(devito.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = sympy.Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # The iteration space is constructed so that information always flows
        # from an iteration to another (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = IntervalGroup(intervals, relations=ordering.relations)
        ispace = IterationSpace(intervals.zero(), iterators, directions)

        # The data space is relative to the computational domain. Note that we
        # are deliberately dropping the intervals ordering (by turning `intervals`
        # into a list), as this is irrelevant (even more: dangerous) for data spaces
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering
                      if i not in ispace.dimensions + conditionals]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)

        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        expr._is_Increment = input_expr.is_Increment
        expr._implicit_dims = input_expr.implicit_dims

        return expr
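The constructor above dispatches on its arguments to support three origins. A brief, hypothetical sketch of how each path might be exercised; `eq`, `leq`, `lhs` and `rhs` are placeholder names, not taken from the snippet:

# 1) Lower a devito Eq: ispace, dspace, conditionals and reads/writes are computed.
leq = LoweredEq(eq)
# 2) Rebuild from an existing LoweredEq, optionally overriding entries of its _state.
leq2 = LoweredEq(leq, ispace=leq.ispace)
# 3) Rebuild from an explicit lhs/rhs pair, with the full state passed as keyword arguments.
leq3 = LoweredEq(lhs, rhs, **{i: getattr(leq, i) for i in LoweredEq._state})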
Example #3
File: equation.py Project: ponykid/SNIST
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(sympy.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)
        conditionals = [i for i in ordering if i.is_Conditional]

        # The iteration space is constructed so that information always flows
        # from an iteration to another (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr), lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = IntervalGroup(intervals, relations=ordering.relations)
        ispace = IterationSpace(intervals.zero(), iterators, directions)

        # The data space is relative to the computational domain. Note that we
        # are deliberately dropping the intervals ordering (by turning `intervals`
        # into a list), as this is irrelevant (even more: dangerous) for data spaces
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [Interval(i, 0, 0) for i in ordering
                      if i not in ispace.dimensions + conditionals]
        parts = {k: IntervalGroup(build_intervals(v)) for k, v in mapper.items() if k}
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls, expr.lhs, expr.rhs, evaluate=False)
        expr._is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._conditionals = tuple(conditionals)
        expr._reads, expr._writes = detect_io(expr)

        return expr
Example #4
def clusterize(exprs):
    """Group a sequence of :class:`ir.Eq`s into one or more :class:`Cluster`s."""

    # Group expressions based on data dependences
    groups = group_expressions(exprs)

    clusters = ClusterGroup()
    for g in groups:
        # Coerce iteration space of each expression in each group
        mapper = OrderedDict([(e, e.ispace) for e in g])
        flowmap = detect_flow_directions(g)
        queue = list(g)
        while queue:
            v = queue.pop(0)

            intervals, sub_iterators, directions = mapper[v].args
            forced, clashes = force_directions(flowmap,
                                               lambda i: directions.get(i))
            for e in g:
                intervals = intervals.intersection(
                    mapper[e].intervals.drop(clashes))
            directions = {i: forced[i] for i in directions}
            coerced_ispace = IterationSpace(intervals, sub_iterators,
                                            directions)

            # Does the update need to be propagated?
            if coerced_ispace != mapper[v]:
                mapper[v] = coerced_ispace
                queue.extend([i for i in g if i not in queue])

        # Wrap each tensor expression in a PartialCluster
        for k, v in mapper.items():
            if k.is_Tensor:
                scalars = [i for i in g[:g.index(k)] if i.is_Scalar]
                clusters.append(PartialCluster(scalars + [k], v))

    # Group PartialClusters together where possible
    clusters = groupby(clusters)

    # Introduce conditional PartialClusters
    clusters = guard(clusters)

    return clusters.finalize()
Example #5
def clusterize(exprs):
    """Group a sequence of :class:`ir.Eq`s into one or more :class:`Cluster`s."""
    # Group expressions based on data dependences
    groups = group_expressions(exprs)

    clusters = ClusterGroup()

    # Coerce iteration direction of each expression in each group
    for g in groups:
        mapper = OrderedDict([(e, e.directions) for e in g])
        flowmap = detect_flow_directions(g)
        queue = list(g)
        while queue:
            k = queue.pop(0)
            directions, _ = force_directions(flowmap,
                                             lambda i: mapper[k].get(i))
            directions = {i: directions[i] for i in mapper[k]}
            # Does the update need to be propagated?
            if directions != mapper[k]:
                mapper[k] = directions
                queue.extend([i for i in g if i not in queue])

        # Wrap each tensor expression in a PartialCluster
        for k, v in mapper.items():
            if k.is_Tensor:
                scalars = [i for i in g[:g.index(k)] if i.is_Scalar]
                intervals, sub_iterators, _ = k.ispace.args
                ispace = IterationSpace(intervals, sub_iterators, v)
                clusters.append(PartialCluster(scalars + [k], ispace,
                                               k.dspace))

    # Group PartialClusters together where possible
    clusters = groupby(clusters)

    # Introduce conditional PartialClusters
    clusters = guard(clusters)

    return clusters.finalize()
Example #6
def clusterize(exprs):
    """Group a sequence of LoweredEqs into one or more Clusters."""
    clusters = ClusterGroup()

    # Wrap each LoweredEq in `exprs` within a PartialCluster. The PartialCluster's
    # iteration direction is enforced based on the iteration direction of the
    # surrounding LoweredEqs
    flowmap = detect_flow_directions(exprs)
    for e in exprs:
        directions, _ = force_directions(flowmap,
                                         lambda d: e.ispace.directions.get(d))
        ispace = IterationSpace(e.ispace.intervals, e.ispace.sub_iterators,
                                directions)

        clusters.append(PartialCluster(e, ispace, e.dspace))

    # Group PartialClusters together where possible
    clusters = groupby(clusters)

    # Introduce conditional PartialClusters
    clusters = guard(clusters)

    return clusters.finalize()
Example #7
    def __new__(cls, *args, **kwargs):
        if len(args) == 1:
            # origin: LoweredEq(expr)
            expr = input_expr = args[0]
            assert not isinstance(expr, LoweredEq) and isinstance(expr, Eq)
        elif len(args) == 2:
            # origin: LoweredEq(lhs, rhs, stamp=...)
            stamp = kwargs.pop('stamp')
            expr = Eq.__new__(cls, *args, evaluate=False)
            assert isinstance(stamp, Eq)
            expr.is_Increment = stamp.is_Increment
            expr._ispace, expr._dspace = stamp.ispace, stamp.dspace
            expr.reads, expr.writes = stamp.reads, stamp.writes
            return expr
        elif len(args) == 5:
            # origin: LoweredEq(expr, ispace, dspace, reads, writes)
            input_expr, ispace, dspace, reads, writes = args
            assert isinstance(ispace, IterationSpace) and isinstance(
                dspace, DataSpace)
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            expr.is_Increment = input_expr.is_Increment
            expr._ispace, expr._dspace = ispace, dspace
            expr.reads, expr.writes = reads, writes
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions, if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension("%si" % i, i, 1, -1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            ordering = [mapper.get(i, i) for i in ordering]

        # Analyze data accesses
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)

        # The iteration space is constructed so that information always flows
        # from an iteration to another (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr),
                                         lambda i: Any)
        intervals, iterators = build_intervals(mapper)
        intervals = sorted(intervals, key=lambda i: ordering.index(i.dim))
        ispace = IterationSpace([i.zero() for i in intervals], iterators,
                                directions)

        # The data space is relative to the computational domain
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [
            Interval(i, 0, 0) for i in ordering if i not in ispace.dimensions
        ]
        parts = {
            k:
            IntervalGroup(Interval(i, min(j), max(j)) for i, j in v.items())
            for k, v in mapper.items()
        }
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr.is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr.reads, expr.writes = detect_io(expr)

        return expr
Example #8
    def __new__(cls, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], LoweredEq):
            # origin: LoweredEq(devito.LoweredEq, **kwargs)
            input_expr = args[0]
            expr = Eq.__new__(cls, *input_expr.args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i,
                        kwargs.get(i) or getattr(input_expr, i))
            return expr
        elif len(args) == 1 and isinstance(args[0], Eq):
            # origin: LoweredEq(sympy.Eq)
            input_expr = expr = args[0]
        elif len(args) == 2:
            expr = Eq.__new__(cls, *args, evaluate=False)
            for i in cls._state:
                setattr(expr, '_%s' % i, kwargs.pop(i))
            return expr
        else:
            raise ValueError("Cannot construct LoweredEq from args=%s "
                             "and kwargs=%s" % (str(args), str(kwargs)))

        # Well-defined dimension ordering
        ordering = dimension_sort(expr, key=lambda i: not i.is_Time)

        # Introduce space sub-dimensions, if needed
        region = getattr(input_expr, '_region', DOMAIN)
        if region == INTERIOR:
            mapper = {
                i: SubDimension.middle("%si" % i, i, 1, 1)
                for i in ordering if i.is_Space
            }
            expr = expr.xreplace(mapper)
            for k, v in mapper.items():
                ordering.insert(ordering.index(k) + 1, v)

        # Analyze the expression
        mapper = detect_accesses(expr)
        oobs = detect_oobs(mapper)

        # The iteration space is constructed so that information always flows
        # from an iteration to another (i.e., no anti-dependences are created)
        directions, _ = force_directions(detect_flow_directions(expr),
                                         lambda i: Any)
        iterators = build_iterators(mapper)
        intervals = build_intervals(Stencil.union(*mapper.values()))
        intervals = sorted(intervals, key=lambda i: ordering.index(i.dim))
        ispace = IterationSpace([i.zero() for i in intervals], iterators,
                                directions)

        # The data space is relative to the computational domain
        intervals = [i if i.dim in oobs else i.zero() for i in intervals]
        intervals += [
            Interval(i, 0, 0) for i in ordering if i not in ispace.dimensions
        ]
        parts = {
            k: IntervalGroup(build_intervals(v))
            for k, v in mapper.items() if k
        }
        dspace = DataSpace(intervals, parts)

        # Finally create the LoweredEq with all metadata attached
        expr = super(LoweredEq, cls).__new__(cls,
                                             expr.lhs,
                                             expr.rhs,
                                             evaluate=False)
        expr._is_Increment = getattr(input_expr, 'is_Increment', False)
        expr._dspace = dspace
        expr._ispace = ispace
        expr._reads, expr._writes = detect_io(expr)

        return expr