Example #1
    def is_equivalent(self, other):
        if not isinstance(other, Stream):
            return False

        # Test type
        if self.dtype != other.dtype:
            return False

        # Test dimensionality
        if len(self.shape) != len(other.shape):
            return False

        # Test shape
        for dim, otherdim in zip(self.shape, other.shape):
            # If both are symbols, ensure equality
            if symbolic.issymbolic(dim) and symbolic.issymbolic(otherdim):
                if dim != otherdim:
                    return False

            # If one is a symbol and the other is a constant,
            # make sure they are equivalent
            elif symbolic.issymbolic(otherdim):
                if symbolic.eval(otherdim) != dim:
                    return False
            elif symbolic.issymbolic(dim):
                if symbolic.eval(dim) != otherdim:
                    return False
            else:
                # Any other case (constant vs. constant), check for equality
                if otherdim != dim:
                    return False
        return True
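The shape comparison above hinges on symbolic.issymbolic deciding whether a dimension is a plain number or a symbolic expression. A minimal sketch of that behavior, assuming a standard DaCe installation and the call signatures seen in these examples:

# Hedged sketch: exercises symbolic.issymbolic() as it is used in the
# examples on this page; exact return values assume current DaCe semantics.
from dace import symbolic

N = symbolic.symbol('N')
print(symbolic.issymbolic(N))              # True: a free symbol
print(symbolic.issymbolic(N + 1))          # True: an expression with free symbols
print(symbolic.issymbolic(64))             # False: a plain constant
# A constants mapping (e.g., sdfg.constants) treats specialized symbols as constant
print(symbolic.issymbolic(N, {'N': 64}))   # False under this mapping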
Example #2
    def can_be_applied(graph: Union[SDFG, SDFGState],
                       candidate: Dict['PatternNode', int], expr_index: int,
                       sdfg: SDFG, strict: bool) -> bool:
        src = graph.nodes()[candidate[BankSplit._src_node]]
        dst = graph.nodes()[candidate[BankSplit._dst_node]]
        src_array = sdfg.arrays[src.data]
        dst_array = sdfg.arrays[dst.data]

        plain_array = lambda array: isinstance(
            array, data.Array) and not isinstance(array, data.View)

        if not plain_array(src_array):
            return False
        if not plain_array(dst_array):
            return False

        # To hold the same data, the HBM array needs one more dimension (the bank dimension)
        collect_src = len(src_array.shape) - 1 == len(dst_array.shape)
        distribute_dst = len(src_array.shape) + 1 == len(dst_array.shape)
        if collect_src and symbolic.issymbolic(src_array.shape[0],
                                               sdfg.constants):
            return False
        elif distribute_dst and symbolic.issymbolic(dst_array.shape[0],
                                                    sdfg.constants):
            return False
        return collect_src or distribute_dst
Example #3
    def replace(self, repl_dict):
        for i, ((rb, re, rs),
                ts) in enumerate(zip(self.ranges, self.tile_sizes)):
            self.ranges[i] = (
                rb.subs(repl_dict) if symbolic.issymbolic(rb) else rb,
                re.subs(repl_dict) if symbolic.issymbolic(re) else re,
                rs.subs(repl_dict) if symbolic.issymbolic(rs) else rs)
            self.tile_sizes[i] = (ts.subs(repl_dict)
                                  if symbolic.issymbolic(ts) else ts)
Example #4
def _replsym(symlist, symrepl):
    """ Helper function to replace symbols in various symbolic expressions. """
    if symlist is None:
        return None
    if isinstance(symlist, (symbolic.SymExpr, symbolic.symbol, sp.Basic)):
        return symlist.subs(symrepl)
    for i, dim in enumerate(symlist):
        try:
            symlist[i] = tuple(d.subs(symrepl) if symbolic.issymbolic(d) else d for d in dim)
        except TypeError:
            symlist[i] = (dim.subs(symrepl) if symbolic.issymbolic(dim) else dim)
    return symlist
Example #5
def make_transients_persistent(sdfg: SDFG, device: dtypes.DeviceType) -> None:
    '''
    Helper function to change several storage and scheduling properties:
    - Makes non-view array lifetimes persistent, with some
      restrictions depending on the device.
    - Resets nonatomic WCR edges on GPU.
    :param sdfg: SDFG
    :param device: Device type
    '''
    for nsdfg in sdfg.all_sdfgs_recursive():
        for aname, arr in nsdfg.arrays.items():
            if arr.transient and not isinstance(
                    arr, dt.View) and not symbolic.issymbolic(arr.total_size):
                if arr.storage != dtypes.StorageType.Register:
                    arr.lifetime = dtypes.AllocationLifetime.Persistent

    if device == dtypes.DeviceType.GPU:
        for aname, arr in sdfg.arrays.items():
            if arr.transient and not isinstance(
                    arr, dt.View):  # and size only depends on SDFG params
                if arr.storage == dtypes.StorageType.GPU_Global:
                    arr.lifetime = dtypes.AllocationLifetime.Persistent

        # Reset nonatomic WCR edges
        for n, _ in sdfg.all_nodes_recursive():
            if isinstance(n, SDFGState):
                for edge in n.edges():
                    edge.data.wcr_nonatomic = False
Example #6
    def can_be_applied(self, dim_exprs, variable_context, node_range,
                       orig_edges, dim_index, total_dims):
        dims = []
        for dim in dim_exprs:
            if isinstance(dim, tuple):
                dims.extend(dim)
            else:
                dims.append(dim)

        self.params = variable_context[-1]
        defined_vars = variable_context[-2]

        used_symbols = set()
        for dim in dims:
            if symbolic.issymbolic(dim):
                used_symbols.update(dim.free_symbols)

        if (used_symbols & set(self.params)
                and any(s not in defined_vars
                        for s in node_range.free_symbols)):
            # Cannot propagate symbols that are undefined in the outer range
            # (e.g., dynamic map ranges).
            return False

        # Always matches
        return True
Example #7
def calc_set_image_range(map_idx, map_set, array_range):
    image = []
    for a_range in array_range:
        new_range = list(a_range)
        for m_idx, m_range in zip(map_idx, map_set):
            symbol = symbolic.pystr_to_symbolic(m_idx)
            for i in range(3):
                if isinstance(m_range[i], SymExpr):
                    exact = m_range[i].expr
                    approx = m_range[i].approx
                else:
                    exact = m_range[i]
                    approx = overapproximate(m_range[i])
                if isinstance(new_range[i], SymExpr):
                    new_range[i] = SymExpr(
                        new_range[i].expr.subs([(symbol, exact)]),
                        new_range[i].approx.subs([(symbol, approx)]))
                elif issymbolic(new_range[i]):
                    new_range[i] = SymExpr(
                        new_range[i].subs([(symbol, exact)]),
                        new_range[i].subs([(symbol, approx)]))
                else:
                    new_range[i] = SymExpr(new_range[i], new_range[i])
        image.append(new_range)
    return subsets.Range(image)
Example #8
    def __call__(self, *args, **kwargs):
        """ Convenience function that parses, compiles, and runs a DaCe 
            program. """
        binaryobj = self.compile(*args)
        # Add named arguments to the call
        kwargs.update({aname: arg for aname, arg in zip(self.argnames, args)})
        # Update arguments with symbols in data shapes
        kwargs.update({
            sym: symbolic.symbol(sym).get()
            for arg in args
            for sym in (symbolic.symlist(arg.descriptor.shape) if hasattr(
                arg, 'descriptor') else [])
        })
        # Update arguments with symbol values
        for aname in self.argnames:
            if aname in binaryobj.sdfg.arrays:
                sym_shape = binaryobj.sdfg.arrays[aname].shape
                for sym in (sym_shape):
                    if symbolic.issymbolic(sym):
                        try:
                            kwargs[str(sym)] = sym.get()
                        except:
                            pass

        return binaryobj(**kwargs)
Example #9
    def _check_strides(inner_strides: List[symbolic.SymbolicType],
                       outer_strides: List[symbolic.SymbolicType],
                       memlet: Memlet, nested_sdfg: nodes.NestedSDFG) -> bool:
        """
        Returns True if the strides of the inner array can be matched
        to the strides of the outer array upon inlining. Takes into
        consideration memlet (un)squeeze and nested SDFG symbol mapping.
        :param inner_strides: The strides of the array inside the nested SDFG.
        :param outer_strides: The strides of the array in the external SDFG.
        :param memlet: The memlet connecting the arrays; dimensions of size 1
                       in its subset are treated as squeezed and ignored.
        :param nested_sdfg: Nested SDFG node with symbol mapping.
        :return: True if all strides match, False otherwise.
        """
        # Take unsqueezing into account
        dims_to_ignore = [
            i for i, s in enumerate(memlet.subset.size()) if s == 1
        ]
        ostrides = [
            os for i, os in enumerate(outer_strides) if i not in dims_to_ignore
        ]
        if len(ostrides) == 0:
            ostrides = [1]
        if len(ostrides) != len(inner_strides):
            return False

        # Replace all inner symbols based on symbol mapping
        repldict = {
            symbolic.pystr_to_symbolic(k): symbolic.pystr_to_symbolic(v)
            for k, v in nested_sdfg.symbol_mapping.items()
        }
        istrides = [
            istr.subs(repldict) if symbolic.issymbolic(istr) else istr
            for istr in inner_strides
        ]

        return all(istr == ostr for istr, ostr in zip(istrides, ostrides))
Example #10
def _datadesc(obj: Any):
    from dace import data
    if isinstance(obj, data.Data):
        return obj
    elif symbolic.issymbolic(obj):
        return data.Scalar(symbolic.symtype(obj))
    elif isinstance(obj, dtypes.typeclass):
        return data.Scalar(obj)
    return data.Scalar(dtypes.typeclass(type(obj)))
Example #11
def ndarray(shape, dtype=numpy.float64, *args, **kwargs):
    """ Returns a numpy ndarray where all symbols have been evaluated to
        numbers and types are converted to numpy types. """
    repldict = {sym: sym.get() for sym in symbolic.symlist(shape).values()}
    new_shape = [
        int(s.subs(repldict) if symbolic.issymbolic(s) else s) for s in shape
    ]
    new_dtype = dtype.type if isinstance(dtype, dtypes.typeclass) else dtype
    return numpy.ndarray(shape=new_shape, dtype=new_dtype, *args, **kwargs)
Example #12
def check_issymbolic(iterator: iter, sdfg):
    for item in iterator:
        # catch symbolic (compile time variables)
        if symbolic.issymbolic(item, sdfg.constants):
            raise ValueError(
                "Please use sdfg.specialize to make the following symbol(s) constant: {}"
                .format(", ".join([
                    str(x) for x in item.free_symbols
                    if str(x) not in sdfg.constants
                ])))
Example #13
def _dependent_indices(itervar: str, subset: subsets.Subset) -> Set[int]:
    """ Finds the indices or ranges of a subset that depend on the iteration
        variable. Returns their index in the subset's indices/ranges list.
    """
    if isinstance(subset, subsets.Indices):
        return {
            i
            for i, idx in enumerate(subset) if symbolic.issymbolic(idx)
            and itervar in {str(s)
                            for s in idx.free_symbols}
        }
    else:
        return {
            i
            for i, rng in enumerate(subset) if any(
                symbolic.issymbolic(t)
                and itervar in {str(s)
                                for s in t.free_symbols} for t in rng)
        }
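_dependent_indices boils down to a free-symbols test on each index or range entry. The same test can be sketched directly in sympy (which dace.symbolic expressions build on); the names and ranges below are purely illustrative:

# Illustrative sketch, not DaCe API: detect which (begin, end, step) ranges
# mention the iteration variable 'i' via sympy free_symbols.
import sympy as sp

i, N = sp.symbols('i N')
ranges = [(0, N - 1, 1), (i, i + 3, 1), (2, 5, 1)]
dependent = {
    d for d, rng in enumerate(ranges)
    if any(isinstance(t, sp.Basic) and 'i' in {str(s) for s in t.free_symbols}
           for t in rng)
}
print(dependent)   # {1}: only the second dimension depends on i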
Example #14
    def can_be_applied(self, graph: SDFGState, expr_index: int, sdfg: SDFG, permissive: bool) -> bool:
        src = self.src_node
        dst = self.dst_node
        src_array = sdfg.arrays[src.data]
        dst_array = sdfg.arrays[dst.data]

        plain_array = lambda array: isinstance(array, data.Array) and not isinstance(array, data.View)

        if not plain_array(src_array):
            return False
        if not plain_array(dst_array):
            return False

        # To hold the same data, the HBM array needs one more dimension (the bank dimension)
        collect_src = len(src_array.shape) - 1 == len(dst_array.shape)
        distribute_dst = len(src_array.shape) + 1 == len(dst_array.shape)
        if collect_src and symbolic.issymbolic(src_array.shape[0], sdfg.constants):
            return False
        elif distribute_dst and symbolic.issymbolic(dst_array.shape[0], sdfg.constants):
            return False
        return collect_src or distribute_dst
Example #15
    def can_be_applied(self, graph, expr_index, sdfg, permissive=False):
        # Is this even a loop
        if not super().can_be_applied(graph, expr_index, sdfg, permissive):
            return False

        guard = self.loop_guard
        begin = self.loop_begin
        found = find_for_loop(graph, guard, begin)

        # If loop cannot be detected, fail
        if not found:
            return False
        _, rng, _ = found

        # If loop stride is not specialized or constant-sized, fail
        if symbolic.issymbolic(rng[2], sdfg.constants):
            return False
        # If loop range diff is not constant-sized, fail
        if symbolic.issymbolic(rng[1] - rng[0], sdfg.constants):
            return False
        return True
Example #16
def isallowed(var, allow_recursive=False):
    """ Returns True if a given object is allowed in a DaCe program.

        :param allow_recursive: whether to allow lists or tuples containing constants.
    """
    from dace.symbolic import issymbolic

    if allow_recursive:
        if isinstance(var, (list, tuple)):
            return all(isallowed(v, allow_recursive=False) for v in var)

    return isconstant(var) or ismodule(var) or issymbolic(var) or isinstance(var, typeclass)
Example #17
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        # Is this even a loop
        if not DetectLoop.can_be_applied(graph, candidate, expr_index, sdfg,
                                         strict):
            return False

        guard = graph.node(candidate[DetectLoop._loop_guard])
        begin = graph.node(candidate[DetectLoop._loop_begin])
        found = find_for_loop(graph, guard, begin)

        # If loop cannot be detected, fail
        if not found:
            return False
        _, rng, _ = found

        # If loop stride is not specialized or constant-sized, fail
        if symbolic.issymbolic(rng[2], sdfg.constants):
            return False
        # If loop range diff is not constant-sized, fail
        if symbolic.issymbolic(rng[1] - rng[0], sdfg.constants):
            return False
        return True
Example #18
    def should_apply(self, sdfg: SDFG) -> bool:
        """
        Fast check (O(m)) whether the pass should early-exit without traversing the SDFG.
        """
        for edge in sdfg.edges():
            # If there are no assignments, there are no constants to propagate
            if len(edge.data.assignments) == 0:
                continue
            # If any assignment assigns a constant (non-symbolic) value to a
            # symbol, there is something to propagate
            if any(not symbolic.issymbolic(aval)
                   for aval in edge.data.assignments.values()):
                return True

        return False
Example #19
    def test_subset_dependency(subset: sbs.Subset, mparams: Set[int]) -> Tuple[bool, List[int]]:
        dims = []
        for i, r in enumerate(subset):
            if not isinstance(r, (list, tuple)):
                r = [r]
            fsymbols = set()
            for token in r:
                if symbolic.issymbolic(token):
                    fsymbols = fsymbols.union({str(s) for s in token.free_symbols})
            if itervar in fsymbols:
                if fsymbols.intersection(mparams):
                    return (False, [])
                else:
                    dims.append(i)
        return (True, dims)
Example #20
def create_datadescriptor(obj):
    """ Creates a data descriptor from various types of objects.
        @see: dace.data.Data
    """
    from dace import dtypes  # Avoiding import loops
    if isinstance(obj, Data):
        return obj
    elif hasattr(obj, '__descriptor__'):
        return obj.__descriptor__()
    elif hasattr(obj, 'descriptor'):
        return obj.descriptor
    elif isinstance(obj, (list, tuple, numpy.ndarray)):
        if isinstance(obj, (list, tuple)):  # Lists and tuples are cast to numpy
            obj = numpy.array(obj)

        if obj.dtype.fields is not None:  # Struct
            dtype = dtypes.struct(
                'unnamed', **{
                    k: dtypes.typeclass(v[0].type)
                    for k, v in obj.dtype.fields.items()
                })
        else:
            dtype = dtypes.typeclass(obj.dtype.type)
        return Array(dtype=dtype,
                     strides=tuple(s // obj.itemsize for s in obj.strides),
                     shape=obj.shape)
    # special case for torch tensors. Maybe __array__ could be used here for a more
    # general solution, but torch doesn't support __array__ for cuda tensors.
    elif type(obj).__module__ == "torch" and type(obj).__name__ == "Tensor":
        try:
            import torch
            return Array(dtype=dtypes.TORCH_DTYPE_TO_TYPECLASS[obj.dtype],
                         strides=obj.stride(),
                         shape=tuple(obj.shape))
        except ImportError:
            raise ValueError(
                "Attempted to convert a torch.Tensor, but torch could not be imported"
            )
    elif symbolic.issymbolic(obj):
        return Scalar(symbolic.symtype(obj))
    elif isinstance(obj, dtypes.typeclass):
        return Scalar(obj)
    elif obj in {int, float, complex, bool, None}:
        return Scalar(dtypes.typeclass(obj))
    elif callable(obj):
        # Cannot determine return value/argument types from function object
        return Scalar(dtypes.callback(None))
    return Scalar(dtypes.typeclass(type(obj)))
Example #21
    def _replace_in_subset(subset, string_or_symbol, new_string_or_symbol):
        new_subset = copy.deepcopy(subset)

        repldict = {
            symbolic.pystr_to_symbolic(string_or_symbol):
            symbolic.pystr_to_symbolic(new_string_or_symbol)
        }

        for i, dim in enumerate(new_subset):
            try:
                new_subset[i] = tuple(d.subs(repldict) for d in dim)
            except TypeError:
                new_subset[i] = (dim.subs(repldict)
                                 if symbolic.issymbolic(dim) else dim)

        return new_subset
Example #22
    def can_be_applied(graph: dace.SDFGState,
                       candidate: Dict[Any, int],
                       expr_index: int,
                       sdfg: dace.SDFG,
                       strict=False):
        map_entry: nodes.MapEntry = graph.node(candidate[NestK._map_entry])
        stencil: Stencil = graph.node(candidate[NestK._stencil])

        if len(map_entry.map.params) != 1:
            return False
        if sd.has_dynamic_map_inputs(graph, map_entry):
            return False
        pname = map_entry.map.params[0]  # Usually "k"
        dim_index = None

        for edge in graph.out_edges(map_entry):
            if edge.dst != stencil:
                return False

        for edge in graph.all_edges(stencil):
            if edge.data.data is None:  # Empty memlet
                continue
            # TODO: Use bitmap to verify lower-dimensional arrays
            if len(edge.data.subset) == 3:
                for i, rng in enumerate(edge.data.subset.ndrange()):
                    for r in rng:
                        if pname in map(str, r.free_symbols):
                            if dim_index is not None and dim_index != i:
                                # k dimension must match in all memlets
                                return False
                            if str(r) != pname:
                                if symbolic.issymbolic(
                                        r - symbolic.symbol(pname),
                                        sdfg.constants):
                                    warnings.warn('k expression is nontrivial')
                            dim_index = i

        # No nesting dimension found
        if dim_index is None:
            return False

        # Ensure the stencil shape is 1 for the found dimension
        if stencil.shape[dim_index] != 1:
            return False

        return True
Example #23
def _create_datadescriptor(obj):
    """ Creates a data descriptor from various types of objects.
        @see: dace.data.Data
    """
    if isinstance(obj, data.Data):
        return obj

    try:
        return obj.descriptor
    except AttributeError:
        if isinstance(obj, numpy.ndarray):
            return data.Array(dtype=types.typeclass(obj.dtype.type),
                              shape=obj.shape)
        if symbolic.issymbolic(obj):
            return data.Scalar(symbolic.symtype(obj))
        if isinstance(obj, types.typeclass):
            return data.Scalar(obj)
        return data.Scalar(types.typeclass(type(obj)))
Example #24
def create_datadescriptor(obj):
    """ Creates a data descriptor from various types of objects.
        @see: dace.data.Data
    """
    from dace import dtypes  # Avoiding import loops
    if isinstance(obj, Data):
        return obj

    try:
        return obj.descriptor
    except AttributeError:
        if isinstance(obj, numpy.ndarray):
            return Array(dtype=dtypes.typeclass(obj.dtype.type),
                         shape=obj.shape)
        if symbolic.issymbolic(obj):
            return Scalar(symbolic.symtype(obj))
        if isinstance(obj, dtypes.typeclass):
            return Scalar(obj)
        return Scalar(dtypes.typeclass(type(obj)))
Example #25
def create_batch_gemm_sdfg(dtype, strides):
    #########################
    sdfg = SDFG('einsum')
    state = sdfg.add_state()
    M, K, N = (symbolic.symbol(s) for s in ['M', 'K', 'N'])
    BATCH, sAM, sAK, sAB, sBK, sBN, sBB, sCM, sCN, sCB = (
        symbolic.symbol(s) if symbolic.issymbolic(strides[s]) else strides[s]
        for s in [
            'BATCH', 'sAM', 'sAK', 'sAB', 'sBK', 'sBN', 'sBB', 'sCM', 'sCN',
            'sCB'
        ])

    batched = strides['BATCH'] != 1

    _, xarr = sdfg.add_array(
        'X',
        dtype=dtype,
        shape=[BATCH, M, K] if batched else [M, K],
        strides=[sAB, sAM, sAK] if batched else [sAM, sAK])
    _, yarr = sdfg.add_array(
        'Y',
        dtype=dtype,
        shape=[BATCH, K, N] if batched else [K, N],
        strides=[sBB, sBK, sBN] if batched else [sBK, sBN])
    _, zarr = sdfg.add_array(
        'Z',
        dtype=dtype,
        shape=[BATCH, M, N] if batched else [M, N],
        strides=[sCB, sCM, sCN] if batched else [sCM, sCN])

    gX = state.add_read('X')
    gY = state.add_read('Y')
    gZ = state.add_write('Z')

    import dace.libraries.blas as blas  # Avoid import loop

    libnode = blas.MatMul('einsum_gemm')
    state.add_node(libnode)
    state.add_edge(gX, None, libnode, '_a', Memlet.from_array(gX.data, xarr))
    state.add_edge(gY, None, libnode, '_b', Memlet.from_array(gY.data, yarr))
    state.add_edge(libnode, '_c', gZ, None, Memlet.from_array(gZ.data, zarr))

    return sdfg
Example #26
def calc_set_image_range(map_idx, map_set, array_range):
    image = []
    for a_range in array_range:
        new_range = list(a_range)
        for m_idx, m_range in zip(map_idx, map_set):
            symbol = symbolic.pystr_to_symbolic(m_idx)
            for i in range(3):
                if isinstance(m_range[i], SymExpr):
                    exact = m_range[i].expr
                    approx = m_range[i].approx
                else:
                    exact = m_range[i]
                    approx = overapproximate(m_range[i])
                if isinstance(new_range[i], SymExpr):
                    new_range[i] = SymExpr(
                        new_range[i].expr.subs([(symbol, exact)]),
                        new_range[i].approx.subs([(symbol, approx)]))
                elif issymbolic(new_range[i]):
                    new_range[i] = SymExpr(
                        new_range[i].subs([(symbol, exact)]),
                        new_range[i].subs([(symbol, approx)]))
                else:
                    new_range[i] = SymExpr(new_range[i], new_range[i])
            if isinstance(new_range[0], SymExpr):
                start = new_range[0].approx
            else:
                start = new_range[0]
            if isinstance(new_range[1], SymExpr):
                stop = new_range[1].approx
            else:
                stop = new_range[1]
            if isinstance(new_range[2], SymExpr):
                step = new_range[2].approx
            else:
                step = new_range[2]
            # Comparisons on symbolic bounds may not resolve to a bool;
            # '== True' keeps only provably true results and treats
            # undecidable ones as False.
            descending = (start > stop) == True
            posstep = (step > 0) == True
            if descending and posstep:
                new_range[0], new_range[1] = new_range[1], new_range[0]
        image.append(new_range)
    return subsets.Range(image)
Example #27
def move_small_arrays_to_stack(sdfg: SDFG) -> None:
    """
    Set all Default storage types that are constant sized and less than
    the auto-tile size to the stack (as StorageType.Register).
    :param sdfg: The SDFG to operate on.
    :note: Operates in-place on the SDFG.
    """
    converted = 0
    tile_size = config.Config.get('optimizer', 'autotile_size')
    for sd, aname, array in sdfg.arrays_recursive():
        if isinstance(array, dt.Stream):
            continue
        if (array.transient and array.storage == dtypes.StorageType.Default
                and array.lifetime == dtypes.AllocationLifetime.Scope):
            if not symbolic.issymbolic(array.total_size, sd.constants):
                eval_size = symbolic.evaluate(array.total_size, sd.constants)
                if eval_size <= tile_size:
                    array.storage = dtypes.StorageType.Register
                    converted += 1

    if config.Config.get_bool('debugprint') and converted > 0:
        print(f'Statically allocating {converted} transient arrays')
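The guard above shows the usual pairing of issymbolic with evaluate: only sizes whose free symbols are all covered by the constants mapping get evaluated to numbers. A small sketch of that pattern, assuming symbolic.evaluate behaves as in the example; 'constants' stands in for sdfg.constants and the symbol name 'N' is illustrative:

# Hedged sketch of the check-then-evaluate pattern from the example above.
from dace import symbolic

N = symbolic.symbol('N')
total_size = N * N
constants = {'N': 16}

# Only evaluate once every free symbol is covered by the constants mapping
if not symbolic.issymbolic(total_size, constants):
    print(symbolic.evaluate(total_size, constants))   # 256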
Example #28
    def replace(self, repl_dict):
        """ Substitute a given set of symbols with a different set of symbols.
            :param repl_dict: A dict of string symbol names to symbols with
                              which to replace them.
        """
        repl_to_intermediate = {}
        repl_to_final = {}
        for symbol in repl_dict:
            if str(symbol) != str(repl_dict[symbol]):
                intermediate = symbolic.symbol('__dacesym_' + str(symbol))
                repl_to_intermediate[symbolic.symbol(symbol)] = intermediate
                repl_to_final[intermediate] = repl_dict[symbol]

        if len(repl_to_intermediate) > 0:
            if self.volume is not None and symbolic.issymbolic(self.volume):
                self.volume = self.volume.subs(repl_to_intermediate)
                self.volume = self.volume.subs(repl_to_final)
            if self.subset is not None:
                self.subset.replace(repl_to_intermediate)
                self.subset.replace(repl_to_final)
            if self.other_subset is not None:
                self.other_subset.replace(repl_to_intermediate)
                self.other_subset.replace(repl_to_final)
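The detour through '__dacesym_' intermediates exists because applying replacements one after another can collapse a swap (e.g., i -> j followed by j -> i). A plain sympy sketch of the problem and the two-stage fix, with hypothetical symbol names:

# Illustrative only: why a temporary symbol is needed when swapping symbols.
import sympy as sp

i, j = sp.symbols('i j')
expr = i + 2*j

# Sequential substitution loses the swap: i -> j gives 3*j, then j -> i gives 3*i
print(expr.subs(i, j).subs(j, i))                  # 3*i (not a swap)

# Going through an intermediate symbol, as the replace() method above does, keeps it
tmp = sp.Symbol('__dacesym_i')
print(expr.subs(i, tmp).subs(j, i).subs(tmp, j))   # 2*i + j (correct swap)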
Example #29
def create_datadescriptor(obj):
    """ Creates a data descriptor from various types of objects.
        @see: dace.data.Data
    """
    from dace import dtypes  # Avoiding import loops
    if isinstance(obj, Data):
        return obj
    elif hasattr(obj, '__descriptor__'):
        return obj.__descriptor__()
    elif hasattr(obj, 'descriptor'):
        return obj.descriptor
    elif isinstance(obj, (list, tuple, numpy.ndarray)):
        if isinstance(obj,
                      (list, tuple)):  # Lists and tuples are cast to numpy
            obj = numpy.array(obj)

        if obj.dtype.fields is not None:  # Struct
            dtype = dtypes.struct(
                'unnamed', **{
                    k: dtypes.typeclass(v[0].type)
                    for k, v in obj.dtype.fields.items()
                })
        else:
            dtype = dtypes.typeclass(obj.dtype.type)
        return Array(dtype=dtype,
                     strides=tuple(s // obj.itemsize for s in obj.strides),
                     shape=obj.shape)
    elif symbolic.issymbolic(obj):
        return Scalar(symbolic.symtype(obj))
    elif isinstance(obj, dtypes.typeclass):
        return Scalar(obj)
    elif obj in {int, float, complex, bool, None}:
        return Scalar(dtypes.typeclass(obj))
    elif callable(obj):
        # Cannot determine return value/argument types from function object
        return Scalar(dtypes.callback(None))
    return Scalar(dtypes.typeclass(type(obj)))
Example #30
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        if not DetectLoop.can_be_applied(graph, candidate, expr_index, sdfg,
                                         strict):
            return False

        guard = graph.node(candidate[DetectLoop._loop_guard])
        begin = graph.node(candidate[DetectLoop._loop_begin])

        # Obtain iteration variable, range, and stride
        guard_inedges = graph.in_edges(guard)
        condition_edge = graph.edges_between(guard, begin)[0]
        itervar = list(guard_inedges[0].data.assignments.keys())[0]
        condition = condition_edge.data.condition_sympy()

        # If loop cannot be detected, fail
        rng = LoopUnroll._loop_range(itervar, guard_inedges, condition)
        if not rng:
            return False

        # If loop is not specialized or constant-sized, fail
        if any(symbolic.issymbolic(r, sdfg.constants) for r in rng):
            return False

        return True