Example #1
def get_arith_ops(sdfg_json):
    loaded = load_sdfg_from_json(sdfg_json)
    if loaded['error'] is not None:
        return loaded['error']
    sdfg = loaded['sdfg']

    propagation.propagate_memlets_sdfg(sdfg)

    arith_map = {}
    create_arith_ops_map(sdfg, arith_map, {})
    return {
        'arith_ops_map': arith_map,
    }
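
For context, a minimal usage sketch of the helper above (hedged: it assumes get_arith_ops and its helpers are importable, and that it accepts the dict produced by SDFG.to_json()):

import dace

@dace.program
def axpy(A: dace.float64[100], B: dace.float64[100]):
    B[:] = 2.0 * A + B

sdfg = axpy.to_sdfg()
# Hypothetical call: returns a map of arithmetic-operation counts per SDFG element.
result = get_arith_ops(sdfg.to_json())
print(result['arith_ops_map'])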
Example #2
def test_memlet_volume_propagation_nsdfg():
    sdfg = make_sdfg()
    propagation.propagate_memlets_sdfg(sdfg)

    main_state = sdfg.nodes()[0]
    data_in_memlet = main_state.edges()[0].data
    bound_stream_in_memlet = main_state.edges()[1].data
    out_stream_memlet = main_state.edges()[2].data

    memlet_check_parameters(data_in_memlet, 0, True, [(0, N - 1, 1)])
    memlet_check_parameters(bound_stream_in_memlet, 1, False, [(0, 0, 1)])
    memlet_check_parameters(out_stream_memlet, 0, True, [(0, 0, 1)])

    nested_sdfg = main_state.nodes()[3].sdfg

    loop_state = nested_sdfg.nodes()[2]

    state_check_executions(loop_state, symbol('loop_bound'))
Example #3
import dace
import numpy as np
from dace.sdfg import propagation as prop


def test_unsqueeze():
    """ Tests for an issue in unsqueeze not allowing reshape. """
    @dace.program
    def callee(A: dace.float64[60, 2]):
        A[:, 1] = 5.0

    @dace.program
    def caller(A: dace.float64[2, 3, 4, 5]):
        callee(A)

    A = np.random.rand(2, 3, 4, 5)
    expected = A.copy()  # use a copy so the reference result is independent of A
    expected.reshape(60, 2)[:, 1] = 5.0

    sdfg = caller.to_sdfg()
    prop.propagate_memlets_sdfg(sdfg)
    sdfg(A=A)

    assert np.allclose(A, expected)
Example #4
    def apply_pattern(self, sdfg: SDFG, append: bool = True) -> Union[Any, None]:
        """
        Applies this transformation on the given SDFG, using the transformation
        instance to find the right SDFG object (based on SDFG ID), and applying
        memlet propagation as necessary.
        :param sdfg: The SDFG (or an SDFG in the same hierarchy) to apply the
                     transformation to.
        :param append: If True, appends the transformation to the SDFG
                       transformation history.
        :return: A transformation-defined return value, which could be used
                 to pass analysis data out, or nothing.
        """
        tsdfg: SDFG = sdfg.sdfg_list[self.sdfg_id]
        if append:
            sdfg.append_transformation(self)
        retval = self.apply(tsdfg)
        if not self.annotates_memlets():
            propagation.propagate_memlets_sdfg(tsdfg)
        return retval
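
In practice, apply_pattern is usually reached indirectly through DaCe's convenience API. A hedged sketch (MapTiling and apply_transformations exist in recent DaCe releases; the exact options are an assumption):

import dace
from dace.transformation.dataflow import MapTiling

@dace.program
def scale(A: dace.float64[128]):
    A *= 2.0

sdfg = scale.to_sdfg()
# apply_transformations enumerates matches and applies them; memlet
# propagation then runs automatically unless the transformation
# annotates memlets itself (see apply_pattern above).
applied = sdfg.apply_transformations(MapTiling, options={'tile_sizes': (16,)})
print('Applied %d transformation(s)' % applied)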
Example #5
def consolidate_edges(sdfg: SDFG, starting_scope=None) -> int:
    """
    Union scope-entering memlets relating to the same data node in all states.
    This effectively reduces the number of connectors and allows more
    transformations to be performed, at the cost of losing the individual
    per-tasklet memlets.
    :param sdfg: The SDFG to consolidate.
    :return: Number of edges removed.
    """
    from dace.sdfg.propagation import propagate_memlets_sdfg, propagate_memlets_scope

    consolidated = 0
    for state in sdfg.nodes():
        # Start bottom-up
        if starting_scope and starting_scope.entry not in state.nodes():
            continue

        queue = [starting_scope] if starting_scope else state.scope_leaves()
        next_queue = []
        while len(queue) > 0:
            for scope in queue:
                consolidated += consolidate_edges_scope(state, scope.entry)
                consolidated += consolidate_edges_scope(state, scope.exit)
                if scope.parent is not None:
                    next_queue.append(scope.parent)
            queue = next_queue
            next_queue = []

        if starting_scope is not None:
            # Repropagate memlets from this scope outwards
            propagate_memlets_scope(sdfg, state, starting_scope)

            # No need to traverse other states
            break

    # Repropagate memlets
    if starting_scope is None:
        propagate_memlets_sdfg(sdfg)

    return consolidated
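
A hedged usage sketch (in recent DaCe versions consolidate_edges lives in dace.sdfg.utils; the import path is an assumption):

import dace
from dace.sdfg.utils import consolidate_edges

@dace.program
def twice(A: dace.float64[64], B: dace.float64[64]):
    B[:] = A + A

sdfg = twice.to_sdfg()
# Merge scope-entering memlets that refer to the same data container.
removed = consolidate_edges(sdfg)
print('Removed %d redundant edges' % removed)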
Example #6
    def optimize(self):
        """ A command-line UI for applying patterns on the SDFG.
            :return: An optimized SDFG object
        """
        sdfg_file = self.sdfg.name + '.sdfg'
        if os.path.isfile(sdfg_file):
            ui_input = input('An SDFG with the filename "%s" was found. '
                             'Would you like to use it instead? [Y/n] ' %
                             sdfg_file)
            if len(ui_input) == 0 or ui_input[0] not in ['n', 'N']:
                return dace.SDFG.from_file(sdfg_file)

        # Visualize SDFGs during optimization process
        VISUALIZE_SDFV = Config.get_bool('optimizer', 'visualize_sdfv')
        SAVE_INTERMEDIATE = Config.get_bool('optimizer', 'save_intermediate')

        if SAVE_INTERMEDIATE:
            self.sdfg.save(os.path.join('_dacegraphs', 'before.sdfg'))
            if VISUALIZE_SDFV:
                from dace.cli import sdfv
                sdfv.view(os.path.join('_dacegraphs', 'before.sdfg'))

        # Optimize until no more patterns match or the user stops the process.
        pattern_counter = 0
        while True:
            # Print all pattern match options in the UI.
            ui_options = sorted(self.get_pattern_matches())
            ui_options_idx = 0
            for pattern_match in ui_options:
                sdfg = self.sdfg.sdfg_list[pattern_match.sdfg_id]
                print('%d. Transformation %s' %
                      (ui_options_idx, pattern_match.print_match(sdfg)))
                ui_options_idx += 1

            # If no pattern matches were found, quit.
            if ui_options_idx == 0:
                print('No viable transformations found')
                break

            ui_input = input(
                'Select the pattern to apply (0 - %d or name$id): ' %
                (ui_options_idx - 1))

            pattern_name, occurrence, param_dict = _parse_cli_input(ui_input)

            pattern_match = None
            if (pattern_name is None and occurrence >= 0
                    and occurrence < ui_options_idx):
                pattern_match = ui_options[occurrence]
            elif pattern_name is not None:
                counter = 0
                for match in ui_options:
                    if type(match).__name__ == pattern_name:
                        if occurrence == counter:
                            pattern_match = match
                            break
                        counter = counter + 1

            if pattern_match is None:
                print(
                    'You did not select a valid option. Quitting optimization ...'
                )
                break

            match_id = (str(occurrence) if pattern_name is None else '%s$%d' %
                        (pattern_name, occurrence))
            sdfg = self.sdfg.sdfg_list[pattern_match.sdfg_id]
            print('You selected (%s) pattern %s with parameters %s' %
                  (match_id, pattern_match.print_match(sdfg), str(param_dict)))

            # Set each parameter of the parameter dictionary separately
            for k, v in param_dict.items():
                setattr(pattern_match, k, v)

            pattern_match.apply(sdfg)
            self.applied_patterns.add(type(pattern_match))

            if SAVE_INTERMEDIATE:
                filename = 'after_%d_%s_b4lprop' % (
                    pattern_counter + 1, type(pattern_match).__name__)
                self.sdfg.save(os.path.join('_dacegraphs', filename + '.sdfg'))

            if not pattern_match.annotates_memlets():
                propagation.propagate_memlets_sdfg(self.sdfg)

            pattern_counter += 1
            if SAVE_INTERMEDIATE:
                filename = 'after_%d_%s' % (pattern_counter,
                                            type(pattern_match).__name__)
                self.sdfg.save(os.path.join('_dacegraphs', filename + '.sdfg'))

                if VISUALIZE_SDFV:
                    from dace.cli import sdfv
                    sdfv.view(os.path.join('_dacegraphs', filename + '.sdfg'))

        return self.sdfg
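
A hedged sketch of how this interactive optimizer might be driven (SDFGOptimizer and its import path are assumptions based on dace.transformation.optimizer):

import dace
from dace.transformation.optimizer import SDFGOptimizer

@dace.program
def saxpy(A: dace.float64[256], B: dace.float64[256]):
    B[:] = 2.0 * A + B

sdfg = saxpy.to_sdfg()
opt = SDFGOptimizer(sdfg)
# optimize() prompts on the command line; entering an index such as "0",
# or a "name$id" pair such as "MapTiling$0", selects a matched transformation.
optimized = opt.optimize()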
Example #7
    def apply_pattern(self, sdfg):
        """ Applies this transformation on the given SDFG. """
        self.apply(sdfg)
        if not self.annotates_memlets():
            propagation.propagate_memlets_sdfg(sdfg)
Example #8
def make_sdfg(squeeze, name):
    N, M = dace.symbol('N'), dace.symbol('M')
    sdfg = dace.SDFG('memlet_propagation_%s' % name)
    sdfg.add_symbol('N', dace.int64)
    sdfg.add_symbol('M', dace.int64)
    sdfg.add_array('A', [N + 1, M], dace.int64)
    state = sdfg.add_state()
    me, mx = state.add_map('map', dict(j='1:M'))
    w = state.add_write('A')

    # Create nested SDFG
    nsdfg = dace.SDFG('nested')
    if squeeze:
        nsdfg.add_array('a1', [N + 1], dace.int64, strides=[M])
        nsdfg.add_array('a2', [N - 1], dace.int64, strides=[M])
    else:
        nsdfg.add_array('a', [N + 1, M], dace.int64)

    nstate = nsdfg.add_state()
    a1 = nstate.add_write('a1' if squeeze else 'a')
    a2 = nstate.add_write('a2' if squeeze else 'a')
    t1 = nstate.add_tasklet('add99', {}, {'out'}, 'out = i + 99')
    t2 = nstate.add_tasklet('add101', {}, {'out'}, 'out = i + 101')
    nstate.add_edge(t1, 'out', a1, None,
                    dace.Memlet('a1[i]' if squeeze else 'a[i, 1]'))
    nstate.add_edge(t2, 'out', a2, None,
                    dace.Memlet('a2[i]' if squeeze else 'a[i+2, 0]'))
    nsdfg.add_loop(None, nstate, None, 'i', '0', 'i < N - 2', 'i + 1')

    # Connect the nested SDFG to the top-level one
    nsdfg_node = state.add_nested_sdfg(nsdfg,
                                       None, {},
                                       {'a1', 'a2'} if squeeze else {'a'},
                                       symbol_mapping=dict(j='j', N='N',
                                                           M='M'))
    state.add_nedge(me, nsdfg_node, dace.Memlet())
    # Add over-approximated outer memlets (propagation should tighten them)
    if squeeze:
        # This is expected to propagate to A[0:N - 2, j].
        state.add_memlet_path(nsdfg_node,
                              mx,
                              w,
                              src_conn='a1',
                              memlet=dace.Memlet('A[0:N+1, j]'))
        # This is expected to propagate to A[2:N, j - 1].
        state.add_memlet_path(nsdfg_node,
                              mx,
                              w,
                              src_conn='a2',
                              memlet=dace.Memlet('A[2:N+1, j-1]'))
    else:
        # This memlet is expected to propagate to A[0:N, j - 1:j + 1].
        state.add_memlet_path(nsdfg_node,
                              mx,
                              w,
                              src_conn='a',
                              memlet=dace.Memlet('A[0:N+1, j-1:j+1]'))

    propagation.propagate_memlets_sdfg(sdfg)

    return sdfg
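
A hedged sketch that builds both variants and prints the propagated outer memlets; the expected subsets are stated in the comments inside make_sdfg above:

for squeeze in (True, False):
    sdfg = make_sdfg(squeeze, 'squeezed' if squeeze else 'unsqueezed')
    state = sdfg.nodes()[0]
    for edge in state.edges():
        if edge.data.data == 'A':
            print(squeeze, edge.data.subset)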
Example #9
    def apply(self, sdfg: SDFG):
        graph = sdfg.nodes()[self.state_id]
        map_entry = graph.nodes()[self.subgraph[Vectorization._map_entry]]
        tasklet: nodes.Tasklet = graph.successors(map_entry)[0]
        param = symbolic.pystr_to_symbolic(map_entry.map.params[-1])

        # Create new vector size.
        vector_size = self.vector_len
        dim_from, dim_to, dim_skip = map_entry.map.range[-1]

        # Determine whether to create preamble or postamble maps
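        # Note: the explicit '== True' / '== False' checks below guard against
        # symbolic (sympy) relations that may not reduce to a plain boolean.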
        if self.preamble is not None:
            create_preamble = self.preamble
        else:
            create_preamble = not ((dim_from % vector_size == 0) == True
                                   or dim_from == 0)
        if self.postamble is not None:
            create_postamble = self.postamble
        else:
            if isinstance(dim_to, symbolic.SymExpr):
                create_postamble = (((dim_to.approx + 1) %
                                     vector_size == 0) == False)
            else:
                create_postamble = (((dim_to + 1) % vector_size == 0) == False)

        # Determine new range for vectorized map
        if self.strided_map:
            new_range = [dim_from, dim_to - vector_size + 1, vector_size]
        else:
            new_range = [
                dim_from // vector_size, ((dim_to + 1) // vector_size) - 1,
                dim_skip
            ]

        # Create preamble non-vectorized map (replacing the original map)
        if create_preamble:
            old_scope = graph.scope_subgraph(map_entry, True, True)
            new_scope: ScopeSubgraphView = replicate_scope(
                sdfg, graph, old_scope)
            new_begin = dim_from + (vector_size - (dim_from % vector_size))
            map_entry.map.range[-1] = (dim_from, new_begin - 1, dim_skip)
            # Replace map_entry with the replicated scope (so that the preamble
            # will usually come first in topological sort)
            map_entry = new_scope.entry
            tasklet = new_scope.nodes()[old_scope.nodes().index(tasklet)]
            new_range[0] = new_begin

        # Create postamble non-vectorized map
        if create_postamble:
            new_scope: ScopeSubgraphView = replicate_scope(
                sdfg, graph, graph.scope_subgraph(map_entry, True, True))
            dim_to_ex = dim_to + 1
            new_scope.entry.map.range[-1] = (dim_to_ex -
                                             (dim_to_ex % vector_size), dim_to,
                                             dim_skip)

        # Change the step of the inner-most dimension.
        map_entry.map.range[-1] = tuple(new_range)

        # Vectorize connectors adjacent to the tasklet.
        for edge in graph.all_edges(tasklet):
            connectors = (tasklet.in_connectors
                          if edge.dst == tasklet else tasklet.out_connectors)
            conn = edge.dst_conn if edge.dst == tasklet else edge.src_conn

            if edge.data.data is None:  # Empty memlets
                continue
            desc = sdfg.arrays[edge.data.data]
            contigidx = desc.strides.index(1)

            newlist = []

            lastindex = edge.data.subset[contigidx]
            if isinstance(lastindex, tuple):
                newlist = [(rb, re, rs) for rb, re, rs in edge.data.subset]
                symbols = set()
                for indd in lastindex:
                    symbols.update(
                        symbolic.pystr_to_symbolic(indd).free_symbols)
            else:
                newlist = [(rb, rb, 1) for rb in edge.data.subset]
                symbols = symbolic.pystr_to_symbolic(lastindex).free_symbols

            oldtype = connectors[conn]
            if oldtype is None or oldtype.type is None:
                oldtype = desc.dtype

            # Vector to scalar WCR edge: change connector and continue
            lastedge = graph.memlet_path(edge)[-1]
            if (lastedge.data.subset.num_elements() == 1
                    and edge.data.wcr is not None):
                connectors[conn] = dtypes.vector(oldtype, vector_size)
                continue

            if str(param) not in map(str, symbols):
                continue

            # Vectorize connector, if not already vectorized
            if isinstance(oldtype, dtypes.vector):
                continue

            connectors[conn] = dtypes.vector(oldtype, vector_size)

            # Modify memlet subset to match vector length
            if self.strided_map:
                rb = newlist[contigidx][0]
                if self.propagate_parent:
                    newlist[contigidx] = (rb / self.vector_len,
                                          rb / self.vector_len, 1)
                else:
                    newlist[contigidx] = (rb, rb + self.vector_len - 1, 1)
            else:
                rb = newlist[contigidx][0]
                if self.propagate_parent:
                    newlist[contigidx] = (rb, rb, 1)
                else:
                    newlist[contigidx] = (self.vector_len * rb,
                                          self.vector_len * rb +
                                          self.vector_len - 1, 1)
            edge.data.subset = subsets.Range(newlist)
            edge.data.volume = vector_size

        # Vector length propagation using data descriptors, recursive traversal
        # outwards
        if self.propagate_parent:
            for edge in graph.all_edges(tasklet):
                cursdfg = sdfg
                curedge = edge
                while cursdfg is not None:
                    arrname = curedge.data.data
                    dtype = cursdfg.arrays[arrname].dtype

                    # Change type and shape to vector
                    if not isinstance(dtype, dtypes.vector):
                        cursdfg.arrays[arrname].dtype = dtypes.vector(
                            dtype, vector_size)
                        new_shape = list(cursdfg.arrays[arrname].shape)
                        contigidx = cursdfg.arrays[arrname].strides.index(1)
                        new_shape[contigidx] /= vector_size
                        try:
                            new_shape[contigidx] = int(new_shape[contigidx])
                        except TypeError:
                            pass
                        cursdfg.arrays[arrname].shape = new_shape

                    propagation.propagate_memlets_sdfg(cursdfg)

                    # Find matching edge in parent
                    nsdfg = cursdfg.parent_nsdfg_node
                    if nsdfg is None:
                        break
                    tstate = cursdfg.parent
                    curedge = ([
                        e
                        for e in tstate.in_edges(nsdfg) if e.dst_conn == arrname
                    ] + [
                        e for e in tstate.out_edges(nsdfg)
                        if e.src_conn == arrname
                    ])[0]
                    cursdfg = cursdfg.parent_sdfg
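
A hedged usage sketch for the transformation above (the Vectorization import path and the vector_len option exist in recent DaCe releases, but treat the details as assumptions):

import dace
import numpy as np
from dace.transformation.dataflow import Vectorization

@dace.program
def add(A: dace.float32[128], B: dace.float32[128], C: dace.float32[128]):
    C[:] = A + B

sdfg = add.to_sdfg()
# Vectorize the innermost map dimension with a width of 4 lanes.
sdfg.apply_transformations(Vectorization, options={'vector_len': 4})

A = np.random.rand(128).astype(np.float32)
B = np.random.rand(128).astype(np.float32)
C = np.zeros(128, dtype=np.float32)
sdfg(A=A, B=B, C=C)
assert np.allclose(C, A + B)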