Example #1
def test_subgraph():
    A, expected = config()
    B_init = np.random.rand(2)

    graph = mapfission_sdfg()
    graph.apply_transformations(MapFission)
    dace.sdfg.propagation.propagate_memlets_sdfg(graph)
    cgraph = graph.compile()

    B = dcpy(B_init)
    cgraph(A=A, B=B)
    del cgraph
    assert np.allclose(B, expected)

    graph.validate()

    subgraph = SubgraphView(graph.nodes()[0], graph.nodes()[0].nodes())
    sf = SubgraphFusion(subgraph)
    assert sf.can_be_applied(graph, subgraph)
    fusion(graph, graph.nodes()[0], None)
    ccgraph = graph.compile()

    B = dcpy(B_init)
    ccgraph(A=A, B=B)
    assert np.allclose(B, expected)
    graph.validate()
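
All of these examples alias copy.deepcopy as dcpy. A minimal, self-contained sketch of the pattern used in the test above (re-running a computation from an unmodified baseline); the array names are illustrative only:

from copy import deepcopy as dcpy
import numpy as np

B_init = np.random.rand(2)            # baseline kept pristine between runs
B = dcpy(B_init)                      # work on an independent copy
B += 1.0                              # mutating B ...
assert np.allclose(B_init, B - 1.0)   # ... leaves the baseline untouched
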
Example #2
 def softimpute(self):
     it = 0
     X_hat = dcpy(self.X)
     M = dcpy(self.M)
     start_time = time.time()
     while it < self.maxiter:
         it = it + 1
         X_hat = self.P_X - M * self.omega + M
         U, D, VT = np.linalg.svd(X_hat, full_matrices = False)
         #prt('U', U)
         #prt('D', D)
         #prt('V.T', VT)
         SD = np.diag(np.fmax(D - self._lambda, 0))
         M = U @ SD @ VT
         
         diff = np.linalg.norm(M - self.M, ord = 'fro')
         if diff < self.eps:
             break
         
         self.M = dcpy(M)
         
         self.time_seq.append( time.time()-start_time )
         #self.mse_seq.append( np.sqrt(mse(self.X_truth, M)) )
         self.mse_seq.append( mse(self.X_truth, M))
     
     end_time = time.time()
     print('number of iteration:', it)
     print('time:', end_time - start_time)
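
The update in the loop above is the usual SoftImpute step: take an SVD of the current completion and soft-threshold the singular values by lambda. A stripped-down, standalone sketch of just that step (illustrative variable names, not the class above):

import numpy as np

X_hat = np.random.rand(6, 4)                     # current completed matrix
lam = 0.1
U, D, VT = np.linalg.svd(X_hat, full_matrices=False)
M = U @ np.diag(np.fmax(D - lam, 0)) @ VT        # soft-thresholded reconstruction
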
Example #3
 def __deepcopy__(self, memo):
     node = object.__new__(AccessNode)
     node._data = self._data
     node._setzero = self._setzero
     node._in_connectors = dcpy(self._in_connectors, memo=memo)
     node._out_connectors = dcpy(self._out_connectors, memo=memo)
     node._debuginfo = dcpy(self._debuginfo, memo=memo)
     return node
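
Examples #3, #19 and #29 override the __deepcopy__(self, memo) hook that copy.deepcopy calls. A minimal sketch of that protocol (not DaCe code); registering the clone in memo before copying children keeps shared and cyclic references consistent:

from copy import deepcopy as dcpy

class Node:
    def __init__(self, payload, parent=None):
        self.payload = payload
        self.parent = parent

    def __deepcopy__(self, memo):
        clone = object.__new__(type(self))
        memo[id(self)] = clone                    # register first (handles cycles)
        clone.payload = dcpy(self.payload, memo)
        clone.parent = dcpy(self.parent, memo)
        return clone
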
Example #4
    def coverage_dicts(sdfg, graph, map_entry, outer_range=True):
        '''
        returns a tuple of two dicts:
        the first dict has as a key all data entering the map
        and its associated access range
        the second dict has as a key all data exiting the map
        and its associated access range
        if outer_range = True, substitutes outer ranges
        into min/max of inner access range
        '''
        map_exit = graph.exit_node(map_entry)
        map = map_entry.map

        entry_coverage = {}
        exit_coverage = {}
        # create dicts that map each iteration variable to the min/max
        # of its range, so inner accesses can be substituted below
        map_min = {dace.symbol(param): e for param, e in zip(map.params, map.range.min_element())}
        map_max = {dace.symbol(param): e for param, e in zip(map.params, map.range.max_element())}

        # look at inner memlets at map entry
        for e in graph.out_edges(map_entry):
            if not e.data.subset:
                continue
            if outer_range:
                # get subset
                min_element = [m.subs(map_min) for m in e.data.subset.min_element()]
                max_element = [m.subs(map_max) for m in e.data.subset.max_element()]
                # create range
                rng = subsets.Range((min_e, max_e, 1) for min_e, max_e in zip(min_element, max_element))
            else:
                rng = dcpy(e.data.subset)

            if e.data.data not in entry_coverage:
                entry_coverage[e.data.data] = rng
            else:
                old_coverage = entry_coverage[e.data.data]
                entry_coverage[e.data.data] = subsets.union(old_coverage, rng)

        # look at inner memlets at map exit
        for e in graph.in_edges(map_exit):
            if outer_range:
                # get subset
                min_element = [m.subs(map_min) for m in e.data.subset.min_element()]
                max_element = [m.subs(map_max) for m in e.data.subset.max_element()]
                # create range
                rng = subsets.Range((min_e, max_e, 1) for min_e, max_e in zip(min_element, max_element))
            else:
                rng = dcpy(e.data.subset)

            if e.data.data not in exit_coverage:
                exit_coverage[e.data.data] = rng
            else:
                old_coverage = exit_coverage[e.data.data]
                exit_coverage[e.data.data] = subsets.union(old_coverage, rng)

        # return both coverages as a tuple
        return (entry_coverage, exit_coverage)
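
The substitution above relies on DaCe subset expressions being sympy-based, so a dict-valued .subs() swaps each iteration symbol for the map's outer bound. A tiny standalone sympy sketch of the same idea (illustrative symbols, not DaCe objects):

import sympy as sp

i = sp.Symbol('i')
expr = 2 * i + 1            # inner access expression in terms of the map parameter
print(expr.subs({i: 0}))    # substitute the lower bound -> 1
print(expr.subs({i: 9}))    # substitute the upper bound -> 19
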
Example #5
    def can_be_applied(self, sdfg: SDFG, subgraph: SubgraphView) -> bool:
        # get lowest scope maps of subgraph
        # grab first node and see whether all nodes are in the same graph
        # (or nested sdfgs therein)

        graph = subgraph.graph

        # next, get all the maps by obtaining a copy (for potential offsets)
        map_entries = helpers.get_outermost_scope_maps(sdfg, graph, subgraph)
        ranges = [dcpy(map_entry.range) for map_entry in map_entries]
        # offset if option is toggled
        if self.allow_offset:
            for r in ranges:
                r.offset(r.min_element(), negative=True)
        brng = helpers.common_map_base_ranges(ranges)

        # more than one outermost scoped map entry has to be available
        if len(map_entries) <= 1:
            return False

        # check whether any parameters are in common
        if len(brng) == 0:
            return False

        # if option enabled, return false if any splits are introduced
        if self.permutation_only:
            for map_entry in map_entries:
                if len(map_entry.params) != len(brng):
                    return False

        # if option enabled, check contiguity in the last contiguous dimension:
        # if a map split occurs with the last contiguous dimension ending up
        # in the *outer* map, we fail (-> bad data access pattern)
        if self.check_contiguity:
            reassignment = helpers.find_reassignment(map_entries,
                                                     brng,
                                                     offset=self.allow_offset)
            for map_entry in map_entries:
                no_common = sum(
                    [1 for j in reassignment[map_entry] if j != -1])
                if no_common != len(map_entry.params):
                    # check every memlet for access
                    for e in itertools.chain(
                            graph.out_edges(map_entry),
                            graph.in_edges(graph.exit_node(map_entry))):
                        subset = dcpy(e.data.subset)
                        subset.pop([i for i in range(subset.dims() - 1)])
                        for s in subset.free_symbols:

                            if reassignment[map_entry][
                                    map_entry.map.params.index(s)] != -1:
                                warnings.warn(
                                    "MultiExpansion::Contiguity fusion violation detected"
                                )
                                return False

        return True
Example #6
def differ(old, new):
    N, M = len(old), len(new)
    dist_mat = []
    path_mat = []
    for i in range(N + 1):
        dist_mat.append([-1 for j in range(M + 1)])
        path_mat.append([[] for j in range(M + 1)])

    for n in range(N + 1):
        for m in range(M + 1):
            if n == 0 and m == 0:
                dist = 0
                path = []
            elif n == 0:
                dist = m
                path = [Action('insert', 1, m)]
            elif m == 0:
                dist = n
                path = [Action('delete', 1, n)]
            else:
                d_keep = dist_mat[n - 1][m - 1] if old[n - 1] == new[m - 1] else 2 + N + M
                d_del = dist_mat[n - 1][m] + 1
                d_ins = dist_mat[n][m - 1] + 1

                dist = min(d_keep, d_del, d_ins)

                if d_keep == dist:
                    path = dcpy(path_mat[n - 1][m - 1])
                    if path and path[-1].kind == 'keep':
                        path[-1].end += 1
                    else:
                        path.append(Action('keep', n, n))
                elif d_ins == dist:
                    path = dcpy(path_mat[n][m - 1])
                    if path and path[-1].kind == 'insert':
                        path[-1].end += 1
                    else:
                        path.append(Action('insert', m, m))
                else:  # d_del == dist:
                    path = dcpy(path_mat[n - 1][m])
                    if path and path[-1].kind == 'delete':
                        path[-1].end += 1
                    else:
                        path.append(Action('delete', n, n))

            dist_mat[n][m] = dist
            path_mat[n][m] = path

    return dist_mat[N][M], path_mat[N][M]
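
differ() relies on an Action record that is not shown in this snippet; below is a hypothetical stand-in plus a usage sketch (the real class may differ):

from dataclasses import dataclass

@dataclass
class Action:
    kind: str      # 'keep', 'insert' or 'delete'
    start: int
    end: int       # extended in place when consecutive actions merge

dist, path = differ(list("kitten"), list("sitting"))
print(dist, path)
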
Example #7
    def apply(self, graph: SDFGState, sdfg: SDFG):
        import dace.libraries.blas as blas

        transpose_a = self.transpose_a
        _at = self.at
        transpose_b = self.transpose_b
        _bt = self.bt
        a_times_b = self.a_times_b

        for src, src_conn, _, _, memlet in graph.in_edges(transpose_a):
            graph.add_edge(src, src_conn, a_times_b, '_b', memlet)
        graph.remove_node(transpose_a)
        for src, src_conn, _, _, memlet in graph.in_edges(transpose_b):
            graph.add_edge(src, src_conn, a_times_b, '_a', memlet)
        graph.remove_node(transpose_b)
        graph.remove_node(_at)
        graph.remove_node(_bt)

        for _, _, dst, dst_conn, memlet in graph.out_edges(a_times_b):
            subset = dcpy(memlet.subset)
            subset.squeeze()
            size = subset.size()
            shape = [size[1], size[0]]
            break
        tmp_name, tmp_arr = sdfg.add_temp_transient(shape, a_times_b.dtype)
        tmp_acc = graph.add_access(tmp_name)
        transpose_c = blas.Transpose('_Transpose_', a_times_b.dtype)
        for edge in graph.out_edges(a_times_b):
            _, _, dst, dst_conn, memlet = edge
            graph.remove_edge(edge)
            graph.add_edge(transpose_c, '_out', dst, dst_conn, memlet)
        graph.add_edge(a_times_b, '_c', tmp_acc, None, dace.Memlet.from_array(tmp_name, tmp_arr))
        graph.add_edge(tmp_acc, None, transpose_c, '_inp', dace.Memlet.from_array(tmp_name, tmp_arr))
Example #8
    def crossover(cls, parent_1, parent_2):
        """Return a pair of child solutions from parent crossover.

        Method based on swapping random client data.
        """
        child_1 = dcpy(parent_1)
        child_2 = dcpy(parent_2)
        for client in range(Solution.client_nr):
            # product delivery data swapped with 50% probability
            # client order will be corrected later
            if randint(1, 2) == 2:
                child_1.product_delivery[client] = parent_2.product_delivery[
                    client][:]
                child_2.product_delivery[client] = parent_1.product_delivery[
                    client][:]
        return child_1, child_2
Example #9
def generate_files_sony(old_subj_data, savepath):
    num_subj = len(old_subj_data)
    num_cond = len(old_subj_data[0])
    subj_data = [[dcpy(empty_map) for i in range(num_cond)]
                 for j in range(num_subj)]
    clip_size = old_subj_data[0][0]['trials'].shape[2] / 2
    num_electrodes = old_subj_data[0][0]['trials'].shape[1]
    for subj_ind in range(num_subj):
        for cond in range(num_cond):
            conds = conds_map[cond]
            old_entry = old_subj_data[subj_ind][cond]
            for i in range(2):
                clip = range(clip_size * i, clip_size * (i + 1))
                entry = subj_data[subj_ind][conds[i]]
                entry['trials'] = concat(entry['trials'],
                                         old_entry['trials'][:, :, clip])

    if not os.path.isdir(savepath):
        os.makedirs(savepath)

    dats = []
    dats_det = []
    for cond in range(num_cond):
        print 'Generating condition', cond + 1, 'files...'
        dats.append(np.zeros((0, clip_size, num_electrodes)))
        dats_det.append(np.zeros((0, clip_size, num_electrodes)))
        for subj in range(num_subj):
            trials = subj_data[subj][cond]['trials']
            dats[-1] = concat(dats[-1], permute(trials, axes=(0, 2, 1)))
        savename = os.path.join(savepath, cond_names_map[cond])
        savemat(savename, {'dat': dats[-1]})
        dats_det[-1] = detrend(dats[-1], axis=1)
        savename = os.path.join(savepath, cond_names_map[cond] + '_det')
        savemat(savename, {'dat': dats_det[-1]})
Example #10
    def fuse_nodes(self, sdfg, graph, edge, new_dst, new_dst_conn):
        """ Fuses two nodes via memlets and possibly transient arrays. """
        memlet_path = graph.memlet_path(edge)
        access_node = memlet_path[-1].dst

        local_name = "__s%d_n%d%s_n%d%s" % (
            self.state_id,
            graph.node_id(edge.src),
            edge.src_conn,
            graph.node_id(edge.dst),
            edge.dst_conn,
        )
        # Add intermediate memory between subgraphs. If a scalar,
        # uses direct connection. If an array, adds a transient node
        if edge.data.subset.num_elements() == 1:
            sdfg.add_scalar(
                local_name,
                dtype=access_node.desc(graph).dtype,
                transient=True,
                storage=dtypes.StorageType.Register,
            )
            edge.data.data = local_name
            edge.data.subset = "0"
            local_node = edge.src
            src_connector = edge.src_conn
        else:
            sdfg.add_transient(local_name,
                               edge.data.subset.size(),
                               dtype=access_node.desc(graph).dtype)
            local_node = graph.add_access(local_name)
            src_connector = None
            edge.data.data = local_name
            edge.data.subset = ",".join(
                ["0:" + str(s) for s in edge.data.subset.size()])
            # Add edge that leads to transient node
            graph.add_edge(
                edge.src,
                edge.src_conn,
                local_node,
                None,
                dcpy(edge.data),
            )
        ########
        # Add edge that leads to the second node
        graph.add_edge(local_node, src_connector, new_dst, new_dst_conn,
                       dcpy(edge.data))
Example #11
def callback_traj(msg):
    global arr_msg, pose_msg, traj_pub, pose_pub
    msg.pose.position.z = 4
    pose_msg = dcpy(msg.pose)
    # pose_msg.orientation = msg.pose.orientation
    # pose_msg.position = msg.pose.position
    arr_msg.poses.insert(callback_traj.counter, pose_msg)
    arr_msg.header.frame_id = msg.header.frame_id
    callback_traj.counter += 1
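
The dcpy(msg.pose) in the callback matters because ROS messages are mutable: inserting the same object on every call would make all stored poses alias the latest one. A minimal illustration with a plain dict standing in for the message:

from copy import deepcopy as dcpy

pose = {'z': 0}
aliased, copied = [], []
for z in range(3):
    pose['z'] = z
    aliased.append(pose)          # every entry ends up as {'z': 2}
    copied.append(dcpy(pose))     # snapshots {'z': 0}, {'z': 1}, {'z': 2}
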
Example #12
def data_concatenate(input, grpname='', intersection=False):
    import numpy as np
    from copy import deepcopy as dcpy
    #Test data for equal keys
    res_description='Concatenation of '
    test=[]
    intsect=[]
    for k in input:
        test.extend(k.keys)
        intsect.append(set(k.keys))
        res_description+=' '+k.description
    if not intersection:
        
        assert set(test)==set(input[0].keys), 'Input files have different key variables.'
        #Concatenate data
        ind_base=len(input[0].data)
        group_variable=[input[0].grpname]*ind_base
        res_dropped=input[0].dropped
        res_keystype=input[0].keystype
        res_keys=input[0].keys
        res_keysdescr = input[0].keysdescr
        res_filename="Concatenated structure. All variables."
        res_nodata=input[0].nodata
        res_data=input[0].data
        for k in input[1:]:
            res_dropped+=k.dropped
            res_data=np.concatenate((res_data,k.data))
            for j in k.nodata:
                res_nodata[j].extend(list(np.array(k.nodata[j])+ind_base))
            ind_base+=len(k.data)
            group_variable.extend([k.grpname]*len(k.data))
        res=Csvdata(res_description,res_keys,res_keysdescr,res_data,res_nodata,res_keystype,res_dropped,res_filename,grpname)
    else:
        allkeys=set.intersection(*intsect)
        ind_base=len(input[0].data)
        group_variable=[input[0].grpname]*ind_base
        res_data,res_keys,res_keystype,res_keysdescr=input[0].extractdatabykeylist(allkeys)
        res_dropped=None
        res_filename="Concatenated structure. Intersected variables."
        res_nodata=input[0].nodata
        for k in input[1:]:
            dta,qq,qq1,qq2=k.extractdatabykeylist(allkeys)
            group_variable.extend([k.grpname]*len(dta))
            res_data=np.concatenate((res_data,dta))
            for j in k.nodata:
                if j not in res_nodata.keys():
                    res_nodata.update({j:[]})
                res_nodata[j].extend(list(np.array(k.nodata[j])+ind_base))
            ind_base+=len(k.data)
        tmpdict=dcpy(res_nodata)
        for k in tmpdict:
            if k not in allkeys:
                res_nodata.pop(k)
        res=Csvdata(res_description,res_keys,res_keysdescr,res_data,res_nodata,res_keystype,res_dropped,res_filename,grpname)
    return (res,group_variable)
Example #13
 def Affect(self, Friendly, IncludeSelf=False, All=False):
     if not IncludeSelf:
         groups = [_ for _ in self.groups if _ != Friendly]
     else:
         groups = dcpy(self.groups)
     if not All:
         # Trim to one group
         groups = self.rng.choices(groups, k=1)
     # Iterate over all affected groups
     yield len(groups)
     for group in groups:
         yield group
Example #14
 def adjust_arrays_nsdfg(self, sdfg, nsdfg, name, nname):
     '''
     DFS to replace strides and volumes of data whose access nodes
     are adjacent to nested SDFGs. Needed as a post-processing
     step during fusion.
     '''
     nsdfg.data(nname).strides = dcpy(sdfg.data(name).strides)
     nsdfg.data(nname).total_size = dcpy(sdfg.data(name).total_size)
     # traverse the whole graph and search for arrays
     for ngraph in nsdfg.nodes():
         for nnode in ngraph.nodes():
             if isinstance(nnode, nodes.AccessNode) and nnode.label == nname:
                 # trace and recurse if necessary
                 for e in chain(ngraph.out_edges(nnode),
                                ngraph.in_edges(nnode)):
                     for te in ngraph.memlet_tree(e):
                         if isinstance(te.dst, nodes.NestedSDFG):
                             self.adjust_arrays_nsdfg(
                                 nsdfg, te.dst.sdfg, nname, te.dst_conn)
                         if isinstance(te.src, nodes.NestedSDFG):
                             self.adjust_arrays_nsdfg(
                                 nsdfg, te.src.sdfg, nname, te.src_conn)
Example #15
    def __init__(self, gamestate, chcol: str):
        Node.__init__(self)
        self.is_root = True

        self.gamestate = dcpy(gamestate)

        # get character index
        self.id, self.character = self.get_character_id(
            self.gamestate['characters'], chcol)

        # Removing current character from options.
        self.gamestate['options'].pop(
            next(id for id, ch in enumerate(self.gamestate['options'])
                 if ch['color'] == chcol))
Example #16
def test_stopword_affection():
    import sys
    import codecs as c
    sys.stdout = c.open('result.stopword', 'w', 'utf8')
    T = rw.load_folder()
    vec.cutT(T)
    vec.n_gram(T, 2)
    from copy import deepcopy as dcpy
    rawT = dcpy(T)

    nostf, stf = [], []
    test_time = 100
    for i in range(test_time):
        T = dcpy(rawT)
        vec.confuse(T)
        rw.write_data(T)

        D,rD = vec.dictify(T)
        rw.write_dict(D, rD)
        tsr.tfidf(T)
        tsr.x2max_filter(T,D,1000,'tfidf')
        rw.write_libsvm(T,'x2max')
        nostf.append(knn.test(T, 0.8, warn_on_equidistant=False))

        T = rw.read_T()
        vec.stopwordfilter(T)
        D,rD = vec.dictify(T)
        rw.write_dict(D, rD)
        tsr.tfidf(T)
        tsr.x2max_filter(T,D,1000,'tfidf')
        rw.write_libsvm(T,'x2max')
        stf.append(knn.test(T, 0.8, warn_on_equidistant=False))

    print 'no stop word filtering...'
    knn.score_analysis(nostf)
    print 'stop word filtering...'
    knn.score_analysis(stf)
Example #17
    def __init__(self, gamestate: dict, charid: int, pos: int):
        Node.__init__(self)

        # Copy the state of the game, then set the original inaccessible
        self.gamestate = dcpy(gamestate)
        gamestate = None

        self.pos = pos

        self.character = self.gamestate['characters'][charid]
        self.character['position'] = self.pos
        self.gain = self.gamestate['compute_gain'].pop(0)(self.gamestate)

        self.try_debug()
        if len(self.gamestate['options']) > 0:
            self.next = self.gamestate['root_node'](self.gamestate)
Example #18
    def __init__(self, gamestate: dict, room: int, charid: int, moves: list):
        Node.__init__(self)

        # Copy the state of the game then remove original to avoid using it
        self.gamestate = dcpy(gamestate)
        gamestate = None

        self.pos = room

        # Moving the blackout to the given room
        self.gamestate['shadow'] = room

        # Check every possible move
        for m in moves:
            tmp = MoveNode(self.gamestate, charid, m)
            self.update_best_node(tmp)
            self.options.append(tmp)
Example #19
    def __deepcopy__(self, memo):
        n = object.__new__(type(self))

        n.name = dcpy(self.name)
        n.inputs = dcpy(self.inputs)
        n.outputs = dcpy(self.outputs)
        n.globals = self.globals
        n.locals = dcpy(self.locals)
        n.transients = dcpy(self.transients)
        n.params = dcpy(self.params)
        n.parent = None
        n.children = []
        n.is_async = dcpy(self.is_async)

        return n
Example #20
    def __init__(self, gamestate: dict, charid: int, pos: int, targetid: int):
        Node.__init__(self)

        # Copy the state of the game, then set the original inaccessible
        self.gamestate = dcpy(gamestate)
        gamestate = None

        self.pos = pos
        self.character = {'color': 'brown'}

        # Moving the persian and one other character to the next room
        self.gamestate['characters'][charid]['position'] = pos
        self.gamestate['characters'][targetid]['position'] = pos

        self.gain = self.gamestate['compute_gain'].pop(0)(self.gamestate)
        self.try_debug()

        if len(self.gamestate['options']) > 0:
            self.next = self.gamestate['root_node'](self.gamestate)
Example #21
    def copy_edge(self,
                  graph,
                  edge,
                  new_src=None,
                  new_src_conn=None,
                  new_dst=None,
                  new_dst_conn=None,
                  new_data=None,
                  remove_old=False):
        '''
        Copies an edge going from source to dst.
        If no destination is specified, the edge is copied with the same
        destination and port as the original edge, else the edge is copied
        with the new destination and the new port.
        If no source is specified, the edge is copied with the same
        source and port as the original edge, else the edge is copied
        with the new source and the new port
        If remove_old is specified, the old edge is removed immediately.
        If new_data is specified, inserts new_data as a memlet, else
        makes a deepcopy of the current edge's memlet.
        '''
        data = new_data if new_data else dcpy(edge.data)
        src = edge.src if new_src is None else new_src
        src_conn = edge.src_conn if new_src is None else new_src_conn
        dst = edge.dst if new_dst is None else new_dst
        dst_conn = edge.dst_conn if new_dst is None else new_dst_conn

        ret = graph.add_edge(src, src_conn, dst, dst_conn, data)

        if remove_old:
            graph.remove_edge(edge)
        '''
        if new_src:
            ret = graph.add_edge(new_src, new_src_conn, edge.dst, edge.dst_conn,
                                 data)
            graph.remove_edge(edge)
        if new_dst:
            ret = graph.add_edge(edge.src, edge.src_conn, new_dst, new_dst_conn,
                                 data)
            graph.remove_edge(edge)
        '''
        return ret
Example #22
    def __init__(self, gamestate: dict, charid: int, pos: int, moves: list):
        Node.__init__(self)

        # Copy the state of the game, then set the original inaccessible
        self.gamestate = dcpy(gamestate)
        gamestate = None

        self.pos = pos
        self.character = {'color': 'Christine'}

        # Drawing everyone around to the given room (including christine)
        for ch in self.gamestate['characters']:
            if ch['position'] in moves:
                ch['position'] = self.pos

        self.gain = self.gamestate['compute_gain'].pop(0)(self.gamestate)
        self.try_debug()

        if len(self.gamestate['options']) > 0:
            self.next = self.gamestate['root_node'](self.gamestate)
Example #23
def generate_data(path, savepath):
    subjs = [item for item in os.listdir(path) if '.' not in item]
    num_cond = get_num_conds(os.path.join(path, subjs[0]))
    subj_data = [[dcpy(empty_map) for i in range(num_cond)]
                 for j in range(len(subjs))]
    for subj_ind in xrange(len(subjs)):
        subj = subjs[subj_ind]
        print 'Adding subject', subj
        subj_path = os.path.join(path, subj)
        files = [item for item in os.listdir(subj_path) if 'Raw' in item]
        for data_ind in xrange(len(files)):
            data_file = files[data_ind]
            cond = int(data_file[data_file.rfind('c00') + 3]) - 1
            data = loadmat(os.path.join(subj_path, data_file))
            raw_data = data['RawTrial'][:, electrodes]
            num_TP = raw_data.shape[0] / data['NmbEpochs']
            raw_data = permute(raw_data.reshape(num_TP, data['NmbEpochs'],
                                                len(electrodes)),
                               axes=(1, 2, 0))
            valid_epochs = [i for i in range(data['NmbEpochs']) if \
                np.sum(data['IsEpochOK'][i]) > num_channels/2]
            raw_data = raw_data[valid_epochs, :, :]
            num_epochs = raw_data.shape[0]
            if num_epochs:
                entry = subj_data[subj_ind][cond]
                entry['groups'].append(num_epochs)
                entry['trials'] = concat(entry['trials'], raw_data)
                raw_data_mean = np.mean(raw_data, axis=0)[None, :]
                entry['mean_trials'] = concat(entry['mean_trials'],
                                              raw_data_mean)
        for cond in xrange(num_cond):
            entry = subj_data[subj_ind][cond]
            entry['groups'] = np.squeeze(entry['groups'])
            entry['trials'] = np.squeeze(entry['trials'])
            entry['mean_trials'] = np.squeeze(entry['mean_trials'])

    if not os.path.isdir(savepath):
        os.makedirs(savepath)

    savemat(os.path.join(savepath, 'subj_data'), {'subj_data': subj_data})
    return subj_data
Example #24
 def mutation(self):
     """Perform mutation on solution by applying all or several mutation methods in random order."""
     # randomly order possible mutations (1 to 5)
     muts = list(range(1, 6))
     shuffle(muts)
     # save original self
     original = dcpy(self)
     # apply first mutation
     self.mutation_wrapper(muts[0])
     # if no mutation returned, iterate until mutation returned
     i = 1
     while self == original:
         self.mutation_wrapper(muts[i])
         i += 1
     # apply remaining mutations with 50% probability
     for j in range(i, 5):
         k = randint(1, 2)
         if k == 1:
             self.mutation_wrapper(muts[j])
     # correct after sequential mutations
     self.correct()
Example #25
    def _copy_first_map_contents(self, sdfg, graph, first_map_entry):
        inter_nodes = list(
            graph.all_nodes_between(first_map_entry, self.first_map_exit) -
            {first_map_entry})
        new_inter_nodes = [dcpy(node) for node in inter_nodes]
        tmp_map = dict()
        for node in new_inter_nodes:
            if isinstance(node, nds.AccessNode):
                data = sdfg.arrays[node.data]
                if isinstance(data, dt.Scalar) and data.transient:
                    tmp_name = sdfg.temp_data_name()
                    sdfg.add_scalar(tmp_name, data.dtype, transient=True)
                    tmp_map[node.data] = tmp_name
                    node.data = tmp_name
            graph.add_node(node)
        id_map = {
            graph.node_id(old): graph.node_id(new)
            for old, new in zip(inter_nodes, new_inter_nodes)
        }

        def map_node(node):
            return graph.node(id_map[graph.node_id(node)])

        def map_memlet(memlet):
            memlet = dcpy(memlet)
            memlet.data = tmp_map.get(memlet.data, memlet.data)
            return memlet

        for edge in graph.edges():
            if edge.src in inter_nodes or edge.dst in inter_nodes:
                src = map_node(
                    edge.src) if edge.src in inter_nodes else edge.src
                dst = map_node(
                    edge.dst) if edge.dst in inter_nodes else edge.dst
                edge_data = map_memlet(edge.data)
                graph.add_edge(src, edge.src_conn, dst, edge.dst_conn,
                               edge_data)

        return new_inter_nodes
Example #26
    def _replicate_first_map(self, sdfg, graph, first_map_entry,
                             intermediate_dnodes):
        for dnode in intermediate_dnodes:
            array_name = dnode.data
            array = sdfg.arrays[array_name]

            read_offsets = self._read_offsets(graph, array_name)

            # Replicate first map tasklets once for each read offset access and
            # connect them to other tasklets accordingly
            for offset, edges in read_offsets.items():
                new_nodes = self._copy_first_map_contents(
                    sdfg, graph, first_map_entry)
                tmp_name = "__otf"
                tmp_name, _ = sdfg.add_scalar(tmp_name,
                                              array.dtype,
                                              transient=True,
                                              find_new_name=True)
                tmp_access = graph.add_access(tmp_name)

                for node in new_nodes:
                    for edge in graph.edges_between(node, self.first_map_exit):
                        graph.add_edge(edge.src, edge.src_conn, tmp_access,
                                       None, Memlet(tmp_name))
                        graph.remove_edge(edge)

                    for edge in graph.edges_between(first_map_entry, node):
                        memlet = dcpy(edge.data)
                        memlet.subset.offset(list(offset), negative=False)
                        self.second_map_entry.add_out_connector(edge.src_conn +
                                                                "_")
                        graph.add_edge(self.second_map_entry,
                                       edge.src_conn + "_", node,
                                       edge.dst_conn, memlet)
                        graph.remove_edge(edge)

                for edge in edges:
                    graph.add_edge(tmp_access, None, edge.dst, edge.dst_conn,
                                   Memlet(tmp_name))
Example #27
def generate_rawDict(ports, caltype='solt'):
    ''' Make a dictionary to store raw signals (e.g. used for VNA calibration)
		~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		Usage:	ports = [1, 2]
				CAL_data = generate_CALdict(measPorts)

		IN:		ports		--	list of ports to be measured on
				caltype		--	(optional, str) type of structure to create
								choose from:
									- 'solt'	(short, open, load, through
													on each port + crossterms)
									- 'meas'	(custom)

		OUT:	CAL_data		--	dictionary to store raw signals
		~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	'''
    import itertools
    from copy import deepcopy as dcpy
    rawDict = {}

    rawDict['frequencies'] = None

    if caltype.lower() == 'solt':

        raw = {'a': None, 'u': None, 'v': None, 'b': None}
        CAL_port = {
            's': dcpy(raw),
            'o': dcpy(raw),
            'l': dcpy(raw)
        }  # short, open, load
        CAL_cross = {'t': dcpy(raw)}  # through

        for port in ports:
            rawDict['p' + str(port)] = dcpy(CAL_port)  # s-o-l
        #

        crossterms = tuple(itertools.combinations(ports, r=2))
        for term in crossterms:
            rawDict['p' + str(term[0]) + 'p' + str(term[1])] = dcpy(CAL_cross)
        #
    else:
        print('Sorry, caltype "%s" not supported' % caltype)
        raise ValueError('caltypeError')
    #

    return rawDict
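
A quick invocation of the function above; each per-port entry receives its own deep copy of the template dicts, so filling one measurement slot does not leak into the others:

raw = generate_rawDict([1, 2])
print(sorted(raw.keys()))     # ['frequencies', 'p1', 'p1p2', 'p2']
raw['p1']['s']['a'] = 123
print(raw['p2']['s']['a'])    # still None thanks to dcpy
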
Example #28
    def apply(self, sdfg):
        graph = sdfg.nodes()[self.state_id]
        transpose_a = graph.nodes()[self.subgraph[
            MatrixProductTranspose._transpose_a]]
        _at = graph.nodes()[self.subgraph[MatrixProductTranspose._at]]
        transpose_b = graph.nodes()[self.subgraph[
            MatrixProductTranspose._transpose_b]]
        _bt = graph.nodes()[self.subgraph[MatrixProductTranspose._bt]]
        a_times_b = graph.nodes()[self.subgraph[
            MatrixProductTranspose._a_times_b]]

        for src, src_conn, _, _, memlet in graph.in_edges(transpose_a):
            graph.add_edge(src, src_conn, a_times_b, '_b', memlet)
        graph.remove_node(transpose_a)
        for src, src_conn, _, _, memlet in graph.in_edges(transpose_b):
            graph.add_edge(src, src_conn, a_times_b, '_a', memlet)
        graph.remove_node(transpose_b)
        graph.remove_node(_at)
        graph.remove_node(_bt)

        for _, _, dst, dst_conn, memlet in graph.out_edges(a_times_b):
            subset = dcpy(memlet.subset)
            subset.squeeze()
            size = subset.size()
            shape = [size[1], size[0]]
            break
        tmp_name, tmp_arr = sdfg.add_temp_transient(shape, a_times_b.dtype)
        tmp_acc = graph.add_access(tmp_name)
        transpose_c = blas.Transpose('_Transpose_', a_times_b.dtype)
        for edge in graph.out_edges(a_times_b):
            _, _, dst, dst_conn, memlet = edge
            graph.remove_edge(edge)
            graph.add_edge(transpose_c, '_out', dst, dst_conn, memlet)
        graph.add_edge(a_times_b, '_c', tmp_acc, None,
                       dace.Memlet.from_array(tmp_name, tmp_arr))
        graph.add_edge(tmp_acc, None, transpose_c, '_inp',
                       dace.Memlet.from_array(tmp_name, tmp_arr))
Example #29
    def __deepcopy__(self, memo):
        node = object.__new__(Memlet)

        # Set properties
        node._volume = dcpy(self._volume, memo=memo)
        node._dynamic = self._dynamic
        node._subset = dcpy(self._subset, memo=memo)
        node._other_subset = dcpy(self._other_subset, memo=memo)
        node._data = dcpy(self._data, memo=memo)
        node._wcr = dcpy(self._wcr, memo=memo)
        node._wcr_nonatomic = dcpy(self._wcr_nonatomic, memo=memo)
        node._debuginfo = dcpy(self._debuginfo, memo=memo)
        node._wcr_nonatomic = self._wcr_nonatomic
        node._allow_oob = self._allow_oob
        node._is_data_src = self._is_data_src

        # Nullify graph references
        node._sdfg = None
        node._state = None
        node._edge = None

        return node
Example #30
def generate_rawDict(ports, caltype='solt'):
	''' Make a dictionary to store raw signals (e.g. used for VNA calibration)
		~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		Usage:	ports = [1, 2]
				CAL_data = generate_CALdict(measPorts)

		IN:		ports		--	list of ports to be measured on
				caltype		--	(optional, str) type of structure to create
								choose from:
									- 'solt'	(short, open, load, through
													on each port + crossterms)
									- 'meas'	(custom)

		OUT:	CAL_data		--	dictionary to store raw signals
		~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	'''
	import itertools
	from copy import deepcopy as dcpy
	rawDict = {}

	rawDict['frequencies'] = None

	if caltype.lower()=='solt':

		raw = {'a': None, 'u': None, 'v': None, 'b': None}
		CAL_port = { 's': dcpy(raw), 'o': dcpy(raw), 'l': dcpy(raw) } # short, open, load
		CAL_cross = {'t': dcpy(raw) } # through

		for port in ports:
			rawDict['p'+str(port)] = dcpy(CAL_port) # s-o-l
		#

		crossterms = tuple(itertools.combinations(ports, r=2))
		for term in crossterms:
			rawDict['p'+str(term[0])+'p'+str(term[1])] = dcpy(CAL_cross)
		#
	else:
		print('Sorry, caltype "%s" not supported' % caltype)
		raise ValueError('caltypeError')
	#

	return rawDict
Example #31
    def apply(self, sdfg):
        state = sdfg.nodes()[self.state_id]
        nested_sdfg = state.nodes()[self.subgraph[CopyToDevice._nested_sdfg]]
        storage = self.storage
        created_arrays = set()

        for _, edge in enumerate(state.in_edges(nested_sdfg)):

            src, src_conn, dst, dst_conn, memlet = edge
            dataname = memlet.data
            if dataname is None:
                continue
            memdata = sdfg.arrays[dataname]

            name = 'device_' + dataname + '_in'
            if name not in created_arrays:
                if isinstance(memdata, data.Array):
                    name, _ = sdfg.add_array(
                        'device_' + dataname + '_in',
                        shape=[
                            symbolic.overapproximate(r)
                            for r in memlet.bounding_box_size()
                        ],
                        dtype=memdata.dtype,
                        transient=True,
                        storage=storage,
                        find_new_name=True)
                elif isinstance(memdata, data.Scalar):
                    name, _ = sdfg.add_scalar('device_' + dataname + '_in',
                                              dtype=memdata.dtype,
                                              transient=True,
                                              storage=storage,
                                              find_new_name=True)
                else:
                    raise NotImplementedError
                created_arrays.add(name)

            data_node = nodes.AccessNode(name)

            to_data_mm = dcpy(memlet)
            from_data_mm = dcpy(memlet)
            from_data_mm.data = name
            offset = []
            for ind, r in enumerate(memlet.subset):
                offset.append(r[0])
                if isinstance(memlet.subset[ind], tuple):
                    begin = memlet.subset[ind][0] - r[0]
                    end = memlet.subset[ind][1] - r[0]
                    step = memlet.subset[ind][2]
                    from_data_mm.subset[ind] = (begin, end, step)
                else:
                    from_data_mm.subset[ind] -= r[0]

            state.remove_edge(edge)
            state.add_edge(src, src_conn, data_node, None, to_data_mm)
            state.add_edge(data_node, None, dst, dst_conn, from_data_mm)

        for _, edge in enumerate(state.out_edges(nested_sdfg)):

            src, src_conn, dst, dst_conn, memlet = edge
            dataname = memlet.data
            if dataname is None:
                continue
            memdata = sdfg.arrays[dataname]

            name = 'device_' + dataname + '_out'
            if name not in created_arrays:
                if isinstance(memdata, data.Array):
                    name, _ = sdfg.add_array(
                        name,
                        shape=[
                            symbolic.overapproximate(r)
                            for r in memlet.bounding_box_size()
                        ],
                        dtype=memdata.dtype,
                        transient=True,
                        storage=storage,
                        find_new_name=True)
                elif isinstance(memdata, data.Scalar):
                    name, _ = sdfg.add_scalar(name,
                                              dtype=memdata.dtype,
                                              transient=True,
                                              storage=storage)
                else:
                    raise NotImplementedError
                created_arrays.add(name)

            data_node = nodes.AccessNode(name)

            to_data_mm = dcpy(memlet)
            from_data_mm = dcpy(memlet)
            to_data_mm.data = name
            offset = []
            for ind, r in enumerate(memlet.subset):
                offset.append(r[0])
                if isinstance(memlet.subset[ind], tuple):
                    begin = memlet.subset[ind][0] - r[0]
                    end = memlet.subset[ind][1] - r[0]
                    step = memlet.subset[ind][2]
                    to_data_mm.subset[ind] = (begin, end, step)
                else:
                    to_data_mm.subset[ind] -= r[0]

            state.remove_edge(edge)
            state.add_edge(src, src_conn, data_node, None, to_data_mm)
            state.add_edge(data_node, None, dst, dst_conn, from_data_mm)

        # Change storage for all data inside nested SDFG to device.
        change_storage(nested_sdfg.sdfg, storage)
Example #32
    def apply(self, sdfg):
        """
            This method applies the mapfusion transformation. 
            Other than the removal of the second map entry node (SME), and the first
            map exit (FME) node, it has the following side effects:

            1.  Any transient adjacent to both FME and SME with degree = 2 will be removed. 
                The tasklets that use/produce it shall be connected directly with a 
                scalar/new transient (if the dataflow is more than a single scalar)

            2.  If this transient is adjacent to FME and SME and has other
                uses, it will be adjacent to the new map exit post fusion.
                Tasklet-> Tasklet edges will ALSO be added as mentioned above.

            3.  If an access node is adjacent to FME but not SME, it will be
                adjacent to new map exit post fusion.

            4.  If an access node is adjacent to SME but not FME, it will be
                adjacent to the new map entry node post fusion.

        """
        graph = sdfg.nodes()[self.state_id]
        first_exit = graph.nodes()[self.subgraph[MapFusion._first_map_exit]]
        first_entry = graph.entry_node(first_exit)
        second_entry = graph.nodes()[self.subgraph[
            MapFusion._second_map_entry]]
        second_exit = graph.exit_nodes(second_entry)[0]

        intermediate_nodes = set()
        for _, _, dst, _, _ in graph.out_edges(first_exit):
            intermediate_nodes.add(dst)
            assert isinstance(dst, nodes.AccessNode)

        # Check if an access node refers to non transient memory, or transient
        # is used at another location (cannot erase)
        do_not_erase = set()
        for node in intermediate_nodes:
            if sdfg.arrays[node.data].transient is False:
                do_not_erase.add(node)
            else:
                for edge in graph.in_edges(node):
                    if edge.src != first_exit:
                        do_not_erase.add(node)
                        break
                else:
                    for edge in graph.out_edges(node):
                        if edge.dst != second_entry:
                            do_not_erase.add(node)
                            break

        # Find permutation between first and second scopes
        perm = MapFusion.find_permutation(first_entry.map, second_entry.map)
        params_dict = {}
        for index, param in enumerate(first_entry.map.params):
            params_dict[param] = second_entry.map.params[perm[index]]

        # Replaces (in memlets and tasklet) the second scope map
        # indices with the permuted first map indices.
        # This works in two passes to avoid problems when e.g., exchanging two
        # parameters (instead of replacing (j,i) and (i,j) to (j,j) and then
        # i,i).
        second_scope = graph.scope_subgraph(second_entry)
        for firstp, secondp in params_dict.items():
            if firstp != secondp:
                replace(second_scope, secondp, '__' + secondp + '_fused')
        for firstp, secondp in params_dict.items():
            if firstp != secondp:
                replace(second_scope, '__' + secondp + '_fused', firstp)

        # Isolate First exit node
        ############################
        edges_to_remove = set()
        nodes_to_remove = set()
        for edge in graph.in_edges(first_exit):
            memlet_path = graph.memlet_path(edge)
            edge_index = next(i for i, e in enumerate(memlet_path)
                              if e == edge)
            access_node = memlet_path[-1].dst
            if access_node not in do_not_erase:
                out_edges = [
                    e for e in graph.out_edges(access_node)
                    if e.dst == second_entry
                ]
                # In this transformation, there can only be one edge to the
                # second map
                assert len(out_edges) == 1
                # Get source connector to the second map
                connector = out_edges[0].dst_conn[3:]

                new_dst = None
                new_dst_conn = None
                # Look at the second map entry out-edges to get the new
                # destination
                for _e in graph.out_edges(second_entry):
                    if _e.src_conn[4:] == connector:
                        new_dst = _e.dst
                        new_dst_conn = _e.dst_conn
                        break
                if new_dst is None:
                    # Access node is not used in the second map
                    nodes_to_remove.add(access_node)
                    continue
                # If the source is an access node, modify the memlet to point
                # to it
                if (isinstance(edge.src, nodes.AccessNode)
                        and edge.data.data != edge.src.data):
                    edge.data.data = edge.src.data
                    edge.data.subset = ("0" if edge.data.other_subset is None
                                        else edge.data.other_subset)
                    edge.data.other_subset = None

                else:
                    # Add a transient scalar/array
                    self.fuse_nodes(sdfg, graph, edge, new_dst, new_dst_conn)

                edges_to_remove.add(edge)

                # Remove transient node between the two maps
                nodes_to_remove.add(access_node)
            else:  # The case where intermediate array node cannot be removed
                # Node will become an output of the second map exit
                out_e = memlet_path[edge_index + 1]
                conn = second_exit.next_connector()
                graph.add_edge(
                    second_exit,
                    'OUT_' + conn,
                    out_e.dst,
                    out_e.dst_conn,
                    dcpy(out_e.data),
                )
                second_exit.add_out_connector('OUT_' + conn)

                graph.add_edge(edge.src, edge.src_conn, second_exit,
                               'IN_' + conn, dcpy(edge.data))
                second_exit.add_in_connector('IN_' + conn)

                edges_to_remove.add(out_e)

                # If the second map needs this node, link the connector
                # that generated this to the place where it is needed, with a
                # temp transient/scalar for memlet to be generated
                for out_e in graph.out_edges(second_entry):
                    second_memlet_path = graph.memlet_path(out_e)
                    source_node = second_memlet_path[0].src
                    if source_node == access_node:
                        self.fuse_nodes(sdfg, graph, edge, out_e.dst,
                                        out_e.dst_conn)

                edges_to_remove.add(edge)
        ###
        # First scope exit is isolated and can now be safely removed
        for e in edges_to_remove:
            graph.remove_edge(e)
        graph.remove_nodes_from(nodes_to_remove)
        graph.remove_node(first_exit)

        # Isolate second_entry node
        ###########################
        for edge in graph.in_edges(second_entry):
            memlet_path = graph.memlet_path(edge)
            edge_index = next(i for i, e in enumerate(memlet_path)
                              if e == edge)
            access_node = memlet_path[0].src
            if access_node in intermediate_nodes:
                # Already handled above, can be safely removed
                graph.remove_edge(edge)
                continue

            # This is an external input to the second map which will now go
            # through the first map.
            conn = first_entry.next_connector()
            graph.add_edge(edge.src, edge.src_conn, first_entry, 'IN_' + conn,
                           dcpy(edge.data))
            first_entry.add_in_connector('IN_' + conn)
            graph.remove_edge(edge)
            out_e = memlet_path[edge_index + 1]
            graph.add_edge(
                first_entry,
                'OUT_' + conn,
                out_e.dst,
                out_e.dst_conn,
                dcpy(out_e.data),
            )
            first_entry.add_out_connector('OUT_' + conn)

            graph.remove_edge(out_e)
        ###
        # Second node is isolated and can now be safely removed
        graph.remove_node(second_entry)

        # Fix scope exit to point to the right map
        second_exit.map = first_entry.map
Example #33
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        first_map_exit = graph.nodes()[candidate[MapFusion._first_map_exit]]
        first_map_entry = graph.entry_node(first_map_exit)
        second_map_entry = graph.nodes()[candidate[
            MapFusion._second_map_entry]]

        for _in_e in graph.in_edges(first_map_exit):
            if _in_e.data.wcr is not None:
                for _out_e in graph.out_edges(second_map_entry):
                    if _out_e.data.data == _in_e.data.data:
                        # wcr is on a node that is used in the second map, quit
                        return False
        # Check whether there is a pattern map -> access -> map.
        intermediate_nodes = set()
        intermediate_data = set()
        for _, _, dst, _, _ in graph.out_edges(first_map_exit):
            if isinstance(dst, nodes.AccessNode):
                intermediate_nodes.add(dst)
                intermediate_data.add(dst.data)

                # If array is used anywhere else in this state.
                num_occurrences = len([
                    n for n in graph.nodes()
                    if isinstance(n, nodes.AccessNode) and n.data == dst.data
                ])
                if num_occurrences > 1:
                    return False
            else:
                return False
        # Check map ranges
        perm = MapFusion.find_permutation(first_map_entry.map,
                                          second_map_entry.map)
        if perm is None:
            return False

        # Check if any intermediate transient is also going to another location
        second_inodes = set(e.src for e in graph.in_edges(second_map_entry)
                            if isinstance(e.src, nodes.AccessNode))
        transients_to_remove = intermediate_nodes & second_inodes
        # if any(e.dst != second_map_entry for n in transients_to_remove
        #        for e in graph.out_edges(n)):
        if any(graph.out_degree(n) > 1 for n in transients_to_remove):
            return False

        # Create a dict that maps parameters of the first map to those of the
        # second map.
        params_dict = {}
        for _index, _param in enumerate(first_map_entry.map.params):
            params_dict[_param] = second_map_entry.map.params[perm[_index]]

        out_memlets = [e.data for e in graph.in_edges(first_map_exit)]

        # Check that input set of second map is provided by the output set
        # of the first map, or other unrelated maps
        for second_edge in graph.out_edges(second_map_entry):
            # Memlets that do not come from one of the intermediate arrays
            if second_edge.data.data not in intermediate_data:
                # however, if intermediate_data eventually leads to
                # second_memlet.data, need to fail.
                for _n in intermediate_nodes:
                    source_node = _n
                    destination_node = graph.memlet_path(second_edge)[0].src
                    # NOTE: Assumes graph has networkx version
                    if destination_node in nx.descendants(
                            graph._nx, source_node):
                        return False
                continue

            provided = False

            # Compute second subset with respect to first subset's symbols
            sbs_permuted = dcpy(second_edge.data.subset)
            sbs_permuted.replace({
                symbolic.pystr_to_symbolic(k): symbolic.pystr_to_symbolic(v)
                for k, v in params_dict.items()
            })

            for first_memlet in out_memlets:
                if first_memlet.data != second_edge.data.data:
                    continue

                # If there is a covered subset, it is provided
                if first_memlet.subset.covers(sbs_permuted):
                    provided = True
                    break

            # If none of the output memlets of the first map provide the info,
            # fail.
            if provided is False:
                return False

        # Success
        return True