Example #1
 def test_mutual_accessibility_on_very_deep_graph(self):
     gr = pygraph.classes.graph.graph()
     gr.add_nodes(range(0, 5001))
     for i in range(0, 5000):
         gr.add_edge((i, i + 1))
     recursionlimit = getrecursionlimit()
     mutual_accessibility(gr)
     assert getrecursionlimit() == recursionlimit
Example #2
 def test_mutual_accessibility_on_very_deep_graph(self):
     gr = pygraph.classes.graph.graph()
     gr.add_nodes(range(0,5001))
     for i in range(0,5000):
         gr.add_edge((i,i+1))
     recursionlimit = getrecursionlimit()
     mutual_accessibility(gr)
     assert getrecursionlimit() == recursionlimit
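For reference, mutual_accessibility maps every node to the list of nodes that it can reach and that can reach it back, i.e. its strongly connected component. A minimal sketch of that return value (assuming the python-graph package is installed; the toy graph below is made up for illustration):

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import mutual_accessibility

g = digraph()
g.add_nodes([1, 2, 3, 4])
# 1 -> 2 -> 3 -> 1 is a cycle; 4 is reachable from 3 but never reaches back
for edge in [(1, 2), (2, 3), (3, 1), (3, 4)]:
    g.add_edge(edge)

ma = mutual_accessibility(g)
assert sorted(ma[1]) == [1, 2, 3]   # the SCC containing 1, 2 and 3
assert sorted(ma[4]) == [4]         # a trivial one-node SCC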
Example #3
    def schwartz_set_heuristic(self):

        # Iterate through using the Schwartz set heuristic
        self.actions = []
        while len(self.graph.edges()) > 0:
            access = accessibility(self.graph)
            mutual_access = mutual_accessibility(self.graph)
            candidates_to_remove = set()
            for candidate in self.graph.nodes():
                candidates_to_remove |= (set(access[candidate]) -
                                         set(mutual_access[candidate]))

            # Remove nodes at the end of non-cycle paths
            if len(candidates_to_remove) > 0:
                self.actions.append({'nodes': candidates_to_remove})
                for candidate in candidates_to_remove:
                    self.graph.del_node(candidate)

            # If none exist, remove the weakest edges
            else:
                edge_weights = self.edge_weights(self.graph)
                self.actions.append({
                    'edges':
                    matching_keys(edge_weights, min(edge_weights.values()))
                })
                for edge in self.actions[-1]["edges"]:
                    self.graph.del_edge(edge)

        self.graph_winner()
Example #4
    def schwartz_set_heuristic(self):

        # Iterate through using the Schwartz set heuristic
        self.actions = []
        while len(self.graph.edges()) > 0:
            access = accessibility(self.graph)
            mutual_access = mutual_accessibility(self.graph)
            candidates_to_remove = set()
            for candidate in self.graph.nodes():
                candidates_to_remove |= (set(access[candidate]) -
                                         set(mutual_access[candidate]))

            # Remove nodes at the end of non-cycle paths
            if len(candidates_to_remove) > 0:
                self.actions.append({'nodes': candidates_to_remove})
                for candidate in candidates_to_remove:
                    self.graph.del_node(candidate)

            # If none exist, remove the weakest edges
            else:
                edge_weights = self.edge_weights(self.graph)
                self.actions.append({
                    'edges': matching_keys(edge_weights,
                                           min(edge_weights.values()))
                })
                for edge in self.actions[-1]["edges"]:
                    self.graph.del_edge(edge)

        self.graph_winner()
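The heuristic above prunes candidates that are reachable but not mutually reachable: anything in access[candidate] but not in mutual_access[candidate] lies outside every cycle through that candidate. A minimal sketch of that difference (assuming python-graph; the toy "beats" graph is made up for illustration):

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility, mutual_accessibility

g = digraph()
g.add_nodes(['a', 'b', 'c'])
g.add_edge(('a', 'b'))   # a beats b
g.add_edge(('b', 'a'))   # b beats a, so a and b form a cycle
g.add_edge(('a', 'c'))   # c never beats anyone

access = accessibility(g)           # 'c' is reachable from 'a'...
mutual = mutual_accessibility(g)    # ...but not mutually reachable

to_remove = set(access['a']) - set(mutual['a'])
assert to_remove == {'c'}           # the heuristic would drop 'c' first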
Example #5
 def __init__(self, graph):
     pygraph = GraphConversions.graph_to_pygraph(graph)
     self.__mutual_acc = mutual_accessibility(pygraph)
     self.__sccs = sorted(
         set(
             tuple(scc) for scc in self.__mutual_acc.values()
             if len(scc) > 1))
Example #6
 def __init__(self, sdfgraph, num_processors=1):
     """ initialize scheduler class
     """
     self.sdfgraph = sdfgraph
     self.strong_connected = mutual_accessibility(sdfgraph.graph)
     self.num_processors = num_processors
     self.steady_schedule = {}
     self.boot_schedule = []
     self.max_lev_diff = 1
Example #7
def getSCC(gr):

    z = mutual_accessibility(gr)


    z_sorted = []
    for zz in z:
        z_sorted.append(zz)

    komp = []
    for zz in z_sorted:
        if not z[zz] in komp:
            komp.append(z[zz])


# SCCs computed

    komp2 = komp[:]

    for i in range(0, len(komp)):  # repeat the reordering pass over the components
        for item in komp:
            for stam in item:
                for item2 in komp:
                    if item != item2:
                        for stam2 in item2:
                            if gr.has_edge((stam, stam2)):
                                a = komp2.index(item2)
                                b = komp2.index(item)
                                if a < b:  # swap them
                                    komp2[b], komp2[a] = komp2[a], komp2[b]

    komp3 = []
    for komponent in komp2:
        con = 0
        if komp3 == []:
            komp3.append(komponent)
            item = komponent
        else:
            for node in komponent:
                for node2 in item:
                    if gr.has_edge((node2, node)):
                        con = 1
            if(con == 1):
                komp3.append(komponent)
                item = komponent
            else:
                for node in komponent:
                    item.append(node)


    loop_fuse = False  # presumably a module-level flag in the original source; kept off here
    if loop_fuse:
        komp2 = komp3



    return komp2
Example #8
    def get_all_cycles(self):
        """
        Currently, this just returns the result of the call to
        mutual_accessibility, which actually returns the set of
        strongly connected components in the graph. From that result,
        it should be possible to get the actual cycles.

        But this is not implemented yet.
        """
        return mutual_accessibility(self.graph)
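As the docstring notes, mutual_accessibility returns strongly connected components rather than explicit cycles. One possible way to recover a concrete cycle per non-trivial SCC, sketched here as an assumption rather than taken from the project above, is to induce a subgraph on each SCC and hand it to pygraph's find_cycle:

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import mutual_accessibility
from pygraph.algorithms.cycles import find_cycle

def one_cycle_per_scc(graph):
    """Return one concrete cycle (list of nodes) for every SCC with at least two nodes."""
    cycles = []
    for scc in set(frozenset(c) for c in mutual_accessibility(graph).values()):
        if len(scc) < 2:
            continue  # a single node without a self-loop forms no cycle
        sub = digraph()
        sub.add_nodes(scc)
        for u, v in graph.edges():
            if u in scc and v in scc:
                sub.add_edge((u, v))
        cycles.append(find_cycle(sub))
    return cycles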
Example #9
 def test_mutual_accessibility_in_digraph(self):
     gr = testlib.new_digraph()
     
     ma = mutual_accessibility(gr)
     for n in gr:
         for m in gr:
             if (m in ma[n]):
                 assert m in depth_first_search(gr, n)[0]
                 assert n in depth_first_search(gr, m)[0]
             else:
                 assert m not in depth_first_search(gr, n)[0] or n not in depth_first_search(gr, m)[0]
Example #10
def biggest_strongly_connected_component(g):
    ma = mutual_accessibility(g)
    max_component = []
    for component in ma.values():
        if len(component) > len(max_component):
            max_component = component
    g2 = digraph()
    g2.add_nodes(max_component)
    for edge in g.edges():
        if edge[0] in max_component and edge[1] in max_component:
            g2.add_edge(edge)
    return g2
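A small usage sketch for the helper above (assuming python-graph; the toy graph is made up): the returned digraph keeps only the nodes and edges of the largest strongly connected component.

from pygraph.classes.digraph import digraph

g = digraph()
g.add_nodes(range(1, 6))
for edge in [(1, 2), (2, 3), (3, 1),   # a 3-node cycle
             (3, 4), (4, 5)]:          # a tail hanging off it
    g.add_edge(edge)

core = biggest_strongly_connected_component(g)
assert sorted(core.nodes()) == [1, 2, 3]
assert len(core.edges()) == 3          # only the edges inside the cycle survive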
Example #11
 def test_mutual_accessibility_in_graph(self):
     gr = testlib.new_graph()
     gr.add_nodes(['a','b','c'])
     gr.add_edge(('a','b'))
     gr.add_edge(('a','c'))
     
     ma = mutual_accessibility(gr)
     for n in gr:
         for m in gr:
             if (m in ma[n]):
                 assert m in depth_first_search(gr, n)[0]
                 assert n in depth_first_search(gr, m)[0]
             else:
                 assert m not in depth_first_search(gr, n)[0] or n not in depth_first_search(gr, m)[0]
Example #12
    def test_mutual_accessibility_in_graph(self):
        gr = testlib.new_graph()
        gr.add_nodes(['a', 'b', 'c'])
        gr.add_edge(('a', 'b'))
        gr.add_edge(('a', 'c'))

        ma = mutual_accessibility(gr)
        for n in gr:
            for m in gr:
                if (m in ma[n]):
                    assert m in depth_first_search(gr, n)[0]
                    assert n in depth_first_search(gr, m)[0]
                else:
                    assert m not in depth_first_search(
                        gr, n)[0] or n not in depth_first_search(gr, m)[0]
Example #13
def find_rejecting_sccs(automaton):
    """ Return set of SCC(set of nodes) containing a rejecting transition.
    """

    g = _convert_to_digraph(automaton.nodes)
    sccs = mutual_accessibility(g)
    rejecting_sccs = set()
    edges = _build_edges_map(g)  # dict of source node to list of destination nodes

    for _, nodes in sccs.items():
        for n in nodes:
            edges_within_scc = set(edges[n]).intersection(set(nodes))  # reachable nodes which are in SCC

            is_rejecting = sum(map(lambda next: g.get_edge_properties((n, next))['is_rejecting'],
                edges_within_scc))
            if is_rejecting > 0:
                rejecting_sccs.add(frozenset(nodes))

    return rejecting_sccs
Example #14
def find_all_cycles(node2arc_targets):
    """ cycle aka strongly connected component of a digraph """
    result = mutual_accessibility(_graph2py_digraph(node2arc_targets))
    cycles = []
    nodes_in_cycles = set()
    for node, cycle in result.items():
        if len(cycle) <= 1:
            continue
        if node in nodes_in_cycles:
            #assert cycles.count(cycle) == 1
            continue
        cycles.append(set(cycle))
        for node in cycle:
            assert not node in nodes_in_cycles
            nodes_in_cycles.add(node)
    for cycle in cycles:
        print(node, cycle)
    pdb.set_trace()
    return cycles
Example #15
def find_rejecting_sccs(automaton):
    """ Return set of SCC(set of nodes) containing a rejecting transition.
    """

    g = _convert_to_digraph(automaton.nodes)
    sccs = mutual_accessibility(g)
    rejecting_sccs = set()
    edges = _build_edges_map(g)

    for _, nodes in sccs.items():
        for n in nodes:
            edges_within_scc = set(edges[n]).intersection(set(nodes))

            is_rejecting = sum(map(lambda next: g.get_edge_properties((n, next))['is_rejecting'],
                edges_within_scc))
            if is_rejecting > 0:
                rejecting_sccs.add(frozenset(nodes))

    return rejecting_sccs
Example #16
def find_all_cycles(node2arc_targets):
    """ cycle aka strongly connected component of a digraph.
        Return list of stronly connected components.
    """
    result = mutual_accessibility(_graph2py_digraph(node2arc_targets))
    cycles = []
    nodes_in_cycles = set()
    for node, cycle in result.items():
        if len(cycle) <= 1:
            continue
        if node in nodes_in_cycles:
            #assert cycles.count(cycle) == 1
            continue
        cycles.append(set(cycle))
        for node in cycle:
            assert not node in nodes_in_cycles
            nodes_in_cycles.add(node)
    for cycle in cycles:
        print(node, cycle)
    return cycles
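Both variants of find_all_cycles rely on a private _graph2py_digraph helper that is not shown in these listings. Presumably it just converts the node2arc_targets adjacency dict into a pygraph digraph; the sketch below is an assumption about its behaviour, not code from the original project:

from pygraph.classes.digraph import digraph

def _graph2py_digraph(node2arc_targets):
    """Assumed input shape: {node: iterable of successor nodes}."""
    g = digraph()
    for node in node2arc_targets:
        g.add_node(node)
    for node, targets in node2arc_targets.items():
        for target in targets:
            if not g.has_node(target):
                g.add_node(target)              # targets may not appear as keys
            if not g.has_edge((node, target)):
                g.add_edge((node, target))      # skip duplicate arcs
    return g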
Example #17
def find_rejecting_sccs(automaton):
    """
    :return: set of SCCs (each a set of nodes) that
             contain a rejecting transition between two nodes of the SCC.
    """

    g = _convert_to_digraph(automaton.nodes)
    sccs = mutual_accessibility(g)
    rejecting_sccs = set()
    dst_nodes_by_node = _build_edges_map(g)

    for scc in sccs.values():
        for n in scc:
            n_dst_nodes_within_scc = dst_nodes_by_node[n] & set(scc)
            if not n_dst_nodes_within_scc:
                continue

            is_acc = any(g.get_edge_properties((n, dst))['is_acc']
                         for dst in n_dst_nodes_within_scc)

            if is_acc:
                rejecting_sccs.add(frozenset(scc))

    return rejecting_sccs
Example #18
def find_final_sccs(automaton:Automaton) -> Set[FrozenSet[Node]]:
    """
    :return: set of SCCs (each a set of nodes) that
             contain a final transition between two nodes of the SCC.
    """

    g = _convert_to_digraph(automaton.nodes)
    sccs = mutual_accessibility(g)
    final_sccs = set()
    dst_nodes_by_node = _build_edges_map(g)

    for scc in sccs.values():
        for n in scc:
            n_dst_nodes_within_scc = dst_nodes_by_node[n] & set(scc)
            if not n_dst_nodes_within_scc:
                continue

            is_acc = any(g.get_edge_properties((n, dst))['is_acc']
                         for dst in n_dst_nodes_within_scc)

            if is_acc:
                final_sccs.add(frozenset(scc))

    return final_sccs
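Examples #13, #15, #17 and #18 all call a private _build_edges_map(g) that is not included here. Judging from how the result is used (dst_nodes_by_node[n] & set(scc)), it presumably maps every node to the set of its direct successors; a short sketch under that assumption:

def _build_edges_map(g):
    """Assumed behaviour: map each node of a pygraph digraph to the set of nodes
    it has an outgoing edge to."""
    return {node: set(g.neighbors(node)) for node in g.nodes()}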
Example #19
print post
print "\n"

bfs, bfsord= breadth_first_search(gr, root='A')
print "Breadth first search"
print "Spanning tree"
print bfs
print "level-based ordering"
print bfsord
print "\n"

print "Accessibility"
access= accessibility(gr)
print access
print "Mutual accessibility"
maccess= mutual_accessibility(gr)
print maccess
print "\n"

print "Traversal"
trav= traversal(gr, 'A', 'pre')
for t in trav:
  print t
print "Transitive Edges"
tredges= transitive_edges(gr)
print tredges
print "\n"

print "shortest_path_bellman_ford"
short= shortest_path_bellman_ford(gr, 'A')
print short
Example #20
def convert_history(files, tasks, releases, objects):
    """Converts the Synergy history between two releases to a Git compatible one."""

    log.basicConfig(filename="convert_history.log", level=log.DEBUG)

    file_objects = [ccm_cache.get_object(o) for o in objects]
    log.info("Looking for cycles in the File History graph")
    while find_cycle(files):
        cycle = find_cycle(files)
        log.info("\tA cycle was found!")
        log.info("\tCycle: %s" % ", ".join(cycle))

        # Find the newest file
        newest = max(
            cycle,
            key=lambda x: [
                fileobject.get_integrate_time() for fileobject in file_objects if fileobject.get_objectname() == x
            ][0],
        )
        log.info("\tObject %s is the newest in the cycle: it should not have successors!" % newest)

        # Remove the outgoing link from the newest file
        for successor in files.neighbors(newest):
            if successor in cycle:
                files.del_edge((newest, successor))
                log.info("\tRemoved the %s -> %s edge" % (newest, successor))

    log.info("Remove transitive edges in the File History graph")
    for edge in transitive_edges(files):
        if edge in files.edges():
            files.del_edge(edge)
        else:
            log.warning("Weird, transitive edge not found!")

    log.info("Sanitize tasks")
    sanitized_tasks = _sanitize_tasks(tasks)

    log.info("Create commits graph")
    commits = create_commits_graph(files, sanitized_tasks, releases)

    # Uncomment for debug... (remember import)
    # hack = {'previous': releases.edges()[0]}
    # htg.commit_graph_to_image(commits, hack, tasks, name='Pre-'+releases.edges()[1])

    log.info("Looking for cycles in the Commits graph")
    while find_cycle(commits):
        log.info("Finding strictly connected components")
        cycle = max(mutual_accessibility(commits).values(), key=len)

        # cycle = find_cycle(commits)

        log.info("\tA cycle was found!")
        log.info("\tCycle: %s" % ", ".join(cycle))

        log.info("Find the nodes in the cycle going from one task to another")
        culpript_edges = []
        for task in cycle:
            for obj in tasks.links(task):
                for neighbor in files.neighbors(obj):
                    if neighbor not in tasks.links(task) and tasks.links(neighbor)[0] in cycle:
                        culpript_edges.append((obj, neighbor))
                        log.info("\tAdding culpript edge (%s, %s)" % (obj, neighbor))

        log.info("Connect the nodes found")
        culpript_nodes = set()
        for head, tail in culpript_edges:
            culpript_nodes.add(head)
            culpript_nodes.add(tail)
        for head, tail in permutations(culpript_nodes, 2):
            if tasks.links(head)[0] == tasks.links(tail)[0] and (head, tail) not in culpript_edges:
                log.info("\tAdding edge (%s, %s)" % (head, tail))
                culpript_edges.append((head, tail))

        reduced_digraph = digraph()
        reduced_digraph.add_nodes(culpript_nodes)
        [reduced_digraph.add_edge(edge) for edge in culpript_edges]

        shortest_cycle = max(mutual_accessibility(reduced_digraph).values(), key=len)
        log.info("Cycle in objects: %s" % shortest_cycle)

        candidate_cuts = []

        # Find the tasks
        t = set()
        for node in shortest_cycle:
            t.add(tasks.links(node)[0])
        log.info("T: %s" % str(t))

        for i in t:
            log.info("Cuts for task %s" % i)
            # Find the objects in the cycle belonging to task i
            obj_in_task = set(tasks.links(i)) & set(shortest_cycle)
            log.info("Objects in cycle and task: %s" % obj_in_task)
            if len(obj_in_task) < 15:
                if len(obj_in_task) > 1:
                    for j in range(1, len(obj_in_task) / 2 + 1):
                        candidate_cuts.extend([k for k in combinations(obj_in_task, j)])
            else:
                log.info("Cycle too long...")
                pass
        log.info("Candidate_cuts: %s" % str(candidate_cuts))

        # Find the cut to break the cycle
        cut = _find_cut(candidate_cuts, cycle, tasks, files, releases)
        if not cut:
            # Make a qualified guess of a cut with the shortest walk of files in the tasks
            walk, node = _find_shortest_incident_or_neighbor_walk(shortest_cycle, cycle, files, tasks)
            new_cut = walk
            new_cut.append(node)
            candidate_cuts.insert(0, tuple(new_cut))

            log.info("Candidate cuts: %s", candidate_cuts)
            cut = _find_cut(candidate_cuts, cycle, tasks, files, releases)

            if not cut:
                # Error! This should not happen
                log.info("Cut not found.")
                log.shutdown()
                raise Exception("Cut not found")

        tasks, task, task_name = _apply_cut(cut, tasks)
        commits = create_commits_graph(files, tasks, releases)

    else:
        log.info("No cycles found")

    log.shutdown()
    return commits
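Instead of taking a single cycle from find_cycle, the loop above picks the largest strongly connected component of the commits graph with max(mutual_accessibility(...).values(), key=len). Isolated in a minimal sketch (assuming python-graph; the toy graph is made up), the idiom looks like this:

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import mutual_accessibility

g = digraph()
g.add_nodes(['a', 'b', 'c', 'd', 'e'])
for edge in [('a', 'b'), ('b', 'a'),               # a 2-node SCC
             ('c', 'd'), ('d', 'e'), ('e', 'c')]:  # a 3-node SCC
    g.add_edge(edge)

largest_scc = max(mutual_accessibility(g).values(), key=len)
assert sorted(largest_scc) == ['c', 'd', 'e']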
Example #21
def GenerateSCC(j, permute):

    gr = DepGraph(j)

    z = mutual_accessibility(gr)


    z_sorted = []
    for zz in z:
        z_sorted.append(zz)

    komp = []
    for zz in z_sorted:
        if not z[zz] in komp:
            komp.append(z[zz])


   
# SCCs computed

    komp2 = komp[:]

    for i in range(0, len(komp)):  # repeat the reordering pass over the components
        for item in komp:
            for stam in item:
                for item2 in komp:
                    if item != item2:
                        for stam2 in item2:
                            if gr.has_edge((stam, stam2)):
                                a = komp2.index(item2)
                                b = komp2.index(item)
                                if a < b:  # swap them
                                    komp2[b], komp2[a] = komp2[a], komp2[b]




    rel = gen.RelPrint("tmp/petit_loop_"+str(j)+".t", 1)



    
    dane = gr.nodes()
    paczka = loop_tools.ReadStatementNests("tmp/petit_loop_"+str(j)+".t", dane)
    combo = paczka[0]
    instrukcje = combo
    granice = paczka[2]

    max_nest = 0
    for c in combo:
        if c['nest'] > max_nest:
            max_nest = c['nest']
        
   
    pattern = re.compile("\n#.*$")
    rel = pattern.sub("", rel)
    pattern = re.compile("{.*\->")
    
    t = str(re.findall(pattern, rel)[0])
    t = t.replace(" ", "")
    t = t.replace("->", "")
    t = t + " : true};"



    # loop fusion (and permutation) to be handled in the future
    if(False):
        komp3 = []
        for komponent in komp2:
            con = 0
            if komp3 == []:
                komp3.append(komponent)
                item = komponent
            else:
                for node in komponent:
                    for node2 in item:
                        if gr.has_edge((node2, node)):
                            con = 1
                if(con == 1):
                    komp3.append(komponent)
                    item = komponent
                else:
                    for node in komponent:
                        item.append(node)

        loop_fuse = False

        if(loop_fuse):
            komp2 = komp3


        
    params = ""
    if("] -> {" in rel):
        params = re.findall(".*\] -> \{", rel)[0].replace("{", "") 
         

    zbiory = []
    for komponent in komp2:
        warunek = "("
        for line in komponent:
            if warunek != "(":
                warunek = warunek + "or"
            warunek = warunek + " v=" + line + " "
        warunek = warunek + ") and true"
        zbior = t.replace("true", warunek)
        zbiory.append(zbior)

    file = open("tmp/barv_scc.txt", 'w')
    
    if(1 == 1):
        file.write('R:=' + rel + ';\n')
        file.write('S:= dom R + ran R;')
        
        i = 0
        for zbior in zbiory:
            file.write('S'+str(i)+':=' + zbior)
            file.write('S'+str(i)+':= S * S'+str(i)+';')
            file.write('codegen S'+str(i)+';')
            i = i + 1
            
    i = 0
    permutate_list = []

    for komponent in komp2:
        set = params+"{["
        ll = find_instr(komponent, instrukcje)   # if the component comes from loop fusion, the most deeply nested statement must be chosen
        vars = instrukcje[ll]['vars']

        #for q in range(0, max_nest+1):
        #    set = set + "i" + str(q) + ","
        set = set + ','.join(vars) + ",v ] : "

        #set = set[:-1] + "] : "
        for line in komponent:
            path = []
            for item in combo:
                for z in item['st']:
                    if int(line) == int(z):
                        path = item['path']
            for q in range(0, len(vars)):
                if(q < len(path)):
                    set = set + granice[path[q]]['lb'] + " <= "+ vars[q] +" <= " + granice[path[q]]['ub'] + " and "
                    #set = set + granice[path[q]]['lb'] + " <= i" + str(q) + " <= " + granice[path[q]]['ub'] + " and "
                else:
                    set = set + " " + vars[q] + " = -1 and "
            
            set = set + " v = " + line +" and true or "
        set = set + " false}; "


        if(1==0): #permute

            # only dependences from the SCC, TODO
            rel = imperf_tile.PermuteBlock("tmp/deps" + str(j) + ".txt", set, instrukcje[ll], params, len(instrukcje[ll]['vars']), permutate_list)



            set = isl.Set(str(set))

            # identity relation - do nothing


            if(rel.is_equal(rel.identity(rel.get_space()))):
                permutate_list.pop()
            else:
                set = set.apply(rel)
            set_size = set.dim(isl.dim_type.set)
            set = set.insert_dims(isl.dim_type.set, set_size-1, max_nest+1-set_size)
            tmp_set = '{[' + ','.join(instrukcje[ll]['vars']) +',-1' * (max_nest - len(instrukcje[ll]['vars'])) + ',v]}'
            set = set.intersect(isl.Set(tmp_set)).coalesce()

            set = 'S'+str(i)+':=' + str(set) + ";"



            print set

            file.write(set)
            file.write('codegen S'+str(i)+';')
            i = i + 1
    
    
    file.close();




    
    cmd = barv_script + " < tmp/barv_scc.txt" 
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    process.wait()
    output = process.stdout.read()


    # add s1 to the loop
    '''lines = output.split("\n")
    lines = filter(lambda x:x!='',lines)   
    loop = ""
    for i in range(0,len(lines)):
        tmp = lines[i]
        pattern = re.compile("^\W*\(")
        if pattern.match(tmp):
            tmp = tmp.replace("(", "s1(")
            tmp = tmp.replace(", ", ",")
        loop = loop + tmp
# if(i < len(lines)-1):
        loop = loop + "\n"
        
    linestring = open("tmp/petit_loop_"+str(j)+".t", 'r').read()
    petit_lines = linestring.split('\n')
      
    lines = loop.split("\n")
    
    debug = False
    loop = ""
    for line in lines:
        if("s1(" in line):
            pattern = re.compile(",[^,]+\)")
            st = pattern.findall(line)[0].replace(",", "").replace(")", "")
            if(not st.isdigit()):
                print "== ERROR =="
                sys.exit()
            numb_st = int(st)
            st = petit_lines[int(st)-1]
            st = st.replace(",", "][")
            
            # parentheses should only be converted if preceded by alphanumeric
            # characters; otherwise they are not array subscripts
            st =  re.sub("\s*\(", "(", st)
            st =  re.sub("\s*\)", ")", st)
            st =  re.sub(r'(?<=[a-zA-Z0-9_])\(', '[', st)
            st =  re.sub(r'(?<=[a-zA-Z0-9_])\)', ']', st)
            
            # look for the vector
            for item in combo:
                if numb_st in item['st']:
                    vec = item['vars']
                    if debug:
                        loop = loop +  get_tab(line) + "// " + ",".join(item['vars']) + "\n"
                    break
            
            # new vector
            vec_new = line.replace("s1(", "").replace(");", "").replace(" ", "").split(",")[:-1]
            if debug:
                loop = loop +  get_tab(line) + "// " + ",".join(vec_new) + "\n"
            
            st2 = st
            for i in range(0, len(vec)):
                st2 = re.sub('\\b'+vec[i]+'\\b', vec_new[i], st2)  

            if debug:
                loop = loop +  get_tab(line) + "// " + st + ";\n"
                
            loop = loop + get_tab(line) + st2 + ";\n"
        else:
            loop = loop + line.replace('int ', '') + "\n"
            
    # to be improved - too many file operations; rework the correction step
    '''

    loop = tiling_v3.postprocess_loop(output.split('\n'))
    lines = loop.split('\n')
    loop = imperf_tile.RestoreStatements(lines, "tmp/petit_loop_"+str(j)+".t", dane,  0, 0, permutate_list)


    text_file = open("loop_scc.c", "w")
    text_file.write(loop)
    text_file.close()
    
    loop = ""
    for line in correct.Korekta("loop_scc.c"):
        loop = loop + line + "\n"
    
    text_file = open("loop_scc.c", "w")
    text_file.write(loop)
    text_file.close()
    
    # append the loop index variable vectors to the statements
    # obtain the indentation
    # replace the statements
    # format as C
    
    
    
    if(1==0):
        
        linestring = open("tmp/C_loop_"+str(j)+".c", 'r').read()
        lines = linestring.split('\n')   
        
        lines = linestring.split('\n')
        stuff = []
        
        for line in lines:
            if 'for' in line:
                stuff.append(functions.Loop(line));
        
        v = ""
        for s in stuff:
            v = v + s['var'] + ","
        v = v + "v"
        
        text_file = open("names_info.txt", "w")
        text_file.write(v)
        text_file.close()
        
        text_file = open("pseudocode.txt", "w")
        text_file.write(loop)
        text_file.close()
        
        
        
        gen.ParsePrint("tmp/petit_loop_"+str(j)+".t")
     
        shutil.copyfile("out_pseudocode.txt", "tmp/C_loop_scc"+str(j)+".c")
Example #22
    def testDigraph(self):
        gr = pygraph.digraph()
        gr.add_nodes(xrange(25))
        edges = [
            (13, 22),
            (18, 0),
            (17, 8),
            (15, 13),
            (13, 19),
            (21, 2),
            (3, 11),
            (11, 23),
            (4, 22),
            (4, 2),
            (3, 22),
            (23, 7),
            (12, 2),
            (6, 7),
            (7, 15),
            (0, 15),
            (20, 21),
            (22, 16),
            (19, 14),
            (22, 14),
            (7, 19),
            (0, 11),
            (9, 11),
            (12, 17),
            (15, 4),
            (6, 15),
            (24, 10),
            (4, 10),
            (11, 4),
            (8, 2),
            (1, 23),
            (9, 22),
            (10, 13),
            (5, 24),
            (4, 16),
            (23, 5),
            (6, 23),
            (11, 15),
            (22, 11),
            (6, 12),
            (15, 14),
            (12, 22),
            (17, 4),
            (17, 9),
            (9, 13),
            (8, 3),
            (21, 15),
            (24, 7),
            (1, 12),
            (4, 1),
            (11, 22),
            (0, 13),
            (18, 7),
            (24, 3),
            (21, 10),
            (6, 13),
            (8, 22),
            (13, 9),
            (3, 4),
            (12, 8),
        ]
        for each in edges:
            gr.add_edge(each[0], each[1])

        ma = mutual_accessibility(gr)
        for n in gr:
            for m in gr:
                if m in ma[n]:
                    assert m in depth_first_search(gr, n)[0]
                    assert n in depth_first_search(gr, m)[0]
                else:
                    assert m not in depth_first_search(gr, n)[0] or n not in depth_first_search(gr, m)[0]
Example #23
def convert_history(files, tasks, releases, fileobjects):
    """Converts the Synergy history between two releases to a Git compatible one."""

    #print "Look for cycles in the File History graph"
    while find_cycle(files):
        cycle = find_cycle(files)
        #print "A cycle was found!"
        #print "Cycle:", cycle

        # Find the newest file
        newest = max(cycle, key=lambda x: [fileobject.get_integrate_time() for fileobject in fileobjects if fileobject.get_objectname() == x][0])
        #print "Object %s is the newest in the cycle: it should not have successors!" % newest

        # Remove the outgoing link from the newest file
        for successor in files.neighbors(newest):
            if successor in cycle:
                files.del_edge((newest, successor))
                #print "Removed the %s -> %s edge" % (newest, successor)

    [files.del_edge(edge) for edge in transitive_edges(files) if edge in files.edges()]
    #print "Removed transitive edges from the File History graph."

    sanitized_tasks = _sanitize_tasks(tasks)
    #print "Tasks hypergraph sanitized."

    commits = create_commits_graph(files, sanitized_tasks, releases)

    #print "First commits graph created."

    # Cycles detection
    while find_cycle(commits):
        cycle = find_cycle(commits)
        #print "Cycles found!"
        #print "Cycle:", cycle

        # Generate the reduced file history graph
        reduced_graph = _create_reduced_graph(files, tasks, cycle)
        #print "Reduced graph:", reduced_graph

        # Find the longest cycle in the reduced graph
        longest_cycle = max(mutual_accessibility(reduced_graph).values(), key=len)

        candidate_cuts = []

        for edge in zip(longest_cycle, longest_cycle[1:] + longest_cycle[0:1]):
            node1, node2 = edge
            # Find to which task the edge belongs to
            if tasks.links(node1) == tasks.links(node2):
                task = tasks.links(node1)[0]
                # Find which cuts are compatible and add them to the candidates list
                candidate_cuts.extend( [cut for cut in _find_cuts(tasks.links(task))
                        if (node1 in cut and node2 not in cut)
                        or (node2 in cut and node1 not in cut)])

        #print "Candidate_cuts:", candidate_cuts

        for (counter, cut) in enumerate(candidate_cuts):
            #print "Cut:", cut

            # Apply the cut
            task = tasks.links(cut[0])[0] # All the nodes in the cut belong to the same task and there are no overlapping tasks

            task_name = ""
            for i in count(1):
                task_name = task + "_" + str(i)
                if task_name not in tasks.edges():
                    #print "Adding task", task_name
                    tasks.add_edge(task_name)
                    break

            for node in cut:
                #print "Unlinking file %s from task %s" % (node, task)
                tasks.unlink(node, task)
                tasks.graph.del_edge(((node,'n'), (task,'h'))) # An ugly hack to work around a bug in pygraph
                #print "Linking file %s to task %s" % (node, task_name)
                tasks.link(node, task_name)

            # If no more cycles are found in the updated reduced graph then break
            commits2 = create_commits_graph(files, tasks, releases)

            cycle2 = find_cycle(commits2)
            if set(cycle) & set(cycle2) == set(cycle):
                # Undo the changes!
                #print "The cycle was not removed. Undoing changes..."
                #print "\tDeleting task", task_name
                tasks.del_edge(task_name)

                for node in cut:
                    #print "\tLinking file %s to task %s" % (node, task)
                    tasks.link(node, task)
                #print "Done."
            else:
                #print "Cut found."
                commits = create_commits_graph(files, tasks, releases)
                break
        #else:
            # Error! This should not happen
            #print "Cut not found."

    #else:
        #print "No cycles found"

    return commits
Example #24
def convert_history(files, tasks, releases, objects):
    """Converts the Synergy history between two releases to a Git compatible one."""

    log.basicConfig(filename='convert_history.log', level=log.DEBUG)

    file_objects = [ccm_cache.get_object(o) for o in objects]
    log.info("Looking for cycles in the File History graph")
    while find_cycle(files):
        cycle = find_cycle(files)
        log.info("\tA cycle was found!")
        log.info("\tCycle: %s" % ", ".join(cycle))

        # Find the newest file
        newest = max(cycle,
                     key=lambda x: [
                         fileobject.get_integrate_time()
                         for fileobject in file_objects
                         if fileobject.get_objectname() == x
                     ][0])
        log.info(
            "\tObject %s is the newest in the cycle: it should not have successors!"
            % newest)

        # Remove the outgoing link from the newest file
        for successor in files.neighbors(newest):
            if successor in cycle:
                files.del_edge((newest, successor))
                log.info("\tRemoved the %s -> %s edge" % (newest, successor))

    log.info("Remove transitive edges in the File History graph")
    for edge in transitive_edges(files):
        if edge in files.edges():
            files.del_edge(edge)
        else:
            log.warning("Weird, transitive edge not found!")

    log.info("Sanitize tasks")
    sanitized_tasks = _sanitize_tasks(tasks)

    log.info("Create commits graph")
    commits = create_commits_graph(files, sanitized_tasks, releases)

    # Uncomment for debug... (remember import)
    #hack = {'previous': releases.edges()[0]}
    #htg.commit_graph_to_image(commits, hack, tasks, name='Pre-'+releases.edges()[1])

    log.info("Looking for cycles in the Commits graph")
    while find_cycle(commits):
        log.info("Finding strictly connected components")
        cycle = max(mutual_accessibility(commits).values(), key=len)

        #cycle = find_cycle(commits)

        log.info("\tA cycle was found!")
        log.info("\tCycle: %s" % ", ".join(cycle))

        log.info("Find the nodes in the cycle going from one task to another")
        culpript_edges = []
        for task in cycle:
            for obj in tasks.links(task):
                for neighbor in files.neighbors(obj):
                    if neighbor not in tasks.links(task) and tasks.links(
                            neighbor)[0] in cycle:
                        culpript_edges.append((obj, neighbor))
                        log.info("\tAdding culpript edge (%s, %s)" %
                                 (obj, neighbor))

        log.info("Connect the nodes found")
        culpript_nodes = set()
        for head, tail in culpript_edges:
            culpript_nodes.add(head)
            culpript_nodes.add(tail)
        for head, tail in permutations(culpript_nodes, 2):
            if tasks.links(head)[0] == tasks.links(tail)[0] and (
                    head, tail) not in culpript_edges:
                log.info("\tAdding edge (%s, %s)" % (head, tail))
                culpript_edges.append((head, tail))

        reduced_digraph = digraph()
        reduced_digraph.add_nodes(culpript_nodes)
        [reduced_digraph.add_edge(edge) for edge in culpript_edges]

        shortest_cycle = max(mutual_accessibility(reduced_digraph).values(),
                             key=len)
        log.info("Cycle in objects: %s" % shortest_cycle)

        candidate_cuts = []

        # Find the tasks
        t = set()
        for node in shortest_cycle:
            t.add(tasks.links(node)[0])
        log.info("T: %s" % str(t))

        for i in t:
            log.info("Cuts for task %s" % i)
            # Find the objects in the cycle belonging to task i
            obj_in_task = set(tasks.links(i)) & set(shortest_cycle)
            log.info("Objects in cycle and task: %s" % obj_in_task)
            if len(obj_in_task) < 15:
                if len(obj_in_task) > 1:
                    for j in range(1, len(obj_in_task) / 2 + 1):
                        candidate_cuts.extend(
                            [k for k in combinations(obj_in_task, j)])
            else:
                log.info("Cycle too long...")
                pass
        if len(candidate_cuts) < 50:
            log.info("Candidate_cuts: %s" % str(candidate_cuts))

        # Find the cut to break the cycle
        cut = _find_cut(candidate_cuts, cycle, tasks, files, releases)
        if not cut:
            log.info("Shortest cycle cut didn't work, next option...")
            # Make a qualified guess of a cut with the shortest walk of files in the tasks
            walk, node = _find_shortest_incident_or_neighbor_walk(
                shortest_cycle, cycle, files, tasks)
            log.info("Shortest incident or neighbor walk from {0}: {1}".format(
                node, walk))
            new_cut = walk
            new_cut.append(node)
            candidate_cuts.insert(0, tuple(new_cut))

            log.info("Candidate cuts: %s", candidate_cuts)
            cut = _find_cut(candidate_cuts, cycle, tasks, files, releases)

            if not cut:
                # Error! This should not happen
                log.info("Cut not found.")
                log.shutdown()
                raise Exception("Cut not found")

        tasks, task, task_name = _apply_cut(cut, tasks)
        commits = create_commits_graph(files, tasks, releases)

    else:
        log.info("No cycles found")

    log.shutdown()
    return commits