def sharma1998ext(precedents):
    """
    Generates an AOA graph (PERT) from a successors table
    Algorithm: sharma1998, extended

    returns: pert.Pert() graph data structure
    """
    pert_graph = pert.Pert()
    successors = graph.reversed_prelation_table(precedents)

    # Close the graph (not in sharma1998)
    origin = pert_graph.nextNodeNumber()
    pert_graph.add_node(origin)
    dest = pert_graph.nextNodeNumber()
    pert_graph.add_node(dest)

    begin_act = graph.begining_activities(successors)
    end_act = graph.ending_activities(successors)
    begin_end_act = begin_act.intersection(end_act)

    # -Creates a common node for starting activities
    for act in begin_act - begin_end_act:
        pert_graph.addActivity(act, origin)

    # -Creates a common node for ending activities
    for act in end_act - begin_end_act:
        pert_graph.addActivity(act, origin=None, destination=dest)

    # -Deals with begin-end activities
    if begin_end_act:
        act = begin_end_act.pop()
        pert_graph.addActivity(act, origin, dest)
        for act in begin_end_act:
            o, d = pert_graph.addActivity(act, origin)
            pert_graph.addActivity("seDummy", d, dest, dummy=True)

    # Sharma1998 algorithm
    for act in successors:
        #print "Processing", act, pert_graph
        #window.images.append( graph.pert2image(pert_graph) )
        if not pert_graph.activityArc(act):
            pert_graph.addActivity(act)
            #window.images.append( graph.pert2image(pert_graph) )
        a_origin, a_dest = pert_graph.activityArc(act)
        #print '(', a_origin, a_dest, ')'
        for pre in precedents[act]:
            #print pert_graph.successors
            #print pre, pre in pert_graph.inActivitiesR(graph.reversed_prelation_table(pert_graph.successors), a_origin)
            if pre not in pert_graph.inActivitiesR(a_origin):
                if not pert_graph.activityArc(pre):
                    pert_graph.addActivity(pre)
                    #window.images.append( graph.pert2image(pert_graph) )
                pert_graph.makePrelation(pre, act)
                a_origin, a_dest = pert_graph.activityArc(act)

    return pert_graph.renumerar()
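# Usage sketch (illustrative): build the AOA graph for a tiny made-up project.
# Assumes this module's own imports (graph, pert) are available; the counters
# printed below are the ones the test driver uses on the returned graph.
example_precedents = {
    'A': [],
    'B': [],
    'C': ['A'],
    'D': ['A', 'B'],
    'E': ['C', 'D'],
}
example_graph = sharma1998ext(example_precedents)
print example_graph.number_of_nodes(), example_graph.number_of_arcs()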
def cohen_sadeh(prelations):
    """
    Build a PERT graph using the Cohen-Sadeh algorithm
    Note: the original algorithm does not consider parallel activities (creates a multigraph)

    prelations = {'activity': ['predecessor1', 'predecessor2', ...]}

    return graph pert.Pert()
    """
    # Adaptation to avoid multiple end nodes
    successors = graph.reversed_prelation_table(prelations)
    end_act = graph.ending_activities(successors)

    # Step 1. Construct work table with Immediate Predecessors
    Columns = namedlist.namedlist('Columns', ['pre', 'blocked', 'dummy', 'suc', 'start_node', 'end_node'])
    #   [0 Predecessors, 1 Blocked, 2 Dummy, 3 Successors, 4 Start node, 5 End node]
    #   Blocked = (False or Activity with same precedents)
    work_table = {}
    for act, predecessors in prelations.items():
        work_table[act] = Columns(set(predecessors), False, False, None, None, None)

    # print "\n--- Step 1 ---"
    # __print_work_table(work_table)

    # Step 2. Identify Identical Precedence Constraints of Different Activities
    visited_pred = {}
    for act, columns in work_table.items():
        pred = frozenset(columns.pre)
        if pred not in visited_pred:
            visited_pred[pred] = act
        else:
            columns.blocked = visited_pred[pred]

    # print "\n--- Step 2 ---"
    # __print_work_table(work_table)

    # Step 3. Identify Necessary Dummy Arcs
    dups = set()
    visited_act = set()
    for columns in work_table.values():
        if not columns.blocked:
            for act in columns.pre:
                if act in visited_act:
                    dups.add(act)
                visited_act.add(act)

    # print "\n--- Step 3.1 ---"
    # print dups

    # Steps 3.2, 3.3 and 4. Create rows and information for Dummy Arcs
    dummy_counter = collections.Counter()
    for _, columns in work_table.items():
        # Avoid blocked
        if not columns.blocked:
            predecessors = columns.pre
            if len(predecessors) > 1:
                for pre in list(predecessors):
                    if pre in dups:
                        predecessors.remove(pre)
                        dummy_name = pre + '-d' + str(dummy_counter[pre])
                        dummy_counter[pre] += 1
                        predecessors.add(dummy_name)
                        work_table[dummy_name] = Columns(set([pre]), False, True, None, None, None)

    # print "\n--- Step 4 ---"
    # __print_work_table(work_table)

    # Step 5. Creating nodes
    node = 0  # instead of 0, can start at 100 to avoid confusion with activities named with numbers when debugging
    for act, columns in work_table.items():
        if not columns.dummy and not columns.blocked:
            columns.start_node = node
            node += 1

    # print "\n--- Step 5a ---"
    # __print_work_table(work_table)

    for act, columns in work_table.items():
        if not columns.dummy and columns.blocked:
            columns.start_node = work_table[columns.blocked].start_node

    # print "\n--- Step 5b ---"
    # __print_work_table(work_table)

    # Step 6. Associate activities with their end nodes
    # (a) find one non-dummy successor for each activity
    for act, columns in work_table.items():
        for suc, suc_columns in work_table.items():
            if not suc_columns.dummy and not suc_columns.blocked:
                if act in suc_columns.pre:
                    columns.suc = suc
                    break

    # print "\n--- Step 6a ---"
    # __print_work_table(work_table)

    # (b) find end nodes
    graph_end_node = node  # Reserve one node for graph end
    node += 1
    for act, columns in work_table.items():
        suc = columns.suc
        if suc:
            columns.end_node = work_table[suc].start_node
        else:
            # Create needed end nodes, avoiding multiple graph end nodes (adaptation)
            if act in end_act:
                columns.end_node = graph_end_node
            else:
                columns.end_node = node
                node += 1

    # print "\n--- Step 6b ---"
    # __print_work_table(work_table)

    # Step 7. Associate dummy arcs with start nodes
    for act, columns in work_table.items():
        if columns.dummy:
            pred = next(iter(columns.pre))
            start_node = work_table[pred].end_node
            columns.start_node = start_node

    # print "\n--- Step 7 ---"
    # __print_work_table(work_table)

    # Step 8. Generate the graph
    pm_graph = pert.PertMultigraph()
    for act, columns in work_table.items():
        _, _, dummy, _, start, end = columns
        pm_graph.add_arc((start, end), (act, dummy))

    p_graph = pm_graph.to_directed_graph()
    return p_graph.renumerar()
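# Reference sketch (illustrative) of the work-table row type used by these builders.
# It assumes `namedlist` is the third-party namedlist package already imported by this
# module, which builds mutable namedtuple-like records whose fields can be reassigned.
ColumnsExample = namedlist.namedlist('ColumnsExample', ['pre', 'blocked', 'dummy', 'suc', 'start_node', 'end_node'])
example_row = ColumnsExample(set(['A', 'B']), False, False, None, None, None)
example_row.start_node = 0          # mutable in place, unlike collections.namedtuple
print example_row.pre, example_row.blocked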
        return data[0]
    except IOError:
        print 'Error reading file:', filename
        sys.exit(1)


if len(sys.argv) == 3:
    repeticiones = int(sys.argv[2])

filename = sys.argv[1]
data = openProject(filename)

successors = {}
for i in data:
    successors[i[1]] = i[2]

prelaciones1 = graph.reversed_prelation_table(successors)

prelaciones = {
    'B': [],
    'A': [],
    'D': ['B'],
    'C': [],
    'F': ['C'],
    'E': ['D'],
    'H': ['B'],
    'G': ['F'],
    'J': ['F'],
    'I': ['A'],
    'L': ['C', 'E'],
    'K': ['I'],
    'N': ['B'],
    'M': ['H'],
def sysloOptimal(prelations):
    """
    Build a PERT graph using the Syslo algorithm

    return p_graph pert.PertMultigraph()
    """
    # Adaptation to avoid multiple end nodes
    successors = graph.reversed_prelation_table(prelations)
    end_act = graph.ending_activities(successors)
    #Kahn1962.check_cycles(successors)
    prela = successors.copy()

    Columns = namedlist.namedlist('Columns', ['pre', 'blocked', 'dummy', 'suc', 'start_node', 'end_node'])
    #   [0 Predecessors, 1 Blocked, 2 Dummy, 3 Successors, 4 Start node, 5 End node]
    #   Blocked = (False or Activity with same precedents)

    # Step 0.
    grafo = {}
    alt = graph.successors2precedents(successors)
    grafo = graph.successors2precedents(syslo_table.syslo(prela, grafo, alt))

    # Step 1. Save the new prelation table in a work table
    work_table = {}
    for act, pre in grafo.items():
        if not act in prelations:
            work_table[act] = Columns(pre, False, True, None, None, None)
        else:
            work_table[act] = Columns(pre, False, False, None, None, None)

    # Step 2. Identify Dummy Activities and Identical Precedence Constraints of Different Activities
    visited_pred = {}
    for act, columns in work_table.items():
        pred = frozenset(columns.pre)
        if pred not in visited_pred:
            visited_pred[pred] = act
        else:
            columns.blocked = visited_pred[pred]

    # Step 3. Creating nodes
    # (a) find start nodes
    node = 0  # instead of 0, can start at 100 to avoid confusion with activities named with numbers when debugging
    for act, columns in work_table.items():
        if not columns.blocked:
            columns.start_node = node
            node += 1
        if columns.blocked:
            columns.start_node = work_table[columns.blocked].start_node

        # Associate activities with their end nodes
        for suc, suc_columns in work_table.items():
            if not suc_columns.blocked:
                if act in suc_columns.pre:
                    columns.suc = suc
                    break

    # (b) find end nodes
    graph_end_node = node  # Reserve one node for graph end
    node += 1
    for act, columns in work_table.items():
        suc = columns.suc
        if suc:
            columns.end_node = work_table[suc].start_node
        else:
            # Create needed end nodes, avoiding multiple graph end nodes (adaptation)
            if act in end_act:
                columns.end_node = graph_end_node
            else:
                columns.end_node = node
                node += 1

    # Step 4. Remove redundancy of dummy activities
    vis = []
    for act, columns in work_table.items():
        if columns.dummy == False:
            for q in work_table[act].pre:
                for w in work_table[act].pre:
                    if q in work_table and w in work_table:
                        if q != w and work_table[q].pre == work_table[w].pre and work_table[q].dummy == True and work_table[w].dummy == True:
                            if w not in vis:
                                del work_table[w]
                                vis.append(q)

    # Step 5. Generate the graph
    pm_graph = pert.PertMultigraph()
    for act, columns in work_table.items():
        _, _, dummy, _, start, end = columns
        pm_graph.add_arc((start, end), (act, dummy))

    p_graph = pm_graph.to_directed_graph()
    return p_graph
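# Usage sketch (illustrative): project files provide a successors table, so the test
# scripts invert it with graph.reversed_prelation_table() before calling a builder.
# The sample data is made up; graph is assumed imported as in this module.
example_successors = {
    'A': ['C'],
    'B': ['C'],
    'C': [],
}
example_prelations = graph.reversed_prelation_table(example_successors)
example_p_graph = sysloOptimal(example_prelations)
print example_p_graph.numArcsReales(), example_p_graph.numArcsFicticios()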
def sysloPolynomial(prelations):
    # Adaptation to avoid multiple end nodes
    successors = graph.reversed_prelation_table(prelations)
    end_act = graph.ending_activities(successors)

    # Step 0. Construct work table with Immediate Predecessors
    Columns = namedlist.namedlist('Columns', ['pre', 'blocked', 'dummy', 'suc', 'start_node', 'end_node'])
    #   [0 Predecessors, 1 Blocked, 2 Dummy, 3 Successors, 4 Start node, 5 End node]
    #   Blocked = (False or Activity with same precedents)

    # Step 1. Create the improper covers
    work_table_pol = makeCover(prelations, successors)

    # Step 2. Syslo Polynomial algorithm
    final = successors.copy()
    visited = []
    for act, pred in prelations.items():
        for v in pred:
            for u in pred:
                if u != v and successors[v] != successors[u] and act not in visited:
                    # Find activity in the improper cover table
                    for key, value in work_table_pol.items():
                        if act in value.w:
                            w = value.w

                    # Find each row that belongs to the predecessors of the activity
                    for key, value in work_table_pol.items():
                        if set(value.u).issubset(prelations[act]) and value.u:
                            vertex = set(value.u).pop()

                            # Compare successors of a row with the improper cover of the activity
                            if successors[vertex] != w:
                                for q in value.u:
                                    if q in final:
                                        final[q] = list((set(final[q]) - set(w) | set([str(vertex) + separator + str(act)])) - set([act]))
                                    else:
                                        final[q] = list(set(successors[q]) - set(w) | set([str(vertex) + separator + str(act)]))

                                final[str(vertex) + separator + str(act)] = [act]

                                for l in w:
                                    visited.append(l)

    final = graph.successors2precedents(final)

    work_table = {}
    for act, pred in final.items():
        work_table[act] = Columns(pred, False, False, None, None, None)
        if act not in prelations:
            work_table[act].dummy = True

    # Step 3. Identify Dummy Activities and Identical Precedence Constraints of Different Activities
    visited_pred = {}
    for act, columns in work_table.items():
        pred = frozenset(columns.pre)
        if pred not in visited_pred:
            visited_pred[pred] = act
        else:
            columns.blocked = visited_pred[pred]

    # Step 4. Creating nodes
    # (a) find start nodes
    node = 0  # instead of 0, can start at 100 to avoid confusion with activities named with numbers when debugging
    for act, columns in work_table.items():
        if not columns.blocked:
            columns.start_node = node
            node += 1
        if columns.blocked:
            columns.start_node = work_table[columns.blocked].start_node

        # Associate activities with their end nodes
        for suc, suc_columns in work_table.items():
            if not suc_columns.blocked:
                if act in suc_columns.pre:
                    columns.suc = suc
                    break

    # (b) find end nodes
    graph_end_node = node  # Reserve one node for graph end
    node += 1

    pm_graph = pert.PertMultigraph()
    for act, columns in work_table.items():
        suc = columns.suc
        if suc:
            columns.end_node = work_table[suc].start_node
        else:
            # Create needed end nodes, avoiding multiple graph end nodes (adaptation)
            if act in end_act:
                columns.end_node = graph_end_node
            else:
                columns.end_node = node
                node += 1

        # Generate the graph
        _, _, dummy, _, start, end = columns
        pm_graph.add_arc((start, end), (act, dummy))

    p_graph = pm_graph.to_directed_graph()
    return p_graph
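# Illustrative sketch (not the repository's implementation) of the transformation that
# graph.successors2precedents() performs above: flipping a successors table into a table
# of immediate predecessors.
def successors_to_precedents_sketch(successors):
    precedents = dict((act, []) for act in successors)
    for act, succs in successors.items():
        for suc in succs:
            precedents.setdefault(suc, []).append(act)
    return precedents

print successors_to_precedents_sketch({'A': ['C'], 'B': ['C'], 'C': []})
# -> {'A': [], 'B': [], 'C': ['A', 'B']} (dict ordering may vary)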
    except IOError:
        print 'Error reading file:', filename
        sys.exit(1)


if len(sys.argv) == 3:
    repeticiones = int(sys.argv[2])

filename = sys.argv[1]
data = openProject(filename)

successors = {}
for i in data:
    successors[i[1]] = i[2]

prelaciones1 = graph.reversed_prelation_table(successors)

prelaciones = {
    'B': [],
    'A': [],
    'D': ['B'],
    'C': [],
    'F': ['C'],
    'E': ['D'],
    'H': ['B'],
    'G': ['F'],
    'J': ['F'],
    'I': ['A'],
    'L': ['C', 'E'],
    'K': ['I'],
    'N': ['B'],
    'M': ['H'],
def gento_municio(predecessors):
    """
    Creates a PERT graph using the algorithm defined in:
    Gento-Municio, Angel M. “Un Algoritmo Para La Realización de Grafos Con Las
    Actividades En Los Arcos, Grafos PERT.” Cuadernos Del CIMBAGE, no. 7 (2004): 103.
    """
    nodes = NodeList(predecessors.keys())
    successors = graph.reversed_prelation_table(predecessors)

    # Generate precedences/successors matrix
    matrix = scipy.zeros([nodes.num_real_activities, nodes.num_real_activities], dtype=int)
    for activity, successor in successors.items():
        for suc in successor:
            matrix[nodes.activity_names.index(activity)][nodes.activity_names.index(suc)] = 1
    # print "MATRIX filled: \n", matrix

    # sum each column
    sum_predecessors = scipy.sum(matrix, axis=0)
    # print "SUM_PREDECESSORS: ", sum_predecessors

    # sum each row
    sum_successors = scipy.sum(matrix, axis=1)
    # print "SUM_SUCCESSORS: ", sum_successors

    # Step 1. Search initial activities (have no predecessors) [3.1]
    beginning, = numpy.nonzero(sum_predecessors == 0)
    # print "Beginning: ", beginning

    # add begin node to activities that begin at initial node
    begin_node = nodes.next_node()
    for node_activity in beginning:
        nodes[node_activity][0] = begin_node

    # Step 2. Search ending activities (have no successors) [3.2]
    ending, = numpy.nonzero(sum_successors == 0)
    # print "Ending: ", ending

    # add end node to activities that end in final node
    # note: this step may be replaced by handling them in steps 3 and 4 as stI and stII
    end_node = nodes.next_node()
    for node_activity in ending:
        nodes[node_activity][1] = end_node
    # print nodes

    # Step 3. Search standard type I (activities with unique successors) [3.3]
    act_one_predeccessor, = numpy.nonzero(sum_predecessors == 1)
    stI = collections.defaultdict(list)
    for i in act_one_predeccessor:
        pred = numpy.nonzero((matrix[:, i]))[0][0]
        if (sum_successors[pred] == 1  # this condition is redundant but faster than the following check
                or sum_successors[pred] == scipy.sum(matrix[:, numpy.nonzero(matrix[pred])])):
            stI[pred].append(i)
    # print "stI: ", stI

    # Add the same end node of activities to the begin node of their successor activities
    for node_activity in stI:
        stI_node = nodes.next_node()
        nodes[node_activity][1] = stI_node
        for successor in stI[node_activity]:
            nodes[successor][0] = stI_node
    # print nodes

    # Step 4. Search standard II (Full) and standard II (Incomplete) [3.4]
    # print "--- Step 4 ---"
    # dictionary with key: equal successors; value: mother activities
    stII = collections.defaultdict(list)
    for act in range(nodes.num_real_activities):
        stII[frozenset(matrix[act].nonzero()[0])].append(act)

    # remove ending activities and those included in type I
    del stII[frozenset([])]
    for pred, succs in stI.items():
        del stII[frozenset(succs)]

    # assigns nodes to type II as indicated in figure 8
    mark_complete = []
    for succs, preds in stII.items():
        u = len(preds)
        # if NP[succs] != u (complete)
        # print preds, '->', succs,
        if not [i for i in succs if sum_predecessors[i] != u]:
            # print 'complete'
            mark_complete.append(succs)
            node = nodes.next_node()
            for act in preds:
                nodes[act][1] = node
            for act in succs:
                nodes[act][0] = node
        else:  # (incomplete)
            # print 'incomplete'
            node = nodes.next_node()
            for act in preds:
                nodes[act][1] = node
    # print nodes

    # Step 5. Search for matching successors [3.5]
    # print "--- Step 5 ---"
    # remove type II complete so that stII becomes MASC
    for succs in mark_complete:
        del stII[succs]
    masc = stII
    # print "MASC"
    # for succs, preds in stII.items():
    #     print preds, succs

    npc = scipy.zeros([nodes.num_real_activities], dtype=int)
    for succs, preds in masc.items():
        num_preds = len(preds)
        for succ in succs:
            npc[succ] += num_preds
    # print npc

    # Step 6. Identifying start nodes on matching successors
    # print "--- Step 6 ---"
    act_no_initial = [i for i in range(nodes.num_real_activities) if nodes[i][0] == None]
    # print act_no_initial, "<- No initial node"
    num_no_initial = len(act_no_initial)

    mra = scipy.zeros([num_no_initial, num_no_initial], dtype=int)
    for succs, preds in masc.items():
        num_preds = len(preds)
        for act_i, act_j in itertools.combinations(succs, 2):
            mra[act_no_initial.index(act_i), act_no_initial.index(act_j)] += num_preds
            mra[act_no_initial.index(act_j), act_no_initial.index(act_i)] += num_preds  # Symmetry, any succ order
    # print 'MRA'
    # print mra

    # check matching successors and assign them initial nodes
    for i in range(num_no_initial):
        for j in range(i + 1, num_no_initial):
            if mra[i, j] == npc[act_no_initial[i]] and mra[i, j] == npc[act_no_initial[j]]:
                # print 'coincidencia', i, j, "(", act_no_initial[i], act_no_initial[j], ")"
                if nodes[act_no_initial[i]][0] != None:
                    node = nodes[act_no_initial[i]][0]
                else:
                    node = nodes.next_node()
                    nodes[act_no_initial[i]][0] = node
                nodes[act_no_initial[j]][0] = node

    # assign initial node to the remaining activities (they must be alone; interpreted, not clear in the paper)
    for node in nodes:
        if node[0] == None:
            node[0] = nodes.next_node()
    # print nodes

    # Step 7. String search
    # create MNS (to avoid counting matching successors twice)
    mns = {}
    unconnected = set()  # all nodes in MNS
    for succs, preds in masc.items():
        succ_nodes = set([nodes[succ][0] for succ in succs])
        mns[nodes[preds[0]][1]] = succ_nodes  # as all preds have the same successors they will be using just one node
        unconnected.update(succ_nodes)
    # print 'MNS'
    # for pred, succs in mns.items():
    #     print pred, '-', succs

    # create MRN
    unconnected = list(unconnected)
    num_unconnected = len(unconnected)
    # print unconnected, '<-Unconnected'
    appear = scipy.zeros([num_unconnected], dtype=int)
    mrn = scipy.zeros([num_unconnected, num_unconnected], dtype=int)
    for pred, u_nodes in mns.items():
        for node in u_nodes:
            appear[unconnected.index(node)] += 1
        for node_a, node_b in itertools.combinations(u_nodes, 2):
            mrn[unconnected.index(node_a), unconnected.index(node_b)] += 1
            mrn[unconnected.index(node_b), unconnected.index(node_a)] += 1
    # print 'MRN'
    # print mrn
    # print 'Appear'
    # print appear

    # create MC
    mc = []
    for i in range(num_unconnected):
        mc.append([j for j in range(num_unconnected) if mrn[i, j] == appear[i]])
    # print 'MC'
    # for i in range(num_unconnected):
    #     print i, '-', mc[i]

    # use strings to connect nodes
    for i in range(num_unconnected):
        following_nodes = sorted(mc[i], key=lambda x: len(mc[x]))
        while following_nodes:
            # print following_nodes
            follower = following_nodes.pop()
            # print 'extracted:', follower
            # Create dummy i -> follower (unconnected to real)
            nodes.append_dummy(unconnected[i], unconnected[follower])
            for fol_follower in mc[follower]:
                # print 'remove:', fol_follower
                try:
                    following_nodes.remove(fol_follower)
                except ValueError:
                    pass  # if it has already been connected, it will not be in the list now
    # print nodes

    # Step 8. Final nodes and dummies
    # (note: contrary to what the paper says, we have already set final nodes for all
    #  activities in step 4 as indicated in figure 8. Nevertheless, these nodes are
    #  unconnected, so we replace them here if necessary. Not assigning nodes in step 4
    #  would break step 7)
    for succs, preds in masc.items():
        # print "Studying:", preds, '->', succs
        if len(succs) == 1:
            # Case I
            # print "Case I"
            for pred in preds:
                nodes[pred][1] = nodes[next(iter(succs))][0]
        else:
            # Get follower with the lowest npc
            min_follower = None
            min_npc = None
            for succ in succs:
                if min_npc == None or min_npc > npc[succ]:
                    min_follower = succ
                    min_npc = npc[succ]
            # print min_follower, 'npc:', min_npc

            # Count the number of masc rows containing our successor activities
            count = 0
            for others in masc:
                if succs.issubset(others):
                    count += len(masc[others])

            if count >= min_npc:
                # Case II (if min_npc == 1) and Case III
                # print "Case II or III"
                for pred in preds:
                    nodes[pred][1] = nodes[min_follower][0]
            else:
                for succ in succs:
                    # note: if there are several predecessors, they have the same end node assigned in step 4
                    nodes.append_dummy(nodes[next(iter(preds))][1], nodes[succ][0])

    # Step 9. Final nodes for type II incomplete
    # (note: final nodes have already been assigned in step 8. We think section 3.9 of the paper is unnecessary)

    return nodes.to_pert_graph().renumerar()
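# Standalone sketch (illustrative, plain numpy) of the matrix bookkeeping used in
# steps 1-3 above: entry [i][j] == 1 means activity i immediately precedes activity j,
# so column sums count immediate predecessors and row sums count immediate successors.
example_activities = ['A', 'B', 'C', 'D']
example_successors = {'A': ['C'], 'B': ['C', 'D'], 'C': [], 'D': []}
example_matrix = numpy.zeros([len(example_activities), len(example_activities)], dtype=int)
for example_act, example_succs in example_successors.items():
    for example_suc in example_succs:
        example_matrix[example_activities.index(example_act)][example_activities.index(example_suc)] = 1
print numpy.nonzero(numpy.sum(example_matrix, axis=0) == 0)[0]  # no predecessors (step 1): indices of 'A', 'B'
print numpy.nonzero(numpy.sum(example_matrix, axis=1) == 0)[0]  # no successors (step 2): indices of 'C', 'D'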
def __init__(self, pert=None):
    super(Pert, self).__init__()
    self.construct = algoritmoSharma.sharma1998ext
    if pert != None:
        self.successors, self.arcs = pert
        self.predecessors = graph.reversed_prelation_table(self.successors)
def mouhoub(prelations):
    """
    Build a PERT graph using the Mouhoub algorithm

    prelations = {'activity': ['predecessor1', 'predecessor2', ...]}

    return p_graph pert.PertMultigraph()
    """
    Columns = namedlist.namedlist('Columns', ['pre', 'su', 'blocked', 'dummy', 'suc', 'start_node', 'end_node', 'aux'])
    #   [0 Predecessors, 1 Successors, 2 Blocked, 3 Dummy, 4 Blocked successor, 5 Start node, 6 End node, 7 Auxiliary]
    #   Blocked = (False or Activity with same precedents)

    # Adaptation to avoid multiple end nodes
    successors = graph.reversed_prelation_table(prelations)
    successors_copy = graph.reversed_prelation_table(prelations.copy())
    end_act = graph.ending_activities(successors)

    # Step 0. Remove Z Configuration. Update the prelation table in the complete_bipartite dictionary
    complete_bipartite = successors
    complete_bipartite.update(zConfiguration.zconf(successors))

    # STEPS TO BUILD THE PERT GRAPH

    # Step 1. Save the prelations in the work table
    complete_bipartite = graph.successors2precedents(complete_bipartite)
    work_table = {}
    for act, sucesores in complete_bipartite.items():
        work_table[act] = Columns(set(sucesores), successors[act], None, False, None, None, None, None)
        if act not in prelations:
            work_table[act].dummy = True

    # Step 2. Identify Identical Precedence Constraints of Different Activities
    visited_pred = {}
    for act, columns in work_table.items():
        pred = frozenset(columns.pre)
        if pred not in visited_pred:
            visited_pred[pred] = act
        else:
            columns.blocked = visited_pred[pred]

    # Step 3. Creating nodes
    # (a) Find start nodes
    node = 0  # instead of 0, can start at 100 to avoid confusion with activities named with numbers when debugging
    for act, columns in work_table.items():
        if not columns.blocked:
            columns.start_node = node
            node += 1
        if columns.blocked:
            columns.start_node = work_table[columns.blocked].start_node

        # Associate activities with their end nodes
        for suc, suc_columns in work_table.items():
            if not suc_columns.blocked:
                if act in suc_columns.pre:
                    columns.suc = suc
                    break

    # (b) Find end nodes
    graph_end_node = node  # Reserve one node for graph end
    node += 1
    for act, columns in work_table.items():
        suc = columns.suc
        if suc:
            columns.end_node = work_table[suc].start_node
        else:
            # Create needed end nodes, avoiding multiple graph end nodes (adaptation)
            if act in end_act:
                columns.end_node = graph_end_node
            else:
                columns.end_node = node
                node += 1

    # Step 4. MOUHOUB algorithm rules to remove extra dummy activities
    mouhoubRules.rule_1(successors_copy, work_table)
    G2 = mouhoubRules.rule_2(prelations, work_table)
    G3 = mouhoubRules.rule_3(G2, work_table)
    G4 = mouhoubRules.rule_4(G3, work_table)
    G5_6 = mouhoubRules.rule_5_6(successors_copy, work_table, G4)
    G3a = mouhoubRules.rule_3(G5_6, work_table)
    G4a = mouhoubRules.rule_4(G3a, work_table)
    G7 = mouhoubRules.rule_7(successors_copy, successors, G4a, node)

    work_table_final = {}
    for act, sucesores in G7.items():
        work_table_final[act] = Columns([], [], [], sucesores.dummy, sucesores.suc, sucesores.start_node, sucesores.end_node, [])

    # Step 5. Delete Dummy Cycles
    for act, sucesores in work_table_final.items():
        for act2, sucesores2 in work_table_final.items():
            if act != act2:
                if sucesores.end_node == sucesores2.end_node and sucesores.start_node == sucesores2.start_node:
                    if act not in successors:
                        del work_table_final[act]

    # Step 6. Generate the graph
    pm_graph = pert.PertMultigraph()
    for act, columns in work_table_final.items():
        _, _, _, dummy, _, start, end, _ = columns
        pm_graph.add_arc((start, end), (act, dummy))

    p_graph = pm_graph.to_directed_graph()
    return p_graph
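# Usage sketch (illustrative): build the graph and check it against the original
# precedence relation, as the test driver below does. Assumes the graph and validation
# modules used by that driver are importable; the sample prelations are made up.
import validation  # as used by the test driver below

example_prelations = {'A': [], 'B': [], 'C': ['A', 'B'], 'D': ['B']}
example_p_graph = mouhoub(example_prelations)
example_successors = graph.reversed_prelation_table(example_prelations)
print validation.check_validation(example_successors, example_p_graph)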
def main():
    """
    Test AOA (PERT) network generation algorithms with some given project files
    """
    # Parse arguments and options
    parser = argparse.ArgumentParser(description='Test AOA graph generation algorithms with given files')
    parser.add_argument('infiles', nargs='*',
                        help='Project files to test')
    parser.add_argument('--table-file', '-t', default='resultados.csv',
                        help='Name of file to append test results in CSV format (default: resultados.csv)')
    parser.add_argument('-r', '--repeat', default=1, type=int,
                        help='Number of repetitions (default: 1)')
    parser.add_argument('--SVG', action='store_true',
                        help='Draw the graph in a SVG file')
    parser.add_argument('--no-stop', action='store_true',
                        help='Do not stop when an algorithm fails')
    parser.add_argument('-c', '--CohenSadeh', action='store_true',
                        help='Test Cohen Sadeh algorithm')
    parser.add_argument('-s', '--Sharma', action='store_true',
                        help='Test Sharma algorithm')
    parser.add_argument('-l', '--Salas', action='store_true',
                        help='Test Lorenzo Salas algorithm')
    parser.add_argument('-g', '--GentoMunicio', action='store_true',
                        help='Test Gento Municio algorithm')
    parser.add_argument('-o', '--Optimal', action='store_true',
                        help='Test set based optimal algorithm')
    parser.add_argument('-m', '--Mouhoub', action='store_true',
                        help='Test Mouhoub algorithm')
    parser.add_argument('-p', '--Syslo_Polynomial', action='store_true',
                        help='Test Syslo Polynomial algorithm')
    parser.add_argument('-y', '--Syslo_Optimal', action='store_true',
                        help='Test Syslo Optimal algorithm')

    args = parser.parse_args()

    if args.repeat < 1:
        print 'Number of repetitions must be > 0'
        return 1

    try:
        f_csv = open(args.table_file, "a")
    except IOError:
        print 'Can not open table file (%s) to append results in CSV format' % (args.table_file, )
        return 1

    # List of name and function of each algorithm to test
    algorithms = []
    if args.CohenSadeh:
        algorithms.append(('CohenSadeh', algoritmoCohenSadeh.cohen_sadeh))
    if args.Sharma:
        algorithms.append(('Sharma', algoritmoSharma.sharma1998ext))
    if args.Optimal:
        algorithms.append(('Conjuntos', algoritmoConjuntos.algoritmoN))
    if args.GentoMunicio:
        algorithms.append(('GentoMunicio', algoritmoGentoMunicio.gento_municio))
    if args.Salas:
        algorithms.append(('Salas', algoritmoSalas.salas))
    if args.Mouhoub:
        algorithms.append(('Mouhoub', algoritmoMouhoub.mouhoub))
    if args.Syslo_Polynomial:
        algorithms.append(('Syslo Polinomico', algoritmoSysloPolynomial.sysloPolynomial))
    if args.Syslo_Optimal:
        algorithms.append(('Syslo Optimo', algoritmoSysloOptimal.sysloOptimal))

    # Perform tests on each file
    for filename in args.infiles:
        print "\nFilename: ", filename
        data = openProject(filename)
        if not data:
            print 'Can not read or understand file'
        else:
            # XXX We should stop here if the file check fails
            check_activities(data)

            # Test each algorithm
            for name, alg in algorithms:
                print name

                # Get successors from activities table
                successors = {}
                for i in data:
                    successors[i[1]] = i[2]

                # Count prelations
                list_of_predecessors = successors.values()
                num_of_predecessors = 0
                for predecessors in list_of_predecessors:
                    num_of_predecessors += len(predecessors)

                # Get predecessors from successors
                prelaciones = graph.reversed_prelation_table(successors)

                # Run algorithm
                pert_graph = None
                itime = os.times()
                for i in range(args.repeat):
                    try:
                        pert_graph = alg(prelaciones)
                    except Exception:
                        print traceback.format_exc()
                        print " --- Algorithm failed! --- "
                        if not args.no_stop:
                            return 1
                        break

                if pert_graph:
                    ftime = os.times()
                    utime = ftime[0] - itime[0]

                    # Print test results
                    print "utime %.4f" % (utime)
                    print "utime: ", utime
                    print "numero de nodos: ", pert_graph.number_of_nodes()
                    print "numero de arcos: ", pert_graph.number_of_arcs()
                    print "numero de arcos reales: ", pert_graph.numArcsReales()
                    print "numero de arcos ficticios: ", pert_graph.numArcsFicticios()
                    print "numero de predecesors/sucesores: ", num_of_predecessors
                    print "Validation: "
                    if not validation.check_validation(successors, pert_graph) and not args.no_stop:
                        return 1
                    print ""

                    # XXX Should the number of activities be included here?
                    result_line = '"' + filename + '",' + '"' + name + '",' + str(len(data)) + ',' + str(num_of_predecessors) + ',' + \
                                  str(pert_graph.number_of_nodes()) + ',' + str(pert_graph.number_of_arcs()) + ',' + \
                                  str(pert_graph.numArcsReales()) + ',' + str(pert_graph.numArcsFicticios()) + ',' + "%.4f" % (utime)
                    f_csv.write(result_line + "\n")

                if pert_graph == 1:
                    print "No hay resultados que mostrar"

                # Draw graph and save in a file (*.svg)
                if args.SVG:
                    image_text = graph.pert2image(pert_graph)
                    fsalida = open(os.path.split(filename)[1] + '_' + name + '.svg', 'w')
                    fsalida.write(image_text)
                    fsalida.close()

    f_csv.close()
    return 0