def test_model_big(self):
    """
    A very large graph that tests the CPU and memory efficiency of our cyclic checker.
    """
    p = Node(name='project')
    times = pd.read_csv(
        os.path.join(BASE_DIR, 'fixtures/timings.dsv'),
        delimiter='|',
        dtype={'PROC_ID': str})
    deps = pd.read_csv(
        os.path.join(BASE_DIR, 'fixtures/deps_big.dsv'),
        delimiter='|',
        dtype={'PARENT_ID': str, 'UPROC_ID': str})

    # Define nodes with timings
    for utiming in times.itertuples(index=False):
        p.add(Node(name=utiming.PROC_ID, duration=utiming.DURATION))

    # Add dependencies to the model
    for dep in deps.itertuples(index=False):
        while True:
            try:
                p.link(dep.PARENT_ID, dep.UPROC_ID)
                break
            except KeyError as missingnode:
                print('Missing node when adding dependency = ')
                print(missingnode)
                print(dep)
                for errordep in dep:
                    p.add(Node(errordep, duration=0))

    p.update_all()
    critical_path = p.get_critical_path()
    print(critical_path)
def calc_critical_path(self, project):
    task_nodes = {}
    project_node = Node('project')
    tasks = self.env['project.task'].search([('project_id', '=', project.id)])
    for task in tasks:
        Project.node_link(project_node, task_nodes, task)
    project_node.update_all()
    return [n.name for n in project_node.get_critical_path()]
def create_network(self, actions):
    for action in actions:
        self.nodes.append(self.p.add(Node(action.name, action.duration)))
    for action in actions:
        for pre in action.predecessors:
            self.p.link(pre, action.name)
    self.p.update_all()
def get_criticalpath_project(self, act_ids):
    p = Node('project')
    node = {}  # Node register
    for act in act_ids:
        node[act.id] = p.add(Node(act.name, duration=act.duration))
    # Link nodes
    for act in act_ids:
        for trans in act.to_ids:
            p.link(node[trans.process_from_id.id], node[trans.process_to_id.id])
    p.update_all()
    return p, node
def getPath(self):
    project = get_object_or_404(Project, pk=self.pk)
    p = Node(project.name)
    task = Task.objects.all()
    for t in task:
        ptask = p.add(Node(t.name, duration=t.duration))
        if t.predecessors:
            for pred in t.predecessors.all():
                # Link each predecessor to the task it precedes (from -> to).
                p.link(pred.name, ptask.name)
    p.update_all()
    return p.get_critical_path()
def test_nodes(self):
    # Confirm nodes work with set operations.
    nodes = set()
    n1 = Node(name=1, duration=1)
    n2 = Node(name=2, duration=1)
    n2a = Node(name=2, duration=1)
    self.assertEqual(n2, n2a)
    nodes.add(n1)
    nodes.add(n2)
    nodes.add(n2a)
    self.assertEqual(len(nodes), 2)

    parent = Node('parent')
    self.assertEqual(len(parent.nodes), 0)
    self.assertTrue(n1 not in parent.nodes)
    parent.add(n1)
    self.assertEqual(len(parent.nodes), 1)
    self.assertTrue(n1 in parent.nodes)
    self.assertTrue(n2 not in parent.nodes)
    parent.add(n2)
    self.assertEqual(len(parent.nodes), 2)
    self.assertTrue(n1 in parent.nodes)
    self.assertTrue(n2 in parent.nodes)
def CriticalPath(request, pk):
    context = {}
    project = get_object_or_404(Project, pk=pk)
    p = Node(project.name)
    task = Task.objects.all()
    for t in task:
        ptask = p.add(Node(t.name, duration=t.duration))
        if t.predecessors:
            for pred in t.predecessors.all():
                # Link each predecessor to the task it precedes (from -> to).
                p.link(pred.name, ptask.name)
    p.update_all()
    context['criticalpath'] = p.get_critical_path()
    context['duration'] = p.duration
    context['name'] = p
    template = 'manage/criticalpath.html'
    return render_to_response(template, context, context_instance=RequestContext(request))
class Project:

    def __init__(self):
        self.p = Node('project')
        self.nodes = []

    def create_network(self, actions):
        for action in actions:
            self.nodes.append(self.p.add(Node(action.name, action.duration)))
        for action in actions:
            for pre in action.predecessors:
                self.p.link(pre, action.name)
        self.p.update_all()

    def get_critical_path(self):
        return self.p.get_critical_path()

    def get_duration(self):
        return self.p.duration
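# A hedged usage sketch for the Project wrapper above. Action is a stand-in
# type (not part of the original snippet); it only needs .name, .duration,
# and .predecessors, and `from criticalpath import Node` is assumed to be in
# scope for the Project class.
from collections import namedtuple

Action = namedtuple('Action', 'name duration predecessors')
actions = [
    Action('design', 3, []),
    Action('build', 5, ['design']),
    Action('test', 2, ['build']),
]
proj = Project()
proj.create_network(actions)
print(proj.get_critical_path())  # the design -> build -> test chain
print(proj.get_duration())       # 10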
from criticalpath import Node

p = Node("project")
a = p.add(Node("A", duration=3))
c = p.add(Node("C", duration=2))
d = p.add(Node("D", duration=3))
e = p.add(Node("E", duration=1))
b = p.add(Node("B", duration=5))
h = p.add(Node("H", duration=3))
f = p.add(Node("F", duration=4))
g = p.add(Node("G", duration=1))
j = p.add(Node("J", duration=4))
i = p.add(Node("I", duration=5))
l = p.add(Node("L", duration=2))
k = p.add(Node("K", duration=5))

p.link(a, e).link(e, g).link(e, h).link(g, k)
p.link(c, b).link(b, j).link(c, h).link(h, j).link(h, i)
p.link(d, h).link(h, i).link(d, f).link(f, i).link(f, l)

# All tasks A-K must be completed within 10 weeks.
# You're allocated 4 workers per week.
# The most workers on a given task is 2 per week.
# The most workers on all tasks for a given week is 5.
# Putting an additional worker on a task (2 total) adds a $100 charge.
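# The example above builds the network but never evaluates it. A minimal
# continuation (a sketch, reusing the same `p` object defined above):
p.update_all()                  # forward/backward pass to fill es/ef/ls/lf
print(p.get_critical_path())    # longest chain of dependent tasks
print(p.duration)               # total project duration in weeks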
vectors.drop(columns=['Id', 'Blocks', 'Parent', 'Type', 'Summary', 'Status'], inplace=True)
vectors['BkdPf'] = vectors['BkdPf'].fillna("")
vectors['BkdSP'] = vectors['BkdSP'].fillna(0)
vectors['BkdSP'] = vectors['BkdSP'].astype(int)

## 2) For each feature, determine critical path nodes, links, and duration
proj_dict = {}  # Holds the critical path module's projects (features)
node_dict = {}  # Holds each feature's nodes

# First add all the nodes to each project, else key error
FeatureIds = features['FeatureId'].unique().tolist()
for FeatureId in FeatureIds:
    proj_dict[FeatureId] = Node(FeatureId)
    vector_data = vectors.loc[vectors['FeatureId'] == FeatureId]
    if len(vector_data) > 0:
        for vector in vector_data.itertuples():
            if vector.BkrId not in node_dict:
                node_dict[vector.BkrId] = proj_dict[FeatureId].add(
                    Node(vector.BkrId, duration=vector.BkrSP))
            if vector.BkdId not in node_dict:
                node_dict[vector.BkdId] = proj_dict[FeatureId].add(
                    Node(vector.BkdId, duration=vector.BkdSP))

# Then, add all the dependency links
for FeatureId in FeatureIds:
    vector_data = vectors.loc[vectors['FeatureId'] == FeatureId]
    if len(vector_data) > 0:
        for vector in vector_data.itertuples():
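                # The original snippet is truncated here. A hedged guess at the
                # remaining link step, assuming BkrId is the blocking item and
                # BkdId the blocked one:
                proj_dict[FeatureId].link(node_dict[vector.BkrId],
                                          node_dict[vector.BkdId])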
def test_graph(n):
    """Return an acyclic graph containing 2**n simple paths."""
    p = Node(name='graph')
    for i in range(n):
        from_id = 3 * i
        to_id1 = 3 * i + 1
        to_id2 = 3 * i + 2
        to_id3 = 3 * (i + 1)
        # i=0 => 0,1,2,3
        # i=1 => 3,4,5,6
        p.add(p.get_or_create_node(name=from_id, duration=1))
        p.add(p.get_or_create_node(name=to_id1, duration=1))
        p.add(p.get_or_create_node(name=to_id2, duration=1))
        p.add(p.get_or_create_node(name=to_id3, duration=1))
        p.link(from_id, to_id1)
        p.link(from_id, to_id2)
        p.link(to_id1, to_id3)
        p.link(to_id2, to_id3)
    return p
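# A hedged usage sketch for the helper above: build a graph with 2**10 simple
# paths and confirm the cycle checker and the forward/backward pass still run.
g = test_graph(10)
assert g.is_acyclic()
g.update_all()
print(g.duration)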
def handle(self, root_job_id, **options):
    root_job = Job.objects.get(id=int(root_job_id))
    samples = int(options['samples'])

    # Add all system task nodes.
    system = Node('system')
    system.add(Node(root_job.id, duration=root_job.get_run_length_estimate(samples=samples)))
    print('%s takes about %s seconds' % (root_job, root_job.get_run_length_estimate(samples=samples)))
    chain = root_job.get_chained_jobs()
    for job in chain:
        print('%s takes about %s seconds' % (job, job.get_run_length_estimate(samples=samples)))
        node = Node(job.id, duration=job.get_run_length_estimate(samples=samples))
        node.description = job.name
        system.add(node)

    # Add all links between task nodes.
    print('-' * 80)
    for job in chain:
        if not job.enabled:
            continue
        dependees = JobDependency.objects.filter(dependent=job, dependee__enabled=True)
        dependees = dependees.values_list('dependee_id', flat=True)
        print(job, dependees)
        for dependee in dependees:
            # Link dependent job to dependee.
            assert job.id != 1
            system.link(from_node=dependee, to_node=job.id)

    root_node = system.lookup_node(1)
    print('root_node:', root_node, root_node.to_nodes, root_node.incoming_nodes)
    system.add_exit()
    sys.stdout.flush()
    #return

    print('Updating values...')
    system.update_all()
    critical_path = system.get_critical_path()
    print('critical_path:', critical_path)
    system.print_times()
    print('min hours:', system.duration * (1 / 60.) * (1 / 60.))
def test_project(self):
    p = Node('project')
    a = p.add(Node('A', duration=3))
    b = p.add(Node('B', duration=3, lag=0))
    c = p.add(Node('C', duration=4, lag=0))
    d = p.add(Node('D', duration=6, lag=0))
    e = p.add(Node('E', duration=5, lag=0))
    p.link(a, b)
    p.link(a, c)
    p.link(a, d)
    p.link(b, e)
    p.link(c, e)
    p.link(d, e)
    p.update_all()

    # for node in sorted(p.nodes, key=lambda n: n.name):
    #     node.print_times()

    self.assertEqual(a.es, 0)
    self.assertEqual(a.ef, 3)
    self.assertEqual(a.ls, 0)
    self.assertEqual(a.lf, 3)

    self.assertEqual(b.es, 3)
    self.assertEqual(b.ef, 6)
    self.assertEqual(b.ls, 6)
    self.assertEqual(b.lf, 9)

    self.assertEqual(c.es, 3)
    self.assertEqual(c.ef, 7)
    self.assertEqual(c.ls, 5)
    self.assertEqual(c.lf, 9)

    self.assertEqual(d.es, 3)
    self.assertEqual(d.ef, 9)
    self.assertEqual(d.ls, 3)
    self.assertEqual(d.lf, 9)

    self.assertEqual(e.es, 9)
    self.assertEqual(e.ef, 14)
    self.assertEqual(e.ls, 9)
    self.assertEqual(e.lf, 14)

    critical_path = p.get_critical_path()
    #print critical_path
    self.assertEqual(critical_path, [a, d, e])

    self.assertEqual(p.duration, 14)
    self.assertEqual(p.es, 0)
    self.assertEqual(p.ef, 14)
    self.assertEqual(p.ls, 0)
    self.assertEqual(p.lf, 14)
from criticalpath import Node

if __name__ == '__main__':
    cantidad = int(input("Enter the number of nodes: "))
    nodos = []
    list = []
    links = []
    p = Node('project')
    for i in range(cantidad):
        # Capture the nodes.
        a = None
        b = str(i)
        duracion = int(input("Enter the duration of node " + b + ": "))
        nodos.append(a)
        nodos[i] = p.add(Node(b, duration=duracion))
    print("Enter the relations in pairs")
    print("Finish with a blank line")
    while True:
        list = []
        valor1 = input()
        if valor1 == "":
            break
        valor2 = input()
        valor1 = int(valor1)
        valor2 = int(valor2)
        list.append(nodos[valor1])
        list.append(nodos[valor2])
        list = tuple(list)
        links.append(list)
    print()
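    # The original script stops after collecting the link pairs. A hedged
    # continuation (reusing the `p` and `links` variables from above):
    for from_node, to_node in links:
        p.link(from_node, to_node)
    p.update_all()
    print("Critical path:", p.get_critical_path())
    print("Duration:", p.duration)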
from criticalpath import Node

p = Node('project')
a = p.add(Node('A', duration=14))
b = p.add(Node('B', duration=20))
c = p.add(Node('C', duration=3))
d = p.add(Node('D', duration=20))
e = p.add(Node('E', duration=8))
f = p.add(Node('F', duration=11))
g = p.add(Node('G', duration=13))
h = p.add(Node('H', duration=20))

links = [(a, d), (a, b), (b, c), (d, g), (c, g), (c, e), (d, e), (e, f), (g, h), (f, h)]
for link in links:
    p.link(*link)

p.update_all()
print(p.get_critical_path())
print(p.duration)
def test_cycles(self):
    p = Node('project')
    a = p.add(Node('A', duration=3))
    b = p.add(Node('B', duration=3, lag=0))
    c = p.add(Node('C', duration=4, lag=0))
    d = p.add(Node('D', duration=6, lag=0))
    e = p.add(Node('E', duration=5, lag=0))
    p.link(a, b)
    p.link(a, c)
    p.link(a, d)
    p.link(b, e)
    p.link(c, e)
    p.link(d, e)
    self.assertEqual(p.is_acyclic(), True)

    p = Node('project')
    a = p.add(Node('A', duration=3))
    b = p.add(Node('B', duration=3, lag=0))
    c = p.add(Node('C', duration=4, lag=0))
    d = p.add(Node('D', duration=6, lag=0))
    e = p.add(Node('E', duration=5, lag=0))
    p.link(a, b)
    p.link(a, c)
    p.link(a, d)
    p.link(b, e)
    p.link(c, e)
    p.link(d, e)
    p.link(e, a)  # links back!
    self.assertEqual(p.is_acyclic(), False)
def __init__(self):
    self.p = Node('project')
    self.nodes = []
def get_critical_path(activities, nodes, edges):
    p = Node('Activity')
    links = []
    p.add(Node(activities[0].activity_name, duration=activities[0].duration))
    for i in range(1, len(activities)):
        p.add(Node(activities[i].activity_name, duration=activities[i].duration, lag=0))
    for act in activities:
        if act.no_of_predecessors != 0:
            if len(act.predecessors_string) > 1:
                for pred in act.predecessors:
                    links.append((pred, act.activity_name))
            else:
                links.append((act.predecessors_string, act.activity_name))
    for lk in links:
        if lk[0] != ' ':
            p.link(lk[0], lk[1])
    p.update_all()
    print("Critical Path = ", p.get_critical_path())
    print("Critical Path Duration = ", p.duration)
    return p.get_critical_path(), p.duration
def create_function(combination, Res, BindAvail):
    combinations = copy.copy(combination)
    # ********** new combination processing **********
    Qout = {}
    j = 1
    # Create the ordering graph.
    executable = True
    pred2 = ''
    predecessors = []
    valeurs = []
    namep = str("projet")
    p = Node(namep)
    l = {}
    while len(combination) > 0 and not BindAvail < Res:
        Exec = []
        # Determine the set of data services which can be executed at iteration i
        # (i.e., all its required inputs are available).
        for servic in combination:
            for ser in Service_lake:
                if ser.ide == servic:
                    service = ser
            if set(service.inputs).issubset(BindAvail):
                Exec.append(service)
        if not Exec:
            executable = False
        else:
            for service in Exec:
                l[str(service.ide)] = p.add(Node(str(service.ide), duration=int(service.response_time), lag=0))
                predd = '"'
                pred2 = pred2 + service.name + ' ' + str(service.response_time) + ' '
                if len(predecessors) > 1:
                    for pr in predecessors:
                        for service1 in Service_lake:
                            if service1.name == pr:
                                p.link(l[str(service.ide)], l[str(service1.ide)])
                                predd = predd + str(service1.ide) + ','
                                pred2 = pred2 + str(service1.name) + ' '
                    Index = len(predd) - 1
                    Name_list = list(predd)
                    Name_list[Index] = '"'
                    predd = "".join(Name_list)
                elif len(predecessors) == 1:
                    for service1 in Service_lake:
                        if service1 == predecessors[0]:
                            p.link(l[str(service.ide)], l[str(service1.ide)])
                            predd = str(service1.ide)
                            pred2 = pred2 + str(service1.name) + ' '
                if predd == '"':
                    predd = ''
                valeurs.append([str(service.ide), service.name, str(service.response_time), predd, str(0)])
                pred2 = pred2 + str("\n ")
                for out in service.outputs:
                    BindAvail.add(out)
                # Check here the content of BindAvail.
                combination.remove(service.ide)
            predecessors = []
            for service in Exec:
                predecessors.append(service)
    if executable:
        # !!! Identify the critical path for a plan P, elapsed time, and critical services !!!
        p.update_all()
        s = p.get_critical_path()
        t = p.duration
        P = Plan(combinations, pred2, s, t)
        Plans.append(P)
        ii = P.ide
        file = str("Plans/" + "ExecutableOrdering" + str(ii) + ".csv")
        f = open(file, 'w')
        ligneEntete = ",".join(entetes) + "\n"
        f.write(ligneEntete)
        n = 0
        for valeur in valeurs:
            ligne = ",".join(valeur) + "\n"
            f.write(ligne)
            n += 1
        f.close()
        df = pd.read_csv("Plans/" + "ExecutableOrdering" + str(ii) + ".csv")
        for i in range(0, n):
            for servic in P.incl_services:
                if df.at[i, "id"] == servic:
                    for ser in Service_lake:
                        if ser.ide == servic:
                            service = ser
                    p = calls_number(service, str("Plans/" + "ExecutableOrdering" + str(ii) + ".csv"))
                    df.at[i, "critical"] = 1
                    df.at[i, "calls_number"] = p
        df.to_csv("Plans/" + "ExecutableOrdering" + str(ii) + ".csv", index=False)
        P.Ordering_file = file
    else:
        print("this plan is not executable")
    return P, executable
def node_get_or_add(project_node, task_nodes, task):
    if task.id not in task_nodes.keys():
        node = project_node.add(
            Node(task.id, duration=task.critical_path_duration))
        task_nodes.update({task.id: node})
    return task_nodes[task.id]
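# A hedged sketch of how the helper above might pair with a link step; the
# calc_critical_path example earlier calls a node_link helper that is not
# shown here, and depends_on_ids is an assumed field name:
def node_link(project_node, task_nodes, task):
    task_node = node_get_or_add(project_node, task_nodes, task)
    for dep in task.depends_on_ids:
        dep_node = node_get_or_add(project_node, task_nodes, dep)
        # A dependency must finish before the task it blocks can start.
        project_node.link(dep_node, task_node)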