def Dijkstra_p(G,start,end=None): D = {} # dictionary of final distances P = {} # dictionary of predecessors Q = priorityDictionary() # est.dist. of non-final vert. Q[start] = 0 comm = PSim(len(Q)+1) if comm.rank==0: i = 1 for v in Q: D[v] = Q[v] if v == end: break comm.send(i, v) i = i + 1 else: v = comm.recv(0) for w in G[v]: vwLength = D[v] + G[v][w] if w in D: if vwLength < D[w]: raise ValueError, "Dijkstra: found better path to already-final vertex" elif w not in Q or vwLength < Q[w]: Q[w] = vwLength P[w] = v return (D,P)
def Parallel(V, E, w, s):
    """Create a new process for each start vertex and run Dijkstra on it.

    Rank 0 scatters one start vertex to each worker, each worker runs
    Dijkstra from its vertex and sends the result back, and rank 0
    gathers and prints every result.  Note that there is an "extra"
    process because the root process does NOT handle any work - it just
    scatters and gathers data.

    NOTE(review): parameters E, w and s are unused in the visible code -
    confirm they are intentional.
    """
    l = V.keys()
    # one process per start vertex, plus the root coordinator
    comm = PSim(len(l) + 1)
    # Scatter
    if comm.rank==0:
        # Loop through all of the processes
        for i in range(1, len(l)+1):
            comm.send(i, l[i-1])
    else:
        curr = comm.recv(0)
        res = Dijkstra(V, curr)
        comm.send(0, res)
    # Gather
    if comm.rank==0:
        # Loop through all of the processes
        for i in range(1, len(l)+1):
            res = comm.recv(i)
            print ""
            print "Start: " + str(l[i-1])
            print "Path: " + str(res)
def mergesort_test(n, p):
    """Two-process parallel mergesort demo over PSim.

    Rank 0 generates n random floats, ships the upper half to rank 1,
    sorts the lower half locally, then merges the two sorted halves in
    place and prints the result.  Rank 1 sorts the half it receives and
    sends it back.  p is the PSim process count (only ranks 0/1 work).
    """
    comm = PSim(p)
    half = n // 2  # floor division: slice indices must be ints on Python 3
    if comm.rank == 0:
        data = [random.random() for i in range(n)]
        comm.send(1, data[half:])
        mergesort(data, 0, half)        # sort lower half locally
        data[half:] = comm.recv(1)      # sorted upper half from rank 1
        merge(data, 0, half, n)         # merge the two sorted runs
        print(data)
    else:
        data = comm.recv(0)
        # NOTE(review): called without bounds here - assumes mergesort's
        # extra parameters default to the whole list; confirm signature.
        mergesort(data)
        comm.send(0, data)
def mergesort_test(n,p):
    # Python 2 variant of the two-process mergesort demo: rank 0 keeps and
    # sorts the lower half, rank 1 sorts the upper half, rank 0 merges.
    # NOTE(review): relies on Python 2 integer division (n/2) and the
    # Python 2 print statement; do not run under Python 3 unchanged.
    import random
    from psim import PSim
    comm = PSim(p)
    if comm.rank==0:
        data = [random.random() for i in range(n)]
        comm.send(1, data[n/2:])   # ship upper half to rank 1
        mergesort(data,0,n/2)      # sort lower half locally
        data[n/2:] = comm.recv(1)  # sorted upper half comes back
        merge(data,0,n/2,n)        # merge the two sorted runs in place
        print data
    else:
        data = comm.recv(0)
        mergesort(data)
        comm.send(0,data)
def Dijkstra_pf(graph, start):
    # Parallel Dijkstra over a (vertices, links) graph using PrimVertex
    # records: each round, rank 0 farms one neighbor of the current vertex
    # out to each worker, workers test/record a shorter path, and rank 0
    # gathers the updates, re-heapifies Q and pops the next vertex.
    # Returns [(id, predecessor id, distance)] for every non-start vertex.
    # NOTE(review): reconstructed from a whitespace-mangled paste; the
    # nesting (and the trailing stray '"""if comm.rank==0:' residue) should
    # be checked against the original source.
    vertices, links = graph
    P = [PrimVertex(i, links) for i in vertices]
    Q = [P[i] for i in vertices if not i == start]
    vertex = P[start]
    vertex.closest_dist = 0
    # one worker per vertex plus the coordinating rank 0
    comm = PSim(len(vertices) + 1)
    while Q:
        i = 1
        for neighbor_id, length in vertex.neighbors:
            if comm.rank == 0:
                # comm.send(i, (P[neighbor_id], length))
                comm.send(i, (neighbor_id, length))
            else:
                # neighbor, length = comm.recv(0)
                nid, lth = comm.recv(0)
                neighbor = P[nid]
                dist = lth + vertex.closest_dist
                # keep the shorter path via the current vertex
                if neighbor in Q and dist < neighbor.closest_dist:
                    neighbor.closest = vertex
                    neighbor.closest_dist = dist
                comm.send(0, (nid, neighbor))
            i = i + 1
        if comm.rank == 0:
            l = []
            # Gather the message from each iteration
            for i in range(1, i):
                nid, r = comm.recv(i)
                l.append(r)
                neighbor = P[nid]
                # apply the worker's improvement to the local copy
                if r.closest_dist < neighbor.closest_dist:
                    neighbor.closest = vertex
                    neighbor.closest_dist = r.closest_dist
        heapify(Q)
        vertex = heappop(Q)
    # for v in P: print v.closest
    return [(v.id, v.closest.id, v.closest_dist) for v in P if not v.id == start]
    # NOTE(review): stray unterminated string below is residue from the
    # original paste; it likely continues past this chunk.
    """if comm.rank==0:
def scalar_product_test1(n, p):
    """Parallel dot product of two random n-vectors across p PSim ranks.

    Rank 0 builds both vectors and scatters contiguous chunks of size h
    to ranks 1..p-1; every rank (rank 0 included) reduces its own chunk,
    then rank 0 accumulates the partial sums and prints the total.

    NOTE(review): if p does not evenly divide n, the trailing n % p
    elements are never assigned to any rank and are silently dropped.
    """
    comm = PSim(p)
    h = n // p  # chunk size; floor division keeps range()/slices int-valued on Python 3
    if comm.rank == 0:
        a = [random.random() for i in range(n)]
        b = [random.random() for i in range(n)]
        for k in range(1, p):
            comm.send(k, a[k * h:k * h + h])
            comm.send(k, b[k * h:k * h + h])
    else:
        a = comm.recv(0)
        b = comm.recv(0)
    # local partial dot product over this rank's h elements
    scalar = sum(a[i] * b[i] for i in range(h))
    if comm.rank == 0:
        for k in range(1, p):
            scalar += comm.recv(k)
        print(scalar)
    else:
        comm.send(0, scalar)
def scalar_product_test1(n,p):
    # Python 2 variant of the parallel dot product: rank 0 scatters h-sized
    # chunks of two random vectors, all ranks reduce locally, rank 0 sums
    # the partials and prints the result.
    # NOTE(review): relies on Python 2 integer division (n/p) and the
    # Python 2 print statement; leftover n % p elements are dropped.
    import random
    from psim import PSim
    comm = PSim(p)
    h = n/p  # chunk size per rank (integer division under Python 2)
    if comm.rank==0:
        a = [random.random() for i in range(n)]
        b = [random.random() for i in range(n)]
        for k in range(1,p):
            comm.send(k, a[k*h:k*h+h])
            comm.send(k, b[k*h:k*h+h])
    else:
        a = comm.recv(0)
        b = comm.recv(0)
    # local partial dot product over this rank's h elements
    scalar = sum(a[i]*b[i] for i in range(h))
    if comm.rank == 0:
        for k in range(1,p):
            scalar += comm.recv(k)
        print scalar
    else:
        comm.send(0,scalar)
# NOTE(review): fragment of a larger function - 'comm', 'd', 'n', 'index',
# 'revIndex', 'graph', 'distances', 'source' and 'destination' are defined
# upstream, and the indentation here is reconstructed from a collapsed
# paste; verify against the original.
else:
    # Non-root ranks start with no data; everything arrives via recv below.
    index, revIndex, graph, distances = None, None, None, None
    source = None
    destination = None
#Distribute a portion of the data to all nodes
# Hypercube-style scatter over d rounds: in round i every current data
# holder forwards half of the vertices it holds (plus the shared search
# structures) to the partner rank 'shift' positions away.
ds = range(d)
for i in ds:
    shift = 2**(d - 1 - i)
    senders = range(0, 2**d, 2**(d - i))
    receivers = [sender + shift for sender in senders]
    if comm.rank in senders:
        receiver = comm.rank + shift
        if comm.rank == 0:
            # root derives its vertex list fresh each round
            verts = index.keys()
        # forward the upper half of the locally held vertices
        comm.send(receiver, verts[n / (2**(i + 1)):n / (2**i)])
        comm.send(receiver, index)
        comm.send(receiver, graph)
        comm.send(receiver, distances)
        comm.send(receiver, source)
        comm.send(receiver, destination)
    elif comm.rank in receivers:
        sender = comm.rank - shift
        verts = comm.recv(sender)
        index = comm.recv(sender)
        graph = comm.recv(sender)
        distances = comm.recv(sender)
        source = comm.recv(sender)
        destination = comm.recv(sender)
# Run the search and report back on the results
#Exit the loop when there are no other values to calculate Z with if len(newPosa) < numStrings and \ len(newPosc) < numStrings and \ len(newPost) < numStrings and \ len(newPosg) < numStrings: break else: #receive arrays newPos = comm.recv(0) if comm.rank == 1: B = Ba elif comm.rank == 2: B = Bc elif comm.rank == 3: B = Bt elif comm.rank == 4: B = Bg if len(newPos) < numStrings: Z = sys.maxint else: Z = CalcXYZ(newPos, B) comm.send(0, Z)
# NOTE(review): fragment of a worker loop - 'comm', 'numStrings', 'newRow'
# and the Ba/Bc/Bt/Bg tables are defined upstream; indentation is
# reconstructed from a collapsed paste.
# Compute the candidate next index sets for each nucleotide table.
newPosa = GetNextIndexes(newRow, Ba)
newPosc = GetNextIndexes(newRow, Bc)
newPost = GetNextIndexes(newRow, Bt)
newPosg = GetNextIndexes(newRow, Bg)
#Exit the loop when there are no other values to calculate Z with
if len(newPosa) < numStrings and \
   len(newPosc) < numStrings and \
   len(newPost) < numStrings and \
   len(newPosg) < numStrings:
    break
else:
    #receive arrays
    newPos = comm.recv(0)
    # each worker rank 1..4 owns one nucleotide table (a, c, t, g)
    if comm.rank == 1:
        B = Ba
    elif comm.rank == 2:
        B = Bc
    elif comm.rank == 3:
        B = Bt
    elif comm.rank == 4:
        B = Bg
    # sentinel when the received positions are incomplete (Python 2 sys.maxint)
    if len(newPos) < numStrings:
        Z = sys.maxint
    else:
        Z = CalcXYZ(newPos, B)
    comm.send(0, Z)
# NOTE(review): fragment of a parallel-MST routine cut off mid-statement -
# the matching 'if', 'n', 'e', 'p', MAX_WEIGHT and the helpers are defined
# elsewhere; indentation is reconstructed from a collapsed paste.
else:
    comm = PSim(p)  #create nodes, node 0 is the master
    #master node
    if comm.rank == 0:
        #the master node creates the initial graph
        the_graph = make_graph_tuple(n,e)
        #start the timer
        st = time.time()
        #break up edges and send to other worker nodes to
        #sort the edges by weight
        num_edges = e / p  # edges per rank (Python 2 integer division)
        #node 0's edges to sort
        local_edges_to_sort = the_graph[0:num_edges]
        for i in range (1,p):
            comm.send(i, the_graph[num_edges*i:(num_edges*i)+num_edges])
        #sort own local piece
        quicksort_tuple_graph(local_edges_to_sort)
        #now receive sorted pieces from worker nodes
        sorted_edges_to_merge = []
        sorted_edges_to_merge.append(local_edges_to_sort)
        for j in range (1,p):
            sorted_edges_to_merge.append(comm.recv(j))
        #merge sorted edges from workers into 1 sorted list
        # p-way merge: repeatedly pick the smallest head among the p runs
        sorted_edges = []
        for k in range (0,e):
            smallest = MAX_WEIGHT+1
            smallest_pos = -1
            for m in range(0,p):
                if len(sorted_edges_to_merge[m]) > 0: #check for empty list
# NOTE(review): fragment of a larger function (unformatted twin of the
# hypercube scatter) - 'comm', 'd', 'n' and the search structures are
# defined upstream; indentation is reconstructed from a collapsed paste.
else:
    # Non-root ranks start with no data; everything arrives via recv below.
    index,revIndex,graph,distances = None,None,None,None
    source = None
    destination = None
#Distribute a portion of the data to all nodes
# Hypercube-style scatter over d rounds: in round i every current data
# holder forwards half of the vertices it holds (plus the shared search
# structures) to the partner rank 'shift' positions away.
ds = range(d)
for i in ds:
    shift = 2**(d-1-i)
    senders = range(0,2**d,2**(d-i))
    receivers = [sender+shift for sender in senders]
    if comm.rank in senders:
        receiver = comm.rank+shift
        if comm.rank == 0:
            # root derives its vertex list fresh each round
            verts = index.keys()
        # forward the upper half of the locally held vertices
        comm.send(receiver,verts[n/(2**(i+1)):n/(2**i)])
        comm.send(receiver,index)
        comm.send(receiver,graph)
        comm.send(receiver,distances)
        comm.send(receiver,source)
        comm.send(receiver,destination)
    elif comm.rank in receivers:
        sender = comm.rank-shift
        verts = comm.recv(sender)
        index = comm.recv(sender)
        graph = comm.recv(sender)
        distances = comm.recv(sender)
        source = comm.recv(sender)
        destination = comm.recv(sender)
# Run the search and report back on the results
# NOTE(review): fragment of a parallel-MST routine cut off mid-statement
# (formatted twin of the earlier Kruskal master fragment) - the matching
# 'if', 'n', 'e', 'p', MAX_WEIGHT and the helpers are defined elsewhere;
# indentation is reconstructed from a collapsed paste.
else:
    comm = PSim(p)  #create nodes, node 0 is the master
    #master node
    if comm.rank == 0:
        #the master node creates the initial graph
        the_graph = make_graph_tuple(n, e)
        #start the timer
        st = time.time()
        #break up edges and send to other worker nodes to
        #sort the edges by weight
        num_edges = e / p  # edges per rank (Python 2 integer division)
        #node 0's edges to sort
        local_edges_to_sort = the_graph[0:num_edges]
        for i in range(1, p):
            comm.send(i, the_graph[num_edges * i:(num_edges * i) + num_edges])
        #sort own local piece
        quicksort_tuple_graph(local_edges_to_sort)
        #now receive sorted pieces from worker nodes
        sorted_edges_to_merge = []
        sorted_edges_to_merge.append(local_edges_to_sort)
        for j in range(1, p):
            sorted_edges_to_merge.append(comm.recv(j))
        #merge sorted edges from workers into 1 sorted list
        # p-way merge: repeatedly pick the smallest head among the p runs
        sorted_edges = []
        for k in range(0, e):
            smallest = MAX_WEIGHT + 1
            smallest_pos = -1
            for m in range(0, p):
                if len(sorted_edges_to_merge[m]) > 0: #check for empty list