def main():
    """Build adjacency/coordinate tables, run DFS from C1 to B1, print the path.

    Relies on module-level `connections_data` and `location_data` plus the
    project-local DFS module.
    """
    adj_list = DFS.Construct_data_array(connections_data)
    coordinate_list = DFS.Construct_data_array(location_data)
    sorted_adj_list = DFS.Sort_Adjency_List(adj_list)
    # Search for a path between the two hard-coded endpoints.
    path = DFS.DFS("C1", "B1", sorted_adj_list, DFS.Get_Connections)
    # DFS.DFS signals failure with the literal False; use an identity test so
    # an empty-but-valid path container is not mistaken for failure.
    if path is False:
        print("Path not found")
    else:
        print("\n")
        DFS.PrintPathStack(path, coordinate_list)
def test(matrix): V = [Graph.Node(i) for i in range(0, len(matrix))] E = [[] for i in range(0, len(matrix))] G = Graph.Graph(V, E) Et = [[] for i in range(0, len(matrix))] Vt = [Graph.Node(i) for i in range(0, len(matrix))] Gt = Graph.Graph(Vt, Et) for i in range(0, len(matrix)): for j in range(0, len(matrix)): if matrix[i][j] is 1: E[i].append(Graph.Arch(i, j)) V[i].addAdj(V[j]) Et[j].append(Graph.Arch(j, i)) Vt[j].addAdj(Vt[i]) DFS.DFS(G) sccList = [[] for i in range(0, len(Gt.V))] DFS.SCC(Gt, sccList) count = 0 length = [] for i in range(0, len(sccList)): if (len(sccList[i]) is not 0): count += 1 length.append(len(sccList[i])) #print "SCC["+str(i)+"] : " + str(sccList[i]) print "Ci sono in totale " + str(count) + " componenti fortemente connesse" print "Con il seguente numero di vertici al loro interno: ", length
def tryAC3():
    """Solve a KenKen puzzle with AC-3 propagation; fall back to BFS/DFS search.

    Reads puzzles via testRead, applies unary node-consistency, runs AC-3 over
    all constraints, and reports timing plus domain-reduction statistics.
    """
    # create a dictionary of ConstraintVars keyed by names in VarNames.
    startTimer = time()
    variables = dict()
    constraints = []
    queue = Queue()  # instance of a FIFO Queue
    puzzles = testRead.readSudoku()
    puzzle = puzzles[2]  # NOTE(review): puzzle index is hard-coded — confirm intended
    puzzleSizeFile = puzzle.size
    setUpKenKen(puzzleSizeFile, variables, constraints)
    # set All Diff constraints
    for unary in puzzle.unaryDomains:
        # BUG FIX: bind the target value as a default argument so each lambda
        # captures this iteration's value rather than the loop variable
        # (classic late-binding closure bug if the constraint is stored).
        nodeConsistent(
            UnaryConstraint(variables[str(unary[0])],
                            lambda x, _v=int(unary[1]): x == _v))
    print("INITIAL DOMAINS")
    handlers.printDomains(variables, puzzleSizeFile)
    numDomainsBefore = handlers.countDomains(variables)
    print("NUM OF DOMAINS BEFORE AC3: " + str(numDomainsBefore))
    # Arrange the queue with all constraints and run Revise
    handlers.PrepareQueue(queue, constraints, variables)
    handlers.RunQueue(queue)
    if handlers.checkCompleteDomain(variables):
        # Every domain is a singleton: AC-3 alone solved the puzzle.
        endTimer = time()
        timeRunning = endTimer - startTimer
        print("Solution using AC-3 Only!")
        handlers.printDomains(variables, puzzleSizeFile)
        print("\033[91mTime running AC-3: " + str(round(timeRunning, 2)) + 's \033[0m')
    else:
        endTimer = time()
        print("-------------------------------------------------")
        print("IT WAS NOT POSSIBLE TO SOLVE USING THE AC-3 ONLY.")
        handlers.printDomains(variables, puzzleSizeFile)
        numDomainsAfter = handlers.countDomains(variables)
        # Percentage of candidate values eliminated by propagation.
        gain = (1 - (numDomainsAfter / numDomainsBefore)) * 100
        timeRunning = endTimer - startTimer
        print("NUM OF DOMAINS AFTER AC3: " + str(numDomainsAfter))
        print("REDUCTION OF: " + str(round(gain, 2)) + "%")
        print("\033[91mTime running AC-3: " + str(round(timeRunning, 2)) + 's \033[0m')
        csp = CSP(puzzleSizeFile, variables, constraints)
        # Here you can choose which search technique you want to use
        searchTechnique = "BFS"
        if (searchTechnique == "BFS"):
            print("CALLING BFS..." + '\n')
            BFS.AC3_BFS(BFS.Problem(csp, timeRunning))
        elif (searchTechnique == "DFS"):
            print("CALLING DFS..." + '\n')
            DFS.AC3_DFS(DFS.Problem(csp, timeRunning))
def Kosaraju(mang_vertex,matran):
    # Kosaraju-style SCC labelling (Python 2):
    #   1) DFS over the original graph to record start/end times,
    #   2) reverse the adjacency matrix,
    #   3) DFS again in decreasing end_time order to assign group labels.
    duyet(mang_vertex,matran)
    # Dump per-vertex (index, start_time, end_time, group) after the first pass.
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" %(mang_vertex[i].index+1,mang_vertex[i].start_time,mang_vertex[i].end_time,mang_vertex[i].group)
    DFS.reverse_matrix(matran)
    # Visit vertices by decreasing finish time; clear their DFS state first.
    mang_vertex.sort(key=lambda ver:ver.end_time,reverse=True)
    for i in range(len(mang_vertex)):
        mang_vertex[i].reset()
    duyet(mang_vertex,matran)
    # Restore index order and dump the final component labels.
    mang_vertex.sort(key=lambda ver:ver.index)
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" %(mang_vertex[i].index+1,mang_vertex[i].start_time,mang_vertex[i].end_time,mang_vertex[i].group)
def test_do_not_find_unconnected_element(self):
    """ Does an unconnected node remain unexplored? """
    # Adjacency list: nodes 0..5 form one region; 6 and 7 are separate.
    G = {
        0: [1, 2],
        1: [3],
        2: [4],
        3: [4, 5],
        4: [5],
        5: [],
        6: [7],
        7: [],
    }
    start = 0
    label = len(G.keys())
    explored_seed = dict.fromkeys(G.keys(), 0)
    order_seed = dict.fromkeys(G.keys())
    exploredList, nodeOrder, label = DFS.DFSRecursive2(
        G, start, explored_seed, order_seed, label)
    # Nodes 6 and 7 are unreachable from 0 and must stay unexplored.
    expected_explored = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 0, 7: 0}
    expected_order = {0: 3, 1: 5, 2: 4, 3: 6, 4: 7, 5: 8, 6: None, 7: None}
    self.assertEqual(expected_explored, exploredList)
    self.assertEqual(expected_order, nodeOrder)
def test_find_connected_elements(self):
    """ Is a connected node explored? """
    # All six nodes are reachable from node 0.
    G = {
        0: [1, 2],
        1: [3],
        2: [4],
        3: [4, 5],
        4: [5],
        5: [],
    }
    start = 0
    label = len(G.keys())
    explored_seed = dict.fromkeys(G.keys(), 0)
    order_seed = dict.fromkeys(G.keys())
    exploredList, nodeOrder, label = DFS.DFSRecursive2(
        G, start, explored_seed, order_seed, label)
    expected_explored = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}  # all explored
    expected_order = {0: 1, 1: 3, 2: 2, 3: 4, 4: 5, 5: 6}
    self.assertEqual(expected_explored, exploredList)
    self.assertEqual(expected_order, nodeOrder)
def reachable(self, v, L):
    """Yield pairs (w,label) on nonrepetitive paths from v,L."""
    # Bail out early when the starting pair is not present at all.
    if v not in self or L not in self[v]:
        return
    for node, label, flag in DFS.preorder(self.nrg, (v, L, False)):
        # Only report states whose flag is set and whose label is still valid.
        if flag and label in self[node]:
            yield node, label
def test_do_not_find_unconnected_element(self):
    """ Does an unconnected node remain unexplored? """
    # Undirected-style adjacency list: {0..5} connected, {6,7} separate.
    G = {
        0: [1, 2],
        1: [0, 3],
        2: [0, 3, 4],
        3: [1, 2, 4, 5],
        4: [2, 3, 5],
        5: [4, 5],
        6: [7],
        7: [6],
    }
    explored = DFS.DFS(G, 0)
    # Nodes 6 and 7 are unreachable from 0 and must stay unexplored.
    expected = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 0, 7: 0}
    self.assertEqual(expected, explored)
def test_find_connected_elements(self):
    """ Is a connected node explored? """
    # Undirected-style adjacency list; every node is reachable from 0.
    G = {
        0: [1, 2],
        1: [0, 3],
        2: [0, 3, 4],
        3: [1, 2, 4, 5],
        4: [2, 3, 5],
        5: [4, 5],
    }
    seed = dict.fromkeys(G.keys(), 0)
    explored = DFS.DFSRecursive(G, 0, seed)
    expected = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}  # all explored
    self.assertEqual(expected, explored)
def FirstPlot():
    """Time DFS on random graphs of 100..1000 vertices and plot the results."""
    algo_name = "DFS search"
    input_data = []
    exec_time = []
    for n in range(100, 1100, 100):
        graphList = []
        graph = DFS.Graph(g=graphList)
        vertexList = []
        DFS.createGraph(graph, vertexList, n)
        # (Removed an unused `vertexNum = random.randint(...)` local.)
        # BUG FIX: time.clock() was deprecated in 3.3 and removed in Python
        # 3.8; perf_counter() is the portable high-resolution replacement.
        start_time = time.perf_counter()
        DFS.DFS(graph)
        end_time = time.perf_counter()
        exec_time.append((end_time - start_time) * 1000)  # milliseconds
        input_data.append(n)
    CreatePlot(input_data, exec_time, algo_name)
def Kosaraju(mang_vertex, matran):
    # Kosaraju-style SCC labelling (Python 2): DFS, reverse the graph,
    # then DFS again in decreasing finish-time order.
    duyet(mang_vertex, matran)
    # Dump per-vertex (index, start_time, end_time, group) after pass one.
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" % (
            mang_vertex[i].index + 1, mang_vertex[i].start_time,
            mang_vertex[i].end_time, mang_vertex[i].group)
    DFS.reverse_matrix(matran)
    # Visit vertices by decreasing end_time; clear their DFS state first.
    mang_vertex.sort(key=lambda ver: ver.end_time, reverse=True)
    for i in range(len(mang_vertex)):
        mang_vertex[i].reset()
    duyet(mang_vertex, matran)
    # Restore index order and dump the final component labels.
    mang_vertex.sort(key=lambda ver: ver.index)
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" % (
            mang_vertex[i].index + 1, mang_vertex[i].start_time,
            mang_vertex[i].end_time, mang_vertex[i].group)
def find_game_mode(self, game_mode):
    """Return a solver instance for the requested game mode.

    Raises:
        Exception: if `game_mode` is not one of the supported mode constants.
    """
    if game_mode == DFS_MODE:
        return DFS.DFS()
    elif game_mode == A_STAR_MODE:
        return A_STAR.A_STAR()
    elif game_mode == BFS_MODE:
        return BFS.BFS()
    # BUG FIX: the old message omitted BFS even though it is handled above.
    raise Exception(
        "Attention: Only DFS, BFS and A-STAR modes are available currently.")
def run_DFS():
    """Build a small 5-node graph and run DFS from node 1.

    Returns the adjacency dict and the path produced by the search.
    """
    num_nodes = 5
    edges = [(0, 1), (0, 4), (1, 2), (1, 3), (1, 4), (2, 3), (3, 4)]
    graph = gr.Graph(num_nodes, edges)
    graph.create_adj_dict()
    adjacency = graph.get_adj_dict()
    searcher = dfs_search.DFS(adjacency)
    return adjacency, searcher(1)
def getAlgo(type, Matrix, startX, startY, n, grid, size):
    """Dispatch to the search algorithm named by `type`, targeting (n-1, n-1)."""
    goal = n - 1
    if type == "DFS":
        print("DFS")
        return DFS(Matrix, startX, startY, goal, goal, grid, n, size)
    if type == "BFS":
        return BFS(Matrix, startX, startY, goal, goal, grid, n, size)
    if type == "AStar":
        return AStar(Matrix, startX, startY, goal, goal, grid, n, size,
                     "Manhattan")
    # Falls through (returns None) for any unrecognised type, as before.
def aplica_profundidade():
    """Run DFS on a user-chosen .gexf graph, save the result, and draw it."""
    # Python 2 uses the old Tk file dialog; Python 3 uses tkinter.filedialog.
    if sys.version_info[0] < 3:
        source_path = tk.Open().show()
    else:
        source_path = filedialog.askopenfilename()
    graph = nx.read_gexf(source_path)
    graph = dfs.DFS(graph, graph.nodes()[0])
    nx.write_gexf(graph, source_path.replace(".gexf", "_DFS.gexf"))
    nx.draw(graph)
    plt.show()
def runSearchAlgorithm(self, alg):
    """Run the search algorithm selected in `alg` between the chosen nodes."""
    # Nothing to do until both endpoints have been picked in the GUI.
    if self.startNode == None or self.endNode == None:
        return
    choice = alg.get()
    if choice == "DFS":
        DFS(self.g, self.startNode, self.endNode, GUI=self).run()
    elif choice == "BFS":
        BFS(self.g, self.startNode, self.endNode, GUI=self).run()
    elif choice == "A*":
        A_Star(self.g, self.startNode, self.endNode, GUI=self).run()
    elif choice == "WA*":
        # Weighted A* needs a user-supplied weight.
        w = simpledialog.askinteger("Input", "Choose a weight for WA*",
                                    parent=self.win, minvalue=0,
                                    maxvalue=10000)
        WA_Star(self.g, self.startNode, self.endNode, weight=w,
                GUI=self).run()
    else:
        # Any other selection falls back to Dijkstra.
        Dijkstra(self.g, self.startNode, self.endNode, GUI=self).run()
def Kosaraju(mang_vertex, matran):
    # Kosaraju's algorithm driver (Python 2); prints vertex state after
    # steps 1 and 3.
    # Step 1: DFS over the original graph.
    duyet(mang_vertex, matran)
    print "Sau buoc 1:"
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" % (i + 1, mang_vertex[i].start_time,
                                     mang_vertex[i].end_time,
                                     mang_vertex[i].group)
    # Step 2: reverse the adjacency matrix in place.
    DFS.reverse_matrix(matran)
    # Step 3: DFS again over the reversed graph.
    mang_vertex.sort(key=lambda ver: ver.end_time,
                     reverse=True)  # sort by decreasing end_time
    for i in range(len(mang_vertex)):
        mang_vertex[i].reset()
    duyet(mang_vertex, matran)
    # Restore index order before reporting.
    mang_vertex.sort(key=lambda ver: ver.index)
    print "Sau buoc 3:"
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" % (i + 1, mang_vertex[i].start_time,
                                     mang_vertex[i].end_time,
                                     mang_vertex[i].group)
def TwoColor(G):
    """
    Find a bipartition of G, if one exists.
    Raises NonBipartite or returns dict mapping vertices
    to two colors (True and False).
    """
    color = {}
    for source, target, kind in DFS.search(G):
        if kind is DFS.forward:
            # Tree/forward edge: the child takes the opposite color.
            color[target] = not color.get(source, False)
        elif kind is DFS.nontree and color[source] == color[target]:
            # A non-tree edge joining same-colored endpoints closes an
            # odd cycle, so no bipartition exists.
            raise NonBipartite
    return color
def duyet(mang_vertex, matran):
    """Launch DFS from each still-unvisited vertex, labelling group numbers."""
    cur_time = 1
    group = 1
    pending = True
    while pending:
        pending = False
        for idx in range(len(mang_vertex)):
            if mang_vertex[idx].start_time == 0:
                # Found an unvisited vertex: start a new DFS tree / group,
                # then rescan from the beginning.
                pending = True
                mang_vertex[idx].group = group
                cur_time = DFS.run(mang_vertex, matran, idx, cur_time)
                cur_time += 1
                group += 1
                break
def duyet(mang_vertex, matran):
    # Outer DFS driver: keep launching DFS.run from the first vertex whose
    # start_time is still 0, assigning a fresh group number per DFS tree.
    cur_time = 1
    not_end = True
    group = 1
    while not_end:
        not_end = False
        for i in range(len(mang_vertex)):
            if mang_vertex[i].start_time == 0:
                not_end = True
                mang_vertex[i].group = group
                # DFS.run returns the updated timestamp counter.
                cur_time = DFS.run(mang_vertex, matran, i, cur_time)
                cur_time += 1
                group += 1
                # Restart the scan from the beginning after each tree.
                break
def __init__(self, bn, num_cases): self.bn = bn # Bayesian Network self.num_cases = num_cases # number of test self.dataset = np.zeros((self.num_cases, self.bn.n)) # DFS orders the nodes from the root to the leaves self.ordered_array = dfs.order(self.bn.dag, self.bn.nodes) for i in range(self.num_cases): # for each test (row) for j in range(self.bn.n): # for each node (column) v = float(random.random()) p = float(self.get_prob(self.ordered_array[j].value, i)) if v <= p: self.dataset[i][self.ordered_array[j].value] = 1
def main():
    # Python 2 script: for each user's documents, build semantic concept
    # pairs, then DFS-search the ConceptNet graph for a path between each
    # pair, timing every search.
    a = SemFeature()
    user_counter = 0
    for user in a.docs:
        user_counter += 1
        print "\rUser_%i" % user_counter, user, "creating concept pair..."
        tagged_corpus = a.posTagging(a.docs[user])
        target_corpus = a.parseTag2Rel(tagged_corpus)
        pair = a.createConceptPair(a.docs[user], target_corpus)
        # NOTE(review): the pickled graph appears to be re-loaded once per
        # user — confirm this is intended rather than hoisted out of the loop.
        print "Loading graph..."
        time_0 = time.time()
        G = nx.read_gpickle('cn.pkl')  # create_graph()
        print "Complete in %fs" % (time.time() - time_0)
        dfs = DFS(G)
        for ps in pair:
            #t0 = time.time()
            for p in ps:
                t0 = time.time()
                print "find (%s %s)" % (p[0], p[1])
                dfs.find(p[0], p[1])
                print "Search time: %fs" % (time.time() - t0)
            # Pause for the operator between pair groups.
            raw_input()
def HypercubeEmbedding(M):
    """Map medium states isometrically onto a hypercube."""
    # Give each token and its reverse one shared coordinate bit.
    tokmap = {}
    dim = 0
    for tok in M.tokens():
        if tok not in tokmap:
            bit = 1 << dim
            tokmap[tok] = bit
            tokmap[M.reverse(tok)] = bit
            dim += 1
    # Walk the state graph depth-first; each forward edge XORs in the bit
    # of the traversed token, so labels differ in exactly one coordinate
    # per transition.
    G = StateTransitionGraph(M)
    embed = {}
    for prev, cur, kind in DFS.search(G):
        if kind == DFS.forward:
            if prev == cur:
                embed[cur] = 0  # root of the DFS gets the origin label
            else:
                embed[cur] = embed[prev] ^ tokmap[G[prev][cur]]
    return embed
def eden(i):
    # Run every search algorithm on the labyrinth file named by `i`,
    # printing a separator line between runs. The return values are bound
    # but otherwise unused here; each module's main() does its own output.
    bfs = BFS.main("./labyrinths/" + i)
    print("---")
    ids = IDS.main("./labyrinths/" + i)
    print("---")
    dfs = DFS.main("./labyrinths/" + i)
    print("---")
    tss = TSS.main("./labyrinths/" + i)
    print("---")
    ast = AStar.main("./labyrinths/" + i)
    print("---")
    dijkstra = Dijkstra.main("./labyrinths/" + i)
    print("---")
    greedy = Greedy.main("./labyrinths/" + i)
    print("---")
    greedyHeuristics = GreedyHeuristics.main("./labyrinths/" + i)
    print("###########################################################################")
def plot_extensions(dataset_path, num_extensions):
    # Walk the dataset tree, collect every file name, strip extensions, and
    # plot a pie chart of the `num_extensions` most common extensions into
    # an "outputs/<dataset>--output/" directory next to the project root.
    allpaths = DFS.DFS(dataset_path)
    p = Path(os.getcwd()).parent
    dataset_name = path_utilities.get_last_dir_from_path(dataset_path)
    write_path = os.path.join(p, "outputs/", dataset_name + "--output/")
    if not os.path.isdir(write_path):
        os.mkdir(write_path)
    # a list of all the file names (without the paths)
    filenames = []
    for path in allpaths:
        filenames.append(path_utilities.get_fname_from_path(path))
    filenames_no_ext, exts = remove_all_extensions(filenames)
    plot_extension_pie(exts, num_extensions, write_path, dataset_path)
# NOTE(review): the stray ''' below appears to open a commented-out region
# that continues beyond this chunk — confirm against the full file.
'''
def shuffle(dataset_path):
    """Randomly permute the locations of every file under `dataset_path`.

    Each file is moved into the parent directory of a randomly chosen other
    file, scrambling the directory structure in place. Asks three times for
    confirmation before doing anything destructive.
    """
    import subprocess  # local import: used only by this destructive utility

    if (confirm(prompt="Warning, this will scramble the directory " +
                "structure of all files and folders in " + dataset_path +
                ". Are you sure you want to do this? ")):
        print("Ok.")
    else:
        exit()
    if (confirm(prompt="Really sure, though?")):
        print("Ok.")
    else:
        exit()
    if (confirm(prompt="Super duper sure???")):
        print("Ok.")
    else:
        exit()
    # get a list of the paths to every file in the dataset
    # rooted at "dataset_path"
    filepaths = DFS.DFS(dataset_path)
    num_files = len(filepaths)
    # Parent directory of every file, in the same order as filepaths.
    directory_list = [remove_path_end(fp) for fp in filepaths]
    # Random permutation pairing each file with a destination directory.
    perm = np.random.permutation(num_files)
    for i in range(num_files):
        next_file = filepaths[i]
        dest_dir = directory_list[perm[i]]
        print(next_file)
        # SECURITY/BUG FIX: os.system("mv " + ...) broke on paths containing
        # spaces or shell metacharacters and allowed shell injection; pass an
        # argv list with no shell instead.
        subprocess.run(["mv", next_file, dest_dir])
def dijkstra(G, start):
    """Compute shortest-path weights from `start` over adjacency matrix G."""
    queue = myDeque.myDeque()
    weights = {}
    processed = []
    queue.push(start)
    weights[start] = 0
    # Every other vertex starts at a large sentinel standing in for infinity.
    for node in range(len(G)):
        if node != start:
            weights[node] = 1000000
    while queue.length() != 0:
        current = queue.topL()
        # Only expand vertices we have not processed yet.
        if not DFS.in_list(processed, current):
            for neighbor in range(len(G)):
                if G[current][neighbor] > 0:
                    queue.push(neighbor)
                    put_weight(queue, G, weights, current, neighbor)
        processed.append(queue.topL())
        queue.pop()
    return weights
def main():
    """Load the soc-Epinions1 edge list, build a graph, and DFS from node 0."""
    frame = pd.read_csv("/Users/babak_khorrami/Downloads/soc-Epinions1.txt",
                        header=0, sep="\t")
    edge_array = np.array(frame)
    graph = Graph()
    # Register every distinct endpoint appearing in the first two columns.
    for node in set(edge_array[:, 0:2].ravel()):
        graph.add_node(node)
    # Add each edge with unit weight.
    for row in range(edge_array.shape[0]):
        graph.add_edge(edge_array[row, 0], edge_array[row, 1], 1)
    print(graph.get_node_count())
    print("------ Graph Created -------")
    searcher = DFS(graph)
    searcher.dfs(0)
    searcher.print_dfs()
def test_DFS(self):
    """Exercise recursive and iterative DFS on a small sample digraph."""
    graph = Graph()
    # Edge insertion order matters for DFS visit order — keep it fixed.
    for src, dst in [('u', 'v'), ('u', 'x'), ('x', 'v'), ('v', 'y'),
                     ('y', 'x'), ('w', 'y'), ('w', 'z'), ('z', 'z')]:
        graph.add_vertex(src, dst)
    DFS.recursive_dfs(graph)
    graph.debug()
    graph.clean()
    # Iterative variant, restarted from both roots.
    DFS.iterative_dfs(graph, 'u')
    DFS.iterative_dfs(graph, 'w')
    graph.debug()
def aplica_todos():
    """Run every implemented graph algorithm on a user-chosen .gexf graph,
    writing each result next to the input with a descriptive suffix."""
    if sys.version_info[0] < 3:
        pathDoArquivo = tk.Open().show()
    else:
        pathDoArquivo = filedialog.askopenfilename()
    G = nx.read_gexf(pathDoArquivo)
    M = krl.Kruskal(G)
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_MST_Kruskal.gexf"))
    M = pr.Prim(G, G.nodes()[0])
    # BUG FIX: the Prim result (M) was previously discarded — the original
    # graph G was written to the _MST_Prim file instead.
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_MST_Prim.gexf"))
    M = bfs.BFS(G, G.nodes()[0])
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_BFS.gexf"))
    M = dfs.DFS(G, G.nodes()[0])
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_DFS.gexf"))
    M = dks.Dijkstra(G, G.nodes()[0])
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_Dijkstra.gexf"))
    M = wp.WelshPowell(G, G.nodes()[0])
    nx.write_gexf(M, pathDoArquivo.replace(".gexf", "_WelshPowell.gexf"))
import DFS
from Node import Node

# Build a small directed graph:
#   A -> B, A -> C, C -> F, B -> D, D -> E
nodes = {name: Node(name) for name in "ABCDEF"}
nodes["A"].neighbors.append(nodes["B"])
nodes["A"].neighbors.append(nodes["C"])
nodes["C"].neighbors.append(nodes["F"])
nodes["B"].neighbors.append(nodes["D"])
nodes["D"].neighbors.append(nodes["E"])

# Depth-first traversal rooted at A.
DFS.dfs(nodes["A"])
from DFS import *

# Five vertices with edges 1 -> {2, 3}, 3 -> 4, 4 -> 5.
vertex_list = [Vertex(i) for i in range(1, 6)]
v1, v2, v3, v4, v5 = vertex_list

v1.neighbour_list.append(v2)
v1.neighbour_list.append(v3)
v3.neighbour_list.append(v4)
v4.neighbour_list.append(v5)

# Depth-first traversal over every vertex.
dfs = DFS()
dfs.dfs(vertex_list)
# NOTE(review): the block below looks like the body of a
# duyet(mang_vertex, matran) helper whose `def` line was lost (compare the
# call sites inside Kosaraju); as written these names are undefined at module
# level — confirm against the original source.
cur_time = 1
not_end=True
group=1
while not_end:
    not_end=False
    for i in range(len(mang_vertex)):
        if mang_vertex[i].start_time == 0:
            not_end = True
            mang_vertex[i].group=group
            # DFS.run returns the updated timestamp counter.
            cur_time=DFS.run(mang_vertex,matran,i,cur_time)
            cur_time+=1
            group+=1
            break

def Kosaraju(mang_vertex,matran):
    # First DFS pass over the original graph; record times and groups.
    duyet(mang_vertex,matran)
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" %(mang_vertex[i].index+1,mang_vertex[i].start_time,mang_vertex[i].end_time,mang_vertex[i].group)
    # Reverse the graph, then DFS again in decreasing end_time order.
    DFS.reverse_matrix(matran)
    mang_vertex.sort(key=lambda ver:ver.end_time,reverse=True)
    for i in range(len(mang_vertex)):
        mang_vertex[i].reset()
    duyet(mang_vertex,matran)
    # Restore original ordering and print the component labels.
    mang_vertex.sort(key=lambda ver:ver.index)
    for i in range(len(mang_vertex)):
        print "dinh %d: %d %d %d" %(mang_vertex[i].index+1,mang_vertex[i].start_time,mang_vertex[i].end_time,mang_vertex[i].group)

# Driver (Python 2): load the adjacency matrix and run Kosaraju on it.
matran=DFS.init("F:\\GitHub\python_pttkgt\\graph\\input_kosaraju.txt")
mang_vertex = [DFS.Vertex(index=i) for i in range(matran.height)]
Kosaraju(mang_vertex,matran)
def RoutingTable(M):
    """
    Return a dictionary mapping pairs (state1,state2) to tokens,
    such that the action of the token takes state1 closer to state2.
    By following successive tokens from this table, we can find a path
    in the medium that uses each token at most once and involves no
    token-reverse pairs. We use the O(n^2) time algorithm from
    arxiv:cs.DS/0206033.
    This is also a key step of the partial cube recognition algorithm
    from arxiv:0705.1025 -- as part of that algorithm, if we recognize
    that the input is not a medium, we raise MediumError.
    """
    G = StateTransitionGraph(M)
    current = initialState = next(iter(M))
    # find list of tokens that lead to the initial state
    activeTokens = set()
    for LG in BFS.BreadthFirstLevels(G, initialState):
        for v in LG:
            for w in LG[v]:
                activeTokens.add(G[w][v])
    for t in activeTokens:
        if M.reverse(t) in activeTokens:
            raise MediumError("shortest path to initial state is not concise")
    activeTokens = list(activeTokens)
    inactivated = object()  # flag object to mark inactive tokens
    # rest of data structure: point from states to list and list to states
    activeForState = {S: -1 for S in M}
    statesForPos = [[] for i in activeTokens]

    def scan(S):
        """Find the next token that is effective for s."""
        i = activeForState[S]
        while True:
            i += 1
            if i >= len(activeTokens):
                raise MediumError("no active token from %s to %s"
                                  % (S, current))
            if activeTokens[i] != inactivated and M(S, activeTokens[i]) != S:
                activeForState[S] = i
                statesForPos[i].append(S)
                return

    # set initial active states
    for S in M:
        if S != current:
            scan(S)

    # traverse the graph, maintaining active tokens
    visited = set()
    routes = {}
    for prev, current, edgetype in DFS.search(G, initialState):
        if prev != current and edgetype != DFS.nontree:
            if edgetype == DFS.reverse:
                prev, current = current, prev
            # add token to end of list, point to it from old state
            activeTokens.append(G[prev][current])
            activeForState[prev] = len(activeTokens) - 1
            statesForPos.append([prev])
            # inactivate reverse token, find new token for its states
            # (FIX: the words "its states" had been split across a line
            # break, leaving a stray bare `states` expression in the code;
            # folded it back into this comment.)
            activeTokens[activeForState[current]] = inactivated
            for S in statesForPos[activeForState[current]]:
                if S != current:
                    scan(S)
        # remember routing table as part of returned results
        # NOTE(review): `visited` is never added to in the visible code, so
        # this guard is always true — possibly more text was lost in the
        # same mangling; confirm against the original algorithm source.
        if current not in visited:
            for S in M:
                if S != current:
                    routes[S, current] = activeTokens[activeForState[S]]
    return routes
__author__ = 'edwingsantos'

from DepthFirstSearch.Node import Node
import DFS

# Build the demo graph: A -> {B, C}, B -> D, D -> F, G -> A.
graph = {name: Node(name) for name in ("A", "B", "C", "D", "F", "G")}
graph["A"].adjacentList.append(graph["B"])
graph["A"].adjacentList.append(graph["C"])
graph["B"].adjacentList.append(graph["D"])
graph["D"].adjacentList.append(graph["F"])
graph["G"].adjacentList.append(graph["A"])

# Depth-first traversal rooted at A.
DFS.dfs(graph["A"])
#!/usr/bin/python
# Demo script: build a small graph and traverse it depth-first.
# FIX: dropped the gratuitous trailing semicolons — they are noise in Python.
from Node import Node
import DFS

# Graph edges: A -> {B, C}, B -> D, D -> E.
node1 = Node("A")
node2 = Node("B")
node3 = Node("C")
node4 = Node("D")
node5 = Node("E")

node1.adjacenciesList.append(node2)
node1.adjacenciesList.append(node3)
node2.adjacenciesList.append(node4)
node4.adjacenciesList.append(node5)

# Depth-first traversal rooted at A.
DFS.dfs(node1)
def main(): n = DFS.dfs(DFS.G, 'Result/img') if report(n, 'Result/slideshow.html'): print "Report succesfully genarated"