def shortest_path(source, target):
    """
    Return the shortest list of (movie_id, person_id) pairs that
    connect the source person to the target person via breadth-first
    search.

    Returns None if the two people are not connected.  (The original
    returned an empty list in that case, contradicting its own
    docstring; callers checking `is None` would mis-handle it.)
    """
    # Trivial case: a person is connected to themselves by an empty path.
    if source == target:
        return []
    frontier = QueueFrontier()
    # Track explored *states* (person ids), not Node objects: the original
    # added whole nodes and then scanned them with an O(n) any(...), which
    # also shadowed the `parent` variable.  A set of ids gives O(1) lookups.
    explored = set()
    frontier.add(Node(source, None, None))
    while not frontier.empty():
        current = frontier.remove()
        explored.add(current.state)
        for movie_id, person_id in neighbors_for_person(current.state):
            if not frontier.contains_state(person_id) and person_id not in explored:
                child = Node(person_id, current, movie_id)
                # Goal-test on generation: still optimal for BFS, and it
                # avoids expanding one extra frontier layer.
                if child.state == target:
                    return backtrack([], child)
                frontier.add(child)
    # Exhausted the frontier without reaching target: no connection.
    return None
def spanning_tree_total(Graph, tree=None, leave=None):
    """Recursively build a spanning tree (as nd.Node objects) over `Graph`.

    On the first call (tree is None) a fresh root is created holding `Graph`
    itself; recursive calls attach children under `leave` instead.  A graph
    node already present in the tree (per tr.In_tree) is skipped, which is
    what prevents cycles from being re-added.

    NOTE(review): `it_points` is not defined anywhere in this function or its
    parameters — it must be a module-level global; confirm it exists at the
    call site, otherwise this raises NameError.
    """
    if tree == None:
        # Root call: wrap the whole graph in a new tree root.
        tree = nd.Node()
        tree.objects.append(Graph)
        if Graph.kids == []:
            pass
        else:
            for kid in Graph.kids:
                if tr.In_tree(kid, tree, it_points):
                    # Already in the tree: skip to avoid cycles.
                    pass
                else:
                    a = nd.Node()
                    tree.kids.append(a)
                    a.objects.append(kid)
                    b = a
                    # Recurse with the new tree node as the attachment point.
                    spanning_tree_total(kid, tree, b)
    else:
        # Recursive call: attach children under `leave`, not the root.
        if Graph.kids == []:
            pass
        else:
            for kid in Graph.kids:
                if tr.In_tree(kid, tree, it_points):
                    pass
                else:
                    a = nd.Node()
                    leave.kids.append(a)
                    a.objects.append(kid)
                    # Only recurse when the kid has children of its own.
                    if not (kid.kids == []):
                        spanning_tree_total(kid, tree, a)
    return tree
def add_layer(num_layer, source_DNA):
    """Insert a new convolution-style layer at position `num_layer` of a DNA.

    Builds the graph form of the DNA, splices a new node (temporary key -2)
    between `num_layer - 1` and `num_layer`, rewires kid pointers, relabels
    keys so the new node becomes `num_layer`, and returns the mutated DNA.
    Returns None when `num_layer` is out of the mutable range.
    """
    k_d = initialize_kernel()
    # Guard: cannot insert past the last mutable layer.
    if num_layer > len(DNA2layers(source_DNA)) - 3:
        return None
    def relabler(k):
        # Key relabeling: the temporary -2 key becomes num_layer; keys at or
        # after num_layer shift up by one; earlier keys are unchanged.
        if k == -2:
            return num_layer
        elif k < num_layer:
            return k
        else:
            return k + 1
    g = DNA2graph(source_DNA)
    total_layers = len(DNA2layers(source_DNA))
    # Second range guard (redundant with the first for most inputs).
    if num_layer - 1 > total_layers - 3:
        return None
    else:
        node = nd.Node()
        if num_layer == 0:
            # Inserting at the front: clone dimensions from the key -1 node.
            clone_node = g.key2node.get(num_layer - 1)
            clone_layer = clone_node.objects[0]
            # Layer tuple layout appears to be (type, in_ch, out_ch, k, k)
            # — presumably; verify against DNA2layers.  TODO confirm.
            node.objects.append((0, 3, clone_layer[2], k_d, k_d))
            g.add_node(-2, node)
            g.add_edges(-1, [-2])
            g.add_edges(-2, [num_layer])
            parent = g.key2node.get(0)
            node_a = g.key2node.get(-1)
            node_b = g.key2node.get(-2)
            # Replace the old parent link with the new node.
            swap_kids(parent, node_a, node_b)
            g.remove_edge(-1, 0)
        else:
            o_node = g.key2node.get(num_layer - 1)
            clone_node = g.key2node.get(num_layer)
            # If the target is the fully-connected node, clone the
            # predecessor instead.
            if graph2full_node(g) == clone_node:
                clone_node = o_node
            o_layer = o_node.objects[0]
            clone_layer = clone_node.objects[0]
            node = nd.Node()
            node.objects.append((0, o_layer[2], o_layer[2], k_d, k_d))
            g.add_node(-2, node)
            g.add_edges(num_layer - 1, [-2])
            g.add_edges(-2, [num_layer])
            parent = g.key2node.get(num_layer)
            node_a = g.key2node.get(num_layer - 1)
            node_b = g.key2node.get(-2)
            swap_kids(parent, node_a, node_b)
            g.remove_edge(num_layer - 1, num_layer)
            t_node = g.key2node.get(num_layer)
            t_layer = t_node.objects[0]
            #t_layer=layer_chanel(t_layer,clone_layer[2])
            # NOTE(review): the channel-fixup above is commented out, so this
            # assignment is currently a no-op — confirm this is intentional.
            t_node.objects[0] = t_layer
        # Strip pooling markers from the new layer tuple before relabeling.
        node.objects[0] = un_pool(list(node.objects[0]).copy())
        g.relable(relabler)
        fix_fully_conected(g)
        return Persistent_synapse_condition(graph2DNA(g))
def add_edgest_test():
    """Smoke-test edge insertion: build a 3-node chain and print kid counts."""
    graph = gr.Graph()
    for key in range(3):
        graph.add_node(key, nd.Node())
    graph.add_edges(1, [0])
    graph.add_edges(2, [1])
    print('The kids are')
    for key in range(3):
        print(len(graph.key2node[key].kids))
def add_node(self, g, DNA):
    """Insert into graph `g` a node keyed by `DNA`, carrying a fresh
    Quadrant that in turn carries a new tangent plane."""
    quadrant = qu.Quadrant(DNA)
    quadrant.objects.append(tplane.tangent_plane())
    container = nd.Node()
    container.objects.append(quadrant)
    g.add_node(DNA, container)
def add_node(g, i):
    """Register a new node under key `i` in graph `g`; the node wraps a
    Quadrant which wraps a tangent plane."""
    plane = tplane.tangent_plane()
    quad = qu.Quadrant(i)
    quad.objects.append(plane)
    wrapper = nd.Node()
    wrapper.objects.append(quad)
    g.add_node(i, wrapper)
def add_pool_layer(num_layer, source_DNA):
    """Insert a pooling layer at position `num_layer` of a DNA.

    Mirrors add_layer but the appended layer tuple carries a trailing 2
    (presumably the pool stride/size — TODO confirm) and, for the non-front
    case, the insertion point is a random kid of the target node.
    Returns None when `num_layer` is out of the mutable range.
    """
    k_d = initialize_kernel()
    # Guard: pooling needs one more trailing layer than add_layer (-4 vs -3).
    if num_layer > len(DNA2layers(source_DNA)) - 4:
        return None
    def relabler(k):
        # Temporary key -2 becomes num_layer; later keys shift up by one.
        if k == -2:
            return num_layer
        elif k < num_layer:
            return k
        else:
            return k + 1
    g = DNA2graph(source_DNA)
    total_layers = len(DNA2layers(source_DNA))
    if num_layer - 1 > total_layers - 3:
        return None
    else:
        node = nd.Node()
        if num_layer == 0:
            # Front insertion: clone channel count from the key -1 node.
            clone_node = g.key2node.get(num_layer - 1)
            clone_layer = clone_node.objects[0]
            node.objects.append(
                (0, clone_layer[2], clone_layer[2], k_d, k_d, 2))
            g.add_node(-2, node)
            g.add_edges(-1, [-2])
            g.add_edges(-2, [num_layer])
        else:
            # Pick a random predecessor of the target as the clone source.
            clone_node = get_random_kid(g.key2node.get(num_layer))
            key_o = g.node2key.get(clone_node)
            if graph2full_node(g) == clone_node:
                # NOTE(review): `o_node` is never defined in this function
                # (compare add_layer, which assigns it) — taking this branch
                # raises NameError.  Probable intent: the node at
                # num_layer - 1; confirm and fix.
                clone_node = o_node
            o_layer = clone_node.objects[0]
            clone_layer = clone_node.objects[0]
            node = nd.Node()
            node.objects.append(
                (0, clone_layer[2], clone_layer[2], k_d, k_d, 2))
            g.add_node(-2, node)
            g.add_edges(key_o, [-2])
            g.add_edges(-2, [num_layer])
            t_node = g.key2node.get(num_layer)
            t_layer = t_node.objects[0]
            # Re-channel the downstream layer to match the clone's output.
            t_layer = layer_chanel(t_layer, clone_layer[2])
            t_node.objects[0] = t_layer
        g.relable(relabler)
        fix_fully_conected(g)
        return Persistent_synapse_condition(graph2DNA(g))
def parse_sentence(self, sentence, port=3228):
    """Send `sentence` to a local dependency-parser service via netcat and
    return the root Node(s) of the parsed tree(s).

    Requires the parsing server to be listening on localhost:`port`.

    NOTE(review): `check_output(...).encode('utf8')` calls .encode on bytes,
    which raises AttributeError on Python 3 — this almost certainly should be
    .decode('utf8').  This code reads as Python 2 era; confirm the target
    interpreter before fixing.
    NOTE(review): building the shell string by concatenation with shell=True
    is a command-injection risk if `sentence` is untrusted; prefer
    subprocess with an argument list and stdin piping.
    """
    #Requires parsing server to be running!
    #Break raw parser output into nested lists
    sentence = escape(sentence.encode('cp1252'))
    sentence_str = str(check_output(['echo ' + str(sentence) + ' | nc localhost ' + str(port)], shell=True).encode('utf8'))
    # Parser output: blank-line-separated sentences, newline-separated words.
    sentence_list = sentence_str.split('\n\n')
    sentence_list = [s.split('\n') for s in sentence_list]
    word_data = []
    for sentence in sentence_list:
        if len(sentence) > 1:
            word_data.append([])
            for word in sentence:
                # Each word row is tab-separated fields.
                word_data[-1].append(word.split('\t'))
    #Store in unlinked nodes
    nodes = []
    for sentence in word_data:
        node_list = []
        for line in sentence:
            if len(line) > 1:
                #Parser generates some blank lines
                name = str(line[2])
                node = Node(name, [])
                # Field layout per the indices used here: 0=id, 1=sent_id,
                # 4=pos, 5=parent_id, 6=phrase — verify against the parser's
                # output format.
                node.data = {
                    'id': int(line[0]),
                    'sent_id': int(line[1]),
                    'pos': str(line[4]),
                    'parent_id': int(line[5]),
                    'phrase': str(line[6])
                }
                node_list.append(node)
        nodes.append(node_list)
    #Link nodes into a tree
    for sentence_tree in nodes:
        for node in sentence_tree:
            # Parent is the node whose sent_id equals this node's parent_id.
            parent_node = [n for n in sentence_tree if n.data['sent_id'] == node.data['parent_id']]
            if len(parent_node) == 1:
                parent_node[0].add_child(node)
    #Find roots
    roots = []
    for tree in nodes:
        for node in tree:
            # parent_id == 0 marks a root in this format.
            if node.data['parent_id'] == 0:
                roots.append(node)
    return roots
def add_layer(num_layer, source_DNA):
    """Insert a new layer at position `num_layer` of a DNA (halved-channel
    variant: the new layer's output channels are clone_layer[2] // 2).

    Splices a temporary node (key -2) between layers num_layer - 1 and
    num_layer in the graph form of the DNA, relabels keys so the new node
    takes key num_layer, and returns the mutated DNA.  Returns None when
    `num_layer` is outside the mutable range.
    """
    if num_layer > len(DNA2layers(source_DNA)) - 3:
        return None
    def relabler(k):
        # -2 becomes num_layer; keys >= num_layer shift up by one.
        if k == -2:
            return num_layer
        elif k < num_layer:
            return k
        else:
            return k + 1
    g = DNA2graph(source_DNA)
    total_layers = len(DNA2layers(source_DNA))
    # Second range guard (redundant with the first for most inputs).
    if num_layer - 1 > total_layers - 3:
        return None
    else:
        node = nd.Node()
        if num_layer == 0:
            # Front insertion: clone from the current first layer.
            clone_node = g.key2node.get(num_layer)
            clone_layer = clone_node.objects[0]
            node.objects.append((0, 3, int(clone_layer[2] / 2), 3, 3))
            g.add_node(-2, node)
            g.add_edges(-1, [-2])
            g.add_edges(-2, [num_layer])
        else:
            o_node = g.key2node.get(num_layer - 1)
            clone_node = g.key2node.get(num_layer)
            # Never clone the fully-connected node; fall back to predecessor.
            if graph2full_node(g) == clone_node:
                clone_node = o_node
            o_layer = o_node.objects[0]
            clone_layer = clone_node.objects[0]
            node = nd.Node()
            node.objects.append((0, o_layer[2], int(clone_layer[2] / 2), 3, 3))
            g.add_node(-2, node)
            g.add_edges(num_layer - 1, [-2])
            g.add_edges(-2, [num_layer])
            t_node = g.key2node.get(num_layer)
            t_layer = t_node.objects[0]
            # Re-channel the downstream layer to accept the halved output.
            t_layer = layer_chanel(t_layer, int(clone_layer[2] / 2))
            t_node.objects[0] = t_layer
        # Strip pooling markers from the new layer tuple.
        node.objects[0] = un_pool(list(node.objects[0]).copy())
        g.relable(relabler)
        fix_fully_conected(g)
        return Persistent_synapse_condition(graph2DNA(g))
def Indexes(n):
    """Build a binary P-tree of depth n, apply Index_node to every node,
    and return the objects list of each leaf."""
    root = nd.Node()
    tr.P_tree(root, 2, n)
    tr.Op(Index_node, root)
    return [leaf.objects for leaf in tr.Leaves(root)]
def P_tree(Node, period, layers):
    """Grow a complete `period`-ary tree of depth `layers` under `Node`
    and return `Node` itself."""
    if layers == 0:
        return Node
    for _ in range(period):
        Node.kids.append(P_tree(nd.Node(), period, layers - 1))
    return Node
def __build_tree(self, X, y, n_features, feature_indices, depth):
    """Recursively build a regression tree.

    Stops (returning a Leaf whose value is mean(y)) when the node has at
    most min_samples_split samples or max_depth is reached; otherwise finds
    the best split, partitions the data, and recurses with freshly sampled
    candidate features for each child.
    """
    node_data_set = np.column_stack((X, y))
    sample_size = len(y)
    # Termination: too few samples, or the depth limit has been hit.
    if len(y) <= self.min_samples_split or (depth != None and depth == self.max_depth):
        estimated_value = np.mean(y)  # leaf prediction = mean of targets
        leaf = Leaf(estimated_value=estimated_value, sample_size=sample_size, leaf_data_set=node_data_set)
        return leaf
    # Find the split feature and the optimal split point.
    best_feature_index, threshold, min_mes = find_split(
        X, y, self.criterion, feature_indices)
    # Partition into left and right subtrees.
    X_true, y_true, X_false, y_false = split(X, y, best_feature_index, threshold)
    node = Node(feature_index=best_feature_index, threshold=threshold, min_mes=min_mes, sample_size=sample_size, node_data_set=node_data_set)
    # Randomly re-sample candidate features for the left child.
    feature_indices = random.sample(range(n_features), int(self.max_features))
    # Recursively build the left subtree.
    node.branch_true = self.__build_tree(X_true, y_true, n_features, feature_indices, depth + 1)
    # Randomly re-sample candidate features for the right child.
    feature_indices = random.sample(range(n_features), int(self.max_features))
    node.branch_false = self.__build_tree(X_false, y_false, n_features, feature_indices, depth + 1)
    return node
def DNA2graph(DNA):
    """Convert a DNA into its directed-graph form: one node per layer
    (keys starting at -1) plus the edges listed in the synapse section."""
    g = gr.Graph(True)
    for key, layer in enumerate(DNA2layers(DNA), start=-1):
        holder = nd.Node()
        holder.objects.append(layer)
        g.add_node(key, holder)
    for synapse in DNA2synapses(DNA):
        g.add_edges(synapse[1], [synapse[2]])
    return g
from utilities import Node


def _make_node(coord, cost, parent):
    # Helper: construct a Node and set its three demo attributes.
    node = Node()
    node.coord = coord
    node.cost = cost
    node.parent = parent
    return node


# Node 1
new_node1 = _make_node([0.1, 0.6], 15, 1)
print(new_node1)

# Node 2
new_node2 = _make_node([0.1, 0.2], 10, 2)
print(new_node2)
#Initializes the Status (later will be moved to buttons) program.initialize_parameters(Status) program.create_objects(Status) pygame.init() n=5 #display_width = 1500 #display_height = 700 display_width = 400 display_height = 400 #Initializes sectors and load buttons on them sectors=nd.Node() sectors.objects.append(qu.Quadrant([[0,display_width], [0,display_height]])) qu.Divide(sectors,n) Node_buttons=bu.load(sectors,[[3,3,n,'run_stop'],[6,3,n,'initialize'], [3,6,n,'beta'],[9,3,n,'num_particels'], [12,3,n,'dt'],[15,3,n,'dx'],[18,3,n,'alpha'],[3,9,n,'r_value']]) gameDisplay = pygame.display.set_mode((display_width,display_height)) pygame.display.set_caption('Gui') black = (0,0,0) white = (255,255,255)
def build_tree(self, X, y, feature_indices, fa_feature_index, select_feature_fa,
               father_node, depth):
    """Recursively build a decision tree.

    Args:
        X, y: samples and labels reaching this node.
        feature_indices: randomly selected candidate feature set.
        fa_feature_index: feature the parent node split on (-1 at the root).
        select_feature_fa: features already chosen on the path from the root.
        father_node: the parent Node (stored as each node's predecessor).
        depth: current tree depth.

    Returns:
        A Node for an internal split, a Leaf at a stopping point, or (for
        the 'entropy' criterion) the modal label directly.

    BUG FIX: the original used identity checks for numeric comparisons
    (`depth is self.max_depth`, `entropy(y) is 0`, `shape[0] is 0`).
    A float is never `is 0`, so the pure-node stop under 'entropy' could
    never fire, and `is` on ints only works by CPython's small-int caching.
    All are replaced with `==`.
    """
    select_feature_fa.append(fa_feature_index)
    n_features = X.shape[1]
    n_features_list = [i for i in range(n_features)]
    # Record the chosen features and the sample count for diagnostics.
    self.select_feature.append(feature_indices)
    self.sample_num.append(len(y))
    node_data_set = np.column_stack((X, y))
    # Stopping condition for the entropy criterion.
    if self.criterion == 'entropy':
        if depth == self.max_depth or len(y) < self.min_samples_split or entropy(y) == 0:
            return mode(y)[0][0]  # return the mode of y
    # Stopping condition for the gini criterion.
    if self.criterion == 'gini':
        temp_gini = gini(y)
        self.gini_.append(temp_gini)
        sample_num = len(y)
        if depth == self.max_depth or sample_num < self.min_samples_split or temp_gini < self.min_impurity_split:
            # if depth == self.max_depth or temp_gini < self.min_impurity_split:
            # If every feature was already selected on this path, pick one at
            # random so the leaf still carries a two-feature description.
            if set(n_features_list) == set(select_feature_fa):
                index = random.randrange(len(n_features_list))
                current_feature_index = n_features_list[index]
            else:
                to_be_select = list(set(n_features_list) - set(select_feature_fa))
                index = random.randrange(len(to_be_select))
                current_feature_index = to_be_select[index]
            current_max_value = np.max(X[:, current_feature_index])
            current_min_value = np.min(X[:, current_feature_index])
            leaf = Leaf(mode(y)[0][0], fa_feature_index,
                        np.max(X[:, fa_feature_index]),
                        np.min(X[:, fa_feature_index]),
                        current_feature_index, current_max_value,
                        current_min_value, select_feature_fa, node_data_set,
                        sample_num, prior_node=father_node)
            self.leaf_list.append(leaf)
            return leaf
    # Best split feature, threshold and impurity for this node.
    feature_index, threshold, max_value, min_value, gini_ = find_split(
        X, y, self.criterion, feature_indices)
    fa_max_value = np.max(X[:, fa_feature_index])  # max of parent's split feature here
    fa_min_value = np.min(X[:, fa_feature_index])  # min of parent's split feature here
    # Partition into left and right subtrees.
    X_true, y_true, X_false, y_false = split(X, y, feature_index, threshold)
    # Degenerate split (one side empty): emit a leaf instead of recursing.
    if y_true.shape[0] == 0 or y_false.shape[0] == 0:
        if set(n_features_list) == set(select_feature_fa):
            index = random.randrange(len(n_features_list))
            current_feature_index = n_features_list[index]
        else:
            to_be_select = list(set(n_features_list) - set(select_feature_fa))
            index = random.randrange(len(to_be_select))
            current_feature_index = to_be_select[index]
        current_max_value = np.max(X[:, current_feature_index])
        current_min_value = np.min(X[:, current_feature_index])
        leaf = Leaf(mode(y)[0][0], fa_feature_index,
                    np.max(X[:, fa_feature_index]),
                    np.min(X[:, fa_feature_index]),
                    current_feature_index, current_max_value,
                    current_min_value, select_feature_fa, node_data_set,
                    prior_node=father_node, sample_num=0)
        self.leaf_list.append(leaf)
        return leaf
    node = Node(feature_index=feature_index, fa_feature_index=fa_feature_index,
                threshold=threshold, max_value=max_value, min_value=min_value,
                fa_max_value=fa_max_value, fa_min_value=fa_min_value,
                gini_coefficient=gini_, node_data_set=node_data_set)
    # Randomly re-sample candidate features.
    n_features = X.shape[1]
    n_sub_features = int(self.max_features)
    # feature_indices = random.sample(range(n_features), n_sub_features)
    select_feature = list()
    select_feature += select_feature_fa  # record features chosen on this path
    # Recursively build the left subtree.
    node.branch_true = self.build_tree(X_true, y_true, feature_indices,
                                       feature_index, select_feature, node,
                                       depth + 1)
    # Randomly re-sample candidate features for the right subtree.
    feature_indices = random.sample(range(n_features), n_sub_features)
    select_feature = list()
    select_feature += select_feature_fa  # record features chosen on this path
    node.branch_false = self.build_tree(X_false, y_false, feature_indices,
                                        feature_index, select_feature, node,
                                        depth + 1)
    node.prior_node = father_node  # link back to the predecessor node
    return node
def add_node(self, key):
    """Create a fresh node under `key` in this object's Graph and attach
    a newly created logger to it."""
    fresh = nd.Node()
    self.Graph.add_node(key, fresh)
    fresh.attach(self.log_creator())