def shingle_sketch(self, graphs, args, is_gexf):
    """Sketch each graph via random-walk shingles and save the sketch vectors."""
    param_w = args.win_size
    jaccard = []
    cosine = []
    index = 0
    sketch_vecs = []
    # for g in graphs:
    for g in tqdm(range(1, len(graphs) + 1)):
        if is_gexf:
            graph = graphs[g]
        else:
            graph = graph_utils.create_graph(graphs[g])
        walk_len = len(graph.edges()) * args.N
        # print("Edge Count: ", walk_len)
        walk_path = self.random_walk(graph, walk_len)
        shingles = self.generate_shingles(walk_path, walk_len, args.k_shingle)
        # graph_utils.draw_graph(graph, g)
        disc_shingles = self.get_disc_shingles(args.sketch_size)
        # self.get_win_sketch(disc_shingles)  # not in use now
        # print("\n\n Window Sketch: ", self.win_sketch)
        sketch_vec = self.get_graph_sketch(shingles, disc_shingles)
        sketch_vecs.append(sketch_vec)
        # print("\n\n Graph Sketch: ", sketch_vec)
        # print(disc_shingles)
        if index >= param_w:
            jaccard.append(self.calculate_similarity(shingles))
            # cosine.append(spatial.distance.cosine(self.win_sketch, sketch_vec))
        self.update_one_step_forward_window(shingles, index, param_w)
        index += 1
    sketch_vecs = np.array(sketch_vecs[3:]).astype(np.float64)
    # print("Vector : \n", sketch_vecs)
    np.savetxt(args.sketch_vector, sketch_vecs, delimiter=',')
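
# Illustrative only: hypothetical k-shingling and Jaccard helpers, sketched to
# show what generate_shingles / calculate_similarity above roughly compute.
# The class's own methods (signatures, weighting, windowing) may differ.
from collections import Counter


def k_shingles(walk_path, k):
    """Count every length-k shingle obtained by sliding a window over a walk."""
    return Counter(tuple(walk_path[i:i + k]) for i in range(len(walk_path) - k + 1))


def jaccard_similarity(shingles_a, shingles_b):
    """Jaccard similarity between the distinct shingles of two walks."""
    a, b = set(shingles_a), set(shingles_b)
    return len(a & b) / len(a | b) if (a | b) else 0.0

# Example:
#   walk = ["v1", "v2", "v3", "v2", "v4"]
#   jaccard_similarity(k_shingles(walk, 2), k_shingles(walk[::-1], 2))
#   -> 2 shared shingles / 6 distinct shingles = 0.333...
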
def merge_tracklets(self):
    """Link tracklets that share overlapping tokens and merge the best-scoring path."""
    graph = gr.create_graph(self.tracklets)

    ## -- Temporal overlap -- ##
    for t1, tracklet_1 in enumerate(self.tracklets):
        for t2, tracklet_2 in enumerate(self.tracklets):
            if t1 == t2:
                continue
            if tracklet_2.start_frame < tracklet_1.start_frame:
                continue
            cons_count_max = 0
            cons_pos_max = None
            for token1 in reversed(self.tracklets[t1].tokens[-MAX_SHARED_TOKS:]):
                cons_count = 0
                cons_pos = 0
                cons = False
                for token2 in self.tracklets[t2].tokens[:MAX_SHARED_TOKS]:
                    if token1.f == token2.f:
                        sim = token1.calc_similarity(token2)
                    else:
                        continue
                    if sim < TOKEN_SIM_THRESH:
                        cons = True
                        cons_count += 1
                        cons_pos = self.tracklets[t2].tokens.index(token2)
                    else:
                        break
                if cons and cons_count > cons_count_max:
                    cons_count_max = cons_count  # keep the longest consecutive run
                    cons_pos_max = cons_pos
            if cons_pos_max is not None:
                graph[t1].append(t2)
                # zero out the shared prefix of t2 so it is not scored twice
                for t, tok in enumerate(self.tracklets[t2].tokens):
                    if t <= cons_pos_max:
                        self.tracklets[t2].score -= self.tracklets[t2].tokens[t].score
                        self.tracklets[t2].tokens[t].score = 0

    start_nodes, end_nodes = gr.get_start_end_nodes(graph)
    for item in graph.items():
        print(item)

    longest_path = {}
    path_list = []
    for node_s in start_nodes:
        for node, conn in graph.items():
            longest_path[node] = {'score': 0, 'path': []}
        gr.get_longest_paths(self.tracklets, longest_path, graph, node_s)
        for node_e in end_nodes:
            path_list.append(longest_path[node_e])

    score = 0
    best_path = None
    for path in path_list:
        if path['score'] > score:
            score = path['score']
            best_path = path

    if best_path is not None:
        merged_track = Tracklet(start_frame=self.tracklets[best_path['path'][0]].start_frame)
        f = -1
        for t in best_path['path']:
            for tok in self.tracklets[t].tokens:
                if tok.f > f:
                    merged_track.add_token(tok)
                    f = tok.f
        for tracklet in self.tracklets:
            tracklet.is_valid = False
        self.add_tracklet(merged_track)
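
# Illustrative only: a minimal sketch of the kind of search gr.get_longest_paths
# could perform over the tracklet adjacency dict built above. The real helper's
# signature and scoring (it also receives the tracklets) may differ.
def longest_scored_path(graph, scores, node, seen=None):
    """Return (score, path) of the best-scoring path starting at `node`.

    `graph` maps tracklet index -> list of successor indices,
    `scores` maps tracklet index -> tracklet score.
    """
    seen = seen or set()
    best_score, best_path = scores[node], [node]
    for nxt in graph.get(node, []):
        if nxt in seen:
            continue
        s, p = longest_scored_path(graph, scores, nxt, seen | {node})
        if scores[node] + s > best_score:
            best_score, best_path = scores[node] + s, [node] + p
    return best_score, best_path

# Example:
#   longest_scored_path({0: [1], 1: [2], 2: []}, {0: 1.0, 1: 0.5, 2: 2.0}, 0)
#   -> (3.5, [0, 1, 2])
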
            req_vars[i][lhs] = int(rhs)
        for e in obs.split():
            (lhs, rhs) = e.split('=')
            req_obs[i][lhs] = int(rhs)
    for _ in range(m):
        expected_results.append(float(next(fp)))
    return variables, parents, probabilities, req_vars, req_obs, expected_results


if __name__ == '__main__':
    for filepath in os.listdir(TEST_DIR):
        variables, parents, probabilities, req_vars, req_obs, expected_results = \
            read_file(TEST_DIR + '/' + filepath)

        # Build the junction tree: directed model -> undirected copy -> moralization
        # -> triangulation -> maximal cliques (Bron-Kerbosch) -> clique graph
        # -> spanning tree (Kruskal).
        G = create_graph(variables, parents)
        U = create_undirected_graph(G)
        H = moralize_graph(U, parents)
        H_star = triangualate_graph(H)
        max_cliques = []
        bron_kerbosch(H_star, [], H_star.get_var_names(), [], max_cliques)
        C = create_graph_of_cliques(max_cliques)
        T = kruskal(C, max_cliques)

        # Run each query on the junction tree (expected results are zipped in for checking).
        factor_wrapper = FactorWrapper(variables, parents, probabilities, T)
        for ro, rv, er in zip(req_obs, req_vars, expected_results):
            result = factor_wrapper.query(ro, rv)
            if not result:
                print('Not implemented')
                continue
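
# Illustrative only: a textbook Bron-Kerbosch over a plain adjacency dict, shown
# to clarify what the bron_kerbosch(...) call above enumerates. The project's own
# implementation works on its graph class and uses a different argument order.
def bron_kerbosch_ref(adj, R, P, X, cliques):
    """Append every maximal clique of the graph described by `adj` to `cliques`."""
    if not P and not X:
        cliques.append(sorted(R))
        return
    for v in list(P):
        bron_kerbosch_ref(adj, R | {v}, P & adj[v], X & adj[v], cliques)
        P = P - {v}
        X = X | {v}

# Example:
#   adj = {'A': {'B', 'C'}, 'B': {'A', 'C'}, 'C': {'A', 'B'}, 'D': set()}
#   cliques = []
#   bron_kerbosch_ref(adj, set(), set(adj), set(), cliques)
#   -> cliques holds the maximal cliques ['A', 'B', 'C'] and ['D']
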
def merge_tracklets(self):
    """Merge tracklets linked by shared or extrapolated tokens; return the merged track or None."""
    graph = gr.create_graph(self.tracklets)

    ## -- Same start frame -- ##
    for t1 in self.tracklets:
        hiscore = t1.score
        for t2 in self.tracklets:
            if t1 is not t2:
                if t1.start_frame == t2.start_frame:
                    # tracklets start at the same point:
                    # remove the tracklet with the lower score
                    if t2.score > hiscore:
                        hiscore = t2.score
                        t1.is_valid = False
                    else:
                        t2.is_valid = False

    ## -- Temporal overlap -- ##
    for t in range(len(self.tracklets) - 1):
        cons_count_max = 0
        cons_pos_max = None
        for tok1 in reversed(self.tracklets[t].tokens[-c.MAX_SHARED_TOKS:]):
            cons_count = 0
            cons_pos = 0
            cons = False
            for tok2 in self.tracklets[t + 1].tokens[:c.MAX_SHARED_TOKS]:
                if tok1.f == tok2.f:
                    sim = tok1.calc_similarity(tok2)
                else:
                    continue
                if sim < c.TOKEN_SIM_THRESH:
                    cons = True
                    cons_count += 1
                    cons_pos = self.tracklets[t + 1].tokens.index(tok2)
                else:
                    break
            if cons and cons_count > cons_count_max:
                cons_count_max = cons_count  # keep the longest consecutive run
                cons_pos_max = cons_pos
        if cons_pos_max is not None:
            graph[t].append(t + 1)
            # zero out the shared prefix of the next tracklet so it is not scored twice
            for i, tok in enumerate(self.tracklets[t + 1].tokens):
                if i <= cons_pos_max:
                    self.tracklets[t + 1].score -= self.tracklets[t + 1].tokens[i].score
                    self.tracklets[t + 1].tokens[i].score = 0
        else:
            # no shared tokens: try to bridge the gap by extrapolating both ends
            if self.tracklets[t].length > 3 and self.tracklets[t + 1].length > 3:
                first_extrapolation_points = []
                second_extrapolation_points = []
                for i in range(3):
                    first_extrapolation_points.append(self.tracklets[t].tokens[i - 3].coords)
                    second_extrapolation_points.append(self.tracklets[t + 1].tokens[2 - i].coords)
                for i in range(c.EXTRAPOLATE_N):
                    first_extrapolation_points.append(
                        make_est(first_extrapolation_points[-3],
                                 first_extrapolation_points[-2],
                                 first_extrapolation_points[-1]))
                    second_extrapolation_points.append(
                        make_est(second_extrapolation_points[-3],
                                 second_extrapolation_points[-2],
                                 second_extrapolation_points[-1]))
                first_extrapolation_points = first_extrapolation_points[-c.EXTRAPOLATE_N:]
                second_extrapolation_points = second_extrapolation_points[-c.EXTRAPOLATE_N:]

                best_match = c.TOKEN_SIM_THRESH
                best_f_p = None
                best_s_p = None
                for i, f_p in enumerate(first_extrapolation_points):
                    for j, s_p in enumerate(second_extrapolation_points):
                        sim = calc_dist(f_p - s_p)
                        if sim < c.TOKEN_SIM_THRESH:
                            best_match = sim
                            best_f_p = i
                            best_s_p = j
                            break
                    if best_f_p is not None:
                        break
                if best_f_p is not None and best_s_p is not None:
                    new_first_points = first_extrapolation_points[:best_f_p]
                    new_second_points = second_extrapolation_points[:best_s_p]
                    for first_point in new_first_points:
                        self.tracklets[t].add_token(
                            Token(self.tracklets[t].tokens[-1].f + 1, first_point, score=1))
                    for second_point in reversed(new_second_points):
                        self.tracklets[t].add_token(
                            Token(self.tracklets[t].tokens[-1].f + 1, second_point, score=1))
                    graph[t].append(t + 1)

    start_nodes, end_nodes = gr.get_start_end_nodes(graph)
    longest_path = {}
    path_list = []
    for node_s in start_nodes:
        for node, conn in graph.items():
            longest_path[node] = {'score': 0, 'path': []}
        gr.get_longest_paths(self.tracklets, longest_path, graph, node_s)
        for node_e in end_nodes:
            path_list.append(longest_path[node_e])

    score = 0
    best_path = None
    for path in path_list:
        if path['score'] > score:
            score = path['score']
            best_path = path

    if best_path is not None:
        merged_track = Tracklet(start_frame=self.tracklets[best_path['path'][0]].start_frame)
        f = -1
        for t in best_path['path']:
            for tok in self.tracklets[t].tokens:
                if tok.f > f:
                    merged_track.add_token(Token(f=tok.f, coords=tok.coords, score=tok.score))
                    f = tok.f
            self.tracklets[t].is_valid = False
        return merged_track
    else:
        return None
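
# Illustrative only: a hypothetical version of make_est for the extrapolation step
# above, assuming token coords behave like numpy arrays and motion is roughly
# constant acceleration over the last three points. The project's helper may differ.
import numpy as np


def make_est_ref(p0, p1, p2):
    """Estimate the point following p2, given the three most recent points p0, p1, p2."""
    p0, p1, p2 = (np.asarray(p, dtype=float) for p in (p0, p1, p2))
    v1, v2 = p1 - p0, p2 - p1            # successive displacements
    return p2 + v2 + (v2 - v1)           # carry the velocity forward plus its last change

# Example:
#   make_est_ref([0, 0], [1, 1], [2, 3])  ->  array([3., 6.])
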