def compute_mesh_expanding(self, item_list, length):
    result = item_list

    #1. call find_boundary_points()
    boundary_points = Map.find_boundary_points(item_list)

    #2.
    for point in boundary_points:
        new_seg_set = self.compute_fixed_expanding(point[0], point[1], point[2], option.MAX_SPEED)
        new_seg_set = EdgeSegmentSet.clean_fixed_expanding(new_seg_set)
        result = EdgeSegmentSet.union(result, new_seg_set)

    return result
def process_existing_clique(self, clique, timestamp):
    free_nodes = []
    satisfied = True    #new clique satisfied or not

    #1. get new_loc
    expanding_list = {}     #dict of lists
    query_list = []
    for node in clique:
        query = self.query_log.trajs[node][timestamp]
        query_list.append(query)

    for query in query_list:
        seg_list = self.map_data.compute_fixed_expanding(query.x, query.y, query.cur_edge_id, option.INIT_DISTANCE)
        seg_list = EdgeSegmentSet.clean_fixed_expanding(seg_list)
        expanding_list[query.obj_id] = seg_list

    #2. build graph
    degree = {}
    adj = {}
    for query in query_list:
        degree[query.obj_id] = 0
    for i in range(len(query_list)):
        for j in range(i + 1, len(query_list)):
            if EdgeSegmentSet.is_set_cover(Point(query_list[i]), expanding_list[query_list[j].obj_id]) and \
                    EdgeSegmentSet.is_set_cover(Point(query_list[j]), expanding_list[query_list[i].obj_id]):
                adj[(query_list[i].obj_id, query_list[j].obj_id)] = 1
                adj[(query_list[j].obj_id, query_list[i].obj_id)] = 1

    for pair in adj.iterkeys():
        degree[pair[0]] += 1
        degree[pair[1]] += 1

    for node in clique:
        if degree[node] == 0:
            free_nodes.append(node)
            del expanding_list[node]
            satisfied = False

    #3. compute union (mesh) and ...
    mesh = []
    for seg_list in expanding_list.itervalues():
        mesh = EdgeSegmentSet.union(mesh, seg_list)

    #3.2 check total_len
    if EdgeSegmentSet.length(mesh) > option.MAX_MESH_LENGTH:
        satisfied = False
        # remove (heuristically) nodes which have low degrees
        while True:
            node = min(degree, key=degree.get)      #lowest-degree node (was: min(degree, degree.get))
            free_nodes.append(node)
            del degree[node]
            expanding_list.pop(node, None)          #drop its expanding so the recomputed mesh can shrink
            for node_2 in clique:
                if adj.has_key((node, node_2)):
                    del adj[(node, node_2)]
                    del adj[(node_2, node)]
                    degree[node_2] = degree[node_2] - 1

            #re-compute union (mesh) and check total_len
            mesh = []
            for seg_list in expanding_list.itervalues():
                mesh = EdgeSegmentSet.union(mesh, seg_list)
            if EdgeSegmentSet.length(mesh) <= option.MAX_MESH_LENGTH:
                break

    #4. return
    return (satisfied, free_nodes, mesh)
def init_mc_set(self):
    start_time = time.clock()

    expanding_list = {}     #[[]] * option.MAX_USER   #dict of lists
    query_list = self.query_log.frames[0]             #init timestamp

    min_obj_id = option.MAX_USER
    max_obj_id = 0

    #1. compute expanding_list
    for query in query_list:
        if max_obj_id < query.obj_id:
            max_obj_id = query.obj_id
        if min_obj_id > query.obj_id:
            min_obj_id = query.obj_id
        # self.num_user = max(self.num_user, query.obj_id)

        seg_list = self.map_data.compute_fixed_expanding(query.x, query.y, query.cur_edge_id, option.INIT_DISTANCE)
        seg_list = EdgeSegmentSet.clean_fixed_expanding(seg_list)
        expanding_list[query.obj_id] = seg_list

    #1.2 check connectivity of each seg_list --> OK
    # print "Connectivity check: STARTED"
    # for query in query_list:
    #     if not Map.check_connected_expanding(expanding_list[query.obj_id]):
    #         print "error found at obj_id=", query.obj_id
    # print "Connectivity check: DONE"

    #2. init self.mc_set
    self.reset()
    for query in query_list:
        self.mc_set.append(set([query.obj_id]))

    #3. compute mc_set
    num_edges = 0
    list_edges = []
    for i in range(len(query_list)):
        for j in range(i + 1, len(query_list)):
            if get_distance(query_list[i].x, query_list[i].y, query_list[j].x, query_list[j].y) > \
                    option.INIT_DISTANCE:
                continue
            if EdgeSegmentSet.is_set_cover(Point(query_list[i]), expanding_list[query_list[j].obj_id]) and \
                    EdgeSegmentSet.is_set_cover(Point(query_list[j]), expanding_list[query_list[i].obj_id]):
                num_edges += 1
                list_edges.append((query_list[i].obj_id, query_list[j].obj_id))

    print "num_edges=", num_edges
    print "list_edges - elapsed : ", (time.clock() - start_time)

    start_time = time.clock()
    graph.add_to_mc_set(list_edges)
    print "add_to_mc_set - elapsed : ", (time.clock() - start_time)
    print "mc_set =", self.mc_set

    #4. compute user_mc_set, max clique for each obj_id
    self.user_mc_set = {}
    for clique in self.mc_set:
        if len(clique) >= option.K_ANONYMITY:
            for obj_id in clique:
                if not self.user_mc_set.has_key(obj_id):
                    self.user_mc_set[obj_id] = clique
                elif len(self.user_mc_set[obj_id]) < len(clique):
                    self.user_mc_set[obj_id] = clique
    print "Compute user_mc_set: DONE"
    print "user_mc_set = ", self.user_mc_set

    #5. compute MMBs (CLOAKING MESHES) and self.user_set
    max_mesh_len = 0
    min_mesh_len = 1000000
    for clique in self.mc_set:
        if len(clique) >= option.K_ANONYMITY:
            mesh = []
            for obj_id in clique:
                mesh = EdgeSegmentSet.union(mesh, expanding_list[obj_id])
            #
            temp_len = EdgeSegmentSet.length(mesh)
            if max_mesh_len < temp_len:
                max_mesh_len = temp_len
            if min_mesh_len > temp_len:
                min_mesh_len = temp_len
            # assign mesh to all obj_id in 'clique'
            for obj_id in clique:
                self.user_mesh[obj_id] = mesh
            #
            for obj_id in clique:
                self.user_set = self.user_set | set([obj_id])

    print "self.user_set = ", self.user_set
    print "Compute CLOAKING MESH: DONE"
    print "max_mesh_len =", max_mesh_len
    print "min_mesh_len =", min_mesh_len

    #5.2 check connectivity of each user_mesh --> OK
    # start_time = time.clock()
    # print "MMB Connectivity check: STARTED"
    # for clique in self.mc_set:
    #     for obj_id in clique:
    #         if not Map.check_connected_expanding(self.user_mesh[obj_id]):
    #             print "error found at clique=", clique
    #             continue
    # print "MMB Connectivity check: DONE"
    # print "elapsed : ", (time.clock() - start_time)

    #DEBUG
    print "len(graph.mc_set) = ", len(graph.mc_set)
    print graph.mc_set
f = open(option.RESULT_PATH + "query_log.out", "r")
query_log = cPickle.load(f)
query_log.map_data = map_data
print "LOAD map_data, query_log - elapsed : ", (time.clock() - start_time)

# timestamp = 0
query_list = query_log.frames[timestamp]    # timestamp

trajs = {}
for query in query_list:
    trajs[query.obj_id] = query_log.trajs[query.obj_id][timestamp]

edge_segment_set = EdgeSegmentSet()

# load expanding_list
start_time = time.clock()
f = open(option.RESULT_PATH + "expanding_list.out", "r")
# global expanding_list
expanding_list = cPickle.load(f)
print "Load expanding_list: DONE! - elapsed : ", (time.clock() - start_time)
# print "size(expanding_list) =", sys.getsizeof(expanding_list)

# load mc_set
f = open(option.RESULT_PATH + "mc_set.out", "r")
mc_set = cPickle.load(f)
print "Load mc_set: DONE, len(mc_set) =", len(mc_set)
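# --- Illustrative sketch (not part of the original file) ---------------------
# The pickled inputs loaded above (query_log.out, expanding_list.out, mc_set.out)
# are assumed to have been written elsewhere with cPickle.dump; a minimal sketch
# of that writer side could look like the commented lines below (paths mirror the
# loads above; the text-mode default pickle protocol is an assumption).
# f = open(option.RESULT_PATH + "expanding_list.out", "w")
# cPickle.dump(expanding_list, f)
# f.close()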
def check_MMB_MAB(self, checking_pairs, cover_mesh, cover_mesh_mmb, new_cover_mesh, new_cover_mesh_mmb):
    # start_time = time.clock()
    # # prepare MAB
    # positive_mesh = []
    # for (pos_id, check_list) in checking_pairs.iteritems():
    #     if len(check_list) == 0:
    #         positive_mesh.append(0)
    #         continue
    #     mesh = []
    #     for obj_id in positive_mc_set[pos_id]:
    #         mesh.extend(expanding_list[obj_id])
    #     mesh = EdgeSegmentSet.clean_fixed_expanding(mesh)
    #     positive_mesh.append(mesh)
    # print "prepare MAB - Step 1 - elapsed : ", (time.clock() - start_time)
    #
    # start_time = time.clock()
    # positive_mab = []
    # count_id = 0
    # for (pos_id, check_list) in checking_pairs.iteritems():
    #     if len(check_list) == 0:
    #         continue
    #     # compute MAB
    #     mesh = self.map_data.compute_mesh_expanding(positive_mesh[pos_id], option.MAX_SPEED)
    #
    #     positive_mab.append(mesh)
    #
    #     count_id += 1
    #     if count_id % 100 == 0:
    #         print "count_id =", count_id
    #
    # print "prepare MAB - Step 2 - elapsed : ", (time.clock() - start_time)

    #
    start_time = time.clock()
    count_pair = 0
    for (pos_id, check_list) in enumerate(checking_pairs):
        if len(check_list) == 0:
            print "ERROR in checking_pairs at pos_id =", pos_id    # CASE inter_len = 0 in find_next_cover()
            continue
        for old_cover_id in check_list:
            # 1. MMB
            new_mesh = new_cover_mesh[pos_id]
            old_mesh_mmb = cover_mesh_mmb[old_cover_id]
            #
            inter_set = EdgeSegmentSet.intersect(old_mesh_mmb, new_mesh)
            if len(inter_set) > 0 and len(inter_set) < option.S_GLOBAL:
                print "MMB FAULT at (pos_id, old_cover_id):", pos_id, old_cover_id

            # 2. MAB
            new_mesh_mab = new_cover_mesh_mmb[pos_id]
            old_mesh = cover_mesh[old_cover_id]
            #
            inter_set = EdgeSegmentSet.intersect(old_mesh, new_mesh_mab)
            if len(inter_set) > 0 and len(inter_set) < option.S_GLOBAL:
                print "MAB FAULT at (pos_id, cover_id):", pos_id, old_cover_id

            #
            count_pair += 1
            if count_pair % 100 == 0:
                print "count_pair =", count_pair

    print "check MMB/MAB - elapsed : ", (time.clock() - start_time)
def solve_new_queries(self, timestamp):
    expanding_list = {}     #dict of lists
    query_list = self.query_log.frames[timestamp]    # timestamp

    #0. reset
    self.reset()

    #1. compute expanding_list
    start_time = time.clock()
    for query in query_list:
        seg_list = self.map_data.compute_fixed_expanding(query.x, query.y, query.cur_edge_id, query.dist)    #old: option.DISTANCE_CONSTRAINT
        seg_list = EdgeSegmentSet.clean_fixed_expanding(seg_list)
        expanding_list[query.obj_id] = seg_list
    print "expanding_list - elapsed : ", (time.clock() - start_time)

    #2. compute mc_set
    # start_time = time.clock()
    # num_edges = 0
    # list_edges = []
    # for i in range(len(query_list)):
    #     for j in range(i+1, len(query_list)):
    #         if get_distance(query_list[i].x, query_list[i].y, query_list[j].x, query_list[j].y) > \
    #                 option.INIT_GRAPH_DISTANCE:
    #             continue
    #         if EdgeSegmentSet.is_set_cover(Point(query_list[i]), expanding_list[query_list[j].obj_id]) and \
    #                 EdgeSegmentSet.is_set_cover(Point(query_list[j]), expanding_list[query_list[i].obj_id]):
    #             num_edges += 1
    #             list_edges.append((query_list[i].obj_id, query_list[j].obj_id))
    # print "num_edges=", num_edges
    # print "list_edges OLD - elapsed : ", (time.clock() - start_time)

    start_time = time.clock()
    (num_edges, list_edges) = self.compute_edge_list(expanding_list, query_list)
    print "num_edges=", num_edges
    print "list_edges NEW - elapsed : ", (time.clock() - start_time)

    # write list_edges[] to file
    self.write_list_edges(list_edges)

    #
    start_time = time.clock()
    # (OLD)
    # graph.add_to_mc_set(list_edges)
    # (NEW)
    call([option.MACE_EXECUTABLE, "M", option.MAXIMAL_CLIQUE_FILE_IN, option.MAXIMAL_CLIQUE_FILE_OUT], shell=False)

    f = open(option.MAXIMAL_CLIQUE_FILE_OUT, "r")
    fstr = f.read()
    f.close()
    for line in fstr.split("\n"):
        node_list = line.split(" ")
        if len(node_list) < 2:
            continue
        self.mc_set.append(set([int(node) for node in node_list]))
    print len(self.mc_set)
    print "add_to_mc_set - elapsed : ", (time.clock() - start_time)
    # print "mc_set =", self.mc_set

    #3.
    start_time = time.clock()
    self.find_cloaking_sets(timestamp, expanding_list)
    print "find_cloaking_sets - elapsed : ", (time.clock() - start_time)

    #4. 'Set Cover Problem' (from weighted_set_cover.py)
    start_time = time.clock()
    num_element = max(query_list, key=lambda query: query.obj_id).obj_id + 1    # avoid out of range
    if timestamp == 0:
        self.cover_set, num_cloaked_users = find_init_cover(self.positive_mc_set, num_element)
        self.new_cover_set = self.cover_set    # for compute CLOAKING MESH
    else:
        self.new_cover_set, num_cloaked_users, checking_pairs = find_next_cover(self.positive_mc_set, num_element,
                                                                                self.cover_set, option.K_GLOBAL)
    print "Success rate =", float(num_cloaked_users) / len(query_list)
    print "compute cover_set - elapsed : ", (time.clock() - start_time)

    #5. compute CLOAKING MESH
    start_time = time.clock()
    total_mesh_length = 0
    total_query = 0

    self.new_cover_mesh = []    # NEW

    for clique_id in range(len(self.new_cover_set)):
        clique = self.new_cover_set[clique_id]
        #compute length of mesh
        mesh = []
        for obj_id in clique:
            mesh = EdgeSegmentSet.union(mesh, expanding_list[obj_id])

        self.new_cover_mesh.append(mesh)    #NEW

        total_mesh_length += EdgeSegmentSet.length(mesh)
        total_query += len(clique)

    average_mesh_query = total_mesh_length / total_query

    print "total_mesh_length =", total_mesh_length
    print "average_mesh_query =", average_mesh_query
    print "Compute CLOAKING MBR - elapsed : ", (time.clock() - start_time)
    # print "user_mesh = ", self.user_mesh

    #5.2 Check MMB/MAB
    # self.new_cover_mesh_mmb = self.compute_cover_mesh_mmb(self.new_cover_mesh, expanding_list)
    #
    # if timestamp > 0:
    #     start_time = time.clock()
    #
    #     self.check_MMB_MAB(checking_pairs, self.cover_mesh, self.cover_mesh_mmb, self.new_cover_mesh, self.new_cover_mesh_mmb)
    #
    #     print "check_MMB_MAB() - elapsed : ", (time.clock() - start_time)

    # UPDATE
    self.cover_set = self.new_cover_set
    self.cover_mesh = self.new_cover_mesh
    # self.cover_mesh_mmb = self.new_cover_mesh_mmb

    #6. compute user_mc_set (max clique for each obj_id), replace self.positive_mc_set by self.cover_set
    start_time = time.clock()
    self.user_mc_set = {}
    for clique_id in range(len(self.cover_set)):
        clique = self.cover_set[clique_id]
        for obj_id in clique:
            #
            if not self.user_mc_set.has_key(obj_id):
                self.user_mc_set[obj_id] = clique_id    #use id
            elif len(self.cover_set[self.user_mc_set[obj_id]]) < len(clique):
                self.user_mc_set[obj_id] = clique_id    #store the maximum
        #
        for obj_id in clique:
            if self.user_mc_set[obj_id] == clique_id:   #clique id comparison
                self.user_mesh[obj_id] = self.cover_mesh[clique_id]

    print "Compute user_mc_set - elapsed : ", (time.clock() - start_time)
    # print "user_mc_set = ", self.user_mc_set

    #7. publish MBRs (write to file)
    start_time = time.clock()
    self.write_results_to_files(timestamp)
    print "write_results_to_files - elapsed : ", (time.clock() - start_time)
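# --- Illustrative sketch (not part of the original file) ---------------------
# find_init_cover() / find_next_cover() used in step 4 above come from
# weighted_set_cover.py, whose source is not shown here.  The helper below is
# only a minimal greedy set-cover sketch of what such a routine could look like,
# assuming each clique is a set of obj_ids and the universe is 0..num_element-1;
# the real functions may weight cliques differently and return other data.
def greedy_cover_sketch(positive_mc_set, num_element):
    uncovered = set(range(num_element))
    cover_set = []
    num_cloaked_users = 0
    while uncovered and positive_mc_set:
        # greedily pick the clique that covers the most still-uncovered users
        best = max(positive_mc_set, key=lambda clique: len(clique & uncovered))
        gain = len(best & uncovered)
        if gain == 0:
            break   # no clique can cloak any of the remaining users
        cover_set.append(best)
        num_cloaked_users += gain
        uncovered -= best
    return cover_set, num_cloaked_users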
def find_cloaking_sets(self, timestamp, expanding_list):
    #1. find positive and negative cliques
    self.positive_mc_set = []
    negative_mc_set = []
    for clique in self.mc_set:
        if len(clique) == 1:
            continue
        #
        max_min_length = 0
        max_k_anom = 0
        query_list = []
        for obj_id in clique:
            query = self.query_log.trajs[obj_id][timestamp]
            query_list.append(query)
            if max_min_length < query.min_length:
                max_min_length = query.min_length
            if max_k_anom < query.k_anom:
                max_k_anom = query.k_anom

        #compute length of mesh
        mesh = []
        for obj_id in clique:
            # mesh = EdgeSegmentSet.union(mesh, expanding_list[obj_id])
            # NEW (trial)
            mesh.extend(expanding_list[obj_id])
        # NEW (trial)
        mesh = EdgeSegmentSet.clean_fixed_expanding(mesh)

        clique_len = EdgeSegmentSet.length(mesh)
        #
        if len(clique) >= max_k_anom and \
                clique_len >= max_min_length * self.map_data.total_map_len:
            self.positive_mc_set.append(clique)
        elif len(clique) > 2:
            negative_mc_set.append(clique)

    #2. convert negative cliques (heuristically)
    new_negative_mc_set = []
    for clique in negative_mc_set:
        query_list = []
        for obj_id in clique:
            query = self.query_log.trajs[obj_id][timestamp]
            query_list.append(query)
        #sort
        query_list = sorted(query_list, key=lambda query: query.k_anom)

        while True:
            query_list.pop()    #remove the last
            if len(query_list) == 0:
                break
            max_min_length = max(query_list, key=lambda query: query.min_length).min_length

            #compute length of mesh
            mesh = []
            for query in query_list:
                # mesh = EdgeSegmentSet.union(mesh, expanding_list[query.obj_id])
                # NEW (trial)
                mesh.extend(expanding_list[query.obj_id])
            # NEW (trial)
            mesh = EdgeSegmentSet.clean_fixed_expanding(mesh)

            clique_len = EdgeSegmentSet.length(mesh)
            #
            if len(query_list) >= query_list[-1].k_anom and \
                    clique_len >= max_min_length * self.map_data.total_map_len:
                break
        #
        if len(query_list) > 1:
            clique = set([query.obj_id for query in query_list])
            new_negative_mc_set.append(clique)

    #3.
    # print "positive_mc_set =", self.positive_mc_set
    # print "new_negative_mc_set =", new_negative_mc_set
    self.positive_mc_set.extend(new_negative_mc_set)
print "length(item_list) = ", len(result) #600 Elapsed 0.00164145542381 #700 Elapsed 0.00333834461428 #800 Elapsed 0.0090295446578 #900 Elapsed 0.013230192309 #1000 Elapsed 0.0228197206651 #1100 Elapsed 0.0399487545624 #1200 Elapsed 0.073717157418 #1300 Elapsed 0.131422168682 #1400 Elapsed 0.247271041862 #1500 Elapsed 0.45686200739 start = time.clock() result = EdgeSegmentSet.clean_fixed_expanding(result) elapsed = (time.clock() - start) print "Elapsed ", elapsed print "length(item_list) = ", len(result) # for item in result: # print "%15d %8.2f %10.2f %10.2f %10.2f %10.2f" % (item.cur_edge_id, EdgeSegment.length(item), \ # item.start_x, item.start_y, item.end_x, item.end_y) #TEST 3: EdgeSegmentSet.union() # result_1 = map_data.compute_fixed_expanding(8545.43, 16095.95, 98307178, 300) # result_1 = EdgeSegmentSet.clean_fixed_expanding(result_1) #
# timestamp = 0
expanding_list = {}     #dict of lists
query_list = query_log.frames[timestamp]    # timestamp

#1. compute expanding_list
start_time = time.clock()
print "#users =", len(query_list)
count = 0
for query in query_list:
    seg_list = map_data.compute_fixed_expanding(query.x, query.y, query.cur_edge_id, query.dist)    #old: option.DISTANCE_CONSTRAINT

    seg_list_length = 0.0
    for seg in seg_list:
        seg_list_length += EdgeSegment.length(seg)
    print "seg_list.size = %d - %f" % (len(seg_list), seg_list_length)

    seg_list = EdgeSegmentSet.clean_fixed_expanding(seg_list)
    print "AFTER seg_list.size =", len(seg_list)

    expanding_list[query.obj_id] = seg_list

    count += 1
    if count == 10:
        break

print "expanding_list - elapsed : ", (time.clock() - start_time)
map_data.read_map(option.MAP_PATH, option.MAP_FILE)
print "Load Map : DONE"

query_log = QueryLog(map_data)
query_log.read_query(option.QUERY_PATH, option.QUERY_FILE, max_time_stamp=5)
print "Load Query : DONE"
print "max_speed = ", query_log.max_speed
print "elapsed : ", (time.clock() - start_time)

#TEST
graph = Graph(0, map_data, query_log, None)
# graph.run_timestamps(0,2)
# print "graph.run_timestamps - DONE"

# PREPARE
edge_segment_set = EdgeSegmentSet()

ppservers = ()
job_server = pp.Server(ncpus=num_groups, ppservers=ppservers)
print "Starting pp with", job_server.get_ncpus(), "workers"

#LOOP
for timestamp in range(0, timestep + 1):
    print "--------->>"
    print "TIMESTAMP : ", timestamp
    print "START - Now: ", datetime.now()

    #0. reset
    graph.reset()
print "max_speed = ", query_log.max_speed print "elapsed : ", (time.clock() - start_time) print "size(map_data) =", sys.getsizeof(map_data) # expanding_list_1 = {} #dict of lists timestamp = 0 if len(sys.argv) > 1: timestamp = int(sys.argv[1]) print "timestamp =", timestamp query_list = query_log.frames[timestamp] # timestamp edge_segment_set = EdgeSegmentSet() #1. PARALLEL num_groups = 8 ppservers = () job_server = pp.Server(ncpus = num_groups, ppservers=ppservers) print "Starting pp with", job_server.get_ncpus(), "workers" start_time = time.clock() query_groups = [] num_queries = len(query_list) for i in range(num_groups-1): start = i*num_queries/num_groups end = (i+1)*num_queries/num_groups query_groups.append(query_list[start:end])
print "max_speed = ", query_log.max_speed print "elapsed : ", (time.clock() - start_time) print "size(map_data) =", sys.getsizeof(map_data) # expanding_list_1 = {} #dict of lists timestamp = 0 if len(sys.argv) > 1: timestamp = int(sys.argv[1]) print "timestamp =", timestamp query_list = query_log.frames[timestamp] # timestamp edge_segment_set = EdgeSegmentSet() #1. PARALLEL num_groups = 8 ppservers = () job_server = pp.Server(ncpus=num_groups, ppservers=ppservers) print "Starting pp with", job_server.get_ncpus(), "workers" start_time = time.clock() query_groups = [] num_queries = len(query_list) for i in range(num_groups - 1): start = i * num_queries / num_groups end = (i + 1) * num_queries / num_groups query_groups.append(query_list[start:end])
start_time = time.clock()

timestamp = 0
if len(sys.argv) > 1:
    timestamp = int(sys.argv[1])
print "timestamp =", timestamp

# query_list = query_log.frames[timestamp]    # timestamp
#
# trajs = {}
# for query in query_list:
#     trajs[query.obj_id] = query_log.trajs[query.obj_id][timestamp]    #Deep-Copy or NOT
# print "prepare trajs - elapsed : ", (time.clock() - start_time)
# print "size(trajs) =", sys.getsizeof(trajs)

edge_segment_set = EdgeSegmentSet()

# load expanding_list
# start_time = time.clock()
# f = open(option.RESULT_PATH + "expanding_list.out", "r")
# expanding_list = cPickle.load(f)
# print "Load expanding_list: DONE! - elapsed : ", (time.clock() - start_time)
# print "size(expanding_list) =", sys.getsizeof(expanding_list)

# mem-file mapping
trajs = edge_segment_set.parse_query_log(timestamp)
expanding_list = edge_segment_set.parse_expanding_list()

# load mc_set
def solve_new_queries(self, timestamp):
    start_time = time.clock()

    # mmb_list = []         #list of lists
    # mmb_list_index = []   #list of sets
    query_list = self.query_log.frames[timestamp]

    #1. compute SC, SD, SN, (SF)
    set_F = set([])
    new_user_set = set([])
    for query in query_list:
        new_user_set = new_user_set | set([query.obj_id])

    set_C = self.user_set & new_user_set
    set_D = self.user_set - new_user_set
    set_N = new_user_set - self.user_set
    print "set_D", set_D
    print "set_N", set_N

    #2. process set_D (disappear)
    self.remove_from_mc_set(set_D)

    #3. compute_mesh_expanding (MAX_SPEED)
    for clique in self.mc_set:
        if len(clique) < option.K_ANONYMITY:
            continue
        node = list(clique)[0]
        #print "debug", clique, node
        seg_list = self.map_data.compute_mesh_expanding(self.user_mesh[node], option.MAX_SPEED)
        self.mmb_mesh[node] = seg_list
        for node_2 in clique:
            if node_2 != node and self.user_mc_set[node_2] == self.user_mc_set[node]:
                self.mmb_mesh[node_2] = seg_list

    #3.1
    num_edges = 0
    list_edges = []
    for node_1 in set_C:
        for node_2 in set_C:
            if node_1 != node_2 and self.mmb_mesh.has_key(node_1) and self.mmb_mesh.has_key(node_2):
                point_1 = Point(self.query_log.trajs[node_1][timestamp])
                point_2 = Point(self.query_log.trajs[node_2][timestamp])
                if EdgeSegmentSet.is_set_cover(point_1, self.mmb_mesh[node_2]) and \
                        EdgeSegmentSet.is_set_cover(point_2, self.mmb_mesh[node_1]):
                    num_edges += 1
                    list_edges.append((node_1, node_2))
    print "num_edges (set_C) = ", num_edges
    graph.add_to_mc_set(list_edges)

    #3.2
    num_edges = 0
    list_edges = []
    for node_1 in set_C:
        for node_2 in set_N:
            point_2 = Point(self.query_log.trajs[node_2][timestamp])
            if self.mmb_mesh.has_key(node_1) and EdgeSegmentSet.is_set_cover(point_2, self.mmb_mesh[node_1]):
                num_edges += 1
                list_edges.append((node_1, node_2))
    print "num_edges (set_N) = ", num_edges
    graph.add_to_mc_set(list_edges)

    #4. update self.user_mc_set, self.user_mesh, self.user_set
    #4.1 compute user_mc_set, max clique for each obj_id
    self.user_mc_set = {}
    for clique in self.mc_set:
        for obj_id in clique:
            if not self.user_mc_set.has_key(obj_id):
                self.user_mc_set[obj_id] = clique
            elif len(self.user_mc_set[obj_id]) < len(clique):
                self.user_mc_set[obj_id] = clique
    print "Compute user_mc_set: DONE"

    #4.2 compute MMBs (CLOAKING MESHES) and self.user_set
    max_mesh_len = 0
    min_mesh_len = 1000000
    self.user_mesh = {}
    for clique in self.mc_set:
        if len(clique) >= option.K_ANONYMITY:
            locations = []
            for node in clique:
                locations.append(self.query_log.trajs[node][timestamp])
            mesh = self.map_data.compute_mesh_mbr(locations)    #TEMPORARILY !!
            #
            temp_len = EdgeSegmentSet.length(mesh)
            if max_mesh_len < temp_len:
                max_mesh_len = temp_len
            if min_mesh_len > temp_len:
                min_mesh_len = temp_len
            # assign mesh to all obj_id in 'clique'
            for obj_id in clique:
                self.user_mesh[obj_id] = mesh
            #
            for obj_id in clique:
                self.user_set = self.user_set | set([obj_id])

    print "Compute CLOAKING MESH: DONE"
    print "max_mesh_len =", max_mesh_len
    print "min_mesh_len =", min_mesh_len

    print "elapsed : ", (time.clock() - start_time)
# result_1 = map_data.compute_fixed_expanding(8545.43, 16095.95, 98307178, option.MAX_SPEED)
# result_1 = EdgeSegmentSet.clean_fixed_expanding(result_1)
#
# print Map.check_connected_expanding(result_1)
#
# #TEST 6:
# result_1 = map_data.compute_fixed_expanding(8545.43, 16095.95, 98307178, option.MAX_SPEED)
# result_1 = EdgeSegmentSet.clean_fixed_expanding(result_1)
#
# print Map.find_boundary_points(result_1)

#TEST 7:
start = time.clock()
result_1 = map_data.compute_fixed_expanding(8545.43, 16095.95, 98307178, 1000)
result_1 = EdgeSegmentSet.clean_fixed_expanding(result_1)
print "Elapsed ", (time.clock() - start)
print "result_1 total_len=", EdgeSegmentSet.length(result_1)
print "length(item_list) = ", len(result_1)

# for item in result_1:
#     print "%15d %8.2f %10.2f %10.2f %10.2f %10.2f" % (item.cur_edge_id, EdgeSegment.length(item), \
#         item.start_x, item.start_y, item.end_x, item.end_y)

start = time.clock()
result_1 = map_data.compute_mesh_expanding(result_1, option.MAX_SPEED)
print "Elapsed ", (time.clock() - start)
print "length(item_list) = ", len(result_1)

# for item in result_1:
def solve_new_queries(self, timestamp):
    start_time = time.clock()

    expanding_list = {}     #dict of lists
    query_list = self.query_log.frames[timestamp]    # timestamp

    min_obj_id = option.MAX_USER
    max_obj_id = 0

    #1. compute expanding_list
    for query in query_list:
        if max_obj_id < query.obj_id:
            max_obj_id = query.obj_id
        if min_obj_id > query.obj_id:
            min_obj_id = query.obj_id
        #
        seg_list = self.map_data.compute_fixed_expanding(query.x, query.y, query.cur_edge_id, option.INIT_DISTANCE)
        seg_list = EdgeSegmentSet.clean_fixed_expanding(seg_list)
        expanding_list[query.obj_id] = seg_list

    #2. init self.mc_set
    self.reset()
    for query in query_list:
        self.mc_set.append(set([query.obj_id]))

    #3. compute mc_set
    start_time = time.clock()
    num_edges = 0
    list_edges = []
    for i in range(len(query_list)):
        for j in range(i + 1, len(query_list)):
            if get_distance(query_list[i].x, query_list[i].y, query_list[j].x, query_list[j].y) > \
                    option.INIT_DISTANCE:
                continue
            if EdgeSegmentSet.is_set_cover(Point(query_list[i]), expanding_list[query_list[j].obj_id]) and \
                    EdgeSegmentSet.is_set_cover(Point(query_list[j]), expanding_list[query_list[i].obj_id]):
                num_edges += 1
                list_edges.append((query_list[i].obj_id, query_list[j].obj_id))

    print "num_edges=", num_edges
    print "list_edges - elapsed : ", (time.clock() - start_time)

    start_time = time.clock()
    graph.add_to_mc_set(list_edges)
    print "add_to_mc_set - elapsed : ", (time.clock() - start_time)
    print "mc_set =", self.mc_set

    #4. compute user_mc_set, max clique for each obj_id
    start_time = time.clock()
    self.user_mc_set = {}
    for clique in self.mc_set:
        if len(clique) >= option.K_ANONYMITY:
            for obj_id in clique:
                if not self.user_mc_set.has_key(obj_id):
                    self.user_mc_set[obj_id] = clique
                elif len(self.user_mc_set[obj_id]) < len(clique):
                    self.user_mc_set[obj_id] = clique
    print "Compute user_mc_set - elapsed : ", (time.clock() - start_time)
    print "user_mc_set = ", self.user_mc_set

    #5. compute MMBs (CLOAKING MESHES) and self.user_set
    start_time = time.clock()
    max_mesh_len = 0
    min_mesh_len = 1000000
    for clique in self.mc_set:
        if len(clique) >= option.K_ANONYMITY:
            mesh = []
            for obj_id in clique:
                mesh = EdgeSegmentSet.union(mesh, expanding_list[obj_id])
            #
            temp_len = EdgeSegmentSet.length(mesh)
            if max_mesh_len < temp_len:
                max_mesh_len = temp_len
            if min_mesh_len > temp_len:
                min_mesh_len = temp_len
            # assign mesh to all obj_id in 'clique'
            for obj_id in clique:
                self.user_mesh[obj_id] = mesh
            #
            for obj_id in clique:
                self.user_set = self.user_set | set([obj_id])

    #print "self.user_set = ", self.user_set
    print "Compute CLOAKING MESH - elapsed : ", (time.clock() - start_time)
    print "max_mesh_len =", max_mesh_len
    print "min_mesh_len =", min_mesh_len