def main():
    """Compare each day's centrality top list against the previous day's and
    write one result line per day.

    Command line arguments:
        sys.argv[1]  centrality_data_folder -- folder holding the interval JSON
                     and a ``centrality_scores`` subfolder
        sys.argv[2]  input_file_prefix      -- per-day score file prefix
        sys.argv[3]  output_file            -- result file to (over)write
        sys.argv[4]  metric                 -- correlation metric name passed
                                              through to compute()
    """
    centrality_data_folder = sys.argv[1]
    input_file_prefix = sys.argv[2]
    output_file = sys.argv[3]
    metric = sys.argv[4]
    intervals = ccfcr.load_json(centrality_data_folder)
    day = 0
    top_list_prev = []
    top_list = []
    ret_sort = []
    # Start at 1 so the first day's deleted-node ratio cannot divide by zero.
    num_nodes = 1
    # 'with' guarantees the output file is flushed and closed even on error
    # (the original opened it and never called close()).
    with open(output_file, 'w') as out_file:
        for inter in intervals["centrality_test"]["intervals"]:
            print("[ day = " + str(day) + " ]")
            if inter["interval"]["graph_stat"]["num_nodes"] != 0:
                top_list, ret_sort = pre_proc(centrality_data_folder + "/centrality_scores", input_file_prefix, day)
                num_prev_nodes = num_nodes
                num_nodes = inter["interval"]["graph_stat"]["num_nodes"]
            else:
                # Empty graph this day: emit a placeholder record and move on.
                out_file.write(str(day) + " - - 0 -1.0 -1.0\n")
                day += 1
                continue
            if day != 0:
                centralities = [str(day)]
                # compute() returns the correlation value(s) between the
                # previous day's and the current day's top lists.
                centralities += compute(top_list_prev, top_list, ret_sort, metric)
                num_new_nodes = inter["interval"]["graph_stat"]["new_nodes"]
                num_deleted_nodes = inter["interval"]["graph_stat"]["deleted_nodes"]
                centralities.append(num_nodes)
                # Churn ratios: new nodes relative to today, deletions
                # relative to yesterday's node count.
                centralities.append(float(num_new_nodes) / num_nodes)
                centralities.append(float(num_deleted_nodes) / num_prev_nodes)
                ccfcr.write_out(out_file, centralities)
            day += 1
            top_list_prev = top_list
def main():
    """Pre-process per-day centrality score files and write baseline rankings.

    Command line arguments:
        sys.argv[1]  centrality_data_folder -- folder holding the interval JSON
                     and a ``centrality_scores`` subfolder
        sys.argv[2]  input_file_prefix      -- per-day score file prefix
        sys.argv[3]  lookback               -- warm-up window length in days
        sys.argv[4]  baseline_type          -- 1 or 2, selects the baseline fn
        sys.argv[5]  output_folder          -- created if it does not exist
    """
    centrality_data_folder = sys.argv[1]
    input_file_prefix = sys.argv[2]
    lookback = int(sys.argv[3])
    baseline_type = int(sys.argv[4])
    output_folder = sys.argv[5]
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    intervals = ccfcr.load_json(centrality_data_folder)
    day = 0
    day_data_maps = []
    # processed_indices[day] is the index into the evaluated maps, or -1 when
    # the day is empty or still inside the lookback warm-up window.
    processed_indices = []
    index_counter = 0
    for inter in intervals["centrality_test"]["intervals"]:
        # NOTE: prints normalized to the function-call form used elsewhere in
        # this file, so the script also runs under Python 3.
        print("[preproc day = " + str(day) + " ]")
        if inter["interval"]["graph_stat"]["num_nodes"] != 0:
            day_data_maps.append(pre_proc(centrality_data_folder + "/centrality_scores", input_file_prefix, day))
            if day >= lookback:
                processed_indices.append(index_counter)
                index_counter += 1
            else:
                processed_indices.append(-1)
        else:
            print("empty day")
            processed_indices.append(-1)
        day += 1
    print("evaluating baseline STARTED")
    # Dispatch on the requested baseline scoring function.
    if baseline_type == 1:
        processed_day_maps = eval_and_sort_days(day_data_maps, lookback, baseline_1)
    elif baseline_type == 2:
        processed_day_maps = eval_and_sort_days(day_data_maps, lookback, baseline_2)
    else:
        print("ERROR: baseline_type must be 1 or 2!")
        return
    # Write one output file per processed (non-skipped) day.
    for i, idx in enumerate(processed_indices):
        if idx == -1:
            continue
        write_out(output_folder, input_file_prefix, baseline_type, processed_day_maps[idx], i)
    print("evaluating baseline FINISHED")
import correlation_computer_for_centrality_ranking as ccfcr import correlation_computer_for_position_ranking as ccfpr argc = len(sys.argv) if argc == 10: baseline_folder = sys.argv[1] # for baseline centrality_folder = sys.argv[2] # for origi file_prefix = sys.argv[3] baseline_type = sys.argv[4] from_interval = int(sys.argv[5]) to_interval = int(sys.argv[6]) rank_type = sys.argv[7] metric = sys.argv[8] output_file = sys.argv[9] intervals = ccfcr.load_json(centrality_folder) out_file = open(output_file, 'w') top_list_other = [] top_list = [] ret_sort = [] intervals_list = intervals["centrality_test"]["intervals"] for day in range(from_interval, to_interval+1): print("[ day = " + str(day) +" ]") if intervals_list[day]["interval"]["graph_stat"]["num_nodes"] != 0: if rank_type == "centrality": top_list_other, ret_sort_other = ccfcr.pre_proc(baseline_folder, file_prefix + "_baseline" + baseline_type, day) # for baseline top_list, ret_sort = ccfcr.pre_proc(centrality_folder + "/centrality_scores", file_prefix, day) # for origi elif rank_type == "pos": top_list_other, ret_sort_other = ccfpr.pre_proc(baseline_folder, file_prefix + "_baseline" + baseline_type, day) # for baseline top_list, ret_sort = ccfpr.pre_proc(centrality_folder + "/centrality_scores", file_prefix, day) # for origi else: