# ---- IMDBMULTI: summarize 10-fold results (accuracy / time) for AuGBert ----
import numpy as np  # used below; not imported anywhere in the visible source

strategy = 'isolated_segment'
dataset_name = 'IMDBMULTI'
residual_type = 'none'

if 1:
    epoch_number = 500
    result_obj = ResultSaving('', '')
    result_obj.result_destination_folder_path = './result/AuGBert/' + strategy + '/' + dataset_name + '/'

    result_list = []
    time_list = []
    # 10-fold cross validation: result files are numbered by fold 1..10.
    for fold in range(1, 11):
        result_obj.result_destination_file_name = dataset_name + '_' + str(
            fold) + '_' + str(epoch_number) + '_' + residual_type + '_' + strategy
        loaded_result = result_obj.load()
        # total recorded training time across all epochs of this fold
        time_list.append(
            sum([loaded_result[epoch]['time'] for epoch in loaded_result]))
        # best test accuracy reached over all epochs of this fold
        result_list.append(
            np.max([loaded_result[epoch]['acc_test'] for epoch in loaded_result]))

    # Raw strings so the LaTeX '\pm' is not parsed as a (invalid) string
    # escape sequence; the printed text is unchanged.
    print(r'accuracy: {:.2f}$\pm${:.2f}'.format(100 * np.mean(result_list),
                                                100 * np.std(result_list)))
    print(r'time: {:.2f}$\pm${:.2f}'.format(np.mean(time_list),
                                            np.std(time_list)))

# ---- PROTEINS / padding_pruning section (disabled) ----
dataset_name = 'PROTEINS'
strategy = 'padding_pruning'
if 0:
    epoch_number = 500
    # NOTE(review): the source chunk is cut off here; the remainder of this
    # disabled section is not visible in this view.
import matplotlib.pyplot as plt
from code.ResultSaving import ResultSaving
# NOTE(review): EvaluateClustering is used below but never imported in this
# chunk — presumably imported elsewhere in the original file; confirm.

# ---------- clustering results evaluation -----------------
dataset_name = 'pubmed'
if 0:
    pre_train_task = 'node_reconstruction+structure_recovery'

    result_obj = ResultSaving('', '')
    result_obj.result_destination_folder_path = './result/GraphBert/'
    result_obj.result_destination_file_name = 'clustering_' + dataset_name + '_' + pre_train_task
    loaded_result = result_obj.load()

    eval_obj = EvaluateClustering()
    eval_obj.data = loaded_result
    eval_result = eval_obj.evaluate()
    print(eval_result)

# --------------- Graph Bert Pre-Training Records Convergence --------------
dataset_name = 'cora'
if 0:
    # per-dataset subgraph size k
    if dataset_name == 'cora':
        k = 7
    elif dataset_name == 'citeseer':
        k = 5
    elif dataset_name == 'pubmed':
        # NOTE(review): source truncated here — the value assigned for
        # 'pubmed' is not visible in this chunk. This branch is dead code
        # (guarded by `if 0:`), so the placeholder does not change behavior.
        pass