def main():
    """Enumerate candidate graphs, run inference, and persist the posterior.

    Side effects: writes an .npz file under results/ and prints elapsed time.
    """
    start = time.time()

    # Build the hypothesis space and attach node/edge semantics in place.
    graphs = list(generate_graphs(**config.generator_dictionary))
    for graph in graphs:
        Node_Name_Rule.graph_semantics_apply(graph, config.node_semantics)
        Edge_Semantics_Rule.graph_semantics_apply(graph, config.edge_semantics)

    # Posterior over graphs given the configured data.
    inference_obj = Inference()
    (result_graphs, result_posterior,
     result_loglik, result_dict) = inference_obj.p_graph_given_d(
        graphs, config.options)

    # Accumulate posterior mass on each edge of interest across all graphs
    # that contain it.  NOTE(review): mutates result_config.edges_of_interest
    # in place — confirm that is intended.
    edges_of_interest = result_config.edges_of_interest
    for idx, g in enumerate(result_graphs):
        for edge in edges_of_interest:
            if edge in g.edges():
                edges_of_interest[edge] += result_posterior[idx]

    # Persist results under results/<unique filename>.
    out_path = os.path.join("results", filename_utility("hidden_structure_results"))
    with open(out_path, 'wb') as f:
        np.savez(f, posterior=result_posterior, loglik=result_loglik,
                 init_dict=result_dict)

    print(time.time() - start)
def main():
    """Run graph inference once over the full hypothesis space and save output.

    Writes posterior/loglik/init_dict to an .npz in results/ and prints the
    wall-clock runtime.
    """
    t_begin = time.time()

    # Generate the candidate graphs, then decorate each with semantics.
    candidate_graphs = list(generate_graphs(**config.generator_dictionary))
    for g in candidate_graphs:
        Node_Name_Rule.graph_semantics_apply(g, config.node_semantics)
        Edge_Semantics_Rule.graph_semantics_apply(g, config.edge_semantics)

    engine = Inference()
    res_graphs, res_posterior, res_loglik, res_dict = engine.p_graph_given_d(
        candidate_graphs, config.options)

    # Sum posterior probability over graphs containing each tracked edge.
    # NOTE(review): this mutates result_config.edges_of_interest in place.
    edges_of_interest = result_config.edges_of_interest
    for idx, graph in enumerate(res_graphs):
        for edge in edges_of_interest:
            if edge in graph.edges():
                edges_of_interest[edge] += res_posterior[idx]

    target = os.path.join("results", filename_utility("hidden_structure_results"))
    with open(target, 'wb') as out:
        np.savez(out,
                 posterior=res_posterior,
                 loglik=res_loglik,
                 init_dict=res_dict)

    print(time.time() - t_begin)
def main():
    """Run inference independently for each experimental condition and save all results.

    Builds the graph hypothesis space once, runs ``Inference.p_graph_given_d``
    once per condition with that condition's data sets, then serializes
    everything to a single .npz under results/ and prints elapsed time.

    Bug fix: the original built ``options`` as
    ``[config.options] * num_conditions``, which puts N references to the
    SAME dict in the list — each iteration of the setup loop overwrote the
    previous condition's ``"data_sets"``, so every condition silently ran
    with the last condition's data.  Each condition now gets its own copy.
    """
    t1 = time.time()

    graph_iter = generate_graphs(**config.generator_dictionary)
    graphs = list(graph_iter)
    for graph in graphs:
        Node_Name_Rule.graph_semantics_apply(graph, config.node_semantics)
        Edge_Semantics_Rule.graph_semantics_apply(graph, config.edge_semantics)

    num_conditions = 3
    # One independent options dict per condition.  A shallow copy suffices:
    # only top-level keys ("data_sets", "parallel") are reassigned below.
    options = [dict(config.options) for _ in range(num_conditions)]
    for i in range(num_conditions):
        options[i]["data_sets"] = cond_to_data(config.lesser_conds[i, :])
        options[i]["parallel"] = True

    result_graphs = [None] * num_conditions
    result_posteriors = [None] * num_conditions
    result_logliks = [None] * num_conditions
    result_dicts = [None] * num_conditions
    result_params = [None] * num_conditions

    inference_obj = Inference()
    for i in range(num_conditions):
        (result_graphs[i], result_posteriors[i], result_logliks[i],
         result_dicts[i], result_params[i]) = inference_obj.p_graph_given_d(
            graphs, options[i])

    # Graphs are serialized to JSON strings so np.savez can store them.
    result_graphs_strings = [
        json_graph_list_dumps(g_list) for g_list in result_graphs
    ]

    filename = os.path.join("results",
                            filename_utility("hidden_structure_results"))
    with open(filename, 'wb') as f:
        np.savez(f,
                 g_list_strings=result_graphs_strings,
                 posterior=result_posteriors,
                 loglik=result_logliks,
                 init_dict=result_dicts,
                 params=result_params)

    elapsed = time.time() - t1
    print(elapsed)
def main():
    """Per-condition graph inference driver; saves all results to one .npz.

    Bug fix: ``options = [config.options] * num_conditions`` replicates a
    reference to a single dict, so the per-condition assignments to
    ``options[i]["data_sets"]`` all hit the same object and the final
    condition's data overwrote the rest.  Replaced with one independent
    (shallow) copy per condition — only top-level keys are reassigned, so
    a shallow copy is sufficient.
    """
    t1 = time.time()

    graphs = list(generate_graphs(**config.generator_dictionary))
    for graph in graphs:
        Node_Name_Rule.graph_semantics_apply(graph, config.node_semantics)
        Edge_Semantics_Rule.graph_semantics_apply(graph, config.edge_semantics)

    num_conditions = 3
    options = [dict(config.options) for _ in range(num_conditions)]
    for i in range(num_conditions):
        options[i]["data_sets"] = cond_to_data(config.lesser_conds[i, :])
        options[i]["parallel"] = True

    result_graphs = [None] * num_conditions
    result_posteriors = [None] * num_conditions
    result_logliks = [None] * num_conditions
    result_dicts = [None] * num_conditions
    result_params = [None] * num_conditions

    inference_obj = Inference()
    for i in range(num_conditions):
        (result_graphs[i], result_posteriors[i], result_logliks[i],
         result_dicts[i], result_params[i]) = inference_obj.p_graph_given_d(
            graphs, options[i])

    # Serialize graph lists to JSON strings so they survive np.savez.
    result_graphs_strings = [json_graph_list_dumps(g_list)
                             for g_list in result_graphs]

    filename = os.path.join("results",
                            filename_utility("hidden_structure_results"))
    with open(filename, 'wb') as f:
        np.savez(f,
                 g_list_strings=result_graphs_strings,
                 posterior=result_posteriors,
                 loglik=result_logliks,
                 init_dict=result_dicts,
                 params=result_params)

    elapsed = time.time() - t1
    print(elapsed)