def test_read_weighted_network(self):
    """Round-trip test for network file I/O.

    Writing a network to disk and reading it back must preserve the
    (rounded) edge weights, for both a multiplex network and a
    clustered multilayer network.
    """
    temp_filename = 'test_for_network_reading_WILL_BE_REMOVED.txt'

    def _assert_roundtrip(M):
        # Write M to disk, read it back, and compare rounded edge
        # weights; the temp file is removed even if the comparison fails.
        network_io.write_weighted_network(
            M, temp_filename, 'Created by test_read_weighted_network')
        M_read = network_io.read_weighted_network(temp_filename)
        try:
            self.assertEqual(self.round_edge_weights(M),
                             self.round_edge_weights(M_read))
        finally:
            os.remove(temp_filename)

    # Case 1: multiplex network.
    _assert_roundtrip(network_construction.make_multiplex(
        self.imgdata, timewindow=7, overlap=2))
    # Case 2: clustered multilayer network.
    _assert_roundtrip(network_construction.make_clustered_multilayer(
        self.imgdata, timewindow=7, overlap=2, n_clusters=3))
def null_model_subgraphs_from_network_file(filename, intralayer_density, interlayer_density, null_model_function, nnodes, nlayers, number_of_repeats):
    """Run null-model subgraph sampling on a network loaded from file.

    The network is read from ``filename``, thresholded to the requested
    intra- and interlayer densities, and then passed to
    ``null_model_subgraphs_from_network``; its result is returned as-is.
    """
    network = network_io.read_weighted_network(filename)
    thresholded = network_construction.threshold_multilayer_network(
        network, intralayer_density, interlayer_density)
    return null_model_subgraphs_from_network(
        thresholded, null_model_function, nnodes, nlayers, number_of_repeats)
def isomorphism_classes_from_existing_network_files(network_folder,
                                                    subnets_savefolder,
                                                    subgraph_size_dict,
                                                    allowed_aspects=[0],
                                                    intralayer_density=0.05,
                                                    interlayer_density=0.05):
    '''
    Find isomorphism classes from previously constructed and saved networks.
    Subgraph size dict can only contain one n_layers because saved nets have
    fixed n_layers. Aggregated results and examples dict only.

    Parameters
    ----------
    network_folder : str
        Folder containing saved network files (assumed to end with a path
        separator, since paths are built by concatenation).
    subnets_savefolder : str
        Folder where result pickles are written (same separator assumption).
    subgraph_size_dict : dict
        Maps n_layers -> iterable of n_nodes values to enumerate.
    allowed_aspects : list
        Passed through to find_isomorphism_classes. NOTE: mutable default
        kept for interface compatibility; it is never mutated here.
    intralayer_density, interlayer_density : float
        Thresholding densities applied to each loaded network.

    Writes one '<n>_<l>_agg.pickle' and one 'examples_<n>_<l>.pickle' per
    (n_nodes, n_layers) pair into subnets_savefolder.
    '''
    # Filenames are expected to be underscore-separated integers; sort them
    # numerically rather than lexicographically.
    sorted_filenames = sorted(os.listdir(network_folder),
                              key=lambda s: [int(l) for l in s.split('_')])
    aggregated_dicts_dict = dict()
    examples_dicts_dict = dict()
    # One accumulator pair per requested (n_nodes, n_layers) combination.
    for n_layers in subgraph_size_dict:
        for n_nodes in subgraph_size_dict[n_layers]:
            aggregated_dicts_dict[(n_nodes, n_layers)] = collections.defaultdict(dict)
            examples_dicts_dict[(n_nodes, n_layers)] = dict()
    for filename in sorted_filenames:
        full_path = network_folder + filename
        M = network_io.read_weighted_network(full_path)
        M = network_construction.threshold_multilayer_network(
            M, intralayer_density, interlayer_density)
        for nlayers in subgraph_size_dict:
            for nnodes in subgraph_size_dict[nlayers]:
                # Results accumulate into the shared dicts; the sentinel
                # filename signals that no per-file output should be written.
                subgraph_classification.find_isomorphism_classes(
                    M, nnodes, nlayers,
                    filename='this_file_should_not_exist',
                    allowed_aspects=allowed_aspects,
                    aggregated_dict=aggregated_dicts_dict[(nnodes, nlayers)],
                    examples_dict=examples_dicts_dict[(nnodes, nlayers)])
    for n_layers in subgraph_size_dict:
        for n_nodes in subgraph_size_dict[n_layers]:
            aggregated_dict_filename = (subnets_savefolder + str(n_nodes)
                                        + '_' + str(n_layers) + '_agg.pickle')
            # BUG FIX: pickle writes bytes, so the file must be opened in
            # binary mode ('wb'); text mode 'w' raises TypeError on Python 3.
            # 'with' also guarantees the file is closed if dump() fails.
            with open(aggregated_dict_filename, 'wb') as f:
                pickle.dump(aggregated_dicts_dict[(n_nodes, n_layers)], f)
            # Free each accumulator as soon as it has been persisted.
            del aggregated_dicts_dict[(n_nodes, n_layers)]
            examples_dict_filename = (subnets_savefolder + 'examples_'
                                      + str(n_nodes) + '_'
                                      + str(n_layers) + '.pickle')
            with open(examples_dict_filename, 'wb') as f:
                pickle.dump(examples_dicts_dict[(n_nodes, n_layers)], f)
            del examples_dicts_dict[(n_nodes, n_layers)]