def read_neighbors(comm, filepath, iosize, node_ranks):
    """Collect, per vertex, the source gids of its incoming edges ('src')
    and of its outgoing edges ('dst').

    Reads the graph twice via scatter_read_graph: once with map_type=0
    (edges mapped by destination) and once with map_type=1 (edges mapped
    by source).  Returns a defaultdict mapping gid -> {'src': array('L'),
    'dst': array('L')}.
    """
    def _empty_entry():
        return {'src': array('L'), 'dst': array('L')}

    result = defaultdict(_empty_entry)

    # The two passes are symmetric: only the map_type and the key under
    # which the neighbor gids are accumulated differ.
    for key, map_type in (('src', 0), ('dst', 1)):
        (graph, _) = scatter_read_graph(filepath, io_size=iosize,
                                        map_type=map_type,
                                        node_rank_map=node_ranks,
                                        comm=comm)
        for projection in graph.values():
            for edge_iter in projection.values():
                for (gid, edges) in edge_iter:
                    # edges[0] holds the gids on the other end of each edge
                    result[gid][key].extend(edges[0])

    return result
from mpi4py import MPI
from neuroh5.io import scatter_read_graph

comm = MPI.COMM_WORLD

# Smoke test: read the test graph scattered across the ranks of
# COMM_WORLD and dump this rank's share of it.
(g, a) = scatter_read_graph("data/dentate_test.h5")
print(g)
def connectcells(env, gid_list):
    """Instantiate synaptic connections onto the cells in gid_list.

    For every projection found in the connectivity file whose postsynaptic
    population matches, reads per-cell synapse attributes (and optional
    synaptic weights) from the forest file, creates the point processes via
    synapses.mksyns, and appends NetCons with h.nc_appendsyn /
    h.nc_appendsyn_wgtvector.

    Modernized from Python 2 (print statements, iteritems/has_key/izip)
    to Python 3; behavior otherwise unchanged.

    :param env: simulation environment (paths, MPI comm, ParallelContext,
        cell type and connection generator configuration)
    :param gid_list: gids of the postsynaptic cells on this rank
    """
    datasetPath = os.path.join(env.datasetPrefix, env.datasetName)
    connectivityFilePath = os.path.join(datasetPath, env.modelConfig['Connection Data'])
    forestFilePath = os.path.join(datasetPath, env.modelConfig['Cell Data'])

    if env.verbose and env.pc.id() == 0:
        print('*** Connectivity file path is %s' % connectivityFilePath)

    # Group projections by postsynaptic population: dst -> [src, ...]
    prj_dict = defaultdict(list)
    for (src, dst) in read_projection_names(env.comm, connectivityFilePath):
        prj_dict[dst].append(src)

    if env.verbose and env.pc.id() == 0:
        print('*** Reading projections: ', list(prj_dict.items()))

    for (postsyn_name, presyn_names) in prj_dict.items():
        synapse_config = env.celltypes[postsyn_name]['synapses']

        # Optional per-population configuration flags (dict.get replaces
        # the original has_key/else chains; defaults unchanged).
        spines = synapse_config.get('spines', False)
        unique = synapse_config.get('unique', False)
        has_weights = synapse_config.get('weights', False)
        weights_namespace = synapse_config.get('weights namespace', 'Weights')

        if env.verbose and int(env.pc.id()) == 0:
            print('*** Reading synapse attributes of population %s' % (postsyn_name))

        gid_index_synapses_map = get_cell_attributes_index_map(env.comm, forestFilePath,
                                                               'GC', 'Synapse Attributes')
        if 'weights namespace' in synapse_config:
            gid_index_weights_map = get_cell_attributes_index_map(env.comm, forestFilePath,
                                                                  'GC', weights_namespace)

        cell_synapses_dict, cell_weights_dict = {}, {}
        for gid in gid_list:
            cell_attributes_dict = select_cell_attributes(gid, env.comm, forestFilePath,
                                                          gid_index_synapses_map,
                                                          'GC', 'Synapse Attributes')
            cell_synapses_dict[gid] = {k: v for (k, v) in cell_attributes_dict['Synapse Attributes']}
            if has_weights:
                # NOTE(review): gid_index_weights_map is computed above but never
                # used; this call probably intended it instead of
                # gid_index_synapses_map -- confirm before changing.
                cell_attributes_dict.update(get_cell_attributes_by_gid(gid, env.comm, forestFilePath,
                                                                       gid_index_synapses_map,
                                                                       'GC', weights_namespace))
                cell_weights_dict[gid] = {k: v for (k, v) in cell_attributes_dict[weights_namespace]}
                if env.verbose and env.pc.id() == 0:
                    print('*** Found synaptic weights for population %s' % (postsyn_name))
            else:
                has_weights = False
                cell_weights_dict[gid] = None
            del cell_attributes_dict

        for presyn_name in presyn_names:
            edge_count = 0
            if env.verbose and env.pc.id() == 0:
                print('*** Connecting %s -> %s' % (presyn_name, postsyn_name))

            # Read only this projection's edges; use the node rank map when
            # one is configured.
            if env.nodeRanks is None:
                (graph, a) = scatter_read_graph(env.comm, connectivityFilePath,
                                                io_size=env.IOsize,
                                                projections=[(presyn_name, postsyn_name)],
                                                namespaces=['Synapses', 'Connections'])
            else:
                (graph, a) = scatter_read_graph(env.comm, connectivityFilePath,
                                                io_size=env.IOsize,
                                                node_rank_map=env.nodeRanks,
                                                projections=[(presyn_name, postsyn_name)],
                                                namespaces=['Synapses', 'Connections'])

            edge_iter = graph[postsyn_name][presyn_name]
            connection_dict = env.connection_generator[postsyn_name][presyn_name].connection_properties
            kinetics_dict = env.connection_generator[postsyn_name][presyn_name].synapse_kinetics
            syn_id_attr_index = a[postsyn_name][presyn_name]['Synapses']['syn_id']
            distance_attr_index = a[postsyn_name][presyn_name]['Connections']['distance']

            for (postsyn_gid, edges) in edge_iter:
                postsyn_cell = env.pc.gid2cell(postsyn_gid)
                cell_syn_dict = cell_synapses_dict[postsyn_gid]

                if has_weights:
                    cell_wgt_dict = cell_weights_dict[postsyn_gid]
                    syn_wgt_dict = {int(syn_id): float(weight)
                                    for (syn_id, weight) in zip(np.nditer(cell_wgt_dict['syn_id']),
                                                                np.nditer(cell_wgt_dict['weight']))}
                else:
                    syn_wgt_dict = None

                presyn_gids = edges[0]
                edge_syn_ids = edges[1]['Synapses'][syn_id_attr_index]
                edge_dists = edges[1]['Connections'][distance_attr_index]
                cell_syn_types = cell_syn_dict['syn_types']
                cell_swc_types = cell_syn_dict['swc_types']
                cell_syn_locs = cell_syn_dict['syn_locs']
                cell_syn_sections = cell_syn_dict['syn_secs']

                edge_syn_ps_dict = synapses.mksyns(postsyn_gid, postsyn_cell, edge_syn_ids,
                                                   cell_syn_types, cell_swc_types,
                                                   cell_syn_locs, cell_syn_sections,
                                                   kinetics_dict, env,
                                                   add_synapse=(synapses.add_unique_synapse if unique
                                                                else synapses.add_shared_synapse),
                                                   spines=spines)

                # Dump the morphology of the first cell processed, for debugging.
                if env.verbose and int(env.pc.id()) == 0 and edge_count == 0:
                    for sec in list(postsyn_cell.all):
                        h.psection(sec=sec)

                wgt_count = 0
                for (presyn_gid, edge_syn_id, distance) in zip(presyn_gids, edge_syn_ids, edge_dists):
                    syn_ps_dict = edge_syn_ps_dict[edge_syn_id]
                    for (syn_mech, syn_ps) in syn_ps_dict.items():
                        connection_syn_mech_config = connection_dict[syn_mech]
                        if has_weights and edge_syn_id in syn_wgt_dict:
                            wgt_count += 1
                            weight = float(syn_wgt_dict[edge_syn_id]) * connection_syn_mech_config['weight']
                        else:
                            weight = connection_syn_mech_config['weight']
                        delay = distance / connection_syn_mech_config['velocity']
                        # Scalar weights use nc_appendsyn; anything else (kept as
                        # an exact `type is float` test to preserve the original
                        # dispatch, e.g. for numpy scalars/vectors) goes through
                        # the wgtvector variant.
                        if type(weight) is float:
                            h.nc_appendsyn(env.pc, h.nclist, presyn_gid, postsyn_gid,
                                           syn_ps, weight, delay)
                        else:
                            h.nc_appendsyn_wgtvector(env.pc, h.nclist, presyn_gid, postsyn_gid,
                                                     syn_ps, weight, delay)

                if env.verbose and int(env.pc.id()) == 0 and edge_count == 0:
                    print('*** Found %i synaptic weights for gid %i' % (wgt_count, postsyn_gid))
                edge_count += len(presyn_gids)
from mpi4py import MPI
from neuroh5.io import scatter_read_graph
import numpy as np

comm = MPI.COMM_WORLD

# Modernized to Python 3 print(); output format unchanged.
print("rank = ", comm.Get_rank())
print("size = ", comm.Get_size())

# Rank 0 reads the node-to-rank partition assignment; every rank then
# receives it via a single collective broadcast (the original called
# bcast separately inside both branches, which is redundant).
if comm.Get_rank() == 0:
    node_rank_vector = np.loadtxt("parts.4096", dtype=np.uint32)
else:
    node_rank_vector = None
node_rank_vector = comm.bcast(node_rank_vector, root=0)

g = scatter_read_graph(
    "/projects/sciteam/baef/Full_Scale_Control/dentate_Full_Scale_Control_MPP.h5",
    128, node_rank_vector)
from mpi4py import MPI
from neuroh5.io import scatter_read_graph

comm = MPI.COMM_WORLD
rank = comm.rank

# Earlier test inputs kept for reference; the last assignment wins.
input_file='./data/dentate_test.h5'
input_file='/oasis/scratch/comet/iraikov/temp_project/dentate/Full_Scale_Control/DG_Connections_Full_Scale_20180722.h5'
input_file = '/scratch1/03320/iraikov/striped/dentate/Test_GC_1000/DG_Test_GC_1000_connections_20190625_compressed.h5'

(graph, a) = scatter_read_graph(input_file, io_size=8)

# Collect the GC <- MC edges delivered to this rank, keyed by gid.
edge_dict = {gid: edges for (gid, edges) in graph['GC']['MC']}

print("rank %d: %s" % (rank, str(edge_dict)))