def measure_gap_junction_coupling(gid, population, v_init, env, cell_dict={}):
    """
    Instantiates two copies of the given cell, couples their somata with a
    reciprocal pair of gap junction mechanisms, and records somatic voltage
    while a hyperpolarizing current step is injected into each cell in turn.
    """
    h('objref gjlist, cells, Vlog1, Vlog2')
    pc = env.pc
    h.cells = h.List()
    h.gjlist = h.List()

    # Create two copies of the same cell; the gap junction couples their somata.
    biophys_cell1 = init_biophys_cell(env, population, gid, register_cell=False, cell_dict=cell_dict)
    biophys_cell2 = init_biophys_cell(env, population, gid, register_cell=False, cell_dict=cell_dict)
    cell1 = biophys_cell1.hoc_cell
    cell2 = biophys_cell2.hoc_cell
    h.cells.append(cell1)
    h.cells.append(cell2)

    ggid = 20000000
    source = 10422930
    destination = 10422670
    weight = 5.4e-4
    srcsec = int(cell1.somaidx.x[0])
    dstsec = int(cell2.somaidx.x[0])

    stimdur = 500
    tstop = 2000

    pc.set_gid2node(source, int(pc.id()))
    nc = cell1.connect2target(h.nil)
    pc.cell(source, nc, 1)
    soma1 = list(cell1.soma)[0]

    pc.set_gid2node(destination, int(pc.id()))
    nc = cell2.connect2target(h.nil)
    pc.cell(destination, nc, 1)
    soma2 = list(cell2.soma)[0]

    # Hyperpolarizing step into cell 1, followed by a step into cell 2.
    stim1 = h.IClamp(soma1(0.5))
    stim1.delay = 250
    stim1.dur = stimdur
    stim1.amp = -0.1

    stim2 = h.IClamp(soma2(0.5))
    stim2.delay = 500 + stimdur
    stim2.dur = stimdur
    stim2.amp = -0.1

    log_size = int(tstop / h.dt) + 1

    h.tlog = h.Vector(log_size, 0)
    h.tlog.record(h._ref_t)

    h.Vlog1 = h.Vector(log_size)
    h.Vlog1.record(soma1(0.5)._ref_v)

    h.Vlog2 = h.Vector(log_size)
    h.Vlog2.record(soma2(0.5)._ref_v)

    # Reciprocal gap junction between the two somata.
    gjpos = 0.5
    neuron_utils.mkgap(env, cell1, source, gjpos, srcsec, ggid, ggid + 1, weight)
    neuron_utils.mkgap(env, cell2, destination, gjpos, dstsec, ggid + 1, ggid, weight)

    pc.setup_transfer()
    pc.set_maxstep(10.0)
    h.stdinit()
    h.finitialize(v_init)
    pc.barrier()

    h.tstop = tstop
    pc.psolve(h.tstop)
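
# A minimal sketch (not part of the original script) of how the steady-state
# coupling coefficient could be estimated from the Vectors recorded above. It
# assumes h.tlog, h.Vlog1, and h.Vlog2 have been populated by
# measure_gap_junction_coupling and that the default windows match the stimulus
# timing used there (step into cell 1 over t = 250..750 ms).
import numpy as np

def estimate_coupling_coefficient(t, v1, v2, baseline=(0.0, 250.0), step=(250.0, 750.0)):
    """Coupling coefficient = deltaV(postjunctional) / deltaV(prejunctional),
    measured at the end of the hyperpolarizing step into cell 1."""
    t = np.asarray(t)
    v1 = np.asarray(v1)
    v2 = np.asarray(v2)
    # Baseline voltage before the step, and steady-state voltage over the
    # last 50 ms of the step.
    pre = (t >= baseline[0]) & (t < baseline[1])
    end = (t >= step[1] - 50.0) & (t < step[1])
    dv1 = v1[end].mean() - v1[pre].mean()
    dv2 = v2[end].mean() - v2[pre].mean()
    return dv2 / dv1

# Example usage after pc.psolve has completed:
# cc = estimate_coupling_coefficient(h.tlog, h.Vlog1, h.Vlog2)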
def main(config, template_path, output_path, forest_path, populations,
         distance_bin_size, io_size, chunk_size, value_chunk_size, cache_size,
         verbose):
    """
    Measures per-layer dendritic length and area, and per-distance-bin mean
    dendritic diameter, for each cell in the forest, and appends the results
    to the 'Tree Measurements' namespace of the output file.

    :param config: model configuration file name
    :param template_path: path to cell template directory
    :param output_path: path to output file (defaults to forest_path)
    :param forest_path: path to morphology (forest) file
    :param populations: names of populations to measure
    :param distance_bin_size: bin size for somatic-distance histograms
    :param io_size: number of I/O ranks
    :param chunk_size: HDF5 chunk size
    :param value_chunk_size: HDF5 value chunk size
    :param cache_size: how many cells to read ahead
    :param verbose: enable verbose logging
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config, template_paths=template_path)
    configure_hoc_env(env)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if output_path is None:
        output_path = forest_path

    if rank == 0:
        # Copy the type definitions into the output file if it does not exist yet.
        if not os.path.isfile(output_path):
            input_file = h5py.File(forest_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    layers = env.layers
    layer_idx_dict = {layers[layer_name]: layer_name
                      for layer_name in ['GCL', 'IML', 'MML', 'OML', 'Hilus']}

    (pop_ranges, _) = read_population_ranges(forest_path, comm=comm)
    start_time = time.time()
    for population in populations:
        logger.info('Rank %i population: %s' % (rank, population))
        count = 0
        (population_start, _) = pop_ranges[population]
        template_class = load_cell_template(env, population, bcast_template=True)
        measures_dict = {}
        for gid, morph_dict in NeuroH5TreeGen(forest_path, population, io_size=io_size,
                                              comm=comm, topology=True):
            if gid is not None:
                logger.info('Rank %i gid: %i' % (rank, gid))
                cell = cells.make_neurotree_hoc_cell(template_class, neurotree_dict=morph_dict, gid=gid)
                secnodes_dict = morph_dict['section_topology']['nodes']

                apicalidx = set(cell.apicalidx)
                basalidx = set(cell.basalidx)

                # Accumulate per-layer length/area and per-segment diameter and
                # distance from the soma, over all dendritic sections.
                dendrite_area_dict = {k: 0.0 for k in layer_idx_dict}
                dendrite_length_dict = {k: 0.0 for k in layer_idx_dict}
                dendrite_distances = []
                dendrite_diams = []
                for (i, sec) in enumerate(cell.sections):
                    if (i in apicalidx) or (i in basalidx):
                        secnodes = secnodes_dict[i]
                        for seg in sec.allseg():
                            L = seg.sec.L
                            nseg = seg.sec.nseg
                            seg_l = L / nseg
                            seg_area = h.area(seg.x)
                            seg_diam = seg.diam
                            seg_distance = get_distance_to_node(cell, list(cell.soma)[0], seg.sec, seg.x)
                            dendrite_diams.append(seg_diam)
                            dendrite_distances.append(seg_distance)
                            layer = synapses.get_node_attribute('layer', morph_dict, seg.sec, secnodes, seg.x)
                            dendrite_length_dict[layer] += seg_l
                            dendrite_area_dict[layer] += seg_area

                dendrite_distance_array = np.asarray(dendrite_distances)
                dendrite_diam_array = np.asarray(dendrite_diams)
                dendrite_distance_bin_range = int((np.max(dendrite_distance_array) -
                                                   np.min(dendrite_distance_array)) / distance_bin_size) + 1
                # Mean diameter per distance bin: diameter sums divided by bin
                # counts, guarding against empty bins.
                dendrite_distance_counts, dendrite_distance_edges = \
                    np.histogram(dendrite_distance_array, bins=dendrite_distance_bin_range, density=False)
                dendrite_diam_sums, _ = \
                    np.histogram(dendrite_distance_array, weights=dendrite_diam_array,
                                 bins=dendrite_distance_bin_range, density=False)
                dendrite_mean_diam_hist = np.zeros_like(dendrite_diam_sums)
                np.divide(dendrite_diam_sums, dendrite_distance_counts,
                          where=dendrite_distance_counts > 0, out=dendrite_mean_diam_hist)

                dendrite_area_per_layer = np.asarray([dendrite_area_dict[k]
                                                      for k in sorted(dendrite_area_dict.keys())],
                                                     dtype=np.float32)
                dendrite_length_per_layer = np.asarray([dendrite_length_dict[k]
                                                        for k in sorted(dendrite_length_dict.keys())],
                                                       dtype=np.float32)

                measures_dict[gid] = {'dendrite_distance_hist_edges': np.asarray(dendrite_distance_edges, dtype=np.float32),
                                      'dendrite_distance_counts': np.asarray(dendrite_distance_counts, dtype=np.int32),
                                      'dendrite_mean_diam_hist': np.asarray(dendrite_mean_diam_hist, dtype=np.float32),
                                      'dendrite_area_per_layer': dendrite_area_per_layer,
                                      'dendrite_length_per_layer': dendrite_length_per_layer}

                del cell
                count += 1
            else:
                logger.info('Rank %i gid is None' % rank)
        append_cell_attributes(output_path, population, measures_dict,
                               namespace='Tree Measurements', comm=comm, io_size=io_size,
                               chunk_size=chunk_size, value_chunk_size=value_chunk_size,
                               cache_size=cache_size)
    MPI.Finalize()
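
# A minimal sketch (not part of the original script) of reading the
# 'Tree Measurements' namespace back and reconstructing the per-layer values.
# It assumes neuroh5's read_cell_attributes and the layer ordering used above:
# the per-layer arrays were written sorted by layer index.
from neuroh5.io import read_cell_attributes

def print_tree_measurements(output_path, population, layer_idx_dict):
    # Layer names in the order the per-layer arrays were written.
    layer_names = [layer_idx_dict[k] for k in sorted(layer_idx_dict.keys())]
    for gid, attrs in read_cell_attributes(output_path, population, namespace='Tree Measurements'):
        lengths = attrs['dendrite_length_per_layer']
        areas = attrs['dendrite_area_per_layer']
        for name, l, a in zip(layer_names, lengths, areas):
            print(f'{population} gid {gid} layer {name}: length {l:.1f} um, area {a:.1f} um^2')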
def generate_gj_connections(env, forest_path, soma_coords_dict, gj_config_dict,
                            gj_seed, connectivity_namespace, connectivity_path,
                            io_size, chunk_size, value_chunk_size, cache_size,
                            dry_run=False):
    """Generates gap junction connectivity based on Euclidean-distance-weighted probabilities.

    :param env: simulation environment (instance of env.Env)
    :param forest_path: path to morphology (forest) file
    :param soma_coords_dict: dictionary mapping population names to per-gid soma coordinates
    :param gj_config_dict: connection configuration objects (instances of env.GapjunctionConfig), keyed by population pair
    :param gj_seed: random seed for determining gap junction connectivity
    :param connectivity_namespace: namespace of gap junction connectivity attributes
    :param connectivity_path: path to gap junction connectivity file
    :param io_size: number of I/O ranks to use for parallel connectivity append
    :param chunk_size: HDF5 chunk size for connectivity file (pointer and index datasets)
    :param value_chunk_size: HDF5 chunk size for connectivity file (value datasets)
    :param cache_size: how many cells to read ahead
    :param dry_run: if True, do not write connectivity to file
    """
    comm = env.comm
    rank = comm.rank
    size = comm.size

    if io_size == -1:
        io_size = comm.size

    start_time = time.time()

    ranstream_gj = np.random.RandomState(gj_seed)

    population_pairs = list(gj_config_dict.keys())
    for pp in population_pairs:
        if rank == 0:
            logger.info('%s <-> %s' % (pp[0], pp[1]))

    total_count = 0
    for (i, (pp, gj_config)) in enumerate(sorted(viewitems(gj_config_dict))):
        if rank == 0:
            logger.info('Generating gap junction connections between populations %s and %s...' % pp)

        ranstream_gj.seed(gj_seed + i)

        coupling_params = np.asarray(gj_config.coupling_parameters)
        coupling_coeffs = np.asarray(gj_config.coupling_coefficients)
        connection_prob = gj_config.connection_probability
        connection_params = np.asarray(gj_config.connection_parameters)
        connection_bounds = np.asarray(gj_config.connection_bounds)

        population_a = pp[0]
        population_b = pp[1]

        template_name_a = env.celltypes[population_a]['template']
        template_name_b = env.celltypes[population_b]['template']

        load_cell_template(env, population_a, bcast_template=True)
        load_cell_template(env, population_b, bcast_template=True)
        template_class_a = getattr(h, template_name_a)
        template_class_b = getattr(h, template_name_b)

        # Sort somata of each population by gid.
        clst_a = []
        gid_a = []
        for (gid, coords) in viewitems(soma_coords_dict[population_a]):
            clst_a.append(np.asarray(coords))
            gid_a.append(gid)
        gid_a = np.asarray(gid_a)
        sortidx_a = np.argsort(gid_a)
        coords_a = np.asarray([clst_a[i] for i in sortidx_a])

        clst_b = []
        gid_b = []
        for (gid, coords) in viewitems(soma_coords_dict[population_b]):
            clst_b.append(np.asarray(coords))
            gid_b.append(gid)
        gid_b = np.asarray(gid_b)
        sortidx_b = np.argsort(gid_b)
        coords_b = np.asarray([clst_b[i] for i in sortidx_b])

        gj_prob_dict = filter_by_distance(gid_a[sortidx_a], coords_a,
                                          gid_b[sortidx_b], coords_b,
                                          connection_bounds, connection_params)

        # Each rank keeps the candidate pairs whose source gid hashes to it.
        gj_probs = []
        gj_distances = []
        gids_a = []
        gids_b = []
        for gid, v in viewitems(gj_prob_dict):
            if gid % size == rank:
                (nngids, nndists, nnprobs) = v
                gids_a.append(np.full(nngids.shape, gid, np.int32))
                gids_b.append(nngids)
                gj_probs.append(nnprobs)
                gj_distances.append(nndists)

        gids_a = np.concatenate(gids_a)
        gids_b = np.concatenate(gids_b)
        gj_probs = np.concatenate(gj_probs)
        gj_probs = gj_probs / gj_probs.sum()
        gj_distances = np.concatenate(gj_distances)
        gids_a = np.asarray(gids_a, dtype=np.uint32)
        gids_b = np.asarray(gids_b, dtype=np.uint32)

        cell_dict_a = {}
        selection_a = set(gids_a)
        if rank == 0:
            logger.info('Reading tree selection of population %s (%d cells)...' % (pp[0], len(selection_a)))
        (tree_iter_a, _) = read_tree_selection(forest_path, population_a, list(selection_a))
        for (gid, tree_dict) in tree_iter_a:
            cell_dict_a[gid] = cells.make_neurotree_hoc_cell(template_class_a, neurotree_dict=tree_dict, gid=gid)

        cell_dict_b = {}
        selection_b = set(gids_b)
        if rank == 0:
            logger.info('Reading tree selection of population %s (%d cells)...' % (pp[1], len(selection_b)))
        (tree_iter_b, _) = read_tree_selection(forest_path, population_b, list(selection_b))
        for (gid, tree_dict) in tree_iter_b:
            cell_dict_b[gid] = cells.make_neurotree_hoc_cell(template_class_b, neurotree_dict=tree_dict, gid=gid)

        if rank == 0:
            logger.info('Generating gap junction pairs between populations %s and %s...' % pp)

        gj_dict = {}
        count = generate_gap_junctions(connection_prob, coupling_coeffs, coupling_params,
                                       ranstream_gj, gids_a, gids_b, gj_probs, gj_distances,
                                       cell_dict_a, cell_dict_b, gj_dict)

        gj_graph_dict = {pp[0]: {pp[1]: gj_dict}}

        if not dry_run:
            append_graph(connectivity_path, gj_graph_dict, io_size=io_size, comm=comm)

        total_count += count

    global_count = comm.gather(total_count, root=0)
    if rank == 0:
        logger.info('%i ranks took %i s to generate %i edges' %
                    (comm.size, time.time() - start_time, np.sum(global_count)))
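
# A minimal sketch (assumption: not dentate's actual filter_by_distance) of the
# Euclidean-distance filtering step above: for each source soma, find partner
# somata within a distance bound and assign each candidate an unnormalized
# probability that decays with distance. The helper name and the hypothetical
# decay-length parameter lambda_ are illustrative only.
import numpy as np
from scipy.spatial import cKDTree

def filter_by_distance_sketch(gids_a, coords_a, gids_b, coords_b, max_dist, lambda_=25.0):
    """Return {source_gid: (partner_gids, distances, probabilities)}."""
    tree_b = cKDTree(coords_b)
    result = {}
    for gid, xyz in zip(gids_a, coords_a):
        idxs = tree_b.query_ball_point(xyz, r=max_dist)
        # Exclude self-pairing in case the two populations coincide.
        idxs = np.asarray([j for j in idxs if gids_b[j] != gid], dtype=int)
        if idxs.size == 0:
            continue
        dists = np.linalg.norm(coords_b[idxs] - xyz, axis=1)
        # Distance-weighted, unnormalized probabilities (exponential decay).
        probs = np.exp(-dists / lambda_)
        result[gid] = (gids_b[idxs], dists, probs)
    return result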
def main(config, config_prefix, template_path, output_path, forest_path, populations,
         distribution, io_size, cache_size, chunk_size, value_chunk_size, write_size,
         verbose, dry_run, debug):
    """
    Distributes synapse locations on the given populations' morphologies
    according to the configured layer- and section-specific densities, and
    appends them to the 'Synapse Attributes' namespace of the output file.

    :param config: model configuration file name
    :param config_prefix: path to directory containing configuration files
    :param template_path: path to cell template directory
    :param output_path: path to output file (defaults to forest_path)
    :param forest_path: path to morphology (forest) file
    :param populations: names of populations to process
    :param distribution: synapse distribution method ('uniform' or 'poisson')
    :param io_size: number of I/O ranks
    :param cache_size: how many cells to read ahead
    :param chunk_size: HDF5 chunk size
    :param value_chunk_size: HDF5 value chunk size
    :param write_size: number of cells to process before appending to the output file
    :param verbose: enable verbose logging
    :param dry_run: if True, do not write synapse attributes to file
    :param debug: if True, stop after a few cells per population
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix,
              template_paths=template_path)

    configure_hoc_env(env)

    if io_size == -1:
        io_size = comm.size

    if output_path is None:
        output_path = forest_path

    if not dry_run:
        if rank == 0:
            # Copy the type definitions into the output file if it does not exist yet.
            if not os.path.isfile(output_path):
                input_file = h5py.File(forest_path, 'r')
                output_file = h5py.File(output_path, 'w')
                input_file.copy('/H5Types', output_file)
                input_file.close()
                output_file.close()
        comm.barrier()

    (pop_ranges, _) = read_population_ranges(forest_path, comm=comm)
    start_time = time.time()
    syn_stats = dict()
    for population in populations:
        syn_stats[population] = {'section': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                                 'layer': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                                 'swc_type': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                                 'total': {'excitatory': 0, 'inhibitory': 0}}

    for population in populations:
        logger.info('Rank %i population: %s' % (rank, population))
        (population_start, _) = pop_ranges[population]
        template_class = load_cell_template(env, population, bcast_template=True)

        density_dict = env.celltypes[population]['synapses']['density']
        layer_set_dict = defaultdict(set)
        swc_set_dict = defaultdict(set)
        for sec_name, sec_dict in viewitems(density_dict):
            for syn_type, syn_dict in viewitems(sec_dict):
                swc_set_dict[syn_type].add(env.SWC_Types[sec_name])
                for layer_name in syn_dict:
                    if layer_name != 'default':
                        layer = env.layers[layer_name]
                        layer_set_dict[syn_type].add(layer)

        syn_stats_dict = {'section': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                          'layer': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                          'swc_type': defaultdict(lambda: {'excitatory': 0, 'inhibitory': 0}),
                          'total': {'excitatory': 0, 'inhibitory': 0}}

        count = 0
        gid_count = 0
        synapse_dict = {}
        for gid, morph_dict in NeuroH5TreeGen(forest_path, population, io_size=io_size,
                                              comm=comm, cache_size=cache_size, topology=True):
            local_time = time.time()
            if gid is not None:
                logger.info(f'Rank {rank} gid: {gid}')
                cell = cells.make_neurotree_hoc_cell(template_class, neurotree_dict=morph_dict, gid=gid)
                cell_sec_dict = {'apical': (cell.apical, None),
                                 'basal': (cell.basal, None),
                                 'soma': (cell.soma, None),
                                 'ais': (cell.ais, None),
                                 'hillock': (cell.hillock, None)}
                cell_secidx_dict = {'apical': cell.apicalidx,
                                    'basal': cell.basalidx,
                                    'soma': cell.somaidx,
                                    'ais': cell.aisidx,
                                    'hillock': cell.hilidx}

                random_seed = env.model_config['Random Seeds']['Synapse Locations'] + gid
                if distribution == 'uniform':
                    syn_dict, seg_density_per_sec = synapses.distribute_uniform_synapses(
                        random_seed, env.Synapse_Types, env.SWC_Types, env.layers,
                        density_dict, morph_dict, cell_sec_dict, cell_secidx_dict)
                elif distribution == 'poisson':
                    syn_dict, seg_density_per_sec = synapses.distribute_poisson_synapses(
                        random_seed, env.Synapse_Types, env.SWC_Types, env.layers,
                        density_dict, morph_dict, cell_sec_dict, cell_secidx_dict)
                else:
                    raise Exception('Unknown distribution type: %s' % distribution)

                synapse_dict[gid] = syn_dict

                this_syn_stats = update_syn_stats(env, syn_stats_dict, syn_dict)
                check_syns(gid, morph_dict, this_syn_stats, seg_density_per_sec,
                           layer_set_dict, swc_set_dict, env, logger)

                del cell
                num_syns = len(synapse_dict[gid]['syn_ids'])
                logger.info(f'Rank {rank} took {time.time() - local_time:.01f} s to compute '
                            f'{num_syns} synapse locations for {population} gid: {gid}')
                logger.info(f'{population} gid {gid} synapses: {local_syn_summary(this_syn_stats)}')
                gid_count += 1
            else:
                logger.info(f'Rank {rank} gid is None')
            gc.collect()

            if (not dry_run) and (write_size > 0) and (gid_count % write_size == 0):
                append_cell_attributes(output_path, population, synapse_dict,
                                       namespace='Synapse Attributes', comm=comm,
                                       io_size=io_size, chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
                synapse_dict = {}
            syn_stats[population] = syn_stats_dict
            count += 1
            if debug and count == 5:
                break

        if not dry_run:
            append_cell_attributes(output_path, population, synapse_dict,
                                   namespace='Synapse Attributes', comm=comm,
                                   io_size=io_size, chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size)

        global_count, summary = global_syn_summary(comm, syn_stats, gid_count, root=0)
        if rank == 0:
            logger.info('target: %s, %i ranks took %i s to compute synapse locations for %i cells' %
                        (population, comm.size, time.time() - start_time, np.sum(global_count)))
            logger.info(summary)
        comm.barrier()

    MPI.Finalize()
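
# A minimal sketch (assumption: not dentate's actual distribute_poisson_synapses)
# of the density-based placement idea used above, reduced to one section: given
# a linear density (synapses per micron) and a section length, draw a Poisson
# count and place the synapses uniformly along the section. The helper name and
# parameters are illustrative only.
import numpy as np

def place_synapses_poisson(seed, density_per_um, sec_length_um):
    """Return sorted normalized locations (0..1) of synapses on one section."""
    rng = np.random.RandomState(seed)
    expected = density_per_um * sec_length_um
    n_syns = rng.poisson(expected)
    # Uniform placement along the section; the real code also weights by
    # per-segment density and layer membership.
    return np.sort(rng.uniform(0.0, 1.0, size=n_syns))

# Example: ~1.6 synapses/um on a 100 um section yields ~160 locations.
# locs = place_synapses_poisson(42, 1.6, 100.0)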