# Assumed imports, inferred from usage below; module paths follow the layout
# of the surrounding codebase and may need adjustment.
import os
from collections import defaultdict

import numpy as np
import yaml
from mpi4py import MPI
from neuroh5.io import (append_cell_attributes, read_cell_attribute_selection,
                        read_cell_attributes, read_population_ranges)

from dentate import cells, io_utils, synapses, utils
from dentate.env import Env
from dentate.utils import Context

# Interactive-session support assumed by init_biophys_cell below: `context`
# collects locals for inspection, and `is_interactive` is True under IPython
# (a minimal assumed definition).
context = Context()
try:
    get_ipython  # noqa: F821
    is_interactive = True
except NameError:
    is_interactive = False


def init_biophys_cell(env, pop_name, gid, load_connections=True,
                      register_cell=True, write_cell=False, cell_dict=None):
    """
    Instantiates a BiophysCell instance and all its synapses.

    :param env: an instance of env.Env
    :param pop_name: population name
    :param gid: gid
    :param load_connections: bool
    :param register_cell: bool
    :param write_cell: bool
    :param cell_dict: dict
    """
    if cell_dict is None:  # avoid a shared mutable default argument
        cell_dict = {}

    rank = int(env.pc.id())

    ## Determine if a mechanism configuration file exists for this cell type
    if 'mech_file_path' in env.celltypes[pop_name]:
        mech_dict = env.celltypes[pop_name]['mech_dict']
    else:
        mech_dict = None

    ## Determine if correct_for_spines flag has been specified for this cell type
    synapse_config = env.celltypes[pop_name]['synapses']
    if 'correct_for_spines' in synapse_config:
        correct_for_spines_flag = synapse_config['correct_for_spines']
    else:
        correct_for_spines_flag = False

    ## Determine presynaptic populations that connect to this cell type
    presyn_names = env.projection_dict[pop_name]

    ## Load cell gid and its synaptic attributes and connection data
    ## (load_biophys_cell is assumed to be provided by the enclosing module)
    cell = load_biophys_cell(env, pop_name, gid, mech_dict=mech_dict,
                             correct_for_spines=correct_for_spines_flag,
                             load_connections=load_connections,
                             tree_dict=cell_dict.get('morph', None),
                             synapses_dict=cell_dict.get('synapse', None),
                             connection_graph=cell_dict.get('connectivity', None),
                             weight_dicts=cell_dict.get('weight', None))

    if register_cell:
        cells.register_cell(env, pop_name, gid, cell)
    cells.report_topology(cell, env)

    env.cell_selection[pop_name] = [gid]

    if is_interactive:
        context.update(locals())

    if write_cell:
        write_selection_file_path = "%s/%s_%d.h5" % (env.results_path,
                                                     env.modelName, gid)
        if rank == 0:
            io_utils.mkout(env, write_selection_file_path)
        env.comm.barrier()
        io_utils.write_cell_selection(env, write_selection_file_path)
        if load_connections:
            io_utils.write_connection_selection(env, write_selection_file_path)

    return cell

# Extended variant that also dispatches on the cell template, supporting
# reduced models (izhikevich, PR_nrn) alongside full morphological cells.
def init_biophys_cell(env, pop_name, gid, load_weights=True, load_connections=True,
                      register_cell=True, write_cell=False, validate_tree=True,
                      cell_dict=None):
    """
    Instantiates a BiophysCell instance and all its synapses.

    :param env: an instance of env.Env
    :param pop_name: population name
    :param gid: gid
    :param load_weights: bool
    :param load_connections: bool
    :param register_cell: bool
    :param validate_tree: bool
    :param write_cell: bool
    :param cell_dict: dict

    Environment can be instantiated as:
    env = Env(config_file, template_paths, dataset_prefix, config_prefix)

    :param template_paths: str; colon-separated list of paths to directories containing hoc cell templates
    :param dataset_prefix: str; path to directory containing required neuroh5 data files
    :param config_prefix: str; path to directory containing network and cell mechanism config files
    """
    if cell_dict is None:  # avoid a shared mutable default argument
        cell_dict = {}

    rank = int(env.pc.id())

    ## Determine template name for this cell type
    template_name = env.celltypes[pop_name]['template']

    ## Determine if a mechanism configuration file exists for this cell type
    if 'mech_file_path' in env.celltypes[pop_name]:
        mech_dict = env.celltypes[pop_name]['mech_dict']
    else:
        mech_dict = None

    ## Determine if correct_for_spines flag has been specified for this cell type
    synapse_config = env.celltypes[pop_name]['synapses']
    if 'correct_for_spines' in synapse_config:
        correct_for_spines_flag = synapse_config['correct_for_spines']
    else:
        correct_for_spines_flag = False

    ## Load cell gid and its synaptic attributes and connection data
    if template_name.lower() == 'izhikevich':
        cell = cells.make_izhikevich_cell(env, pop_name, gid,
                                          tree_dict=cell_dict.get('morph', None),
                                          synapses_dict=cell_dict.get('synapse', None),
                                          connection_graph=cell_dict.get('connectivity', None),
                                          weight_dict=cell_dict.get('weight', None),
                                          mech_dict=mech_dict,
                                          load_synapses=True,
                                          load_weights=load_weights,
                                          load_edges=load_connections)
    elif template_name.lower() == 'pr_nrn':
        cell = cells.make_PR_cell(env, pop_name, gid,
                                  tree_dict=cell_dict.get('morph', None),
                                  synapses_dict=cell_dict.get('synapse', None),
                                  connection_graph=cell_dict.get('connectivity', None),
                                  weight_dict=cell_dict.get('weight', None),
                                  mech_dict=mech_dict,
                                  load_synapses=True,
                                  load_weights=load_weights,
                                  load_edges=load_connections)
    else:
        cell = cells.make_biophys_cell(env, pop_name, gid,
                                       tree_dict=cell_dict.get('morph', None),
                                       synapses_dict=cell_dict.get('synapse', None),
                                       connection_graph=cell_dict.get('connectivity', None),
                                       weight_dict=cell_dict.get('weight', None),
                                       mech_dict=mech_dict,
                                       load_synapses=True,
                                       load_weights=load_weights,
                                       load_edges=load_connections,
                                       validate_tree=validate_tree)

    cells.init_biophysics(cell, reset_cable=True,
                          correct_cm=correct_for_spines_flag,
                          correct_g_pas=correct_for_spines_flag, env=env)
    synapses.init_syn_mech_attrs(cell, env)

    if register_cell:
        cells.register_cell(env, pop_name, gid, cell)

    is_reduced = False
    if hasattr(cell, 'is_reduced'):
        is_reduced = cell.is_reduced
    if not is_reduced:
        cells.report_topology(cell, env)

    env.cell_selection[pop_name] = [gid]

    if is_interactive:
        context.update(locals())

    if write_cell:
        write_selection_file_path = "%s/%s_%d.h5" % (env.results_path,
                                                     env.modelName, gid)
        if rank == 0:
            io_utils.mkout(env, write_selection_file_path)
        env.comm.barrier()
        io_utils.write_cell_selection(env, write_selection_file_path)
        if load_connections:
            io_utils.write_connection_selection(env, write_selection_file_path)

    return cell

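# Example usage (a minimal sketch; assumes an already-configured Env and a
# valid gid for the chosen population -- the file names and population label
# below are hypothetical):
#
#   env = Env(comm=MPI.COMM_WORLD, config_file='Network.yaml',
#             dataset_prefix='./datasets', config_prefix='./config')
#   cell = init_biophys_cell(env, 'GC', 500, load_connections=True,
#                            register_cell=False)
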
def main(arena_id, bin_sample_count, config, config_prefix, dataset_prefix,
         distances_namespace, distance_bin_extent, input_features_path,
         input_features_namespaces, populations, spike_input_path,
         spike_input_namespace, spike_input_attr, output_path, io_size,
         trajectory_id, write_selection, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix,
              dataset_prefix=dataset_prefix, results_path=output_path,
              spike_input_path=spike_input_path,
              spike_input_namespace=spike_input_namespace,
              spike_input_attr=spike_input_attr,
              arena_id=arena_id, trajectory_id=trajectory_id)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    pop_ranges, pop_size = read_population_ranges(env.connectivity_file_path, comm=comm)

    distance_U_dict = {}
    distance_V_dict = {}
    range_U_dict = {}
    range_V_dict = {}

    selection_dict = defaultdict(set)

    comm0 = env.comm.Split(2 if rank == 0 else 0, 0)

    local_random = np.random.RandomState()
    local_random.seed(1000)

    if len(populations) == 0:
        populations = sorted(pop_ranges.keys())

    if rank == 0:
        for population in populations:
            distances = read_cell_attributes(env.data_file_path, population,
                                             namespace=distances_namespace, comm=comm0)

            soma_distances = {}
            if input_features_path is not None:
                num_fields_dict = {}
                for input_features_namespace in input_features_namespaces:
                    if arena_id is not None:
                        this_features_namespace = '%s %s' % (input_features_namespace, arena_id)
                    else:
                        this_features_namespace = input_features_namespace
                    input_features_iter = read_cell_attributes(input_features_path, population,
                                                               namespace=this_features_namespace,
                                                               mask=set(['Num Fields']),
                                                               comm=comm0)
                    count = 0
                    for gid, attr_dict in input_features_iter:
                        num_fields_dict[gid] = attr_dict['Num Fields']
                        count += 1
                    logger.info('Read feature data from namespace %s for %i cells in population %s' %
                                (this_features_namespace, count, population))
                for (gid, v) in distances:
                    num_fields = num_fields_dict.get(gid, 0)
                    if num_fields > 0:
                        soma_distances[gid] = (v['U Distance'][0], v['V Distance'][0])
            else:
                for (gid, v) in distances:
                    soma_distances[gid] = (v['U Distance'][0], v['V Distance'][0])

            numitems = len(list(soma_distances.keys()))
            logger.info('read %s distances (%i elements)' % (population, numitems))

            if numitems == 0:
                continue

            gid_array = np.asarray([gid for gid in soma_distances])
            distance_U_array = np.asarray([soma_distances[gid][0] for gid in gid_array])
            distance_V_array = np.asarray([soma_distances[gid][1] for gid in gid_array])

            U_min = np.min(distance_U_array)
            U_max = np.max(distance_U_array)
            V_min = np.min(distance_V_array)
            V_max = np.max(distance_V_array)

            range_U_dict[population] = (U_min, U_max)
            range_V_dict[population] = (V_min, V_max)

            distance_U = {gid: soma_distances[gid][0] for gid in soma_distances}
            distance_V = {gid: soma_distances[gid][1] for gid in soma_distances}

            distance_U_dict[population] = distance_U
            distance_V_dict[population] = distance_V

            min_dist = U_min
            max_dist = U_max

            distance_bins = np.arange(U_min, U_max, distance_bin_extent)
            distance_bin_array = np.digitize(distance_U_array, distance_bins)

            selection_set = set()
            for bin_index in range(len(distance_bins) + 1):
                bin_gids = gid_array[np.where(distance_bin_array == bin_index)[0]]
                if len(bin_gids) > 0:
                    # Guard against bins with fewer gids than bin_sample_count,
                    # which would otherwise raise with replace=False.
                    selected_bin_gids = local_random.choice(bin_gids, replace=False,
                                                            size=min(bin_sample_count,
                                                                     len(bin_gids)))
                    for gid in selected_bin_gids:
                        selection_set.add(int(gid))
            selection_dict[population] = selection_set

        yaml_output_dict = {}
        for k, v in utils.viewitems(selection_dict):
            yaml_output_dict[k] = list(sorted(v))

        yaml_output_path = '%s/DG_slice.yaml' % output_path
        with open(yaml_output_path, 'w') as outfile:
            yaml.dump(yaml_output_dict, outfile)
        del yaml_output_dict

    env.comm.barrier()

    write_selection_file_path = None
    if write_selection:
        write_selection_file_path = "%s/%s_selection.h5" % (env.results_path, env.modelName)

    if write_selection_file_path is not None:
        if rank == 0:
            io_utils.mkout(env, write_selection_file_path)
        env.comm.barrier()
        selection_dict = env.comm.bcast(dict(selection_dict), root=0)
        env.cell_selection = selection_dict
        io_utils.write_cell_selection(env, write_selection_file_path,
                                      populations=populations)
        input_selection = io_utils.write_connection_selection(env, write_selection_file_path,
                                                              populations=populations)
        if env.spike_input_ns is not None:
            io_utils.write_input_cell_selection(env, input_selection,
                                                write_selection_file_path,
                                                populations=populations)
    env.comm.barrier()
    MPI.Finalize()

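# The selection loop above is a stratified sample along the U distance axis:
# gids are grouped into fixed-width distance bins with np.digitize, and up to
# bin_sample_count gids are drawn without replacement from each occupied bin.
# The same technique in isolation (a minimal sketch with synthetic distances;
# all names and values here are illustrative):
#
#   rng = np.random.RandomState(1000)
#   gids = np.arange(100)
#   dist_u = rng.uniform(0.0, 1000.0, size=gids.shape)
#   bins = np.arange(dist_u.min(), dist_u.max(), 100.0)  # 100-unit bin extent
#   bin_idx = np.digitize(dist_u, bins)
#   sample = set()
#   for b in range(len(bins) + 1):
#       bin_gids = gids[bin_idx == b]
#       if len(bin_gids) > 0:
#           sample.update(int(g) for g in
#                         rng.choice(bin_gids, replace=False,
#                                    size=min(5, len(bin_gids))))
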
def main(arena_id, config, config_prefix, dataset_prefix, distances_namespace,
         spike_input_path, spike_input_namespace, spike_input_attr,
         input_features_namespaces, input_features_path, selection_path,
         output_path, io_size, trajectory_id, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix,
              dataset_prefix=dataset_prefix, results_path=output_path,
              spike_input_path=spike_input_path,
              spike_input_namespace=spike_input_namespace,
              spike_input_attr=spike_input_attr,
              arena_id=arena_id, trajectory_id=trajectory_id, io_size=io_size)

    ## Read the cell selection: a plain-text file with one gid per line
    with open(selection_path, 'r') as f:
        selection = set(int(line) for line in f.readlines())

    pop_ranges, pop_size = read_population_ranges(env.connectivity_file_path, comm=comm)

    distance_U_dict = {}
    distance_V_dict = {}
    range_U_dict = {}
    range_V_dict = {}

    selection_dict = defaultdict(set)

    comm0 = env.comm.Split(2 if rank == 0 else 0, 0)

    if rank == 0:
        for population in pop_ranges:
            distances = read_cell_attributes(env.data_file_path, population,
                                             namespace=distances_namespace, comm=comm0)
            soma_distances = {k: (v['U Distance'][0], v['V Distance'][0])
                              for (k, v) in distances}
            del distances

            numitems = len(list(soma_distances.keys()))
            if numitems == 0:
                continue

            distance_U_array = np.asarray([soma_distances[gid][0] for gid in soma_distances])
            distance_V_array = np.asarray([soma_distances[gid][1] for gid in soma_distances])

            U_min = np.min(distance_U_array)
            U_max = np.max(distance_U_array)
            V_min = np.min(distance_V_array)
            V_max = np.max(distance_V_array)

            range_U_dict[population] = (U_min, U_max)
            range_V_dict[population] = (V_min, V_max)

            distance_U = {gid: soma_distances[gid][0] for gid in soma_distances}
            distance_V = {gid: soma_distances[gid][1] for gid in soma_distances}

            distance_U_dict[population] = distance_U
            distance_V_dict[population] = distance_V

            min_dist = U_min
            max_dist = U_max

            selection_dict[population] = set([k for k in distance_U if k in selection])

    env.comm.barrier()

    write_selection_file_path = "%s/%s_selection.h5" % (env.results_path, env.modelName)

    if rank == 0:
        io_utils.mkout(env, write_selection_file_path)
    env.comm.barrier()

    selection_dict = env.comm.bcast(dict(selection_dict), root=0)
    env.cell_selection = selection_dict

    io_utils.write_cell_selection(env, write_selection_file_path)
    input_selection = io_utils.write_connection_selection(env, write_selection_file_path)
    if spike_input_path:
        io_utils.write_input_cell_selection(env, input_selection, write_selection_file_path)
    if input_features_path:
        for this_input_features_namespace in sorted(input_features_namespaces):
            for population in sorted(input_selection):
                logger.info(f"Extracting input features {this_input_features_namespace} "
                            f"for population {population}...")
                it = read_cell_attribute_selection(input_features_path, population,
                                                   namespace=f"{this_input_features_namespace} {arena_id}",
                                                   selection=input_selection[population],
                                                   comm=env.comm)
                output_features_dict = {cell_gid: cell_features_dict
                                        for cell_gid, cell_features_dict in it}
                append_cell_attributes(write_selection_file_path, population,
                                       output_features_dict,
                                       namespace=f"{this_input_features_namespace} {arena_id}",
                                       io_size=io_size, comm=env.comm)
    env.comm.barrier()

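# The selection file read above is plain text with one gid per line. A minimal
# sketch of producing such a file (the path and gids below are hypothetical):
#
#   with open('selection.txt', 'w') as f:
#       for gid in (10, 11, 42):
#           f.write('%d\n' % gid)
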
def main(arena_id, config, config_prefix, dataset_prefix, distances_namespace,
         distance_limits, spike_input_path, spike_input_namespace, spike_input_attr,
         output_path, io_size, trajectory_id, write_selection, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank
    if io_size == -1:
        io_size = comm.size

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix,
              dataset_prefix=dataset_prefix, results_path=output_path,
              spike_input_path=spike_input_path,
              spike_input_namespace=spike_input_namespace,
              spike_input_attr=spike_input_attr,
              arena_id=arena_id, trajectory_id=trajectory_id, io_size=io_size)

    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    pop_ranges, pop_size = read_population_ranges(env.connectivity_file_path, comm=comm)

    distance_U_dict = {}
    distance_V_dict = {}
    range_U_dict = {}
    range_V_dict = {}

    selection_dict = defaultdict(set)

    comm0 = env.comm.Split(2 if rank == 0 else 0, 0)

    if rank == 0:
        for population in pop_ranges:
            distances = read_cell_attributes(env.data_file_path, population,
                                             namespace=distances_namespace, comm=comm0)
            soma_distances = {k: (v['U Distance'][0], v['V Distance'][0])
                              for (k, v) in distances}
            del distances

            numitems = len(list(soma_distances.keys()))
            logger.info('read %s distances (%i elements)' % (population, numitems))

            if numitems == 0:
                continue

            distance_U_array = np.asarray([soma_distances[gid][0] for gid in soma_distances])
            distance_V_array = np.asarray([soma_distances[gid][1] for gid in soma_distances])

            U_min = np.min(distance_U_array)
            U_max = np.max(distance_U_array)
            V_min = np.min(distance_V_array)
            V_max = np.max(distance_V_array)

            range_U_dict[population] = (U_min, U_max)
            range_V_dict[population] = (V_min, V_max)

            distance_U = {gid: soma_distances[gid][0] for gid in soma_distances}
            distance_V = {gid: soma_distances[gid][1] for gid in soma_distances}

            distance_U_dict[population] = distance_U
            distance_V_dict[population] = distance_V

            min_dist = U_min
            max_dist = U_max
            if distance_limits:
                min_dist = distance_limits[0]
                max_dist = distance_limits[1]

            selection_dict[population] = set([k for k in distance_U
                                              if (distance_U[k] >= min_dist) and
                                                 (distance_U[k] <= max_dist)])

        yaml_output_dict = {}
        for k, v in utils.viewitems(selection_dict):
            yaml_output_dict[k] = list(v)

        yaml_output_path = '%s/DG_slice.yaml' % output_path
        with open(yaml_output_path, 'w') as outfile:
            yaml.dump(yaml_output_dict, outfile)
        del yaml_output_dict

    env.comm.barrier()

    write_selection_file_path = None
    if write_selection:
        write_selection_file_path = "%s/%s_selection.h5" % (env.results_path, env.modelName)

    if write_selection_file_path is not None:
        if rank == 0:
            io_utils.mkout(env, write_selection_file_path)
        env.comm.barrier()
        selection_dict = env.comm.bcast(dict(selection_dict), root=0)
        env.cell_selection = selection_dict
        io_utils.write_cell_selection(env, write_selection_file_path)
        input_selection = io_utils.write_connection_selection(env, write_selection_file_path)
        io_utils.write_input_cell_selection(env, input_selection, write_selection_file_path)

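# All three drivers above share the same rank-0 read / broadcast pattern:
# comm.Split gives rank 0 its own single-rank sub-communicator so that the
# attribute reads happen once, and the resulting selection is then broadcast
# to all ranks. The pattern in isolation (a minimal mpi4py sketch; the payload
# dict is synthetic):
#
#   from mpi4py import MPI
#   comm = MPI.COMM_WORLD
#   # color 2 for rank 0, color 0 for everyone else: rank 0 ends up alone
#   comm0 = comm.Split(2 if comm.rank == 0 else 0, 0)
#   selection = {'GC': {1, 2, 3}} if comm.rank == 0 else None
#   selection = comm.bcast(selection, root=0)  # every rank now holds the dict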