Example #1
def config_controller():
    """
    Configures the optimization controller context: sets up logging, initializes
    the network environment, and stores parameter names, bounds, initial values,
    and optimization targets in the shared context.
    """
    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))

    try:
        context.env = Env(comm=context.controller_comm, **context.kwargs)
    except Exception as err:
        context.logger.exception(err)
        raise err

    opt_param_config = optimization_params(
        context.env.netclamp_config.optimize_parameters,
        context.target_populations, context.param_config_name)
    param_bounds = opt_param_config.param_bounds
    param_names = opt_param_config.param_names
    param_initial_dict = opt_param_config.param_initial_dict
    param_tuples = opt_param_config.param_tuples
    opt_targets = opt_param_config.opt_targets

    context.param_names = param_names
    context.bounds = [param_bounds[key] for key in param_names]
    context.x0 = param_initial_dict
    context.target_val = opt_targets
    context.target_range = opt_targets
    context.param_tuples = param_tuples
    # These kwargs will be sent from the controller to each worker context
    context.kwargs['param_tuples'] = param_tuples
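
A minimal sketch (hypothetical parameter keys and ranges, not taken from the example) of the bounds-list construction above: context.bounds pairs each entry of param_names with its (low, high) range, so the optimizer receives bounds in the same order as the parameter vector.

param_names = ['GC.MC.apical.AMPA.weight', 'GC.HC.soma.GABA_A.weight']  # hypothetical keys
param_bounds = {'GC.MC.apical.AMPA.weight': (0.1, 5.0),
                'GC.HC.soma.GABA_A.weight': (0.05, 2.0)}
bounds = [param_bounds[key] for key in param_names]  # same order as param_names
assert bounds == [(0.1, 5.0), (0.05, 2.0)]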
Example #2
def main(inst_rates_path, inst_rates_namespace, include, bin_size, nstdev,
         baseline_fraction, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    if not include:
        population_names = read_population_names(inst_rates_path)
        for pop in population_names:
            include.append(pop)

    for population in include:

        rate_inst_iter = read_cell_attributes(inst_rates_path,
                                              population,
                                              namespace=inst_rates_namespace)

        rate_inst_dict = dict(rate_inst_iter)

        spikedata.place_fields(population,
                               bin_size,
                               rate_inst_dict,
                               nstdev,
                               baseline_fraction=baseline_fraction,
                               saveData=inst_rates_path)
Example #3
def main(config, config_prefix, coords_path, population, distances_namespace, reindex_namespace, reindex_attribute, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(script_name))

    env = Env(config_file=config, config_prefix=config_prefix)

    plot.plot_reindex_positions(env, coords_path, population, distances_namespace, reindex_namespace, reindex_attribute)
Example #4
def main(gid, pop_name, config_file, template_paths, hoc_lib_path, dataset_prefix, config_prefix, mech_file,
         load_edges, load_weights, correct_for_spines, verbose):
    """

    :param gid: int
    :param pop_name: str
    :param config_file: str; model configuration file name
    :param template_paths: str; colon-separated list of paths to directories containing hoc cell templates
    :param hoc_lib_path: str; path to directory containing required hoc libraries
    :param dataset_prefix: str; path to directory containing required neuroh5 data files
    :param config_prefix: str; path to directory containing network and cell mechanism config files
    :param mech_file: str; cell mechanism config file name
    :param load_edges: bool; whether to attempt to load connections from a neuroh5 file
    :param load_weights: bool; whether to attempt to load synaptic weights from a neuroh5 file
    :param correct_for_spines: bool
    :param verbose: bool
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    np.seterr(all='raise')
    env = Env(comm=comm, config_file=config_file, template_paths=template_paths, hoc_lib_path=hoc_lib_path,
              dataset_prefix=dataset_prefix, config_prefix=config_prefix, verbose=verbose)
    configure_hoc_env(env)

    mech_file_path = os.path.join(config_prefix, mech_file)
    template_name = env.celltypes[pop_name]['template']
    if template_name.lower() == 'izhikevich':
        cell = make_izhikevich_cell(env, pop_name=pop_name, gid=gid,
                                    load_synapses=True, load_connections=True,
                                    load_edges=load_edges, load_weights=load_weights,
                                    mech_file_path=mech_file_path)
    elif template_name.lower() == 'pr_nrn':
        cell = make_PR_cell(env, pop_name=pop_name, gid=gid,
                            load_synapses=True, load_connections=True,
                            load_edges=load_edges, load_weights=load_weights,
                            mech_file_path=mech_file_path)
    else:
        cell = make_biophys_cell(env, pop_name=pop_name, gid=gid,
                                 load_synapses=True, load_connections=True,
                                 load_edges=load_edges, load_weights=load_weights,
                                 mech_file_path=mech_file_path)
    context.update(locals())

    init_biophysics(cell, reset_cable=True, correct_cm=correct_for_spines, correct_g_pas=correct_for_spines,
                    env=env, verbose=verbose)

    init_syn_mech_attrs(cell, env)
    config_biophys_cell_syns(env, gid, pop_name, insert=True, insert_netcons=True, insert_vecstims=True,
                             verbose=verbose)

    if verbose:
        for sec in list(cell.hoc_cell.all if hasattr(cell, 'hoc_cell') else cell.all):
            h.psection(sec=sec)
        report_topology(cell, env)
Example #5
def main(config, template_path, prototype_gid, prototype_path, forest_path, population, io_size, verbose):
    """

    :param config:
    :param template_path:
    :param prototype_gid:
    :param prototype_path:
    :param forest_path:
    :param population:
    :param io_size:
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)
        
    comm = MPI.COMM_WORLD
    rank = comm.rank
    
    env = Env(comm=MPI.COMM_WORLD, config_file=config, template_paths=template_path)
    configure_hoc_env(env)
    
    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)
    
    layers = env.layers
    layer_idx_dict = { layers[layer_name]: layer_name 
                       for layer_name in ['GCL', 'IML', 'MML', 'OML', 'Hilus'] }

    (tree_iter, _) = read_tree_selection(prototype_path, population, selection=[prototype_gid])
    (_, prototype_morph_dict) = next(tree_iter)
    prototype_x = prototype_morph_dict['x']
    prototype_y = prototype_morph_dict['y']
    prototype_z = prototype_morph_dict['z']
    prototype_xyz = (prototype_x, prototype_y, prototype_z)

    (pop_ranges, _) = read_population_ranges(forest_path, comm=comm)
    start_time = time.time()

    (population_start, _) = pop_ranges[population]
    template_class = load_cell_template(env, population, bcast_template=True)
    for gid, morph_dict in NeuroH5TreeGen(forest_path, population, io_size=io_size, cache_size=1, comm=comm, topology=True):
        if gid is not None:
            logger.info('Rank %i gid: %i' % (rank, gid))
            secnodes_dict = morph_dict['section_topology']['nodes']
            vx = morph_dict['x']
            vy = morph_dict['y']
            vz = morph_dict['z']
            if compare_points((vx,vy,vz), prototype_xyz):
                logger.info('Possible match: gid %i' % gid)
    MPI.Finalize()
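
compare_points is not defined in this listing; the following is a plausible stand-in (an assumption, not the library's implementation) consistent with how it is called above on (x, y, z) tuples of coordinate arrays.

import numpy as np

def compare_points(xyz_a, xyz_b, rtol=1e-4):
    # True only if every axis has the same shape and numerically matching values.
    for a, b in zip(xyz_a, xyz_b):
        a, b = np.asarray(a), np.asarray(b)
        if a.shape != b.shape:
            return False
        if not np.allclose(a, b, rtol=rtol):
            return False
    return True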
Example #6
def main(config_path, params_id, output_file_name, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    eval_config = read_from_yaml(config_path,
                                 include_loader=utils.IncludeLoader)
    network_param_spec_src = eval_config['param_spec']
    network_param_values = eval_config['param_values']
    target_populations = eval_config['target_populations']

    network_param_spec = make_param_spec(target_populations,
                                         network_param_spec_src)

    def from_param_list(x):
        result = []
        for i, (param_name, param_tuple) in enumerate(
                zip(network_param_spec.param_names,
                    network_param_spec.param_tuples)):
            param_range = param_tuple.param_range
            # assert((x[i] >= param_range[0]) and (x[i] <= param_range[1]))
            result.append((param_tuple, x[i]))
        return result

    x = network_param_values[params_id]
    param_tuple_values = from_param_list(x)

    def rec_dd():
        return defaultdict(rec_dd)

    def dd2dict(d):
        for k, v in d.items():

            if isinstance(v, dict):
                d[k] = dd2dict(v)
        return dict(d)

    param_output_ddict = rec_dd()

    for param_tuple, param_value in param_tuple_values:
        if isinstance(param_tuple.param_path, tuple):
            param_output_ddict[param_tuple.population][param_tuple.source][
                param_tuple.sec_type][param_tuple.syn_name][
                    param_tuple.param_path[0]][
                        param_tuple.param_path[1]] = param_value
        else:
            param_output_ddict[param_tuple.population][param_tuple.source][
                param_tuple.sec_type][param_tuple.syn_name][
                    param_tuple.param_path] = param_value

    param_output_dict = dd2dict(param_output_ddict)
    pprint.pprint(param_output_dict)
    write_to_yaml(output_file_name, param_output_dict)
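
The rec_dd/dd2dict pair above builds an arbitrarily nested defaultdict and then flattens it to plain dicts before YAML serialization. A self-contained usage sketch (values hypothetical):

from collections import defaultdict

def rec_dd():
    return defaultdict(rec_dd)

def dd2dict(d):
    for k, v in d.items():
        if isinstance(v, dict):
            d[k] = dd2dict(v)
    return dict(d)

d = rec_dd()
d['GC']['MC']['apical']['AMPA']['weight'] = 0.5  # arbitrarily deep assignment
assert dd2dict(d) == {'GC': {'MC': {'apical': {'AMPA': {'weight': 0.5}}}}}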
Example #7
def main(config, config_prefix, resolution, resample, alpha_radius, graph_type,
         verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    env = Env(config_file=config, config_prefix=config_prefix)

    layers = env.layers
    rotate = env.geometry['Parametric Surface']['Rotation']
    min_u = float('inf')
    max_u = 0.0
    min_v = float('inf')
    max_v = 0.0
    min_l = float('inf')
    max_l = 0.0
    for layer in list(layers.keys()):
        min_extent = env.geometry['Parametric Surface']['Minimum Extent'][
            layer]
        max_extent = env.geometry['Parametric Surface']['Maximum Extent'][
            layer]
        min_u = min(min_extent[0], min_u)
        max_u = max(max_extent[0], max_u)
        min_v = min(min_extent[1], min_v)
        max_v = max(max_extent[1], max_v)
        min_l = min(min_extent[2], min_l)
        max_l = max(max_extent[2], max_l)

    logger.info('Creating volume: min_l = %f max_l = %f...' % (min_l, max_l))
    ip_volume = make_volume((min_u, max_u),
                            (min_v, max_v),
                            (min_l, max_l),
                            resolution=resolution,
                            rotate=rotate)
    logger.info('Computing volume distances...')

    vol_dist = get_volume_distances(ip_volume,
                                    res=resample,
                                    alpha_radius=alpha_radius)
    (obs_uv, dist_u, dist_v) = vol_dist

    dist_dict = {}
    for i in range(len(dist_u)):
        dist_dict[i] = {'U Distance': np.asarray([dist_u[i]], dtype=np.float32),
                        'V Distance': np.asarray([dist_v[i]], dtype=np.float32)}

    plot.plot_positions(env,
                        "DG Volume",
                        dist_dict,
                        verbose=verbose,
                        saveFig=True,
                        graphType=graph_type)
Example #8
def main(config, config_prefix, connectivity_path, coords_path,
         distances_namespace, target_gid, destination, source, extent_type,
         direction, normed, bin_size, font_size, save_format, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(script_name))

    env = Env(config_file=config, config_prefix=config_prefix)

    plot.plot_single_vertex_dist(env, connectivity_path, coords_path, distances_namespace,
                                 target_gid, destination, source, direction=direction,
                                 normed=normed, extent_type=extent_type, bin_size=bin_size,
                                 fontSize=font_size, saveFig=True, figFormat=save_format)
Example #9
def init_network_objfun(operational_config, opt_targets, param_names,
                        param_tuples, worker, **kwargs):

    param_tuples = [
        syn_param_from_dict(param_tuple) for param_tuple in param_tuples
    ]

    objective_names = operational_config['objective_names']
    target_populations = operational_config['target_populations']
    target_features_path = operational_config['target_features_path']
    target_features_namespace = operational_config['target_features_namespace']
    kwargs['results_file_id'] = 'DG_optimize_network_%d_%s' % \
                                (worker.worker_id, operational_config['run_ts'])

    logger = utils.get_script_logger(os.path.basename(__file__))
    env = init_network(comm=MPI.COMM_WORLD, kwargs=kwargs)
    gc.collect()

    t_start = 50.
    t_stop = env.tstop
    time_range = (t_start, t_stop)

    target_trj_rate_map_dict = {}
    target_features_arena = env.arena_id
    target_features_trajectory = env.trajectory_id
    for pop_name in target_populations:
        if ('%s target rate dist residual' % pop_name) not in objective_names:
            continue
        my_cell_index_set = set(env.biophys_cells[pop_name].keys())
        trj_rate_maps = rate_maps_from_features(
            env,
            pop_name,
            cell_index_set=list(my_cell_index_set),
            input_features_path=target_features_path,
            input_features_namespace=target_features_namespace,
            time_range=time_range)
        target_trj_rate_map_dict[pop_name] = trj_rate_maps

    def from_param_dict(params_dict):
        result = []
        for param_name, param_tuple in zip(param_names, param_tuples):
            result.append((param_tuple, params_dict[param_name]))
        return result

    return partial(network_objfun, env, operational_config, opt_targets,
                   target_trj_rate_map_dict, from_param_dict, t_start, t_stop,
                   target_populations)
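
init_network_objfun returns a closure via functools.partial: the environment and configuration are bound once, and only the parameter dictionary varies per evaluation. A minimal sketch of the same pattern (placeholder objective, hypothetical names):

from functools import partial

def network_objfun(env, opt_targets, params_dict):
    # Placeholder objective: distance of each parameter from its target.
    return sum(abs(v - opt_targets.get(k, 0.0)) for k, v in params_dict.items())

objfun = partial(network_objfun, {'tstop': 1000.0}, {'w1': 0.25})
print(objfun({'w1': 0.2, 'w2': 0.3}))  # only params_dict is supplied per call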
Example #10
def main(config, coords_path, coords_namespace, resample, resolution, populations, projection_depth, io_size, chunk_size, value_chunk_size, cache_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)
    
    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)

    soma_coords = {}

    if rank == 0:
        logger.info('Reading population coordinates...')

    rotate = env.geometry['Parametric Surface']['Rotation']
    min_l = float('inf')
    max_l = 0.0
    population_ranges = read_population_ranges(coords_path)[0]
    population_extents = {}
    for population in population_ranges:
        min_extent = env.geometry['Cell Layers']['Minimum Extent'][population]
        max_extent = env.geometry['Cell Layers']['Maximum Extent'][population]
        min_l = min(min_extent[2], min_l)
        max_l = max(max_extent[2], max_l)
        population_extents[population] = (min_extent, max_extent)
        
    for population in populations:
        coords = bcast_cell_attributes(coords_path, population, 0,
                                       namespace=coords_namespace)

        soma_coords[population] = { k: (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0]) for (k,v) in coords }
        del coords
        gc.collect()
    
    output_path = coords_path
    soma_coords = icp_transform(comm, soma_coords, projection_depth, population_extents,
                                populations=populations, rotate=rotate, verbose=verbose)
    
    for population in populations:

        if rank == 0:
            logger.info('Writing transformed coordinates for population %s...' % population)

        append_cell_attributes(output_path, population, soma_coords[population],
                               namespace='Soma Projections', comm=comm,
                               io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size, cache_size=cache_size)
Example #11
def config_worker():
    """

    """
    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))
    if 'results_id' not in context():
        context.results_id = 'DG_test_network_subworlds_%s_%s' % \
                             (context.interface.worker_id, datetime.datetime.today().strftime('%Y%m%d_%H%M'))
    if 'env' not in context():
        try:
            init_network()
        except Exception as err:
            context.logger.exception(err)
            raise err
        context.bin_size = 5.0
Example #12
def config_controller():
    """

    """
    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))
    if 'results_file_id' not in context():
        context.results_file_id = 'DG_optimize_network_subworlds_%s_%s' % \
                             (context.interface.worker_id, datetime.datetime.today().strftime('%Y%m%d_%H%M'))
    if 'env' not in context():
        try:
            context.comm = MPI.COMM_WORLD
            #init_env()
        except Exception as err:
            context.logger.exception(err)
            raise err
Example #13
def config_controller():
    """

    """
    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))

    context.init_params = context.kwargs
    context.init_params['target_rate_map_arena'] = context.init_params['arena_id']
    context.init_params['target_rate_map_trajectory'] = context.init_params['trajectory_id']
    context.gid = int(context.init_params['gid'])
    context.target_val = {}
    
    if 'results_file_id' not in context():
        context.results_file_id = 'DG_optimize_pf_%s_%s' % \
                             (context.interface.worker_id, datetime.datetime.today().strftime('%Y%m%d_%H%M'))
Example #14
def main(config_file, population, gid, template_paths, dataset_prefix,
         config_prefix, load_synapses, syn_types, syn_sources,
         syn_source_threshold, font_size, bgcolor, colormap, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    params = dict(locals())
    env = Env(**params)
    configure_hoc_env(env)

    ## Determine if a mechanism configuration file exists for this cell type
    if 'mech_file_path' in env.celltypes[population]:
        mech_file_path = env.celltypes[population]['mech_file_path']
    else:
        mech_file_path = None

    logger.info('loading cell %i' % gid)

    load_weights = False
    biophys_cell = get_biophys_cell(env,
                                    population,
                                    gid,
                                    load_synapses=load_synapses,
                                    load_weights=load_weights,
                                    load_edges=load_synapses,
                                    mech_file_path=mech_file_path)

    if len(syn_types) == 0:
        syn_types = None
    else:
        syn_types = list(syn_types)
    if len(syn_sources) == 0:
        syn_sources = None
    else:
        syn_sources = list(syn_sources)

    plot.plot_biophys_cell_tree(env,
                                biophys_cell,
                                saveFig=True,
                                syn_source_threshold=syn_source_threshold,
                                synapse_filters={
                                    'syn_types': syn_types,
                                    'sources': syn_sources
                                },
                                bgcolor=bgcolor,
                                colormap=colormap)
Example #15
def main(config_path, params_id, output_file_name, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    eval_config = read_from_yaml(config_path,
                                 include_loader=utils.IncludeLoader)
    network_param_spec_src = eval_config['param_spec']
    network_param_values = eval_config['param_values']
    target_populations = eval_config['target_populations']

    network_param_spec = make_param_spec(target_populations,
                                         network_param_spec_src)

    def from_param_list(x):
        result = []
        for i, (param_name, param_tuple) in enumerate(
                zip(network_param_spec.param_names,
                    network_param_spec.param_tuples)):
            param_range = param_tuple.param_range
            # assert((x[i] >= param_range[0]) and (x[i] <= param_range[1]))
            result.append((param_tuple, x[i]))
        return result

    params_id_list = []
    if params_id is None:
        params_id_list = list(network_param_values.keys())
    else:
        params_id_list = [params_id]

    param_output_dict = dict()
    for this_params_id in params_id_list:
        x = network_param_values[this_params_id]
        param_tuple_values = from_param_list(x)
        this_param_list = []
        for param_tuple, param_value in param_tuple_values:
            this_param_list.append((param_tuple.population, param_tuple.source,
                                    param_tuple.sec_type, param_tuple.syn_name,
                                    param_tuple.param_path, param_value))
        param_output_dict[this_params_id] = this_param_list

    pprint.pprint(param_output_dict)
    if output_file_name is not None:
        write_to_yaml(output_file_name, param_output_dict)
Example #16
def main(config, coords_path, coords_namespace, distances_namespace,
         populations, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)
    output_path = coords_path

    soma_coords = {}
    soma_distances = {}

    if rank == 0:
        logger.info('Reading population coordinates and distances...')

    for population in populations:

        coords = bcast_cell_attributes(coords_path,
                                       population,
                                       0,
                                       namespace=coords_namespace,
                                       comm=comm)
        soma_coords[population] = {
            k:
            (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0])
            for (k, v) in coords
        }
        del coords
        gc.collect()

        distances = bcast_cell_attributes(coords_path,
                                          population,
                                          0,
                                          namespace=distances_namespace,
                                          comm=comm)
        soma_distances[population] = {
            k: (v['U Distance'][0], v['V Distance'][0])
            for (k, v) in distances
        }
        del distances
        gc.collect()
Example #17
def generate_param_lattice(config_path, n_samples, output_file_dir, output_file_name, maxiter=5, verbose=False):
    from dmosopt import sampling

    logger = utils.get_script_logger(os.path.basename(__file__))

    output_path = None
    if output_file_name is not None:
        output_path = f'{output_file_dir}/{output_file_name}'
    eval_config = read_from_yaml(config_path, include_loader=utils.IncludeLoader)
    network_param_spec_src = eval_config['param_spec']

    target_populations = eval_config['target_populations']
    network_param_spec = make_param_spec(target_populations, network_param_spec_src)
    param_tuples = network_param_spec.param_tuples
    param_names = network_param_spec.param_names
    n_params = len(param_tuples)

    n_init = n_params * n_samples
    Xinit = sampling.glp(n_init, n_params, maxiter=maxiter)

    ub = []
    lb = []
    for param_name, param_tuple in zip(param_names, param_tuples):
        param_range = param_tuple.param_range
        ub.append(param_range[1])
        lb.append(param_range[0])

    ub = np.asarray(ub)
    lb = np.asarray(lb)

    for i in range(n_init):
        Xinit[i,:] = Xinit[i,:] * (ub - lb) + lb

    output_dict = {}
    for i in range(Xinit.shape[0]):
        output_dict[i] = [float(x) for x in Xinit[i, :]]

    if output_path is not None:    
        write_to_yaml(output_path, output_dict)
    else:
        pprint.pprint(output_dict)
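
The scaling loop above maps unit-hypercube samples into the parameter box defined by lb and ub. An equivalent vectorized sketch with stand-in data (a random generator replaces sampling.glp here):

import numpy as np

rng = np.random.default_rng(0)
Xinit = rng.random((8, 2))        # stand-in for sampling.glp output in [0, 1]^2
lb = np.array([0.1, 0.05])        # hypothetical lower bounds
ub = np.array([5.0, 2.0])         # hypothetical upper bounds
Xscaled = Xinit * (ub - lb) + lb  # one broadcast replaces the per-row loop
assert (Xscaled >= lb).all() and (Xscaled <= ub).all()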
Example #18
def main(config, coords_path, io_size, chunk_size, value_chunk_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)
    output_path = coords_path

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    (source_population_ranges, _) = read_population_ranges(coords_path)
    source_populations = list(source_population_ranges.keys())

    for population in source_populations:
        if rank == 0:
            logger.info('population: %s' % population)
        soma_coords = bcast_cell_attributes(coords_path, population, 0,
                                            namespace='Interpolated Coordinates', comm=comm)
        u_coords = []
        gids = []
        for gid, attrs in viewitems(soma_coords):
            u_coords.append(attrs['U Coordinate'])
            gids.append(gid)
        u_coordv = np.asarray(u_coords, dtype=np.float32)
        gidv     = np.asarray(gids, dtype=np.uint32)
        sort_idx = np.argsort(u_coordv, axis=0)
        offset   = source_population_ranges[population][0]
        sorted_coords_dict = {}
        for i in range(0,sort_idx.size):
            sorted_coords_dict[offset+i] = soma_coords[gidv[sort_idx[i][0]]]
        
        append_cell_attributes(coords_path, population, sorted_coords_dict,
                                namespace='Sorted Coordinates', io_size=io_size, chunk_size=chunk_size,
                                value_chunk_size=value_chunk_size, comm=comm)
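
The sort-and-reindex step above assigns new contiguous gids, starting at the population offset, in ascending order of the U coordinate. A small worked sketch (hypothetical gids and offset):

import numpy as np

u = np.array([0.7, 0.1, 0.4], dtype=np.float32)  # U coordinates
gids = np.array([10, 11, 12], dtype=np.uint32)
order = np.argsort(u)
offset = 100                                     # population start
remapped = {offset + i: int(gids[order[i]]) for i in range(order.size)}
assert remapped == {100: 11, 101: 12, 102: 10}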
Example #19
def main(config, config_prefix, connectivity_path, coords_path,
         vertex_metrics_namespace, distances_namespace, destination, sources,
         normed, metric, graph_type, bin_size, font_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    env = Env(config_file=config, config_prefix=config_prefix)

    plot.plot_vertex_metrics(env,
                             connectivity_path,
                             coords_path,
                             vertex_metrics_namespace,
                             distances_namespace,
                             destination,
                             sources,
                             metric=metric,
                             normed=normed,
                             bin_size=bin_size,
                             fontSize=font_size,
                             graph_type=graph_type,
                             saveFig=True)
Example #20
def main(config, config_prefix, features_path, coords_path, features_namespace,
         arena_id, trajectory_id, distances_namespace, include, bin_size,
         from_spikes, normed, font_size, verbose, save_fig):

    utils.config_logging(verbose)

    logger = utils.get_script_logger(os.path.basename(script_name))

    env = Env(config_file=config, config_prefix=config_prefix)

    plot.plot_stimulus_spatial_rate_map(env,
                                        features_path,
                                        coords_path,
                                        arena_id,
                                        trajectory_id,
                                        features_namespace,
                                        distances_namespace,
                                        include,
                                        bin_size=bin_size,
                                        from_spikes=from_spikes,
                                        normed=normed,
                                        fontSize=font_size,
                                        saveFig=save_fig,
                                        verbose=verbose)
Example #21
def main(config, config_prefix, max_section_length, population, forest_path,
         template_path, output_path, io_size, chunk_size, value_chunk_size,
         dry_run, verbose):
    """

    :param population: str
    :param forest_path: str (path)
    :param output_path: str (path)
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param verbose: bool
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    env = Env(comm=comm,
              config_file=config,
              config_prefix=config_prefix,
              template_paths=template_path)

    if rank == 0:
        if not os.path.isfile(output_path):
            input_file = h5py.File(forest_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    (forest_pop_ranges, _) = read_population_ranges(forest_path)
    (forest_population_start,
     forest_population_count) = forest_pop_ranges[population]

    (pop_ranges, _) = read_population_ranges(output_path)

    (population_start, population_count) = pop_ranges[population]

    new_trees_dict = {}
    for gid, tree_dict in NeuroH5TreeGen(forest_path,
                                         population,
                                         io_size=io_size,
                                         comm=comm,
                                         topology=False):
        if gid is not None:
            logger.info("Rank %d received gid %d" % (rank, gid))
            logger.info(pprint.pformat(tree_dict))
            new_tree_dict = cells.resize_tree_sections(tree_dict,
                                                       max_section_length)
            logger.info(pprint.pformat(new_tree_dict))
            new_trees_dict[gid] = new_tree_dict

    if not dry_run:
        append_cell_trees(output_path,
                          population,
                          new_trees_dict,
                          io_size=io_size,
                          comm=comm)

    comm.barrier()
    if (not dry_run) and (rank == 0):
        logger.info('Appended resized trees to %s' % output_path)
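
The rank-0 block above seeds the output file with the /H5Types group before any parallel append. The same h5py idiom with context managers (file names hypothetical):

import h5py

with h5py.File('forest.h5', 'r') as src, h5py.File('resized.h5', 'w') as dst:
    src.copy('/H5Types', dst)  # copy the type-definition group into the new file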
Example #22
def main(arena_id, config, config_prefix, dataset_prefix, distances_namespace, spike_input_path, spike_input_namespace, spike_input_attr, input_features_namespaces, input_features_path, selection_path, output_path, io_size, trajectory_id, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank
    if io_size == -1:
        io_size = comm.size

    env = Env(comm=comm, config_file=config, 
              config_prefix=config_prefix, dataset_prefix=dataset_prefix, 
              results_path=output_path, spike_input_path=spike_input_path, 
              spike_input_namespace=spike_input_namespace, spike_input_attr=spike_input_attr,
              arena_id=arena_id, trajectory_id=trajectory_id, io_size=io_size)

    with open(selection_path, 'r') as f:
        selection = set(int(line) for line in f if line.strip())

    pop_ranges, pop_size = read_population_ranges(env.connectivity_file_path, comm=comm)

    distance_U_dict = {}
    distance_V_dict = {}
    range_U_dict = {}
    range_V_dict = {}

    selection_dict = defaultdict(set)

    comm0 = env.comm.Split(2 if rank == 0 else 0, 0)

    if rank == 0:
        for population in pop_ranges:
            distances = read_cell_attributes(env.data_file_path, population, namespace=distances_namespace, comm=comm0)
            soma_distances = { k: (v['U Distance'][0], v['V Distance'][0]) for (k,v) in distances }
            del distances
        
            numitems = len(list(soma_distances.keys()))

            if numitems == 0:
                continue

            distance_U_array = np.asarray([soma_distances[gid][0] for gid in soma_distances])
            distance_V_array = np.asarray([soma_distances[gid][1] for gid in soma_distances])

            U_min = np.min(distance_U_array)
            U_max = np.max(distance_U_array)
            V_min = np.min(distance_V_array)
            V_max = np.max(distance_V_array)

            range_U_dict[population] = (U_min, U_max)
            range_V_dict[population] = (V_min, V_max)
            
            distance_U = { gid: soma_distances[gid][0] for gid in soma_distances }
            distance_V = { gid: soma_distances[gid][1] for gid in soma_distances }
            
            distance_U_dict[population] = distance_U
            distance_V_dict[population] = distance_V
            
            min_dist = U_min
            max_dist = U_max 

            selection_dict[population] = set([ k for k in distance_U if k in selection ])
    

    env.comm.barrier()

    write_selection_file_path =  "%s/%s_selection.h5" % (env.results_path, env.modelName)

    if rank == 0:
        io_utils.mkout(env, write_selection_file_path)
    env.comm.barrier()
    selection_dict = env.comm.bcast(dict(selection_dict), root=0)
    env.cell_selection = selection_dict
    io_utils.write_cell_selection(env, write_selection_file_path)
    input_selection = io_utils.write_connection_selection(env, write_selection_file_path)
    if spike_input_path:
        io_utils.write_input_cell_selection(env, input_selection, write_selection_file_path)
    if input_features_path:
        for this_input_features_namespace in sorted(input_features_namespaces):
            for population in sorted(input_selection):
                logger.info(f"Extracting input features {this_input_features_namespace} for population {population}...")
                it = read_cell_attribute_selection(input_features_path, population, 
                                                   namespace=f"{this_input_features_namespace} {arena_id}", 
                                                   selection=input_selection[population], comm=env.comm)
                output_features_dict = { cell_gid : cell_features_dict for cell_gid, cell_features_dict in it }
                append_cell_attributes(write_selection_file_path, population, output_features_dict,
                                       namespace=f"{this_input_features_namespace} {arena_id}", 
                                       io_size=io_size, comm=env.comm)
    env.comm.barrier()
Example #23
def config_worker():
    """

    """
    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))
    if 'results_file_id' not in context():
        context.results_file_id = 'DG_optimize_network_subworlds_%s_%s' % \
                             (context.interface.worker_id, datetime.datetime.today().strftime('%Y%m%d_%H%M'))
    if 'env' not in context():
        try:
            context.comm = MPI.COMM_WORLD
            init_network()
        except Exception as err:
            context.logger.exception(err)
            raise err
        context.bin_size = 5.0

    param_bounds = {}
    param_names = []
    param_initial_dict = {}
    param_range_tuples = []
    opt_targets = {}

    for pop_name in context.target_populations:

        if (pop_name in context.env.netclamp_config.optimize_parameters):
            opt_params = context.env.netclamp_config.optimize_parameters[pop_name]
            param_ranges = opt_params['Parameter ranges']
        else:
            raise RuntimeError(
                "optimize_network_subworlds: population %s does not have optimization configuration" % pop_name)

        for target_name, target_val in viewitems(opt_params['Targets']):
            opt_targets['%s %s' % (pop_name, target_name)] = target_val

        for source, source_dict in sorted(viewitems(param_ranges), key=lambda k_v3: k_v3[0]):
            for sec_type, sec_type_dict in sorted(viewitems(source_dict), key=lambda k_v2: k_v2[0]):
                for syn_name, syn_mech_dict in sorted(viewitems(sec_type_dict), key=lambda k_v1: k_v1[0]):
                    for param_fst, param_rst in sorted(viewitems(syn_mech_dict), key=lambda k_v: k_v[0]):
                        if isinstance(param_rst, dict):
                            for const_name, const_range in sorted(viewitems(param_rst)):
                                param_path = (param_fst, const_name)
                                param_range_tuples.append((pop_name, source, sec_type, syn_name, param_path, const_range))
                                param_key = '%s.%s.%s.%s.%s.%s' % (pop_name, source, sec_type, syn_name, param_fst, const_name)
                                param_initial_value = (const_range[0] + const_range[1]) / 2.0  # midpoint of the bounds
                                param_initial_dict[param_key] = param_initial_value
                                param_bounds[param_key] = const_range
                                param_names.append(param_key)
                        else:
                            param_name = param_fst
                            param_range = param_rst
                            param_range_tuples.append((pop_name, source, sec_type, syn_name, param_name, param_range))
                            param_key = '%s.%s.%s.%s.%s' % (pop_name, source, sec_type, syn_name, param_name)
                            param_initial_value = (param_range[0] + param_range[1]) / 2.0  # midpoint of the bounds
                            param_initial_dict[param_key] = param_initial_value
                            param_bounds[param_key] = param_range
                            param_names.append(param_key)

    def from_param_vector(params):
        result = []
        assert (len(params) == len(param_range_tuples))
        for i, (pop_name, source, sec_type, syn_name, param_name, param_range) in enumerate(param_range_tuples):
            result.append((pop_name, source, sec_type, syn_name, param_name, params[i]))
        return result

    def to_param_vector(params):
        result = []
        for (pop_name, source, sec_type, syn_name, param_name, param_value) in params:
            result.append(param_value)
        return result

    context.param_names = param_names
    context.bounds = [ param_bounds[key] for key in param_names ]
    context.x0 = param_initial_dict
    context.from_param_vector = from_param_vector
    context.to_param_vector = to_param_vector
    context.target_val = opt_targets
    context.target_range = opt_targets
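
from_param_vector and to_param_vector above are inverse mappings between a flat optimizer vector and labeled parameter tuples. A trimmed round-trip sketch (hypothetical tuple contents):

param_range_tuples = [('GC', 'MC', 'apical', 'AMPA', 'weight', (0.1, 5.0))]  # hypothetical

def from_param_vector(params):
    assert len(params) == len(param_range_tuples)
    return [(pop, src, sec, syn, name, params[i])
            for i, (pop, src, sec, syn, name, _) in enumerate(param_range_tuples)]

def to_param_vector(params):
    return [value for (_, _, _, _, _, value) in params]

assert to_param_vector(from_param_vector([0.5])) == [0.5]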
Example #24
def main(config, coordinates, gid, field_width, peak_rate, input_features_path,
         input_features_namespaces, output_features_namespace,
         output_weights_path, output_features_path, initial_weights_path,
         reference_weights_path, h5types_path, synapse_name,
         initial_weights_namespace, reference_weights_namespace,
         output_weights_namespace, reference_weights_are_delta,
         connections_path, optimize_method, destination, sources, arena_id,
         max_delta_weight, field_width_scale, max_iter, verbose, dry_run,
         plot):
    """
    :param config: str (path to .yaml file)
    :param coordinates: tuple of float
    :param gid: int
    :param field_width: float
    :param peak_rate: float
    :param input_features_path: str (path to .h5 file)
    :param input_features_namespaces: str
    :param output_features_namespace: str
    :param output_weights_path: str (path to .h5 file)
    :param output_features_path: str (path to .h5 file)
    :param initial_weights_path: str (path to .h5 file)
    :param reference_weights_path: str (path to .h5 file)
    :param h5types_path: str (path to .h5 file)
    :param synapse_name: str
    :param initial_weights_namespace: str
    :param output_weights_namespace: str
    :param reference_weights_are_delta: bool
    :param connections_path: str (path to .h5 file)
    :param destination: str (population name)
    :param sources: list of str (population name)
    :param arena_id: str
    :param max_delta_weight: float
    :param field_width_scale: float
    :param max_iter: int
    :param verbose: bool
    :param dry_run: bool
    :param plot: bool
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    env = Env(config_file=config)

    if not dry_run:
        if output_weights_path is None:
            raise RuntimeError(
                'Missing required argument: output_weights_path.')
        if not os.path.isfile(output_weights_path):
            if initial_weights_path is not None and os.path.isfile(
                    initial_weights_path):
                input_file_path = initial_weights_path
            elif h5types_path is not None and os.path.isfile(h5types_path):
                input_file_path = h5types_path
            else:
                raise RuntimeError(
                    'Missing required source for h5types: either an initial_weights_path or an '
                    'h5types_path must be provided.')
            with h5py.File(output_weights_path, 'a') as output_file:
                with h5py.File(input_file_path, 'r') as input_file:
                    input_file.copy('/H5Types', output_file)

    this_input_features_namespaces = [
        '%s %s' % (input_features_namespace, arena_id)
        for input_features_namespace in input_features_namespaces
    ]
    features_attr_names = ['Arena Rate Map']
    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm
    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s
    arena_x, arena_y = stimulus.get_2D_arena_spatial_mesh(
        arena, spatial_resolution)
    dim_x = len(arena_x)
    dim_y = len(arena_y)

    if gid is None:
        raise RuntimeError('A target gid must be provided.')
    target_gid = gid

    dst_input_features = defaultdict(dict)
    num_fields = len(coordinates)
    this_field_width = np.array([field_width] * num_fields, dtype=np.float32)
    this_scaled_field_width = np.array([field_width * field_width_scale] *
                                       num_fields,
                                       dtype=np.float32)
    this_peak_rate = np.array([peak_rate] * num_fields, dtype=np.float32)
    this_x0 = np.array([x for x, y in coordinates], dtype=np.float32)
    this_y0 = np.array([y for x, y in coordinates], dtype=np.float32)
    this_rate_map = np.asarray(get_rate_map(this_x0, this_y0, this_field_width,
                                            this_peak_rate, arena_x, arena_y),
                               dtype=np.float32)
    target_map = np.asarray(get_rate_map(this_x0, this_y0,
                                         this_scaled_field_width,
                                         this_peak_rate, arena_x, arena_y),
                            dtype=np.float32)
    selectivity_type = env.selectivity_types['place']
    dst_input_features[destination][target_gid] = {
        'Selectivity Type': np.array([selectivity_type], dtype=np.uint8),
        'Num Fields': np.array([num_fields], dtype=np.uint8),
        'Field Width': this_field_width,
        'Peak Rate': this_peak_rate,
        'X Offset': this_x0,
        'Y Offset': this_y0,
        'Arena Rate Map': this_rate_map.ravel()
    }

    initial_weights_by_syn_id_dict = dict()
    selection = [target_gid]
    if initial_weights_path is not None:
        initial_weights_iter = \
            read_cell_attribute_selection(initial_weights_path, destination, namespace=initial_weights_namespace,
                                          selection=selection)
        syn_weight_attr_dict = dict(initial_weights_iter)

        syn_ids = syn_weight_attr_dict[target_gid]['syn_id']
        weights = syn_weight_attr_dict[target_gid][synapse_name]

        for (syn_id, weight) in zip(syn_ids, weights):
            initial_weights_by_syn_id_dict[int(syn_id)] = float(weight)

        logger.info(
            'destination: %s; gid %i; read initial synaptic weights for %i synapses'
            % (destination, target_gid, len(initial_weights_by_syn_id_dict)))

    reference_weights_by_syn_id_dict = None
    if reference_weights_path is not None:
        reference_weights_by_syn_id_dict = dict()
        reference_weights_iter = \
            read_cell_attribute_selection(reference_weights_path, destination, namespace=reference_weights_namespace,
                                          selection=selection)
        syn_weight_attr_dict = dict(reference_weights_iter)

        syn_ids = syn_weight_attr_dict[target_gid]['syn_id']
        weights = syn_weight_attr_dict[target_gid][synapse_name]

        for (syn_id, weight) in zip(syn_ids, weights):
            reference_weights_by_syn_id_dict[int(syn_id)] = float(weight)

        logger.info(
            'destination: %s; gid %i; read reference synaptic weights for %i synapses'
            % (destination, target_gid, len(reference_weights_by_syn_id_dict)))

    source_gid_set_dict = defaultdict(set)
    syn_ids_by_source_gid_dict = defaultdict(list)
    initial_weights_by_source_gid_dict = dict()
    if reference_weights_by_syn_id_dict is None:
        reference_weights_by_source_gid_dict = None
    else:
        reference_weights_by_source_gid_dict = dict()
    (graph, edge_attr_info) = read_graph_selection(file_name=connections_path,
                                                   selection=[target_gid],
                                                   namespaces=['Synapses'])
    syn_id_attr_index = None
    for source, edge_iter in viewitems(graph[destination]):
        if source not in sources:
            continue
        this_edge_attr_info = edge_attr_info[destination][source]
        if 'Synapses' in this_edge_attr_info and \
           'syn_id' in this_edge_attr_info['Synapses']:
            syn_id_attr_index = this_edge_attr_info['Synapses']['syn_id']
        for (destination_gid, edges) in edge_iter:
            assert destination_gid == target_gid
            source_gids, edge_attrs = edges
            syn_ids = edge_attrs['Synapses'][syn_id_attr_index]
            count = 0
            for i in range(len(source_gids)):
                this_source_gid = int(source_gids[i])
                source_gid_set_dict[source].add(this_source_gid)
                this_syn_id = int(syn_ids[i])
                if this_syn_id not in initial_weights_by_syn_id_dict:
                    this_weight = \
                        env.connection_config[destination][source].mechanisms['default'][synapse_name]['weight']
                    initial_weights_by_syn_id_dict[this_syn_id] = this_weight
                syn_ids_by_source_gid_dict[this_source_gid].append(this_syn_id)
                if this_source_gid not in initial_weights_by_source_gid_dict:
                    initial_weights_by_source_gid_dict[this_source_gid] = \
                        initial_weights_by_syn_id_dict[this_syn_id]
                    if reference_weights_by_source_gid_dict is not None:
                        reference_weights_by_source_gid_dict[this_source_gid] = \
                            reference_weights_by_syn_id_dict[this_syn_id]
                count += 1
            logger.info(
                'destination: %s; gid %i; set initial synaptic weights for %d inputs from source population '
                '%s' % (destination, destination_gid, count, source))

    syn_count_by_source_gid_dict = dict()
    for source_gid in syn_ids_by_source_gid_dict:
        syn_count_by_source_gid_dict[source_gid] = len(
            syn_ids_by_source_gid_dict[source_gid])

    input_rate_maps_by_source_gid_dict = dict()
    for source in sources:
        source_gids = list(source_gid_set_dict[source])
        for input_features_namespace in this_input_features_namespaces:
            input_features_iter = read_cell_attribute_selection(
                input_features_path,
                source,
                namespace=input_features_namespace,
                mask=set(features_attr_names),
                selection=source_gids)
            count = 0
            for gid, attr_dict in input_features_iter:
                input_rate_maps_by_source_gid_dict[gid] = attr_dict[
                    'Arena Rate Map'].reshape((dim_x, dim_y))
                count += 1
            logger.info('Read %s feature data for %i cells in population %s' %
                        (input_features_namespace, count, source))

    if is_interactive:
        context.update(locals())

    normalized_delta_weights_dict, arena_LS_map = \
        synapses.generate_structured_weights(target_map=target_map,
                                             initial_weight_dict=initial_weights_by_source_gid_dict,
                                             input_rate_map_dict=input_rate_maps_by_source_gid_dict,
                                             syn_count_dict=syn_count_by_source_gid_dict,
                                             max_delta_weight=max_delta_weight, arena_x=arena_x, arena_y=arena_y,
                                             reference_weight_dict=reference_weights_by_source_gid_dict,
                                             reference_weights_are_delta=reference_weights_are_delta,
                                             reference_weights_namespace=reference_weights_namespace,
                                             optimize_method=optimize_method, verbose=verbose, plot=plot)

    output_syn_ids = np.empty(len(initial_weights_by_syn_id_dict),
                              dtype='uint32')
    output_weights = np.empty(len(initial_weights_by_syn_id_dict),
                              dtype='float32')
    i = 0
    for source_gid, this_weight in viewitems(normalized_delta_weights_dict):
        for syn_id in syn_ids_by_source_gid_dict[source_gid]:
            output_syn_ids[i] = syn_id
            output_weights[i] = this_weight
            i += 1
    output_weights_dict = {
        target_gid: {
            'syn_id': output_syn_ids,
            synapse_name: output_weights
        }
    }

    logger.info('destination: %s; gid %i; generated %s for %i synapses' %
                (destination, target_gid, output_weights_namespace,
                 len(output_weights)))

    if not dry_run:
        this_output_weights_namespace = '%s %s' % (output_weights_namespace,
                                                   arena_id)
        logger.info('Destination: %s; appending %s ...' %
                    (destination, this_output_weights_namespace))
        append_cell_attributes(output_weights_path,
                               destination,
                               output_weights_dict,
                               namespace=this_output_weights_namespace)
        logger.info('Destination: %s; appended %s' %
                    (destination, this_output_weights_namespace))
        output_weights_dict.clear()
        if output_features_path is not None:
            this_output_features_namespace = '%s %s' % (
                output_features_namespace, arena_id)
            cell_attr_dict = dst_input_features[destination]
            cell_attr_dict[target_gid]['Arena State Map'] = np.asarray(
                arena_LS_map.ravel(), dtype=np.float32)
            logger.info('Destination: %s; appending %s ...' %
                        (destination, this_output_features_namespace))
            append_cell_attributes(output_features_path,
                                   destination,
                                   cell_attr_dict,
                                   namespace=this_output_features_namespace)

    if is_interactive:
        context.update(locals())
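
get_rate_map is not defined in this listing; a plausible stand-in (an assumption about its behavior) sums 2D Gaussian place fields over the arena mesh, treating field_width as a full width at half maximum:

import numpy as np

def gaussian_rate_map(x0, y0, field_width, peak_rate, arena_x, arena_y):
    xx, yy = np.meshgrid(arena_x, arena_y, indexing='ij')
    rate = np.zeros_like(xx, dtype=np.float32)
    for cx, cy, fw, pr in zip(x0, y0, field_width, peak_rate):
        sigma = fw / (2.0 * np.sqrt(2.0 * np.log(2.0)))  # FWHM -> sigma
        rate += pr * np.exp(-((xx - cx)**2 + (yy - cy)**2) / (2.0 * sigma**2))
    return rate

rm = gaussian_rate_map([0.0], [0.0], [15.0], [20.0],
                       np.linspace(-50, 50, 101), np.linspace(-50, 50, 101))
assert np.isclose(rm.max(), 20.0)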
Example #25
def main(config_file, config_prefix, input_path, population, template_paths,
         dataset_prefix, results_path, results_file_id, results_namespace_id,
         v_init, io_size, chunk_size, value_chunk_size, write_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if io_size == -1:
        io_size = comm.size

    if results_file_id is None:
        if rank == 0:
            results_file_id = uuid.uuid4()
        results_file_id = comm.bcast(results_file_id, root=0)
    if results_namespace_id is None:
        results_namespace_id = 'Cell Clamp Results'
    np.seterr(all='raise')
    verbose = True
    params = dict(locals())
    env = Env(**params)
    configure_hoc_env(env)
    if rank == 0:
        io_utils.mkout(env, env.results_file_path)
    env.comm.barrier()
    env.cell_selection = {}
    template_class = load_cell_template(env, population)

    if input_path is not None:
        env.data_file_path = input_path
        env.load_celltypes()

    synapse_config = env.celltypes[population]['synapses']

    weights_namespaces = []
    if 'weights' in synapse_config:
        has_weights = synapse_config['weights']
        if has_weights:
            if 'weights namespace' in synapse_config:
                weights_namespaces.append(synapse_config['weights namespace'])
            elif 'weights namespaces' in synapse_config:
                weights_namespaces.extend(synapse_config['weights namespaces'])
            else:
                weights_namespaces.append('Weights')
    else:
        has_weights = False

    start_time = time.time()
    count = 0
    gid_count = 0
    attr_dict = {}
    if input_path is None:
        cell_path = env.data_file_path
        connectivity_path = env.connectivity_file_path
    else:
        cell_path = input_path
        connectivity_path = input_path

    for gid, morph_dict in NeuroH5TreeGen(cell_path,
                                          population,
                                          io_size=io_size,
                                          comm=env.comm,
                                          topology=True):
        local_time = time.time()
        if gid is not None:
            color = 0
            comm0 = comm.Split(color, 0)

            logger.info('Rank %i gid: %i' % (rank, gid))
            cell_dict = {'morph': morph_dict}
            synapses_iter = read_cell_attribute_selection(cell_path,
                                                          population, [gid],
                                                          'Synapse Attributes',
                                                          comm=comm0)
            _, synapse_dict = next(synapses_iter)
            cell_dict['synapse'] = synapse_dict

            if has_weights:
                cell_weights_iters = [
                    read_cell_attribute_selection(cell_path,
                                                  population, [gid],
                                                  weights_namespace,
                                                  comm=comm0)
                    for weights_namespace in weights_namespaces
                ]
                weight_dict = dict(
                    zip_longest(weights_namespaces, cell_weights_iters))
                cell_dict['weight'] = weight_dict

            (graph,
             a) = read_graph_selection(file_name=connectivity_path,
                                       selection=[gid],
                                       namespaces=['Synapses', 'Connections'],
                                       comm=comm0)
            cell_dict['connectivity'] = (graph, a)

            gid_count += 1

            attr_dict[gid] = {}
            attr_dict[gid].update(
                cell_clamp.measure_passive(gid,
                                           population,
                                           v_init,
                                           env,
                                           cell_dict=cell_dict))
            attr_dict[gid].update(
                cell_clamp.measure_ap(gid,
                                      population,
                                      v_init,
                                      env,
                                      cell_dict=cell_dict))
            attr_dict[gid].update(
                cell_clamp.measure_ap_rate(gid,
                                           population,
                                           v_init,
                                           env,
                                           cell_dict=cell_dict))
            attr_dict[gid].update(
                cell_clamp.measure_fi(gid,
                                      population,
                                      v_init,
                                      env,
                                      cell_dict=cell_dict))

        else:
            color = 1
            comm0 = comm.Split(color, 0)
            logger.info('Rank %i gid is None' % (rank))
        comm0.Free()

        count += 1
        if (results_path is not None) and (count % write_size == 0):
            append_cell_attributes(env.results_file_path,
                                   population,
                                   attr_dict,
                                   namespace=env.results_namespace_id,
                                   comm=env.comm,
                                   io_size=env.io_size,
                                   chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size)
            attr_dict = {}

    env.comm.barrier()
    if results_path is not None:
        append_cell_attributes(env.results_file_path,
                               population,
                               attr_dict,
                               namespace=env.results_namespace_id,
                               comm=env.comm,
                               io_size=env.io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
    global_count = env.comm.gather(gid_count, root=0)

    MPI.Finalize()
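The loop above keeps per-rank memory bounded by flushing attr_dict every write_size gids and once more after the generator is exhausted. A minimal sketch of the same buffered-write pattern in isolation, where measure and flush are hypothetical stand-ins for the cell_clamp measurements and the append_cell_attributes call:

def buffered_measurements(gids, measure, flush, write_size=10):
    # Accumulate per-gid results and flush periodically so memory use
    # stays bounded regardless of how many gids this rank visits.
    buf = {}
    for count, gid in enumerate(gids, start=1):
        buf[gid] = measure(gid)
        if count % write_size == 0:
            flush(buf)
            buf = {}
    flush(buf)  # write whatever remains after the loop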
Example No. 26
0
def main(config, template_path, output_path, forest_path, populations,
         distance_bin_size, io_size, chunk_size, value_chunk_size, cache_size,
         verbose):
    """

    :param config:
    :param template_path:
    :param forest_path:
    :param populations:
    :param io_size:
    :param chunk_size:
    :param value_chunk_size:
    :param cache_size:
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=MPI.COMM_WORLD,
              config_file=config,
              template_paths=template_path)
    configure_hoc_env(env)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if output_path is None:
        output_path = forest_path

    if rank == 0:
        if not os.path.isfile(output_path):
            input_file = h5py.File(forest_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    layers = env.layers
    layer_idx_dict = {
        layers[layer_name]: layer_name
        for layer_name in ['GCL', 'IML', 'MML', 'OML', 'Hilus']
    }

    (pop_ranges, _) = read_population_ranges(forest_path, comm=comm)
    start_time = time.time()
    for population in populations:
        logger.info('Rank %i population: %s' % (rank, population))
        count = 0
        (population_start, _) = pop_ranges[population]
        template_class = load_cell_template(env,
                                            population,
                                            bcast_template=True)
        measures_dict = {}
        for gid, morph_dict in NeuroH5TreeGen(forest_path,
                                              population,
                                              io_size=io_size,
                                              comm=comm,
                                              topology=True):
            if gid is not None:
                logger.info('Rank %i gid: %i' % (rank, gid))
                cell = cells.make_neurotree_cell(template_class,
                                                 neurotree_dict=morph_dict,
                                                 gid=gid)
                secnodes_dict = morph_dict['section_topology']['nodes']

                apicalidx = set(cell.apicalidx)
                basalidx = set(cell.basalidx)

                dendrite_area_dict = {k: 0.0 for k in layer_idx_dict}
                dendrite_length_dict = {k: 0.0 for k in layer_idx_dict}
                dendrite_distances = []
                dendrite_diams = []
                for (i, sec) in enumerate(cell.sections):
                    if (i in apicalidx) or (i in basalidx):
                        secnodes = secnodes_dict[i]
                        for seg in sec.allseg():
                            L = seg.sec.L
                            nseg = seg.sec.nseg
                            seg_l = L / nseg
                            seg_area = h.area(seg.x)
                            seg_diam = seg.diam
                            seg_distance = get_distance_to_node(
                                cell,
                                list(cell.soma)[0], seg.sec, seg.x)
                            dendrite_diams.append(seg_diam)
                            dendrite_distances.append(seg_distance)
                            layer = synapses.get_node_attribute(
                                'layer', morph_dict, seg.sec, secnodes, seg.x)
                            dendrite_length_dict[layer] += seg_l
                            dendrite_area_dict[layer] += seg_area

                dendrite_distance_array = np.asarray(dendrite_distances)
                dendrite_diam_array = np.asarray(dendrite_diams)
                dendrite_distance_bin_range = int(
                    ((np.max(dendrite_distance_array)) -
                     np.min(dendrite_distance_array)) / distance_bin_size) + 1
                dendrite_distance_counts, dendrite_distance_edges = np.histogram(
                    dendrite_distance_array,
                    bins=dendrite_distance_bin_range,
                    density=False)
                dendrite_diam_sums, _ = np.histogram(
                    dendrite_distance_array,
                    weights=dendrite_diam_array,
                    bins=dendrite_distance_bin_range,
                    density=False)
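                # Bin-wise mean diameter: divide the weighted sums by the
                # counts, leaving empty bins at zero rather than NaN/inf.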
                dendrite_mean_diam_hist = np.zeros_like(dendrite_diam_sums)
                np.divide(dendrite_diam_sums,
                          dendrite_distance_counts,
                          where=dendrite_distance_counts > 0,
                          out=dendrite_mean_diam_hist)

                dendrite_area_per_layer = np.asarray([
                    dendrite_area_dict[k]
                    for k in sorted(dendrite_area_dict.keys())
                ],
                                                     dtype=np.float32)
                dendrite_length_per_layer = np.asarray([
                    dendrite_length_dict[k]
                    for k in sorted(dendrite_length_dict.keys())
                ],
                                                       dtype=np.float32)

                measures_dict[gid] = {
                    'dendrite_distance_hist_edges':
                    np.asarray(dendrite_distance_edges, dtype=np.float32),
                    'dendrite_distance_counts':
                    np.asarray(dendrite_distance_counts, dtype=np.int32),
                    'dendrite_mean_diam_hist':
                    np.asarray(dendrite_mean_diam_hist, dtype=np.float32),
                    'dendrite_area_per_layer':
                    dendrite_area_per_layer,
                    'dendrite_length_per_layer':
                    dendrite_length_per_layer
                }

                del cell
                count += 1
            else:
                logger.info('Rank %i gid is None' % rank)
        append_cell_attributes(output_path,
                               population,
                               measures_dict,
                               namespace='Tree Measurements',
                               comm=comm,
                               io_size=io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size,
                               cache_size=cache_size)
    MPI.Finalize()
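The mean-diameter profile computed above is a weighted histogram divided bin-wise by an unweighted one, with np.divide(..., where=...) guarding against empty bins. A self-contained sketch of that computation on hypothetical segment data:

import numpy as np

# Hypothetical per-segment distances (um) and diameters (um).
distances = np.array([10., 35., 42., 80., 95.])
diams = np.array([2.0, 1.5, 1.2, 0.8, 0.7])
bin_size = 25.0

nbins = int((distances.max() - distances.min()) / bin_size) + 1
counts, edges = np.histogram(distances, bins=nbins)
diam_sums, _ = np.histogram(distances, bins=nbins, weights=diams)

# Empty bins keep the initial zero instead of producing NaN/inf.
mean_diam = np.zeros_like(diam_sums)
np.divide(diam_sums, counts, where=counts > 0, out=mean_diam)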
Example No. 27
0
def main(arena_id, bin_sample_count, config, config_prefix, dataset_prefix,
         distances_namespace, distance_bin_extent, input_features_path,
         input_features_namespaces, populations, spike_input_path,
         spike_input_namespace, spike_input_attr, output_path, io_size,
         trajectory_id, write_selection, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm,
              config_file=config,
              config_prefix=config_prefix,
              dataset_prefix=dataset_prefix,
              results_path=output_path,
              spike_input_path=spike_input_path,
              spike_input_namespace=spike_input_namespace,
              spike_input_attr=spike_input_attr,
              arena_id=arena_id,
              trajectory_id=trajectory_id)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    pop_ranges, pop_size = read_population_ranges(env.connectivity_file_path,
                                                  comm=comm)

    distance_U_dict = {}
    distance_V_dict = {}
    range_U_dict = {}
    range_V_dict = {}

    selection_dict = defaultdict(set)

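    # comm0 contains only rank 0 (color 2), so the attribute reads below are
    # collective over a single-rank communicator.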
    comm0 = env.comm.Split(2 if rank == 0 else 0, 0)

    local_random = np.random.RandomState()
    local_random.seed(1000)

    if len(populations) == 0:
        populations = sorted(pop_ranges.keys())

    if rank == 0:
        for population in populations:
            distances = read_cell_attributes(env.data_file_path,
                                             population,
                                             namespace=distances_namespace,
                                             comm=comm0)

            soma_distances = {}
            if input_features_path is not None:
                num_fields_dict = {}
                for input_features_namespace in input_features_namespaces:
                    if arena_id is not None:
                        this_features_namespace = '%s %s' % (
                            input_features_namespace, arena_id)
                    else:
                        this_features_namespace = input_features_namespace
                    input_features_iter = read_cell_attributes(
                        input_features_path,
                        population,
                        namespace=this_features_namespace,
                        mask=set(['Num Fields']),
                        comm=comm0)
                    count = 0
                    for gid, attr_dict in input_features_iter:
                        num_fields_dict[gid] = attr_dict['Num Fields']
                        count += 1
                    logger.info(
                        'Read feature data from namespace %s for %i cells in population %s'
                        % (this_features_namespace, count, population))

                for (gid, v) in distances:
                    num_fields = num_fields_dict.get(gid, 0)
                    if num_fields > 0:
                        soma_distances[gid] = (v['U Distance'][0],
                                               v['V Distance'][0])
            else:
                for (gid, v) in distances:
                    soma_distances[gid] = (v['U Distance'][0],
                                           v['V Distance'][0])

            numitems = len(soma_distances)
            logger.info('read %s distances (%i elements)' %
                        (population, numitems))

            if numitems == 0:
                continue

            gid_array = np.asarray([gid for gid in soma_distances])
            distance_U_array = np.asarray(
                [soma_distances[gid][0] for gid in gid_array])
            distance_V_array = np.asarray(
                [soma_distances[gid][1] for gid in gid_array])

            U_min = np.min(distance_U_array)
            U_max = np.max(distance_U_array)
            V_min = np.min(distance_V_array)
            V_max = np.max(distance_V_array)

            range_U_dict[population] = (U_min, U_max)
            range_V_dict[population] = (V_min, V_max)

            distance_U = {
                gid: soma_distances[gid][0]
                for gid in soma_distances
            }
            distance_V = {
                gid: soma_distances[gid][1]
                for gid in soma_distances
            }

            distance_U_dict[population] = distance_U
            distance_V_dict[population] = distance_V

            min_dist = U_min
            max_dist = U_max

            distance_bins = np.arange(U_min, U_max, distance_bin_extent)
            distance_bin_array = np.digitize(distance_U_array, distance_bins)

            selection_set = set([])
            for bin_index in range(len(distance_bins) + 1):
                bin_gids = gid_array[np.where(
                    distance_bin_array == bin_index)[0]]
                if len(bin_gids) > 0:
                    selected_bin_gids = local_random.choice(
                        bin_gids, replace=False, size=bin_sample_count)
                    for gid in selected_bin_gids:
                        selection_set.add(int(gid))
            selection_dict[population] = selection_set

        yaml_output_dict = {}
        for k, v in utils.viewitems(selection_dict):
            yaml_output_dict[k] = list(sorted(v))

        yaml_output_path = '%s/DG_slice.yaml' % output_path
        with open(yaml_output_path, 'w') as outfile:
            yaml.dump(yaml_output_dict, outfile)

        del yaml_output_dict

    env.comm.barrier()

    write_selection_file_path = None
    if write_selection:
        write_selection_file_path = "%s/%s_selection.h5" % (env.results_path,
                                                            env.modelName)

    if write_selection_file_path is not None:
        if rank == 0:
            io_utils.mkout(env, write_selection_file_path)
        env.comm.barrier()
        selection_dict = env.comm.bcast(dict(selection_dict), root=0)
        env.cell_selection = selection_dict
        io_utils.write_cell_selection(env,
                                      write_selection_file_path,
                                      populations=populations)
        input_selection = io_utils.write_connection_selection(
            env, write_selection_file_path, populations=populations)

        if env.spike_input_ns is not None:
            io_utils.write_input_cell_selection(env,
                                                input_selection,
                                                write_selection_file_path,
                                                populations=populations)
    env.comm.barrier()
    MPI.Finalize()
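The selection above stratifies cells by U (septo-temporal) distance and samples a fixed number per bin. A standalone sketch of the same digitize-and-sample pattern on hypothetical data; note that RandomState.choice raises ValueError when a bin holds fewer gids than the requested sample size, which the script above does not guard against:

import numpy as np

rng = np.random.RandomState(1000)
gids = np.arange(100)
distance_U = rng.uniform(0., 1000., size=gids.shape)

bin_extent = 100.0
bins = np.arange(distance_U.min(), distance_U.max(), bin_extent)
bin_index = np.digitize(distance_U, bins)

selection = set()
for b in range(len(bins) + 1):
    bin_gids = gids[bin_index == b]
    if len(bin_gids) > 0:
        take = min(5, len(bin_gids))  # guard against small bins
        for gid in rng.choice(bin_gids, size=take, replace=False):
            selection.add(int(gid))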
Example No. 28
0
def config_worker():
    """

    """
    if 'debug' not in context():
        context.debug = False

    if context.debug:
        if context.comm.rank == 1:
            print('# of parameters: %i' % len(context.param_names))
            print('param_names: ', context.param_names)
            print('target_val: ', context.target_val)
            print('target_range: ', context.target_range)
            print('param_tuples: ', context.param_tuples)
            sys.stdout.flush()

    utils.config_logging(context.verbose)
    context.logger = utils.get_script_logger(os.path.basename(__file__))
    # TODO: Do you want this to be identical on all ranks in a subworld? You can use context.comm.bcast
    if 'results_file_id' not in context():
        context.results_file_id = 'DG_optimize_network_subworlds_%s_%s' % \
                             (context.interface.worker_id, datetime.datetime.today().strftime('%Y%m%d_%H%M'))

    # 'env' might be in context on controller, but it needs to be re-built when the controller is in a worker subworld
    try:
        if context.debug:
            print(
                'debug: config_worker; local_comm.rank: %i/%i; global_comm.rank: %i/%i'
                % (context.comm.rank, context.comm.size,
                   context.global_comm.rank, context.global_comm.size))
            if context.global_comm.rank == 0:
                print('t_start: %.1f, t_stop: %.1f' %
                      (context.t_start, context.t_stop))
            sys.stdout.flush()
        if context.debug:
            raise RuntimeError('config_worker: debug')
        init_network()
    except Exception as err:
        context.logger.exception(err)
        raise err

    if 't_start' not in context():
        context.t_start = 50.
    else:
        context.t_start = float(context.t_start)
    if 't_stop' not in context():
        context.t_stop = context.env.tstop
    else:
        context.t_stop = float(context.t_stop)
    time_range = (context.t_start, context.t_stop)

    context.target_trj_rate_map_dict = {}
    target_rate_map_path = context.target_rate_map_path
    target_rate_map_namespace = context.target_rate_map_namespace
    target_rate_map_arena = context.env.arena_id
    target_rate_map_trajectory = context.env.trajectory_id
    for pop_name in context.target_populations:
        my_cell_index_set = set(context.env.biophys_cells[pop_name].keys())
        trj_rate_maps = rate_maps_from_features(
            context.env,
            pop_name,
            cell_index_set=list(my_cell_index_set),
            input_features_path=target_rate_map_path,
            input_features_namespace=target_rate_map_namespace,
            time_range=time_range)
        if len(trj_rate_maps) > 0:
            context.target_trj_rate_map_dict[pop_name] = trj_rate_maps

    # TODO: This is not put in context or used elsewhere; as written it also
    # reads trj_rate_maps from the final iteration of the loop above.
    target_rate_vector_dict = {
        gid: trj_rate_maps[gid]
        for gid in trj_rate_maps
    }
    for gid, target_rate_vector in viewitems(target_rate_vector_dict):
        idxs = np.where(
            np.isclose(target_rate_vector, 0., atol=1e-4, rtol=1e-4))[0]
        target_rate_vector[idxs] = 0.
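The final loop snaps near-zero entries of each target rate vector to exactly zero, so silent periods are treated as true zeros downstream. A tiny illustration of the same np.isclose mask:

import numpy as np

rate = np.array([5.e-5, 3.2, -2.e-5, 7.9])
idxs = np.where(np.isclose(rate, 0., atol=1e-4, rtol=1e-4))[0]
rate[idxs] = 0.
assert (rate == np.array([0., 3.2, 0., 7.9])).all()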
Example No. 29
0
def main(config, template_path, output_path, forest_path, populations, io_size,
         chunk_size, value_chunk_size, cache_size, verbose):
    """

    :param config:
    :param template_path:
    :param forest_path:
    :param populations:
    :param io_size:
    :param chunk_size:
    :param value_chunk_size:
    :param cache_size:
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=MPI.COMM_WORLD,
              config_file=config,
              template_paths=template_path)
    h('objref nil, pc, templatePaths')
    h.load_file("nrngui.hoc")
    h.load_file("./templates/Value.hoc")
    h.xopen("./lib.hoc")
    h.pc = h.ParallelContext()

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    h.templatePaths = h.List()
    for path in env.templatePaths:
        h.templatePaths.append(h.Value(1, path))

    if output_path is None:
        output_path = forest_path

    if rank == 0:
        if not os.path.isfile(output_path):
            input_file = h5py.File(forest_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    (pop_ranges, _) = read_population_ranges(forest_path, comm=comm)
    start_time = time.time()
    for population in populations:
        logger.info('Rank %i population: %s' % (rank, population))
        count = 0
        (population_start, _) = pop_ranges[population]
        template_name = env.celltypes[population]['template']
        h.find_template(h.pc, h.templatePaths, template_name)
        template_class = getattr(h, template_name)
        measures_dict = {}
        for gid, morph_dict in NeuroH5TreeGen(forest_path,
                                              population,
                                              io_size=io_size,
                                              comm=comm,
                                              topology=True):
            if gid is not None:
                logger.info('Rank %i gid: %i' % (rank, gid))
                cell = cells.make_neurotree_cell(template_class,
                                                 neurotree_dict=morph_dict,
                                                 gid=gid)
                secnodes_dict = morph_dict['section_topology']['nodes']

                apicalidx = set(cell.apicalidx)
                basalidx = set(cell.basalidx)

                dendrite_area_dict = {k + 1: 0.0 for k in range(0, 4)}
                dendrite_length_dict = {k + 1: 0.0 for k in range(0, 4)}
                for (i, sec) in enumerate(cell.sections):
                    if (i in apicalidx) or (i in basalidx):
                        secnodes = secnodes_dict[i]
                        prev_layer = None
                        for seg in sec.allseg():
                            L = seg.sec.L
                            nseg = seg.sec.nseg
                            seg_l = L / nseg
                            seg_area = h.area(seg.x)
                            layer = cells.get_node_attribute(
                                'layer', morph_dict, seg.sec, secnodes, seg.x)
                            layer = layer if layer > 0 else (
                                prev_layer if prev_layer is not None else 1)
                            prev_layer = layer
                            dendrite_length_dict[layer] += seg_l
                            dendrite_area_dict[layer] += seg_area

                measures_dict[gid] = {
                    'dendrite_area':
                    np.asarray([
                        dendrite_area_dict[k]
                        for k in sorted(dendrite_area_dict.keys())
                    ], dtype=np.float32),
                    'dendrite_length':
                    np.asarray([
                        dendrite_length_dict[k]
                        for k in sorted(dendrite_length_dict.keys())
                    ], dtype=np.float32)
                }

                del cell
                count += 1
            else:
                logger.info('Rank %i gid is None' % rank)
        append_cell_attributes(output_path,
                               population,
                               measures_dict,
                               namespace='Tree Measurements',
                               comm=comm,
                               io_size=io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size,
                               cache_size=cache_size)
    MPI.Finalize()
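Segments whose node layer is reported as non-positive inherit the previous segment's layer, defaulting to layer 1 at the start of a section. A sketch of that forward-fill rule on a plain list of hypothetical layer codes:

def forward_fill_layers(layers, default=1):
    # Replace non-positive layer codes with the last valid one seen.
    filled, prev = [], None
    for layer in layers:
        if layer <= 0:
            layer = prev if prev is not None else default
        filled.append(layer)
        prev = layer
    return filled

assert forward_fill_layers([2, 0, 3, -1, -1]) == [2, 2, 3, 3, 3]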
Example No. 30
0
def main(connectivity_path, output_path, coords_path, distances_namespace,
         destination, bin_size, cache_size, verbose):
    """
    Measures vertex distribution with respect to septo-temporal distance

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace: 
    :param destination: 
    :param source: 

    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)
    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        destination_soma_distance_U[k] = v['U Distance'][0]
        destination_soma_distance_V[k] = v['V Distance'][0]

    del destination_soma_distances

    sources = []
    for (src, dst) in read_projection_names(connectivity_path):
        if dst == destination:
            sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in source_soma_distances[s]:
            this_source_soma_distance_U[k] = v['U Distance'][0]
            this_source_soma_distance_V[k] = v['V Distance'][0]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
    del source_soma_distances

    logger.info('reading connections %s -> %s...' %
                (str(sources), destination))
    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

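    # Iterate the per-source projection generators in lockstep; the check
    # below verifies that they all report the same destination gid.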
    for prj_gen_tuple in utils.zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise Exception(
                'destination %s: destination_gid %i not matched across multiple projection generators: %s'
                % (destination, destination_gid,
                   [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            logger.info('reading connections of gid %i' % destination_gid)
            for (source, (this_destination_gid,
                          rest)) in zip(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)
    comm.barrier()

    logger.info('merging distance dictionaries...')
    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source],
                                        op=add_bins_op,
                                        root=0)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source],
                                          op=add_bins_op,
                                          root=0)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source],
                                          op=add_bins_op,
                                          root=0)

    comm.barrier()

    if rank == 0:
        color = 1
    else:
        color = 0

    ## comm0 includes only rank 0
    comm0 = comm.Split(color, 0)

    if rank == 0:
        if output_path is None:
            output_path = connectivity_path
        logger.info('writing output to %s...' % output_path)

        #f = h5py.File(output_path, 'a', driver='mpio', comm=comm0)
        #if 'Nodes' in f:
        #    nodes_grp = f['Nodes']
        #else:
        #    nodes_grp = f.create_group('Nodes')
        #grp = nodes_grp.create_group('Connectivity Distance Histogram')
        #dst_grp = grp.create_group(destination)
        for source in sources:
            dist_histoCount, dist_bin_edges = finalize_bins(
                dist_bins[source], bin_size)
            dist_u_histoCount, dist_u_bin_edges = finalize_bins(
                dist_u_bins[source], bin_size)
            dist_v_histoCount, dist_v_bin_edges = finalize_bins(
                dist_v_bins[source], bin_size)
            np.savetxt('%s Distance U Bin Count.dat' % source,
                       dist_u_histoCount)
            np.savetxt('%s Distance U Bin Edges.dat' % source,
                       dist_u_bin_edges)
            np.savetxt('%s Distance V Bin Count.dat' % source,
                       dist_v_histoCount)
            np.savetxt('%s Distance V Bin Edges.dat' % source,
                       dist_v_bin_edges)
            np.savetxt('%s Distance Bin Count.dat' % source, dist_histoCount)
            np.savetxt('%s Distance Bin Edges.dat' % source, dist_bin_edges)
        #f.close()
    comm.barrier()
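Merging the per-rank histogram dictionaries relies on a commutative user-defined MPI reduction. A minimal sketch of that pattern; this add_bins is a hypothetical stand-in for the merge function the script imports:

from collections import defaultdict
from mpi4py import MPI

def add_bins(a, b, datatype):
    # Commutative merge of two {bin_index: count} dictionaries.
    merged = defaultdict(int, a)
    for k, v in b.items():
        merged[k] += v
    return dict(merged)

comm = MPI.COMM_WORLD
add_bins_op = MPI.Op.Create(add_bins, commute=True)
local_bins = {comm.rank % 3: 1}  # hypothetical per-rank bin counts
total = comm.reduce(local_bins, op=add_bins_op, root=0)
if comm.rank == 0:
    print(total)
add_bins_op.Free()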