Code Example #1
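This and the nine examples that follow are drawn from MPI-parallel scripts in the soltesz-lab/dentate project. They all revolve around `bcast_cell_attributes`, which reads a NeuroH5 attribute namespace on a root rank and broadcasts it to every rank of the communicator, yielding (gid, attribute dictionary) pairs. The snippets omit their module-level imports and command-line decorators; the preamble below is a reconstruction of the typical imports, offered as an assumption rather than a verbatim copy of any one script.

# Assumed preamble (not part of the original snippets): imports that the
# dentate scripts shown below typically rely on.
import copy
import gc
import math
import os
import sys
import time
from collections import defaultdict

import h5py
import numpy as np
from mpi4py import MPI

from neuroh5.io import (NeuroH5CellAttrGen, NeuroH5ProjectionGen,
                        append_cell_attributes, bcast_cell_attributes,
                        read_population_ranges)

from dentate.env import Env
from dentate import utils
from dentate.utils import viewitems, viewvalues  # six-style dict iteration helpers

Note also that the examples span several versions of the neuroh5 API: the newer ones pass the file path first (`bcast_cell_attributes(path, population, root=0, ...)`), while the older ones pass the communicator, or its address via `MPI._addressof(comm)`, as the first argument.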
def main(coords_path, coords_namespace):

    comm = MPI.COMM_WORLD
    rank = comm.rank
    size = comm.size

    print('Allocated %i ranks' % size)
    sys.stdout.flush()

    population_ranges = read_population_ranges(coords_path)[0]
    
    soma_coords = {}
    for population in ['GC']:

        attr_iter = bcast_cell_attributes(coords_path, population, namespace=coords_namespace,
                                          root=0, mask=set(['U Coordinate', 'V Coordinate', 'L Coordinate']),
                                          comm=comm)
        
        for cell_gid, coords_dict in attr_iter:

            cell_u = coords_dict['U Coordinate']
            cell_v = coords_dict['V Coordinate']
                
            print('Rank %i: gid = %i u = %f v = %f' % (rank, cell_gid, cell_u, cell_v))
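The loop above only prints each broadcast coordinate, and `soma_coords` is declared but never filled in this excerpt. A one-line continuation in the style of Examples #2 and #3, offered as an illustrative assumption rather than the original code, would store the coordinates per population:

            # Hypothetical continuation: keep the broadcast coordinates,
            # keyed by population and gid, as the later examples do.
            soma_coords.setdefault(population, {})[cell_gid] = (cell_u, cell_v)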
Code Example #2
File: bcast_coords.py (Project: soltesz-lab/dentate)
def main(config, coords_path, coords_namespace, distances_namespace,
         populations, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)
    output_path = coords_path

    soma_coords = {}
    soma_distances = {}

    if rank == 0:
        logger.info('Reading population coordinates and distances...')

    for population in populations:

        coords = bcast_cell_attributes(coords_path,
                                       population,
                                       0,
                                       namespace=coords_namespace,
                                       comm=comm)
        soma_coords[population] = {
            k:
            (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0])
            for (k, v) in coords
        }
        del coords
        gc.collect()

        distances = bcast_cell_attributes(coords_path,
                                          population,
                                          0,
                                          namespace=distances_namespace,
                                          comm=comm)
        soma_distances[population] = {
            k: (v['U Distance'][0], v['V Distance'][0])
            for (k, v) in distances
        }
        del distances
        gc.collect()
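Note the `del`/`gc.collect()` pair after each broadcast generator is exhausted: every rank holds a complete copy of the broadcast attributes, so freeing one namespace before reading the next keeps per-rank peak memory bounded when looping over several populations.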
Code Example #3
File: project_somas.py (Project: soltesz-lab/dentate)
def main(config, coords_path, coords_namespace, resample, resolution, populations, projection_depth, io_size, chunk_size, value_chunk_size, cache_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(script_name)  # script_name is defined at module scope in the original script
    
    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)

    soma_coords = {}

    if rank == 0:
        logger.info('Reading population coordinates...')

    rotate = env.geometry['Parametric Surface']['Rotation']
    min_l = float('inf')
    max_l = 0.0
    population_ranges = read_population_ranges(coords_path)[0]
    population_extents = {}
    for population in population_ranges:
        min_extent = env.geometry['Cell Layers']['Minimum Extent'][population]
        max_extent = env.geometry['Cell Layers']['Maximum Extent'][population]
        min_l = min(min_extent[2], min_l)
        max_l = max(max_extent[2], max_l)
        population_extents[population] = (min_extent, max_extent)
        
    for population in populations:
        coords = bcast_cell_attributes(coords_path, population, 0,
                                       namespace=coords_namespace)

        soma_coords[population] = {
            k: (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0])
            for (k, v) in coords
        }
        del coords
        gc.collect()
    
    output_path = coords_path
    soma_coords = icp_transform(comm, soma_coords, projection_depth, population_extents,
                                populations=populations, rotate=rotate, verbose=verbose)
    
    for population in populations:

        if rank == 0:
            logger.info('Writing transformed coordinates for population %s...' % population)

        append_cell_attributes(output_path, population, soma_coords[population],
                               namespace='Soma Projections', comm=comm,
                               io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size, cache_size=cache_size)
Code Example #4
def main(config, coords_path, io_size, chunk_size, value_chunk_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)
    output_path = coords_path
    output_path = coords_path

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    source_population_ranges = read_population_ranges(coords_path)[0]
    source_populations = list(source_population_ranges.keys())

    for population in source_populations:
        if rank == 0:
            logger.info('population: %s' % population)
        soma_coords = dict(bcast_cell_attributes(coords_path, population, root=0,
                                                 namespace='Interpolated Coordinates',
                                                 comm=comm))  # materialize the generator into a dict
        # print(soma_coords.keys())
        u_coords = []
        gids = []
        for gid, attrs in viewitems(soma_coords):
            u_coords.append(attrs['U Coordinate'])
            gids.append(gid)
        u_coordv = np.asarray(u_coords, dtype=np.float32)
        gidv     = np.asarray(gids, dtype=np.uint32)
        sort_idx = np.argsort(u_coordv, axis=0)
        offset   = source_population_ranges[population][0]
        sorted_coords_dict = {}
        for i in range(0,sort_idx.size):
            sorted_coords_dict[offset+i] = soma_coords[gidv[sort_idx[i][0]]]
        
        append_cell_attributes(coords_path, population, sorted_coords_dict,
                               namespace='Sorted Coordinates', io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size, comm=comm)
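The reindexing step above deserves a closer look: `np.argsort` over the (N, 1) array of U coordinates gives the permutation that sorts cells septo-temporally, and contiguous new gids starting at the population offset are then mapped onto the old cells' attributes. A toy illustration (all values are made up):

import numpy as np

u_coordv = np.asarray([[0.7], [0.2], [0.5]], dtype=np.float32)
gidv = np.asarray([10, 11, 12], dtype=np.uint32)
sort_idx = np.argsort(u_coordv, axis=0)   # [[1], [2], [0]]
offset = 100
# New gid 100 receives the attributes of old gid 11 (smallest U), and so on.
sorted_gids = {offset + i: gidv[sort_idx[i][0]] for i in range(sort_idx.size)}
# {100: 11, 101: 12, 102: 10}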
Code Example #5
def main(config, config_prefix, coords_path, distances_namespace, bin_distance,
         selectivity_path, selectivity_namespace, subset_seed, arena_id,
         populations, io_size, cache_size, verbose, debug, show_fig, save_fig,
         save_fig_dir, font_size, fig_size, colormap, fig_format):
    """

    :param config: str (.yaml file name)
    :param config_prefix: str (path to dir)
    :param coords_path: str (path to file)
    :param distances_namespace: str
    :param bin_distance: float
    :param selectivity_path: str
    :param selectivity_namespace: str
    :param subset_seed: int; for reproducible choice of gids to plot individual rate maps
    :param arena_id: str
    :param populations: tuple of str
    :param io_size: int
    :param cache_size: int
    :param verbose: bool
    :param debug: bool
    :param show_fig: bool
    :param save_fig: str (base file name)
    :param save_fig_dir:  str (path to dir)
    :param font_size: float
    :param fig_size: tuple of float
    :param colormap: str
    :param fig_format: str
    """
    comm = MPI.COMM_WORLD
    rank = comm.rank

    config_logging(verbose)  # logger is defined at module scope in the original script

    env = Env(comm=comm,
              config_file=config,
              config_prefix=config_prefix,
              template_paths=None)
    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    fig_options = copy.copy(default_fig_options)
    fig_options.saveFigDir = save_fig_dir
    fig_options.fontSize = font_size
    fig_options.figFormat = fig_format
    fig_options.showFig = show_fig
    fig_options.figSize = fig_size

    if save_fig is not None:
        save_fig = '%s %s' % (save_fig, arena_id)
    fig_options.saveFig = save_fig

    population_ranges = read_population_ranges(selectivity_path, comm)[0]
    coords_population_ranges = read_population_ranges(coords_path, comm)[0]

    if len(populations) == 0:
        populations = ('MC', 'ConMC', 'LPP', 'GC', 'MPP', 'CA3c')

    valid_selectivity_namespaces = dict()
    if rank == 0:
        for population in populations:
            if population not in population_ranges:
                raise RuntimeError(
                    'plot_input_selectivity_features: specified population: %s not found in '
                    'provided selectivity_path: %s' %
                    (population, selectivity_path))
            if population not in env.stimulus_config['Selectivity Type Probabilities']:
                raise RuntimeError(
                    'plot_input_selectivity_features: selectivity type not specified for '
                    'population: %s' % population)
            valid_selectivity_namespaces[population] = []
            with h5py.File(selectivity_path, 'r') as selectivity_f:
                for this_namespace in selectivity_f['Populations'][population]:
                    if f'{selectivity_namespace} {arena_id}' in this_namespace:
                        valid_selectivity_namespaces[population].append(
                            this_namespace)
                if len(valid_selectivity_namespaces[population]) == 0:
                    raise RuntimeError(
                        'plot_input_selectivity_features: no selectivity data in arena: %s found '
                        'for specified population: %s in provided selectivity_path: %s'
                        % (arena_id, population, selectivity_path))

    valid_selectivity_namespaces = comm.bcast(valid_selectivity_namespaces,
                                              root=0)
    selectivity_type_names = dict(
        (val, key) for (key, val) in viewitems(env.selectivity_types))

    reference_u_arc_distance_bounds = None
    reference_v_arc_distance_bounds = None
    if rank == 0:
        for population in populations:
            if population not in coords_population_ranges:
                raise RuntimeError(
                    'plot_input_selectivity_features: specified population: %s not found in '
                    'provided coords_path: %s' % (population, coords_path))
            with h5py.File(coords_path, 'r') as coords_f:
                pop_size = population_ranges[population][1]
                unique_gid_count = len(
                    set(coords_f['Populations'][population]
                        [distances_namespace]['U Distance']['Cell Index'][:]))
                if pop_size != unique_gid_count:
                    raise RuntimeError(
                        'plot_input_selectivity_features: only %i/%i unique cell indexes found '
                        'for specified population: %s in provided coords_path: %s'
                        %
                        (unique_gid_count, pop_size, population, coords_path))
                if reference_u_arc_distance_bounds is None:
                    try:
                        reference_u_arc_distance_bounds = \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference U Min'], \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference U Max']
                    except Exception:
                        raise RuntimeError(
                            'plot_input_selectivity_features: problem locating attributes '
                            'containing reference bounds in namespace: %s for population: %s from '
                            'coords_path: %s' %
                            (distances_namespace, population, coords_path))
                if reference_v_arc_distance_bounds is None:
                    try:
                        reference_v_arc_distance_bounds = \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference V Min'], \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference V Max']
                    except Exception:
                        raise RuntimeError(
                            'plot_input_selectivity_features: problem locating attributes '
                            'containing reference bounds in namespace: %s for population: %s from '
                            'coords_path: %s' %
                            (distances_namespace, population, coords_path))
    reference_u_arc_distance_bounds = comm.bcast(
        reference_u_arc_distance_bounds, root=0)
    reference_v_arc_distance_bounds = comm.bcast(
        reference_v_arc_distance_bounds, root=0)

    u_edges = np.arange(reference_u_arc_distance_bounds[0],
                        reference_u_arc_distance_bounds[1] + bin_distance / 2.,
                        bin_distance)
    v_edges = np.arange(reference_v_arc_distance_bounds[0],
                        reference_v_arc_distance_bounds[1] + bin_distance / 2.,
                        bin_distance)

    if arena_id not in env.stimulus_config['Arena']:
        raise RuntimeError(
            'Arena with ID: %s not specified by configuration at file path: %s'
            % (arena_id, config_prefix + '/' + config))

    arena = env.stimulus_config['Arena'][arena_id]
    arena_x_mesh, arena_y_mesh = None, None
    if rank == 0:
        arena_x_mesh, arena_y_mesh = \
            get_2D_arena_spatial_mesh(arena=arena, spatial_resolution=env.stimulus_config['Spatial Resolution'])
    arena_x_mesh = comm.bcast(arena_x_mesh, root=0)
    arena_y_mesh = comm.bcast(arena_y_mesh, root=0)

    for population in populations:

        start_time = time.time()
        u_distances_by_gid = dict()
        v_distances_by_gid = dict()
        distances_attr_gen = \
            bcast_cell_attributes(coords_path, population, root=0, namespace=distances_namespace, comm=comm)
        for gid, distances_attr_dict in distances_attr_gen:
            u_distances_by_gid[gid] = distances_attr_dict['U Distance'][0]
            v_distances_by_gid[gid] = distances_attr_dict['V Distance'][0]

        if rank == 0:
            logger.info(
                'Reading %i cell positions for population %s took %.2f s' %
                (len(u_distances_by_gid), population,
                 time.time() - start_time))

        for this_selectivity_namespace in valid_selectivity_namespaces[population]:
            start_time = time.time()
            if rank == 0:
                logger.info('Reading from %s namespace for population %s...' %
                            (this_selectivity_namespace, population))
            gid_count = 0
            gathered_cell_attributes = defaultdict(list)
            gathered_component_attributes = defaultdict(list)
            u_distances_by_cell = list()
            v_distances_by_cell = list()
            u_distances_by_component = list()
            v_distances_by_component = list()
            rate_map_sum_by_module = defaultdict(
                lambda: np.zeros_like(arena_x_mesh))
            start_time = time.time()
            selectivity_attr_gen = NeuroH5CellAttrGen(
                selectivity_path,
                population,
                namespace=this_selectivity_namespace,
                comm=comm,
                io_size=io_size,
                cache_size=cache_size)
            for iter_count, (gid, selectivity_attr_dict) in enumerate(selectivity_attr_gen):
                if gid is not None:
                    gid_count += 1
                    this_selectivity_type = selectivity_attr_dict['Selectivity Type'][0]
                    this_selectivity_type_name = selectivity_type_names[this_selectivity_type]
                    input_cell_config = \
                        get_input_cell_config(selectivity_type=this_selectivity_type,
                                              selectivity_type_names=selectivity_type_names,
                                              selectivity_attr_dict=selectivity_attr_dict)
                    rate_map = input_cell_config.get_rate_map(x=arena_x_mesh,
                                                              y=arena_y_mesh)
                    u_distances_by_cell.append(u_distances_by_gid[gid])
                    v_distances_by_cell.append(v_distances_by_gid[gid])
                    this_cell_attrs, component_count, this_component_attrs = \
                        input_cell_config.gather_attributes()
                    for attr_name, attr_val in viewitems(this_cell_attrs):
                        gathered_cell_attributes[attr_name].append(attr_val)
                    gathered_cell_attributes['Mean Rate'].append(
                        np.mean(rate_map))
                    if component_count > 0:
                        u_distances_by_component.extend(
                            [u_distances_by_gid[gid]] * component_count)
                        v_distances_by_component.extend(
                            [v_distances_by_gid[gid]] * component_count)
                        for attr_name, attr_val in viewitems(this_component_attrs):
                            gathered_component_attributes[attr_name].extend(attr_val)
                    this_module_id = this_cell_attrs['Module ID']
                    if debug and rank == 0:
                        fig_title = '%s %s cell %i' % (
                            population, this_selectivity_type_name, gid)
                        if save_fig is not None:
                            fig_options.saveFig = '%s %s' % (save_fig,
                                                             fig_title)
                        plot_2D_rate_map(
                            x=arena_x_mesh,
                            y=arena_y_mesh,
                            rate_map=rate_map,
                            peak_rate=env.stimulus_config['Peak Rate'][population][this_selectivity_type],
                            title='%s\nModule: %i' % (fig_title, this_module_id),
                            **fig_options())
                    rate_map_sum_by_module[this_module_id] = np.add(
                        rate_map, rate_map_sum_by_module[this_module_id])
                if debug and iter_count >= 10:
                    break

            cell_count_hist, _, _ = np.histogram2d(u_distances_by_cell,
                                                   v_distances_by_cell,
                                                   bins=[u_edges, v_edges])
            component_count_hist, _, _ = np.histogram2d(
                u_distances_by_component,
                v_distances_by_component,
                bins=[u_edges, v_edges])

            if debug:
                context.update(locals())

            gathered_cell_attr_hist = dict()
            gathered_component_attr_hist = dict()
            for key in gathered_cell_attributes:
                gathered_cell_attr_hist[key], _, _ = \
                    np.histogram2d(u_distances_by_cell, v_distances_by_cell, bins=[u_edges, v_edges],
                                   weights=gathered_cell_attributes[key])
            for key in gathered_component_attributes:
                gathered_component_attr_hist[key], _, _ = \
                    np.histogram2d(u_distances_by_component, v_distances_by_component, bins=[u_edges, v_edges],
                                   weights=gathered_component_attributes[key])
            gid_count = comm.gather(gid_count, root=0)
            cell_count_hist = comm.gather(cell_count_hist, root=0)
            component_count_hist = comm.gather(component_count_hist, root=0)
            gathered_cell_attr_hist = comm.gather(gathered_cell_attr_hist,
                                                  root=0)
            gathered_component_attr_hist = comm.gather(
                gathered_component_attr_hist, root=0)
            rate_map_sum_by_module = dict(rate_map_sum_by_module)
            rate_map_sum_by_module = comm.gather(rate_map_sum_by_module,
                                                 root=0)

            if rank == 0:
                gid_count = sum(gid_count)
                cell_count_hist = np.sum(cell_count_hist, axis=0)
                component_count_hist = np.sum(component_count_hist, axis=0)
                merged_cell_attr_hist = defaultdict(
                    lambda: np.zeros_like(cell_count_hist))
                merged_component_attr_hist = defaultdict(
                    lambda: np.zeros_like(component_count_hist))
                for each_cell_attr_hist in gathered_cell_attr_hist:
                    for key in each_cell_attr_hist:
                        merged_cell_attr_hist[key] = np.add(
                            merged_cell_attr_hist[key],
                            each_cell_attr_hist[key])
                for each_component_attr_hist in gathered_component_attr_hist:
                    for key in each_component_attr_hist:
                        merged_component_attr_hist[key] = np.add(
                            merged_component_attr_hist[key],
                            each_component_attr_hist[key])
                merged_rate_map_sum_by_module = defaultdict(
                    lambda: np.zeros_like(arena_x_mesh))
                for each_rate_map_sum_by_module in rate_map_sum_by_module:
                    for this_module_id in each_rate_map_sum_by_module:
                        merged_rate_map_sum_by_module[this_module_id] = \
                            np.add(merged_rate_map_sum_by_module[this_module_id],
                                   each_rate_map_sum_by_module[this_module_id])

                logger.info('Processing %i %s %s cells took %.2f s' %
                            (gid_count, population, this_selectivity_type_name,
                             time.time() - start_time))

                if debug:
                    context.update(locals())

                for key in merged_cell_attr_hist:
                    fig_title = '%s %s cells %s distribution' % (
                        population, this_selectivity_type_name, key)
                    if save_fig is not None:
                        fig_options.saveFig = '%s %s' % (save_fig, fig_title)
                    if colormap is not None:
                        fig_options.colormap = colormap
                    title = '%s %s cells\n%s distribution' % (
                        population, this_selectivity_type_name, key)
                    fig = plot_2D_histogram(
                        merged_cell_attr_hist[key],
                        x_edges=u_edges,
                        y_edges=v_edges,
                        norm=cell_count_hist,
                        ylabel='Transverse position (um)',
                        xlabel='Septo-temporal position (um)',
                        title=title,
                        cbar_label='Mean value per bin',
                        cbar=True,
                        **fig_options())
                    close_figure(fig)

                for key in merged_component_attr_hist:
                    fig_title = '%s %s cells %s distribution' % (
                        population, this_selectivity_type_name, key)
                    if save_fig is not None:
                        fig_options.saveFig = '%s %s' % (save_fig, fig_title)
                    title = '%s %s cells\n%s distribution' % (
                        population, this_selectivity_type_name, key)
                    fig = plot_2D_histogram(
                        merged_component_attr_hist[key],
                        x_edges=u_edges,
                        y_edges=v_edges,
                        norm=component_count_hist,
                        ylabel='Transverse position (um)',
                        xlabel='Septo-temporal position (um)',
                        title=title,
                        cbar_label='Mean value per bin',
                        cbar=True,
                        **fig_options())
                    close_figure(fig)

                for this_module_id in merged_rate_map_sum_by_module:
                    fig_title = '%s %s Module %i summed rate maps' % \
                                (population, this_selectivity_type_name, this_module_id)
                    if save_fig is not None:
                        fig_options.saveFig = '%s %s' % (save_fig, fig_title)
                    fig = plot_2D_rate_map(
                        x=arena_x_mesh,
                        y=arena_y_mesh,
                        rate_map=merged_rate_map_sum_by_module[this_module_id],
                        title='%s %s summed rate maps\nModule %i' %
                        (population, this_selectivity_type_name,
                         this_module_id),
                        **fig_options())
                    close_figure(fig)

    if is_interactive and rank == 0:
        context.update(locals())
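Throughout the plotting step above, attribute histograms are passed to `plot_2D_histogram` together with `norm=cell_count_hist` (or `component_count_hist`) and labeled 'Mean value per bin', i.e. a per-bin attribute sum divided bin-wise by a per-bin cell count. A standalone sketch of that normalization, under the assumption that this is what the `norm` argument does:

import numpy as np

def mean_per_bin(weighted_hist, count_hist):
    # Divide a per-bin attribute sum by the per-bin cell count, leaving
    # empty bins at zero instead of NaN.
    return np.where(count_hist > 0, weighted_hist / np.maximum(count_hist, 1), 0.)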
Code Example #6
def main(forest_path, connectivity_namespace, coords_path, coords_namespace, io_size, chunk_size, value_chunk_size,
         cache_size):
    """

    :param forest_path:
    :param connectivity_namespace:
    :param coords_path:
    :param coords_namespace:
    :param io_size:
    :param chunk_size:
    :param value_chunk_size:
    :param cache_size:
    """
    comm = MPI.COMM_WORLD
    rank = comm.rank  # the process ID (an integer in 0..comm.size-1)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        print('%i ranks have been allocated' % comm.size)
    sys.stdout.flush()

    start_time = time.time()

    soma_coords = {}
    # Note: this example uses an older neuroh5 API in which the MPI communicator
    # is passed explicitly via MPI._addressof(comm).
    source_populations = list(read_population_ranges(MPI._addressof(comm), coords_path).keys())
    for population in source_populations:
        soma_coords[population] = bcast_cell_attributes(MPI._addressof(comm), 0, coords_path, population,
                                                        namespace=coords_namespace)

    for population in soma_coords:
        for cell in viewvalues(soma_coords[population]):
            # u and v are presumably module-level arrays of parametric grid
            # coordinates in the original script.
            cell['u_index'] = get_array_index(u, cell['U Coordinate'][0])
            cell['v_index'] = get_array_index(v, cell['V Coordinate'][0])

    target = 'GC'

    layer_set, swc_type_set, syn_type_set = set(), set(), set()
    for source in layers[target]:
        layer_set.update(layers[target][source])
        swc_type_set.update(swc_types[target][source])
        syn_type_set.update(syn_types[target][source])

    count = 0
    for target_gid, attributes_dict in NeuroH5CellAttrGen(MPI._addressof(comm), forest_path, target, io_size=io_size,
                                                        cache_size=cache_size, namespace='Synapse_Attributes'):
        last_time = time.time()
        connection_dict = {}
        p_dict = {}
        source_gid_dict = {}
        if target_gid is None:
            print('Rank %i target gid is None' % rank)
        else:
            print('Rank %i received attributes for target: %s, gid: %i' % (rank, target, target_gid))
            synapse_dict = attributes_dict['Synapse_Attributes']
            connection_dict[target_gid] = {}
            local_np_random.seed(target_gid + connectivity_seed_offset)
            connection_dict[target_gid]['source_gid'] = np.array([], dtype='uint32')
            connection_dict[target_gid]['syn_id'] = np.array([], dtype='uint32')

            for layer in layer_set:
                for swc_type in swc_type_set:
                    for syn_type in syn_type_set:
                        sources, this_proportions = filter_sources(target, layer, swc_type, syn_type)
                        if sources:
                            if rank == 0 and count == 0:
                                source_list_str = '[' + ', '.join(['%s' % xi for xi in sources]) + ']'
                                print('Connections to target: %s in layer: %i '
                                      '(swc_type: %i, syn_type: %i): %s' %
                                      (target, layer, swc_type, syn_type, source_list_str))
                            p, source_gid = np.array([]), np.array([])
                            for source, this_proportion in zip(sources, this_proportions):
                                if source not in source_gid_dict:
                                    this_p, this_source_gid = p_connect.get_p(target, source, target_gid, soma_coords,
                                                                              distance_U, distance_V)
                                    source_gid_dict[source] = this_source_gid
                                    p_dict[source] = this_p
                                else:
                                    this_source_gid = source_gid_dict[source]
                                    this_p = p_dict[source]
                                p = np.append(p, this_p * this_proportion)
                                source_gid = np.append(source_gid, this_source_gid)
                            syn_indexes = filter_synapses(synapse_dict, layer, swc_type, syn_type)
                            connection_dict[target_gid]['syn_id'] = \
                                np.append(connection_dict[target_gid]['syn_id'],
                                          synapse_dict['syn_id'][syn_indexes]).astype('uint32', copy=False)
                            this_source_gid = local_np_random.choice(source_gid, len(syn_indexes), p=p)
                            connection_dict[target_gid]['source_gid'] = \
                                np.append(connection_dict[target_gid]['source_gid'],
                                          this_source_gid).astype('uint32', copy=False)
            count += 1
            print('Rank %i took %i s to compute connectivity for target: %s, gid: %i' % (rank, time.time() - last_time,
                                                                                         target, target_gid))
            sys.stdout.flush()
        last_time = time.time()
        append_cell_attributes(MPI._addressof(comm), forest_path, target, connection_dict,
                               namespace=connectivity_namespace, io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        if rank == 0:
            print('Appending connectivity attributes for target: %s took %i s' % (target, time.time() - last_time))
        sys.stdout.flush()
        del connection_dict
        del p_dict
        del source_gid_dict
        gc.collect()

    global_count = comm.gather(count, root=0)
    if rank == 0:
        print('%i ranks took %i s to compute connectivity for %i cells' % (comm.size, time.time() - start_time,
                                                                           np.sum(global_count)))
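The inner loop above builds a concatenated vector of candidate source gids and a matching probability vector (per-source connection probabilities scaled by layer and synapse-type proportions), then draws one source per synapse with a weighted random choice. A toy sketch of that draw (all values illustrative; `p` must sum to 1):

import numpy as np

rng = np.random.RandomState(42)
source_gid = np.array([1000, 1001, 2000, 2001])   # candidates from two sources
p = np.array([0.3, 0.3, 0.2, 0.2])                # per-source p * proportion
chosen = rng.choice(source_gid, 5, p=p)           # one source gid per synapse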
Code Example #7
def main(config, config_prefix, selectivity_path, selectivity_namespace, coords_path, distances_namespace,
         arena_id, populations, n_trials, io_size, chunk_size,
         value_chunk_size, cache_size, write_size, output_path, spikes_namespace, spike_train_attr_name, phase_mod,
         gather, debug, plot, show_fig, save_fig, save_fig_dir, font_size, fig_format,
         verbose, dry_run):
    """

    :param config: str (.yaml file name)
    :param config_prefix: str (path to dir)
    :param selectivity_path: str (path to file)
    :param selectivity_namespace: str
    :param arena_id: str
    :param populations: str
    :param n_trials: int
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param cache_size: int
    :param write_size: int
    :param output_path: str (path to file)
    :param spikes_namespace: str
    :param spike_train_attr_name: str
    :param gather: bool
    :param debug: bool
    :param plot: bool
    :param show_fig: bool
    :param save_fig: str (base file name)
    :param save_fig_dir:  str (path to dir)
    :param font_size: float
    :param fig_format: str
    :param verbose: bool
    :param dry_run: bool
    """
    comm = MPI.COMM_WORLD
    rank = comm.rank

    config_logging(verbose)  # logger is defined at module scope in the original script

    if phase_mod and (coords_path is None):
        raise RuntimeError("generate_input_spike_trains: when phase_mod is True, coords_path is required")

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix, template_paths=None)
    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if save_fig is not None:
        plot = True

    if plot:
        from dentate.plot import default_fig_options

        fig_options = copy.copy(default_fig_options)
        fig_options.saveFigDir = save_fig_dir
        fig_options.fontSize = font_size
        fig_options.figFormat = fig_format
        fig_options.showFig = show_fig

    population_ranges = read_population_ranges(selectivity_path, comm)[0]

    if len(populations) == 0:
        populations = sorted(population_ranges.keys())

    soma_positions_dict = None
    if coords_path is not None:
        soma_positions_dict = {}
        for population in populations:
            reference_u_arc_distance_bounds = None
            if rank == 0:
                with h5py.File(coords_path, 'r') as coords_f:
                    reference_u_arc_distance_bounds = \
                     coords_f['Populations'][population][distances_namespace].attrs['Reference U Min'], \
                     coords_f['Populations'][population][distances_namespace].attrs['Reference U Max']
            comm.barrier()
            reference_u_arc_distance_bounds = comm.bcast(reference_u_arc_distance_bounds, root=0)
            distances = bcast_cell_attributes(coords_path, population, namespace=distances_namespace, root=0)
            abs_positions = { k: v['U Distance'][0] - reference_u_arc_distance_bounds[0] for (k,v) in distances }
            soma_positions_dict[population] = abs_positions
            del distances
        
    if arena_id not in env.stimulus_config['Arena']:
        raise RuntimeError('Arena with ID: %s not specified by configuration at file path: %s' %
                           (arena_id, config_prefix + '/' + config))
    arena = env.stimulus_config['Arena'][arena_id]

    valid_selectivity_namespaces = dict()
    if rank == 0:
        for population in populations:
            if population not in population_ranges:
                raise RuntimeError('generate_input_spike_trains: specified population: %s not found in '
                                   'provided selectivity_path: %s' % (population, selectivity_path))
            if population not in env.stimulus_config['Selectivity Type Probabilities']:
                raise RuntimeError('generate_input_spike_trains: selectivity type not specified for '
                                   'population: %s' % population)
            valid_selectivity_namespaces[population] = []
            with h5py.File(selectivity_path, 'r') as selectivity_f:
                for this_namespace in selectivity_f['Populations'][population]:
                    if 'Selectivity %s' % arena_id in this_namespace:
                        valid_selectivity_namespaces[population].append(this_namespace)
                if len(valid_selectivity_namespaces[population]) == 0:
                    raise RuntimeError('generate_input_spike_trains: no selectivity data in arena: %s found '
                                       'for specified population: %s in provided selectivity_path: %s' %
                                       (arena_id, population, selectivity_path))
    comm.barrier()

    valid_selectivity_namespaces = comm.bcast(valid_selectivity_namespaces, root=0)
    selectivity_type_names = dict((val, key) for (key, val) in viewitems(env.selectivity_types))

    equilibrate = get_equilibration(env)

    for trajectory_id in sorted(arena.trajectories.keys()):
        trajectory = arena.trajectories[trajectory_id]
        t, x, y, d = None, None, None, None
        if rank == 0:
            t, x, y, d = generate_linear_trajectory(trajectory,
                                                    temporal_resolution=env.stimulus_config['Temporal Resolution'],
                                                    equilibration_duration=env.stimulus_config['Equilibration Duration'])
    
        t = comm.bcast(t, root=0)
        x = comm.bcast(x, root=0)
        y = comm.bcast(y, root=0)
        d = comm.bcast(d, root=0)
        trajectory = t, x, y, d

        trajectory_namespace = 'Trajectory %s %s' % (arena_id, trajectory_id)
        output_namespace = '%s %s %s' % (spikes_namespace, arena_id, trajectory_id)

        if not dry_run and rank == 0:
            if output_path is None:
                raise RuntimeError('generate_input_spike_trains: missing output_path')
            if not os.path.isfile(output_path):
                with h5py.File(output_path, 'w') as output_file:
                    input_file = h5py.File(selectivity_path, 'r')
                    input_file.copy('/H5Types', output_file)
                    input_file.close()
            with h5py.File(output_path, 'a') as f:
                if trajectory_namespace not in f:
                    logger.info('Appending %s datasets to file at path: %s' % (trajectory_namespace, output_path))
                    group = f.create_group(trajectory_namespace)
                    for key, value in zip(['t', 'x', 'y', 'd'], [t, x, y, d]):
                        group.create_dataset(key, data=value, dtype='float32')
                else:
                    loaded_t = f[trajectory_namespace]['t'][:]
                    if len(t) != len(loaded_t):
                        raise RuntimeError('generate_input_spike_trains: file at path: %s already contains the '
                                           'namespace: %s, but the dataset sizes are inconsistent with the provided '
                                           'input configuration' % (output_path, trajectory_namespace))
        comm.barrier()

        if rank == 0:
            context.update(locals())

        spike_hist_sum_dict = {}
        spike_hist_resolution = 1000

        write_every = max(1, int(math.floor(write_size / comm.size)))
        for population in populations:

            req = comm.Ibarrier()
            gc.collect()
            req.wait()

            pop_start = int(population_ranges[population][0])
            num_cells = int(population_ranges[population][1])

            phase_mod_config_dict = None
            if phase_mod:
                phase_mod_config_dict = oscillation_phase_mod_config(env, population, soma_positions_dict[population])

            this_spike_hist_sum = defaultdict(lambda: np.zeros(spike_hist_resolution))

            process_time = dict()
            for this_selectivity_namespace in sorted(valid_selectivity_namespaces[population]):

                if rank == 0:
                    logger.info('Generating input source spike trains for population %s [%s]...' % (population, this_selectivity_namespace))
            
                start_time = time.time()
                req = comm.Ibarrier()
                selectivity_attr_gen = NeuroH5CellAttrGen(selectivity_path, population,
                                                          namespace=this_selectivity_namespace,
                                                          comm=comm, io_size=io_size,
                                                          cache_size=cache_size)
                req.wait()
                spikes_attr_dict = dict()
                gid_count = 0
                for iter_count, (gid, selectivity_attr_dict) in enumerate(selectivity_attr_gen):
                    if gid is not None:
                        if rank == 0:
                            logger.info(f'Rank {rank}: generating spike trains for gid {gid}...')
                        context.update(locals())
                        phase_mod_config = None
                        if phase_mod_config_dict is not None:
                            phase_mod_config = phase_mod_config_dict[gid]
                        spikes_attr_dict[gid] = \
                            generate_input_spike_trains(env, population, selectivity_type_names, trajectory,
                                                        gid, selectivity_attr_dict, n_trials=n_trials,
                                                        spike_train_attr_name=spike_train_attr_name,
                                                        spike_hist_resolution=spike_hist_resolution,
                                                        equilibrate=equilibrate,
                                                        phase_mod_config=phase_mod_config,
                                                        spike_hist_sum=this_spike_hist_sum,
                                                        return_selectivity_features=False,
                                                        debug=(debug_callback, context) if debug else False)
                        gid_count += 1

                    if (iter_count > 0 and iter_count % write_every == 0) or (debug and iter_count == 10):
                        req = comm.Ibarrier()
                        total_gid_count = comm.reduce(gid_count, root=0, op=MPI.SUM)
                        if rank == 0:
                            logger.info('generated spike trains for %i %s cells' %
                                        (total_gid_count, population))
                        req.wait()
                    
                        req = comm.Ibarrier()
                        if not dry_run:
                            append_cell_attributes(output_path, population, spikes_attr_dict,
                                                   namespace=output_namespace, comm=comm, io_size=io_size,
                                                   chunk_size=chunk_size, value_chunk_size=value_chunk_size)
                        req.wait()
                        req = comm.Ibarrier()
                        del spikes_attr_dict
                        spikes_attr_dict = dict()
                        gc.collect()
                        req.wait()
                        if debug and iter_count == 10:
                            break
            
            if not dry_run:
                req = comm.Ibarrier()
                append_cell_attributes(output_path, population, spikes_attr_dict,
                                       namespace=output_namespace, comm=comm, io_size=io_size,
                                       chunk_size=chunk_size, value_chunk_size=value_chunk_size)
                req.wait()
                req = comm.Ibarrier()
                del spikes_attr_dict
                spikes_attr_dict = dict()
                req.wait()
            process_time = time.time() - start_time
            
            req = comm.Ibarrier()
            total_gid_count = comm.reduce(gid_count, root=0, op=MPI.SUM)
            if rank == 0:
                logger.info('generated spike trains for %i %s cells in %.2f s' %
                            (total_gid_count, population, process_time))
            req.wait()

            if gather:
                spike_hist_sum_dict[population] = this_spike_hist_sum


        if gather:
            this_spike_hist_sum = dict([(key, dict(val.items())) for key, val in viewitems(spike_hist_sum_dict)])
            spike_hist_sum = comm.gather(this_spike_hist_sum, root=0)

            if rank == 0:
                merged_spike_hist_sum = defaultdict(lambda: defaultdict(lambda: np.zeros(spike_hist_resolution)))
                for each_spike_hist_sum in spike_hist_sum:
                    for population in each_spike_hist_sum:
                        for selectivity_type_name in each_spike_hist_sum[population]:
                            merged_spike_hist_sum[population][selectivity_type_name] = \
                                np.add(merged_spike_hist_sum[population][selectivity_type_name],
                                       each_spike_hist_sum[population][selectivity_type_name])

                if plot:
                    if save_fig is not None:
                        fig_options.saveFig = save_fig

                    plot_summed_spike_psth(t, trajectory_id, selectivity_type_name, merged_spike_hist_sum,
                                           spike_hist_resolution, fig_options)


    if is_interactive and rank == 0:
        context.update(locals())
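A pattern worth noting above is the repeated `req = comm.Ibarrier(); ...; req.wait()` bracketing around garbage collection and collective writes: a nonblocking barrier lets each rank announce it has reached the synchronization point, perform local work, and then block only until every rank has arrived. A minimal standalone sketch:

from mpi4py import MPI

comm = MPI.COMM_WORLD
req = comm.Ibarrier()          # nonblocking barrier: returns immediately
local_work = sum(range(1000))  # local work overlaps the synchronization
req.wait()                     # block until every rank has entered the barrier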
Code Example #8
def main(config, template_path, types_path, forest_path, connectivity_path,
         connectivity_namespace, coords_path, coords_namespace, io_size,
         chunk_size, value_chunk_size, cache_size, write_size, verbose,
         dry_run):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config, template_paths=template_path)
    configure_hoc_env(env)

    gj_config = env.gapjunctions
    gj_seed = int(env.model_config['Random Seeds']['Gap Junctions'])

    soma_coords = {}

    if (not dry_run) and (rank == 0):
        if not os.path.isfile(connectivity_path):
            input_file = h5py.File(types_path, 'r')
            output_file = h5py.File(connectivity_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    population_ranges = read_population_ranges(coords_path)[0]
    populations = sorted(population_ranges.keys())

    if rank == 0:
        logger.info('Reading population coordinates...')

    soma_distances = {}
    for population in populations:
        coords_iter = bcast_cell_attributes(coords_path,
                                            population,
                                            0,
                                            namespace=coords_namespace)

        soma_coords[population] = {
            k:
            (v['X Coordinate'][0], v['Y Coordinate'][0], v['Z Coordinate'][0])
            for (k, v) in coords_iter
        }

        gc.collect()

    generate_gj_connections(env,
                            forest_path,
                            soma_coords,
                            gj_config,
                            gj_seed,
                            connectivity_namespace,
                            connectivity_path,
                            io_size,
                            chunk_size,
                            value_chunk_size,
                            cache_size,
                            dry_run=dry_run)

    MPI.Finalize()
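Unlike the other examples, this script calls `MPI.Finalize()` explicitly; with mpi4py, finalization is normally registered to run automatically at interpreter exit, so the explicit call mainly guarantees that MPI shuts down before any subsequent cleanup.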
Code Example #9
def main(stimulus_path, stimulus_namespace, weights_path,
         initial_weights_namespace, structured_weights_namespace,
         connections_path, io_size, chunk_size, value_chunk_size, cache_size,
         trajectory_id, seed_offset, target_sparsity, debug):
    """

    :param stimulus_path: str
    :param stimulus_namespace: str
    :param weights_path: str
    :param initial_weights_namespace: str
    :param structured_weights_namespace: str
    :param connections_path: str
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param cache_size: int
    :param trajectory_id: int
    :param seed_offset: int
    :param target_sparsity: float
    :param debug:  bool
    """
    # make sure random seeds are not being reused for various types of stochastic sampling
    seed_offset *= 2e6

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        print('%s: %i ranks have been allocated' % (script_name, comm.size))  # script_name defined at module scope
    sys.stdout.flush()

    stimulus_namespace += ' ' + str(trajectory_id)

    stimulus_attrs = {}
    source_population_list = ['MPP', 'LPP']
    for source in source_population_list:
        stimulus_attr_gen = bcast_cell_attributes(comm,
                                                  0,
                                                  stimulus_path,
                                                  source,
                                                  namespace=stimulus_namespace)
        stimulus_attrs[source] = {
            gid: attr_dict
            for gid, attr_dict in stimulus_attr_gen
        }

    trajectory_namespace = 'Trajectory %s' % str(trajectory_id)

    arena_dimension = 100.  # minimum distance from origin to boundary (cm)
    default_run_vel = 30.  # cm/s
    spatial_resolution = 1.  # cm

    with h5py.File(stimulus_path, 'a', driver='mpio', comm=comm) as f:
        if trajectory_namespace not in f:
            print('Rank: %i; Creating %s datasets' % (rank, trajectory_namespace))
            group = f.create_group(trajectory_namespace)
            t, x, y, d = stimulus.generate_trajectory(
                arena_dimension=arena_dimension,
                velocity=default_run_vel,
                spatial_resolution=spatial_resolution)
            for key, value in zip(['x', 'y', 'd', 't'], [x, y, d, t]):
                dataset = group.create_dataset(key, (value.shape[0], ),
                                               dtype='float32')
                with dataset.collective:
                    dataset[:] = value.astype('float32', copy=False)
        else:
            print('Rank: %i; Reading %s datasets' % (rank, trajectory_namespace))
            group = f[trajectory_namespace]
            dataset = group['x']
            with dataset.collective:
                x = dataset[:]
            dataset = group['y']
            with dataset.collective:
                y = dataset[:]
            dataset = group['d']
            with dataset.collective:
                d = dataset[:]
            dataset = group['t']
            with dataset.collective:
                t = dataset[:]

    plasticity_window_dur = 4.  # s
    plasticity_kernel_sigma = plasticity_window_dur * default_run_vel / 3. / np.sqrt(
        2.)  # cm
    plasticity_kernel = lambda d, d_offset: np.exp(-(
        (d - d_offset) / plasticity_kernel_sigma)**2.)
    plasticity_kernel = np.vectorize(plasticity_kernel, excluded=[1])
    max_plasticity_kernel_area = np.sum(plasticity_kernel(
        d,
        np.max(d) / 2.)) * spatial_resolution

    target = 'GC'

    pop_ranges, pop_size = read_population_ranges(comm, stimulus_path)
    target_gid_offset = pop_ranges[target][0]

    count = 0
    structured_count = 0
    start_time = time.time()

    gid_index_map = get_cell_attributes_gid_index_map(
        comm, weights_path, target, initial_weights_namespace)

    connection_gen_list = []
    for source in source_population_list:
        connection_gen_list.append(
            NeuroH5ProjectionGen(comm,
                                 connections_path,
                                 source,
                                 target,
                                 io_size=io_size,
                                 cache_size=cache_size,
                                 namespaces=['Synapses']))

    maxiter = 100 if debug else None
    for itercount, attr_gen_package in enumerate(
            zip_longest(*connection_gen_list)):  # itertools.zip_longest in Python 3
        local_time = time.time()
        syn_weight_map = {}
        source_syn_map = defaultdict(list)
        syn_peak_index_map = {}
        structured_weights_dict = {}
        modulated_inputs = 0
        source_gid_array = None
        conn_attr_dict = None
        target_gid = attr_gen_package[0][0]
        if not all([
                attr_gen_items[0] == target_gid
                for attr_gen_items in attr_gen_package
        ]):
            raise Exception(
                'Rank: %i; target: %s; target_gid not matched across multiple attribute generators: %s'
                % (rank, target,
                   [attr_gen_items[0] for attr_gen_items in attr_gen_package]))
        # else:
        #    print 'Rank: %i; received target: %s; target_gid: %s' % (rank, target, str(target_gid))
        initial_weights_dict = get_cell_attributes_by_gid(
            target_gid, comm, weights_path, gid_index_map, target,
            initial_weights_namespace, target_gid_offset)
        if target_gid is not None:
            if initial_weights_dict is None:
                raise Exception(
                    'Rank: %i; target: %s; target_gid: %s; get_cell_attributes_by_gid didn\'t work'
                    % (rank, target, str(target_gid)))
            local_random.seed(int(target_gid + seed_offset))
            syn_weight_map = dict(
                zip(initial_weights_dict['syn_id'],
                    initial_weights_dict['weight']))
            for this_target_gid, (source_gid_array,
                                  conn_attr_dict) in attr_gen_package:
                for i in range(len(source_gid_array)):
                    this_source_gid = source_gid_array[i]
                    this_syn_id = conn_attr_dict['Synapses'][0][i]
                    source_syn_map[this_source_gid].append(this_syn_id)
            if local_random.uniform() <= target_sparsity:
                modify_weights = True
                peak_loc = local_random.choice(d)
                this_plasticity_kernel = plasticity_kernel(d, peak_loc)
            else:
                modify_weights = False
            for source in stimulus_attrs:
                peak_rate = peak_rate_dict[source]
                for this_source_gid in stimulus_attrs[source]:
                    peak_index = stimulus_attrs[source][this_source_gid][
                        'peak_index'][0]
                    if modify_weights:
                        norm_rate = stimulus_attrs[source][this_source_gid][
                            'rate'] / peak_rate
                        this_plasticity_signal = np.sum(np.multiply(norm_rate, this_plasticity_kernel)) * \
                                                 spatial_resolution / max_plasticity_kernel_area
                        delta_weight = 2. * this_plasticity_signal
                    else:
                        delta_weight = 0.
                    for this_syn_id in source_syn_map[this_source_gid]:
                        syn_peak_index_map[this_syn_id] = peak_index
                        if delta_weight >= 0.1:
                            modulated_inputs += 1
                        syn_weight_map[this_syn_id] += delta_weight
            structured_weights_dict[target_gid - target_gid_offset] = \
                {'syn_id': np.array(list(syn_peak_index_map.keys())).astype('uint32', copy=False),
                 'weight': np.array([syn_weight_map[syn_id] for syn_id in syn_peak_index_map]).astype('float32',
                                                                                                      copy=False),
                 'peak_index': np.array(list(syn_peak_index_map.values())).astype('uint32', copy=False),
                 'structured': np.array([int(modify_weights)], dtype='uint32')}
            if modify_weights:
                print('Rank %i; target: %s; gid %i; generated structured weights for %i/%i inputs in %.2f s' %
                      (rank, target, target_gid, modulated_inputs, len(syn_weight_map), time.time() - local_time))
                structured_count += 1
            else:
                print('Rank %i; target: %s; gid %i; calculated input peak_locs for %i inputs in %.2f s (not selected '
                      'for structured weights)' % (rank, target, target_gid, len(syn_weight_map),
                                                   time.time() - local_time))
            count += 1
        else:
            print('Rank: %i received target_gid as None' % rank)
        if not debug:
            append_cell_attributes(comm,
                                   weights_path,
                                   target,
                                   structured_weights_dict,
                                   namespace=structured_weights_namespace,
                                   io_size=io_size,
                                   chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size)
        sys.stdout.flush()
        del syn_weight_map
        del source_syn_map
        del syn_peak_index_map
        del structured_weights_dict
        del modulated_inputs
        del source_gid_array
        del conn_attr_dict
        gc.collect()
        if debug:
            comm.barrier()
            if maxiter is not None and itercount > maxiter:
                break
    if debug:
        print('Rank: %i exited the loop' % rank)
    global_count = comm.gather(count, root=0)
    global_structured_count = comm.gather(structured_count, root=0)
    if rank == 0:
        print 'target: %s; %i ranks processed %i cells (%i assigned structured weights) in %.2f s' % \
              (target, comm.size, np.sum(global_count), np.sum(global_structured_count), time.time() - start_time)
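
The weight rule in this example reduces to a normalized inner product between each input's rate map and a spatial plasticity kernel. A self-contained sketch of that arithmetic, with an illustrative 1D Gaussian rate map and kernel (the 10 Hz peak rate, field positions, and widths are assumptions for the example, not values from the script):

import numpy as np

# delta_weight = 2 * sum(norm_rate * kernel) * dx / max_kernel_area
spatial_resolution = 1.0                               # cm (assumed)
d = np.arange(0., 200., spatial_resolution)            # track positions
peak_rate = 10.0                                       # Hz (assumed)
rate = peak_rate * np.exp(-((d - 80.) / 20.) ** 2.)    # toy input rate map
plasticity_kernel = np.exp(-((d - 100.) / 30.) ** 2.)  # toy kernel at the target peak
max_plasticity_kernel_area = np.sum(plasticity_kernel) * spatial_resolution

norm_rate = rate / peak_rate
plasticity_signal = (np.sum(norm_rate * plasticity_kernel) * spatial_resolution
                     / max_plasticity_kernel_area)
delta_weight = 2. * plasticity_signal  # bounded by [0, 2]; added to the baseline weight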
コード例 #10
def main(features_path, connectivity_path, connectivity_namespace, io_size,
         chunk_size, value_chunk_size, cache_size, trajectory_id, debug):
    """

    :param features_path:
    :param connectivity_path:
    :param connectivity_namespace:
    :param io_size:
    :param chunk_size:
    :param value_chunk_size:
    :param cache_size:
    :param trajectory_id:
    :param debug:
    """
    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        print('%i ranks have been allocated' % comm.size)
    sys.stdout.flush()

    population_range_dict = read_population_ranges(comm, features_path)

    features_dict = {}
    for population in ['MPP', 'LPP']:
        features_dict[population] = bcast_cell_attributes(
            comm,
            0,
            features_path,
            population,
            namespace='Feature Selectivity')

    arena_dimension = 100.  # minimum distance from origin to boundary (cm)

    run_vel = 30.  # cm/s
    spatial_resolution = 1.  # cm
    x = np.arange(-arena_dimension, arena_dimension, spatial_resolution)
    y = np.arange(-arena_dimension, arena_dimension, spatial_resolution)
    distance = np.insert(
        np.cumsum(np.sqrt(np.sum(
            [np.diff(x)**2., np.diff(y)**2.], axis=0))), 0, 0.)
    interp_distance = np.arange(distance[0], distance[-1], spatial_resolution)
    t = old_div(interp_distance, run_vel) * 1000.  # ms
    interp_x = np.interp(interp_distance, distance, x)
    interp_y = np.interp(interp_distance, distance, y)

    with h5py.File(features_path, 'a', driver='mpio', comm=comm) as f:
        if 'Trajectories' not in f:
            f.create_group('Trajectories')
        if str(trajectory_id) not in f['Trajectories']:
            f['Trajectories'].create_group(str(trajectory_id))
            f['Trajectories'][str(trajectory_id)].create_dataset(
                'x', dtype='float32', data=interp_x)
            f['Trajectories'][str(trajectory_id)].create_dataset(
                'y', dtype='float32', data=interp_y)
            f['Trajectories'][str(trajectory_id)].create_dataset(
                'd', dtype='float32', data=interp_distance)
            f['Trajectories'][str(trajectory_id)].create_dataset(
                't', dtype='float32', data=t)
        x = f['Trajectories'][str(trajectory_id)]['x'][:]
        y = f['Trajectories'][str(trajectory_id)]['y'][:]
        d = f['Trajectories'][str(trajectory_id)]['d'][:]

    prediction_namespace = 'Response Prediction ' + str(trajectory_id)

    target_population = 'GC'
    count = 0
    start_time = time.time()
    attr_gen = NeuroH5CellAttrGen(comm,
                                  connectivity_path,
                                  target_population,
                                  io_size=io_size,
                                  cache_size=cache_size,
                                  namespace=connectivity_namespace)
    if debug:
        attr_gen_wrapper = (next(attr_gen) for i in range(2))
    else:
        attr_gen_wrapper = attr_gen
    for gid, connectivity_dict in attr_gen_wrapper:
        local_time = time.time()
        source_gid_counts = {}
        response_dict = {}
        response = np.zeros_like(d, dtype='float32')
        if gid is not None:
            for population in ['MPP', 'LPP']:
                indexes = np.where(
                    (connectivity_dict[connectivity_namespace]['source_gid'] >=
                     population_range_dict[population][0])
                    & (connectivity_dict[connectivity_namespace]['source_gid']
                       < population_range_dict[population][0] +
                       population_range_dict[population][1]))[0]
                source_gid_counts[population] = \
                    Counter(connectivity_dict[connectivity_namespace]['source_gid'][indexes])
            for population in ['MPP', 'LPP']:
                for source_gid in (
                        source_gid
                        for source_gid in source_gid_counts[population]
                        if source_gid in features_dict[population]):
                    this_feature_dict = features_dict[population][source_gid]
                    selectivity_type = this_feature_dict['Selectivity Type'][0]
                    contact_count = source_gid_counts[population][source_gid]
                    if selectivity_type == selectivity_grid:
                        ori_offset = this_feature_dict['Grid Orientation'][0]
                        grid_spacing = this_feature_dict['Grid Spacing'][0]
                        x_offset = this_feature_dict['X Offset'][0]
                        y_offset = this_feature_dict['Y Offset'][0]
                        rate = np.vectorize(
                            grid_rate(grid_spacing, ori_offset, x_offset,
                                      y_offset))
                    elif selectivity_type == selectivity_place_field:
                        field_width = this_feature_dict['Field Width'][0]
                        x_offset = this_feature_dict['X Offset'][0]
                        y_offset = this_feature_dict['Y Offset'][0]
                        rate = np.vectorize(
                            place_rate(field_width, x_offset, y_offset))
                    response = np.add(response,
                                      contact_count * rate(x, y),
                                      dtype='float32')
            response_dict[gid] = {'waveform': response}
            baseline = np.mean(response[np.where(
                response <= np.percentile(response, 10.))[0]])
            peak = np.mean(response[np.where(
                response >= np.percentile(response, 90.))[0]])
            modulation = 0. if peak <= 0.1 else old_div(
                (peak - baseline), peak)
            peak_index = np.where(response == np.max(response))[0][0]
            response_dict[gid]['modulation'] = np.array([modulation],
                                                        dtype='float32')
            response_dict[gid]['peak_index'] = np.array([peak_index],
                                                        dtype='uint32')
            print('Rank %i: took %.2f s to compute predicted response for %s gid %i' % \
                  (rank, time.time() - local_time, target_population, gid))
            count += 1
        if not debug:
            append_cell_attributes(comm,
                                   features_path,
                                   target_population,
                                   response_dict,
                                   namespace=prediction_namespace,
                                   io_size=io_size,
                                   chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size)
        sys.stdout.flush()
        del response
        del response_dict
        del source_gid_counts
        gc.collect()

    global_count = comm.gather(count, root=0)
    if rank == 0:
        print('%i ranks took %.2f s to compute selectivity parameters for %i %s cells' % \
              (comm.size, time.time() - start_time, np.sum(global_count), target_population))
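
The grid_rate and place_rate factories used above are defined elsewhere in the project. A plausible minimal sketch of a Gaussian place_rate closure compatible with the np.vectorize calls in this example (the peak rate and the width-to-sigma conversion are assumptions):

import numpy as np

def place_rate(field_width, x_offset, y_offset, peak=20.):
    # 2D Gaussian place field; peak (Hz) and the width -> sigma factor are assumed
    sigma = field_width / 6.
    def rate(x, y):
        return peak * np.exp(-((x - x_offset) ** 2. + (y - y_offset) ** 2.) /
                             (2. * sigma ** 2.))
    return rate

# mirrors the loop above:
rate = np.vectorize(place_rate(field_width=60., x_offset=0., y_offset=0.))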
コード例 #11
def main(connectivity_path, output_path, coords_path, distances_namespace,
         destination, bin_size, cache_size, verbose):
    """
    Measures vertex distribution with respect to septo-temporal distance

    :param connectivity_path:
    :param output_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param bin_size:

    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)
    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        destination_soma_distance_U[k] = v['U Distance'][0]
        destination_soma_distance_V[k] = v['V Distance'][0]

    del destination_soma_distances

    sources = []
    for (src, dst) in read_projection_names(connectivity_path):
        if dst == destination:
            sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in source_soma_distances[s]:
            this_source_soma_distance_U[k] = v['U Distance'][0]
            this_source_soma_distance_V[k] = v['V Distance'][0]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
    del source_soma_distances

    logger.info('reading connections %s -> %s...' %
                (str(sources), destination))
    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    for prj_gen_tuple in utils.zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise Exception(
                'destination %s: destination_gid %i not matched across multiple projection generators: %s'
                % (destination, destination_gid,
                   [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            logger.info('reading connections of gid %i' % destination_gid)
            for (source, (this_destination_gid,
                          rest)) in zip(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)
    comm.barrier()

    logger.info('merging distance dictionaries...')
    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source],
                                        op=add_bins_op,
                                        root=0)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source],
                                          op=add_bins_op,
                                          root=0)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source],
                                          op=add_bins_op,
                                          root=0)

    comm.barrier()

    if rank == 0:
        color = 1
    else:
        color = 0

    ## comm0 includes only rank 0
    comm0 = comm.Split(color, 0)

    if rank == 0:
        if output_path is None:
            output_path = connectivity_path
        logger.info('writing output to %s...' % output_path)

        #f = h5py.File(output_path, 'a', driver='mpio', comm=comm0)
        #if 'Nodes' in f:
        #    nodes_grp = f['Nodes']
        #else:
        #    nodes_grp = f.create_group('Nodes')
        #grp = nodes_grp.create_group('Connectivity Distance Histogram')
        #dst_grp = grp.create_group(destination)
        for source in sources:
            dist_histoCount, dist_bin_edges = finalize_bins(
                dist_bins[source], bin_size)
            dist_u_histoCount, dist_u_bin_edges = finalize_bins(
                dist_u_bins[source], bin_size)
            dist_v_histoCount, dist_v_bin_edges = finalize_bins(
                dist_v_bins[source], bin_size)
            np.savetxt('%s Distance U Bin Count.dat' % source,
                       dist_u_histoCount)
            np.savetxt('%s Distance U Bin Edges.dat' % source,
                       dist_u_bin_edges)
            np.savetxt('%s Distance V Bin Count.dat' % source,
                       dist_v_histoCount)
            np.savetxt('%s Distance V Bin Edges.dat' % source,
                       dist_v_bin_edges)
            np.savetxt('%s Distance Bin Count.dat' % source, dist_histoCount)
            np.savetxt('%s Distance Bin Edges.dat' % source, dist_bin_edges)
        #f.close()
    comm.barrier()
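
The update_bins, add_bins, and finalize_bins helpers are imported from the project's utilities. A minimal sketch consistent with how they are called here (a sparse dict of counts keyed by bin index, merged across ranks with a user-defined MPI reduce op) might look like:

import math
import numpy as np

def update_bins(bins, bin_size, value):
    # accumulate a count in the bin containing value
    bin_index = int(math.floor(value / bin_size))
    bins[bin_index] = bins.get(bin_index, 0) + 1

def add_bins(bins1, bins2, datatype=None):
    # elementwise merge of two bin-count dicts; usable with MPI.Op.Create
    for bin_index, count in bins2.items():
        bins1[bin_index] = bins1.get(bin_index, 0) + count
    return bins1

def finalize_bins(bins, bin_size):
    # densify the sparse counts and return (counts, bin_edges)
    bin_indices = sorted(bins)
    min_bin, max_bin = bin_indices[0], bin_indices[-1]
    counts = np.array([bins.get(i, 0) for i in range(min_bin, max_bin + 1)])
    edges = np.arange(min_bin, max_bin + 2) * bin_size
    return counts, edges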
コード例 #12
import os
import sys

from mpi4py import MPI
from neuroh5.io import read_population_ranges, bcast_cell_attributes

# import mkl

comm = MPI.COMM_WORLD
rank = comm.rank

if rank == 0:
    print('%i ranks have been allocated' % comm.size)
sys.stdout.flush()

prefix = os.getenv("WORK")
stimulus_path = '%s/Full_Scale_Control/DG_input_spike_trains_20190724_compressed.h5' % prefix
stimulus_namespace = 'Input Spikes A Diag'
sources = ['MPP', 'LPP']
cell_attrs = {}
for source in sources:
    if rank == 0:
        print("reading attributes for %s" % source)
        sys.stdout.flush()
    cell_attr_gen = bcast_cell_attributes(stimulus_path,
                                          source,
                                          namespace=stimulus_namespace,
                                          root=0,
                                          comm=comm,
                                          mask=set(["Spike Train"]))
    cell_attrs[source] = {gid: attr_dict for gid, attr_dict in cell_attr_gen}
if rank == 0:
    print(cell_attrs)
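
After the broadcast completes, every rank holds the full per-gid attribute dictionaries, so any rank can summarize them locally. A small illustrative follow-on (the per-source spike counting is an assumption about how one might use the data, not part of the original script):

for source in sources:
    n_cells = len(cell_attrs[source])
    n_spikes = sum(len(attr_dict['Spike Train'])
                   for attr_dict in cell_attrs[source].values())
    if rank == 0:
        print('%s: %i cells, %i total spikes' % (source, n_cells, n_spikes))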
コード例 #13
ファイル: reindex_trees.py プロジェクト: soltesz-lab/dentate
def main(population, forest_path, output_path, index_path, types_path,
         index_namespace, coords_namespace, sample_count, io_size, chunk_size,
         value_chunk_size, verbose):
    """

    :param population: str
    :param forest_path: str (path)
    :param output_path: str (path)
    :param index_path: str (path)
    :param types_path: str (path)
    :param index_namespace: str
    :param coords_namespace: str
    :param sample_count: int
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param verbose: bool
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    random.seed(13)

    if rank == 0:
        if not os.path.isfile(output_path):
            input_file = h5py.File(types_path, 'r')
            output_file = h5py.File(output_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    (forest_pop_ranges, _) = read_population_ranges(forest_path)
    (forest_population_start,
     forest_population_count) = forest_pop_ranges[population]

    (pop_ranges, _) = read_population_ranges(output_path)

    (population_start, population_count) = pop_ranges[population]

    if rank == 0:
        logger.info('reading new cell index map...')

    reindex_map1 = {}
    reindex_map_gen = bcast_cell_attributes(index_path,
                                            population,
                                            namespace=index_namespace,
                                            root=0,
                                            comm=comm)
    for gid, attr_dict in reindex_map_gen:
        reindex_map1[gid] = attr_dict['New Cell Index'][0]

    if rank == 0:
        logger.info('reading cell coordinates...')

    old_coords_dict = {}
    coords_map_gen = bcast_cell_attributes(index_path,
                                           population,
                                           namespace=coords_namespace,
                                           root=0,
                                           comm=comm)
    for gid, attr_dict in coords_map_gen:
        old_coords_dict[gid] = attr_dict

    gc.collect()
    if rank == 0:
        logger.info('sampling cell population reindex...')

    N = len(reindex_map1)
    if sample_count is None:
        sample_count = min(population_count, N)
    else:
        sample_count = min(sample_count, N)
    reindex_map = None
    if rank == 0:
        reindex_map = dict(
            random_subset(utils.viewitems(reindex_map1), sample_count))
    reindex_map = comm.bcast(reindex_map, root=0)

    if rank == 0:
        logger.info('computing new population index...')

    gid_map = {
        k: i + population_start
        for i, k in enumerate(sorted(reindex_map.keys()))
    }

    new_coords_dict = {}
    new_trees_dict = {}
    for gid, old_trees_dict in NeuroH5TreeGen(forest_path,
                                              population,
                                              io_size=io_size,
                                              comm=comm,
                                              topology=False):
        if gid is not None and gid in reindex_map:
            reindex_gid = reindex_map[gid]
            new_gid = gid_map[gid]
            new_trees_dict[new_gid] = old_trees_dict
            new_coords_dict[new_gid] = old_coords_dict[gid]
            logger.info('Rank: %i mapping old gid: %i to new gid: %i' %
                        (rank, gid, new_gid))
    append_cell_trees(output_path,
                      population,
                      new_trees_dict,
                      io_size=io_size,
                      comm=comm)
    append_cell_attributes(output_path, population, new_coords_dict, \
                           namespace=coords_namespace, io_size=io_size, comm=comm)

    comm.barrier()
    if rank == 0:
        logger.info('Appended reindexed trees to %s' % output_path)
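
random_subset is supplied by the project's utilities; a minimal stand-in with the same call shape (an iterable of (gid, value) pairs plus a sample size, returning pairs that dict() can consume) could be:

import random

def random_subset(items, k):
    # sample k (gid, value) pairs without replacement
    return random.sample(list(items), k)

Because only rank 0 draws the sample before broadcasting it, the random.seed(13) call at the top of the script makes the resulting reindex map reproducible.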
コード例 #14
def main(config, config_prefix, include, forest_path, connectivity_path,
         connectivity_namespace, coords_path, coords_namespace,
         synapses_namespace, distances_namespace, resolution,
         interp_chunk_size, io_size, chunk_size, value_chunk_size, cache_size,
         write_size, verbose, dry_run):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix)
    configure_hoc_env(env)

    connection_config = env.connection_config
    extent = {}
    soma_coords = {}

    if (not dry_run) and (rank == 0):
        if not os.path.isfile(connectivity_path):
            input_file = h5py.File(coords_path, 'r')
            output_file = h5py.File(connectivity_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    population_ranges = read_population_ranges(coords_path)[0]
    populations = sorted(list(population_ranges.keys()))

    soma_distances = {}
    for population in populations:
        if rank == 0:
            logger.info('Reading %s population coordinates...' % population)
        coords_iter = bcast_cell_attributes(coords_path,
                                            population,
                                            0,
                                            namespace=coords_namespace)
        distances_iter = bcast_cell_attributes(coords_path,
                                               population,
                                               0,
                                               namespace=distances_namespace)

        soma_coords[population] = {
            k:
            (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0])
            for (k, v) in coords_iter
        }

        distances = {
            k: (v['U Distance'][0], v['V Distance'][0])
            for (k, v) in distances_iter
        }

        if len(distances) > 0:
            soma_distances[population] = distances

        gc.collect()

    forest_populations = sorted(read_population_names(forest_path))
    if (include is None) or (len(include) == 0):
        destination_populations = forest_populations
    else:
        destination_populations = []
        for p in include:
            if p in forest_populations:
                destination_populations.append(p)
    if rank == 0:
        logger.info('Generating connectivity for populations %s...' %
                    str(destination_populations))

    if len(soma_distances) == 0:
        (origin_ranges, ip_dist_u,
         ip_dist_v) = make_distance_interpolant(env,
                                                resolution=resolution,
                                                nsample=nsample)
        ip_dist = (origin_ranges, ip_dist_u, ip_dist_v)
        soma_distances = measure_distances(env,
                                           soma_coords,
                                           ip_dist,
                                           resolution=resolution)

    for destination_population in destination_populations:

        if rank == 0:
            logger.info(
                'Generating connection probabilities for population %s...' %
                destination_population)

        connection_prob = ConnectionProb(destination_population, soma_coords, soma_distances, \
                                         env.connection_extents)

        synapse_seed = int(
            env.model_config['Random Seeds']['Synapse Projection Partitions'])

        connectivity_seed = int(env.model_config['Random Seeds']
                                ['Distance-Dependent Connectivity'])
        cluster_seed = int(
            env.model_config['Random Seeds']['Connectivity Clustering'])

        if rank == 0:
            logger.info('Generating connections for population %s...' %
                        destination_population)

        populations_dict = env.model_config['Definitions']['Populations']
        generate_uv_distance_connections(comm,
                                         populations_dict,
                                         connection_config,
                                         connection_prob,
                                         forest_path,
                                         synapse_seed,
                                         connectivity_seed,
                                         cluster_seed,
                                         synapses_namespace,
                                         connectivity_namespace,
                                         connectivity_path,
                                         io_size,
                                         chunk_size,
                                         value_chunk_size,
                                         cache_size,
                                         write_size,
                                         dry_run=dry_run)
    MPI.Finalize()
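
The rank-0 block near the top of main() uses an idiom that recurs in these scripts: seed a new NeuroH5 file with the /H5Types definitions copied from an existing file before any collective append. In isolation (the function name and paths are placeholders):

import os
import h5py

def init_output_file(template_path, output_path):
    # copy /H5Types so that subsequent neuroh5 appends find the type definitions
    if not os.path.isfile(output_path):
        with h5py.File(template_path, 'r') as input_file, \
             h5py.File(output_path, 'w') as output_file:
            input_file.copy('/H5Types', output_file)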
コード例 #15
def vertex_distribution(connectivity_path,
                        coords_path,
                        distances_namespace,
                        destination,
                        sources,
                        bin_size=20.0,
                        cache_size=100,
                        comm=None):
    """
    Obtain spatial histograms of source vertices connecting to a given destination population.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param sources:
    :param bin_size:
    :param cache_size:
    :param comm:

    """

    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)

    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        destination_soma_distance_U[k] = v['U Distance'][0]
        destination_soma_distance_V[k] = v['V Distance'][0]

    del destination_soma_distances

    if sources == ():
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in source_soma_distances[s]:
            this_source_soma_distance_U[k] = v['U Distance'][0]
            this_source_soma_distance_V[k] = v['V Distance'][0]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' %
                    (str(sources), destination))

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if rank == 0 and destination_gid is not None:
            logger.info('%d' % destination_gid)
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise RuntimeError(
                'destination %s: destination_gid %i not matched across multiple projection generators: '
                '%s' % (destination, destination_gid,
                        [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            for (source, (this_destination_gid,
                          rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)

    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source], op=add_bins_op)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source], op=add_bins_op)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source], op=add_bins_op)

    dist_hist_dict = defaultdict(dict)
    dist_u_hist_dict = defaultdict(dict)
    dist_v_hist_dict = defaultdict(dict)

    if rank == 0:
        for source in sources:
            dist_hist_dict[destination][source] = finalize_bins(
                dist_bins[source], bin_size)
            dist_u_hist_dict[destination][source] = finalize_bins(
                dist_u_bins[source], bin_size)
            dist_v_hist_dict[destination][source] = finalize_bins(
                dist_v_bins[source], bin_size)

    return {
        'Total distance': dist_hist_dict,
        'U distance': dist_u_hist_dict,
        'V distance': dist_v_hist_dict
    }
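
A hypothetical driver for vertex_distribution, plotting one of the reduced histograms on rank 0 (the file names and namespace are placeholders, and the (counts, edges) unpacking assumes the finalize_bins convention sketched earlier):

import matplotlib.pyplot as plt
from mpi4py import MPI

comm = MPI.COMM_WORLD
hists = vertex_distribution('DG_connectivity.h5',  # hypothetical paths
                            'DG_coords.h5',
                            'Arc Distances',
                            destination='GC',
                            sources=('MPP', 'LPP'),
                            bin_size=20.0,
                            comm=comm)
if comm.Get_rank() == 0:
    counts, edges = hists['U distance']['GC']['MPP']
    plt.bar(edges[:-1], counts, width=20.0, align='edge')
    plt.xlabel('U distance of source relative to destination (um)')
    plt.ylabel('Number of connections')
    plt.show()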
コード例 #16
def spatial_bin_graph(connectivity_path,
                      coords_path,
                      distances_namespace,
                      destination,
                      sources,
                      extents,
                      bin_size=20.0,
                      cache_size=100,
                      comm=None):
    """
    Obtain reduced graphs of the specified projections by binning nodes according to their spatial position.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param sources:
    :param extents:
    :param bin_size:
    :param cache_size:
    :param comm:

    """

    import networkx as nx

    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)

    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    ((x_min, x_max), (y_min, y_max)) = extents
    u_bins = np.arange(x_min, x_max, bin_size)
    v_bins = np.arange(y_min, y_max, bin_size)

    dest_u_bins = {}
    dest_v_bins = {}
    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        dist_u = v['U Distance'][0]
        dist_v = v['V Distance'][0]
        dest_u_bins[k] = np.searchsorted(u_bins, dist_u, side='left')
        dest_v_bins[k] = np.searchsorted(v_bins, dist_v, side='left')
        destination_soma_distance_U[k] = dist_u
        destination_soma_distance_V[k] = dist_v

    del destination_soma_distances

    if (sources == ()) or (sources == []) or (sources is None):
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_u_bins = {}
    source_v_bins = {}
    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        this_source_u_bins = {}
        this_source_v_bins = {}
        for k, v in source_soma_distances[s]:
            dist_u = v['U Distance'][0]
            dist_v = v['V Distance'][0]
            this_source_u_bins[k] = np.searchsorted(u_bins,
                                                    dist_u,
                                                    side='left')
            this_source_v_bins[k] = np.searchsorted(v_bins,
                                                    dist_v,
                                                    side='left')
            this_source_soma_distance_U[k] = dist_u
            this_source_soma_distance_V[k] = dist_v
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
        source_u_bins[s] = this_source_u_bins
        source_v_bins[s] = this_source_v_bins
    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' %
                    (str(sources), destination))
    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    local_u_bin_graph = defaultdict(dict)
    local_v_bin_graph = defaultdict(dict)

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise RuntimeError(
                'destination %s: destination_gid %i not matched across multiple projection generators: '
                '%s' % (destination, destination_gid,
                        [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            dest_u_bin = dest_u_bins[destination_gid]
            dest_v_bin = dest_v_bins[destination_gid]
            for (source, (this_destination_gid,
                          rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_u_bins = source_u_bins[source]
                this_source_v_bins = source_v_bins[source]
                (source_indexes, attr_dict) = rest
                source_u_bin_dict = defaultdict(int)
                source_v_bin_dict = defaultdict(int)
                for source_gid in source_indexes:
                    source_u_bin = this_source_u_bins[source_gid]
                    source_v_bin = this_source_v_bins[source_gid]
                    source_u_bin_dict[source_u_bin] += 1
                    source_v_bin_dict[source_v_bin] += 1
                local_u_bin_graph[dest_u_bin][source] = source_u_bin_dict
                local_v_bin_graph[dest_v_bin][source] = source_v_bin_dict

    local_u_bin_graphs = comm.gather(dict(local_u_bin_graph), root=0)
    local_v_bin_graphs = comm.gather(dict(local_v_bin_graph), root=0)

    u_bin_graph = None
    v_bin_graph = None
    nu = None
    nv = None

    if rank == 0:

        u_bin_edges = {destination: dict(ChainMap(*local_u_bin_graphs))}
        v_bin_edges = {destination: dict(ChainMap(*local_v_bin_graphs))}

        nu = len(u_bins)
        u_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nu):
                u_bin_graph.add_node((pop, i))

        for i, ss in viewitems(u_bin_edges[destination]):
            for source, ids in viewitems(ss):
                u_bin_graph.add_weighted_edges_from([
                    ((source, j), (destination, i), count)
                    for j, count in viewitems(ids)
                ])

        nv = len(v_bins)
        v_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nv):
                v_bin_graph.add_node((pop, i))

        for i, ss in viewitems(v_bin_edges[destination]):
            for source, ids in viewitems(ss):
                v_bin_graph.add_weighted_edges_from([
                    ((source, j), (destination, i), count)
                    for j, count in viewitems(ids)
                ])

    label = '%s to %s' % (str(sources), destination)

    return {
        'label': label,
        'bin size': bin_size,
        'destination': destination,
        'sources': sources,
        'U graph': u_bin_graph,
        'V graph': v_bin_graph
    }
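
A hypothetical usage sketch for spatial_bin_graph, inspecting the total afferent weight per destination bin in the U graph on rank 0 (the paths, namespace, and extents values are placeholders):

from mpi4py import MPI

comm = MPI.COMM_WORLD
result = spatial_bin_graph('DG_connectivity.h5',  # hypothetical paths
                           'DG_coords.h5',
                           'Arc Distances',
                           destination='GC',
                           sources=['MPP', 'LPP'],
                           extents=((0., 4000.), (0., 1250.)),
                           bin_size=100.0,
                           comm=comm)
if comm.Get_rank() == 0:
    u_graph = result['U graph']
    for node in (n for n in u_graph.nodes if n[0] == 'GC'):
        in_weight = sum(d['weight']
                        for _, _, d in u_graph.edges(node, data=True))
        print(node, in_weight)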
コード例 #17
def main(config, config_prefix, coords_path, distances_namespace, bin_distance,
         selectivity_path, selectivity_namespace, spatial_resolution, arena_id,
         populations, io_size, cache_size, verbose, debug, show_fig, save_fig,
         save_fig_dir, font_size, fig_size, colormap, fig_format):
    """

    :param config: str (.yaml file name)
    :param config_prefix: str (path to dir)
    :param coords_path: str (path to file)
    :param distances_namespace: str
    :param bin_distance: float
    :param selectivity_path: str
    :param selectivity_namespace: str
    :param spatial_resolution: float
    :param arena_id: str
    :param populations: tuple of str
    :param io_size: int
    :param cache_size: int
    :param verbose: bool
    :param debug: bool
    :param show_fig: bool
    :param save_fig: str (base file name)
    :param save_fig_dir: str (path to dir)
    :param font_size: float
    :param fig_size: tuple of float
    :param colormap: str
    :param fig_format: str
    """
    comm = MPI.COMM_WORLD
    rank = comm.rank

    config_logging(verbose)

    env = Env(comm=comm,
              config_file=config,
              config_prefix=config_prefix,
              template_paths=None)
    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info(f'{comm.size} ranks have been allocated')

    fig_options = copy.copy(default_fig_options)
    fig_options.saveFigDir = save_fig_dir
    fig_options.fontSize = font_size
    fig_options.figFormat = fig_format
    fig_options.showFig = show_fig
    fig_options.figSize = fig_size

    if save_fig is not None:
        save_fig = f'{save_fig} {arena_id}'
    fig_options.saveFig = save_fig

    population_ranges = read_population_ranges(selectivity_path, comm)[0]
    coords_population_ranges = read_population_ranges(coords_path, comm)[0]

    if len(populations) == 0:
        populations = ('MC', 'ConMC', 'LPP', 'GC', 'MPP', 'CA3c')

    valid_selectivity_namespaces = dict()
    if rank == 0:
        for population in populations:
            if population not in population_ranges:
                raise RuntimeError(
                    f'plot_input_selectivity_features: specified population: {population} not found in '
                    f'provided selectivity_path: {selectivity_path}')
            if population not in env.stimulus_config[
                    'Selectivity Type Probabilities']:
                raise RuntimeError(
                    'plot_input_selectivity_features: selectivity type not specified for '
                    f'population: {population}')
            valid_selectivity_namespaces[population] = []
            with h5py.File(selectivity_path, 'r') as selectivity_f:
                for this_namespace in selectivity_f['Populations'][population]:
                    if f'{selectivity_namespace} {arena_id}' in this_namespace:
                        valid_selectivity_namespaces[population].append(
                            this_namespace)
                if len(valid_selectivity_namespaces[population]) == 0:
                    raise RuntimeError(
                        f'plot_input_selectivity_features: no selectivity data in arena: {arena_id} found '
                        f'for specified population: {population} in provided selectivity_path: {selectivity_path}'
                    )

    valid_selectivity_namespaces = comm.bcast(valid_selectivity_namespaces,
                                              root=0)
    selectivity_type_names = dict(
        (val, key) for (key, val) in viewitems(env.selectivity_types))

    reference_u_arc_distance_bounds = None
    reference_v_arc_distance_bounds = None
    if rank == 0:
        for population in populations:
            if population not in coords_population_ranges:
                raise RuntimeError(
                    f'plot_input_selectivity_features: specified population: {population} not found in '
                    f'provided coords_path: {coords_path}')
            with h5py.File(coords_path, 'r') as coords_f:
                pop_size = population_ranges[population][1]
                unique_gid_count = len(
                    set(coords_f['Populations'][population]
                        [distances_namespace]['U Distance']['Cell Index'][:]))
                if pop_size != unique_gid_count:
                    raise RuntimeError(
                        f'plot_input_selectivity_features: only {unique_gid_count}/{pop_size} unique cell indexes found '
                        f'for specified population: {population} in provided coords_path: {coords_path}'
                    )
                if reference_u_arc_distance_bounds is None:
                    try:
                        reference_u_arc_distance_bounds = \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference U Min'], \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference U Max']
                    except Exception:
                        raise RuntimeError(
                            'plot_input_selectivity_features: problem locating attributes '
                            f'containing reference bounds in namespace: {distances_namespace} '
                            f'for population: {population} from coords_path: {coords_path}'
                        )
                if reference_v_arc_distance_bounds is None:
                    try:
                        reference_v_arc_distance_bounds = \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference V Min'], \
                            coords_f['Populations'][population][distances_namespace].attrs['Reference V Max']
                    except Exception:
                        raise RuntimeError(
                            'plot_input_selectivity_features: problem locating attributes '
                            f'containing reference bounds in namespace: {distances_namespace} '
                            f'for population: {population} from coords_path: {coords_path}'
                        )
    reference_u_arc_distance_bounds = comm.bcast(
        reference_u_arc_distance_bounds, root=0)
    reference_v_arc_distance_bounds = comm.bcast(
        reference_v_arc_distance_bounds, root=0)

    u_edges = np.arange(reference_u_arc_distance_bounds[0],
                        reference_u_arc_distance_bounds[1] + bin_distance / 2.,
                        bin_distance)
    v_edges = np.arange(reference_v_arc_distance_bounds[0],
                        reference_v_arc_distance_bounds[1] + bin_distance / 2.,
                        bin_distance)

    if arena_id not in env.stimulus_config['Arena']:
        raise RuntimeError(
            f'Arena with ID: {arena_id} not specified by configuration at file path: {config_prefix}/{config}'
        )

    if spatial_resolution is None:
        spatial_resolution = env.stimulus_config['Spatial Resolution']
    arena = env.stimulus_config['Arena'][arena_id]
    arena_x_mesh, arena_y_mesh = None, None
    if rank == 0:
        arena_x_mesh, arena_y_mesh = \
            get_2D_arena_spatial_mesh(arena=arena, spatial_resolution=spatial_resolution)
    arena_x_mesh = comm.bcast(arena_x_mesh, root=0)
    arena_y_mesh = comm.bcast(arena_y_mesh, root=0)
    x0_dict = {}
    y0_dict = {}

    for population in populations:

        start_time = time.time()
        u_distances_by_gid = dict()
        v_distances_by_gid = dict()
        distances_attr_gen = \
            bcast_cell_attributes(coords_path, population, root=0, namespace=distances_namespace, comm=comm)
        for gid, distances_attr_dict in distances_attr_gen:
            u_distances_by_gid[gid] = distances_attr_dict['U Distance'][0]
            v_distances_by_gid[gid] = distances_attr_dict['V Distance'][0]

        if rank == 0:
            logger.info(
                f'Reading {len(u_distances_by_gid)} cell positions for population {population} took '
                f'{time.time() - start_time:.2f} s')

        for this_selectivity_namespace in valid_selectivity_namespaces[
                population]:
            start_time = time.time()
            if rank == 0:
                logger.info(
                    f'Reading from {this_selectivity_namespace} namespace for population {population}...'
                )
            gid_count = 0
            gathered_cell_attributes = defaultdict(list)
            gathered_component_attributes = defaultdict(list)
            u_distances_by_cell = list()
            v_distances_by_cell = list()
            u_distances_by_component = list()
            v_distances_by_component = list()
            rate_map_sum_by_module = defaultdict(
                lambda: np.zeros_like(arena_x_mesh))
            count_by_module = defaultdict(int)
            start_time = time.time()
            x0_list_by_module = defaultdict(list)
            y0_list_by_module = defaultdict(list)
            selectivity_attr_gen = NeuroH5CellAttrGen(
                selectivity_path,
                population,
                namespace=this_selectivity_namespace,
                comm=comm,
                io_size=io_size,
                cache_size=cache_size)
            for iter_count, (
                    gid,
                    selectivity_attr_dict) in enumerate(selectivity_attr_gen):
                if gid is not None:
                    gid_count += 1
                    this_selectivity_type = selectivity_attr_dict[
                        'Selectivity Type'][0]
                    this_selectivity_type_name = selectivity_type_names[
                        this_selectivity_type]
                    input_cell_config = \
                        get_input_cell_config(selectivity_type=this_selectivity_type,
                                               selectivity_type_names=selectivity_type_names,
                                               selectivity_attr_dict=selectivity_attr_dict)
                    rate_map = input_cell_config.get_rate_map(x=arena_x_mesh,
                                                              y=arena_y_mesh)
                    u_distances_by_cell.append(u_distances_by_gid[gid])
                    v_distances_by_cell.append(v_distances_by_gid[gid])
                    this_cell_attrs, component_count, this_component_attrs = input_cell_config.gather_attributes(
                    )
                    for attr_name, attr_val in viewitems(this_cell_attrs):
                        gathered_cell_attributes[attr_name].append(attr_val)
                    gathered_cell_attributes['Mean Rate'].append(
                        np.mean(rate_map))
                    if component_count > 0:
                        u_distances_by_component.extend(
                            [u_distances_by_gid[gid]] * component_count)
                        v_distances_by_component.extend(
                            [v_distances_by_gid[gid]] * component_count)
                        for attr_name, attr_val in viewitems(
                                this_component_attrs):
                            gathered_component_attributes[attr_name].extend(
                                attr_val)
                    this_module_id = this_cell_attrs['Module ID']
                    if debug and rank == 0:
                        fig_title = f'{population} {this_selectivity_type_name} cell {gid}'
                        if save_fig is not None:
                            fig_options.saveFig = f'{save_fig} {fig_title}'
                        plot_2D_rate_map(
                            x=arena_x_mesh,
                            y=arena_y_mesh,
                            rate_map=rate_map,
                            peak_rate=env.stimulus_config['Peak Rate']
                            [population][this_selectivity_type],
                            title=f'{fig_title}\nModule: {this_module_id}',
                            **fig_options())
                    x0_list_by_module[this_module_id].append(
                        selectivity_attr_dict['X Offset'])
                    y0_list_by_module[this_module_id].append(
                        selectivity_attr_dict['Y Offset'])
                    rate_map_sum_by_module[this_module_id] = np.add(
                        rate_map, rate_map_sum_by_module[this_module_id])
                    count_by_module[this_module_id] += 1
                if debug and iter_count >= 10:
                    break

            if rank == 0:
                logger.info(
                    f'Done reading from {this_selectivity_namespace} namespace for population {population}...'
                )

            cell_count_hist, _, _ = np.histogram2d(u_distances_by_cell,
                                                   v_distances_by_cell,
                                                   bins=[u_edges, v_edges])
            component_count_hist, _, _ = np.histogram2d(
                u_distances_by_component,
                v_distances_by_component,
                bins=[u_edges, v_edges])

            if debug:
                context.update(locals())

            gathered_cell_attr_hist = dict()
            gathered_component_attr_hist = dict()
            for key in gathered_cell_attributes:
                gathered_cell_attr_hist[key], _, _ = \
                    np.histogram2d(u_distances_by_cell, v_distances_by_cell, bins=[u_edges, v_edges],
                                   weights=gathered_cell_attributes[key])
            for key in gathered_component_attributes:
                gathered_component_attr_hist[key], _, _ = \
                    np.histogram2d(u_distances_by_component, v_distances_by_component, bins=[u_edges, v_edges],
                                   weights=gathered_component_attributes[key])
            gid_count = comm.gather(gid_count, root=0)
            cell_count_hist = comm.gather(cell_count_hist, root=0)
            component_count_hist = comm.gather(component_count_hist, root=0)
            gathered_cell_attr_hist = comm.gather(gathered_cell_attr_hist,
                                                  root=0)
            gathered_component_attr_hist = comm.gather(
                gathered_component_attr_hist, root=0)
            x0_list_by_module = dict(x0_list_by_module)
            y0_list_by_module = dict(y0_list_by_module)
            x0_list_by_module = comm.reduce(x0_list_by_module,
                                            op=mpi_op_merge_list_dict,
                                            root=0)
            y0_list_by_module = comm.reduce(y0_list_by_module,
                                            op=mpi_op_merge_list_dict,
                                            root=0)
            rate_map_sum_by_module = dict(rate_map_sum_by_module)
            rate_map_sum_by_module = comm.gather(rate_map_sum_by_module,
                                                 root=0)
            count_by_module = dict(count_by_module)
            count_by_module = comm.reduce(count_by_module,
                                          op=mpi_op_merge_count_dict,
                                          root=0)

            if rank == 0:
                gid_count = sum(gid_count)
                cell_count_hist = np.sum(cell_count_hist, axis=0)
                component_count_hist = np.sum(component_count_hist, axis=0)
                merged_cell_attr_hist = defaultdict(
                    lambda: np.zeros_like(cell_count_hist))
                merged_component_attr_hist = defaultdict(
                    lambda: np.zeros_like(component_count_hist))
                for each_cell_attr_hist in gathered_cell_attr_hist:
                    for key in each_cell_attr_hist:
                        merged_cell_attr_hist[key] = np.add(
                            merged_cell_attr_hist[key],
                            each_cell_attr_hist[key])
                for each_component_attr_hist in gathered_component_attr_hist:
                    for key in each_component_attr_hist:
                        merged_component_attr_hist[key] = np.add(
                            merged_component_attr_hist[key],
                            each_component_attr_hist[key])
                merged_rate_map_sum_by_module = defaultdict(
                    lambda: np.zeros_like(arena_x_mesh))
                for each_rate_map_sum_by_module in rate_map_sum_by_module:
                    for this_module_id in each_rate_map_sum_by_module:
                        merged_rate_map_sum_by_module[this_module_id] = \
                            np.add(merged_rate_map_sum_by_module[this_module_id],
                                   each_rate_map_sum_by_module[this_module_id])

                logger.info(
                    f'Processing {gid_count} {population} {this_selectivity_type_name} cells '
                    f'took {time.time() - start_time:.2f} s')

                if debug:
                    context.update(locals())

                fig_title = f'{population} {this_selectivity_type_name} field offsets'
                if save_fig is not None:
                    fig_options.saveFig = f'{save_fig} {fig_title}'

                for key in merged_cell_attr_hist:
                    fig_title = f'{population} {this_selectivity_type_name} cells {key} distribution'
                    if save_fig is not None:
                        fig_options.saveFig = f'{save_fig} {fig_title}'
                    if colormap is not None:
                        fig_options.colormap = colormap
                    title = f'{population} {this_selectivity_type_name} cells\n{key} distribution'
                    fig = plot_2D_histogram(
                        merged_cell_attr_hist[key],
                        x_edges=u_edges,
                        y_edges=v_edges,
                        norm=cell_count_hist,
                        ylabel='Transverse position (um)',
                        xlabel='Septo-temporal position (um)',
                        title=title,
                        cbar_label='Mean value per bin',
                        cbar=True,
                        **fig_options())
                    close_figure(fig)

                for key in merged_component_attr_hist:
                    fig_title = f'{population} {this_selectivity_type_name} cells {key} distribution'
                    if save_fig is not None:
                        fig_options.saveFig = f'{save_fig} {fig_title}'
                    title = f'{population} {this_selectivity_type_name} cells\n{key} distribution'
                    fig = plot_2D_histogram(
                        merged_component_attr_hist[key],
                        x_edges=u_edges,
                        y_edges=v_edges,
                        norm=component_count_hist,
                        ylabel='Transverse position (um)',
                        xlabel='Septo-temporal position (um)',
                        title=title,
                        cbar_label='Mean value per bin',
                        cbar=True,
                        **fig_options())
                    close_figure(fig)

                for this_module_id in merged_rate_map_sum_by_module:
                    num_cells = count_by_module[this_module_id]
                    x0 = np.concatenate(x0_list_by_module[this_module_id])
                    y0 = np.concatenate(y0_list_by_module[this_module_id])
                    fig_title = f'{population} {this_selectivity_type_name} Module {this_module_id} rate map'
                    if save_fig is not None:
                        fig_options.saveFig = f'{save_fig} {fig_title}'
                    fig = plot_2D_rate_map(
                        x=arena_x_mesh,
                        y=arena_y_mesh,
                        x0=x0,
                        y0=y0,
                        rate_map=merged_rate_map_sum_by_module[this_module_id],
                        title=
                        (f'{population} {this_selectivity_type_name} rate map\n'
                         f'Module {this_module_id} ({num_cells} cells)'),
                        **fig_options())
                    close_figure(fig)

    if is_interactive and rank == 0:
        context.update(locals())
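
mpi_op_merge_list_dict and mpi_op_merge_count_dict come from the project's utilities; minimal reduce-op sketches matching their usage above (the datatype argument follows the mpi4py user-op calling convention) might be:

from mpi4py import MPI

def merge_list_dict(d1, d2, datatype=None):
    # concatenate per-module lists contributed by two ranks
    for key, lst in d2.items():
        d1.setdefault(key, []).extend(lst)
    return d1

def merge_count_dict(d1, d2, datatype=None):
    # sum per-module counts contributed by two ranks
    for key, count in d2.items():
        d1[key] = d1.get(key, 0) + count
    return d1

mpi_op_merge_list_dict = MPI.Op.Create(merge_list_dict, commute=True)
mpi_op_merge_count_dict = MPI.Op.Create(merge_count_dict, commute=True)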
Code example #18
def main(config, forest_path, connectivity_namespace, coords_path, coords_namespace, io_size, chunk_size, value_chunk_size,
         cache_size, debug):
    """

    :param forest_path:
    :param connectivity_namespace:
    :param coords_path:
    :param coords_namespace:
    :param io_size:
    :param chunk_size:
    :param value_chunk_size:
    :param cache_size:
    """
    # troubleshooting: change False to True to hard-code example parameters for interactive debugging
    if False:
        forest_path = '../morphologies/DGC_forest_connectivity_20170508.h5'
        coords_path = '../morphologies/dentate_Full_Scale_Control_coords_selectivity_20170615.h5'
        coords_namespace = 'Coordinates'
        io_size = -1
        chunk_size = 1000
        value_chunk_size = 1000
        cache_size = 50

    comm = MPI.COMM_WORLD
    rank = comm.rank  # rank of this process within the communicator (0..comm.size-1)

    env = Env(comm=comm, config_file=config)

    connection_config = env.connection_config    
    proportions = connection_config.synapse_proportions
    layers      = connection_config.synapse_layers
    syn_types   = connection_config.synapse_types
    swc_types   = connection_config.synapse_locations

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        print('%i ranks have been allocated' % comm.size)
    sys.stdout.flush()

    start_time = time.time()

    soma_coords = {}
    source_populations = list(read_population_ranges(comm, coords_path).keys())
    for population in source_populations:
        soma_coords[population] = bcast_cell_attributes(comm, 0, coords_path, population,
                                                        namespace=coords_namespace)

    target = 'GC'

    layer_set, swc_type_set, syn_type_set = set(), set(), set()
    for source in layers[target]:
        layer_set.update(layers[target][source])
        swc_type_set.update(swc_types[target][source])
        syn_type_set.update(syn_types[target][source])

    count = 0
    attr_gen = NeuroH5CellAttrGen(comm, forest_path, target, io_size=io_size, cache_size=cache_size,
                                  namespace='Synapse Attributes')
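    # In debug mode, process only the first two gids yielded by the attribute
    # generator on each rank instead of the full population.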
    if debug:
        attr_gen_wrapper = (next(attr_gen) for i in range(2))
    else:
        attr_gen_wrapper = attr_gen
    for target_gid, attributes_dict in attr_gen_wrapper:
        last_time = time.time()
        connection_dict = {}
        p_dict = {}
        source_gid_dict = {}
        if target_gid is None:
            print('Rank %i target gid is None' % rank)
        else:
            print('Rank %i received attributes for target: %s, gid: %i' % (rank, target, target_gid))
            synapse_dict = attributes_dict['Synapse_Attributes']
            connection_dict[target_gid] = {}
            local_np_random.seed(target_gid + connectivity_seed_offset)
            connection_dict[target_gid]['source_gid'] = np.array([], dtype='uint32')
            connection_dict[target_gid]['syn_id'] = np.array([], dtype='uint32')

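            # For each (layer, swc type, synapse type) combination, find the
            # candidate presynaptic populations and randomly assign a source
            # gid to every synapse matching that combination.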
            for layer in layer_set:
                for swc_type in swc_type_set:
                    for syn_type in syn_type_set:
                        sources, this_proportions = filter_sources(target, layer, swc_type, syn_type, connection_config)
                        if sources:
                            if rank == 0 and count == 0:
                                source_list_str = '[' + ', '.join(['%s' % xi for xi in sources]) + ']'
                                print('Connections to target: %s in layer: %i ' \
                                    '(swc_type: %i, syn_type: %i): %s' % \
                                    (target, layer, swc_type, syn_type, source_list_str))
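                            # Build a categorical distribution over candidate source gids:
                            # each source population contributes probability mass equal to
                            # its connection proportion, spread uniformly over its gids.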
                            p, source_gid = np.array([]), np.array([])
                            for source, this_proportion in zip(sources, this_proportions):
                                if source not in source_gid_dict:
                                    this_source_gid = list(soma_coords[source].keys())
                                    this_p = np.ones(len(this_source_gid)) / float(len(this_source_gid))
                                    source_gid_dict[source] = this_source_gid
                                    p_dict[source] = this_p
                                else:
                                    this_source_gid = source_gid_dict[source]
                                    this_p = p_dict[source]
                                p = np.append(p, this_p * this_proportion)
                                source_gid = np.append(source_gid, this_source_gid)
                            syn_indexes = filter_synapses(synapse_dict, layer, swc_type, syn_type)
                            connection_dict[target_gid]['syn_id'] = \
                                np.append(connection_dict[target_gid]['syn_id'],
                                          synapse_dict['syn_id'][syn_indexes]).astype('uint32', copy=False)
                            this_source_gid = local_np_random.choice(source_gid, len(syn_indexes), p=p)
                            connection_dict[target_gid]['source_gid'] = \
                                np.append(connection_dict[target_gid]['source_gid'],
                                          this_source_gid).astype('uint32', copy=False)
            count += 1
            print('Rank %i took %.2f s to compute connectivity for target: %s, gid: %i' % (rank,
                                                                                           time.time() - last_time,
                                                                                           target, target_gid))
            sys.stdout.flush()
        if not debug:
            append_cell_attributes(comm, forest_path, target, connection_dict,
                                   namespace=connectivity_namespace, io_size=io_size, chunk_size=chunk_size,
                                   value_chunk_size=value_chunk_size)
        sys.stdout.flush()
        del connection_dict
        del p_dict
        del source_gid_dict
        gc.collect()

    global_count = comm.gather(count, root=0)
    if rank == 0:
        print('%i ranks took %.2f s to compute connectivity for %i cells' % (comm.size, time.time() - start_time,
                                                                             np.sum(global_count)))
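
Note: filter_sources and filter_synapses above are project helpers whose definitions are not shown here. As a rough illustration of the indexing step only, below is a minimal sketch of a filter_synapses-style lookup, assuming synapse_dict carries parallel numpy arrays named 'syn_layers', 'swc_types', and 'syn_types' alongside 'syn_id' (these field names are assumptions, not the project's confirmed schema):

import numpy as np

def filter_synapses_sketch(synapse_dict, layer, swc_type, syn_type):
    # Boolean masks over parallel per-synapse arrays; all three conditions
    # must hold for a synapse to belong to this combination.
    mask = ((synapse_dict['syn_layers'] == layer) &
            (synapse_dict['swc_types'] == swc_type) &
            (synapse_dict['syn_types'] == syn_type))
    # Return the integer indexes used above to slice synapse_dict['syn_id'].
    return np.where(mask)[0]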
Code example #19
File: measure_distances.py Project: soltesz-lab/ca1
def main(config, coords_path, coords_namespace, geometry_path, populations,
         interp_chunk_size, resolution, alpha_radius, nsample, io_size,
         chunk_size, value_chunk_size, cache_size, verbose):

    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)
    
    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config)
    output_path = coords_path

    soma_coords = {}

    if rank == 0:
        logger.info('Reading population coordinates...')
        
    for population in sorted(populations):
        coords = bcast_cell_attributes(coords_path, population, 0,
                                       namespace=coords_namespace, comm=comm)

        soma_coords[population] = { k: (v['U Coordinate'][0], v['V Coordinate'][0], v['L Coordinate'][0]) 
                                    for (k,v) in coords }
        del coords
        gc.collect()

    
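    # Cache the (expensive) distance interpolant in geometry_path: it is
    # pickled, base64-encoded, and stored as an HDF5 dataset keyed by the
    # resolution, so later runs at the same resolution can skip rebuilding it.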
    has_ip_dist = False
    origin_ranges = None
    ip_dist_u = None
    ip_dist_v = None
    ip_dist_path = 'Distance Interpolant/%d/%d/%d' % resolution
    if rank == 0:
        if geometry_path is not None:
            f = h5py.File(geometry_path, 'a')
            pkl_path = f'{ip_dist_path}/ip_dist.pkl'
            if pkl_path in f:
                has_ip_dist = True
                ip_dist_dset = f[pkl_path]
                origin_ranges, ip_dist_u, ip_dist_v = pickle.loads(base64.b64decode(ip_dist_dset[()]))
            f.close()
    has_ip_dist = env.comm.bcast(has_ip_dist, root=0)
    
    if not has_ip_dist:
        if rank == 0:
            logger.info('Creating distance interpolant...')
        (origin_ranges, ip_dist_u, ip_dist_v) = make_distance_interpolant(env.comm, geometry_config=env.geometry,
                                                                          make_volume=make_CA1_volume,
                                                                          resolution=resolution, nsample=nsample)
        if rank == 0:
            if geometry_path is not None:
                f = h5py.File(geometry_path, 'a')
                pkl_path = f'{ip_dist_path}/ip_dist.pkl'
                pkl = pickle.dumps((origin_ranges, ip_dist_u, ip_dist_v))
                pklstr = base64.b64encode(pkl)
                f[pkl_path] = pklstr
                f.close()
                
    ip_dist = (origin_ranges, ip_dist_u, ip_dist_v)
    if rank == 0:
        logger.info('Measuring soma distances...')

    soma_distances = measure_distances(env.comm, env.geometry, soma_coords, ip_dist, resolution=resolution)
    for population in sorted(soma_distances.keys()):

        if rank == 0:
            logger.info(f'Writing distances for population {population}...')

        dist_dict = soma_distances[population]
        attr_dict = {}
        for k, v in viewitems(dist_dict):
            attr_dict[k] = {'U Distance': np.asarray([v[0]], dtype=np.float32),
                            'V Distance': np.asarray([v[1]], dtype=np.float32)}
        append_cell_attributes(output_path, population, attr_dict,
                               namespace='Arc Distances', comm=comm,
                               io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size, cache_size=cache_size)
        if rank == 0:
            f = h5py.File(output_path, 'a')
            f['Populations'][population]['Arc Distances'].attrs['Reference U Min'] = origin_ranges[0][0]
            f['Populations'][population]['Arc Distances'].attrs['Reference U Max'] = origin_ranges[0][1]
            f['Populations'][population]['Arc Distances'].attrs['Reference V Min'] = origin_ranges[1][0]
            f['Populations'][population]['Arc Distances'].attrs['Reference V Max'] = origin_ranges[1][1]
            f.close()

    comm.Barrier()
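
To spot-check the written output, the 'Arc Distances' namespace can be read back on a single process. A minimal sketch, assuming neuroh5's read_cell_attributes yields (gid, attribute dict) pairs the same way bcast_cell_attributes does in the examples above; the helper name print_arc_distances is ours:

from neuroh5.io import read_cell_attributes

def print_arc_distances(output_path, population, n=5):
    # Iterate (gid, attribute dict) pairs from the namespace written above
    # and print the first n entries.
    attr_iter = read_cell_attributes(output_path, population, namespace='Arc Distances')
    for i, (gid, attr_dict) in enumerate(attr_iter):
        if i >= n:
            break
        u = attr_dict['U Distance'][0]
        v = attr_dict['V Distance'][0]
        print('%s gid %i: U distance = %.2f V distance = %.2f' % (population, gid, u, v))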