Code example #1
# NOTE: the imports below are assumptions inferred from how the names are
# used in this excerpt; the original script in the dentate project may
# organize them differently. update_bins, add_bins, and finalize_bins are
# helper functions defined elsewhere in that script.
import os
from collections import defaultdict

import numpy as np
from mpi4py import MPI

from dentate import utils
from neuroh5.io import (NeuroH5ProjectionGen, bcast_cell_attributes,
                        read_population_ranges, read_projection_names)


def main(connectivity_path, output_path, coords_path, distances_namespace,
         destination, bin_size, cache_size, verbose):
    """
    Measures vertex distribution with respect to septo-temporal distance

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace: 
    :param destination: 
    :param source: 

    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)
    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        destination_soma_distance_U[k] = v['U Distance'][0]
        destination_soma_distance_V[k] = v['V Distance'][0]

    del destination_soma_distances

    sources = []
    for (src, dst) in read_projection_names(connectivity_path):
        if dst == destination:
            sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in source_soma_distances[s]:
            this_source_soma_distance_U[k] = v['U Distance'][0]
            this_source_soma_distance_V[k] = v['V Distance'][0]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' %
                    (str(sources), destination))
    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    for prj_gen_tuple in utils.zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise Exception(
                'destination %s: destination_gid %i not matched across multiple projection generators: %s'
                % (destination, destination_gid,
                   [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            logger.info('reading connections of gid %i' % destination_gid)
            for (source, (this_destination_gid,
                          rest)) in zip(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)
    comm.barrier()

    logger.info('merging distance dictionaries...')
    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source],
                                        op=add_bins_op,
                                        root=0)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source],
                                          op=add_bins_op,
                                          root=0)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source],
                                          op=add_bins_op,
                                          root=0)

    comm.barrier()

    if rank == 0:
        color = 1
    else:
        color = 0

    ## comm0 includes only rank 0
    comm0 = comm.Split(color, 0)

    if rank == 0:
        if output_path is None:
            output_path = connectivity_path
        logger.info('writing output to %s...' % output_path)

        #f = h5py.File(output_path, 'a', driver='mpio', comm=comm0)
        #if 'Nodes' in f:
        #    nodes_grp = f['Nodes']
        #else:
        #    nodes_grp = f.create_group('Nodes')
        #grp = nodes_grp.create_group('Connectivity Distance Histogram')
        #dst_grp = grp.create_group(destination)
        for source in sources:
            dist_histoCount, dist_bin_edges = finalize_bins(
                dist_bins[source], bin_size)
            dist_u_histoCount, dist_u_bin_edges = finalize_bins(
                dist_u_bins[source], bin_size)
            dist_v_histoCount, dist_v_bin_edges = finalize_bins(
                dist_v_bins[source], bin_size)
            np.savetxt('%s Distance U Bin Count.dat' % source,
                       dist_u_histoCount)
            np.savetxt('%s Distance U Bin Edges.dat' % source,
                       dist_u_bin_edges)
            np.savetxt('%s Distance V Bin Count.dat' % source,
                       dist_v_histoCount)
            np.savetxt('%s Distance V Bin Edges.dat' % source,
                       dist_v_bin_edges)
            np.savetxt('%s Distance Bin Count.dat' % source, dist_histoCount)
            np.savetxt('%s Distance Bin Edges.dat' % source, dist_bin_edges)
        #f.close()
    comm.barrier()
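
A minimal sketch of how this entry point might be invoked; the file names,
namespace, and population label below are hypothetical, and the script is
meant to run under MPI (e.g. mpirun -n 4 python measure_distribution.py):

if __name__ == '__main__':
    main(connectivity_path='DG_connectivity.h5',   # hypothetical path
         output_path=None,            # None falls back to connectivity_path
         coords_path='DG_coords.h5',  # hypothetical path
         distances_namespace='Arc Distances',
         destination='GC',
         bin_size=20.0,
         cache_size=100,
         verbose=True)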
Code example #2
File: env.py  Project: pramodk/dentate
    def __init__(self,
                 comm=None,
                 config_file=None,
                 template_paths="templates",
                 hoc_lib_path=None,
                 configure_nrn=True,
                 dataset_prefix=None,
                 config_prefix=None,
                 results_path=None,
                 results_file_id=None,
                 results_namespace_id=None,
                 node_rank_file=None,
                 io_size=0,
                 recording_profile=None,
                 recording_fraction=1.0,
                 coredat=False,
                 tstop=0.,
                 v_init=-65,
                 stimulus_onset=0.0,
                 n_trials=1,
                 max_walltime_hours=0.5,
                 checkpoint_interval=500.0,
                 checkpoint_clear_data=True,
                 results_write_time=0,
                 dt=0.025,
                 ldbal=False,
                 lptbal=False,
                 transfer_debug=False,
                 cell_selection_path=None,
                 spike_input_path=None,
                 spike_input_namespace=None,
                 spike_input_attr=None,
                 cleanup=True,
                 cache_queries=False,
                 profile_memory=False,
                 verbose=False,
                 **kwargs):
        """
        :param comm: MPI communicator; defaults to MPI.COMM_WORLD
        :param config_file: str; model configuration file name
        :param template_paths: str; colon-separated list of paths to directories containing hoc cell templates
        :param hoc_lib_path: str; path to directory containing required hoc libraries
        :param dataset_prefix: str; path to directory containing required neuroh5 data files
        :param config_prefix: str; path to directory containing network and cell mechanism config files
        :param results_path: str; path to directory to export output files
        :param results_file_id: str; label for neuroh5 files to write spike and voltage trace data
        :param results_namespace_id: str; label for neuroh5 namespaces to write spike and voltage trace data
        :param node_rank_file: str; name of file specifying assignment of node gids to MPI ranks
        :param io_size: int; the number of MPI ranks to be used for I/O operations
        :param recording_profile: str; intracellular recording configuration to use
        :param coredat: bool; Save CoreNEURON data
        :param tstop: float; physical time to simulate (ms)
        :param v_init: float; initialization membrane potential (mV)
        :param stimulus_onset: float; starting time of stimulus (ms)
        :param max_walltime_hours: float; maximum wall time (hours)
        :param results_write_time: float; time to write out results at end of simulation
        :param dt: float; simulation time step
        :param ldbal: bool; estimate load balance based on cell complexity
        :param lptbal: bool; calculate load balance with LPT algorithm
        :param cleanup: bool; clean up auxiliary cell and synapse structures after network init
        :param profile_memory: bool; compute and print memory usage during initialization
        :param cache_queries: bool; whether to use a cache to speed up queries to filter_synapses
        :param verbose: bool; print verbose diagnostic messages while constructing the network
        """
        self.kwargs = kwargs

        self.SWC_Types = {}
        self.SWC_Type_index = {}
        self.Synapse_Types = {}
        self.layers = {}
        self.globals = {}

        self.gidset = set([])
        self.gjlist = []
        self.cells = defaultdict(list)
        self.artificial_cells = defaultdict(dict)
        self.biophys_cells = defaultdict(dict)
        self.spike_onset_delay = {}
        self.recording_sets = {}

        self.pc = None
        if comm is None:
            self.comm = MPI.COMM_WORLD
        else:
            self.comm = comm
        rank = self.comm.Get_rank()

        if configure_nrn:
            from dentate.neuron_utils import h, find_template

        # If true, the biophysical cells and synapses dictionary will be freed
        # as synapses and connections are instantiated.
        self.cleanup = cleanup

        # If true, compute and print memory usage at various points
        # during simulation initialization
        self.profile_memory = profile_memory

        # print verbose diagnostic messages
        self.verbose = verbose
        config_logging(verbose)
        self.logger = get_root_logger()

        # Directories for cell templates
        if template_paths is not None:
            self.template_paths = template_paths.split(':')
        else:
            self.template_paths = []
        self.template_dict = {}

        # The location of required hoc libraries
        self.hoc_lib_path = hoc_lib_path

        # Checkpoint interval in ms of simulation time
        self.checkpoint_interval = max(float(checkpoint_interval), 1.0)
        self.checkpoint_clear_data = checkpoint_clear_data
        self.last_checkpoint = 0.

        # The location of all datasets
        self.dataset_prefix = dataset_prefix

        # The path where results files should be written
        self.results_path = results_path

        # Identifier used to construct results data namespaces
        self.results_namespace_id = results_namespace_id
        # Identifier used to construct results data files
        self.results_file_id = results_file_id

        # Number of MPI ranks to be used for I/O operations
        self.io_size = int(io_size)

        # Initialization voltage
        self.v_init = float(v_init)

        # simulation time [ms]
        self.tstop = float(tstop)

        # stimulus onset time [ms]
        self.stimulus_onset = float(stimulus_onset)

        # number of trials
        self.n_trials = int(n_trials)

        # maximum wall time in hours
        self.max_walltime_hours = float(max_walltime_hours)

        # time to write out results at end of simulation
        self.results_write_time = float(results_write_time)

        # time step
        self.dt = float(dt)

        # used to estimate cell complexity
        self.cxvec = None

        # measure/perform load balancing
        self.optldbal = ldbal
        self.optlptbal = lptbal

        self.transfer_debug = transfer_debug

        # Save CoreNEURON data
        self.coredat = coredat

        # cache queries to filter_synapses
        self.cache_queries = cache_queries

        self.config_prefix = config_prefix
        if config_file is not None:
            if config_prefix is not None:
                config_file_path = os.path.join(self.config_prefix, config_file)
            else:
                config_file_path = config_file
            if not os.path.isfile(config_file_path):
                raise RuntimeError("configuration file %s was not found" %
                                   config_file_path)
            with open(config_file_path) as fp:
                self.model_config = yaml.load(fp, IncludeLoader)
        else:
            raise RuntimeError("missing configuration file")

        if 'Definitions' in self.model_config:
            self.parse_definitions()
            self.SWC_Type_index = {type_id: name for name, type_id
                                   in viewitems(self.SWC_Types)}

        if 'Global Parameters' in self.model_config:
            self.parse_globals()

        self.geometry = None
        if 'Geometry' in self.model_config:
            self.geometry = self.model_config['Geometry']

        if (self.geometry is not None
                and 'Origin' in self.geometry['Parametric Surface']):
            self.parse_origin_coords()

        self.celltypes = self.model_config['Cell Types']
        self.cell_attribute_info = {}

        # The name of this model
        if 'Model Name' in self.model_config:
            self.modelName = self.model_config['Model Name']
        # The dataset to use for constructing the network
        if 'Dataset Name' in self.model_config:
            self.datasetName = self.model_config['Dataset Name']

        if rank == 0:
            self.logger.info('env.dataset_prefix = %s' %
                             str(self.dataset_prefix))

        # Cell selection for simulations of subsets of the network
        self.cell_selection = None
        self.cell_selection_path = cell_selection_path
        if rank == 0:
            self.logger.info('env.cell_selection_path = %s' %
                             str(self.cell_selection_path))
        if cell_selection_path is not None:
            with open(cell_selection_path) as fp:
                self.cell_selection = yaml.load(fp, IncludeLoader)

        # Spike input path
        self.spike_input_path = spike_input_path
        self.spike_input_ns = spike_input_namespace
        self.spike_input_attr = spike_input_attr
        self.spike_input_attribute_info = None
        if self.spike_input_path is not None:
            if rank == 0:
                self.logger.info('env.spike_input_path = %s' %
                                 str(self.spike_input_path))
            self.spike_input_attribute_info = read_cell_attribute_info(
                self.spike_input_path, sorted(self.Populations.keys()),
                comm=self.comm)
            if rank == 0:
                self.logger.info('env.spike_input_attribute_info = %s' %
                                 str(self.spike_input_attribute_info))
        if results_path:
            if self.results_file_id is None:
                self.results_file_path = "%s/%s_results.h5" % (
                    self.results_path, self.modelName)
            else:
                self.results_file_path = "%s/%s_results_%s.h5" % (
                    self.results_path, self.modelName, self.results_file_id)
        else:
            if self.results_file_id is None:
                self.results_file_path = "%s_results.h5" % (self.modelName)
            else:
                self.results_file_path = "%s_results_%s.h5" % (
                    self.modelName, self.results_file_id)

        if 'Connection Generator' in self.model_config:
            self.parse_connection_config()
            self.parse_gapjunction_config()

        if self.dataset_prefix is not None:
            self.dataset_path = os.path.join(self.dataset_prefix,
                                             self.datasetName)
            if 'Cell Data' in self.model_config:
                self.data_file_path = os.path.join(
                    self.dataset_path, self.model_config['Cell Data'])
                self.forest_file_path = os.path.join(
                    self.dataset_path, self.model_config['Cell Data'])
                self.load_celltypes()
            else:
                self.data_file_path = None
                self.forest_file_path = None
            if rank == 0:
                self.logger.info('env.data_file_path = %s' %
                                 self.data_file_path)
            if 'Connection Data' in self.model_config:
                self.connectivity_file_path = os.path.join(
                    self.dataset_path, self.model_config['Connection Data'])
            else:
                self.connectivity_file_path = None
            if 'Gap Junction Data' in self.model_config:
                self.gapjunctions_file_path = os.path.join(
                    self.dataset_path, self.model_config['Gap Junction Data'])
            else:
                self.gapjunctions_file_path = None
        else:
            self.dataset_path = None
            self.data_file_path = None
            self.connectivity_file_path = None
            self.forest_file_path = None
            self.gapjunctions_file_path = None

        self.node_ranks = None
        if node_rank_file:
            self.load_node_ranks(node_rank_file)

        self.netclamp_config = None
        if 'Network Clamp' in self.model_config:
            self.parse_netclamp_config()

        self.stimulus_config = None
        self.arena_id = None
        self.trajectory_id = None
        if 'Stimulus' in self.model_config:
            self.parse_stimulus_config()
            self.init_stimulus_config(**kwargs)

        self.analysis_config = None
        if 'Analysis' in self.model_config:
            self.analysis_config = self.model_config['Analysis']

        self.projection_dict = defaultdict(list)
        if self.dataset_prefix is not None:
            if rank == 0:
                self.logger.info('env.connectivity_file_path = %s' %
                                 str(self.connectivity_file_path))
            if self.connectivity_file_path is not None:
                for (src,
                     dst) in read_projection_names(self.connectivity_file_path,
                                                   comm=self.comm):
                    self.projection_dict[dst].append(src)
                if rank == 0:
                    self.logger.info('projection_dict = %s' %
                                     str(self.projection_dict))

        # Configuration profile for recording intracellular quantities
        assert ((recording_fraction >= 0.0) and (recording_fraction <= 1.0))
        self.recording_fraction = recording_fraction
        self.recording_profile = None
        if ('Recording' in self.model_config) and (recording_profile
                                                   is not None):
            self.recording_profile = self.model_config['Recording'][
                'Intracellular'][recording_profile]
            self.recording_profile['label'] = recording_profile
            for recvar, recdict in viewitems(
                    self.recording_profile.get('synaptic quantity', {})):
                filters = {}
                if 'syn types' in recdict:
                    filters['syn_types'] = recdict['syn types']
                if 'swc types' in recdict:
                    filters['swc_types'] = recdict['swc types']
                if 'layers' in recdict:
                    filters['layers'] = recdict['layers']
                if 'sources' in recdict:
                    filters['sources'] = recdict['sources']
                syn_filters = get_syn_filter_dict(self, filters, convert=True)
                recdict['syn_filters'] = syn_filters

        # Configuration profile for recording local field potentials
        self.LFP_config = {}
        if 'Recording' in self.model_config:
            for label, config in viewitems(
                    self.model_config['Recording']['LFP']):
                self.LFP_config[label] = {
                    'position': tuple(config['position']),
                    'maxEDist': config['maxEDist'],
                    'fraction': config['fraction'],
                    'rho': config['rho'],
                    'dt': config['dt']
                }

        self.t_vec = None
        self.id_vec = None
        self.t_rec = None
        self.recs_dict = {}  # Intracellular samples on this host
        for pop_name, _ in viewitems(self.Populations):
            self.recs_dict[pop_name] = defaultdict(list)

        # used to calculate model construction times and run time
        self.mkcellstime = 0
        self.mkstimtime = 0
        self.connectcellstime = 0
        self.connectgjstime = 0

        self.simtime = None
        self.lfp = {}

        self.edge_count = defaultdict(dict)
        self.syns_set = defaultdict(set)
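
A hedged construction sketch for this Env class; the module path, directory
layout, and configuration file name are assumptions for illustration only:

from mpi4py import MPI
from dentate.env import Env  # assumed module path

env = Env(comm=MPI.COMM_WORLD,
          config_file='Network.yaml',   # hypothetical model configuration
          config_prefix='./config',
          template_paths='templates',
          dataset_prefix='./datasets',
          results_path='./results',
          verbose=True)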
Code example #3
def connectcells(env, gid_list):
    datasetPath = os.path.join(env.datasetPrefix, env.datasetName)
    connectivityFilePath = os.path.join(datasetPath, env.modelConfig['Connection Data'])
    forestFilePath = os.path.join(datasetPath, env.modelConfig['Cell Data'])

    if env.verbose:
        if env.pc.id() == 0:
            print('*** Connectivity file path is %s' % connectivityFilePath)

    prj_dict = defaultdict(list)
    for (src, dst) in read_projection_names(env.comm, connectivityFilePath):
        prj_dict[dst].append(src)

    if env.verbose:
        if env.pc.id() == 0:
            print('*** Reading projections: ', list(prj_dict.items()))

    for (postsyn_name, presyn_names) in prj_dict.items():

        synapse_config = env.celltypes[postsyn_name]['synapses']
        spines = synapse_config.get('spines', False)
        unique = synapse_config.get('unique', False)
        has_weights = synapse_config.get('weights', False)
        weights_namespace = synapse_config.get('weights namespace', 'Weights')

        if env.verbose:
            if int(env.pc.id()) == 0:
                print('*** Reading synapse attributes of population %s' % (postsyn_name))

        gid_index_synapses_map = get_cell_attributes_index_map(env.comm, forestFilePath, 'GC', 'Synapse Attributes')
        if 'weights namespace' in synapse_config:
            gid_index_weights_map = get_cell_attributes_index_map(env.comm, forestFilePath, 'GC', weights_namespace)
        cell_synapses_dict, cell_weights_dict = {}, {}
        for gid in gid_list:
            cell_attributes_dict = select_cell_attributes(gid, env.comm, forestFilePath, gid_index_synapses_map,
                                                              'GC', 'Synapse Attributes')
            cell_synapses_dict[gid] = {k: v for (k, v) in cell_attributes_dict['Synapse Attributes']}
            if has_weights:
                cell_attributes_dict.update(get_cell_attributes_by_gid(gid, env.comm, forestFilePath,
                                                                       gid_index_synapses_map, 'GC', weights_namespace))
                cell_weights_dict[gid] = {k: v for (k, v) in cell_attributes_dict[weights_namespace]}
                if env.verbose:
                    if env.pc.id() == 0:
                        print('*** Found synaptic weights for population %s' % (postsyn_name))
            else:
                has_weights = False
                cell_weights_dict[gid] = None
            del cell_attributes_dict

        for presyn_name in presyn_names:

            edge_count = 0

            if env.verbose:
                if env.pc.id() == 0:
                    print('*** Connecting %s -> %s' % (presyn_name, postsyn_name))

            if env.nodeRanks is None:
                (graph, a) = scatter_read_graph(env.comm, connectivityFilePath, io_size=env.IOsize,
                                                projections=[(presyn_name, postsyn_name)],
                                                namespaces=['Synapses', 'Connections'])
            else:
                (graph, a) = scatter_read_graph(env.comm, connectivityFilePath, io_size=env.IOsize,
                                                node_rank_map=env.nodeRanks,
                                                projections=[(presyn_name, postsyn_name)],
                                                namespaces=['Synapses', 'Connections'])

            edge_iter = graph[postsyn_name][presyn_name]

            connection_dict = env.connection_generator[postsyn_name][presyn_name].connection_properties
            kinetics_dict = env.connection_generator[postsyn_name][presyn_name].synapse_kinetics

            syn_id_attr_index = a[postsyn_name][presyn_name]['Synapses']['syn_id']
            distance_attr_index = a[postsyn_name][presyn_name]['Connections']['distance']

            for (postsyn_gid, edges) in edge_iter:

                postsyn_cell = env.pc.gid2cell(postsyn_gid)
                cell_syn_dict = cell_synapses_dict[postsyn_gid]

                if has_weights:
                    cell_wgt_dict = cell_weights_dict[postsyn_gid]
                    syn_wgt_dict = {int(syn_id): float(weight) for (syn_id, weight) in
                                    zip(np.nditer(cell_wgt_dict['syn_id']),
                                        np.nditer(cell_wgt_dict['weight']))}
                else:
                    syn_wgt_dict = None

                presyn_gids = edges[0]
                edge_syn_ids = edges[1]['Synapses'][syn_id_attr_index]
                edge_dists = edges[1]['Connections'][distance_attr_index]

                cell_syn_types = cell_syn_dict['syn_types']
                cell_swc_types = cell_syn_dict['swc_types']
                cell_syn_locs = cell_syn_dict['syn_locs']
                cell_syn_sections = cell_syn_dict['syn_secs']

                edge_syn_ps_dict = synapses.mksyns(postsyn_gid,
                                                   postsyn_cell,
                                                   edge_syn_ids,
                                                   cell_syn_types,
                                                   cell_swc_types,
                                                   cell_syn_locs,
                                                   cell_syn_sections,
                                                   kinetics_dict, env,
                                                   add_synapse=synapses.add_unique_synapse if unique else synapses.add_shared_synapse,
                                                   spines=spines)

                if env.verbose:
                    if int(env.pc.id()) == 0:
                        if edge_count == 0:
                            for sec in list(postsyn_cell.all):
                                h.psection(sec=sec)

                wgt_count = 0
                for (presyn_gid, edge_syn_id, distance) in zip(presyn_gids, edge_syn_ids, edge_dists):
                    syn_ps_dict = edge_syn_ps_dict[edge_syn_id]
                    for (syn_mech, syn_ps) in syn_ps_dict.items():
                        connection_syn_mech_config = connection_dict[syn_mech]
                        if has_weights and edge_syn_id in syn_wgt_dict:
                            wgt_count += 1
                            weight = float(syn_wgt_dict[edge_syn_id]) * connection_syn_mech_config['weight']
                        else:
                            weight = connection_syn_mech_config['weight']
                        delay = distance / connection_syn_mech_config['velocity']
                        if isinstance(weight, float):
                            h.nc_appendsyn(env.pc, h.nclist, presyn_gid, postsyn_gid, syn_ps, weight, delay)
                        else:
                            h.nc_appendsyn_wgtvector(env.pc, h.nclist, presyn_gid, postsyn_gid, syn_ps, weight, delay)
                if env.verbose:
                    if int(env.pc.id()) == 0:
                        if edge_count == 0:
                            print('*** Found %i synaptic weights for gid %i' % (wgt_count, postsyn_gid))

                edge_count += len(presyn_gids)
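
A brief driver sketch, assuming env has already been constructed and the cells
on this rank have been instantiated so that env.pc.gid2cell() can resolve each
gid; the gid list below is a placeholder:

gid_list = sorted(env.gidset)  # gids assigned to this rank (assumed populated)
connectcells(env, gid_list)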
Code example #4
def vertex_distribution(connectivity_path,
                        coords_path,
                        distances_namespace,
                        destination,
                        sources,
                        bin_size=20.0,
                        cache_size=100,
                        comm=None):
    """
    Obtain spatial histograms of source vertices connecting to a given destination population.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace: 
    :param destination: 
    :param source: 

    """

    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    color = 0
    if rank == 0:
        color = 1
    comm0 = comm.Split(color, 0)

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    destination_soma_distances = {}
    if rank == 0:
        logger.info(f'Reading {destination} distances...')
        distances_iter = read_cell_attributes(
            coords_path,
            destination,
            comm=comm0,
            mask=set(['U Distance', 'V Distance']),
            namespace=distances_namespace)

        destination_soma_distances = {
            k: (float(v['U Distance'][0]), float(v['V Distance'][0]))
            for (k, v) in distances_iter
        }

        gc.collect()

    comm.barrier()

    destination_soma_distances = comm.bcast(destination_soma_distances, root=0)
    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in viewitems(destination_soma_distances):
        destination_soma_distance_U[k] = v[0]
        destination_soma_distance_V[k] = v[1]

    del destination_soma_distances

    if sources == ():
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    if rank == 0:
        for s in sources:
            logger.info(f'Reading {s} distances...')
            distances_iter = read_cell_attributes(
                coords_path,
                s,
                comm=comm0,
                mask=set(['U Distance', 'V Distance']),
                namespace=distances_namespace)

            source_soma_distances[s] = {
                k: (float(v['U Distance'][0]), float(v['V Distance'][0]))
                for (k, v) in distances_iter
            }

            gc.collect()

    comm.barrier()
    comm0.Free()

    source_soma_distances = comm.bcast(source_soma_distances, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in viewitems(source_soma_distances[s]):
            this_source_soma_distance_U[k] = v[0]
            this_source_soma_distance_V[k] = v[1]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
    del source_soma_distances

    if rank == 0:
        logger.info('Reading connections %s -> %s...' %
                    (str(sources), destination))

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if rank == 0 and destination_gid is not None:
            logger.info('processing destination gid %d' % destination_gid)
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise RuntimeError(
                'destination %s: destination gid %i not matched across multiple projection generators: '
                '%s' % (destination, destination_gid,
                        [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            for (source, (this_destination_gid,
                          rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)

    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source], op=add_bins_op)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source], op=add_bins_op)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source], op=add_bins_op)

    dist_hist_dict = defaultdict(dict)
    dist_u_hist_dict = defaultdict(dict)
    dist_v_hist_dict = defaultdict(dict)

    if rank == 0:
        for source in sources:
            dist_hist_dict[destination][source] = finalize_bins(
                dist_bins[source], bin_size)
            dist_u_hist_dict[destination][source] = finalize_bins(
                dist_u_bins[source], bin_size)
            dist_v_hist_dict[destination][source] = finalize_bins(
                dist_v_bins[source], bin_size)

    return {
        'Total distance': dist_hist_dict,
        'U distance': dist_u_hist_dict,
        'V distance': dist_v_hist_dict
    }
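
A usage sketch; paths and population labels are placeholders. The returned
dictionary maps metric name -> destination -> source -> (bin counts, bin
edges) as produced by finalize_bins, and is populated on rank 0 only:

hists = vertex_distribution('DG_connectivity.h5',   # hypothetical path
                            'DG_coords.h5',         # hypothetical path
                            'Arc Distances',
                            'GC',
                            sources=(),   # empty: discover all sources
                            bin_size=20.0)
counts, edges = hists['Total distance']['GC']['MPP']  # 'MPP' is hypothetical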
Code example #5
def spatial_bin_graph(connectivity_path,
                      coords_path,
                      distances_namespace,
                      destination,
                      sources,
                      extents,
                      bin_size=20.0,
                      cache_size=100,
                      comm=None):
    """
    Obtain reduced graphs of the specified projections by binning nodes according to their spatial position.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace: 
    :param destination: 
    :param source: 

    """

    import networkx as nx

    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)

    destination_soma_distances = bcast_cell_attributes(
        coords_path,
        destination,
        namespace=distances_namespace,
        comm=comm,
        root=0)

    ((x_min, x_max), (y_min, y_max)) = extents
    u_bins = np.arange(x_min, x_max, bin_size)
    v_bins = np.arange(y_min, y_max, bin_size)

    dest_u_bins = {}
    dest_v_bins = {}
    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        dist_u = v['U Distance'][0]
        dist_v = v['V Distance'][0]
        dest_u_bins[k] = np.searchsorted(u_bins, dist_u, side='left')
        dest_v_bins[k] = np.searchsorted(v_bins, dist_v, side='left')
        destination_soma_distance_U[k] = dist_u
        destination_soma_distance_V[k] = dist_v

    del destination_soma_distances

    if not sources:
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_u_bins = {}
    source_v_bins = {}
    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        this_source_u_bins = {}
        this_source_v_bins = {}
        for k, v in source_soma_distances[s]:
            dist_u = v['U Distance'][0]
            dist_v = v['V Distance'][0]
            this_source_u_bins[k] = np.searchsorted(u_bins,
                                                    dist_u,
                                                    side='left')
            this_source_v_bins[k] = np.searchsorted(v_bins,
                                                    dist_v,
                                                    side='left')
            this_source_soma_distance_U[k] = dist_u
            this_source_soma_distance_V[k] = dist_v
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
        source_u_bins[s] = this_source_u_bins
        source_v_bins[s] = this_source_v_bins
    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' %
                    (str(sources), destination))
    gg = [
        NeuroH5ProjectionGen(connectivity_path,
                             source,
                             destination,
                             cache_size=cache_size,
                             comm=comm) for source in sources
    ]


    local_u_bin_graph = defaultdict(dict)
    local_v_bin_graph = defaultdict(dict)

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([
                prj_gen_elt[0] == destination_gid
                for prj_gen_elt in prj_gen_tuple
        ]):
            raise RuntimeError(
                'destination %s: destination_gid %i not matched across multiple projection generators: '
                '%s' % (destination, destination_gid,
                        [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            dest_u_bin = dest_u_bins[destination_gid]
            dest_v_bin = dest_v_bins[destination_gid]
            for (source, (this_destination_gid,
                          rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_u_bins = source_u_bins[source]
                this_source_v_bins = source_v_bins[source]
                (source_indexes, attr_dict) = rest
                source_u_bin_dict = defaultdict(int)
                source_v_bin_dict = defaultdict(int)
                for source_gid in source_indexes:
                    source_u_bin = this_source_u_bins[source_gid]
                    source_v_bin = this_source_v_bins[source_gid]
                    source_u_bin_dict[source_u_bin] += 1
                    source_v_bin_dict[source_v_bin] += 1
                local_u_bin_graph[dest_u_bin][source] = source_u_bin_dict
                local_v_bin_graph[dest_v_bin][source] = source_v_bin_dict

    local_u_bin_graphs = comm.gather(dict(local_u_bin_graph), root=0)
    local_v_bin_graphs = comm.gather(dict(local_v_bin_graph), root=0)

    u_bin_graph = None
    v_bin_graph = None
    nu = None
    nv = None

    if rank == 0:

        u_bin_edges = {destination: dict(ChainMap(*local_u_bin_graphs))}
        v_bin_edges = {destination: dict(ChainMap(*local_v_bin_graphs))}

        nu = len(u_bins)
        u_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nu):
                u_bin_graph.add_node((pop, i))

        for i, ss in viewitems(u_bin_edges[destination]):
            for source, ids in viewitems(ss):
                u_bin_graph.add_weighted_edges_from([
                    ((source, j), (destination, i), count)
                    for j, count in viewitems(ids)
                ])

        nv = len(v_bins)
        v_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nv):
                v_bin_graph.add_node((pop, i))

        for i, ss in viewitems(v_bin_edges[destination]):
            for source, ids in viewitems(ss):
                v_bin_graph.add_weighted_edges_from([
                    ((source, j), (destination, i), count)
                    for j, count in viewitems(ids)
                ])

    label = '%s to %s' % (str(sources), destination)

    return {
        'label': label,
        'bin size': bin_size,
        'destination': destination,
        'sources': sources,
        'U graph': u_bin_graph,
        'V graph': v_bin_graph
    }
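
A usage sketch; paths, labels, and extents are placeholders. On rank 0 the
'U graph' and 'V graph' entries are networkx Graph objects whose nodes are
(population, bin index) pairs; on other ranks they are None:

graph_dict = spatial_bin_graph('DG_connectivity.h5',  # hypothetical path
                               'DG_coords.h5',        # hypothetical path
                               'Arc Distances',
                               'GC',
                               sources=(),
                               extents=((0.0, 4000.0), (0.0, 1250.0)),
                               bin_size=100.0)
if graph_dict['U graph'] is not None:  # non-None on rank 0 only
    print(graph_dict['U graph'].number_of_nodes())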
Code example #6
def vertex_metrics(connectivity_path,
                   coords_path,
                   vertex_metrics_namespace,
                   distances_namespace,
                   destination,
                   sources,
                   bin_size=50.,
                   metric='Indegree'):
    """
    Obtain vertex metrics with respect to septo-temporal position (longitudinal and transverse arc distances to reference points).

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace: 
    :param destination: 
    :param source: 
    :param bin_size: 
    :param metric: 

    """

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if sources == ():
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    degrees_dict = {}
    with h5py.File(connectivity_path, 'r') as f:
        for source in sources:
            degrees_dict[source] = f['Nodes'][vertex_metrics_namespace][
                '%s %s -> %s' %
                (metric, source,
                 destination)]['Attribute Value'][0:destination_count]

    for source in sources:
        logger.info('projection: %s -> %s: max: %i min: %i mean: %.2f stdev: %.2f (%d units)' %
                    (source, destination,
                     np.max(degrees_dict[source]),
                     np.min(degrees_dict[source]),
                     np.mean(degrees_dict[source]),
                     np.std(degrees_dict[source]),
                     len(degrees_dict[source])))

    if metric == 'Indegree':
        distances = read_cell_attributes(coords_path,
                                         destination,
                                         namespace=distances_namespace)
        soma_distances = {
            k: (v['U Distance'][0], v['V Distance'][0])
            for (k, v) in distances
        }
        del distances
    elif metric == 'Outdegree':
        distances = read_cell_attributes(coords_path,
                                         sources[0],
                                         namespace=distances_namespace)
        soma_distances = {
            k: (v['U Distance'][0], v['V Distance'][0])
            for (k, v) in distances
        }
        del distances
    else:
        raise ValueError("unsupported metric '%s'; expected 'Indegree' or 'Outdegree'" % metric)

    gids = sorted(soma_distances.keys())
    distance_U = np.asarray([soma_distances[gid][0] for gid in gids])
    distance_V = np.asarray([soma_distances[gid][1] for gid in gids])

    return (distance_U, distance_V, degrees_dict)
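
A usage sketch; paths, namespaces, and the population label are placeholders:

distance_U, distance_V, degrees_dict = vertex_metrics(
    'DG_connectivity.h5',   # hypothetical connectivity file
    'DG_coords.h5',         # hypothetical coordinates file
    'Vertex Metrics',       # hypothetical metrics namespace
    'Arc Distances',
    'GC',
    sources=(),
    metric='Indegree')
# distance_U and distance_V are per-gid arc distances; degrees_dict maps each
# source population to its per-destination-gid degree array.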