Example #1
def make_section_graph(neurotree_dict):
    """
    Creates a graph of sections that follows the topological organization of the given neuron.
    :param neurotree_dict: dictionary of neurotree attributes, either with a precomputed 'section_topology' entry or with 'src'/'dst'/'parent'/'sections' arrays
    :return: NetworkX.DiGraph
    """
    import networkx as nx

    if 'section_topology' in neurotree_dict:
        sec_src = neurotree_dict['section_topology']['src']
        sec_dst = neurotree_dict['section_topology']['dst']
        sec_loc = neurotree_dict['section_topology']['loc']
    else:
        sec_src = neurotree_dict['src']
        sec_dst = neurotree_dict['dst']
        sec_loc = []
        pt_parents = neurotree_dict['parent']
        sec_nodes = make_section_node_dict(neurotree_dict)
        # Derive each connection location as the index, within the source
        # section's points, of the destination section's parent point.
        for src, dst in zip_longest(sec_src, sec_dst):
            src_pts = sec_nodes[src]
            dst_pts = sec_nodes[dst]
            dst_parent = pt_parents[dst_pts[0]]
            loc = np.argwhere(src_pts == dst_parent)[0]
            sec_loc.append(loc)

    sec_graph = nx.DiGraph()
    for i, j, loc in zip(sec_src, sec_dst, sec_loc):
        sec_graph.add_edge(i, j, loc=loc)

    return sec_graph
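Usage sketch (not from the source): the function also relies on numpy as np, itertools.zip_longest, and the helper make_section_node_dict being available at module level, but none of those are touched when the input carries a precomputed 'section_topology' entry. The hypothetical input below exercises only that first branch:

# Hypothetical input exercising the 'section_topology' branch only.
neurotree_dict = {
    'section_topology': {
        'src': [0, 0],   # parent section of each edge
        'dst': [1, 2],   # child section of each edge
        'loc': [3, 7],   # connection locations, carried onto the edges as-is
    }
}

sec_graph = make_section_graph(neurotree_dict)
print(sorted(sec_graph.edges(data='loc')))  # [(0, 1, 3), (0, 2, 7)]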
Example #2
def make_spike_dict(spkinds, spkts):
    """
    Given arrays with cell indices and spike times, returns a dictionary with per-cell spike times.
    """
    spk_dict = defaultdict(list)
    for spkind, spkt in zip(np.nditer(spkinds), np.nditer(spkts)):
        spk_dict[int(spkind)].append(float(spkt))
    return spk_dict
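A short usage sketch: defaultdict and np are module-level imports in the source, so they are pulled in explicitly here; if this snippet sits in the same module as make_spike_dict, the imports also satisfy the function's own references.

from collections import defaultdict  # required by make_spike_dict
import numpy as np

spkinds = np.asarray([10, 11, 10, 12])      # cell gids, one entry per spike
spkts = np.asarray([0.5, 1.25, 2.0, 3.75])  # corresponding spike times

spk_dict = make_spike_dict(spkinds, spkts)
print(dict(spk_dict))  # {10: [0.5, 2.0], 11: [1.25], 12: [3.75]}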
Example #3
def import_morphology_from_hoc(cell, hoc_cell, section_content=None):
    """
    Append sections from an existing instance of a NEURON cell template to a Python cell wrapper.
    :param cell: :class:'BiophysCell'
    :param hoc_cell: :class:'h.hocObject': instance of a NEURON cell template
    :param section_content: optional per-section content, indexed by section index, attached to each section record
    """
    sec_info_dict = {}
    root_sec = None
    for sec_type, sec_index_list in viewitems(default_hoc_sec_lists):
        hoc_sec_attr_name = sec_type
        if not hasattr(hoc_cell, hoc_sec_attr_name):
            hoc_sec_attr_name = f'{sec_type}_list'
        if hasattr(hoc_cell, hoc_sec_attr_name) and (getattr(
                hoc_cell, hoc_sec_attr_name) is not None):
            sec_list = list(getattr(hoc_cell, hoc_sec_attr_name))
            if hasattr(hoc_cell, sec_index_list):
                sec_indexes = list(getattr(hoc_cell, sec_index_list))
            else:
                raise AttributeError(
                    'import_morphology_from_hoc: %s is not an attribute of the hoc cell'
                    % sec_index_list)
            if sec_type == 'soma':
                root_sec = sec_list[0]
            for sec, index in zip(sec_list, sec_indexes):
                if section_content is not None:
                    sec_info_dict[sec] = {
                        'section_type': sec_type,
                        'section_index': int(index),
                        'section_content': section_content[index]
                    }
                else:
                    sec_info_dict[sec] = {
                        'section_type': sec_type,
                        'section_index': int(index)
                    }
    if root_sec:
        insert_section_tree(cell, [root_sec], sec_info_dict)
    else:
        raise RuntimeError(
            'import_morphology_from_hoc: unable to locate root section')
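For orientation, a hypothetical sketch of the module-level default_hoc_sec_lists mapping that the loop above iterates over, and of the per-section records it builds. The 'soma'/'somaidx'/'apical' names are assumptions about the hoc template, not taken from the source.

# Hypothetical shapes only; the real mapping and cell templates live in the host module.
default_hoc_sec_lists = {
    'soma': 'somaidx',      # expects hoc_cell.soma (or soma_list) plus hoc_cell.somaidx
    'apical': 'apicalidx',  # expects hoc_cell.apical (or apical_list) plus hoc_cell.apicalidx
}

# Each sec_info_dict entry is keyed by a NEURON section and looks like:
example_record = {
    'section_type': 'apical',
    'section_index': 3,
    # 'section_content': ...,  # present only when section_content is passed in
}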
Example #4
def generate_synaptic_connections(rank,
                                  gid,
                                  ranstream_syn,
                                  ranstream_con,
                                  cluster_seed,
                                  destination_gid,
                                  synapse_dict,
                                  population_dict,
                                  projection_synapse_dict,
                                  projection_prob_dict,
                                  connection_dict,
                                  random_choice=random_choice_w_replacement,
                                  debug_flag=False):
    """
    Given a set of synapses for a particular gid, projection
    configuration, projection and connection probability dictionaries,
    generates a set of possible connections for each synapse. The
    procedure first assigns each synapse to a projection, using the
    given proportions of each synapse type, and then chooses source
    gids for each synapse using the given projection probability
    dictionary.

    :param ranstream_syn: random stream for the synapse partitioning step
    :param ranstream_con: random stream for the choosing source gids step
    :param destination_gid: destination gid
    :param synapse_dict: synapse configurations, a dictionary with fields: 1) syn_ids (synapse ids), 2) syn_types (excitatory, inhibitory, etc.),
                        3) swc_types (SWC type of each synapse location in the neuronal morphology), 4) syn_layers (synapse layer placement),
                        5) syn_cdists (per-synapse distance values used to order synapses)
    :param population_dict: mapping of population names to population indices
    :param projection_synapse_dict: mapping of projection names to a tuple of the form: <syn_type, syn_layers, syn_sections, syn_proportions, syn_contacts>
    :param projection_prob_dict: mapping of presynaptic population names to sets of source probabilities and source gids
    :param connection_dict: output connection dictionary
    :param random_choice: random choice procedure (the default, random_choice_w_replacement, uses the random stream's multinomial sampling)

    """
    num_projections = len(projection_synapse_dict)
    source_populations = sorted(projection_synapse_dict)
    prj_pop_index = {population: i for (i, population) in enumerate(source_populations)}
    synapse_prj_counts = np.zeros((num_projections,))
    synapse_prj_partition = defaultdict(lambda: defaultdict(list))
    maxit = 10
    it = 0
    syn_cdist_dict = {}
    ## assign each synapse to a projection
    while (np.count_nonzero(synapse_prj_counts) < num_projections) and (it < maxit):
        log_flag = it > 1
        if log_flag or debug_flag:
            logger.info(f"generate_synaptic_connections: gid {gid}: iteration {it}: "
                        f"source_populations = {source_populations} "
                        f"synapse_prj_counts = {synapse_prj_counts}")
        if debug_flag:
            logger.info(f'synapse_dict = {synapse_dict}')
        synapse_prj_counts.fill(0)
        synapse_prj_partition.clear()
        for (syn_id, syn_cdist, syn_type, swc_type, syn_layer) in zip(synapse_dict['syn_ids'],
                                                                      synapse_dict['syn_cdists'],
                                                                      synapse_dict['syn_types'],
                                                                      synapse_dict['swc_types'],
                                                                      synapse_dict['syn_layers']):
            syn_cdist_dict[syn_id] = syn_cdist
            projection = choose_synapse_projection(ranstream_syn, syn_layer, swc_type, syn_type, \
                                                   population_dict, projection_synapse_dict, log=log_flag)
            if log_flag or debug_flag:
                logger.info(f'generate_synaptic_connections: gid {gid}: '
                            f'syn_id = {syn_id} syn_type = {syn_type} swc_type = {swc_type} '
                            f'syn_layer = {syn_layer} source = {projection}')
            log_flag = False
            assert (projection is not None)
            synapse_prj_counts[prj_pop_index[projection]] += 1
            synapse_prj_partition[projection][syn_layer].append(syn_id)
        it += 1

    empty_projections = []

    for projection in projection_synapse_dict:
        logger.debug(f'Rank {rank}: gid {destination_gid}: source {projection} has {len(synapse_prj_partition[projection])} synapses')
        if len(synapse_prj_partition[projection]) == 0:
            empty_projections.append(projection)

    if len(empty_projections) > 0:
        logger.warning(f"Rank {rank}: gid {destination_gid}: projections {empty_projections} have an empty synapse list; "
                       f"swc types are {set(synapse_dict['swc_types'].flat)} layers are {set(synapse_dict['syn_layers'].flat)}")
    assert (len(empty_projections) == 0)

    ## Choose source connections based on distance-weighted probability
    count = 0
    for projection, prj_layer_dict in viewitems(synapse_prj_partition):
        (syn_config_type, syn_config_layers, syn_config_sections, syn_config_proportions, syn_config_contacts) = \
            projection_synapse_dict[projection]
        gid_dict = connection_dict[projection]
        prj_source_vertices = []
        prj_syn_ids = []
        prj_distances = []
        for prj_layer, syn_ids in viewitems(prj_layer_dict):
            source_probs, source_gids, distances_u, distances_v = \
                projection_prob_dict[projection][prj_layer]
            distance_dict = {source_gid: distance_u + distance_v \
                             for (source_gid, distance_u, distance_v) in \
                             zip(source_gids, distances_u, distances_v)}
            if len(source_gids) > 0:
                ordered_syn_ids = sorted(syn_ids, key=lambda x: syn_cdist_dict[x])
                n_syn_groups = int(math.ceil(float(len(syn_ids)) / float(syn_config_contacts)))
                source_gid_counts = random_choice(ranstream_con, n_syn_groups, source_probs)
                total_count = 0
                if syn_config_contacts > 1:
                    ncontacts = int(math.ceil(syn_config_contacts))
                    for i in range(0, len(source_gid_counts)):
                        if source_gid_counts[i] > 0:
                            source_gid_counts[i] *= ncontacts
                if len(source_gid_counts) == 0:
                    logger.warning(f'Rank {rank}: source vertices list is empty for gid: {destination_gid} '
                                   f'source: {projection} layer: {prj_layer} '
                                   f'source probs: {source_probs} distances_u: {distances_u} distances_v: {distances_v}')

                source_vertices = np.asarray(random_clustered_shuffle(len(source_gids), \
                                                                      source_gid_counts, \
                                                                      center_ids=source_gids, \
                                                                      cluster_std=2.0, \
                                                                      random_seed=cluster_seed), \
                                             dtype=np.uint32)[0:len(syn_ids)]
                assert (len(source_vertices) == len(syn_ids))
                distances = np.asarray([distance_dict[gid] for gid in source_vertices], \
                                       dtype=np.float32).reshape(-1, )
                prj_source_vertices.append(source_vertices)
                prj_syn_ids.append(ordered_syn_ids)
                prj_distances.append(distances)
                gid_dict[destination_gid] = (np.asarray([], dtype=np.uint32),
                                             {'Synapses': {'syn_id': np.asarray([], dtype=np.uint32)},
                                              'Connections': {'distance': np.asarray([], dtype=np.float32)}
                                              })
                cluster_seed += 1
        if len(prj_source_vertices) > 0:
            prj_source_vertices_array = np.concatenate(prj_source_vertices)
        else:
            prj_source_vertices_array = np.asarray([], dtype=np.uint32)
        del (prj_source_vertices)
        if len(prj_syn_ids) > 0:
            prj_syn_ids_array = np.concatenate(prj_syn_ids)
        else:
            prj_syn_ids_array = np.asarray([], dtype=np.uint32)
        del (prj_syn_ids)
        if len(prj_distances) > 0:
            prj_distances_array = np.concatenate(prj_distances)
        else:
            prj_distances_array = np.asarray([], dtype=np.float32)
        del (prj_distances)
        if len(prj_source_vertices_array) == 0:
            logger.warning(f'Rank {rank}: source gid list is empty for gid: {destination_gid} source: {projection}')
        count += len(prj_source_vertices_array)
        gid_dict[destination_gid] = (prj_source_vertices_array,
                                     {'Synapses': {'syn_id': np.asarray(prj_syn_ids_array, \
                                                                        dtype=np.uint32)},
                                      'Connections': {'distance': prj_distances_array}
                                      })

    return count
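The default random_choice procedure is random_choice_w_replacement, which the docstring describes as multinomial sampling from the supplied random stream. Below is a minimal sketch of that contract, assuming a numpy RandomState-like stream; it is an illustration of the expected interface, not the source implementation.

import numpy as np

def random_choice_w_replacement_sketch(ranstream, n, p):
    """Draw n samples with replacement over len(p) sources; return per-source counts."""
    p = np.asarray(p, dtype=np.float64)
    return ranstream.multinomial(n, pvals=p / p.sum())

ranstream_con = np.random.RandomState(42)
counts = random_choice_w_replacement_sketch(ranstream_con, 10, [0.2, 0.3, 0.5])
print(counts, counts.sum())  # per-source counts that sum to 10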
Example #5
def make_morph_graph(biophys_cell, node_filters={}):
    """
    Creates a graph of 3d points that follows the morphological organization of the given neuron.
    :param biophys_cell: :class:'BiophysCell'
    :param node_filters: dictionary of keyword arguments passed to filter_nodes to select the nodes included in the graph
    :return: NetworkX.Graph
    """
    import networkx as nx

    nodes = filter_nodes(biophys_cell, **node_filters)
    tree = biophys_cell.tree

    src_sec = []
    dst_sec = []
    connection_locs = []
    pt_xs = []
    pt_ys = []
    pt_zs = []
    pt_locs = []
    pt_idxs = []
    pt_layers = []
    pt_idx = 0
    sec_pts = collections.defaultdict(list)

    for node in nodes:
        sec = node.sec
        nn = sec.n3d()
        L = sec.L
        for i in range(nn):
            pt_xs.append(sec.x3d(i))
            pt_ys.append(sec.y3d(i))
            pt_zs.append(sec.z3d(i))
            loc = sec.arc3d(i) / L
            pt_locs.append(loc)
            pt_layers.append(node.get_layer(loc))
            pt_idxs.append(pt_idx)
            sec_pts[node.index].append(pt_idx)
            pt_idx += 1

        for child in tree.successors(node):
            src_sec.append(node.index)
            dst_sec.append(child.index)
            connection_locs.append(h.parent_connection(sec=child.sec))

    sec_pt_idxs = {}
    edges = []
    for sec, pts in viewitems(sec_pts):
        sec_pt_idxs[pts[0]] = sec
        for i in range(1, len(pts)):
            sec_pt_idxs[pts[i]] = sec
            src_pt = pts[i - 1]
            dst_pt = pts[i]
            edges.append((src_pt, dst_pt))

    for (s, d, parent_loc) in zip(src_sec, dst_sec, connection_locs):
        for src_pt in sec_pts[s]:
            if pt_locs[src_pt] >= parent_loc:
                break
        dst_pt = sec_pts[d][0]
        edges.append((src_pt, dst_pt))

    morph_graph = nx.Graph()
    morph_graph.add_nodes_from([(i, {
        'x': x,
        'y': y,
        'z': z,
        'sec': sec_pt_idxs[i],
        'loc': loc,
        'layer': layer
    }) for (i, x, y, z, loc, layer) in zip(range(len(pt_idxs)), pt_xs, pt_ys,
                                           pt_zs, pt_locs, pt_layers)])
    for i, j in edges:
        morph_graph.add_edge(i, j)

    return morph_graph
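The returned graph carries one node per 3D point with x/y/z/sec/loc/layer attributes and undirected edges along and between sections. Below is a small consumption sketch using a hand-built graph with the same attribute schema; constructing a real BiophysCell requires NEURON, so the stand-in is hypothetical.

import networkx as nx
import numpy as np

# Hand-built stand-in mirroring the node attribute schema of make_morph_graph.
morph_graph = nx.Graph()
morph_graph.add_nodes_from([
    (0, {'x': 0.0, 'y': 0.0, 'z': 0.0, 'sec': 0, 'loc': 0.0, 'layer': 1}),
    (1, {'x': 5.0, 'y': 0.0, 'z': 0.0, 'sec': 0, 'loc': 1.0, 'layer': 1}),
    (2, {'x': 5.0, 'y': 4.0, 'z': 0.0, 'sec': 1, 'loc': 1.0, 'layer': 2}),
])
morph_graph.add_edges_from([(0, 1), (1, 2)])

# Pull coordinates into an array, e.g. for plotting or distance measurements.
xyz = np.asarray([[d['x'], d['y'], d['z']] for _, d in morph_graph.nodes(data=True)])
layers = nx.get_node_attributes(morph_graph, 'layer')
print(xyz.shape, layers)  # (3, 3) {0: 1, 1: 1, 2: 2}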