def test_save_multinetwork_1():
    net1 = NetworkBuilder('NET1')
    net1.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
    net1.add_edges(source={'ei': 'e'}, target={'ei': 'e'}, connection_rule=5, ctype_1='n1_rec')
    net1.build()

    net2 = NetworkBuilder('NET2')
    net2.add_nodes(N=10, position=[(0.0, 1.0, -1.0)] * 10, cell_type='PV1', ei='i')
    net2.add_edges(connection_rule=10, ctype_1='n2_rec')
    net2.add_edges(source=net1.nodes(), target={'ei': 'i'}, connection_rule=1, ctype_2='n1_n2')
    net2.add_edges(target=net1.nodes(cell_type='Scnna1'), source={'cell_type': 'PV1'}, connection_rule=2,
                   ctype_2='n2_n1')
    net2.build()
    net_dir = tempfile.mkdtemp()
    net2.save_edges(edges_file_name='NET2_NET1_edges.h5', edge_types_file_name='NET2_NET1_edge_types.csv',
                    output_dir=net_dir, src_network='NET2')

    n1_n2_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET1')
    edges_h5 = h5py.File(n1_n2_fname + '_edges.h5', 'r')
    assert(len(edges_h5['/edges/NET2_to_NET1/target_node_id']) == 100*10)
    assert(len(edges_h5['/edges/NET2_to_NET1/0/nsyns']) == 100*10)
    assert(edges_h5['/edges/NET2_to_NET1/0/nsyns'][0] == 2)
    edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
    assert(len(edge_types_csv) == 1)
    assert('ctype_1' not in edge_types_csv.columns.values)
    assert(edge_types_csv['ctype_2'].iloc[0] == 'n2_n1')
Example #2
def test_cross_population_edges():
    tmp_dir = make_tmp_dir()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net_a1 = NetworkBuilder('A1')
    net_a1.add_nodes(N=100, model='A')
    net_a1.build()

    net_a2 = NetworkBuilder('A2')
    net_a2.add_nodes(N=100, model='B')
    net_a2.add_edges(
        source=net_a1.nodes(),
        target=net_a2.nodes(),
        connection_rule=lambda s, t: 1 if s.node_id == t.node_id else 0
    )
    net_a2.build()
    net_a2.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='A1_A2'
    )

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert(os.path.exists(edges_h5_path))
    with h5py.File(edges_h5_path, 'r') as h5:
        assert('/edges/A1_A2' in h5)
        assert(len(h5['/edges/A1_A2/source_node_id']) == 100)
        assert(h5['/edges/A1_A2/source_node_id'].attrs['node_population'] == 'A1')
        assert(len(h5['/edges/A1_A2/target_node_id']) == 100)
        assert(h5['/edges/A1_A2/target_node_id'].attrs['node_population'] == 'A2')

    barrier()
Example #3
def test_save_weights():
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='Scnna1', ei='e')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='PV1', ei='i')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, tags=np.linspace(0, 100, 100), cell_type='PV2', ei='i')
    cm = net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, connection_rule=lambda s, t: 3,
                       p1='e2i', p2='e2i')  # 200*100*3 = 60000 edges
    cm.add_properties(names=['segment', 'distance'], rule=lambda s, t: [1, 0.5], dtypes=[int, float])

    net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2, p1='s2p')  # 100*100*2 = 20000 edges

    net.build()
    net_dir = tempfile.mkdtemp()
    net.save_nodes('tmp_nodes.h5', 'tmp_node_types.csv', output_dir=net_dir)
    net.save_edges('tmp_edges.h5', 'tmp_edge_types.csv', output_dir=net_dir)

    edges_h5 = h5py.File('{}/tmp_edges.h5'.format(net_dir), 'r')
    assert(net.nedges == 80000)
    assert(len(edges_h5['/edges/NET1_to_NET1/0/distance']) == 60000)
    assert(len(edges_h5['/edges/NET1_to_NET1/0/segment']) == 60000)
    assert(len(edges_h5['/edges/NET1_to_NET1/1/nsyns']) == 10000)
    assert(edges_h5['/edges/NET1_to_NET1/0/distance'][0] == 0.5)
    assert(edges_h5['/edges/NET1_to_NET1/0/segment'][0] == 1)
    assert(edges_h5['/edges/NET1_to_NET1/1/nsyns'][0] == 2)
Example #4
def test_connection_map():
    tmp_dir = tempfile.mkdtemp()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net = NetworkBuilder('test')
    net.add_nodes(N=10, x=range(10), model='A')
    net.add_nodes(N=20, x=range(10, 30), model='B')

    net.add_edges(source={'model': 'A'}, target={'model': 'B'}, connection_rule=1, edge_model='A')

    cm = net.add_edges(source={'model': 'B'}, target={'model': 'B'}, connection_rule=2, edge_model='B')
    cm.add_properties(names='a', rule=5, dtypes=int)

    cm = net.add_edges(source={'model': 'B'}, target={'x': 0}, connection_rule=3, edge_model='C')
    cm.add_properties(names='b', rule=0.5, dtypes=float)
    cm.add_properties(names='c', rule=lambda *_: 2, dtypes=int)

    net.build()
    net.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='test_test'
    )

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert(os.path.exists(edges_h5_path))
    with h5py.File(edges_h5_path, 'r') as h5:
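        # One row per synapse: (pairs * nsyns) summed over the three edge models,
        # since groups with add_properties are expanded per synapse.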
        n_edges = 10*20*1 + 20*20*2 + 20*1*3
        assert('/edges/test_test' in h5)
        assert(len(h5['/edges/test_test/target_node_id']) == n_edges)
        assert(h5['/edges/test_test/target_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/source_node_id']) == n_edges)
        assert(h5['/edges/test_test/source_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/edge_type_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_index']) == n_edges)
        assert(len(np.unique(h5['/edges/test_test/edge_type_id'])) == 3)
        assert(len(np.unique(h5['/edges/test_test/edge_group_id'])) == 3)

        for grp_id, grp in h5['/edges/test_test'].items():
            if not isinstance(grp, h5py.Group) or grp_id in ['indicies', 'indices']:
                continue
            assert(int('nsyns' in grp) + int('a' in grp) + int('b' in grp and 'c' in grp) == 1)

    edge_type_csv_path = os.path.join(tmp_dir, edge_types_file)
    assert(os.path.exists(edge_type_csv_path))
    edge_types_df = pd.read_csv(edge_type_csv_path, sep=' ')
    assert(len(edge_types_df) == 3)
    assert('edge_type_id' in edge_types_df.columns)
    assert('edge_model' in edge_types_df.columns)

    barrier()
Example #5
def test_edge_models():
    tmp_dir = tempfile.mkdtemp()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net = NetworkBuilder('test')
    net.add_nodes(N=100, x=range(100), model='A')
    net.add_nodes(N=100, x=range(100, 200), model='B')
    net.add_edges(source={'model': 'A'}, target={'model': 'B'}, connection_rule=1, model='A')
    net.add_edges(source={'model': 'A'}, target={'x': 0}, connection_rule=2, model='B')
    net.add_edges(source={'model': 'A'}, target={'x': [1, 2, 3]}, connection_rule=3, model='C')
    net.add_edges(source={'model': 'A', 'x': 0}, target={'model': 'B', 'x': 100}, connection_rule=4, model='D')
    net.build()
    net.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='test_test'
    )

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert(os.path.exists(edges_h5_path))
    with h5py.File(edges_h5_path, 'r') as h5:
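        # One row per source/target pair: with no add_properties, all four edge
        # models share a single nsyns group.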
        n_edges = 100*100 + 100*1 + 100*3 + 1
        assert('/edges/test_test' in h5)
        assert(len(h5['/edges/test_test/target_node_id']) == n_edges)
        assert(h5['/edges/test_test/target_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/source_node_id']) == n_edges)
        assert(h5['/edges/test_test/source_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/edge_type_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_index']) == n_edges)

        assert(len(np.unique(h5['/edges/test_test/edge_type_id'])) == 4)
        assert(len(np.unique(h5['/edges/test_test/edge_group_id'])) == 1)
        grp_id = str(h5['/edges/test_test/edge_group_id'][0])
        assert(len(h5['/edges/test_test'][grp_id]['nsyns']) == n_edges)

    edge_type_csv_path = os.path.join(tmp_dir, edge_types_file)
    assert(os.path.exists(edge_type_csv_path))
    edge_types_df = pd.read_csv(edge_type_csv_path, sep=' ')
    assert(len(edge_types_df) == 4)
    assert('edge_type_id' in edge_types_df.columns)
    assert('model' in edge_types_df.columns)

    barrier()
Example #6
net.add_edges(source={'ei': 'e'}, target={'ei': 'i'},
              syn_weight=0.005,
              nsyns=20,
              delay=0.002,
              dynamics_params='ExcToInh.json')

net.add_edges(source={'ei': 'i'}, target={'ei': 'e'},
              syn_weight=-0.002,
              nsyns=10,
              delay=0.002,
              dynamics_params='InhToExc.json')

net.build()
net.save_nodes(nodes_file_name='brunel_nodes.h5', node_types_file_name='brunel_node_types.csv', output_dir='network')
net.save_edges(edges_file_name='brunel_edges.h5', edge_types_file_name='brunel_edge_types.csv', output_dir='network')


input_net = NetworkBuilder('inputs')
input_net.add_nodes(pop_name='tON',
                    ei='e',
                    model_type='virtual')

input_net.add_edges(target=net.nodes(ei='e'),
                    syn_weight=0.0025,
                    nsyns=10,
                    delay=0.002,
                    dynamics_params='input_ExcToExc.json')

input_net.build()
input_net.save_nodes(nodes_file_name='input_nodes.h5', node_types_file_name='input_node_types.csv',
                     output_dir='network')
Example #7
# net.add_edges(source=exc_stim.nodes(), target=net.nodes(),
#                 connection_rule=1,
#                 syn_weight=1,
#                 target_sections=['apic', 'dend'],
#                 delay=0.1,
#                 #distance_range=[149.0, 151.0], #0.348->0.31, 0.459->0.401
#                 distance_range=[50, 2000],#(2013, Pouille et al.)
#                 #distance_range=[1250,2000],
#                 #distance_range=[-500, 500],
#                 dynamics_params='PN2PN.json',
#                 model_template=syn['PN2PN.json']['level_of_detail'])

# Build and save our networks
net.build()
net.save_nodes(output_dir='network')
net.save_edges(output_dir='network')

# exc_stim.build()
# exc_stim.save_nodes(output_dir='network')

# import h5py
# f = h5py.File('exc_stim_spikes.h5', 'w')
# f.create_group('spikes')
# f['spikes'].create_group('exc_stim')
# f['spikes']['exc_stim'].create_dataset("node_ids", data=[0])
# f['spikes']['exc_stim'].create_dataset("timestamps", data=[400])
# f.close()

from bmtk.utils.sim_setup import build_env_bionet

build_env_bionet(base_dir='./',
Example #8
class SimulationBuilder:
    """Class used to build our BMTK simulation.

        Attributes
        ----------
        params : dict
                contains parameters for the network
        seed : int
                base random seed for the simulation
        syn : dict
                contains synaptic templates

        n_dend_exc : int
                number of excitatory input cells on the basal dendrites
        n_apic_exc : int
                number of excitatory input cells on the apical dendrites

        n_dend_inh : int
                number of inhibitory (SOM+) input cells on the basal dendrites 
                more than 50 um from the soma.
        n_apic_inh : int
                number of inhibitory (SOM+) input cells on the apical dendrites
        n_prox_dend_inh : int
                number of inhibitory (PV+) input cells on the basal dendrites
                less than 50 um from the soma
        n_soma_inh : int
                number of inhibitory (PV+) input cells on the soma

        clust_per_group : int
                number of clusters per functional group

        net : NetworkBuilder
                the BMTK network for the biophysical cell
        exc_stim : NetworkBuilder
                the BMTK network for excitatory inputs
        prox_inh_stim : NetworkBuilder
                the BMTK network for perisomatic inhibition
        dist_inh_stim : NetworkBuilder
                the BMTK network for dendritic inhibition

        dend_groups : list
                all excitatory functional groups on the basal dendrites
        apic_groups : list
                all excitatory functional groups on the apical dendrites

        Methods
        -------
        build()
                builds the network
        save_groups()
                saves the functional groups to a csv

        _set_prefixed_directory(base_dir_name : str)
                sets up the correct biophy_components structure based on the cell prefix in params for the given directory base

        _build_exc()
                creates excitatory input nodes and edges
        _build_exc_nodes(segs : pandas.DataFrame, base_name : str, n_cells : int, start=0 : int)
                builds excitatory nodes
        _build_exc_edges(group_list : list)
                builds excitatory edges

        _save_nets()
                builds and saves the BMTK NetworkBuilders

        _build_inh()
                creates inhibitory input nodes and edges
        
        _make_rasters()
                creates the inhibitory and excitatory input rasters
        _gen_exc_spikes(fname : str)
                generates and saves the excitatory spike rasters
        _gen_inh_spikes(n_cells : int, mean_fr : float, std_fr : float, rhythmic_dict : dict, key : str, fname : str)
                creates inhibitory spike rasters, using a noise trace based on averaging excitation and shifting it
        _modify_jsons()
                modifies the various JSON files as needed after they are built
        _modify_sim_config()
                modifies simulation_config.json as needed
        _update_cellvar_record_locs(sim_config : dict)
                modifies the location of cellvar recordings in the given JSON simulation_config

        Static Methods
        --------------
        _get_directory_prefix(directory : str)
                reads the prefix.txt file in the given directory and returns its contents
        _connector_func(sources : list, target : dict, cells : list)
                sets the number of synapses from the given cells
        _set_location(source : dict, target : dict, cells : list, start_id : int)
                sets the location of the given edge

        _norm_connect(source : dict, target : dict, m : float, s : float, low : int, high : int)
                used to normally distribute connection counts

        _gen_group_spikes(writer : SonataWriter, group : FunctionalGroup, seconds : float, start_time : float, dist : func)
                creates and saves a functional group's spike raster
        _norm_rvs(mean : float, std : float)
                generates a random float from a normal distribution with a near zero minimum
        """
    def __init__(self, params_file, seed=123):
        """Initializes the simulation builder, 
                setting up attributes but not actually building the BMTK network.

                Parameters
                ----------
                params_file : str
                    path to the JSON file with network parameters
                seed : int
                    base random seed for the simulation
                """
        #Loads the JSON file with information about the network.
        with open(params_file) as f:
            self.params = json.load(f)

        self.seed = seed

        #Loads synapse templates.
        synapses.load()
        self.syn = synapses.syn_params_dicts()

        avg_exc_div = np.mean(list(self.params["divergence"]["exc"].values()))

        self.n_dend_exc = int(
            (self.params["lengths"]["basal_dist"] *
             self.params["syn_density"]["exc"]) / avg_exc_div)
        self.n_apic_exc = int(
            (self.params["lengths"]["apic"] *
             self.params["syn_density"]["exc"]) / avg_exc_div)

        self.n_dend_inh = int((self.params["lengths"]["basal_dist"] *
                               self.params["syn_density"]["inh"]) /
                              self.params["divergence"]["basal_inh"]["m"])
        self.n_apic_inh = int((self.params["lengths"]["apic"] *
                               self.params["syn_density"]["inh"]) /
                              self.params["divergence"]["apic_inh"]["m"])

        self.n_prox_dend_inh = int((self.params["lengths"]["basal_prox"] *
                                    self.params["syn_density"]["inh"]) /
                                   self.params["divergence"]["peri_inh"]["m"])
        self.n_soma_inh = int(self.params["n_soma_syns"] /
                              self.params["divergence"]["peri_inh"]["m"])

        self.clust_per_group = int(
            (self.params["groups"]["cells_per_group"] * avg_exc_div) //
            (self.params["syn_density"]["exc"] * 10))
        if self.params["file_current_clamp"]["input_file"] == "None":
            self.file_current_clamp = None
        else:
            self.file_current_clamp = self.params["file_current_clamp"]

    def build(self):
        """Builds the nodes and edges for the network.
                """
        np.random.seed(self.seed)

        self._set_prefixed_directory("mechanisms")
        self._set_prefixed_directory("templates")

        self.net = NetworkBuilder("biophysical")

        self.net.add_nodes(
            N=1,
            pop_name='Pyrc',
            potential='exc',
            model_type='biophysical',
            dynamics_params=self.params["cell"]["dynamic_params"],
            model_template=self.params["cell"]["model_template"],
            model_processing=self.params["cell"]["model_processing"],
            morphology=self.params["cell"]["morphology"])

        self._build_exc()
        self._build_inh()
        self._save_nets()

        self._make_rasters()

        #Final build step.
        build_env_bionet(
            base_dir='./',
            network_dir='./network',
            dt=self.params["dt"],
            tstop=self.params["time"]["stop"] * 1000.0,
            report_vars=self.params["record_cellvars"]["vars"],
            dL=self.params["dL"],  #target length (um) of segments
            spikes_threshold=-10,
            file_current_clamp=self.file_current_clamp,
            spikes_inputs=[('exc_stim', 'exc_stim_spikes.h5'),
                           ('prox_inh_stim', 'prox_inh_stim_spikes.h5'),
                           ('dist_inh_stim', 'dist_inh_stim_spikes.h5')],
            components_dir='../biophys_components',
            compile_mechanisms=True)

        self._modify_jsons()

    def save_groups(self):
        """saves the apic and dend groups into a csv.
                one row for each node containgin the id of the functional group it is in.
                """
        all_groups = self.dend_groups + self.apic_groups
        node_ids = []
        func_groups = []

        for func_id, group in enumerate(all_groups):
            for i in range(group.start_id, group.start_id + group.n_cells):
                node_ids.append(i)
                func_groups.append(func_id)

        df = pd.DataFrame()
        df["Node ID"] = node_ids
        df["Functional Group"] = func_groups
        df.to_csv("FunctionalGroups.csv", index=False)

    def _set_prefixed_directory(self, base_dir_name):
        """Fixes the biophy_components directory. There should be only one directory
                named <base_dir_name> and it should be the one with the prefix.txt file in it
                that has the same prefix as params.

                Parameters
                ----------
                base_dir_name : str
                        base name of the set of directories to be fixed
                """
        components_path = "../biophys_components/"
        biophys_subdirs = [
            f.name for f in os.scandir(components_path) if f.is_dir()
        ]

        for dir_name in biophys_subdirs:
            if base_dir_name == dir_name:
                prefix = SimulationBuilder._get_directory_prefix(
                    components_path + dir_name)
                if prefix == self.params["cell"]["prefix"]:
                    return
                else:
                    os.rename(components_path + base_dir_name,
                              components_path + prefix + base_dir_name)

        for dir_name in biophys_subdirs:
            if base_dir_name in dir_name and self.params["cell"][
                    "prefix"] in dir_name:
                os.rename(components_path + dir_name,
                          components_path + base_dir_name)

    @staticmethod
    def _get_directory_prefix(directory):
        """Returns the contents of the prefix.txt file in the given directory.

                Parameters
                ----------
                directory : str
                        directory to look in

                Returns
                -------
                str
                        contents of prefix.txt
                """
        with open(directory + "/prefix.txt", 'r') as f:
            return f.read()

    def _build_exc(self):
        """Builds the excitatory input cells and their synapses.
                """

        # External excitatory inputs
        self.exc_stim = NetworkBuilder('exc_stim')

        #DataFrame of all segments on the cell.
        segs = pd.read_csv(self.params["cell"]["segments_file"])

        dends = segs[(segs["Type"] == "dend") & (segs["Distance"] >= 50)]
        apics = segs[(segs["Type"] == "apic")]

        np.random.seed(self.seed + 1)
        apic_start, self.dend_groups = self._build_exc_nodes(
            dends, "dend", self.n_dend_exc)

        np.random.seed(self.seed + 2)
        _, self.apic_groups = self._build_exc_nodes(apics,
                                                    "apic",
                                                    self.n_apic_exc,
                                                    start=apic_start)

        np.random.seed(self.seed + 3)
        self._build_exc_edges(self.dend_groups)

        np.random.seed(self.seed + 4)
        self._build_exc_edges(self.apic_groups)

    #Sets the number of synapses for each input cell.
    @staticmethod
    def _connector_func(sources, target, cells):
        """Used to set the number of synapses from each excitatory input
                cell in a functional group. Use with "all_to_one" iterator.

                Parameters
                ----------
                sources : list
                        presynaptic nodes (represented as dicts)
                target : dict
                        postsynaptic node
                cells : list
                        list of Cells in the FunctionalGroup

                Returns
                -------
                list
                        list of synapses for each pairing
                """
        return [cell.n_syns for cell in cells]

    #Sets the location of synapses based on the given cell list.
    @staticmethod
    def _set_location(source, target, cells, start_id):
        """Sets the location of the given synapse.

                Parameters
                ----------
                source : dict
                    source node information
                target : dict
                    target node information
                cells : list
                    Cells in the functional group
                start_id : int
                    start_id for the functional groups the cells come from

                Returns
                -------
                int
                    BMTK section id
                float
                    distance along the section
                """
        #Gets the proper index within the cell list.
        index = source.node_id - start_id

        seg = cells[index].get_seg()
        return seg.bmtk_id, seg.x

    #Creates the functional groups and adds the virtual cells to the
    #BMTK NetworkBuilder.
    def _build_exc_nodes(self, segs, base_name, n_cells, start=0):
        """Creates the functional groups and adds the virtual cells to the
                BMTK NetworkBuilder

                Parameters
                ----------
                segs : pandas.DataFrame
                    all the segments available for the functional groups
                base_name : str
                    the string that is appended to to make the group names.
                    groups get 0 - n_groups appended to their names.
                n_cells : int
                    total number of input cells that should be added.
                start : int, optional
                    starting id to be associated with the functional groups, by default 0
                    this is used later to associate cells in functional groups with the correct
                    locations and synapses.

                Returns
                -------
                int
                    what the start parameter should be for the next call to _build_exc_nodes
                list
                    list of functional groups that were created
                """
        start_id = start

        n_groups = n_cells // self.params["groups"]["cells_per_group"]
        n_extra = n_cells % self.params["groups"][
            "cells_per_group"]  #number of extra cells that don't evenly fit into groups

        group_list = []

        for i in range(n_groups):
            name = base_name + str(i)

            #Spreads out the extra cells.
            N = self.params["groups"]["cells_per_group"]
            if i < n_extra:
                N += 1

            self.exc_stim.add_nodes(N=N,
                                    pop_name=name,
                                    potential="exc",
                                    model_type='virtual')

            new_group = FunctionalGroup(
                segs,
                segs.sample().iloc[0], N, self.clust_per_group, name, start_id,
                partial(make_seg_sphere,
                        radius=self.params["groups"]["group_radius"]),
                partial(make_seg_sphere,
                        radius=self.params["groups"]["cluster_radius"]))
            group_list.append(new_group)
            start_id += N

        return start_id, group_list

    def _build_exc_edges(self, group_list):
        """Creates the connections between each cell in the list of groups
                and the biophysical cell.

                Parameters
                ----------
                group_list : list
                    list of functional groups
                """
        for group in group_list:

            #Creates the edges from each excitatory input cells in the group.
            conn = self.net.add_edges(
                source=self.exc_stim.nodes(pop_name=group.name),
                target=self.net.nodes(),
                iterator="all_to_one",
                connection_rule=SimulationBuilder._connector_func,
                connection_params={'cells': group.cells},
                syn_weight=1,
                delay=0.1,
                dynamics_params='PN2PN.json',
                model_template=self.syn['PN2PN.json']['level_of_detail'],
            )

            #Sets the postsynaptic locations of the connections.
            conn.add_properties(['sec_id', "sec_x"],
                                rule=SimulationBuilder._set_location,
                                rule_params={
                                    'cells': group.cells,
                                    'start_id': group.start_id
                                },
                                dtypes=[int, float])

    def _save_nets(self):
        """builds and saves the BMTK NetworkBuilders
                """
        # Build and save our networks
        np.random.seed(self.seed + 12)
        self.net.build()
        self.net.save_nodes(output_dir='network')
        np.random.seed(self.seed + 16)
        self.net.save_edges(output_dir='network')

        np.random.seed(self.seed + 13)
        self.exc_stim.build()
        self.exc_stim.save_nodes(output_dir='network')

        np.random.seed(self.seed + 14)
        self.prox_inh_stim.build()
        self.prox_inh_stim.save_nodes(output_dir='network')

        np.random.seed(self.seed + 15)
        self.dist_inh_stim.build()
        self.dist_inh_stim.save_nodes(output_dir='network')

    def _build_inh(self):
        """Creates inhibitory input nodes and their connections onto the biophysical cell
                """

        #####################Perisomatic Inhibition##############################
        self.prox_inh_stim = NetworkBuilder('prox_inh_stim')

        #Nodes that connect to soma.
        self.prox_inh_stim.add_nodes(N=self.n_soma_inh,
                                     pop_name='on_soma',
                                     potential='exc',
                                     model_type='virtual')

        #Nodes that connect to proximal dendrites.
        self.prox_inh_stim.add_nodes(N=self.n_prox_dend_inh,
                                     pop_name='on_dend',
                                     potential='exc',
                                     model_type='virtual')

        div_params = self.params["divergence"]["peri_inh"]

        #On soma.
        np.random.seed(self.seed + 5)
        self.net.add_edges(
            source=self.prox_inh_stim.nodes(pop_name='on_soma'),
            target=self.net.nodes(),
            connection_rule=SimulationBuilder._norm_connect,
            connection_params={
                "m": div_params["m"],
                "s": div_params["s"],
                "low": div_params["min"],
                "high": div_params["max"]
            },
            syn_weight=1,
            delay=0.1,
            dynamics_params='PV2PN.json',
            model_template=self.syn['PV2PN.json']['level_of_detail'],
            distance_range=[-2000, 2000.0],
            target_sections=['somatic'])

        #On dendrites within 50 um
        np.random.seed(self.seed + 6)
        self.net.add_edges(
            source=self.prox_inh_stim.nodes(pop_name='on_dend'),
            target=self.net.nodes(),
            connection_rule=SimulationBuilder._norm_connect,
            connection_params={
                "m": div_params["m"],
                "s": div_params["s"],
                "low": div_params["min"],
                "high": div_params["max"]
            },
            syn_weight=1,
            delay=0.1,
            dynamics_params='PV2PN.json',
            model_template=self.syn['PV2PN.json']['level_of_detail'],
            distance_range=[0, 50.0],
            target_sections=['dend'])
        #######################################################################################

        #############################Dendritic Inhibition######################################
        self.dist_inh_stim = NetworkBuilder('dist_inh_stim')

        self.dist_inh_stim.add_nodes(N=self.n_dend_inh,
                                     pop_name='dend',
                                     potential='exc',
                                     model_type='virtual')

        self.dist_inh_stim.add_nodes(N=self.n_apic_inh,
                                     pop_name='apic',
                                     potential='exc',
                                     model_type='virtual')

        div_params = self.params["divergence"]["basal_inh"]

        #Basal edges.
        np.random.seed(self.seed + 7)
        self.net.add_edges(
            source=self.dist_inh_stim.nodes(pop_name="dend"),
            target=self.net.nodes(),
            connection_rule=SimulationBuilder._norm_connect,
            connection_params={
                "m": div_params["m"],
                "s": div_params["s"],
                "low": div_params["min"],
                "high": div_params["max"]
            },
            syn_weight=1,
            delay=0.1,
            dynamics_params='SOM2PN.json',
            model_template=self.syn['SOM2PN.json']['level_of_detail'],
            distance_range=[50, 2000.0],
            target_sections=['dend'])

        div_params = self.params["divergence"]["apic_inh"]

        #Apic edges.
        np.random.seed(self.seed + 8)
        self.net.add_edges(
            source=self.dist_inh_stim.nodes(pop_name="apic"),
            target=self.net.nodes(),
            connection_rule=SimulationBuilder._norm_connect,
            connection_params={
                "m": div_params["m"],
                "s": div_params["s"],
                "low": div_params["min"],
                "high": div_params["max"]
            },
            syn_weight=1,
            delay=0.1,
            dynamics_params='SOM2PN.json',
            model_template=self.syn['SOM2PN.json']['level_of_detail'],
            distance_range=[50, 2000.0],
            target_sections=['apic'])

    @staticmethod
    def _norm_connect(source, target, m, s, low, high):
        """Returns a random number of synapses based on
                the given distribution.

                Parameters
                ----------
                source : dict
                    source node
                target : dict
                    target node
                m : float
                    mean number of connections
                s : float
                    standard deviation of number of connections
                low : int
                    minimum number of connections
                high : int
                    maximum number of connections

                Returns
                -------
                int
                    number of connections
                """
        return int(min(max(np.random.normal(m, s), low), high))

    def _make_rasters(self):
        """Generates excitatory and inhibitory input rasters
                """
        np.random.seed(self.seed + 9)
        self._gen_exc_spikes('exc_stim_spikes.h5')

        inh_frs = self.params["inh_frs"]

        #Makes perisomatic inhibitory raster.
        np.random.seed(self.seed + 10)
        self._gen_inh_spikes(self.n_soma_inh + self.n_prox_dend_inh,
                             inh_frs["proximal"]["m"],
                             inh_frs["proximal"]["s"],
                             inh_frs["proximal"]["rhythmicity"],
                             "prox_inh_stim", 'prox_inh_stim_spikes.h5')

        #Makes dendritic inhibitory raster.
        np.random.seed(self.seed + 11)
        self._gen_inh_spikes(self.n_apic_inh + self.n_dend_inh,
                             inh_frs["distal"]["m"], inh_frs["distal"]["s"],
                             inh_frs["distal"]["rhythmicity"], "dist_inh_stim",
                             'dist_inh_stim_spikes.h5')

    #Generates the spike raster for a given group.
    #The group has the same noise.
    @staticmethod
    def _gen_group_spikes(writer, group, seconds, start_time, dist):
        """Generates and writes to a h5 file the given functional group's spike trains

                Parameters
                ----------
                writer : SonataWriter
                    how the spike trains are saved
                group : FunctionalGroup
                    the functional group that the spike trains are being made for
                seconds : float
                    length of the spike trains in seconds
                start_time : float
                    what time (ms) the spike trains should start at
                dist : func
                    function for random distribution used for an individual cell's firing rate
                """
        z = make_noise(
            num_samples=(int(seconds * 1000)) - 1, num_traces=1
        )  #generates the noise trace common to each cell in the functional group.
        make_save_spikes(writer,
                         True,
                         dist(size=group.n_cells),
                         numUnits=group.n_cells,
                         rateProf=np.tile(z[0, :], (group.n_cells, 1)),
                         start_id=group.start_id,
                         start_time=start_time)

    #Creates the excitatory input raster from the functional groups.
    def _gen_exc_spikes(self, fname):
        """Generates the excitatory input raster for all of the functional groups

                Parameters
                ----------
                fname : str
                    name of the file to save the rasters in (.h5)
                """
        #distribution used for generating excitatory firing rates.
        levy_dist = partial(st.levy_stable.rvs,
                            alpha=1.37,
                            beta=-1.00,
                            loc=0.92,
                            scale=0.44,
                            size=1)

        length = self.params["time"]["stop"] - self.params["time"]["start"]
        buffer = self.params["time"]["start"]

        writer = SonataWriter(fname, ["spikes", "exc_stim"],
                              ["timestamps", "node_ids"], [np.float, np.int])

        for group in (self.dend_groups + self.apic_groups):
            SimulationBuilder._gen_group_spikes(writer, group, length,
                                                buffer * 1000, levy_dist)

    #Blocks off the bottom of a normal distribution.
    @staticmethod
    def _norm_rvs(mean, std):
        """Generates a random float from a normal distribution with a near zero minimum

                Parameters
                ----------
                mean : float
                    mean of the distribution
                std : float
                    standard deviation of the distribution

                Returns
                -------
                float
                    random float
                """
        return max(float(st.norm.rvs(loc=mean, scale=std, size=1)), 0.001)

    # #Makes a spike raster with each cell having its own noise trace.
    # def gen_inh_spikes(n_cells, mean_fr, std_fr, key, file, times):
    #         # node_ids = []
    #         # timestamps = []

    #         length = times[1] - times[0]
    #         buffer = times[0]

    #         writer = SonataWriter(file, ["spikes", key], ["timestamps", "node_ids"], [np.float, np.int])

    #         z = make_noise(num_samples=(int(length*1000))-1,num_traces=1)
    #         make_save_spikes(writer, False, partial(positive_normal, mean=mean_fr, std=std_fr), numUnits=n_cells,rateProf=z[0,:],start_time=buffer*1000)

    #Creates a spike raster where every cell shares the same noise trace, derived from a shifted average of the excitation.
    def _gen_inh_spikes(self, n_cells, mean_fr, std_fr, rhythmic_dict, key,
                        fname):
        """Generates a spike raster with each train having the noise trace from
                averaging excitation. Distributes firing rates normally.

                Parameters
                ----------
                n_cells : int
                    number of spike trains
                mean_fr : float
                    mean firing rate
                std_fr : float
                    standard deviation of the firing rate
                rhythmic_dict : dict
                    dictionary with keys f - frequency, mod - depth of modulation
                key : str
                    name of the second group in the h5 file
                fname : str
                    name of file to save the raster to
                """
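        #Truncated-normal bounds in standard-score units, clipping the
        #sampled firing rates to the range [0, 100] Hz.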
        a, b = (0 - mean_fr) / std_fr, (100 - mean_fr) / std_fr
        d = partial(st.truncnorm.rvs, a=a, b=b, loc=mean_fr, scale=std_fr)

        if rhythmic_dict['f'] == "None":
            f = h5py.File("exc_stim_spikes.h5", "r")
            ts = f['spikes']["exc_stim"]['timestamps']
            nid = f['spikes']["exc_stim"]['node_ids']

            #Creates a noise trace based on the excitatory spike raster.
            z = shift_exc_noise(ts,
                                nid,
                                self.params["time"]["stop"],
                                time_shift=self.params["inh_shift"])
            z = np.tile(z, (n_cells, 1))

            writer = SonataWriter(fname, ["spikes", key],
                                  ["timestamps", "node_ids"],
                                  [float, int])
            make_save_spikes(writer,
                             False,
                             d(size=n_cells),
                             numUnits=n_cells,
                             rateProf=z)

        else:
            # make an array of modulated sin waves
            # make_save_spikes should be written so that the firing rates are generated
            #    outside instead of inside the function.
            frs = d(size=n_cells)

            t = np.arange(0, self.params["time"]["stop"], 0.001)
            z = np.zeros((n_cells, t.shape[0]))
            P = 0
            for i in np.arange(0, n_cells):
                offset = frs[i]
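                #With rate z(t) = A*sin(2*pi*f*t + P) + offset, the modulation
                #depth is A / (A + offset); solving for the amplitude gives
                #A = offset / ((1/mod) - 1).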
                A = offset / ((1 / rhythmic_dict['mod']) - 1)
                z[i, :] = A * np.sin(
                    (2 * np.pi * rhythmic_dict['f'] * t) + P) + offset

            writer = SonataWriter(fname, ["spikes", key],
                                  ["timestamps", "node_ids"],
                                  [float, int])
            make_save_spikes(writer,
                             False,
                             np.ones((n_cells, 1)),
                             numUnits=n_cells,
                             rateProf=z)

    def _modify_jsons(self):
        """modifies the various json files however is needed after they are built"""
        self._modify_sim_config()

    def _modify_sim_config(self):
        """modifies the simulation_config.json however is needed"""
        with open("simulation_config.json", "r") as jsonFile:
            sim_config = json.load(jsonFile)

        self._update_cellvar_record_locs(sim_config)

        with open("simulation_config.json", "w") as jsonFile:
            json.dump(sim_config, jsonFile, indent=2)

    def _update_cellvar_record_locs(self, sim_config):
        """modifies the location of cellvar recordings in the given JSON simulation_config
                
                Parameters
                ----------
                sim_config : dict
                    simulation_config to modify
                """
        reports = sim_config["reports"]
        cellvar_reports = [
            report for report in reports.values()
            if report["module"] == "membrane_report"
        ]

        for loc, report in zip(self.params["record_cellvars"]["locs"],
                               cellvar_reports):
            report["sections"] = loc
Example #9
        'max_dist': 300.0,
        'min_syns': 1,
        'max_syns': 2
    },
    syn_weight=10,
    #weight_function = 'Lognormal',
    #weight_sigma=5,
    dynamics_params='GABA_InhToExc.json',
    model_template='exp2syn',
    distance_range=[0.0, 300.0],
    target_sections=['basal', 'apical'],
    delay=2.0)

netff.build()
netff.save_nodes(output_dir='network')
netff.save_edges(output_dir='network')

print("Internal nodes and edges built")


# Create connections between "thalamus" and Pyramidals
# First define the connection rule
def one_to_one(source, target):
    #print("one to one")
    sid = source.node_id
    tid = target.node_id

    if tid > 89:
        print("working on Bask")
    if sid == tid:
        print("connecting cell {} to {}".format(sid, tid))
        #Assumed return values: one synapse when the ids match, none otherwise.
        return 1
    return 0
Example #10
def test_basic():
    tmp_dir = make_tmp_dir()
    nodes_file = make_tmp_file(suffix='.h5')
    node_types_file = make_tmp_file(suffix='.csv')
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net = NetworkBuilder('test')
    net.add_nodes(N=100, a=np.arange(100), b='B')
    net.add_edges(
        source={'a': 0},
        target=net.nodes(),
        connection_rule=2,
        x='X'
    )

    net.build()
    net.save_nodes(
        nodes_file_name=nodes_file,
        node_types_file_name=node_types_file,
        output_dir=tmp_dir
    )
    net.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='test_test'
    )

    nodes_h5_path = os.path.join(tmp_dir, nodes_file)
    assert(os.path.exists(nodes_h5_path))
    with h5py.File(nodes_h5_path, 'r') as h5:
        assert('/nodes/test' in h5)
        assert(len(h5['/nodes/test/node_id']) == 100)
        assert(len(h5['/nodes/test/node_type_id']) == 100)
        assert('/nodes/test/node_group_id' in h5)
        assert('/nodes/test/node_group_index' in h5)
        assert(len(h5['/nodes/test/0/a']) == 100)

    node_types_csv_path = os.path.join(tmp_dir, node_types_file)
    assert(os.path.exists(node_types_csv_path))
    node_types_df = pd.read_csv(node_types_csv_path, sep=' ')
    assert(len(node_types_df) == 1)
    assert('node_type_id' in node_types_df.columns)
    assert('b' in node_types_df.columns)

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert(os.path.exists(edges_h5_path))
    with h5py.File(edges_h5_path, 'r') as h5:
        assert('/edges/test_test' in h5)
        assert(len(h5['/edges/test_test/target_node_id']) == 100)
        assert(h5['/edges/test_test/target_node_id'].attrs['node_population'] == 'test')
        assert(set(h5['/edges/test_test/target_node_id'][()]) == set(range(100)))

        assert(len(h5['/edges/test_test/source_node_id']) == 100)
        assert(h5['/edges/test_test/source_node_id'].attrs['node_population'] == 'test')
        assert(np.array_equal(np.unique(h5['/edges/test_test/source_node_id'][()]), [0]))

        assert(len(h5['/edges/test_test/edge_type_id']) == 100)
        assert('/edges/test_test/edge_group_id' in h5)
        assert('/edges/test_test/edge_group_index' in h5)
        assert(len(h5['/edges/test_test/0/nsyns']) == 100)

    edge_type_csv_path = os.path.join(tmp_dir, edge_types_file)
    assert(os.path.exists(edge_type_csv_path))
    edge_types_df = pd.read_csv(edge_type_csv_path, sep=' ')
    assert(len(edge_types_df) == 1)
    assert('edge_type_id' in edge_types_df.columns)
    assert('x' in edge_types_df.columns)

    barrier()
Example #11
def test_save_nsyn_table():
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  cell_type='Scnna1',
                  ei='e')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  cell_type='PV1',
                  ei='i')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  tags=np.linspace(0, 100, 100),
                  cell_type='PV2',
                  ei='i')
    net.add_edges(source={'ei': 'i'},
                  target={'ei': 'e'},
                  connection_rule=lambda s, t: 1,
                  p1='e2i',
                  p2='e2i')  # 200*100 = 20000 edges
    net.add_edges(source=net.nodes(cell_type='Scnna1'),
                  target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2,
                  p1='s2p')  # 100*100*2 = 20000
    net.build()
    net.save_nodes('tmp_nodes.h5', 'tmp_node_types.csv')
    net.save_edges('tmp_edges.h5', 'tmp_edge_types.csv')

    assert (os.path.exists('tmp_nodes.h5')
            and os.path.exists('tmp_node_types.csv'))
    node_types_df = pd.read_csv('tmp_node_types.csv', sep=' ')
    assert (len(node_types_df) == 3)
    assert ('cell_type' in node_types_df.columns)
    assert ('ei' in node_types_df.columns)
    assert ('positions' not in node_types_df.columns)

    nodes_h5 = h5py.File('tmp_nodes.h5', 'r')
    assert ('node_gid' in nodes_h5['/nodes'])
    assert (len(nodes_h5['/nodes/node_gid']) == 300)
    assert (len(nodes_h5['/nodes/node_type_id']) == 300)
    assert (len(nodes_h5['/nodes/node_group']) == 300)
    assert (len(nodes_h5['/nodes/node_group_index']) == 300)

    node_groups = {
        id: grp
        for id, grp in nodes_h5['/nodes'].items()
        if isinstance(grp, h5py.Group)
    }
    for grp in node_groups.values():
        if len(grp) == 1:
            assert ('position' in grp and len(grp['position']) == 200)

        elif len(grp) == 2:
            assert ('position' in grp and len(grp['position']) == 100)
            assert ('tags' in grp and len(grp['tags']) == 100)

        else:
            assert (False)

    assert (os.path.exists('tmp_edges.h5')
            and os.path.exists('tmp_edge_types.csv'))
    edge_types_df = pd.read_csv('tmp_edge_types.csv', sep=' ')
    assert (len(edge_types_df) == 2)
    assert ('p1' in edge_types_df.columns)
    assert ('p2' in edge_types_df.columns)

    edges_h5 = h5py.File('tmp_edges.h5', 'r')
    assert (len(edges_h5['/edges/index_pointer']) == 301)
    assert (len(edges_h5['/edges/target_gid']) == 30000)
    assert (len(edges_h5['/edges/source_gid']) == 30000)

    assert (edges_h5['/edges/target_gid'][0] == 0)
    assert (edges_h5['/edges/source_gid'][0] == 100)
    assert (edges_h5['/edges/edge_group'][0] == 0)
    assert (edges_h5['/edges/edge_type_id'][0] == 100)
    assert (edges_h5['/edges/0/nsyns'][0] == 1)

    assert (edges_h5['/edges/target_gid'][29999] == 199)
    assert (edges_h5['/edges/source_gid'][29999] == 99)
    assert (edges_h5['/edges/edge_group'][29999] == 0)
    assert (edges_h5['/edges/edge_type_id'][29999] == 101)
    assert (edges_h5['/edges/0/nsyns'][29999] == 2)

    try:
        os.remove('tmp_nodes.h5')
        os.remove('tmp_node_types.csv')
        os.remove('tmp_edges.h5')
        os.remove('tmp_edge_types.csv')
    except OSError:
        pass
Example #12
              syn_weight=2.0,
              delay=1.5,
              dynamics_params='ExcToInh.json',
              model_template='static_synapse')

net.add_edges(source={'ei': 'i'},
              connection_rule=random_connections,
              connection_params={'p': 0.1},
              syn_weight=-1.5,
              delay=1.5,
              dynamics_params='InhToExc.json',
              model_template='static_synapse')

net.build()
net.save_nodes(output_dir='network')
net.save_edges(output_dir='network')

input_network_model = {
    'input_network': {
        'N': 100,
        'ei': 'e',
        'pop_name': 'input_network',
        'model_type': 'virtual'
    }
}

inputNetwork = NetworkBuilder("thalamus")
inputNetwork.add_nodes(**input_network_model['input_network'])

inputNetwork.add_edges(target=net.nodes(),
                       connection_rule=random_connections,
Example #13
def test_save_multinetwork():
    net1 = NetworkBuilder('NET1')
    net1.add_nodes(N=100,
                   position=[(0.0, 1.0, -1.0)] * 100,
                   cell_type='Scnna1',
                   ei='e')
    net1.add_edges(source={'ei': 'e'},
                   target={'ei': 'e'},
                   connection_rule=5,
                   ctype_1='n1_rec')
    net1.build()

    net2 = NetworkBuilder('NET2')
    net2.add_nodes(N=10,
                   position=[(0.0, 1.0, -1.0)] * 10,
                   cell_type='PV1',
                   ei='i')
    net2.add_edges(connection_rule=10, ctype_1='n2_rec')
    net2.add_edges(source=net1.nodes(),
                   target={'ei': 'i'},
                   connection_rule=1,
                   ctype_2='n1_n2')
    net2.add_edges(target=net1.nodes(cell_type='Scnna1'),
                   source={'cell_type': 'PV1'},
                   connection_rule=2,
                   ctype_2='n2_n1')
    net2.build()

    net1.save_edges(output_dir='tmp_output')
    net2.save_edges(output_dir='tmp_output')

    n1_n1_fname = 'tmp_output/{}_{}'.format('NET1', 'NET1')
    edges_h5 = h5py.File(n1_n1_fname + '_edges.h5', 'r')
    assert (len(edges_h5['/edges/target_gid']) == 100 * 100)
    assert (len(edges_h5['/edges/0/nsyns']) == 100 * 100)
    assert (edges_h5['/edges/0/nsyns'][0] == 5)
    edge_types_csv = pd.read_csv(n1_n1_fname + '_edge_types.csv', sep=' ')
    assert (len(edge_types_csv) == 1)
    assert ('ctype_2' not in edge_types_csv.columns.values)
    assert (edge_types_csv['ctype_1'].iloc[0] == 'n1_rec')

    n1_n2_fname = 'tmp_output/{}_{}'.format('NET1', 'NET2')
    edges_h5 = h5py.File(n1_n2_fname + '_edges.h5', 'r')
    assert (len(edges_h5['/edges/target_gid']) == 100 * 10)
    assert (len(edges_h5['/edges/0/nsyns']) == 100 * 10)
    assert (edges_h5['/edges/0/nsyns'][0] == 1)
    edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
    assert (len(edge_types_csv) == 1)
    assert ('ctype_1' not in edge_types_csv.columns.values)
    assert (edge_types_csv['ctype_2'].iloc[0] == 'n1_n2')

    n2_n1_fname = 'tmp_output/{}_{}'.format('NET2', 'NET1')
    edges_h5 = h5py.File(n2_n1_fname + '_edges.h5', 'r')
    assert (len(edges_h5['/edges/target_gid']) == 100 * 10)
    assert (len(edges_h5['/edges/0/nsyns']) == 100 * 10)
    assert (edges_h5['/edges/0/nsyns'][0] == 2)
    edge_types_csv = pd.read_csv(n2_n1_fname + '_edge_types.csv', sep=' ')
    assert (len(edge_types_csv) == 1)
    assert ('ctype_1' not in edge_types_csv.columns.values)
    assert (edge_types_csv['ctype_2'].iloc[0] == 'n2_n1')

    n2_n2_fname = 'tmp_output/{}_{}'.format('NET2', 'NET2')
    edges_h5 = h5py.File(n2_n2_fname + '_edges.h5', 'r')
    assert (len(edges_h5['/edges/target_gid']) == 10 * 10)
    assert (len(edges_h5['/edges/0/nsyns']) == 10 * 10)
    assert (edges_h5['/edges/0/nsyns'][0] == 10)
    edge_types_csv = pd.read_csv(n2_n2_fname + '_edge_types.csv', sep=' ')
    assert (len(edge_types_csv) == 1)
    assert ('ctype_2' not in edge_types_csv.columns.values)
    assert (edge_types_csv['ctype_1'].iloc[0] == 'n2_rec')

    try:
        shutil.rmtree('tmp_output')
    except OSError:
        pass
Example #14
def test_save_nsyn_table():
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  cell_type='Scnna1',
                  ei='e')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  cell_type='PV1',
                  ei='i')
    net.add_nodes(N=100,
                  position=[(0.0, 1.0, -1.0)] * 100,
                  tags=np.linspace(0, 100, 100),
                  cell_type='PV2',
                  ei='i')
    net.add_edges(source={'ei': 'i'},
                  target={'ei': 'e'},
                  connection_rule=lambda s, t: 1,
                  p1='e2i',
                  p2='e2i')  # 200*100 = 20000 edges
    net.add_edges(source=net.nodes(cell_type='Scnna1'),
                  target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2,
                  p1='s2p')  # 100*100*2 = 20000
    net.build()
    nodes_h5 = tempfile.NamedTemporaryFile(suffix='.h5')
    nodes_csv = tempfile.NamedTemporaryFile(suffix='.csv')
    edges_h5 = tempfile.NamedTemporaryFile(suffix='.h5')
    edges_csv = tempfile.NamedTemporaryFile(suffix='.csv')

    net.save_nodes(nodes_h5.name, nodes_csv.name)
    net.save_edges(edges_h5.name, edges_csv.name)

    assert (os.path.exists(nodes_h5.name) and os.path.exists(nodes_csv.name))
    node_types_df = pd.read_csv(nodes_csv.name, sep=' ')
    assert (len(node_types_df) == 3)
    assert ('cell_type' in node_types_df.columns)
    assert ('ei' in node_types_df.columns)
    assert ('positions' not in node_types_df.columns)

    nodes_h5 = h5py.File(nodes_h5.name, 'r')
    assert ('node_id' in nodes_h5['/nodes/NET1'])
    assert (len(nodes_h5['/nodes/NET1/node_id']) == 300)
    assert (len(nodes_h5['/nodes/NET1/node_type_id']) == 300)
    assert (len(nodes_h5['/nodes/NET1/node_group_id']) == 300)
    assert (len(nodes_h5['/nodes/NET1/node_group_index']) == 300)

    node_groups = {
        nid: grp
        for nid, grp in nodes_h5['/nodes/NET1'].items()
        if isinstance(grp, h5py.Group)
    }
    for grp in node_groups.values():
        if len(grp) == 1:
            assert ('position' in grp and len(grp['position']) == 200)

        elif len(grp) == 2:
            assert ('position' in grp and len(grp['position']) == 100)
            assert ('tags' in grp and len(grp['tags']) == 100)

        else:
            assert False

    assert (os.path.exists(edges_h5.name) and os.path.exists(edges_csv.name))
    edge_types_df = pd.read_csv(edges_csv.name, sep=' ')
    assert (len(edge_types_df) == 2)
    assert ('p1' in edge_types_df.columns)
    assert ('p2' in edge_types_df.columns)

    edges_h5 = h5py.File(edges_h5.name, 'r')
    assert ('source_to_target' in edges_h5['/edges/NET1_to_NET1/indicies'])
    assert ('target_to_source' in edges_h5['/edges/NET1_to_NET1/indicies'])
    assert (len(edges_h5['/edges/NET1_to_NET1/target_node_id']) == 30000)
    assert (len(edges_h5['/edges/NET1_to_NET1/source_node_id']) == 30000)

    assert (edges_h5['/edges/NET1_to_NET1/target_node_id'][0] == 0)
    assert (edges_h5['/edges/NET1_to_NET1/source_node_id'][0] == 100)
    assert (edges_h5['/edges/NET1_to_NET1/edge_group_index'][0] == 0)
    assert (edges_h5['/edges/NET1_to_NET1/edge_type_id'][0] == 100)
    assert (edges_h5['/edges/NET1_to_NET1/0/nsyns'][0] == 1)

    assert (edges_h5['/edges/NET1_to_NET1/target_node_id'][29999] == 199)
    assert (edges_h5['/edges/NET1_to_NET1/source_node_id'][29999] == 99)
    assert (edges_h5['/edges/NET1_to_NET1/edge_group_id'][29999] == 0)
    assert (edges_h5['/edges/NET1_to_NET1/edge_type_id'][29999] == 101)
    assert (edges_h5['/edges/NET1_to_NET1/0/nsyns'][29999] == 2)