def test_mulitnet_iterator():
    """Edges can be filtered by source/target network name and by edge property."""
    src_net = NetworkBuilder('NET1')
    src_net.add_nodes(N=50, cell_type='Rorb', ei='e')
    src_net.build()

    trg_net = NetworkBuilder('NET2')
    trg_net.add_nodes(N=100, cell_type='Scnna1', ei='e')
    trg_net.add_nodes(N=100, cell_type='PV1', ei='i')
    # recurrent e->i edges within NET2
    trg_net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5,
                      syn_type='e2i', net_type='rec')
    # feedforward edges from NET1 onto NET2's excitatory cells
    trg_net.add_edges(source=src_net.nodes(), target={'ei': 'e'}, connection_rule=1,
                      syn_type='e2e', net_type='fwd')
    trg_net.build()

    n_fwd = 50 * 100
    n_rec = 100 * 100
    assert len(trg_net.edges()) == n_fwd + n_rec
    assert len(trg_net.edges(source_network='NET2', target_network='NET1')) == 0
    assert len(trg_net.edges(source_network='NET1', target_network='NET2')) == n_fwd
    assert len(trg_net.edges(target_network='NET2', net_type='rec')) == n_rec

    fwd_edges = trg_net.edges(source_network='NET1')
    assert len(fwd_edges) == n_fwd
    for edge in fwd_edges:
        assert edge['net_type'] == 'fwd'
def test_cross_population_edges():
    """Saved cross-population edges record the correct node_population attrs."""
    tmp_dir = make_tmp_dir()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    pop_src = NetworkBuilder('A1')
    pop_src.add_nodes(N=100, model='A')
    pop_src.build()

    pop_trg = NetworkBuilder('A2')
    pop_trg.add_nodes(N=100, model='B')
    # one-to-one wiring: an edge only where node ids match
    pop_trg.add_edges(
        source=pop_src.nodes(),
        target=pop_trg.nodes(),
        connection_rule=lambda s, t: 1 if s.node_id == t.node_id else 0
    )
    pop_trg.build()
    pop_trg.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='A1_A2'
    )

    h5_path = os.path.join(tmp_dir, edges_file)
    assert os.path.exists(h5_path)
    with h5py.File(h5_path, 'r') as h5:
        assert '/edges/A1_A2' in h5
        src_ids = h5['/edges/A1_A2/source_node_id']
        trg_ids = h5['/edges/A1_A2/target_node_id']
        assert len(src_ids) == 100
        assert src_ids.attrs['node_population'] == 'A1'
        assert len(trg_ids) == 100
        assert trg_ids.attrs['node_population'] == 'A2'
    barrier()
def test_add_edges():
    """Edge-type parameters passed to add_edges() show up on the built edges."""
    v1 = NetworkBuilder('V1')
    v1.add_nodes(N=10, cell_type='Scnna1', ei='e')
    v1.add_nodes(N=10, cell_type='PV1', ei='i')
    v1.add_nodes(N=10, cell_type='PV2', ei='i')
    # inhibitory -> excitatory, one synapse per pair
    v1.add_edges(
        source={'ei': 'i'},
        target={'ei': 'e'},
        connection_rule=lambda s, t: 1,
        edge_arg='i2e'
    )
    # Scnna1 -> PV1, two synapses per pair
    v1.add_edges(
        source=v1.nodes(cell_type='Scnna1'),
        target=v1.nodes(cell_type='PV1'),
        connection_rule=2,
        edge_arg='e2i'
    )
    v1.build()

    assert v1.nedges == 200 + 200
    assert v1.edges_built is True
    for edge in v1.edges(target_nodes=v1.nodes(cell_type='Scnna1')):
        assert edge['edge_arg'] == 'i2e'
        assert edge['nsyns'] == 1
    for edge in v1.edges(target_nodes=v1.nodes(cell_type='PV1')):
        assert edge['edge_arg'] == 'e2i'
        assert edge['nsyns'] == 2
def build_injective_inputs(target_net):
    """Create an 'external' virtual network wired injectively onto target_net.

    One virtual node is created per node in target_net; relies on the
    module-level ``injective_connections`` rule and delay ``D``.
    """
    print('Building External Network')
    # One virtual source node for every node in the target network.
    external_props = {
        'N': len(target_net.nodes()),
        'ei': 'e',
        'pop_name': 'input_network',
        'model_type': 'virtual',
    }
    ext_net = NetworkBuilder("external")
    ext_net.add_nodes(**external_props)
    # Drive the excitatory population.
    ext_net.add_edges(
        target=target_net.nodes(pop_name='LIF_exc'),
        connection_rule=injective_connections,
        iterator='all_to_one',  # makes building a little faster
        syn_weight=200,
        delay=D,
        dynamics_params='ExcToExc.json',
        model_template='static_synapse'
    )
    # Drive the inhibitory population with a weaker weight.
    ext_net.add_edges(
        target=target_net.nodes(pop_name='LIF_inh'),
        connection_rule=injective_connections,
        iterator='all_to_one',
        syn_weight=100,
        delay=D,
        dynamics_params='ExcToExc.json',
        model_template='static_synapse'
    )
    ext_net.build()
    ext_net.save(output_dir='network')
def test_save_weights():
    """Edge property groups are written to the correct hdf5 datasets.

    Fixes from the original:
    - ``np.int``/``np.float`` were deprecated in numpy 1.20 and removed in
      1.24 (AttributeError); replaced with builtin ``int``/``float``.
    - the hdf5 file handle was leaked; now closed via a context manager.
    - corrected the arithmetic in the inline comments.
    """
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='Scnna1', ei='e')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='PV1', ei='i')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, tags=np.linspace(0, 100, 100),
                  cell_type='PV2', ei='i')
    # 200 sources x 100 targets x 3 synapses = 60000 synapses
    cm = net.add_edges(source={'ei': 'i'}, target={'ei': 'e'},
                       connection_rule=lambda s, t: 3, p1='e2i', p2='e2i')
    cm.add_properties(names=['segment', 'distance'], rule=lambda s, t: [1, 0.5],
                      dtypes=[int, float])
    # 100 x 100 pairs x 2 synapses = 20000 synapses (10000 nsyns rows)
    net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2, p1='s2p')
    net.build()

    net_dir = tempfile.mkdtemp()
    net.save_nodes('tmp_nodes.h5', 'tmp_node_types.csv', output_dir=net_dir)
    net.save_edges('tmp_edges.h5', 'tmp_edge_types.csv', output_dir=net_dir)

    assert(net.nedges == 80000)
    with h5py.File('{}/tmp_edges.h5'.format(net_dir), 'r') as edges_h5:
        assert(len(edges_h5['/edges/NET1_to_NET1/0/distance']) == 60000)
        assert(len(edges_h5['/edges/NET1_to_NET1/0/segment']) == 60000)
        assert(len(edges_h5['/edges/NET1_to_NET1/1/nsyns']) == 10000)
        assert(edges_h5['/edges/NET1_to_NET1/0/distance'][0] == 0.5)
        assert(edges_h5['/edges/NET1_to_NET1/0/segment'][0] == 1)
        assert(edges_h5['/edges/NET1_to_NET1/1/nsyns'][0] == 2)
def test_nsyn_edges():
    """nedges totals the per-pair synapse counts from each connection rule."""
    builder = NetworkBuilder('NET1')
    builder.add_nodes(N=100, cell_type='Scnna1', ei='e')
    builder.add_nodes(N=100, cell_type='PV1', ei='i')
    builder.add_nodes(N=100, cell_type='PV2', ei='i')
    # 200 inhibitory sources x 100 excitatory targets x 1 synapse = 20000
    builder.add_edges(source={'ei': 'i'}, target={'ei': 'e'},
                      connection_rule=lambda s, t: 1)
    # 100 x 100 pairs x 2 synapses = 20000
    builder.add_edges(
        source=builder.nodes(cell_type='Scnna1'),
        target=builder.nodes(cell_type='PV1'),
        connection_rule=lambda s, t: 2
    )
    builder.build()

    assert builder.nedges == 20000 + 20000
    assert builder.edges_built is True
def test_connection_map():
    """Each add_properties() set gets its own edge-group in the saved h5 file.

    Fixes from the original:
    - the per-group assertion tested ``'c' in grp and 'c' in grp`` (the same
      condition twice); edge-model 'C' defines both 'b' and 'c', so the check
      now covers both property names.
    - joined a statement that had been split across physical chunks.
    """
    tmp_dir = tempfile.mkdtemp()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net = NetworkBuilder('test')
    net.add_nodes(N=10, x=range(10), model='A')
    net.add_nodes(N=20, x=range(10, 30), model='B')
    # model A: no extra properties -> stored as an 'nsyns' group
    net.add_edges(source={'model': 'A'}, target={'model': 'B'}, connection_rule=1,
                  edge_model='A')
    # model B: one extra property 'a'
    cm = net.add_edges(source={'model': 'B'}, target={'model': 'B'}, connection_rule=2,
                       edge_model='B')
    cm.add_properties(names='a', rule=5, dtypes=int)
    # model C: two extra properties 'b' and 'c'
    cm = net.add_edges(source={'model': 'B'}, target={'x': 0}, connection_rule=3,
                       edge_model='C')
    cm.add_properties(names='b', rule=0.5, dtypes=float)
    cm.add_properties(names='c', rule=lambda *_: 2, dtypes=int)
    net.build()
    net.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='test_test'
    )

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert(os.path.exists(edges_h5_path))
    with h5py.File(edges_h5_path, 'r') as h5:
        n_edges = 10*20*1 + 20*20*2 + 20*1*3
        assert('/edges/test_test' in h5)
        assert(len(h5['/edges/test_test/target_node_id']) == n_edges)
        assert(h5['/edges/test_test/target_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/source_node_id']) == n_edges)
        assert(h5['/edges/test_test/source_node_id'].attrs['node_population'] == 'test')
        assert(len(h5['/edges/test_test/edge_type_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_id']) == n_edges)
        assert(len(h5['/edges/test_test/edge_group_index']) == n_edges)
        assert(len(np.unique(h5['/edges/test_test/edge_type_id'])) == 3)
        assert(len(np.unique(h5['/edges/test_test/edge_group_id'])) == 3)
        for grp_id, grp in h5['/edges/test_test'].items():
            # skip plain datasets and the optional index tables
            if not isinstance(grp, h5py.Group) or grp_id in ['indicies', 'indices']:
                continue
            # each group holds exactly one of: nsyns (A), 'a' (B), or 'b'+'c' (C)
            assert(int('nsyns' in grp) + int('a' in grp) + int('b' in grp and 'c' in grp) == 1)

    edge_type_csv_path = os.path.join(tmp_dir, edge_types_file)
    assert(os.path.exists(edge_type_csv_path))
    edge_types_df = pd.read_csv(edge_type_csv_path, sep=' ')
    assert(len(edge_types_df) == 3)
    assert('edge_type_id' in edge_types_df.columns)
    assert('edge_model' in edge_types_df.columns)
    barrier()
def test_cross_pop_edges():
    """Edges can target nodes belonging to a different NetworkBuilder."""
    src_pop = NetworkBuilder('V1')
    src_pop.add_nodes(N=10, arg_list=range(10), arg_ctype='e')
    src_pop.build()

    trg_pop = NetworkBuilder('V2')
    trg_pop.add_nodes(N=5, arg_list=range(10, 15), arg_ctype='i')
    # 5 inhibitory sources x 10 excitatory targets, one synapse each
    trg_pop.add_edges(
        source={'arg_ctype': 'i'},
        target=src_pop.nodes(arg_ctype='e'),
        connection_rule=lambda s, t: 1,
        edge_arg='i2e'
    )
    trg_pop.build()

    assert trg_pop.nedges == 50
def test_save_multinetwork_1():
    """save_edges(src_network=...) writes only edges originating in that network.

    Fix from the original: the hdf5 file handle was opened and never closed;
    it is now managed with a context manager.
    """
    net1 = NetworkBuilder('NET1')
    net1.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
    net1.add_edges(source={'ei': 'e'}, target={'ei': 'e'}, connection_rule=5,
                   ctype_1='n1_rec')
    net1.build()

    net2 = NetworkBuilder('NET2')
    net2.add_nodes(N=10, position=[(0.0, 1.0, -1.0)] * 10, cell_type='PV1', ei='i')
    net2.add_edges(connection_rule=10, ctype_1='n2_rec')
    net2.add_edges(source=net1.nodes(), target={'ei': 'i'}, connection_rule=1,
                   ctype_2='n1_n2')
    net2.add_edges(target=net1.nodes(cell_type='Scnna1'), source={'cell_type': 'PV1'},
                   connection_rule=2, ctype_2='n2_n1')
    net2.build()

    net_dir = tempfile.mkdtemp()
    # Restrict the save to NET2 --> NET1 edges only.
    net2.save_edges(edges_file_name='NET2_NET1_edges.h5',
                    edge_types_file_name='NET2_NET1_edge_types.csv',
                    output_dir=net_dir, src_network='NET2')

    n1_n2_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET1')
    with h5py.File(n1_n2_fname + '_edges.h5', 'r') as edges_h5:
        assert(len(edges_h5['/edges/NET2_to_NET1/target_node_id']) == 100*10)
        assert(len(edges_h5['/edges/NET2_to_NET1/0/nsyns']) == 100*10)
        assert(edges_h5['/edges/NET2_to_NET1/0/nsyns'][0] == 2)

    edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
    assert(len(edge_types_csv) == 1)
    # only the NET2->NET1 edge-type survives, which has ctype_2 but not ctype_1
    assert('ctype_1' not in edge_types_csv.columns.values)
    assert(edge_types_csv['ctype_2'].iloc[0] == 'n2_n1')
def test_add_edges_custom_params():
    """ConnectionMap.add_properties attaches per-edge values to each edge."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=10, arg_list=range(10), arg_ctype='e')
    net.add_nodes(N=5, arg_list=range(10, 15), arg_ctype='i')
    conn_map = net.add_edges(
        source={'arg_ctype': 'e'},
        target={'arg_ctype': 'i'},
        connection_rule=2
    )
    # constant weight on every edge
    conn_map.add_properties('syn_weight', rule=0.5, dtypes=float)
    # store each edge's endpoints as properties so they can be cross-checked
    conn_map.add_properties(
        ['src_num', 'trg_num'],
        rule=lambda s, t: [s['node_id'], t['node_id']],
        dtypes=[int, int]
    )
    net.build()

    assert net.nedges == 2 * 50
    assert net.edges_built is True
    for edge in net.edges():
        assert edge['syn_weight'] == 0.5
        assert edge['src_num'] == edge.source_node_id
        assert edge['trg_num'] == edge.target_node_id
def test_itr_basic():
    """net.edges() with no filters returns every edge with its nsyns value."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='PV1', ei='i')
    net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5,
                  syn_type='e2i')
    net.add_edges(source={'cell_type': 'PV1'}, target={'cell_type': 'Scnna1'},
                  connection_rule=5, syn_type='i2e')
    net.build()

    all_edges = net.edges()
    # two all-to-all projections between the 100-cell populations
    assert len(all_edges) == 100 * 100 * 2
    assert all_edges[0]['nsyns'] == 5
def build_source_network(target_net):
    """Build a one-node virtual input network randomly connected onto target_net.

    Fix from the original: the final save() was called on an undefined name
    ``net`` (a NameError at runtime); it now saves ``inputNetwork``.

    Parameters
    ----------
    target_net : NetworkBuilder
        network containing 'LIF_exc' and 'LIF_inh' populations to drive

    Returns
    -------
    NetworkBuilder
        the built and saved input network
    """
    input_network_model = {
        'external': {
            'N': 1,
            'ei': 'e',
            'pop_name': 'input_network',
            'model_type': 'virtual'
        }
    }
    inputNetwork = NetworkBuilder("external")
    inputNetwork.add_nodes(**input_network_model['external'])
    # Random connections (p=0.1) onto the excitatory population.
    inputNetwork.add_edges(target=target_net.nodes(pop_name='LIF_exc'),
                           connection_rule=random_connections,
                           connection_params={'p': 0.1},
                           syn_weight=400,
                           delay=D,
                           dynamics_params='ExcToExc.json',
                           model_template='static_synapse')
    # Same rule onto the inhibitory population.
    inputNetwork.add_edges(target=target_net.nodes(pop_name='LIF_inh'),
                           connection_rule=random_connections,
                           connection_params={'p': 0.1},
                           syn_weight=400,
                           delay=D,
                           dynamics_params='ExcToExc.json',
                           model_template='static_synapse')
    inputNetwork.build()
    inputNetwork.save(output_dir='network')  # was: net.save(...) -> NameError
    return inputNetwork
def test_edge_models():
    """Each add_edges() call makes its own edge-type, all sharing one nsyns group."""
    tmp_dir = tempfile.mkdtemp()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net = NetworkBuilder('test')
    net.add_nodes(N=100, x=range(100), model='A')
    net.add_nodes(N=100, x=range(100, 200), model='B')
    net.add_edges(source={'model': 'A'}, target={'model': 'B'}, connection_rule=1,
                  model='A')
    net.add_edges(source={'model': 'A'}, target={'x': 0}, connection_rule=2,
                  model='B')
    net.add_edges(source={'model': 'A'}, target={'x': [1, 2, 3]}, connection_rule=3,
                  model='C')
    net.add_edges(source={'model': 'A', 'x': 0}, target={'model': 'B', 'x': 100},
                  connection_rule=4, model='D')
    net.build()
    net.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='test_test'
    )

    h5_path = os.path.join(tmp_dir, edges_file)
    assert os.path.exists(h5_path)
    with h5py.File(h5_path, 'r') as h5:
        # rows per rule: 100x100 + 100x1 + 100x3 + 1x1
        expected_rows = 100*100 + 100*1 + 100*3 + 1
        assert '/edges/test_test' in h5
        pop = h5['/edges/test_test']
        assert len(pop['target_node_id']) == expected_rows
        assert pop['target_node_id'].attrs['node_population'] == 'test'
        assert len(pop['source_node_id']) == expected_rows
        assert pop['source_node_id'].attrs['node_population'] == 'test'
        assert len(pop['edge_type_id']) == expected_rows
        assert len(pop['edge_group_id']) == expected_rows
        assert len(pop['edge_group_index']) == expected_rows
        # four edge-types but, with no custom properties, a single edge-group
        assert len(np.unique(pop['edge_type_id'])) == 4
        assert len(np.unique(pop['edge_group_id'])) == 1
        group_name = str(pop['edge_group_id'][0])
        assert len(pop[group_name]['nsyns']) == expected_rows

    csv_path = os.path.join(tmp_dir, edge_types_file)
    assert os.path.exists(csv_path)
    types_df = pd.read_csv(csv_path, sep=' ')
    assert len(types_df) == 4
    assert 'edge_type_id' in types_df.columns
    assert 'model' in types_df.columns
    barrier()
def test_itr_advanced_search():
    """Edge iteration filtered by node queries, id lists, and edge properties.

    Fix from the original: ``print len(edges) == 100 * 100 * 2`` was a
    Python-2 print statement (a SyntaxError under Python 3, and even under
    Python 2 it only printed a boolean instead of checking it); it is now a
    real assertion.
    """
    net = NetworkBuilder('NET1')
    net.add_nodes(N=1, cell_type='Scnna1', ei='e')
    net.add_nodes(N=50, cell_type='PV1', ei='i')
    net.add_nodes(N=100, cell_type='PV2', ei='i')
    net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5,
                  syn_type='e2i', nm='A')
    net.add_edges(source={'cell_type': 'PV1'}, target={'cell_type': 'PV2'},
                  connection_rule=5, syn_type='i2i', nm='B')
    net.add_edges(source={'cell_type': 'PV2'}, target={'ei': 'i'},
                  connection_rule=5, syn_type='i2i', nm='C')
    net.build()

    # nothing targets the single excitatory cell
    edges = net.edges(target_nodes=net.nodes(cell_type='Scnna1'))
    assert (len(edges) == 0)

    edges = net.edges(source_nodes={'ei': 'e'}, target_nodes={'ei': 'i'})
    assert (len(edges) == 50 + 100)

    # source filter may also be given as a list of node ids
    edges = net.edges(source_nodes=[n.node_id for n in net.nodes(ei='e')])
    assert (len(edges) == 50 + 100)

    edges = net.edges(source_nodes={'ei': 'i'})
    assert (len(edges) == 100 * 100 * 2)
    for e in edges:
        assert (e['syn_type'] == 'i2i')

    edges = net.edges(syn_type='i2i')
    assert (len(edges) == 100 * 100 * 2)  # was: print len(edges) == ...
    for e in edges:
        assert (e['nm'] != 'A')

    edges = net.edges(syn_type='i2i', nm='C')
    assert (len(edges) == 100 * 150)
lower_bound = num_per * tid upper_bound = lower_bound + num_per if sid < upper_bound and sid >= lower_bound: #print("connecting cell {} to {}".format(sid+start,tid)) return 1 else: return None #Inhibitory on soma. net.add_edges(source=inh_stim.nodes(), target=net.nodes(), connection_rule=correct_cell, connection_params={'num_per': num_soma_inh , 'start':0}, syn_weight=1, delay=0.1, dynamics_params='INT2PN.json', model_template=syn['INT2PN.json']['level_of_detail'], distance_range=[-2000.0, 2000.0], target_sections=['somatic']) #start += np.sum(inh_bounds_apic) start = 0 # Create connections between Exc --> Pyr cells #Excitatory on basal dendrites. net.add_edges(source=exc_stim.nodes(), target=net.nodes(), connection_rule=correct_cell, connection_params={'num_per': num_dend_exc, 'start':start}, syn_weight=1,
# Get morphology and soma center for the target cell swc_reader = morphologies[trg['model_name']] target_coords = [trg['x'], trg['y'], trg['z']] sec_ids, sec_xs = swc_reader.choose_sections(sections, dist_range) # randomly choose sec_ids coords = swc_reader.get_coord(sec_ids, sec_xs, soma_center=target_coords) # get coords of sec_ids dist = swc_reader.get_dist(sec_ids) swctype = swc_reader.get_type(sec_ids) return sec_ids, sec_xs, coords[0][0], coords[0][1], coords[0][2], dist[0], swctype[0] # Feedfoward excitatory virtual cells exc_net = NetworkBuilder('excvirt') exc_net.add_nodes(N=10, model_type='virtual', ei='e') cm = exc_net.add_edges(target=cortex.nodes(), source=exc_net.nodes(ei='e'), connection_rule=lambda *_: np.random.randint(4, 12), dynamics_params='AMPA_ExcToExc.json', model_template='Exp2Syn', delay=2.0) cm.add_properties('syn_weight', rule=3.4e-4, dtypes=np.float) cm.add_properties(['sec_id', 'sec_x', 'pos_x', 'pos_y', 'pos_z', 'dist', 'type'], rule=build_edges, dtypes=[np.int32, np.float, np.float, np.float, np.float, np.float, np.uint8]) exc_net.build() exc_net.save(output_dir='network') if not os.path.exists('inputs/exc_spike_trains.h5'): # Build spike-trains for excitatory virtual cells if not os.path.exists('inputs'): os.mkdir('inputs') psg = PoissonSpikesGenerator(range(10), 10.0, tstop=3000.0) psg.to_hdf5('inputs/exc_spike_trains.h5')
class SimulationBuilder: """Class used to build our BMTK simulation. Attributes ---------- params : dict contains parameters for the network seed : int base random seed for the simulation syn : dict contains synaptic templates n_dend_exc : int number of excitatory input cells on the basal dendrites n_apic_exc : int number of excitatory input cells on the apical dendrites n_dend_inh : int number of inhibitory (SOM+) input cells on the basal dendrites more than 50 um from the soma. n_apic_inh : int number of inhibitory (SOM+) input cells on the apical dendrites n_prox_dend_inh : int number of inhibitory (PV+) input cells on the basal dendrites less than 50 um from the soma n_soma_inh : int number of inhibitory (PV+) input cells on the soma clust_per_group : int number of clusters per functional group net : NetworkBuilder the BMTK network for the biophysical cell exc_stim : NetworkBuilder the BMTK network for excitatory inputs prox_inh_stim : NetworkBuilder the BMTK network for perisomatic inhibition dist_inh_stim : NetworkBuilder the BMTK network for dendritic inhibition dend_groups : list all excitatory functional groups on the basal dendrites apic_groups : list all excitatory functional groups on the apical dendrites Methods ------- build() builds the network save_groups() saves the functional groups to a csv _set_prefixed_directory(base_dir_name : str) sets up the correct biophy_components structure based on the cell prefix in params for the given directory base _build_exc() creates excitatory input nodes and edges _build_exc_nodes(segs : pandas.DataFrame, base_name : str, n_cells : int, start=0 : int) builds excitatory nodes _build_exc_edges(group_list : list) builds excitatory edges _save_nets() builds and saves the BMTK NetworkBuilders _build_inh() creates inhibitory input nodes and edges _make_rasters() creates the inhibitory and excitatory input rasters _gen_exc_spikes(fname : str) generates and saves the excitatory spike rasters _gen_inh_spikes(n_cells : 
int, mean_fr : float, std_fr : float, key : str, fname : str) creates inhibitory spike rasters, using a noise trace based on averaging excitation and shifting it _modify_jsons() modifies the various json files however is needed after they are built _modify_sim_config() modifies the simulation_config.json however is needed _update_cellvar_record_locs(sim_config : dict) modifies the location of cellvar recordings in the given JSON simulation_config Static Methods -------------- _get_directory_prefix(directory : str) reads the prefix.txt fil in directory and returns the contents _connector_func(sources : list, targets : list, cells : list) sets the number of synapses from the given cells _set_location(source : dict, target : dict, cells : list, start_id : int) sets the location of the given edge _norm_connect(source : dict, target : dict, m : float, s : float, low : int, high : int) used to normally distribute connection counts _gen_group_spikes(writer : SonataWriter, group : FunctionalGroup, seconds : float, start_time : float, dist : func) creates and saves a functional group's spike raster _norm_rvs(mean : float, std : float) generates a random float from a normal distribution with a near zero minimum """ def __init__(self, params_file, seed=123): """Initializes the simulation builder, setting up attributes but not actually building the BMTK network. Parameters ---------- params_file : str path to the JSON file with network parameters seed : int base random seed for the simulation """ #Loads the JSON file with information about the network. with open(params_file) as f: self.params = json.load(f) self.seed = seed #Loads synapse templates. 
synapses.load() self.syn = synapses.syn_params_dicts() avg_exc_div = np.mean(list(self.params["divergence"]["exc"].values())) self.n_dend_exc = int( (self.params["lengths"]["basal_dist"] * self.params["syn_density"]["exc"]) / avg_exc_div) self.n_apic_exc = int( (self.params["lengths"]["apic"] * self.params["syn_density"]["exc"]) / avg_exc_div) self.n_dend_inh = int((self.params["lengths"]["basal_dist"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["basal_inh"]["m"]) self.n_apic_inh = int((self.params["lengths"]["apic"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["apic_inh"]["m"]) self.n_prox_dend_inh = int((self.params["lengths"]["basal_prox"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["peri_inh"]["m"]) self.n_soma_inh = int(self.params["n_soma_syns"] / self.params["divergence"]["peri_inh"]["m"]) self.clust_per_group = int( (self.params["groups"]["cells_per_group"] * avg_exc_div) // (self.params["syn_density"]["exc"] * 10)) if self.params["file_current_clamp"]["input_file"] == "None": self.file_current_clamp = None else: self.file_current_clamp = self.params["file_current_clamp"] def build(self): """Builds the nodes and edges for the network. """ np.random.seed(self.seed) self._set_prefixed_directory("mechanisms") self._set_prefixed_directory("templates") self.net = NetworkBuilder("biophysical") self.net.add_nodes( N=1, pop_name='Pyrc', potental='exc', model_type='biophysical', dynamics_params=self.params["cell"]["dynamic_params"], model_template=self.params["cell"]["model_template"], model_processing=self.params["cell"]["model_processing"], morphology=self.params["cell"]["morphology"]) self._build_exc() self._build_inh() self._save_nets() self._make_rasters() #Final build step. 
build_env_bionet( base_dir='./', network_dir='./network', dt=self.params["dt"], tstop=self.params["time"]["stop"] * 1000.0, report_vars=self.params["record_cellvars"]["vars"], dL=self.params["dL"], #target length (um) of segments spikes_threshold=-10, file_current_clamp=self.file_current_clamp, spikes_inputs=[('exc_stim', 'exc_stim_spikes.h5'), ('prox_inh_stim', 'prox_inh_stim_spikes.h5'), ('dist_inh_stim', 'dist_inh_stim_spikes.h5')], components_dir='../biophys_components', compile_mechanisms=True) self._modify_jsons() def save_groups(self): """saves the apic and dend groups into a csv. one row for each node containgin the id of the functional group it is in. """ all_groups = self.dend_groups + self.apic_groups node_ids = [] func_groups = [] for func_id, group in enumerate(all_groups): for i in range(group.start_id, group.start_id + group.n_cells): node_ids.append(i) func_groups.append(func_id) df = pd.DataFrame() df["Node ID"] = node_ids df["Functional Group"] = func_groups df.to_csv("FunctionalGroups.csv", index=False) def _set_prefixed_directory(self, base_dir_name): """Fixes the biophy_components directory. There should be only one directory named <base_dir_name> and it should be the one with the prefix.txt file in it that has the same prefix as params. 
Parameters ---------- base_dir_name : str base name of the set of directories to be fixed """ #import pdb; pdb.set_trace() components_path = "../biophys_components/" biophys_subdirs = [ f.name for f in os.scandir(components_path) if f.is_dir() ] for dir_name in biophys_subdirs: if base_dir_name == dir_name: prefix = SimulationBuilder._get_directory_prefix( components_path + dir_name) if prefix == self.params["cell"]["prefix"]: return else: os.rename(components_path + base_dir_name, components_path + prefix + base_dir_name) for dir_name in biophys_subdirs: if base_dir_name in dir_name and self.params["cell"][ "prefix"] in dir_name: os.rename(components_path + dir_name, components_path + base_dir_name) def _get_directory_prefix(directory): """Returns the contents of the prefix.txt file in the given directory. Parameters ---------- directory : str directory to look in Returns ------- str contents of prefix.txt """ with open(directory + "/prefix.txt", 'r') as f: return f.read() def _build_exc(self): """Builds the excitatory input cells and their synapses. """ # External excitatory inputs self.exc_stim = NetworkBuilder('exc_stim') #DataFrame of all segments on the cell. segs = pd.read_csv(self.params["cell"]["segments_file"]) dends = segs[(segs["Type"] == "dend") & (segs["Distance"] >= 50)] apics = segs[(segs["Type"] == "apic")] np.random.seed(self.seed + 1) apic_start, self.dend_groups = self._build_exc_nodes( dends, "dend", self.n_dend_exc) np.random.seed(self.seed + 2) _, self.apic_groups = self._build_exc_nodes(apics, "apic", self.n_apic_exc, start=apic_start) np.random.seed(self.seed + 3) self._build_exc_edges(self.dend_groups) np.random.seed(self.seed + 4) self._build_exc_edges(self.apic_groups) #Sets the number of synapses for each input cell. def _connector_func(sources, target, cells): """Used to set the number of synapses from each excitatory input cell in a functional group. Use with "all_to_one" iterator. 
Parameters ---------- sources : list presynaptic nodes (represented as dicts) target : dict postsynaptic node cells : list list of Cells in the FunctionalGroup Returns ------- list list of synapses for each pairing """ return [cell.n_syns for cell in cells] #Sets the location of synapses based on the given cell list. def _set_location(source, target, cells, start_id): """Sets the location of the given synapse. Parameters ---------- source : dict source node information target : dict target node information cells : list Cells in the functional group start_id : int start_id for the functional groups the cells come from Returns ------- int BMTK section id float distance along the section """ #Gets the proper index within the cell list. index = source.node_id - start_id seg = cells[index].get_seg() return seg.bmtk_id, seg.x #Creates the functional groups and adds the virtual cells to the #BMTK NetworkBuilder. def _build_exc_nodes(self, segs, base_name, n_cells, start=0): """Creates the functional groups and adds the virtual cells to the BMTK NetworkBuilder Parameters ---------- segs : pandas.DataFrame all the segments available for the functional groups base_name : str the string that is appended to to make the group names. groups get 0 - n_groups appended to their names. n_cells : int total number of input cells that should be added. start : int, optional starting id to be associated with the functional groups, by default 0 this is used later to associate cells in functional groups with the correct locations and synapses. Returns ------- int what the start parameter should be for the next call to _build_exc_nodes list list of functional groups that were created """ start_id = start n_groups = n_cells // self.params["groups"]["cells_per_group"] n_extra = n_cells % self.params["groups"][ "cells_per_group"] #number of extra cells that don't evenly fit into groups group_list = [] for i in range(n_groups): name = base_name + str(i) #Spreads out the extra cells. 
N = self.params["groups"]["cells_per_group"] if i < n_extra: N += 1 self.exc_stim.add_nodes(N=N, pop_name=name, potential="exc", model_type='virtual') new_group = FunctionalGroup( segs, segs.sample().iloc[0], N, self.clust_per_group, name, start_id, partial(make_seg_sphere, radius=self.params["groups"]["group_radius"]), partial(make_seg_sphere, radius=self.params["groups"]["cluster_radius"])) group_list.append(new_group) start_id += N return start_id, group_list def _build_exc_edges(self, group_list): """Creates the connections between each cell in the list of groups and the biophysical cell. Parameters ---------- group_list : list list of functional groups """ for i in range(len(group_list)): group = group_list[i] #Creates the edges from each excitatory input cells in the group. conn = self.net.add_edges( source=self.exc_stim.nodes(pop_name=group.name), target=self.net.nodes(), iterator="all_to_one", connection_rule=SimulationBuilder._connector_func, connection_params={'cells': group.cells}, syn_weight=1, delay=0.1, dynamics_params='PN2PN.json', model_template=self.syn['PN2PN.json']['level_of_detail'], ) #Sets the postsynaptic locations of the connections. 
conn.add_properties(['sec_id', "sec_x"], rule=SimulationBuilder._set_location, rule_params={ 'cells': group.cells, 'start_id': group.start_id }, dtypes=[np.int, np.float]) def _save_nets(self): """builds and saves the BMTK NetworkBuilders """ # Build and save our networks np.random.seed(self.seed + 12) self.net.build() self.net.save_nodes(output_dir='network') np.random.seed(self.seed + 16) self.net.save_edges(output_dir='network') np.random.seed(self.seed + 13) self.exc_stim.build() self.exc_stim.save_nodes(output_dir='network') np.random.seed(self.seed + 14) self.prox_inh_stim.build() self.prox_inh_stim.save_nodes(output_dir='network') np.random.seed(self.seed + 15) self.dist_inh_stim.build() self.dist_inh_stim.save_nodes(output_dir='network') def _build_inh(self): """Creates inhibitory input nodes and their connections onto the biophysical cell """ #####################Perisomatic Inhibition############################## self.prox_inh_stim = NetworkBuilder('prox_inh_stim') #Nodes that connect to soma. self.prox_inh_stim.add_nodes(N=self.n_soma_inh, pop_name='on_soma', potential='exc', model_type='virtual') #Nodes that connect to proximal dendrites. self.prox_inh_stim.add_nodes(N=self.n_prox_dend_inh, pop_name='on_dend', potential='exc', model_type='virtual') div_params = self.params["divergence"]["peri_inh"] #On soma. 
np.random.seed(self.seed + 5) self.net.add_edges( source=self.prox_inh_stim.nodes(pop_name='on_soma'), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='PV2PN.json', model_template=self.syn['PV2PN.json']['level_of_detail'], distance_range=[-2000, 2000.0], target_sections=['somatic']) #On dendrites within 50 um np.random.seed(self.seed + 6) self.net.add_edges( source=self.prox_inh_stim.nodes(pop_name='on_dend'), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='PV2PN.json', model_template=self.syn['PV2PN.json']['level_of_detail'], distance_range=[0, 50.0], target_sections=['dend']) ####################################################################################### #############################Dendritic Inhibition###################################### self.dist_inh_stim = NetworkBuilder('dist_inh_stim') self.dist_inh_stim.add_nodes(N=self.n_dend_inh, pop_name='dend', potential='exc', model_type='virtual') self.dist_inh_stim.add_nodes(N=self.n_apic_inh, pop_name='apic', potential='exc', model_type='virtual') div_params = self.params["divergence"]["basal_inh"] #Basal edges. np.random.seed(self.seed + 7) self.net.add_edges( source=self.dist_inh_stim.nodes(pop_name="dend"), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='SOM2PN.json', model_template=self.syn['SOM2PN.json']['level_of_detail'], distance_range=[50, 2000.0], target_sections=['dend']) div_params = self.params["divergence"]["apic_inh"] #Apic edges. 
np.random.seed(self.seed + 8) self.net.add_edges( source=self.dist_inh_stim.nodes(pop_name="apic"), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='SOM2PN.json', model_template=self.syn['SOM2PN.json']['level_of_detail'], distance_range=[50, 2000.0], target_sections=['apic']) def _norm_connect(source, target, m, s, low, high): """Returns a random number of synapses based on the given distribution. Parameters ---------- source : dict source node target : dict target node m : float mean number of connections s : float standard deviation of number of connections low : int minimum number of connections high : int maximum number of connections Returns ------- int number of connections """ return int(min(max(np.random.normal(m, s), low), high)) def _make_rasters(self): """Generates excitatory and inhibitory input rasters """ np.random.seed(self.seed + 9) self._gen_exc_spikes('exc_stim_spikes.h5') inh_frs = self.params["inh_frs"] #Makes perisomatic inhibitory raster. np.random.seed(self.seed + 10) self._gen_inh_spikes(self.n_soma_inh + self.n_prox_dend_inh, inh_frs["proximal"]["m"], inh_frs["proximal"]["s"], inh_frs["proximal"]["rhythmicity"], "prox_inh_stim", 'prox_inh_stim_spikes.h5') #Makes dendritic inhibitory raster. np.random.seed(self.seed + 11) self._gen_inh_spikes(self.n_apic_inh + self.n_dend_inh, inh_frs["distal"]["m"], inh_frs["distal"]["s"], inh_frs["distal"]["rhythmicity"], "dist_inh_stim", 'dist_inh_stim_spikes.h5') #Generates the spike raster for a given group. #The group has the same noise. 
def _gen_group_spikes(writer, group, seconds, start_time, dist):
    """Generates and writes to a h5 file the given functional group's spike trains.

    NOTE(review): defined without ``self`` and called via the class
    (``SimulationBuilder._gen_group_spikes``), so it acts as a static method.

    Parameters
    ----------
    writer : SonataWriter
        how the spike trains are saved
    group : FunctionalGroup
        the functional group that the spike trains are being made for
    seconds : float
        length of the spike trains in seconds
    start_time : float
        what time (ms) the spike trains should start at
    dist : func
        function for random distribution used for an individual cell's firing rate
    """
    # Noise trace shared by every cell in the functional group; the same
    # trace is tiled once per cell below.
    z = make_noise(num_samples=(int(seconds * 1000)) - 1, num_traces=1)
    make_save_spikes(writer,
                     True,
                     dist(size=group.n_cells),
                     numUnits=group.n_cells,
                     rateProf=np.tile(z[0, :], (group.n_cells, 1)),
                     start_id=group.start_id,
                     start_time=start_time)

#Creates the excitatory input raster from the functional groups.
def _gen_exc_spikes(self, fname):
    """Generates the excitatory input raster for all of the functional groups.

    Parameters
    ----------
    fname : str
        name of the file to save the rasters in (.h5)
    """
    # Heavy-tailed (levy-stable) distribution used for generating
    # excitatory firing rates; size=1 is the default and is overridden
    # per-group inside _gen_group_spikes.
    levy_dist = partial(st.levy_stable.rvs, alpha=1.37, beta=-1.00,
                        loc=0.92, scale=0.44, size=1)

    length = self.params["time"]["stop"] - self.params["time"]["start"]
    buffer = self.params["time"]["start"]

    # FIX: np.float / np.int were deprecated aliases of the builtins and
    # were removed in NumPy 1.24 — use the builtins directly (identical
    # dtypes, works on both old and new NumPy).
    writer = SonataWriter(fname, ["spikes", "exc_stim"],
                          ["timestamps", "node_ids"], [float, int])

    for group in (self.dend_groups + self.apic_groups):
        SimulationBuilder._gen_group_spikes(writer, group, length,
                                            buffer * 1000, levy_dist)

#Blocks off the bottom of a normal distribution.
def _norm_rvs(mean, std):
    """Generates a random float from a normal distribution with a near zero minimum.

    NOTE(review): ``st.norm.rvs(..., size=1)`` returns a length-1 array, so
    the result of ``max`` may be a 1-element array rather than a scalar —
    callers appear to tolerate this; confirm before changing.

    Parameters
    ----------
    mean : float
        mean of the distribution
    std : float
        standard deviation of the distribution

    Returns
    -------
    float
        random float, floored at 0.001
    """
    return max(st.norm.rvs(loc=mean, scale=std, size=1), 0.001)

#Creates a spike raster with each cell having the same noise coming from a shifted average of excitation.
def _gen_inh_spikes(self, n_cells, mean_fr, std_fr, rhythmic_dict, key, fname):
    """Generates a spike raster with each train having the noise trace from
    averaging excitation. Distributes firing rates normally.

    Parameters
    ----------
    n_cells : int
        number of spike trains
    mean_fr : float
        mean firing rate
    std_fr : float
        standard deviation of the firing rate
    rhythmic_dict : dict
        dictionary with keys f - frequency, mod - depth of modulation
    key : str
        name of the second group in the h5 file
    fname : str
        name of file to save the raster to
    """
    # Truncated normal over [0, 100] Hz expressed in standard-score units.
    a, b = (0 - mean_fr) / std_fr, (100 - mean_fr) / std_fr
    d = partial(st.truncnorm.rvs, a=a, b=b, loc=mean_fr, scale=std_fr)

    # NOTE: 'f' is compared against the *string* "None" — the value comes
    # from a JSON config, where null frequency is encoded that way.
    if rhythmic_dict['f'] == "None":
        # FIX: open the excitatory raster in a context manager so the file
        # handle is closed (it previously leaked). The noise trace derived
        # from it is computed before the file closes.
        with h5py.File("exc_stim_spikes.h5", "r") as f:
            ts = f['spikes']["exc_stim"]['timestamps']
            nid = f['spikes']["exc_stim"]['node_ids']
            # Creates a noise trace based on the excitatory spike raster.
            z = shift_exc_noise(ts, nid, self.params["time"]["stop"],
                                time_shift=self.params["inh_shift"])
        z = np.tile(z, (n_cells, 1))

        # FIX: np.float / np.int were removed in NumPy 1.24; the builtins
        # are the exact aliases they referred to.
        writer = SonataWriter(fname, ["spikes", key],
                              ["timestamps", "node_ids"], [float, int])
        make_save_spikes(writer, False, d(size=n_cells),
                         numUnits=n_cells, rateProf=z)
    else:
        # Build an array of sinusoidally modulated rate profiles, one row
        # per cell, with depth of modulation taken from rhythmic_dict.
        # make_save_spikes should be written so that the firing rates are
        # generated outside instead of inside the function.
        frs = d(size=n_cells)
        t = np.arange(0, self.params["time"]["stop"], 0.001)
        z = np.zeros((n_cells, t.shape[0]))
        P = 0  # phase offset (fixed at zero)
        for i in np.arange(0, n_cells):
            offset = frs[i]
            # Amplitude chosen so that mod = A / (A + offset).
            A = offset / ((1 / rhythmic_dict['mod']) - 1)
            z[i, :] = A * np.sin((2 * np.pi * rhythmic_dict['f'] * t) + P) + offset

        writer = SonataWriter(fname, ["spikes", key],
                              ["timestamps", "node_ids"], [float, int])
        make_save_spikes(writer, False, np.ones((n_cells, 1)),
                         numUnits=n_cells, rateProf=z)

def _modify_jsons(self):
    """Modifies the various json files however is needed after they are built."""
    self._modify_sim_config()

def _modify_sim_config(self):
    """Modifies the simulation_config.json however is needed."""
    with open("simulation_config.json", "r") as jsonFile:
        sim_config = json.load(jsonFile)

    self._update_cellvar_record_locs(sim_config)

    with open("simulation_config.json", "w") as jsonFile:
        json.dump(sim_config, jsonFile, indent=2)

def _update_cellvar_record_locs(self, sim_config):
    """Modifies the location of cellvar recordings in the given JSON simulation_config.

    Parameters
    ----------
    sim_config : dict
        simulation_config to modify
    """
    reports = sim_config["reports"]
    # Only membrane reports carry a recordable section location.
    cellvar_reports = [
        report for report in reports.values()
        if report["module"] == "membrane_report"
    ]
    # Pairs configured locations with reports in order; zip stops at the
    # shorter of the two sequences.
    for loc, report in zip(self.params["record_cellvars"]["locs"],
                           cellvar_reports):
        report["sections"] = loc
# rule_params={'min_delay':syn[dynamics_file]['delay']}, dtypes=[np.float]) # Create connections between Pyr --> Bask cells dynamics_file = 'PN2INT.json' # # add_delays.append(True) # min_delays.append(syn[dynamics_file]['delay']) # # conn = net.add_edges(source={'pop_name': ['PyrA', 'PyrC']}, target={'pop_name': 'Bask'}, iterator='one_to_one', connection_rule=dist_conn_perc, connection_params={'min_dist': 0.0, 'max_dist': 50.0, 'min_syns': 1, 'max_syns': 2, 'A': 0.3217, 'B': 0.005002}, syn_weight=1, delay=0.1, dynamics_params=dynamics_file, model_template=syn[dynamics_file]['level_of_detail'], distance_range=[0.0, 300.0], target_sections=['somatic'], sec_id=0, sec_x=0.5) # if add_properties: # if do_pos: # conn.add_properties(names=['delay', 'sec_id', 'sec_x'], # rule=syn_dist_delay_section, # rule_params={'min_delay':syn[dynamics_file]['delay'], # 'sec_id':0, 'sec_x':0.9}, # dtypes=[np.float, np.int32, np.float]) # else:
# NOTE(review): tail of a connection-rule function whose head falls outside
# this chunk — indentation below is reconstructed.
    else:
        tmp_nsyn = 0
else:
    return 0
return tmp_nsyn

# Create connections between Pyr --> Bask cells
netff.add_edges(source={'pop_name': ['Cell_A', 'Cell_C']},
                target={'pop_name': 'Cell_Bask'},
                connection_rule=dist_conn_perc,
                connection_params={
                    'prob': 0.12,
                    'min_dist': 0.0,
                    'max_dist': 300.0,
                    'min_syns': 1,
                    'max_syns': 2
                },
                syn_weight=5.0e-03,
                dynamics_params='AMPA_ExcToInh.json',
                model_template='Exp2Syn',
                distance_range=[0.0, 300.0],
                target_sections=['somatic'],
                delay=2.0)

# Create connections between Bask --> Pyr cells
print("?????")  # NOTE(review): leftover debug print — consider removing
netff.add_edges(source={'pop_name': 'Cell_Bask'},
                target={'pop_name': ['Cell_A', 'Cell_C']},
                connection_rule=dist_conn_perc,
                connection_params={
                    'prob': 0.34,
pair will connect the two with a probability prob (excludes self-connections)"""
# Self-connections are disallowed.
if src.node_id == trg.node_id:
    return 0
# Bernoulli(prob) gate; on success draw a synapse count in [min_syns, max_syns).
return 0 if np.random.uniform() > prob else np.random.randint(
    min_syns, max_syns)

# Connections onto glif components, use the connection map to save section and position of every synapse
# exc --> exc connections
internal.add_edges(source={'ei': 'e'},
                   target={
                       'ei': 'e',
                       'orig_model': 'glif'
                   },
                   connection_rule=n_connections,
                   connection_params={'prob': 0.2},
                   dynamics_params='e2e.json',
                   model_template='static_synapse',
                   syn_weight=2.5,
                   delay=2.0)

# exc --> inh connections
# NOTE(review): no connection_params here, so n_connections falls back to
# its default prob — confirm that default is intended.
internal.add_edges(source={'ei': 'e'},
                   target={
                       'ei': 'i',
                       'orig_model': 'glif'
                   },
                   connection_rule=n_connections,
                   dynamics_params='e2i.json',
                   model_template='static_synapse',
#print('PyrA, PyrC connection=',tmp_nsyn) return tmp_nsyn # Create connections between Pyr --> Bask cells netff.add_edges( source={'pop_name': ['L5PNA', 'L5PNC']}, target={'pop_name': ['Cell_Bask']}, connection_rule=dist_conn_perc, connection_params={ 'prob': 0.12, 'min_dist': 0.0, 'max_dist': 300.0, 'min_syns': 1, 'max_syns': 2 }, syn_weight=20, #weight_function = 'Lognormal', #weight_sigma=2, dynamics_params='AMPA_ExcToInh.json', model_template='exp2syn', distance_range=[0.0, 300.0], target_sections=['basal', 'apical'], delay=2.0) netff.add_edges( source={'pop_name': ['L5PNA', 'L5PNC']}, target={'pop_name': ['L5PNA', 'L5PNC']}, connection_rule=dist_conn_perc, connection_params={
def connection(source, target, id):
    """Connection rule: exactly one synapse onto the target whose node_id
    matches ``id``, none elsewhere.

    NOTE: the parameter name ``id`` shadows the builtin, but it is part of
    the public interface (passed via connection_params={"id": i}) and so
    is kept.
    """
    if target.node_id == id:
        return 1
    else:
        return 0

# One edge set per target cell so each can get its own section id (sec_id)
# and position (sec_x) from the precomputed ids/xs arrays.
for i in range(N):
    #import pdb; pdb.set_trace()
    net.add_edges(source=exc_stim.nodes(),
                  target=net.nodes(),
                  connection_rule=connection,
                  connection_params={"id": i},
                  syn_weight=1,
                  sec_id=ids[dendrites][i],
                  delay=0.1,
                  sec_x=xs[dendrites][i],
                  dynamics_params='PN2PN.json',
                  model_template=syn['PN2PN.json']['level_of_detail'])

# # Create connections between Exc --> Pyr cells
# net.add_edges(source=exc_stim.nodes(), target=net.nodes(),
#               connection_rule=1,
#               syn_weight=1,
#               target_sections=['apic', 'dend'],
#               delay=0.1,
#               #distance_range=[149.0, 151.0], #0.348->0.31, 0.459->0.401
#               distance_range=[50, 2000],#(2013, Pouille et al.)
#               #distance_range=[1250,2000],
#               #distance_range=[-500, 500],
# What we're doing here is looping through the different connection
# probabilities based on distance apart instead of re-writing this
# large block of code several times
for p2p_prop in p2p_props:
    #dynamics_file = 'PN2PN.json'
    dynamics_file = 'PN2PN_feng_min.json'
    # One edge set per distance bin; syn_percent gates each pair with the
    # bin's probability, and distance_range limits it to the bin's extent.
    conn = net.add_edges(
        source={'pop_name': ['PyrA', 'PyrC']},
        target={'pop_name': ['PyrA', 'PyrC']},
        iterator='one_to_one',
        connection_rule=syn_percent,
        connection_params={'p': p2p_prop['syn_prob']},
        syn_weight=1,
        delay=0.1,
        dynamics_params=dynamics_file,
        model_template=syn[dynamics_file]['level_of_detail'],
        distance_range=[p2p_prop['min_dist'], p2p_prop['max_dist']],
        target_sections=['basal'],
        sec_id=0,
        sec_x=0.9)
    # Per-edge delay and section placement.
    # FIX: np.float was a deprecated alias of the builtin float and was
    # removed in NumPy 1.24; the builtin is the exact type it referred to.
    conn.add_properties(names=['delay', 'sec_id', 'sec_x'],
                        rule=syn_dist_delay_feng_section,
                        rule_params={
                            'sec_id': 0,
                            'sec_x': 0.9
                        },
                        dtypes=[float, np.int32, float])
# Tail of the n_connections rule: no self-connections, then a
# Bernoulli(prob) gate with a synapse count in [min_syns, max_syns).
if src.node_id == trg.node_id:
    return 0
return 0 if np.random.uniform() > prob else np.random.randint(
    min_syns, max_syns)

# Connections onto biophysical components, use the connection map to save section and position of every synapse
# exc --> exc connections
internal.add_edges(source={'ei': 'e'},
                   target={
                       'ei': 'e',
                       'model_type': 'biophysical'
                   },
                   connection_rule=n_connections,
                   connection_params={'prob': 0.2},
                   dynamics_params='AMPA_ExcToExc.json',
                   model_template='Exp2Syn',
                   syn_weight=6.0e-05,
                   delay=2.0,
                   target_sections=['basal', 'apical'],
                   distance_range=[30.0, 150.0])

# exc --> inh connections
# NOTE(review): no connection_params, so n_connections uses its default
# prob — confirm that default is intended.  (Call truncated at chunk edge.)
internal.add_edges(source={'ei': 'e'},
                   target={
                       'ei': 'i',
                       'model_type': 'biophysical'
                   },
                   connection_rule=n_connections,
                   dynamics_params='AMPA_ExcToInh.json',
# Tail of a shock->OLM connection rule whose head falls outside this
# chunk: source ids are offset by 8 and matched one-to-one to target ids.
tid = target.node_id
sid = sid + 8
if sid == tid:
    print("connecting BG {} to olm{}".format(sid, tid))
    tmp_nsyn = 1
else:
    return None
return tmp_nsyn

net.add_edges(source=shock.nodes(),
              target=net.nodes(pop_name='OLM'),
              connection_rule=one_to_all_shock2OLM,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0, 11.0],
              dynamics_params='shock2INT12.json',
              model_template=syn['shock2INT12.json']['level_of_detail'])

net.add_edges(source=shock.nodes(),
              target=net.nodes(pop_name='PV'),
              connection_rule=one_to_all_shock2PV,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0, 11.0],
              dynamics_params='shock2INT12.json',
              model_template=syn['shock2INT12.json']['level_of_detail'])
# Tail of the correct_cell rule: connect when the source id falls inside
# [lower_bound, upper_bound).
if sid < upper_bound and sid >= lower_bound:
    #print("connecting cell {} to {}".format(sid,tid))
    return 1
else:
    return None

#Create connections between Inh --> Pyr cells
net.add_edges(source=inh_stim.nodes(),
              target=net.nodes(),
              connection_rule=correct_cell,
              connection_params={'bounds': inh_bounds},
              syn_weight=5.0e-03,
              weight_function='lognormal',
              weight_sigma=3.0e-03,
              weight_max=20e-03,
              dynamics_params='GABA_InhToExc.json',
              model_template='Exp2Syn',
              distance_range=[0.0, 300.0],
              target_sections=['somatic'],
              delay=2.0)

# Create connections between Exc --> Pyr cells (call truncated at chunk edge)
net.add_edges(source=exc_stim.nodes(),
              target=net.nodes(),
              connection_rule=correct_cell,
              connection_params={'bounds': exc_bounds},
              syn_weight=10.0e-03,
              weight_function='lognormal',
              weight_sigma=3.0e-03,
# Tail of the one_to_one rule: one synapse when source and target ids
# match, otherwise no edge (None).
tid = target.node_id
if sid == tid:
    #print("connecting cell {} to {}".format(sid,tid))
    tmp_nsyn = 1
else:
    return None
return tmp_nsyn

# Create connections between Shock --> Pyr cells
net.add_edges(source=shock.nodes(),
              target=net.nodes(),
              connection_rule=one_to_one,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0, 11.0],
              dynamics_params='shock2PN.json',
              model_template=syn['shock2PN.json']['level_of_detail'])

# Create connections between Tone --> Pyr cells
net.add_edges(source=tone.nodes(),
              target=net.nodes(),
              connection_rule=one_to_one,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0, 11.0],
              dynamics_params='tone2PN.json',
              model_template=syn['tone2PN.json']['level_of_detail'])
def BG_to_PV(source, target):
    """Connection rule: one synapse when the source's id shifted by +1
    equals the target's id, otherwise none.

    Parameters
    ----------
    source, target : node objects exposing ``node_id``

    Returns
    -------
    int
        1 for a connection, 0 otherwise
    """
    sid = source.node_id
    tid = target.node_id
    sid = sid + 1  # source population ids are offset by one relative to PV ids
    if sid == tid:
        print("connecting BG {} to PV{}".format(sid, tid))
        return 1
    else:
        return 0

# PyrC --> PV edges; one_to_all is defined elsewhere in this script.
conn = net.add_edges(source=net.nodes(pop_name='PyrC'),
                     target=net.nodes(pop_name="PV"),
                     connection_rule=one_to_all,
                     syn_weight=1.0,
                     delay=0.1,
                     distance_range=[-10000, 10000],
                     dynamics_params='PN2PV.json',
                     model_template=syn['PN2PV.json']['level_of_detail'])
# FIX: np.float was a deprecated alias of the builtin float and was removed
# in NumPy 1.24; the builtin is the exact type it referred to.
conn.add_properties(['sec_id', 'sec_x'],
                    rule=(1, 0.9),
                    dtypes=[np.int32, float])

# PV --> PyrC edges.
conn = net.add_edges(source=net.nodes(pop_name='PV'),
                     target=net.nodes(pop_name="PyrC"),
                     connection_rule=one_to_all,
                     syn_weight=1.0,
                     delay=0.1,
                     distance_range=[-10000, 10000],
                     dynamics_params='PV2PN.json',
                     model_template=syn['PV2PN.json']['level_of_detail'])
# Two-population (E/I) rate network for dipde; nodes are population-level
# models rather than individual cells.
net = NetworkBuilder('brunel')
net.add_nodes(pop_name='excitatory',
              ei='e',
              model_type='population',
              model_template='dipde:Internal',
              dynamics_params='exc_model.json')
net.add_nodes(pop_name='inhibitory',
              ei='i',
              model_type='population',
              model_template='dipde:Internal',
              dynamics_params='inh_model.json')

# Population-to-population coupling: nsyns scales the weight; no
# connection_rule is needed for population models.
net.add_edges(source={'ei': 'e'}, target={'ei': 'i'},
              syn_weight=0.005,
              nsyns=20,
              delay=0.002,
              dynamics_params='ExcToInh.json')
net.add_edges(source={'ei': 'i'}, target={'ei': 'e'},
              syn_weight=-0.002,
              nsyns=10,
              delay=0.002,
              dynamics_params='InhToExc.json')

net.build()
# Write the SONATA node/edge files into ./network.
net.save_nodes(nodes_file_name='brunel_nodes.h5',
               node_types_file_name='brunel_node_types.csv',
               output_dir='network')
net.save_edges(edges_file_name='brunel_edges.h5',
               edge_types_file_name='brunel_edge_types.csv',
               output_dir='network')

input_net = NetworkBuilder('inputs')
# Tail of a one-to-one connection rule whose def line falls outside this
# chunk: one synapse when ids match, otherwise no edge (None).
sid = source.node_id
tid = target.node_id
if sid == tid:
    #print("connecting cell {} to {}".format(sid,tid))
    tmp_nsyn = 1
else:
    return None
return tmp_nsyn

# Create connections between Shock --> Pyr cells
# (integer connection_rule = fixed synapse count per pair)
net.add_edges(source=shock.nodes(),
              target=net.nodes(pop_name='PyrA'),
              connection_rule=3,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0,11.0],
              dynamics_params='shock2PN.json',
              model_template=syn['shock2PN.json']['level_of_detail'])

net.add_edges(source=shock.nodes(),
              target=net.nodes(pop_name='PyrC'),
              connection_rule=2,
              syn_weight=1.0,
              target_sections=['somatic'],
              delay=0.1,
              distance_range=[10.0,11.0],
              dynamics_params='shock2PN.json',
              model_template=syn['shock2PN.json']['level_of_detail'])

# Shock --> interneuron edges (call truncated at chunk boundary).
net.add_edges(source=shock.nodes(),
              target=net.nodes(pop_name='Int'),
              connection_rule=2,