def test_add_edges():
    """Connection rules given as callables or as constants both build edges with their params."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=10, cell_type='Scnna1', ei='e')
    net.add_nodes(N=10, cell_type='PV1', ei='i')
    net.add_nodes(N=10, cell_type='PV2', ei='i')

    # inhibitory -> excitatory: 20 sources x 10 targets x 1 syn = 200 edges
    net.add_edges(
        source={'ei': 'i'},
        target={'ei': 'e'},
        connection_rule=lambda s, t: 1,
        edge_arg='i2e'
    )
    # Scnna1 -> PV1: 10 x 10 x 2 syns = 200 edges
    net.add_edges(
        source=net.nodes(cell_type='Scnna1'),
        target=net.nodes(cell_type='PV1'),
        connection_rule=2,
        edge_arg='e2i'
    )
    net.build()

    assert net.nedges == 200 + 200
    assert net.edges_built is True
    for edge in net.edges(target_nodes=net.nodes(cell_type='Scnna1')):
        assert edge['edge_arg'] == 'i2e'
        assert edge['nsyns'] == 1
    for edge in net.edges(target_nodes=net.nodes(cell_type='PV1')):
        assert edge['edge_arg'] == 'e2i'
        assert edge['nsyns'] == 2
def test_node_sets():
    """Node pools filter by shared props or per-node params and expose a filter string."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, prop_n='prop1', pool1='p1', sp='sp', param1=range(100))
    net.add_nodes(N=100, prop_n='prop2', pool2='p2', sp='sp', param1=range(100))
    net.add_nodes(N=100, prop_n='prop3', pool3='p3', sp='sp', param1=range(100))

    pool_prop1 = net.nodes(prop_n='prop1')
    assert len(pool_prop1) == 100
    assert pool_prop1.filter_str == "prop_n=='prop1'"
    for node in pool_prop1:
        assert 'pool1' in node and node['prop_n'] == 'prop1'

    pool_sp = net.nodes(sp='sp')
    assert pool_sp.filter_str == "sp=='sp'"
    assert len(pool_sp) == 300
    for node in pool_sp:
        assert node['sp'] == 'sp'

    # param1 == 10 matches exactly one node in each of the three populations
    pool_param = net.nodes(param1=10)
    assert len(pool_param) == 3
    assert pool_param.filter_str == "param1=='10'"
    matched = list(pool_param)
    assert matched[0]['node_id'] == 10
    assert matched[1]['node_id'] == 110
    assert matched[2]['node_id'] == 210
    assert matched[0]['node_type_id'] != matched[1]['node_type_id'] != matched[2]['node_type_id']
def test_cross_population_edges():
    """Edges sourced from another network save with the correct population attributes."""
    tmp_dir = make_tmp_dir()
    edges_file = make_tmp_file(suffix='.h5')
    edge_types_file = make_tmp_file(suffix='.csv')

    net_a1 = NetworkBuilder('A1')
    net_a1.add_nodes(N=100, model='A')
    net_a1.build()

    # A2 receives exactly one edge per matching node id in A1
    net_a2 = NetworkBuilder('A2')
    net_a2.add_nodes(N=100, model='B')
    net_a2.add_edges(
        source=net_a1.nodes(),
        target=net_a2.nodes(),
        connection_rule=lambda s, t: 1 if s.node_id == t.node_id else 0
    )
    net_a2.build()
    net_a2.save_edges(
        edges_file_name=edges_file,
        edge_types_file_name=edge_types_file,
        output_dir=tmp_dir,
        name='A1_A2'
    )

    edges_h5_path = os.path.join(tmp_dir, edges_file)
    assert os.path.exists(edges_h5_path)
    with h5py.File(edges_h5_path, 'r') as h5:
        assert '/edges/A1_A2' in h5
        assert len(h5['/edges/A1_A2/source_node_id']) == 100
        assert h5['/edges/A1_A2/source_node_id'].attrs['node_population'] == 'A1'
        assert len(h5['/edges/A1_A2/target_node_id']) == 100
        assert h5['/edges/A1_A2/target_node_id'].attrs['node_population'] == 'A2'

    barrier()
def test_save_multinetwork_1():
    """save_edges with src_network keeps only edge-types from the requested source network."""
    net1 = NetworkBuilder('NET1')
    net1.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
    net1.add_edges(source={'ei': 'e'}, target={'ei': 'e'}, connection_rule=5, ctype_1='n1_rec')
    net1.build()

    net2 = NetworkBuilder('NET2')
    net2.add_nodes(N=10, position=[(0.0, 1.0, -1.0)] * 10, cell_type='PV1', ei='i')
    net2.add_edges(connection_rule=10, ctype_1='n2_rec')
    net2.add_edges(source=net1.nodes(), target={'ei': 'i'}, connection_rule=1, ctype_2='n1_n2')
    net2.add_edges(target=net1.nodes(cell_type='Scnna1'), source={'cell_type': 'PV1'},
                   connection_rule=2, ctype_2='n2_n1')
    net2.build()

    net_dir = tempfile.mkdtemp()
    net2.save_edges(edges_file_name='NET2_NET1_edges.h5',
                    edge_types_file_name='NET2_NET1_edge_types.csv',
                    output_dir=net_dir, src_network='NET2')

    n1_n2_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET1')
    edges_h5 = h5py.File(n1_n2_fname + '_edges.h5', 'r')
    assert len(edges_h5['/edges/NET2_to_NET1/target_node_id']) == 100 * 10
    assert len(edges_h5['/edges/NET2_to_NET1/0/nsyns']) == 100 * 10
    assert edges_h5['/edges/NET2_to_NET1/0/nsyns'][0] == 2

    # only the single NET2 -> NET1 edge-type should survive the source filter
    edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
    assert len(edge_types_csv) == 1
    assert 'ctype_1' not in edge_types_csv.columns.values
    assert edge_types_csv['ctype_2'].iloc[0] == 'n2_n1'
def test_save_weights():
    """Per-edge properties added via a ConnectionMap are saved to their own h5 model group."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='PV1', ei='i')
    net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, tags=np.linspace(0, 100, 100),
                  cell_type='PV2', ei='i')

    # i -> e: 200 sources x 100 targets x 3 syns = 60000 edges
    cm = net.add_edges(source={'ei': 'i'}, target={'ei': 'e'},
                       connection_rule=lambda s, t: 3, p1='e2i', p2='e2i')
    # np.int / np.float were deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin types are the documented replacements and are accepted as dtypes.
    cm.add_properties(names=['segment', 'distance'], rule=lambda s, t: [1, 0.5],
                      dtypes=[int, float])
    # Scnna1 -> PV1: 100 x 100 x 2 syns = 20000 edges
    net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2, p1='s2p')

    net.build()
    net_dir = tempfile.mkdtemp()
    net.save_nodes('tmp_nodes.h5', 'tmp_node_types.csv', output_dir=net_dir)
    net.save_edges('tmp_edges.h5', 'tmp_edge_types.csv', output_dir=net_dir)

    edges_h5 = h5py.File('{}/tmp_edges.h5'.format(net_dir), 'r')
    assert net.nedges == 80000
    # group 0: edges with explicit properties (one row per source/target pair)
    assert len(edges_h5['/edges/NET1_to_NET1/0/distance']) == 60000
    assert len(edges_h5['/edges/NET1_to_NET1/0/segment']) == 60000
    # group 1: nsyns-style edges, one row per pair (100 x 100 = 10000)
    assert len(edges_h5['/edges/NET1_to_NET1/1/nsyns']) == 10000
    assert edges_h5['/edges/NET1_to_NET1/0/distance'][0] == 0.5
    assert edges_h5['/edges/NET1_to_NET1/0/segment'][0] == 1
    assert edges_h5['/edges/NET1_to_NET1/1/nsyns'][0] == 2
def test_failed_search():
    """Searches on non-matching values or unknown properties yield empty pools."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, p1='p1', q1=range(100))

    assert len(net.nodes(p1='p2')) == 0  # known property, value no node has
    assert len(net.nodes(q2=10)) == 0    # property no node defines at all
def test_nsyn_edges():
    """Total edge count reflects the per-pair synapse numbers from each connection rule."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, cell_type='Scnna1', ei='e')
    net.add_nodes(N=100, cell_type='PV1', ei='i')
    net.add_nodes(N=100, cell_type='PV2', ei='i')

    # i -> e: 200 sources x 100 targets x 1 syn = 20000 edges
    net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, connection_rule=lambda s, t: 1)
    # Scnna1 -> PV1: 100 x 100 x 2 syns = 20000 edges
    net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
                  connection_rule=lambda s, t: 2)
    net.build()

    assert net.nedges == 20000 + 20000
    assert net.edges_built is True
def test_build_nodes1():
    """Explicit node_id and node_type_id values are preserved on the built nodes."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=3, node_id=[100, 200, 300], node_type_id=101, name=['one', 'two', 'three'])

    node_one = next(iter(net.nodes(name='one')))
    assert node_one['name'] == 'one'
    assert node_one['node_id'] == 100
    assert node_one['node_type_id'] == 101

    node_three = next(iter(net.nodes(name='three')))
    assert node_three['name'] == 'three'
    assert node_three['node_id'] == 300
    assert node_three['node_type_id'] == 101
def test_add_nodes_ids():
    """Special case: user explicitly supplies both node_id and node_type_id parameters."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=3, node_id=[100, 200, 300], node_type_id=101, name=['one', 'two', 'three'])

    node_one = next(iter(net.nodes(name='one')))
    assert node_one['name'] == 'one'
    assert node_one['node_id'] == 100
    assert node_one['node_type_id'] == 101

    node_three = next(iter(net.nodes(name='three')))
    assert node_three['name'] == 'three'
    assert node_three['node_id'] == 300
    assert node_three['node_type_id'] == 101
def test_mulitnet_iterator():
    """Edge iterators can be restricted by source/target network and by edge properties."""
    # NOTE(review): 'mulitnet' looks like a typo for 'multinet'; name kept so test
    # discovery and any CI references to the test id stay stable.
    net1 = NetworkBuilder('NET1')
    net1.add_nodes(N=50, cell_type='Rorb', ei='e')
    net1.build()

    net2 = NetworkBuilder('NET2')
    net2.add_nodes(N=100, cell_type='Scnna1', ei='e')
    net2.add_nodes(N=100, cell_type='PV1', ei='i')
    net2.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5,
                   syn_type='e2i', net_type='rec')
    net2.add_edges(source=net1.nodes(), target={'ei': 'e'}, connection_rule=1,
                   syn_type='e2e', net_type='fwd')
    net2.build()

    assert len(net2.edges()) == 50 * 100 + 100 * 100
    assert len(net2.edges(source_network='NET2', target_network='NET1')) == 0
    assert len(net2.edges(source_network='NET1', target_network='NET2')) == 50 * 100
    assert len(net2.edges(target_network='NET2', net_type='rec')) == 100 * 100

    fwd_edges = net2.edges(source_network='NET1')
    assert len(fwd_edges) == 50 * 100
    for edge in fwd_edges:
        assert edge['net_type'] == 'fwd'
def test_single_node():
    """add_nodes without N creates exactly one node carrying every given property."""
    net = NetworkBuilder('NET1')
    net.add_nodes(prop1='prop1', prop2='prop2', param1=['param1'])

    built = list(net.nodes())
    assert len(built) == 1
    only_node = built[0]
    assert only_node['param1'] == 'param1'
    assert only_node['prop1'] == 'prop1'
    assert only_node['prop2'] == 'prop2'
def test_multi_search():
    """Multiple keyword filters are AND-ed together across node populations."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=10, prop_n='prop1', sp='sp1', param1=range(0, 10))
    net.add_nodes(N=10, prop_n='prop1', sp='sp2', param1=range(5, 15))
    net.add_nodes(N=20, prop_n='prop2', sp='sp2', param1=range(20))

    # param1 == 5 occurs once in each 'prop1' population (ids 5 and 10), never in 'prop2'
    matches = list(net.nodes(prop_n='prop1', param1=5))
    assert len(matches) == 2
    assert matches[0]['node_id'] == 5
    assert matches[1]['node_id'] == 10
def test_itr_advanced_search():
    """Edge iterators accept node pools, filter dicts, id lists, and edge-property filters."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=1, cell_type='Scnna1', ei='e')
    net.add_nodes(N=50, cell_type='PV1', ei='i')
    net.add_nodes(N=100, cell_type='PV2', ei='i')
    net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5,
                  syn_type='e2i', nm='A')
    net.add_edges(source={'cell_type': 'PV1'}, target={'cell_type': 'PV2'}, connection_rule=5,
                  syn_type='i2i', nm='B')
    net.add_edges(source={'cell_type': 'PV2'}, target={'ei': 'i'}, connection_rule=5,
                  syn_type='i2i', nm='C')
    net.build()

    # nothing targets the lone excitatory cell
    edges = net.edges(target_nodes=net.nodes(cell_type='Scnna1'))
    assert len(edges) == 0

    edges = net.edges(source_nodes={'ei': 'e'}, target_nodes={'ei': 'i'})
    assert len(edges) == 50 + 100

    edges = net.edges(source_nodes=[n.node_id for n in net.nodes(ei='e')])
    assert len(edges) == 50 + 100

    # PV1->PV2 (50*100) plus PV2->inhibitory (100*150) = 20000
    edges = net.edges(source_nodes={'ei': 'i'})
    assert len(edges) == 100 * 100 * 2
    for edge in edges:
        assert edge['syn_type'] == 'i2i'

    edges = net.edges(syn_type='i2i')
    # Fixed: was `print len(edges) == ...`, a Python-2 print statement (a syntax
    # error under Python 3); the surrounding code makes clear an assert was intended.
    assert len(edges) == 100 * 100 * 2
    for edge in edges:
        assert edge['nm'] != 'A'

    edges = net.edges(syn_type='i2i', nm='C')
    assert len(edges) == 100 * 150
def test_basic():
    """A fresh network starts empty; querying nodes/edges triggers a lazy (empty) build."""
    net = NetworkBuilder('CA1')
    assert net.name == 'CA1'
    assert net.nnodes == 0
    assert net.nedges == 0
    assert net.nodes_built is False
    assert net.edges_built is False

    # calling the accessors forces the build, flipping the built flags
    assert len(net.nodes()) == 0
    assert len(net.edges()) == 0
    assert net.nodes_built is True
    assert net.edges_built is True
def test_add_nodes():
    """Multiple node models, even with different parameter signatures, coexist in one network."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=10, arg_list=range(10), arg_const='pop1', arg_shared='global')
    net.add_nodes(N=1, arg_list=[11], arg_const='pop2', arg_shared='global')
    # third model deliberately uses a different parameter signature
    net.add_nodes(N=5, arg_unique=range(12, 17), arg_const='pop3', arg_shared='global')
    net.build()

    assert net.nodes_built is True
    assert net.nnodes == 16
    assert net.nedges == 0
    assert len(net.nodes()) == 16
    assert len(net.nodes(arg_const='pop1')) == 10
    assert len(net.nodes(arg_const='pop2')) == 1
    assert len(net.nodes(arg_shared='global')) == 16
    assert len(net.nodes(arg_shared='invalid')) == 0

    pool = net.nodes(arg_list=2)
    assert len(pool) == 1
    node = next(iter(pool))
    assert node['arg_const'] == 'pop1'
    assert node['arg_shared'] == 'global'

    pool = net.nodes(arg_unique=12)
    assert len(pool) == 1
    node = next(iter(pool))
    assert node['arg_const'] == 'pop3'
    assert node['arg_shared'] == 'global'
    assert 'arg_list' not in node
def test_add_node_ids_mixed():
    """Explicit and auto-generated node ids can be mixed without collisions."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=3, node_id=[0, 2, 4], vals=[0, 2, 4])
    net.add_nodes(N=3, vals=[1, 1, 1])
    # NOTE(review): 'nodes_ids' is likely a typo for 'node_id'; as written it is stored
    # as an ordinary node property and the id is auto-assigned. The assertions below
    # still pass, so confirm intent before renaming the kwarg.
    net.add_nodes(nodes_ids=[6], vals=[6])

    seen_ids = set()
    for node in net.nodes():
        seen_ids.add(node.node_id)
        if node.node_id % 2 == 0:
            assert node['vals'] == node.node_id
        else:
            assert node['vals'] == 1
    assert len(seen_ids) == 7
def test_cross_pop_edges():
    """Edges may target node pools belonging to a different, already-built network."""
    net1 = NetworkBuilder('V1')
    net1.add_nodes(N=10, arg_list=range(10), arg_ctype='e')
    net1.build()

    net2 = NetworkBuilder('V2')
    net2.add_nodes(N=5, arg_list=range(10, 15), arg_ctype='i')
    # 5 sources x 10 targets x 1 syn = 50 edges
    net2.add_edges(source={'arg_ctype': 'i'}, target=net1.nodes(arg_ctype='e'),
                   connection_rule=lambda s, t: 1, edge_arg='i2e')
    net2.build()

    assert net2.nedges == 50
def test_node_set():
    """An unfiltered node pool reports '*' and iterates all nodes in insertion order."""
    net = NetworkBuilder('NET1')
    net.add_nodes(N=100, prop1='prop1', param1=range(100))

    pool = net.nodes()
    assert pool.filter_str == '*'
    all_nodes = list(pool)
    assert len(all_nodes) == 100

    first, last = all_nodes[0], all_nodes[99]
    assert first['prop1'] == 'prop1'
    assert first['param1'] == 0
    assert last['prop1'] == 'prop1'
    assert last['param1'] == 99
    # same model => same node_type_id, but ids remain unique
    assert first['node_type_id'] == last['node_type_id']
    assert first['node_id'] != last['node_id']
def test_build_nodes():
    """Mixed biophysical/intfire populations build with expected counts and properties."""
    net = NetworkBuilder('NET1')
    # NOTE(review): 'tunning_angle' (sic) is an existing property key; kept verbatim.
    net.add_nodes(N=100, position=[(100.0, -50.0, 50.0)] * 100,
                  tunning_angle=np.linspace(0, 365.0, 100, endpoint=False),
                  cell_type='Scnna1', model_type='Biophys1', location='V1', ei='e')
    net.add_nodes(N=25, position=np.random.rand(25, 3) * [100.0, 50.0, 100.0],
                  model_type='intfire1', location='V1', ei='e')
    net.add_nodes(N=150, position=np.random.rand(150, 3) * [100.0, 50.0, 100.0],
                  tunning_angle=np.linspace(0, 365.0, 150, endpoint=False),
                  cell_type='SST', model_type='Biophys1', location='V1', ei='i')
    net.build()

    assert net.nodes_built is True
    assert net.nnodes == 275
    assert net.nedges == 0
    assert len(net.nodes()) == 275
    assert len(net.nodes(ei='e')) == 125
    assert len(net.nodes(model_type='Biophys1')) == 250
    assert len(net.nodes(location='V1', model_type='Biophys1'))

    intfire_nodes = list(net.nodes(model_type='intfire1'))
    assert len(intfire_nodes) == 25
    sample = intfire_nodes[0]
    # the intfire population never defined cell_type, so the prop must be absent
    assert sample['model_type'] == 'intfire1' and 'cell_type' not in sample
def test_add_nodes_tuples():
    """Tuple-valued parameters are stored intact, both per-node and shared across nodes."""
    net = NetworkBuilder('V1')
    net.add_nodes(N=10,
                  arg_list=range(10),
                  arg_tuples=[(r, r + 1) for r in range(10)],
                  arg_const=('a', 'b'))
    net.build()

    assert net.nodes_built is True
    assert net.nnodes == 10
    for node in net.nodes():
        assert len(node['arg_tuples']) == 2
        assert node['arg_tuples'][0] == node['arg_list'] and node['arg_tuples'][1] == node['arg_list'] + 1
        assert len(node['arg_const']) == 2
        assert node['arg_const'][0] == 'a' and node['arg_const'][1] == 'b'
nsyns=20, delay=0.002, dynamics_params='ExcToInh.json') net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, syn_weight=-0.002, nsyns=10, delay=0.002, dynamics_params='InhToExc.json') net.build() net.save_nodes(nodes_file_name='brunel_nodes.h5', node_types_file_name='brunel_node_types.csv', output_dir='network') net.save_edges(edges_file_name='brunel_edges.h5', edge_types_file_name='brunel_edge_types.csv', output_dir='network') input_net = NetworkBuilder('inputs') input_net.add_nodes(pop_name='tON', ei='e', model_type='virtual') input_net.add_edges(target=net.nodes(ei='e'), syn_weight=0.0025, nsyns=10, delay=0.002, dynamics_params='input_ExcToExc.json') input_net.build() input_net.save_nodes(nodes_file_name='input_nodes.h5', node_types_file_name='input_node_types.csv', output_dir='network') input_net.save_edges(edges_file_name='input_edges.h5', edge_types_file_name='input_edge_types.csv', output_dir='network')
def BG_to_OLM(source, target): sid = source.node_id tid = target.node_id sid = sid + 8 if sid == tid: print("connecting BG {} to olm{}".format(sid, tid)) tmp_nsyn = 1 else: return None return tmp_nsyn net.add_edges(source=shock.nodes(), target=net.nodes(pop_name='OLM'), connection_rule=one_to_all_shock2OLM, syn_weight=1.0, target_sections=['somatic'], delay=0.1, distance_range=[10.0, 11.0], dynamics_params='shock2INT12.json', model_template=syn['shock2INT12.json']['level_of_detail']) net.add_edges(source=shock.nodes(), target=net.nodes(pop_name='PV'), connection_rule=one_to_all_shock2PV, syn_weight=1.0, target_sections=['somatic'], delay=0.1,
def one_to_one(source, target): sid = source.node_id tid = target.node_id if sid == tid: #print("connecting cell {} to {}".format(sid,tid)) tmp_nsyn = 1 else: return None return tmp_nsyn # Create connections between Shock --> Pyr cells net.add_edges(source=shock.nodes(), target=net.nodes(), connection_rule=one_to_one, syn_weight=1.0, target_sections=['somatic'], delay=0.1, distance_range=[10.0, 11.0], dynamics_params='shock2PN.json', model_template=syn['shock2PN.json']['level_of_detail']) # Create connections between Tone --> Pyr cells net.add_edges(source=tone.nodes(), target=net.nodes(), connection_rule=one_to_one, syn_weight=1.0, target_sections=['somatic'],
def build_edges(src, trg, sections=['basal', 'apical'], dist_range=[50.0, 150.0]): # Get morphology and soma center for the target cell swc_reader = morphologies[trg['model_name']] target_coords = [trg['x'], trg['y'], trg['z']] sec_ids, sec_xs = swc_reader.choose_sections(sections, dist_range) # randomly choose sec_ids coords = swc_reader.get_coord(sec_ids, sec_xs, soma_center=target_coords) # get coords of sec_ids dist = swc_reader.get_dist(sec_ids) swctype = swc_reader.get_type(sec_ids) return sec_ids, sec_xs, coords[0][0], coords[0][1], coords[0][2], dist[0], swctype[0] # Feedfoward excitatory virtual cells exc_net = NetworkBuilder('excvirt') exc_net.add_nodes(N=10, model_type='virtual', ei='e') cm = exc_net.add_edges(target=cortex.nodes(), source=exc_net.nodes(ei='e'), connection_rule=lambda *_: np.random.randint(4, 12), dynamics_params='AMPA_ExcToExc.json', model_template='Exp2Syn', delay=2.0) cm.add_properties('syn_weight', rule=3.4e-4, dtypes=np.float) cm.add_properties(['sec_id', 'sec_x', 'pos_x', 'pos_y', 'pos_z', 'dist', 'type'], rule=build_edges, dtypes=[np.int32, np.float, np.float, np.float, np.float, np.float, np.uint8]) exc_net.build() exc_net.save(output_dir='network') if not os.path.exists('inputs/exc_spike_trains.h5'): # Build spike-trains for excitatory virtual cells if not os.path.exists('inputs'): os.mkdir('inputs')
lower_bound = 0 if tid > 0: lower_bound = bounds[tid - 1] upper_bound = bounds[tid] if sid < upper_bound and sid >= lower_bound: #print("connecting cell {} to {}".format(sid,tid)) return 1 else: return None #Create connections between Inh --> Pyr cells net.add_edges(source=inh_stim.nodes(), target=net.nodes(), connection_rule=correct_cell, connection_params={'bounds': inh_bounds}, syn_weight=5.0e-03, weight_function='lognormal', weight_sigma=3.0e-03, weight_max=20e-03, dynamics_params='GABA_InhToExc.json', model_template='Exp2Syn', distance_range=[0.0, 300.0], target_sections=['somatic'], delay=2.0) # Create connections between Exc --> Pyr cells net.add_edges(source=exc_stim.nodes(),
class SimulationBuilder: """Class used to build our BMTK simulation. Attributes ---------- params : dict contains parameters for the network seed : int base random seed for the simulation syn : dict contains synaptic templates n_dend_exc : int number of excitatory input cells on the basal dendrites n_apic_exc : int number of excitatory input cells on the apical dendrites n_dend_inh : int number of inhibitory (SOM+) input cells on the basal dendrites more than 50 um from the soma. n_apic_inh : int number of inhibitory (SOM+) input cells on the apical dendrites n_prox_dend_inh : int number of inhibitory (PV+) input cells on the basal dendrites less than 50 um from the soma n_soma_inh : int number of inhibitory (PV+) input cells on the soma clust_per_group : int number of clusters per functional group net : NetworkBuilder the BMTK network for the biophysical cell exc_stim : NetworkBuilder the BMTK network for excitatory inputs prox_inh_stim : NetworkBuilder the BMTK network for perisomatic inhibition dist_inh_stim : NetworkBuilder the BMTK network for dendritic inhibition dend_groups : list all excitatory functional groups on the basal dendrites apic_groups : list all excitatory functional groups on the apical dendrites Methods ------- build() builds the network save_groups() saves the functional groups to a csv _set_prefixed_directory(base_dir_name : str) sets up the correct biophy_components structure based on the cell prefix in params for the given directory base _build_exc() creates excitatory input nodes and edges _build_exc_nodes(segs : pandas.DataFrame, base_name : str, n_cells : int, start=0 : int) builds excitatory nodes _build_exc_edges(group_list : list) builds excitatory edges _save_nets() builds and saves the BMTK NetworkBuilders _build_inh() creates inhibitory input nodes and edges _make_rasters() creates the inhibitory and excitatory input rasters _gen_exc_spikes(fname : str) generates and saves the excitatory spike rasters _gen_inh_spikes(n_cells : 
int, mean_fr : float, std_fr : float, key : str, fname : str) creates inhibitory spike rasters, using a noise trace based on averaging excitation and shifting it _modify_jsons() modifies the various json files however is needed after they are built _modify_sim_config() modifies the simulation_config.json however is needed _update_cellvar_record_locs(sim_config : dict) modifies the location of cellvar recordings in the given JSON simulation_config Static Methods -------------- _get_directory_prefix(directory : str) reads the prefix.txt fil in directory and returns the contents _connector_func(sources : list, targets : list, cells : list) sets the number of synapses from the given cells _set_location(source : dict, target : dict, cells : list, start_id : int) sets the location of the given edge _norm_connect(source : dict, target : dict, m : float, s : float, low : int, high : int) used to normally distribute connection counts _gen_group_spikes(writer : SonataWriter, group : FunctionalGroup, seconds : float, start_time : float, dist : func) creates and saves a functional group's spike raster _norm_rvs(mean : float, std : float) generates a random float from a normal distribution with a near zero minimum """ def __init__(self, params_file, seed=123): """Initializes the simulation builder, setting up attributes but not actually building the BMTK network. Parameters ---------- params_file : str path to the JSON file with network parameters seed : int base random seed for the simulation """ #Loads the JSON file with information about the network. with open(params_file) as f: self.params = json.load(f) self.seed = seed #Loads synapse templates. 
synapses.load() self.syn = synapses.syn_params_dicts() avg_exc_div = np.mean(list(self.params["divergence"]["exc"].values())) self.n_dend_exc = int( (self.params["lengths"]["basal_dist"] * self.params["syn_density"]["exc"]) / avg_exc_div) self.n_apic_exc = int( (self.params["lengths"]["apic"] * self.params["syn_density"]["exc"]) / avg_exc_div) self.n_dend_inh = int((self.params["lengths"]["basal_dist"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["basal_inh"]["m"]) self.n_apic_inh = int((self.params["lengths"]["apic"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["apic_inh"]["m"]) self.n_prox_dend_inh = int((self.params["lengths"]["basal_prox"] * self.params["syn_density"]["inh"]) / self.params["divergence"]["peri_inh"]["m"]) self.n_soma_inh = int(self.params["n_soma_syns"] / self.params["divergence"]["peri_inh"]["m"]) self.clust_per_group = int( (self.params["groups"]["cells_per_group"] * avg_exc_div) // (self.params["syn_density"]["exc"] * 10)) if self.params["file_current_clamp"]["input_file"] == "None": self.file_current_clamp = None else: self.file_current_clamp = self.params["file_current_clamp"] def build(self): """Builds the nodes and edges for the network. """ np.random.seed(self.seed) self._set_prefixed_directory("mechanisms") self._set_prefixed_directory("templates") self.net = NetworkBuilder("biophysical") self.net.add_nodes( N=1, pop_name='Pyrc', potental='exc', model_type='biophysical', dynamics_params=self.params["cell"]["dynamic_params"], model_template=self.params["cell"]["model_template"], model_processing=self.params["cell"]["model_processing"], morphology=self.params["cell"]["morphology"]) self._build_exc() self._build_inh() self._save_nets() self._make_rasters() #Final build step. 
build_env_bionet( base_dir='./', network_dir='./network', dt=self.params["dt"], tstop=self.params["time"]["stop"] * 1000.0, report_vars=self.params["record_cellvars"]["vars"], dL=self.params["dL"], #target length (um) of segments spikes_threshold=-10, file_current_clamp=self.file_current_clamp, spikes_inputs=[('exc_stim', 'exc_stim_spikes.h5'), ('prox_inh_stim', 'prox_inh_stim_spikes.h5'), ('dist_inh_stim', 'dist_inh_stim_spikes.h5')], components_dir='../biophys_components', compile_mechanisms=True) self._modify_jsons() def save_groups(self): """saves the apic and dend groups into a csv. one row for each node containgin the id of the functional group it is in. """ all_groups = self.dend_groups + self.apic_groups node_ids = [] func_groups = [] for func_id, group in enumerate(all_groups): for i in range(group.start_id, group.start_id + group.n_cells): node_ids.append(i) func_groups.append(func_id) df = pd.DataFrame() df["Node ID"] = node_ids df["Functional Group"] = func_groups df.to_csv("FunctionalGroups.csv", index=False) def _set_prefixed_directory(self, base_dir_name): """Fixes the biophy_components directory. There should be only one directory named <base_dir_name> and it should be the one with the prefix.txt file in it that has the same prefix as params. 
Parameters ---------- base_dir_name : str base name of the set of directories to be fixed """ #import pdb; pdb.set_trace() components_path = "../biophys_components/" biophys_subdirs = [ f.name for f in os.scandir(components_path) if f.is_dir() ] for dir_name in biophys_subdirs: if base_dir_name == dir_name: prefix = SimulationBuilder._get_directory_prefix( components_path + dir_name) if prefix == self.params["cell"]["prefix"]: return else: os.rename(components_path + base_dir_name, components_path + prefix + base_dir_name) for dir_name in biophys_subdirs: if base_dir_name in dir_name and self.params["cell"][ "prefix"] in dir_name: os.rename(components_path + dir_name, components_path + base_dir_name) def _get_directory_prefix(directory): """Returns the contents of the prefix.txt file in the given directory. Parameters ---------- directory : str directory to look in Returns ------- str contents of prefix.txt """ with open(directory + "/prefix.txt", 'r') as f: return f.read() def _build_exc(self): """Builds the excitatory input cells and their synapses. """ # External excitatory inputs self.exc_stim = NetworkBuilder('exc_stim') #DataFrame of all segments on the cell. segs = pd.read_csv(self.params["cell"]["segments_file"]) dends = segs[(segs["Type"] == "dend") & (segs["Distance"] >= 50)] apics = segs[(segs["Type"] == "apic")] np.random.seed(self.seed + 1) apic_start, self.dend_groups = self._build_exc_nodes( dends, "dend", self.n_dend_exc) np.random.seed(self.seed + 2) _, self.apic_groups = self._build_exc_nodes(apics, "apic", self.n_apic_exc, start=apic_start) np.random.seed(self.seed + 3) self._build_exc_edges(self.dend_groups) np.random.seed(self.seed + 4) self._build_exc_edges(self.apic_groups) #Sets the number of synapses for each input cell. def _connector_func(sources, target, cells): """Used to set the number of synapses from each excitatory input cell in a functional group. Use with "all_to_one" iterator. 
Parameters ---------- sources : list presynaptic nodes (represented as dicts) target : dict postsynaptic node cells : list list of Cells in the FunctionalGroup Returns ------- list list of synapses for each pairing """ return [cell.n_syns for cell in cells] #Sets the location of synapses based on the given cell list. def _set_location(source, target, cells, start_id): """Sets the location of the given synapse. Parameters ---------- source : dict source node information target : dict target node information cells : list Cells in the functional group start_id : int start_id for the functional groups the cells come from Returns ------- int BMTK section id float distance along the section """ #Gets the proper index within the cell list. index = source.node_id - start_id seg = cells[index].get_seg() return seg.bmtk_id, seg.x #Creates the functional groups and adds the virtual cells to the #BMTK NetworkBuilder. def _build_exc_nodes(self, segs, base_name, n_cells, start=0): """Creates the functional groups and adds the virtual cells to the BMTK NetworkBuilder Parameters ---------- segs : pandas.DataFrame all the segments available for the functional groups base_name : str the string that is appended to to make the group names. groups get 0 - n_groups appended to their names. n_cells : int total number of input cells that should be added. start : int, optional starting id to be associated with the functional groups, by default 0 this is used later to associate cells in functional groups with the correct locations and synapses. Returns ------- int what the start parameter should be for the next call to _build_exc_nodes list list of functional groups that were created """ start_id = start n_groups = n_cells // self.params["groups"]["cells_per_group"] n_extra = n_cells % self.params["groups"][ "cells_per_group"] #number of extra cells that don't evenly fit into groups group_list = [] for i in range(n_groups): name = base_name + str(i) #Spreads out the extra cells. 
N = self.params["groups"]["cells_per_group"] if i < n_extra: N += 1 self.exc_stim.add_nodes(N=N, pop_name=name, potential="exc", model_type='virtual') new_group = FunctionalGroup( segs, segs.sample().iloc[0], N, self.clust_per_group, name, start_id, partial(make_seg_sphere, radius=self.params["groups"]["group_radius"]), partial(make_seg_sphere, radius=self.params["groups"]["cluster_radius"])) group_list.append(new_group) start_id += N return start_id, group_list def _build_exc_edges(self, group_list): """Creates the connections between each cell in the list of groups and the biophysical cell. Parameters ---------- group_list : list list of functional groups """ for i in range(len(group_list)): group = group_list[i] #Creates the edges from each excitatory input cells in the group. conn = self.net.add_edges( source=self.exc_stim.nodes(pop_name=group.name), target=self.net.nodes(), iterator="all_to_one", connection_rule=SimulationBuilder._connector_func, connection_params={'cells': group.cells}, syn_weight=1, delay=0.1, dynamics_params='PN2PN.json', model_template=self.syn['PN2PN.json']['level_of_detail'], ) #Sets the postsynaptic locations of the connections. 
conn.add_properties(['sec_id', "sec_x"], rule=SimulationBuilder._set_location, rule_params={ 'cells': group.cells, 'start_id': group.start_id }, dtypes=[np.int, np.float]) def _save_nets(self): """builds and saves the BMTK NetworkBuilders """ # Build and save our networks np.random.seed(self.seed + 12) self.net.build() self.net.save_nodes(output_dir='network') np.random.seed(self.seed + 16) self.net.save_edges(output_dir='network') np.random.seed(self.seed + 13) self.exc_stim.build() self.exc_stim.save_nodes(output_dir='network') np.random.seed(self.seed + 14) self.prox_inh_stim.build() self.prox_inh_stim.save_nodes(output_dir='network') np.random.seed(self.seed + 15) self.dist_inh_stim.build() self.dist_inh_stim.save_nodes(output_dir='network') def _build_inh(self): """Creates inhibitory input nodes and their connections onto the biophysical cell """ #####################Perisomatic Inhibition############################## self.prox_inh_stim = NetworkBuilder('prox_inh_stim') #Nodes that connect to soma. self.prox_inh_stim.add_nodes(N=self.n_soma_inh, pop_name='on_soma', potential='exc', model_type='virtual') #Nodes that connect to proximal dendrites. self.prox_inh_stim.add_nodes(N=self.n_prox_dend_inh, pop_name='on_dend', potential='exc', model_type='virtual') div_params = self.params["divergence"]["peri_inh"] #On soma. 
np.random.seed(self.seed + 5) self.net.add_edges( source=self.prox_inh_stim.nodes(pop_name='on_soma'), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='PV2PN.json', model_template=self.syn['PV2PN.json']['level_of_detail'], distance_range=[-2000, 2000.0], target_sections=['somatic']) #On dendrites within 50 um np.random.seed(self.seed + 6) self.net.add_edges( source=self.prox_inh_stim.nodes(pop_name='on_dend'), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='PV2PN.json', model_template=self.syn['PV2PN.json']['level_of_detail'], distance_range=[0, 50.0], target_sections=['dend']) ####################################################################################### #############################Dendritic Inhibition###################################### self.dist_inh_stim = NetworkBuilder('dist_inh_stim') self.dist_inh_stim.add_nodes(N=self.n_dend_inh, pop_name='dend', potential='exc', model_type='virtual') self.dist_inh_stim.add_nodes(N=self.n_apic_inh, pop_name='apic', potential='exc', model_type='virtual') div_params = self.params["divergence"]["basal_inh"] #Basal edges. np.random.seed(self.seed + 7) self.net.add_edges( source=self.dist_inh_stim.nodes(pop_name="dend"), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='SOM2PN.json', model_template=self.syn['SOM2PN.json']['level_of_detail'], distance_range=[50, 2000.0], target_sections=['dend']) div_params = self.params["divergence"]["apic_inh"] #Apic edges. 
np.random.seed(self.seed + 8) self.net.add_edges( source=self.dist_inh_stim.nodes(pop_name="apic"), target=self.net.nodes(), connection_rule=SimulationBuilder._norm_connect, connection_params={ "m": div_params["m"], "s": div_params["s"], "low": div_params["min"], "high": div_params["max"] }, syn_weight=1, delay=0.1, dynamics_params='SOM2PN.json', model_template=self.syn['SOM2PN.json']['level_of_detail'], distance_range=[50, 2000.0], target_sections=['apic']) def _norm_connect(source, target, m, s, low, high): """Returns a random number of synapses based on the given distribution. Parameters ---------- source : dict source node target : dict target node m : float mean number of connections s : float standard deviation of number of connections low : int minimum number of connections high : int maximum number of connections Returns ------- int number of connections """ return int(min(max(np.random.normal(m, s), low), high)) def _make_rasters(self): """Generates excitatory and inhibitory input rasters """ np.random.seed(self.seed + 9) self._gen_exc_spikes('exc_stim_spikes.h5') inh_frs = self.params["inh_frs"] #Makes perisomatic inhibitory raster. np.random.seed(self.seed + 10) self._gen_inh_spikes(self.n_soma_inh + self.n_prox_dend_inh, inh_frs["proximal"]["m"], inh_frs["proximal"]["s"], inh_frs["proximal"]["rhythmicity"], "prox_inh_stim", 'prox_inh_stim_spikes.h5') #Makes dendritic inhibitory raster. np.random.seed(self.seed + 11) self._gen_inh_spikes(self.n_apic_inh + self.n_dend_inh, inh_frs["distal"]["m"], inh_frs["distal"]["s"], inh_frs["distal"]["rhythmicity"], "dist_inh_stim", 'dist_inh_stim_spikes.h5') #Generates the spike raster for a given group. #The group has the same noise. 
def _gen_group_spikes(writer, group, seconds, start_time, dist): """Generates and writes to a h5 file the given functional group's spike trains Parameters ---------- writer : SonataWriter how the spike trains are saved group : FunctionalGroup the functional group that the spike trains are being made for seconds : float length of the spike trains in seconds start_time : float what time (ms) the spike trains should start at dist : func function for random distribution used for an individual cell's firing rate """ z = make_noise( num_samples=(int(seconds * 1000)) - 1, num_traces=1 ) #generates the noise trace common to each cell in the functional group. make_save_spikes(writer, True, dist(size=group.n_cells), numUnits=group.n_cells, rateProf=np.tile(z[0, :], (group.n_cells, 1)), start_id=group.start_id, start_time=start_time) #Creates the excitatory input raster from the functional groups. def _gen_exc_spikes(self, fname): """Generates the excitatory input raster for all of the functional groups Parameters ---------- fname : str name of the file to save the rasters in (.h5) """ #distribution used for generating excitatory firing rates. levy_dist = partial(st.levy_stable.rvs, alpha=1.37, beta=-1.00, loc=0.92, scale=0.44, size=1) length = self.params["time"]["stop"] - self.params["time"]["start"] buffer = self.params["time"]["start"] writer = SonataWriter(fname, ["spikes", "exc_stim"], ["timestamps", "node_ids"], [np.float, np.int]) for group in (self.dend_groups + self.apic_groups): SimulationBuilder._gen_group_spikes(writer, group, length, buffer * 1000, levy_dist) #Blocks off the bottom of a normal distribution. 
    # NOTE(review): no `self` parameter — behaves like a static method.
    def _norm_rvs(mean, std):
        """Generates a random float from a normal distribution with a near zero minimum.

        Parameters
        ----------
        mean : float
            mean of the distribution
        std : float
            standard deviation of the distribution

        Returns
        -------
        float
            random float, floored at 0.001 so downstream rates stay positive
        """
        return max(st.norm.rvs(loc=mean, scale=std, size=1), 0.001)

    # #Makes a spike raster with each cell having its own noise trace.
    # def gen_inh_spikes(n_cells, mean_fr, std_fr, key, file, times):
    #     # node_ids = []
    #     # timestamps = []
    #     length = times[1] - times[0]
    #     buffer = times[0]
    #     writer = SonataWriter(file, ["spikes", key], ["timestamps", "node_ids"], [np.float, np.int])
    #     z = make_noise(num_samples=(int(length*1000))-1,num_traces=1)
    #     make_save_spikes(writer, False, partial(positive_normal, mean=mean_fr, std=std_fr), numUnits=n_cells,rateProf=z[0,:],start_time=buffer*1000)

    # Creates a spike raster with each cell having the same noise coming from the a shifted average of excitation.
    def _gen_inh_spikes(self, n_cells, mean_fr, std_fr, rhythmic_dict, key, fname):
        """Generates a spike raster with each train having the noise trace from
        averaging excitation. Distributes firing rates normally.

        Parameters
        ----------
        n_cells : int
            number of spike trains
        mean_fr : float
            mean firing rate
        std_fr : float
            standard deviation of the firing rate
        rhythmic_dict : dict
            dictionary with keys f - frequency, mod - depth of modulation
        key : str
            name of the second group in the h5 file
        fname : str
            name of file to save the raster to
        """
        # node_ids = []
        # timestamps = []
        # Truncation bounds in standard-normal units so rates stay in [0, 100] Hz.
        a, b = (0 - mean_fr) / std_fr, (100 - mean_fr) / std_fr
        d = partial(st.truncnorm.rvs, a=a, b=b, loc=mean_fr, scale=std_fr)

        # "None" (string) flags the non-rhythmic case: noise is derived from
        # the previously written excitatory raster.
        if rhythmic_dict['f'] == "None":
            f = h5py.File("exc_stim_spikes.h5", "r")
            ts = f['spikes']["exc_stim"]['timestamps']
            nid = f['spikes']["exc_stim"]['node_ids']
            # Creates a noise trace based on the excitatory spike raster.
z = shift_exc_noise(ts, nid, self.params["time"]["stop"], time_shift=self.params["inh_shift"]) z = np.tile(z, (n_cells, 1)) writer = SonataWriter(fname, ["spikes", key], ["timestamps", "node_ids"], [np.float, np.int]) make_save_spikes(writer, False, d(size=n_cells), numUnits=n_cells, rateProf=z) else: # make an array of modulated sin waves # make_save_spikes should be written so that the firing rates are generated # outside instead of inside the function. frs = d(size=n_cells) t = np.arange(0, self.params["time"]["stop"], 0.001) z = np.zeros((n_cells, t.shape[0])) P = 0 for i in np.arange(0, n_cells): offset = frs[i] A = offset / ((1 / rhythmic_dict['mod']) - 1) z[i, :] = A * np.sin( (2 * np.pi * rhythmic_dict['f'] * t) + P) + offset writer = SonataWriter(fname, ["spikes", key], ["timestamps", "node_ids"], [np.float, np.int]) make_save_spikes(writer, False, np.ones((n_cells, 1)), numUnits=n_cells, rateProf=z) def _modify_jsons(self): """modifies the various json files however is needed after they are built""" self._modify_sim_config() def _modify_sim_config(self): """modifies the simulation_config.json however is needed""" with open("simulation_config.json", "r") as jsonFile: sim_config = json.load(jsonFile) self._update_cellvar_record_locs(sim_config) with open("simulation_config.json", "w") as jsonFile: json.dump(sim_config, jsonFile, indent=2) def _update_cellvar_record_locs(self, sim_config): """modifies the location of cellvar recordings in the given JSON simulation_config Parameters ---------- sim_config : dict simulation_config to modify """ reports = sim_config["reports"] cellvar_reports = [ report for report in reports.values() if report["module"] == "membrane_report" ] for loc, report in zip(self.params["record_cellvars"]["locs"], cellvar_reports): report["sections"] = loc
# rule_params={'min_delay':syn[dynamics_file]['delay']}, dtypes=[np.float]) # add_delays.append(False) # min_delays.append(-1)#Want to append -1 if not adding delays. # conn = net.add_gap_junctions(source={'pop_name': ['Bask']}, # target={'pop_name': ['Bask']}, # resistance = 0.0001, target_sections=['somatic'], # connection_rule=dist_conn_perc, # connection_params={'min_dist':0.0, # 'max_dist':300.0,'min_syns':1, # 'max_syns':2,'A':0.08,'B':0.0}) # conn._edge_type_properties['sec_id'] = 0 # conn._edge_type_properties['sec_x'] = 0.9 net.add_edges(source=thalamus.nodes(), target=net.nodes(pop_name='PyrA'), connection_rule=perc_conn, syn_weight=1.0e-03, # changed from e-04 weight_function='lognormal', weight_sigma=1.0e-04, target_sections=['basal'], delay=0.1, distance_range=[0.0, 300.0], dynamics_params='AMPA_ExcToExc.json', model_template='exp2syn') net.add_edges(source=thalamus.nodes(), target=net.nodes(pop_name='PyrC'), connection_rule=perc_conn, syn_weight=1.0e-03, # same here weight_function='lognormal', weight_sigma=1.0e-04,
                  syn_weight=-4.0, delay=2.0)

# Build and save internal network
internal.build()
print('Saving internal network')
internal.save(output_dir='network')

# Build a network of 100 virtual cells that will connect to and drive the
# simulation of the internal network
print('Building external connections')
external = NetworkBuilder("external")
external.add_nodes(N=100, model_type='virtual', ei='e')

# Targets all glif excitatory cells; each source-target pair gets a random
# 0-4 synapses.
external.add_edges(target=internal.nodes(ei='e', orig_model='glif'),
                   source=external.nodes(),
                   connection_rule=lambda *_: np.random.randint(0, 5),
                   dynamics_params='LGN_to_GLIF.json',
                   model_template='static_synapse',
                   delay=2.0,
                   syn_weight=11.0)

# Targets all glif inhibitory cells
external.add_edges(target=internal.nodes(ei='i', orig_model='glif'),
                   source=external.nodes(),
                   connection_rule=lambda *_: np.random.randint(0, 5),
                   dynamics_params='LGN_to_GLIF.json',
                   model_template='static_synapse',
                   delay=2.0,
                   syn_weight=14.0)
    tmp_nsyn = 0
    return tmp_nsyn

################################################################################
############################# BACKGROUND INPUTS ################################

# External inputs: one virtual thalamic cell per internal cell.
thalamus = NetworkBuilder('mthalamus')
thalamus.add_nodes(N=numL5PNA + numL5PNC + numBask,
                   pop_name='tON',
                   potential='exc',
                   model_type='virtual')

# NOTE(review): this prints the bound method object, not the node list —
# probably meant thalamus.nodes(). Confirm before changing.
print(thalamus.nodes)

# One-to-one thalamic drive onto the two pyramidal populations.
thalamus.add_edges(source=thalamus.nodes(),
                   target=netff.nodes(pop_name=['L5PNA', 'L5PNC']),
                   connection_rule=one_to_one,
                   syn_weight=70,
                   delay=2.0,
                   weight_function=None,
                   target_sections=['basal', 'apical'],
                   distance_range=[0.0, 300.0],
                   dynamics_params='AMPA_ExcToExc.json',
                   model_template='exp2syn')

# One-to-one thalamic drive onto the basket cells.
thalamus.add_edges(source=thalamus.nodes(),
                   target=netff.nodes(pop_name='Cell_Bask'),
                   connection_rule=one_to_one,
                   syn_weight=5,
                   target_sections=['somatic'],
target_sections=['somatic'], sec_id=0, sec_x=0.9) conn.add_properties(names=['delay', 'sec_id', 'sec_x'], rule=syn_dist_delay_feng_section, rule_params={ 'sec_id': 0, 'sec_x': 0.9 }, dtypes=[np.float, np.int32, np.float]) ########################################################################## ######################### BACKGROUND INPUT ############################### net.add_edges(source=thalamus.nodes(), target=net.nodes(pop_name='PyrA'), connection_rule=one_to_one, syn_weight=1, target_sections=['basal'], delay=0.1, distance_range=[0.0, 9999.9], dynamics_params='BG2PNe_feng.json', model_template='bg2pyr', sec_x=0.9) net.add_edges(source=thalamus.nodes(), target=net.nodes(pop_name='PyrC'), connection_rule=one_to_one, syn_weight=1, target_sections=['basal'],