def generate_hippocampal_net(network_id,
                             conndata="430",
                             nrn_runname="TestRun",
                             validate=True,
                             randomSeed=12345,
                             generate_LEMS_simulation=False,
                             duration=100,
                             dt=0.01,
                             temperature="34.0 degC"):
    """Generate a NeuroML2 hippocampal network (and optionally a LEMS simulation).

    :param network_id: id used for the network and the output file names
    :param conndata: connectivity dataset label used by add_synapses
    :param nrn_runname: NEURON run name forwarded to create_populations/add_synapses
    :param validate: if True, validate the written NeuroML2 file
    :param randomSeed: seed for reproducible population/connection generation
    :param generate_LEMS_simulation: if True, also create a LEMSSimulation
    :param duration: simulation duration (ms) for the LEMS simulation
    :param dt: simulation timestep (ms) for the LEMS simulation
    :param temperature: network temperature attribute
    :return: (LEMSSimulation or None, LEMS file name or '')
    """
    seed(randomSeed)
    cell_types = ['axoaxonic', 'bistratified', 'cck', 'cutsuridis', 'ivy',
                  'ngf', 'olm', 'poolosyn', 'pvbasket', 'sca']
    synapse_types = ['exp2Synapses', 'customGABASynapses']

    ###### Create network doc #####
    nml_doc = neuroml.NeuroMLDocument(id=network_id)
    for cell in cell_types:
        nml_doc.includes.append(
            neuroml.IncludeType(href="../cells/%s.cell.nml" % cell))
    for synapse in synapse_types:
        nml_doc.includes.append(
            neuroml.IncludeType(href="../synapses/%s.synapse.nml" % synapse))
    nml_doc.includes.append(neuroml.IncludeType(href="stimulations.nml"))

    # Create network
    net = neuroml.Network(id=network_id,
                          type="networkWithTemperature",
                          temperature=temperature)
    from neuroml import __version__
    net.notes = "Network generated using libNeuroML v%s" % __version__
    nml_doc.networks.append(net)

    # Create populations
    print("Creating populations...")
    dCellIDs, dNumCells = create_populations(net, cell_types, nrn_runname,
                                             randomSeed)

    # Create synapses
    print("Connecting cells...")
    add_synapses(net, conndata, nrn_runname, dCellIDs, dNumCells,
                 write_synapse_file=False)

    # initialise voltage
    print("Initialising cell voltage..")
    # TODO: this shouldn't be hard coded ...
    # (note: 'cutsuridis' has no entry here -- confirm init_voltage handles that)
    dClamps = {}
    dClamps["axoaxonic"] = -65.0127
    dClamps["bistratified"] = -67.0184
    dClamps["cck"] = -70.6306
    dClamps["ivy"] = -59.9512
    dClamps["ngf"] = -59.9512
    dClamps["olm"] = -71.1411
    dClamps["poolosyn"] = -62.9601
    dClamps["pvbasket"] = -65.0246
    dClamps["sca"] = -70.5652
    init_voltage(nml_doc, net, dClamps, dNumCells)

    ####### Write to file ######
    print("Saving to file...")
    nml_file = network_id + '.net.nml'
    writers.NeuroMLWriter.write(nml_doc, nml_file)
    print("Written network file to: " + nml_file)

    if validate:
        ###### Validate the NeuroML ######
        from neuroml.utils import validate_neuroml2
        validate_neuroml2(nml_file)

    if generate_LEMS_simulation:
        # Create a LEMSSimulation to manage creation of LEMS file
        ls = LEMSSimulation('Sim_' + network_id, duration, dt)

        # Point to network as target of simulation
        ls.assign_simulation_target(net.id)

        # Incude generated/existing NeuroML2 files
        channel_types = ['CavL', 'CavN', 'HCN', 'HCNolm', 'HCNp', 'KCaS',
                         'Kdrfast', 'Kdrfastngf', 'Kdrp', 'Kdrslow', 'KvA',
                         'KvAdistp', 'KvAngf', 'KvAolm', 'KvAproxp', 'KvCaB',
                         'KvGroup', 'Nav', 'Navaxonp', 'Navbis', 'Navcck',
                         'Navngf', 'Navp', 'leak_chan']
        for channel in channel_types:
            ls.include_neuroml2_file("../channels/%s.channel.nml" % channel,
                                     include_included=False)
        ls.include_neuroml2_file("../channels/Capool.nml",
                                 include_included=False)
        for cell in cell_types:
            ls.include_neuroml2_file("../cells/%s.cell.nml" % cell,
                                     include_included=False)
        for synapse in synapse_types:
            ls.include_neuroml2_file("../synapses/%s.synapse.nml" % synapse,
                                     include_included=False)
        ls.include_neuroml2_file("stimulations.nml", include_included=False)
        ls.include_neuroml2_file(nml_file, include_included=False)

        ###### Specify Display and output files #####
        max_traces = 9  # the 10th color in NEURON is white ...
        # BUG FIX: was dNumCells.iteritems() -- Python 2 only; the rest of this
        # function already uses Python 3 print() calls, so use .items().
        for cell_type, numCells in dNumCells.items():
            PC = False
            if numCells > 0:
                of = "of_%s" % cell_type
                ls.create_output_file(of, "%s.v.dat" % cell_type)
                if cell_type == 'poolosyn' or cell_type == 'cutsuridis':
                    # TODO: ensure that only one of them is used for modelling
                    # pyramidal cells (in a given simulation)
                    PC = True
                    ls.create_event_output_file("spikes_PC", "PC.spikes.dat")
                    ls.create_display("disp_PC", "Voltages Pyramidal cells",
                                      "-80", "50")
                cell_id = "%scell" % cell_type
                pop_id = "pop_%s" % cell_type
                for i in range(numCells):
                    quantity = "%s/%i/%s/v" % (pop_id, i, cell_id)
                    ls.add_column_to_output_file(of, "v_%i" % i, quantity)
                    if PC:
                        ls.add_selection_to_event_output_file(
                            "spikes_PC", i,
                            select='%s/%i/%s' % (pop_id, i, cell_id),
                            event_port='spike')
                        if i < max_traces:
                            ls.add_line_to_display("disp_PC",
                                                   "PC %i: V[mV]" % i,
                                                   quantity, "1mV",
                                                   pynml.get_next_hex_color())

        # Save to LEMS file
        print("Writing LEMS file...")
        lems_file_name = ls.save_to_file()
    else:
        ls = None
        lems_file_name = ''

    print("-----------------------------------")
    return ls, lems_file_name
def generate_WB_network(cell_id, synapse_id, numCells_bc,
                        connection_probability, I_mean, I_sigma,
                        generate_LEMS_simulation, duration,
                        x_size=100, y_size=100, z_size=100,
                        network_id=ref + 'Network', color='0 0 1',
                        connection=True, temperature='37 degC',
                        validate=True, dt=0.001):
    """Generate a randomly connected Wang-Buzsaki basket-cell network.

    Cells are placed uniformly at random in an x_size*y_size*z_size box,
    connected all-to-all with probability connection_probability, given
    randomised initial voltages via brief voltage clamps, and driven by
    per-cell Gaussian-distributed current clamps.

    :return: (LEMSSimulation or None, LEMS file name or '')
    """
    nml_doc = neuroml.NeuroMLDocument(id=network_id)
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsakiCell.xml'))
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsakiSynapse.xml'))

    # Create network
    net = neuroml.Network(id=network_id, type='networkWithTemperature',
                          temperature=temperature)
    net.notes = 'Network generated using libNeuroML v%s' % __version__
    nml_doc.networks.append(net)

    # Create population
    pop = neuroml.Population(id=ref + 'pop', component=cell_id,
                             type='populationList', size=numCells_bc)
    if color is not None:
        pop.properties.append(neuroml.Property('color', color))
    net.populations.append(pop)
    for i in range(0, numCells_bc):
        inst = neuroml.Instance(id=i)
        pop.instances.append(inst)
        inst.location = neuroml.Location(x=str(x_size * rnd.random()),
                                         y=str(y_size * rnd.random()),
                                         z=str(z_size * rnd.random()))

    # Add connections
    proj = neuroml.ContinuousProjection(id=ref + 'proj',
                                        presynaptic_population=pop.id,
                                        postsynaptic_population=pop.id)
    conn_count = 0
    for i in range(0, numCells_bc):
        for j in range(0, numCells_bc):
            if i != j and rnd.random() < connection_probability:
                # NOTE(review): this rebinds (shadows) the otherwise-unused
                # `connection` parameter; kept for interface compatibility.
                connection = neuroml.ContinuousConnectionInstance(
                    id=conn_count,
                    pre_cell='../%s/%i/%s' % (pop.id, i, cell_id),
                    pre_component='silent',
                    post_cell='../%s/%i/%s' % (pop.id, j, cell_id),
                    post_component=synapse_id)
                proj.continuous_connection_instances.append(connection)
                conn_count += 1
    net.continuous_projections.append(proj)

    # make cell pop inhomogenouos (different V_init-s with voltage-clamp)
    vc_dur = 2  # ms
    for i in range(0, numCells_bc):
        tmp = -75 + (rnd.random() * 15)
        vc = neuroml.VoltageClamp(id='VClamp%i' % i, delay='0ms',
                                  duration='%ims' % vc_dur,
                                  simple_series_resistance='1e6ohm',
                                  target_voltage='%imV' % tmp)
        nml_doc.voltage_clamps.append(vc)
        input_list = neuroml.InputList(id='input_%i' % i,
                                       component='VClamp%i' % i,
                                       populations=pop.id)
        input = neuroml.Input(id=i,
                              target='../%s/%i/%s' % (pop.id, i, cell_id),
                              destination='synapses')
        input_list.input.append(input)
        net.input_lists.append(input_list)

    # Add outer input (IClamp)
    # NOTE(review): I_sigma**2 (a variance) is passed as the scale (std dev)
    # argument of rnd.normal -- confirm this is intentional.
    tmp = rnd.normal(I_mean, I_sigma**2, numCells_bc)  # random numbers from Gaussian distribution
    for i in range(0, numCells_bc):
        pg = neuroml.PulseGenerator(id='IClamp%i' % i, delay='%ims' % vc_dur,
                                    duration='%ims' % (duration - vc_dur),
                                    amplitude='%fpA' % (tmp[i]))
        nml_doc.pulse_generators.append(pg)
        input_list = neuroml.InputList(id='input%i' % i,
                                       component='IClamp%i' % i,
                                       populations=pop.id)
        input = neuroml.Input(id=i,
                              target='../%s/%i/%s' % (pop.id, i, cell_id),
                              destination='synapses')
        input_list.input.append(input)
        net.input_lists.append(input_list)

    # Write to file
    nml_file = '%sNet.nml' % ref
    # BUG FIX: Python 2 `print x, y` statements are a SyntaxError under
    # Python 3 (other functions in this file already use print()).
    print('Writing network file to:', nml_file, '...')
    neuroml.writers.NeuroMLWriter.write(nml_doc, nml_file)

    if validate:
        # Validate the NeuroML
        from neuroml.utils import validate_neuroml2
        validate_neuroml2(nml_file)

    if generate_LEMS_simulation:
        # Create a LEMSSimulation to manage creation of LEMS file
        ls = LEMSSimulation(sim_id='%sNetSim' % ref, duration=duration, dt=dt)

        # Point to network as target of simulation
        ls.assign_simulation_target(net.id)

        # Incude generated/existing NeuroML2 files
        ls.include_neuroml2_file('WangBuzsakiCell.xml', include_included=False)
        ls.include_neuroml2_file('WangBuzsakiSynapse.xml',
                                 include_included=False)
        ls.include_neuroml2_file(nml_file, include_included=False)

        # Specify Display and output files
        disp_bc = 'display_bc'
        ls.create_display(disp_bc, 'Basket Cell Voltage trace', '-80', '40')
        of_bc = 'volts_file_bc'
        ls.create_output_file(of_bc, 'wangbuzsaki_network.dat')
        of_spikes_bc = 'spikes_bc'
        ls.create_event_output_file(of_spikes_bc,
                                    'wangbuzsaki_network_spikes.dat')

        max_traces = 9  # the 10th color in NEURON is white ...
        for i in range(numCells_bc):
            quantity = '%s/%i/%s/v' % (pop.id, i, cell_id)
            if i < max_traces:
                ls.add_line_to_display(disp_bc, 'BC %i: Vm' % i, quantity,
                                       '1mV', pynml.get_next_hex_color())
            ls.add_column_to_output_file(of_bc, 'v_%i' % i, quantity)
            ls.add_selection_to_event_output_file(
                of_spikes_bc, i,
                select='%s/%i/%s' % (pop.id, i, cell_id),
                event_port='spike')

        # Save to LEMS file
        print('Writing LEMS file...')
        lems_file_name = ls.save_to_file()
    else:
        ls = None
        lems_file_name = ''

    return ls, lems_file_name
def generate_WB_network(cell_id, synapse_id, numCells_bc,
                        connection_probability, I_mean, I_sigma,
                        generate_LEMS_simulation, duration,
                        x_size=100, y_size=100, z_size=100,
                        network_id=ref + 'Network', color='0 0 1',
                        connection=True, temperature='37 degC',
                        validate=True, dt=0.01):
    """Generate a randomly connected Wang-Buzsaki basket-cell network.

    Variant using WangBuzsaki.cell.nml and dt=0.01; NOTE(review): this
    redefines (and thus replaces) the earlier generate_WB_network in this
    module -- confirm the duplication is intentional.

    :return: (LEMSSimulation or None, LEMS file name or '')
    """
    nml_doc = neuroml.NeuroMLDocument(id=network_id)
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsaki.cell.nml'))
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsakiSynapse.xml'))

    # Create network
    net = neuroml.Network(id=network_id, type='networkWithTemperature',
                          temperature=temperature)
    net.notes = 'Network generated using libNeuroML v%s' % __version__
    nml_doc.networks.append(net)

    # Create population
    pop = neuroml.Population(id=ref + 'pop', component=cell_id,
                             type='populationList', size=numCells_bc)
    if color is not None:
        pop.properties.append(neuroml.Property('color', color))
    net.populations.append(pop)
    for i in range(0, numCells_bc):
        inst = neuroml.Instance(id=i)
        pop.instances.append(inst)
        inst.location = neuroml.Location(x=str(x_size * rnd.random()),
                                         y=str(y_size * rnd.random()),
                                         z=str(z_size * rnd.random()))

    # Add connections
    proj = neuroml.ContinuousProjection(id=ref + 'proj',
                                        presynaptic_population=pop.id,
                                        postsynaptic_population=pop.id)
    conn_count = 0
    for i in range(0, numCells_bc):
        for j in range(0, numCells_bc):
            if i != j and rnd.random() < connection_probability:
                # NOTE(review): this rebinds (shadows) the otherwise-unused
                # `connection` parameter; kept for interface compatibility.
                connection = neuroml.ContinuousConnectionInstance(
                    id=conn_count,
                    pre_cell='../%s/%i/%s' % (pop.id, i, cell_id),
                    pre_component='silent',
                    post_cell='../%s/%i/%s' % (pop.id, j, cell_id),
                    post_component=synapse_id)
                proj.continuous_connection_instances.append(connection)
                conn_count += 1
    net.continuous_projections.append(proj)

    # make cell pop inhomogenouos (different V_init-s with voltage-clamp)
    vc_dur = 2  # ms
    for i in range(0, numCells_bc):
        tmp = -75 + (rnd.random() * 15)
        vc = neuroml.VoltageClamp(id='VClamp%i' % i, delay='0ms',
                                  duration='%ims' % vc_dur,
                                  simple_series_resistance='1e6ohm',
                                  target_voltage='%imV' % tmp)
        nml_doc.voltage_clamps.append(vc)
        input_list = neuroml.InputList(id='input_%i' % i,
                                       component='VClamp%i' % i,
                                       populations=pop.id)
        input = neuroml.Input(id=i,
                              target='../%s/%i/%s' % (pop.id, i, cell_id),
                              destination='synapses')
        input_list.input.append(input)
        net.input_lists.append(input_list)

    # Add outer input (IClamp)
    # NOTE(review): I_sigma**2 (a variance) is passed as the scale (std dev)
    # argument of rnd.normal -- confirm this is intentional.
    tmp = rnd.normal(I_mean, I_sigma**2, numCells_bc)  # random numbers from Gaussian distribution
    for i in range(0, numCells_bc):
        pg = neuroml.PulseGenerator(id='IClamp%i' % i, delay='%ims' % vc_dur,
                                    duration='%ims' % (duration - vc_dur),
                                    amplitude='%fpA' % (tmp[i]))
        nml_doc.pulse_generators.append(pg)
        input_list = neuroml.InputList(id='input%i' % i,
                                       component='IClamp%i' % i,
                                       populations=pop.id)
        input = neuroml.Input(id=i,
                              target='../%s/%i/%s' % (pop.id, i, cell_id),
                              destination='synapses')
        input_list.input.append(input)
        net.input_lists.append(input_list)

    # Write to file
    nml_file = '%s100Cells.net.nml' % ref
    # BUG FIX: Python 2 `print x, y` statements are a SyntaxError under
    # Python 3 (other functions in this file already use print()).
    print('Writing network file to:', nml_file, '...')
    neuroml.writers.NeuroMLWriter.write(nml_doc, nml_file)

    if validate:
        # Validate the NeuroML
        from neuroml.utils import validate_neuroml2
        validate_neuroml2(nml_file)

    if generate_LEMS_simulation:
        # Create a LEMSSimulation to manage creation of LEMS file
        ls = LEMSSimulation(sim_id='%sNetSim' % ref, duration=duration, dt=dt)

        # Point to network as target of simulation
        ls.assign_simulation_target(net.id)

        # Incude generated/existing NeuroML2 files
        ls.include_neuroml2_file('WangBuzsaki.cell.nml',
                                 include_included=False)
        ls.include_neuroml2_file('WangBuzsakiSynapse.xml',
                                 include_included=False)
        ls.include_neuroml2_file(nml_file, include_included=False)

        # Specify Display and output files
        disp_bc = 'display_bc'
        ls.create_display(disp_bc, 'Basket Cell Voltage trace', '-80', '40')
        of_bc = 'volts_file_bc'
        ls.create_output_file(of_bc, 'wangbuzsaki_network.dat')
        of_spikes_bc = 'spikes_bc'
        ls.create_event_output_file(of_spikes_bc,
                                    'wangbuzsaki_network_spikes.dat')

        max_traces = 9  # the 10th color in NEURON is white ...
        for i in range(numCells_bc):
            quantity = '%s/%i/%s/v' % (pop.id, i, cell_id)
            if i < max_traces:
                ls.add_line_to_display(disp_bc, 'BC %i: Vm' % i, quantity,
                                       '1mV', pynml.get_next_hex_color())
            ls.add_column_to_output_file(of_bc, 'v_%i' % i, quantity)
            ls.add_selection_to_event_output_file(
                of_spikes_bc, i,
                select='%s/%i/%s' % (pop.id, i, cell_id),
                event_port='spike')

        # Save to LEMS file
        print('Writing LEMS file...')
        lems_file_name = ls.save_to_file()
    else:
        ls = None
        lems_file_name = ''

    return ls, lems_file_name
def generate_lems_file_for_neuroml(
        sim_id,
        neuroml_file,
        target,
        duration,
        dt,
        lems_file_name,
        target_dir,
        nml_doc=None,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
        include_extra_files=None,
        gen_plots_for_all_v=True,
        plot_all_segments=False,
        gen_plots_for_quantities=None,  # Dict with displays vs lists of quantity paths
        gen_plots_for_only_populations=None,  # List of populations, all pops if=[]
        gen_saves_for_all_v=True,
        save_all_segments=False,
        gen_saves_for_only_populations=None,  # List of populations, all pops if=[]
        gen_saves_for_quantities=None,  # Dict with file names vs lists of quantity paths
        gen_spike_saves_for_all_somas=False,
        gen_spike_saves_for_only_populations=None,  # List of populations, all pops if=[]
        gen_spike_saves_for_cells=None,  # Dict with file names vs lists of quantity paths
        spike_time_format='ID_TIME',
        copy_neuroml=True,
        report_file_name=None,
        lems_file_generate_seed=None,
        verbose=False,
        simulation_seed=12345):
    """Generate a LEMS simulation file for a NeuroML2 model.

    Builds a LEMSSimulation targeting `target` in `neuroml_file`, copying (or
    referencing) the NeuroML file and its includes into `target_dir`, and
    wiring up displays, value-save and spike-save output files as requested.

    :return: (list of saved quantity paths, LEMSSimulation instance)
    """
    # BUG FIX: the container parameters previously used mutable default
    # arguments ([] / {}), shared across calls; normalise None sentinels here.
    include_extra_files = [] if include_extra_files is None else include_extra_files
    gen_plots_for_quantities = {} if gen_plots_for_quantities is None else gen_plots_for_quantities
    gen_plots_for_only_populations = [] if gen_plots_for_only_populations is None else gen_plots_for_only_populations
    gen_saves_for_only_populations = [] if gen_saves_for_only_populations is None else gen_saves_for_only_populations
    gen_saves_for_quantities = {} if gen_saves_for_quantities is None else gen_saves_for_quantities
    gen_spike_saves_for_only_populations = [] if gen_spike_saves_for_only_populations is None else gen_spike_saves_for_only_populations
    gen_spike_saves_for_cells = {} if gen_spike_saves_for_cells is None else gen_spike_saves_for_cells

    my_random = random.Random()
    if lems_file_generate_seed:
        my_random.seed(
            lems_file_generate_seed
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
    else:
        my_random.seed(
            12345
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input

    file_name_full = '%s/%s' % (target_dir, lems_file_name)

    print_comment_v(
        'Creating LEMS file at: %s for NeuroML 2 file: %s (copy: %s)' %
        (file_name_full, neuroml_file, copy_neuroml))

    ls = LEMSSimulation(sim_id, duration, dt, target,
                        simulation_seed=simulation_seed)

    if nml_doc is None:
        nml_doc = read_neuroml2_file(neuroml_file,
                                     include_includes=True,
                                     verbose=verbose)
        nml_doc_inc_not_included = read_neuroml2_file(neuroml_file,
                                                      include_includes=False,
                                                      verbose=False)
    else:
        nml_doc_inc_not_included = nml_doc

    ls.set_report_file(report_file_name)

    quantities_saved = []

    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)

    if not copy_neuroml:
        # Reference the NeuroML file in place, relative to the target dir.
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file),
                                       os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s" %
                        (neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file,
                                 include_included=True,
                                 relative_to_dir=os.path.abspath(target_dir))
    else:
        print_comment_v(
            "Copying a NeuroML file (%s) to: %s (abs path: %s)" %
            (neuroml_file, target_dir, os.path.abspath(target_dir)))

        if not os.path.isdir(target_dir):
            raise Exception("Target directory %s does not exist!" % target_dir)

        if os.path.realpath(
                os.path.dirname(neuroml_file)) != os.path.realpath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        else:
            print_comment_v("No need, same file...")

        neuroml_file_name = os.path.basename(neuroml_file)

        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        nml_dir = os.path.dirname(neuroml_file) if len(
            os.path.dirname(neuroml_file)) > 0 else '.'

        # Copy over each top-level include of the NeuroML file too.
        for include in nml_doc_inc_not_included.includes:
            if nml_dir == '.' and os.path.isfile(include.href):
                incl_curr = include.href
            else:
                incl_curr = '%s/%s' % (nml_dir, include.href)
            # Prefer the href as-is when it resolves from the cwd.
            if os.path.isfile(include.href):
                incl_curr = include.href

            print_comment_v(
                ' - Including %s (located at %s; nml dir: %s), copying to %s' %
                (include.href, incl_curr, nml_dir, target_dir))

            f1 = "%s/%s" % (target_dir, os.path.basename(incl_curr))
            f2 = "%s/%s" % (target_dir, incl_curr)
            if os.path.isfile(f1):
                print_comment_v("No need to copy, file exists: %s..." % f1)
            elif os.path.isfile(f2):
                print_comment_v("No need to copy, file exists: %s..." % f2)
            else:
                shutil.copy(incl_curr, target_dir)

            ls.include_neuroml2_file(include.href, include_included=False)

            # Recurse one level: copy the includes of the included file too.
            sub_doc = read_neuroml2_file(incl_curr)
            sub_dir = os.path.dirname(incl_curr) if len(
                os.path.dirname(incl_curr)) > 0 else '.'

            if sub_doc.__class__ == neuroml.nml.nml.NeuroMLDocument:
                for include in sub_doc.includes:
                    incl_curr = '%s/%s' % (sub_dir, include.href)
                    print_comment_v(' -- Including %s located at %s' %
                                    (include.href, incl_curr))

                    if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
                       not os.path.isfile("%s/%s" % (target_dir, incl_curr)):
                        shutil.copy(incl_curr, target_dir)
                        ls.include_neuroml2_file(include.href,
                                                 include_included=False)

    if gen_plots_for_all_v \
            or gen_saves_for_all_v \
            or len(gen_plots_for_only_populations) > 0 \
            or len(gen_saves_for_only_populations) > 0 \
            or gen_spike_saves_for_all_somas \
            or len(gen_spike_saves_for_only_populations) > 0:

        for network in nml_doc.networks:
            for population in network.populations:

                variable = "v"
                quantity_template_e = "%s[%i]"

                component = population.component
                size = population.size
                cell = None
                segment_ids = []

                # Poisson sources have no membrane potential; plot tsince.
                for c in nml_doc.spike_generator_poissons:
                    if c.id == component:
                        variable = "tsince"
                for c in nml_doc.SpikeSourcePoisson:
                    if c.id == component:
                        variable = "tsince"

                quantity_template = "%s[%i]/" + variable

                if plot_all_segments or gen_spike_saves_for_all_somas:
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()

                if population.type and population.type == 'populationList':
                    quantity_template = "%s/%i/" + component + "/" + variable
                    quantity_template_e = "%s/%i/" + component + ""
                    #  Multicompartmental cell
                    #  Needs to be supported in NeuronWriter
                    # if len(segment_ids)>1:
                    #     quantity_template_e = "%s/%i/"+component+"/0"
                    size = len(population.instances)

                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment(
                        'Generating %i plots for %s in population %s' %
                        (size, component, population.id))

                    disp0 = 'DispPop__%s' % population.id
                    ls.create_display(
                        disp0,
                        "Membrane potentials of cells in %s" % population.id,
                        "-90", "50")

                    for i in range(size):
                        if cell is not None and plot_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_line_to_display(
                                    disp0, "%s[%i] seg %i: v" %
                                    (population.id, i, segment_id), quantity,
                                    "1mV", get_next_hex_color(my_random))
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_line_to_display(
                                disp0, "%s[%i]: v" % (population.id, i),
                                quantity, "1mV", get_next_hex_color(my_random))

                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment(
                        'Saving %i values of %s for %s in population %s' %
                        (size, variable, component, population.id))

                    of0 = 'Volts_file__%s' % population.id
                    ls.create_output_file(
                        of0,
                        "%s.%s.%s.dat" % (sim_id, population.id, variable))

                    for i in range(size):
                        if cell is not None and save_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_column_to_output_file(
                                    of0, 'v_%s' % safe_variable(quantity),
                                    quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_column_to_output_file(
                                of0, 'v_%s' % safe_variable(quantity),
                                quantity)
                            quantities_saved.append(quantity)

                if gen_spike_saves_for_all_somas or population.id in gen_spike_saves_for_only_populations:
                    print_comment(
                        'Saving spikes in %i somas for %s in population %s' %
                        (size, component, population.id))

                    eof0 = 'Spikes_file__%s' % population.id
                    ls.create_event_output_file(eof0,
                                                "%s.%s.spikes" %
                                                (sim_id, population.id),
                                                format=spike_time_format)

                    for i in range(size):
                        quantity = quantity_template_e % (population.id, i)
                        ls.add_selection_to_event_output_file(
                            eof0, i, quantity, "spike")
                        quantities_saved.append(quantity)

    for display in sorted(gen_plots_for_quantities.keys()):
        quantities = gen_plots_for_quantities[display]
        max_ = "1"
        min_ = "-1"
        scale = "1"

        # Check for v ...
        if quantities and len(quantities) > 0 and quantities[0].endswith('/v'):
            max_ = "40"
            min_ = "-80"
            scale = "1mV"

        ls.create_display(display, "Plots of %s" % display, min_, max_)
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, scale,
                                   get_next_hex_color(my_random))

    for file_name in sorted(gen_saves_for_quantities.keys()):
        quantities = gen_saves_for_quantities[file_name]
        of_id = safe_variable(file_name)
        ls.create_output_file(of_id, file_name)
        for q in quantities:
            ls.add_column_to_output_file(of_id, safe_variable(q), q)
            quantities_saved.append(q)

    for file_name in sorted(gen_spike_saves_for_cells.keys()):
        quantities = gen_spike_saves_for_cells[file_name]
        of_id = safe_variable(file_name)
        ls.create_event_output_file(of_id, file_name)
        pop_here = None
        for i, quantity in enumerate(quantities):
            pop, index = get_pop_index(quantity)
            # All cells saved to one file must come from the same population,
            # otherwise the index/spike id mapping would be ambiguous.
            if pop_here:
                if pop_here != pop:
                    raise Exception('Problem with generating LEMS for saving spikes for file %s.\n' % file_name + \
                                    'Multiple cells from different populations in one file will cause issues with index/spike id.')
            pop_here = pop
            ls.add_selection_to_event_output_file(of_id, index, quantity,
                                                  "spike")
            quantities_saved.append(quantity)

    # Save to LEMS file
    ls.save_to_file(file_name=file_name_full)
    return quantities_saved, ls
def generate_grc_layer_network(
        runID,
        correlationRadius,
        NADT,
        duration,
        dt,
        minimumISI,  # ms
        ONRate,  # Hz
        OFFRate,  # Hz
        run=False):
    """Generate (and optionally run) a granule-cell-layer network for one run.

    Loads per-run parameters and the MF->GrC connectivity matrix from pickles,
    chooses ON/OFF mossy fibers (randomly, or spatially correlated when
    correlationRadius > 0), builds the NeuroML network plus LEMS instance
    file, and creates the LEMS simulation saving MF and GrC spike times.

    :return: simulation results if run=True, otherwise None
    """
    ########################################
    # Load parameters for this run
    # BUG FIX: files were opened without being reliably closed (the second one
    # never was) and shadowed the `file` builtin; pickles are opened in binary
    # mode as pickle.load requires bytes.
    with open('../params_file.pkl', 'rb') as fh:
        p = pkl.load(fh)
    N_syn = p['N_syn'][int(runID) - 1]
    f_mf = p['f_mf'][int(runID) - 1]
    run_num = p['run_num'][int(runID) - 1]

    #################################################################################
    # Get connectivity matrix between cells
    with open('../../network_structures/GCLconnectivity_' + str(N_syn) +
              '.pkl', 'rb') as fh:
        p = pkl.load(fh)
    conn_mat = p['conn_mat']
    N_mf, N_grc = conn_mat.shape
    assert (np.all(conn_mat.sum(
        axis=0) == N_syn)), 'Connectivity matrix is incorrect.'

    # Get MF activity pattern
    if correlationRadius == 0:  # Activate MFs randomly
        N_mf_ON = int(N_mf * f_mf)
        mf_indices_ON = random.sample(range(N_mf), N_mf_ON)
        mf_indices_ON.sort()
    elif correlationRadius > 0:  # Spatially correlated MFs
        f_mf_range = np.linspace(.05, .95, 19)
        f_mf_ix = np.where(f_mf_range == f_mf)[0][0]
        p = io.loadmat('../../input_statistics/mf_patterns_r' +
                       str(correlationRadius) + '.mat')
        R = p['Rs'][:, :, f_mf_ix]
        g = p['gs'][f_mf_ix]
        t = np.dot(R.transpose(), np.random.randn(N_mf))
        S = (t > -g * np.ones(N_mf))
        mf_indices_ON = np.where(S)[0]
        N_mf_ON = len(mf_indices_ON)
    else:
        # Previously a negative radius left mf_indices_ON undefined and
        # crashed later with a NameError; fail fast instead.
        raise ValueError('correlationRadius must be >= 0')

    # BUG FIX: this assignment was commented out, but N_mf_OFF is used below
    # (population size and OFF-MF loops), causing a NameError.
    N_mf_OFF = N_mf - N_mf_ON
    mf_indices_OFF = [x for x in range(N_mf) if x not in mf_indices_ON]
    mf_indices_OFF.sort()

    #################################################################################
    # load NeuroML components, LEMS components and LEMS componentTypes from external files
    # Spike generator (for Poisson MF spiking)
    spike_generator_file_name = "../../grc_lemsDefinitions/spikeGenerators.xml"
    spike_generator_doc = pynml.read_lems_file(spike_generator_file_name)

    # Integrate-and-fire GC model
    # if NADT = 1, loads model GC
    if NADT == 0:
        iaf_nml2_file_name = "../../grc_lemsDefinitions/IaF_GrC.nml"
    else:
        iaf_nml2_file_name = ("../../grc_lemsDefinitions/IaF_GrC_" +
                              '{:.2f}'.format(f_mf) + ".nml")
    iaF_GrC_doc = pynml.read_neuroml2_file(iaf_nml2_file_name)
    iaF_GrC = iaF_GrC_doc.iaf_ref_cells[0]

    # AMPAR and NMDAR mediated synapses
    ampa_syn_filename = ("../../grc_lemsDefinitions/RothmanMFToGrCAMPA_" +
                         str(N_syn) + ".xml")
    nmda_syn_filename = ("../../grc_lemsDefinitions/RothmanMFToGrCNMDA_" +
                         str(N_syn) + ".xml")
    rothmanMFToGrCAMPA_doc = pynml.read_lems_file(ampa_syn_filename)
    rothmanMFToGrCNMDA_doc = pynml.read_lems_file(nmda_syn_filename)

    # Define components from the componentTypes we just loaded
    # Refractory poisson input -- representing active MF
    spike_generator_ref_poisson_type = spike_generator_doc.component_types[
        'spikeGeneratorRefPoisson']
    lems_instances_doc = lems.Model()
    spike_generator_on = lems.Component("mossySpikerON",
                                        spike_generator_ref_poisson_type.name)
    spike_generator_on.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_on.set_parameter("averageRate", "%s Hz" % ONRate)
    lems_instances_doc.add(spike_generator_on)

    # Refractory poisson input -- representing silent MF
    spike_generator_off = lems.Component("mossySpikerOFF",
                                         spike_generator_ref_poisson_type.name)
    spike_generator_off.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_off.set_parameter("averageRate", "%s Hz" % OFFRate)
    lems_instances_doc.add(spike_generator_off)

    # Synapses
    rothmanMFToGrCAMPA = rothmanMFToGrCAMPA_doc.components[
        'RothmanMFToGrCAMPA'].id
    rothmanMFToGrCNMDA = rothmanMFToGrCNMDA_doc.components[
        'RothmanMFToGrCNMDA'].id

    # Create ON MF, OFF MF, and GC populations
    GrCPop = nml.Population(id="GrCPop", component=iaF_GrC.id, size=N_grc)
    mossySpikersPopON = nml.Population(id=spike_generator_on.id + "Pop",
                                       component=spike_generator_on.id,
                                       size=N_mf_ON)
    mossySpikersPopOFF = nml.Population(id=spike_generator_off.id + "Pop",
                                        component=spike_generator_off.id,
                                        size=N_mf_OFF)

    # Create network and add populations
    net = nml.Network(id="network")
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net.populations.append(GrCPop)
    net.populations.append(mossySpikersPopON)
    net.populations.append(mossySpikersPopOFF)

    # MF-GC connectivity
    # First connect ON MFs to GCs
    for mf_ix_ON in range(N_mf_ON):
        mf_ix = mf_indices_ON[mf_ix_ON]
        # Find which GCs are neighbors
        innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
        for grc_ix in innervated_grcs:
            # Add AMPAR and NMDAR mediated synapses
            for synapse in [rothmanMFToGrCAMPA, rothmanMFToGrCNMDA]:
                connection = nml.SynapticConnection(
                    from_='{}[{}]'.format(mossySpikersPopON.id, mf_ix_ON),
                    synapse=synapse,
                    to='GrCPop[{}]'.format(grc_ix))
                net.synaptic_connections.append(connection)

    # Now connect OFF MFs to GCs
    for mf_ix_OFF in range(N_mf_OFF):
        mf_ix = mf_indices_OFF[mf_ix_OFF]
        # Find which GCs are neighbors
        innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
        for grc_ix in innervated_grcs:
            # Add AMPAR and NMDAR mediated synapses
            for synapse in [rothmanMFToGrCAMPA, rothmanMFToGrCNMDA]:
                connection = nml.SynapticConnection(
                    from_='{}[{}]'.format(mossySpikersPopOFF.id, mf_ix_OFF),
                    synapse=synapse,
                    to='GrCPop[{}]'.format(grc_ix))
                net.synaptic_connections.append(connection)

    # Write network to file
    net_file_name = 'generated_network_' + runID + '.net.nml'
    pynml.write_neuroml2_file(net_doc, net_file_name)

    # Write LEMS instances to file
    lems_instances_file_name = 'instances_' + runID + '.xml'
    pynml.write_lems_file(lems_instances_doc, lems_instances_file_name,
                          validate=False)

    # Create a LEMSSimulation to manage creation of LEMS file
    ls = LEMSSimulation('sim_' + runID, duration, dt,
                        lems_seed=int(np.round(1000 * random.random())))

    # Point to network as target of simulation
    ls.assign_simulation_target(net.id)

    # Include generated/existing NeuroML2 files
    ls.include_neuroml2_file(iaf_nml2_file_name)
    ls.include_lems_file(spike_generator_file_name, include_included=False)
    ls.include_lems_file(lems_instances_file_name)
    ls.include_lems_file(ampa_syn_filename, include_included=False)
    ls.include_lems_file(nmda_syn_filename, include_included=False)
    ls.include_neuroml2_file(net_file_name)

    # Specify Displays and Output Files
    # Details for saving output files
    basedir = '../data_r' + str(
        correlationRadius) + '/' if NADT == 0 else '../data_r' + str(
            correlationRadius) + '_NADT/'
    end_filename = str(N_syn) + '_{:.2f}'.format(f_mf) + '_' + str(
        run_num)  # Add parameter values to spike time filename

    # Save MF spike times under basedir + MF_spikes_ + end_filename
    eof0 = 'MFspikes_file'
    ls.create_event_output_file(
        eof0, basedir + "MF_spikes_" + end_filename + ".dat")

    # ON MFs
    for i in range(mossySpikersPopON.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_ON[i], "%s[%i]" % (mossySpikersPopON.id, i),
            'spike')

    # OFF MFs
    for i in range(mossySpikersPopOFF.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_OFF[i], "%s[%i]" % (mossySpikersPopOFF.id, i),
            'spike')

    # Save GC spike times under basedir + GrC_spikes_ + end_filename
    eof1 = 'GrCspikes_file'
    ls.create_event_output_file(
        eof1, basedir + "GrC_spikes_" + end_filename + ".dat")

    for i in range(GrCPop.size):
        ls.add_selection_to_event_output_file(eof1, i,
                                              "%s[%i]" % (GrCPop.id, i),
                                              'spike')

    lems_file_name = ls.save_to_file()

    if run:
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               max_memory="8G",
                                               nogui=True,
                                               load_saved_data=False,
                                               plot=False)
        return results
def generate_grc_layer_network(
        p_mf_ON,
        duration,
        dt,
        minimumISI,  # ms
        ONRate,  # Hz
        OFFRate,  # Hz
        run=False):
    """Build (and optionally run) a NeuroML2/LEMS granule-cell-layer network.

    Mossy fibres (MFs) are split into an 'ON' fraction firing at ONRate and
    an 'OFF' remainder firing at OFFRate.  Each MF innervates the granule
    cells (GrCs) given by the pre-computed connectivity matrix loaded from
    'GCLconnectivity.pkl', through both AMPA and NMDA synapse components.

    :param p_mf_ON: fraction (0-1) of mossy fibres switched ON
    :param duration: simulation duration (ms)
    :param dt: simulation time step (ms)
    :param minimumISI: refractory period of the MF spike generators (ms)
    :param ONRate: average firing rate of ON mossy fibres (Hz)
    :param OFFRate: average firing rate of OFF mossy fibres (Hz)
    :param run: if True, execute the generated LEMS file with jNeuroML
    :return: results from pynml.run_lems_with_jneuroml when run=True,
             otherwise None
    """
    # Load connectivity matrix.
    # BUGFIX: open in binary mode and close deterministically — pickle
    # payloads are binary and text-mode reads fail under Python 3; the old
    # code also shadowed the builtin 'file' and never closed the handle.
    with open('GCLconnectivity.pkl', 'rb') as pkl_file:
        p = pkl.load(pkl_file)
    conn_mat = p['conn_mat']
    N_mf, N_grc = conn_mat.shape
    assert (np.all(conn_mat.sum(
        axis=0) == 4)), 'Connectivity matrix is incorrect.'

    # Load GrC and MF rosette positions
    grc_pos = p['grc_pos']
    glom_pos = p['glom_pos']

    # Choose which mossy fibers are on, which are off
    N_mf_ON = int(N_mf * p_mf_ON)
    mf_indices_ON = random.sample(range(N_mf), N_mf_ON)
    mf_indices_ON.sort()
    N_mf_OFF = N_mf - N_mf_ON
    mf_indices_OFF = [x for x in range(N_mf) if x not in mf_indices_ON]
    mf_indices_OFF.sort()

    # Load NeuroML components, LEMS components and componentTypes from
    # external files (spikeGeneratorRefPoisson is a standard nml type)
    iaF_GrC = nml.IafRefCell(id="iaF_GrC",
                             refract="2ms",
                             C="3.22pF",
                             thresh="-40mV",
                             reset="-63mV",
                             leak_conductance="1.498nS",
                             leak_reversal="-79.67mV")
    ampa_syn_filename = "RothmanMFToGrCAMPA.xml"
    nmda_syn_filename = "RothmanMFToGrCNMDA.xml"
    rothmanMFToGrCAMPA_doc = pynml.read_lems_file(ampa_syn_filename)
    rothmanMFToGrCNMDA_doc = pynml.read_lems_file(nmda_syn_filename)

    # Define the ON and OFF refractory-Poisson spike generator instances
    lems_instances_doc = lems.Model()
    spike_generator_ref_poisson_type_name = 'spikeGeneratorRefPoisson'
    spike_generator_on = lems.Component("mossySpikerON",
                                        spike_generator_ref_poisson_type_name)
    spike_generator_on.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_on.set_parameter("averageRate", "%s Hz" % ONRate)
    lems_instances_doc.add(spike_generator_on)
    spike_generator_off = lems.Component(
        "mossySpikerOFF", spike_generator_ref_poisson_type_name)
    spike_generator_off.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_off.set_parameter("averageRate", "%s Hz" % OFFRate)
    lems_instances_doc.add(spike_generator_off)
    rothmanMFToGrCAMPA = rothmanMFToGrCAMPA_doc.components[
        'RothmanMFToGrCAMPA'].id
    rothmanMFToGrCNMDA = rothmanMFToGrCNMDA_doc.components[
        'RothmanMFToGrCNMDA'].id

    # Create populations
    GrCPop = nml.Population(id=iaF_GrC.id + "Pop",
                            component=iaF_GrC.id,
                            type="populationList",
                            size=N_grc)
    GrCPop.properties.append(nml.Property(tag='color', value='0 0 0.8'))
    GrCPop.properties.append(nml.Property(tag='radius', value=2))
    mossySpikersPopON = nml.Population(id=spike_generator_on.id + "Pop",
                                       component=spike_generator_on.id,
                                       type="populationList",
                                       size=N_mf_ON)
    mossySpikersPopON.properties.append(
        nml.Property(tag='color', value='0.8 0 0'))
    mossySpikersPopON.properties.append(nml.Property(tag='radius', value=2))
    # BUGFIX: this population was created without type="populationList",
    # yet explicit Instances/Locations are appended to it below, which is
    # only valid for populationList populations.
    mossySpikersPopOFF = nml.Population(id=spike_generator_off.id + "Pop",
                                        component=spike_generator_off.id,
                                        type="populationList",
                                        size=N_mf_OFF)
    mossySpikersPopOFF.properties.append(
        nml.Property(tag='color', value='0 0.8 0'))
    mossySpikersPopOFF.properties.append(nml.Property(tag='radius', value=2))

    # Create network and add populations
    net = nml.Network(id="network")
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.iaf_ref_cells.append(iaF_GrC)
    net.populations.append(GrCPop)
    net.populations.append(mossySpikersPopON)
    net.populations.append(mossySpikersPopOFF)

    # Add locations for GCs
    for grc in range(N_grc):
        inst = nml.Instance(id=grc)
        GrCPop.instances.append(inst)
        inst.location = nml.Location(x=grc_pos[grc, 0],
                                     y=grc_pos[grc, 1],
                                     z=grc_pos[grc, 2])

    def _wire_mf_population(pop, spike_generator_id, mf_indices,
                            proj_ampa, proj_nmda):
        """Place each MF rosette of pop and connect it to its GrC targets.

        One AMPA connection and one NMDA connection (distinct Connection
        objects, matching ids within each projection) are created per
        innervated granule cell.
        """
        ix = 0
        for local_ix, mf_ix in enumerate(mf_indices):
            inst = nml.Instance(id=local_ix)
            pop.instances.append(inst)
            inst.location = nml.Location(x=glom_pos[mf_ix, 0],
                                         y=glom_pos[mf_ix, 1],
                                         z=glom_pos[mf_ix, 2])
            # Find which granule cells are neighbors
            innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
            for grc_ix in innervated_grcs:
                pre = '../{}/{}/{}'.format(pop.id, local_ix,
                                           spike_generator_id)
                post = '../{}/{}/{}'.format(GrCPop.id, grc_ix, iaF_GrC.id)
                # BUGFIX: the old code looped over the two synapse ids
                # without using them and appended the SAME Connection to
                # BOTH projections each time, duplicating every connection.
                # Intended behaviour (cf. the SynapticConnection variant of
                # this builder): one connection per projection per pair.
                proj_ampa.connections.append(
                    nml.Connection(id=ix, pre_cell_id=pre,
                                   post_cell_id=post))
                proj_nmda.connections.append(
                    nml.Connection(id=ix, pre_cell_id=pre,
                                   post_cell_id=post))
                ix += 1

    # ON MFs: locations and connectivity
    ONprojectionAMPA = nml.Projection(
        id="ONProjAMPA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    ONprojectionNMDA = nml.Projection(
        id="ONProjNMDA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(ONprojectionAMPA)
    net.projections.append(ONprojectionNMDA)
    _wire_mf_population(mossySpikersPopON, spike_generator_on.id,
                        mf_indices_ON, ONprojectionAMPA, ONprojectionNMDA)

    # OFF MFs: locations and connectivity
    OFFprojectionAMPA = nml.Projection(
        id="OFFProjAMPA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    OFFprojectionNMDA = nml.Projection(
        id="OFFProjNMDA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(OFFprojectionAMPA)
    net.projections.append(OFFprojectionNMDA)
    # BUGFIX: OFF pre_cell_id paths previously referenced
    # spike_generator_on.id; they must point at the OFF generator.
    _wire_mf_population(mossySpikersPopOFF, spike_generator_off.id,
                        mf_indices_OFF, OFFprojectionAMPA, OFFprojectionNMDA)

    # Write network to file
    net_file_name = 'OSBnet.nml'
    pynml.write_neuroml2_file(net_doc, net_file_name)

    # Write LEMS instances to file
    lems_instances_file_name = 'instances.xml'
    pynml.write_lems_file(lems_instances_doc,
                          lems_instances_file_name,
                          validate=False)

    # Create a LEMSSimulation to manage creation of LEMS file
    ls = LEMSSimulation(
        'sim', duration, dt,
        simulation_seed=123)  # int(np.round(1000*random.random()))

    # Point to network as target of simulation
    ls.assign_simulation_target(net.id)

    # Include generated/existing NeuroML2 files
    ls.include_lems_file(lems_instances_file_name)
    ls.include_lems_file(ampa_syn_filename, include_included=False)
    ls.include_lems_file(nmda_syn_filename, include_included=False)
    ls.include_neuroml2_file(net_file_name)

    # Specify Displays and Output Files
    basedir = ''

    # MF spike times: one selection per MF, keyed by its global MF index
    eof0 = 'Volts_file'
    ls.create_event_output_file(eof0, basedir + "MF_spikes.dat")
    for i in range(mossySpikersPopON.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_ON[i],
            '{}/{}/{}'.format(mossySpikersPopON.id, i, spike_generator_on.id),
            'spike')
    for i in range(mossySpikersPopOFF.size):
        # BUGFIX: OFF selections previously referenced the ON generator
        # component (spike_generator_on.id) in their paths.
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_OFF[i],
            '{}/{}/{}'.format(mossySpikersPopOFF.id, i,
                              spike_generator_off.id),
            'spike')

    # GrC spike times
    eof1 = 'GrCspike_file'
    ls.create_event_output_file(eof1, basedir + "GrC_spikes.dat")
    for i in range(GrCPop.size):
        ls.add_selection_to_event_output_file(
            eof1, i, '{}/{}/{}'.format(GrCPop.id, i, iaF_GrC.id), 'spike')

    lems_file_name = ls.save_to_file()

    # ROBUSTNESS: initialise results so the function returns None rather
    # than risking an unbound local when run=False.
    results = None
    if run:
        print('Running the generated LEMS file: %s for simulation of %sms' %
              (lems_file_name, duration))
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               max_memory="8G",
                                               nogui=True,
                                               load_saved_data=False,
                                               plot=False)
    return results
def generate_lems_file_for_neuroml(sim_id,
                                   neuroml_file,
                                   target,
                                   duration,
                                   dt,
                                   lems_file_name,
                                   target_dir,
                                   nml_doc=None,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
                                   include_extra_files=None,
                                   gen_plots_for_all_v=True,
                                   plot_all_segments=False,
                                   gen_plots_for_quantities=None,  # Dict with displays vs lists of quantity paths
                                   gen_plots_for_only_populations=None,  # List of populations, all pops if empty
                                   gen_saves_for_all_v=True,
                                   save_all_segments=False,
                                   gen_saves_for_only_populations=None,  # List of populations, all pops if empty
                                   gen_saves_for_quantities=None,  # Dict with file names vs lists of quantity paths
                                   gen_spike_saves_for_all_somas=False,
                                   gen_spike_saves_for_only_populations=None,  # List of populations, all pops if empty
                                   gen_spike_saves_for_cells=None,  # Dict with file names vs lists of quantity paths
                                   spike_time_format='ID_TIME',
                                   copy_neuroml=True,
                                   report_file_name=None,
                                   lems_file_generate_seed=None,
                                   verbose=False,
                                   simulation_seed=12345):
    """Generate a LEMS simulation file for a NeuroML 2 model.

    Reads the NeuroML 2 model (copying it and its includes into target_dir
    unless copy_neuroml=False), then builds a LEMSSimulation that plots
    and/or saves membrane potentials and spike times for the requested
    populations and quantities.

    :param sim_id: id for the LEMS Simulation element
    :param neuroml_file: path to the NeuroML 2 file containing the network
    :param target: id of the network to simulate
    :param duration: simulation duration (ms)
    :param dt: simulation time step (ms)
    :param lems_file_name: name of the LEMS file to create inside target_dir
    :param target_dir: directory to write the LEMS file (must exist)
    :return: tuple (quantities_saved, ls) — the list of saved quantity
             paths and the LEMSSimulation object
    :raises Exception: if target_dir does not exist (when copy_neuroml)
    """
    # BUGFIX: the list/dict arguments previously used shared mutable
    # defaults ([] / {}); use None sentinels and build fresh objects.
    if include_extra_files is None:
        include_extra_files = []
    if gen_plots_for_quantities is None:
        gen_plots_for_quantities = {}
    if gen_plots_for_only_populations is None:
        gen_plots_for_only_populations = []
    if gen_saves_for_only_populations is None:
        gen_saves_for_only_populations = []
    if gen_saves_for_quantities is None:
        gen_saves_for_quantities = {}
    if gen_spike_saves_for_only_populations is None:
        gen_spike_saves_for_only_populations = []
    if gen_spike_saves_for_cells is None:
        gen_spike_saves_for_cells = {}

    my_random = random.Random()
    if lems_file_generate_seed:
        # Seed so the same LEMS file (e.g. colours of plots) is generated
        # every time for the same input
        my_random.seed(lems_file_generate_seed)
    else:
        my_random.seed(12345)

    file_name_full = '%s/%s' % (target_dir, lems_file_name)

    print_comment_v(
        'Creating LEMS file at: %s for NeuroML 2 file: %s (copy: %s)' %
        (file_name_full, neuroml_file, copy_neuroml))

    ls = LEMSSimulation(sim_id, duration, dt, target,
                        simulation_seed=simulation_seed)

    if nml_doc is None:
        # Load the full model (with includes) for introspection, plus a
        # shallow copy (without includes) so we know which files to copy
        nml_doc = read_neuroml2_file(neuroml_file,
                                     include_includes=True,
                                     verbose=verbose)
        nml_doc_inc_not_included = read_neuroml2_file(neuroml_file,
                                                      include_includes=False,
                                                      verbose=False)
    else:
        nml_doc_inc_not_included = nml_doc

    ls.set_report_file(report_file_name)

    quantities_saved = []

    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)

    if not copy_neuroml:
        # Reference the model in place, relative to the target directory
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file),
                                       os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s" %
                        (neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file,
                                 include_included=True,
                                 relative_to_dir=os.path.abspath(target_dir))
    else:
        print_comment_v("Copying a NeuroML file (%s) to: %s (abs path: %s)" %
                        (neuroml_file, target_dir,
                         os.path.abspath(target_dir)))

        if not os.path.isdir(target_dir):
            raise Exception("Target directory %s does not exist!" %
                            target_dir)

        # Copy the main model file, unless it is already in target_dir
        if os.path.realpath(
                os.path.dirname(neuroml_file)) != os.path.realpath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        else:
            print_comment_v("No need, same file...")

        neuroml_file_name = os.path.basename(neuroml_file)

        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        nml_dir = os.path.dirname(neuroml_file) if len(
            os.path.dirname(neuroml_file)) > 0 else '.'

        # Copy each file the model includes (and their own includes)
        for include in nml_doc_inc_not_included.includes:

            if nml_dir == '.' and os.path.isfile(include.href):
                incl_curr = include.href
            else:
                incl_curr = '%s/%s' % (nml_dir, include.href)

            # Prefer the href as-is when it resolves from the cwd
            if os.path.isfile(include.href):
                incl_curr = include.href

            print_comment_v(
                ' - Including %s (located at %s; nml dir: %s), copying to %s' %
                (include.href, incl_curr, nml_dir, target_dir))

            # Copy only if not already present under either candidate path
            f1 = "%s/%s" % (target_dir, os.path.basename(incl_curr))
            f2 = "%s/%s" % (target_dir, incl_curr)
            if os.path.isfile(f1):
                print_comment_v("No need to copy, file exists: %s..." % f1)
            elif os.path.isfile(f2):
                print_comment_v("No need to copy, file exists: %s..." % f2)
            else:
                shutil.copy(incl_curr, target_dir)

            ls.include_neuroml2_file(include.href, include_included=False)
            sub_doc = read_neuroml2_file(incl_curr)
            sub_dir = os.path.dirname(incl_curr) if len(
                os.path.dirname(incl_curr)) > 0 else '.'

            # Second-level includes (renamed from 'include' to avoid
            # shadowing the outer loop variable)
            for sub_include in sub_doc.includes:
                incl_curr = '%s/%s' % (sub_dir, sub_include.href)
                print_comment_v(' -- Including %s located at %s' %
                                (sub_include.href, incl_curr))

                if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
                   not os.path.isfile("%s/%s" % (target_dir, incl_curr)):

                    shutil.copy(incl_curr, target_dir)
                    ls.include_neuroml2_file(sub_include.href,
                                             include_included=False)

    if gen_plots_for_all_v \
       or gen_saves_for_all_v \
       or len(gen_plots_for_only_populations) > 0 \
       or len(gen_saves_for_only_populations) > 0 \
       or gen_spike_saves_for_all_somas \
       or len(gen_spike_saves_for_only_populations) > 0:

        for network in nml_doc.networks:
            for population in network.populations:

                variable = "v"
                # BUGFIX: this template was commented out, leaving
                # quantity_template_e undefined for non-populationList
                # populations in the spike-save branch below (NameError)
                quantity_template_e = "%s[%i]"

                component = population.component
                size = population.size
                cell = None
                segment_ids = []

                # Spike sources expose 'tsince' rather than 'v'
                for c in nml_doc.spike_generator_poissons:
                    if c.id == component:
                        variable = "tsince"
                for c in nml_doc.SpikeSourcePoisson:
                    if c.id == component:
                        variable = "tsince"

                quantity_template = "%s[%i]/" + variable

                if plot_all_segments or gen_spike_saves_for_all_somas:
                    # Collect segment ids for multicompartmental cells
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()

                if population.type and population.type == 'populationList':
                    # populationList pops use path-style quantity refs
                    quantity_template = "%s/%i/" + component + "/" + variable
                    quantity_template_e = "%s/%i/" + component + ""
                    # Multicompartmental cell
                    # Needs to be supported in NeuronWriter:
                    # if len(segment_ids)>1:
                    #     quantity_template_e = "%s/%i/"+component+"/0"
                    size = len(population.instances)

                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment(
                        'Generating %i plots for %s in population %s' %
                        (size, component, population.id))

                    disp0 = 'DispPop__%s' % population.id
                    ls.create_display(
                        disp0,
                        "Membrane potentials of cells in %s" % population.id,
                        "-90", "50")

                    for i in range(size):
                        if cell is not None and plot_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_line_to_display(
                                    disp0,
                                    "%s[%i] seg %i: v" % (population.id, i,
                                                          segment_id),
                                    quantity, "1mV",
                                    get_next_hex_color(my_random))
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_line_to_display(
                                disp0, "%s[%i]: v" % (population.id, i),
                                quantity, "1mV",
                                get_next_hex_color(my_random))

                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment(
                        'Saving %i values of %s for %s in population %s' %
                        (size, variable, component, population.id))

                    of0 = 'Volts_file__%s' % population.id
                    ls.create_output_file(
                        of0, "%s.%s.%s.dat" % (sim_id, population.id,
                                               variable))
                    for i in range(size):
                        if cell is not None and save_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_column_to_output_file(
                                    of0, 'v_%s' % safe_variable(quantity),
                                    quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_column_to_output_file(
                                of0, 'v_%s' % safe_variable(quantity),
                                quantity)
                            quantities_saved.append(quantity)

                if gen_spike_saves_for_all_somas or population.id in gen_spike_saves_for_only_populations:
                    print_comment(
                        'Saving spikes in %i somas for %s in population %s' %
                        (size, component, population.id))

                    eof0 = 'Spikes_file__%s' % population.id
                    ls.create_event_output_file(
                        eof0, "%s.%s.spikes" % (sim_id, population.id),
                        format=spike_time_format)
                    for i in range(size):
                        quantity = quantity_template_e % (population.id, i)
                        ls.add_selection_to_event_output_file(
                            eof0, i, quantity, "spike")
                        quantities_saved.append(quantity)

    # Explicitly requested plots of arbitrary quantities
    for display in gen_plots_for_quantities.keys():
        quantities = gen_plots_for_quantities[display]
        max_ = "1"
        min_ = "-1"
        scale = "1"
        # Use voltage-friendly axis limits when plotting membrane potential
        if quantities and len(quantities) > 0 and quantities[0].endswith('/v'):
            max_ = "40"
            min_ = "-80"
            scale = "1mV"
        ls.create_display(display, "Plots of %s" % display, min_, max_)
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, scale,
                                   get_next_hex_color(my_random))

    # Explicitly requested saves of arbitrary quantities
    for file_name in gen_saves_for_quantities.keys():
        quantities = gen_saves_for_quantities[file_name]
        of_id = safe_variable(file_name)
        ls.create_output_file(of_id, file_name)
        for q in quantities:
            ls.add_column_to_output_file(of_id, safe_variable(q), q)
            quantities_saved.append(q)

    # Explicitly requested spike saves for specific cells
    for file_name in gen_spike_saves_for_cells.keys():
        cells = gen_spike_saves_for_cells[file_name]
        of_id = safe_variable(file_name)
        ls.create_event_output_file(of_id, file_name)
        for i, c in enumerate(cells):
            ls.add_selection_to_event_output_file(of_id, i, c, "spike")
            quantities_saved.append(c)

    ls.save_to_file(file_name=file_name_full)
    return quantities_saved, ls