# NOTE(review): this excerpt begins mid-expression -- the keyword arguments
# below belong to a connection constructor (pre/post cell paths, segment ids,
# random fractions along the segments) whose opening call and enclosing loop
# are before this view; confirm against the full file.
pre_cell="../%s/%i/%s"%(from_pop,0,pop.component),
pre_segment=pre_seg_id,
pre_fraction_along=random.random(),
post_cell="../%s[%i]"%(to_pop,0),
post_segment=post_seg_id,
post_fraction_along=random.random(),
pre_component=sil_syn.id,
post_component=grad_syn.id,
weight=5)
# Register the connection on the continuous projection built above.
continuous_projection_iw.continuous_connection_instance_ws.append(
    continuous_connection_iw)

# Serialise the document as plain NeuroML (XML) and report a summary.
nml_file = 'test_files/complete.nml'
writers.NeuroMLWriter.write(nml_doc, nml_file)
summary0 = nml_doc.summary()
print("Created:\n" + summary0)
print("Written network file to: " + nml_file)

###### Validate the NeuroML ######
from neuroml.utils import validate_neuroml2
validate_neuroml2(nml_file)

# Also write the HDF5 serialisation of the same document.
nml_h5_file = 'test_files/complete.nml.h5'
writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file)
print("Written H5 network file to: " + nml_h5_file)
def _generate_neuron_files_from_neuroml(network, verbose=False, dir_for_mod_files=None):
    """
    Generate NEURON hoc/mod files from the NeuroML files which are marked as
    included in the NeuroMLlite description; also compiles the mod files.

    :param network: NeuroMLlite network description; its ``cells``,
        ``synapses`` and ``input_sources`` may each reference a
        ``neuroml2_source_file``
    :param verbose: unused in this function; kept for interface compatibility
    :param dir_for_mod_files: default directory for generated mod files;
        ``None`` means only the directories of the NeuroML2 source files are
        used
    """
    print_v(
        "------------- Generating NEURON files from NeuroML for %s (default dir: %s)..."
        % (network.id, dir_for_mod_files))

    import neuroml.writers as writers

    nml_src_files = []
    dirs_for_mod_files = []
    if dir_for_mod_files is not None:
        dirs_for_mod_files.append(os.path.abspath(dir_for_mod_files))

    # Collect NeuroML2 source files -- and the directories holding them, for
    # mod compilation -- from cells, synapses and input sources alike.
    # (Replaces three copy-pasted loops in the original.)
    for elements in (network.cells, network.synapses, network.input_sources):
        for element in elements:
            if element.neuroml2_source_file:
                nml_src_files.append(element.neuroml2_source_file)
                dir_for_mod = os.path.dirname(
                    os.path.abspath(element.neuroml2_source_file))
                if dir_for_mod not in dirs_for_mod_files:
                    dirs_for_mod_files.append(dir_for_mod)

    # PyNN-style components (IF_*) have no NeuroML2 source file of their own;
    # export them to a temporary NML file so NEURON code is generated for
    # them too.
    temp_nml_doc = _extract_pynn_components_to_neuroml(network)
    summary = temp_nml_doc.summary()
    if 'IF_' in summary:
        import tempfile
        temp_nml_file = tempfile.NamedTemporaryFile(
            delete=False, suffix='.nml', dir=dir_for_mod_files)
        print_v("Writing temporary NML file to: %s, summary: " % temp_nml_file.name)
        print_v(summary)
        writers.NeuroMLWriter.write(temp_nml_doc, temp_nml_file.name)
        nml_src_files.append(temp_nml_file.name)

    # Generate hoc/mod scripts for every source file and compile the mods.
    from pyneuroml import pynml  # hoisted: loop-invariant import
    for f in nml_src_files:
        print_v("Generating/compiling hoc/mod files for: %s" % f)
        pynml.run_lems_with_jneuroml_neuron(f,
                                            nogui=True,
                                            only_generate_scripts=True,
                                            compile_mods=True,
                                            verbose=False)

    # Load mechanisms from each directory exactly once per process;
    # locations_mods_loaded_from is the module-level cache of loaded dirs.
    # (Loop variable renamed: the original shadowed the dir_for_mod_files
    # parameter here.)
    for mod_dir in dirs_for_mod_files:
        if mod_dir not in locations_mods_loaded_from:
            print_v(
                "Generated NEURON code; loading mechanisms from %s (cwd: %s; already loaded: %s)"
                % (mod_dir, os.getcwd(), locations_mods_loaded_from))
            try:
                from neuron import load_mechanisms
                if os.getcwd() == mod_dir:
                    print_v(
                        "That's current dir => importing neuron module loads mods here..."
                    )
                else:
                    load_mechanisms(mod_dir)
                locations_mods_loaded_from.append(mod_dir)
            # Was a bare 'except:': keep best-effort behaviour but stop
            # swallowing SystemExit/KeyboardInterrupt.
            except Exception:
                print_v("Failed to load mod file mechanisms...")
        else:
            print_v("Already loaded mechanisms from %s (all loaded: %s)"
                    % (mod_dir, locations_mods_loaded_from))
# NOTE(review): this excerpt appears to sit inside a loop over connection
# indices -- pre_index, from_pop, pop and input_list come from an enclosing
# scope not visible here; confirm against the full file.
# 'input' shadows the builtin of the same name; harmless here, worth renaming.
input = Input(id=pre_index,
              target="../%s/%i/%s" % (from_pop, pre_index, pop.component),
              destination="synapses")
input_list.input.append(input)

# Write the network in both the XML and the HDF5 serialisations.
nml_file = 'tmp/testh5.nml'
writers.NeuroMLWriter.write(nml_doc, nml_file)
print("Written network file to: " + nml_file)

nml_h5_file = 'tmp/testh5.nml.h5'
writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file)
print("Written H5 network file to: " + nml_h5_file)

# Round-trip check: reloading the H5 file must give an identical summary.
sum2 = nml_doc.summary()
from neuroml.loaders import NeuroMLHdf5Loader
nml_doc2 = NeuroMLHdf5Loader.load(nml_h5_file)
sum1 = nml_doc2.summary()
assert (sum1 == sum2)

###### Validate the NeuroML ######
from neuroml.utils import validate_neuroml2
validate_neuroml2(nml_file)
# Serialise the document twice: plain NeuroML (XML) first, then HDF5.
nml_file = 'tmp/testh5.nml'
nml_h5_file = 'tmp/testh5.nml.h5'

writers.NeuroMLWriter.write(nml_doc, nml_file)
print("Written network file to: " + nml_file)

writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file)
print("Written H5 network file to: " + nml_h5_file)

# Round-trip check: reloading the H5 file must reproduce the same summary.
from neuroml.loaders import NeuroMLHdf5Loader
nml_doc2 = NeuroMLHdf5Loader.load(nml_h5_file)
sum1 = nml_doc2.summary()
sum2 = nml_doc.summary()
assert sum1 == sum2

###### Validate the NeuroML ######
from neuroml.utils import validate_neuroml2
validate_neuroml2(nml_file)
# NOTE(review): fragment of a cell/network-building script -- cell1, net,
# nml_doc and the helpers (create_object, add_segment, add_instance) are
# defined earlier in the file; confirm there.
cell2 = create_object('TwoSeg', '1 .4 0', x=0, y=100, z=0)

# Segments as (x, y, z, diameter) proximal/distal tuples: one soma segment on
# cell1, two on cell2.
add_segment(cell1, (0, 0, 0, 10), (20, 0, 0, 10), name='soma')
add_segment(cell2, (0, 0, 0, 10), (20, 0, 0, 10), name='soma')
add_segment(cell2, (20, 0, 0, 5), (40, 0, 0, 5), name='soma')

# Disabled (kept by the author as a no-op string): a 10-segment dendrite.
'''
dend_seg_num = 10
seg_length = 10
seg_diam = 2
for i in range(dend_seg_num):
    add_segment(cell1,(0,seg_length*i,0,seg_diam),(0,seg_length*(i+1),0,seg_diam),name='dend_%s'%i)
'''

add_instance(cell1.id, 100, 0, 0)
# Disabled (kept by the author as a no-op string): extra cell1 instances.
'''
add_instance(cell1.id, -10,100,0)
add_instance(cell1.id, 10,0,100)'''

# Write the network file and report a summary.
nml_file = '%s.net.nml' % net.id
writers.NeuroMLWriter.write(nml_doc, nml_file)
print("Created:\n" + nml_doc.summary())
print("Written network file to: " + nml_file)

###### Validate the NeuroML ######
from neuroml.utils import validate_neuroml2
validate_neuroml2(nml_file)
def tune_izh_model(acq_list: List, metrics_from_data: Dict, currents: Dict) -> Dict:
    """Tune the network model against the data.

    Here we generate a network with the necessary number of Izhikevich cells,
    one for each current stimulus, and tune them against the experimental
    data.

    :param acq_list: list of indices of acquisitions/sweeps to tune against
    :type acq_list: list
    :param metrics_from_data: dictionary with the sweep number as index, and
        the dictionary containing metrics generated from the analysis
    :type metrics_from_data: dict
    :param currents: dictionary with sweep number as index and stimulus
        current value
    :type currents: dict
    :returns: report dictionary produced by :func:`run_optimisation`
    """
    # length of simulation of the cells---should match the length of the
    # experiment
    sim_time = 1500.0

    # Create a NeuroML template network simulation file that we will use for
    # the tuning
    template_doc = NeuroMLDocument(id="IzhTuneNet")
    # Add an Izhikevich cell with some (initial guess) parameters to the
    # document; the tuner will vary a subset of these.
    template_doc.izhikevich2007_cells.append(
        Izhikevich2007Cell(
            id="Izh2007",
            C="100pF",
            v0="-60mV",
            k="0.7nS_per_mV",
            vr="-60mV",
            vt="-40mV",
            vpeak="35mV",
            a="0.03per_ms",
            b="-2nS",
            c="-50.0mV",
            d="100pA",
        ))
    template_doc.networks.append(Network(id="Network0"))

    # Add a cell for each entry of the acquisition list
    popsize = len(acq_list)
    template_doc.networks[0].populations.append(
        Population(id="Pop0", component="Izh2007", size=popsize))

    # Add a current source for each cell, matching the currents that
    # were used in the experimental study.
    counter = 0
    for acq in acq_list:
        template_doc.pulse_generators.append(
            PulseGenerator(
                id="Stim{}".format(counter),
                delay="80ms",
                duration="1000ms",
                amplitude="{}pA".format(currents[acq]),
            ))
        template_doc.networks[0].explicit_inputs.append(
            ExplicitInput(target="Pop0[{}]".format(counter),
                          input="Stim{}".format(counter)))
        counter = counter + 1

    # Print a summary
    print(template_doc.summary())

    # Write to a neuroml file and validate it.
    reference = "TuneIzhFergusonPyr3"
    template_filename = "{}.net.nml".format(reference)
    write_neuroml2_file(template_doc, template_filename, validate=True)

    # Now for the tuning bits

    # format is type:id/variable:id/units
    # supported types: cell/channel/izhikevich2007cell
    # supported variables:
    #  - channel: vShift
    #  - cell: channelDensity, vShift_channelDensity, channelDensityNernst,
    #    erev_id, erev_ion, specificCapacitance, resistivity
    #  - izhikevich2007Cell: all available attributes

    # we want to tune these parameters within these ranges
    # param: (min, max)
    parameters = {
        "izhikevich2007Cell:Izh2007/C/pF": (100, 300),
        "izhikevich2007Cell:Izh2007/k/nS_per_mV": (0.01, 2),
        "izhikevich2007Cell:Izh2007/vr/mV": (-70, -50),
        "izhikevich2007Cell:Izh2007/vt/mV": (-60, 0),
        "izhikevich2007Cell:Izh2007/vpeak/mV": (35, 70),
        "izhikevich2007Cell:Izh2007/a/per_ms": (0.001, 0.4),
        "izhikevich2007Cell:Izh2007/b/nS": (-10, 10),
        "izhikevich2007Cell:Izh2007/c/mV": (-65, -10),
        "izhikevich2007Cell:Izh2007/d/pA": (50, 500),
    }  # type: Dict[str, Tuple[float, float]]

    # Set up our target data and so on
    ctr = 0
    target_data = {}
    weights = {}
    for acq in acq_list:
        # data to fit to:
        # format: path/to/variable:metric
        # metric from pyelectro, for example:
        # https://pyelectro.readthedocs.io/en/latest/pyelectro.html?highlight=mean_spike_frequency#pyelectro.analysis.mean_spike_frequency
        mean_spike_frequency = "Pop0[{}]/v:mean_spike_frequency".format(ctr)
        average_last_1percent = "Pop0[{}]/v:average_last_1percent".format(ctr)
        first_spike_time = "Pop0[{}]/v:first_spike_time".format(ctr)

        # each metric can have an associated weight
        weights[mean_spike_frequency] = 1
        weights[average_last_1percent] = 1
        weights[first_spike_time] = 1

        # value of the target data from our data set
        target_data[mean_spike_frequency] = metrics_from_data[acq][
            "{}:mean_spike_frequency".format(acq)]
        target_data[average_last_1percent] = metrics_from_data[acq][
            "{}:average_last_1percent".format(acq)]
        target_data[first_spike_time] = metrics_from_data[acq][
            "{}:first_spike_time".format(acq)]

        # only add these if the experimental data includes them
        # these are only generated for traces with spikes
        if "{}:average_maximum".format(acq) in metrics_from_data[acq]:
            average_maximum = "Pop0[{}]/v:average_maximum".format(ctr)
            weights[average_maximum] = 1
            target_data[average_maximum] = metrics_from_data[acq][
                "{}:average_maximum".format(acq)]
        if "{}:average_minimum".format(acq) in metrics_from_data[acq]:
            average_minimum = "Pop0[{}]/v:average_minimum".format(ctr)
            weights[average_minimum] = 1
            target_data[average_minimum] = metrics_from_data[acq][
                "{}:average_minimum".format(acq)]

        ctr = ctr + 1

    # simulator to use
    simulator = "jNeuroML"

    # NOTE: min/max constraint lists are derived from the same dict as
    # parameters, so their order matches parameters.keys() by construction.
    return run_optimisation(
        # Prefix for new files
        prefix="TuneIzh",
        # Name of the NeuroML template file
        neuroml_file=template_filename,
        # Name of the network
        target="Network0",
        # Parameters to be fitted
        parameters=list(parameters.keys()),
        # Our max and min constraints
        min_constraints=[v[0] for v in parameters.values()],
        max_constraints=[v[1] for v in parameters.values()],
        # Weights for the individual target metrics
        weights=weights,
        # The experimental metrics to fit to
        target_data=target_data,
        # Simulation time
        sim_time=sim_time,
        # EC parameters
        population_size=100,
        max_evaluations=500,
        num_selected=30,
        num_offspring=50,
        mutation_rate=0.9,
        num_elites=3,
        # Seed value
        seed=12345,
        # Simulator
        simulator=simulator,
        dt=0.025,
        show_plot_already='-nogui' not in sys.argv,
        save_to_file="fitted_izhikevich_fitness.png",
        save_to_file_scatter="fitted_izhikevich_scatter.png",
        save_to_file_hist="fitted_izhikevich_hist.png",
        save_to_file_output="fitted_izhikevich_output.png",
        num_parallel_evaluations=4,
    )
def run_fitted_cell_simulation(sweeps_to_tune_against: List, tuning_report: Dict, simulation_id: str) -> None:
    """Run a simulation with the values obtained from the fitting.

    :param sweeps_to_tune_against: sweeps the model was tuned against; one
        stimulus/recording column is created per entry
    :type sweeps_to_tune_against: List
    :param tuning_report: tuning report from the optimiser
    :type tuning_report: Dict
    :param simulation_id: text id of simulation
    :type simulation_id: str
    """
    # get the fittest variables (keys follow the type:id/variable/units
    # format used when declaring the tuned parameters)
    fittest_vars = tuning_report["fittest vars"]
    C = str(fittest_vars["izhikevich2007Cell:Izh2007/C/pF"]) + "pF"
    k = str(
        fittest_vars["izhikevich2007Cell:Izh2007/k/nS_per_mV"]) + "nS_per_mV"
    vr = str(fittest_vars["izhikevich2007Cell:Izh2007/vr/mV"]) + "mV"
    vt = str(fittest_vars["izhikevich2007Cell:Izh2007/vt/mV"]) + "mV"
    vpeak = str(fittest_vars["izhikevich2007Cell:Izh2007/vpeak/mV"]) + "mV"
    a = str(fittest_vars["izhikevich2007Cell:Izh2007/a/per_ms"]) + "per_ms"
    b = str(fittest_vars["izhikevich2007Cell:Izh2007/b/nS"]) + "nS"
    c = str(fittest_vars["izhikevich2007Cell:Izh2007/c/mV"]) + "mV"
    d = str(fittest_vars["izhikevich2007Cell:Izh2007/d/pA"]) + "pA"

    # Create a simulation using our obtained parameters.
    # Note that the tuner generates a graph with the fitted values already, but
    # we want to keep a copy of our fitted cell also, so we'll create a NeuroML
    # Document ourselves also.
    sim_time = 1500.0
    simulation_doc = NeuroMLDocument(id="FittedNet")
    # Add an Izhikevich cell with the fitted parameters to the document
    simulation_doc.izhikevich2007_cells.append(
        Izhikevich2007Cell(
            id="Izh2007",
            C=C,
            v0="-60mV",
            k=k,
            vr=vr,
            vt=vt,
            vpeak=vpeak,
            a=a,
            b=b,
            c=c,
            d=d,
        ))
    simulation_doc.networks.append(Network(id="Network0"))

    # Add a cell for each acquisition list
    popsize = len(sweeps_to_tune_against)
    simulation_doc.networks[0].populations.append(
        Population(id="Pop0", component="Izh2007", size=popsize))

    # Add a current source for each cell, matching the currents that
    # were used in the experimental study.
    # NOTE(review): 'currents' is not a parameter of this function -- it
    # presumably resolves to a module-level dict (sweep number -> pA value);
    # confirm in the full file.
    counter = 0
    for acq in sweeps_to_tune_against:
        simulation_doc.pulse_generators.append(
            PulseGenerator(
                id="Stim{}".format(counter),
                delay="80ms",
                duration="1000ms",
                amplitude="{}pA".format(currents[acq]),
            ))
        simulation_doc.networks[0].explicit_inputs.append(
            ExplicitInput(target="Pop0[{}]".format(counter),
                          input="Stim{}".format(counter)))
        counter = counter + 1

    # Print a summary
    print(simulation_doc.summary())

    # Write to a neuroml file and validate it.
    reference = "FittedIzhFergusonPyr3"
    simulation_filename = "{}.net.nml".format(reference)
    write_neuroml2_file(simulation_doc, simulation_filename, validate=True)

    # Build a LEMS simulation driving the network and recording each cell's v.
    simulation = LEMSSimulation(
        sim_id=simulation_id,
        duration=sim_time,
        dt=0.1,
        target="Network0",
        simulation_seed=54321,
    )
    simulation.include_neuroml2_file(simulation_filename)
    simulation.create_output_file("output0", "{}.v.dat".format(simulation_id))
    counter = 0
    for acq in sweeps_to_tune_against:
        simulation.add_column_to_output_file("output0",
                                             "Pop0[{}]".format(counter),
                                             "Pop0[{}]/v".format(counter))
        counter = counter + 1
    simulation_file = simulation.save_to_file()
    # simulate
    run_lems_with_jneuroml(simulation_file,
                           max_memory="2G",
                           nogui=True,
                           plot=False)