def run_one_optimisation(ref, seed, population_size, max_evaluations,
                         num_selected, num_offspring, mutation_rate,
                         num_elites, simulator, nogui):
    """Run a single optimisation of the RS network model.

    Thin wrapper around ``run_optimisation`` for the
    ``models/RS/SSTest.net.nml`` network. Relies on the module-level
    ``parameters``, ``max_constraints``, ``min_constraints``, ``weights``,
    ``target_data`` and ``sim_time`` values — presumably defined earlier
    in this file (TODO confirm).

    :param ref: prefix used to name the output files of this run
    :param seed: random seed for the evolutionary algorithm
    :param simulator: simulator backend name passed through to
        ``run_optimisation`` (e.g. 'jNeuroML_NEURON')
    :param nogui: suppress GUI/plot output when true
    :returns: whatever ``run_optimisation`` returns (the fitting report)
    """
    # Fix: propagate the optimisation report to the caller. The original
    # discarded it, unlike the sibling run_one_optimisation variants in
    # this file which return it.
    return run_optimisation(prefix=ref,
                            neuroml_file='models/RS/SSTest.net.nml',
                            target='network_RS',
                            parameters=parameters,
                            max_constraints=max_constraints,
                            min_constraints=min_constraints,
                            weights=weights,
                            target_data=target_data,
                            sim_time=sim_time,
                            dt=0.025,
                            seed=seed,
                            population_size=population_size,
                            max_evaluations=max_evaluations,
                            num_selected=num_selected,
                            num_offspring=num_offspring,
                            mutation_rate=mutation_rate,
                            num_elites=num_elites,
                            simulator=simulator,
                            nogui=nogui)
def run_one_optimisation(ref, seed, population_size, max_evaluations,
                         num_selected, num_offspring, mutation_rate,
                         num_elites, simulator, nogui, parameters,
                         max_constraints, min_constraints,
                         neuroml_file='prototypes/RS/AllenTest.net.nml',
                         target='network_RS', weights=weights,
                         target_data=target_data, dt=0.025):
    """Run one optimisation of the Allen RS prototype model.

    Builds a unique output-file prefix from the evolutionary-algorithm
    settings and delegates to ``run_optimisation`` with a fixed
    simulation time of 1500 ms.

    NOTE(review): the ``weights`` and ``target_data`` defaults are
    captured from module-level globals at definition time.

    :returns: the report produced by ``run_optimisation``
    """
    # Encode every EA setting into the prefix so each run's output
    # files are uniquely named.
    run_prefix = '%s__s%s_p%s_m%s_s%s_o%s_m%s_e%s' % (
        ref, seed, population_size, max_evaluations,
        num_selected, num_offspring, mutation_rate, num_elites)
    return run_optimisation(prefix=run_prefix,
                            neuroml_file=neuroml_file,
                            target=target,
                            parameters=parameters,
                            max_constraints=max_constraints,
                            min_constraints=min_constraints,
                            weights=weights,
                            target_data=target_data,
                            sim_time=1500,
                            dt=dt,
                            seed=seed,
                            population_size=population_size,
                            max_evaluations=max_evaluations,
                            num_selected=num_selected,
                            num_offspring=num_offspring,
                            mutation_rate=mutation_rate,
                            num_elites=num_elites,
                            simulator=simulator,
                            nogui=nogui)
def run_one_optimisation(ref, seed, population_size, max_evaluations,
                         num_selected, num_offspring, mutation_rate,
                         num_elites, simulator, nogui, parameters,
                         max_constraints, min_constraints,
                         neuroml_file='prototypes/RS/AllenTest.net.nml',
                         target='network_RS', weights=weights,
                         target_data=target_data, dt=0.025):
    """Launch a single tuning run against the Allen RS prototype.

    The evolutionary-algorithm settings are folded into the file prefix
    so that concurrent runs do not overwrite each other's outputs; the
    actual work is done by ``run_optimisation`` (sim_time fixed at 1500).

    NOTE(review): ``weights``/``target_data`` defaults are evaluated from
    globals when the function is defined.

    :returns: the ``run_optimisation`` report
    """
    ea_settings = (ref, seed, population_size, max_evaluations,
                   num_selected, num_offspring, mutation_rate, num_elites)
    prefix = '%s__s%s_p%s_m%s_s%s_o%s_m%s_e%s' % ea_settings
    return run_optimisation(
        prefix=prefix, neuroml_file=neuroml_file, target=target,
        parameters=parameters, max_constraints=max_constraints,
        min_constraints=min_constraints, weights=weights,
        target_data=target_data, sim_time=1500, dt=dt, seed=seed,
        population_size=population_size, max_evaluations=max_evaluations,
        num_selected=num_selected, num_offspring=num_offspring,
        mutation_rate=mutation_rate, num_elites=num_elites,
        simulator=simulator, nogui=nogui)
# Target metrics for the HH cell network; keys are metric-path strings
# (max_peak_no etc.) defined earlier in the file.
target_data = {
    max_peak_no: 34,
    average_maximum: 30.72,
    average_minimum: -75,
}

# NOTE(review): the first assignment is immediately overridden — kept
# as-is since it appears to act as a manual toggle between simulators.
simulator = 'jNeuroML_NEURON'
simulator = 'jNeuroML'

run_optimisation(
    prefix='TestHHpy',
    neuroml_file='test_data/HHCellNetwork.net.nml',
    target='HHCellNetwork',
    parameters=parameters,
    max_constraints=max_constraints,
    min_constraints=min_constraints,
    weights=weights,
    target_data=target_data,
    sim_time=700,
    population_size=30,
    max_evaluations=100,
    num_selected=10,
    num_offspring=10,
    mutation_rate=0.15,
    num_elites=1,
    seed=12345,
    simulator=simulator,
    nogui=nogui,
    known_target_values=known_target_values,
    num_parallel_evaluations=1,
)
# NOTE(review): fragment — this span starts mid-dict (the 'target_data ='
# assignment and opening '{' lie outside this view) and ends with the
# 'else:' arm of an enclosing conditional. Code left byte-identical; it
# cannot be safely restructured without the surrounding context.
average_maximum: 22.50, average_minimum: -62.82} simulator = 'jNeuroML_NEURON' run_optimisation(prefix = 'TestPyr', neuroml_file = neuroml_file, target = 'network', parameters = parameters, max_constraints = max_constraints, min_constraints = min_constraints, weights = weights, target_data = target_data, sim_time = sim_time, population_size = 20, max_evaluations = 100, num_selected = 10, num_offspring = 10, mutation_rate = 0.5, num_elites = 3, seed = 12345, simulator = simulator, nogui = nogui, known_target_values = known_target_values) else: simulator = 'jNeuroML_NEURON'
# NOTE(review): fragment — ends with the 'else:' arm of an enclosing
# conditional whose 'if' lies outside this view. Code left byte-identical;
# restructuring is unsafe without the surrounding branch.
target_data = {mean_spike_frequency: 32.49, average_maximum: 22.50, average_minimum: -62.82} simulator = "jNeuroML_NEURON" run_optimisation( prefix="TestPyr", neuroml_file=neuroml_file, target="network", parameters=parameters, max_constraints=max_constraints, min_constraints=min_constraints, weights=weights, target_data=target_data, sim_time=sim_time, population_size=20, max_evaluations=100, num_selected=10, num_offspring=10, mutation_rate=0.5, num_elites=3, seed=12345, simulator=simulator, nogui=nogui, known_target_values=known_target_values, ) else: simulator = "jNeuroML_NEURON"
# Target metrics for the HH cell network (metric-path keys such as
# max_peak_no are defined earlier in the file).
target_data = {
    max_peak_no: 34,
    average_maximum: 30.72,
    average_minimum: -75,
}

# NOTE(review): first assignment is immediately overridden; preserved
# verbatim — it looks like a manual simulator toggle.
simulator = 'jNeuroML_NEURON'
simulator = 'jNeuroML'

run_optimisation(
    prefix='TestHHpy',
    neuroml_file='test_data/HHCellNetwork.net.nml',
    target='HHCellNetwork',
    parameters=parameters,
    max_constraints=max_constraints,
    min_constraints=min_constraints,
    weights=weights,
    target_data=target_data,
    sim_time=700,
    population_size=20,
    max_evaluations=60,
    num_selected=10,
    num_offspring=10,
    mutation_rate=0.5,
    num_elites=3,
    seed=12345,
    simulator=simulator,
    nogui=nogui,
    known_target_values=known_target_values,
    num_parallel_evaluations=10,
)
# NOTE(review): fragment — starts mid-dict (the assignment target, e.g.
# known_target_values, and opening '{' are outside this view). Code left
# byte-identical; only this note added.
'cell:Granule_98/channelDensity:GranPassiveCond_all/mS_per_cm2': 0.0330033, 'cell:Granule_98/specificCapacitance:all/uF_per_cm2': 1 } report = run_optimisation(prefix='Pas_' + prefix, neuroml_file=neuroml_file, target=target, parameters=parameters, max_constraints=max_constraints_pas, min_constraints=min_constraints_pas, weights=weights_pas, target_data=target_data_pas, sim_time=sim_time, dt=dt, population_size=population_size, max_evaluations=max_evaluations, num_selected=num_selected, num_offspring=num_offspring, mutation_rate=mutation_rate, num_elites=num_elites, simulator=simulator, nogui=nogui, show_plot_already=False, seed=seed, known_target_values=known_target_values) plot_baseline_data() eps = 0.01
# NOTE(review): fragment — starts mid-dict (assignment target and opening
# '{' are outside this view). Code left byte-identical; only this note
# added.
'cell:Granule_98/channelDensity:Gran_KDr_98_all/mS_per_cm2':8.89691, 'cell:Granule_98/channelDensity:Gran_H_98_all/mS_per_cm2': 0.03090506, 'cell:Granule_98/channelDensity:GranPassiveCond_all/mS_per_cm2': 0.0330033, 'cell:Granule_98/specificCapacitance:all/uF_per_cm2': 1} report = run_optimisation(prefix = 'Pas_'+prefix, neuroml_file = neuroml_file, target = target, parameters = parameters, max_constraints = max_constraints_pas, min_constraints = min_constraints_pas, weights = weights_pas, target_data = target_data_pas, sim_time = sim_time, dt = dt, population_size = population_size, max_evaluations = max_evaluations, num_selected = num_selected, num_offspring = num_offspring, mutation_rate = mutation_rate, num_elites = num_elites, simulator = simulator, nogui = nogui, show_plot_already = False, seed = seed, known_target_values = known_target_values) plot_baseline_data() eps = 0.01
def tune_izh_model(acq_list: List, metrics_from_data: Dict, currents: Dict) -> Dict:
    """Tune networks model against the data.

    Generates a network with one Izhikevich cell per current stimulus
    and tunes the cell parameters against the experimental data.

    :param acq_list: list of indices of acquisitions/sweeps to tune against
    :type acq_list: list
    :param metrics_from_data: dictionary with the sweep number as index,
        and the dictionary containing metrics generated from the analysis
    :type metrics_from_data: dict
    :param currents: dictionary with sweep number as index and stimulus
        current value
    :returns: the report returned by ``run_optimisation``
    """
    # Simulated duration per cell — should match the experiment length.
    sim_time = 1500.0

    # Template network document used for the tuning, seeded with an
    # Izhikevich cell carrying initial parameter values.
    template_doc = NeuroMLDocument(id="IzhTuneNet")
    template_doc.izhikevich2007_cells.append(
        Izhikevich2007Cell(
            id="Izh2007",
            C="100pF",
            v0="-60mV",
            k="0.7nS_per_mV",
            vr="-60mV",
            vt="-40mV",
            vpeak="35mV",
            a="0.03per_ms",
            b="-2nS",
            c="-50.0mV",
            d="100pA",
        ))
    template_doc.networks.append(Network(id="Network0"))

    # One cell per acquisition in a single population.
    template_doc.networks[0].populations.append(
        Population(id="Pop0", component="Izh2007", size=len(acq_list)))

    # One pulse generator per cell, reproducing the experimental
    # stimulus currents.
    for idx, acq in enumerate(acq_list):
        template_doc.pulse_generators.append(
            PulseGenerator(
                id="Stim{}".format(idx),
                delay="80ms",
                duration="1000ms",
                amplitude="{}pA".format(currents[acq]),
            ))
        template_doc.networks[0].explicit_inputs.append(
            ExplicitInput(target="Pop0[{}]".format(idx),
                          input="Stim{}".format(idx)))

    # Summarise, then write and validate the template model file.
    print(template_doc.summary())
    reference = "TuneIzhFergusonPyr3"
    template_filename = "{}.net.nml".format(reference)
    write_neuroml2_file(template_doc, template_filename, validate=True)

    # Tuning specification. Key format: type:id/variable:id/units;
    # value: (min, max) range to search.
    parameters = {
        "izhikevich2007Cell:Izh2007/C/pF": (100, 300),
        "izhikevich2007Cell:Izh2007/k/nS_per_mV": (0.01, 2),
        "izhikevich2007Cell:Izh2007/vr/mV": (-70, -50),
        "izhikevich2007Cell:Izh2007/vt/mV": (-60, 0),
        "izhikevich2007Cell:Izh2007/vpeak/mV": (35, 70),
        "izhikevich2007Cell:Izh2007/a/per_ms": (0.001, 0.4),
        "izhikevich2007Cell:Izh2007/b/nS": (-10, 10),
        "izhikevich2007Cell:Izh2007/c/mV": (-65, -10),
        "izhikevich2007Cell:Izh2007/d/pA": (50, 500),
    }  # type: Dict[str, Tuple[float, float]]

    # Build the fitting targets: for each sweep, map pyelectro metric
    # paths ("Pop0[i]/v:metric") onto the values measured from the data,
    # each with weight 1.
    target_data = {}
    weights = {}
    for idx, acq in enumerate(acq_list):
        mean_spike_frequency = "Pop0[{}]/v:mean_spike_frequency".format(idx)
        average_last_1percent = "Pop0[{}]/v:average_last_1percent".format(idx)
        first_spike_time = "Pop0[{}]/v:first_spike_time".format(idx)
        weights[mean_spike_frequency] = 1
        weights[average_last_1percent] = 1
        weights[first_spike_time] = 1
        target_data[mean_spike_frequency] = metrics_from_data[acq][
            "{}:mean_spike_frequency".format(acq)]
        target_data[average_last_1percent] = metrics_from_data[acq][
            "{}:average_last_1percent".format(acq)]
        target_data[first_spike_time] = metrics_from_data[acq][
            "{}:first_spike_time".format(acq)]

        # These metrics exist only for traces that contain spikes, so
        # include them only when present in the measured data.
        if "{}:average_maximum".format(acq) in metrics_from_data[acq]:
            average_maximum = "Pop0[{}]/v:average_maximum".format(idx)
            weights[average_maximum] = 1
            target_data[average_maximum] = metrics_from_data[acq][
                "{}:average_maximum".format(acq)]
        if "{}:average_minimum".format(acq) in metrics_from_data[acq]:
            average_minimum = "Pop0[{}]/v:average_minimum".format(idx)
            weights[average_minimum] = 1
            target_data[average_minimum] = metrics_from_data[acq][
                "{}:average_minimum".format(acq)]

    simulator = "jNeuroML"
    return run_optimisation(
        # Prefix for new files
        prefix="TuneIzh",
        # The template network written above
        neuroml_file=template_filename,
        target="Network0",
        # Parameters to fit, with their min/max constraints
        parameters=list(parameters.keys()),
        min_constraints=[v[0] for v in parameters.values()],
        max_constraints=[v[1] for v in parameters.values()],
        # Metric weights and experimental targets assembled above
        weights=weights,
        target_data=target_data,
        sim_time=sim_time,
        # Evolutionary-algorithm settings
        population_size=100,
        max_evaluations=500,
        num_selected=30,
        num_offspring=50,
        mutation_rate=0.9,
        num_elites=3,
        seed=12345,
        simulator=simulator,
        dt=0.025,
        show_plot_already='-nogui' not in sys.argv,
        save_to_file="fitted_izhikevich_fitness.png",
        save_to_file_scatter="fitted_izhikevich_scatter.png",
        save_to_file_hist="fitted_izhikevich_hist.png",
        save_to_file_output="fitted_izhikevich_output.png",
        num_parallel_evaluations=4,
    )