def export_to_neuroml1(hoc_file, nml1_file_name, level=1, validate=True):
    """Load a NEURON hoc file and export the model to NeuroML v1 via ModelView.

    :param hoc_file: path of the hoc file to load into NEURON
    :param nml1_file_name: output NeuroML v1 file name
    :param level: NeuroMLv1.8.1 export level; only 1 or 2 are accepted
    :param validate: if True, validate the written file with validate_neuroml1
    :return: None (also returns None early when level is invalid)
    """
    if not (level == 1 or level == 2):
        print_comment_v("Only options for Levels in NeuroMLv1.8.1 are 1 or 2")
        return None
    # NEURON imports are deferred so this module can be imported without NEURON installed
    from neuron import *
    from nrn import *
    h.load_file(hoc_file)
    print_comment_v("Loaded NEURON file: %s" % hoc_file)
    # Build a ModelView of the loaded model and export through the bundled
    # hoc helper, which lives next to this Python module.
    h.load_file("mview.hoc")
    h('objref mv')
    h('mv = new ModelView()')
    h.load_file("%s/mview_neuroml1.hoc" % (os.path.dirname(__file__)))
    h('objref mvnml1')
    h('mvnml1 = new ModelViewNeuroML1(mv)')
    h.mvnml1.exportNeuroML(nml1_file_name, level)
    if validate:
        validate_neuroml1(nml1_file_name)
def process_channel_file(channel_file, a):
    """Analyse every channel in *channel_file*, optionally running/plotting it,
    and return a list of info dicts (id, file, notes, species, expression,
    ion colour) — one per channel that has gates.

    :param channel_file: path to a NeuroML2 channel file
    :param a: argument namespace (flags v, norun, iv_curve, nogui, ...)
    :raises IOError: when channel_file does not exist
    """
    if a.v:
        print_comment_v("Going to test channel from file: " + channel_file)
    if not os.path.isfile(channel_file):
        raise IOError("File could not be found: %s!\n" % channel_file)

    channels_info = []
    for channel in get_channels_from_channel_file(channel_file):
        # Gateless channels are (probably) passive conductances — nothing to analyse
        if not get_channel_gates(channel):
            print_comment_v("Skipping %s in %s as it has no channels (probably passive conductance)" % (channel.id, channel_file))
            continue

        new_lems_file = make_lems_file(channel, a)
        if not a.norun:
            results = run_lems_file(new_lems_file, a)
            iv_data = compute_iv_curve(channel, a, results) if a.iv_curve else None
            if not a.nogui:
                plot_channel(channel, a, results, iv_data=iv_data)

        entry = {}
        for key in ['id', 'file', 'notes', 'species']:
            entry[key] = getattr(channel, key)
        entry['expression'] = get_conductance_expression(channel)
        entry['ion_color'] = get_ion_color(channel.species)
        channels_info.append(entry)
    return channels_info
def export_to_neuroml1(hoc_file, nml1_file_name, level=1, validate=True):
    """Export a NEURON hoc model to NeuroML v1 using NEURON's ModelView.

    :param hoc_file: hoc file to load
    :param nml1_file_name: destination NeuroML v1 file
    :param level: NeuroML 1.8.1 level, must be 1 or 2 (returns None otherwise)
    :param validate: run validate_neuroml1 on the result when True
    """
    if not (level == 1 or level == 2):
        print_comment_v("Only options for Levels in NeuroMLv1.8.1 are 1 or 2")
        return None
    # Import NEURON lazily; the export only works inside a NEURON-enabled env
    from neuron import *
    from nrn import *
    h.load_file(hoc_file)
    print_comment_v("Loaded NEURON file: %s" % hoc_file)
    # Drive the ModelView -> NeuroML1 exporter shipped alongside this module
    h.load_file("mview.hoc")
    h("objref mv")
    h("mv = new ModelView()")
    h.load_file("%s/mview_neuroml1.hoc" % (os.path.dirname(__file__)))
    h("objref mvnml1")
    h("mvnml1 = new ModelViewNeuroML1(mv)")
    h.mvnml1.exportNeuroML(nml1_file_name, level)
    if validate:
        validate_neuroml1(nml1_file_name)
def make_lems_file(channel, a):
    """Generate a LEMS analysis file for *channel* and write it to OUTPUT_DIR.

    :param channel: channel object (uses .file and .id)
    :param a: argument namespace with voltage-clamp / simulation settings
    :return: path of the written LEMS file
    """
    gates = get_channel_gates(channel)
    lems_content = generate_lems_channel_analyser(
        channel.file, channel.id, a.min_v, a.step_target_voltage, a.max_v,
        a.clamp_delay, a.clamp_duration, a.clamp_base_voltage, a.duration,
        a.erev, gates, a.temperature, a.ca_conc, a.iv_curve,
        scale_dt=a.scale_dt, dat_suffix=a.dat_suffix, verbose=a.v)
    new_lems_file = os.path.join(OUTPUT_DIR, "LEMS_Test_%s.xml" % channel.id)
    # FIX: use a context manager so the handle is closed even if write() fails
    with open(new_lems_file, 'w') as lf:
        lf.write(lems_content)
    if a.v:
        print_comment_v("Written generated LEMS file to %s\n" % new_lems_file)
    return new_lems_file
def make_lems_file(channel, a):
    """Write a LEMS test file for one channel into OUTPUT_DIR and return its path.

    :param channel: channel object providing .file and .id
    :param a: argument namespace with the analysis parameters
    :return: path of the generated LEMS_Test_<id>.xml file
    """
    gates = get_channel_gates(channel)
    lems_content = generate_lems_channel_analyser(
        channel.file, channel.id, a.min_v, a.step_target_voltage, a.max_v,
        a.clamp_delay, a.clamp_duration, a.clamp_base_voltage, a.duration,
        a.erev, gates, a.temperature, a.ca_conc, a.iv_curve, a.dat_suffix,
    )
    new_lems_file = os.path.join(OUTPUT_DIR, "LEMS_Test_%s.xml" % channel.id)
    # FIX: context manager guarantees the file is closed on any error path
    with open(new_lems_file, "w") as lf:
        lf.write(lems_content)
    if a.v:
        print_comment_v("Written generated LEMS file to %s\n" % new_lems_file)
    return new_lems_file
def merge_with_template(info):
    """Render the bundled channel NML template with the *info* context dict."""
    path = "TEMPLATE.channel.nml"
    if not os.path.isfile(path):
        # Fall back to the template that ships next to the executed script
        path = os.path.join(os.path.dirname(sys.argv[0]), path)
    print_comment_v("Merging with template %s" % path)
    with open(path) as fh:
        source = fh.read()
    return airspeed.Template(source).merge(info)
def make_html_file(info):
    """Render *info* through the HTML template and write ChannelInfo.html
    into the overview directory."""
    merged = merge_with_template(info, HTML_TEMPLATE_FILE)
    html_dir = make_overview_dir()
    new_html_file = os.path.join(html_dir, 'ChannelInfo.html')
    # FIX: context manager instead of bare open/close — no leak on write error
    with open(new_html_file, 'w') as lf:
        lf.write(merged)
    print_comment_v('Written HTML info to: %s' % new_html_file)
def make_md_file(info):
    """Render *info* through the Markdown template and write README.md
    into the overview directory."""
    merged = merge_with_template(info, MD_TEMPLATE_FILE)
    md_dir = make_overview_dir()
    new_md_file = os.path.join(md_dir, 'README.md')
    # FIX: context manager instead of bare open/close — no leak on write error
    with open(new_md_file, 'w') as lf:
        lf.write(merged)
    print_comment_v('Written Markdown info to: %s' % new_md_file)
def make_html_file(info):
    """Write the channel overview as ChannelInfo.html in the overview dir."""
    merged = merge_with_template(info, HTML_TEMPLATE_FILE)
    html_dir = make_overview_dir()
    new_html_file = os.path.join(html_dir, 'ChannelInfo.html')
    # FIX: 'with' ensures the handle is flushed/closed even if write() raises
    with open(new_html_file, 'w') as lf:
        lf.write(merged)
    print_comment_v('Written HTML info to: %s' % new_html_file)
def make_md_file(info):
    """Write the channel overview as README.md in the overview dir."""
    merged = merge_with_template(info, MD_TEMPLATE_FILE)
    md_dir = make_overview_dir()
    new_md_file = os.path.join(md_dir, 'README.md')
    # FIX: 'with' ensures the handle is flushed/closed even if write() raises
    with open(new_md_file, 'w') as lf:
        lf.write(merged)
    print_comment_v('Written Markdown info to: %s' % new_md_file)
def generate_lems_channel_analyser(channel_file, channel, min_target_voltage,
                                   step_target_voltage, max_target_voltage,
                                   clamp_delay, clamp_duration,
                                   clamp_base_voltage, duration, erev, gates,
                                   temperature, ca_conc, iv_curve,
                                   dat_suffix=''):
    """Build and return the contents of a LEMS file that voltage-clamps the
    given channel over a sweep of target voltages.

    :param channel_file: NeuroML2 file containing the channel
    :param channel: id of the channel to analyse
    :param min_target_voltage..max_target_voltage: sweep range (mV), stepped
        by step_target_voltage
    :return: the merged template text (str)
    """
    print_comment_v("Generating LEMS file to investigate %s in %s, %smV->%smV, %sdegC"%(channel, \
                    channel_file, min_target_voltage, max_target_voltage, temperature))

    target_voltages = []
    v = min_target_voltage
    while v <= max_target_voltage:
        target_voltages.append(v)
        v += step_target_voltage

    # FIX: enumerate avoids the O(n^2) list.index() of the original, and the
    # guarded denominator prevents ZeroDivisionError for a single voltage.
    denom = max(len(target_voltages) - 1, 1)
    target_voltages_map = []
    for i, t in enumerate(target_voltages):
        fract = float(i) / denom  # 0..1 position in the sweep, used for colour
        info = {}
        info["v"] = t
        info["v_str"] = str(t).replace("-", "min")  # "-70" -> "min70" (id-safe)
        info["col"] = get_colour_hex(fract)
        target_voltages_map.append(info)

    model = {
        "channel_file": channel_file,
        "channel": channel,
        "target_voltages": target_voltages_map,
        "clamp_delay": clamp_delay,
        "clamp_duration": clamp_duration,
        "clamp_base_voltage": clamp_base_voltage,
        "min_target_voltage": min_target_voltage,
        "max_target_voltage": max_target_voltage,
        "duration": duration,
        "erev": erev,
        "gates": gates,
        "temperature": temperature,
        "ca_conc": ca_conc,
        "iv_curve": iv_curve,
        "dat_suffix": dat_suffix
    }
    #pp.pprint(model)

    merged = merge_with_template(model, TEMPLATE_FILE)
    return merged
def include_neuroml2_file(self, nml2_file_name, include_included=True, relative_to_dir='.'):
    """Register a NeuroML2 file in the generated LEMS includes; when
    include_included is True, recurse into the file's own <include> elements
    (resolved relative to the included file's directory)."""
    full_path = os.path.abspath(relative_to_dir + '/' + nml2_file_name)
    base_path = os.path.dirname(full_path)
    print_comment_v("Including in generated LEMS file: %s (%s)"%(nml2_file_name, full_path))
    self.lems_info['include_files'].append(nml2_file_name)
    if not include_included:
        return
    # Pull in everything the referenced document itself includes
    for include in read_neuroml2_file(full_path).includes:
        self.include_neuroml2_file(include.href, include_included=True, relative_to_dir=base_path)
def merge_with_template(info):
    """Merge *info* into the channel template and return the rendered text."""
    name = "TEMPLATE.channel.nml"
    # Prefer a template in the working directory, else the one beside the script
    templfile = name if os.path.isfile(name) else os.path.join(os.path.dirname(sys.argv[0]), name)
    print_comment_v("Merging with template %s"%templfile)
    with open(templfile) as f:
        content = f.read()
    template = airspeed.Template(content)
    return template.merge(info)
def check_brackets(line, bracket_depth):
    """Scan *line* for '{' / '}' and return the updated nesting depth,
    logging whenever the depth changed across the line."""
    if line:
        depth_before = bracket_depth
        deltas = {'{': 1, '}': -1}
        for ch in line:
            bracket_depth += deltas.get(ch, 0)
        if depth_before != bracket_depth:
            print_comment_v("  <%s> moved bracket %i -> %i" % (line, depth_before, bracket_depth))
    return bracket_depth
def check_brackets(line, bracket_depth):
    """Update the running '{' / '}' nesting depth after scanning *line*;
    a change in depth is reported via print_comment_v."""
    if len(line) > 0:
        new_depth = bracket_depth + line.count('{') - line.count('}')
        if new_depth != bracket_depth:
            print_comment_v("  <%s> moved bracket %i -> %i"%(line, bracket_depth, new_depth))
        bracket_depth = new_depth
    return bracket_depth
def generate_lems_channel_analyser(channel_file, channel, min_target_voltage,
                                   step_target_voltage, max_target_voltage,
                                   clamp_delay, clamp_duration,
                                   clamp_base_voltage, duration, erev, gates,
                                   temperature, ca_conc, iv_curve,
                                   dat_suffix=''):
    """Build the LEMS text for clamping *channel* over a sweep of voltages,
    also carrying the channel file's own includes (as absolute paths) into
    the template model.

    :return: the merged template text (str)
    """
    print_comment_v("Generating LEMS file to investigate %s in %s, %smV->%smV, %sdegC"%(channel, \
                    channel_file, min_target_voltage, max_target_voltage, temperature))

    # Sweep from min to max inclusive; note float accumulation of the step
    target_voltages = []
    v = min_target_voltage
    while v <= max_target_voltage:
        target_voltages.append(v)
        v+=step_target_voltage

    target_voltages_map = []
    for t in target_voltages:
        # Fractional position in the sweep -> colour for the plot legend
        fract = float(target_voltages.index(t)) / (len(target_voltages)-1)
        info = {}
        info["v"] = t
        # "-70" -> "min70": identifier-safe label for LEMS component ids
        info["v_str"] = str(t).replace("-", "min")
        info["col"] = get_colour_hex(fract)
        target_voltages_map.append(info)

    # Resolve the channel file's includes relative to its own directory
    includes = get_includes_from_channel_file(channel_file)
    includes_relative = []
    base_path = os.path.dirname(channel_file)
    for inc in includes:
        includes_relative.append(os.path.abspath(base_path+'/'+inc))

    model = {"channel_file": channel_file,
             "includes": includes_relative,
             "channel": channel,
             "target_voltages" : target_voltages_map,
             "clamp_delay": clamp_delay,
             "clamp_duration": clamp_duration,
             "clamp_base_voltage": clamp_base_voltage,
             "min_target_voltage": min_target_voltage,
             "max_target_voltage": max_target_voltage,
             "duration": duration,
             "erev": erev,
             "gates": gates,
             "temperature": temperature,
             "ca_conc": ca_conc,
             "iv_curve": iv_curve,
             "dat_suffix": dat_suffix}
    #pp.pprint(model)

    merged = merge_with_template(model, TEMPLATE_FILE)
    return merged
def include_neuroml2_file(self, nml2_file_name, include_included=True, relative_to_dir='.'):
    """Add *nml2_file_name* to the generated LEMS include list.

    :param include_included: when True, recursively include every file the
        referenced NeuroML2 document itself includes, resolving their paths
        relative to the included file's directory.
    """
    full_path = os.path.abspath(relative_to_dir + '/' + nml2_file_name)
    base_path = os.path.dirname(full_path)
    print_comment_v("Including in generated LEMS file: %s (%s)" % (nml2_file_name, full_path))
    self.lems_info['include_files'].append(nml2_file_name)
    if include_included:
        # Recurse into the document's own <include> elements
        cell = read_neuroml2_file(full_path)
        for include in cell.includes:
            self.include_neuroml2_file(include.href, include_included=True, relative_to_dir=base_path)
def run(a=None, **kwargs): a = build_namespace(a, **kwargs) #if (not a.nogui) or a.html: # print('mpl') info = { 'info': ("Channel information at: " "T = %s degC, " "E_rev = %s mV, " "[Ca2+] = %s mM") % (a.temperature, a.erev, a.ca_conc), 'channels': [] } na_chan_files = [] k_chan_files = [] ca_chan_files = [] other_chan_files = [] if len(a.channel_files) > 0: for channel_file in a.channel_files: channels = get_channels_from_channel_file(channel_file) #TODO look past 1st channel... if channels[0].species == 'na': na_chan_files.append(channel_file) elif channels[0].species == 'k': k_chan_files.append(channel_file) elif channels[0].species == 'ca': ca_chan_files.append(channel_file) else: other_chan_files.append(channel_file) channel_files = na_chan_files + k_chan_files + ca_chan_files + other_chan_files print_comment_v("\nAnalysing channels from files: %s\n" % channel_files) for channel_file in channel_files: channels_info = process_channel_file(channel_file, a) for channel_info in channels_info: info['channels'].append(channel_info) if not a.nogui and not a.html and not a.md: plt.show() else: if a.html: make_html_file(info) if a.md: make_md_file(info)
def convert_to_swc(nml_file_name):
    """Convert each cell in a NeuroML2 file to an SWC file (<cell.id>.swc,
    written next to the source file).

    SWC types: soma_group -> 1, axon_group -> 2, dendrite_group -> 3.
    Uses module-level counters shared with _get_lines_for_seg_group.
    """
    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals

    # Reset
    line_count = 1
    line_index_vs_distals = {}
    line_index_vs_proximals = {}

    base_dir = os.path.dirname(os.path.realpath(nml_file_name))
    nml_doc = pynml.read_neuroml2_file(nml_file_name, include_includes=True, verbose=False, optimized=True)

    lines = []
    for cell in nml_doc.cells:
        swc_file_name = '%s/%s.swc' % (base_dir, cell.id)
        swc_file = open(swc_file_name, 'w')
        print_comment_v("Converting cell %s as found in NeuroML doc %s to SWC..." % (cell.id, nml_file_name))

        # Emit soma, then dendrites, then axon, tracking segment counts
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'soma_group', 1)
        soma_seg_count = len(seg_ids)
        lines += lines_sg
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'dendrite_group', 3)
        dend_seg_count = len(seg_ids)
        lines += lines_sg
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'axon_group', 2)
        axon_seg_count = len(seg_ids)
        lines += lines_sg

        # Every segment must be in exactly one of the three groups
        if not len(cell.morphology.segments) == soma_seg_count + dend_seg_count + axon_seg_count:
            raise Exception("The numbers of the segments in groups: soma_group+dendrite_group+axon_group (%i), is not the same as total number of segments (%s)! All bets are off!" % (soma_seg_count + dend_seg_count + axon_seg_count, len(cell.morphology.segments)))

        # NOTE(review): `lines` is not reset per cell, so with multiple cells
        # later SWC files repeat earlier cells' lines — confirm intended.
        for i in range(len(lines)):
            l = lines[i]
            swc_line = '%s' % (l)
            print(swc_line)
            swc_file.write('%s\n' % swc_line)

        swc_file.close()
        print("Written to %s" % swc_file_name)
def run(a=None,**kwargs): a = build_namespace(a,**kwargs) #if (not a.nogui) or a.html: # print('mpl') info = {'info': ("Channel information at: " "T = %s degC, " "E_rev = %s mV, " "[Ca2+] = %s mM") % (a.temperature, a.erev, a.ca_conc), 'channels': []} na_chan_files = [] k_chan_files = [] ca_chan_files = [] other_chan_files = [] if len(a.channel_files) > 0: for channel_file in a.channel_files: channels = get_channels_from_channel_file(channel_file) #TODO look past 1st channel... if channels[0].species == 'na': na_chan_files.append(channel_file) elif channels[0].species == 'k': k_chan_files.append(channel_file) elif channels[0].species == 'ca': ca_chan_files.append(channel_file) else: other_chan_files.append(channel_file) channel_files = na_chan_files + k_chan_files + ca_chan_files + other_chan_files print_comment_v("\nAnalysing channels from files: %s\n"%channel_files) for channel_file in channel_files: channels_info = process_channel_file(channel_file,a) for channel_info in channels_info: info['channels'].append(channel_info) if not a.nogui and not a.html and not a.md: plt.show() else: if a.html: make_html_file(info) if a.md: make_md_file(info)
def run(self, candidates, parameters):
    """Run one simulation per candidate parameter set.

    Each candidate is zipped with *parameters* into the variable dict passed
    to run_individual; the resulting time/voltage arrays are collected and
    returned as a list of [t, v] pairs.
    """
    traces = []
    for candidate in candidates:
        sim_var = dict(zip(parameters, candidate))
        print_comment_v('\n\n - RUN %i; variables: %s\n'%(self.count, sim_var))
        self.count += 1
        trace = self.run_individual(sim_var)
        traces.append(list(trace))
    return traces
def run(self, candidates, parameters):
    """Loop over the candidates, simulate each one via run_individual, and
    return the voltage traces as a list of [t, v] pairs (one per candidate).
    """
    collected = []
    for values in candidates:
        variables = dict(zip(parameters, values))
        print_comment_v('\n\n - RUN %i; variables: %s\n' % (self.count, variables))
        self.count += 1
        time_points, volts = self.run_individual(variables)
        collected.append([time_points, volts])
    return collected
def export_to_neuroml2(hoc_or_python_file,
                       nml2_file_name,
                       includeBiophysicalProperties=True,
                       separateCellFiles=False,
                       known_rev_potentials={},
                       validate=True):
    """Export a NEURON model (hoc file, or whatever is already in memory when
    None) to NeuroML2 via NEURON's ModelView.

    :param known_rev_potentials: {ion: erev} applied via set_erev_for_mechanism
        before export
    :param validate: run validate_neuroml2 on the result when True
    """
    # NEURON imports deferred so importing this module doesn't require NEURON
    from neuron import *
    from nrn import *

    if hoc_or_python_file is not None:
        if hoc_or_python_file.endswith(".py"):
            print_comment_v(
                "***************\nImporting Python scripts not yet implemented...\n***************"
            )
        else:
            if not os.path.isfile(hoc_or_python_file):
                print_comment_v(
                    "***************\nProblem importing file %s (%s)..\n***************"
                    % (hoc_or_python_file, os.path.abspath(hoc_or_python_file)))
            h.load_file(
                1, hoc_or_python_file
            )  # Using 1 to force loading of the file, in case file with same name was loaded before...
    else:
        print_comment_v(
            "hoc_or_python_file variable is None; exporting what's currently in memory..."
        )

    # Apply caller-supplied reversal potentials before the export
    for ion in known_rev_potentials.keys():
        set_erev_for_mechanism(ion, known_rev_potentials[ion])

    print_comment_v("Loaded NEURON file: %s" % hoc_or_python_file)

    # Build a ModelView and drive the bundled NeuroML2 exporter hoc script
    h.load_file("mview.hoc")
    h('objref mv')
    h('mv = new ModelView(0)')
    h.load_file("%s/mview_neuroml2.hoc" % (os.path.dirname(__file__)))
    h('objref mvnml')
    h('mvnml = new ModelViewNeuroML2(mv)')
    # Level 2 includes biophysical properties; level 1 is morphology only
    nml2_level = 2 if includeBiophysicalProperties else 1
    h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles))

    if validate:
        validate_neuroml2(nml2_file_name)

    h('mv.destroy()')
def read_sonata_spikes_hdf5_file(file_name):
    """Read a SONATA spikes HDF5 file and return {gid: [spike times]}.

    Logs the id and time ranges of the loaded spikes. Requires pytables.

    :param file_name: path to the HDF5 spikes file
    :return: dict mapping gid -> list of spike times
    """
    full_path = os.path.abspath(file_name)
    pynml.print_comment_v("Loading SONATA spike times from: %s (%s)"%(file_name,full_path))

    import tables   # pytables for HDF5 support

    h5file = tables.open_file(file_name, mode='r')
    # FIX: close the file even if the layout is unexpected and a read raises
    try:
        pynml.print_comment_v("Opened HDF5 file: %s; sorting=%s"%(h5file.filename,h5file.root.spikes._v_attrs.sorting))
        gids = h5file.root.spikes.gids
        timestamps = h5file.root.spikes.timestamps
        ids_times = {}
        count = 0
        max_t = -1*sys.float_info.max
        min_t = sys.float_info.max
        # (renamed loop var: `id` shadowed the builtin)
        for gid, t in zip(gids, timestamps):
            max_t = max(max_t, t)
            min_t = min(min_t, t)
            ids_times.setdefault(gid, []).append(t)
            count += 1
    finally:
        h5file.close()

    ids = ids_times.keys()
    pynml.print_comment_v("Loaded %s spiketimes, ids (%s -> %s) times (%s -> %s)"%(count,min(ids), max(ids),min_t,max_t))

    return ids_times
def read_sonata_spikes_hdf5_file(file_name):
    """Load spike times from a SONATA HDF5 file into {gid: [times]}.

    Logs the id and time ranges. Requires pytables.

    :param file_name: path to the HDF5 spikes file
    :return: dict mapping gid -> list of spike times
    """
    full_path = os.path.abspath(file_name)
    pynml.print_comment_v("Loading SONATA spike times from: %s (%s)" % (file_name, full_path))

    import tables  # pytables for HDF5 support

    h5file = tables.open_file(file_name, mode='r')
    # BUG FIX: this variant never closed the HDF5 handle — leaked the file;
    # try/finally guarantees the close even on a read error.
    try:
        pynml.print_comment_v(
            "Opened HDF5 file: %s; sorting=%s" % (h5file.filename, h5file.root.spikes._v_attrs.sorting))
        gids = h5file.root.spikes.gids
        timestamps = h5file.root.spikes.timestamps
        ids_times = {}
        count = 0
        max_t = -1 * sys.float_info.max
        min_t = sys.float_info.max
        for i in range(len(gids)):
            gid = gids[i]  # renamed: `id` shadowed the builtin
            t = timestamps[i]
            max_t = max(max_t, t)
            min_t = min(min_t, t)
            if gid not in ids_times:
                ids_times[gid] = []
            ids_times[gid].append(t)
            count += 1
    finally:
        h5file.close()

    ids = ids_times.keys()
    pynml.print_comment_v(
        "Loaded %s spiketimes, ids (%s -> %s) times (%s -> %s)" % (count, min(ids), max(ids), min_t, max_t))

    return ids_times
def process_channel_file(channel_file, a):
    """Analyse every channel in *channel_file* and return a list of info
    dicts (id, file, notes, species, conductance expression, ion colour).

    :param a: argument namespace (flags v, norun, iv_curve, nogui, ...)
    :raises IOError: when channel_file does not exist
    """
    ## Get name of channel mechanism to test

    if a.v:
        print_comment_v("Going to test channel from file: " + channel_file)

    if not os.path.isfile(channel_file):
        raise IOError("File could not be found: %s!\n" % channel_file)

    channels = get_channels_from_channel_file(channel_file)

    channels_info = []
    for channel in channels:
        # Channels with no gates are (probably) passive conductances — skip
        if len(get_channel_gates(channel)) == 0:
            print_comment_v(
                "Skipping %s in %s as it has no channels (probably passive conductance)"
                % (channel.id, channel_file))
        else:
            new_lems_file = make_lems_file(channel, a)
            if not a.norun:
                # NOTE(review): this variant passes a.v (verbosity flag) where
                # the other variant passes the whole namespace — confirm the
                # expected signature of run_lems_file.
                results = run_lems_file(new_lems_file, a.v)

                if a.iv_curve:
                    iv_data = compute_iv_curve(channel, a, results)
                else:
                    iv_data = None

            if not a.nogui and not a.norun:
                plot_channel(channel, a, results, iv_data=iv_data)

            channel_info = {
                key: getattr(channel, key)
                for key in ['id', 'file', 'notes', 'species']
            }
            channel_info['expression'] = get_conductance_expression(channel)
            channel_info['ion_color'] = get_ion_color(channel.species)

            channels_info.append(channel_info)
    return channels_info
def go(self):
    """Generate a LEMS file for the configured NeuroML model, run it with the
    selected simulator ('jNeuroML' or 'jNeuroML_NEURON'; exits otherwise),
    and store the results: self.t (ms) and self.volts (mV keyed by trace).
    """
    lems_file_name = 'LEMS_%s.xml' % (self.reference)

    generate_lems_file_for_neuroml(self.reference,
                                   self.neuroml_file,
                                   self.target,
                                   self.sim_time,
                                   self.dt,
                                   lems_file_name=lems_file_name,
                                   target_dir=self.generate_dir,
                                   nml_doc=self.nml_doc)

    pynml.print_comment_v(
        "Running a simulation of %s ms with timestep %s ms: %s" %
        (self.sim_time, self.dt, lems_file_name))

    self.already_run = True
    start = time.time()
    if self.simulator == 'jNeuroML':
        results = pynml.run_lems_with_jneuroml(
            lems_file_name,
            nogui=True,
            load_saved_data=True,
            plot=False,
            exec_in_dir=self.generate_dir,
            verbose=False,
            cleanup=self.cleanup)
    elif self.simulator == 'jNeuroML_NEURON':
        results = pynml.run_lems_with_jneuroml_neuron(
            lems_file_name,
            nogui=True,
            load_saved_data=True,
            plot=False,
            exec_in_dir=self.generate_dir,
            verbose=False,
            cleanup=self.cleanup)
    else:
        pynml.print_comment_v('Unsupported simulator: %s' % self.simulator)
        exit()

    secs = time.time() - start
    pynml.print_comment_v(
        "Ran simulation in %s in %f seconds (%f mins)\n\n" %
        (self.simulator, secs, secs / 60.0))

    # Convert from SI (s, V) to ms and mV for downstream consumers
    self.t = [t * 1000 for t in results['t']]

    self.volts = {}
    for key in results.keys():
        if key != 't':
            self.volts[key] = [v * 1000 for v in results[key]]
def export_to_neuroml2(hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, known_rev_potentials={}, validate=True):
    """Export a NEURON model (hoc file, or current in-memory model when None)
    to NeuroML2 via NEURON's ModelView.

    :param known_rev_potentials: {ion: erev} applied before export
    :param validate: run validate_neuroml2 on the result when True
    """
    # NEURON imports deferred so the module imports without NEURON installed
    from neuron import *
    from nrn import *

    if hoc_or_python_file is not None:
        if hoc_or_python_file.endswith(".py"):
            print_comment_v("***************\nImporting Python scripts not yet implemented...\n***************")
        else:
            if not os.path.isfile(hoc_or_python_file):
                print_comment_v("***************\nProblem importing file %s (%s)..\n***************"%(hoc_or_python_file, os.path.abspath(hoc_or_python_file)))
            h.load_file(1, hoc_or_python_file)  # Using 1 to force loading of the file, in case file with same name was loaded before...
    else:
        print_comment_v("hoc_or_python_file variable is None; exporting what's currently in memory...")

    # Apply caller-supplied reversal potentials before the export
    for ion in known_rev_potentials.keys():
        set_erev_for_mechanism(ion,known_rev_potentials[ion])

    print_comment_v("Loaded NEURON file: %s"%hoc_or_python_file)

    # Drive the ModelView -> NeuroML2 exporter bundled next to this module
    h.load_file("mview.hoc")
    h('objref mv')
    h('mv = new ModelView(0)')
    h.load_file("%s/mview_neuroml2.hoc"%(os.path.dirname(__file__)))
    h('objref mvnml')
    h('mvnml = new ModelViewNeuroML2(mv)')
    # Level 2 carries biophysics; level 1 is morphology only
    nml2_level = 2 if includeBiophysicalProperties else 1
    h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles))

    if validate:
        validate_neuroml2(nml2_file_name)

    h('mv.destroy()')
def go(self):
    """Generate a LEMS file for the model, run it with the configured
    simulator ('jNeuroML' or 'jNeuroML_NEURON'; exits otherwise) and store
    self.t (ms) plus self.volts (mV per trace key)."""
    lems_file_name = 'LEMS_%s.xml'%(self.reference)

    generate_lems_file_for_neuroml(self.reference,
                                   self.neuroml_file,
                                   self.target,
                                   self.sim_time,
                                   self.dt,
                                   lems_file_name = lems_file_name,
                                   target_dir = self.generate_dir)

    pynml.print_comment_v("Running a simulation of %s ms with timestep %s ms: %s"%(self.sim_time, self.dt, lems_file_name))

    self.already_run = True
    start = time.time()
    if self.simulator == 'jNeuroML':
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               nogui=True,
                                               load_saved_data=True,
                                               plot=False,
                                               exec_in_dir = self.generate_dir,
                                               verbose=False)
    elif self.simulator == 'jNeuroML_NEURON':
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
                                                      nogui=True,
                                                      load_saved_data=True,
                                                      plot=False,
                                                      exec_in_dir = self.generate_dir,
                                                      verbose=False)
    else:
        pynml.print_comment_v('Unsupported simulator: %s'%self.simulator)
        exit()

    secs = time.time()-start
    pynml.print_comment_v("Ran simulation in %s in %f seconds (%f mins)\n\n"%(self.simulator, secs, secs/60.0))

    # Convert results from SI (s, V) to ms and mV
    self.t = [t*1000 for t in results['t']]

    self.volts = {}
    for key in results.keys():
        if key != 't':
            self.volts[key] = [v*1000 for v in results[key]]
def export_to_neuroml2(
    hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, validate=True
):
    """Export a NEURON model (hoc file, or the in-memory model when None)
    to NeuroML2 using NEURON's ModelView.

    :param includeBiophysicalProperties: export at level 2 (with biophysics)
        when True, level 1 (morphology only) otherwise
    :param validate: run validate_neuroml2 on the output when True
    """
    # Deferred NEURON imports: only needed when this exporter actually runs
    from neuron import *
    from nrn import *

    if hoc_or_python_file is not None:
        if hoc_or_python_file.endswith(".py"):
            print_comment_v("Importing Python scripts not yet implemented...")
        else:
            h.load_file(
                1, hoc_or_python_file
            )  # Using 1 to force loading of the file, in case file with same name was loaded before...
    else:
        print_comment_v("hoc_or_python_file variable is None; exporting what's currently in memory...")

    print_comment_v("Loaded NEURON file: %s" % hoc_or_python_file)

    # Build a ModelView and run the bundled NeuroML2 exporter hoc script
    h.load_file("mview.hoc")
    h("objref mv")
    h("mv = new ModelView(0)")
    h.load_file("%s/mview_neuroml2.hoc" % (os.path.dirname(__file__)))
    h("objref mvnml")
    h("mvnml = new ModelViewNeuroML2(mv)")
    nml2_level = 2 if includeBiophysicalProperties else 1
    h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles))

    if validate:
        validate_neuroml2(nml2_file_name)

    h("mv.destroy()")
def go(self):
    """Run the pre-existing LEMS file (self.lems_file) with the configured
    simulator and log the wall-clock duration.

    Supports 'jNeuroML' and 'jNeuroML_NEURON'; exits on any other simulator.
    """
    pynml.print_comment_v(
        "Running a simulation of %s ms with timestep %s ms: %s" %
        (self.sim_time, self.dt, self.lems_file))

    self.already_run = True

    # BUG FIX: was the Python-2 statement `print self.simulator`,
    # which is a SyntaxError on Python 3.
    print(self.simulator)
    start = time.time()
    if self.simulator == 'jNeuroML':
        results = pynml.run_lems_with_jneuroml(
            self.lems_file,  #_name,
            nogui=True,
            load_saved_data=True,
            plot=False,
            exec_in_dir=self.generate_dir,
            verbose=False)
    elif self.simulator == 'jNeuroML_NEURON':
        # NOTE(review): load_saved_data=False here but True in the jNeuroML
        # branch — confirm this asymmetry is intended.
        results = pynml.run_lems_with_jneuroml_neuron(
            self.lems_file,
            nogui=True,
            load_saved_data=False,
            plot=False,
            exec_in_dir=self.generate_dir,
            verbose=False)
    else:
        pynml.print_comment_v('Unsupported simulator: %s' % self.simulator)
        exit()

    secs = time.time() - start

    pynml.print_comment_v(
        "Ran simulation in %s in %f seconds (%f mins)\n\n" %
        (self.simulator, secs, secs / 60.0))
def main(argv):
    """Generate per-frame POV-Ray scene files (and .bat/.sh driver scripts)
    that colour a previously-exported network by membrane potential over time.

    Reads saved LEMS results, subsamples the time points by args.skip, maps
    each selected frame's voltages to colours, and rewrites the *_net.inc
    (or *_cells.inc in single-cell mode) and .pov files once per frame.
    """
    args = process_args()
    #for v in range(int(args.minV),int(args.maxV)+5,5): print get_rainbow_color_for_volts(v, args)
    #exit()

    results = pynml.reload_saved_data(args.lems_file_name, plot=False)

    times = [t * 1000 for t in results['t']]  # s -> ms
    dt = times[1] - times[0]
    #stepTime = (args.skip+1)*dt

    # Subsample the time axis: keep one frame, skip args.skip points, repeat
    t = 0
    times_used = []
    frame_indices = []
    to_skip = 0
    index = 0
    while t <= args.endTime:
        if to_skip == 0:
            times_used.append(t)
            frame_indices.append(index)
            to_skip = args.skip
        else:
            to_skip -= 1
        index += 1
        t = times[index]

    print_comment_v("There are %i time points total, max: %f ms, dt: %f ms" % (len(times), times[-1], dt))
    print_comment_v("times_used: %s; frame_indices %s" % (times_used, frame_indices))
    print_comment_v("All refs: %s" % results.keys())

    # Map each population/cell (seg 0 or 'v') to a per-frame colour list
    volt_colors = {}
    for ref in results.keys():
        if ref != 't':
            pathBits = ref.split('/')
            pop = pathBits[0]
            index = pathBits[1]
            seg = pathBits[3]
            ref2 = '%s_%s' % (pop, index)
            if seg == '0' or seg == 'v':
                volt_color = []
                for i in frame_indices:
                    v = results[ref][i] * 1000  # V -> mV
                    colour = get_rainbow_color_for_volts(
                        v, args) if args.rainbow else get_color_for_volts(
                            v, args)
                    volt_color.append(colour)
                volt_colors[ref2] = volt_color

    print_comment_v("All refs: %s" % volt_colors.keys())
    print_comment_v("All volt_colors: %s" % volt_colors)

    t = args.startTime
    index = 0

    #give the single frames an alphabetical order
    maxind = "00000"
    ind = "00000"

    # Driver scripts that render every generated frame (Windows + POSIX)
    bat_file_name = "%s_pov.bat" % (args.prefix)
    bat_file = open(bat_file_name, 'w')
    sh_file_name = "%s_pov.sh" % (args.prefix)
    sh_file = open(sh_file_name, 'w')

    for fi in frame_indices:
        t = times[fi]
        print_comment_v(
            "\n----  Exporting for time: %f, index %i frame index %i  ----\n" %
            (t, index, fi))

        if not args.singlecell:
            # Network mode: substitute per-frame colours into *_net.inc
            in_file_name = args.prefix + "_net.inc"
            in_file = open(in_file_name)
            out_file_name = args.prefix + "_net.inc" + str(index)
            out_file = open(out_file_name, 'w')
            print_comment_v("in_file_name %s; out_file_name: %s" % (in_file_name, out_file_name))
            for line in in_file:
                # Lines starting with // carry the cell ref this colour belongs to
                if line.strip().startswith("//"):
                    ref = line.strip()[2:]
                    if ref in volt_colors.keys():
                        vs = volt_colors[ref]
                        #print_comment_v(('-- %s: %s '%(ref,len(vs)))
                        out_file.write(" %s // %s t= %s\n" % (vs[index], ref, t))
                    elif ref + ".0" in volt_colors.keys():
                        vs = volt_colors[ref + ".0"]
                        out_file.write(" " + vs[index] + " //" + ref + " t= " + str(t) + "\n")
                    else:
                        out_file.write("// No ref there: " + ref + "\n")
                        print_comment_v("Missing ref: " + ref)
                else:
                    out_file.write(line)
            in_file.close()
            out_file.close()
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))

            # Rewrite the .pov to include this frame's .inc and bake the clock
            in_file = open(args.prefix + ".pov")
            out_file_name = "%s_T%i.pov" % (args.prefix, index)
            out_file = open(out_file_name, 'w')
            clock = args.rotations * (t - args.startTime) / (args.endTime - args.startTime)
            pre = '%s_net.inc' % args.prefix
            pre = pre.split('/')[-1]
            post = '%s_net.inc%i' % (args.prefix, index)
            post = post.split('/')[-1]
            print_comment_v("Swapping %s for %s" % (pre, post))
            for line in in_file:
                if line.find(pre) >= 0:
                    out_file.write(line.replace(pre, post))
                else:
                    out_file.write(line.replace("clock", str(clock)))
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))
            in_file.close()
            out_file.close()

            toEx = os.path.realpath(out_file_name)
            bat_file.write(
                "C:\\Users\\Padraig\\AppData\\Local\\Programs\\POV-Ray\\v3.7\\bin\\pvengine.exe %s /nr /exit\n"
                % toEx)
            sh_file.write("povray %s %s\n" % (args.povrayOptions, toEx))
        else:
            # Single-cell mode: colour the *_cells.inc for the named cell
            ind = maxind[0:len(maxind) - len(str(index))]  #compute index indentation
            in_file = open(args.prefix + "_cells.inc")
            out_file_name = args.prefix + "_cells.inc" + ind + str(index)
            out_file = open(out_file_name, 'w')
            dummy_ref = 'CELL_GROUP_NAME_0'
            for line in in_file:
                if line.strip().startswith("//"):
                    ref = line.strip()[2:]
                    ref = ref.replace(dummy_ref, args.singlecell)
                    # NOTE(review): `volts` is not defined anywhere in this
                    # function — this branch would raise NameError; it looks
                    # like it should be volt_colors. Confirm before relying
                    # on single-cell mode.
                    if ref in volts.keys():
                        vs = volts[ref]
                        out_file.write(" " + vs[index] + "\n//" + ref + " t= " + ind + str(t) + "\n")
                    else:
                        out_file.write("//No ref found: " + ref + ", was looking for " + dummy_ref + "\n")
                else:
                    out_file.write(line)
            in_file.close()
            out_file.close()
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))

            in_file = open(args.prefix + ".pov")
            out_file_name = "%s_T%s%i.pov" % (args.prefix, ind, index)
            out_file = open(out_file_name, 'w')
            for line in in_file:
                pre = '%s_cells.inc' % args.prefix
                post = '%s_cells.inc%s%i' % (args.prefix, ind, index)
                if line.find(pre) >= 0:
                    out_file.write(line.replace(pre, post))
                else:
                    clock = args.rotations * (t - args.startTime) / (
                        args.endTime - args.startTime)
                    out_file.write(line.replace("clock", str(clock)))
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))
            in_file.close()
            out_file.close()

            toEx = os.path.realpath(out_file_name)
            bat_file.write(
                "C:\\Users\\Padraig\\AppData\\Local\\Programs\\POV-Ray\\v3.7\\bin\\pvengine.exe %s /nr /exit\n"
                % toEx)
            sh_file.write("povray %s %s\n" % (args.povrayOptions, toEx))

        index = index + 1

    print_comment_v("Done!: ")
    print_comment_v("\nTo generate images type:\n\n   bash %s_pov.sh\n\n" % args.prefix)
def _get_lines_for_seg_group(cell, sg, type):
    """Build SWC lines for one segment group of *cell*.

    :param sg: segment group name (e.g. 'soma_group')
    :param type: SWC structure id to emit (1 soma, 2 axon, 3 dendrite)
    :return: (lines, seg_ids) — SWC text lines and the segment ids covered

    Uses/updates the module-level SWC line counter and the maps from segment
    id to the SWC line index of its proximal/distal point, so parents emitted
    by earlier calls can be referenced here.
    """
    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals

    seg_ids = []
    lines = []
    ord_segs = cell.get_ordered_segments_in_groups([sg])
    if sg in ord_segs:
        segs = ord_segs[sg]
        # SWC columns: index, type, x, y, z, radius, parent index (+ comment)
        line_template = '%s %s %s %s %s %s %s %s'
        for segment in segs:
            seg_ids.append(segment.id)
            print_comment_v('Seg %s is one of %i in %s of %s' % (segment, len(segs), sg, cell.id))
            id = int(segment.id)
            parent_seg_id = None if not segment.parent else segment.parent.segments
            parent_line = -1  # -1 = no parent (SWC root)
            #print parent_line
            #print parent_seg_id
            if parent_seg_id != None:
                # Snap near-0/near-1 attachment fractions to exactly 0/1
                fract = segment.parent.fraction_along
                if fract < 0.0001:
                    fract = 0
                if abs(fract - 1) < 0.0001:
                    fract = 1
                if fract == 1:
                    parent_line = line_index_vs_distals[parent_seg_id]
                # NOTE(review): this compares the raw fraction_along to 0, not
                # the snapped `fract` — a value like 0.00005 would fall through
                # to the exception; confirm whether `fract == 0` was intended.
                elif segment.parent.fraction_along == 0:
                    parent_line = line_index_vs_proximals[parent_seg_id]
                else:
                    raise Exception("Can't handle case where a segment is not connected to the 0 or 1 point along the parent!\n" \
                                    + "Segment %s is connected %s (%s) along parent %s" % (segment, segment.parent.fraction_along, fract, segment.parent))

            if segment.proximal is not None:
                # Emit an extra SWC point for the explicit proximal end
                proximal = segment.proximal
                x = float(proximal.x)
                y = float(proximal.y)
                z = float(proximal.z)
                r = float(proximal.diameter) / 2.0
                comment = ' # %s: %s (proximal)' % (segment, sg)
                comment = ''  # comments currently suppressed (previous line is dead)
                lines.append(line_template % (line_count, type, x, y, z, r, parent_line, comment))
                line_index_vs_proximals[id] = line_count
                parent_line = line_count
                line_count += 1

            distal = segment.distal
            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = float(distal.diameter) / 2.0
            comment = ' # %s: %s ' % (segment, sg)
            comment = ''  # comments currently suppressed (previous line is dead)
            lines.append(line_template % (line_count, type, x, y, z, r, parent_line, comment))
            line_index_vs_distals[id] = line_count
            line_count += 1
    return lines, seg_ids
def main ():
    """Convert a NeuroML(2) network file into a POV-Ray scene (.pov),
    optionally splitting cells/network into .inc files and generating a
    movie .ini file."""
    args = process_args()

    xmlfile = args.neuroml_file

    # Derive the output .pov name from the NeuroML file name
    pov_file_name = xmlfile.replace(".xml", ".pov").replace(".nml1", ".pov").replace(".nml.h5", ".pov").replace(".nml", ".pov")

    pov_file = open(pov_file_name, "w")

    header='''
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}
\n'''  ### end of header

    pov_file.write(header%(args.background))

    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        # Write cell and network geometry to separate .inc files which the
        # main .pov file then #includes
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print_comment_v("Saving into %s and %s and %s"%(pov_file_name, cf, nf))

    print_comment_v("Converting XML file: %s to %s"%(xmlfile, pov_file_name))

    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)

    # Bounding box of the current single cell ('c' suffix)...
    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    # ...and of the whole network (used for camera/lights in the footer)
    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print_comment_v("There are %i cells in the file"%len(cell_elements))

    cell_id_vs_seg_id_vs_proximal = {}
    cell_id_vs_seg_id_vs_distal = {}
    cell_id_vs_cell = {}

    # Declare one POV-Ray union per cell morphology
    for cell in cell_elements:

        cellName = cell.id
        cell_id_vs_cell[cell.id] = cell
        print_comment_v("Handling cell: %s"%cellName)
        cell_id_vs_seg_id_vs_proximal[cell.id] = {}
        cell_id_vs_seg_id_vs_distal[cell.id] = {}

        declaredcell = "cell_"+cellName

        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n"%declaredcell)
        cells_file.write("union {\n")

        prefix = ""

        segments = cell.morphology.segments

        distpoints = {}
        proxpoints = {}

        for segment in segments:

            id = int(segment.id)

            distal = segment.distal

            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            # Clamp very thin segments to a minimum visible diameter
            r = max(float(distal.diameter)/2.0, args.mindiam)

            if x-r<minXc: minXc=x-r
            if y-r<minYc: minYc=y-r
            if z-r<minZc: minZc=z-r

            if x+r>maxXc: maxXc=x+r
            if y+r>maxYc: maxYc=y+r
            if z+r>maxZc: maxZc=z+r

            distalpoint = "<%f, %f, %f>, %f "%(x,y,z,r)

            distpoints[id] = distalpoint
            cell_id_vs_seg_id_vs_distal[cell.id][id] = (x,y,z)

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f "%(float(proximal.x),float(proximal.y),float(proximal.z),max(float(proximal.diameter)/2.0, args.mindiam))

                cell_id_vs_seg_id_vs_proximal[cell.id][id] = (float(proximal.x),float(proximal.y),float(proximal.z))
            else:
                # No explicit proximal: reuse the parent segment's distal point
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = cell_id_vs_seg_id_vs_distal[cell.id][parent]

            proxpoints[id] = proximalpoint

            shape = "cone"

            if proximalpoint == distalpoint:
                # Degenerate cone (same ends) -> render as a sphere
                shape = "sphere"
                proximalpoint = ""

            if ( shape == "cone" and (proximalpoint.split('>')[0] == distalpoint.split('>')[0])):
                # Same coordinates but different radii: zero-length segment
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n"%(id, proximalpoint, distalpoint)
                print_comment_v(comment)
                cells_file.write("    // "+comment)
            else:
                cells_file.write("    %s {\n"%shape)
                cells_file.write("        %s\n"%distalpoint)
                if len(proximalpoint):
                    cells_file.write("        %s\n"%proximalpoint)
                cells_file.write("        //%s_%s.%s\n"%('CELL_GROUP_NAME','0', id))
                cells_file.write("    }\n")

        cells_file.write("    pigment { color rgb <%f,%f,%f> }\n"%(random.random(),random.random(),random.random()))

        cells_file.write("}\n\n")

    if splitOut:
        pov_file.write("#include \""+cf+"\"\n\n")
        pov_file.write("#include \""+nf+"\"\n\n")

    pov_file.write('''\n/*\n  Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 5.000000\n    }\n    pigment { color rgb <1,0,0> }\n}\n'''%_DUMMY_CELL)

    pov_file.write('''\n/*\n  Defining the spheres to use for end points of connections...\n*/\n#declare conn_start_point = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 3.000000\n    }\n    pigment { color rgb <0,1,0> }\n}\n\n#declare conn_end_point = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 3.000000\n    }\n    pigment { color rgb <1,0,0> }\n}\n''')

    positions = {}
    popElements = nml_doc.networks[0].populations

    pop_id_vs_cell = {}

    print_comment_v("There are %i populations in the file"%len(popElements))

    # Place one instance of the declared cell per population instance
    for pop in popElements:

        name = pop.id
        celltype = pop.component
        instances = pop.instances

        if pop.component in cell_id_vs_cell.keys():
            #if cell_id_vs_cell.has_key(pop.component):
            pop_id_vs_cell[pop.id] = cell_id_vs_cell[pop.component]

        info = "Population: %s has %i positioned cells of type: %s"%(name,len(instances),celltype)
        print_comment_v(info)

        colour = "1"

        for prop in pop.properties:
            if prop.tag == 'color':
                colour = prop.value
                colour = colour.replace(" ", ",")
                #print "Colour determined to be: "+colour

        net_file.write("\n\n/* "+info+" */\n\n")

        pop_positions = {}

        if not celltype in declaredcells:
            # Unknown cell type: fall back to the dummy cell with zero extent
            cell_definition = _DUMMY_CELL
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
        else:
            cell_definition = declaredcells[celltype]

        for instance in instances:

            location = instance.location
            id = int(instance.id)
            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = (x,y,z)

            # Grow the network bounding box by cell extent + position
            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                # '1' means: pick a random colour per instance
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id))

            net_file.write("}\n")

        positions[name] = pop_positions

        if len(instances) == 0 and int(pop.size>0):

            # Population declared by size only (no explicit instances):
            # draw a single representative cell at the origin
            info = "Population: %s has %i unpositioned cells of type: %s"%(name,pop.size,celltype)
            print_comment_v(info)

            colour = "1"
            '''
            if pop.annotation:
                print dir(pop.annotation)
                print pop.annotation.anytypeobjs_
                print pop.annotation.member_data_items_[0].name
                print dir(pop.annotation.member_data_items_[0])
                for prop in pop.annotation.anytypeobjs_:
                    print prop
                    if len(prop.getElementsByTagName('meta:tag'))>0 and prop.getElementsByTagName('meta:tag')[0].childNodes[0].data == 'color':
                        #print prop.getElementsByTagName('meta:tag')[0].childNodes
                        colour = prop.getElementsByTagName('meta:value')[0].childNodes[0].data
                        colour = colour.replace(" ", ",")
                    elif prop.hasAttribute('tag') and prop.getAttribute('tag') == 'color':
                        colour = prop.getAttribute('value')
                        colour = colour.replace(" ", ",")
                    print "Colour determined to be: "+colour
            '''

            net_file.write("\n\n/* "+info+" */\n\n")

            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = 0
            y = 0
            z = 0

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            # NOTE(review): 'id' here is left over from an earlier loop
            # iteration (no instances exist in this branch) — looks like a
            # stale value used only in a comment; confirm intent
            net_file.write("\n    //%s_%s\n"%(name, id))

            net_file.write("}\n")

    #print positions

    if args.conns or args.conn_points:
        # Note: segment specific connections not implemented yet... i.e. connections from dends to axons...
        #print_comment_v("************************\n*\n*  Note: connection lines in 3D do not yet target dendritic locations!\n*\n************************")
        for projection in nml_doc.networks[0].projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            connections = projection.connections + projection.connection_wds
            print_comment_v("Adding %i connections %s -> %s "%(len(connections),pre,post))
            #print cell_id_vs_seg_id_vs_distal
            #print cell_id_vs_seg_id_vs_proximal
            for connection in connections:
                pre_cell_id = connection.get_pre_cell_id()
                post_cell_id = connection.get_post_cell_id()

                pre_loc = (0,0,0)
                if pre in positions.keys():# positions.has_key(pre):
                    if len(positions[pre])>0:
                        pre_loc = positions[pre][pre_cell_id]

                post_loc = (0,0,0)
                if post in positions.keys(): #positions.has_key(post):
                    post_loc = positions[post][post_cell_id]

                #if pop_id_vs_cell.has_key(projection.presynaptic_population):
                if projection.presynaptic_population in pop_id_vs_cell.keys():
                    # Offset endpoint by the attachment point along the segment
                    pre_cell = pop_id_vs_cell[projection.presynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[pre_cell.id][int(connection.pre_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[pre_cell.id][int(connection.pre_segment_id)]
                    m = [ p[i]+float(connection.pre_fraction_along)*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Pre point is %s, %s between %s and %s"%(m,connection.pre_fraction_along,p,d))
                    pre_loc = [ pre_loc[i]+m[i] for i in [0,1,2] ]

                if projection.postsynaptic_population in pop_id_vs_cell.keys(): #has_key(projection.postsynaptic_population):
                    #if pop_id_vs_cell.has_key(projection.postsynaptic_population):
                    post_cell = pop_id_vs_cell[projection.postsynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[post_cell.id][int(connection.post_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[post_cell.id][int(connection.post_segment_id)]
                    m = [ p[i]+float(connection.post_fraction_along)*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Post point is %s, %s between %s and %s"%(m,connection.post_fraction_along,p,d))
                    post_loc = [ post_loc[i]+m[i] for i in [0,1,2] ]

                if post_loc != pre_loc:
                    info = "// Connection from %s:%s %s -> %s:%s %s\n"%(pre, pre_cell_id, pre_loc, post, post_cell_id, post_loc)

                    print_comment(info)
                    net_file.write("// %s"%info)
                    if args.conns:
                        net_file.write("cylinder { <%s,%s,%s>, <%s,%s,%s>, .5  pigment{color Grey}}\n"%(pre_loc[0],pre_loc[1],pre_loc[2], post_loc[0],post_loc[1],post_loc[2]))
                    if args.conn_points:
                        net_file.write("object { conn_start_point translate <%s,%s,%s> }\n"%(pre_loc[0],pre_loc[1],pre_loc[2]))
                        net_file.write("object { conn_end_point translate <%s,%s,%s> }\n"%(post_loc[0],post_loc[1],post_loc[2]))

    plane = '''
plane {
   y, vv(-1)
   pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
'''

    footer='''

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
    0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
    0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
    0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
  <uu(5),uu(2),uu(5)>
  color rgb <1,1,1>
}
light_source {
  <uu(-5),uu(2),uu(-5)>
  color rgb <1,1,1>
}
light_source {
  <uu(5),uu(-2),uu(-5)>
  color rgb <1,1,1>
}
light_source {
  <uu(-5),uu(-2),uu(5)>
  color rgb <1,1,1>
}

// Trying to view box
camera {
  location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
  look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
\n'''%(minX,minY,minZ,maxX,maxY,maxZ, args.posx, args.scalex, args.posy, args.scaley, args.posz, args.scalez, args.viewx, args.viewy, args.viewz, (plane if args.plane else ""))  ### end of footer

    pov_file.write(footer)

    pov_file.close()

    if args.movie:
        # Emit a POV-Ray .ini animation file driving 'clock' from 0 to 1
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")

        ini_movie = '''
Antialias=On

+W800 +H600

Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off
'''
        ini_file = open(ini_file_name, 'w')
        ini_file.write(ini_movie%(pov_file_name, args.frames))
        ini_file.close()
        print_comment_v("Created file for generating %i movie frames at: %s. To run this type:\n\n    povray %s\n"%(args.frames,ini_file_name,ini_file_name))
    else:
        print_comment_v("Created file for generating image of network. To run this type:\n\n    povray %s\n"%(pov_file_name))
        print_comment_v("Or for higher resolution:\n\n    povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n"%(pov_file_name))
def generate_Vm_vs_time_plot(nml2_file, cell_id, inj_amp_nA = 80, delay_ms = 20, inj_dur_ms = 60, sim_dur_ms = 100, dt = 0.05, temperature = "32degC", spike_threshold_mV=0., plot_voltage_traces=False, show_plot_already=True, simulator="jNeuroML", include_included=True):
    """
    Build and run a LEMS simulation injecting a current pulse into a single
    'hhcell' instance from nml2_file and recording Vm vs time.

    :param nml2_file: NeuroML2 file containing the cell definition
    :param cell_id: id used for the generated NeuroMLDocument
    :param inj_amp_nA: amplitude of the injected pulse, in nA
    :param delay_ms: delay before pulse onset, in ms
    :param inj_dur_ms: pulse duration, in ms
    :param sim_dur_ms: total simulation duration, in ms
    :param dt: integration timestep, in ms
    :param simulator: "jNeuroML" or "jNeuroML_NEURON"
    :return: id of the LEMS output file element ('Volts_file')
    :raises ValueError: if *simulator* is not recognised
    """
    # simulation parameters
    nogui = '-nogui' in sys.argv  # Used to supress GUI in tests for Travis-CI

    ref = "Test"
    print_comment_v("Generating Vm(mV) vs Time(ms) plot for cell %s in %s using %s (Inj %snA / %sms dur after %sms delay)"%
                    (cell_id, nml2_file, simulator, inj_amp_nA, inj_dur_ms, delay_ms))

    sim_id = 'Vm_%s'%ref
    duration = sim_dur_ms
    ls = LEMSSimulation(sim_id, sim_dur_ms, dt)
    ls.include_neuroml2_file(nml2_file, include_included=include_included)
    ls.assign_simulation_target('network')

    nml_doc = nml.NeuroMLDocument(id=cell_id)
    nml_doc.includes.append(nml.IncludeType(href=nml2_file))

    net = nml.Network(id="network")
    nml_doc.networks.append(net)

    input_id = ("input_%s"%str(inj_amp_nA).replace('.','_'))
    # BUGFIX: amplitude was written with 'pA' units although the parameter is
    # inj_amp_nA (and the log message above says nA) — a 1000x error.
    pg = nml.PulseGenerator(id=input_id,
                            delay="%sms"%delay_ms,
                            duration='%sms'%inj_dur_ms,
                            amplitude='%snA'%inj_amp_nA)
    nml_doc.pulse_generators.append(pg)

    pop_id = 'hhpop'
    pop = nml.Population(id=pop_id, component='hhcell', size=1, type="populationList")

    inst = nml.Instance(id=0)
    pop.instances.append(inst)
    inst.location = nml.Location(x=0, y=0, z=0)
    net.populations.append(pop)

    # Add these to cells
    input_list = nml.InputList(id='il_%s'%input_id, component=pg.id, populations=pop_id)
    # 'inp' instead of 'input' to avoid shadowing the builtin
    inp = nml.Input(id='0', target='../hhpop/0/hhcell', destination="synapses")
    input_list.input.append(inp)
    net.input_lists.append(input_list)

    sim_file_name = '%s.sim.nml'%sim_id
    pynml.write_neuroml2_file(nml_doc, sim_file_name)
    ls.include_neuroml2_file(sim_file_name)

    disp0 = 'Voltage_display'
    ls.create_display(disp0,"Voltages", "-90", "50")
    ls.add_line_to_display(disp0, "V", "hhpop/0/hhcell/v", scale='1mV')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat"%sim_id)
    ls.add_column_to_output_file(of0, "V", "hhpop/0/hhcell/v")

    lems_file_name = ls.save_to_file()

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)
    else:
        # Previously an unknown simulator fell through silently with no run
        raise ValueError("Unknown simulator: %s (expected jNeuroML or jNeuroML_NEURON)"%simulator)

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    return of0
def run_individual(self, sim_var, show=False):
    """
    Run an individual simulation.

    The candidate data has been flattened into the sim_var dict. The
    sim_var dict contains parameter:value key value pairs, which are
    applied to the model before it is simulated.

    Keys have the form 'type:id1/variable[:id2]/units', e.g.
    'cell:mycell/channelDensity:kChans/mS_per_cm2' (inferred from the
    parsing below). Writes a modified copy of the NeuroML file into
    self.generate_dir, runs it via NeuroMLSimulation and returns
    (times, voltages).
    """
    nml_doc = read_neuroml2_file(self.neuroml_file, include_includes=True, verbose = True, already_included = [])

    for var_name in sim_var.keys():
        # var_name: 'type:id1/variable[:id2]/units'
        words = var_name.split('/')
        type, id1 = words[0].split(':')
        if ':' in words[1]:
            variable, id2 = words[1].split(':')
        else:
            variable = words[1]
            id2 = None

        units = words[2]
        value = sim_var[var_name]

        print_comment_v('  Changing value of %s (%s) in %s (%s) to: %s %s'%(variable, id2, type, id1, value, units))

        if type == 'cell':
            cell = None
            for c in nml_doc.cells:
                if c.id == id1:
                    cell = c

            if variable == 'channelDensity':
                # Set cond_density on the channelDensity with id == id2
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.id == id2:
                        chanDens = cd

                chanDens.cond_density = '%s %s'%(value, units)

            elif variable == 'erev_id':  # change all values of erev in channelDensity elements with only this id
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.id == id2:
                        chanDens = cd

                chanDens.erev = '%s %s'%(value, units)

            elif variable == 'erev_ion':  # change all values of erev in channelDensity elements with this ion
                # NOTE(review): only the *last* matching channelDensity is
                # updated, despite the comment saying "all" — confirm intent
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.ion == id2:
                        chanDens = cd

                chanDens.erev = '%s %s'%(value, units)

            elif variable == 'specificCapacitance':
                specCap = None
                for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                    # id2 == 'all' matches the capacitance with no segment_groups
                    if (sc.segment_groups == None and id2 == 'all') or sc.segment_groups == id2 :
                        specCap = sc

                specCap.value = '%s %s'%(value, units)

            else:
                print_comment_v('Unknown variable (%s) in variable expression: %s'%(variable, var_name))
                exit()

        elif type == 'izhikevich2007Cell':
            izhcell = None
            for c in nml_doc.izhikevich2007_cells:
                if c.id == id1:
                    izhcell = c

            izhcell.__setattr__(variable, '%s %s'%(value, units))

        else:
            print_comment_v('Unknown type (%s) in variable expression: %s'%(type, var_name))

    new_neuroml_file = '%s/%s'%(self.generate_dir,os.path.basename(self.neuroml_file))
    if new_neuroml_file == self.neuroml_file:
        print_comment_v('Cannot use a directory for generating into (%s) which is the same location of the NeuroML file (%s)!'% \
                        (self.neuroml_file, self.generate_dir))

    write_neuroml2_file(nml_doc, new_neuroml_file)

    sim = NeuroMLSimulation(self.ref,
                            neuroml_file = new_neuroml_file,
                            target = self.target,
                            sim_time = self.sim_time,
                            dt = self.dt,
                            simulator = self.simulator,
                            generate_dir = self.generate_dir)

    sim.go()

    if show:
        sim.show()

    return sim.t, sim.volts
def run_individual(self, sim_var, show=False):
    """
    Run an individual simulation.

    The candidate data has been flattened into the sim_var dict. The
    sim_var dict contains parameter:value key value pairs, which are
    applied to the model before it is simulated.

    Keys have the form 'type:id1/variable[:id2]/units' (inferred from the
    parsing below). Writes a modified copy of the NeuroML file into
    self.generate_dir, runs it via NeuroMLSimulation and returns
    (times, voltages).
    """
    nml_doc = read_neuroml2_file(self.neuroml_file, include_includes=True, verbose=True, already_included=[])

    for var_name in sim_var.keys():
        # var_name: 'type:id1/variable[:id2]/units'
        words = var_name.split('/')
        type, id1 = words[0].split(':')
        if ':' in words[1]:
            variable, id2 = words[1].split(':')
        else:
            variable = words[1]
            id2 = None

        units = words[2]
        value = sim_var[var_name]

        print_comment_v(
            '  Changing value of %s (%s) in %s (%s) to: %s %s' % (variable, id2, type, id1, value, units))

        if type == 'cell':
            cell = None
            for c in nml_doc.cells:
                if c.id == id1:
                    cell = c

            if variable == 'channelDensity':
                # Set cond_density on the channelDensity with id == id2
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.id == id2:
                        chanDens = cd

                chanDens.cond_density = '%s %s' % (value, units)

            elif variable == 'erev_id':  # change all values of erev in channelDensity elements with only this id
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.id == id2:
                        chanDens = cd

                chanDens.erev = '%s %s' % (value, units)

            elif variable == 'erev_ion':  # change all values of erev in channelDensity elements with this ion
                # NOTE(review): only the *last* matching channelDensity is
                # updated, despite the comment saying "all" — confirm intent
                chanDens = None
                for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                    if cd.ion == id2:
                        chanDens = cd

                chanDens.erev = '%s %s' % (value, units)

            elif variable == 'specificCapacitance':
                specCap = None
                for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                    # id2 == 'all' matches the capacitance with no segment_groups
                    if (sc.segment_groups == None and id2 == 'all') or sc.segment_groups == id2:
                        specCap = sc

                specCap.value = '%s %s' % (value, units)

            else:
                print_comment_v(
                    'Unknown variable (%s) in variable expression: %s' % (variable, var_name))
                exit()

        elif type == 'izhikevich2007Cell':
            izhcell = None
            for c in nml_doc.izhikevich2007_cells:
                if c.id == id1:
                    izhcell = c

            izhcell.__setattr__(variable, '%s %s' % (value, units))

        else:
            print_comment_v(
                'Unknown type (%s) in variable expression: %s' % (type, var_name))

    new_neuroml_file = '%s/%s' % (self.generate_dir, os.path.basename(self.neuroml_file))
    if new_neuroml_file == self.neuroml_file:
        print_comment_v('Cannot use a directory for generating into (%s) which is the same location of the NeuroML file (%s)!'% \
                        (self.neuroml_file, self.generate_dir))

    write_neuroml2_file(nml_doc, new_neuroml_file)

    sim = NeuroMLSimulation(self.ref,
                            neuroml_file=new_neuroml_file,
                            target=self.target,
                            sim_time=self.sim_time,
                            dt=self.dt,
                            simulator=self.simulator,
                            generate_dir=self.generate_dir)

    sim.go()

    if show:
        sim.show()

    return sim.t, sim.volts
def _get_lines_for_seg_group(cell, sg, type):
    """
    Build SWC-style lines ('index type x y z radius parent_index …') for every
    segment of *cell* in segment group *sg*, tagged with SWC type code *type*.

    Uses and updates the module-level counters line_count,
    line_index_vs_distals and line_index_vs_proximals so that successive calls
    share one global line numbering.

    Returns a tuple (lines, seg_ids). Raises Exception if a segment attaches
    to its parent somewhere other than the 0 or 1 point along it.
    """
    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals

    seg_ids = []
    lines = []
    ord_segs = cell.get_ordered_segments_in_groups([sg])

    if sg in ord_segs:
        segs = ord_segs[sg]
        line_template = '%s %s %s %s %s %s %s %s'

        for segment in segs:
            seg_ids.append(segment.id)
            print_comment_v('Seg %s is one of %i in %s of %s' % (segment, len(segs), sg, cell.id))
            id = int(segment.id)
            parent_seg_id = None if not segment.parent else segment.parent.segments
            parent_line = -1

            if parent_seg_id is not None:
                fract = segment.parent.fraction_along
                # Snap attachment points numerically close to the ends of the
                # parent to exactly 0 or 1.
                if fract < 0.0001:
                    fract = 0
                if abs(fract - 1) < 0.0001:
                    fract = 1

                if fract == 1:
                    parent_line = line_index_vs_distals[parent_seg_id]
                elif fract == 0:
                    # BUGFIX: compare the *snapped* value, not the raw
                    # fraction_along — otherwise a near-zero attachment that
                    # was just snapped to 0 above would wrongly raise below.
                    parent_line = line_index_vs_proximals[parent_seg_id]
                else:
                    raise Exception("Can't handle case where a segment is not connected to the 0 or 1 point along the parent!\n" \
                        + "Segment %s is connected %s (%s) along parent %s" % (segment, segment.parent.fraction_along, fract, segment.parent))

            if segment.proximal is not None:
                # Emit an extra line for the proximal point; the distal line
                # below then uses it as its parent.
                proximal = segment.proximal
                x = float(proximal.x)
                y = float(proximal.y)
                z = float(proximal.z)
                r = float(proximal.diameter) / 2.0
                comment = ' # %s: %s (proximal)' % (segment, sg)
                comment = ''  # per-line comments currently disabled
                lines.append(
                    line_template % (line_count, type, x, y, z, r, parent_line, comment))
                line_index_vs_proximals[id] = line_count
                parent_line = line_count
                line_count += 1

            distal = segment.distal
            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = float(distal.diameter) / 2.0
            comment = ' # %s: %s ' % (segment, sg)
            comment = ''  # per-line comments currently disabled
            lines.append(line_template % (line_count, type, x, y, z, r, parent_line, comment))
            line_index_vs_distals[id] = line_count
            line_count += 1

    return lines, seg_ids
def main():
    """Convert a NeuroML(2) network file into a POV-Ray scene (.pov),
    optionally splitting cells/network into .inc files and generating a
    movie .ini file.

    Fix: all ``dict.has_key(k)`` calls replaced with ``k in dict`` —
    ``has_key`` was removed in Python 3 and raised AttributeError here
    (the sibling copy of this function already uses ``in``).
    """
    args = process_args()
    xmlfile = args.neuroml_file

    # Derive the output .pov name from the NeuroML file name
    pov_file_name = (
        xmlfile.replace(".xml", ".pov").replace(".nml1", ".pov").replace(".nml.h5", ".pov").replace(".nml", ".pov")
    )

    pov_file = open(pov_file_name, "w")

    header = """
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}
\n"""  ### end of header

    pov_file.write(header % (args.background))

    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        # Write cell and network geometry to separate .inc files which the
        # main .pov file then #includes
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print_comment_v("Saving into %s and %s and %s" % (pov_file_name, cf, nf))

    print_comment_v("Converting XML file: %s to %s" % (xmlfile, pov_file_name))

    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)

    # Bounding box of the current single cell ('c' suffix)...
    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    # ...and of the whole network (used for camera/lights in the footer)
    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print_comment_v("There are %i cells in the file" % len(cell_elements))

    cell_id_vs_seg_id_vs_proximal = {}
    cell_id_vs_seg_id_vs_distal = {}
    cell_id_vs_cell = {}

    # Declare one POV-Ray union per cell morphology
    for cell in cell_elements:

        cellName = cell.id
        cell_id_vs_cell[cell.id] = cell
        print_comment_v("Handling cell: %s" % cellName)
        cell_id_vs_seg_id_vs_proximal[cell.id] = {}
        cell_id_vs_seg_id_vs_distal[cell.id] = {}

        declaredcell = "cell_" + cellName

        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n" % declaredcell)
        cells_file.write("union {\n")

        prefix = ""

        segments = cell.morphology.segments

        distpoints = {}
        proxpoints = {}

        for segment in segments:

            id = int(segment.id)

            distal = segment.distal

            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            # Clamp very thin segments to a minimum visible diameter
            r = max(float(distal.diameter) / 2.0, args.mindiam)

            if x - r < minXc:
                minXc = x - r
            if y - r < minYc:
                minYc = y - r
            if z - r < minZc:
                minZc = z - r

            if x + r > maxXc:
                maxXc = x + r
            if y + r > maxYc:
                maxYc = y + r
            if z + r > maxZc:
                maxZc = z + r

            distalpoint = "<%f, %f, %f>, %f " % (x, y, z, r)

            distpoints[id] = distalpoint
            cell_id_vs_seg_id_vs_distal[cell.id][id] = (x, y, z)

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f " % (
                    float(proximal.x),
                    float(proximal.y),
                    float(proximal.z),
                    max(float(proximal.diameter) / 2.0, args.mindiam),
                )

                cell_id_vs_seg_id_vs_proximal[cell.id][id] = (float(proximal.x), float(proximal.y), float(proximal.z))
            else:
                # No explicit proximal: reuse the parent segment's distal point
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = cell_id_vs_seg_id_vs_distal[cell.id][parent]

            proxpoints[id] = proximalpoint

            shape = "cone"

            if proximalpoint == distalpoint:
                # Degenerate cone (same ends) -> render as a sphere
                shape = "sphere"
                proximalpoint = ""

            if shape == "cone" and (proximalpoint.split(">")[0] == distalpoint.split(">")[0]):
                # Same coordinates but different radii: zero-length segment
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n" % (id, proximalpoint, distalpoint)
                print_comment_v(comment)
                cells_file.write("    // " + comment)
            else:
                cells_file.write("    %s {\n" % shape)
                cells_file.write("        %s\n" % distalpoint)
                if len(proximalpoint):
                    cells_file.write("        %s\n" % proximalpoint)
                cells_file.write("        //%s_%s.%s\n" % ("CELL_GROUP_NAME", "0", id))
                cells_file.write("    }\n")

        cells_file.write("    pigment { color rgb <%f,%f,%f> }\n" % (random.random(), random.random(), random.random()))

        cells_file.write("}\n\n")

    if splitOut:
        pov_file.write('#include "' + cf + '"\n\n')
        pov_file.write('#include "' + nf + '"\n\n')

    pov_file.write(
        """\n/*\n  Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 5.000000\n    }\n    pigment { color rgb <1,0,0> }\n}\n"""
        % _DUMMY_CELL
    )

    pov_file.write(
        """\n/*\n  Defining the spheres to use for end points of connections...\n*/\n#declare conn_start_point = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 3.000000\n    }\n    pigment { color rgb <0,1,0> }\n}\n\n#declare conn_end_point = union {\n    sphere {\n        <0.000000, 0.000000, 0.000000>, 3.000000\n    }\n    pigment { color rgb <1,0,0> }\n}\n"""
    )

    positions = {}
    popElements = nml_doc.networks[0].populations

    pop_id_vs_cell = {}

    print_comment_v("There are %i populations in the file" % len(popElements))

    # Place one instance of the declared cell per population instance
    for pop in popElements:

        name = pop.id
        celltype = pop.component
        instances = pop.instances

        # Py3 fix: 'in' instead of dict.has_key()
        if pop.component in cell_id_vs_cell:
            pop_id_vs_cell[pop.id] = cell_id_vs_cell[pop.component]

        info = "Population: %s has %i positioned cells of type: %s" % (name, len(instances), celltype)
        print_comment_v(info)

        colour = "1"

        for prop in pop.properties:
            if prop.tag == "color":
                colour = prop.value
                colour = colour.replace(" ", ",")
                # print "Colour determined to be: "+colour

        net_file.write("\n\n/* " + info + " */\n\n")

        pop_positions = {}

        if not celltype in declaredcells:
            # Unknown cell type: fall back to the dummy cell with zero extent
            cell_definition = _DUMMY_CELL
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
        else:
            cell_definition = declaredcells[celltype]

        for instance in instances:

            location = instance.location
            id = int(instance.id)
            net_file.write("object {\n")
            net_file.write("    %s\n" % cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = (x, y, z)

            # Grow the network bounding box by cell extent + position
            if x + minXc < minX:
                minX = x + minXc
            if y + minYc < minY:
                minY = y + minYc
            if z + minZc < minZ:
                minZ = z + minZc

            if x + maxXc > maxX:
                maxX = x + maxXc
            if y + maxYc > maxY:
                maxY = y + maxYc
            if z + maxZc > maxZ:
                maxZ = z + maxZc

            net_file.write("    translate <%s, %s, %s>\n" % (x, y, z))

            if colour == "1":
                # '1' means: pick a random colour per instance
                colour = "%f,%f,%f" % (random.random(), random.random(), random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }" % (colour))

            net_file.write("\n    //%s_%s\n" % (name, id))

            net_file.write("}\n")

        positions[name] = pop_positions

        if len(instances) == 0 and int(pop.size > 0):

            # Population declared by size only (no explicit instances):
            # draw a single representative cell at the origin
            info = "Population: %s has %i unpositioned cells of type: %s" % (name, pop.size, celltype)
            print_comment_v(info)

            colour = "1"
            """
            if pop.annotation:
                print dir(pop.annotation)
                print pop.annotation.anytypeobjs_
                print pop.annotation.member_data_items_[0].name
                print dir(pop.annotation.member_data_items_[0])
                for prop in pop.annotation.anytypeobjs_:
                    print prop
                    if len(prop.getElementsByTagName('meta:tag'))>0 and prop.getElementsByTagName('meta:tag')[0].childNodes[0].data == 'color':
                        #print prop.getElementsByTagName('meta:tag')[0].childNodes
                        colour = prop.getElementsByTagName('meta:value')[0].childNodes[0].data
                        colour = colour.replace(" ", ",")
                    elif prop.hasAttribute('tag') and prop.getAttribute('tag') == 'color':
                        colour = prop.getAttribute('value')
                        colour = colour.replace(" ", ",")
                    print "Colour determined to be: "+colour
            """

            net_file.write("\n\n/* " + info + " */\n\n")

            net_file.write("object {\n")
            net_file.write("    %s\n" % cell_definition)
            x = 0
            y = 0
            z = 0

            if x + minXc < minX:
                minX = x + minXc
            if y + minYc < minY:
                minY = y + minYc
            if z + minZc < minZ:
                minZ = z + minZc

            if x + maxXc > maxX:
                maxX = x + maxXc
            if y + maxYc > maxY:
                maxY = y + maxYc
            if z + maxZc > maxZ:
                maxZ = z + maxZc

            net_file.write("    translate <%s, %s, %s>\n" % (x, y, z))

            if colour == "1":
                colour = "%f,%f,%f" % (random.random(), random.random(), random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }" % (colour))

            # NOTE(review): 'id' here is left over from an earlier loop
            # iteration (no instances exist in this branch) — stale value
            # used only inside a POV comment; confirm intent
            net_file.write("\n    //%s_%s\n" % (name, id))

            net_file.write("}\n")

    # print positions

    if (
        args.conns or args.conn_points
    ):  # Note: segment specific connections not implemented yet... i.e. connections from dends to axons...
        # print_comment_v("************************\n*\n*  Note: connection lines in 3D do not yet target dendritic locations!\n*\n************************")
        for projection in nml_doc.networks[0].projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            connections = projection.connections + projection.connection_wds
            print_comment_v("Adding %i connections %s -> %s " % (len(connections), pre, post))
            # print cell_id_vs_seg_id_vs_distal
            # print cell_id_vs_seg_id_vs_proximal
            for connection in connections:
                pre_cell_id = connection.get_pre_cell_id()
                post_cell_id = connection.get_post_cell_id()

                pre_loc = (0, 0, 0)
                # Py3 fix: 'in' instead of dict.has_key()
                if pre in positions:
                    if len(positions[pre]) > 0:
                        pre_loc = positions[pre][pre_cell_id]

                post_loc = (0, 0, 0)
                if post in positions:
                    post_loc = positions[post][post_cell_id]

                if projection.presynaptic_population in pop_id_vs_cell:
                    # Offset endpoint by the attachment point along the segment
                    pre_cell = pop_id_vs_cell[projection.presynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[pre_cell.id][int(connection.pre_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[pre_cell.id][int(connection.pre_segment_id)]
                    m = [p[i] + float(connection.pre_fraction_along) * (d[i] - p[i]) for i in [0, 1, 2]]
                    print_comment("Pre point is %s, %s between %s and %s" % (m, connection.pre_fraction_along, p, d))
                    pre_loc = [pre_loc[i] + m[i] for i in [0, 1, 2]]

                if projection.postsynaptic_population in pop_id_vs_cell:
                    post_cell = pop_id_vs_cell[projection.postsynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[post_cell.id][int(connection.post_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[post_cell.id][int(connection.post_segment_id)]
                    m = [p[i] + float(connection.post_fraction_along) * (d[i] - p[i]) for i in [0, 1, 2]]
                    print_comment("Post point is %s, %s between %s and %s" % (m, connection.post_fraction_along, p, d))
                    post_loc = [post_loc[i] + m[i] for i in [0, 1, 2]]

                if post_loc != pre_loc:
                    info = "// Connection from %s:%s %s -> %s:%s %s\n" % (
                        pre,
                        pre_cell_id,
                        pre_loc,
                        post,
                        post_cell_id,
                        post_loc,
                    )

                    print_comment(info)
                    net_file.write("// %s" % info)
                    if args.conns:
                        net_file.write(
                            "cylinder { <%s,%s,%s>, <%s,%s,%s>, .5  pigment{color Grey}}\n"
                            % (pre_loc[0], pre_loc[1], pre_loc[2], post_loc[0], post_loc[1], post_loc[2])
                        )
                    if args.conn_points:
                        net_file.write(
                            "object { conn_start_point translate <%s,%s,%s> }\n" % (pre_loc[0], pre_loc[1], pre_loc[2])
                        )
                        net_file.write(
                            "object { conn_end_point translate <%s,%s,%s> }\n" % (post_loc[0], post_loc[1], post_loc[2])
                        )

    plane = """
plane {
   y, vv(-1)
   pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
"""

    footer = """

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
    0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
    0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
    0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
  <uu(5),uu(2),uu(5)>
  color rgb <1,1,1>
}
light_source {
  <uu(-5),uu(2),uu(-5)>
  color rgb <1,1,1>
}
light_source {
  <uu(5),uu(-2),uu(-5)>
  color rgb <1,1,1>
}
light_source {
  <uu(-5),uu(-2),uu(5)>
  color rgb <1,1,1>
}

// Trying to view box
camera {
  location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
  look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
\n""" % (
        minX,
        minY,
        minZ,
        maxX,
        maxY,
        maxZ,
        args.posx,
        args.scalex,
        args.posy,
        args.scaley,
        args.posz,
        args.scalez,
        args.viewx,
        args.viewy,
        args.viewz,
        (plane if args.plane else ""),
    )  ### end of footer

    pov_file.write(footer)

    pov_file.close()

    if args.movie:
        # Emit a POV-Ray .ini animation file driving 'clock' from 0 to 1
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")

        ini_movie = """
Antialias=On

+W800 +H600

Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off
"""
        ini_file = open(ini_file_name, "w")
        ini_file.write(ini_movie % (pov_file_name, args.frames))
        ini_file.close()
        print_comment_v(
            "Created file for generating %i movie frames at: %s. To run this type:\n\n    povray %s\n"
            % (args.frames, ini_file_name, ini_file_name)
        )
    else:
        print_comment_v(
            "Created file for generating image of network. To run this type:\n\n    povray %s\n" % (pov_file_name)
        )
        print_comment_v(
            "Or for higher resolution:\n\n    povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n"
            % (pov_file_name)
        )
def main():
    """Convert a NeuroML2 network file into a POV-Ray scene file.

    Reads the NeuroML file named on the command line, writes each cell's
    morphology as POV-Ray cone/sphere primitives, places population
    instances, optionally draws connections and inputs, and appends
    lights/camera. Optionally writes a movie .ini file. Side effects
    only (files on disk); returns nothing.
    """
    args = process_args()

    xmlfile = args.neuroml_file

    pov_file_name = xmlfile
    endings = [".xml", ".h5", ".nml"]
    for e in endings:
        if pov_file_name.endswith(e):
            # FIX: str.replace returns a new string; the original call
            # discarded the result, so the output name was always
            # "<xmlfile>.pov" via the fallback below.
            pov_file_name = pov_file_name.replace(e, ".pov")

    if pov_file_name == xmlfile:
        pov_file_name += '.pov'

    pov_file = open(pov_file_name, "w")

    header = '''
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}
\n'''  # end of header

    pov_file.write(header % (args.background))

    # By default everything goes into the single .pov file; with --split
    # the cell and net geometry go to separate .inc files.
    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print_comment_v("Saving into %s and %s and %s" % (pov_file_name, cf, nf))

    print_comment_v("Converting XML file: %s to %s" % (xmlfile, pov_file_name))

    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v, optimized=True)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)

    # Bounding boxes: the *c variants track a single cell's extent; the
    # plain ones track the whole network (cell extent + instance offsets).
    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print_comment_v("There are %i cells in the file" % len(cell_elements))

    cell_id_vs_seg_id_vs_proximal = {}
    cell_id_vs_seg_id_vs_distal = {}
    cell_id_vs_cell = {}

    for cell in cell_elements:
        cellName = cell.id
        cell_id_vs_cell[cell.id] = cell
        print_comment_v("Handling cell: %s" % cellName)
        cell_id_vs_seg_id_vs_proximal[cell.id] = {}
        cell_id_vs_seg_id_vs_distal[cell.id] = {}

        declaredcell = "cell_" + cellName
        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n" % declaredcell)
        cells_file.write("union {\n")

        prefix = ""

        segments = cell.morphology.segments
        distpoints = {}
        proxpoints = {}

        for segment in segments:
            id = int(segment.id)

            distal = segment.distal
            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = max(float(distal.diameter) / 2.0, args.mindiam)

            if x - r < minXc: minXc = x - r
            if y - r < minYc: minYc = y - r
            if z - r < minZc: minZc = z - r
            if x + r > maxXc: maxXc = x + r
            if y + r > maxYc: maxYc = y + r
            if z + r > maxZc: maxZc = z + r

            distalpoint = "<%f, %f, %f>, %f " % (x, y, z, r)
            distpoints[id] = distalpoint
            cell_id_vs_seg_id_vs_distal[cell.id][id] = (x, y, z)

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f " % (float(proximal.x), float(proximal.y), float(proximal.z), max(float(proximal.diameter) / 2.0, args.mindiam))
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = (float(proximal.x), float(proximal.y), float(proximal.z))
            else:
                # No explicit proximal point: inherit the parent segment's
                # distal point (standard NeuroML convention).
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = cell_id_vs_seg_id_vs_distal[cell.id][parent]

            proxpoints[id] = proximalpoint

            shape = "cone"
            if proximalpoint == distalpoint:
                shape = "sphere"
                proximalpoint = ""

            # A cone whose two centres coincide (radii may differ) is
            # degenerate in POV-Ray, so it is skipped with a comment.
            if shape == "cone" and (proximalpoint.split('>')[0] == distalpoint.split('>')[0]):
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n" % (id, proximalpoint, distalpoint)
                print_comment_v(comment)
                cells_file.write(" // " + comment)
            else:
                cells_file.write(" %s {\n" % shape)
                cells_file.write(" %s\n" % distalpoint)
                if len(proximalpoint):
                    cells_file.write(" %s\n" % proximalpoint)
                cells_file.write(" //%s_%s.%s\n" % ('CELL_GROUP_NAME', '0', id))
                cells_file.write(" }\n")

            if args.segids:
                cells_file.write(' text {\n')
                cells_file.write(' ttf "timrom.ttf" "------- Segment: %s" .1, 0.01\n' % (segment.id))
                cells_file.write(' pigment { Red }\n')
                cells_file.write(' rotate <0,180,0>\n')
                cells_file.write(' scale <10,10,10>')
                cells_file.write(' translate %s>\n' % distalpoint.split('>')[0])
                cells_file.write(' }\n')

        cells_file.write(" pigment { color rgb <%f,%f,%f> }\n" % (random.random(), random.random(), random.random()))

        cells_file.write("}\n\n")

    if splitOut:
        pov_file.write("#include \"" + cf + "\"\n\n")
        pov_file.write("#include \"" + nf + "\"\n\n")

    pov_file.write('''\n/*\n Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s = union {
 sphere {
 <0.000000, 0.000000, 0.000000>, 5.000000
 }
 pigment { color rgb <1,0,0> }
}\n''' % _DUMMY_CELL)

    pov_file.write('''\n/*\n Defining the spheres to use for end points of connections...\n*/ \n#declare conn_start_point = union {
 sphere {
 <0.000000, 0.000000, 0.000000>, 3.000000
 }
 pigment { color rgb <0,1,0> }
}\n \n#declare conn_end_point = union {
 sphere {
 <0.000000, 0.000000, 0.000000>, 3.000000
 }
 pigment { color rgb <1,0,0> }
}\n \n#declare input_object = union {
 cone {
 <0, 0, 0>, 0.1 // Center and radius of one end
 <0, -40, 0>, 2.5 // Center and radius of other end
 }
 pigment { color rgb <0.2,0.2,0.8> }
}\n''')

    positions = {}
    popElements = nml_doc.networks[0].populations
    pop_id_vs_cell = {}

    print_comment_v("There are %i populations in the file" % len(popElements))

    for pop in popElements:
        name = pop.id
        celltype = pop.component
        instances = pop.instances
        if pop.component in cell_id_vs_cell.keys():
            pop_id_vs_cell[pop.id] = cell_id_vs_cell[pop.component]

        info = "Population: %s has %i positioned cells of type: %s" % (name, len(instances), celltype)
        print_comment_v(info)

        colour = "1"
        substitute_radius = None

        for prop in pop.properties:
            if prop.tag == 'color':
                colour = prop.value
                colour = colour.replace(" ", ",")
            if prop.tag == 'radius':
                substitute_radius = float(prop.value)

        net_file.write("\n\n/* " + info + " */\n\n")

        pop_positions = {}

        if not celltype in declaredcells:
            # Unknown cell type: fall back to a dummy sphere; the per-cell
            # extents are zeroed so only instance positions affect bounds.
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
            if substitute_radius:
                dummy_cell_name = define_dummy_cell(name, substitute_radius, pov_file)
                cell_definition = dummy_cell_name
            else:
                cell_definition = _DUMMY_CELL
        else:
            cell_definition = declaredcells[celltype]

        for instance in instances:
            location = instance.location
            id = int(instance.id)
            net_file.write("object {\n")
            net_file.write(" %s\n" % cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = (x, y, z)

            if x + minXc < minX: minX = x + minXc
            if y + minYc < minY: minY = y + minYc
            if z + minZc < minZ: minZ = z + minZc
            if x + maxXc > maxX: maxX = x + maxXc
            if y + maxYc > maxY: maxY = y + maxYc
            if z + maxZc > maxZ: maxZ = z + maxZc

            net_file.write(" translate <%s, %s, %s>\n" % (x, y, z))

            # "1" means no colour property was set: pick a random colour
            # once, then reuse it for the rest of this population.
            if colour == '1':
                colour = "%f,%f,%f" % (random.random(), random.random(), random.random())

            if colour is not None:
                net_file.write(" pigment { color rgb <%s> }" % (colour))

            net_file.write("\n //%s_%s\n" % (name, id))
            net_file.write("}\n")

        positions[name] = pop_positions

        if len(instances) == 0 and int(pop.size > 0):
            # Population declared by size only (no explicit instances):
            # draw a single representative object at the origin.
            info = "Population: %s has %i unpositioned cells of type: %s" % (name, pop.size, celltype)
            print_comment_v(info)
            colour = "1"

            net_file.write("\n\n/* " + info + " */\n\n")
            net_file.write("object {\n")
            net_file.write(" %s\n" % cell_definition)
            x = 0
            y = 0
            z = 0

            if x + minXc < minX: minX = x + minXc
            if y + minYc < minY: minY = y + minYc
            if z + minZc < minZ: minZ = z + minZc
            if x + maxXc > maxX: maxX = x + maxXc
            if y + maxYc > maxY: maxY = y + maxYc
            if z + maxZc > maxZ: maxZ = z + maxZc

            net_file.write(" translate <%s, %s, %s>\n" % (x, y, z))

            if colour == '1':
                colour = "%f,%f,%f" % (random.random(), random.random(), random.random())

            if colour is not None:
                net_file.write(" pigment { color rgb <%s> }" % (colour))

            # NOTE(review): `id` here is left over from a previous loop
            # iteration — looks unintended but is preserved; confirm.
            net_file.write("\n //%s_%s\n" % (name, id))
            net_file.write("}\n")

    if args.conns or args.conn_points:
        projections = nml_doc.networks[0].projections + nml_doc.networks[0].electrical_projections + nml_doc.networks[0].continuous_projections
        for projection in projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            if isinstance(projection, neuroml.Projection):
                connections = []
                for c in projection.connection_wds:
                    connections.append(c)
                for c in projection.connections:
                    connections.append(c)
                color = 'Grey'
            elif isinstance(projection, neuroml.ElectricalProjection):
                connections = projection.electrical_connections + projection.electrical_connection_instances + projection.electrical_connection_instance_ws
                color = 'Yellow'
            elif isinstance(projection, neuroml.ContinuousProjection):
                connections = projection.continuous_connections + projection.continuous_connection_instances + projection.continuous_connection_instance_ws
                color = 'Blue'

            print_comment_v("Adding %i connections for %s: %s -> %s " % (len(connections), projection.id, pre, post))

            for connection in connections:
                pre_cell_id = connection.get_pre_cell_id()
                post_cell_id = connection.get_post_cell_id()

                pre_loc = (0, 0, 0)
                if pre in positions.keys():
                    if len(positions[pre]) > 0:
                        pre_loc = positions[pre][pre_cell_id]
                post_loc = (0, 0, 0)
                if post in positions.keys():
                    post_loc = positions[post][post_cell_id]

                if projection.presynaptic_population in pop_id_vs_cell.keys():
                    pre_cell = pop_id_vs_cell[projection.presynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[pre_cell.id][connection.get_pre_segment_id()]
                    p = cell_id_vs_seg_id_vs_proximal[pre_cell.id][connection.get_pre_segment_id()]
                    # Interpolate along the segment by fraction_along.
                    m = [p[i] + connection.get_pre_fraction_along() * (d[i] - p[i]) for i in [0, 1, 2]]
                    print_comment("Pre point is %s, %s between %s and %s" % (m, connection.get_pre_fraction_along(), p, d))
                    pre_loc = [pre_loc[i] + m[i] for i in [0, 1, 2]]

                if projection.postsynaptic_population in pop_id_vs_cell.keys():
                    post_cell = pop_id_vs_cell[projection.postsynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[post_cell.id][connection.get_post_segment_id()]
                    p = cell_id_vs_seg_id_vs_proximal[post_cell.id][connection.get_post_segment_id()]
                    m = [p[i] + connection.get_post_fraction_along() * (d[i] - p[i]) for i in [0, 1, 2]]
                    print_comment("Post point is %s, %s between %s and %s" % (m, connection.get_post_fraction_along(), p, d))
                    post_loc = [post_loc[i] + m[i] for i in [0, 1, 2]]

                if post_loc != pre_loc:
                    info = "// Connection from %s:%s %s -> %s:%s %s\n" % (pre, pre_cell_id, pre_loc, post, post_cell_id, post_loc)
                    print_comment(info)
                    net_file.write("// %s" % info)
                    if args.conns:
                        net_file.write("cylinder { <%s,%s,%s>, <%s,%s,%s>, .5 pigment{color %s}}\n" % (pre_loc[0], pre_loc[1], pre_loc[2], post_loc[0], post_loc[1], post_loc[2], color))
                    if args.conn_points:
                        net_file.write("object { conn_start_point translate <%s,%s,%s> }\n" % (pre_loc[0], pre_loc[1], pre_loc[2]))
                        net_file.write("object { conn_end_point translate <%s,%s,%s> }\n" % (post_loc[0], post_loc[1], post_loc[2]))

    if args.inputs:
        for il in nml_doc.networks[0].input_lists:
            for input in il.input:
                popi = il.populations
                cell_id = input.get_target_cell_id()
                cell = pop_id_vs_cell[popi]

                loc = (0, 0, 0)
                if popi in positions.keys():
                    if len(positions[popi]) > 0:
                        loc = positions[popi][cell_id]

                d = cell_id_vs_seg_id_vs_distal[cell.id][input.get_segment_id()]
                p = cell_id_vs_seg_id_vs_proximal[cell.id][input.get_segment_id()]
                m = [p[i] + input.get_fraction_along() * (d[i] - p[i]) for i in [0, 1, 2]]

                input_info = "Input on cell %s:%s at %s; point %s along (%s -> %s): %s" % (popi, cell_id, loc, input.get_fraction_along(), d, p, m)

                loc = [loc[i] + m[i] for i in [0, 1, 2]]

                net_file.write("/* %s */\n" % input_info)
                net_file.write("object { input_object translate <%s,%s,%s> }\n\n" % (loc[0], loc[1], loc[2]))

    plane = '''
plane {
 y, vv(-1)
 pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
'''

    footer = '''

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
 0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
 0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
 0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
 <uu(5),uu(2),uu(5)>
 color rgb <1,1,1>
}
light_source {
 <uu(-5),uu(2),uu(-5)>
 color rgb <1,1,1>
}
light_source {
 <uu(5),uu(-2),uu(-5)>
 color rgb <1,1,1>
}
light_source {
 <uu(-5),uu(-2),uu(5)>
 color rgb <1,1,1>
}

// Trying to view box
camera {
 location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
 look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
\n''' % (minX, minY, minZ, maxX, maxY, maxZ,
         args.posx, args.scalex, args.posy, args.scaley, args.posz, args.scalez,
         args.viewx, args.viewy, args.viewz,
         (plane if args.plane else ""))  # end of footer

    pov_file.write(footer)
    pov_file.close()

    if args.movie:
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")

        ini_movie = '''
Antialias=On

+W800 +H600

Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off
'''
        ini_file = open(ini_file_name, 'w')
        ini_file.write(ini_movie % (pov_file_name, args.frames))
        ini_file.close()

        print_comment_v("Created file for generating %i movie frames at: %s. To run this type:\n\n povray %s\n" % (args.frames, ini_file_name, ini_file_name))
    else:
        print_comment_v("Created file for generating image of network. To run this type:\n\n povray %s\n" % (pov_file_name))
        print_comment_v("Or for higher resolution:\n\n povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n" % (pov_file_name))
def generate_Vm_vs_time_plot(nml2_file, cell_id, inj_amp_nA=80, delay_ms=20, inj_dur_ms=60, sim_dur_ms=100, dt=0.05, plot_voltage_traces=False, show_plot_already=True, simulator="jNeuroML", include_included=True):
    """Run a current-clamp simulation of the cell in nml2_file and plot Vm vs time.

    Builds a small NeuroML network ('hhpop' containing one 'hhcell') with a
    pulse generator input, wraps it in a LEMS simulation, runs it with the
    chosen simulator and returns the id of the LEMS output file component.

    NOTE(review): inj_amp_nA is formatted below as '%spA', so the injected
    amplitude is actually in picoamps despite the parameter name — confirm
    intended units before changing either.
    """
    ref = "Test"
    print_comment_v("Generating Vm(mV) vs Time(ms) plot for cell %s in %s using %s (Inj %snA / %sms dur after %sms delay)" % (cell_id, nml2_file, simulator, inj_amp_nA, inj_dur_ms, delay_ms))

    sim_id = 'Vm_%s' % ref
    duration = sim_dur_ms
    ls = LEMSSimulation(sim_id, sim_dur_ms, dt)
    ls.include_neuroml2_file(nml2_file, include_included=include_included)
    ls.assign_simulation_target('network')

    # Build a minimal NeuroML document wrapping the cell in a network.
    nml_doc = nml.NeuroMLDocument(id=cell_id)
    nml_doc.includes.append(nml.IncludeType(href=nml2_file))

    net = nml.Network(id="network")
    nml_doc.networks.append(net)

    input_id = ("input_%s" % str(inj_amp_nA).replace('.', '_'))
    pg = nml.PulseGenerator(id=input_id, delay="%sms" % delay_ms, duration='%sms' % inj_dur_ms, amplitude='%spA' % inj_amp_nA)
    nml_doc.pulse_generators.append(pg)

    # NOTE(review): population component is hard-coded to 'hhcell' — the
    # cell in nml2_file is presumably expected to have that id; verify.
    pop_id = 'hhpop'
    pop = nml.Population(id=pop_id, component='hhcell', size=1, type="populationList")

    inst = nml.Instance(id=0)
    pop.instances.append(inst)
    inst.location = nml.Location(x=0, y=0, z=0)
    net.populations.append(pop)

    # Add these to cells
    input_list = nml.InputList(id='il_%s' % input_id, component=pg.id, populations=pop_id)
    input = nml.Input(id='0', target='../hhpop/0/hhcell', destination="synapses")
    input_list.input.append(input)
    net.input_lists.append(input_list)

    sim_file_name = '%s.sim.nml' % sim_id
    pynml.write_neuroml2_file(nml_doc, sim_file_name)
    ls.include_neuroml2_file(sim_file_name)

    # Display and file output for the membrane potential trace.
    disp0 = 'Voltage_display'
    ls.create_display(disp0, "Voltages", "-90", "50")
    ls.add_line_to_display(disp0, "V", "hhpop/0/hhcell/v", scale='1mV')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % sim_id)
    ls.add_column_to_output_file(of0, "V", "hhpop/0/hhcell/v")

    lems_file_name = ls.save_to_file()

    # NOTE(review): an unrecognised simulator leaves `results` unset, but
    # `results` is never read afterwards so this does not currently fail.
    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    return of0
def main(argv):
    """Generate per-frame POV-Ray files coloured by simulated voltages.

    Reloads saved LEMS simulation data, maps each cell's voltage at the
    selected frame times to a POV-Ray pigment, and rewrites the network
    (or single-cell) .inc files plus a .pov per frame. Also writes a
    Windows .bat and a Unix .sh script to render all frames.
    """
    args = process_args()

    results = pynml.reload_saved_data(args.lems_file_name, plot=False)

    times = [t * 1000 for t in results['t']]  # convert s -> ms
    dt = times[1] - times[0]

    # Pick every (skip+1)-th time point up to endTime.
    t = 0
    times_used = []
    frame_indices = []
    to_skip = 0
    index = 0
    while t <= args.endTime:
        if to_skip == 0:
            times_used.append(t)
            frame_indices.append(index)
            to_skip = args.skip
        else:
            to_skip -= 1
        index += 1
        t = times[index]

    print_comment_v("There are %i time points total, max: %f ms, dt: %f ms" % (len(times), times[-1], dt))
    print_comment_v("times_used: %s; frame_indices %s" % (times_used, frame_indices))
    print_comment_v("All refs: %s" % results.keys())

    # Map "pop_index" -> list of pigment strings, one per frame.
    volt_colors = {}

    for ref in results.keys():
        if ref != 't':
            pathBits = ref.split('/')
            pop = pathBits[0]
            index = pathBits[1]
            seg = pathBits[3]
            ref2 = '%s_%s' % (pop, index)
            if seg == '0' or seg == 'v':
                volt_color = []
                for i in frame_indices:
                    v = results[ref][i] * 1000  # V -> mV
                    colour = get_rainbow_color_for_volts(v, args) if args.rainbow else get_color_for_volts(v, args)
                    volt_color.append(colour)
                volt_colors[ref2] = volt_color

    print_comment_v("All refs: %s" % volt_colors.keys())
    print_comment_v("All volt_colors: %s" % volt_colors)

    t = args.startTime
    index = 0

    # give the single frames an alphabetical order
    maxind = "00000"
    ind = "00000"

    bat_file_name = "%s_pov.bat" % (args.prefix)
    bat_file = open(bat_file_name, 'w')
    sh_file_name = "%s_pov.sh" % (args.prefix)
    sh_file = open(sh_file_name, 'w')

    for fi in frame_indices:
        t = times[fi]
        print_comment_v("\n---- Exporting for time: %f, index %i frame index %i ----\n" % (t, index, fi))

        if not args.singlecell:
            # Rewrite the network include, replacing each "//ref" comment
            # line with the pigment for that cell at this frame.
            in_file_name = args.prefix + "_net.inc"
            in_file = open(in_file_name)
            out_file_name = args.prefix + "_net.inc" + str(index)
            out_file = open(out_file_name, 'w')
            print_comment_v("in_file_name %s; out_file_name: %s" % (in_file_name, out_file_name))
            for line in in_file:
                if line.strip().startswith("//"):
                    ref = line.strip()[2:]
                    if ref in volt_colors.keys():
                        vs = volt_colors[ref]
                        out_file.write(" %s // %s t= %s\n" % (vs[index], ref, t))
                    elif ref + ".0" in volt_colors.keys():
                        vs = volt_colors[ref + ".0"]
                        out_file.write(" " + vs[index] + " //" + ref + " t= " + str(t) + "\n")
                    else:
                        out_file.write("// No ref there: " + ref + "\n")
                        print_comment_v("Missing ref: " + ref)
                else:
                    out_file.write(line)
            in_file.close()
            out_file.close()
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))

            in_file = open(args.prefix + ".pov")
            out_file_name = "%s_T%i.pov" % (args.prefix, index)
            out_file = open(out_file_name, 'w')

            clock = args.rotations * (t - args.startTime) / (args.endTime - args.startTime)

            pre = '%s_net.inc' % args.prefix
            pre = pre.split('/')[-1]
            post = '%s_net.inc%i' % (args.prefix, index)
            post = post.split('/')[-1]
            print_comment_v("Swapping %s for %s" % (pre, post))
            for line in in_file:
                if line.find(pre) >= 0:
                    out_file.write(line.replace(pre, post))
                else:
                    out_file.write(line.replace("clock", str(clock)))
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))
            in_file.close()
            out_file.close()

            toEx = os.path.realpath(out_file_name)

            bat_file.write("C:\\Users\\Padraig\\AppData\\Local\\Programs\\POV-Ray\\v3.7\\bin\\pvengine.exe %s /nr /exit\n" % toEx)
            sh_file.write("povray %s %s\n" % (args.povrayOptions, toEx))
        else:
            ind = maxind[0:len(maxind) - len(str(index))]  # compute index indentation
            in_file = open(args.prefix + "_cells.inc")
            out_file_name = args.prefix + "_cells.inc" + ind + str(index)
            out_file = open(out_file_name, 'w')
            dummy_ref = 'CELL_GROUP_NAME_0'
            for line in in_file:
                if line.strip().startswith("//"):
                    ref = line.strip()[2:]
                    ref = ref.replace(dummy_ref, args.singlecell)
                    # FIX: original referenced the undefined name `volts`
                    # here (NameError when --singlecell was used); the
                    # colour map is called volt_colors.
                    if ref in volt_colors.keys():
                        vs = volt_colors[ref]
                        out_file.write(" " + vs[index] + "\n//" + ref + " t= " + ind + str(t) + "\n")
                    else:
                        out_file.write("//No ref found: " + ref + ", was looking for " + dummy_ref + "\n")
                else:
                    out_file.write(line)
            in_file.close()
            out_file.close()
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))

            in_file = open(args.prefix + ".pov")
            out_file_name = "%s_T%s%i.pov" % (args.prefix, ind, index)
            out_file = open(out_file_name, 'w')

            for line in in_file:
                pre = '%s_cells.inc' % args.prefix
                post = '%s_cells.inc%s%i' % (args.prefix, ind, index)
                if line.find(pre) >= 0:
                    out_file.write(line.replace(pre, post))
                else:
                    clock = args.rotations * (t - args.startTime) / (args.endTime - args.startTime)
                    out_file.write(line.replace("clock", str(clock)))
            print_comment_v("Written file: %s for time: %f" % (out_file_name, t))
            in_file.close()
            out_file.close()

            toEx = os.path.realpath(out_file_name)

            bat_file.write("C:\\Users\\Padraig\\AppData\\Local\\Programs\\POV-Ray\\v3.7\\bin\\pvengine.exe %s /nr /exit\n" % toEx)
            sh_file.write("povray %s %s\n" % (args.povrayOptions, toEx))

        index = index + 1

    print_comment_v("Done!: ")
    print_comment_v("\nTo generate images type:\n\n bash %s_pov.sh\n\n" % args.prefix)
def main(argv):
    """Annotate rendered frame images and assemble them into a movie.

    Reads the POV-Ray-rendered .png frames, stamps time/voltage-scale/title
    text onto each with OpenCV, writes the annotated copies, then encodes
    them into a video file. Side effects only (files on disk).
    """
    args = process_args()

    print_comment_v("Making a movie...")

    img_files_pre = []
    img_files_post = []

    gen_images = True
    gen_movie = False
    #gen_images = False
    gen_movie = True

    pref = args.prefix + '_T00'
    pref = args.prefix

    if gen_images:
        for i in range(args.frames):
            # Zero-pad the frame index to the width of the frame count.
            index = str(i + 1)
            while len(index) < (len(str(args.frames))):
                index = "0" + index
            file_name1 = "%s%s.png" % (pref, index)
            file_name2 = "%s%s.png" % (pref, str(i + 1))
            if not os.path.isfile(file_name1):
                if not os.path.isfile(file_name2):
                    print_comment_v("File does not exist: %s (neither does %s)" % (file_name1, file_name2))
                    print_comment_v("Change network prefix parameter (currently %s) and/or number of frames to load (currently %i)" % (pref, args.frames))
                    exit(1)
                else:
                    file_name1 = file_name2
            img_files_pre.append(file_name1)

        print_comment_v("Found %i image files: [%s, ..., %s]" % (len(img_files_pre), img_files_pre[0], img_files_pre[-1]))

        for i in range(len(img_files_pre)):
            img_file = img_files_pre[i]
            img = cv2.imread(img_file)
            height, width, layers = img.shape
            print_comment_v("Read in file: %s (%sx%s)" % (img_file, width, height))
            show = False
            if show:
                cv2.imshow('Image: ' + img_file, img)
                cv2.waitKey(0)
                cv2.destroyAllWindows()
            t = args.startTime + i * float(args.endTime - args.startTime) / args.frames
            cv2.putText(img, 'Time: %.3fms' % t, (width - 220, 50), font, 1, font_colour, scale_font)
            if args.activity:
                cv2.putText(img, '%imV : %imV' % (args.minV, args.maxV), (20, 50), font, 1, font_colour, scale_font)
            cv2.putText(img, args.title, (15, 550), font, 1, font_colour, scale_font)
            cv2.putText(img, args.left, (15, 570), font, 1, font_colour, scale_font)
            generate_volt_scale(img, 20, 65, 12, 200, 50)
            new_file = args.name + '_' + img_file
            cv2.imwrite(new_file, img)
            print_comment_v("Written %s" % new_file)

    if gen_movie:
        for i in range(args.frames + 1):
            index = str(i)
            while len(index) < (len(str(args.frames))):
                index = "0" + index
            img_files_post.append("%s_%s%s.png" % (args.name, pref, index))

        imgs = []
        for i in range(len(img_files_post)):
            img_file = img_files_post[i]
            img = cv2.imread(img_file)
            print_comment_v("Read in %s" % img_file)
            imgs.append(img)

        # Last assignment wins; earlier ones kept for easy switching.
        format = 'avi'
        #format = 'mpg'
        format = 'divx'
        format = 'mp4'

        fps = 24

        # FIX: these comparisons used `is`, which tests object identity and
        # only works for equal string literals by CPython interning accident;
        # `==` is the correct comparison.
        if format == 'avi':
            fourcc = cv.CV_FOURCC('X', 'V', 'I', 'D')
            mov_file = args.name + '.avi'
            out = cv2.VideoWriter(mov_file, fourcc, fps, (width, height))
        if format == 'divx':
            fourcc = cv.CV_FOURCC('D', 'I', 'V', 'X')
            mov_file = args.name + '.avi'
            out = cv2.VideoWriter(mov_file, -1, fps, (width, height))
        if format == 'mpg':
            fourcc = cv.CV_FOURCC('M', 'J', 'P', 'G')
            mov_file = args.name + '.mpg'
            out = cv2.VideoWriter(mov_file, fourcc, fps, (width, height))
        if format == 'mp4':
            fourcc = cv2.cv.CV_FOURCC('m', 'p', '4', 'v')
            mov_file = args.name + '.avi'
            out = cv2.VideoWriter(mov_file, fourcc, fps, (width, height))

        f = 0
        for img in imgs:
            print_comment_v("Writing frame %i" % f)
            f += 1
            out.write(img)
        out.release()
        print_comment_v("Saved movie file %s" % mov_file)

    print_comment_v("Done!")
def _run_optimisation(a):
    """Run a neurotune evolutionary optimisation described by namespace `a`.

    Parses string-form arguments, runs the optimizer against the NeuroML
    model, re-simulates the fittest candidate, writes a report and a
    plotting helper script into a fresh run directory, optionally shows
    plots, and returns the report dict (or None on dry run).
    """
    # CLI-style arguments may arrive as strings; normalise to lists/dicts.
    if isinstance(a.parameters, str):
        a.parameters = parse_list_arg(a.parameters)
    if isinstance(a.min_constraints, str):
        a.min_constraints = parse_list_arg(a.min_constraints)
    if isinstance(a.max_constraints, str):
        a.max_constraints = parse_list_arg(a.max_constraints)
    if isinstance(a.target_data, str):
        a.target_data = parse_dict_arg(a.target_data)
    if isinstance(a.weights, str):
        a.weights = parse_dict_arg(a.weights)
    if isinstance(a.known_target_values, str):
        a.known_target_values = parse_dict_arg(a.known_target_values)
    if isinstance(a.extra_report_info, str):
        a.extra_report_info = parse_dict_arg(a.extra_report_info)

    pynml.print_comment_v("=====================================================================================")
    pynml.print_comment_v("Starting run_optimisation with: ")
    keys = sorted(a.__dict__.keys())
    for key in keys:
        value = a.__dict__[key]
        pynml.print_comment_v(" %s = %s%s" % (key, ' ' * (30 - len(key)), value))
    pynml.print_comment_v("=====================================================================================")

    if a.dry_run:
        pynml.print_comment_v("Dry run; not running optimization...")
        return

    ref = a.prefix

    # Unique, filesystem-safe run directory derived from the wall clock.
    run_dir = "NT_%s_%s" % (ref, time.ctime().replace(' ', '_').replace(':', '.'))
    os.mkdir(run_dir)

    my_controller = NeuroMLController(ref, a.neuroml_file, a.target, a.sim_time, a.dt, simulator=a.simulator, generate_dir=run_dir, num_parallel_evaluations=a.num_parallel_evaluations, cleanup=a.cleanup)

    peak_threshold = 0

    analysis_var = {'peak_delta': 0, 'baseline': 0, 'dvdt_threshold': 0, 'peak_threshold': peak_threshold}

    sim_var = OrderedDict()

    #make an evaluator, using automatic target evaluation:
    my_evaluator = evaluators.NetworkEvaluator(controller=my_controller, analysis_start_time=a.analysis_start_time, analysis_end_time=a.sim_time, parameters=a.parameters, analysis_var=analysis_var, weights=a.weights, targets=a.target_data)

    #make an optimizer
    my_optimizer = optimizers.CustomOptimizerA(a.max_constraints, a.min_constraints, my_evaluator, population_size=a.population_size, max_evaluations=a.max_evaluations, num_selected=a.num_selected, num_offspring=a.num_offspring, num_elites=a.num_elites, mutation_rate=a.mutation_rate, seeds=None, verbose=a.verbose)

    start = time.time()
    #run the optimizer
    best_candidate, fitness = my_optimizer.optimize(do_plot=False, seed=a.seed, summary_dir=run_dir)

    secs = time.time() - start

    reportj = {}
    info = "Ran %s evaluations (pop: %s) in %f seconds (%f mins total; %fs per eval)\n\n" % (a.max_evaluations, a.population_size, secs, secs / 60.0, (secs / a.max_evaluations))
    report = "----------------------------------------------------\n\n" + info

    reportj['comment'] = info
    reportj['time'] = secs

    # Pair each tuned parameter name with its optimised value.
    for key, value in zip(a.parameters, best_candidate):
        sim_var[key] = value

    # Re-run the fittest individual to get its full voltage traces.
    best_candidate_t, best_candidate_v = my_controller.run_individual(sim_var, show=False, cleanup=False)

    best_candidate_analysis = analysis.NetworkAnalysis(best_candidate_v, best_candidate_t, analysis_var, start_analysis=a.analysis_start_time, end_analysis=a.sim_time)

    best_cand_analysis_full = best_candidate_analysis.analyse()
    best_cand_analysis = best_candidate_analysis.analyse(a.weights.keys())

    report += "---------- Best candidate ------------------------------------------\n"
    report += pp.pformat(best_cand_analysis_full) + "\n\n"
    report += "TARGETS: \n"
    report += pp.pformat(a.target_data) + "\n\n"
    report += "TUNED VALUES:\n"
    report += pp.pformat(best_cand_analysis) + "\n\n"
    report += "FITNESS: %f\n\n" % fitness
    report += "FITTEST: %s\n\n" % pp.pformat(dict(sim_var))

    pynml.print_comment_v(report)

    reportj['fitness'] = fitness
    reportj['fittest vars'] = dict(sim_var)
    reportj['best_cand_analysis_full'] = best_cand_analysis_full
    reportj['best_cand_analysis'] = best_cand_analysis
    reportj['parameters'] = a.parameters
    reportj['analysis_var'] = analysis_var
    reportj['target_data'] = a.target_data
    reportj['weights'] = a.weights
    reportj['analysis_start_time'] = a.analysis_start_time
    reportj['population_size'] = a.population_size
    reportj['max_evaluations'] = a.max_evaluations
    reportj['num_selected'] = a.num_selected
    reportj['num_offspring'] = a.num_offspring
    reportj['mutation_rate'] = a.mutation_rate
    reportj['num_elites'] = a.num_elites
    reportj['seed'] = a.seed
    reportj['simulator'] = a.simulator
    reportj['sim_time'] = a.sim_time
    reportj['dt'] = a.dt
    reportj['run_directory'] = run_dir
    reportj['reference'] = ref

    if a.extra_report_info:
        for key in a.extra_report_info:
            reportj[key] = a.extra_report_info[key]

    # NOTE(review): report.json is written with pprint, so it is a Python
    # repr, not valid JSON — consumers parsing it as JSON will fail; confirm
    # before switching to json.dumps.
    report_file = open("%s/report.json" % run_dir, 'w')
    report_file.write(pp.pformat(reportj))
    report_file.close()

    # Emit a small standalone script for re-plotting the GA evolution.
    plot_file = open("%s/plotgens.py" % run_dir, 'w')
    plot_file.write('from neurotune.utils import plot_generation_evolution\nimport os\n')
    plot_file.write('\n')
    plot_file.write('parameters = %s\n' % a.parameters)
    plot_file.write('\n')
    plot_file.write("curr_dir = os.path.dirname(__file__) if len(os.path.dirname(__file__))>0 else '.'\n")
    plot_file.write("plot_generation_evolution(parameters, individuals_file_name = '%s/ga_individuals.csv'%curr_dir)\n")
    plot_file.close()

    if not a.nogui:
        added = []
        fig = plt.figure()
        # NOTE(review): set_window_title via fig.canvas is deprecated in
        # recent matplotlib (use fig.canvas.manager); kept as-is.
        fig.canvas.set_window_title("Simulation of fittest individual from run: %s" % ref)

        for tref in best_candidate_v.keys():  ##################a.target_data.keys():
            ref = tref.split(':')[0]
            if not ref in added:
                added.append(ref)
                plt.plot(best_candidate_t, best_candidate_v[ref], label="%s - %i evaluations" % (ref, a.max_evaluations))

        plt.legend()
        #plt.ylim(-80.0,80.0)
        plt.xlim(0.0, a.sim_time)
        plt.title("Models %s" % a.prefix)
        plt.xlabel("Time (ms)")
        plt.ylabel("Membrane potential(mV)")

        utils.plot_generation_evolution(sim_var.keys(), individuals_file_name='%s/ga_individuals.csv' % run_dir, target_values=a.known_target_values, show_plot_already=a.show_plot_already, title_prefix=ref)

        if a.show_plot_already:
            plt.show()

    return reportj
def run(a=None, **kwargs):
    """Generate a spike-raster plot (and optional rate histograms) from spike files.

    Accepts either a prebuilt namespace `a` or keyword arguments merged via
    build_namespace. Supports SONATA HDF5 spike files ('sonata'/'s' format)
    and plain-text 'id_t'/'t_id' files. Side effects: matplotlib figures
    and optionally a saved image.
    """
    a = build_namespace(a, **kwargs)
    pynml.print_comment_v('Generating spiketime plot for %s; plotting: %s; save to: %s' % (a.spiketime_files, a.show_plots_already, a.save_spike_plot_to))

    xs = []
    ys = []
    labels = []
    markers = []
    linestyles = []

    offset_id = 0  # shifts ids of each successive file above the previous one

    max_time = 0
    max_id = 0
    unique_ids = []
    times = OrderedDict()
    ids_in_file = OrderedDict()

    if a.format == 'sonata' or a.format == 's':
        for file_name in a.spiketime_files:
            ids_times = read_sonata_spikes_hdf5_file(file_name)

            x = []
            y = []
            max_id_here = 0

            name = file_name.split('/')[-1]
            if name.endswith('_spikes.h5'):
                name = name[:-10]
            elif name.endswith('.h5'):
                name = name[:-3]
            times[name] = []
            ids_in_file[name] = []

            for id in ids_times:
                for t in ids_times[id]:
                    id_shifted = offset_id + int(float(id))
                    max_id = max(max_id, id_shifted)

                    if not id_shifted in ids_in_file[name]:
                        ids_in_file[name].append(id_shifted)
                    times[name].append(t)
                    max_id_here = max(max_id_here, id_shifted)
                    max_time = max(t, max_time)
                    if not id_shifted in unique_ids:
                        unique_ids.append(id_shifted)
                    x.append(t)
                    y.append(id_shifted)

            print("max_id_here in %s: %i" % (file_name, max_id_here))
            labels.append("%s (%i)" % (name, max_id_here - offset_id))
            offset_id = max_id_here + 1
            xs.append(x)
            ys.append(y)
            markers.append('.')
            linestyles.append('')

        xlim = [max_time / -20.0, max_time * 1.05]
        ylim = [max_id_here / -20., max_id_here * 1.05]
        markersizes = []
        for xx in xs:
            # FIX: original tested >50 before >200, so the >200 branch was
            # unreachable and large rasters never got the smallest marker.
            if len(unique_ids) > 200:
                markersizes.append(1)
            elif len(unique_ids) > 50:
                markersizes.append(2)
            else:
                markersizes.append(4)
    else:
        for file_name in a.spiketime_files:
            pynml.print_comment_v("Loading spike times from: %s" % file_name)
            spikes_file = open(file_name)
            x = []
            y = []
            max_id_here = 0

            name = spikes_file.name
            if name.endswith('.spikes'):
                name = name[:-7]
            if name.endswith('.spike'):
                name = name[:-6]
            times[name] = []
            ids_in_file[name] = []

            for line in spikes_file:
                if not line.startswith('#'):
                    if a.format == 'id_t':
                        [id, t] = line.split()
                    elif a.format == 't_id':
                        [t, id] = line.split()
                    id_shifted = offset_id + int(float(id))
                    max_id = max(max_id, id_shifted)
                    t = float(t)
                    if not id_shifted in ids_in_file[name]:
                        ids_in_file[name].append(id_shifted)
                    times[name].append(t)
                    max_id_here = max(max_id_here, id_shifted)
                    max_time = max(t, max_time)
                    if not id_shifted in unique_ids:
                        unique_ids.append(id_shifted)
                    x.append(t)
                    y.append(id_shifted)

            #print("max_id_here in %s: %i"%(file_name,max_id_here))
            labels.append("%s (%i)" % (name, max_id_here - offset_id))
            offset_id = max_id_here + 1
            xs.append(x)
            ys.append(y)
            markers.append('.')
            linestyles.append('')

        xlim = [max_time / -20.0, max_time * 1.05]
        ylim = [max_id_here / -20., max_id_here * 1.05]
        markersizes = []
        for xx in xs:
            # FIX: same unreachable-branch reordering as above.
            if len(unique_ids) > 200:
                markersizes.append(1)
            elif len(unique_ids) > 50:
                markersizes.append(2)
            else:
                markersizes.append(4)

    pynml.generate_plot(xs, ys, "Spike times from: %s" % a.spiketime_files, labels=labels, linestyles=linestyles, markers=markers, xaxis="Time (s)", yaxis="Cell index", xlim=xlim, ylim=ylim, markersizes=markersizes, grid=False, show_plot_already=False, save_figure_to=a.save_spike_plot_to, legend_position='right')

    if a.rates:
        plt.figure()
        bins = a.rate_bins
        for name in times:
            tt = times[name]
            ids_here = len(ids_in_file[name])

            # Weighted so the histogram reads as a population firing rate.
            plt.hist(tt, bins=bins, histtype='step', weights=[bins * max(tt) / (float(ids_here))] * len(tt), label=name + "_h")
            hist, bin_edges = np.histogram(tt, bins=bins, weights=[bins * max(tt) / (float(ids_here))] * len(tt))

        plt.figure()

        for name in times:
            tt = times[name]
            ids_here = len(ids_in_file[name])

            hist, bin_edges = np.histogram(tt, bins=bins, weights=[bins * max(tt) / (float(ids_here))] * len(tt))

            width = bin_edges[1] - bin_edges[0]
            mids = [i + width / 2 for i in bin_edges[:-1]]

            boxes = [5, 10, 20, 50]
            boxes = [20, 50]
            boxes = [int(a.rate_window)]
            for b in boxes:
                # Smooth the rate histogram with a boxcar of width b.
                box = np.ones(b)
                hist_c = np.convolve(hist / len(box), box)
                ys = hist_c
                xs = [i / (float(len(ys))) for i in range(len(ys))]
                plt.plot(xs, ys, label=name + '_%i_c' % b)

        #plt.legend()

    if a.show_plots_already:
        plt.show()
    else:
        plt.close()
def generate_channel_density_plots(nml2_file, text_densities=False, passives_erevs=False, target_directory=None):
    """Generate one SVG summary of channel densities per cell in a NeuroML2 file.

    For every cell found in ``nml2_file`` (plain ``Cell`` and ``Cell2CaPools``),
    collects the conductance densities of all channel density / Nernst channel
    density elements, draws one coloured bar per ion channel (spanning the min
    and max density found for that channel), and optionally annotates reversal
    potentials and specific capacitances.

    :param nml2_file: path to the NeuroML2 file to analyse
    :param text_densities: if True, include numeric density values in the SVG
    :param passives_erevs: if True, annotate reversal potentials and specific capacitances
    :param target_directory: directory to write SVG file(s) to; default is alongside nml2_file
    :return: tuple ``(svg_files, all_info)`` — list of SVG paths written and a
             dict of extracted info keyed by cell id
    """
    nml_doc = read_neuroml2_file(nml2_file, include_includes=True, verbose=False, optimized=True)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)

    svg_files = []
    all_info = {}

    for cell in cell_elements:
        info = {}
        all_info[cell.id] = info
        print_comment_v("Extracting channel density info from %s"%cell.id)
        sb = ''  # accumulates the SVG body (rects/text) for this cell
        ions = {}   # ion channel id -> ion name
        maxes = {}  # ion channel id -> max density seen (SI units)
        mins = {}   # ion channel id -> min density seen (SI units)
        row = 0
        na_ions = []
        k_ions = []
        ca_ions = []
        other_ions = []

        # NOTE(review): Cell2CaPools is tested before Cell; if Cell2CaPools
        # subclasses Cell, this ordering is required — confirm.
        if isinstance(cell, Cell2CaPools):
            cds = cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_densities + \
                cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_density_nernsts
        elif isinstance(cell, Cell):
            cds = cell.biophysical_properties.membrane_properties.channel_densities + \
                cell.biophysical_properties.membrane_properties.channel_density_nernsts
        # NOTE(review): cds is unbound if the element is neither type above —
        # relies on cell_elements only ever containing Cell/Cell2CaPools.

        epas = None
        ena = None
        ek = None
        eh = None
        eca = None

        for cd in cds:
            dens_si = get_value_in_si(cd.cond_density)
            print_comment_v("cd: %s, ion_channel: %s, ion: %s, density: %s (SI: %s)"%(cd.id,cd.ion_channel,cd.ion,cd.cond_density,dens_si))

            ions[cd.ion_channel] = cd.ion
            # Nernst-style densities have no fixed erev attribute
            erev_V = get_value_in_si(cd.erev) if hasattr(cd,'erev') else None
            erev = '%s mV'%format_float(erev_V*1000) if hasattr(cd,'erev') else None

            if cd.ion == 'na':
                if not cd.ion_channel in na_ions:
                    na_ions.append(cd.ion_channel)
                ena = erev
                info['ena'] = erev_V
            elif cd.ion == 'k':
                if not cd.ion_channel in k_ions:
                    k_ions.append(cd.ion_channel)
                ek = erev
                info['ek'] = erev_V
            elif cd.ion == 'ca':
                if not cd.ion_channel in ca_ions:
                    ca_ions.append(cd.ion_channel)
                eca = erev
                info['eca'] = erev_V
            else:
                if not cd.ion_channel in other_ions:
                    other_ions.append(cd.ion_channel)
                if cd.ion == 'non_specific':
                    epas = erev
                    info['epas'] = erev_V
                if cd.ion == 'h':
                    eh = erev
                    info['eh'] = erev_V

            # track min/max density per channel across all its density elements
            if cd.ion_channel in maxes:
                if dens_si > maxes[cd.ion_channel]:
                    maxes[cd.ion_channel] = dens_si
            else:
                maxes[cd.ion_channel] = dens_si
            if cd.ion_channel in mins:
                if dens_si < mins[cd.ion_channel]:
                    mins[cd.ion_channel] = dens_si
            else:
                mins[cd.ion_channel] = dens_si

        # one SVG row per channel, grouped by ion type
        for ion_channel in na_ions + k_ions + ca_ions + other_ions:
            col = get_ion_color(ions[ion_channel])
            info[ion_channel] = {'max': maxes[ion_channel], 'min': mins[ion_channel]}
            if maxes[ion_channel] > 0:
                sb += _get_rect(ion_channel, row, maxes[ion_channel], mins[ion_channel], col[0], col[1], col[2], text_densities)
            row += 1

        if passives_erevs:
            if ena:
                sb += add_text(row, "E Na = %s "%ena)
                row += 1
            if ek:
                sb += add_text(row, "E K = %s "%ek)
                row += 1
            if eca:
                sb += add_text(row, "E Ca = %s"%eca)
                row += 1
            if eh:
                sb += add_text(row, "E H = %s"%eh)
                row += 1
            if epas:
                sb += add_text(row, "E pas = %s"%epas)
                row += 1

            for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                sb += add_text(row, "C (%s) = %s"%(sc.segment_groups, sc.value))
                info['specific_capacitance_%s'%sc.segment_groups] = get_value_in_si(sc.value)
                row += 1

            #sb+='<text x="%s" y="%s" fill="black" font-family="Arial">%s</text>\n'%(width/3., (height+spacing)*(row+1), text)

        # NOTE(review): width/height/spacing appear to be module-level layout
        # constants defined elsewhere in this file — confirm.
        sb = "<?xml version='1.0' encoding='UTF-8'?>\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\""+str(width+text_densities*200)+"\" height=\""+str((height+spacing)*row)+"\">\n"+sb+"</svg>\n"

        print(sb)
        svg_file = nml2_file+"_channeldens.svg"
        if target_directory:
            svg_file = target_directory+"/"+svg_file.split('/')[-1]
        svg_files.append(svg_file)
        sf = open(svg_file,'w')
        sf.write(sb)
        sf.close()
        print_comment_v("Written to %s"%os.path.abspath(svg_file))

    pp.pprint(all_info)
    return svg_files, all_info
def generate_lems_file_for_neuroml(sim_id,
                                   neuroml_file,
                                   target,
                                   duration,
                                   dt,
                                   lems_file_name,
                                   target_dir,
                                   include_extra_files = None,
                                   gen_plots_for_all_v = True,
                                   plot_all_segments = False,
                                   gen_plots_for_quantities = None,        # Dict with displays vs lists of quantity paths
                                   gen_plots_for_only_populations = None,  # List of populations, all pops if = []
                                   gen_saves_for_all_v = True,
                                   save_all_segments = False,
                                   gen_saves_for_only_populations = None,  # List of populations, all pops if = []
                                   gen_saves_for_quantities = None,        # Dict with file names vs lists of quantity paths
                                   copy_neuroml = True,
                                   seed=None):
    """Create a LEMS simulation file driving the model in a NeuroML 2 file.

    Optionally copies the NeuroML file (and everything it includes, one level
    deep) into ``target_dir``, then generates displays and/or output files for
    membrane potentials of the populations found in the document, plus any
    extra user-specified quantities.

    :param sim_id: id for the LEMS Simulation element
    :param neuroml_file: path to the NeuroML 2 model file
    :param target: id of the network to simulate
    :param duration: simulation duration (ms)
    :param dt: simulation timestep (ms)
    :param lems_file_name: file name for the generated LEMS file
    :param target_dir: directory the LEMS file (and copied NeuroML) is written to
    :param seed: if set, seeds random so generated colours etc. are reproducible
    :return: list of quantity paths recorded to output files
    """
    # Fix for the mutable-default-argument antipattern: the public defaults are
    # now None sentinels, normalised here to fresh containers so each call
    # behaves exactly as the old []/{} defaults did.
    if include_extra_files is None: include_extra_files = []
    if gen_plots_for_quantities is None: gen_plots_for_quantities = {}
    if gen_plots_for_only_populations is None: gen_plots_for_only_populations = []
    if gen_saves_for_only_populations is None: gen_saves_for_only_populations = []
    if gen_saves_for_quantities is None: gen_saves_for_quantities = {}

    if seed:
        random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input

    file_name_full = '%s/%s'%(target_dir,lems_file_name)

    print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))

    ls = LEMSSimulation(sim_id, duration, dt, target)

    nml_doc = read_neuroml2_file(neuroml_file, include_includes=True, verbose=True)

    quantities_saved = []

    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)

    if not copy_neuroml:
        # reference the existing file relative to the target directory
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
    else:
        print_comment_v("Copying NeuroML file (%s) to: %s (%s)"%(neuroml_file, target_dir, os.path.abspath(target_dir)))
        if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
            shutil.copy(neuroml_file, target_dir)

        neuroml_file_name = os.path.basename(neuroml_file)
        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        # copy included files, and the files *they* include (two levels deep)
        for include in nml_doc.includes:
            incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
            print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
            shutil.copy(incl_curr, target_dir)
            ls.include_neuroml2_file(include.href, include_included=False)

            sub_doc = read_neuroml2_file(incl_curr)
            for include in sub_doc.includes:
                incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
                print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
                shutil.copy(incl_curr, target_dir)
                ls.include_neuroml2_file(include.href, include_included=False)

    if gen_plots_for_all_v or gen_saves_for_all_v or len(gen_plots_for_only_populations)>0 or len(gen_saves_for_only_populations)>0 :
        for network in nml_doc.networks:
            for population in network.populations:
                quantity_template = "%s[%i]/v"
                component = population.component
                size = population.size
                cell = None
                segment_ids = []
                if plot_all_segments:
                    # find the cell definition to enumerate its segments
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()

                if population.type and population.type == 'populationList':
                    # populationList instances use path-style quantity references
                    quantity_template = "%s/%i/"+component+"/v"
                    size = len(population.instances)

                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))

                    disp0 = 'DispPop__%s'%population.id
                    ls.create_display(disp0, "Membrane potentials of cells in %s"%population.id, "-90", "50")

                    for i in range(size):
                        if cell!=None and plot_all_segments:
                            quantity_template_seg = "%s/%i/"+component+"/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg%(population.id, i, segment_id)
                                ls.add_line_to_display(disp0, "%s[%i] seg %i: v"%(population.id, i, segment_id), quantity, "1mV", get_next_hex_color())
                        else:
                            quantity = quantity_template%(population.id, i)
                            ls.add_line_to_display(disp0, "%s[%i]: v"%(population.id, i), quantity, "1mV", get_next_hex_color())

                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))

                    of0 = 'Volts_file__%s'%population.id
                    ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
                    for i in range(size):
                        if cell!=None and save_all_segments:
                            quantity_template_seg = "%s/%i/"+component+"/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg%(population.id, i, segment_id)
                                ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template%(population.id, i)
                            ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                            quantities_saved.append(quantity)

    # user-specified extra displays / output files
    for display in gen_plots_for_quantities.keys():
        quantities = gen_plots_for_quantities[display]
        ls.create_display(display, "Plots of %s"%display, "-90", "50")
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, "1", get_next_hex_color())

    for file_name in gen_saves_for_quantities.keys():
        quantities = gen_saves_for_quantities[file_name]
        ls.create_output_file(file_name, file_name)
        for q in quantities:
            ls.add_column_to_output_file(file_name, safe_variable(q), q)

    ls.save_to_file(file_name=file_name_full)

    return quantities_saved
def _run_optimisation(a):
    """Run a neurotune-based model optimisation described by namespace ``a``.

    Normalises string-valued CLI arguments into lists/dicts, builds a
    NeuroMLController + NetworkEvaluator + CustomOptimizerA, runs the
    optimisation, re-runs the fittest candidate, writes a report (text and
    JSON-ish) plus a plotting helper script into a fresh run directory, and
    optionally shows plots.

    :param a: namespace of options (parameters, constraints, target_data,
              weights, simulator settings, GA settings, GUI flags, ...)
    :return: dict summarising the run (fitness, fittest vars, settings), or
             None on a dry run
    """
    # arguments may arrive as strings from the command line; parse them
    if isinstance(a.parameters, str): a.parameters = parse_list_arg(a.parameters)
    if isinstance(a.min_constraints, str): a.min_constraints = parse_list_arg(a.min_constraints)
    if isinstance(a.max_constraints, str): a.max_constraints = parse_list_arg(a.max_constraints)
    if isinstance(a.target_data, str): a.target_data = parse_dict_arg(a.target_data)
    if isinstance(a.weights, str): a.weights = parse_dict_arg(a.weights)
    if isinstance(a.known_target_values, str): a.known_target_values = parse_dict_arg(a.known_target_values)

    pynml.print_comment_v("=====================================================================================")
    pynml.print_comment_v("Starting run_optimisation with: ")
    for key,value in a.__dict__.items():
        pynml.print_comment_v(" %s = %s%s"%(key,' '*(30-len(key)),value))
    pynml.print_comment_v("=====================================================================================")

    if a.dry_run:
        pynml.print_comment_v("Dry run; not running optimization...")
        return

    ref = a.prefix

    # fresh run directory named with a filesystem-safe timestamp
    run_dir = "NT_%s_%s"%(ref, time.ctime().replace(' ','_' ).replace(':','.' ))
    os.mkdir(run_dir)

    my_controller = NeuroMLController(ref, a.neuroml_file, a.target, a.sim_time, a.dt, simulator = a.simulator, generate_dir=run_dir, num_parallel_evaluations = a.num_parallel_evaluations)

    peak_threshold = 0

    analysis_var = {'peak_delta': 0, 'baseline': 0, 'dvdt_threshold': 0, 'peak_threshold': peak_threshold}

    sim_var = OrderedDict()

    #make an evaluator, using automatic target evaluation:
    my_evaluator=evaluators.NetworkEvaluator(controller=my_controller,
                                             analysis_start_time=a.analysis_start_time,
                                             analysis_end_time=a.sim_time,
                                             parameters=a.parameters,
                                             analysis_var=analysis_var,
                                             weights=a.weights,
                                             targets=a.target_data)

    #make an optimizer
    my_optimizer = optimizers.CustomOptimizerA(a.max_constraints,
                                               a.min_constraints,
                                               my_evaluator,
                                               population_size = a.population_size,
                                               max_evaluations = a.max_evaluations,
                                               num_selected = a.num_selected,
                                               num_offspring = a.num_offspring,
                                               num_elites = a.num_elites,
                                               mutation_rate = a.mutation_rate,
                                               seeds = None,
                                               verbose = a.verbose)

    start = time.time()
    #run the optimizer
    best_candidate, fitness = my_optimizer.optimize(do_plot = False, seed= a.seed, summary_dir = run_dir)

    secs = time.time()-start

    reportj = {}
    info = "Ran %s evaluations (pop: %s) in %f seconds (%f mins total; %fs per eval)\n\n"%(a.max_evaluations, a.population_size, secs, secs/60.0, (secs/a.max_evaluations))
    report = "----------------------------------------------------\n\n"+ info
    reportj['comment'] = info
    reportj['time'] = secs

    # re-simulate the fittest individual and analyse it against the targets
    for key,value in zip(a.parameters,best_candidate):
        sim_var[key]=value

    best_candidate_t, best_candidate_v = my_controller.run_individual(sim_var,show=False)

    best_candidate_analysis = analysis.NetworkAnalysis(best_candidate_v,
                                                       best_candidate_t,
                                                       analysis_var,
                                                       start_analysis=a.analysis_start_time,
                                                       end_analysis=a.sim_time)

    best_cand_analysis_full = best_candidate_analysis.analyse()
    best_cand_analysis = best_candidate_analysis.analyse(a.weights.keys())

    report+="---------- Best candidate ------------------------------------------\n"
    report+=pp.pformat(best_cand_analysis_full)+"\n\n"
    report+="TARGETS: \n"
    report+=pp.pformat(a.target_data)+"\n\n"
    report+="TUNED VALUES:\n"
    report+=pp.pformat(best_cand_analysis)+"\n\n"
    report+="FITNESS: %f\n\n"%fitness
    report+="FITTEST: %s\n\n"%pp.pformat(dict(sim_var))

    pynml.print_comment_v(report)

    reportj['fitness']=fitness
    reportj['fittest vars']=dict(sim_var)
    reportj['best_cand_analysis_full']=best_cand_analysis_full
    reportj['best_cand_analysis']=best_cand_analysis
    reportj['parameters']=a.parameters
    reportj['analysis_var']=analysis_var
    reportj['target_data']=a.target_data
    reportj['weights']=a.weights
    reportj['analysis_start_time']=a.analysis_start_time
    reportj['population_size']=a.population_size
    reportj['max_evaluations']=a.max_evaluations
    reportj['num_selected']=a.num_selected
    reportj['num_offspring']=a.num_offspring
    reportj['mutation_rate']=a.mutation_rate
    reportj['num_elites']=a.num_elites
    reportj['seed']=a.seed
    reportj['simulator']=a.simulator
    reportj['sim_time']=a.sim_time
    reportj['dt']=a.dt
    reportj['run_directory'] = run_dir
    reportj['reference'] = ref

    # NOTE(review): report.json is written with pprint.pformat, not json.dump,
    # so despite the name it is not guaranteed to be valid JSON — confirm
    # whether downstream consumers parse it.
    report_file = open("%s/report.json"%run_dir,'w')
    report_file.write(pp.pformat(reportj))
    report_file.close()

    # helper script so the run's GA evolution can be re-plotted later
    plot_file = open("%s/plotgens.py"%run_dir,'w')
    plot_file.write('from neurotune.utils import plot_generation_evolution\nimport os\n')
    plot_file.write('\n')
    plot_file.write('parameters = %s\n'%a.parameters)
    plot_file.write('\n')
    plot_file.write("curr_dir = os.path.dirname(__file__) if len(os.path.dirname(__file__))>0 else '.'\n")
    plot_file.write("plot_generation_evolution(parameters, individuals_file_name = '%s/ga_individuals.csv'%curr_dir)\n")
    plot_file.close()

    if not a.nogui:
        added =[]
        #print("Plotting saved data from %s which are relevant for targets: %s"%(best_candidate_v.keys(), a.target_data.keys()))

        fig = plt.figure()
        fig.canvas.set_window_title("Simulation of fittest individual from run: %s"%ref)

        # plot one trace per distinct reference (part of key before ':')
        for tref in best_candidate_v.keys(): ##################a.target_data.keys():
            ref = tref.split(':')[0]
            if not ref in added:
                added.append(ref)
                #pynml.print_comment(" - Adding plot of: %s"%ref)
                plt.plot(best_candidate_t,best_candidate_v[ref], label="%s - %i evaluations"%(ref,a.max_evaluations))

        plt.legend()
        #plt.ylim(-80.0,80.0)
        plt.xlim(0.0,a.sim_time)
        plt.title("Models %s"%a.prefix)
        plt.xlabel("Time (ms)")
        plt.ylabel("Membrane potential(mV)")

        utils.plot_generation_evolution(sim_var.keys(),
                                        individuals_file_name = '%s/ga_individuals.csv'%run_dir,
                                        target_values=a.known_target_values,
                                        show_plot_already = a.show_plot_already)

    if a.show_plot_already:
        plt.show()

    return reportj
def generate_lems_file_for_neuroml(sim_id,
                                   neuroml_file,
                                   target,
                                   duration,
                                   dt,
                                   lems_file_name,
                                   target_dir,
                                   gen_plots_for_all_v = True,
                                   gen_saves_for_all_v = True,
                                   copy_neuroml = True,
                                   seed=None):
    """Create a LEMS simulation file for a NeuroML 2 model (simple variant).

    NOTE(review): this file contains another, more fully-featured
    generate_lems_file_for_neuroml definition; whichever is defined later in
    the module shadows the other — confirm which one is intended to be public.

    Optionally copies the NeuroML file and its includes (two levels deep) into
    ``target_dir``, then generates a display and/or an output file for the
    membrane potential of every cell in every population.

    :param sim_id: id for the LEMS Simulation element
    :param neuroml_file: path to the NeuroML 2 model file
    :param target: id of the network to simulate
    :param duration: simulation duration (ms)
    :param dt: simulation timestep (ms)
    :param lems_file_name: file name for the generated LEMS file
    :param target_dir: directory the LEMS file is written to
    :param seed: if set, seeds random so generated colours etc. are reproducible
    :return: list of quantity paths recorded to the output files
    """
    if seed:
        random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input

    file_name_full = '%s/%s'%(target_dir,lems_file_name)

    print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))

    ls = LEMSSimulation(sim_id, duration, dt, target)

    nml_doc = read_neuroml2_file(neuroml_file)

    quantities_saved = []

    if not copy_neuroml:
        # reference the existing file relative to the target directory
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
    else:
        if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
            shutil.copy(neuroml_file, target_dir)

        neuroml_file_name = os.path.basename(neuroml_file)
        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        # copy included files, and the files *they* include
        for include in nml_doc.includes:
            incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
            print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
            shutil.copy(incl_curr, target_dir)
            ls.include_neuroml2_file(include.href, include_included=False)

            sub_doc = read_neuroml2_file(incl_curr)
            for include in sub_doc.includes:
                incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
                print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
                shutil.copy(incl_curr, target_dir)
                ls.include_neuroml2_file(include.href, include_included=False)

    if gen_plots_for_all_v or gen_saves_for_all_v:
        for network in nml_doc.networks:
            for population in network.populations:
                size = population.size
                component = population.component

                quantity_template = "%s[%i]/v"
                if population.type and population.type == 'populationList':
                    # populationList instances use path-style quantity references
                    quantity_template = "%s/%i/"+component+"/v"

                if gen_plots_for_all_v:
                    print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))

                    disp0 = 'DispPop__%s'%population.id
                    ls.create_display(disp0, "Voltages of %s"%disp0, "-90", "50")
                    for i in range(size):
                        quantity = quantity_template%(population.id, i)
                        ls.add_line_to_display(disp0, "v %s"%safe_variable(quantity), quantity, "1mV", get_next_hex_color())

                if gen_saves_for_all_v:
                    print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))

                    of0 = 'Volts_file__%s'%population.id
                    ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
                    for i in range(size):
                        quantity = quantity_template%(population.id, i)
                        ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                        quantities_saved.append(quantity)

    ls.save_to_file(file_name=file_name_full)

    return quantities_saved
def generate_current_vs_frequency_curve(nml2_file,
                                        cell_id,
                                        start_amp_nA,
                                        end_amp_nA,
                                        step_nA,
                                        analysis_duration,
                                        analysis_delay,
                                        dt = 0.05,
                                        temperature = "32degC",
                                        spike_threshold_mV=0.,
                                        plot_voltage_traces=False,
                                        plot_if=True,
                                        plot_iv=False,
                                        xlim_if = None,
                                        ylim_if = None,
                                        xlim_iv = None,
                                        ylim_iv = None,
                                        show_plot_already=True,
                                        save_if_figure_to=None,
                                        save_iv_figure_to=None,
                                        simulator="jNeuroML",
                                        include_included=True):
    """Compute (and optionally plot) an F-I curve for one cell in a NeuroML2 file.

    Builds a network with one copy of the cell per current step, injects a
    constant pulse into each copy for the whole simulation, runs it with
    jNeuroML or jNeuroML_NEURON, and counts spikes in the analysis window.

    :param nml2_file: path to the NeuroML 2 file containing the cell
    :param cell_id: id of the cell to test
    :param start_amp_nA/end_amp_nA/step_nA: range of injected currents (nA)
    :param analysis_duration: length of the spike-counting window (ms)
    :param analysis_delay: time to skip before counting spikes (ms)
    :param spike_threshold_mV: threshold passed to pyelectro's max_min
    :return: dict mapping injected current (nA) -> firing frequency (Hz)
    """
    from pyelectro.analysis import max_min
    from pyelectro.analysis import mean_spike_frequency
    import numpy as np

    print_comment_v("Generating FI curve for cell %s in %s using %s (%snA->%snA; %snA steps)"%
        (cell_id, nml2_file, simulator, start_amp_nA, end_amp_nA, step_nA))

    sim_id = 'iv_%s'%cell_id
    duration = analysis_duration+analysis_delay
    ls = LEMSSimulation(sim_id, duration, dt)

    ls.include_neuroml2_file(nml2_file, include_included=include_included)

    # one stimulus amplitude per cell instance
    stims = []
    amp = start_amp_nA
    while amp<=end_amp_nA :
        stims.append(amp)
        amp+=step_nA

    number_cells = len(stims)
    pop = nml.Population(id="population_of_%s"%cell_id,
                         component=cell_id,
                         size=number_cells)

    # create network and add populations
    net_id = "network_of_%s"%cell_id
    net = nml.Network(id=net_id, type="networkWithTemperature", temperature=temperature)
    ls.assign_simulation_target(net_id)
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(nml.IncludeType(nml2_file))
    net.populations.append(pop)

    for i in range(number_cells):
        stim_amp = "%snA"%stims[i]
        # build an id that is safe for use in NeuroML ('.'/'-' not allowed)
        input_id = ("input_%s"%stim_amp).replace('.','_').replace('-','min')
        pg = nml.PulseGenerator(id=input_id,
                                delay="0ms",
                                duration="%sms"%duration,
                                amplitude=stim_amp)
        net_doc.pulse_generators.append(pg)

        # Add these to cells
        input_list = nml.InputList(id=input_id,
                                   component=pg.id,
                                   populations=pop.id)
        input = nml.Input(id='0',
                          target="../%s[%i]"%(pop.id, i),
                          destination="synapses")
        input_list.input.append(input)
        net.input_lists.append(input_list)

    net_file_name = '%s.net.nml'%sim_id
    pynml.write_neuroml2_file(net_doc, net_file_name)
    ls.include_neuroml2_file(net_file_name)

    disp0 = 'Voltage_display'
    ls.create_display(disp0,"Voltages", "-90", "50")
    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat"%sim_id)

    for i in range(number_cells):
        ref = "v_cell%i"%i
        quantity = "%s[%i]/v"%(pop.id, i)
        ls.add_line_to_display(disp0, ref, quantity, "1mV", pynml.get_next_hex_color())
        ls.add_column_to_output_file(of0, ref, quantity)

    lems_file_name = ls.save_to_file()

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)

    #print(results.keys())
    if_results = {}
    iv_results = {}
    for i in range(number_cells):
        # convert to ms / mV for pyelectro
        t = np.array(results['t'])*1000
        v = np.array(results["%s[%i]/v"%(pop.id, i)])*1000

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        spike_times = mm['maxima_times']
        freq = 0
        # NOTE(review): frequency is only computed when more than 2 spikes
        # occur; 1-2 spikes are reported as 0 Hz — confirm this is intended.
        if len(spike_times) > 2:
            count = 0
            for s in spike_times:
                if s >= analysis_delay and s < (analysis_duration+analysis_delay):
                    count+=1
            freq = 1000 * count/float(analysis_duration)
            mean_freq = mean_spike_frequency(spike_times)
            # print("--- %s nA, spike times: %s, mean_spike_frequency: %f, freq (%fms -> %fms): %f"%(stims[i],spike_times, mean_freq, analysis_delay, analysis_duration+analysis_delay, freq))
        if_results[stims[i]] = freq

        # sub-threshold response: record steady-state voltage for the I-V curve
        if freq == 0:
            iv_results[stims[i]] = v[-1]

    if plot_if:
        stims = sorted(if_results.keys())
        stims_pA = [ii*1000 for ii in stims]
        freqs = [if_results[s] for s in stims]
        pynml.generate_plot([stims_pA],
                            [freqs],
                            "Frequency versus injected current for: %s"%nml2_file,
                            colors = ['k'],
                            linestyles=['-'],
                            markers=['o'],
                            xaxis = 'Input current (pA)',
                            yaxis = 'Firing frequency (Hz)',
                            xlim = xlim_if,
                            ylim = ylim_if,
                            grid = True,
                            show_plot_already=False,
                            save_figure_to = save_if_figure_to)
    if plot_iv:
        stims = sorted(iv_results.keys())
        stims_pA = [ii*1000 for ii in sorted(iv_results.keys())]
        vs = [iv_results[s] for s in stims]
        pynml.generate_plot([stims_pA],
                            [vs],
                            "Final membrane potential versus injected current for: %s"%nml2_file,
                            colors = ['k'],
                            linestyles=['-'],
                            markers=['o'],
                            xaxis = 'Input current (pA)',
                            yaxis = 'Membrane potential (mV)',
                            xlim = xlim_iv,
                            ylim = ylim_iv,
                            grid = True,
                            show_plot_already=False,
                            save_figure_to = save_iv_figure_to)

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    return if_results
def _marker_sizes(traces, unique_ids):
    # Choose one marker size per trace: smaller markers for denser rasters.
    # The largest threshold must be tested first; the original code tested
    # >50 before >200, which made the >200 branch (size 1) unreachable.
    markersizes = []
    for _ in traces:
        if len(unique_ids) > 200:
            markersizes.append(1)
        elif len(unique_ids) > 50:
            markersizes.append(2)
        else:
            markersizes.append(4)
    return markersizes


def run(a=None, **kwargs):
    """Generate a spike raster (and optionally rate histograms) from spike time files.

    Spike times are read either from SONATA HDF5 files (a.format 'sonata'/'s')
    or from plain-text files with one spike per line (a.format 'id_t' or
    't_id' column order). Cell ids from successive files are shifted by a
    running offset so each file occupies its own band of the raster.

    :param a: pre-built options namespace, or None to build one from kwargs
    """
    a = build_namespace(a, **kwargs)
    pynml.print_comment_v(
        'Generating spiketime plot for %s; plotting: %s; save to: %s'
        % (a.spiketime_files, a.show_plots_already, a.save_spike_plot_to))

    xs = []
    ys = []
    labels = []
    markers = []
    linestyles = []

    offset_id = 0  # id shift for the current file, so rasters stack vertically
    max_time = 0
    max_id = 0
    unique_ids = []
    times = OrderedDict()        # file label -> list of all spike times
    ids_in_file = OrderedDict()  # file label -> list of (shifted) ids seen

    if a.format == 'sonata' or a.format == 's':
        for file_name in a.spiketime_files:
            ids_times = read_sonata_spikes_hdf5_file(file_name)

            x = []
            y = []
            max_id_here = 0

            # strip SONATA suffixes for the legend label
            name = file_name.split('/')[-1]
            if name.endswith('_spikes.h5'):
                name = name[:-10]
            elif name.endswith('.h5'):
                name = name[:-3]
            times[name] = []
            ids_in_file[name] = []

            for id in ids_times:
                for t in ids_times[id]:
                    id_shifted = offset_id + int(float(id))
                    max_id = max(max_id, id_shifted)

                    if not id_shifted in ids_in_file[name]:
                        ids_in_file[name].append(id_shifted)
                    times[name].append(t)
                    max_id_here = max(max_id_here, id_shifted)
                    max_time = max(t, max_time)
                    if not id_shifted in unique_ids:
                        unique_ids.append(id_shifted)
                    x.append(t)
                    y.append(id_shifted)

            print("max_id_here in %s: %i" % (file_name, max_id_here))
            labels.append("%s (%i)" % (name, max_id_here - offset_id))
            offset_id = max_id_here + 1
            xs.append(x)
            ys.append(y)
            markers.append('.')
            linestyles.append('')

        xlim = [max_time / -20.0, max_time * 1.05]
        ylim = [max_id_here / -20., max_id_here * 1.05]
        markersizes = _marker_sizes(xs, unique_ids)
    else:
        for file_name in a.spiketime_files:
            pynml.print_comment_v("Loading spike times from: %s" % file_name)

            x = []
            y = []
            max_id_here = 0

            # 'with' guarantees the file is closed (the original leaked it)
            with open(file_name) as spikes_file:
                name = spikes_file.name
                if name.endswith('.spikes'):
                    name = name[:-7]
                if name.endswith('.spike'):
                    name = name[:-6]
                times[name] = []
                ids_in_file[name] = []

                for line in spikes_file:
                    if not line.startswith('#'):
                        if a.format == 'id_t':
                            [id, t] = line.split()
                        elif a.format == 't_id':
                            [t, id] = line.split()
                        id_shifted = offset_id + int(float(id))
                        max_id = max(max_id, id_shifted)
                        t = float(t)

                        if not id_shifted in ids_in_file[name]:
                            ids_in_file[name].append(id_shifted)
                        times[name].append(t)
                        max_id_here = max(max_id_here, id_shifted)
                        max_time = max(t, max_time)
                        if not id_shifted in unique_ids:
                            unique_ids.append(id_shifted)
                        x.append(t)
                        y.append(id_shifted)

            #print("max_id_here in %s: %i"%(file_name,max_id_here))
            labels.append("%s (%i)" % (name, max_id_here - offset_id))
            offset_id = max_id_here + 1
            xs.append(x)
            ys.append(y)
            markers.append('.')
            linestyles.append('')

        xlim = [max_time / -20.0, max_time * 1.05]
        ylim = [max_id_here / -20., max_id_here * 1.05]
        markersizes = _marker_sizes(xs, unique_ids)

    pynml.generate_plot(xs,
                        ys,
                        "Spike times from: %s" % a.spiketime_files,
                        labels=labels,
                        linestyles=linestyles,
                        markers=markers,
                        xaxis="Time (s)",
                        yaxis="Cell index",
                        xlim=xlim,
                        ylim=ylim,
                        markersizes=markersizes,
                        grid=False,
                        show_plot_already=False,
                        save_figure_to=a.save_spike_plot_to,
                        legend_position='right')

    if a.rates:
        plt.figure()
        bins = a.rate_bins
        for name in times:
            tt = times[name]
            ids_here = len(ids_in_file[name])

            # weight each spike so the histogram reads as a per-cell rate
            plt.hist(tt, bins=bins, histtype='step',
                     weights=[bins * max(tt) / (float(ids_here))] * len(tt),
                     label=name + "_h")
            hist, bin_edges = np.histogram(
                tt, bins=bins,
                weights=[bins * max(tt) / (float(ids_here))] * len(tt))

        plt.figure()
        for name in times:
            tt = times[name]
            ids_here = len(ids_in_file[name])
            hist, bin_edges = np.histogram(
                tt, bins=bins,
                weights=[bins * max(tt) / (float(ids_here))] * len(tt))

            # smooth the rate histogram with a box filter of a.rate_window bins
            boxes = [int(a.rate_window)]
            for b in boxes:
                box = np.ones(b)
                hist_c = np.convolve(hist / len(box), box)
                rate_ys = hist_c
                rate_xs = [i / (float(len(rate_ys))) for i in range(len(rate_ys))]
                plt.plot(rate_xs, rate_ys, label=name + '_%i_c' % b)
        #plt.legend()

    if a.show_plots_already:
        plt.show()
    else:
        plt.close()
def analyse_spiketime_vs_dt(nml2_file,
                            target,
                            duration,
                            simulator,
                            cell_v_path,
                            dts,
                            verbose=False,
                            spike_threshold_mV = 0,
                            show_plot_already=True,
                            save_figure_to=None,
                            num_of_last_spikes=None):
    """Plot how spike times of one recorded cell shift as the timestep changes.

    Runs the same simulation once per dt in ``dts`` (sorted ascending),
    extracts spike times from the voltage trace at ``cell_v_path``, and plots
    each spike's time against ln(dt), with dashed horizontal reference lines
    at the spike times obtained with the smallest dt.

    :param nml2_file: path to the NeuroML 2 model file
    :param target: id of the network to simulate
    :param duration: simulation duration (ms)
    :param simulator: 'jNeuroML' or 'jNeuroML_NEURON'
    :param cell_v_path: key of the recorded voltage trace in the results dict
    :param dts: iterable of timesteps (ms) to compare
    :param num_of_last_spikes: how many of the final spikes to compare
                               (None = all spikes of the smallest-dt run)
    """
    from pyelectro.analysis import max_min
    import numpy as np

    all_results = {}

    dts=list(np.sort(dts))  # ensure ascending so dts[0] is the smallest dt

    for dt in dts:
        if verbose:
            print_comment_v(" == Generating simulation for dt = %s ms"%dt)

        ref = str("Sim_dt_%s"%dt).replace('.','_')
        lems_file_name = "LEMS_%s.xml"%ref
        generate_lems_file_for_neuroml(ref,
                                       nml2_file,
                                       target,
                                       duration,
                                       dt,
                                       lems_file_name,
                                       '.',
                                       gen_plots_for_all_v = True,
                                       gen_saves_for_all_v = True,
                                       copy_neuroml = False,
                                       seed=None)

        if simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=False, verbose=verbose)
        if simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=False, verbose=verbose)

        print("Results reloaded: %s"%results.keys())
        all_results[dt] = results

    xs = []
    ys = []
    labels = []

    spxs = []
    spys = []
    linestyles = []
    markers = []
    colors=[]
    spike_times_final=[]
    array_of_num_of_spikes=[]

    # extract spike times for every dt
    for dt in dts:
        t = all_results[dt]['t']
        v = all_results[dt][cell_v_path]
        xs.append(t)
        ys.append(v)
        labels.append(dt)

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        spike_times = mm['maxima_times']
        spike_times_final.append(spike_times)
        array_of_num_of_spikes.append(len(spike_times))

    max_num_of_spikes=max(array_of_num_of_spikes)
    min_dt_spikes=spike_times_final[0]  # spikes from the smallest-dt (reference) run
    bound_dts=[math.log(dts[0]),math.log(dts[-1])]

    # decide how many of the reference run's last spikes to compare
    if num_of_last_spikes == None:
        num_of_spikes=len(min_dt_spikes)
    else:
        if len(min_dt_spikes) >=num_of_last_spikes:
            num_of_spikes=num_of_last_spikes
        else:
            num_of_spikes=len(min_dt_spikes)

    # negative indices of the last num_of_spikes spikes: -1, -2, ...
    spike_indices=[(-1)*ind for ind in range(1,num_of_spikes+1) ]

    # cutoff: only spikes at or after this time enter the comparison
    if len(min_dt_spikes) > abs(spike_indices[-1]):
        earliest_spike_time=min_dt_spikes[spike_indices[-1]-1]
    else:
        earliest_spike_time=min_dt_spikes[spike_indices[-1]]

    # one scatter series per spike index, across all dts
    for spike_ind in range(0,max_num_of_spikes):
        spike_time_values=[]
        dt_values=[]
        for dt in range(0,len(dts)):
            if spike_times_final[dt] !=[]:
                if len(spike_times_final[dt]) >= spike_ind+1:
                    if spike_times_final[dt][spike_ind] >= earliest_spike_time:
                        spike_time_values.append(spike_times_final[dt][spike_ind])
                        dt_values.append(math.log(dts[dt]))

        linestyles.append('')
        markers.append('o')
        colors.append('g')
        spxs.append(dt_values)
        spys.append(spike_time_values)

    # dashed reference lines at the smallest-dt spike times
    for last_spike_index in spike_indices:
        vertical_line=[min_dt_spikes[last_spike_index],min_dt_spikes[last_spike_index] ]
        spxs.append(bound_dts)
        spys.append(vertical_line)
        linestyles.append('--')
        markers.append('')
        colors.append('k')

    pynml.generate_plot(spxs,
                        spys,
                        "Spike times vs dt",
                        colors=colors,
                        linestyles = linestyles,
                        markers = markers,
                        xaxis = 'ln ( dt (ms) )',
                        yaxis = 'Spike times (s)',
                        show_plot_already=show_plot_already,
                        save_figure_to=save_figure_to)

    if verbose:
        pynml.generate_plot(xs,
                            ys,
                            "Membrane potentials in %s for %s"%(simulator,dts),
                            labels = labels,
                            show_plot_already=show_plot_already,
                            save_figure_to=save_figure_to)
def generate_Vm_vs_time_plot(NML2_file,
                             cell_id,
                             # inj_amp_nA = 80,
                             # delay_ms = 20,
                             # inj_dur_ms = 0.5,
                             sim_dur_ms = 1000,
                             dt = 0.05,
                             temperature = "35",
                             spike_threshold_mV=0.,
                             plot_voltage_traces=False,
                             show_plot_already=True,
                             simulator="jNeuroML_NEURON",
                             include_included=True):
    """Simulate one cell from NML2_file and record its membrane potential.

    Builds a one-cell network, runs it with jNeuroML or jNeuroML_NEURON, and
    saves v to '<sim_id>.v.dat'.

    NOTE(review): several identifiers are hard-coded — the population component
    is 'iMC1_cell_1_origin' (so cell_id is only used for the document id and
    log message) and the recorded path is 'hhpop/0/hhcell/v', which does not
    match the 'single_cell' population created here — confirm intended targets.

    :param NML2_file: path to the NeuroML 2 file to include
    :param cell_id: cell id (used for the document id / log message only)
    :param sim_dur_ms: simulation duration (ms)
    :param dt: simulation timestep (ms)
    :param temperature: network temperature in degC (number as string)
    :return: id of the LEMS output file element ('Volts_file')
    """
    # simulation parameters
    nogui = '-nogui' in sys.argv  # Used to supress GUI in tests for Travis-CI

    ref = "iMC1_cell_1_origin"
    print_comment_v("Generating Vm(mV) vs Time(ms) plot for cell %s in %s using %s"% # (Inj %snA / %sms dur after %sms delay)"%
        (cell_id, NML2_file, simulator))#, inj_amp_nA, inj_dur_ms, delay_ms))

    sim_id = 'Vm_%s'%ref
    duration = sim_dur_ms
    ls = LEMSSimulation(sim_id, sim_dur_ms, dt)
    ls.include_neuroml2_file(NML2_file, include_included=include_included)
    ls.assign_simulation_target('network')

    nml_doc = nml.NeuroMLDocument(id=cell_id)
    nml_doc.includes.append(nml.IncludeType(href=NML2_file))

    net = nml.Network(id="network", type='networkWithTemperature', temperature='%sdegC'%temperature)
    nml_doc.networks.append(net)

    #input_id = ("input_%s"%str(inj_amp_nA).replace('.','_'))
    #pg = nml.PulseGenerator(id=input_id,
    #                        delay="%sms"%delay_ms,
    #                        duration='%sms'%inj_dur_ms,
    #                        amplitude='%spA'%inj_amp_nA)
    #nml_doc.pulse_generators.append(pg)

    pop_id = 'single_cell'
    pop = nml.Population(id=pop_id, component='iMC1_cell_1_origin', size=1, type="populationList")

    inst = nml.Instance(id=0)
    pop.instances.append(inst)
    inst.location = nml.Location(x=0, y=0, z=0)
    net.populations.append(pop)

    # Add these to cells
    #input_list = nml.InputList(id='il_%s'%input_id,
    #                           component=pg.id,
    #                           populations=pop_id)
    #input = nml.Input(id='0', target='../hhpop/0/hhcell',
    #                  destination="synapses")
    #input_list.input.append(input)
    #net.input_lists.append(input_list)

    sim_file_name = '%s.sim.nml'%sim_id
    pynml.write_neuroml2_file(nml_doc, sim_file_name)
    ls.include_neuroml2_file(sim_file_name)

    disp0 = 'Voltage_display'
    ls.create_display(disp0,"Voltages", "-90", "50")
    ls.add_line_to_display(disp0, "V", "hhpop/0/hhcell/v", scale='1mV')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat"%sim_id)
    ls.add_column_to_output_file(of0, "V", "hhpop/0/hhcell/v")

    lems_file_name = ls.save_to_file()

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=plot_voltage_traces, show_plot_already=False)

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    #plt.plot("t","V")
    #plt.title("Vm(mV) vs Time(ms) plot for cell %s in %s using %s (Inj %snA / %sms dur after %sms delay)"%
    #          (cell_id, nml2_file, simulator, inj_amp_nA, inj_dur_ms, delay_ms))
    #plt.xlabel('Time (ms)')
    #plt.ylabel('Vmemb (mV)')
    #plt.legend(['Test'], loc='upper right')

    return of0
def generate_current_vs_frequency_curve(nml2_file,
                                        cell_id,
                                        start_amp_nA=-0.1,
                                        end_amp_nA=0.1,
                                        step_nA=0.01,
                                        custom_amps_nA=[],
                                        analysis_duration=1000,
                                        analysis_delay=0,
                                        pre_zero_pulse=0,
                                        post_zero_pulse=0,
                                        dt=0.05,
                                        temperature="32degC",
                                        spike_threshold_mV=0.,
                                        plot_voltage_traces=False,
                                        plot_if=True,
                                        plot_iv=False,
                                        xlim_if=None,
                                        ylim_if=None,
                                        xlim_iv=None,
                                        ylim_iv=None,
                                        label_xaxis=True,
                                        label_yaxis=True,
                                        show_volts_label=True,
                                        grid=True,
                                        font_size=12,
                                        if_iv_color='k',
                                        linewidth=1,
                                        bottom_left_spines_only=False,
                                        show_plot_already=True,
                                        save_voltage_traces_to=None,
                                        save_if_figure_to=None,
                                        save_iv_figure_to=None,
                                        save_if_data_to=None,
                                        save_iv_data_to=None,
                                        simulator="jNeuroML",
                                        num_processors=1,
                                        include_included=True,
                                        title_above_plot=False,
                                        return_axes=False,
                                        verbose=False):
    """
    Generate an I/F (and optionally I/V) curve for cell_id in nml2_file.

    One copy of the cell is instantiated per current step; each copy gets its
    own pulse generator (delay pre_zero_pulse, duration
    analysis_duration + analysis_delay). All copies are simulated in a single
    LEMS run with the chosen simulator, then spike frequencies (and
    sub-threshold end voltages) are extracted per cell.

    Stimulation amplitudes come either from custom_amps_nA (if non-empty) or
    from the inclusive range start_amp_nA..end_amp_nA in steps of step_nA.
    Frequencies are counted in the window
    [pre_zero_pulse + analysis_delay, pre_zero_pulse + analysis_duration + analysis_delay).

    Returns a dict {amplitude_nA: frequency_Hz}, or
    (traces_ax, if_ax, iv_ax) matplotlib axes if return_axes is True.
    """
    print_comment(
        "Running generate_current_vs_frequency_curve() on %s (%s)" %
        (nml2_file, os.path.abspath(nml2_file)), verbose)
    # pyelectro is only needed here; imported lazily
    from pyelectro.analysis import max_min
    from pyelectro.analysis import mean_spike_frequency
    import numpy as np

    traces_ax = None
    if_ax = None
    iv_ax = None

    sim_id = 'iv_%s' % cell_id
    total_duration = pre_zero_pulse + analysis_duration + analysis_delay + post_zero_pulse
    pulse_duration = analysis_duration + analysis_delay
    end_stim = pre_zero_pulse + analysis_duration + analysis_delay
    ls = LEMSSimulation(sim_id, total_duration, dt)

    ls.include_neuroml2_file(nml2_file, include_included=include_included)

    stims = []
    if len(custom_amps_nA) > 0:
        # Explicit list of amplitudes supplied by the caller
        stims = [float(a) for a in custom_amps_nA]
        stim_info = ['%snA' % float(a) for a in custom_amps_nA]
    else:
        # Inclusive sweep; note float accumulation may make the last step
        # slightly over/undershoot end_amp_nA
        amp = start_amp_nA
        while amp <= end_amp_nA:
            stims.append(amp)
            amp += step_nA

        stim_info = '(%snA->%snA; %s steps of %snA; %sms)' % (
            start_amp_nA, end_amp_nA, len(stims), step_nA, total_duration)

    print_comment_v("Generating an IF curve for cell %s in %s using %s %s" %
                    (cell_id, nml2_file, simulator, stim_info))

    # One cell instance per stimulation amplitude
    number_cells = len(stims)
    pop = nml.Population(id="population_of_%s" % cell_id,
                         component=cell_id,
                         size=number_cells)

    # create network and add populations
    net_id = "network_of_%s" % cell_id
    net = nml.Network(id=net_id,
                      type="networkWithTemperature",
                      temperature=temperature)
    ls.assign_simulation_target(net_id)
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(nml.IncludeType(nml2_file))
    net.populations.append(pop)

    for i in range(number_cells):
        stim_amp = "%snA" % stims[i]
        input_id = ("input_%s" % stim_amp).replace('.', '_').replace('-', 'min')
        pg = nml.PulseGenerator(id=input_id,
                                delay="%sms" % pre_zero_pulse,
                                duration="%sms" % pulse_duration,
                                amplitude=stim_amp)
        net_doc.pulse_generators.append(pg)

        # Add these to cells
        input_list = nml.InputList(id=input_id,
                                   component=pg.id,
                                   populations=pop.id)
        input = nml.Input(id='0',
                          target="../%s[%i]" % (pop.id, i),
                          destination="synapses")
        input_list.input.append(input)
        net.input_lists.append(input_list)

    net_file_name = '%s.net.nml' % sim_id
    pynml.write_neuroml2_file(net_doc, net_file_name)
    ls.include_neuroml2_file(net_file_name)

    disp0 = 'Voltage_display'
    ls.create_display(disp0, "Voltages", "-90", "50")
    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % sim_id)

    for i in range(number_cells):
        ref = "v_cell%i" % i
        quantity = "%s[%i]/v" % (pop.id, i)
        ls.add_line_to_display(disp0, ref, quantity, "1mV",
                               pynml.get_next_hex_color())
        ls.add_column_to_output_file(of0, ref, quantity)

    lems_file_name = ls.save_to_file()

    print_comment(
        "Written LEMS file %s (%s)" %
        (lems_file_name, os.path.abspath(lems_file_name)), verbose)

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               nogui=True,
                                               load_saved_data=True,
                                               plot=False,
                                               show_plot_already=False,
                                               verbose=verbose)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
                                                      nogui=True,
                                                      load_saved_data=True,
                                                      plot=False,
                                                      show_plot_already=False,
                                                      verbose=verbose)
    elif simulator == "jNeuroML_NetPyNE":
        results = pynml.run_lems_with_jneuroml_netpyne(
            lems_file_name,
            nogui=True,
            load_saved_data=True,
            plot=False,
            show_plot_already=False,
            num_processors=num_processors,
            verbose=verbose)
    else:
        raise Exception(
            "Sorry, cannot yet run current vs frequency analysis using simulator %s"
            % simulator)

    print_comment(
        "Completed run in simulator %s (results: %s)" %
        (simulator, results.keys()), verbose)

    #print(results.keys())
    times_results = []
    volts_results = []
    volts_labels = []
    if_results = {}
    iv_results = {}
    for i in range(number_cells):
        # Results are in SI (s, V); convert to ms and mV for analysis
        t = np.array(results['t']) * 1000
        v = np.array(results["%s[%i]/v" % (pop.id, i)]) * 1000

        if plot_voltage_traces:
            times_results.append(t)
            volts_results.append(v)
            volts_labels.append("%s nA" % stims[i])

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        spike_times = mm['maxima_times']
        freq = 0
        if len(spike_times) > 2:
            # Count only spikes within the analysis window
            count = 0
            for s in spike_times:
                if s >= pre_zero_pulse + analysis_delay and s < (
                        pre_zero_pulse + analysis_duration + analysis_delay):
                    count += 1
            freq = 1000 * count / float(analysis_duration)

        mean_freq = mean_spike_frequency(spike_times)
        #print("--- %s nA, spike times: %s, mean_spike_frequency: %f, freq (%fms -> %fms): %f"%(stims[i],spike_times, mean_freq, analysis_delay, analysis_duration+analysis_delay, freq))
        if_results[stims[i]] = freq

        if freq == 0:
            # Sub-threshold trace: record its steady-state voltage for the IV curve
            if post_zero_pulse == 0:
                iv_results[stims[i]] = v[-1]
            else:
                # Take the first sample at or after the end of the pulse
                v_end = None
                for j in range(len(t)):
                    if v_end == None and t[j] >= end_stim:
                        v_end = v[j]
                iv_results[stims[i]] = v_end

    if plot_voltage_traces:
        traces_ax = pynml.generate_plot(
            times_results,
            volts_results,
            "Membrane potential traces for: %s" % nml2_file,
            xaxis='Time (ms)' if label_xaxis else ' ',
            yaxis='Membrane potential (mV)' if label_yaxis else '',
            xlim=[total_duration * -0.05, total_duration * 1.05],
            show_xticklabels=label_xaxis,
            font_size=font_size,
            bottom_left_spines_only=bottom_left_spines_only,
            grid=False,
            labels=volts_labels if show_volts_label else [],
            show_plot_already=False,
            save_figure_to=save_voltage_traces_to,
            title_above_plot=title_above_plot,
            verbose=verbose)

    if plot_if:
        stims = sorted(if_results.keys())
        stims_pA = [ii * 1000 for ii in stims]
        freqs = [if_results[s] for s in stims]
        if_ax = pynml.generate_plot(
            [stims_pA], [freqs],
            "Firing frequency versus injected current for: %s" % nml2_file,
            colors=[if_iv_color],
            linestyles=['-'],
            markers=['o'],
            linewidths=[linewidth],
            xaxis='Input current (pA)' if label_xaxis else ' ',
            yaxis='Firing frequency (Hz)' if label_yaxis else '',
            xlim=xlim_if,
            ylim=ylim_if,
            show_xticklabels=label_xaxis,
            show_yticklabels=label_yaxis,
            font_size=font_size,
            bottom_left_spines_only=bottom_left_spines_only,
            grid=grid,
            show_plot_already=False,
            save_figure_to=save_if_figure_to,
            title_above_plot=title_above_plot,
            verbose=verbose)
        if save_if_data_to:
            with open(save_if_data_to, 'w') as if_file:
                for i in range(len(stims_pA)):
                    if_file.write("%s\t%s\n" % (stims_pA[i], freqs[i]))

    if plot_iv:
        stims = sorted(iv_results.keys())
        stims_pA = [ii * 1000 for ii in sorted(iv_results.keys())]
        vs = [iv_results[s] for s in stims]

        # Split the IV curve into separate line segments wherever the swept
        # amplitudes have a gap (supra-threshold region removed above).
        # NOTE(review): the si > 1 here means a gap after the very first
        # point would not start a new segment — presumably intended as
        # si > 0; confirm before changing.
        xs = []
        ys = []
        xs.append([])
        ys.append([])

        for si in range(len(stims)):
            stim = stims[si]
            if len(custom_amps_nA) == 0 and si > 1 and (
                    stims[si] - stims[si - 1]) > step_nA * 1.01:
                xs.append([])
                ys.append([])

            xs[-1].append(stim * 1000)
            ys[-1].append(iv_results[stim])

        iv_ax = pynml.generate_plot(
            xs,
            ys,
            "V at %sms versus I below threshold for: %s" % (end_stim, nml2_file),
            colors=[if_iv_color for s in xs],
            linestyles=['-' for s in xs],
            markers=['o' for s in xs],
            xaxis='Input current (pA)' if label_xaxis else '',
            yaxis='Membrane potential (mV)' if label_yaxis else '',
            xlim=xlim_iv,
            ylim=ylim_iv,
            show_xticklabels=label_xaxis,
            show_yticklabels=label_yaxis,
            font_size=font_size,
            linewidths=[linewidth for s in xs],
            bottom_left_spines_only=bottom_left_spines_only,
            grid=grid,
            show_plot_already=False,
            save_figure_to=save_iv_figure_to,
            title_above_plot=title_above_plot,
            verbose=verbose)
        if save_iv_data_to:
            with open(save_iv_data_to, 'w') as iv_file:
                for i in range(len(stims_pA)):
                    iv_file.write("%s\t%s\n" % (stims_pA[i], vs[i]))

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    if return_axes:
        return traces_ax, if_ax, iv_ax

    return if_results
def generate_channel_density_plots(nml2_file,
                                   text_densities=False,
                                   passives_erevs=False,
                                   target_directory=None):
    """
    Create one SVG summarising channel conductance densities for every cell
    (Cell or Cell2CaPools) in nml2_file.

    For each cell, channel densities (and nernst-based densities) are grouped
    by ion (na / k / ca / other), min/max densities per channel are collected,
    and a bar per channel is drawn via the module-level _get_rect helper.
    With passives_erevs, reversal potentials and specific capacitances are
    appended as text rows. The SVG files are written next to nml2_file (or
    into target_directory).

    Returns (svg_files, all_info) where all_info maps cell id -> dict of
    extracted values (per-channel min/max, ena/ek/eca/eh/epas in SI volts,
    specific capacitances).

    NOTE(review): relies on module-level width, height, spacing, _get_rect,
    add_text and pp defined elsewhere in this file — confirm they are in scope.
    """
    nml_doc = read_neuroml2_file(nml2_file,
                                 include_includes=True,
                                 verbose=False,
                                 optimized=True)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)
    svg_files = []
    all_info = {}

    for cell in cell_elements:
        info = {}
        all_info[cell.id] = info
        print_comment_v("Extracting channel density info from %s" % cell.id)
        sb = ''           # accumulated SVG body
        ions = {}         # ion channel id -> ion name
        maxes = {}        # ion channel id -> max density seen (SI)
        mins = {}         # ion channel id -> min density seen (SI)
        row = 0
        na_ions = []
        k_ions = []
        ca_ions = []
        other_ions = []

        # NOTE(review): cds is only assigned for these two types; any other
        # element type in cell_elements would raise NameError below.
        if isinstance(cell, Cell2CaPools):
            cds = cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_densities + \
                cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_density_nernsts
        elif isinstance(cell, Cell):
            cds = cell.biophysical_properties.membrane_properties.channel_densities + \
                cell.biophysical_properties.membrane_properties.channel_density_nernsts

        epas = None
        ena = None
        ek = None
        eh = None
        eca = None

        for cd in cds:
            dens_si = get_value_in_si(cd.cond_density)
            print_comment_v(
                "cd: %s, ion_channel: %s, ion: %s, density: %s (SI: %s)" %
                (cd.id, cd.ion_channel, cd.ion, cd.cond_density, dens_si))

            ions[cd.ion_channel] = cd.ion
            # NOTE(review): hasattr is True even when cd.erev is None (e.g.
            # channel_density_nernsts) — erev_V * 1000 would then raise;
            # presumably such elements always carry a value here. Confirm.
            erev_V = get_value_in_si(cd.erev) if hasattr(cd, 'erev') else None
            erev = '%s mV' % format_float(erev_V * 1000) if hasattr(
                cd, 'erev') else None

            if cd.ion == 'na':
                if cd.ion_channel not in na_ions:
                    na_ions.append(cd.ion_channel)
                ena = erev
                info['ena'] = erev_V
            elif cd.ion == 'k':
                if cd.ion_channel not in k_ions:
                    k_ions.append(cd.ion_channel)
                ek = erev
                info['ek'] = erev_V
            elif cd.ion == 'ca':
                if cd.ion_channel not in ca_ions:
                    ca_ions.append(cd.ion_channel)
                eca = erev
                info['eca'] = erev_V
            else:
                if cd.ion_channel not in other_ions:
                    other_ions.append(cd.ion_channel)
                if cd.ion == 'non_specific':
                    epas = erev
                    info['epas'] = erev_V
                if cd.ion == 'h':
                    eh = erev
                    info['eh'] = erev_V

            # Track min/max density per channel across all its placements
            if cd.ion_channel in maxes:
                if dens_si > maxes[cd.ion_channel]:
                    maxes[cd.ion_channel] = dens_si
            else:
                maxes[cd.ion_channel] = dens_si
            if cd.ion_channel in mins:
                if dens_si < mins[cd.ion_channel]:
                    mins[cd.ion_channel] = dens_si
            else:
                mins[cd.ion_channel] = dens_si

        # One SVG bar per channel, grouped na / k / ca / other
        for ion_channel in na_ions + k_ions + ca_ions + other_ions:
            col = get_ion_color(ions[ion_channel])
            info[ion_channel] = {
                'max': maxes[ion_channel],
                'min': mins[ion_channel]
            }

            if maxes[ion_channel] > 0:
                sb += _get_rect(ion_channel, row, maxes[ion_channel],
                                mins[ion_channel], col[0], col[1], col[2],
                                text_densities)
            row += 1

        if passives_erevs:
            if ena:
                sb += add_text(row, "E Na = %s " % ena)
                row += 1
            if ek:
                sb += add_text(row, "E K = %s " % ek)
                row += 1
            if eca:
                sb += add_text(row, "E Ca = %s" % eca)
                row += 1
            if eh:
                sb += add_text(row, "E H = %s" % eh)
                row += 1
            if epas:
                sb += add_text(row, "E pas = %s" % epas)
                row += 1

            for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                sb += add_text(row, "C (%s) = %s" % (sc.segment_groups, sc.value))
                info['specific_capacitance_%s' %
                     sc.segment_groups] = get_value_in_si(sc.value)
                row += 1

            # sb+='<text x="%s" y="%s" fill="black" font-family="Arial">%s</text>\n'%(width/3., (height+spacing)*(row+1), text)

        # Wrap accumulated body in the SVG envelope; text_densities (a bool)
        # widens the canvas by 200px when multiplied in
        sb = "<?xml version='1.0' encoding='UTF-8'?>\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"" + str(
            width + text_densities * 200) + "\" height=\"" + str(
                (height + spacing) * row) + "\">\n" + sb + "</svg>\n"

        print(sb)
        svg_file = nml2_file + "_channeldens.svg"
        if target_directory:
            svg_file = target_directory + "/" + svg_file.split('/')[-1]
        svg_files.append(svg_file)
        sf = open(svg_file, 'w')
        sf.write(sb)
        sf.close()
        print_comment_v("Written to %s" % os.path.abspath(svg_file))

    pp.pprint(all_info)

    return svg_files, all_info
def analyse_spiketime_vs_dt(nml2_file,
                            target,
                            duration,
                            simulator,
                            cell_v_path,
                            dts,
                            verbose=False,
                            spike_threshold_mV=0,
                            show_plot_already=True,
                            save_figure_to=None,
                            num_of_last_spikes=None):
    """
    Re-run the same NeuroML2 model at several timesteps and plot how spike
    times shift with dt.

    For each dt in dts (sorted ascending), a LEMS file is generated and run
    with the chosen simulator ('jNeuroML' or 'jNeuroML_NEURON'); spike times
    are extracted from the voltage trace at cell_v_path. The resulting plot
    shows, per spike index, spike time against ln(dt), plus dashed vertical
    reference lines at the smallest-dt spike times. num_of_last_spikes limits
    the analysis to the last N spikes of the smallest-dt run (all spikes if
    None). No return value; output is the generated plot(s).
    """
    from pyelectro.analysis import max_min
    import numpy as np

    all_results = {}

    dts = list(np.sort(dts))  # analysis below assumes dts[0] is the smallest

    for dt in dts:
        if verbose:
            print_comment_v(" == Generating simulation for dt = %s ms" % dt)
        ref = str("Sim_dt_%s" % dt).replace('.', '_')
        lems_file_name = "LEMS_%s.xml" % ref
        generate_lems_file_for_neuroml(ref,
                                       nml2_file,
                                       target,
                                       duration,
                                       dt,
                                       lems_file_name,
                                       '.',
                                       gen_plots_for_all_v=True,
                                       gen_saves_for_all_v=True,
                                       copy_neuroml=False)

        if simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(lems_file_name,
                                                   nogui=True,
                                                   load_saved_data=True,
                                                   plot=False,
                                                   verbose=verbose)
        if simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
                                                          nogui=True,
                                                          load_saved_data=True,
                                                          plot=False,
                                                          verbose=verbose)

        print("Results reloaded: %s" % results.keys())
        all_results[dt] = results

    xs = []
    ys = []
    labels = []
    spxs = []
    spys = []
    linestyles = []
    markers = []
    colors = []
    spike_times_final = []
    array_of_num_of_spikes = []

    # Extract spike times per dt (same order as sorted dts)
    for dt in dts:
        t = all_results[dt]['t']
        v = all_results[dt][cell_v_path]
        xs.append(t)
        ys.append(v)
        labels.append(dt)

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        spike_times = mm['maxima_times']
        spike_times_final.append(spike_times)
        array_of_num_of_spikes.append(len(spike_times))

    max_num_of_spikes = max(array_of_num_of_spikes)
    # Spikes of the smallest-dt run act as the reference
    min_dt_spikes = spike_times_final[0]
    bound_dts = [math.log(dts[0]), math.log(dts[-1])]

    # Restrict to at most num_of_last_spikes of the reference run
    if num_of_last_spikes == None:
        num_of_spikes = len(min_dt_spikes)
    else:
        if len(min_dt_spikes) >= num_of_last_spikes:
            num_of_spikes = num_of_last_spikes
        else:
            num_of_spikes = len(min_dt_spikes)

    # Negative indices [-1, -2, ..., -num_of_spikes] selecting the last spikes
    spike_indices = [(-1) * ind for ind in range(1, num_of_spikes + 1)]

    if len(min_dt_spikes) > abs(spike_indices[-1]):
        # One spike earlier than the window, so the earliest analysed spike
        # is fully inside the comparison range
        earliest_spike_time = min_dt_spikes[spike_indices[-1] - 1]
    else:
        earliest_spike_time = min_dt_spikes[spike_indices[-1]]

    # Build one scatter series per spike index: spike time vs ln(dt)
    for spike_ind in range(0, max_num_of_spikes):
        spike_time_values = []
        dt_values = []
        for dt in range(0, len(dts)):  # dt is an index here, not a timestep
            if spike_times_final[dt] != []:
                if len(spike_times_final[dt]) >= spike_ind + 1:
                    if spike_times_final[dt][spike_ind] >= earliest_spike_time:
                        spike_time_values.append(
                            spike_times_final[dt][spike_ind])
                        dt_values.append(math.log(dts[dt]))

        linestyles.append('')
        markers.append('o')
        colors.append('g')
        spxs.append(dt_values)
        spys.append(spike_time_values)

    # Dashed horizontal reference lines at the smallest-dt spike times
    for last_spike_index in spike_indices:
        vertical_line = [
            min_dt_spikes[last_spike_index], min_dt_spikes[last_spike_index]
        ]
        spxs.append(bound_dts)
        spys.append(vertical_line)
        linestyles.append('--')
        markers.append('')
        colors.append('k')

    pynml.generate_plot(spxs,
                        spys,
                        "Spike times vs dt",
                        colors=colors,
                        linestyles=linestyles,
                        markers=markers,
                        xaxis='ln ( dt (ms) )',
                        yaxis='Spike times (s)',
                        show_plot_already=show_plot_already,
                        save_figure_to=save_figure_to)

    if verbose:
        pynml.generate_plot(xs,
                            ys,
                            "Membrane potentials in %s for %s" %
                            (simulator, dts),
                            labels=labels,
                            show_plot_already=show_plot_already,
                            save_figure_to=save_figure_to)
def convert_to_swc(nml_file_name, add_comments=False, target_dir=None):
    '''
    Find all <cell> elements and create one SWC file for each.

    Each cell's morphology is exported in three passes — soma_group (SWC
    type 1), dendrite_group (type 3) and axon_group (type 2) — via
    _get_lines_for_seg_group, which uses the module-level line_count /
    line_index_vs_distals / line_index_vs_proximals globals for SWC sample
    numbering. Files are named <cell_id>.swc and written to target_dir
    (defaults to the directory of nml_file_name).

    Raises Exception if the three groups do not cover every segment of a
    cell's morphology.
    '''
    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals

    if target_dir is None:
        base_dir = os.path.dirname(os.path.realpath(nml_file_name))
        target_dir = base_dir

    nml_doc = pynml.read_neuroml2_file(nml_file_name,
                                       include_includes=True,
                                       verbose=False,
                                       optimized=True)

    for cell in nml_doc.cells:
        # Fix: reset the numbering globals and line buffers PER CELL.
        # Previously they were reset once before this loop, so with more
        # than one cell the second SWC file also contained the first cell's
        # lines and continued its sample numbering.
        line_count = 1
        line_index_vs_distals = {}
        line_index_vs_proximals = {}
        lines = []
        comment_lines = []

        swc_file_name = '%s/%s.swc' % (target_dir, cell.id)

        info = "Cell %s taken from NeuroML file %s converted to SWC" % (
            cell.id, nml_file_name)
        print_comment_v(info)
        comment_lines.append(info)
        comment_lines.append('Using pyNeuroML v%s' % pynmlv)

        group = 'soma_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 1)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))
        soma_seg_count = len(seg_ids)
        lines += lines_sg

        group = 'dendrite_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 3)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))
        dend_seg_count = len(seg_ids)
        lines += lines_sg

        group = 'axon_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 2)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))
        axon_seg_count = len(seg_ids)
        lines += lines_sg

        # Every segment must belong to exactly one of the three groups,
        # otherwise the exported morphology would be incomplete
        if not len(cell.morphology.segments
                   ) == soma_seg_count + dend_seg_count + axon_seg_count:
            raise Exception(
                "The numbers of the segments in groups: soma_group+dendrite_group+axon_group (%i), is not the same as total number of segments (%s)! All bets are off!"
                % (soma_seg_count + dend_seg_count + axon_seg_count,
                   len(cell.morphology.segments)))

        # Fix: use a context manager so the file handle is closed even if
        # writing fails (original opened early and closed manually)
        with open(swc_file_name, 'w') as swc_file:
            if add_comments:
                for l in comment_lines:
                    swc_file.write('# %s\n' % l)

            for swc_line in lines:
                print(swc_line)
                swc_file.write('%s\n' % swc_line)

        print("Written to %s" % swc_file_name)
def generate_lems_file_for_neuroml(
        sim_id,
        neuroml_file,
        target,
        duration,
        dt,
        lems_file_name,
        target_dir,
        nml_doc=None,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
        include_extra_files=[],
        gen_plots_for_all_v=True,
        plot_all_segments=False,
        gen_plots_for_quantities={},  # Dict with displays vs lists of quantity paths
        gen_plots_for_only_populations=[],  # List of populations, all pops if=[]
        gen_saves_for_all_v=True,
        save_all_segments=False,
        gen_saves_for_only_populations=[],  # List of populations, all pops if=[]
        gen_saves_for_quantities={},  # Dict with file names vs lists of quantity paths
        gen_spike_saves_for_all_somas=False,
        gen_spike_saves_for_only_populations=[],  # List of populations, all pops if=[]
        gen_spike_saves_for_cells={},  # Dict with file names vs lists of quantity paths
        spike_time_format='ID_TIME',
        copy_neuroml=True,
        report_file_name=None,
        lems_file_generate_seed=None,
        verbose=False,
        simulation_seed=12345):
    """
    Generate a LEMS simulation file for the NeuroML2 model in neuroml_file.

    Builds a LEMSSimulation (id sim_id, given duration/dt/target), includes
    the NeuroML file (copying it and its includes into target_dir unless
    copy_neuroml is False), and — driven by the gen_* flags — creates
    displays, output files and spike/event output files for the populations
    and quantities of the model. The LEMS file is written to
    target_dir/lems_file_name.

    Returns (quantities_saved, ls): the list of quantity paths recorded to
    output files, and the LEMSSimulation object.
    """
    my_random = random.Random()
    if lems_file_generate_seed:
        my_random.seed(
            lems_file_generate_seed
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
    else:
        my_random.seed(
            12345
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input

    file_name_full = '%s/%s' % (target_dir, lems_file_name)

    print_comment_v(
        'Creating LEMS file at: %s for NeuroML 2 file: %s (copy: %s)' %
        (file_name_full, neuroml_file, copy_neuroml))

    ls = LEMSSimulation(sim_id,
                        duration,
                        dt,
                        target,
                        simulation_seed=simulation_seed)

    if nml_doc is None:
        nml_doc = read_neuroml2_file(neuroml_file,
                                     include_includes=True,
                                     verbose=verbose)
        # Second, shallow read: only this file's own <include> elements are
        # needed for the copy logic below
        nml_doc_inc_not_included = read_neuroml2_file(neuroml_file,
                                                      include_includes=False,
                                                      verbose=False)
    else:
        nml_doc_inc_not_included = nml_doc

    ls.set_report_file(report_file_name)

    quantities_saved = []

    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)

    if not copy_neuroml:
        # Reference the model in place, relative to the LEMS file's directory
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file),
                                       os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s" %
                        (neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file,
                                 include_included=True,
                                 relative_to_dir=os.path.abspath(target_dir))
    else:
        # Copy the model and its (two levels of) includes next to the LEMS file
        print_comment_v(
            "Copying a NeuroML file (%s) to: %s (abs path: %s)" %
            (neuroml_file, target_dir, os.path.abspath(target_dir)))

        if not os.path.isdir(target_dir):
            raise Exception("Target directory %s does not exist!" % target_dir)

        if os.path.realpath(
                os.path.dirname(neuroml_file)) != os.path.realpath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        else:
            print_comment_v("No need, same file...")

        neuroml_file_name = os.path.basename(neuroml_file)

        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        nml_dir = os.path.dirname(neuroml_file) if len(
            os.path.dirname(neuroml_file)) > 0 else '.'

        for include in nml_doc_inc_not_included.includes:

            # Resolve the include's location: either relative to the nml
            # file's dir, or directly if the href is itself a valid path
            if nml_dir == '.' and os.path.isfile(include.href):
                incl_curr = include.href
            else:
                incl_curr = '%s/%s' % (nml_dir, include.href)

            if os.path.isfile(include.href):
                incl_curr = include.href

            print_comment_v(
                ' - Including %s (located at %s; nml dir: %s), copying to %s' %
                (include.href, incl_curr, nml_dir, target_dir))

            '''
            if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
               not os.path.isfile("%s/%s" % (target_dir, incl_curr)) and \
               not os.path.isfile(incl_curr):
                shutil.copy(incl_curr, target_dir)
            else:
                print_comment_v("No need to copy...")'''

            f1 = "%s/%s" % (target_dir, os.path.basename(incl_curr))
            f2 = "%s/%s" % (target_dir, incl_curr)
            if os.path.isfile(f1):
                print_comment_v("No need to copy, file exists: %s..." % f1)
            elif os.path.isfile(f2):
                print_comment_v("No need to copy, file exists: %s..." % f2)
            else:
                shutil.copy(incl_curr, target_dir)

            ls.include_neuroml2_file(include.href, include_included=False)

            # Recurse one level into the included file's own includes
            sub_doc = read_neuroml2_file(incl_curr)
            sub_dir = os.path.dirname(incl_curr) if len(
                os.path.dirname(incl_curr)) > 0 else '.'

            if sub_doc.__class__ == neuroml.nml.nml.NeuroMLDocument:
                for include in sub_doc.includes:
                    incl_curr = '%s/%s' % (sub_dir, include.href)
                    print_comment_v(' -- Including %s located at %s' %
                                    (include.href, incl_curr))

                    if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
                       not os.path.isfile("%s/%s" % (target_dir, incl_curr)):

                        shutil.copy(incl_curr, target_dir)
                        ls.include_neuroml2_file(include.href,
                                                 include_included=False)

    if gen_plots_for_all_v \
       or gen_saves_for_all_v \
       or len(gen_plots_for_only_populations) > 0 \
       or len(gen_saves_for_only_populations) > 0 \
       or gen_spike_saves_for_all_somas \
       or len(gen_spike_saves_for_only_populations) > 0:

        for network in nml_doc.networks:
            for population in network.populations:

                variable = "v"
                quantity_template_e = "%s[%i]"

                component = population.component
                size = population.size
                cell = None
                segment_ids = []

                # Poisson spike sources have no membrane potential; record
                # "tsince" (time since last spike) instead
                for c in nml_doc.spike_generator_poissons:
                    if c.id == component:
                        variable = "tsince"
                for c in nml_doc.SpikeSourcePoisson:
                    if c.id == component:
                        variable = "tsince"

                quantity_template = "%s[%i]/" + variable

                if plot_all_segments or gen_spike_saves_for_all_somas:
                    # Look up the cell's morphology to enumerate its segments
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()

                if population.type and population.type == 'populationList':
                    # populationList uses path-style quantity refs, not indexed
                    quantity_template = "%s/%i/" + component + "/" + variable
                    quantity_template_e = "%s/%i/" + component + ""
                    #  Multicompartmental cell
                    #  Needs to be supported in NeuronWriter
                    # if len(segment_ids)>1:
                    #     quantity_template_e = "%s/%i/"+component+"/0"
                    size = len(population.instances)

                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment(
                        'Generating %i plots for %s in population %s' %
                        (size, component, population.id))

                    disp0 = 'DispPop__%s' % population.id
                    ls.create_display(
                        disp0,
                        "Membrane potentials of cells in %s" % population.id,
                        "-90", "50")

                    for i in range(size):
                        if cell is not None and plot_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_line_to_display(
                                    disp0, "%s[%i] seg %i: v" %
                                    (population.id, i, segment_id), quantity,
                                    "1mV", get_next_hex_color(my_random))
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_line_to_display(
                                disp0, "%s[%i]: v" % (population.id, i),
                                quantity, "1mV", get_next_hex_color(my_random))

                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment(
                        'Saving %i values of %s for %s in population %s' %
                        (size, variable, component, population.id))

                    of0 = 'Volts_file__%s' % population.id
                    ls.create_output_file(
                        of0,
                        "%s.%s.%s.dat" % (sim_id, population.id, variable))
                    for i in range(size):
                        if cell is not None and save_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_column_to_output_file(
                                    of0, 'v_%s' % safe_variable(quantity),
                                    quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_column_to_output_file(
                                of0, 'v_%s' % safe_variable(quantity),
                                quantity)
                            quantities_saved.append(quantity)

                if gen_spike_saves_for_all_somas or population.id in gen_spike_saves_for_only_populations:
                    print_comment(
                        'Saving spikes in %i somas for %s in population %s' %
                        (size, component, population.id))

                    eof0 = 'Spikes_file__%s' % population.id
                    ls.create_event_output_file(eof0,
                                                "%s.%s.spikes" %
                                                (sim_id, population.id),
                                                format=spike_time_format)
                    for i in range(size):
                        quantity = quantity_template_e % (population.id, i)
                        ls.add_selection_to_event_output_file(
                            eof0, i, quantity, "spike")
                        quantities_saved.append(quantity)

    # Explicit displays requested by the caller
    for display in sorted(gen_plots_for_quantities.keys()):
        quantities = gen_plots_for_quantities[display]
        max_ = "1"
        min_ = "-1"
        scale = "1"
        # Check for v ...
        if quantities and len(quantities) > 0 and quantities[0].endswith('/v'):
            max_ = "40"
            min_ = "-80"
            scale = "1mV"
        ls.create_display(display, "Plots of %s" % display, min_, max_)
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, scale,
                                   get_next_hex_color(my_random))

    # Explicit output files requested by the caller
    for file_name in sorted(gen_saves_for_quantities.keys()):
        quantities = gen_saves_for_quantities[file_name]
        of_id = safe_variable(file_name)
        ls.create_output_file(of_id, file_name)
        for q in quantities:
            ls.add_column_to_output_file(of_id, safe_variable(q), q)
            quantities_saved.append(q)

    # Explicit per-cell spike files requested by the caller; all cells in one
    # file must come from the same population (indices double as spike ids)
    for file_name in sorted(gen_spike_saves_for_cells.keys()):
        quantities = gen_spike_saves_for_cells[file_name]
        of_id = safe_variable(file_name)
        ls.create_event_output_file(of_id, file_name)
        pop_here = None
        for i, quantity in enumerate(quantities):
            pop, index = get_pop_index(quantity)
            if pop_here:
                if pop_here != pop:
                    raise Exception('Problem with generating LEMS for saving spikes for file %s.\n' % file_name + \
                                    'Multiple cells from different populations in one file will cause issues with index/spike id.')
            pop_here = pop
            # print('===== Adding to %s (%s) event %i for %s, pop: %s, i: %s' % (file_name, of_id, i, quantity, pop, index))
            ls.add_selection_to_event_output_file(of_id, index, quantity,
                                                  "spike")
            quantities_saved.append(quantity)

    ls.save_to_file(file_name=file_name_full)
    return quantities_saved, ls
def generate_current_vs_frequency_curve(nml2_file,
                                        cell_id,
                                        start_amp_nA,
                                        end_amp_nA,
                                        step_nA,
                                        analysis_duration,
                                        analysis_delay,
                                        dt=0.05,
                                        temperature="32degC",
                                        spike_threshold_mV=0.,
                                        plot_voltage_traces=False,
                                        plot_if=True,
                                        plot_iv=False,
                                        xlim_if=None,
                                        ylim_if=None,
                                        xlim_iv=None,
                                        ylim_iv=None,
                                        show_plot_already=True,
                                        save_if_figure_to=None,
                                        save_iv_figure_to=None,
                                        simulator="jNeuroML",
                                        include_included=True):
    """
    Generate an F/I (and optionally I/V) curve for cell_id in nml2_file.

    NOTE(review): this is a second, older definition of
    generate_current_vs_frequency_curve in the same module; being defined
    later, it SHADOWS the richer version above (which supports custom
    amplitudes, pre/post zero-pulse windows, NetPyNE, etc.). Confirm whether
    this duplicate should be removed.

    One copy of the cell is instantiated per amplitude in the inclusive range
    start_amp_nA..end_amp_nA (steps of step_nA), each driven by a pulse
    lasting the whole simulation. Spike frequencies are measured in
    [analysis_delay, analysis_delay + analysis_duration).

    Returns a dict {amplitude_nA: frequency_Hz}.
    """
    from pyelectro.analysis import max_min
    from pyelectro.analysis import mean_spike_frequency
    import numpy as np

    print_comment_v(
        "Generating FI curve for cell %s in %s using %s (%snA->%snA; %snA steps)"
        % (cell_id, nml2_file, simulator, start_amp_nA, end_amp_nA, step_nA))

    sim_id = 'iv_%s' % cell_id
    duration = analysis_duration + analysis_delay
    ls = LEMSSimulation(sim_id, duration, dt)

    ls.include_neuroml2_file(nml2_file, include_included=include_included)

    # Inclusive sweep of stimulation amplitudes (float accumulation)
    stims = []
    amp = start_amp_nA
    while amp <= end_amp_nA:
        stims.append(amp)
        amp += step_nA

    # One cell instance per amplitude
    number_cells = len(stims)
    pop = nml.Population(id="population_of_%s" % cell_id,
                         component=cell_id,
                         size=number_cells)

    # create network and add populations
    net_id = "network_of_%s" % cell_id
    net = nml.Network(id=net_id,
                      type="networkWithTemperature",
                      temperature=temperature)
    ls.assign_simulation_target(net_id)
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(nml.IncludeType(nml2_file))
    net.populations.append(pop)

    for i in range(number_cells):
        stim_amp = "%snA" % stims[i]
        input_id = ("input_%s" % stim_amp).replace('.', '_').replace('-', 'min')
        pg = nml.PulseGenerator(id=input_id,
                                delay="0ms",
                                duration="%sms" % duration,
                                amplitude=stim_amp)
        net_doc.pulse_generators.append(pg)

        # Add these to cells
        input_list = nml.InputList(id=input_id,
                                   component=pg.id,
                                   populations=pop.id)
        input = nml.Input(id='0',
                          target="../%s[%i]" % (pop.id, i),
                          destination="synapses")
        input_list.input.append(input)
        net.input_lists.append(input_list)

    net_file_name = '%s.net.nml' % sim_id
    pynml.write_neuroml2_file(net_doc, net_file_name)
    ls.include_neuroml2_file(net_file_name)

    disp0 = 'Voltage_display'
    ls.create_display(disp0, "Voltages", "-90", "50")
    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % sim_id)

    for i in range(number_cells):
        ref = "v_cell%i" % i
        quantity = "%s[%i]/v" % (pop.id, i)
        ls.add_line_to_display(disp0, ref, quantity, "1mV",
                               pynml.get_next_hex_color())
        ls.add_column_to_output_file(of0, ref, quantity)

    lems_file_name = ls.save_to_file()

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               nogui=True,
                                               load_saved_data=True,
                                               plot=plot_voltage_traces,
                                               show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(
            lems_file_name,
            nogui=True,
            load_saved_data=True,
            plot=plot_voltage_traces,
            show_plot_already=False)

    #print(results.keys())
    if_results = {}
    iv_results = {}
    for i in range(number_cells):
        # Results are in SI (s, V); convert to ms and mV
        t = np.array(results['t']) * 1000
        v = np.array(results["%s[%i]/v" % (pop.id, i)]) * 1000

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        spike_times = mm['maxima_times']
        freq = 0
        if len(spike_times) > 2:
            count = 0
            for s in spike_times:
                if s >= analysis_delay and s < (analysis_duration +
                                                analysis_delay):
                    count += 1
            freq = 1000 * count / float(analysis_duration)

        mean_freq = mean_spike_frequency(spike_times)
        # print("--- %s nA, spike times: %s, mean_spike_frequency: %f, freq (%fms -> %fms): %f"%(stims[i],spike_times, mean_freq, analysis_delay, analysis_duration+analysis_delay, freq))
        if_results[stims[i]] = freq

        if freq == 0:
            # Sub-threshold: final membrane potential goes into the IV curve
            iv_results[stims[i]] = v[-1]

    if plot_if:
        stims = sorted(if_results.keys())
        stims_pA = [ii * 1000 for ii in stims]
        freqs = [if_results[s] for s in stims]
        pynml.generate_plot([stims_pA], [freqs],
                            "Frequency versus injected current for: %s" %
                            nml2_file,
                            colors=['k'],
                            linestyles=['-'],
                            markers=['o'],
                            xaxis='Input current (pA)',
                            yaxis='Firing frequency (Hz)',
                            xlim=xlim_if,
                            ylim=ylim_if,
                            grid=True,
                            show_plot_already=False,
                            save_figure_to=save_if_figure_to)
    if plot_iv:
        stims = sorted(iv_results.keys())
        stims_pA = [ii * 1000 for ii in sorted(iv_results.keys())]
        vs = [iv_results[s] for s in stims]
        pynml.generate_plot(
            [stims_pA], [vs],
            "Final membrane potential versus injected current for: %s" %
            nml2_file,
            colors=['k'],
            linestyles=['-'],
            markers=['o'],
            xaxis='Input current (pA)',
            yaxis='Membrane potential (mV)',
            xlim=xlim_iv,
            ylim=ylim_iv,
            grid=True,
            show_plot_already=False,
            save_figure_to=save_iv_figure_to)

    if show_plot_already:
        from matplotlib import pyplot as plt
        plt.show()

    return if_results
def evaluate_HHExpRate(rate, midpoint, scale, v):
    '''
    Helper for putting values into HHExpRate, see also
    https://www.neuroml.org/NeuroML2CoreTypes/Channels.html#HHExpRate

    Evaluates rate * exp((v - midpoint) / scale) with all arguments given as
    NeuroML-style strings with units (converted via get_value_in_si), printing
    the working and the resulting XML snippet.

    Parameters
    ----------
    rate : rate constant with units, e.g. '0.07per_ms'
    midpoint : voltage midpoint with units, e.g. '-65mV'
    scale : voltage scale with units, e.g. '-20mV'
    v : membrane potential at which to evaluate, e.g. '-20mV'

    Returns
    -------
    The evaluated rate in SI units (per_s). (Added, backward-compatible:
    previously the value was computed and printed but discarded.)
    '''
    rate_si = get_value_in_si(rate)
    midpoint_si = get_value_in_si(midpoint)
    scale_si = get_value_in_si(scale)
    v_si = get_value_in_si(v)
    print_comment_v('Evaluating: rate * exp( (v - midpoint) / scale) ')
    print_comment_v(' %s * exp( (v - (%s)) / %s) for v = %s' %
                    (rate, midpoint, scale, v))
    print_comment_v(' %s * exp( (%s - (%s)) / %s) ' %
                    (rate_si, v_si, midpoint_si, scale_si))
    print_comment_v(
        ' <... type="HHExpRate" rate="%s" midpoint="%s" scale="%s" />' %
        (rate, midpoint, scale))
    r = rate_si * exp((v_si - midpoint_si) / scale_si)
    print_comment_v(' = %s per_s' % r)
    print_comment_v(' = %s per_ms' % (r / 1000.))
    # Return the SI value so callers can use the result programmatically
    return r