# Imports needed by this snippet (not shown in the original fragment):
from neuroml import IncludeType
import neuroml.writers as writers
from pyneuroml import pynml
from pyneuroml.lems import generate_lems_file_for_neuroml


def SingleCellSim(cell_ref, nc_simConfig, sim_duration):

    net_doc = pynml.read_neuroml2_file("%s.nml"%nc_simConfig)
    net_doc.id=nc_simConfig

    net=net_doc.networks[0]
    net.id=nc_simConfig


    net_doc.includes.append(IncludeType("../generatedNeuroML2/%s.cell.nml"%cell_ref))

    net_file = '%s.net.nml'%(net_doc.id)
    writers.NeuroMLWriter.write(net_doc, net_file)

    print("Written network with 1 cell in network to: %s"%(net_file))

    from neuroml.utils import validate_neuroml2

    validate_neuroml2(net_file)

    generate_lems_file_for_neuroml("Sim_"+net_doc.id, 
                               net_file, 
                               net_doc.id, 
                               sim_duration,
                               0.025, 
                               "LEMS_%s.xml"%net_doc.id,
                               ".",
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               copy_neuroml = False,
                               seed = 1234)
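
# A minimal usage sketch for the helper above. The names are hypothetical: it assumes
# "TestConfig.nml" and "../generatedNeuroML2/MyCell.cell.nml" already exist on disk.
if __name__ == "__main__":
    SingleCellSim(cell_ref="MyCell", nc_simConfig="TestConfig", sim_duration=500)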
Example #2
    def go(self):

        lems_file_name = 'LEMS_%s.xml' % (self.reference)

        generate_lems_file_for_neuroml(self.reference,
                                       self.neuroml_file,
                                       self.target,
                                       self.sim_time,
                                       self.dt,
                                       lems_file_name=lems_file_name,
                                       target_dir=self.generate_dir,
                                       nml_doc=self.nml_doc)

        pynml.print_comment_v(
            "Running a simulation of %s ms with timestep %s ms: %s" %
            (self.sim_time, self.dt, lems_file_name))

        self.already_run = True

        start = time.time()
        if self.simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(
                lems_file_name,
                nogui=True,
                load_saved_data=True,
                plot=False,
                exec_in_dir=self.generate_dir,
                verbose=False,
                cleanup=self.cleanup)
        elif self.simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(
                lems_file_name,
                nogui=True,
                load_saved_data=True,
                plot=False,
                exec_in_dir=self.generate_dir,
                verbose=False,
                cleanup=self.cleanup)
        else:
            pynml.print_comment_v('Unsupported simulator: %s' % self.simulator)
            exit()

        secs = time.time() - start

        pynml.print_comment_v(
            "Ran simulation in %s in %f seconds (%f mins)\n\n" %
            (self.simulator, secs, secs / 60.0))

        self.t = [t * 1000 for t in results['t']]

        self.volts = {}

        for key in results.keys():
            if key != 't':
                self.volts[key] = [v * 1000 for v in results[key]]
Example #3
    def go(self):

        lems_file_name = 'LEMS_%s.xml' % (self.reference)

        generate_lems_file_for_neuroml(self.reference,
                                       self.neuroml_file,
                                       self.target,
                                       self.sim_time,
                                       self.dt,
                                       lems_file_name=lems_file_name,
                                       target_dir=self.generate_dir)

        pynml.print_comment_v("Running a simulation of %s ms with timestep %s ms: %s" % (self.sim_time, self.dt, lems_file_name))

        self.already_run = True

        start = time.time()
        if self.simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(lems_file_name,
                                                   nogui=True,
                                                   load_saved_data=True,
                                                   plot=False,
                                                   exec_in_dir=self.generate_dir,
                                                   verbose=False)
        elif self.simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
                                                          nogui=True,
                                                          load_saved_data=True,
                                                          plot=False,
                                                          exec_in_dir=self.generate_dir,
                                                          verbose=False)
        else:
            pynml.print_comment_v('Unsupported simulator: %s' % self.simulator)
            exit()

        secs = time.time() - start

        pynml.print_comment_v("Ran simulation in %s in %f seconds (%f mins)\n\n" % (self.simulator, secs, secs / 60.0))

        self.t = [t * 1000 for t in results['t']]

        self.volts = {}

        for key in results.keys():
            if key != 't':
                self.volts[key] = [v * 1000 for v in results[key]]
def SingleCellSim(simConfig,dt,targetPath):
    
    src_files = os.listdir(targetPath)
    for file_name in src_files:
    
        if file_name=="Thalamocortical.net.nml":
           full_target_path=os.path.join(targetPath,file_name)
           net_doc = pynml.read_neuroml2_file(full_target_path)
           net_doc.id=simConfig

           net=net_doc.networks[0]
           net.id=simConfig


           net_file = '%s.net.nml'%(net_doc.id)
           writers.NeuroMLWriter.write(net_doc, full_target_path)

           print("Written network with 1 cell in network to: %s"%(full_target_path))
           
        if file_name=="LEMS_Thalamocortical.xml":
           full_lems_path=os.path.join(targetPath,file_name)
           sim_duration=get_sim_duration(full_lems_path)
           

    validate_neuroml2(full_target_path)

    generate_lems_file_for_neuroml("Sim_"+net_doc.id, 
                               full_target_path, 
                               net_doc.id, 
                               sim_duration,
                               dt, 
                               "LEMS_%s.xml"%net_doc.id,
                               targetPath,
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               copy_neuroml = False,
                               seed = 1234)
Example #5
def SingleCellSim(simConfig, dt, targetPath):

    src_files = os.listdir(targetPath)
    for file_name in src_files:

        if file_name == "Thalamocortical.net.nml":
            full_target_path = os.path.join(targetPath, file_name)
            net_doc = pynml.read_neuroml2_file(full_target_path)
            net_doc.id = simConfig

            net = net_doc.networks[0]
            net.id = simConfig

            net_file = '%s.net.nml' % (net_doc.id)
            writers.NeuroMLWriter.write(net_doc, full_target_path)

            print("Written network with 1 cell in network to: %s" %
                  (full_target_path))

        if file_name == "LEMS_Thalamocortical.xml":
            full_lems_path = os.path.join(targetPath, file_name)
            sim_duration = get_sim_duration(full_lems_path)

    validate_neuroml2(full_target_path)

    generate_lems_file_for_neuroml("Sim_" + net_doc.id,
                                   full_target_path,
                                   net_doc.id,
                                   sim_duration,
                                   dt,
                                   "LEMS_%s.xml" % net_doc.id,
                                   targetPath,
                                   gen_plots_for_all_v=True,
                                   plot_all_segments=False,
                                   gen_saves_for_all_v=True,
                                   save_all_segments=False,
                                   copy_neuroml=False,
                                   seed=1234)
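
# Hypothetical usage sketch for the variant above; the path is a placeholder and is
# expected to contain "Thalamocortical.net.nml" and "LEMS_Thalamocortical.xml".
# SingleCellSim(simConfig="TestSim", dt=0.025, targetPath="../NeuroML2/")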
def analyse_cell(dataset_id, type, info, nogui = False, densities=False, analysis_dir='../../data/'):
    
    reference = '%s_%s'%(type,dataset_id)
    cell_file = '%s/%s.cell.nml'%(type,reference)
    
    print("====================================\n\n   Analysing cell: %s, dataset %s\n"%(cell_file,dataset_id))
    
    nml_doc = pynml.read_neuroml2_file(cell_file)
    notes = nml_doc.cells[0].notes if len(nml_doc.cells)>0 else nml_doc.izhikevich2007_cells[0].notes
    meta_nml = eval(notes[notes.index('{'):])
    summary = "Fitness: %s (max evals: %s, pop: %s)"%(meta_nml['fitness'],meta_nml['max_evaluations'],meta_nml['population_size'])
    print(summary)
    
    images = 'summary/%s_%s.png'
    if_iv_data_files = 'summary/%s_%s.dat'
    

    data, v_sub, curents_sub, freqs, curents_spike = get_if_iv_for_dataset('%s%s_analysis.json'%(analysis_dir,dataset_id))
    
    if densities:

        dataset = {}
        seed = meta_nml['seed']
        if isinstance(seed, tuple):
            seed = seed[0]
        layer = str(data['location'].split(',')[-1].strip().replace(' ',''))
        ref = '%s_%s_%s'%(dataset_id,layer,int(seed))

        dataset['id'] = dataset_id
        dataset['reference'] = ref
        metas = ['aibs_cre_line','aibs_dendrite_type','location']
        for m in metas:
            dataset[m] = str(data[m])

        metas2 = ['fitness','population_size','seed']
        for m in metas2:
            dataset[m] = meta_nml[m]
            
        # Assume images below already generated...
        if type=='HH':
            
            
            cell = nml_doc.cells[0]
            
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]
        
            info['datasets'][ref] = dataset
            
        elif type=='Izh':
            
            dataset['tuned_cell_info'] = {}
            izh_cell = nml_doc.izhikevich2007_cells[0]
                        
            for p in ['C','a','b','c','d','k','vpeak','vr','vt']:
            
                dataset['tuned_cell_info'][p] = get_value_in_si(getattr(izh_cell, p))
            
            '''
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]'''
        
        info['datasets'][ref] = dataset
        
    else:

        traces_ax, if_ax, iv_ax = generate_current_vs_frequency_curve(cell_file, 
                                            reference, 
                                            simulator = 'jNeuroML_NEURON',
                                            start_amp_nA =         -0.1, 
                                            end_amp_nA =           0.4, 
                                            step_nA =              0.01, 
                                            analysis_duration =    1000, 
                                            analysis_delay =       50,
                                            plot_voltage_traces =  False,
                                            plot_if =              not nogui,
                                            plot_iv =              not nogui, 
                                            xlim_if =              [-200, 400],
                                            ylim_if =              [-10, 120],
                                            xlim_iv =              [-200, 400],
                                            ylim_iv =              [-120, -40],
                                            save_if_figure_to =    images%(reference, 'if'), 
                                            save_iv_figure_to =    images%(reference, 'iv'),
                                            save_if_data_to =      if_iv_data_files%(reference, 'if'), 
                                            save_iv_data_to =      if_iv_data_files%(reference, 'iv'), 
                                            show_plot_already = False,
                                            return_axes = True)


        iv_ax.plot(curents_sub, v_sub,   color='#ff2222',marker='o', linestyle='',zorder=1)   
        if_ax.plot(curents_spike, freqs ,color='#ff2222',marker='o', linestyle='',zorder=1)

        iv_ax.get_figure().savefig(images%(reference, 'iv'),bbox_inches='tight')
        if_ax.get_figure().savefig(images%(reference, 'if'),bbox_inches='tight')
        
        
        offset = 100 # mV 
        
        ifv_x = []
        ifv_y = []
        markers = []
        lines = []
        colors = []
        
        cols = {'Izh':'r','HH':'g','AllenHH':'b'}
        
        for ii in ['if','iv']:
            for tt in ['Izh','HH','AllenHH']:
                rr = '%s_%s'%(tt,dataset_id)
                f = if_iv_data_files%(rr, ii)
                if os.path.isfile(f):
                    print("--- Opening: %s"%f)
                    data, indeces = reload_standard_dat_file(f)
                    
                    ifv_x.append(data['t'])
                    
                    if ii=='if':
                        ifv_y.append([ff-offset for ff in data[0]])
                    else:
                        ifv_y.append([vv for vv in data[0]])
                        
                    
                    markers.append('')
                    colors.append(cols[tt])
                    lines.append('-')
                    
        ifv_x.append(curents_sub)
        vvsub = [vv for vv in v_sub]
        
        ifv_y.append(vvsub)
        
        sub_color = '#888888'
        markers.append('D')
        colors.append('k')
        lines.append('')
        
        ifv_x.append(curents_spike)
        ifv_y.append([ff-offset for ff in freqs])
        
        markers.append('o')
        colors.append(sub_color)
        lines.append('')
        
        import matplotlib
        import matplotlib.pyplot as plt

        print(ifv_x)
        print(ifv_y)
        ylim = [-105, -20]
        font_size = 18
        ax1 = pynml.generate_plot(ifv_x,
                    ifv_y, 
                    summary, 
                    markers=markers,
                    colors=colors,
                    linestyles=lines,
                    show_plot_already=False,
                    xlim = [-100, 400],
                    font_size = font_size,
                    ylim = ylim,
                    title_above_plot=False)
                    
        plt.xlabel('Input current (pA)', fontsize = font_size)
        plt.ylabel("Steady membrane potential (mV)", fontsize = font_size)
        
        ax2 = ax1.twinx()
        plt.ylim([ylim[0]+offset,ylim[1]+offset])
        plt.ylabel('Firing frequency (Hz)', color=sub_color, fontsize = font_size)
        ax2.tick_params(axis='y', colors=sub_color)
        
        
        #plt.axis('off')
        
        plt.savefig(images%(reference, 'if_iv'+"_FIG"),bbox_inches='tight')
        

        temp_dir = 'temp/'

        print("Copying %s to %s"%(cell_file, temp_dir))
        shutil.copy(cell_file, temp_dir)

        net_file = generate_network_for_sweeps(type, dataset_id, '%s.cell.nml'%(reference), reference, temp_dir, data_dir=analysis_dir)

        lems_file_name = 'LEMS_Test_%s_%s.xml'%(type,dataset_id)

        generate_lems_file_for_neuroml('Test_%s_%s'%(dataset_id,type),
                                       net_file,
                                       'network_%s_%s'%(dataset_id,type), 
                                       1500, 
                                       0.01, 
                                       lems_file_name,
                                       temp_dir,
                                       gen_plots_for_all_v=False,
                                       copy_neuroml = False)

        simulator = "jNeuroML_NEURON"

        if simulator == "jNeuroML":
            results = pynml.run_lems_with_jneuroml(temp_dir+lems_file_name, 
                                                    nogui=True, 
                                                    load_saved_data=True, 
                                                    plot=False,
                                                    show_plot_already=False)
        elif simulator == "jNeuroML_NEURON":
            results = pynml.run_lems_with_jneuroml_neuron(temp_dir+lems_file_name, 
                                                    nogui=True, 
                                                    load_saved_data=True, 
                                                    plot=False,
                                                    show_plot_already=False)

        x = []
        y = []

        print(results.keys())

        tt = [t*1000 for t in results['t']]
        for i in range(len(results)-1):
            x.append(tt)
            y.append([v*1000 for v in results['Pop0/%i/%s_%s/v'%(i,type,dataset_id)]])

        pynml.generate_plot(x,
                    y, 
                    summary, 
                    show_plot_already=False,
                    ylim = [-120, 60],
                    save_figure_to = images%(reference, 'traces'),
                    title_above_plot=True)
                 
        ax = pynml.generate_plot(x,
                    y, 
                    summary, 
                    show_plot_already=False,
                    ylim = [-120, 60],
                    title_above_plot=False)
                    
        ax.set_xlabel(None)
        ax.set_ylabel(None)
        plt.axis('off')
        
        fig_file = images%(reference, 'traces'+"_FIG")
        plt.savefig(fig_file, bbox_inches='tight', pad_inches=0)
        from PIL import Image
        img = Image.open(fig_file)

        img2 = img.crop((60, 40, 660, 480))
        img2.save(fig_file)
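
# Hypothetical call sketch for analyse_cell(); the dataset id and directories are
# placeholders, and info['datasets'] is the dict the function fills when densities=True.
# info = {'datasets': {}}
# analyse_cell('12345', 'HH', info, nogui=True, densities=True, analysis_dir='../../data/')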
Example #7
def process_celldir(inputs):
    """Process cell directory"""

    count, cell_dir, nml2_cell_dir, total_count = inputs
    local_nml2_cell_dir = os.path.join("..", nml2_cell_dir)

    print(
        '\n\n************************************************************\n\n'
        'Parsing %s (cell %i/%i)\n' % (cell_dir, count, total_count))

    if os.path.isdir(cell_dir):
        old_cwd = os.getcwd()
        os.chdir(cell_dir)
    else:
        old_cwd = os.getcwd()
        os.chdir('../' + cell_dir)

    if make_zips:
        nml2_cell_dir = '%s/%s' % (zips_dir, cell_dir)
        if not os.path.isdir(nml2_cell_dir):
            os.mkdir(nml2_cell_dir)

    print("Generating into %s" % nml2_cell_dir)

    bbp_ref = None

    template_file = open('template.hoc', 'r')
    for line in template_file:
        if line.startswith('begintemplate '):
            bbp_ref = line.split(' ')[1].strip()
            print(
                ' > Assuming cell in directory %s is in a template named %s' %
                (cell_dir, bbp_ref))

    load_cell_file = 'loadcell.hoc'

    variables = {}

    variables['cell'] = bbp_ref
    variables['groups_info_file'] = groups_info_file

    template = """
///////////////////////////////////////////////////////////////////////////////
//
//   NOTE: This file is not part of the original BBP cell model distribution
//   It has been generated by ../ParseAll.py to facilitate loading of the cell
//   into NEURON for exporting the model morphology to NeuroML2
//
//////////////////////////////////////////////////////////////////////////////

load_file("stdrun.hoc")

objref cvode
cvode = new CVode()
cvode.active(1)

//======================== settings ===================================

v_init = -80

hyp_amp = -0.062866
step_amp = 0.3112968
tstop = 3000

//=================== creating cell object ===========================
load_file("import3d.hoc")
objref cell

// Using 1 to force loading of the file, in case file with same name was loaded
// before...
load_file(1, "constants.hoc")
load_file(1, "morphology.hoc")
load_file(1, "biophysics.hoc")
print "Loaded morphology and biophysics..."

load_file(1, "synapses/synapses.hoc")
load_file(1, "template.hoc")
print "Loaded template..."

load_file(1, "createsimulation.hoc")


create_cell(0)
print "Created new cell using loadcell.hoc: {{ cell }}"

define_shape()

wopen("{{ groups_info_file }}")

fprint("//Saving information on groups in this cell...\\n")

fprint("- somatic\\n")
forsec {{ cell }}[0].somatic {
    fprint("%s\\n",secname())
}

fprint("- basal\\n")
forsec {{ cell }}[0].basal {
    fprint("%s\\n",secname())
}

fprint("- axonal\\n")
forsec {{ cell }}[0].axonal {
    fprint("%s\\n",secname())
}
fprint("- apical\\n")
forsec {{ cell }}[0].apical {
    fprint("%s\\n",secname())
}
wopen()
        """

    t = Template(template)

    contents = t.render(variables)

    load_cell = open(load_cell_file, 'w')
    load_cell.write(contents)
    load_cell.close()

    print(' > Written %s' % load_cell_file)

    if os.path.isfile(load_cell_file):

        cell_info = parse_cell_info_file(cell_dir)

        nml_file_name = "%s.net.nml" % bbp_ref
        nml_net_loc = "%s/%s" % (local_nml2_cell_dir, nml_file_name)
        nml_cell_file = "%s_0_0.cell.nml" % bbp_ref
        nml_cell_loc = "%s/%s" % (local_nml2_cell_dir, nml_cell_file)

        print(' > Loading %s and exporting to %s' %
              (load_cell_file, nml_net_loc))

        export_to_neuroml2(load_cell_file,
                           nml_net_loc,
                           separateCellFiles=True,
                           includeBiophysicalProperties=False)

        print(' > Exported to: %s and %s using %s' %
              (nml_net_loc, nml_cell_loc, load_cell_file))

        nml_doc = pynml.read_neuroml2_file(nml_cell_loc)

        cell = nml_doc.cells[0]

        print(' > Adding groups from: %s' % groups_info_file)
        groups = {}
        current_group = None
        for line in open(groups_info_file):
            if not line.startswith('//'):
                if line.startswith('- '):
                    current_group = line[2:-1]
                    print(' > Adding group: [%s]' % current_group)
                    groups[current_group] = []
                else:
                    section = line.split('.')[1].strip()
                    segment_group = section.replace('[', '_').replace(']', '')
                    groups[current_group].append(segment_group)

        for g in groups.keys():
            new_seg_group = neuroml.SegmentGroup(id=g)
            cell.morphology.segment_groups.append(new_seg_group)
            for sg in groups[g]:
                new_seg_group.includes.append(neuroml.Include(sg))
            if g in ['basal', 'apical']:
                new_seg_group.inhomogeneous_parameters.append(
                    neuroml.InhomogeneousParameter(
                        id="PathLengthOver_" + g,
                        variable="p",
                        metric="Path Length from root",
                        proximal=neuroml.ProximalDetails(
                            translation_start="0")))

        ignore_chans = [
            'Ih', 'Ca_HVA', 'Ca_LVAst', 'Ca', "SKv3_1", "SK_E2",
            "CaDynamics_E2", "Nap_Et2", "Im", "K_Tst", "NaTa_t", "K_Pst",
            "NaTs2_t"
        ]

        # ignore_chans=['StochKv','StochKv_deterministic']
        ignore_chans = []

        bp, incl_chans = get_biophysical_properties(
            cell_info['e-type'],
            ignore_chans=ignore_chans,
            templates_json="../templates.json")

        cell.biophysical_properties = bp

        print("Set biophysical properties")

        notes = ''
        notes += \
            "\n\nExport of a cell model obtained from the BBP Neocortical" \
            "Microcircuit Collaboration Portal into NeuroML2" \
            "\n\n******************************************************\n*" \
            "  This export to NeuroML2 has not yet been fully validated!!" \
            "\n*  Use with caution!!\n***********************************" \
            "*******************\n\n"

        if len(ignore_chans) > 0:
            notes += "Ignored channels = %s\n\n" % ignore_chans

        notes += "For more information on this cell model see: " \
            "https://bbp.epfl.ch/nmc-portal/microcircuit#/metype/%s/" \
            "details\n\n" % cell_info['me-type']

        cell.notes = notes
        for channel in incl_chans:

            nml_doc.includes.append(neuroml.IncludeType(href="%s" % channel))

            if make_zips:
                print("Copying %s to zip folder" % channel)
                shutil.copyfile('../../NeuroML2/%s' % channel,
                                '%s/%s' % (local_nml2_cell_dir, channel))

        pynml.write_neuroml2_file(nml_doc, nml_cell_loc)

        stim_ref = 'stepcurrent3'
        stim_ref_hyp = '%s_hyp' % stim_ref
        stim_sim_duration = 3000
        stim_hyp_amp, stim_amp = get_stimulus_amplitudes(bbp_ref)
        stim_del = '700ms'
        stim_dur = '2000ms'

        new_net_loc = "%s/%s.%s.net.nml" % (local_nml2_cell_dir, bbp_ref,
                                            stim_ref)
        new_net_doc = pynml.read_neuroml2_file(nml_net_loc)

        new_net_doc.notes = notes

        stim_hyp = neuroml.PulseGenerator(id=stim_ref_hyp,
                                          delay="0ms",
                                          duration="%sms" % stim_sim_duration,
                                          amplitude=stim_hyp_amp)
        new_net_doc.pulse_generators.append(stim_hyp)
        stim = neuroml.PulseGenerator(id=stim_ref,
                                      delay=stim_del,
                                      duration=stim_dur,
                                      amplitude=stim_amp)
        new_net_doc.pulse_generators.append(stim)

        new_net = new_net_doc.networks[0]

        pop_id = new_net.populations[0].id
        pop_comp = new_net.populations[0].component
        input_list = neuroml.InputList(id="%s_input" % stim_ref_hyp,
                                       component=stim_ref_hyp,
                                       populations=pop_id)

        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        input_list = neuroml.InputList(id="%s_input" % stim_ref,
                                       component=stim_ref,
                                       populations=pop_id)

        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(cell_dir,
                                       new_net_loc,
                                       "network",
                                       stim_sim_duration,
                                       0.025,
                                       "LEMS_%s.xml" % cell_dir,
                                       local_nml2_cell_dir,
                                       copy_neuroml=False,
                                       seed=1234)

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        pop = neuroml.Population(id="Pop_%s" % bbp_ref,
                                 component=bbp_ref + '_0_0',
                                 type="populationList")

        inst = neuroml.Instance(id="0")
        pop.instances.append(inst)

        width = 6
        X = count % width
        Z = (count - X) / width
        inst.location = neuroml.Location(x=300 * X, y=0, z=300 * Z)

        count += 1

        if make_zips:
            zip_file = "%s/%s.zip" % (zips_dir, cell_dir)
            print("Creating zip file: %s" % zip_file)
            with zipfile.ZipFile(zip_file, 'w') as myzip:

                for next_file in os.listdir(local_nml2_cell_dir):
                    next_file = '%s/%s' % (local_nml2_cell_dir, next_file)
                    arcname = next_file[len(zips_dir):]
                    print("Adding : %s as %s" % (next_file, arcname))
                    myzip.write(next_file, arcname)

        os.chdir(old_cwd)

        return nml_cell_file, pop
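
# Hypothetical invocation sketch. process_celldir() also relies on module-level globals
# (make_zips, zips_dir, groups_info_file) defined elsewhere in the original script; the
# directory name below is a placeholder for a BBP cell model folder.
# process_celldir((1, 'SomeCellDir', 'NeuroML2', 10))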
def PerturbChanNML2(targetCell,noSteps,sim_duration,dt,mepFile,omtFile,targetNet,targetChannels="all",targetPath=None,save_to_file=None):
    """Test how the spiking behaviour of single-cell NML2 models is affected by conductance changes in given ion channels."""
    cell_nml2 = '%s.cell.nml'%targetCell
    document_cell = loaders.NeuroMLLoader.load(targetPath+cell_nml2)
    cell_obj=document_cell.cells[0]
    
    gInfo={}
    
    
    spikesDict={}
    spikesDict['expected']=getSpikes(leftIdent="spike times",targetFile=mepFile)
    spikesDict['observed']={}
    
    letChannel=False
    
    
    for channel_density in cell_obj.biophysical_properties.membrane_properties.channel_densities:
    
        if targetChannels !="all":
           if channel_density.ion_channel in targetChannels:
              letChannel=True
        else:
           letChannel=True
          
        if letChannel:
           targetChan=channel_density.ion_channel
           gInfo[targetChan]={}
           chan_str=channel_density.cond_density.split(" ") 
           gInfo[targetChan]['units']=chan_str[1]
           initial_value=float(chan_str[0])
           gInfo[targetChan]['values']=np.linspace(initial_value,0,noSteps)
           spikesDict['observed'][targetChan]={}
           for gValue in gInfo[targetChan]['values']:
               if gValue==initial_value:
                  targetOmt=omtFile
               else:
                  document_cell_inner=loaders.NeuroMLLoader.load(targetPath+cell_nml2)
                  cell_obj_inner=document_cell_inner.cells[0]
                  for channel_density_inner in cell_obj_inner.biophysical_properties.membrane_properties.channel_densities:
                      if channel_density_inner.ion_channel==channel_density.ion_channel:
                         target_density=channel_density_inner
                         
                  testFile="../.test.ChannelTest.jnmlnrn.omt"
                  subprocess.call(["cp %s %s"%(omtFile,testFile)],shell=True)
                  targetOmt=testFile
                  cell_id="%sG%s"%(target_density.id,str(gValue).replace(".",""))
                  new_cell_nml2="%s.cell.nml"%cell_id
                  document_cell_inner.id=cell_id
                  cell_obj_inner.id=cell_id
                  target_density.cond_density=str(gValue)+" "+gInfo[targetChan]['units']
                  writers.NeuroMLWriter.write(document_cell_inner, targetPath+new_cell_nml2)
                  
                  src_files = os.listdir(targetPath)
                  if targetNet in src_files:
                     net_doc = pynml.read_neuroml2_file(targetPath+targetNet)
                     net_doc.id="Test_%s"%cell_id
                     net=net_doc.networks[0]
                     pop=net.populations[0]
                     popID=pop.id
                     net.id=net_doc.id
                     net_file = '%s.net.nml'%(net_doc.id)
                     netPath=targetPath+net_file
                     writers.NeuroMLWriter.write(net_doc, netPath)
                     with open(netPath, 'r') as file:
                          lines = file.readlines()
                     count=0
                     for line in lines:
                         if targetCell in line:
                            new_line=line.replace(targetCell,cell_id)
                            lines[count]=new_line
                         count+=1
                     with open(netPath, 'w') as file:
                          file.writelines( lines )
                     lems_string="LEMS_%s.xml"%net_doc.id
                     sim_string="Sim_"+net_doc.id
                     generate_lems_file_for_neuroml(sim_string, 
                               netPath, 
                               net_doc.id, 
                               sim_duration,
                               dt, 
                               lems_string,
                               targetPath,
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               copy_neuroml = False,
                               seed = 1234)
                               
                     with open(targetOmt, 'r') as file:
                          lines = file.readlines()
                     count=0
                     for line in lines:
                         if "target" in line:
                            lines[count]=Replace(line,"LEMS","xml",lems_string)
                         if "path" in line:
                            lines[count]=Replace(line,"Sim_","dat",sim_string+".%s.v.dat"%popID)
                         count+=1
                     with open(targetOmt, 'w') as file:
                          file.writelines( lines )
                               
                   
   
               out_file=open(r'../temp_results.txt','w')   
               command_line="omv test  %s"%targetOmt
               print("Running %s..."%command_line+" after setting conDensity of %s to %f"%(targetChan,gValue))
               subprocess.call([command_line],shell=True,stdout=out_file)
               out_file.close()
               observed_spikes=getSpikes(leftIdent="observed data",targetFile=r'../temp_results.txt',rightIdent="and")
               print "Observed spikes:"+str(observed_spikes)
               print "Expected spikes:"+str(spikesDict['expected'])
               if len(observed_spikes) != len(spikesDict['expected']):
                  print "The number of observed spikes is not equal to the number of expected spikes; analysis for %s will be terminated"%targetChan
                  break
               else:
                  spikesDict['observed'][targetChan][str(gValue)]=observed_spikes
            
           letChannel=False   
    print("will generate plots for how differences between expected and observed spike times vary with conductance level of a given ion channel")     
    spike_df_vs_gmax(gInfo,spikesDict,save_to_file)
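
# Hypothetical call sketch; all file and channel names below are placeholders. The .mep
# and .omt files are assumed to be the OMV expected-results and test-configuration files
# that the function reads and rewrites above.
# PerturbChanNML2("MyCell", noSteps=5, sim_duration=1000, dt=0.01,
#                 mepFile="../.test.mep", omtFile="../.test.jnmlnrn.omt",
#                 targetNet="MyNet.net.nml", targetChannels=["Kdr"],
#                 targetPath="../NeuroML2/")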
Example #9
def generate_and_run(simulation,
                     simulator,
                     network=None,
                     return_results=False,
                     base_dir=None,
                     target_dir=None,
                     num_processors=1):
    """
    Generates the network in the specified simulator and runs, if appropriate
    """

    if network == None:
        network = load_network_json(simulation.network)

    print_v("Generating network %s and running in simulator: %s..." %
            (network.id, simulator))

    if simulator == 'NEURON':

        _generate_neuron_files_from_neuroml(network,
                                            dir_for_mod_files=target_dir)

        from neuromllite.NeuronHandler import NeuronHandler

        nrn_handler = NeuronHandler()

        for c in network.cells:
            if c.neuroml2_source_file:
                src_dir = os.path.dirname(
                    os.path.abspath(c.neuroml2_source_file))
                nrn_handler.executeHoc('load_file("%s/%s.hoc")' %
                                       (src_dir, c.id))

        generate_network(network, nrn_handler, generate_network, base_dir)
        if return_results:
            raise NotImplementedError(
                "Reloading results not supported in Neuron yet...")

    elif simulator.lower() == 'sonata':  # Will not "run" obviously...

        from neuromllite.SonataHandler import SonataHandler

        sonata_handler = SonataHandler()

        generate_network(network,
                         sonata_handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with Sonata...")

    elif simulator.lower().startswith('graph'):  # Will not "run" obviously...

        from neuromllite.GraphVizHandler import GraphVizHandler, engines

        try:
            if simulator[-1].isalpha():
                engine = engines[simulator[-1]]
                level = int(simulator[5:-1])
            else:
                engine = 'dot'
                level = int(simulator[5:])

        except Exception as e:
            print_v("Error parsing: %s: %s" % (simulator, e))
            print_v(
                "Graphs of the network structure can be generated at many levels of detail (1-6, required) and laid out using GraphViz engines (d - dot (default); c - circo; n - neato; f - fdp), so use: -graph3c, -graph2, -graph4f etc."
            )
            return

        handler = GraphVizHandler(level, engine=engine, nl_network=network)

        generate_network(network,
                         handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with GraphViz...")

    elif simulator.lower().startswith('matrix'):  # Will not "run" obviously...

        from neuromllite.MatrixHandler import MatrixHandler

        try:
            level = int(simulator[6:])
        except:
            print_v("Error parsing: %s" % simulator)
            print_v(
                "Matrices of the network structure can be generated at many levels of detail (1-n, required), so use: -matrix1, -matrix2, etc."
            )
            return

        handler = MatrixHandler(level, nl_network=network)

        generate_network(network,
                         handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with MatrixHandler...")

    elif simulator.startswith('PyNN'):

        #_generate_neuron_files_from_neuroml(network)
        simulator_name = simulator.split('_')[1].lower()

        from neuromllite.PyNNHandler import PyNNHandler

        pynn_handler = PyNNHandler(simulator_name, simulation.dt, network.id)

        syn_cell_params = {}
        for proj in network.projections:

            synapse = network.get_child(proj.synapse, 'synapses')
            post_pop = network.get_child(proj.postsynaptic, 'populations')

            if not post_pop.component in syn_cell_params:
                syn_cell_params[post_pop.component] = {}
            for p in synapse.parameters:
                post = ''
                if synapse.pynn_receptor_type == "excitatory":
                    post = '_E'
                elif synapse.pynn_receptor_type == "inhibitory":
                    post = '_I'
                syn_cell_params[post_pop.component][
                    '%s%s' % (p, post)] = synapse.parameters[p]

        cells = {}
        for c in network.cells:
            if c.pynn_cell:
                cell_params = {}
                if c.parameters:
                    for p in c.parameters:
                        cell_params[p] = evaluate(c.parameters[p],
                                                  network.parameters)

                dont_set_here = [
                    'tau_syn_E', 'e_rev_E', 'tau_syn_I', 'e_rev_I'
                ]
                for d in dont_set_here:
                    if d in c.parameters:
                        raise Exception(
                            'Synaptic parameters like %s should be set '
                            'in individual synapses, not in the list of '
                            'parameters associated with the cell' % d)
                if c.id in syn_cell_params:
                    cell_params.update(syn_cell_params[c.id])
                print_v("Creating cell with params: %s" % cell_params)
                exec('cells["%s"] = pynn_handler.sim.%s(**cell_params)' %
                     (c.id, c.pynn_cell))

                if c.pynn_cell != 'SpikeSourcePoisson':
                    exec(
                        "cells['%s'].default_initial_values['v'] = cells['%s'].parameter_space['v_rest'].base_value"
                        % (c.id, c.id))

        pynn_handler.set_cells(cells)

        receptor_types = {}
        for s in network.synapses:
            if s.pynn_receptor_type:
                receptor_types[s.id] = s.pynn_receptor_type

        pynn_handler.set_receptor_types(receptor_types)

        for input_source in network.input_sources:
            if input_source.pynn_input:
                pynn_handler.add_input_source(input_source)

        generate_network(network,
                         pynn_handler,
                         always_include_props=True,
                         base_dir=base_dir)

        for pid in pynn_handler.populations:
            pop = pynn_handler.populations[pid]
            if 'all' in simulation.recordTraces or pop.label in simulation.recordTraces:
                if pop.can_record('v'):
                    pop.record('v')

        pynn_handler.sim.run(simulation.duration)
        pynn_handler.sim.end()

        traces = {}
        events = {}

        if not 'NeuroML' in simulator:
            from neo.io import PyNNTextIO

            for pid in pynn_handler.populations:
                pop = pynn_handler.populations[pid]

                if 'all' in simulation.recordTraces or pop.label in simulation.recordTraces:

                    filename = "%s.%s.v.dat" % (simulation.id, pop.label)
                    all_columns = []
                    print_v("Writing data for %s to %s" %
                            (pop.label, filename))
                    for i in range(len(pop)):
                        if pop.can_record('v'):
                            ref = '%s[%i]' % (pop.label, i)
                            traces[ref] = []
                            data = pop.get_data('v', gather=False)
                            for segment in data.segments:
                                vm = segment.analogsignals[0].transpose()[i]

                                if len(all_columns) == 0:
                                    tt = np.array([
                                        t * simulation.dt / 1000.
                                        for t in range(len(vm))
                                    ])
                                    all_columns.append(tt)
                                vm_si = [float(v / 1000.) for v in vm]
                                traces[ref] = vm_si
                                all_columns.append(vm_si)

                            times_vm = np.array(all_columns).transpose()

                    np.savetxt(filename, times_vm, delimiter='\t', fmt='%s')

        if return_results:
            _print_result_info(traces, events)
            return traces, events

    elif simulator == 'NetPyNE':

        if target_dir == None:
            target_dir = './'

        _generate_neuron_files_from_neuroml(network,
                                            dir_for_mod_files=target_dir)

        from netpyne import specs
        from netpyne import sim
        # Note NetPyNE from this branch is required: https://github.com/Neurosim-lab/netpyne/tree/neuroml_updates
        from netpyne.conversion.neuromlFormat import NetPyNEBuilder

        import pprint
        pp = pprint.PrettyPrinter(depth=6)

        netParams = specs.NetParams()
        simConfig = specs.SimConfig()
        netpyne_handler = NetPyNEBuilder(netParams,
                                         simConfig=simConfig,
                                         verbose=True)

        generate_network(network, netpyne_handler, base_dir=base_dir)

        netpyne_handler.finalise()

        simConfig = specs.SimConfig()
        simConfig.tstop = simulation.duration
        simConfig.duration = simulation.duration
        simConfig.dt = simulation.dt
        simConfig.seed = simulation.seed
        simConfig.recordStep = simulation.dt

        simConfig.recordCells = ['all']
        simConfig.recordTraces = {}

        for pop in netpyne_handler.popParams.values():
            if 'all' in simulation.recordTraces or pop.id in simulation.recordTraces:
                for i in pop['cellsList']:
                    id = pop['pop']
                    index = i['cellLabel']
                    simConfig.recordTraces['v_%s_%s' % (id, index)] = {
                        'sec': 'soma',
                        'loc': 0.5,
                        'var': 'v',
                        'conds': {
                            'pop': id,
                            'cellLabel': index
                        }
                    }

        simConfig.saveDat = True

        print_v("NetPyNE netParams: ")
        pp.pprint(netParams.todict())
        #print_v("NetPyNE simConfig: ")
        #pp.pprint(simConfig.todict())

        sim.initialize(
            netParams,
            simConfig)  # create network object and set cfg and net params

        sim.net.createPops()
        cells = sim.net.createCells(
        )  # instantiate network cells based on defined populations

        for proj_id in netpyne_handler.projection_infos.keys():
            projName, prePop, postPop, synapse, ptype = netpyne_handler.projection_infos[
                proj_id]
            print_v("Creating connections for %s (%s): %s->%s via %s" %
                    (projName, ptype, prePop, postPop, synapse))

            preComp = netpyne_handler.pop_ids_vs_components[prePop]

            for conn in netpyne_handler.connections[projName]:

                pre_id, pre_seg, pre_fract, post_id, post_seg, post_fract, delay, weight = conn

                #connParam = {'delay':delay,'weight':weight,'synsPerConn':1, 'sec':post_seg, 'loc':post_fract, 'threshold':threshold}
                connParam = {
                    'delay': delay,
                    'weight': weight,
                    'synsPerConn': 1,
                    'sec': post_seg,
                    'loc': post_fract
                }

                if ptype == 'electricalProjection':

                    if weight != 1:
                        raise Exception(
                            'Cannot yet support inputs where weight !=1!')
                    connParam = {
                        'synsPerConn': 1,
                        'sec': post_seg,
                        'loc': post_fract,
                        'gapJunction': True,
                        'weight': weight
                    }
                else:
                    connParam = {
                        'delay': delay,
                        'weight': weight,
                        'synsPerConn': 1,
                        'sec': post_seg,
                        'loc': post_fract
                    }
                    #'threshold': threshold}

                connParam['synMech'] = synapse

                if post_id in sim.net.gid2lid:  # check if postsyn is in this node's list of gids
                    sim.net._addCellConn(connParam, pre_id, post_id)

        stims = sim.net.addStims(
        )  # add external stimulation to cells (IClamps etc)
        simData = sim.setupRecording(
        )  # setup variables to record for each cell (spikes, V traces, etc)
        sim.runSim()  # run parallel Neuron simulation
        sim.gatherData()  # gather spiking data and cell info from each node
        sim.saveData(
        )  # save params, cell info and sim output to file (pickle,mat,txt,etc)

        if return_results:
            raise NotImplementedError(
                "Reloading results not supported in NetPyNE yet...")

    elif simulator == 'jNeuroML' or simulator == 'jNeuroML_NEURON' or simulator == 'jNeuroML_NetPyNE':

        from pyneuroml.lems import generate_lems_file_for_neuroml
        from pyneuroml import pynml

        lems_file_name = 'LEMS_%s.xml' % simulation.id

        nml_file_name, nml_doc = generate_neuroml2_from_network(
            network, base_dir=base_dir, target_dir=target_dir)
        included_files = ['PyNN.xml']

        for c in network.cells:
            if c.lems_source_file:
                included_files.append(c.lems_source_file)
        '''
        if network.cells:
            for c in network.cells:
                included_files.append(c.neuroml2_source_file)
        '''
        if network.synapses:
            for s in network.synapses:
                if s.lems_source_file:
                    included_files.append(s.lems_source_file)

        print_v("Generating LEMS file prior to running in %s" % simulator)

        pops_plot_save = []
        pops_spike_save = []
        gen_plots_for_quantities = {}
        gen_saves_for_quantities = {}

        for p in network.populations:

            if simulation.recordTraces and ('all' in simulation.recordTraces or
                                            p.id in simulation.recordTraces):
                pops_plot_save.append(p.id)

            if simulation.recordSpikes and ('all' in simulation.recordSpikes or
                                            p.id in simulation.recordSpikes):
                pops_spike_save.append(p.id)

            if simulation.recordRates and ('all' in simulation.recordRates
                                           or p.id in simulation.recordRates):
                size = evaluate(p.size, network.parameters)
                for i in range(size):
                    quantity = '%s/%i/%s/r' % (p.id, i, p.component)
                    gen_plots_for_quantities['%s_%i_r' %
                                             (p.id, i)] = [quantity]
                    gen_saves_for_quantities['%s_%i.r.dat' %
                                             (p.id, i)] = [quantity]

            if simulation.recordVariables:
                for var in simulation.recordVariables:
                    to_rec = simulation.recordVariables[var]
                    if ('all' in to_rec or p.id in to_rec):
                        size = evaluate(p.size, network.parameters)
                        for i in range(size):
                            quantity = '%s/%i/%s/%s' % (p.id, i, p.component,
                                                        var)
                            gen_plots_for_quantities['%s_%i_%s' %
                                                     (p.id, i, var)] = [
                                                         quantity
                                                     ]
                            gen_saves_for_quantities['%s_%i.%s.dat' %
                                                     (p.id, i, var)] = [
                                                         quantity
                                                     ]

        generate_lems_file_for_neuroml(
            simulation.id,
            nml_file_name,
            network.id,
            simulation.duration,
            simulation.dt,
            lems_file_name,
            target_dir=target_dir if target_dir else '.',
            nml_doc=
            nml_doc,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
            include_extra_files=included_files,
            gen_plots_for_all_v=False,
            plot_all_segments=False,
            gen_plots_for_quantities=
            gen_plots_for_quantities,  # Dict with displays vs lists of quantity paths
            gen_plots_for_only_populations=
            pops_plot_save,  # List of populations, all pops if = []
            gen_saves_for_all_v=False,
            save_all_segments=False,
            gen_saves_for_only_populations=
            pops_plot_save,  # List of populations, all pops if = []
            gen_saves_for_quantities=
            gen_saves_for_quantities,  # Dict with file names vs lists of quantity paths
            gen_spike_saves_for_all_somas=False,
            gen_spike_saves_for_only_populations=
            pops_spike_save,  # List of populations, all pops if = []
            gen_spike_saves_for_cells=
            {},  # Dict with file names vs lists of quantity paths
            spike_time_format='ID_TIME',
            copy_neuroml=True,
            lems_file_generate_seed=12345,
            report_file_name='report.%s.txt' % simulation.id,
            simulation_seed=simulation.seed if simulation.seed else 12345,
            verbose=True)

        lems_file_name = _locate_file(lems_file_name, target_dir)

        if simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(
                lems_file_name,
                nogui=True,
                load_saved_data=return_results,
                reload_events=return_results)
        elif simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(
                lems_file_name,
                nogui=True,
                load_saved_data=return_results,
                reload_events=return_results)
        elif simulator == 'jNeuroML_NetPyNE':
            results = pynml.run_lems_with_jneuroml_netpyne(
                lems_file_name,
                nogui=True,
                verbose=True,
                load_saved_data=return_results,
                reload_events=return_results,
                num_processors=num_processors)

        print_v("Finished running LEMS file %s in %s (returning results: %s)" %
                (lems_file_name, simulator, return_results))

        if return_results:
            traces, events = results
            _print_result_info(traces, events)
            return results  # traces, events =
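
# Hypothetical invocation sketch (all names are placeholders). A neuromllite Simulation
# object `sim` and its Network would normally be loaded from their JSON descriptions
# before calling, e.g.:
# net = load_network_json('Example.net.json')
# generate_and_run(sim, simulator='jNeuroML_NEURON', network=net,
#                  return_results=True, target_dir='./temp/')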
Example #10
def PerturbChanNML2(targetCell,
                    noSteps,
                    sim_duration,
                    dt,
                    mepFile,
                    omtFile,
                    targetNet,
                    targetChannels="all",
                    targetPath=None,
                    save_to_file=None):
    """Test how the spiking behaviour of single-cell NML2 models is affected by conductance changes in given ion channels."""
    cell_nml2 = '%s.cell.nml' % targetCell
    document_cell = loaders.NeuroMLLoader.load(targetPath + cell_nml2)
    cell_obj = document_cell.cells[0]

    gInfo = {}

    spikesDict = {}
    spikesDict['expected'] = getSpikes(leftIdent="spike times",
                                       targetFile=mepFile)
    spikesDict['observed'] = {}

    letChannel = False

    for channel_density in cell_obj.biophysical_properties.membrane_properties.channel_densities:

        if targetChannels != "all":
            if channel_density.ion_channel in targetChannels:
                letChannel = True
        else:
            letChannel = True

        if letChannel:
            targetChan = channel_density.ion_channel
            gInfo[targetChan] = {}
            chan_str = channel_density.cond_density.split(" ")
            gInfo[targetChan]['units'] = chan_str[1]
            initial_value = float(chan_str[0])
            gInfo[targetChan]['values'] = np.linspace(initial_value, 0,
                                                      noSteps)
            spikesDict['observed'][targetChan] = {}
            for gValue in gInfo[targetChan]['values']:
                if gValue == initial_value:
                    targetOmt = omtFile
                else:
                    document_cell_inner = loaders.NeuroMLLoader.load(
                        targetPath + cell_nml2)
                    cell_obj_inner = document_cell_inner.cells[0]
                    for channel_density_inner in cell_obj_inner.biophysical_properties.membrane_properties.channel_densities:
                        if channel_density_inner.ion_channel == channel_density.ion_channel:
                            target_density = channel_density_inner

                    testFile = "../.test.ChannelTest.jnmlnrn.omt"
                    subprocess.call(["cp %s %s" % (omtFile, testFile)],
                                    shell=True)
                    targetOmt = testFile
                    cell_id = "%sG%s" % (target_density.id,
                                         str(gValue).replace(".", ""))
                    new_cell_nml2 = "%s.cell.nml" % cell_id
                    document_cell_inner.id = cell_id
                    cell_obj_inner.id = cell_id
                    target_density.cond_density = str(
                        gValue) + " " + gInfo[targetChan]['units']
                    writers.NeuroMLWriter.write(document_cell_inner,
                                                targetPath + new_cell_nml2)

                    src_files = os.listdir(targetPath)
                    if targetNet in src_files:
                        net_doc = pynml.read_neuroml2_file(targetPath +
                                                           targetNet)
                        net_doc.id = "Test_%s" % cell_id
                        net = net_doc.networks[0]
                        pop = net.populations[0]
                        popID = pop.id
                        net.id = net_doc.id
                        net_file = '%s.net.nml' % (net_doc.id)
                        netPath = targetPath + net_file
                        writers.NeuroMLWriter.write(net_doc, netPath)
                        with open(netPath, 'r') as file:
                            lines = file.readlines()
                        count = 0
                        for line in lines:
                            if targetCell in line:
                                new_line = line.replace(targetCell, cell_id)
                                lines[count] = new_line
                            count += 1
                        with open(netPath, 'w') as file:
                            file.writelines(lines)
                        lems_string = "LEMS_%s.xml" % net_doc.id
                        sim_string = "Sim_" + net_doc.id
                        generate_lems_file_for_neuroml(
                            sim_string,
                            netPath,
                            net_doc.id,
                            sim_duration,
                            dt,
                            lems_string,
                            targetPath,
                            gen_plots_for_all_v=True,
                            plot_all_segments=False,
                            gen_saves_for_all_v=True,
                            save_all_segments=False,
                            copy_neuroml=False,
                            seed=1234)

                        with open(targetOmt, 'r') as file:
                            lines = file.readlines()
                        count = 0
                        for line in lines:
                            if "target" in line:
                                lines[count] = Replace(line, "LEMS", "xml",
                                                       lems_string)
                            if "path" in line:
                                lines[count] = Replace(
                                    line, "Sim_", "dat",
                                    sim_string + ".%s.v.dat" % popID)
                            count += 1
                        with open(targetOmt, 'w') as file:
                            file.writelines(lines)

                out_file = open(r'../temp_results.txt', 'w')
                command_line = "omv test %s" % targetOmt
                print(
                    "Running %s..." % command_line +
                    " after setting cond_density of %s to %f" %
                    (targetChan, gValue))
                subprocess.call([command_line], shell=True, stdout=out_file)
                out_file.close()
                observed_spikes = getSpikes(leftIdent="observed data",
                                            targetFile=r'../temp_results.txt',
                                            rightIdent="and")
                print("Observed spikes: " + str(observed_spikes))
                print("Expected spikes: " + str(spikesDict['expected']))
                if len(observed_spikes) != len(spikesDict['expected']):
                    print("The number of observed spikes is not equal to the number of expected spikes; analysis for %s will be terminated" % targetChan)
                    break
                else:
                    spikesDict['observed'][targetChan][str(
                        gValue)] = observed_spikes

            letChannel = False
    print(
        "Will generate plots showing how the differences between expected and observed spike times vary with the conductance level of each ion channel"
    )
    spike_df_vs_gmax(gInfo, spikesDict, save_to_file)
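
# A hypothetical usage sketch for PerturbChanNML2; all file names, ids and
# values below are illustrative only, not taken from the example above.
PerturbChanNML2(targetCell="pyr_4_sym",
                noSteps=5,
                sim_duration=1000,
                dt=0.025,
                mepFile="../.test.mep",
                omtFile="../.test.jnmlnrn.omt",
                targetNet="pyr_4_sym.net.nml",
                targetChannels=["Na_pyr"],
                targetPath="../generatedNeuroML2/",
                save_to_file="spike_diff_vs_gmax.png")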
Example #11
                         populations=pop_id)

    input = neuroml.Input(id=0, 
                          target="../%s/0/%s"%(pop_id, pop_comp), 
                          destination="synapses")  

    input_list.input.append(input)
    new_net.input_lists.append(input_list)
    
    pynml.write_neuroml2_file(new_net_doc, new_net_loc)

    generate_lems_file_for_neuroml(model_id,
                                   new_net_loc,
                                   "network",
                                   pref_duration_ms,
                                   pref_dt_ms, # used in Allen Neuron runs
                                   "LEMS_%s.xml"%model_id,
                                   nml2_cell_dir,
                                   copy_neuroml = False,
                                   lems_file_generate_seed=1234)
    
    
    net_doc.includes.append(neuroml.IncludeType(nml_cell_file))

    pop = neuroml.Population(id="Pop_%s"%model_id, component=cell.id, type="populationList")

    net.populations.append(pop)

    inst = neuroml.Instance(id="0")
    pop.instances.append(inst)
        for pop in tvbr.population_ids:
            r = 'Vars_%s' % (pop)
            gen_plots_for_quantities[r] = []
            for sv in model.state_variables:
                q = '%s/0/%s/%s' % (pop, tvbr.component, sv)
                gen_plots_for_quantities[r].append(q)

        generate_lems_file_for_neuroml(
            lems_ref,
            nml_file_name,
            conn_id,
            1000,
            0.1,
            lems_file_name,
            target_dir='../NeuroML2',
            nml_doc=nml_doc,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
            gen_plots_for_all_v=False,
            plot_all_segments=False,
            gen_plots_for_quantities=gen_plots_for_quantities,  # Dict with displays vs lists of quantity paths
            gen_saves_for_all_v=False,
            save_all_segments=False,
            gen_saves_for_quantities=gen_saves_for_quantities,  # Dict with file names vs lists of quantity paths
            copy_neuroml=True,
            lems_file_generate_seed=12345,
            report_file_name='report.%s.txt' % lems_ref,
            simulation_seed=6789,
            verbose=True)
Example #13
# Write NML2 file

nml_file = '../generatedNeuroML2/%s.nml'%ref
writers.NeuroMLWriter.write(nml_doc, nml_file)


print("Written network file to: "+nml_file)

# Validate the NeuroML 

from neuroml.utils import validate_neuroml2

validate_neuroml2(nml_file)


# Generate the LEMS file to simulate network (NEURON only...)

generate_lems_file_for_neuroml('sim_%s'%ref, 
                                nml_file, 
                                net.id, 
                                1000, 
                                0.01, 
                                'LEMS_%s.xml'%ref,
                                '../generatedNeuroML2',
                                gen_plots_for_all_v = False,
                                gen_plots_for_only = [pyr_cells_pop0.id, pyr_cells_pop1.id, pyr_cells_pop2.id, pyr_cells_pop3.id],
                                gen_saves_for_all_v = False,
                                gen_saves_for_only = [pyr_cells_pop0.id, pyr_cells_pop1.id, pyr_cells_pop2.id, pyr_cells_pop3.id],
                                copy_neuroml = False,
                                seed=1234)
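
# Optionally, the generated LEMS file could be run straight away; a minimal
# sketch (not part of the original example), assuming the jNeuroML/NEURON
# backend is available and reusing 'ref' from above.
from pyneuroml import pynml

results = pynml.run_lems_with_jneuroml_neuron('LEMS_%s.xml' % ref,
                                              exec_in_dir='../generatedNeuroML2',
                                              nogui=True,
                                              load_saved_data=True,
                                              plot=False)
print("Reloaded traces: %s" % list(results.keys()))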
Example #14
def process_celldir(inputs):
    """Process cell directory"""

    count, cell_dir, nml2_cell_dir, total_count = inputs
    local_nml2_cell_dir = os.path.join("..", nml2_cell_dir)

    print(
        "\n\n************************************************************\n\n"
        "Parsing %s (cell %i/%i)\n" % (cell_dir, count, total_count)
    )

    if os.path.isdir(cell_dir):
        old_cwd = os.getcwd()
        os.chdir(cell_dir)
    else:
        old_cwd = os.getcwd()
        os.chdir("../" + cell_dir)

    if make_zips:
        nml2_cell_dir = "%s/%s" % (zips_dir, cell_dir)
        if not os.path.isdir(nml2_cell_dir):
            os.mkdir(nml2_cell_dir)

    print("Generating into %s" % nml2_cell_dir)

    bbp_ref = None

    template_file = open("template.hoc", "r")
    for line in template_file:
        if line.startswith("begintemplate "):
            bbp_ref = line.split(" ")[1].strip()
            print(" > Assuming cell in directory %s is in a template named %s" % (cell_dir, bbp_ref))

    load_cell_file = "loadcell.hoc"

    variables = {}

    variables["cell"] = bbp_ref
    variables["groups_info_file"] = groups_info_file

    template = """
///////////////////////////////////////////////////////////////////////////////
//
//   NOTE: This file is not part of the original BBP cell model distribution
//   It has been generated by ../ParseAll.py to facilitate loading of the cell
//   into NEURON for exporting the model morphology to NeuroML2
//
//////////////////////////////////////////////////////////////////////////////

load_file("stdrun.hoc")

objref cvode
cvode = new CVode()
cvode.active(1)

//======================== settings ===================================

v_init = -80

hyp_amp = -0.062866
step_amp = 0.3112968
tstop = 3000

//=================== creating cell object ===========================
load_file("import3d.hoc")
objref cell

// Using 1 to force loading of the file, in case file with same name was loaded
// before...
load_file(1, "constants.hoc")
load_file(1, "morphology.hoc")
load_file(1, "biophysics.hoc")
print "Loaded morphology and biophysics..."

load_file(1, "synapses/synapses.hoc")
load_file(1, "template.hoc")
print "Loaded template..."

load_file(1, "createsimulation.hoc")


create_cell(0)
print "Created new cell using loadcell.hoc: {{ cell }}"

define_shape()

wopen("{{ groups_info_file }}")

fprint("//Saving information on groups in this cell...\\n")

fprint("- somatic\\n")
forsec {{ cell }}[0].somatic {
    fprint("%s\\n",secname())
}

fprint("- basal\\n")
forsec {{ cell }}[0].basal {
    fprint("%s\\n",secname())
}

fprint("- axonal\\n")
forsec {{ cell }}[0].axonal {
    fprint("%s\\n",secname())
}
fprint("- apical\\n")
forsec {{ cell }}[0].apical {
    fprint("%s\\n",secname())
}
wopen()
        """

    t = Template(template)

    contents = t.render(variables)

    load_cell = open(load_cell_file, "w")
    load_cell.write(contents)
    load_cell.close()

    print(" > Written %s" % load_cell_file)

    if os.path.isfile(load_cell_file):

        cell_info = parse_cell_info_file(cell_dir)

        nml_file_name = "%s.net.nml" % bbp_ref
        nml_net_loc = "%s/%s" % (local_nml2_cell_dir, nml_file_name)
        nml_cell_file = "%s_0_0.cell.nml" % bbp_ref
        nml_cell_loc = "%s/%s" % (local_nml2_cell_dir, nml_cell_file)

        print(" > Loading %s and exporting to %s" % (load_cell_file, nml_net_loc))

        export_to_neuroml2(load_cell_file, nml_net_loc, separateCellFiles=True, includeBiophysicalProperties=False)

        print(" > Exported to: %s and %s using %s" % (nml_net_loc, nml_cell_loc, load_cell_file))

        nml_doc = pynml.read_neuroml2_file(nml_cell_loc)

        cell = nml_doc.cells[0]

        print(" > Adding groups from: %s" % groups_info_file)
        groups = {}
        current_group = None
        for line in open(groups_info_file):
            if not line.startswith("//"):
                if line.startswith("- "):
                    current_group = line[2:-1]
                    print(" > Adding group: [%s]" % current_group)
                    groups[current_group] = []
                else:
                    section = line.split(".")[1].strip()
                    segment_group = section.replace("[", "_").replace("]", "")
                    groups[current_group].append(segment_group)

        for g in groups.keys():
            new_seg_group = neuroml.SegmentGroup(id=g)
            cell.morphology.segment_groups.append(new_seg_group)
            for sg in groups[g]:
                new_seg_group.includes.append(neuroml.Include(sg))
            if g in ["basal", "apical"]:
                new_seg_group.inhomogeneous_parameters.append(
                    neuroml.InhomogeneousParameter(
                        id="PathLengthOver_" + g,
                        variable="p",
                        metric="Path Length from root",
                        proximal=neuroml.ProximalDetails(translation_start="0"),
                    )
                )

        ignore_chans = [
            "Ih",
            "Ca_HVA",
            "Ca_LVAst",
            "Ca",
            "SKv3_1",
            "SK_E2",
            "CaDynamics_E2",
            "Nap_Et2",
            "Im",
            "K_Tst",
            "NaTa_t",
            "K_Pst",
            "NaTs2_t",
        ]

        # ignore_chans=['StochKv','StochKv_deterministic']
        ignore_chans = []

        bp, incl_chans = get_biophysical_properties(
            cell_info["e-type"], ignore_chans=ignore_chans, templates_json="../templates.json"
        )

        cell.biophysical_properties = bp

        print("Set biophysical properties")

        notes = ""
        notes += (
            "\n\nExport of a cell model obtained from the BBP Neocortical "
            "Microcircuit Collaboration Portal into NeuroML2"
            "\n\n******************************************************\n*"
            "  This export to NeuroML2 has not yet been fully validated!!"
            "\n*  Use with caution!!\n***********************************"
            "*******************\n\n"
        )

        if len(ignore_chans) > 0:
            notes += "Ignored channels = %s\n\n" % ignore_chans

        notes += (
            "For more information on this cell model see: "
            "https://bbp.epfl.ch/nmc-portal/microcircuit#/metype/%s/"
            "details\n\n" % cell_info["me-type"]
        )

        cell.notes = notes
        for channel in incl_chans:

            nml_doc.includes.append(neuroml.IncludeType(href="%s" % channel))

            if make_zips:
                print("Copying %s to zip folder" % channel)
                shutil.copyfile("../../NeuroML2/%s" % channel, "%s/%s" % (local_nml2_cell_dir, channel))

        pynml.write_neuroml2_file(nml_doc, nml_cell_loc)

        stim_ref = "stepcurrent3"
        stim_ref_hyp = "%s_hyp" % stim_ref
        stim_sim_duration = 3000
        stim_hyp_amp, stim_amp = get_stimulus_amplitudes(bbp_ref)
        stim_del = "700ms"
        stim_dur = "2000ms"

        new_net_loc = "%s/%s.%s.net.nml" % (local_nml2_cell_dir, bbp_ref, stim_ref)
        new_net_doc = pynml.read_neuroml2_file(nml_net_loc)

        new_net_doc.notes = notes

        stim_hyp = neuroml.PulseGenerator(
            id=stim_ref_hyp, delay="0ms", duration="%sms" % stim_sim_duration, amplitude=stim_hyp_amp
        )
        new_net_doc.pulse_generators.append(stim_hyp)
        stim = neuroml.PulseGenerator(id=stim_ref, delay=stim_del, duration=stim_dur, amplitude=stim_amp)
        new_net_doc.pulse_generators.append(stim)

        new_net = new_net_doc.networks[0]

        pop_id = new_net.populations[0].id
        pop_comp = new_net.populations[0].component
        input_list = neuroml.InputList(id="%s_input" % stim_ref_hyp, component=stim_ref_hyp, populations=pop_id)

        syn_input = neuroml.Input(id=0, target="../%s/0/%s" % (pop_id, pop_comp), destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        input_list = neuroml.InputList(id="%s_input" % stim_ref, component=stim_ref, populations=pop_id)

        syn_input = neuroml.Input(id=0, target="../%s/0/%s" % (pop_id, pop_comp), destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(
            cell_dir,
            new_net_loc,
            "network",
            stim_sim_duration,
            0.025,
            "LEMS_%s.xml" % cell_dir,
            local_nml2_cell_dir,
            copy_neuroml=False,
            seed=1234,
        )

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        pop = neuroml.Population(id="Pop_%s" % bbp_ref, component=bbp_ref + "_0_0", type="populationList")

        inst = neuroml.Instance(id="0")
        pop.instances.append(inst)

        width = 6
        X = count % width
        Z = (count - X) / width
        inst.location = neuroml.Location(x=300 * X, y=0, z=300 * Z)

        count += 1

        if make_zips:
            zip_file = "%s/%s.zip" % (zips_dir, cell_dir)
            print("Creating zip file: %s" % zip_file)
            with zipfile.ZipFile(zip_file, "w") as myzip:

                for next_file in os.listdir(local_nml2_cell_dir):
                    next_file = "%s/%s" % (local_nml2_cell_dir, next_file)
                    arcname = next_file[len(zips_dir) :]
                    print("Adding : %s as %s" % (next_file, arcname))
                    myzip.write(next_file, arcname)

        os.chdir(old_cwd)

        return nml_cell_file, pop
def analyse_cell(dataset_id,
                 type,
                 info,
                 nogui=False,
                 densities=False,
                 analysis_dir='../../data/'):

    reference = '%s_%s' % (type, dataset_id)
    cell_file = '%s/%s.cell.nml' % (type, reference)

    print(
        "====================================\n\n   Analysing cell: %s, dataset %s\n"
        % (cell_file, dataset_id))

    nml_doc = pynml.read_neuroml2_file(cell_file)
    notes = nml_doc.cells[0].notes if len(
        nml_doc.cells) > 0 else nml_doc.izhikevich2007_cells[0].notes
    meta_nml = eval(notes[notes.index('{'):])
    summary = "Fitness: %s (max evals: %s, pop: %s)" % (
        meta_nml['fitness'], meta_nml['max_evaluations'],
        meta_nml['population_size'])
    print(summary)

    images = 'summary/%s_%s.png'
    if_iv_data_files = 'summary/%s_%s.dat'

    data, v_sub, curents_sub, freqs, curents_spike = get_if_iv_for_dataset(
        '%s%s_analysis.json' % (analysis_dir, dataset_id))

    if densities:

        dataset = {}
        seed = meta_nml['seed']
        if isinstance(seed, tuple):
            seed = seed[0]
        layer = str(data['location'].split(',')[-1].strip().replace(' ', ''))
        ref = '%s_%s_%s' % (dataset_id, layer, int(seed))

        dataset['id'] = dataset_id
        dataset['reference'] = ref
        metas = ['aibs_cre_line', 'aibs_dendrite_type', 'location']
        for m in metas:
            dataset[m] = str(data[m])

        metas2 = ['fitness', 'population_size', 'seed']
        for m in metas2:
            dataset[m] = meta_nml[m]

        # Assume images below already generated...
        if type == 'HH':

            cell = nml_doc.cells[0]

            sgv_files, all_info = generate_channel_density_plots(
                cell_file, text_densities=True, passives_erevs=True)
            sgv_file = sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]

            info['datasets'][ref] = dataset

        elif type == 'Izh':

            dataset['tuned_cell_info'] = {}
            izh_cell = nml_doc.izhikevich2007_cells[0]

            for p in ['C', 'a', 'b', 'c', 'd', 'k', 'vpeak', 'vr', 'vt']:

                dataset['tuned_cell_info'][p] = get_value_in_si(
                    getattr(izh_cell, p))
            '''
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]'''

        info['datasets'][ref] = dataset

    else:

        traces_ax, if_ax, iv_ax = generate_current_vs_frequency_curve(
            cell_file,
            reference,
            simulator='jNeuroML_NEURON',
            start_amp_nA=-0.1,
            end_amp_nA=0.4,
            step_nA=0.01,
            analysis_duration=1000,
            analysis_delay=50,
            plot_voltage_traces=False,
            plot_if=not nogui,
            plot_iv=not nogui,
            xlim_if=[-200, 400],
            ylim_if=[-10, 120],
            xlim_iv=[-200, 400],
            ylim_iv=[-120, -40],
            save_if_figure_to=images % (reference, 'if'),
            save_iv_figure_to=images % (reference, 'iv'),
            save_if_data_to=if_iv_data_files % (reference, 'if'),
            save_iv_data_to=if_iv_data_files % (reference, 'iv'),
            show_plot_already=False,
            return_axes=True)

        iv_ax.plot(curents_sub,
                   v_sub,
                   color='#ff2222',
                   marker='o',
                   linestyle='',
                   zorder=1)
        if_ax.plot(curents_spike,
                   freqs,
                   color='#ff2222',
                   marker='o',
                   linestyle='',
                   zorder=1)

        iv_ax.get_figure().savefig(images % (reference, 'iv'),
                                   bbox_inches='tight')
        if_ax.get_figure().savefig(images % (reference, 'if'),
                                   bbox_inches='tight')

        offset = 100  # mV

        ifv_x = []
        ifv_y = []
        markers = []
        lines = []
        colors = []

        cols = {'Izh': 'r', 'HH': 'g', 'AllenHH': 'b'}

        for ii in ['if', 'iv']:
            for tt in ['Izh', 'HH', 'AllenHH']:
                rr = '%s_%s' % (tt, dataset_id)
                f = if_iv_data_files % (rr, ii)
                if os.path.isfile(f):
                    print("--- Opening: %s" % f)
                    data, indeces = reload_standard_dat_file(f)

                    ifv_x.append(data['t'])

                    if ii == 'if':
                        ifv_y.append([ff - offset for ff in data[0]])
                    else:
                        ifv_y.append([vv for vv in data[0]])

                    markers.append('')
                    colors.append(cols[tt])
                    lines.append('-')

        ifv_x.append(curents_sub)
        vvsub = [vv for vv in v_sub]

        ifv_y.append(vvsub)

        sub_color = '#888888'
        markers.append('D')
        colors.append('k')
        lines.append('')

        ifv_x.append(curents_spike)
        ifv_y.append([ff - offset for ff in freqs])

        markers.append('o')
        colors.append(sub_color)
        lines.append('')

        import matplotlib
        import matplotlib.pyplot as plt

        print(ifv_x)
        print(ifv_y)
        ylim = [-105, -20]
        font_size = 18
        ax1 = pynml.generate_plot(ifv_x,
                                  ifv_y,
                                  summary,
                                  markers=markers,
                                  colors=colors,
                                  linestyles=lines,
                                  show_plot_already=False,
                                  xlim=[-100, 400],
                                  font_size=font_size,
                                  ylim=ylim,
                                  title_above_plot=False)

        plt.xlabel('Input current (pA)', fontsize=font_size)
        plt.ylabel("Steady membrane potential (mV)", fontsize=font_size)

        ax2 = ax1.twinx()
        plt.ylim([ylim[0] + offset, ylim[1] + offset])
        plt.ylabel('Firing frequency (Hz)',
                   color=sub_color,
                   fontsize=font_size)
        ax2.tick_params(axis='y', colors=sub_color)

        #plt.axis('off')

        plt.savefig(images % (reference, 'if_iv' + "_FIG"),
                    bbox_inches='tight')

        temp_dir = 'temp/'

        print("Copying %s to %s" % (cell_file, temp_dir))
        shutil.copy(cell_file, temp_dir)

        net_file = generate_network_for_sweeps(type,
                                               dataset_id,
                                               '%s.cell.nml' % (reference),
                                               reference,
                                               temp_dir,
                                               data_dir=analysis_dir)

        lems_file_name = 'LEMS_Test_%s_%s.xml' % (type, dataset_id)

        generate_lems_file_for_neuroml('Test_%s_%s' % (dataset_id, type),
                                       net_file,
                                       'network_%s_%s' % (dataset_id, type),
                                       1500,
                                       0.01,
                                       lems_file_name,
                                       temp_dir,
                                       gen_plots_for_all_v=False,
                                       copy_neuroml=False)

        simulator = "jNeuroML_NEURON"

        if simulator == "jNeuroML":
            results = pynml.run_lems_with_jneuroml(temp_dir + lems_file_name,
                                                   nogui=True,
                                                   load_saved_data=True,
                                                   plot=False,
                                                   show_plot_already=False)
        elif simulator == "jNeuroML_NEURON":
            results = pynml.run_lems_with_jneuroml_neuron(
                temp_dir + lems_file_name,
                nogui=True,
                load_saved_data=True,
                plot=False,
                show_plot_already=False)

        x = []
        y = []

        print(results.keys())

        tt = [t * 1000 for t in results['t']]
        for i in range(len(results) - 1):
            x.append(tt)
            y.append([
                v * 1000
                for v in results['Pop0/%i/%s_%s/v' % (i, type, dataset_id)]
            ])

        pynml.generate_plot(x,
                            y,
                            summary,
                            show_plot_already=False,
                            ylim=[-120, 60],
                            save_figure_to=images % (reference, 'traces'),
                            title_above_plot=True)

        ax = pynml.generate_plot(x,
                                 y,
                                 summary,
                                 show_plot_already=False,
                                 ylim=[-120, 60],
                                 title_above_plot=False)

        ax.set_xlabel(None)
        ax.set_ylabel(None)
        plt.axis('off')

        fig_file = images % (reference, 'traces' + "_FIG")
        plt.savefig(fig_file, bbox_inches='tight', pad_inches=0)
        from PIL import Image
        img = Image.open(fig_file)

        img2 = img.crop((60, 40, 660, 480))
        img2.save(fig_file)
Example #16
            Y = ys[layer][0] + random.random() * (ys[layer][1] - ys[layer][0])

            inst.location = Location(x=X, y=Y, z=Z)

            count += 1

net_file = '%s.net.nml' % (net_ref)
writers.NeuroMLWriter.write(net_doc, net_file)

print("Written network with %i cells in network to: %s" % (count, net_file))

from neuroml.utils import validate_neuroml2

validate_neuroml2(net_file)

generate_lems_file_for_neuroml("Sim_" + net_ref,
                               net_file,
                               net_ref,
                               50,
                               0.025,
                               "LEMS_%s.xml" % net_ref,
                               ".",
                               gen_plots_for_all_v=True,
                               plot_all_segments=True,
                               gen_saves_for_all_v=True,
                               save_all_segments=True,
                               copy_neuroml=False,
                               seed=1234)

pynml.nml2_to_svg(net_file)
Example #17
                                   populations=pop_id)

    input = neuroml.Input(id=0,
                          target="../%s/0/%s" % (pop_id, pop_comp),
                          destination="synapses")

    input_list.input.append(input)
    new_net.input_lists.append(input_list)

    pynml.write_neuroml2_file(new_net_doc, new_net_loc)

    generate_lems_file_for_neuroml(
        model_id,
        new_net_loc,
        "network",
        2500,
        0.005,  # used in Allen Neuron runs
        "LEMS_%s.xml" % model_id,
        nml2_cell_dir,
        copy_neuroml=False,
        seed=1234)

    net_doc.includes.append(neuroml.IncludeType(nml_cell_file))

    pop = neuroml.Population(id="Pop_%s" % model_id,
                             component=cell.id,
                             type="populationList")

    net.populations.append(pop)

    inst = neuroml.Instance(id="0")
    pop.instances.append(inst)
                         populations=pop_id)

    input = neuroml.Input(id=0, 
                          target="../%s/0/%s"%(pop_id, pop_comp), 
                          destination="synapses")  

    input_list.input.append(input)
    new_net.input_lists.append(input_list)
    
    pynml.write_neuroml2_file(new_net_doc, new_net_loc)

    generate_lems_file_for_neuroml(model_id,
                                   new_net_loc,
                                   "network",
                                   2500,
                                   0.005, # used in Allen Neuron runs
                                   "LEMS_%s.xml"%model_id,
                                   nml2_cell_dir,
                                   copy_neuroml = False,
                                   seed=1234)
    
    
    net_doc.includes.append(neuroml.IncludeType(nml_cell_file))

    pop = neuroml.Population(id="Pop_%s"%model_id, component=cell.id, type="populationList")

    net.populations.append(pop)

    inst = neuroml.Instance(id="0")
    pop.instances.append(inst)
Example #19
    lems_file_name = 'LEMS_%s.xml' % sim_id
    target_dir = "test_data"

    generate_lems_file_for_neuroml(
        sim_id,
        neuroml_file,
        target,
        duration,
        dt,
        lems_file_name,
        target_dir,
        include_extra_files=[],
        gen_plots_for_all_v=True,
        plot_all_segments=False,
        gen_plots_for_quantities={},  #  Dict with displays vs lists of quantity paths
        gen_plots_for_only_populations=[],  #  List of populations, all pops if = []
        gen_saves_for_all_v=True,
        save_all_segments=False,
        gen_saves_for_only_populations=[],  #  List of populations, all pops if = []
        gen_saves_for_quantities={},  #  Dict with file names vs lists of quantity paths
        gen_spike_saves_for_all_somas=True,
        report_file_name='report.txt',
        copy_neuroml=True,
        verbose=True)

    if '-test' in sys.argv:
Example #20
writers.NeuroMLWriter.write(nml_doc, nml_file)

print("Written network file to: " + nml_file)

# Validate the NeuroML

from neuroml.utils import validate_neuroml2

validate_neuroml2(nml_file)

# Generate the LEMS file to simulate network (NEURON only...)

generate_lems_file_for_neuroml('sim_%s' % ref,
                               nml_file,
                               net.id,
                               1000,
                               0.01,
                               'LEMS_%s.xml' % ref,
                               '../generatedNeuroML2',
                               gen_plots_for_all_v=False,
                               gen_plots_for_only=[
                                   pyr_cells_pop0.id, pyr_cells_pop1.id,
                                   pyr_cells_pop2.id, pyr_cells_pop3.id
                               ],
                               gen_saves_for_all_v=False,
                               gen_saves_for_only=[
                                   pyr_cells_pop0.id, pyr_cells_pop1.id,
                                   pyr_cells_pop2.id, pyr_cells_pop3.id
                               ],
                               copy_neuroml=False,
                               seed=1234)
                         populations=pop_id)

    input = neuroml.Input(id=0, 
                          target="../%s/0/%s"%(pop_id, pop_comp), 
                          destination="synapses")  

    input_list.input.append(input)
    new_net.input_lists.append(input_list)
    
    pynml.write_neuroml2_file(new_net_doc, new_net_loc)

    generate_lems_file_for_neuroml(model_id,
                                   new_net_loc,
                                   "network",
                                   1500,
                                   0.025,
                                   "LEMS_%s.xml"%model_id,
                                   nml2_cell_dir,
                                   copy_neuroml = False,
                                   seed=1234)
    
    
    net_doc.includes.append(neuroml.IncludeType(nml_cell_file))

    pop = neuroml.Population(id="Pop_%s"%model_id, component=cell.id, type="populationList")

    net.populations.append(pop)

    inst = neuroml.Instance(id="0")
    pop.instances.append(inst)
                                       populations=pop_id)

        input = neuroml.Input(id=0,
                              target="../%s/0/%s" % (pop_id, pop_comp),
                              destination="synapses")

        input_list.input.append(input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(cell_dir,
                                       new_net_loc,
                                       "network",
                                       stim_sim_duration,
                                       0.025,
                                       "LEMS_%s.xml" % cell_dir,
                                       nml2_cell_dir,
                                       copy_neuroml=False,
                                       seed=1234)

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        net_doc.includes.append(IncludeType(nml_cell_file))

        pop = Population(id="Pop_%s" % bbp_ref,
                         component=bbp_ref + '_0_0',
                         type="populationList")
Example #23
def analyse_spiketime_vs_dt(nml2_file, 
                            target,
                            duration,
                            simulator,
                            cell_v_path,
                            dts,
                            verbose=False,
                            spike_threshold_mV = 0,
                            show_plot_already=True,
                            save_figure_to=None,
                            num_of_last_spikes=None):
                                
    from pyelectro.analysis import max_min
    import numpy as np
    
    all_results = {}
    
    dts=list(np.sort(dts))
    
    for dt in dts:
        if verbose:
            print_comment_v(" == Generating simulation for dt = %s ms"%dt)
        ref = str("Sim_dt_%s"%dt).replace('.','_')
        lems_file_name = "LEMS_%s.xml"%ref
        generate_lems_file_for_neuroml(ref, 
                                   nml2_file, 
                                   target, 
                                   duration, 
                                   dt, 
                                   lems_file_name,
                                   '.',
                                   gen_plots_for_all_v = True,
                                   gen_saves_for_all_v = True,
                                   copy_neuroml = False,
                                   seed=None)
                                   
        if simulator == 'jNeuroML':
             results = pynml.run_lems_with_jneuroml(lems_file_name, nogui=True, load_saved_data=True, plot=False, verbose=verbose)
        if simulator == 'jNeuroML_NEURON':
             results = pynml.run_lems_with_jneuroml_neuron(lems_file_name, nogui=True, load_saved_data=True, plot=False, verbose=verbose)
             
        print("Results reloaded: %s"%results.keys())
             
        all_results[dt] = results
        
    xs = []
    ys = []
    labels = []
    
    spxs = []
    spys = []
    linestyles = []
    markers = []
    colors=[]
    spike_times_final=[]
    array_of_num_of_spikes=[]
    
    for dt in dts:
        t = all_results[dt]['t']
        v = all_results[dt][cell_v_path]
        xs.append(t)
        ys.append(v)
        labels.append(dt)
        
        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)
        
        spike_times = mm['maxima_times']
        
        spike_times_final.append(spike_times)
        
        array_of_num_of_spikes.append(len(spike_times))
        
    max_num_of_spikes=max(array_of_num_of_spikes)
    
    min_dt_spikes=spike_times_final[0]
    
    bound_dts=[math.log(dts[0]),math.log(dts[-1])]
    
    if num_of_last_spikes is None:
    
       num_of_spikes=len(min_dt_spikes)
       
    else:
       
       if len(min_dt_spikes) >=num_of_last_spikes:
       
          num_of_spikes=num_of_last_spikes
          
       else:
       
          num_of_spikes=len(min_dt_spikes)
    
    spike_indices=[(-1)*ind for ind in range(1,num_of_spikes+1) ]
    
    if len(min_dt_spikes) > abs(spike_indices[-1]):
    
       earliest_spike_time=min_dt_spikes[spike_indices[-1]-1]
       
    else:
     
       earliest_spike_time=min_dt_spikes[spike_indices[-1]]
       
    for spike_ind in range(0,max_num_of_spikes):
    
        spike_time_values=[]
        
        dt_values=[]
        
        for dt in range(0,len(dts)):
        
            if spike_times_final[dt] !=[]:
           
               if len(spike_times_final[dt]) >= spike_ind+1:
               
                  if spike_times_final[dt][spike_ind] >= earliest_spike_time:
             
                     spike_time_values.append(spike_times_final[dt][spike_ind])
               
                     dt_values.append(math.log(dts[dt]))       
        
        linestyles.append('')
               
        markers.append('o')
       
        colors.append('g')
       
        spxs.append(dt_values)
       
        spys.append(spike_time_values)
    
    for last_spike_index in spike_indices:
       
       vertical_line=[min_dt_spikes[last_spike_index],min_dt_spikes[last_spike_index] ]
          
       spxs.append(bound_dts)
          
       spys.append(vertical_line)
          
       linestyles.append('--')
          
       markers.append('')
       
       colors.append('k')
    
    pynml.generate_plot(spxs, 
          spys, 
          "Spike times vs dt",
          colors=colors,
          linestyles = linestyles,
          markers = markers,
          xaxis = 'ln ( dt (ms) )', 
          yaxis = 'Spike times (s)',
          show_plot_already=show_plot_already,
          save_figure_to=save_figure_to) 
    
    if verbose:
        pynml.generate_plot(xs, 
                  ys, 
                  "Membrane potentials in %s for %s"%(simulator,dts),
                  labels = labels,
                  show_plot_already=show_plot_already,
                  save_figure_to=save_figure_to)
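
# A hypothetical call to analyse_spiketime_vs_dt; the model file, network id
# and voltage path below are illustrative only.
analyse_spiketime_vs_dt('SingleCompCell.net.nml',
                        'net1',
                        300,
                        'jNeuroML_NEURON',
                        'pop0/0/cell0/v',
                        dts=[0.1, 0.05, 0.025, 0.0125, 0.00625],
                        verbose=True,
                        num_of_last_spikes=3,
                        show_plot_already=True)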
Example #24
                                   populations=pop_id)

    input = neuroml.Input(id=0,
                          target="../%s/0/%s" % (pop_id, pop_comp),
                          destination="synapses")

    input_list.input.append(input)
    new_net.input_lists.append(input_list)

    pynml.write_neuroml2_file(new_net_doc, new_net_loc)

    generate_lems_file_for_neuroml(model_id,
                                   new_net_loc,
                                   "network",
                                   1500,
                                   0.025,
                                   "LEMS_%s.xml" % model_id,
                                   nml2_cell_dir,
                                   copy_neuroml=False,
                                   seed=1234)

    net_doc.includes.append(neuroml.IncludeType(nml_cell_file))

    pop = neuroml.Population(id="Pop_%s" % model_id,
                             component=cell.id,
                             type="populationList")

    net.populations.append(pop)

    inst = neuroml.Instance(id="0")
    pop.instances.append(inst)
def analyse_cell(dataset_id, type, nogui=False):

    reference = '%s_%s' % (type, dataset_id)
    cell_file = '%s.cell.nml' % (reference)

    images = 'summary/%s_%s.png'

    generate_current_vs_frequency_curve(
        cell_file,
        reference,
        simulator='jNeuroML_NEURON',
        start_amp_nA=-0.1,
        end_amp_nA=0.4,
        step_nA=0.01,
        analysis_duration=1000,
        analysis_delay=50,
        plot_voltage_traces=False,
        plot_if=not nogui,
        plot_iv=not nogui,
        xlim_if=[-200, 400],
        ylim_if=[-10, 120],
        xlim_iv=[-200, 400],
        ylim_iv=[-120, -40],
        save_if_figure_to=images % (reference, 'if'),
        save_iv_figure_to=images % (reference, 'iv'),
        show_plot_already=False)

    temp_dir = 'temp/'

    shutil.copy(cell_file, temp_dir)

    net_file = generate_network_for_sweeps(type, dataset_id,
                                           '%s.cell.nml' % (reference),
                                           reference, temp_dir)

    lems_file_name = 'LEMS_Test_%s_%s.xml' % (type, dataset_id)

    generate_lems_file_for_neuroml('Test_%s_%s' % (dataset_id, type),
                                   net_file,
                                   'network_%s_%s' % (dataset_id, type),
                                   1500,
                                   0.01,
                                   lems_file_name,
                                   temp_dir,
                                   gen_plots_for_all_v=False,
                                   copy_neuroml=False)

    simulator = "jNeuroML_NEURON"

    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(temp_dir + lems_file_name,
                                               nogui=True,
                                               load_saved_data=True,
                                               plot=False,
                                               show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(temp_dir +
                                                      lems_file_name,
                                                      nogui=True,
                                                      load_saved_data=True,
                                                      plot=False,
                                                      show_plot_already=False)

    x = []
    y = []

    print(results.keys())

    tt = [t * 1000 for t in results['t']]
    for i in range(len(results) - 1):
        x.append(tt)
        y.append([
            v * 1000
            for v in results['Pop0/%i/%s_%s/v' % (i, type, dataset_id)]
        ])

    pynml.generate_plot(x,
                        y,
                        "Cell: %s" % dataset_id,
                        xaxis="Time (ms)",
                        yaxis="Membrane potential (mV)",
                        show_plot_already=False,
                        ylim=[-120, 60],
                        save_figure_to=images % (reference, 'traces'))
                             populations=pop_id)
                             
        input = neuroml.Input(id=0, 
                              target="../%s/0/%s"%(pop_id, pop_comp), 
                              destination="synapses")  
                              
        input_list.input.append(input)
        new_net.input_lists.append(input_list)
        
        pynml.write_neuroml2_file(new_net_doc, new_net_loc)
        
        generate_lems_file_for_neuroml(cell_dir,
                                       new_net_loc,
                                       "network",
                                       stim_sim_duration,
                                       0.025,
                                       "LEMS_%s.xml"%cell_dir,
                                       nml2_cell_dir,
                                       copy_neuroml = False,
                                       seed=1234)
        
        pynml.nml2_to_svg(nml_net_loc)
        
        clear_neuron()
        
        net_doc.includes.append(IncludeType(nml_cell_file))

        pop = Population(id="Pop_%s"%bbp_ref, component=bbp_ref+'_0_0', type="populationList")

        net.populations.append(pop)
Example #27
    dt = 0.025
    lems_file_name = 'LEMS_%s.xml' % sim_id
    target_dir = "."

    pops_with_morphlogies = [
        "pop_ngf", "pop_bistratified", "pop_sca", "pop_olm", "pop_poolosyn",
        "pop_pvbasket", "pop_cck", "pop_axoaxonic", "pop_ivy"
    ]

    generate_lems_file_for_neuroml(
        sim_id,
        neuroml_file,
        target,
        duration,
        dt,
        lems_file_name,
        target_dir,
        include_extra_files=[],
        gen_plots_for_all_v=False,
        plot_all_segments=False,
        gen_plots_for_only_populations=pops_with_morphlogies,  #  List of populations, all pops if = []
        gen_saves_for_all_v=False,
        save_all_segments=False,
        gen_saves_for_only_populations=pops_with_morphlogies,  #  List of populations, all pops if = []
        gen_saves_for_quantities={},  #  Dict with file names vs lists of quantity paths
        copy_neuroml=True,
        seed=None)
Example #28
def analyse_spiketime_vs_dt(nml2_file,
                            target,
                            duration,
                            simulator,
                            cell_v_path,
                            dts,
                            verbose=False,
                            spike_threshold_mV=0,
                            show_plot_already=True,
                            save_figure_to=None,
                            num_of_last_spikes=None):

    from pyelectro.analysis import max_min
    import numpy as np

    all_results = {}

    dts = list(np.sort(dts))

    for dt in dts:
        if verbose:
            print_comment_v(" == Generating simulation for dt = %s ms" % dt)
        ref = str("Sim_dt_%s" % dt).replace('.', '_')
        lems_file_name = "LEMS_%s.xml" % ref
        generate_lems_file_for_neuroml(ref,
                                       nml2_file,
                                       target,
                                       duration,
                                       dt,
                                       lems_file_name,
                                       '.',
                                       gen_plots_for_all_v=True,
                                       gen_saves_for_all_v=True,
                                       copy_neuroml=False)

        if simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(lems_file_name,
                                                   nogui=True,
                                                   load_saved_data=True,
                                                   plot=False,
                                                   verbose=verbose)
        if simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(lems_file_name,
                                                          nogui=True,
                                                          load_saved_data=True,
                                                          plot=False,
                                                          verbose=verbose)

        print("Results reloaded: %s" % results.keys())

        all_results[dt] = results

    xs = []
    ys = []
    labels = []

    spxs = []
    spys = []
    linestyles = []
    markers = []
    colors = []
    spike_times_final = []
    array_of_num_of_spikes = []

    for dt in dts:
        t = all_results[dt]['t']
        v = all_results[dt][cell_v_path]
        xs.append(t)
        ys.append(v)
        labels.append(dt)

        mm = max_min(v, t, delta=0, peak_threshold=spike_threshold_mV)

        spike_times = mm['maxima_times']

        spike_times_final.append(spike_times)

        array_of_num_of_spikes.append(len(spike_times))

    max_num_of_spikes = max(array_of_num_of_spikes)

    min_dt_spikes = spike_times_final[0]

    bound_dts = [math.log(dts[0]), math.log(dts[-1])]

    if num_of_last_spikes is None:

        num_of_spikes = len(min_dt_spikes)

    else:

        if len(min_dt_spikes) >= num_of_last_spikes:

            num_of_spikes = num_of_last_spikes

        else:

            num_of_spikes = len(min_dt_spikes)

    spike_indices = [(-1) * ind for ind in range(1, num_of_spikes + 1)]

    if len(min_dt_spikes) > abs(spike_indices[-1]):

        earliest_spike_time = min_dt_spikes[spike_indices[-1] - 1]

    else:

        earliest_spike_time = min_dt_spikes[spike_indices[-1]]

    for spike_ind in range(0, max_num_of_spikes):

        spike_time_values = []

        dt_values = []

        for dt in range(0, len(dts)):

            if spike_times_final[dt] != []:

                if len(spike_times_final[dt]) >= spike_ind + 1:

                    if spike_times_final[dt][spike_ind] >= earliest_spike_time:

                        spike_time_values.append(
                            spike_times_final[dt][spike_ind])

                        dt_values.append(math.log(dts[dt]))

        linestyles.append('')

        markers.append('o')

        colors.append('g')

        spxs.append(dt_values)

        spys.append(spike_time_values)

    for last_spike_index in spike_indices:

        vertical_line = [
            min_dt_spikes[last_spike_index], min_dt_spikes[last_spike_index]
        ]

        spxs.append(bound_dts)

        spys.append(vertical_line)

        linestyles.append('--')

        markers.append('')

        colors.append('k')

    pynml.generate_plot(spxs,
                        spys,
                        "Spike times vs dt",
                        colors=colors,
                        linestyles=linestyles,
                        markers=markers,
                        xaxis='ln ( dt (ms) )',
                        yaxis='Spike times (s)',
                        show_plot_already=show_plot_already,
                        save_figure_to=save_figure_to)

    if verbose:
        pynml.generate_plot(xs,
                            ys,
                            "Membrane potentials in %s for %s" %
                            (simulator, dts),
                            labels=labels,
                            show_plot_already=show_plot_already,
                            save_figure_to=save_figure_to)
            inst.location = Location(x=X, y=Y, z=Z)

            count+=1

net_file = '%s.net.nml'%(net_ref)
writers.NeuroMLWriter.write(net_doc, net_file)

print("Written network with %i cells in network to: %s"%(count,net_file))

from neuroml.utils import validate_neuroml2

validate_neuroml2(net_file)

generate_lems_file_for_neuroml("Sim_"+net_ref, 
                               net_file, 
                               net_ref, 
                               50, 
                               0.025, 
                               "LEMS_%s.xml"%net_ref,
                               ".",
                               gen_plots_for_all_v = True,
                               plot_all_segments = True,
                               gen_saves_for_all_v = True,
                               save_all_segments = True,
                               copy_neuroml = False,
                               seed = 1234)

pynml.nml2_to_svg(net_file)

target_dir = "."

interesting_seg_ids = [0, 1000, 2000, 10000]

to_plot = {'Some_voltages': []}
to_save = {'%s_voltages.dat' % cell_id: []}

for seg_id in interesting_seg_ids:
    quantity = '%s/0/%s/%s/v' % (pop.id, pop.component, seg_id)
    to_plot['Some_voltages'].append(quantity)
    to_save['%s_voltages.dat' % cell_id].append(quantity)

generate_lems_file_for_neuroml(
    sim_id,
    nml_file,
    target,
    duration,
    dt,
    lems_file_name,
    target_dir,
    gen_plots_for_all_v=False,
    plot_all_segments=False,
    gen_plots_for_quantities=to_plot,  #  Dict with displays vs lists of quantity paths
    gen_saves_for_all_v=False,
    save_all_segments=False,
    gen_saves_for_quantities=to_save,  #  Dict with file names vs lists of quantity paths
    copy_neuroml=False)
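
# For reference, a sketch (with illustrative ids) of the structure the two
# dicts above end up with: a display name or file name mapped to a list of
# quantity paths, e.g.
#   to_plot == {'Some_voltages': ['PopA/0/CellA/0/v', 'PopA/0/CellA/1000/v', ...]}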
def analyse_cell(dataset_id, type, nogui = False):
    
    
    reference = '%s_%s'%(type,dataset_id)
    cell_file = '%s.cell.nml'%(reference)
    
    images = 'summary/%s_%s.png'
    
    generate_current_vs_frequency_curve(cell_file, 
                                        reference, 
                                        simulator = 'jNeuroML_NEURON',
                                        start_amp_nA =         -0.1, 
                                        end_amp_nA =           0.4, 
                                        step_nA =              0.01, 
                                        analysis_duration =    1000, 
                                        analysis_delay =       50,
                                        plot_voltage_traces =  False,
                                        plot_if =              not nogui,
                                        plot_iv =              not nogui, 
                                        xlim_if =              [-200, 400],
                                        ylim_if =              [-10, 120],
                                        xlim_iv =              [-200, 400],
                                        ylim_iv =              [-120, -40],
                                        save_if_figure_to=images%(reference, 'if'), 
                                        save_iv_figure_to=images%(reference, 'iv'),
                                        show_plot_already = False)
               
    temp_dir = 'temp/'
    
    shutil.copy(cell_file, temp_dir)
    
    net_file = generate_network_for_sweeps(type, dataset_id, '%s.cell.nml'%(reference), reference, temp_dir)
    
    lems_file_name = 'LEMS_Test_%s_%s.xml'%(type,dataset_id)
    
    generate_lems_file_for_neuroml('Test_%s_%s'%(dataset_id,type),
                                   net_file,
                                   'network_%s_%s'%(dataset_id,type), 
                                   1500, 
                                   0.01, 
                                   lems_file_name,
                                   temp_dir,
                                   gen_plots_for_all_v=False,
                                   copy_neuroml = False)
    
    simulator = "jNeuroML_NEURON"
    
    if simulator == "jNeuroML":
        results = pynml.run_lems_with_jneuroml(temp_dir+lems_file_name, 
                                                nogui=True, 
                                                load_saved_data=True, 
                                                plot=False,
                                                show_plot_already=False)
    elif simulator == "jNeuroML_NEURON":
        results = pynml.run_lems_with_jneuroml_neuron(temp_dir+lems_file_name, 
                                                nogui=True, 
                                                load_saved_data=True, 
                                                plot=False,
                                                show_plot_already=False)
                                                
    x = []
    y = []
    
    print(results.keys())
    
    tt = [t*1000 for t in results['t']]
    for i in range(len(results)-1):
        x.append(tt)
        y.append([v*1000 for v in results['Pop0/%i/%s_%s/v'%(i,type,dataset_id)]])
        
    pynml.generate_plot(x,
                y, 
                "Cell: %s"%dataset_id, 
                xaxis = "Time (ms)", 
                yaxis = "Membrane potential (mV)",
                show_plot_already=False,
                ylim = [-120, 60],
                save_figure_to = images%(reference, 'traces'))
Example #32
target = "simplenet"
duration = 1000
dt = 0.025
lems_file_name = 'LEMS_%s.xml' % sim_id
target_dir = "test_data"

generate_lems_file_for_neuroml(sim_id,
                               neuroml_file,
                               target,
                               duration,
                               dt,
                               lems_file_name,
                               target_dir,
                               include_extra_files = [],
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_plots_for_quantities = {},   #  Dict with displays vs lists of quantity paths
                               gen_plots_for_only_populations = [],   #  List of populations, all pops if = []
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               gen_saves_for_only_populations = [],  #  List of populations, all pops if = []
                               gen_saves_for_quantities = {},   #  Dict with file names vs lists of quantity paths
                               gen_spike_saves_for_all_somas = True,
                               report_file_name = 'report.txt',
                               copy_neuroml = True,
                               verbose=True)


if '-test' in sys.argv:

    neuroml_file = "test_data/HHCellNetwork.net.nml"
    lems_file_name = 'LEMS_%s2.xml' % sim_id
Example #33
plots = {}
saves = {}
for i in range(number_cells):
    p = []
    plots['Mitral_0_%i'%i] = p
    p.append('Pop_Mitral_0_%i/0/Mitral_0_%i/0/v'%(i,i))
    #p.append('Pop_Mitral_0_%i/0/Mitral_0_%i/681/v'%(i,i))
    #p.append('Pop_Mitral_0_%i/0/Mitral_0_%i/20/v'%(i,i))
    p.append('Pop_Mitral_0_%i/0/Mitral_0_%i/43/v'%(i,i))
    #save_plot.append('Pop_Mitral_0_%i/0/Mitral_0_%i/682/v'%(i,i))
    #save_plot.append('Pop_Mitral_0_%i/0/Mitral_0_%i/20/v'%(i,i))
    #save_plot.append('Pop_Mitral_0_%i/0/Mitral_0_%i/43/v'%(i,i))
    
print(plots)
print(saves)

generate_lems_file_for_neuroml(ref, 
                                nml_file1, 
                                "network", 
                                180, 
                                0.01, 
                                'LEMS_%s.xml'%ref,
                                '.',
                                gen_plots_for_all_v = False,
                                gen_plots_for_quantities = plots,
                                gen_saves_for_all_v = False,
                                gen_saves_for_quantities = saves,
                                copy_neuroml=False)