Code example #1
def SingleCellSim(cell_ref,nc_simConfig,sim_duration):
    

    net_doc = pynml.read_neuroml2_file("%s.nml"%nc_simConfig)
    net_doc.id=nc_simConfig

    net=net_doc.networks[0]
    net.id=nc_simConfig


    net_doc.includes.append(IncludeType("../generatedNeuroML2/%s.cell.nml"%cell_ref))

    net_file = '%s.net.nml'%(net_doc.id)
    writers.NeuroMLWriter.write(net_doc, net_file)

    print("Written network with 1 cell in network to: %s"%(net_file))

    from neuroml.utils import validate_neuroml2

    validate_neuroml2(net_file)

    generate_lems_file_for_neuroml("Sim_"+net_doc.id, 
                               net_file, 
                               net_doc.id, 
                               sim_duration,
                               0.025, 
                               "LEMS_%s.xml"%net_doc.id,
                               ".",
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               copy_neuroml = False,
                               seed = 1234)
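The snippet above relies on pyNeuroML/libNeuroML helpers imported elsewhere in its project. A minimal usage sketch follows, with hypothetical names; the imports and argument values are assumptions, not part of the original example:

# Usage sketch for SingleCellSim (illustrative only).
# Requires MyConfig.nml and ../generatedNeuroML2/MyCell.cell.nml to exist.
from pyneuroml import pynml
from pyneuroml.lems import generate_lems_file_for_neuroml
from neuroml import IncludeType
import neuroml.writers as writers

# Writes MyConfig.net.nml and LEMS_MyConfig.xml for a 500 ms simulation.
SingleCellSim(cell_ref="MyCell", nc_simConfig="MyConfig", sim_duration=500)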
Code example #2
def main(args=None):
    """Main"""
    
    vs = [(v-100)*0.001 for v in range(200)]
    
    for f in ['IM.channel.nml','Kd.channel.nml']:
        nml_doc = pynml.read_neuroml2_file(f)

        for ct in nml_doc.ComponentType:

            ys = []
            for v in vs:
                req_variables = {'v':'%sV'%v,'vShift':'10mV'}
                vals = pynml.evaluate_component(ct,req_variables=req_variables)
                print(vals)
                if 'x' in vals:
                    ys.append(vals['x'])
                if 't' in vals:
                    ys.append(vals['t'])
                if 'r' in vals:
                    ys.append(vals['r'])

            ax = pynml.generate_plot([vs],[ys],          
                             "Some traces from %s in %s"%(ct.name,f),
                             show_plot_already=False )       

            print(vals)
        
    plt.show()
Code example #3
def main(args=None):
    """Main"""

    vs = [(v - 100) * 0.001 for v in range(200)]

    for f in ['IM.channel.nml', 'Kd.channel.nml']:
        nml_doc = pynml.read_neuroml2_file(f)

        for ct in nml_doc.ComponentType:

            ys = []
            for v in vs:
                req_variables = {'v': '%sV' % v, 'vShift': '10mV'}
                vals = pynml.evaluate_component(ct,
                                                req_variables=req_variables)
                print(vals)
                if 'x' in vals:
                    ys.append(vals['x'])
                if 't' in vals:
                    ys.append(vals['t'])
                if 'r' in vals:
                    ys.append(vals['r'])

            ax = pynml.generate_plot([vs], [ys],
                                     "Some traces from %s in %s" %
                                     (ct.name, f),
                                     show_plot_already=False)
            print(vals)

    plt.show()
Code example #4
File: export_granule.py  Project: JustasB/Mig3DTest
def readGCnml(gcid):

    nml_cell_file = "../NeuroML2/GranuleCells/Exported/Granule_0_%i.cell.nml" % gcid
    nml_doc = pynml.read_neuroml2_file(nml_cell_file)
    cell = nml_doc.cells[0]

    return cell, nml_doc, nml_cell_file
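A short usage sketch for the loader above; the granule cell index is a placeholder:

# Expects ../NeuroML2/GranuleCells/Exported/Granule_0_3.cell.nml to exist (index 3 is illustrative).
cell, nml_doc, nml_cell_file = readGCnml(3)
print("Loaded cell %s from %s" % (cell.id, nml_cell_file))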
Code example #5
def add_cell_and_channels(nml_doc,cell_nml2_rel_path, cell_id):
    
    cell_nml2_path = os.path.dirname(__file__)+"/../../NeuroML2/prototypes/"+cell_nml2_rel_path
    print("Translated %s to %s"%(cell_nml2_rel_path,cell_nml2_path))
    nml2_doc_cell = pynml.read_neuroml2_file(cell_nml2_path, include_includes=False)
    
    for cell in _get_cells_of_all_known_types(nml2_doc_cell):
        if cell.id == cell_id:
            all_cells[cell_id] = cell
            
            _copy_to_dir_for_model(nml_doc,cell_nml2_path)
            new_file = '%s/%s.cell.nml'%(nml_doc.id,cell_id)
            nml_doc.includes.append(neuroml.IncludeType(new_file)) 
            if not new_file in all_included_files:
                all_included_files.append(new_file)
            
            for included in nml2_doc_cell.includes:
                #Todo replace... quick & dirty...
                old_loc = '%s/%s'%(os.path.dirname(os.path.abspath(cell_nml2_path)), included.href)
                print(old_loc)
                _copy_to_dir_for_model(nml_doc,old_loc)
                new_loc = '%s/%s'%(nml_doc.id,included.href)
                nml_doc.includes.append(neuroml.IncludeType(new_loc))
                if not new_loc in all_included_files:
                    all_included_files.append(new_loc)
Code example #6
 def __init__(self,target_net_path,reference_net_path,test_populations=True,test_projections=True,test_inputs=True):
 
    self.target_net_path=target_net_path
    
    self.reference_net_path=reference_net_path
    
    self.test_populations=test_populations
    
    self.test_projections=test_projections
    
    self.test_inputs=test_inputs
    
    self.target_net_doc=pynml.read_neuroml2_file(target_net_path)
    
    self.reference_net_doc=pynml.read_neuroml2_file(reference_net_path)
    
    self.error_counter=0
Code example #7
def get_channels_from_channel_file(channel_file):
    doc = read_neuroml2_file(channel_file, include_includes=True, verbose=False, already_included=[])
    channels = list(doc.ion_channel_hhs.__iter__()) + \
               list(doc.ion_channel.__iter__())
    for channel in channels:
        setattr(channel,'file',channel_file)  
        if not hasattr(channel,'notes'):
            setattr(channel,'notes','')
    return channels
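A minimal sketch of calling the helper above; the channel file name is hypothetical:

# List the ion channels defined in a NeuroML2 channel file.
channels = get_channels_from_channel_file('Na.channel.nml')
for channel in channels:
    print("Channel %s loaded from %s" % (channel.id, channel.file))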
Code example #8
File: LEMSSimulation.py  Project: RokasSt/pyNeuroML
 def include_neuroml2_file(self, nml2_file_name, include_included=True, relative_to_dir='.'):
     full_path = os.path.abspath(relative_to_dir+'/'+nml2_file_name)
     base_path = os.path.dirname(full_path)
     print_comment_v("Including in generated LEMS file: %s (%s)"%(nml2_file_name, full_path))
     self.lems_info['include_files'].append(nml2_file_name)
     if include_included:
         cell = read_neuroml2_file(full_path)
         for include in cell.includes:
             self.include_neuroml2_file(include.href, include_included=True, relative_to_dir=base_path)
Code example #9
    def include_neuroml2_file(self, nml2_file_name, include_included=True, relative_to_dir='.'):
        full_path = os.path.abspath(relative_to_dir + '/' + nml2_file_name)
        base_path = os.path.dirname(full_path)
        # print_comment_v("Including in generated LEMS file: %s (%s)"%(nml2_file_name, full_path))
        if nml2_file_name not in self.lems_info['include_files']:
            self.lems_info['include_files'].append(nml2_file_name)

        if include_included:
            cell = read_neuroml2_file(full_path)
            for include in cell.includes:
                self.include_neuroml2_file(include.href, include_included=True, relative_to_dir=base_path)
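This method belongs to pyNeuroML's LEMSSimulation class (used directly in code example #16). A hedged usage sketch, with illustrative file names and simulation settings:

from pyneuroml.lems import LEMSSimulation

# Pull a network file and, recursively, everything it includes into the LEMS simulation.
ls = LEMSSimulation('sim_example', duration=500, dt=0.025)
ls.include_neuroml2_file('network.net.nml', include_included=True)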
Code example #10
def get_channels_from_channel_file(channel_file):
    doc = read_neuroml2_file(channel_file,
                             include_includes=True,
                             verbose=False,
                             already_included=[])
    channels = list(doc.ion_channel_hhs.__iter__()) + \
               list(doc.ion_channel.__iter__())
    for channel in channels:
        setattr(channel, 'file', channel_file)
        if not hasattr(channel, 'notes'):
            setattr(channel, 'notes', '')
    return channels
Code example #11
    def __init__(self,
                 target_net_path,
                 reference_net_path,
                 test_populations=True,
                 test_projections=True,
                 test_inputs=True):

        self.target_net_path = target_net_path

        self.reference_net_path = reference_net_path

        self.test_populations = test_populations

        self.test_projections = test_projections

        self.test_inputs = test_inputs

        self.target_net_doc = pynml.read_neuroml2_file(target_net_path)

        self.reference_net_doc = pynml.read_neuroml2_file(reference_net_path)

        self.error_counter = 0
Code example #12
File: ExportSWC.py  Project: NeuroML/pyNeuroML
def convert_to_swc(nml_file_name):

    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals
    
    # Reset
    line_count = 1
    line_index_vs_distals = {}
    line_index_vs_proximals = {}

    base_dir = os.path.dirname(os.path.realpath(nml_file_name))
    nml_doc = pynml.read_neuroml2_file(nml_file_name, include_includes=True, verbose=False, optimized=True)

    lines = []

    for cell in nml_doc.cells:
        
        swc_file_name = '%s/%s.swc' % (base_dir, cell.id)
            
        swc_file = open(swc_file_name, 'w')

        print_comment_v("Converting cell %s as found in NeuroML doc %s to SWC..." % (cell.id, nml_file_name))

        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'soma_group', 1)
        soma_seg_count = len(seg_ids)
        lines += lines_sg

        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'dendrite_group', 3)
        dend_seg_count = len(seg_ids)
        lines += lines_sg

        lines_sg, seg_ids = _get_lines_for_seg_group(cell, 'axon_group', 2)
        axon_seg_count = len(seg_ids)
        lines += lines_sg

        if not len(cell.morphology.segments) == soma_seg_count + dend_seg_count + axon_seg_count:
            raise Exception("The numbers of the segments in groups: soma_group+dendrite_group+axon_group (%i), is not the same as total number of segments (%s)! All bets are off!" % (soma_seg_count + dend_seg_count + axon_seg_count, len(cell.morphology.segments)))

        for i in range(len(lines)):
            l = lines[i]
            swc_line = '%s' % (l)
            print(swc_line)
            swc_file.write('%s\n' % swc_line)

        swc_file.close()

        print("Written to %s" % swc_file_name)    
Code example #13
def include_neuroml2_cell_and_channels(nml_doc,cell_nml2_path, cell_id):
    
    
    nml2_doc_cell = pynml.read_neuroml2_file(cell_nml2_path, include_includes=False)
    
    for cell in _get_cells_of_all_known_types(nml2_doc_cell):
        if cell.id == cell_id:
            all_cells[cell_id] = cell
            
            new_file = cell_nml2_path
            nml_doc.includes.append(neuroml.IncludeType(new_file)) 
            if not new_file in all_included_files:
                all_included_files.append(new_file)
            
            for included in nml2_doc_cell.includes:
                
                new_loc = included.href
                nml_doc.includes.append(neuroml.IncludeType(new_loc))
                if not new_loc in all_included_files:
                    all_included_files.append(new_loc)
Code example #14
def SingleCellSim(simConfig,dt,targetPath):
    
    src_files = os.listdir(targetPath)
    for file_name in src_files:
    
        if file_name=="Thalamocortical.net.nml":
           full_target_path=os.path.join(targetPath,file_name)
           net_doc = pynml.read_neuroml2_file(full_target_path)
           net_doc.id=simConfig

           net=net_doc.networks[0]
           net.id=simConfig


           net_file = '%s.net.nml'%(net_doc.id)
           writers.NeuroMLWriter.write(net_doc, full_target_path)

           print("Written network with 1 cell in network to: %s"%(full_target_path))
           
        if file_name=="LEMS_Thalamocortical.xml":
           full_lems_path=os.path.join(targetPath,file_name)
           sim_duration=get_sim_duration(full_lems_path)
           

    validate_neuroml2(full_target_path)

    generate_lems_file_for_neuroml("Sim_"+net_doc.id, 
                               full_target_path, 
                               net_doc.id, 
                               sim_duration,
                               dt, 
                               "LEMS_%s.xml"%net_doc.id,
                               targetPath,
                               gen_plots_for_all_v = True,
                               plot_all_segments = False,
                               gen_saves_for_all_v = True,
                               save_all_segments = False,
                               copy_neuroml = False,
                               seed = 1234)
Code example #15
def SingleCellSim(simConfig, dt, targetPath):

    src_files = os.listdir(targetPath)
    for file_name in src_files:

        if file_name == "Thalamocortical.net.nml":
            full_target_path = os.path.join(targetPath, file_name)
            net_doc = pynml.read_neuroml2_file(full_target_path)
            net_doc.id = simConfig

            net = net_doc.networks[0]
            net.id = simConfig

            net_file = '%s.net.nml' % (net_doc.id)
            writers.NeuroMLWriter.write(net_doc, full_target_path)

            print("Written network with 1 cell in network to: %s" %
                  (full_target_path))

        if file_name == "LEMS_Thalamocortical.xml":
            full_lems_path = os.path.join(targetPath, file_name)
            sim_duration = get_sim_duration(full_lems_path)

    validate_neuroml2(full_target_path)

    generate_lems_file_for_neuroml("Sim_" + net_doc.id,
                                   full_target_path,
                                   net_doc.id,
                                   sim_duration,
                                   dt,
                                   "LEMS_%s.xml" % net_doc.id,
                                   targetPath,
                                   gen_plots_for_all_v=True,
                                   plot_all_segments=False,
                                   gen_saves_for_all_v=True,
                                   save_all_segments=False,
                                   copy_neuroml=False,
                                   seed=1234)
Code example #16
def create_GoC_network( duration, dt, seed, runid, run=False):

	### ---------- Load Params
	noPar = True
	pfile = Path('params_file.pkl')
	if pfile.exists():
		print('Reading parameters from file:')
		file = open('params_file.pkl','rb')
		params_list = pkl.load(file)
		if len(params_list)>runid:
			p = params_list[runid]
			file.close()
	if noPar:
		p = inp.get_simulation_params( runid )
    
	### ---------- Component types
	goc_filename = 'GoC.cell.nml'							# Golgi cell with channels
	goc_file = pynml.read_neuroml2_file( goc_filename )
	goc_type = goc_file.cells[0]
	goc_ref = nml.IncludeType( href=goc_filename )

	MFSyn_filename = 'MF_GoC_Syn.nml'						# small conductance synapse for background inputs
	mfsyn_file = pynml.read_neuroml2_file( MFSyn_filename )
	MFSyn_type = mfsyn_file.exp_three_synapses[0]
	mfsyn_ref = nml.IncludeType( href=MFSyn_filename )
	
	MF20Syn_filename = 'MF_GoC_SynMult.nml'					# multi-syn conductance for strong/coincident transient input
	mf20syn_file = pynml.read_neuroml2_file( MF20Syn_filename )
	MF20Syn_type = mf20syn_file.exp_three_synapses[0]
	mf20syn_ref = nml.IncludeType( href=MF20Syn_filename )

	mf_type2 = 'spikeGeneratorPoisson'						# Spike source for background inputs
	mf_poisson = nml.SpikeGeneratorPoisson( id = "MF_Poisson", average_rate="5 Hz" )	# Not tuned to any data - qqq !
	
	mf_bursttype = 'transientPoissonFiringSynapse'			# Burst of MF input (as explicit input)
	mf_burst = nml.TransientPoissonFiringSynapse( id="MF_Burst", average_rate="100 Hz", delay="2000 ms", duration="500 ms", synapse=MF20Syn_type.id, spike_target='./{}'.format(MF20Syn_type.id) )

	gj = nml.GapJunction( id="GJ_0", conductance="426pS" )	# GoC synapse
	
	### --------- Populations

	# Build network to specify cells and connectivity
	net = nml.Network( id="gocNetwork", type="networkWithTemperature" , temperature="23 degC" )
		
	# Create GoC population
	goc_pop = nml.Population( id=goc_type.id+"Pop", component = goc_type.id, type="populationList", size=p["nGoC"] )
	for goc in range( p["nGoC"] ):
		inst = nml.Instance( id=goc )
		goc_pop.instances.append( inst )
		inst.location = nml.Location( x=p["GoC_pos"][goc,0], y=p["GoC_pos"][goc,1], z=p["GoC_pos"][goc,2] )
	net.populations.append( goc_pop )


	### MF population
	MF_Poisson_pop = nml.Population(id=mf_poisson.id+"_pop", component=mf_poisson.id, type="populationList", size=p["nMF"])
	for mf in range( p["nMF"] ):
		inst = nml.Instance(id=mf)
		MF_Poisson_pop.instances.append( inst )
		inst.location = nml.Location( x=p["MF_pos"][mf,0], y=p["MF_pos"][mf,1], z=p["MF_pos"][mf,2] )		
	net.populations.append( MF_Poisson_pop )
	
	# Create NML document for network specification
	net_doc = nml.NeuroMLDocument( id=net.id )
	net_doc.networks.append( net )
	net_doc.includes.append( goc_ref )
	net_doc.includes.append( mfsyn_ref )
	net_doc.includes.append( mf20syn_ref )
	net_doc.spike_generator_poissons.append( mf_poisson )	
	net_doc.transient_poisson_firing_synapses.append( mf_burst )
	net_doc.gap_junctions.append(gj)
	
	
	### ------------ Connectivity

	### 1. Background excitatory inputs: 	MF to GoC populations
	MFProjection = nml.Projection(id="MFtoGoC", presynaptic_population=MF_Poisson_pop.id, postsynaptic_population=goc_pop.id, synapse=MFSyn_type.id)
	net.projections.append(MFProjection)

	# MF -> GoC synapses (syn_count gives an integer scaling of MF synapse strength)
	nMFSyn = p["MF_GoC_pairs"].shape[1]
	ctr=0
	for syn in range( nMFSyn ):
		mf, goc = p["MF_GoC_pairs"][:, syn]
		for syn_count in range(p["MF_GoC_wt"][ctr]):
			conn2 = nml.Connection(id=ctr, pre_cell_id='../{}/{}/{}'.format(MF_Poisson_pop.id, mf, mf_poisson.id), post_cell_id='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id), post_segment_id='0', post_fraction_along="0.5")	#on soma
			MFProjection.connections.append(conn2)
			ctr+=1

	### 2. Perturbation as High Freq MF Inputs
	ctr=0
	for goc in p["Burst_GoC"]:
		for jj in range( p["nBurst"] ):				# Each Perturbed GoC gets nBurst random Burst sources
			inst = nml.ExplicitInput( id=ctr, target='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id), input=mf_burst.id, synapse=MF20Syn_type.id, spikeTarget='./{}'.format(MF20Syn_type.id))
			net.explicit_inputs.append( inst )
			ctr += 1

	### 3. Electrical coupling between GoCs
		
	GoCCoupling = nml.ElectricalProjection( id="gocGJ", presynaptic_population=goc_pop.id, postsynaptic_population=goc_pop.id )
	net.electrical_projections.append( GoCCoupling )
	dend_id = [1,2,5]
	for jj in range( p["GJ_pairs"].shape[0] ):
		conn = nml.ElectricalConnectionInstanceW( id=jj, pre_cell='../{}/{}/{}'.format(goc_pop.id, p["GJ_pairs"][jj,0], goc_type.id), pre_segment=dend_id[p["GJ_loc"][jj,0]], pre_fraction_along='0.5', post_cell='../{}/{}/{}'.format(goc_pop.id, p["GJ_pairs"][jj,1], goc_type.id), post_segment=dend_id[p["GJ_loc"][jj,1]], post_fraction_along='0.5', synapse=gj.id, weight=p["GJ_wt"][jj] )
		GoCCoupling.electrical_connection_instance_ws.append( conn )
		
		
	### --------------  Write files
		
	net_filename = 'gocNetwork.nml'
	pynml.write_neuroml2_file( net_doc, net_filename )

	simid = 'sim_gocnet_'+goc_type.id+'_run_{}'.format(runid)
	ls = LEMSSimulation( simid, duration=duration, dt=dt, simulation_seed=seed )
	ls.assign_simulation_target( net.id )
	ls.include_neuroml2_file( net_filename)
	ls.include_neuroml2_file( goc_filename)
	ls.include_neuroml2_file( MFSyn_filename)
	ls.include_neuroml2_file( MF20Syn_filename)
	
	
	# Specify outputs
	eof0 = 'Events_file'
	ls.create_event_output_file(eof0, "%s.v.spikes"%simid,format='ID_TIME')
	for jj in range( goc_pop.size):
		ls.add_selection_to_event_output_file( eof0, jj, '{}/{}/{}'.format( goc_pop.id, jj, goc_type.id), 'spike' )
		
	of0 = 'Volts_file'
	ls.create_output_file(of0, "%s.v.dat"%simid)
	for jj in range( goc_pop.size ):
		ls.add_column_to_output_file(of0, jj, '{}/{}/{}/v'.format( goc_pop.id, jj, goc_type.id))
		
	#Create Lems file to run
	lems_simfile = ls.save_to_file()

	if run:
		res = pynml.run_lems_with_jneuroml_neuron( lems_simfile, max_memory="2G", nogui=True, plot=False)
	else:
		res = pynml.run_lems_with_jneuroml_neuron( lems_simfile, max_memory="2G", only_generate_scripts = True, compile_mods = False, nogui=True, plot=False)
	
	
	return res
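A hedged example of driving the network builder above; all parameter values here are illustrative, and the call assumes the parameter pickle and the included .nml files are present:

# Build the Golgi cell network for run index 0 and generate the NEURON scripts without running them.
res = create_GoC_network(duration=2000, dt=0.025, seed=123, runid=0, run=False)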
Code example #17
def generate_lems_file_for_neuroml(sim_id, 
                                   neuroml_file, 
                                   target, 
                                   duration, 
                                   dt, 
                                   lems_file_name,
                                   target_dir,
                                   gen_plots_for_all_v = True,
                                   gen_saves_for_all_v = True,
                                   copy_neuroml = True,
                                   seed=None):
                                       
    
    if seed:
        random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
    
    file_name_full = '%s/%s'%(target_dir,lems_file_name)
    
    print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))
    
    ls = LEMSSimulation(sim_id, duration, dt, target)
    
    nml_doc = read_neuroml2_file(neuroml_file)
    
    quantities_saved = []
    
    if not copy_neuroml:
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
    else:
        if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        
        neuroml_file_name = os.path.basename(neuroml_file)
        
        ls.include_neuroml2_file(neuroml_file_name, include_included=False)
        
        
        for include in nml_doc.includes:
            incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
            print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
            shutil.copy(incl_curr, target_dir)
            ls.include_neuroml2_file(include.href, include_included=False)
            
            sub_doc = read_neuroml2_file(incl_curr)
        
            for include in sub_doc.includes:
                incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
                print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
                shutil.copy(incl_curr, target_dir)
                ls.include_neuroml2_file(include.href, include_included=False)
                
                
    if gen_plots_for_all_v or gen_saves_for_all_v:
        
        for network in nml_doc.networks:
            for population in network.populations:
                size = population.size
                component = population.component
                
                quantity_template = "%s[%i]/v"
                if population.type and population.type == 'populationList':
                    quantity_template = "%s/%i/"+component+"/v"
                
                if gen_plots_for_all_v:
                    print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))
   
                    disp0 = 'DispPop__%s'%population.id
                    ls.create_display(disp0, "Voltages of %s"%disp0, "-90", "50")
                    for i in range(size):
                        quantity = quantity_template%(population.id, i)
                        ls.add_line_to_display(disp0, "v %s"%safe_variable(quantity), quantity, "1mV", get_next_hex_color())
                
                if gen_saves_for_all_v:
                    print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))
   
                    of0 = 'Volts_file__%s'%population.id
                    ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
                    for i in range(size):
                        quantity = quantity_template%(population.id, i)
                        ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                        quantities_saved.append(quantity)
                        
        
    ls.save_to_file(file_name=file_name_full)
    
    return quantities_saved
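A usage sketch matching this (older) signature of the helper; file, network and simulation names are placeholders:

# Creates LEMS_sim1.xml in the current directory, plotting and saving v for every cell in the network.
quantities = generate_lems_file_for_neuroml('sim1',             # sim_id
                                            'network.net.nml',  # neuroml_file
                                            'MyNetwork',        # target: id of the <network>
                                            1000,               # duration (ms)
                                            0.025,              # dt (ms)
                                            'LEMS_sim1.xml',
                                            '.',
                                            copy_neuroml=False,
                                            seed=1234)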
Code example #18
def analyse_cell(dataset_id, type, info, nogui = False, densities=False, analysis_dir='../../data/'):
    
    reference = '%s_%s'%(type,dataset_id)
    cell_file = '%s/%s.cell.nml'%(type,reference)
    
    print("====================================\n\n   Analysing cell: %s, dataset %s\n"%(cell_file,dataset_id))
    
    nml_doc = pynml.read_neuroml2_file(cell_file)
    notes = nml_doc.cells[0].notes if len(nml_doc.cells)>0 else nml_doc.izhikevich2007_cells[0].notes
    meta_nml = eval(notes[notes.index('{'):])
    summary = "Fitness: %s (max evals: %s, pop: %s)"%(meta_nml['fitness'],meta_nml['max_evaluations'],meta_nml['population_size'])
    print(summary)
    
    images = 'summary/%s_%s.png'
    if_iv_data_files = 'summary/%s_%s.dat'
    

    data, v_sub, curents_sub, freqs, curents_spike = get_if_iv_for_dataset('%s%s_analysis.json'%(analysis_dir,dataset_id))
    
    if densities:

        dataset = {}
        seed = meta_nml['seed']
        if isinstance(seed, tuple):
            seed = seed[0]
        layer = str(data['location'].split(',')[-1].strip().replace(' ',''))
        ref = '%s_%s_%s'%(dataset_id,layer,int(seed))

        dataset['id'] = dataset_id
        dataset['reference'] = ref
        metas = ['aibs_cre_line','aibs_dendrite_type','location']
        for m in metas:
            dataset[m] = str(data[m])

        metas2 = ['fitness','population_size','seed']
        for m in metas2:
            dataset[m] = meta_nml[m]
            
        # Assume images below already generated...
        if type=='HH':
            
            
            cell = nml_doc.cells[0]
            
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]
        
            info['datasets'][ref] = dataset
            
        elif type=='Izh':
            
            dataset['tuned_cell_info'] = {}
            izh_cell = nml_doc.izhikevich2007_cells[0]
                        
            for p in ['C','a','b','c','d','k','vpeak','vr','vt']:
            
                dataset['tuned_cell_info'][p] = get_value_in_si(getattr(izh_cell, p))
            
            '''
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]'''
        
        info['datasets'][ref] = dataset
        
    else:

        traces_ax, if_ax, iv_ax = generate_current_vs_frequency_curve(cell_file, 
                                            reference, 
                                            simulator = 'jNeuroML_NEURON',
                                            start_amp_nA =         -0.1, 
                                            end_amp_nA =           0.4, 
                                            step_nA =              0.01, 
                                            analysis_duration =    1000, 
                                            analysis_delay =       50,
                                            plot_voltage_traces =  False,
                                            plot_if =              not nogui,
                                            plot_iv =              not nogui, 
                                            xlim_if =              [-200, 400],
                                            ylim_if =              [-10, 120],
                                            xlim_iv =              [-200, 400],
                                            ylim_iv =              [-120, -40],
                                            save_if_figure_to =    images%(reference, 'if'), 
                                            save_iv_figure_to =    images%(reference, 'iv'),
                                            save_if_data_to =      if_iv_data_files%(reference, 'if'), 
                                            save_iv_data_to =      if_iv_data_files%(reference, 'iv'), 
                                            show_plot_already = False,
                                            return_axes = True)


        iv_ax.plot(curents_sub, v_sub,   color='#ff2222',marker='o', linestyle='',zorder=1)   
        if_ax.plot(curents_spike, freqs ,color='#ff2222',marker='o', linestyle='',zorder=1)

        iv_ax.get_figure().savefig(images%(reference, 'iv'),bbox_inches='tight')
        if_ax.get_figure().savefig(images%(reference, 'if'),bbox_inches='tight')
        
        
        offset = 100 # mV 
        
        ifv_x = []
        ifv_y = []
        markers = []
        lines = []
        colors = []
        
        cols = {'Izh':'r','HH':'g','AllenHH':'b'}
        
        for ii in ['if','iv']:
            for tt in ['Izh','HH','AllenHH']:
                rr = '%s_%s'%(tt,dataset_id)
                f = if_iv_data_files%(rr, ii)
                if os.path.isfile(f):
                    print("--- Opening: %s"%f)
                    data, indeces = reload_standard_dat_file(f)
                    
                    ifv_x.append(data['t'])
                    
                    if ii=='if':
                        ifv_y.append([ff-offset for ff in data[0]])
                    else:
                        ifv_y.append([vv for vv in data[0]])
                        
                    
                    markers.append('')
                    colors.append(cols[tt])
                    lines.append('-')
                    
        ifv_x.append(curents_sub)
        vvsub = [vv for vv in v_sub]
        
        ifv_y.append(vvsub)
        
        sub_color = '#888888'
        markers.append('D')
        colors.append('k')
        lines.append('')
        
        ifv_x.append(curents_spike)
        ifv_y.append([ff-offset for ff in freqs])
        
        markers.append('o')
        colors.append(sub_color)
        lines.append('')
        
        import matplotlib
        import matplotlib.pyplot as plt

        print(ifv_x)
        print(ifv_y)
        ylim = [-105, -20]
        font_size = 18
        ax1 = pynml.generate_plot(ifv_x,
                    ifv_y, 
                    summary, 
                    markers=markers,
                    colors=colors,
                    linestyles=lines,
                    show_plot_already=False,
                    xlim = [-100, 400],
                    font_size = font_size,
                    ylim = ylim,
                    title_above_plot=False)
                    
        plt.xlabel('Input current (pA)', fontsize = font_size)
        plt.ylabel("Steady membrane potential (mV)", fontsize = font_size)
        
        ax2 = ax1.twinx()
        plt.ylim([ylim[0]+offset,ylim[1]+offset])
        plt.ylabel('Firing frequency (Hz)', color=sub_color, fontsize = font_size)
        ax2.tick_params(axis='y', colors=sub_color)
        
        
        #plt.axis('off')
        
        plt.savefig(images%(reference, 'if_iv'+"_FIG"),bbox_inches='tight')
        

        temp_dir = 'temp/'

        print("Copying %s to %s"%(cell_file, temp_dir))
        shutil.copy(cell_file, temp_dir)

        net_file = generate_network_for_sweeps(type, dataset_id, '%s.cell.nml'%(reference), reference, temp_dir, data_dir=analysis_dir)

        lems_file_name = 'LEMS_Test_%s_%s.xml'%(type,dataset_id)

        generate_lems_file_for_neuroml('Test_%s_%s'%(dataset_id,type),
                                       net_file,
                                       'network_%s_%s'%(dataset_id,type), 
                                       1500, 
                                       0.01, 
                                       lems_file_name,
                                       temp_dir,
                                       gen_plots_for_all_v=False,
                                       copy_neuroml = False)

        simulator = "jNeuroML_NEURON"

        if simulator == "jNeuroML":
            results = pynml.run_lems_with_jneuroml(temp_dir+lems_file_name, 
                                                    nogui=True, 
                                                    load_saved_data=True, 
                                                    plot=False,
                                                    show_plot_already=False)
        elif simulator == "jNeuroML_NEURON":
            results = pynml.run_lems_with_jneuroml_neuron(temp_dir+lems_file_name, 
                                                    nogui=True, 
                                                    load_saved_data=True, 
                                                    plot=False,
                                                    show_plot_already=False)

        x = []
        y = []

        print(results.keys())

        tt = [t*1000 for t in results['t']]
        for i in range(len(results)-1):
            x.append(tt)
            y.append([v*1000 for v in results['Pop0/%i/%s_%s/v'%(i,type,dataset_id)]])

        pynml.generate_plot(x,
                    y, 
                    summary, 
                    show_plot_already=False,
                    ylim = [-120, 60],
                    save_figure_to = images%(reference, 'traces'),
                    title_above_plot=True)
                 
        ax = pynml.generate_plot(x,
                    y, 
                    summary, 
                    show_plot_already=False,
                    ylim = [-120, 60],
                    title_above_plot=False)
                    
        ax.set_xlabel(None)
        ax.set_ylabel(None)
        plt.axis('off')
        
        fig_file = images%(reference, 'traces'+"_FIG")
        plt.savefig(fig_file, bbox_inches='tight', pad_inches=0)
        from PIL import Image
        img = Image.open(fig_file)

        img2 = img.crop((60, 40, 660, 480))
        img2.save(fig_file)
Code example #19
def process_celldir(inputs):
    """Process cell directory"""

    count, cell_dir, nml2_cell_dir, total_count = inputs
    local_nml2_cell_dir = os.path.join("..", nml2_cell_dir)

    print(
        '\n\n************************************************************\n\n'
        'Parsing %s (cell %i/%i)\n' % (cell_dir, count, total_count))

    if os.path.isdir(cell_dir):
        old_cwd = os.getcwd()
        os.chdir(cell_dir)
    else:
        old_cwd = os.getcwd()
        os.chdir('../' + cell_dir)

    if make_zips:
        nml2_cell_dir = '%s/%s' % (zips_dir, cell_dir)
        if not os.path.isdir(nml2_cell_dir):
            os.mkdir(nml2_cell_dir)

    print("Generating into %s" % nml2_cell_dir)

    bbp_ref = None

    template_file = open('template.hoc', 'r')
    for line in template_file:
        if line.startswith('begintemplate '):
            bbp_ref = line.split(' ')[1].strip()
            print(
                ' > Assuming cell in directory %s is in a template named %s' %
                (cell_dir, bbp_ref))

    load_cell_file = 'loadcell.hoc'

    variables = {}

    variables['cell'] = bbp_ref
    variables['groups_info_file'] = groups_info_file

    template = """
///////////////////////////////////////////////////////////////////////////////
//
//   NOTE: This file is not part of the original BBP cell model distribution
//   It has been generated by ../ParseAll.py to facilitate loading of the cell
//   into NEURON for exporting the model morphology to NeuroML2
//
//////////////////////////////////////////////////////////////////////////////

load_file("stdrun.hoc")

objref cvode
cvode = new CVode()
cvode.active(1)

//======================== settings ===================================

v_init = -80

hyp_amp = -0.062866
step_amp = 0.3112968
tstop = 3000

//=================== creating cell object ===========================
load_file("import3d.hoc")
objref cell

// Using 1 to force loading of the file, in case file with same name was loaded
// before...
load_file(1, "constants.hoc")
load_file(1, "morphology.hoc")
load_file(1, "biophysics.hoc")
print "Loaded morphology and biophysics..."

load_file(1, "synapses/synapses.hoc")
load_file(1, "template.hoc")
print "Loaded template..."

load_file(1, "createsimulation.hoc")


create_cell(0)
print "Created new cell using loadcell.hoc: {{ cell }}"

define_shape()

wopen("{{ groups_info_file }}")

fprint("//Saving information on groups in this cell...\\n")

fprint("- somatic\\n")
forsec {{ cell }}[0].somatic {
    fprint("%s\\n",secname())
}

fprint("- basal\\n")
forsec {{ cell }}[0].basal {
    fprint("%s\\n",secname())
}

fprint("- axonal\\n")
forsec {{ cell }}[0].axonal {
    fprint("%s\\n",secname())
}
fprint("- apical\\n")
forsec {{ cell }}[0].apical {
    fprint("%s\\n",secname())
}
wopen()
        """

    t = Template(template)

    contents = t.render(variables)

    load_cell = open(load_cell_file, 'w')
    load_cell.write(contents)
    load_cell.close()

    print(' > Written %s' % load_cell_file)

    if os.path.isfile(load_cell_file):

        cell_info = parse_cell_info_file(cell_dir)

        nml_file_name = "%s.net.nml" % bbp_ref
        nml_net_loc = "%s/%s" % (local_nml2_cell_dir, nml_file_name)
        nml_cell_file = "%s_0_0.cell.nml" % bbp_ref
        nml_cell_loc = "%s/%s" % (local_nml2_cell_dir, nml_cell_file)

        print(' > Loading %s and exporting to %s' %
              (load_cell_file, nml_net_loc))

        export_to_neuroml2(load_cell_file,
                           nml_net_loc,
                           separateCellFiles=True,
                           includeBiophysicalProperties=False)

        print(' > Exported to: %s and %s using %s' %
              (nml_net_loc, nml_cell_loc, load_cell_file))

        nml_doc = pynml.read_neuroml2_file(nml_cell_loc)

        cell = nml_doc.cells[0]

        print(' > Adding groups from: %s' % groups_info_file)
        groups = {}
        current_group = None
        for line in open(groups_info_file):
            if not line.startswith('//'):
                if line.startswith('- '):
                    current_group = line[2:-1]
                    print(' > Adding group: [%s]' % current_group)
                    groups[current_group] = []
                else:
                    section = line.split('.')[1].strip()
                    segment_group = section.replace('[', '_').replace(']', '')
                    groups[current_group].append(segment_group)

        for g in groups.keys():
            new_seg_group = neuroml.SegmentGroup(id=g)
            cell.morphology.segment_groups.append(new_seg_group)
            for sg in groups[g]:
                new_seg_group.includes.append(neuroml.Include(sg))
            if g in ['basal', 'apical']:
                new_seg_group.inhomogeneous_parameters.append(
                    neuroml.InhomogeneousParameter(
                        id="PathLengthOver_" + g,
                        variable="p",
                        metric="Path Length from root",
                        proximal=neuroml.ProximalDetails(
                            translation_start="0")))

        ignore_chans = [
            'Ih', 'Ca_HVA', 'Ca_LVAst', 'Ca', "SKv3_1", "SK_E2",
            "CaDynamics_E2", "Nap_Et2", "Im", "K_Tst", "NaTa_t", "K_Pst",
            "NaTs2_t"
        ]

        # ignore_chans=['StochKv','StochKv_deterministic']
        ignore_chans = []

        bp, incl_chans = get_biophysical_properties(
            cell_info['e-type'],
            ignore_chans=ignore_chans,
            templates_json="../templates.json")

        cell.biophysical_properties = bp

        print("Set biophysical properties")

        notes = ''
        notes += \
            "\n\nExport of a cell model obtained from the BBP Neocortical" \
            "Microcircuit Collaboration Portal into NeuroML2" \
            "\n\n******************************************************\n*" \
            "  This export to NeuroML2 has not yet been fully validated!!" \
            "\n*  Use with caution!!\n***********************************" \
            "*******************\n\n"

        if len(ignore_chans) > 0:
            notes += "Ignored channels = %s\n\n" % ignore_chans

        notes += "For more information on this cell model see: " \
            "https://bbp.epfl.ch/nmc-portal/microcircuit#/metype/%s/" \
            "details\n\n" % cell_info['me-type']

        cell.notes = notes
        for channel in incl_chans:

            nml_doc.includes.append(neuroml.IncludeType(href="%s" % channel))

            if make_zips:
                print("Copying %s to zip folder" % channel)
                shutil.copyfile('../../NeuroML2/%s' % channel,
                                '%s/%s' % (local_nml2_cell_dir, channel))

        pynml.write_neuroml2_file(nml_doc, nml_cell_loc)

        stim_ref = 'stepcurrent3'
        stim_ref_hyp = '%s_hyp' % stim_ref
        stim_sim_duration = 3000
        stim_hyp_amp, stim_amp = get_stimulus_amplitudes(bbp_ref)
        stim_del = '700ms'
        stim_dur = '2000ms'

        new_net_loc = "%s/%s.%s.net.nml" % (local_nml2_cell_dir, bbp_ref,
                                            stim_ref)
        new_net_doc = pynml.read_neuroml2_file(nml_net_loc)

        new_net_doc.notes = notes

        stim_hyp = neuroml.PulseGenerator(id=stim_ref_hyp,
                                          delay="0ms",
                                          duration="%sms" % stim_sim_duration,
                                          amplitude=stim_hyp_amp)
        new_net_doc.pulse_generators.append(stim_hyp)
        stim = neuroml.PulseGenerator(id=stim_ref,
                                      delay=stim_del,
                                      duration=stim_dur,
                                      amplitude=stim_amp)
        new_net_doc.pulse_generators.append(stim)

        new_net = new_net_doc.networks[0]

        pop_id = new_net.populations[0].id
        pop_comp = new_net.populations[0].component
        input_list = neuroml.InputList(id="%s_input" % stim_ref_hyp,
                                       component=stim_ref_hyp,
                                       populations=pop_id)

        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        input_list = neuroml.InputList(id="%s_input" % stim_ref,
                                       component=stim_ref,
                                       populations=pop_id)

        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(cell_dir,
                                       new_net_loc,
                                       "network",
                                       stim_sim_duration,
                                       0.025,
                                       "LEMS_%s.xml" % cell_dir,
                                       local_nml2_cell_dir,
                                       copy_neuroml=False,
                                       seed=1234)

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        pop = neuroml.Population(id="Pop_%s" % bbp_ref,
                                 component=bbp_ref + '_0_0',
                                 type="populationList")

        inst = neuroml.Instance(id="0")
        pop.instances.append(inst)

        width = 6
        X = count % width
        Z = (count - X) / width
        inst.location = neuroml.Location(x=300 * X, y=0, z=300 * Z)

        count += 1

        if make_zips:
            zip_file = "%s/%s.zip" % (zips_dir, cell_dir)
            print("Creating zip file: %s" % zip_file)
            with zipfile.ZipFile(zip_file, 'w') as myzip:

                for next_file in os.listdir(local_nml2_cell_dir):
                    next_file = '%s/%s' % (local_nml2_cell_dir, next_file)
                    arcname = next_file[len(zips_dir):]
                    print("Adding : %s as %s" % (next_file, arcname))
                    myzip.write(next_file, arcname)

        os.chdir(old_cwd)

        return nml_cell_file, pop
Code example #20
File: TestStimCells.py  Project: JustasB/Mig3DTest
from pyneuroml import pynml
from pyneuroml.lems import generate_lems_file_for_neuroml

import neuroml as nml

ref = "TestStimCells"
number_cells = 1
nml_file0 = "PartialBulb_%iMTCells.net.nml"%number_cells    
nml_file1 = nml_file0.replace('.net.nml', '_%s.net.nml'%ref)


nml_doc = pynml.read_neuroml2_file(nml_file0)


for i in range(number_cells):
    stim = "2nA"
    input_id = ("input_%s_%i"%(stim, i))
    pg = nml.PulseGenerator(id=input_id,
                                delay="40ms",
                                duration="100ms",
                                amplitude=stim)
    nml_doc.pulse_generators.append(pg)

    pop = 'Pop_Mitral_0_%i'%i
    # Add these to cells
    input_list = nml.InputList(id=input_id,
                             component=pg.id,
                             populations=pop)
    input = nml.Input(id='0', 
                          target="../%s[%i]"%(pop, i), 
Code example #21
def create_GoC(runid):

    ### ---------- Load Params
    noPar = True
    pfile = Path('cellparams_file.pkl')
    keepFile = open('useParams_FI_14_25.pkl', 'rb')
    runid = pkl.load(keepFile)[runid]
    keepFile.close()
    print('Running morphology for parameter set = ', runid)

    if pfile.exists():
        print('Reading parameters from file:')
        file = open('cellparams_file.pkl', 'rb')
        params_list = pkl.load(file)
        if len(params_list) > runid:
            p = params_list[runid]
            noPar = False  # parameters found in the file; do not overwrite them with defaults below
        file.close()
    if noPar:
        p = icp.get_channel_params(runid)

    # Creating document for cell
    gocID = 'Golgi_040408_C1_' + format(runid, '05d')
    goc = nml.Cell(id=gocID)  #--------simid
    cell_doc = nml.NeuroMLDocument(id=gocID)
    cell_doc.cells.append(goc)

    ### Load morphology
    morpho_fname = 'Golgi_040408_C1.cell.nml'
    morpho_file = pynml.read_neuroml2_file(morpho_fname)
    morpho = morpho_file.cells[0].morphology
    cell_doc.includes.append(nml.IncludeType(href=morpho_fname))
    goc.morphology = morpho

    ### ---------- Channels
    na_fname = 'Golgi_Na.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=na_fname))
    nar_fname = 'Golgi_NaR.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=nar_fname))
    nap_fname = 'Golgi_NaP.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=nap_fname))

    ka_fname = 'Golgi_KA.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=ka_fname))
    sk2_fname = 'Golgi_SK2.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=sk2_fname))
    km_fname = 'Golgi_KM.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=km_fname))
    kv_fname = 'Golgi_KV.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=kv_fname))
    bk_fname = 'Golgi_BK.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=bk_fname))

    cahva_fname = 'Golgi_CaHVA.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=cahva_fname))
    calva_fname = 'Golgi_CaLVA.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=calva_fname))

    hcn1f_fname = 'Golgi_HCN1f.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=hcn1f_fname))
    hcn1s_fname = 'Golgi_HCN1s.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=hcn1s_fname))
    hcn2f_fname = 'Golgi_HCN2f.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=hcn2f_fname))
    hcn2s_fname = 'Golgi_HCN2s.channel.nml'
    cell_doc.includes.append(nml.IncludeType(href=hcn2s_fname))

    leak_fname = 'Golgi_lkg.channel.nml'
    #leak_ref 	= nml.IncludeType( href=leak_fname)
    cell_doc.includes.append(nml.IncludeType(href=leak_fname))
    calc_fname = 'Golgi_CALC.nml'
    cell_doc.includes.append(nml.IncludeType(href=calc_fname))
    calc = pynml.read_neuroml2_file(
        calc_fname).decaying_pool_concentration_models[0]

    calc2_fname = 'Golgi_CALC2.nml'
    cell_doc.includes.append(nml.IncludeType(href=calc2_fname))

    goc_2pools_fname = 'GoC_2Pools.cell.nml'
    ### ------Biophysical Properties
    biophys = nml.BiophysicalProperties(id='biophys_' + gocID)
    goc.biophysical_properties = biophys

    # Inproperties
    '''
	res = nml.Resistivity( p["ra"] )		# --------- "0.1 kohm_cm" 
	ca_species = nml.Species( id="ca", ion="ca", concentration_model=calc.id, initial_concentration ="5e-5 mM", initial_ext_concentration="2 mM" )
	ca2_species = nml.Species( id="ca2", ion="ca2", concentration_model="Golgi_CALC2", initial_concentration ="5e-5 mM", initial_ext_concentration="2 mM" )
	intracellular = nml.IntracellularProperties(  )
	intracellular.resistivities.append( res )
	intracellular.species.append( ca_species )
	'''
    intracellular = pynml.read_neuroml2_file(goc_2pools_fname).cells[
        0].biophysical_properties.intracellular_properties
    biophys.intracellular_properties = intracellular

    # Membrane properties ------- cond
    memb = nml.MembraneProperties()
    biophys.membrane_properties = memb

    #pynml.read_neuroml2_file(leak_fname).ion_channel[0].id -> can't read ion channel passive
    chan_leak = nml.ChannelDensity(ion_channel="LeakConductance",
                                   cond_density=p["leak_cond"],
                                   erev="-55 mV",
                                   ion="non_specific",
                                   id="Leak")
    memb.channel_densities.append(chan_leak)

    chan_na = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(na_fname).ion_channel[0].id,
        cond_density=p["na_cond"],
        erev="87.39 mV",
        ion="na",
        id="Golgi_Na_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_na)
    chan_nap = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(nap_fname).ion_channel[0].id,
        cond_density=p["nap_cond"],
        erev="87.39 mV",
        ion="na",
        id="Golgi_NaP_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_nap)
    chan_nar = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(nar_fname).ion_channel[0].id,
        cond_density=p["nar_cond"],
        erev="87.39 mV",
        ion="na",
        id="Golgi_NaR_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_nar)
    chan_ka = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(ka_fname).ion_channel[0].id,
        cond_density=p["ka_cond"],
        erev="-84.69 mV",
        ion="k",
        id="Golgi_KA_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_ka)
    chan_sk = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(sk2_fname).ion_channel_kses[0].id,
        cond_density=p["sk2_cond"],
        erev="-84.69 mV",
        ion="k",
        id="Golgi_KAHP_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_sk)
    chan_kv = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(kv_fname).ion_channel[0].id,
        cond_density=p["kv_cond"],
        erev="-84.69 mV",
        ion="k",
        id="Golgi_KV_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_kv)
    chan_km = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(km_fname).ion_channel[0].id,
        cond_density=p["km_cond"],
        erev="-84.69 mV",
        ion="k",
        id="Golgi_KM_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_km)
    chan_bk = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(bk_fname).ion_channel[0].id,
        cond_density=p["bk_cond"],
        erev="-84.69 mV",
        ion="k",
        id="Golgi_BK_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_bk)
    chan_h1f = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(hcn1f_fname).ion_channel[0].id,
        cond_density=p["hcn1f_cond"],
        erev="-20 mV",
        ion="h",
        id="Golgi_hcn1f_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_h1f)
    chan_h1s = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(hcn1s_fname).ion_channel[0].id,
        cond_density=p["hcn1s_cond"],
        erev="-20 mV",
        ion="h",
        id="Golgi_hcn1s_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_h1s)
    chan_h2f = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(hcn2f_fname).ion_channel[0].id,
        cond_density=p["hcn2f_cond"],
        erev="-20 mV",
        ion="h",
        id="Golgi_hcn2f_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_h2f)
    chan_h2s = nml.ChannelDensity(
        ion_channel=pynml.read_neuroml2_file(hcn2s_fname).ion_channel[0].id,
        cond_density=p["hcn2s_cond"],
        erev="-20 mV",
        ion="h",
        id="Golgi_hcn2s_soma_group",
        segment_groups="soma_group")
    memb.channel_densities.append(chan_h2s)
    chan_hva = nml.ChannelDensityNernst(
        ion_channel=pynml.read_neuroml2_file(cahva_fname).ion_channel[0].id,
        cond_density=p["cahva_cond"],
        ion="ca",
        id="Golgi_Ca_HVA_soma_group",
        segment_groups="soma_group")
    memb.channel_density_nernsts.append(chan_hva)
    chan_lva = nml.ChannelDensityNernst(
        ion_channel=pynml.read_neuroml2_file(calva_fname).ion_channel[0].id,
        cond_density=p["calva_cond"],
        ion="ca2",
        id="Golgi_Ca_LVA_soma_group",
        segment_groups="soma_group")
    memb.channel_density_nernsts.append(chan_lva)

    memb.spike_threshes.append(nml.SpikeThresh("0 mV"))
    memb.specific_capacitances.append(
        nml.SpecificCapacitance("1.0 uF_per_cm2"))
    memb.init_memb_potentials.append(nml.InitMembPotential("-60 mV"))

    goc_filename = '{}.cell.nml'.format(gocID)
    pynml.write_neuroml2_file(cell_doc, goc_filename)

    return True
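A hedged usage sketch for the cell builder above; the run index is illustrative, and the call assumes the parameter pickles and channel .nml files are present:

# Writes Golgi_040408_C1_<paramset>.cell.nml for the parameter set that run index 0 maps to.
create_GoC(runid=0)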
Code example #22
File: ExportSWC.py  Project: sanjayankur31/pyNeuroML
def convert_to_swc(nml_file_name, add_comments=False, target_dir=None):
    '''
    Find all <cell> elements and create one SWC file for each
    '''
    global line_count
    global line_index_vs_distals
    global line_index_vs_proximals

    # Reset
    line_count = 1
    line_index_vs_distals = {}
    line_index_vs_proximals = {}

    if target_dir is None:
        base_dir = os.path.dirname(os.path.realpath(nml_file_name))
        target_dir = base_dir
    nml_doc = pynml.read_neuroml2_file(nml_file_name,
                                       include_includes=True,
                                       verbose=False,
                                       optimized=True)

    lines = []
    comment_lines = []

    for cell in nml_doc.cells:

        swc_file_name = '%s/%s.swc' % (target_dir, cell.id)
        swc_file = open(swc_file_name, 'w')

        info = "Cell %s taken from NeuroML file %s converted to SWC" % (
            cell.id, nml_file_name)
        print_comment_v(info)
        comment_lines.append(info)
        comment_lines.append('Using pyNeuroML v%s' % pynmlv)

        group = 'soma_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 1)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))

        soma_seg_count = len(seg_ids)
        lines += lines_sg

        group = 'dendrite_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 3)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))
        dend_seg_count = len(seg_ids)
        lines += lines_sg

        group = 'axon_group'
        lines_sg, seg_ids = _get_lines_for_seg_group(cell, group, 2)
        comment_lines.append(
            'For group: %s, found %i NeuroML segments, resulting in %i SWC lines'
            % (group, len(seg_ids), len(lines_sg)))
        axon_seg_count = len(seg_ids)
        lines += lines_sg

        if not len(cell.morphology.segments
                   ) == soma_seg_count + dend_seg_count + axon_seg_count:
            raise Exception(
                "The numbers of the segments in groups: soma_group+dendrite_group+axon_group (%i), is not the same as total number of segments (%s)! All bets are off!"
                % (soma_seg_count + dend_seg_count + axon_seg_count,
                   len(cell.morphology.segments)))

        if add_comments:
            for l in comment_lines:
                swc_file.write('# %s\n' % l)

        for l in lines:
            print(l)
            swc_file.write('%s\n' % l)

        swc_file.close()

        print("Written to %s" % swc_file_name)
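
A minimal usage sketch for the converter above (the cell file name is a hypothetical placeholder; the NeuroML2 cell is expected to define soma_group, dendrite_group and axon_group segment groups):

convert_to_swc('SomeCell.cell.nml', add_comments=True, target_dir='.')
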
コード例 #23
0
                         seed = seed,
                         known_target_values = {},
                         dry_run = False)
 
                         
compare('%s/%s.Pop0.v.dat'%(r1['run_directory'], r1['reference']), show_plot_already=False)

'''
r2={}
r2['run_directory'] ='NT_AllenIzh2stage_STAGE2_Tue_Dec__1_17.26.38_2015'
r2['reference'] = 'AllenIzh2stage_STAGE2'
'''

compare('%s/%s.Pop0.v.dat'%(r2['run_directory'], r2['reference']), show_plot_already=True, dataset=dataset)

final_network = '%s/%s.net.nml'%(r2['run_directory'], ref)

nml_doc = pynml.read_neuroml2_file(final_network)

cell = nml_doc.izhikevich2007_cells[0]

print("Extracted cell: %s from tuned model"%cell.id)

new_id = '%s_%s'%(type, dataset)
new_cell_doc = neuroml.NeuroMLDocument(id=new_id)
cell.id = new_id

cell.notes = "Cell model tuned to Allen Institute Cell Types Database, dataset: "+ \
             "%s\n\nTuning procedure metadata:\n\n%s\n"%(dataset, pp.pformat(r2))

new_cell_doc.izhikevich2007_cells.append(cell)
new_cell_file = 'tuned_cells/%s.cell.nml'%new_id

コード例 #24
0
    nml_file = '%s/%s.net.nml' % (root_dir, reference)
    writers.NeuroMLWriter.write(nml_doc, nml_file)

    print("Written network file to: " + nml_file)

    ############################################
    #  Create the LEMS file with helper method
    sim_id = 'Sim%s' % reference
    target = net.id
    duration = 1000
    dt = 0.025
    lems_file_name = 'LEMS_%s.xml' % sim_id
    target_dir = root_dir

    cell = read_neuroml2_file('%s/%s' % (root_dir, cell_file)).cells[0]
    print('Extracting channel info from %s' % cell.id)
    gen_plots_for_quantities = {}
    gen_saves_for_quantities = {}

    vs = 'MembranePotentials'
    v_dat = 'MembranePotentials.dat'

    all_vs = []
    gen_plots_for_quantities[vs] = all_vs
    gen_saves_for_quantities[v_dat] = all_vs

    for seg in cell.morphology.segments:
        v_quantity = '%s/0/%s/%s/v' % (pop.id, cell_comp, seg.id)
        all_vs.append(v_quantity)
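
The two dicts built above are in the form expected by the LEMS helper shown in a later example; a sketch, reusing only variables already defined in this fragment, of how they would typically be passed on:

    generate_lems_file_for_neuroml(sim_id, nml_file, target, duration, dt,
                                   lems_file_name, target_dir,
                                   gen_plots_for_quantities=gen_plots_for_quantities,
                                   gen_saves_for_quantities=gen_saves_for_quantities,
                                   copy_neuroml=False)
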
コード例 #25
0
def generate_channel_density_plots(nml2_file, text_densities=False, passives_erevs=False, target_directory=None):
    
    nml_doc = read_neuroml2_file(nml2_file, include_includes=True, verbose=False, optimized=True)
    
    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)
    svg_files = []
    all_info = {}
    
    for cell in cell_elements:
        info = {}
        all_info[cell.id] = info
        print_comment_v("Extracting channel density info from %s"%cell.id)
        sb = ''
        ions = {}
        maxes = {}
        mins = {}
        row = 0
        na_ions = []
        k_ions = []
        ca_ions = []
        other_ions = []
        
        if isinstance(cell, Cell2CaPools):
            cds = cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_densities + \
                cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_density_nernsts
        elif isinstance(cell, Cell):
            cds = cell.biophysical_properties.membrane_properties.channel_densities + \
                cell.biophysical_properties.membrane_properties.channel_density_nernsts
              
        epas = None
        ena = None
        ek = None
        eh = None
        eca = None
              
        for cd in cds:
            dens_si = get_value_in_si(cd.cond_density)
            print_comment_v("cd: %s, ion_channel: %s, ion: %s, density: %s (SI: %s)"%(cd.id,cd.ion_channel,cd.ion,cd.cond_density,dens_si))
            
            ions[cd.ion_channel] = cd.ion
            erev_V = get_value_in_si(cd.erev) if hasattr(cd,'erev') else None
            erev = '%s mV'%format_float(erev_V*1000) if hasattr(cd,'erev') else None
            
            if cd.ion == 'na':
                if not cd.ion_channel in na_ions: na_ions.append(cd.ion_channel)
                ena = erev
                info['ena']=erev_V
            elif cd.ion == 'k':
                if not cd.ion_channel in k_ions: k_ions.append(cd.ion_channel)
                ek = erev
                info['ek']=erev_V
            elif cd.ion == 'ca':
                if not cd.ion_channel in ca_ions: ca_ions.append(cd.ion_channel)
                eca = erev
                info['eca']=erev_V
            else:
                if not cd.ion_channel in other_ions: other_ions.append(cd.ion_channel)
                if cd.ion == 'non_specific':
                    epas = erev
                    info['epas']=erev_V
                if cd.ion == 'h':
                    eh = erev
                    info['eh']=erev_V
            
            if cd.ion_channel in maxes:
                if dens_si>maxes[cd.ion_channel]: maxes[cd.ion_channel]=dens_si
            else: 
                maxes[cd.ion_channel]=dens_si
            if cd.ion_channel in mins:
                if dens_si<mins[cd.ion_channel]: mins[cd.ion_channel]=dens_si
            else: 
                mins[cd.ion_channel]=dens_si
                
        for ion_channel in na_ions + k_ions + ca_ions + other_ions:
            col = get_ion_color(ions[ion_channel])
            info[ion_channel]={'max':maxes[ion_channel],'min':mins[ion_channel]}
            
            if maxes[ion_channel]>0:
                sb+=_get_rect(ion_channel, row, maxes[ion_channel],mins[ion_channel],col[0],col[1],col[2],text_densities)
                row+=1
            
        if passives_erevs:
            
            if ena:
                sb+=add_text(row, "E Na = %s "%ena)
                row+=1
            if ek:
                sb+=add_text(row, "E K = %s "%ek)
                row+=1
            if eca:
                sb+=add_text(row, "E Ca = %s"%eca)
                row+=1
            if eh:
                sb+=add_text(row, "E H = %s"%eh)
                row+=1
            if epas:
                sb+=add_text(row, "E pas = %s"%epas)
                row+=1
                
            for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                sb+=add_text(row, "C (%s) = %s"%(sc.segment_groups, sc.value))
                
                info['specific_capacitance_%s'%sc.segment_groups]=get_value_in_si(sc.value)
                row+=1
                
                
            #sb+='<text x="%s" y="%s" fill="black" font-family="Arial">%s</text>\n'%(width/3., (height+spacing)*(row+1), text)
        
            
        sb="<?xml version='1.0' encoding='UTF-8'?>\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\""+str(width+text_densities*200)+"\" height=\""+str((height+spacing)*row)+"\">\n"+sb+"</svg>\n"

        print(sb)
        svg_file = nml2_file+"_channeldens.svg"
        if target_directory:
            svg_file = target_directory+"/"+svg_file.split('/')[-1]
        svg_files.append(svg_file)
        sf = open(svg_file,'w')
        sf.write(sb)
        sf.close()
        print_comment_v("Written to %s"%os.path.abspath(svg_file))
        
        pp.pprint(all_info)
        
    return svg_files, all_info
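
A minimal usage sketch for the function above (the cell file name is a hypothetical placeholder):

svg_files, info = generate_channel_density_plots('SomeCell.cell.nml',
                                                 text_densities=True,
                                                 passives_erevs=True)
print(svg_files)
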
コード例 #26
0
def create_GoC_network(duration=2000, dt=0.025, seed=123, runid=0, run=False):

    keepFile = open('useParams_FI_14_25.pkl', 'rb')
    runid = pkl.load(keepFile)[runid]
    keepFile.close()

    ### ---------- Component types
    gocID = 'Golgi_040408_C1_' + format(runid, '05d')
    goc_filename = '{}.cell.nml'.format(gocID)
    goc_type = pynml.read_neuroml2_file(goc_filename).cells[0]

    ### --------- Populations

    # Build network to specify cells and connectivity
    net = nml.Network(id='MorphoNet_' + format(runid, '05d'),
                      type="networkWithTemperature",
                      temperature="23 degC")

    # Create GoC population
    goc_pop = nml.Population(id=goc_type.id + "Pop",
                             component=goc_type.id,
                             type="populationList",
                             size=1)
    inst = nml.Instance(id=0)
    goc_pop.instances.append(inst)
    inst.location = nml.Location(x=0, y=0, z=0)
    net.populations.append(goc_pop)

    # Create NML document for network specification
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(nml.IncludeType(href=goc_filename))

    ### --------------  Write files

    net_filename = 'Morpho1_' + format(runid, '05d') + '.nml'
    pynml.write_neuroml2_file(net_doc, net_filename)

    simid = 'sim_morpho1_' + goc_type.id
    ls = LEMSSimulation(simid, duration=duration, dt=dt, simulation_seed=seed)
    ls.assign_simulation_target(net.id)
    ls.include_neuroml2_file(net_filename)
    ls.include_neuroml2_file(goc_filename)

    # Specify outputs
    eof0 = 'Events_file'
    ls.create_event_output_file(eof0, "%s.v.spikes" % simid, format='ID_TIME')
    for jj in range(goc_pop.size):
        ls.add_selection_to_event_output_file(
            eof0, jj, '{}/{}/{}'.format(goc_pop.id, jj, goc_type.id), 'spike')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % simid)
    for jj in range(goc_pop.size):
        ls.add_column_to_output_file(
            of0, jj, '{}/{}/{}/v'.format(goc_pop.id, jj, goc_type.id))

    # Create LEMS file to run
    lems_simfile = ls.save_to_file()

    if run:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  nogui=True,
                                                  plot=False)
    else:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  only_generate_scripts=True,
                                                  compile_mods=False,
                                                  nogui=True,
                                                  plot=False)

    return res
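
A minimal usage sketch (assumes 'useParams_FI_14_25.pkl' and the matching Golgi_040408_C1_*.cell.nml file are in the working directory; with run=False only the NEURON scripts are generated):

res = create_GoC_network(duration=2000, dt=0.025, seed=123, runid=0, run=False)
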
コード例 #27
0
ファイル: LEMSSimulation.py プロジェクト: travs/pyNeuroML
    def include_neuroml2_file(self, nml2_file_name, include_included=True):
        self.lems_info['include_files'].append(nml2_file_name)
        if include_included:
            # Also pull in any files referenced by <include> elements in the top level file
            cell = read_neuroml2_file(nml2_file_name)
            for include in cell.includes:
                self.lems_info['include_files'].append(include.href)
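
A short sketch of how this method is typically used (the simulation id, target and file name are hypothetical placeholders):

ls = LEMSSimulation('sim_example', 500, 0.025, 'net_example')
ls.include_neuroml2_file('net_example.net.nml')  # also registers the file's own <include> hrefs
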
コード例 #28
0
def dashboard_cells(net_id,
                    net_file_name,
                    config_array,
                    global_dt,
                    if_params,
                    elec_len_list,
                    dt_list,
                    generate_dashboards=True,
                    compare_to_neuroConstruct=False,
                    regenerate_nml2=False,
                    show_plot_already=False,
                    proj_string_neuroConstruct=None,
                    shell=None,
                    nc_home=None):
            
    ################### check whether use of neuroConstruct python/java interface is needed ########################
    
    if regenerate_nml2:
    
       use_NeuroConstruct(compare_to_neuroConstruct=compare_to_neuroConstruct,
                          regenerate_nml2=regenerate_nml2,
                          proj_string=proj_string_neuroConstruct,
                          global_dt=global_dt,
                          config_array=config_array,
                          elec_len_list=elec_len_list,
                          shell=shell,
                          nc_home=nc_home)
                             
    else:
       
          
       use_NeuroConstruct(compare_to_neuroConstruct=compare_to_neuroConstruct,
                          regenerate_nml2=regenerate_nml2,
                          proj_string=proj_string_neuroConstruct,
                          global_dt=global_dt,
                          config_array=config_array,
                          shell=shell,
                          nc_home=nc_home)     
                             
            
                             
    ############################################################################################################
        
    if generate_dashboards:    
       
       cell_id_list=[]
       
       config_list=[]
       
       analysis_header_list=[]
       
       nC_vs_NML2Curve_list=[]
       
       IFcurve_list=[]
       
       for cellModel in config_array.keys():
       
           cell_id_list.append(cellModel)
           
           config_list.append(config_array[cellModel]['Analysis'])
        
           save_to_path="../"+cellModel
        
           if not os.path.exists(save_to_path):
              print("Creating a new directory %s"%save_to_path)
              os.makedirs(save_to_path)
           else:
              print("A directory %s already exists"%save_to_path)
           
           pathToConfig="../"+config_array[cellModel]['Analysis']
           
           try:
              
              with open(os.path.join(pathToConfig,"compSummary.json"),'r') as f:
              
                   comp_info=json.load(f)
                   
           except IOError:
           
              print("cannot open file %s" % os.path.join(pathToConfig, "compSummary.json"))
              
           cell_morph_summary=comp_info[config_array[cellModel]['Analysis']]
           
           src_files = os.listdir(pathToConfig)
           
           num_dx_configs=0
           
           dx_array=[]
           
           found_all_compartmentalizations=False
           
           dx_configs={}
        
           target_v=None
           
           found_default=False
           
           for file_name in src_files:
           
               full_file_path=os.path.join(pathToConfig,file_name)
               
               if (os.path.isdir(full_file_path)) and "_default" in file_name:
               
                  found_default=True
            
                  original_LEMS_target=os.path.join(full_file_path,"LEMS_Target.xml")
            
                  ###################################################################################
                  if -1 in elec_len_list and "-1" in cell_morph_summary.keys():
                  
                      dx_configs[cell_morph_summary["-1"]["IntDivs"]]=original_LEMS_target
                     
                      default_num_of_comps=cell_morph_summary["-1"]["IntDivs"]
                      
                      dx_array.append(int(default_num_of_comps))
                     
                      num_dx_configs+=1
                  ##################################################################################      
                  print("%s is a directory"%full_file_path)
                  print("will generate the IF curve for %s.cell.nml"%cellModel)
                  generate_current_vs_frequency_curve(os.path.join(full_file_path,cellModel+".cell.nml"), 
                                      cellModel, 
                                      start_amp_nA =     if_params['start_amp_nA'], 
                                      end_amp_nA =       if_params['end_amp_nA'], 
                                      step_nA =          if_params['step_nA'], 
                                      analysis_duration =if_params['analysis_duration'], 
                                      analysis_delay =   if_params['analysis_delay'],
                                      dt=                if_params['dt'],
                                      temperature=       if_params['temperature'],
                                      plot_voltage_traces=if_params['plot_voltage_traces'],
                                      plot_if=            if_params['plot_if'],
                                      plot_iv=            if_params['plot_iv'],
                                      show_plot_already=  show_plot_already,
                                      save_if_figure_to='%s/IF_%s.png'%(save_to_path,cellModel),
                                      save_iv_figure_to='%s/IV_%s.png'%(save_to_path,cellModel),
                                      simulator=         if_params['simulator'])
                                        
                  IFcurve="IF_%s"%cellModel
                  
                  IFcurve_list.append(IFcurve)
                  
                  IVcurve="IV_%s"%cellModel
                  
                  nml2_file_path=os.path.join(full_file_path,net_file_name+".net.nml")      
                  
                  net_doc = pynml.read_neuroml2_file(nml2_file_path)
                  net=net_doc.networks[0]
                  pop=net.populations[0]
                  popID=pop.id
                  
                  target_v="%s/0/%s/v"%(popID,cellModel)
               
                  ########################################################################################
               
                  if if_params['simulator'] == 'jNeuroML':
                     results = pynml.run_lems_with_jneuroml(original_LEMS_target, nogui=True, load_saved_data=True, plot=False, verbose=False)
                  if if_params['simulator'] == 'jNeuroML_NEURON':
                     results = pynml.run_lems_with_jneuroml_neuron(original_LEMS_target, nogui=True, load_saved_data=True, plot=False, verbose=False)
                  
                  t = results['t']
                  v = results[target_v]
                  
                  if compare_to_neuroConstruct:
                  
                     print("will generate the comparison between the nC model and NeuroML2 model")
                     
                     PlotNC_vs_NML2({'NML2':[{'t':t,'v':v}],'nC':[config_array[cellModel]['OriginalTag']],
                                     'subplotTitles':['NeuroML2 versus nC model: simulations in NEURON with dt=%f'%global_dt]},
                                     {'cols':8,'rows':5},
                                     legend=True,
                                     show=False,
                                     save_to_file='%s/nC_vs_NML2_%s.png'%(save_to_path,config_array[cellModel]['Analysis']),
                                     nCcellPath=os.path.join(save_to_path,config_array[cellModel]['Analysis'])   )
                                    
                     analysis_string1="nC_vs_NML2_%s"%config_array[cellModel]['Analysis']
                     
                     analysis_header1="Comparison between the original nC model and NeuroML2 model: simulations in NEURON with dt=%f"%global_dt
                     
                     analysis_header_list.append(analysis_header1)
                     
                     nC_vs_NML2Curve_list.append(analysis_string1)
                              
                  else:
                  
                     print("will generate the plot for the NeuroML2 model")
                     
                     generate_nml2_plot({'NML2':[{'t':t,'v':v}],'subplotTitles':['NeuroML2 model: simulations in NEURON with dt=%f'%global_dt]},
                                        {'cols':8,'rows':5},
                                        show=False,
                                        save_to_file='%s/NML2_%s.png'%(save_to_path,config_array[cellModel]['Analysis']))
                                            
                     analysis_string1="NML2_%s"%config_array[cellModel]['Analysis']
                     
                     analysis_header1="NeuroML2 model: simulations in NEURON with dt=%f"%global_dt    
                     
                     analysis_header_list.append(analysis_header1)

                     nC_vs_NML2Curve_list.append(analysis_string1)
                     
                  smallest_dt=min(dt_list)  
                              
                  ########################################################################################
                  print("will generate the spike times vs dt curve for %s.cell.nml"%cellModel)
                  analyse_spiketime_vs_dt(nml2_file_path, 
                                          net_id,
                                          get_sim_duration(os.path.join(full_file_path,"LEMS_%s.xml"%net_id)),
                                          if_params['simulator'],
                                          target_v,
                                          dt_list,
                                          verbose=False,
                                          spike_threshold_mV = 0,
                                          show_plot_already=show_plot_already,
                                          save_figure_to="%s/Dt_%s.png"%(save_to_path,cellModel),
                                          num_of_last_spikes=None)
                                          
                  dt_curve="Dt_%s"%cellModel
                  
               if not found_all_compartmentalizations:
                  
                  for elecLen in range(0,len(elec_len_list)):
               
                      elec_len=str(elec_len_list[elecLen]) 
                      
                      if elec_len != "-1":
                  
                         if (elec_len  in file_name) and (elec_len in cell_morph_summary.keys() ):
                      
                            dx_configs[cell_morph_summary[elec_len]["IntDivs"]]=os.path.join(full_file_path,"LEMS_Target.xml")
                      
                            num_dx_configs+=1
                         
                            dx_array.append(int(cell_morph_summary[elec_len]["IntDivs"] ) )
                      
                            break
                         
               if num_dx_configs==len(elec_len_list):
                  
                  found_all_compartmentalizations=True 
                      
           if not found_default:
           
              print("default configuration for %s analysis is not found; execution will terminate; set regenerate_nml2 to True to generate target dirs."%cellModel)
              
              quit()
                      
           if found_all_compartmentalizations:
           
              dx_array=list(np.sort(dx_array) )
                  
              maximum_int_divs=max(dx_array)
           
              print("testing the presence of cell configs with different levels of spatial discretization")
              
              analyse_spiketime_vs_dx(dx_configs, 
                                      if_params['simulator'],
                                      target_v,
                                      verbose=False,
                                      spike_threshold_mV = 0,
                                      show_plot_already=show_plot_already,
                                      save_figure_to="%s/Dx_%s.png"%(save_to_path,cellModel),
                                      num_of_last_spikes=3) 
               
              dx_curve="Dx_%s"%cellModel
                
           else:
              print("not all of the target configurations were recompartmentalized; execution will terminate; set regenerate_nml2 to True to obtain all of the target configurations.")
              quit() 
              
           if config_array[cellModel]['Analysis'] != config_array[cellModel]['SpikeProfile']:    
        
              pathToProfileConfig="../"+config_array[cellModel]['SpikeProfile']+"/"+config_array[cellModel]['SpikeProfile']+"_default"
           
              original_LEMS_target=os.path.join(pathToProfileConfig,"LEMS_Target.xml")
               
              if if_params['simulator'] == 'jNeuroML':
                 results = pynml.run_lems_with_jneuroml(original_LEMS_target, nogui=True, load_saved_data=True, plot=False, verbose=False)
              if if_params['simulator'] == 'jNeuroML_NEURON':
                 results = pynml.run_lems_with_jneuroml_neuron(original_LEMS_target, nogui=True, load_saved_data=True, plot=False, verbose=False)
                
              if 'SpikeProfileTag' in config_array[cellModel].keys():
                 
                  tag=config_array[cellModel]['SpikeProfileTag']+"_0_wtime"
                 
              else:
                 
                  tag=config_array[cellModel]['OriginalTag']
                  
              if 'SpikeProfileCellTag' in config_array[cellModel].keys() and 'SpikeProfileTag' in config_array[cellModel].keys():
                
                 target_v="%s/0/%s/v"%(config_array[cellModel]['SpikeProfileTag'],config_array[cellModel]['SpikeProfileCellTag'])
                    
              t = results['t']
              v = results[target_v]
        
              if compare_to_neuroConstruct:
               
                 print("will generate the comparison between the nC model and NeuroML2 model")
                 
                 PlotNC_vs_NML2({'NML2':[{'t':t,'v':v}],'nC':[tag],
                                 'subplotTitles':['NML2 versus nC model: simulations in NEURON with dt=%f'%global_dt]},
                                 {'cols':8,'rows':5},
                                 legend=True,
                                 show=show_plot_already,
                                 save_to_file='%s/nC_vs_NML2_%s.png'%(save_to_path,config_array[cellModel]['SpikeProfile']),
                                 nCcellPath=os.path.join(save_to_path,config_array[cellModel]['SpikeProfile'])   )
                               
                 analysis_string2="nC_vs_NML2_%s"%config_array[cellModel]['SpikeProfile']
           
                 analysis_header2="Comparison between the original nC model and NeuroML2 model: simulations in NEURON with dt=%f"%global_dt
           
              else:
        
                 print("will generate the plot for the NeuroML2 model")
                 
                 generate_nml2_plot({'NML2':[{'t':t,'v':v}],'subplotTitles':['NeuroML2 model: simulations in NEURON with dt=%f'%global_dt]},
                                        {'cols':8,'rows':5},
                                        show=False,
                                        save_to_file='%s/NML2_%s.png'%(save_to_path,config_array[cellModel]['SpikeProfile']))
           
                                         
                 analysis_string2="NML2_%s"%config_array[cellModel]['SpikeProfile']
                     
                 analysis_header2="NeuroML2 model: simulations in NEURON with dt=%f"%global_dt
              
              cwd=os.getcwd()
               
              os.chdir(save_to_path)
        
              readme = ''' 
         
## Model: %(CellID)s

### Original neuroConstruct config ID: %(SpikeProfile)s

**%(AnalysisHeader2)s**

![Simulation](%(SpikeProfileCurve)s.png)

### Original neuroConstruct config ID: %(Config)s

**%(AnalysisHeader1)s**

![Simulation](%(nC_vs_NML2Curve)s.png)

**IF curve for the NeuroML2 model simulated in NEURON**

![Simulation](%(IFcurve)s.png)

**IV curve for the NeuroML2 model simulated in NEURON**

![Simulation](%(IVcurve)s.png)

**Spike times versus time step: the NeuroML2 model simulated in NEURON.
Dashed black lines - spike times at the %(Smallest_dt)s ms time step; Green - spike times at the following time steps (in ms): %(DtArray)s.**

![Simulation](%(DtCurve)s.png)

**Spike times versus spatial discretization: the NeuroML2 model simulated in NEURON.
Default value for the number of internal divs is %(default_divs)s.
Dashed black lines - spike times at the %(MaximumDivs)s internal divisions; Blue - spike times at the following values of internal divisions:
%(IntDivsArray)s.**

![Simulation](%(DxCurve)s.png)'''

              readme_file = open('README.md','w')
              
              readme_final=readme%{"CellID":cellModel,
                                   "IFcurve":IFcurve,
                                   "IVcurve":IVcurve,
                                   "Config":config_array[cellModel]['Analysis'],
                                   "DtCurve":dt_curve,
                                   "DxCurve":dx_curve,
                                   "nC_vs_NML2Curve":analysis_string1,
                                   "AnalysisHeader1":analysis_header1,
                                   "SpikeProfileCurve":analysis_string2,
                                   "AnalysisHeader2":analysis_header2,
                                   "default_divs":default_num_of_comps,
                                   "SpikeProfile":config_array[cellModel]['SpikeProfile'],
                                   "Smallest_dt":smallest_dt,
                                   "DtArray":dt_list,
                                   "IntDivsArray":dx_array,
                                   "MaximumDivs":maximum_int_divs}
                           
              readme_file.write(readme_final)
              
              readme_file.close()

              os.chdir(cwd)
      
           else:  
           
              cwd=os.getcwd()
               
              os.chdir(save_to_path)
        
              readme = ''' 
         
## Model: %(CellID)s

### Original neuroConstruct config ID: %(Config)s

**%(AnalysisHeader1)s**

![Simulation](%(nC_vs_NML2Curve)s.png)

**IF curve for the NeuroML2 model simulated in NEURON**

![Simulation](%(IFcurve)s.png)

**IV curve for the NeuroML2 model simulated in NEURON**

![Simulation](%(IVcurve)s.png)

**Spike times versus time step: the NeuroML2 model simulated in NEURON.
Dashed black lines - spike times at the %(Smallest_dt)s ms time step; Green - spike times for the following time steps (in ms): %(DtArray)s.**

![Simulation](%(DtCurve)s.png)

**Spike times versus spatial discretization: the NeuroML2 model simulated in NEURON.
Default value for the number of internal divs is %(default_divs)s.
Dashed black lines - spike times at the %(MaximumDivs)s internal divisions; Blue - spike times at the following values of internal divisions:
%(IntDivsArray)s.**

![Simulation](%(DxCurve)s.png)'''

              readme_file = open('README.md','w')
              
              readme_final=readme%{"CellID":cellModel,
                                   "IFcurve":IFcurve,
                                   "IVcurve":IVcurve,
                                   "Config":config_array[cellModel]['Analysis'],
                                   "DtCurve":dt_curve,
                                   "DxCurve":dx_curve,
                                   "nC_vs_NML2Curve":analysis_string1,
                                   "AnalysisHeader1":analysis_header1,
                                   "default_divs":default_num_of_comps,
                                   "Smallest_dt":smallest_dt,
                                   "DtArray":dt_list,
                                   "IntDivsArray":dx_array,
                                   "MaximumDivs":maximum_int_divs}
                           
              readme_file.write(readme_final)
              readme_file.close()
              os.chdir(cwd)                                
              
       cwd=os.getcwd()
               
       os.chdir(os.path.dirname(cwd))
        
       readme = ''' 
      
## Conversion of Thalamocortical cell models to NeuroML2

'''
       readme_file = open('README.md','w')

       for cell_index in range(0,len(cell_id_list)):
       
           readme_cell='''
           
## Model: %(CellID)s

### Original neuroConstruct config ID: %(Config)s

**%(AnalysisHeader)s**

![Simulation](%(nC_vs_NML2Curve)s.png)

**IF curve for the NeuroML2 model simulated in NEURON**

![Simulation](%(IFcurve)s.png)'''

           readme_cell=readme_cell%{"CellID":cell_id_list[cell_index],
                                    "Config":config_list[cell_index],
                                    "AnalysisHeader":analysis_header_list[cell_index],
                                    "nC_vs_NML2Curve":os.path.join(cell_id_list[cell_index],nC_vs_NML2Curve_list[cell_index]),
                                    "IFcurve":os.path.join(cell_id_list[cell_index],IFcurve_list[cell_index])}
                                    
           readme=readme+readme_cell
                                
       readme_file.write(readme)
       readme_file.close()
       os.chdir(cwd)     
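
A minimal invocation sketch for the dashboard generator above (every name and value below is a hypothetical placeholder; real configurations are expected to match the neuroConstruct project layout the function assumes):

if_params = {'start_amp_nA': -0.1, 'end_amp_nA': 0.4, 'step_nA': 0.05,
             'analysis_duration': 1000, 'analysis_delay': 50, 'dt': 0.025,
             'temperature': '32degC', 'plot_voltage_traces': False,
             'plot_if': True, 'plot_iv': True, 'simulator': 'jNeuroML_NEURON'}

config_array = {'CellX': {'Analysis': 'CellX_Analysis',
                          'SpikeProfile': 'CellX_SpikeProfile',
                          'OriginalTag': 'CellX_0'}}

dashboard_cells('TestNet', 'TestNet', config_array,
                global_dt=0.025, if_params=if_params,
                elec_len_list=[-1, 50, 100], dt_list=[0.01, 0.025, 0.05],
                regenerate_nml2=False)
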
コード例 #29
0
def create_GoC_network(duration=2000,
                       dt=0.025,
                       seed=123,
                       runid=0,
                       run=False,
                       minI=-75,
                       maxI=200,
                       iStep=25,
                       iDur=400,
                       iRest=500):

    file = open('useParams_SpontFreq_7_pm_2.pkl', 'rb')
    use_params = pkl.load(file)["useParams"]
    file.close()

    runid = use_params[0][runid]
    print('Using parameter set = ', runid)
    ### ---------- Component types
    gocID = 'GoC_' + format(runid, '05d')
    goc_filename = '{}.cell.nml'.format(gocID)
    goc_type = pynml.read_neuroml2_file(goc_filename).cells[0]

    ### --------- Populations

    # Build network to specify cells and connectivity
    net = nml.Network(id='GoCNet_' + format(runid, '05d'),
                      type="networkWithTemperature",
                      temperature="23 degC")

    # Create GoC population
    goc_pop = nml.Population(id=goc_type.id + "Pop",
                             component=goc_type.id,
                             type="populationList",
                             size=1)
    inst = nml.Instance(id=0)
    goc_pop.instances.append(inst)
    inst.location = nml.Location(x=0, y=0, z=0)
    net.populations.append(goc_pop)

    # Create NML document for network specification
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(nml.IncludeType(href=goc_filename))

    # Add Current Injection
    ctr = 0
    goc = 0
    p = {
        "iAmp": np.arange(minI, maxI + iStep / 2, iStep),
        "iDuration": iDur,
        "iRest": iRest
    }
    p["nSteps"] = p["iAmp"].shape[0]

    for jj in range(p["nSteps"]):
        input_id = 'stim_{}'.format(ctr)
        istep = nml.PulseGenerator(id=input_id,
                                   delay='{} ms'.format(p["iDuration"] * jj +
                                                        p["iRest"] * (jj + 1)),
                                   duration='{} ms'.format(p["iDuration"]),
                                   amplitude='{} pA'.format(p["iAmp"][jj]))
        net_doc.pulse_generators.append(istep)

        input_list = nml.InputList(id='ilist_{}'.format(ctr),
                                   component=istep.id,
                                   populations=goc_pop.id)
        curr_inj = nml.Input('0',
                             target="../%s[%i]" % (goc_pop.id, goc),
                             destination="synapses")
        input_list.input.append(curr_inj)
        net.input_lists.append(input_list)
        ctr += 1

    ### --------------  Write files

    net_filename = 'GoCNet_istep_' + format(runid, '05d') + '.nml'
    pynml.write_neuroml2_file(net_doc, net_filename)

    simid = 'sim_gocnet_istep_' + goc_type.id
    ls = LEMSSimulation(simid, duration=duration, dt=dt, simulation_seed=seed)
    ls.assign_simulation_target(net.id)
    ls.include_neuroml2_file(net_filename)
    ls.include_neuroml2_file(goc_filename)

    # Specify outputs
    eof0 = 'Events_file'
    ls.create_event_output_file(eof0, "%s.v.spikes" % simid, format='ID_TIME')
    for jj in range(goc_pop.size):
        ls.add_selection_to_event_output_file(
            eof0, jj, '{}/{}/{}'.format(goc_pop.id, jj, goc_type.id), 'spike')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % simid)
    for jj in range(goc_pop.size):
        ls.add_column_to_output_file(
            of0, jj, '{}/{}/{}/v'.format(goc_pop.id, jj, goc_type.id))

    # Create LEMS file to run
    lems_simfile = ls.save_to_file()

    if run:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  nogui=True,
                                                  plot=False)
    else:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  only_generate_scripts=True,
                                                  compile_mods=False,
                                                  nogui=True,
                                                  plot=False)

    return res
コード例 #30
0
def get_channels_from_channel_file(channel_file):
    doc = read_neuroml2_file(channel_file, include_includes=True, verbose=False, already_included=[])
    channels = list(doc.ion_channel_hhs) + list(doc.ion_channel)
    return channels
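
A quick usage sketch (the channel file name is just an example):

channels = get_channels_from_channel_file('Kd.channel.nml')
for channel in channels:
    print(channel.id)
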
コード例 #31
0
                           dir_to_old_components="../../cells",
                           reduced_to_single_compartment=False,
                           validate_nml2=False,
                           return_synapses=True,
                           connection_segment_groups=[{'Type':'Chem','PreCellType':"pyr_4_sym",'PreSegGroup':"soma_group",'PostCellType':"pyr_4_sym",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Chem','PreCellType':"pyr_4_sym",'PreSegGroup':"soma_group",'PostCellType':"bask",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Chem','PreCellType':"bask",'PreSegGroup':"soma_group",'PostCellType':"bask",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Chem','PreCellType':"bask",'PreSegGroup':"soma_group",'PostCellType':"pyr_4_sym",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Elect','PreCellType':"pyr_4_sym",'PreSegGroup':"apical_dends",'PostCellType':"pyr_4_sym",'PostSegGroup':"apical_dends"},
                                                      {'Type':'Elect','PreCellType':"pyr_4_sym",'PreSegGroup':"soma_group",'PostCellType':"bask",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Elect','PreCellType':"bask",'PreSegGroup':"dendrite_group",'PostCellType':"bask",'PostSegGroup':"dendrite_group"},
                                                      {'Type':'Elect','PreCellType':"bask",'PreSegGroup':"dendrite_group",'PostCellType':"pyr_4_sym",'PostSegGroup':"dendrite_group"}],
                            input_segment_groups=[{'PostCellType':"pyr_4_sym",'PostSegGroup':"dendrite_group"},{'PostCellType':"bask",'PostSegGroup':"dendrite_group"}],
                            synapse_file_tags=['.synapse.','Syn','Elect'])
                            
nml_doc=pynml.read_neuroml2_file("TestRunColumnAllenInstitute.net.nml")

network=nml_doc.networks[0]

lems_file_name=oc_build.generate_lems_simulation(nml_doc, 
                                                 network, 
                                                 "TestRunColumnAllenInstitute.net.nml", 
                                                 duration =100, 
                                                 dt =0.020,
                                                 include_extra_lems_files=include_these_synapses)
     
opencortex.print_comment_v("Starting simulation of %s.net.nml"%"TestRunColumnAllenInstitute")
                            
oc_build.simulate_network(lems_file_name=lems_file_name,
                          simulator="jNeuroML_NEURON",
                          max_memory="4000M")
コード例 #32
0
def generate(cell, duration, config='IClamp'):

    reference = "%s_%s" % (config, cell)

    cell_id = '%s' % cell
    nml2_filename = '%s.cell.nml' % (cell)
    cell_nmll = Cell(id=cell_id, neuroml2_source_file=nml2_filename)

    ################################################################################
    ###   Add some inputs

    syn_exc = None

    if 'IClamp' in config:
        parameters = {}
        parameters['stim_amp'] = '350pA'
        parameters['stim_delay'] = '100ms'
        parameters['stim_dur'] = '500ms'
        input_source = InputSource(id='iclamp_0',
                                   neuroml2_input='PulseGenerator',
                                   parameters={
                                       'amplitude': 'stim_amp',
                                       'delay': 'stim_delay',
                                       'duration': 'stim_dur'
                                   })

    elif 'Poisson' in config:
        syn_exc = Synapse(id='ampa', neuroml2_source_file='ampa.synapse.nml')
        parameters = {}
        parameters['average_rate'] = '100 Hz'
        parameters['number_per_cell'] = '10'
        input_source = InputSource(id='pfs0',
                                   neuroml2_input='PoissonFiringSynapse',
                                   parameters={
                                       'average_rate': 'average_rate',
                                       'synapse': syn_exc.id,
                                       'spike_target': "./%s" % syn_exc.id
                                   })

    sim, net = create_new_model(
        reference,
        duration,
        dt=0.025,  # ms 
        temperature=34,  # degC
        default_region='Cortex',
        parameters=parameters,
        cell_for_default_population=cell_nmll,
        color_for_default_population=colors[cell],
        input_for_default_population=input_source)

    if 'Poisson' in config:
        net.synapses.append(syn_exc)
        net.inputs[0].number_per_cell = 'number_per_cell'

    cell = read_neuroml2_file(nml2_filename).cells[0]

    if len(cell.morphology.segments) > 1:
        sim.recordVariables = {}
        for seg in cell.morphology.segments:
            seg_id = seg.id
            if seg_id != 0 and seg_id % 500 < 10:
                sim.recordVariables['%i/v' % seg_id] = {'all': '*'}
    '''
    sim.recordVariables={'biophys/membraneProperties/Na_all/Na/m/q':{'all':'*'},
                         'biophys/membraneProperties/Na_all/Na/h/q':{'all':'*'},
                         'biophys/membraneProperties/Kd_all/Kd/n/q':{'all':'*'}}
    
    if cell != 'FS':
        sim.recordVariables['biophys/membraneProperties/IM_all/IM/p/q']={'all':'*'}
        
    if cell == 'IB' or cell == 'IBR':
        sim.recordVariables['biophys/membraneProperties/IL_all/IL/q/q']={'all':'*'}
        sim.recordVariables['biophys/membraneProperties/IL_all/IL/r/q']={'all':'*'}
        
    if cell == 'LTS':
        sim.recordVariables['biophys/membraneProperties/IT_all/IT/s/q']={'all':'*'}
        sim.recordVariables['biophys/membraneProperties/IT_all/IT/u/q']={'all':'*'}'''

    net.to_json_file()
    sim.to_json_file()

    return sim, net
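
A minimal usage sketch (assumes a module-level 'colors' dict and an 'RS.cell.nml' file are available; the cell name is a placeholder):

sim, net = generate('RS', 700, config='IClamp')
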
コード例 #33
0
    def parse_dataset(self, d):
        #print_v("Parsing dataset/array: "+ str(d))

        # Population
        if self.current_population and d.name == 'locations':

            perc_cells = self.parameters[
                'percentage_cells_per_pop'] if 'percentage_cells_per_pop' in self.parameters else 100
            if perc_cells > 100: perc_cells = 100

            size = max(0, int((perc_cells / 100.) * d.shape[0]))

            if size > 0:
                properties = {}
                if self._is_interneuron(self.current_population):
                    properties['radius'] = 5
                    type = 'I'
                else:
                    properties['radius'] = 10
                    type = 'E'
                properties['type'] = type

                layer = self.current_population.split('_')[0]
                properties['region'] = layer
                try:
                    import opencortex.utils.color as occ
                    if layer == 'L23':
                        if type == 'E': color = occ.L23_PRINCIPAL_CELL
                        if type == 'I': color = occ.L23_INTERNEURON
                    if layer == 'L4':
                        if type == 'E': color = occ.L4_PRINCIPAL_CELL
                        if type == 'I': color = occ.L4_INTERNEURON
                    if layer == 'L5':
                        if type == 'E': color = occ.L5_PRINCIPAL_CELL
                        if type == 'I': color = occ.L5_INTERNEURON
                    if layer == 'L6':
                        if type == 'E': color = occ.L6_PRINCIPAL_CELL
                        if type == 'I': color = occ.L6_INTERNEURON

                    properties['color'] = color
                except:
                    # Don't worry about it, it's just metadata
                    pass

                component_obj = None

                if self.parameters[
                        'DEFAULT_CELL_ID'] in self.component_objects:
                    component_obj = self.component_objects[
                        self.parameters['DEFAULT_CELL_ID']]
                else:
                    if 'cell_info' in self.parameters:
                        def_cell_info = self.parameters['cell_info'][
                            self.parameters['DEFAULT_CELL_ID']]
                        if def_cell_info.neuroml2_source_file:
                            from pyneuroml import pynml
                            nml2_doc = pynml.read_neuroml2_file(
                                def_cell_info.neuroml2_source_file,
                                include_includes=True)
                            component_obj = nml2_doc.get_by_id(
                                self.parameters['DEFAULT_CELL_ID'])
                            print_v("Loaded NeuroML2 object %s from %s " %
                                    (component_obj,
                                     def_cell_info.neuroml2_source_file))
                            self.component_objects[self.parameters[
                                'DEFAULT_CELL_ID']] = component_obj

                self.handler.handle_population(
                    self.current_population,
                    self.parameters['DEFAULT_CELL_ID'],
                    size,
                    component_obj=component_obj,
                    properties=properties)

                print_v("   There are %i cells in: %s" %
                        (size, self.current_population))
                for i in range(0, d.shape[0]):

                    if i < size:
                        row = d[i, :]
                        x = row[0]
                        y = row[1]
                        z = row[2]
                        self.pop_locations[self.current_population][i] = (x, y,
                                                                          z)
                        self.handler.handle_location(
                            i, self.current_population,
                            self.parameters['DEFAULT_CELL_ID'], x, y, z)

        # Projection
        elif self.pre_pop != None and self.post_pop != None:

            proj_id = 'Proj__%s__%s' % (self.pre_pop, self.post_pop)
            synapse = 'gaba'
            if 'PC' in self.pre_pop or 'SS' in self.pre_pop:  # TODO: better choice between E/I cells
                synapse = 'ampa'

            (ii, jj) = np.nonzero(d)
            conns_here = False
            pre_num = len(self.pop_locations[
                self.pre_pop]) if self.pre_pop in self.pop_locations else 0
            post_num = len(self.pop_locations[
                self.post_pop]) if self.post_pop in self.pop_locations else 0

            if pre_num > 0 and post_num > 0:
                for index in range(len(ii)):
                    if ii[index]<pre_num and \
                       jj[index]<post_num:
                        conns_here = True
                        break

                if conns_here:
                    print_v("Conn %s -> %s (%s)" %
                            (self.pre_pop, self.post_pop, synapse))
                    self.handler.handle_projection(proj_id, self.pre_pop,
                                                   self.post_pop, synapse)

                    conn_count = 0

                    for index in range(len(ii)):
                        i = ii[index]
                        j = jj[index]
                        if i < pre_num and j < post_num:
                            #print("  Conn5 %s[%s] -> %s[%s]"%(self.pre_pop,i,self.post_pop,j))
                            delay = 1.111
                            weight = 1
                            self.handler.handle_connection(proj_id,
                                             conn_count,
                                             self.pre_pop,
                                             self.post_pop,
                                             synapse, \
                                             i, \
                                             j, \
                                             delay = delay, \
                                             weight = weight)
                            conn_count += 1

                    self.handler.finalise_projection(proj_id, self.pre_pop,
                                                     self.post_pop, synapse)

            self.post_pop = None
コード例 #34
0
def get_includes_from_channel_file(channel_file):
    doc = read_neuroml2_file(channel_file)
    includes = []
    for incl in doc.includes:
        includes.append(incl.href)
    return includes
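
A corresponding sketch for listing the files a channel file pulls in via <include> (file name again just an example):

for href in get_includes_from_channel_file('IM.channel.nml'):
    print(href)
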
コード例 #35
0
ファイル: __init__.py プロジェクト: sanjayankur31/pyNeuroML
def generate_lems_file_for_neuroml(
        sim_id,
        neuroml_file,
        target,
        duration,
        dt,
        lems_file_name,
        target_dir,
        nml_doc=None,  # Use this if the nml doc has already been loaded (to avoid delay in reload)
        include_extra_files=[],
        gen_plots_for_all_v=True,
        plot_all_segments=False,
        gen_plots_for_quantities={},  # Dict with displays vs lists of quantity paths
        gen_plots_for_only_populations=[],  # List of populations, all pops if=[]
        gen_saves_for_all_v=True,
        save_all_segments=False,
        gen_saves_for_only_populations=[],  # List of populations, all pops if=[]
        gen_saves_for_quantities={},  # Dict with file names vs lists of quantity paths
        gen_spike_saves_for_all_somas=False,
        gen_spike_saves_for_only_populations=[],  # List of populations, all pops if=[]
        gen_spike_saves_for_cells={},  # Dict with file names vs lists of quantity paths
        spike_time_format='ID_TIME',
        copy_neuroml=True,
        report_file_name=None,
        lems_file_generate_seed=None,
        verbose=False,
        simulation_seed=12345):

    my_random = random.Random()
    if lems_file_generate_seed:
        my_random.seed(
            lems_file_generate_seed
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
    else:
        my_random.seed(
            12345
        )  # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input

    file_name_full = '%s/%s' % (target_dir, lems_file_name)

    print_comment_v(
        'Creating LEMS file at: %s for NeuroML 2 file: %s (copy: %s)' %
        (file_name_full, neuroml_file, copy_neuroml))

    ls = LEMSSimulation(sim_id,
                        duration,
                        dt,
                        target,
                        simulation_seed=simulation_seed)

    if nml_doc is None:
        nml_doc = read_neuroml2_file(neuroml_file,
                                     include_includes=True,
                                     verbose=verbose)
        nml_doc_inc_not_included = read_neuroml2_file(neuroml_file,
                                                      include_includes=False,
                                                      verbose=False)
    else:
        nml_doc_inc_not_included = nml_doc

    ls.set_report_file(report_file_name)

    quantities_saved = []

    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)

    if not copy_neuroml:
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file),
                                       os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s" %
                        (neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file,
                                 include_included=True,
                                 relative_to_dir=os.path.abspath(target_dir))
    else:
        print_comment_v(
            "Copying a NeuroML file (%s) to: %s (abs path: %s)" %
            (neuroml_file, target_dir, os.path.abspath(target_dir)))

        if not os.path.isdir(target_dir):
            raise Exception("Target directory %s does not exist!" % target_dir)

        if os.path.realpath(
                os.path.dirname(neuroml_file)) != os.path.realpath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        else:
            print_comment_v("No need, same file...")

        neuroml_file_name = os.path.basename(neuroml_file)

        ls.include_neuroml2_file(neuroml_file_name, include_included=False)

        nml_dir = os.path.dirname(neuroml_file) if len(
            os.path.dirname(neuroml_file)) > 0 else '.'

        for include in nml_doc_inc_not_included.includes:

            if nml_dir == '.' and os.path.isfile(include.href):
                incl_curr = include.href
            else:
                incl_curr = '%s/%s' % (nml_dir, include.href)

            if os.path.isfile(include.href):
                incl_curr = include.href

            print_comment_v(
                ' - Including %s (located at %s; nml dir: %s), copying to %s' %
                (include.href, incl_curr, nml_dir, target_dir))
            '''
            if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
               not os.path.isfile("%s/%s" % (target_dir, incl_curr)) and \
               not os.path.isfile(incl_curr):
                shutil.copy(incl_curr, target_dir)
            else:
                print_comment_v("No need to copy...")'''

            f1 = "%s/%s" % (target_dir, os.path.basename(incl_curr))
            f2 = "%s/%s" % (target_dir, incl_curr)
            if os.path.isfile(f1):
                print_comment_v("No need to copy, file exists: %s..." % f1)
            elif os.path.isfile(f2):
                print_comment_v("No need to copy, file exists: %s..." % f2)
            else:
                shutil.copy(incl_curr, target_dir)

            ls.include_neuroml2_file(include.href, include_included=False)
            sub_doc = read_neuroml2_file(incl_curr)
            sub_dir = os.path.dirname(incl_curr) if len(
                os.path.dirname(incl_curr)) > 0 else '.'

            if sub_doc.__class__ == neuroml.nml.nml.NeuroMLDocument:
                for include in sub_doc.includes:
                    incl_curr = '%s/%s' % (sub_dir, include.href)
                    print_comment_v(' -- Including %s located at %s' %
                                    (include.href, incl_curr))

                    if not os.path.isfile("%s/%s" % (target_dir, os.path.basename(incl_curr))) and \
                       not os.path.isfile("%s/%s" % (target_dir, incl_curr)):

                        shutil.copy(incl_curr, target_dir)
                        ls.include_neuroml2_file(include.href,
                                                 include_included=False)

    if gen_plots_for_all_v \
       or gen_saves_for_all_v \
       or len(gen_plots_for_only_populations) > 0 \
       or len(gen_saves_for_only_populations) > 0 \
       or gen_spike_saves_for_all_somas \
       or len(gen_spike_saves_for_only_populations) > 0:

        for network in nml_doc.networks:
            for population in network.populations:

                variable = "v"
                quantity_template_e = "%s[%i]"

                component = population.component
                size = population.size
                cell = None
                segment_ids = []

                for c in nml_doc.spike_generator_poissons:
                    if c.id == component:
                        variable = "tsince"
                for c in nml_doc.SpikeSourcePoisson:
                    if c.id == component:
                        variable = "tsince"

                quantity_template = "%s[%i]/" + variable
                if plot_all_segments or gen_spike_saves_for_all_somas:
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()

                if population.type and population.type == 'populationList':
                    quantity_template = "%s/%i/" + component + "/" + variable
                    quantity_template_e = "%s/%i/" + component + ""
                    #  Multicompartmental cell
                    #  Needs to be supported in NeuronWriter
                    # if len(segment_ids)>1:
                    #     quantity_template_e = "%s/%i/"+component+"/0"
                    size = len(population.instances)

                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment(
                        'Generating %i plots for %s in population %s' %
                        (size, component, population.id))

                    disp0 = 'DispPop__%s' % population.id
                    ls.create_display(
                        disp0,
                        "Membrane potentials of cells in %s" % population.id,
                        "-90", "50")

                    for i in range(size):
                        if cell is not None and plot_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_line_to_display(
                                    disp0, "%s[%i] seg %i: v" %
                                    (population.id, i, segment_id), quantity,
                                    "1mV", get_next_hex_color(my_random))
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_line_to_display(
                                disp0, "%s[%i]: v" % (population.id, i),
                                quantity, "1mV", get_next_hex_color(my_random))

                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment(
                        'Saving %i values of %s for %s in population %s' %
                        (size, variable, component, population.id))

                    of0 = 'Volts_file__%s' % population.id
                    ls.create_output_file(
                        of0,
                        "%s.%s.%s.dat" % (sim_id, population.id, variable))
                    for i in range(size):
                        if cell is not None and save_all_segments:
                            quantity_template_seg = "%s/%i/" + component + "/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg % (
                                    population.id, i, segment_id)
                                ls.add_column_to_output_file(
                                    of0, 'v_%s' % safe_variable(quantity),
                                    quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template % (population.id, i)
                            ls.add_column_to_output_file(
                                of0, 'v_%s' % safe_variable(quantity),
                                quantity)
                            quantities_saved.append(quantity)

                if gen_spike_saves_for_all_somas or population.id in gen_spike_saves_for_only_populations:
                    print_comment(
                        'Saving spikes in %i somas for %s in population %s' %
                        (size, component, population.id))

                    eof0 = 'Spikes_file__%s' % population.id
                    ls.create_event_output_file(eof0,
                                                "%s.%s.spikes" %
                                                (sim_id, population.id),
                                                format=spike_time_format)
                    for i in range(size):
                        quantity = quantity_template_e % (population.id, i)
                        ls.add_selection_to_event_output_file(
                            eof0, i, quantity, "spike")
                        quantities_saved.append(quantity)

    for display in sorted(gen_plots_for_quantities.keys()):

        quantities = gen_plots_for_quantities[display]
        max_ = "1"
        min_ = "-1"
        scale = "1"

        # Check for v ...
        if quantities and len(quantities) > 0 and quantities[0].endswith('/v'):
            max_ = "40"
            min_ = "-80"
            scale = "1mV"

        ls.create_display(display, "Plots of %s" % display, min_, max_)
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, scale,
                                   get_next_hex_color(my_random))

    for file_name in sorted(gen_saves_for_quantities.keys()):
        quantities = gen_saves_for_quantities[file_name]
        of_id = safe_variable(file_name)
        ls.create_output_file(of_id, file_name)
        for q in quantities:
            ls.add_column_to_output_file(of_id, safe_variable(q), q)
            quantities_saved.append(q)

    for file_name in sorted(gen_spike_saves_for_cells.keys()):

        quantities = gen_spike_saves_for_cells[file_name]
        of_id = safe_variable(file_name)
        ls.create_event_output_file(of_id, file_name)
        pop_here = None
        for i, quantity in enumerate(quantities):
            pop, index = get_pop_index(quantity)
            if pop_here:
                if pop_here != pop:
                    raise Exception('Problem with generating LEMS for saving spikes for file %s.\n' % file_name + \
                                    'Multiple cells from different populations in one file will cause issues with index/spike id.')
            pop_here = pop
            # print('===== Adding to %s (%s) event %i for %s, pop: %s, i: %s' % (file_name, of_id, i, quantity, pop, index))
            ls.add_selection_to_event_output_file(of_id, index, quantity,
                                                  "spike")
            quantities_saved.append(quantity)

    ls.save_to_file(file_name=file_name_full)
    return quantities_saved, ls
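
A hedged usage sketch for the variant above, based on the keyword arguments it handles and its final return statement; the file names, duration and dt are illustrative and the import path is an assumption.

# Hedged usage sketch; "ExampleNet.net.nml" and the other values are placeholders.
from pyneuroml.lems import generate_lems_file_for_neuroml

quantities_saved, ls = generate_lems_file_for_neuroml(
    "Sim_ExampleNet",            # sim_id
    "ExampleNet.net.nml",        # neuroml_file, assumed to exist
    "ExampleNet",                # target: id of the network to simulate
    500,                         # duration (ms)
    0.025,                       # dt (ms)
    "LEMS_Sim_ExampleNet.xml",   # lems_file_name
    ".",                         # target_dir
    gen_plots_for_all_v=True,
    gen_saves_for_all_v=True,
    gen_spike_saves_for_all_somas=True,
    copy_neuroml=False)

print("The LEMS file will record %i quantities" % len(quantities_saved))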
Code example #36
0
def create_GoC_network(duration,
                       dt,
                       seed,
                       N_goc=0,
                       N_mf=15,
                       run=False,
                       prob_type='Boltzmann',
                       GJw_type='Vervaeke2010'):

    ### ---------- Component types
    goc_filename = 'GoC.cell.nml'  # Golgi cell with channels
    goc_file = pynml.read_neuroml2_file(goc_filename)
    goc_type = goc_file.cells[0]
    goc_ref = nml.IncludeType(href=goc_filename)

    MFSyn_filename = 'MF_GoC_Syn.nml'  # small conductance synapse for background inputs
    mfsyn_file = pynml.read_neuroml2_file(MFSyn_filename)
    MFSyn_type = mfsyn_file.exp_three_synapses[0]
    mfsyn_ref = nml.IncludeType(href=MFSyn_filename)

    MF20Syn_filename = 'MF_GoC_SynMult.nml'  # multi-syn conductance for strong/coincident transient input
    mf20syn_file = pynml.read_neuroml2_file(MF20Syn_filename)
    MF20Syn_type = mf20syn_file.exp_three_synapses[0]
    mf20syn_ref = nml.IncludeType(href=MF20Syn_filename)

    mf_type2 = 'spikeGeneratorPoisson'  # Spike source for background inputs
    mf_poisson = nml.SpikeGeneratorPoisson(
        id="MF_Poisson", average_rate="5 Hz")  # Not tuned to any data - qqq !

    mf_bursttype = 'transientPoissonFiringSynapse'  # Burst of MF input (as explicit input)
    mf_burst = nml.TransientPoissonFiringSynapse(id="MF_Burst",
                                                 average_rate="100 Hz",
                                                 delay="2000 ms",
                                                 duration="500 ms",
                                                 synapse=MF20Syn_type.id,
                                                 spike_target='./{}'.format(
                                                     MF20Syn_type.id))

    gj = nml.GapJunction(id="GJ_0", conductance="426pS")  # GoC-GoC gap junction (electrical synapse)

    ### --------- Populations

    # Build network to specify cells and connectivity
    net = nml.Network(id="gocNetwork",
                      type="networkWithTemperature",
                      temperature="23 degC")

    ### Golgi cells
    if N_goc > 0:
        GoC_pos = nu.GoC_locate(N_goc)
    else:
        GoC_pos = nu.GoC_density_locate()
        N_goc = GoC_pos.shape[0]

    # Create GoC population
    goc_pop = nml.Population(id=goc_type.id + "Pop",
                             component=goc_type.id,
                             type="populationList",
                             size=N_goc)
    for goc in range(N_goc):
        inst = nml.Instance(id=goc)
        goc_pop.instances.append(inst)
        inst.location = nml.Location(x=GoC_pos[goc, 0],
                                     y=GoC_pos[goc, 1],
                                     z=GoC_pos[goc, 2])
    net.populations.append(goc_pop)

    ### MF population
    MF_Poisson_pop = nml.Population(id=mf_poisson.id + "_pop",
                                    component=mf_poisson.id,
                                    type="populationList",
                                    size=N_mf)
    MF_pos = nu.GoC_locate(N_mf)
    for mf in range(N_mf):
        inst = nml.Instance(id=mf)
        MF_Poisson_pop.instances.append(inst)
        inst.location = nml.Location(x=MF_pos[mf, 0],
                                     y=MF_pos[mf, 1],
                                     z=MF_pos[mf, 2])
    net.populations.append(MF_Poisson_pop)

    # Create NML document for network specification
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(goc_ref)
    net_doc.includes.append(mfsyn_ref)
    net_doc.includes.append(mf20syn_ref)
    net_doc.spike_generator_poissons.append(mf_poisson)
    net_doc.transient_poisson_firing_synapses.append(mf_burst)
    net_doc.gap_junctions.append(gj)

    ### ------------ Connectivity

    ### background excitatory inputs: 	MF to GoC populations
    MFProjection = nml.Projection(id="MFtoGoC",
                                  presynaptic_population=MF_Poisson_pop.id,
                                  postsynaptic_population=goc_pop.id,
                                  synapse=MFSyn_type.id)
    net.projections.append(MFProjection)

    # Get list of MF->GoC synapses
    mf_synlist = nu.randdist_MF_syn(N_mf, N_goc,
                                    pConn=0.3)  # Not tuned to any data - qqq!
    nMFSyn = mf_synlist.shape[1]
    for syn in range(nMFSyn):
        mf, goc = mf_synlist[:, syn]
        conn2 = nml.Connection(
            id=syn,
            pre_cell_id='../{}/{}/{}'.format(MF_Poisson_pop.id, mf,
                                             mf_poisson.id),
            post_cell_id='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id),
            post_segment_id='0',
            post_fraction_along="0.5")  #on soma
        MFProjection.connections.append(conn2)

    ### Add few burst inputs
    n_bursts = 4
    gocPerm = np.random.permutation(
        N_goc)  # change to central neurons later -qqq !!!
    ctr = 0
    for gg in range(4):
        goc = gocPerm[gg]
        for jj in range(n_bursts):
            inst = nml.ExplicitInput(
                id=ctr,
                target='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id),
                input=mf_burst.id,
                synapse=MF20Syn_type.id,
                spikeTarget='./{}'.format(MF20Syn_type.id))
            net.explicit_inputs.append(inst)
            ctr += 1

    ### Electrical coupling between GoCs

    # get GJ connectivity
    GJ_pairs, GJWt = nu.GJ_conn(GoC_pos, prob_type, GJw_type)
    #tmp1, tmp2 = valnet.gapJuncAnalysis( GJ_pairs, GJWt )
    #print("Number of gap junctions per cell: ", tmp1)
    #print("Net GJ conductance per cell:", tmp2)

    # Add electrical synapses
    GoCCoupling = nml.ElectricalProjection(id="gocGJ",
                                           presynaptic_population=goc_pop.id,
                                           postsynaptic_population=goc_pop.id)
    nGJ = GJ_pairs.shape[0]
    for jj in range(nGJ):
        conn = nml.ElectricalConnectionInstanceW(
            id=jj,
            pre_cell='../{}/{}/{}'.format(goc_pop.id, GJ_pairs[jj, 0],
                                          goc_type.id),
            pre_segment='1',
            pre_fraction_along='0.5',
            post_cell='../{}/{}/{}'.format(goc_pop.id, GJ_pairs[jj, 1],
                                           goc_type.id),
            post_segment='1',
            post_fraction_along='0.5',
            synapse=gj.id,
            weight=GJWt[jj])
        GoCCoupling.electrical_connection_instance_ws.append(conn)
    net.electrical_projections.append(GoCCoupling)

    ### --------------  Write files

    net_filename = 'gocNetwork.nml'
    pynml.write_neuroml2_file(net_doc, net_filename)
    #lems_filename = 'instances.xml'
    #pynml.write_lems_file( lems_inst_doc, lems_filename, validate=False )

    simid = 'sim_gocnet' + goc_type.id
    ls = LEMSSimulation(simid, duration=duration, dt=dt, simulation_seed=seed)
    ls.assign_simulation_target(net.id)

    ls.include_neuroml2_file(net_filename)
    ls.include_neuroml2_file(goc_filename)
    ls.include_neuroml2_file(MFSyn_filename)
    ls.include_neuroml2_file(MF20Syn_filename)
    #ls.include_lems_file( lems_filename, include_included=False)

    # Specify outputs
    eof0 = 'Events_file'
    ls.create_event_output_file(eof0, "%s.v.spikes" % simid, format='ID_TIME')
    for jj in range(goc_pop.size):
        ls.add_selection_to_event_output_file(
            eof0, jj, '{}/{}/{}'.format(goc_pop.id, jj, goc_type.id), 'spike')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % simid)
    for jj in range(goc_pop.size):
        ls.add_column_to_output_file(
            of0, jj, '{}/{}/{}/v'.format(goc_pop.id, jj, goc_type.id))

    #Create Lems file to run
    lems_simfile = ls.save_to_file()

    if run:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  nogui=True,
                                                  plot=False)
    else:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  only_generate_scripts=True,
                                                  compile_mods=False,
                                                  nogui=True,
                                                  plot=False)

    return res
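
A hedged driver for create_GoC_network above; the duration, dt, seed and cell counts are arbitrary illustrations, and run=False only generates the NEURON scripts without executing them.

# Illustrative call only; the parameter values are not taken from any
# published configuration of this model.
if __name__ == '__main__':
    res = create_GoC_network(duration=2000,   # ms
                             dt=0.025,        # ms
                             seed=12345,
                             N_goc=10,
                             N_mf=15,
                             run=False,
                             prob_type='Boltzmann',
                             GJw_type='Vervaeke2010')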
Code example #37
0
File: __init__.py Project: RokasSt/pyNeuroML
def generate_lems_file_for_neuroml(sim_id, 
                                   neuroml_file, 
                                   target, 
                                   duration, 
                                   dt, 
                                   lems_file_name,
                                   target_dir,
                                   include_extra_files = [],
                                   gen_plots_for_all_v = True,
                                   plot_all_segments = False,
                                   gen_plots_for_quantities = {},   #  Dict with displays vs lists of quantity paths
                                   gen_plots_for_only_populations = [],   #  List of populations, all pops if = []
                                   gen_saves_for_all_v = True,
                                   save_all_segments = False,
                                   gen_saves_for_only_populations = [],  #  List of populations, all pops if = []
                                   gen_saves_for_quantities = {},   #  Dict with file names vs lists of quantity paths
                                   copy_neuroml = True,
                                   seed=None):
                                       
    
    if seed:
        random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
    
    file_name_full = '%s/%s'%(target_dir,lems_file_name)
    
    print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))
    
    ls = LEMSSimulation(sim_id, duration, dt, target)
    
    nml_doc = read_neuroml2_file(neuroml_file, include_includes=True, verbose=True)
    
    quantities_saved = []
    
    for f in include_extra_files:
        ls.include_neuroml2_file(f, include_included=False)
    
    if not copy_neuroml:
        rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
        print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
        ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
    else:
        print_comment_v("Copying NeuroML file (%s) to: %s (%s)"%(neuroml_file, target_dir, os.path.abspath(target_dir)))
        if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
            shutil.copy(neuroml_file, target_dir)
        
        neuroml_file_name = os.path.basename(neuroml_file)
        
        ls.include_neuroml2_file(neuroml_file_name, include_included=False)
        
        
        for include in nml_doc.includes:
            incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
            print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
            shutil.copy(incl_curr, target_dir)
            ls.include_neuroml2_file(include.href, include_included=False)
            
            sub_doc = read_neuroml2_file(incl_curr)
        
            for include in sub_doc.includes:
                incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
                print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
                shutil.copy(incl_curr, target_dir)
                ls.include_neuroml2_file(include.href, include_included=False)
                
                
    if gen_plots_for_all_v or gen_saves_for_all_v or len(gen_plots_for_only_populations)>0 or len(gen_saves_for_only_populations)>0 :
        
        for network in nml_doc.networks:
            for population in network.populations:
                
                quantity_template = "%s[%i]/v"
                component = population.component
                size = population.size
                cell = None
                segment_ids = []
                if plot_all_segments:
                    for c in nml_doc.cells:
                        if c.id == component:
                            cell = c
                            for segment in cell.morphology.segments:
                                segment_ids.append(segment.id)
                            segment_ids.sort()
                        
                if population.type and population.type == 'populationList':
                    quantity_template = "%s/%i/"+component+"/v"
                    size = len(population.instances)
                    
                if gen_plots_for_all_v or population.id in gen_plots_for_only_populations:
                    print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))
   
                    disp0 = 'DispPop__%s'%population.id
                    ls.create_display(disp0, "Membrane potentials of cells in %s"%population.id, "-90", "50")
                    
                    for i in range(size):
                        if cell!=None and plot_all_segments:
                            quantity_template_seg = "%s/%i/"+component+"/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg%(population.id, i, segment_id)
                                ls.add_line_to_display(disp0, "%s[%i] seg %i: v"%(population.id, i, segment_id), quantity, "1mV", get_next_hex_color())
                        else:
                            quantity = quantity_template%(population.id, i)
                            ls.add_line_to_display(disp0, "%s[%i]: v"%(population.id, i), quantity, "1mV", get_next_hex_color())
                
                if gen_saves_for_all_v or population.id in gen_saves_for_only_populations:
                    print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))
   
                    of0 = 'Volts_file__%s'%population.id
                    ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
                    for i in range(size):
                        if cell!=None and save_all_segments:
                            quantity_template_seg = "%s/%i/"+component+"/%i/v"
                            for segment_id in segment_ids:
                                quantity = quantity_template_seg%(population.id, i, segment_id)
                                ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                                quantities_saved.append(quantity)
                        else:
                            quantity = quantity_template%(population.id, i)
                            ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
                            quantities_saved.append(quantity)
                        
    for display in gen_plots_for_quantities.keys():
        
        quantities = gen_plots_for_quantities[display]
        ls.create_display(display, "Plots of %s"%display, "-90", "50")
        for q in quantities:
            ls.add_line_to_display(display, safe_variable(q), q, "1", get_next_hex_color())
            
    for file_name in gen_saves_for_quantities.keys():
        
        quantities = gen_saves_for_quantities[file_name]
        ls.create_output_file(file_name, file_name)
        for q in quantities:
            ls.add_column_to_output_file(file_name, safe_variable(q), q)
                        
        
    ls.save_to_file(file_name=file_name_full)
    
    return quantities_saved
Code example #38
0
import neuroml

from pyneuroml import pynml
import random

nml_file = "LargeScale_HippocampalNet.net.nml"
nml_doc = pynml.read_neuroml2_file(nml_file)

width = 1500
depth = 500

h_so = 100
h_sp = 100
h_lm = 200

for pop in nml_doc.networks[0].populations:

    print("Handling population %s with %s cells" %
          (pop.id, len(pop.instances)))
    for instance in pop.instances:
        loc = instance.location
        old = str(loc)
        loc.x = width * random.random()
        loc.y = (h_so + h_sp + h_lm) * random.random()
        if pop.id == 'pop_poolosyn':
            loc.y = h_so + h_sp * random.random()
        loc.z = depth * random.random()
        print("    Changed %s to %s" % (old, loc))

net_file = 'Mod_%s' % (nml_file)
neuroml.writers.NeuroMLWriter.write(nml_doc, net_file)
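
An optional check, not part of the original script: read the modified file back and confirm the new y coordinates stay within the stacked layer heights.

# Optional sanity check (an addition, not in the original script).
check_doc = pynml.read_neuroml2_file(net_file)
for pop in check_doc.networks[0].populations:
    for instance in pop.instances:
        y = float(instance.location.y)
        assert 0 <= y <= (h_so + h_sp + h_lm), \
            "Unexpected y position %s in population %s" % (y, pop.id)
print("All repositioned cells lie within the %i um column" % (h_so + h_sp + h_lm))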
Code example #39
0
File: NeuroML2ToPOVRay.py Project: 34383c/pyNeuroML
def main ():

    args = process_args()
        
    xmlfile = args.neuroml_file

    pov_file_name = xmlfile.replace(".xml", ".pov").replace(".nml1", ".pov").replace(".nml", ".pov")

    pov_file = open(pov_file_name, "w")


    header='''
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}


    \n''' ###    end of header


    pov_file.write(header%(args.background))

    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print("Saving into %s and %s and %s"%(pov_file_name, cf, nf))

    print("Converting XML file: %s to %s"%(xmlfile, pov_file_name))


    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)
    
    
    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print("There are %i cells in the file"%len(cell_elements))

    for cell in cell_elements:
        
        cellName = cell.id
        print("Handling cell: %s"%cellName)

        
        declaredcell = "cell_"+cellName

        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n"%declaredcell)
        cells_file.write("union {\n")

        prefix = ""


        segments = cell.morphology.segments

        distpoints = {}

        for segment in segments:

            id = int(segment.id)

            distal = segment.distal

            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = max(float(distal.diameter)/2.0, args.mindiam)

            if x-r<minXc: minXc=x-r
            if y-r<minYc: minYc=y-r
            if z-r<minZc: minZc=z-r

            if x+r>maxXc: maxXc=x+r
            if y+r>maxYc: maxYc=y+r
            if z+r>maxZc: maxZc=z+r

            distalpoint = "<%f, %f, %f>, %f "%(x,y,z,r)

            distpoints[id] = distalpoint

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f "%(float(proximal.x),float(proximal.y),float(proximal.z),max(float(proximal.diameter)/2.0, args.mindiam))
            else:
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]

            shape = "cone"
            if proximalpoint == distalpoint:
                shape = "sphere"
                proximalpoint = ""
                
            if ( shape == "cone" and (proximalpoint.split('>')[0] == distalpoint.split('>')[0])):
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n"%(id, proximalpoint, distalpoint)
                print(comment)
                cells_file.write("    // "+comment)
                
            else:
                cells_file.write("    %s {\n"%shape)
                cells_file.write("        %s\n"%distalpoint)
                if len(proximalpoint): cells_file.write("        %s\n"%proximalpoint)
                cells_file.write("        //%s_%s.%s\n"%('CELL_GROUP_NAME','0', id))
                cells_file.write("    }\n")

        cells_file.write("    pigment { color rgb <%f,%f,%f> }\n"%(random.random(),random.random(),random.random()))

        cells_file.write("}\n\n")


    if splitOut:
        pov_file.write("#include \""+cf+"\"\n\n")
        pov_file.write("#include \""+nf+"\"\n\n")
        
    pov_file.write('''\n/*\n  Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s = 
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 5.000000 
    }
    pigment { color rgb <1,0,0> }
}\n'''%_DUMMY_CELL)


    positions = {}
    popElements = nml_doc.networks[0].populations

    print("There are %i populations in the file"%len(popElements))

    for pop in popElements:
        
        name = pop.id
        celltype = pop.component
        instances = pop.instances
        

        info = "Population: %s has %i positioned cells of type: %s"%(name,len(instances),celltype)
        print(info)

        colour = "1"
        
        for prop in pop.properties:

            if prop.tag == 'color':
                colour = prop.value
                colour = colour.replace(" ", ",")
                #print "Colour determined to be: "+colour
        
        net_file.write("\n\n/* "+info+" */\n\n")

        pop_positions = {}
        
        if not celltype in declaredcells:
            cell_definition = _DUMMY_CELL  
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
        else:
            cell_definition = declaredcells[celltype]
        
        for instance in instances:

            location = instance.location
            id = instance.id
            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = '<%s,%s,%s>'%(x,y,z)

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id)) 

            net_file.write("}\n")
        
        positions[name] = pop_positions
            
        if len(instances) == 0 and int(pop.size) > 0:
            
            info = "Population: %s has %i unpositioned cells of type: %s"%(name,pop.size,celltype)
            print(info)

            colour = "1"
            '''
            if pop.annotation:
                print dir(pop.annotation)
                print pop.annotation.anytypeobjs_
                print pop.annotation.member_data_items_[0].name
                print dir(pop.annotation.member_data_items_[0])
                for prop in pop.annotation.anytypeobjs_:
                    print prop

                    if len(prop.getElementsByTagName('meta:tag'))>0 and prop.getElementsByTagName('meta:tag')[0].childNodes[0].data == 'color':
                        #print prop.getElementsByTagName('meta:tag')[0].childNodes
                        colour = prop.getElementsByTagName('meta:value')[0].childNodes[0].data
                        colour = colour.replace(" ", ",")
                    elif prop.hasAttribute('tag') and prop.getAttribute('tag') == 'color':
                        colour = prop.getAttribute('value')
                        colour = colour.replace(" ", ",")
                    print "Colour determined to be: "+colour
            '''

            net_file.write("\n\n/* "+info+" */\n\n")


            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = 0
            y = 0
            z = 0

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id)) 

            net_file.write("}\n")
            
    if False and args.conns: # false because segment specific connections not implemented yet... i.e. connections from dends to axons...
        for projection in nml_doc.networks[0].projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            for connection in projection.connections:
                pre_cell = int(connection.pre_cell_id.split("/")[2])
                post_cell = int(connection.post_cell_id.split("/")[2])
                pre_loc = positions[pre][pre_cell]
                post_loc = positions[post][post_cell]
                net_file.write("// Connection from %s:%s %s -> %s:%s %s\n"%(pre, pre_cell, pre_loc, post, post_cell, post_loc)) 
                net_file.write("cylinder{ %s, %s, .1  pigment{color Grey}}\n\n"%(pre_loc, post_loc))
                #net_file.write("blob {\n    threshold .65\n    sphere { %s, .9, 10 pigment {Blue} }\n"%(pre_loc))
                #net_file.write("    cylinder{ %s, %s, .4, 1 }\n"%(pre_loc, post_loc))
                #net_file.write("    sphere { %s,5, 10 pigment {Green} }\n    finish { phong 1 }\n}\n\n"%(post_loc))
        
        
    plane = '''
plane {
   y, vv(-1)
   pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
'''

    footer='''

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
    0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
    0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
    0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
  <uu(5),uu(2),uu(5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(-5),uu(2),uu(-5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(5),uu(-2),uu(-5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(-5),uu(-2),uu(5)>
  color rgb <1,1,1>
}


// Trying to view box
camera {
  location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
  look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
    \n'''%(minX,minY,minZ,maxX,maxY,maxZ, args.posx, args.scalex, args.posy, args.scaley, args.posz, args.scalez, args.viewx, args.viewy, args.viewz, (plane if args.plane else "")) ###    end of footer


    pov_file.write(footer)

    pov_file.close()

    if args.movie:
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")
    
        ini_movie = '''
Antialias=On

+W800 +H600 
        
Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off
        
        '''
        ini_file = open(ini_file_name, 'w')
        ini_file.write(ini_movie%(pov_file_name, args.frames))
        ini_file.close()
        
        print("Created file for generating %i movie frames at: %s. To run this type:\n\n    povray %s\n"%(args.frames,ini_file_name,ini_file_name))
        
    else:
        
        print("Created file for generating image of network. To run this type:\n\n    povray %s\n"%(pov_file_name))
        print("Or for higher resolution:\n\n    povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n"%(pov_file_name))
Code example #40
0
File: export_mitral.py Project: JustasB/Mig3DTest
def __main__():
    num_cells_to_export = 1

    cells = []
    for mgid in range(num_cells_to_export):
        print(mgid)
        cells.append(mkmitral(mgid))

    nml_net_file = "../NeuroML2/MitralCells/Exported/PartialBulb_%iMTCells.net.nml" % num_cells_to_export
    export_to_neuroml2(None, 
                       nml_net_file,
                       includeBiophysicalProperties=False,
                       separateCellFiles=True)

    for i in range(num_cells_to_export):

        print("Processing cell %i out of %i"%(i, num_cells_to_export))

        nml_cell_file = "../NeuroML2/MitralCells/Exported/Mitral_0_%i.cell.nml" % i

        nml_doc = pynml.read_neuroml2_file(nml_cell_file)

        cell = nml_doc.cells[0]

        # Remote-debugging hook left over from the original export script; commented
        # out here so the example does not block waiting for a pydevd server.
        # import pydevd
        # pydevd.settrace('10.211.55.3', port=4200, stdoutToServer=True, stderrToServer=True, suspend=True)
        
        # Set root to id=0 and increment others
        exportHelper.resetRoot(cell)

        somaSeg = [seg for seg in cell.morphology.segments if seg.name == "Seg0_soma"][0]
        initialSeg = [seg for seg in cell.morphology.segments if seg.name == "Seg0_initialseg"][0]
        hillockSeg = [seg for seg in cell.morphology.segments if seg.name == "Seg0_hillock"][0]

        # Fix initial and hillock segs by moving them to the soma
        hillockSeg.proximal = pointMovedByOffset(hillockSeg.proximal, somaSeg.distal)
        hillockSeg.distal = pointMovedByOffset(hillockSeg.distal, somaSeg.distal)
        initialSeg.proximal = pointMovedByOffset(initialSeg.proximal, somaSeg.distal)
        initialSeg.distal = pointMovedByOffset(initialSeg.distal, somaSeg.distal)

        # Move everything back to the origin
        originOffset = type("", (), dict(x = -somaSeg.proximal.x, y = -somaSeg.proximal.y, z = -somaSeg.proximal.z ))()

        for seg in cell.morphology.segments:
            seg.proximal = pointMovedByOffset(seg.proximal, originOffset)
            seg.distal =   pointMovedByOffset(seg.distal, originOffset)

        # Replace ModelViewParmSubset_N groups with all, axon, soma, dendrite groups
        buildStandardSegmentGroups(cell)

        # Add channel placeholders
        nml_doc.includes.append(neuroml.IncludeType(href="channelIncludesPLACEHOLDER"))
        cell.biophysical_properties = neuroml.BiophysicalProperties(id="biophysPLACEHOLDER")

        # Save the new NML
        pynml.write_neuroml2_file(nml_doc, nml_cell_file)

        # Replace placeholders with contents from MitralCell...xml files
        replaceChannelPlaceholders(nml_cell_file)

        print("COMPLETED: " + nml_cell_file)

    print("DONE")
Code example #41
0
def analyse_cell(dataset_id,
                 type,
                 info,
                 nogui=False,
                 densities=False,
                 analysis_dir='../../data/'):

    reference = '%s_%s' % (type, dataset_id)
    cell_file = '%s/%s.cell.nml' % (type, reference)

    print(
        "====================================\n\n   Analysing cell: %s, dataset %s\n"
        % (cell_file, dataset_id))

    nml_doc = pynml.read_neuroml2_file(cell_file)
    notes = nml_doc.cells[0].notes if len(
        nml_doc.cells) > 0 else nml_doc.izhikevich2007_cells[0].notes
    meta_nml = eval(notes[notes.index('{'):])
    summary = "Fitness: %s (max evals: %s, pop: %s)" % (
        meta_nml['fitness'], meta_nml['max_evaluations'],
        meta_nml['population_size'])
    print(summary)

    images = 'summary/%s_%s.png'
    if_iv_data_files = 'summary/%s_%s.dat'

    data, v_sub, curents_sub, freqs, curents_spike = get_if_iv_for_dataset(
        '%s%s_analysis.json' % (analysis_dir, dataset_id))

    if densities:

        dataset = {}
        seed = meta_nml['seed']
        if isinstance(seed, tuple):
            seed = seed[0]
        layer = str(data['location'].split(',')[-1].strip().replace(' ', ''))
        ref = '%s_%s_%s' % (dataset_id, layer, int(seed))

        dataset['id'] = dataset_id
        dataset['reference'] = ref
        metas = ['aibs_cre_line', 'aibs_dendrite_type', 'location']
        for m in metas:
            dataset[m] = str(data[m])

        metas2 = ['fitness', 'population_size', 'seed']
        for m in metas2:
            dataset[m] = meta_nml[m]

        # Assume images below already generated...
        if type == 'HH':

            cell = nml_doc.cells[0]

            sgv_files, all_info = generate_channel_density_plots(
                cell_file, text_densities=True, passives_erevs=True)
            sgv_file = sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]

            info['datasets'][ref] = dataset

        elif type == 'Izh':

            dataset['tuned_cell_info'] = {}
            izh_cell = nml_doc.izhikevich2007_cells[0]

            for p in ['C', 'a', 'b', 'c', 'd', 'k', 'vpeak', 'vr', 'vt']:

                dataset['tuned_cell_info'][p] = get_value_in_si(
                    getattr(izh_cell, p))
            '''
            sgv_files, all_info = generate_channel_density_plots(cell_file, text_densities=True, passives_erevs=True)
            sgv_file =sgv_files[0]
            for c in all_info:
                if c == cell.id:
                    cc = 'tuned_cell_info'
                else:
                    cc = c
                dataset[cc] = all_info[c]'''

        info['datasets'][ref] = dataset

    else:

        traces_ax, if_ax, iv_ax = generate_current_vs_frequency_curve(
            cell_file,
            reference,
            simulator='jNeuroML_NEURON',
            start_amp_nA=-0.1,
            end_amp_nA=0.4,
            step_nA=0.01,
            analysis_duration=1000,
            analysis_delay=50,
            plot_voltage_traces=False,
            plot_if=not nogui,
            plot_iv=not nogui,
            xlim_if=[-200, 400],
            ylim_if=[-10, 120],
            xlim_iv=[-200, 400],
            ylim_iv=[-120, -40],
            save_if_figure_to=images % (reference, 'if'),
            save_iv_figure_to=images % (reference, 'iv'),
            save_if_data_to=if_iv_data_files % (reference, 'if'),
            save_iv_data_to=if_iv_data_files % (reference, 'iv'),
            show_plot_already=False,
            return_axes=True)

        iv_ax.plot(curents_sub,
                   v_sub,
                   color='#ff2222',
                   marker='o',
                   linestyle='',
                   zorder=1)
        if_ax.plot(curents_spike,
                   freqs,
                   color='#ff2222',
                   marker='o',
                   linestyle='',
                   zorder=1)

        iv_ax.get_figure().savefig(images % (reference, 'iv'),
                                   bbox_inches='tight')
        if_ax.get_figure().savefig(images % (reference, 'if'),
                                   bbox_inches='tight')

        offset = 100  # mV

        ifv_x = []
        ifv_y = []
        markers = []
        lines = []
        colors = []

        cols = {'Izh': 'r', 'HH': 'g', 'AllenHH': 'b'}

        for ii in ['if', 'iv']:
            for tt in ['Izh', 'HH', 'AllenHH']:
                rr = '%s_%s' % (tt, dataset_id)
                f = if_iv_data_files % (rr, ii)
                if os.path.isfile(f):
                    print("--- Opening: %s" % f)
                    data, indeces = reload_standard_dat_file(f)

                    ifv_x.append(data['t'])

                    if ii == 'if':
                        ifv_y.append([ff - offset for ff in data[0]])
                    else:
                        ifv_y.append([vv for vv in data[0]])

                    markers.append('')
                    colors.append(cols[tt])
                    lines.append('-')

        ifv_x.append(curents_sub)
        vvsub = [vv for vv in v_sub]

        ifv_y.append(vvsub)

        sub_color = '#888888'
        markers.append('D')
        colors.append('k')
        lines.append('')

        ifv_x.append(curents_spike)
        ifv_y.append([ff - offset for ff in freqs])

        markers.append('o')
        colors.append(sub_color)
        lines.append('')

        import matplotlib
        import matplotlib.pyplot as plt

        print(ifv_x)
        print(ifv_y)
        ylim = [-105, -20]
        font_size = 18
        ax1 = pynml.generate_plot(ifv_x,
                                  ifv_y,
                                  summary,
                                  markers=markers,
                                  colors=colors,
                                  linestyles=lines,
                                  show_plot_already=False,
                                  xlim=[-100, 400],
                                  font_size=font_size,
                                  ylim=ylim,
                                  title_above_plot=False)

        plt.xlabel('Input current (pA)', fontsize=font_size)
        plt.ylabel("Steady membrane potential (mV)", fontsize=font_size)

        ax2 = ax1.twinx()
        plt.ylim([ylim[0] + offset, ylim[1] + offset])
        plt.ylabel('Firing frequency (Hz)',
                   color=sub_color,
                   fontsize=font_size)
        ax2.tick_params(axis='y', colors=sub_color)

        #plt.axis('off')

        plt.savefig(images % (reference, 'if_iv' + "_FIG"),
                    bbox_inches='tight')

        temp_dir = 'temp/'

        print("Copying %s to %s" % (cell_file, temp_dir))
        shutil.copy(cell_file, temp_dir)

        net_file = generate_network_for_sweeps(type,
                                               dataset_id,
                                               '%s.cell.nml' % (reference),
                                               reference,
                                               temp_dir,
                                               data_dir=analysis_dir)

        lems_file_name = 'LEMS_Test_%s_%s.xml' % (type, dataset_id)

        generate_lems_file_for_neuroml('Test_%s_%s' % (dataset_id, type),
                                       net_file,
                                       'network_%s_%s' % (dataset_id, type),
                                       1500,
                                       0.01,
                                       lems_file_name,
                                       temp_dir,
                                       gen_plots_for_all_v=False,
                                       copy_neuroml=False)

        simulator = "jNeuroML_NEURON"

        if simulator == "jNeuroML":
            results = pynml.run_lems_with_jneuroml(temp_dir + lems_file_name,
                                                   nogui=True,
                                                   load_saved_data=True,
                                                   plot=False,
                                                   show_plot_already=False)
        elif simulator == "jNeuroML_NEURON":
            results = pynml.run_lems_with_jneuroml_neuron(
                temp_dir + lems_file_name,
                nogui=True,
                load_saved_data=True,
                plot=False,
                show_plot_already=False)

        x = []
        y = []

        print(results.keys())

        tt = [t * 1000 for t in results['t']]
        for i in range(len(results) - 1):
            x.append(tt)
            y.append([
                v * 1000
                for v in results['Pop0/%i/%s_%s/v' % (i, type, dataset_id)]
            ])

        pynml.generate_plot(x,
                            y,
                            summary,
                            show_plot_already=False,
                            ylim=[-120, 60],
                            save_figure_to=images % (reference, 'traces'),
                            title_above_plot=True)

        ax = pynml.generate_plot(x,
                                 y,
                                 summary,
                                 show_plot_already=False,
                                 ylim=[-120, 60],
                                 title_above_plot=False)

        ax.set_xlabel(None)
        ax.set_ylabel(None)
        plt.axis('off')

        fig_file = images % (reference, 'traces' + "_FIG")
        plt.savefig(fig_file, bbox_inches='tight', pad_inches=0)
        from PIL import Image
        img = Image.open(fig_file)

        img2 = img.crop((60, 40, 660, 480))
        img2.save(fig_file)
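
A hedged driver for analyse_cell; the dataset ids below are placeholders, and the summary/ and temp/ directories referenced above are assumed to exist.

# Illustrative driver only: the dataset ids are placeholders.
if __name__ == '__main__':
    info = {'datasets': {}}
    for dataset_id in [12345, 67890]:      # placeholder dataset ids
        for cell_type in ['Izh', 'HH']:
            analyse_cell(dataset_id, cell_type, info,
                         nogui=True, densities=True)
    print("Collected metadata for %i tuned cells" % len(info['datasets']))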
Code example #42
0
def generate_channel_density_plots(nml2_file,
                                   text_densities=False,
                                   passives_erevs=False,
                                   target_directory=None):
    nml_doc = read_neuroml2_file(nml2_file,
                                 include_includes=True,
                                 verbose=False,
                                 optimized=True)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)
    svg_files = []
    all_info = {}

    for cell in cell_elements:
        info = {}
        all_info[cell.id] = info
        print_comment_v("Extracting channel density info from %s" % cell.id)
        sb = ''
        ions = {}
        maxes = {}
        mins = {}
        row = 0
        na_ions = []
        k_ions = []
        ca_ions = []
        other_ions = []

        if isinstance(cell, Cell2CaPools):
            cds = cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_densities + \
                cell.biophysical_properties2_ca_pools.membrane_properties2_ca_pools.channel_density_nernsts
        elif isinstance(cell, Cell):
            cds = cell.biophysical_properties.membrane_properties.channel_densities + \
                cell.biophysical_properties.membrane_properties.channel_density_nernsts

        epas = None
        ena = None
        ek = None
        eh = None
        eca = None

        for cd in cds:
            dens_si = get_value_in_si(cd.cond_density)
            print_comment_v(
                "cd: %s, ion_channel: %s, ion: %s, density: %s (SI: %s)" %
                (cd.id, cd.ion_channel, cd.ion, cd.cond_density, dens_si))

            ions[cd.ion_channel] = cd.ion
            erev_V = get_value_in_si(cd.erev) if hasattr(cd, 'erev') else None
            erev = '%s mV' % format_float(erev_V * 1000) if hasattr(
                cd, 'erev') else None

            if cd.ion == 'na':
                if cd.ion_channel not in na_ions:
                    na_ions.append(cd.ion_channel)
                ena = erev
                info['ena'] = erev_V
            elif cd.ion == 'k':
                if cd.ion_channel not in k_ions:
                    k_ions.append(cd.ion_channel)
                ek = erev
                info['ek'] = erev_V
            elif cd.ion == 'ca':
                if cd.ion_channel not in ca_ions:
                    ca_ions.append(cd.ion_channel)
                eca = erev
                info['eca'] = erev_V
            else:
                if cd.ion_channel not in other_ions:
                    other_ions.append(cd.ion_channel)
                if cd.ion == 'non_specific':
                    epas = erev
                    info['epas'] = erev_V
                if cd.ion == 'h':
                    eh = erev
                    info['eh'] = erev_V

            if cd.ion_channel in maxes:
                if dens_si > maxes[cd.ion_channel]:
                    maxes[cd.ion_channel] = dens_si
            else:
                maxes[cd.ion_channel] = dens_si
            if cd.ion_channel in mins:
                if dens_si < mins[cd.ion_channel]:
                    mins[cd.ion_channel] = dens_si
            else:
                mins[cd.ion_channel] = dens_si

        for ion_channel in na_ions + k_ions + ca_ions + other_ions:
            col = get_ion_color(ions[ion_channel])
            info[ion_channel] = {
                'max': maxes[ion_channel],
                'min': mins[ion_channel]
            }

            if maxes[ion_channel] > 0:
                sb += _get_rect(ion_channel, row, maxes[ion_channel],
                                mins[ion_channel], col[0], col[1], col[2],
                                text_densities)
                row += 1

        if passives_erevs:

            if ena:
                sb += add_text(row, "E Na = %s " % ena)
                row += 1
            if ek:
                sb += add_text(row, "E K = %s " % ek)
                row += 1
            if eca:
                sb += add_text(row, "E Ca = %s" % eca)
                row += 1
            if eh:
                sb += add_text(row, "E H = %s" % eh)
                row += 1
            if epas:
                sb += add_text(row, "E pas = %s" % epas)
                row += 1

            for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                sb += add_text(row,
                               "C (%s) = %s" % (sc.segment_groups, sc.value))

                info['specific_capacitance_%s' %
                     sc.segment_groups] = get_value_in_si(sc.value)
                row += 1

            # sb+='<text x="%s" y="%s" fill="black" font-family="Arial">%s</text>\n'%(width/3., (height+spacing)*(row+1), text)

        sb = "<?xml version='1.0' encoding='UTF-8'?>\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"" + str(
            width + text_densities * 200) + "\" height=\"" + str(
                (height + spacing) * row) + "\">\n" + sb + "</svg>\n"

        print(sb)
        svg_file = nml2_file + "_channeldens.svg"
        if target_directory:
            svg_file = target_directory + "/" + svg_file.split('/')[-1]
        svg_files.append(svg_file)
        sf = open(svg_file, 'w')
        sf.write(sb)
        sf.close()
        print_comment_v("Written to %s" % os.path.abspath(svg_file))

        pp.pprint(all_info)

    return svg_files, all_info
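
generate_channel_density_plots returns the paths of the SVG files it wrote together with a dict of the extracted densities and reversal potentials; a minimal usage sketch with a placeholder file name:

# Illustrative call; 'SomeCell.cell.nml' is a placeholder file name.
svg_files, all_info = generate_channel_density_plots('SomeCell.cell.nml',
                                                     text_densities=True,
                                                     passives_erevs=True)
for svg in svg_files:
    print("Channel density summary written to: %s" % svg)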
Code example #43
0
def process_celldir(inputs):
    """Process cell directory"""

    count, cell_dir, nml2_cell_dir, total_count = inputs
    local_nml2_cell_dir = os.path.join("..", nml2_cell_dir)

    print(
        "\n\n************************************************************\n\n"
        "Parsing %s (cell %i/%i)\n" % (cell_dir, count, total_count)
    )

    if os.path.isdir(cell_dir):
        old_cwd = os.getcwd()
        os.chdir(cell_dir)
    else:
        old_cwd = os.getcwd()
        os.chdir("../" + cell_dir)

    if make_zips:
        nml2_cell_dir = "%s/%s" % (zips_dir, cell_dir)
        if not os.path.isdir(nml2_cell_dir):
            os.mkdir(nml2_cell_dir)

    print("Generating into %s" % nml2_cell_dir)

    bbp_ref = None

    template_file = open("template.hoc", "r")
    for line in template_file:
        if line.startswith("begintemplate "):
            bbp_ref = line.split(" ")[1].strip()
            print(" > Assuming cell in directory %s is in a template named %s" % (cell_dir, bbp_ref))

    load_cell_file = "loadcell.hoc"

    variables = {}

    variables["cell"] = bbp_ref
    variables["groups_info_file"] = groups_info_file

    template = """
///////////////////////////////////////////////////////////////////////////////
//
//   NOTE: This file is not part of the original BBP cell model distribution
//   It has been generated by ../ParseAll.py to facilitate loading of the cell
//   into NEURON for exporting the model morphology to NeuroML2
//
//////////////////////////////////////////////////////////////////////////////

load_file("stdrun.hoc")

objref cvode
cvode = new CVode()
cvode.active(1)

//======================== settings ===================================

v_init = -80

hyp_amp = -0.062866
step_amp = 0.3112968
tstop = 3000

//=================== creating cell object ===========================
load_file("import3d.hoc")
objref cell

// Using 1 to force loading of the file, in case file with same name was loaded
// before...
load_file(1, "constants.hoc")
load_file(1, "morphology.hoc")
load_file(1, "biophysics.hoc")
print "Loaded morphology and biophysics..."

load_file(1, "synapses/synapses.hoc")
load_file(1, "template.hoc")
print "Loaded template..."

load_file(1, "createsimulation.hoc")


create_cell(0)
print "Created new cell using loadcell.hoc: {{ cell }}"

define_shape()

wopen("{{ groups_info_file }}")

fprint("//Saving information on groups in this cell...\\n")

fprint("- somatic\\n")
forsec {{ cell }}[0].somatic {
    fprint("%s\\n",secname())
}

fprint("- basal\\n")
forsec {{ cell }}[0].basal {
    fprint("%s\\n",secname())
}

fprint("- axonal\\n")
forsec {{ cell }}[0].axonal {
    fprint("%s\\n",secname())
}
fprint("- apical\\n")
forsec {{ cell }}[0].apical {
    fprint("%s\\n",secname())
}
wopen()
        """

    t = Template(template)

    contents = t.render(variables)

    load_cell = open(load_cell_file, "w")
    load_cell.write(contents)
    load_cell.close()

    print(" > Written %s" % load_cell_file)

    if os.path.isfile(load_cell_file):

        cell_info = parse_cell_info_file(cell_dir)

        nml_file_name = "%s.net.nml" % bbp_ref
        nml_net_loc = "%s/%s" % (local_nml2_cell_dir, nml_file_name)
        nml_cell_file = "%s_0_0.cell.nml" % bbp_ref
        nml_cell_loc = "%s/%s" % (local_nml2_cell_dir, nml_cell_file)

        print(" > Loading %s and exporting to %s" % (load_cell_file, nml_net_loc))

        export_to_neuroml2(load_cell_file, nml_net_loc, separateCellFiles=True, includeBiophysicalProperties=False)

        print(" > Exported to: %s and %s using %s" % (nml_net_loc, nml_cell_loc, load_cell_file))

        nml_doc = pynml.read_neuroml2_file(nml_cell_loc)

        cell = nml_doc.cells[0]

        print(" > Adding groups from: %s" % groups_info_file)
        groups = {}
        current_group = None
        for line in open(groups_info_file):
            if not line.startswith("//"):
                if line.startswith("- "):
                    current_group = line[2:-1]
                    print(" > Adding group: [%s]" % current_group)
                    groups[current_group] = []
                else:
                    section = line.split(".")[1].strip()
                    segment_group = section.replace("[", "_").replace("]", "")
                    groups[current_group].append(segment_group)

        for g in groups.keys():
            new_seg_group = neuroml.SegmentGroup(id=g)
            cell.morphology.segment_groups.append(new_seg_group)
            for sg in groups[g]:
                new_seg_group.includes.append(neuroml.Include(sg))
            if g in ["basal", "apical"]:
                new_seg_group.inhomogeneous_parameters.append(
                    neuroml.InhomogeneousParameter(
                        id="PathLengthOver_" + g,
                        variable="p",
                        metric="Path Length from root",
                        proximal=neuroml.ProximalDetails(translation_start="0"),
                    )
                )

        ignore_chans = [
            "Ih",
            "Ca_HVA",
            "Ca_LVAst",
            "Ca",
            "SKv3_1",
            "SK_E2",
            "CaDynamics_E2",
            "Nap_Et2",
            "Im",
            "K_Tst",
            "NaTa_t",
            "K_Pst",
            "NaTs2_t",
        ]

        # ignore_chans=['StochKv','StochKv_deterministic']
        ignore_chans = []

        bp, incl_chans = get_biophysical_properties(
            cell_info["e-type"], ignore_chans=ignore_chans, templates_json="../templates.json"
        )

        cell.biophysical_properties = bp

        print("Set biophysical properties")

        notes = ""
        notes += (
            "\n\nExport of a cell model obtained from the BBP Neocortical"
            "Microcircuit Collaboration Portal into NeuroML2"
            "\n\n******************************************************\n*"
            "  This export to NeuroML2 has not yet been fully validated!!"
            "\n*  Use with caution!!\n***********************************"
            "*******************\n\n"
        )

        if len(ignore_chans) > 0:
            notes += "Ignored channels = %s\n\n" % ignore_chans

        notes += (
            "For more information on this cell model see: "
            "https://bbp.epfl.ch/nmc-portal/microcircuit#/metype/%s/"
            "details\n\n" % cell_info["me-type"]
        )

        cell.notes = notes
        for channel in incl_chans:

            nml_doc.includes.append(neuroml.IncludeType(href="%s" % channel))

            if make_zips:
                print("Copying %s to zip folder" % channel)
                shutil.copyfile("../../NeuroML2/%s" % channel, "%s/%s" % (local_nml2_cell_dir, channel))

        pynml.write_neuroml2_file(nml_doc, nml_cell_loc)

        stim_ref = "stepcurrent3"
        stim_ref_hyp = "%s_hyp" % stim_ref
        stim_sim_duration = 3000
        stim_hyp_amp, stim_amp = get_stimulus_amplitudes(bbp_ref)
        stim_del = "700ms"
        stim_dur = "2000ms"

        new_net_loc = "%s/%s.%s.net.nml" % (local_nml2_cell_dir, bbp_ref, stim_ref)
        new_net_doc = pynml.read_neuroml2_file(nml_net_loc)

        new_net_doc.notes = notes

        stim_hyp = neuroml.PulseGenerator(
            id=stim_ref_hyp, delay="0ms", duration="%sms" % stim_sim_duration, amplitude=stim_hyp_amp
        )
        new_net_doc.pulse_generators.append(stim_hyp)
        stim = neuroml.PulseGenerator(id=stim_ref, delay=stim_del, duration=stim_dur, amplitude=stim_amp)
        new_net_doc.pulse_generators.append(stim)

        new_net = new_net_doc.networks[0]

        pop_id = new_net.populations[0].id
        pop_comp = new_net.populations[0].component
        input_list = neuroml.InputList(id="%s_input" % stim_ref_hyp, component=stim_ref_hyp, populations=pop_id)

        syn_input = neuroml.Input(id=0, target="../%s/0/%s" % (pop_id, pop_comp), destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        input_list = neuroml.InputList(id="%s_input" % stim_ref, component=stim_ref, populations=pop_id)

        syn_input = neuroml.Input(id=0, target="../%s/0/%s" % (pop_id, pop_comp), destination="synapses")

        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(
            cell_dir,
            new_net_loc,
            "network",
            stim_sim_duration,
            0.025,
            "LEMS_%s.xml" % cell_dir,
            local_nml2_cell_dir,
            copy_neuroml=False,
            seed=1234,
        )

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        pop = neuroml.Population(id="Pop_%s" % bbp_ref, component=bbp_ref + "_0_0", type="populationList")

        inst = neuroml.Instance(id="0")
        pop.instances.append(inst)

        width = 6
        X = count % width
        Z = (count - X) / width
        inst.location = neuroml.Location(x=300 * X, y=0, z=300 * Z)

        count += 1

        if make_zips:
            zip_file = "%s/%s.zip" % (zips_dir, cell_dir)
            print("Creating zip file: %s" % zip_file)
            with zipfile.ZipFile(zip_file, "w") as myzip:

                for next_file in os.listdir(local_nml2_cell_dir):
                    next_file = "%s/%s" % (local_nml2_cell_dir, next_file)
                    arcname = next_file[len(zips_dir) :]
                    print("Adding : %s as %s" % (next_file, arcname))
                    myzip.write(next_file, arcname)

        os.chdir(old_cwd)

        return nml_cell_file, pop
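
The driver that calls process_celldir is not shown in this listing; based on the tuple unpacked at the top of the function, a minimal (hypothetical) loop over cell directories could look like the sketch below. The directory names are placeholders, and module-level settings such as make_zips, zips_dir and groups_info_file are assumed to be defined elsewhere in the original script.

# Hypothetical driver for process_celldir; directory names are placeholders
cell_dirs = ["L5_TTPC1_cADpyr232_1", "L5_TTPC2_cADpyr232_2"]
results = []
for count, cell_dir in enumerate(cell_dirs):
    nml_cell_file, pop = process_celldir((count, cell_dir, "NeuroML2", len(cell_dirs)))
    results.append((nml_cell_file, pop))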
Code example #44
    def include_neuroml2_file(self, nml2_file_name, include_included=True, relative_to_dir='.'):
        self.lems_info['include_files'].append(nml2_file_name)
        if include_included:
            cell = read_neuroml2_file(relative_to_dir+'/'+nml2_file_name)
            for include in cell.includes:
                self.include_neuroml2_file(include.href, include_included=True, relative_to_dir=relative_to_dir)
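
This method belongs to LEMSSimulation and recursively follows the <include> elements of the referenced file, so including a network file also registers its cell and channel files. A minimal sketch of how it is typically called (the file name and target id are illustrative placeholders):

# Illustrative use; 'gocNetwork.nml' and the simulation target id are placeholders
from pyneuroml.lems import LEMSSimulation

ls = LEMSSimulation('sim_example', duration=500, dt=0.025, simulation_seed=1234)
ls.assign_simulation_target('gocNetwork')
ls.include_neuroml2_file('gocNetwork.nml', include_included=True, relative_to_dir='.')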
Code example #45
def create_GoC_network(duration, dt, seed, runid, run=False):

    ### ---------- Load Params
    noPar = True
    pfile = Path('params_file.pkl')
    if pfile.exists():
        print('Reading parameters from file:')
        file = open('params_file.pkl', 'rb')
        params_list = pkl.load(file)
        file.close()
        if len(params_list) > runid:
            p = params_list[runid]
            noPar = False  # parameters for this run were found in the file
    if noPar:
        p = inp.get_simulation_params(runid)

    ### ---------- Component types
    goc_filename = 'GoC.cell.nml'  # Golgi cell with channels
    goc_file = pynml.read_neuroml2_file(goc_filename)
    goc_type = goc_file.cells[0]
    goc_ref = nml.IncludeType(href=goc_filename)

    gj = nml.GapJunction(id="GJ_0", conductance="426pS")  # GoC synapse

    ### --------- Populations

    # Build network to specify cells and connectivity
    net = nml.Network(id="gocNetwork",
                      type="networkWithTemperature",
                      temperature="23 degC")

    # Create GoC population
    goc_pop = nml.Population(id=goc_type.id + "Pop",
                             component=goc_type.id,
                             type="populationList",
                             size=p["nGoC"])
    for goc in range(p["nGoC"]):
        inst = nml.Instance(id=goc)
        goc_pop.instances.append(inst)
        inst.location = nml.Location(x=p["GoC_pos"][goc, 0],
                                     y=p["GoC_pos"][goc, 1],
                                     z=p["GoC_pos"][goc, 2])
    net.populations.append(goc_pop)

    # Create NML document for network specification
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.includes.append(goc_ref)
    net_doc.gap_junctions.append(gj)

    ### ------------ Connectivity

    ### 1. Input Current to one cell
    ctr = 0
    for goc in p["Test_GoC"]:
        for jj in range(p["nSteps"]):
            input_id = 'stim_{}'.format(ctr)
            istep = nml.PulseGenerator(
                id=input_id,
                delay='{} ms'.format(p["iDuration"] * jj + p["iRest"] *
                                     (jj + 1)),
                duration='{} ms'.format(p["iDuration"]),
                amplitude='{} pA'.format(p["iAmp"][jj]))
            net_doc.pulse_generators.append(istep)

            input_list = nml.InputList(id='ilist_{}'.format(ctr),
                                       component=istep.id,
                                       populations=goc_pop.id)
            curr_inj = nml.Input('0',
                                 target="../%s[%i]" % (goc_pop.id, goc),
                                 destination="synapses")
            input_list.input.append(curr_inj)
            net.input_lists.append(input_list)
            ctr += 1

    ### 2. Electrical coupling between GoCs

    GoCCoupling = nml.ElectricalProjection(id="gocGJ",
                                           presynaptic_population=goc_pop.id,
                                           postsynaptic_population=goc_pop.id)
    net.electrical_projections.append(GoCCoupling)
    dend_id = [1, 2, 5]
    for jj in range(p["GJ_pairs"].shape[0]):
        conn = nml.ElectricalConnectionInstanceW(
            id=jj,
            pre_cell='../{}/{}/{}'.format(goc_pop.id, p["GJ_pairs"][jj, 0],
                                          goc_type.id),
            pre_segment=dend_id[p["GJ_loc"][jj, 0]],
            pre_fraction_along='0.5',
            post_cell='../{}/{}/{}'.format(goc_pop.id, p["GJ_pairs"][jj, 1],
                                           goc_type.id),
            post_segment=dend_id[p["GJ_loc"][jj, 1]],
            post_fraction_along='0.5',
            synapse=gj.id,
            weight=p["GJ_wt"][jj])
        GoCCoupling.electrical_connection_instance_ws.append(conn)

    ### --------------  Write files

    net_filename = 'gocNetwork.nml'
    pynml.write_neuroml2_file(net_doc, net_filename)

    simid = 'sim_gocnet_' + goc_type.id + '_run_{}'.format(runid)
    ls = LEMSSimulation(simid, duration=duration, dt=dt, simulation_seed=seed)
    ls.assign_simulation_target(net.id)
    ls.include_neuroml2_file(net_filename)
    ls.include_neuroml2_file(goc_filename)

    # Specify outputs
    eof0 = 'Events_file'
    ls.create_event_output_file(eof0, "%s.v.spikes" % simid, format='ID_TIME')
    for jj in range(goc_pop.size):
        ls.add_selection_to_event_output_file(
            eof0, jj, '{}/{}/{}'.format(goc_pop.id, jj, goc_type.id), 'spike')

    of0 = 'Volts_file'
    ls.create_output_file(of0, "%s.v.dat" % simid)
    ctr = 0
    for jj in p["Test_GoC"]:
        ls.add_column_to_output_file(
            of0, jj, '{}/{}/{}/v'.format(goc_pop.id, ctr, goc_type.id))
        ctr += 1

    #Create Lems file to run
    lems_simfile = ls.save_to_file()

    if run:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  nogui=True,
                                                  plot=False)
    else:
        res = pynml.run_lems_with_jneuroml_neuron(lems_simfile,
                                                  max_memory="2G",
                                                  only_generate_scripts=True,
                                                  compile_mods=False,
                                                  nogui=True,
                                                  plot=False)

    return res
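
Given the signature above, a single run of this network builder might be invoked as follows; the numbers are arbitrary examples, and the parameters are expected to come either from params_file.pkl or from inp.get_simulation_params(runid):

# Example invocation with arbitrary values; run=False only generates the NEURON scripts
res = create_GoC_network(duration=5000, dt=0.025, seed=123, runid=0, run=False)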
Code example #46
    def run_individual(self, sim_var, show=False):
        """
        Run an individual simulation.

        The candidate data has been flattened into the sim_var dict. The
        sim_var dict contains parameter:value key value pairs, which are
        applied to the model before it is simulated.

        """

        nml_doc = read_neuroml2_file(self.neuroml_file,
                                     include_includes=True,
                                     verbose=True,
                                     already_included=[])

        for var_name in sim_var.keys():
            words = var_name.split('/')
            type, id1 = words[0].split(':')
            if ':' in words[1]:
                variable, id2 = words[1].split(':')
            else:
                variable = words[1]
                id2 = None

            units = words[2]
            value = sim_var[var_name]

            print_comment_v(
                '  Changing value of %s (%s) in %s (%s) to: %s %s' %
                (variable, id2, type, id1, value, units))

            if type == 'cell':
                cell = None
                for c in nml_doc.cells:
                    if c.id == id1:
                        cell = c

                if variable == 'channelDensity':

                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.id == id2:
                            chanDens = cd

                    chanDens.cond_density = '%s %s' % (value, units)

                elif variable == 'erev_id':  # change all values of erev in channelDensity elements with only this id

                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.id == id2:
                            chanDens = cd

                    chanDens.erev = '%s %s' % (value, units)

                elif variable == 'erev_ion':  # change all values of erev in channelDensity elements with this ion

                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.ion == id2:
                            chanDens = cd

                    chanDens.erev = '%s %s' % (value, units)

                elif variable == 'specificCapacitance':

                    specCap = None
                    for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                        if (sc.segment_groups == None
                                and id2 == 'all') or sc.segment_groups == id2:
                            specCap = sc

                    specCap.value = '%s %s' % (value, units)

                else:
                    print_comment_v(
                        'Unknown variable (%s) in variable expression: %s' %
                        (variable, var_name))
                    exit()

            elif type == 'izhikevich2007Cell':
                izhcell = None
                for c in nml_doc.izhikevich2007_cells:
                    if c.id == id1:
                        izhcell = c

                izhcell.__setattr__(variable, '%s %s' % (value, units))

            else:
                print_comment_v(
                    'Unknown type (%s) in variable expression: %s' %
                    (type, var_name))

        new_neuroml_file = '%s/%s' % (self.generate_dir,
                                      os.path.basename(self.neuroml_file))
        if new_neuroml_file == self.neuroml_file:
            print_comment_v('Cannot use a directory for generating into (%s) which is the same location of the NeuroML file (%s)!'% \
                      (self.neuroml_file, self.generate_dir))

        write_neuroml2_file(nml_doc, new_neuroml_file)

        sim = NeuroMLSimulation(self.ref,
                                neuroml_file=new_neuroml_file,
                                target=self.target,
                                sim_time=self.sim_time,
                                dt=self.dt,
                                simulator=self.simulator,
                                generate_dir=self.generate_dir)

        sim.go()

        if show:
            sim.show()

        return sim.t, sim.volts
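
The keys of sim_var follow the type:id1/variable:id2/units pattern parsed above. A hypothetical candidate dictionary for a cell with id 'RS' (all ids and values are placeholders, and 'controller' stands for an instance of this class):

# Hypothetical sim_var dict matching the parsing logic in run_individual
sim_var = {
    'cell:RS/channelDensity:na_all/mS_per_cm2': 100.0,   # cond_density of channelDensity 'na_all'
    'cell:RS/erev_id:k_all/mV': -80.0,                    # erev of channelDensity 'k_all'
    'cell:RS/specificCapacitance:all/uF_per_cm2': 1.0,    # capacitance across all segment groups
}
t, volts = controller.run_individual(sim_var, show=False)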
Code example #47
    nml_file_name = "%s.net.nml"%bbp_ref
    nml_net_loc = "%s/%s"%(nml2_cell_dir,nml_file_name)
    nml_cell_file = "%s_0_0.cell.nml"%bbp_ref
    nml_cell_loc = "%s/%s"%(nml2_cell_dir,nml_cell_file)

    print(' > Loading %s and exporting to %s'%(load_cell_file,nml_net_loc))

    export_to_neuroml2(load_cell_file,
                       nml_net_loc,
                       separateCellFiles=True,
                       includeBiophysicalProperties=False)

    print(' > Exported to: %s and %s using %s'%(nml_net_loc, nml_cell_loc, load_cell_file))

    nml_doc = pynml.read_neuroml2_file(nml_cell_loc)

    cell = nml_doc.cells[0]

    print(' > Adding groups from: %s'%groups_info_file)
    groups = {}
    current_group = None
    for line in open(groups_info_file):
        if not line.startswith('//'):
            if line.startswith('- '):
                current_group = line[2:-1]
                print(' > Adding group: [%s]'%current_group)
                groups[current_group] = []
            else:
                section = line.split('.')[1].strip()
                segment_group = section.replace('[','_').replace(']','')
Code example #48
File: NeuroMLController.py Project: 34383c/pyNeuroML
    def run_individual(self, sim_var, show=False):
        """
        Run an individual simulation.

        The candidate data has been flattened into the sim_var dict. The
        sim_var dict contains parameter:value key value pairs, which are
        applied to the model before it is simulated.

        """
        
        nml_doc = read_neuroml2_file(self.neuroml_file, 
                                     include_includes=True,
                                     verbose = True,
                                     already_included = [])
                                     
        
        for var_name in sim_var.keys():
            words = var_name.split('/')
            type, id1 = words[0].split(':')
            if ':' in words[1]:
                variable, id2 = words[1].split(':')
            else:
                variable = words[1]
                id2 = None
            
            units = words[2]
            value = sim_var[var_name]
            
            print_comment_v('  Changing value of %s (%s) in %s (%s) to: %s %s'%(variable, id2, type, id1, value, units))
            
            if type == 'cell':
                cell = None
                for c in nml_doc.cells:
                    if c.id == id1:
                        cell = c
                        
                if variable == 'channelDensity':
                    
                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.id == id2:
                            chanDens = cd
                            
                    chanDens.cond_density = '%s %s'%(value, units)
                    
                elif variable == 'erev_id': # change all values of erev in channelDensity elements with only this id
                    
                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.id == id2:
                            chanDens = cd
                            
                    chanDens.erev = '%s %s'%(value, units)
                    
                elif variable == 'erev_ion': # change all values of erev in channelDensity elements with this ion
                    
                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.ion == id2:
                            chanDens = cd
                            
                    chanDens.erev = '%s %s'%(value, units)
                    
                elif variable == 'specificCapacitance': 
                    
                    specCap = None
                    for sc in cell.biophysical_properties.membrane_properties.specific_capacitances:
                        if (sc.segment_groups == None and id2 == 'all') or sc.segment_groups == id2 :
                            specCap = sc
                            
                    specCap.value = '%s %s'%(value, units)
                    
                else:
                    print_comment_v('Unknown variable (%s) in variable expression: %s'%(variable, var_name))
                    exit()
                
            elif type == 'izhikevich2007Cell':
                izhcell = None
                for c in nml_doc.izhikevich2007_cells:
                    if c.id == id1:
                        izhcell = c
                        
                izhcell.__setattr__(variable, '%s %s'%(value, units))
                
            else:
                print_comment_v('Unknown type (%s) in variable expression: %s'%(type, var_name))
       
                            
                                     
        new_neuroml_file =  '%s/%s'%(self.generate_dir,os.path.basename(self.neuroml_file))
        if new_neuroml_file == self.neuroml_file:
            print_comment_v('Cannot use a directory for generating into (%s) which is the same location of the NeuroML file (%s)!'% \
                      (self.neuroml_file, self.generate_dir))
                      
        write_neuroml2_file(nml_doc, new_neuroml_file)
    
            
        sim = NeuroMLSimulation(self.ref, 
                             neuroml_file = new_neuroml_file,
                             target = self.target,
                             sim_time = self.sim_time, 
                             dt = self.dt, 
                             simulator = self.simulator, 
                             generate_dir = self.generate_dir)
        
        sim.go()
        
        if show:
            sim.show()
    
        return sim.t, sim.volts
Code example #49
    def run_individual(self, sim_var, show=False):
        """
        Run an individual simulation.

        The candidate data has been flattened into the sim_var dict. The
        sim_var dict contains parameter:value key value pairs, which are
        applied to the model before it is simulated.

        """
        
        nml_doc = read_neuroml2_file(self.neuroml_file, 
                                     include_includes=True,
                                     verbose = True)
                                     
        
        for var_name in sim_var.keys():
            words = var_name.split('/')
            type, id1 = words[0].split(':')
            variable, id2 = words[1].split(':')
            units = words[2]
            value = sim_var[var_name]
            
            print('Changing value of %s (%s) in %s (%s) to: %s %s'%(variable, id2, type, id1, value, units))
            
            if type == 'cell':
                cell = None
                for c in nml_doc.cells:
                    if c.id == id1:
                        cell = c
                        
                if variable == 'channelDensity':
                    
                    chanDens = None
                    for cd in cell.biophysical_properties.membrane_properties.channel_densities:
                        if cd.id == id2:
                            chanDens = cd
                            
                    chanDens.cond_density = '%s %s'%(value, units)
                else:
                    print('Unknown variable (%s) in variable expression: %s'%(variable, var_name))
            else:
                print('Unknown type (%s) in variable expression: %s'%(type, var_name))
       
                            
                                     
        new_neuroml_file =  '%s/%s'%(self.generate_dir,os.path.basename(self.neuroml_file))
        if new_neuroml_file == self.neuroml_file:
            print('Cannot use a directory for generating into (%s) which is the same location of the NeuroML file (%s)!'% \
                      (self.neuroml_file, self.generate_dir))
                      
        write_neuroml2_file(nml_doc, new_neuroml_file)
    
            
        sim = NeuroMLSimulation(self.ref, 
                             neuroml_file = new_neuroml_file,
                             target = self.target,
                             sim_time = self.sim_time, 
                             dt = self.dt, 
                             simulator = self.simulator, 
                             generate_dir = self.generate_dir)
        
        sim.go()
        
        if show:
            sim.show()
    
        return sim.t, sim.volts
Code example #50
def create_GoC_network( duration, dt, seed, N_goc=0, run=False, prob_type='Boltzmann', GJw_type='Vervaeke2010' ):


	goc_filename = 'GoC.cell.nml'
	goc_file = pynml.read_neuroml2_file( goc_filename )
	goc_type = goc_file.cells[0]
	
	GJ_filename = 'GapJuncCML.nml'
	GJ_file = pynml.read_neuroml2_file( GJ_filename )
	GJ_type = GJ_file.gap_junctions[0]

	MFSyn_filename = 'MF_GoC_Syn.nml'
	mfsyn_file = pynml.read_neuroml2_file( MFSyn_filename )
	MFSyn_type = mfsyn_file.exp_three_synapses[0]
	
	MF20Syn_filename = 'MF_GoC_SynMult.nml'
	mf20syn_file = pynml.read_neuroml2_file( MF20Syn_filename )
	MF20Syn_type = mf20syn_file.exp_three_synapses[0]
	
	# Distribute cells in 3D
	if N_goc>0:
		GoC_pos = nu.GoC_locate(N_goc)
	else:
		GoC_pos = nu.GoC_density_locate()
		N_goc = GoC_pos.shape[0]
		
	# get GJ connectivity
	GJ_pairs, GJWt = nu.GJ_conn( GoC_pos, prob_type, GJw_type )
	tmp1, tmp2 = valnet.gapJuncAnalysis( GJ_pairs, GJWt )
	print("Number of gap junctions per cell: ", tmp1)
	print("Net GJ conductance per cell:", tmp2)
	
	# Create pop List
	goc_pop = nml.Population( id=goc_type.id+"Pop", component = goc_type.id, type="populationList", size=N_goc )
	
	# Create NML document for network specification
	net = nml.Network( id="gocNetwork", type="networkWithTemperature" , temperature="23 degC" )
	net_doc = nml.NeuroMLDocument( id=net.id )
	net_doc.networks.append( net )
	net_doc.includes.append( nml.IncludeType(href=goc_filename) )  # reference the cell file via an <include> element
	
	net.populations.append( goc_pop )
	
	#Add locations for GoC instances in the population:
	for goc in range(N_goc):
		inst = nml.Instance( id=goc )
		goc_pop.instances.append( inst )
		inst.location = nml.Location( x=GoC_pos[goc,0], y=GoC_pos[goc,1], z=GoC_pos[goc,2] )
		
	# Define input spiketrains
	input_type = 'spikeGenerator'#'spikeGeneratorPoisson'
	lems_inst_doc = lems.Model()
	mf_inputs = lems.Component( "MF_Input", input_type)
	mf_inputs.set_parameter("period", "2000 ms" )
	#mf_inputs.set_parameter("averageRate", "50 Hz")
	lems_inst_doc.add( mf_inputs )
	
	#synapse_type = 'alphaCurrentSynapse'
	#alpha_syn = lems.Component( "AlphaSyn", synapse_type)
	#alpha_syn.set_parameter("tau", "30 ms" )
	#alpha_syn.set_parameter("ibase", "200 pA")
	#lems_inst_doc.add( alpha_syn )
	
	# Define MF input population
	
	N_mf = 15
	#MF_pop = nml.Population(id=mf_inputs.id+"_pop", component=mf_inputs.id, type="populationList", size=N_mf)
	#net.populations.append( MF_pop )

	mf_type2 = 'spikeGeneratorPoisson'
	#mf_poisson = lems.Component( "MF_Poisson", mf_type2)
	#mf_poisson.set_parameter("averageRate", "5 Hz")
	#lems_inst_doc.add( mf_poisson )
	# adding in neuroml document instead of mf_poisson
	mf_poisson = nml.SpikeGeneratorPoisson( id = "MF_Poisson", average_rate="5 Hz" )
	net_doc.spike_generator_poissons.append( mf_poisson )
	
	MF_Poisson_pop = nml.Population(id=mf_poisson.id+"_pop", component=mf_poisson.id, type="populationList", size=N_mf)
	net.populations.append( MF_Poisson_pop )
	MF_pos = nu.GoC_locate( N_mf )
	for mf in range( N_mf ):
		inst = nml.Instance(id=mf)
		MF_Poisson_pop.instances.append( inst )
		inst.location = nml.Location( x=MF_pos[mf,0], y=MF_pos[mf,1], z=MF_pos[mf,2] )
		
	# Setup Mf->GoC synapses
	#MFprojection = nml.Projection(id="MFtoGoC", presynaptic_population=MF_pop.id, postsynaptic_population=goc_pop.id, synapse=alpha_syn.id)
	#net.projections.append(MFprojection)

	MF2projection = nml.Projection(id="MF2toGoC", presynaptic_population=MF_Poisson_pop.id, postsynaptic_population=goc_pop.id, synapse=MFSyn_type.id)#alpha_syn.id
	net.projections.append(MF2projection)


	#Get list of MF->GoC synapse
	mf_synlist = nu.randdist_MF_syn( N_mf, N_goc, pConn=0.3)
	nMFSyn = mf_synlist.shape[1]
	for syn in range( nMFSyn ):
		mf, goc = mf_synlist[:, syn]
		conn2 = nml.Connection(id=syn, pre_cell_id='../{}/{}/{}'.format(MF_Poisson_pop.id, mf, mf_poisson.id), post_cell_id='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id), post_segment_id='0', post_fraction_along="0.5")
		MF2projection.connections.append(conn2)
		
		
	# Burst of MF input (as explicit input)
	mf_bursttype = 'transientPoissonFiringSynapse'
	mf_burst = lems.Component( "MF_Burst", mf_bursttype)
	mf_burst.set_parameter( "averageRate", "100 Hz" )
	mf_burst.set_parameter( "delay", "2000 ms" )
	mf_burst.set_parameter( "duration", "500 ms" )
	mf_burst.set_parameter( "synapse", MF20Syn_type.id )
	mf_burst.set_parameter( "spikeTarget", './{}'.format(MF20Syn_type.id) )
	lems_inst_doc.add( mf_burst )
	
	
	# Add few burst inputs
	n_bursts = 4
	gocPerm = np.random.permutation( N_goc )
	ctr = 0
	for gg in range(4):
		goc = gocPerm[gg]
		for jj in range( n_bursts ):
			inst = nml.ExplicitInput( id=ctr, target='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id), input=mf_burst.id, synapse=MF20Syn_type.id, spikeTarget='./{}'.format(MF20Syn_type.id))
			net.explicit_inputs.append( inst )
			ctr += 1
		
	
	'''
	one-to-one pairing of MF and GoC -> no shared inputs
	for goc in range(N_mf):
		#inst = nml.Instance(id=goc)
		#MF_pop.instances.append( inst )
		#inst.location = nml.Location( x=GoC_pos[goc,0], y=GoC_pos[goc,1], z=GoC_pos[goc,2]+100 )
		#conn = nml.Connection(id=goc, pre_cell_id='../{}/{}/{}'.format(MF_pop.id, goc, mf_inputs.id), post_cell_id='../{}/{}/{}'.format(goc_pop.id, goc, goc_type.id), post_segment_id='0', post_fraction_along="0.5")
		#MFprojection.connections.append(conn)

		goc2 = N_goc-goc-1
		inst2 = nml.Instance(id=goc)
		MF_Poisson_pop.instances.append( inst2 )
		inst2.location = nml.Location( x=GoC_pos[goc2,0], y=GoC_pos[goc2,1], z=GoC_pos[goc2,2]+100 )
		conn2 = nml.Connection(id=goc, pre_cell_id='../{}/{}/{}'.format(MF_Poisson_pop.id, goc, mf_poisson.id), post_cell_id='../{}/{}/{}'.format(goc_pop.id, goc2, goc_type.id), post_segment_id='0', post_fraction_along="0.5")
		MF2projection.connections.append(conn2)

	'''
	
	# Add electrical synapses
	GoCCoupling = nml.ElectricalProjection( id="gocGJ", presynaptic_population=goc_pop.id, postsynaptic_population=goc_pop.id )
	
	#print(GJ_pairs)
	gj = nml.GapJunction( id="GJ_0", conductance="426pS" )
	net_doc.gap_junctions.append(gj)
	nGJ = GJ_pairs.shape[0]
	for jj in range( nGJ ):
		#gj.append( lems.Component( "GJ_%d"%jj, 'gapJunction') )
		#gj[jj].set_parameter( "conductance", "%fnS"%(GJWt[jj]) )
		#gj = nml.GapJunction(id="GJ_%d"%jj, conductance="%fnS"%(GJWt[jj]))
		#net_doc.gap_junctions.append(gj)
		#lems_inst_doc.add( gj[jj] )
		#print("%fnS"%(GJWt[jj]*0.426))
		conn = nml.ElectricalConnectionInstanceW( id=jj, pre_cell='../{}/{}/{}'.format(goc_pop.id, GJ_pairs[jj,0], goc_type.id), pre_segment='1', pre_fraction_along='0.5', post_cell='../{}/{}/{}'.format(goc_pop.id, GJ_pairs[jj,1], goc_type.id), post_segment='1', post_fraction_along='0.5', synapse=gj.id, weight=GJWt[jj] )#synapse="GapJuncCML" synapse=gj.id , conductance="100E-9mS"
		# ------------ need to create GJ component
		GoCCoupling.electrical_connection_instance_ws.append( conn )
	
	net.electrical_projections.append( GoCCoupling )	
		
		
		
	net_filename = 'gocNetwork.nml'
	pynml.write_neuroml2_file( net_doc, net_filename )
	lems_filename = 'instances.xml'
	pynml.write_lems_file( lems_inst_doc, lems_filename, validate=False )

	simid = 'sim_gocnet'+goc_type.id
	ls = LEMSSimulation( simid, duration=duration, dt=dt, simulation_seed=seed )
	ls.assign_simulation_target( net.id )
	
	#ls.include_lems_file( 'Synapses.xml', include_included=False)
	#ls.include_lems_file( 'Inputs.xml', include_included=False)
	ls.include_neuroml2_file( net_filename)
	ls.include_neuroml2_file( goc_filename)
	ls.include_neuroml2_file( GJ_filename)
	ls.include_neuroml2_file( MFSyn_filename)
	ls.include_neuroml2_file( MF20Syn_filename)
	ls.include_lems_file( lems_filename, include_included=False)
	
	
	# Specify outputs
	eof0 = 'Events_file'
	ls.create_event_output_file(eof0, "%s.v.spikes"%simid,format='ID_TIME')
	for jj in range( goc_pop.size):
		ls.add_selection_to_event_output_file( eof0, jj, '{}/{}/{}'.format( goc_pop.id, jj, goc_type.id), 'spike' )
		
	of0 = 'Volts_file'
	ls.create_output_file(of0, "%s.v.dat"%simid)
	for jj in range( goc_pop.size ):
		ls.add_column_to_output_file(of0, jj, '{}/{}/{}/v'.format( goc_pop.id, jj, goc_type.id))
		
	#Create Lems file to run
	lems_simfile = ls.save_to_file()

	#res = pynml.run_lems_with_jneuroml( lems_simfile, max_memory="1G",nogui=True, plot=False)
	#res = pynml.run_lems_with_jneuroml_neuron( lems_simfile, max_memory="2G", only_generate_scripts = True, compile_mods = False, nogui=True, plot=False)
	res = pynml.run_lems_with_jneuroml_neuron( lems_simfile, max_memory="2G", compile_mods = False,nogui=True, plot=False)
	#res=True
	return res
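
An example call for this variant, with illustrative argument values (the cell, synapse and gap-junction NeuroML2 files listed at the top of the function must already be present in the working directory):

# Illustrative call; N_goc=0 makes the population size come from nu.GoC_density_locate()
res = create_GoC_network(duration=2000, dt=0.025, seed=42, N_goc=0, run=False,
                         prob_type='Boltzmann', GJw_type='Vervaeke2010')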
Code example #51
    nml_cell_loc = "%s/%s"%(nml2_cell_dir,nml_cell_file)


    print(' > Exporting to %s'%(nml_net_loc))

    export_to_neuroml2(None, 
                       nml_net_loc, 
                       separateCellFiles=True,
                       includeBiophysicalProperties=False)

    print(' > Exported to: %s and %s'%(nml_net_loc, nml_cell_loc))
    
    
    clear_neuron()

    nml_doc = pynml.read_neuroml2_file(nml_cell_loc0)
        
    cell = nml_doc.cells[0]
    
    cell.id = 'Cell_%s'%model_id
    
    notes = ''
    notes+="\n\nExport of a cell model (%s) obtained from the Allen Institute Cell Types Database into NeuroML2"%model_id + \
            "\n\nElectrophysiology on which this model is based: %s"%metadata_info['URL'] + \
            "\n\n******************************************************\n*  This export to NeuroML2 has not yet been fully validated!!"+ \
            "\n*  Use with caution!!\n******************************************************\n\n        "


    cell.notes = notes
    for k in metadata_info.keys():
        if k.startswith("AIBS:"):
Code example #52
    def parse_dataset(self, d):
        # print_v("Parsing dataset/array: "+ str(d))

        # Population
        if self.current_population and d.name == "locations":

            perc_cells = (self.parameters["percentage_cells_per_pop"]
                          if "percentage_cells_per_pop" in self.parameters else
                          100)
            if perc_cells > 100:
                perc_cells = 100

            size = max(0, int((perc_cells / 100.0) * d.shape[0]))

            if size > 0:
                properties = {}
                if self._is_interneuron(self.current_population):
                    properties["radius"] = 5
                    type = "I"
                else:
                    properties["radius"] = 10
                    type = "E"
                properties["type"] = type

                layer = self.current_population.split("_")[0]
                properties["region"] = layer
                try:
                    import opencortex.utils.color as occ

                    if layer == "L23":
                        if type == "E":
                            color = occ.L23_PRINCIPAL_CELL
                        if type == "I":
                            color = occ.L23_INTERNEURON
                    if layer == "L4":
                        if type == "E":
                            color = occ.L4_PRINCIPAL_CELL
                        if type == "I":
                            color = occ.L4_INTERNEURON
                    if layer == "L5":
                        if type == "E":
                            color = occ.L5_PRINCIPAL_CELL
                        if type == "I":
                            color = occ.L5_INTERNEURON
                    if layer == "L6":
                        if type == "E":
                            color = occ.L6_PRINCIPAL_CELL
                        if type == "I":
                            color = occ.L6_INTERNEURON

                    properties["color"] = color
                except:
                    # Don't worry about it, it's just metadata
                    pass

                component_obj = None

                if self.parameters[
                        "DEFAULT_CELL_ID"] in self.component_objects:
                    component_obj = self.component_objects[
                        self.parameters["DEFAULT_CELL_ID"]]
                else:
                    if "cell_info" in self.parameters:
                        def_cell_info = self.parameters["cell_info"][
                            self.parameters["DEFAULT_CELL_ID"]]
                        if def_cell_info.neuroml2_source_file:
                            from pyneuroml import pynml

                            nml2_doc = pynml.read_neuroml2_file(
                                def_cell_info.neuroml2_source_file,
                                include_includes=True,
                            )
                            component_obj = nml2_doc.get_by_id(
                                self.parameters["DEFAULT_CELL_ID"])
                            print_v("Loaded NeuroML2 object %s from %s " %
                                    (component_obj,
                                     def_cell_info.neuroml2_source_file))
                            self.component_objects[self.parameters[
                                "DEFAULT_CELL_ID"]] = component_obj

                self.handler.handle_population(
                    self.current_population,
                    self.parameters["DEFAULT_CELL_ID"],
                    size,
                    component_obj=component_obj,
                    properties=properties,
                )

                print_v("   There are %i cells in: %s" %
                        (size, self.current_population))
                for i in range(0, d.shape[0]):

                    if i < size:
                        row = d[i, :]
                        x = row[0]
                        y = row[1]
                        z = row[2]
                        self.pop_locations[self.current_population][i] = (x, y,
                                                                          z)
                        self.handler.handle_location(
                            i,
                            self.current_population,
                            self.parameters["DEFAULT_CELL_ID"],
                            x,
                            y,
                            z,
                        )

        # Projection
        elif self.pre_pop != None and self.post_pop != None:

            proj_id = "Proj__%s__%s" % (self.pre_pop, self.post_pop)
            synapse = "gaba"
            if ("PC" in self.pre_pop or "SS"
                    in self.pre_pop):  # TODO: better choice between E/I cells
                synapse = "ampa"

            (ii, jj) = np.nonzero(d)
            conns_here = False
            pre_num = (len(self.pop_locations[self.pre_pop])
                       if self.pre_pop in self.pop_locations else 0)
            post_num = (len(self.pop_locations[self.post_pop])
                        if self.post_pop in self.pop_locations else 0)

            if pre_num > 0 and post_num > 0:
                for index in range(len(ii)):
                    if ii[index] < pre_num and jj[index] < post_num:
                        conns_here = True
                        break

                if conns_here:
                    print_v("Conn %s -> %s (%s)" %
                            (self.pre_pop, self.post_pop, synapse))
                    self.handler.handle_projection(proj_id, self.pre_pop,
                                                   self.post_pop, synapse)

                    conn_count = 0

                    for index in range(len(ii)):
                        i = ii[index]
                        j = jj[index]
                        if i < pre_num and j < post_num:
                            # print("  Conn5 %s[%s] -> %s[%s]"%(self.pre_pop,i,self.post_pop,j))
                            delay = 1.111
                            weight = 1
                            self.handler.handle_connection(
                                proj_id,
                                conn_count,
                                self.pre_pop,
                                self.post_pop,
                                synapse,
                                i,
                                j,
                                delay=delay,
                                weight=weight,
                            )
                            conn_count += 1

                    self.handler.finalise_projection(proj_id, self.pre_pop,
                                                     self.post_pop, synapse)

            self.post_pop = None
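
parse_dataset relies on a parameters dict supplied when the reader is constructed; based on the keys referenced above, a minimal (hypothetical) configuration could be:

# Hypothetical parameters dict; the component id is a placeholder
parameters = {
    'DEFAULT_CELL_ID': 'L23_PC',        # component used for every population
    'percentage_cells_per_pop': 10,     # only instantiate 10% of the cells in each population
    # optionally: 'cell_info': {'L23_PC': <object with a neuroml2_source_file attribute>}
}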
Code example #53
include_these_synapses=oc_utils.replace_cell_types(net_file_name="TestRunColumnSubstitution",
                                                   path_to_net="./",
                                                   new_net_id="TestRunColumnReduced",
                                                   cell_types_to_be_replaced=["L23PyrRS","L23PyrFRB_varInit"],
                                                   cell_types_replaced_by=["HH_464198958","HH_471141261"],
                                                   dir_to_new_components="../../../../OpenCortex/NeuroML2/prototypes/AllenInstituteCellTypesDB_HH",
                                                   dir_to_old_components="../../cells",
                                                   reduced_to_single_compartment=True,
                                                   validate_nml2=False,
                                                   return_synapses=True,
                                                   connection_segment_groups=None,
                                                   input_segment_groups=None,
                                                   synapse_file_tags=['.synapse.','Syn','Elect']) 
                                                   
nml_doc=pynml.read_neuroml2_file("TestRunColumnReduced.net.nml")

network=nml_doc.networks[0]

lems_file_name=oc_build.generate_lems_simulation(nml_doc,
                                                 network,
                                                 "TestRunColumnReduced.net.nml",
                                                 duration=100,
                                                 dt=0.01,
                                                 include_extra_lems_files=include_these_synapses)
     
opencortex.print_comment_v("Starting simulation of %s.net.nml"%"TestRunColumnReduced")
                            
oc_build.simulate_network(lems_file_name=lems_file_name,
                          simulator="jNeuroML_NEURON",
                          max_memory="4000M")      
Code example #54
def export(num_cells_to_export=5):
    cells = []

    for mgid in range(num_cells_to_export):
        print(mgid)
        cells.append(mkmitral(mgid))

    nml_net_file = "../NeuroML2/MitralCells/Exported/PartialBulb_%iMTCells.net.nml" % num_cells_to_export
    export_to_neuroml2(None,
                       nml_net_file,
                       includeBiophysicalProperties=False,
                       separateCellFiles=True)

    for i in range(num_cells_to_export):
        print("Processing cell %i out of %i" % (i, num_cells_to_export))
        nml_cell_file = "../NeuroML2/MitralCells/Exported/Mitral_0_%i.cell.nml" % i
        nml_doc = pynml.read_neuroml2_file(nml_cell_file)
        cell = nml_doc.cells[0]

        soma_seg = next(seg for seg in cell.morphology.segments
                        if seg.name == "Seg0_soma")
        initial_seg = next(seg for seg in cell.morphology.segments
                           if seg.name == "Seg0_initialseg")
        hillock_seg = next(seg for seg in cell.morphology.segments
                           if seg.name == "Seg0_hillock")

        # Ensure hillock parent is soma
        hillock_seg.parent.segments = soma_seg.id

        # Fix initial and hillock segs by moving them to the soma
        hillock_seg.proximal = pointMovedByOffset(hillock_seg.proximal,
                                                  soma_seg.distal)
        hillock_seg.distal = pointMovedByOffset(hillock_seg.distal,
                                                soma_seg.distal)
        initial_seg.proximal = pointMovedByOffset(initial_seg.proximal,
                                                  soma_seg.distal)
        initial_seg.distal = pointMovedByOffset(initial_seg.distal,
                                                soma_seg.distal)

        # Set root to id=0 and increment others
        exportHelper.resetRoot(cell)

        # TODO: cell.position(x,y,z) used for cell positioning in networks does not work as expected
        # See: https://github.com/NeuroML/jNeuroML/issues/55
        # Skipping the translation for now
        # # Move everything back to the origin
        # originOffset = type("", (), dict(x = -soma_seg.proximal.x, y = -soma_seg.proximal.y, z = -soma_seg.proximal.z ))()
        #
        # for seg in cell.morphology.segments:
        #     seg.proximal = pointMovedByOffset(seg.proximal, originOffset)
        #     seg.distal =   pointMovedByOffset(seg.distal, originOffset)

        # Replace ModelViewParmSubset_N groups with all, axon, soma, dendrite groups
        buildStandardSegmentGroups(cell)

        # Add channel placeholders
        nml_doc.includes.append(
            neuroml.IncludeType(href="channelIncludesPLACEHOLDER"))
        cell.biophysical_properties = neuroml.BiophysicalProperties(
            id="biophysPLACEHOLDER")

        # Save the new NML
        pynml.write_neuroml2_file(nml_doc, nml_cell_file)

        # Replace placeholders with contents from MitralCell...xml files
        replaceChannelPlaceholders(nml_cell_file)

        print("COMPLETED: " + nml_cell_file)

    print("DONE")
Code example #55
def main ():

    args = process_args()

    xmlfile = args.neuroml_file

    pov_file_name = xmlfile.replace(".xml", ".pov").replace(".nml1", ".pov").replace(".nml.h5", ".pov").replace(".nml", ".pov")

    pov_file = open(pov_file_name, "w")


    header='''
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}


    \n''' ###    end of header


    pov_file.write(header%(args.background))

    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print_comment_v("Saving into %s and %s and %s"%(pov_file_name, cf, nf))

    print_comment_v("Converting XML file: %s to %s"%(xmlfile, pov_file_name))


    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)


    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print_comment_v("There are %i cells in the file"%len(cell_elements))

    cell_id_vs_seg_id_vs_proximal = {}
    cell_id_vs_seg_id_vs_distal = {}
    cell_id_vs_cell = {}

    for cell in cell_elements:

        cellName = cell.id
        cell_id_vs_cell[cell.id] = cell
        print_comment_v("Handling cell: %s"%cellName)
        cell_id_vs_seg_id_vs_proximal[cell.id] = {}
        cell_id_vs_seg_id_vs_distal[cell.id] = {}

        declaredcell = "cell_"+cellName

        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n"%declaredcell)
        cells_file.write("union {\n")

        prefix = ""


        segments = cell.morphology.segments

        distpoints = {}
        proxpoints = {}

        for segment in segments:

            id = int(segment.id)

            distal = segment.distal

            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = max(float(distal.diameter)/2.0, args.mindiam)

            if x-r<minXc: minXc=x-r
            if y-r<minYc: minYc=y-r
            if z-r<minZc: minZc=z-r

            if x+r>maxXc: maxXc=x+r
            if y+r>maxYc: maxYc=y+r
            if z+r>maxZc: maxZc=z+r

            distalpoint = "<%f, %f, %f>, %f "%(x,y,z,r)

            distpoints[id] = distalpoint
            cell_id_vs_seg_id_vs_distal[cell.id][id] = (x,y,z)

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f "%(float(proximal.x),float(proximal.y),float(proximal.z),max(float(proximal.diameter)/2.0, args.mindiam))

                cell_id_vs_seg_id_vs_proximal[cell.id][id] = (float(proximal.x),float(proximal.y),float(proximal.z))
            else:
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = cell_id_vs_seg_id_vs_distal[cell.id][parent]


            proxpoints[id] = proximalpoint

            shape = "cone"
            if proximalpoint == distalpoint:
                shape = "sphere"
                proximalpoint = ""

            if ( shape == "cone" and (proximalpoint.split('>')[0] == distalpoint.split('>')[0])):
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n"%(id, proximalpoint, distalpoint)
                print_comment_v(comment)
                cells_file.write("    // "+comment)

            else:
                cells_file.write("    %s {\n"%shape)
                cells_file.write("        %s\n"%distalpoint)
                if len(proximalpoint): cells_file.write("        %s\n"%proximalpoint)
                cells_file.write("        //%s_%s.%s\n"%('CELL_GROUP_NAME','0', id))
                cells_file.write("    }\n")


        cells_file.write("    pigment { color rgb <%f,%f,%f> }\n"%(random.random(),random.random(),random.random()))

        cells_file.write("}\n\n")


    if splitOut:
        pov_file.write("#include \""+cf+"\"\n\n")
        pov_file.write("#include \""+nf+"\"\n\n")

    pov_file.write('''\n/*\n  Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s =
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 5.000000
    }
    pigment { color rgb <1,0,0> }
}\n'''%_DUMMY_CELL)

    pov_file.write('''\n/*\n  Defining the spheres to use for end points of connections...\n*/\n#declare conn_start_point =
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 3.000000
    }
    pigment { color rgb <0,1,0> }
}\n\n#declare conn_end_point =
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 3.000000
    }
    pigment { color rgb <1,0,0> }
}\n''')


    positions = {}
    popElements = nml_doc.networks[0].populations

    pop_id_vs_cell = {}

    print_comment_v("There are %i populations in the file"%len(popElements))

    for pop in popElements:

        name = pop.id
        celltype = pop.component
        instances = pop.instances
        if pop.component in cell_id_vs_cell.keys():
        #if cell_id_vs_cell.has_key(pop.component):
            pop_id_vs_cell[pop.id] = cell_id_vs_cell[pop.component]

        info = "Population: %s has %i positioned cells of type: %s"%(name,len(instances),celltype)
        print_comment_v(info)

        colour = "1"

        for prop in pop.properties:

            if prop.tag == 'color':
                colour = prop.value
                colour = colour.replace(" ", ",")
                #print "Colour determined to be: "+colour

        net_file.write("\n\n/* "+info+" */\n\n")

        pop_positions = {}

        if not celltype in declaredcells:
            cell_definition = _DUMMY_CELL
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
        else:
            cell_definition = declaredcells[celltype]

        for instance in instances:

            location = instance.location
            id = int(instance.id)
            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = (x,y,z)

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id))

            net_file.write("}\n")

        positions[name] = pop_positions

        if len(instances) == 0 and int(pop.size) > 0:

            info = "Population: %s has %i unpositioned cells of type: %s"%(name,pop.size,celltype)
            print_comment_v(info)

            colour = "1"
            '''
            if pop.annotation:
                print dir(pop.annotation)
                print pop.annotation.anytypeobjs_
                print pop.annotation.member_data_items_[0].name
                print dir(pop.annotation.member_data_items_[0])
                for prop in pop.annotation.anytypeobjs_:
                    print prop

                    if len(prop.getElementsByTagName('meta:tag'))>0 and prop.getElementsByTagName('meta:tag')[0].childNodes[0].data == 'color':
                        #print prop.getElementsByTagName('meta:tag')[0].childNodes
                        colour = prop.getElementsByTagName('meta:value')[0].childNodes[0].data
                        colour = colour.replace(" ", ",")
                    elif prop.hasAttribute('tag') and prop.getAttribute('tag') == 'color':
                        colour = prop.getAttribute('value')
                        colour = colour.replace(" ", ",")
                    print "Colour determined to be: "+colour
            '''

            net_file.write("\n\n/* "+info+" */\n\n")


            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = 0
            y = 0
            z = 0

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id))

            net_file.write("}\n")

    #print positions

    if args.conns or args.conn_points: # Note: segment specific connections not implemented yet... i.e. connections from dends to axons...
        #print_comment_v("************************\n*\n*  Note: connection lines in 3D do not yet target dendritic locations!\n*\n************************")
        for projection in nml_doc.networks[0].projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            connections = projection.connections + projection.connection_wds
            print_comment_v("Adding %i connections %s -> %s "%(len(connections),pre,post))
            #print cell_id_vs_seg_id_vs_distal
            #print cell_id_vs_seg_id_vs_proximal
            for connection in connections:
                pre_cell_id = connection.get_pre_cell_id()
                post_cell_id = connection.get_post_cell_id()

                pre_loc = (0,0,0)
                if pre in positions.keys():# positions.has_key(pre):
                    if len(positions[pre])>0:
                        pre_loc = positions[pre][pre_cell_id]
                post_loc = (0,0,0)
                if post in positions.keys(): #positions.has_key(post):
                    post_loc = positions[post][post_cell_id]
                #if pop_id_vs_cell.has_key(projection.presynaptic_population):
                if projection.presynaptic_population in pop_id_vs_cell.keys():
                    pre_cell = pop_id_vs_cell[projection.presynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[pre_cell.id][int(connection.pre_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[pre_cell.id][int(connection.pre_segment_id)]
                    m = [ p[i]+float(connection.pre_fraction_along)*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Pre point is %s, %s between %s and %s"%(m,connection.pre_fraction_along,p,d))
                    pre_loc = [ pre_loc[i]+m[i] for i in [0,1,2] ]
                if projection.postsynaptic_population in pop_id_vs_cell.keys(): #has_key(projection.postsynaptic_population):

                #if pop_id_vs_cell.has_key(projection.postsynaptic_population):
                    post_cell = pop_id_vs_cell[projection.postsynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[post_cell.id][int(connection.post_segment_id)]
                    p = cell_id_vs_seg_id_vs_proximal[post_cell.id][int(connection.post_segment_id)]
                    m = [ p[i]+float(connection.post_fraction_along)*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Post point is %s, %s between %s and %s"%(m,connection.post_fraction_along,p,d))
                    post_loc = [ post_loc[i]+m[i] for i in [0,1,2] ]

                if post_loc != pre_loc:
                    info = "// Connection from %s:%s %s -> %s:%s %s\n"%(pre, pre_cell_id, pre_loc, post, post_cell_id, post_loc)

                    print_comment(info)
                    net_file.write("// %s"%info)
                    if args.conns:
                        net_file.write("cylinder { <%s,%s,%s>, <%s,%s,%s>, .5  pigment{color Grey}}\n"%(pre_loc[0],pre_loc[1],pre_loc[2], post_loc[0],post_loc[1],post_loc[2]))
                    if args.conn_points:
                        net_file.write("object { conn_start_point translate <%s,%s,%s> }\n"%(pre_loc[0],pre_loc[1],pre_loc[2]))
                        net_file.write("object { conn_end_point translate <%s,%s,%s> }\n"%(post_loc[0],post_loc[1],post_loc[2]))




    plane = '''
plane {
   y, vv(-1)
   pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
'''

    footer='''

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
    0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
    0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
    0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
  <uu(5),uu(2),uu(5)>
  color rgb <1,1,1>

}
light_source {
  <uu(-5),uu(2),uu(-5)>
  color rgb <1,1,1>

}
light_source {
  <uu(5),uu(-2),uu(-5)>
  color rgb <1,1,1>

}
light_source {
  <uu(-5),uu(-2),uu(5)>
  color rgb <1,1,1>
}


// Trying to view box
camera {
  location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
  look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
    \n'''%(minX,minY,minZ,maxX,maxY,maxZ, args.posx, args.scalex, args.posy, args.scaley, args.posz, args.scalez, args.viewx, args.viewy, args.viewz, (plane if args.plane else "")) ###    end of footer


    pov_file.write(footer)

    pov_file.close()

    if args.movie:
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")

        ini_movie = '''
Antialias=On

+W800 +H600

Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off

        '''
        ini_file = open(ini_file_name, 'w')
        ini_file.write(ini_movie%(pov_file_name, args.frames))
        ini_file.close()

        print_comment_v("Created file for generating %i movie frames at: %s. To run this type:\n\n    povray %s\n"%(args.frames,ini_file_name,ini_file_name))

    else:

        print_comment_v("Created file for generating image of network. To run this type:\n\n    povray %s\n"%(pov_file_name))
        print_comment_v("Or for higher resolution:\n\n    povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n"%(pov_file_name))
Code example #56
0
File: NeuroML2ToPOVRay.py Project: NeuroML/pyNeuroML
def main ():

    args = process_args()
        
    xmlfile = args.neuroml_file

    pov_file_name = xmlfile
    endings = [".xml",".h5",".nml"]
    for e in endings:
        if pov_file_name.endswith(e):
            pov_file_name = pov_file_name.replace(e, ".pov")
            
    if pov_file_name == xmlfile:
        pov_file_name+='.pov'

    pov_file = open(pov_file_name, "w")


    header='''
/*
POV-Ray file generated from NeuroML network
*/
#version 3.6;

#include "colors.inc"

background {rgbt %s}


    \n''' ###    end of header


    pov_file.write(header%(args.background))

    cells_file = pov_file
    net_file = pov_file
    splitOut = False

    cf = pov_file_name.replace(".pov", "_cells.inc")
    nf = pov_file_name.replace(".pov", "_net.inc")

    if args.split:
        splitOut = True
        cells_file = open(cf, "w")
        net_file = open(nf, "w")
        print_comment_v("Saving into %s and %s and %s"%(pov_file_name, cf, nf))

    print_comment_v("Converting XML file: %s to %s"%(xmlfile, pov_file_name))


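    # Load the full NeuroML2 document, following <include> elements, so all cells and the network are available below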
    nml_doc = pynml.read_neuroml2_file(xmlfile, include_includes=True, verbose=args.v, optimized=True)

    cell_elements = []
    cell_elements.extend(nml_doc.cells)
    cell_elements.extend(nml_doc.cell2_ca_poolses)
    
    
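    # minXc..maxZc track the bounding box of the current cell's morphology; minX..maxZ accumulate the extent of the whole network (used later in the camera/light footer)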
    minXc = 1e9
    minYc = 1e9
    minZc = 1e9
    maxXc = -1e9
    maxYc = -1e9
    maxZc = -1e9

    minX = 1e9
    minY = 1e9
    minZ = 1e9
    maxX = -1e9
    maxY = -1e9
    maxZ = -1e9

    declaredcells = {}

    print_comment_v("There are %i cells in the file"%len(cell_elements))
    
    cell_id_vs_seg_id_vs_proximal = {}
    cell_id_vs_seg_id_vs_distal = {}
    cell_id_vs_cell = {}

    for cell in cell_elements:
        
        cellName = cell.id 
        cell_id_vs_cell[cell.id] = cell
        print_comment_v("Handling cell: %s"%cellName)
        cell_id_vs_seg_id_vs_proximal[cell.id] = {}
        cell_id_vs_seg_id_vs_distal[cell.id] = {}
        
        declaredcell = "cell_"+cellName

        declaredcells[cellName] = declaredcell

        cells_file.write("#declare %s = \n"%declaredcell)
        cells_file.write("union {\n")

        prefix = ""


        segments = cell.morphology.segments

        distpoints = {}
        proxpoints = {}

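        # Write each segment as a POV-Ray cone (or a sphere when proximal == distal), caching distal/proximal points for drawing connections later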
        for segment in segments:

            id = int(segment.id)

            distal = segment.distal

            x = float(distal.x)
            y = float(distal.y)
            z = float(distal.z)
            r = max(float(distal.diameter)/2.0, args.mindiam)

            if x-r<minXc: minXc=x-r
            if y-r<minYc: minYc=y-r
            if z-r<minZc: minZc=z-r

            if x+r>maxXc: maxXc=x+r
            if y+r>maxYc: maxYc=y+r
            if z+r>maxZc: maxZc=z+r

            distalpoint = "<%f, %f, %f>, %f "%(x,y,z,r)

            distpoints[id] = distalpoint
            cell_id_vs_seg_id_vs_distal[cell.id][id] = (x,y,z)

            proximalpoint = ""
            if segment.proximal is not None:
                proximal = segment.proximal
                proximalpoint = "<%f, %f, %f>, %f "%(float(proximal.x),float(proximal.y),float(proximal.z),max(float(proximal.diameter)/2.0, args.mindiam))
                
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = (float(proximal.x),float(proximal.y),float(proximal.z))
            else:
                parent = int(segment.parent.segments)
                proximalpoint = distpoints[parent]
                cell_id_vs_seg_id_vs_proximal[cell.id][id] = cell_id_vs_seg_id_vs_distal[cell.id][parent]
                
            
            proxpoints[id] = proximalpoint

            shape = "cone"
            if proximalpoint == distalpoint:
                shape = "sphere"
                proximalpoint = ""
                
            if ( shape == "cone" and (proximalpoint.split('>')[0] == distalpoint.split('>')[0])):
                comment = "Ignoring zero length segment (id = %i): %s -> %s\n"%(id, proximalpoint, distalpoint)
                print_comment_v(comment)
                cells_file.write("    // "+comment)
                
            else:
                cells_file.write("    %s {\n"%shape)
                cells_file.write("        %s\n"%distalpoint)
                if len(proximalpoint): cells_file.write("        %s\n"%proximalpoint)
                cells_file.write("        //%s_%s.%s\n"%('CELL_GROUP_NAME','0', id))
                cells_file.write("    }\n")
                

            if args.segids:
                cells_file.write('    text {\n')
                cells_file.write('        ttf "timrom.ttf" "------- Segment: %s" .1, 0.01\n'%(segment.id))
                cells_file.write('        pigment { Red }\n')
                cells_file.write('        rotate <0,180,0>\n')
                cells_file.write('        scale <10,10,10>')
                cells_file.write('        translate %s>\n'%distalpoint.split('>')[0])
                cells_file.write('    }\n')

        cells_file.write("    pigment { color rgb <%f,%f,%f> }\n"%(random.random(),random.random(),random.random()))

        cells_file.write("}\n\n")
        


    if splitOut:
        pov_file.write("#include \""+cf+"\"\n\n")
        pov_file.write("#include \""+nf+"\"\n\n")
        
    pov_file.write('''\n/*\n  Defining a dummy cell to use when cell in population is not found in NeuroML file...\n*/\n#declare %s = 
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 5.000000 
    }
    pigment { color rgb <1,0,0> }
}\n'''%_DUMMY_CELL)
        
    pov_file.write('''\n/*\n  Defining the spheres to use for end points of connections...\n*/
    \n#declare conn_start_point = 
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 3.000000 
    }
    pigment { color rgb <0,1,0> }
}\n
\n#declare conn_end_point = 
union {
    sphere {
        <0.000000, 0.000000, 0.000000>, 3.000000 
    }
    pigment { color rgb <1,0,0> }
}\n
\n#declare input_object = 
union {
    cone {
        <0, 0, 0>, 0.1    // Center and radius of one end
        <0, -40, 0>, 2.5    // Center and radius of other end
    }
    pigment { color rgb <0.2,0.2,0.8> }
}\n''')


    positions = {}
    popElements = nml_doc.networks[0].populations
    
    pop_id_vs_cell = {}

    print_comment_v("There are %i populations in the file"%len(popElements))

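    # Place one object per positioned cell instance, translating the declared cell (or a dummy sphere when no morphology was declared) to its location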
    for pop in popElements:
        
        name = pop.id
        celltype = pop.component
        instances = pop.instances
        
        if pop.component in cell_id_vs_cell.keys():
            pop_id_vs_cell[pop.id] = cell_id_vs_cell[pop.component]

        info = "Population: %s has %i positioned cells of type: %s"%(name,len(instances),celltype)
        print_comment_v(info)

        colour = "1"
        substitute_radius = None
        
        for prop in pop.properties:

            if prop.tag == 'color':
                colour = prop.value
                colour = colour.replace(" ", ",")
                #print "Colour determined to be: "+colour
            if prop.tag == 'radius':
                substitute_radius = float(prop.value)
        
        net_file.write("\n\n/* "+info+" */\n\n")

        pop_positions = {}
        
        if not celltype in declaredcells:
            minXc = 0
            minYc = 0
            minZc = 0
            maxXc = 0
            maxYc = 0
            maxZc = 0
            if substitute_radius:
                dummy_cell_name = define_dummy_cell(name, substitute_radius, pov_file)
                cell_definition = dummy_cell_name
            else:
                cell_definition = _DUMMY_CELL  
        else:
            cell_definition = declaredcells[celltype]
        
        for instance in instances:

            location = instance.location
            id = int(instance.id)
            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = float(location.x)
            y = float(location.y)
            z = float(location.z)
            pop_positions[id] = (x,y,z)

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id)) 

            net_file.write("}\n")
        
        positions[name] = pop_positions
            
        if len(instances) == 0 and int(pop.size) > 0:
            
            info = "Population: %s has %i unpositioned cells of type: %s"%(name,pop.size,celltype)
            print_comment_v(info)

            colour = "1"
            '''
            if pop.annotation:
                print dir(pop.annotation)
                print pop.annotation.anytypeobjs_
                print pop.annotation.member_data_items_[0].name
                print dir(pop.annotation.member_data_items_[0])
                for prop in pop.annotation.anytypeobjs_:
                    print prop

                    if len(prop.getElementsByTagName('meta:tag'))>0 and prop.getElementsByTagName('meta:tag')[0].childNodes[0].data == 'color':
                        #print prop.getElementsByTagName('meta:tag')[0].childNodes
                        colour = prop.getElementsByTagName('meta:value')[0].childNodes[0].data
                        colour = colour.replace(" ", ",")
                    elif prop.hasAttribute('tag') and prop.getAttribute('tag') == 'color':
                        colour = prop.getAttribute('value')
                        colour = colour.replace(" ", ",")
                    print "Colour determined to be: "+colour
            '''

            net_file.write("\n\n/* "+info+" */\n\n")


            net_file.write("object {\n")
            net_file.write("    %s\n"%cell_definition)
            x = 0
            y = 0
            z = 0

            if x+minXc<minX: minX=x+minXc
            if y+minYc<minY: minY=y+minYc
            if z+minZc<minZ: minZ=z+minZc

            if x+maxXc>maxX: maxX=x+maxXc
            if y+maxYc>maxY: maxY=y+maxYc
            if z+maxZc>maxZ: maxZ=z+maxZc

            net_file.write("    translate <%s, %s, %s>\n"%(x,y,z))

            if colour == '1':
                colour = "%f,%f,%f"%(random.random(),random.random(),random.random())

            if colour is not None:
                net_file.write("    pigment { color rgb <%s> }"%(colour))

            net_file.write("\n    //%s_%s\n"%(name, id)) 

            net_file.write("}\n")
            
            
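    # Optionally draw connections as cylinders (grey: chemical, yellow: electrical, blue: continuous projections) and/or mark their end points with spheres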
    if args.conns or args.conn_points: 
    
        projections = nml_doc.networks[0].projections + nml_doc.networks[0].electrical_projections + nml_doc.networks[0].continuous_projections
        for projection in projections:
            pre = projection.presynaptic_population
            post = projection.postsynaptic_population
            
            if isinstance(projection, neuroml.Projection):
                connections = []
                for c in projection.connection_wds: connections.append(c) 
                for c in projection.connections: connections.append(c) 
                color='Grey'
            elif isinstance(projection, neuroml.ElectricalProjection):
                connections = projection.electrical_connections + projection.electrical_connection_instances + projection.electrical_connection_instance_ws
                color='Yellow'
            elif isinstance(projection, neuroml.ContinuousProjection):
                connections = projection.continuous_connections + projection.continuous_connection_instances + projection.continuous_connection_instance_ws
                color='Blue'
                
            print_comment_v("Adding %i connections for %s: %s -> %s "%(len(connections),projection.id,pre,post))
            #print cell_id_vs_seg_id_vs_distal
            #print cell_id_vs_seg_id_vs_proximal
            for connection in connections:
                pre_cell_id = connection.get_pre_cell_id()
                post_cell_id = connection.get_post_cell_id()
                
                pre_loc = (0,0,0) 
                if pre in positions.keys(): 
                    if len(positions[pre])>0:
                        pre_loc = positions[pre][pre_cell_id] 
                post_loc = (0,0,0)
                if post in positions.keys():
                    post_loc = positions[post][post_cell_id] 
                    
                if projection.presynaptic_population in pop_id_vs_cell.keys():
                    pre_cell = pop_id_vs_cell[projection.presynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[pre_cell.id][connection.get_pre_segment_id()]
                    p = cell_id_vs_seg_id_vs_proximal[pre_cell.id][connection.get_pre_segment_id()]
                    m = [ p[i]+connection.get_pre_fraction_along()*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Pre point is %s, %s between %s and %s"%(m,connection.get_pre_fraction_along(),p,d))
                    pre_loc = [ pre_loc[i]+m[i] for i in [0,1,2] ]
                    
                if projection.postsynaptic_population in pop_id_vs_cell.keys():
                    post_cell = pop_id_vs_cell[projection.postsynaptic_population]
                    d = cell_id_vs_seg_id_vs_distal[post_cell.id][connection.get_post_segment_id()]
                    p = cell_id_vs_seg_id_vs_proximal[post_cell.id][connection.get_post_segment_id()]
                    m = [ p[i]+connection.get_post_fraction_along()*(d[i]-p[i]) for i in [0,1,2] ]
                    print_comment("Post point is %s, %s between %s and %s"%(m,connection.get_post_fraction_along(),p,d))
                    post_loc = [ post_loc[i]+m[i] for i in [0,1,2] ]
                  
                if post_loc != pre_loc:
                    info = "// Connection from %s:%s %s -> %s:%s %s\n"%(pre, pre_cell_id, pre_loc, post, post_cell_id, post_loc)

                    print_comment(info)
                    net_file.write("// %s"%info) 
                    if args.conns:
                        net_file.write("cylinder { <%s,%s,%s>, <%s,%s,%s>, .5  pigment{color %s}}\n"%(pre_loc[0],pre_loc[1],pre_loc[2], post_loc[0],post_loc[1],post_loc[2],color))
                    if args.conn_points:
                        net_file.write("object { conn_start_point translate <%s,%s,%s> }\n"%(pre_loc[0],pre_loc[1],pre_loc[2]))
                        net_file.write("object { conn_end_point translate <%s,%s,%s> }\n"%(post_loc[0],post_loc[1],post_loc[2]))
                    
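    # Optionally mark each input location with the input_object cone declared above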
    if args.inputs:
        for il in nml_doc.networks[0].input_lists:
            for input in il.input:
                popi = il.populations
                cell_id = input.get_target_cell_id()
                cell = pop_id_vs_cell[popi]
                
                loc = (0,0,0) 
                if popi in positions.keys(): 
                    if len(positions[popi])>0:
                        loc = positions[popi][cell_id] 

                d = cell_id_vs_seg_id_vs_distal[cell.id][input.get_segment_id()]
                p = cell_id_vs_seg_id_vs_proximal[cell.id][input.get_segment_id()]
                m = [ p[i]+input.get_fraction_along()*(d[i]-p[i]) for i in [0,1,2] ]
                
                input_info = "Input on cell %s:%s at %s; point %s along (%s -> %s): %s"%(popi,cell_id, loc,input.get_fraction_along(),d,p,m)
                
                loc = [ loc[i]+m[i] for i in [0,1,2] ]
                
                net_file.write("/* %s */\n"%input_info)
                net_file.write("object { input_object translate <%s,%s,%s> }\n\n"%(loc[0],loc[1],loc[2]))
        
        
    plane = '''
plane {
   y, vv(-1)
   pigment {checker color rgb 1.0, color rgb 0.8 scale 20}
}
'''

    footer='''

#declare minX = %f;
#declare minY = %f;
#declare minZ = %f;

#declare maxX = %f;
#declare maxY = %f;
#declare maxZ = %f;

#macro uu(xx)
    0.5 * (maxX *(1+xx) + minX*(1-xx))
#end

#macro vv(xx)
    0.5 * (maxY *(1+xx) + minY*(1-xx))
#end

#macro ww(xx)
    0.5 * (maxZ *(1+xx) + minZ*(1-xx))
#end

light_source {
  <uu(5),uu(2),uu(5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(-5),uu(2),uu(-5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(5),uu(-2),uu(-5)>
  color rgb <1,1,1>
  
}
light_source {
  <uu(-5),uu(-2),uu(5)>
  color rgb <1,1,1>
}


// Trying to view box
camera {
  location < uu(%s + %s * sin (clock * 2 * 3.141)) , vv(%s + %s * sin (clock * 2 * 3.141)) , ww(%s + %s * cos (clock * 2 * 3.141)) >
  look_at < uu(%s + 0) , vv(%s + 0.05+0.3*sin (clock * 2 * 3.141)) , ww(%s + 0)>
}

%s
    \n'''%(minX,minY,minZ,maxX,maxY,maxZ, args.posx, args.scalex, args.posy, args.scaley, args.posz, args.scalez, args.viewx, args.viewy, args.viewz, (plane if args.plane else "")) ###    end of footer


    pov_file.write(footer)

    pov_file.close()

    if args.movie:
        ini_file_name = pov_file_name.replace(".pov", "_movie.ini")
    
        ini_movie = '''
Antialias=On

+W800 +H600 
        
Antialias_Threshold=0.3
Antialias_Depth=4

Input_File_Name=%s

Initial_Frame=1
Final_Frame=%i
Initial_Clock=0
Final_Clock=1

Cyclic_Animation=on
Pause_when_Done=off
        
        '''
        ini_file = open(ini_file_name, 'w')
        ini_file.write(ini_movie%(pov_file_name, args.frames))
        ini_file.close()
        
        print_comment_v("Created file for generating %i movie frames at: %s. To run this type:\n\n    povray %s\n"%(args.frames,ini_file_name,ini_file_name))
        
    else:
        
        print_comment_v("Created file for generating image of network. To run this type:\n\n    povray %s\n"%(pov_file_name))
        print_comment_v("Or for higher resolution:\n\n    povray Antialias=On Antialias_Depth=10 Antialias_Threshold=0.1 +W1200 +H900 %s\n"%(pov_file_name))
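
As a rough illustration of what main() above produces (this is not part of the original script), the sketch below writes a stripped-down .pov file with one sphere per positioned cell; the helper and output file names are assumptions, and the morphology, connection, camera and light sections handled above are omitted:

from pyneuroml import pynml

def sketch_povray_export(nml_file, pov_file_name="network_sketch.pov"):
    # Simplified version of main() above: one sphere per positioned cell,
    # no morphologies, connections, camera or lights.
    nml_doc = pynml.read_neuroml2_file(nml_file, include_includes=True)
    with open(pov_file_name, "w") as pov:
        pov.write('#version 3.6;\n#include "colors.inc"\nbackground {rgbt <1,1,1,0>}\n')
        for pop in nml_doc.networks[0].populations:
            for instance in pop.instances:
                loc = instance.location
                pov.write("sphere { <%s, %s, %s>, 5 pigment { color rgb <0.2,0.2,0.8> } }\n"
                          % (loc.x, loc.y, loc.z))
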
Code example #57
0
def generate_network(nl_model,
                     handler,
                     seed=1234,
                     always_include_props=False,
                     include_connections=True,
                     include_inputs=True,
                     base_dir=None):
    """
    Generate the network model as described in NeuroMLlite in a specific handler,
    e.g. NeuroMLHandler, PyNNHandler, etc.
    """

    pop_locations = {}
    cell_objects = {}
    synapse_objects = {}

    print_v("Starting net generation for %s%s..." %
            (nl_model.id, ' (base dir: %s)' % base_dir if base_dir else ''))
    rng = random.Random(seed)

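    # Either delegate parsing of an existing network to the configured network reader, or start a new document/network through the handler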
    if nl_model.network_reader:

        exec('from neuromllite.%s import %s' %
             (nl_model.network_reader.type, nl_model.network_reader.type))

        exec('network_reader = %s()' % (nl_model.network_reader.type))
        network_reader.parameters = nl_model.network_reader.parameters

        network_reader.parse(handler)
        pop_locations = network_reader.get_locations()

    else:
        from neuromllite import __version__ as nmlv
        notes = "Generated by NeuroMLlite v%s" % nmlv
        notes += "\n    Generated network: %s" % nl_model.id
        notes += "\n    Generation seed: %i" % (seed)
        if nl_model.parameters:
            notes += "\n    NeuroMLlite parameters: "
            for p in sorted(nl_model.parameters.keys()):
                notes += "\n        %s = %s" % (p, nl_model.parameters[p])
        handler.handle_document_start(nl_model.id, notes)
        temperature = '%sdegC' % nl_model.temperature if nl_model.temperature else None
        handler.handle_network(nl_model.id,
                               nl_model.notes,
                               temperature=temperature)

    nml2_doc_temp = _extract_pynn_components_to_neuroml(nl_model)

    for c in nl_model.cells:

        if c.neuroml2_source_file:
            from pyneuroml import pynml
            nml2_doc = pynml.read_neuroml2_file(_locate_file(
                c.neuroml2_source_file, base_dir),
                                                include_includes=True)
            cell_objects[c.id] = nml2_doc.get_by_id(c.id)

        if c.pynn_cell:
            cell_objects[c.id] = nml2_doc_temp.get_by_id(c.id)

    for s in nl_model.synapses:
        if s.neuroml2_source_file:
            from pyneuroml import pynml
            nml2_doc = pynml.read_neuroml2_file(_locate_file(
                s.neuroml2_source_file, base_dir),
                                                include_includes=True)
            synapse_objects[s.id] = nml2_doc.get_by_id(s.id)

        if s.pynn_synapse:
            synapse_objects[s.id] = nml2_doc_temp.get_by_id(s.id)

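    # Create each population and generate its 3D locations (random within a region, a single point, or relative to a region)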
    for p in nl_model.populations:

        size = evaluate(p.size, nl_model.parameters)
        properties = p.properties if p.properties else {}

        if p.random_layout:
            properties['region'] = p.random_layout.region
        if p.relative_layout:
            properties['region'] = p.relative_layout.region

        if not p.random_layout and not p.single_location and not p.relative_layout and not always_include_props:

            # If there are no positions (abstract network), and <property>
            # is added to <population>, jLems doesn't like it... (it has difficulty
            # interpreting pop0[0]/v, etc.)
            # So better not to give properties...
            properties = {}

        if p.notes:
            handler.handle_population(p.id,
                                      p.component,
                                      size,
                                      cell_objects[p.component]
                                      if p.component in cell_objects else None,
                                      properties=properties,
                                      notes=p.notes)
        else:
            handler.handle_population(p.id,
                                      p.component,
                                      size,
                                      cell_objects[p.component]
                                      if p.component in cell_objects else None,
                                      properties=properties)

        pop_locations[p.id] = np.zeros((size, 3))

        for i in range(size):
            if p.random_layout:
                region = nl_model.get_child(p.random_layout.region, 'regions')

                x = region.x + rng.random() * region.width
                y = region.y + rng.random() * region.height
                z = region.z + rng.random() * region.depth
                pop_locations[p.id][i] = (x, y, z)

                handler.handle_location(i, p.id, p.component, x, y, z)

            if p.single_location:
                loc = p.single_location.location
                x = loc.x
                y = loc.y
                z = loc.z
                pop_locations[p.id][i] = (x, y, z)

                handler.handle_location(i, p.id, p.component, x, y, z)

            if p.relative_layout:
                print_v("Generating population with layout: %s" %
                        p.relative_layout)
                region = nl_model.get_child(p.relative_layout.region,
                                            'regions')
                x = p.relative_layout.x + region.x
                y = p.relative_layout.y + region.y
                z = p.relative_layout.z + region.z
                pop_locations[p.id][i] = (x, y, z)
                handler.handle_location(i, p.id, p.component, x, y, z)

        if hasattr(handler, 'finalise_population'):
            handler.finalise_population(p.id)

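    # Add projections, skipping any whose weight evaluates to 0; connectivity can be random, convergent or one-to-one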
    if include_connections:
        for p in nl_model.projections:

            type = p.type if p.type else 'projection'

            delay = evaluate(p.delay, nl_model.parameters) if p.delay else 0
            weight = evaluate(p.weight, nl_model.parameters) if p.weight else 1

            if weight != 0:
                handler.handle_projection(
                    p.id,
                    p.presynaptic,
                    p.postsynaptic,
                    p.synapse,
                    synapse_obj=synapse_objects[p.synapse]
                    if p.synapse in synapse_objects else None,
                    pre_synapse_obj=synapse_objects[p.pre_synapse]
                    if p.pre_synapse in synapse_objects else None,
                    type=type)

                conn_count = 0
                if p.random_connectivity:
                    for pre_i in range(len(pop_locations[p.presynaptic])):
                        for post_i in range(len(
                                pop_locations[p.postsynaptic])):
                            flip = rng.random()
                            #print("Is cell %i conn to %i, prob %s - %s"%(pre_i, post_i, flip, p.random_connectivity.probability))
                            if flip < p.random_connectivity.probability:
                                #print_v("Adding connection %i with weight: %s, delay: %s"%(conn_count, weight, delay))
                                handler.handle_connection(p.id,
                                                          conn_count,
                                                          p.presynaptic,
                                                          p.postsynaptic,
                                                          p.synapse, \
                                                          pre_i, \
                                                          post_i, \
                                                          preSegId=0, \
                                                          preFract=0.5, \
                                                          postSegId=0, \
                                                          postFract=0.5, \
                                                          delay=delay, \
                                                          weight=weight)
                                conn_count += 1

                if p.convergent_connectivity:

                    for post_i in range(len(pop_locations[p.postsynaptic])):

                        for count in range(
                                int(p.convergent_connectivity.num_per_post)):
                            found = False
                            while not found:
                                pre_i = int(rng.random() *
                                            len(pop_locations[p.presynaptic]))
                                if p.presynaptic == p.postsynaptic and pre_i == post_i:
                                    found = False
                                else:
                                    found = True

                            print_v(
                                "Adding connection %i (%i->%i; %i to %s of post) with weight: %s, delay: %s"
                                % (conn_count, pre_i, post_i, count,
                                   p.convergent_connectivity.num_per_post,
                                   weight, delay))
                            handler.handle_connection(p.id,
                                                      conn_count,
                                                      p.presynaptic,
                                                      p.postsynaptic,
                                                      p.synapse, \
                                                      pre_i, \
                                                      post_i, \
                                                      preSegId=0, \
                                                      preFract=0.5, \
                                                      postSegId=0, \
                                                      postFract=0.5, \
                                                      delay=delay, \
                                                      weight=weight)
                            conn_count += 1

                elif p.one_to_one_connector:
                    for i in range(
                            min(len(pop_locations[p.presynaptic]),
                                len(pop_locations[p.postsynaptic]))):

                        #print_v("Adding connection %i with weight: %s, delay: %s"%(conn_count, weight, delay))
                        handler.handle_connection(p.id,
                                                  conn_count,
                                                  p.presynaptic,
                                                  p.postsynaptic,
                                                  p.synapse, \
                                                  i, \
                                                  i, \
                                                  preSegId=0, \
                                                  preFract=0.5, \
                                                  postSegId=0, \
                                                  postFract=0.5, \
                                                  delay=delay, \
                                                  weight=weight)
                        conn_count += 1

                handler.finalise_projection(p.id, p.presynaptic,
                                            p.postsynaptic, p.synapse)

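    # Add inputs to a randomly chosen percentage of the cells in the target population, possibly with several inputs per cell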
    if include_inputs:
        for input in nl_model.inputs:

            handler.handle_input_list(input.id,
                                      input.population,
                                      input.input_source,
                                      size=0,
                                      input_comp_obj=None)

            input_count = 0
            for i in range(len(pop_locations[input.population])):
                flip = rng.random()
                weight = input.weight if input.weight else 1
                if flip * 100. < input.percentage:

                    number_per_cell = evaluate(
                        input.number_per_cell,
                        nl_model.parameters) if input.number_per_cell else 1

                    for j in range(number_per_cell):
                        handler.handle_single_input(input.id,
                                                    input_count,
                                                    i,
                                                    weight=evaluate(
                                                        weight,
                                                        nl_model.parameters))
                        input_count += 1

            handler.finalise_input_source(input.id)

    if hasattr(handler, 'finalise_document'):
        handler.finalise_document()
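
generate_network() only talks to the handler through the callbacks used above, so a minimal sketch of a compatible handler (a hypothetical stand-in for NeuroMLlite's real NeuroML/PyNN handlers) just needs those methods; calling generate_network(net, EchoHandler()) would then print the generated structure instead of building a model:

class EchoHandler(object):
    # Skeleton handler: echoes the events emitted by generate_network() above.
    def handle_document_start(self, id, notes):
        print("Document: %s" % id)
    def handle_network(self, network_id, notes, temperature=None):
        print("Network: %s (temperature: %s)" % (network_id, temperature))
    def handle_population(self, population_id, component, size,
                          component_obj=None, properties=None, notes=None):
        print("Population %s: %i cells of %s" % (population_id, size, component))
    def handle_location(self, id, population_id, component, x, y, z):
        pass
    def handle_projection(self, projection_id, pre, post, synapse,
                          synapse_obj=None, pre_synapse_obj=None, type='projection'):
        print("Projection %s: %s -> %s (%s)" % (projection_id, pre, post, type))
    def handle_connection(self, projection_id, conn_id, pre, post, synapse,
                          pre_cell, post_cell, preSegId=0, preFract=0.5,
                          postSegId=0, postFract=0.5, delay=0, weight=1):
        pass
    def finalise_projection(self, projection_id, pre, post, synapse):
        pass
    def handle_input_list(self, input_list_id, population_id, input_source,
                          size=0, input_comp_obj=None):
        pass
    def handle_single_input(self, input_list_id, input_id, cell_id, weight=1):
        pass
    def finalise_input_source(self, input_list_id):
        pass

The optional finalise_population() and finalise_document() callbacks are only invoked when present (guarded by hasattr above), so the sketch omits them.
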
Code example #58
0
            dry_run=False)

        compare('%s/%s.Pop0.v.dat' % (r1['run_directory'], r1['reference']),
                show_plot_already=False)
        '''
        r2={}
        r2['run_directory'] ='NT_AllenIzh2stage_STAGE2_Tue_Dec__1_17.26.38_2015'
        r2['reference'] = 'AllenIzh2stage_STAGE2'  '''

        compare('%s/%s.Pop0.v.dat' % (r2['run_directory'], r2['reference']),
                show_plot_already=True,
                dataset=dataset)

        final_network = '%s/%s.net.nml' % (r2['run_directory'], ref)

        nml_doc = pynml.read_neuroml2_file(final_network)

        cell = nml_doc.izhikevich2007_cells[0]

        print("Extracted cell: %s from tuned model" % cell.id)

        new_id = '%s_%s' % (type, dataset)
        new_cell_doc = neuroml.NeuroMLDocument(id=new_id)
        cell.id = new_id

        cell.notes = "Cell model tuned to Allen Institute Cell Types Database, dataset: "+ \
                     "%s\n\nTuning procedure metadata:\n\n%s\n"%(dataset, pp.pformat(r2))

        new_cell_doc.izhikevich2007_cells.append(cell)
        new_cell_file = 'tuned_cells/%s.cell.nml' % new_id
Code example #59
0
def get_includes_from_channel_file(channel_file):
    doc = read_neuroml2_file(channel_file)
    includes = []
    for incl in doc.includes:
        includes.append(incl.href)
    return includes
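
For example (the channel file name here is only a placeholder), this would print the href of every file included by a channel definition:

# 'Na.channel.nml' is a hypothetical file name, for illustration only
for href in get_includes_from_channel_file('Na.channel.nml'):
    print(href)
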
Code example #60
0
    'PY_5': [500, 2.5, 2, 40, 0, 125, 0.01, 0.03],
    'PY_6': [500, 2.5, 2, 40, 0, 125, 0.00, 0.02]
}

cd_names = ["Na_STG_all", "CaT_STG_all", "CaS_STG_all", "KA_STG_all", "KCa_STG_all", "Kd_STG_all", "H_STG_all", "LeakConductance_all"]

    
for cellref in values:
    [type, number] = cellref.split('_')
    cond_densities = values[cellref]

    type = type.replace('/','_')
    number = int(number)
     
    print('Generating cell %s, number %i: %s'%(type, number, cond_densities))
    nml_doc = pynml.read_neuroml2_file('../neuroConstruct/generatedNeuroML2/%s.cell.nml'%type)
    cell = nml_doc.cells[0]
    print('Loaded: %s'%cell.id)
    cell_ref = '%s_%i'%(type, number)
    #cell.id = cell_id
    nml_doc.id = cell_ref
    nml_file = '%s.cell.nml'%cell_ref
    
    mp = cell.biophysical_properties.membrane_properties
    
    for i in range(len(cond_densities)):
        cd = cd_names[i]
        value = '%s %s'%(cond_densities[i], 'mS_per_cm2')
        print("  Changing cond dens of %s to %s"%(cd, value))
        for c in itertools.chain(mp.channel_densities, mp.channel_density_nernsts):
            if c.id == cd: