def include_lems_file(self, lems_file_name, include_included=True):
    """Register a LEMS file in this simulation's include list.

    Parameters
    ----------
    lems_file_name : str
        Path of the LEMS file to include.
    include_included : bool
        If True, parse the file and also register every file it
        itself includes.
    """
    # `x not in y` is the idiomatic membership test (was `not x in y`).
    if lems_file_name not in self.lems_info['include_files']:
        self.lems_info['include_files'].append(lems_file_name)

    if include_included:
        model = read_lems_file(lems_file_name)
        for inc in model.included_files:
            # Apply the same duplicate guard as above; the original
            # appended unconditionally, so repeated calls piled up
            # duplicate entries in include_files.
            if inc not in self.lems_info['include_files']:
                self.lems_info['include_files'].append(inc)
def include_lems_file(self, lems_file_name, include_included=True):
    """Add a LEMS file (and optionally its own includes) to the
    simulation's include list, without creating duplicate entries.

    Parameters
    ----------
    lems_file_name : str
        Path of the LEMS file to include.
    include_included : bool
        If True, read the file and register its included files too.
    """
    if lems_file_name not in self.lems_info['include_files']:
        self.lems_info['include_files'].append(lems_file_name)

    if include_included:
        model = read_lems_file(lems_file_name)
        # The primary file is deduplicated above but the original code
        # appended nested includes unconditionally — mirror the guard
        # so include_files never accumulates duplicates.
        for inc in model.included_files:
            if inc not in self.lems_info['include_files']:
                self.lems_info['include_files'].append(inc)
def _load_lems_file_with_neuroml2_types(cls, lems_filename):
    """Parse a LEMS file and merge it into a model pre-loaded with the
    standard NeuroML2 component types.

    The file's own includes are NOT followed (missing includes raise),
    so only the definitions written directly in `lems_filename` are
    layered on top of the NeuroML2 core set.

    Returns the combined lems model.
    """
    from pyneuroml.pynml import read_lems_file

    # Start from a model that already knows the NeuroML2 core types.
    combined = cls._get_lems_model_with_neuroml2_types()

    parsed = read_lems_file(
        lems_filename,
        include_includes=False,
        fail_on_missing_includes=True,
        debug=True,
    )

    # Copy the freshly parsed components and component types over the
    # core definitions, key by key.
    merge_pairs = (
        (combined.components, parsed.components),
        (combined.component_types, parsed.component_types),
    )
    for target, source in merge_pairs:
        for key, value in source.items():
            target[key] = value

    return combined
def write_opt_to_nml(path, param_dict):
    """Write optimal simulation parameters back to NeuroML.

    Parameters
    ----------
    path : str
        Output path; '.nml' is appended to form the file name.
    param_dict : dict
        Maps izhikevich2007Cell parameter names to their optimised
        (unitless) values.
    """
    # NOTE(review): `path_params` is a module-level mapping defined
    # elsewhere in this package — it must provide 'model_path'.
    orig_lems_file_path = path_params['model_path']
    more_attributes = pynml.read_lems_file(orig_lems_file_path,
                                           include_includes=True,
                                           debug=False)
    for component in more_attributes.components:
        if 'izhikevich2007Cell' not in component.type:
            continue
        new_params = {}
        for name, value in component.parameters.items():
            # Existing values look like "<number> <unit>"; reuse the
            # unit, falling back to 'mV' when none is present.
            parts = value.split()
            unit = parts[1] if len(parts) == 2 else 'mV'
            new_params[name] = f"{param_dict[name]} {unit}"
        component.parameters = new_params
    more_attributes.export_to_file(path + '.nml')
def _load_lems_file_with_neuroml2_types(cls, lems_filename):
    """Parse `lems_filename` on top of the standard NeuroML2 core types.

    The NeuroML2 dimension/unit and component-type definitions are read
    straight out of the bundled jNeuroML jar, then the user's LEMS file
    is parsed (without following its own includes) and its components
    and component types are merged into the resulting model.

    Returns the combined lems.Model.
    """
    from pyneuroml.pynml import get_path_to_jnml_jar
    from pyneuroml.pynml import read_lems_file
    from lems.parser.LEMS import LEMSFileParser
    import zipfile

    lems_model = lems.Model(include_includes=False)
    parser = LEMSFileParser(lems_model)
    jar_path = get_path_to_jnml_jar()
    # Standard NeuroML2 definitions shipped inside the jNeuroML jar,
    # parsed in the same order as before.
    core_type_files = (
        'NeuroMLCoreDimensions.xml',
        'NeuroMLCoreCompTypes.xml',
        'Cells.xml',
        'Networks.xml',
        'Simulation.xml',
        'Synapses.xml',
        'PyNN.xml',
    )
    # Context manager closes the jar; the original leaked the ZipFile
    # handle. The loop replaces seven copy-pasted read/parse pairs.
    with zipfile.ZipFile(jar_path, 'r') as jar:
        for xml_name in core_type_files:
            parser.parse(jar.read('NeuroML2CoreTypes/' + xml_name))

    model = read_lems_file(lems_filename,
                           include_includes=False,
                           fail_on_missing_includes=True,
                           debug=True)
    for cid, c in model.components.items():
        lems_model.components[cid] = c
    for ctid, ct in model.component_types.items():
        lems_model.component_types[ctid] = ct
    return lems_model
def write_opt_to_nml(path, param_dict):
    """Write optimal simulation parameters back to NeuroML.

    Parameters
    ----------
    path : str
        Output path; '.nml' is appended to form the file name.
    param_dict : dict
        Maps izhikevich2007Cell parameter names to their optimised
        (unitless) values.
    """
    from neuronunit.optimization import get_neab
    from pyneuroml import pynml

    orig_lems_file_path = get_neab.LEMS_MODEL_PATH
    more_attributes = pynml.read_lems_file(orig_lems_file_path,
                                           include_includes=True,
                                           debug=False)
    for component in more_attributes.components:
        if 'izhikevich2007Cell' not in component.type:
            continue
        new_params = {}
        for name, value in component.parameters.items():
            # Existing values look like "<number> <unit>"; reuse the
            # unit, falling back to 'mV' when none is present.
            parts = value.split()
            unit = parts[1] if len(parts) == 2 else 'mV'
            new_params[name] = f"{param_dict[name]} {unit}"
        component.parameters = new_params
    more_attributes.export_to_file(path + '.nml')
def generate_grc_layer_network(
        runID,
        correlationRadius,
        NADT,
        duration,
        dt,
        minimumISI,  # ms
        ONRate,  # Hz
        OFFRate,  # Hz
        run=False):
    """Build (and optionally run) a mossy-fibre -> granule-cell layer
    network for one parameter set of a batch run.

    Parameters
    ----------
    runID : str
        1-based index into ../params_file.pkl selecting N_syn, f_mf and
        run_num for this run; also used to name the generated files.
    correlationRadius : int
        0 for randomly activated MFs, > 0 for spatially correlated
        activation patterns loaded from ../../input_statistics.
    NADT : int
        0 selects the plain IaF granule-cell model, otherwise an
        f_mf-dependent variant is loaded.
    duration, dt : float
        Simulation duration and timestep (ms).
    minimumISI : float
        Refractory period of the Poisson spike generators (ms).
    ONRate, OFFRate : float
        Firing rates of active / silent mossy fibres (Hz).
    run : bool
        When True, the generated LEMS file is executed with jNeuroML
        and the call's result is returned.
    """
    # Load parameters for this run. Pickles must be opened in binary
    # mode ('rb'); the original text-mode open ('r') fails on Python 3.
    # Context managers also guarantee the handles are closed.
    with open('../params_file.pkl', 'rb') as f:
        p = pkl.load(f)
    N_syn = p['N_syn'][int(runID) - 1]
    f_mf = p['f_mf'][int(runID) - 1]
    run_num = p['run_num'][int(runID) - 1]

    # Get connectivity matrix between cells.
    conn_file = '../../network_structures/GCLconnectivity_' + str(N_syn) + '.pkl'
    with open(conn_file, 'rb') as f:
        p = pkl.load(f)
    conn_mat = p['conn_mat']
    N_mf, N_grc = conn_mat.shape
    assert np.all(conn_mat.sum(axis=0) == N_syn), \
        'Connectivity matrix is incorrect.'

    # Get MF activity pattern.
    if correlationRadius == 0:
        # Activate MFs randomly.
        N_mf_ON = int(N_mf * f_mf)
        mf_indices_ON = random.sample(range(N_mf), N_mf_ON)
        mf_indices_ON.sort()
    elif correlationRadius > 0:
        # Spatially correlated MFs.
        f_mf_range = np.linspace(.05, .95, 19)
        f_mf_ix = np.where(f_mf_range == f_mf)[0][0]
        p = io.loadmat('../../input_statistics/mf_patterns_r' +
                       str(correlationRadius) + '.mat')
        R = p['Rs'][:, :, f_mf_ix]
        g = p['gs'][f_mf_ix]
        t = np.dot(R.transpose(), np.random.randn(N_mf))
        S = (t > -g * np.ones(N_mf))
        mf_indices_ON = np.where(S)[0]
        N_mf_ON = len(mf_indices_ON)
    else:
        # The original fell through here and raised a NameError on
        # mf_indices_ON further down; fail fast instead.
        raise ValueError('correlationRadius must be >= 0')

    N_mf_OFF = N_mf - N_mf_ON
    # Set membership makes this O(N_mf) instead of O(N_mf^2).
    on_set = set(mf_indices_ON)
    mf_indices_OFF = [x for x in range(N_mf) if x not in on_set]
    mf_indices_OFF.sort()

    # Load NeuroML components, LEMS components and LEMS componentTypes
    # from external files.

    # Spike generator (for Poisson MF spiking).
    spike_generator_file_name = "../../grc_lemsDefinitions/spikeGenerators.xml"
    spike_generator_doc = pynml.read_lems_file(spike_generator_file_name)

    # Integrate-and-fire GC model; for NADT != 0 an f_mf-dependent
    # variant file is used.
    iaf_nml2_file_name = ("../../grc_lemsDefinitions/IaF_GrC.nml"
                          if NADT == 0 else
                          "../../grc_lemsDefinitions/IaF_GrC_" +
                          '{:.2f}'.format(f_mf) + ".nml")
    iaF_GrC_doc = pynml.read_neuroml2_file(iaf_nml2_file_name)
    iaF_GrC = iaF_GrC_doc.iaf_ref_cells[0]

    # AMPAR- and NMDAR-mediated synapses.
    ampa_syn_filename = ("../../grc_lemsDefinitions/RothmanMFToGrCAMPA_" +
                         str(N_syn) + ".xml")
    nmda_syn_filename = ("../../grc_lemsDefinitions/RothmanMFToGrCNMDA_" +
                         str(N_syn) + ".xml")
    rothmanMFToGrCAMPA_doc = pynml.read_lems_file(ampa_syn_filename)
    rothmanMFToGrCNMDA_doc = pynml.read_lems_file(nmda_syn_filename)

    # Define components from the componentTypes we just loaded.

    # Refractory Poisson input -- representing active MFs.
    spike_generator_ref_poisson_type = spike_generator_doc.component_types[
        'spikeGeneratorRefPoisson']
    lems_instances_doc = lems.Model()
    spike_generator_on = lems.Component(
        "mossySpikerON", spike_generator_ref_poisson_type.name)
    spike_generator_on.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_on.set_parameter("averageRate", "%s Hz" % ONRate)
    lems_instances_doc.add(spike_generator_on)

    # Refractory Poisson input -- representing silent MFs.
    spike_generator_off = lems.Component(
        "mossySpikerOFF", spike_generator_ref_poisson_type.name)
    spike_generator_off.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_off.set_parameter("averageRate", "%s Hz" % OFFRate)
    lems_instances_doc.add(spike_generator_off)

    # Synapse component ids.
    rothmanMFToGrCAMPA = rothmanMFToGrCAMPA_doc.components[
        'RothmanMFToGrCAMPA'].id
    rothmanMFToGrCNMDA = rothmanMFToGrCNMDA_doc.components[
        'RothmanMFToGrCNMDA'].id

    # Create ON MF, OFF MF, and GC populations.
    GrCPop = nml.Population(id="GrCPop", component=iaF_GrC.id, size=N_grc)
    mossySpikersPopON = nml.Population(id=spike_generator_on.id + "Pop",
                                       component=spike_generator_on.id,
                                       size=N_mf_ON)
    mossySpikersPopOFF = nml.Population(id=spike_generator_off.id + "Pop",
                                        component=spike_generator_off.id,
                                        size=N_mf_OFF)

    # Create network and add populations.
    net = nml.Network(id="network")
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net.populations.append(GrCPop)
    net.populations.append(mossySpikersPopON)
    net.populations.append(mossySpikersPopOFF)

    # MF-GC connectivity: all ON MFs first, then all OFF MFs (same
    # order as before). Each innervated GC gets both an AMPAR- and an
    # NMDAR-mediated synaptic connection.
    wiring = ((mossySpikersPopON, mf_indices_ON, N_mf_ON),
              (mossySpikersPopOFF, mf_indices_OFF, N_mf_OFF))
    for pop, indices, count in wiring:
        for local_ix in range(count):
            mf_ix = indices[local_ix]
            # Find which GCs are neighbours of this MF.
            innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
            for grc_ix in innervated_grcs:
                for synapse in [rothmanMFToGrCAMPA, rothmanMFToGrCNMDA]:
                    connection = nml.SynapticConnection(
                        from_='{}[{}]'.format(pop.id, local_ix),
                        synapse=synapse,
                        to='GrCPop[{}]'.format(grc_ix))
                    net.synaptic_connections.append(connection)

    # Write network to file.
    net_file_name = 'generated_network_' + runID + '.net.nml'
    pynml.write_neuroml2_file(net_doc, net_file_name)

    # Write LEMS instances to file.
    lems_instances_file_name = 'instances_' + runID + '.xml'
    pynml.write_lems_file(lems_instances_doc, lems_instances_file_name,
                          validate=False)

    # Create a LEMSSimulation to manage creation of the LEMS file.
    ls = LEMSSimulation('sim_' + runID, duration, dt,
                        lems_seed=int(np.round(1000 * random.random())))

    # Point to network as target of simulation.
    ls.assign_simulation_target(net.id)

    # Include generated/existing NeuroML2 files.
    ls.include_neuroml2_file(iaf_nml2_file_name)
    ls.include_lems_file(spike_generator_file_name, include_included=False)
    ls.include_lems_file(lems_instances_file_name)
    ls.include_lems_file(ampa_syn_filename, include_included=False)
    ls.include_lems_file(nmda_syn_filename, include_included=False)
    ls.include_neuroml2_file(net_file_name)

    # Specify Displays and Output Files.
    basedir = ('../data_r' + str(correlationRadius) + '/'
               if NADT == 0 else
               '../data_r' + str(correlationRadius) + '_NADT/')
    # Parameter values become part of the spike-time file names.
    end_filename = str(N_syn) + '_{:.2f}'.format(f_mf) + '_' + str(run_num)

    # Save MF spike times under basedir + MF_spikes_ + end_filename.
    eof0 = 'MFspikes_file'
    ls.create_event_output_file(
        eof0, basedir + "MF_spikes_" + end_filename + ".dat")
    # ON MFs.
    for i in range(mossySpikersPopON.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_ON[i],
            "%s[%i]" % (mossySpikersPopON.id, i), 'spike')
    # OFF MFs.
    for i in range(mossySpikersPopOFF.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_OFF[i],
            "%s[%i]" % (mossySpikersPopOFF.id, i), 'spike')

    # Save GC spike times under basedir + GrC_spikes_ + end_filename.
    eof1 = 'GrCspikes_file'
    ls.create_event_output_file(
        eof1, basedir + "GrC_spikes_" + end_filename + ".dat")
    for i in range(GrCPop.size):
        ls.add_selection_to_event_output_file(
            eof1, i, "%s[%i]" % (GrCPop.id, i), 'spike')

    lems_file_name = ls.save_to_file()

    if run:
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               max_memory="8G",
                                               nogui=True,
                                               load_saved_data=False,
                                               plot=False)
        return results
def load_model(self, verbose=True):
    """Convert the declarative LEMS/NeuroML model into a NEURON
    implementation and load it into this backend.

    jNeuroML generates a pyhoc file (once, outside the optimisation
    loop, so the cost is tolerable); the NEURON mechanisms are compiled
    with `nrnivmodl` and loaded. The generated hoc variable names are
    idiosyncratic rather than generic, so the current-source and cell
    component ids are recovered from the LEMS file that produced them.

    Parameters
    ----------
    verbose : bool
        Passed through to pynml's NEURON runner.

    Returns
    -------
    self, mutated in place (sets self.neuron_model_dir,
    self._current_src_name, self._cell_name).
    """
    assert os.path.isfile(self.model.orig_lems_file_path)
    base_name = os.path.splitext(self.model.orig_lems_file_path)[0]
    NEURON_file_path = '{0}_nrn.py'.format(base_name)
    self.neuron_model_dir = os.path.dirname(self.model.orig_lems_file_path)
    assert os.path.isdir(self.neuron_model_dir)

    if not os.path.exists(NEURON_file_path):
        # Generate (but do not run) the NEURON scripts from the LEMS file.
        pynml.run_lems_with_jneuroml_neuron(
            self.model.orig_lems_file_path,
            skip_run=False,
            nogui=True,
            load_saved_data=False,
            only_generate_scripts=True,
            plot=False,
            show_plot_already=False,
            exec_in_dir=self.neuron_model_dir,
            verbose=verbose,
            exit_on_fail=True)
        # Compile the mod files in the model directory. A list argv plus
        # cwd= avoids the shell entirely; the original interpolated the
        # path into a shell string ("cd %s; nrnivmodl", shell=True),
        # which breaks on paths containing spaces or shell metacharacters.
        subprocess.run(["nrnivmodl"], cwd=self.neuron_model_dir)
        self.load_mechanisms()
    elif os.path.realpath(os.getcwd()) != os.path.realpath(
            self.neuron_model_dir):
        # Load mechanisms unless they've already been loaded.
        self.load_mechanisms()

    self.load()

    # Recover the hoc variable names by re-reading the LEMS file that
    # generated them; the tree also carries simulation parameters such
    # as duration, but only the component ids are needed here.
    more_attributes = pynml.read_lems_file(self.model.orig_lems_file_path,
                                           include_includes=True,
                                           debug=False)
    for component in more_attributes.components:
        if 'pulseGenerator' in component.type:
            self._current_src_name = component.id
        if 'Cell' in component.type:
            self._cell_name = component.id
    more_attributes = None  # drop the reference; no longer needed
    return self
def generate_grc_layer_network(
        p_mf_ON,
        duration,
        dt,
        minimumISI,  # ms
        ONRate,  # Hz
        OFFRate,  # Hz
        run=False):
    """Build (and optionally run) a mossy-fibre -> granule-cell layer
    network with explicit 3D cell positions.

    Parameters
    ----------
    p_mf_ON : float
        Fraction of mossy fibres that are active.
    duration, dt : float
        Simulation duration and timestep (ms).
    minimumISI : float
        Refractory period of the Poisson spike generators (ms).
    ONRate, OFFRate : float
        Firing rates of active / silent mossy fibres (Hz).
    run : bool
        When True, the generated LEMS file is executed with jNeuroML
        and the call's result is returned.
    """
    # Load connectivity matrix. Pickles must be opened in binary mode
    # ('rb'); the original text-mode open fails on Python 3, and the
    # handle was never closed.
    with open('GCLconnectivity.pkl', 'rb') as f:
        p = pkl.load(f)
    conn_mat = p['conn_mat']
    N_mf, N_grc = conn_mat.shape
    assert np.all(conn_mat.sum(axis=0) == 4), \
        'Connectivity matrix is incorrect.'

    # Load GrC and MF rosette positions.
    grc_pos = p['grc_pos']
    glom_pos = p['glom_pos']

    # Choose which mossy fibres are on, which are off.
    N_mf_ON = int(N_mf * p_mf_ON)
    mf_indices_ON = random.sample(range(N_mf), N_mf_ON)
    mf_indices_ON.sort()
    N_mf_OFF = N_mf - N_mf_ON
    mf_indices_OFF = [x for x in range(N_mf) if x not in mf_indices_ON]
    mf_indices_OFF.sort()

    # Load NeuroML components, LEMS components and LEMS componentTypes
    # from external files. spikeGeneratorRefPoisson is now a standard
    # nml type, so no spike-generator definitions file is needed.
    iaF_GrC = nml.IafRefCell(id="iaF_GrC",
                             refract="2ms",
                             C="3.22pF",
                             thresh="-40mV",
                             reset="-63mV",
                             leak_conductance="1.498nS",
                             leak_reversal="-79.67mV")
    ampa_syn_filename = "RothmanMFToGrCAMPA.xml"
    nmda_syn_filename = "RothmanMFToGrCNMDA.xml"
    rothmanMFToGrCAMPA_doc = pynml.read_lems_file(ampa_syn_filename)
    rothmanMFToGrCNMDA_doc = pynml.read_lems_file(nmda_syn_filename)

    # Define some components from the componentTypes we just loaded.
    lems_instances_doc = lems.Model()
    spike_generator_ref_poisson_type_name = 'spikeGeneratorRefPoisson'
    spike_generator_on = lems.Component(
        "mossySpikerON", spike_generator_ref_poisson_type_name)
    spike_generator_on.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_on.set_parameter("averageRate", "%s Hz" % ONRate)
    lems_instances_doc.add(spike_generator_on)
    spike_generator_off = lems.Component(
        "mossySpikerOFF", spike_generator_ref_poisson_type_name)
    spike_generator_off.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_off.set_parameter("averageRate", "%s Hz" % OFFRate)
    lems_instances_doc.add(spike_generator_off)

    rothmanMFToGrCAMPA = rothmanMFToGrCAMPA_doc.components[
        'RothmanMFToGrCAMPA'].id
    rothmanMFToGrCNMDA = rothmanMFToGrCNMDA_doc.components[
        'RothmanMFToGrCNMDA'].id

    # Create populations.
    GrCPop = nml.Population(id=iaF_GrC.id + "Pop",
                            component=iaF_GrC.id,
                            type="populationList",
                            size=N_grc)
    GrCPop.properties.append(nml.Property(tag='color', value='0 0 0.8'))
    GrCPop.properties.append(nml.Property(tag='radius', value=2))
    mossySpikersPopON = nml.Population(id=spike_generator_on.id + "Pop",
                                       component=spike_generator_on.id,
                                       type="populationList",
                                       size=N_mf_ON)
    mossySpikersPopON.properties.append(
        nml.Property(tag='color', value='0.8 0 0'))
    mossySpikersPopON.properties.append(nml.Property(tag='radius', value=2))
    # BUG FIX: this population is given explicit Instances below, so it
    # must be a populationList like the other two (the original omitted
    # type=).
    mossySpikersPopOFF = nml.Population(id=spike_generator_off.id + "Pop",
                                        component=spike_generator_off.id,
                                        type="populationList",
                                        size=N_mf_OFF)
    mossySpikersPopOFF.properties.append(
        nml.Property(tag='color', value='0 0.8 0'))
    mossySpikersPopOFF.properties.append(nml.Property(tag='radius', value=2))

    # Create network and add populations.
    net = nml.Network(id="network")
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.iaf_ref_cells.append(iaF_GrC)
    net.populations.append(GrCPop)
    net.populations.append(mossySpikersPopON)
    net.populations.append(mossySpikersPopOFF)

    # Add locations for GCs.
    for grc in range(N_grc):
        inst = nml.Instance(id=grc)
        GrCPop.instances.append(inst)
        inst.location = nml.Location(x=grc_pos[grc, 0],
                                     y=grc_pos[grc, 1],
                                     z=grc_pos[grc, 2])

    def _wire_mf_population(pop, spiker_component_id, indices, count,
                            ampa_proj, nmda_proj):
        """Place MF instances and connect each to its GC neighbours."""
        ix = 0
        for local_ix in range(count):
            mf_ix = indices[local_ix]
            inst = nml.Instance(id=local_ix)
            pop.instances.append(inst)
            inst.location = nml.Location(x=glom_pos[mf_ix, 0],
                                         y=glom_pos[mf_ix, 1],
                                         z=glom_pos[mf_ix, 2])
            # Find which granule cells are neighbours.
            innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
            for grc_ix in innervated_grcs:
                # BUG FIX: one connection per MF-GC pair, added to both
                # the AMPA and NMDA projections. The original looped
                # over the two synapse ids without ever using the loop
                # variable, duplicating every connection.
                connection = nml.Connection(
                    id=ix,
                    pre_cell_id='../{}/{}/{}'.format(
                        pop.id, local_ix, spiker_component_id),
                    post_cell_id='../{}/{}/{}'.format(
                        GrCPop.id, grc_ix, iaF_GrC.id))
                ampa_proj.connections.append(connection)
                nmda_proj.connections.append(connection)
                ix += 1

    # ON MFs: locations and connectivity.
    ONprojectionAMPA = nml.Projection(
        id="ONProjAMPA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    ONprojectionNMDA = nml.Projection(
        id="ONProjNMDA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(ONprojectionAMPA)
    net.projections.append(ONprojectionNMDA)
    _wire_mf_population(mossySpikersPopON, spike_generator_on.id,
                        mf_indices_ON, N_mf_ON,
                        ONprojectionAMPA, ONprojectionNMDA)

    # OFF MFs: locations and connectivity.
    OFFprojectionAMPA = nml.Projection(
        id="OFFProjAMPA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    OFFprojectionNMDA = nml.Projection(
        id="OFFProjNMDA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(OFFprojectionAMPA)
    net.projections.append(OFFprojectionNMDA)
    # BUG FIX: OFF connections must reference the OFF spike generator
    # component (the original copy-pasted spike_generator_on.id).
    _wire_mf_population(mossySpikersPopOFF, spike_generator_off.id,
                        mf_indices_OFF, N_mf_OFF,
                        OFFprojectionAMPA, OFFprojectionNMDA)

    # Write network to file.
    net_file_name = 'OSBnet.nml'
    pynml.write_neuroml2_file(net_doc, net_file_name)

    # Write LEMS instances to file.
    lems_instances_file_name = 'instances.xml'
    pynml.write_lems_file(lems_instances_doc, lems_instances_file_name,
                          validate=False)

    # Create a LEMSSimulation to manage creation of the LEMS file.
    ls = LEMSSimulation(
        'sim', duration, dt,
        simulation_seed=123)  # int(np.round(1000*random.random()))

    # Point to network as target of simulation.
    ls.assign_simulation_target(net.id)

    # Include generated/existing NeuroML2 files.
    ls.include_lems_file(lems_instances_file_name)
    ls.include_lems_file(ampa_syn_filename, include_included=False)
    ls.include_lems_file(nmda_syn_filename, include_included=False)
    ls.include_neuroml2_file(net_file_name)

    # Specify Displays and Output Files.
    basedir = ''

    eof0 = 'Volts_file'
    ls.create_event_output_file(eof0, basedir + "MF_spikes.dat")
    for i in range(mossySpikersPopON.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_ON[i],
            '{}/{}/{}'.format(mossySpikersPopON.id, i,
                              spike_generator_on.id), 'spike')
    for i in range(mossySpikersPopOFF.size):
        # BUG FIX: select the OFF spike generator component here, not
        # the ON one (copy-paste error in the original).
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_OFF[i],
            '{}/{}/{}'.format(mossySpikersPopOFF.id, i,
                              spike_generator_off.id), 'spike')

    eof1 = 'GrCspike_file'
    ls.create_event_output_file(eof1, basedir + "GrC_spikes.dat")
    for i in range(GrCPop.size):
        ls.add_selection_to_event_output_file(
            eof1, i,
            '{}/{}/{}'.format(GrCPop.id, i, iaF_GrC.id), 'spike')

    lems_file_name = ls.save_to_file()

    if run:
        print('Running the generated LEMS file: %s for simulation of %sms'
              % (lems_file_name, duration))
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               max_memory="8G",
                                               nogui=True,
                                               load_saved_data=False,
                                               plot=False)
        return results