def generate_network(reference, network_seed=1234, temperature='32degC'):
    """
    Generate a network which will contain populations, projections, etc.

    Arguments:

    `reference`
        the reference to use as the id for the network
    `network_seed`
        optional, used for random elements of the network, e.g. placement of cells in 3D
    `temperature`
        optional, specified in the network and used in temperature dependent elements,
        e.g. ion channels with Q10. Default: 32degC
    """
    # Reset module-level build state left over from any previous generation.
    del oc_build.all_included_files[:]
    oc_build.all_cells.clear()

    # Seed the RNG so random placement/connectivity is reproducible.
    random.seed(network_seed)

    doc = neuroml.NeuroMLDocument(id='%s' % reference)
    doc.properties.append(neuroml.Property("Network seed", network_seed))

    # The network carries the temperature for Q10-dependent elements.
    net = neuroml.Network(id='%s' % reference,
                          type='networkWithTemperature',
                          temperature=temperature)
    doc.networks.append(net)

    opencortex.print_comment_v(
        "Created NeuroMLDocument containing a network with id: %s" % reference)

    return doc, net
def annotate(self, **annotations):
    """Attach the given annotations as Properties on the population and record them locally."""
    print("Updating annotations: %s"%annotations)
    for key, value in annotations.items():
        self.pop.properties.append(neuroml.Property(key, value))
    self.annotations.update(annotations)
def handle_population(self, population_id, component, size, component_obj=None, properties=None):
    """
    Event-handler callback: record a new population on the network being built.

    `population_id`  id for the new Population
    `component`      id of the component (cell type) the population uses
    `size`           number of cells; a negative size is logged as an error
    `component_obj`  optional component object, appended to self.nml_doc if given
    `properties`     optional dict of Property tag -> value pairs
    """
    # BUGFIX: default was a mutable dict (properties={}), shared across calls;
    # use None as the sentinel and create a fresh dict per call.
    if properties is None:
        properties = {}

    if component_obj:
        self.nml_doc.append(component_obj)

    pop = neuroml.Population(id=population_id, component=component, size=size)
    self.populations[population_id] = pop
    self.network.populations.append(pop)

    for p in properties:
        pop.properties.append(neuroml.Property(p, properties[p]))

    comp_obj_info = ' (%s)' % type(component_obj) if component_obj else ''

    if (size >= 0):
        sizeInfo = ", size " + str(size) + " cells"
        self.log.debug("Creating population: " + population_id + ", cell type: " + component + comp_obj_info + sizeInfo)
    else:
        self.log.error("Population: " + population_id + ", cell type: " + component + comp_obj_info + " specifies no size. May lead to errors!")
def parse_group(self, g):
    """
    Recursively walk an HDF5 group tree, creating a PopulationContainer for
    each child group of the 'populations' group and delegating datasets to
    parse_dataset().

    NOTE(review): block structure reconstructed from a whitespace-mangled
    source — confirm nesting against the original file.
    """
    print("Parsing group: " + str(g) + ", name: " + g._v_name)

    for node in g:
        print("Sub node: %s, class: %s, name: %s (parent: %s)" % (node, node._c_classid, node._v_name, g._v_name))

        if node._c_classid == 'GROUP':
            if g._v_name == 'populations':
                # Child group names map to population ids ('-' is not valid in NeuroML ids).
                pop_id = node._v_name.replace('-', '_')
                self.current_population = PopulationContainer(id=pop_id, component=self.DUMMY_CELL_ID, size=0, type='populationList')
                print(" Adding new Population: %s" % self.current_population)
                self.network.populations.append(self.current_population)
                # Give every population a random display colour.
                p = neuroml.Property(tag='color', value='%s %s %s' % (random.random(), random.random(), random.random()))
                self.current_population.properties.append(p)
            # Recurse into every sub-group (not only under 'populations').
            self.parse_group(node)
        if self._is_dataset(node):
            self.parse_dataset(node)
            # A dataset ends the scope of the population currently being filled.
            self.current_population = None
def generate(reference = "SimpleNet", scale=1, format='xml'):
    """Build a simple one-population Izhikevich network, save it, and (for XML) generate a LEMS simulation."""
    n_cells = scale_pop_size(3,scale)

    doc, net = oc.generate_network(reference)
    oc.include_opencortex_cell(doc, 'izhikevich/RS.cell.nml')

    # One population of RS cells placed in a 100x100x100 box at the origin.
    pop = oc.add_population_in_rectangular_region(net, 'RS_pop', 'RS', n_cells,
                                                  0,0,0, 100,100,100,
                                                  color='0 .8 0')
    import neuroml
    pop.properties.append(neuroml.Property('radius',10))

    # Synapse + Poisson input driving every cell.
    syn = oc.add_exp_two_syn(doc, id="syn0", gbase="2nS", erev="0mV",
                             tau_rise="0.5ms", tau_decay="10ms")
    pfs = oc.add_poisson_firing_synapse(doc, id="poissonFiringSyn",
                                        average_rate="50 Hz", synapse_id=syn.id)
    oc.add_inputs_to_population(net, "Stim0", pop, pfs.id, all_cells=True)

    nml_file_name = '%s.net.nml'%net.id
    oc.save_network(doc, nml_file_name, validate=(format=='xml'), format = format)

    # A LEMS simulation only makes sense for the XML serialisation.
    if format=='xml':
        oc.generate_lems_simulation(doc, net, nml_file_name,
                                    duration = 500, dt = 0.025,
                                    report_file_name='report.simple.txt')
def add_single_cell_population(net, pop_id, cell_id, x=0, y=0, z=0, color=None):
    """
    Create a one-cell population `pop_id` of cell `cell_id`, place its single
    instance at (`x`,`y`,`z`), optionally set the population `color`, and
    attach it to `net`. Returns the new Population.
    """
    # TODO: switch to neuroml.hdf5.NetworkContainer.PopulationContainer when supported.
    pop = neuroml.Population(id=pop_id,
                             component=cell_id,
                             type="populationList",
                             size=1)

    if color is not None:
        pop.properties.append(neuroml.Property("color", color))

    net.populations.append(pop)

    single = neuroml.Instance(id=0)
    single.location = neuroml.Location(x=x, y=y, z=z)
    pop.instances.append(single)

    return pop
# Script: build a minimal NeuroML document containing one network with two
# populations, then write it out to XML.
import neuroml

nml_doc = neuroml.NeuroMLDocument(id="simplenet")

net = neuroml.Network(id="simplenet")
nml_doc.networks.append(net)

# Create 2 populations
size0 = 5
size1 = 5

pop0 = neuroml.Population(id="Pop0", size = size0, component="myComponent")
net.populations.append(pop0)

# Custom property attached to the first population only.
p = neuroml.Property(tag="axes_to_plot_tuple", value="(1,1)")
pop0.properties.append(p)

pop1 = neuroml.Population(id="Pop1", size = size1, component="myComponent")
net.populations.append(pop1)

#########################################################

import neuroml.writers as writers

nml_file = 'simplenet.nml'
writers.NeuroMLWriter.write(nml_doc, nml_file)
pfsStrong = oc.add_poisson_firing_synapse(nml_doc, id="poissonFiringSynStrong", average_rate="200 Hz", synapse_id=synAmpa2.id) ##### Populations popIaf = oc.add_population_in_rectangular_region(network, 'popIaf', 'iaf', scale_pop_size(20), 0,offset,0, xDim,yDim,zDim, color='.8 0 0') import neuroml popIaf.properties.append(neuroml.Property('radius',5)) offset+=yDim popIafRef = oc.add_population_in_rectangular_region(network, 'popIafRef', 'iafRef', scale_pop_size(20), 0,offset,0, xDim,yDim,zDim, color='0 0 .8') popIafRef.properties.append(neuroml.Property('radius',5)) offset+=yDim popIzh = oc.add_population_in_rectangular_region(network, 'popIzh', 'RS',
def generate_WB_network(cell_id,
                        synapse_id,
                        numCells_bc,
                        connection_probability,
                        I_mean,
                        I_sigma,
                        generate_LEMS_simulation,
                        duration,
                        x_size=100,
                        y_size=100,
                        z_size=100,
                        network_id=ref + 'Network',
                        color='0 0 1',
                        connection=True,
                        temperature='37 degC',
                        validate=True,
                        dt=0.01):
    """
    Generate a Wang-Buzsaki basket-cell network: cells placed randomly in a
    box, all-to-all probabilistic continuous connections, per-cell voltage
    clamps (to randomise V_init) and per-cell IClamps drawn from a Gaussian.

    Returns (ls, lems_file_name): the LEMSSimulation (or None) and the LEMS
    file name ('' if generate_LEMS_simulation is False).
    """
    nml_doc = neuroml.NeuroMLDocument(id=network_id)
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsaki.cell.nml'))
    nml_doc.includes.append(neuroml.IncludeType(href='WangBuzsakiSynapse.xml'))

    # Create network
    net = neuroml.Network(id=network_id, type='networkWithTemperature', temperature=temperature)
    net.notes = 'Network generated using libNeuroML v%s' % __version__
    nml_doc.networks.append(net)

    # Create population; cells placed at random positions within the box.
    pop = neuroml.Population(id=ref + 'pop', component=cell_id, type='populationList', size=numCells_bc)
    if color is not None:
        pop.properties.append(neuroml.Property('color', color))
    net.populations.append(pop)

    for i in range(0, numCells_bc):
        inst = neuroml.Instance(id=i)
        pop.instances.append(inst)
        inst.location = neuroml.Location(x=str(x_size * rnd.random()),
                                         y=str(y_size * rnd.random()),
                                         z=str(z_size * rnd.random()))

    # Add connections.
    # BUGFIX: the loop variable was named 'connection', silently overwriting
    # the 'connection' parameter; renamed to 'conn'.
    proj = neuroml.ContinuousProjection(id=ref + 'proj',
                                        presynaptic_population=pop.id,
                                        postsynaptic_population=pop.id)
    conn_count = 0
    for i in range(0, numCells_bc):
        for j in range(0, numCells_bc):
            if i != j and rnd.random() < connection_probability:
                conn = neuroml.ContinuousConnectionInstance(
                    id=conn_count,
                    pre_cell='../%s/%i/%s' % (pop.id, i, cell_id),
                    pre_component='silent',
                    post_cell='../%s/%i/%s' % (pop.id, j, cell_id),
                    post_component=synapse_id)
                proj.continuous_connection_instances.append(conn)
                conn_count += 1
    net.continuous_projections.append(proj)

    # make cell pop inhomogenouos (different V_init-s with voltage-clamp)
    vc_dur = 2  # ms
    for i in range(0, numCells_bc):
        tmp = -75 + (rnd.random() * 15)
        vc = neuroml.VoltageClamp(id='VClamp%i' % i,
                                  delay='0ms',
                                  duration='%ims' % vc_dur,
                                  simple_series_resistance='1e6ohm',
                                  target_voltage='%imV' % tmp)
        nml_doc.voltage_clamps.append(vc)
        input_list = neuroml.InputList(id='input_%i' % i, component='VClamp%i' % i, populations=pop.id)
        # 'inp' rather than 'input' to avoid shadowing the builtin.
        inp = neuroml.Input(id=i, target='../%s/%i/%s' % (pop.id, i, cell_id), destination='synapses')
        input_list.input.append(inp)
        net.input_lists.append(input_list)

    # Add outer input (IClamp), amplitudes drawn from a Gaussian.
    # NOTE(review): I_sigma**2 (a variance) is passed where numpy expects the
    # standard deviation — confirm this is intended.
    tmp = rnd.normal(I_mean, I_sigma**2, numCells_bc)
    for i in range(0, numCells_bc):
        pg = neuroml.PulseGenerator(id='IClamp%i' % i,
                                    delay='%ims' % vc_dur,
                                    duration='%ims' % (duration - vc_dur),
                                    amplitude='%fpA' % (tmp[i]))
        nml_doc.pulse_generators.append(pg)
        input_list = neuroml.InputList(id='input%i' % i, component='IClamp%i' % i, populations=pop.id)
        inp = neuroml.Input(id=i, target='../%s/%i/%s' % (pop.id, i, cell_id), destination='synapses')
        input_list.input.append(inp)
        net.input_lists.append(input_list)

    # Write to file.
    # BUGFIX: Python 2 print statements converted to print() calls, consistent
    # with the rest of the file (py2 prints are syntax errors under Python 3).
    nml_file = '%s100Cells.net.nml' % ref
    print('Writing network file to:', nml_file, '...')
    neuroml.writers.NeuroMLWriter.write(nml_doc, nml_file)

    if validate:
        # Validate the NeuroML
        from neuroml.utils import validate_neuroml2
        validate_neuroml2(nml_file)

    if generate_LEMS_simulation:
        # Create a LEMSSimulation to manage creation of LEMS file
        ls = LEMSSimulation(sim_id='%sNetSim' % ref, duration=duration, dt=dt)

        # Point to network as target of simulation
        ls.assign_simulation_target(net.id)

        # Include generated/existing NeuroML2 files
        ls.include_neuroml2_file('WangBuzsaki.cell.nml', include_included=False)
        ls.include_neuroml2_file('WangBuzsakiSynapse.xml', include_included=False)
        ls.include_neuroml2_file(nml_file, include_included=False)

        # Specify Display and output files
        disp_bc = 'display_bc'
        ls.create_display(disp_bc, 'Basket Cell Voltage trace', '-80', '40')
        of_bc = 'volts_file_bc'
        ls.create_output_file(of_bc, 'wangbuzsaki_network.dat')
        of_spikes_bc = 'spikes_bc'
        ls.create_event_output_file(of_spikes_bc, 'wangbuzsaki_network_spikes.dat')

        max_traces = 9  # the 10th color in NEURON is white ...
        for i in range(numCells_bc):
            quantity = '%s/%i/%s/v' % (pop.id, i, cell_id)
            if i < max_traces:
                ls.add_line_to_display(disp_bc, 'BC %i: Vm' % i, quantity, '1mV', pynml.get_next_hex_color())
            ls.add_column_to_output_file(of_bc, 'v_%i' % i, quantity)
            ls.add_selection_to_event_output_file(of_spikes_bc, i,
                                                  select='%s/%i/%s' % (pop.id, i, cell_id),
                                                  event_port='spike')

        # Save to LEMS file
        print('Writing LEMS file...')
        lems_file_name = ls.save_to_file()
    else:
        ls = None
        lems_file_name = ''

    return ls, lems_file_name
def process_celldir(inputs):
    """
    Process one BBP cell directory: render a loadcell.hoc, export the cell
    via NEURON to NeuroML2, attach segment groups / biophysics / metadata,
    build stimulated network + LEMS files, and optionally zip the results.

    `inputs` is a tuple (count, cell_dir, nml2_cell_dir, total_count).
    Returns (nml_cell_file, pop).

    NOTE(review): depends on module-level names (make_zips, zips_dir,
    groups_info_file and several helper functions) defined elsewhere in the
    file; indentation reconstructed from a whitespace-mangled source.
    """
    count, cell_dir, nml2_cell_dir, total_count = inputs
    local_nml2_cell_dir = os.path.join("..", nml2_cell_dir)

    print(
        '\n\n************************************************************\n\n'
        'Parsing %s (cell %i/%i)\n' % (cell_dir, count, total_count))

    # Move into the cell's directory (it may be a sibling of the cwd).
    if os.path.isdir(cell_dir):
        old_cwd = os.getcwd()
        os.chdir(cell_dir)
    else:
        old_cwd = os.getcwd()
        os.chdir('../' + cell_dir)

    if make_zips:
        nml2_cell_dir = '%s/%s' % (zips_dir, cell_dir)
        if not os.path.isdir(nml2_cell_dir):
            os.mkdir(nml2_cell_dir)
        print("Generating into %s" % nml2_cell_dir)

    # The template name declared in template.hoc provides the cell reference.
    # NOTE(review): template_file is never closed.
    bbp_ref = None
    template_file = open('template.hoc', 'r')
    for line in template_file:
        if line.startswith('begintemplate '):
            bbp_ref = line.split(' ')[1].strip()
            print(
                ' > Assuming cell in directory %s is in a template named %s' %
                (cell_dir, bbp_ref))

    load_cell_file = 'loadcell.hoc'

    variables = {}
    variables['cell'] = bbp_ref
    variables['groups_info_file'] = groups_info_file

    # hoc script (jinja-style placeholders) that loads the cell into NEURON and
    # dumps its section-group membership to groups_info_file.
    template = """
///////////////////////////////////////////////////////////////////////////////
//
//   NOTE: This file is not part of the original BBP cell model distribution
//   It has been generated by ../ParseAll.py to facilitate loading of the cell
//   into NEURON for exporting the model morphology to NeuroML2
//
//////////////////////////////////////////////////////////////////////////////

load_file("stdrun.hoc")

objref cvode
cvode = new CVode()
cvode.active(1)

//======================== settings ===================================

v_init = -80

hyp_amp = -0.062866
step_amp = 0.3112968
tstop = 3000

//=================== creating cell object ===========================
load_file("import3d.hoc")
objref cell

// Using 1 to force loading of the file, in case file with same name was loaded
// before...
load_file(1, "constants.hoc")
load_file(1, "morphology.hoc")
load_file(1, "biophysics.hoc")
print "Loaded morphology and biophysics..."

load_file(1, "synapses/synapses.hoc")
load_file(1, "template.hoc")
print "Loaded template..."

load_file(1, "createsimulation.hoc")

create_cell(0)
print "Created new cell using loadcell.hoc: {{ cell }}"

define_shape()

wopen("{{ groups_info_file }}")

fprint("//Saving information on groups in this cell...\\n")

fprint("- somatic\\n")
forsec {{ cell }}[0].somatic {
    fprint("%s\\n",secname())
}

fprint("- basal\\n")
forsec {{ cell }}[0].basal {
    fprint("%s\\n",secname())
}

fprint("- axonal\\n")
forsec {{ cell }}[0].axonal {
    fprint("%s\\n",secname())
}

fprint("- apical\\n")
forsec {{ cell }}[0].apical {
    fprint("%s\\n",secname())
}
wopen()
"""

    t = Template(template)
    contents = t.render(variables)

    load_cell = open(load_cell_file, 'w')
    load_cell.write(contents)
    load_cell.close()

    print(' > Written %s' % load_cell_file)

    if os.path.isfile(load_cell_file):
        cell_info = parse_cell_info_file(cell_dir)

        nml_file_name = "%s.net.nml" % bbp_ref
        nml_net_loc = "%s/%s" % (local_nml2_cell_dir, nml_file_name)
        nml_cell_file = "%s_0_0.cell.nml" % bbp_ref
        nml_cell_loc = "%s/%s" % (local_nml2_cell_dir, nml_cell_file)

        print(' > Loading %s and exporting to %s' % (load_cell_file, nml_net_loc))

        export_to_neuroml2(load_cell_file,
                           nml_net_loc,
                           separateCellFiles=True,
                           includeBiophysicalProperties=False)

        print(' > Exported to: %s and %s using %s' %
              (nml_net_loc, nml_cell_loc, load_cell_file))

        nml_doc = pynml.read_neuroml2_file(nml_cell_loc)
        cell = nml_doc.cells[0]

        # Read back the group membership written by the hoc script above.
        print(' > Adding groups from: %s' % groups_info_file)
        groups = {}
        current_group = None
        for line in open(groups_info_file):
            if not line.startswith('//'):
                if line.startswith('- '):
                    current_group = line[2:-1]
                    print(' > Adding group: [%s]' % current_group)
                    groups[current_group] = []
                else:
                    section = line.split('.')[1].strip()
                    segment_group = section.replace('[', '_').replace(']', '')
                    groups[current_group].append(segment_group)

        for g in groups.keys():
            new_seg_group = neuroml.SegmentGroup(id=g)
            cell.morphology.segment_groups.append(new_seg_group)
            for sg in groups[g]:
                new_seg_group.includes.append(neuroml.Include(sg))
            # Dendritic groups get a path-length parameter for inhomogeneous
            # channel densities.
            if g in ['basal', 'apical']:
                new_seg_group.inhomogeneous_parameters.append(
                    neuroml.InhomogeneousParameter(
                        id="PathLengthOver_" + g,
                        variable="p",
                        metric="Path Length from root",
                        proximal=neuroml.ProximalDetails(translation_start="0")))

        # Standard convenience groups expected by downstream tools.
        cell.morphology.segment_groups.append(
            neuroml.SegmentGroup(id="soma_group", includes=[neuroml.Include("somatic")]))
        cell.morphology.segment_groups.append(
            neuroml.SegmentGroup(id="axon_group", includes=[neuroml.Include("axonal")]))
        cell.morphology.segment_groups.append(
            neuroml.SegmentGroup(
                id="dendrite_group",
                includes=[neuroml.Include("basal"), neuroml.Include("apical")]))

        ignore_chans = [
            'Ih', 'Ca_HVA', 'Ca_LVAst', 'Ca', "SKv3_1", "SK_E2",
            "CaDynamics_E2", "Nap_Et2", "Im", "K_Tst", "NaTa_t", "K_Pst",
            "NaTs2_t"
        ]
        # ignore_chans=['StochKv','StochKv_deterministic']
        # NOTE(review): the list above is immediately discarded here — all
        # channels are currently included.
        ignore_chans = []

        bp, incl_chans = get_biophysical_properties(
            cell_info['e-type'],
            ignore_chans=ignore_chans,
            templates_json="../templates.json")
        cell.biophysical_properties = bp
        print("Set biophysical properties")

        notes = ''
        notes += \
            "\n\nExport of a cell model obtained from the BBP Neocortical" \
            "Microcircuit Collaboration Portal into NeuroML2\n\n"
        if len(ignore_chans) > 0:
            notes += "Ignored channels = %s\n\n" % ignore_chans

        #### Fix me-type
        cell_name = cell_info['cell name']
        cell_info['me-type'] = cell_info['m-type'] + '_' + re.split(
            '[0-9]', cell_name)[0]

        notes += "For more information on this cell model see: " \
                 "https://bbp.epfl.ch/nmc-portal/microcircuit#/metype/%s/" \
                 "details\n\n" % cell_info['me-type']
        cell.notes = notes

        # Attach all metadata as BBP-namespaced properties.
        for k in cell_info.keys():
            p = neuroml.Property(tag='BBP:%s' % k, value=cell_info[k])
            cell.properties.append(p)

        for channel in incl_chans:
            nml_doc.includes.append(neuroml.IncludeType(href="%s" % channel))
            if make_zips:
                print("Copying %s to zip folder" % channel)
                shutil.copyfile('../../NeuroML2/%s' % channel,
                                '%s/%s' % (local_nml2_cell_dir, channel))

        pynml.write_neuroml2_file(nml_doc, nml_cell_loc)

        # Build a network with hyperpolarising + step current stimuli.
        stim_ref = 'stepcurrent3'
        stim_ref_hyp = '%s_hyp' % stim_ref
        stim_sim_duration = 3000
        stim_hyp_amp, stim_amp = get_stimulus_amplitudes(bbp_ref)
        stim_del = '700ms'
        stim_dur = '2000ms'

        new_net_loc = "%s/%s.%s.net.nml" % (local_nml2_cell_dir, bbp_ref, stim_ref)
        new_net_doc = pynml.read_neuroml2_file(nml_net_loc)
        new_net_doc.notes = notes

        stim_hyp = neuroml.PulseGenerator(id=stim_ref_hyp,
                                          delay="0ms",
                                          duration="%sms" % stim_sim_duration,
                                          amplitude=stim_hyp_amp)
        new_net_doc.pulse_generators.append(stim_hyp)
        stim = neuroml.PulseGenerator(id=stim_ref,
                                      delay=stim_del,
                                      duration=stim_dur,
                                      amplitude=stim_amp)
        new_net_doc.pulse_generators.append(stim)

        new_net = new_net_doc.networks[0]
        pop_id = new_net.populations[0].id
        pop_comp = new_net.populations[0].component

        input_list = neuroml.InputList(id="%s_input" % stim_ref_hyp,
                                       component=stim_ref_hyp,
                                       populations=pop_id)
        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")
        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        input_list = neuroml.InputList(id="%s_input" % stim_ref,
                                       component=stim_ref,
                                       populations=pop_id)
        syn_input = neuroml.Input(id=0,
                                  target="../%s/0/%s" % (pop_id, pop_comp),
                                  destination="synapses")
        input_list.input.append(syn_input)
        new_net.input_lists.append(input_list)

        pynml.write_neuroml2_file(new_net_doc, new_net_loc)

        generate_lems_file_for_neuroml(cell_dir,
                                       new_net_loc,
                                       "network",
                                       stim_sim_duration,
                                       0.025,
                                       "LEMS_%s.xml" % cell_dir,
                                       local_nml2_cell_dir,
                                       copy_neuroml=False,
                                       simulation_seed=1234)

        pynml.nml2_to_svg(nml_net_loc)

        clear_neuron()

        # Population placing this cell on a 6-wide grid for combined views.
        pop = neuroml.Population(id="Pop_%s" % bbp_ref,
                                 component=bbp_ref + '_0_0',
                                 type="populationList")
        inst = neuroml.Instance(id="0")
        pop.instances.append(inst)

        width = 6
        X = count % width
        # NOTE(review): '/' is float division on Python 3 — confirm an integer
        # grid row is intended here.
        Z = (count - X) / width
        inst.location = neuroml.Location(x=300 * X, y=0, z=300 * Z)

        count += 1

        if make_zips:
            zip_file = "%s/%s.zip" % (zips_dir, cell_dir)
            print("Creating zip file: %s" % zip_file)
            with zipfile.ZipFile(zip_file, 'w') as myzip:
                for next_file in os.listdir(local_nml2_cell_dir):
                    next_file = '%s/%s' % (local_nml2_cell_dir, next_file)
                    arcname = next_file[len(zips_dir):]
                    print("Adding : %s as %s" % (next_file, arcname))
                    myzip.write(next_file, arcname)

    os.chdir(old_cwd)

    return nml_cell_file, pop
def start_group(self, g):
    """
    Callback fired for each HDF5 group while walking a NeuroML HDF5 file.

    Depending on the group name, either forwards parse events to
    self.netHandler (non-optimized mode) or builds *Container objects for the
    optimized, array-backed representation.
    """
    self.log.debug("Going into a group: " + g._v_name)

    if g._v_name == 'neuroml':
        # Document root: capture id and notes.
        if not self.optimized:
            self.netHandler.handle_document_start(
                get_str_attribute_group(g, 'id'),
                get_str_attribute_group(g, 'notes'))
        else:
            self.doc_id = get_str_attribute_group(g, 'id')
            self.doc_notes = get_str_attribute_group(g, 'notes')

    if g._v_name == 'network':
        if not self.optimized:
            self.netHandler.handle_network(
                get_str_attribute_group(g, 'id'),
                get_str_attribute_group(g, 'notes'),
                temperature=get_str_attribute_group(g, 'temperature'))
        else:
            self.optimizedNetwork = NetworkContainer(
                id=get_str_attribute_group(g, 'id'),
                notes=get_str_attribute_group(g, 'notes'),
                temperature=get_str_attribute_group(g, 'temperature'))

    if g._v_name.count('population_') >= 1:
        # TODO: a better check to see if the attribute is a str or numpy.ndarray
        self.currPopulation = get_str_attribute_group(g, 'id')
        self.currentComponent = get_str_attribute_group(g, 'component')
        size = self._get_node_size(g, self.currPopulation)

        # Collect 'property:<name>' attributes into a plain dict.
        properties = {}
        for fname in g._v_attrs._v_attrnames:
            if fname.startswith('property:'):
                name = str(fname).split(':')[1]
                properties[name] = get_str_attribute_group(g, fname)

        self.log.debug("Found a population: %s, component: %s, size: %s, properties: %s" % \
                       (self.currPopulation,self.currentComponent,size,properties))

        if not self.optimized:
            if self.nml_doc_extra_elements:
                component_obj = self.nml_doc_extra_elements.get_by_id(
                    self.currentComponent)
            else:
                component_obj = None

            # Older handler implementations may not accept 'properties'.
            if 'properties' in inspect.getargspec(
                    self.netHandler.handle_population)[0]:
                self.netHandler.handle_population(
                    self.currPopulation,
                    self.currentComponent,
                    size,
                    component_obj=component_obj,
                    properties=properties)
            else:
                self.netHandler.handle_population(
                    self.currPopulation,
                    self.currentComponent,
                    size,
                    component_obj=component_obj)
        else:
            self.currOptPopulation = PopulationContainer(
                id=self.currPopulation,
                component=self.currentComponent,
                size=size)
            for p in properties:
                self.currOptPopulation.properties.append(
                    neuroml.Property(p, properties[p]))
            self.optimizedNetwork.populations.append(self.currOptPopulation)

    if g._v_name.count('projection_') >= 1:
        self.currentProjectionId = get_str_attribute_group(g, 'id')
        pt = get_str_attribute_group(g, 'type')
        self.currentProjectionType = pt if pt else "projection"
        self.currentProjectionPrePop = get_str_attribute_group(
            g, 'presynapticPopulation')
        self.currentProjectionPostPop = get_str_attribute_group(
            g, 'postsynapticPopulation')

        # 'synapse' is the preferred attribute; 'postComponent'/'preComponent'
        # are the continuous-projection equivalents.
        if "synapse" in g._v_attrs:
            self.currentSynapse = get_str_attribute_group(g, 'synapse')
        elif "postComponent" in g._v_attrs:
            self.currentSynapse = get_str_attribute_group(g, 'postComponent')
        if "preComponent" in g._v_attrs:
            self.currentPreSynapse = get_str_attribute_group(g, 'preComponent')

        if not self.optimized:
            self.log.debug(
                "------ Found a projection: %s, from %s to %s through %s" %
                (self.currentProjectionId, self.currentProjectionPrePop,
                 self.currentProjectionPostPop, self.currentSynapse))
        else:
            if self.currentProjectionType == 'electricalProjection':
                raise Exception(
                    "Cannot yet export electricalProjections to optimized HDF5 format"
                )
                # NOTE(review): unreachable after the raise — kept from the
                # original; would build the container once supported.
                self.currOptProjection = ElectricalProjectionContainer(
                    id=self.currentProjectionId,
                    presynaptic_population=self.currentProjectionPrePop,
                    postsynaptic_population=self.currentProjectionPostPop)
                self.optimizedNetwork.electrical_projections.append(
                    self.currOptProjection)
            elif self.currentProjectionType == 'continuousProjection':
                raise Exception(
                    "Cannot yet export continuousProjections to optimized HDF5 format"
                )
                # NOTE(review): unreachable after the raise — kept from the original.
                self.optimizedNetwork.continuous_projections.append(
                    self.currOptProjection)
            else:
                self.currOptProjection = ProjectionContainer(
                    id=self.currentProjectionId,
                    presynaptic_population=self.currentProjectionPrePop,
                    postsynaptic_population=self.currentProjectionPostPop,
                    synapse=self.currentSynapse)
                self.optimizedNetwork.projections.append(self.currOptProjection)

    if g._v_name.count('inputList_') >= 1 or g._v_name.count(
            'input_list_') >= 1:  # inputList_ preferred
        # TODO: a better check to see if the attribute is a str or numpy.ndarray
        self.currInputList = get_str_attribute_group(g, 'id')
        component = get_str_attribute_group(g, 'component')
        population = get_str_attribute_group(g, 'population')
        size = self._get_node_size(g, self.currInputList)

        if not self.optimized:
            if self.nml_doc_extra_elements:
                input_comp_obj = self.nml_doc_extra_elements.get_by_id(component)
            else:
                input_comp_obj = None

            self.log.debug("Found an inputList: " + self.currInputList +
                           ", component: " + component + ", population: " +
                           population + ", size: " + str(size))

            self.netHandler.handle_input_list(self.currInputList,
                                              population,
                                              component,
                                              size,
                                              input_comp_obj=input_comp_obj)
        else:
            self.currOptInputList = InputListContainer(id=self.currInputList,
                                                       component=component,
                                                       populations=population)
            self.optimizedNetwork.input_lists.append(self.currOptInputList)
# Script fragment: load an exported Allen Institute cell, attach metadata as
# properties and begin renaming auto-generated segment groups.
nml_doc = pynml.read_neuroml2_file(nml_cell_loc0)
cell = nml_doc.cells[0]

cell.id = 'Cell_%s' % model_id

notes = ''
notes+="\n\nExport of a cell model (%s) obtained from the Allen Institute Cell Types Database into NeuroML2"%model_id + \
       "\n\nElectrophysiology on which this model is based: %s"%metadata_info['URL'] + \
       "\n\n******************************************************\n* This export to NeuroML2 has not yet been fully validated!!"+ \
       "\n* Use with caution!!\n******************************************************\n\n "
cell.notes = notes

# Carry over all AIBS-namespaced metadata entries as cell properties.
for k in metadata_info.keys():
    if k.startswith("AIBS:"):
        p = neuroml.Property(tag=k, value=metadata_info[k])
        cell.properties.append(p)

print(' > Altering groups')

for sg in cell.morphology.segment_groups:
    print("Found group: %s" % sg.id)
    # Empty ModelViewParm* groups are candidates for renaming by prefix.
    if (sg.id.startswith('ModelViewParm')) and len(sg.members) == 0:
        replace = {}
        replace['soma_'] = 'soma'
        replace['axon_'] = 'axon'
        replace['apic_'] = 'apic'
        replace['dend_'] = 'dend'
        for prefix in replace.keys():
            all_match = True
            # NOTE(review): source truncated here — the loop body continues
            # beyond this chunk.
            for inc in sg.includes:
def generate_grc_layer_network(
        p_mf_ON,
        duration,
        dt,
        minimumISI,  # ms
        ONRate,  # Hz
        OFFRate,  # Hz
        run=False):
    """
    Build (and optionally run) a mossy-fibre -> granule-cell layer network.

    `p_mf_ON`     fraction of mossy fibres that are active ("ON")
    `duration`    simulation duration (ms)
    `dt`          simulation timestep (ms)
    `minimumISI`  refractory period of the Poisson spike generators (ms)
    `ONRate`      firing rate of ON mossy fibres (Hz)
    `OFFRate`     firing rate of OFF mossy fibres (Hz)
    `run`         if True, run the generated LEMS file and return its results

    Requires GCLconnectivity.pkl (connectivity matrix + positions) and the
    Rothman synapse LEMS files in the working directory.
    """
    # Load connectivity matrix.
    # BUGFIX: was `file = open('GCLconnectivity.pkl')` — shadowed the builtin,
    # never closed the handle, and used text mode for a binary pickle.
    with open('GCLconnectivity.pkl', 'rb') as pkl_file:
        p = pkl.load(pkl_file)
    conn_mat = p['conn_mat']
    N_mf, N_grc = conn_mat.shape
    assert (np.all(conn_mat.sum(
        axis=0) == 4)), 'Connectivity matrix is incorrect.'

    # Load GrC and MF rosette positions
    grc_pos = p['grc_pos']
    glom_pos = p['glom_pos']

    # Choose which mossy fibers are on, which are off
    N_mf_ON = int(N_mf * p_mf_ON)
    mf_indices_ON = random.sample(range(N_mf), N_mf_ON)
    mf_indices_ON.sort()
    N_mf_OFF = N_mf - N_mf_ON
    mf_indices_OFF = [x for x in range(N_mf) if x not in mf_indices_ON]
    mf_indices_OFF.sort()

    # load NeuroML components, LEMS components and LEMS componentTypes from external files
    ##spikeGeneratorRefPoisson is now a standard nml type...
    iaF_GrC = nml.IafRefCell(id="iaF_GrC",
                             refract="2ms",
                             C="3.22pF",
                             thresh="-40mV",
                             reset="-63mV",
                             leak_conductance="1.498nS",
                             leak_reversal="-79.67mV")

    ampa_syn_filename = "RothmanMFToGrCAMPA.xml"
    nmda_syn_filename = "RothmanMFToGrCNMDA.xml"
    rothmanMFToGrCAMPA_doc = pynml.read_lems_file(ampa_syn_filename)
    rothmanMFToGrCNMDA_doc = pynml.read_lems_file(nmda_syn_filename)

    # define some components from the componentTypes we just loaded
    lems_instances_doc = lems.Model()
    spike_generator_ref_poisson_type_name = 'spikeGeneratorRefPoisson'

    spike_generator_on = lems.Component("mossySpikerON",
                                        spike_generator_ref_poisson_type_name)
    spike_generator_on.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_on.set_parameter("averageRate", "%s Hz" % ONRate)
    lems_instances_doc.add(spike_generator_on)

    spike_generator_off = lems.Component(
        "mossySpikerOFF", spike_generator_ref_poisson_type_name)
    spike_generator_off.set_parameter("minimumISI", "%s ms" % minimumISI)
    spike_generator_off.set_parameter("averageRate", "%s Hz" % OFFRate)
    lems_instances_doc.add(spike_generator_off)

    rothmanMFToGrCAMPA = rothmanMFToGrCAMPA_doc.components[
        'RothmanMFToGrCAMPA'].id
    rothmanMFToGrCNMDA = rothmanMFToGrCNMDA_doc.components[
        'RothmanMFToGrCNMDA'].id

    # create populations
    GrCPop = nml.Population(id=iaF_GrC.id + "Pop",
                            component=iaF_GrC.id,
                            type="populationList",
                            size=N_grc)
    GrCPop.properties.append(nml.Property(tag='color', value='0 0 0.8'))
    GrCPop.properties.append(nml.Property(tag='radius', value=2))

    mossySpikersPopON = nml.Population(id=spike_generator_on.id + "Pop",
                                       component=spike_generator_on.id,
                                       type="populationList",
                                       size=N_mf_ON)
    mossySpikersPopON.properties.append(
        nml.Property(tag='color', value='0.8 0 0'))
    mossySpikersPopON.properties.append(nml.Property(tag='radius', value=2))

    # BUGFIX: the OFF population also needs type="populationList", since
    # explicit instances with locations are appended to it below.
    mossySpikersPopOFF = nml.Population(id=spike_generator_off.id + "Pop",
                                        component=spike_generator_off.id,
                                        type="populationList",
                                        size=N_mf_OFF)
    mossySpikersPopOFF.properties.append(
        nml.Property(tag='color', value='0 0.8 0'))
    mossySpikersPopOFF.properties.append(nml.Property(tag='radius', value=2))

    # create network and add populations
    net = nml.Network(id="network")
    net_doc = nml.NeuroMLDocument(id=net.id)
    net_doc.networks.append(net)
    net_doc.iaf_ref_cells.append(iaF_GrC)
    net.populations.append(GrCPop)
    net.populations.append(mossySpikersPopON)
    net.populations.append(mossySpikersPopOFF)

    # Add locations for GCs
    for grc in range(N_grc):
        inst = nml.Instance(id=grc)
        GrCPop.instances.append(inst)
        inst.location = nml.Location(x=grc_pos[grc, 0],
                                     y=grc_pos[grc, 1],
                                     z=grc_pos[grc, 2])

    # ON MFs: locations and connectivity
    ONprojectionAMPA = nml.Projection(
        id="ONProjAMPA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    ONprojectionNMDA = nml.Projection(
        id="ONProjNMDA",
        presynaptic_population=mossySpikersPopON.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(ONprojectionAMPA)
    net.projections.append(ONprojectionNMDA)

    ix = 0
    for mf_ix_ON in range(N_mf_ON):
        mf_ix = mf_indices_ON[mf_ix_ON]
        inst = nml.Instance(id=mf_ix_ON)
        mossySpikersPopON.instances.append(inst)
        inst.location = nml.Location(x=glom_pos[mf_ix, 0],
                                     y=glom_pos[mf_ix, 1],
                                     z=glom_pos[mf_ix, 2])
        # find which granule cells are neighbors
        innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
        for grc_ix in innervated_grcs:
            # BUGFIX: one Connection per projection. Previously each Connection
            # was appended to BOTH the AMPA and NMDA projections, duplicating
            # every synapse in each projection.
            for projection in [ONprojectionAMPA, ONprojectionNMDA]:
                connection = nml.Connection(
                    id=ix,
                    pre_cell_id='../{}/{}/{}'.format(mossySpikersPopON.id,
                                                     mf_ix_ON,
                                                     spike_generator_on.id),
                    post_cell_id='../{}/{}/{}'.format(GrCPop.id, grc_ix,
                                                      iaF_GrC.id))
                projection.connections.append(connection)
                ix = ix + 1

    # OFF MFs: locations and connectivity
    OFFprojectionAMPA = nml.Projection(
        id="OFFProjAMPA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCAMPA)
    OFFprojectionNMDA = nml.Projection(
        id="OFFProjNMDA",
        presynaptic_population=mossySpikersPopOFF.id,
        postsynaptic_population=GrCPop.id,
        synapse=rothmanMFToGrCNMDA)
    net.projections.append(OFFprojectionAMPA)
    net.projections.append(OFFprojectionNMDA)

    ix = 0
    for mf_ix_OFF in range(N_mf_OFF):
        mf_ix = mf_indices_OFF[mf_ix_OFF]
        inst = nml.Instance(id=mf_ix_OFF)
        mossySpikersPopOFF.instances.append(inst)
        inst.location = nml.Location(x=glom_pos[mf_ix, 0],
                                     y=glom_pos[mf_ix, 1],
                                     z=glom_pos[mf_ix, 2])
        # find which granule cells are neighbors
        innervated_grcs = np.where(conn_mat[mf_ix, :] == 1)[0]
        for grc_ix in innervated_grcs:
            for projection in [OFFprojectionAMPA, OFFprojectionNMDA]:
                # BUGFIX: pre_cell_id must reference the OFF spike generator;
                # it previously used spike_generator_on.id.
                connection = nml.Connection(
                    id=ix,
                    pre_cell_id='../{}/{}/{}'.format(mossySpikersPopOFF.id,
                                                     mf_ix_OFF,
                                                     spike_generator_off.id),
                    post_cell_id='../{}/{}/{}'.format(GrCPop.id, grc_ix,
                                                      iaF_GrC.id))
                projection.connections.append(connection)
                ix = ix + 1

    # Write network to file
    net_file_name = 'OSBnet.nml'
    pynml.write_neuroml2_file(net_doc, net_file_name)

    # Write LEMS instances to file
    lems_instances_file_name = 'instances.xml'
    pynml.write_lems_file(lems_instances_doc,
                          lems_instances_file_name,
                          validate=False)

    # Create a LEMSSimulation to manage creation of LEMS file
    ls = LEMSSimulation(
        'sim', duration, dt,
        simulation_seed=123)  # int(np.round(1000*random.random()))

    # Point to network as target of simulation
    ls.assign_simulation_target(net.id)

    # Include generated/existing NeuroML2 files
    ls.include_lems_file(lems_instances_file_name)
    ls.include_lems_file(ampa_syn_filename, include_included=False)
    ls.include_lems_file(nmda_syn_filename, include_included=False)
    ls.include_neuroml2_file(net_file_name)

    # Specify Displays and Output Files
    basedir = ''

    eof0 = 'Volts_file'
    ls.create_event_output_file(eof0, basedir + "MF_spikes.dat")

    for i in range(mossySpikersPopON.size):
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_ON[i],
            '{}/{}/{}'.format(mossySpikersPopON.id, i, spike_generator_on.id),
            'spike')
    for i in range(mossySpikersPopOFF.size):
        # BUGFIX: selections for the OFF population now reference the OFF
        # spike generator (was spike_generator_on.id).
        ls.add_selection_to_event_output_file(
            eof0, mf_indices_OFF[i],
            '{}/{}/{}'.format(mossySpikersPopOFF.id, i, spike_generator_off.id),
            'spike')

    eof1 = 'GrCspike_file'
    ls.create_event_output_file(eof1, basedir + "GrC_spikes.dat")

    for i in range(GrCPop.size):
        ls.add_selection_to_event_output_file(
            eof1, i, '{}/{}/{}'.format(GrCPop.id, i, iaF_GrC.id), 'spike')

    lems_file_name = ls.save_to_file()

    if run:
        print('Running the generated LEMS file: %s for simulation of %sms' %
              (lems_file_name, duration))
        results = pynml.run_lems_with_jneuroml(lems_file_name,
                                               max_memory="8G",
                                               nogui=True,
                                               load_saved_data=False,
                                               plot=False)
        return results
import opencortex.core as oc nml_doc, network = oc.generate_network("IClamps") oc.include_opencortex_cell(nml_doc, 'izhikevich/RS.cell.nml') oc.include_opencortex_cell(nml_doc, 'acnet2/pyr_4_sym_soma.cell.nml') #oc.include_opencortex_cell(nml_doc, '../NeuroML2/prototypes/BlueBrainProject_NMC/cADpyr229_L23_PC_5ecbf9b163_0_0.cell.nml') popIzh = oc.add_single_cell_population(network, 'popIzh', 'RS', color='.8 0 0') import neuroml popIzh.properties.append(neuroml.Property('radius', 5)) popHH = oc.add_single_cell_population(network, 'popHH', 'pyr_4_sym_soma', z=100, color='0 .8 0') ''' popBBP = oc.add_single_cell_population(network, 'popBBP', 'cADpyr229_L23_PC_5ecbf9b163_0_0', z=200)''' pgIzh = oc.add_pulse_generator(nml_doc, id="pgIzh", delay="100ms", duration="300ms", amplitude="0.5nA") pgHH = oc.add_pulse_generator(nml_doc, id="pgHH",
cell = nml_doc.cells[0] cell.id = 'Cell_%s'%model_id notes = '' notes+="\n\nExport of a cell model (%s) obtained from the Allen Institute Cell Types Database into NeuroML2"%model_id + \ "\n\nElectrophysiology on which this model is based: %s"%metadata_info['URL'] + \ "\n\n******************************************************\n* This export to NeuroML2 has not yet been fully validated!!"+ \ "\n* Use with caution!!\n******************************************************\n\n " cell.notes = notes for k in metadata_info.keys(): if k.startswith("AIBS:"): p = neuroml.Property(tag=k, value=metadata_info[k]) cell.properties.append(p) print(' > Altering groups') for sg in cell.morphology.segment_groups: print("Found group: %s"%sg.id) if (sg.id.startswith('ModelViewParm')) and len(sg.members)==0: replace = {} replace['soma_'] = 'soma' replace['axon_'] = 'axon' replace['apic_'] = 'apic' replace['dend_'] = 'dend' for prefix in replace.keys(): all_match = True for inc in sg.includes:
population_size0 = 10 population_size1 = 10 nml_doc, network = oc.generate_network("SpikingNet") oc.include_opencortex_cell(nml_doc, 'izhikevich/RS.cell.nml') pop_pre = oc.add_population_in_rectangular_region(network, 'pop_pre', 'RS', population_size0, 0,0,0, 100,100,100, color='.8 0 0') import neuroml pop_pre.properties.append(neuroml.Property('radius',10)) pop_post = oc.add_population_in_rectangular_region(network, 'pop_post', 'RS', population_size1, 0,100,0, 100,200,100, color='0 0 .8') pop_post.properties.append(neuroml.Property('radius',10)) syn0 = oc.add_exp_two_syn(nml_doc, id="syn0", gbase="1nS", erev="0mV",
def create_populations(net, cell_types, nrn_runname, randomSeed):
    '''
    Reads original data files (mainly position.dat) and creates population of cells
    :param net: neuroml.Network() - to which the populations will be added
    :param cell_types: list - (just to avoid multiple declaration of cell_types)
    :param nrn_runname: string - name of the directory where the saved data files are stored (celltype.dat, position.dat)
    :param randomSeed: int - seed for random color generation
    :return dCellIDs: dictionary - key: cellID, value: [cell_type, ID in pop_cell_type] (for creating synapses)
    :return dNumCells: dictonary with the number of cells in a given population (for creating output files -> just for "real cells")
    '''

    # read in cell gids:
    fCellIDs = "../../results/%s/celltype.dat" % nrn_runname
    dCellIDs = {}

    # `with` closes the file automatically; no explicit close() needed
    # (the original also shadowed the builtin name `file`).
    with open(fCellIDs) as f:
        next(f)  # skip header: "celltype, techtype, typeIndex, rangeStart, rangeEnd"
        for line in f:
            fields = line.split()  # split once per line instead of per-column
            if fields[0] not in ["ca3cell", "eccell"]:
                cell_type = fields[1][:-4]  # strip "cell" suffix from techtype
            else:
                cell_type = fields[0][:-4]  # strip "cell" suffix from celltype
            rangeStart = int(fields[3])
            rangeEnd = int(fields[4])
            if rangeStart == rangeEnd:
                # single cell of this type
                dCellIDs[rangeStart] = [cell_type, 0]
            elif rangeStart < rangeEnd:
                # contiguous gid range -> index within the population
                for ind, i in enumerate(range(rangeStart, rangeEnd + 1)):
                    dCellIDs[i] = [cell_type, ind]
            else:
                raise AssertionError(
                    "rangeEnd:%g is lower than rangeStart:%g!"
                    % (rangeEnd, rangeStart))

    # read in cell positions
    fPositions = "../../results/%s/position.dat" % nrn_runname
    dCellPops = {}

    with open(fPositions) as f:
        next(f)  # skip header: "cell, x, y, z, host"
        for line in f:
            fields = line.split()
            cellID = int(fields[0])
            if cellID in dCellIDs:
                cell_type = dCellIDs[cellID][0]
                pos = [float(fields[1]), float(fields[2]), float(fields[3])]
                # setdefault replaces the duplicated append in both branches
                dCellPops.setdefault(cell_type, []).append(pos)

    ##### add populations to nml file #####

    dNumCells = {}  # for creating displays and output files (see later in the code)
    j = 0  # for increasing random seed in random colour generation
    # .items() instead of the Python-2-only .iteritems() — identical behavior
    for cell_type, pop_list in dCellPops.items():
        if cell_type in cell_types:
            dNumCells[cell_type] = 0
            component = "%scell" % cell_type
        else:
            component = "spikeGenPoisson"  # TODO: implement other stimulations ...
        popID = "pop_%s" % cell_type
        pop = neuroml.Population(id=popID,
                                 component=component,
                                 type="populationList",
                                 size=len(pop_list))
        pop.properties.append(
            neuroml.Property("color", helper_getnextcolor(randomSeed + j)))
        net.populations.append(pop)
        j += 1

        for i, sublist in enumerate(pop_list):
            x_pos = sublist[0]
            y_pos = sublist[1]
            z_pos = sublist[2]
            inst = neuroml.Instance(id=i)
            pop.instances.append(inst)
            inst.location = neuroml.Location(x=x_pos, y=y_pos, z=z_pos)
            # count only "real cells" (types listed in cell_types)
            if cell_type in cell_types:
                dNumCells[cell_type] += 1

    return dCellIDs, dNumCells
##### Populations pop_iaf = oc.add_population_in_rectangular_region(network, 'pop_iaf', 'iaf', 5, 0, offset, 0, xDim, yDim, zDim, color='.8 0 0') import neuroml pop_iaf.properties.append(neuroml.Property('radius', 5)) offset += yDim pop_rs = oc.add_population_in_rectangular_region(network, 'pop_rs', 'RS', 5, 0, offset, 0, xDim, yDim, zDim, color='0 .8 0') pop_rs.properties.append(neuroml.Property('radius', 5))