Example #1
import numpy as np
from neuron import h
from netpyne import sim


def lp():
    for perc in np.linspace(0.0, 0.8, 9):  # perc = fraction of INa conductance blocked (0% to 80%)
        h.gnablock_ina2005 = perc  # h.perc_ina2005
        sim.runSim()
        sim.gatherData()
        sim.saveData(include=['simData'],
                     filename='data/19dec06blkA_%02d' % (100 * perc))
Example #2
import neuron
from netpyne import sim


def run_sim():
    # Folder that contains the compiled x86_64 mechanisms folder
    NETPYNE_WORKDIR_PATH = "../../../"
    neuron.load_mechanisms(NETPYNE_WORKDIR_PATH)

    netParams = sim.loadNetParams("./netParams.json", None, False)
    simConfig = sim.loadSimCfg("./simConfig.json", None, False)

    sim.createSimulate(netParams, simConfig)
    sim.saveData()
Example #3
    def simulateNetPyNEModelInGeppetto(self, args):
        try:
            with redirect_stdout(sys.__stdout__):
                # TODO: MPI is not finding libmpi.dylib; setting LD_LIBRARY_PATH to the OpenMPI bin folder did not help
                if args['parallelSimulation']:
                    logging.debug('Running parallel simulation')
                    if not args.get('usePrevInst', False):
                        self.netParams.save("netParams.json")
                        self.simConfig.saveJson = True
                        self.simConfig.save("simParams.json")
                        template = os.path.join(os.path.dirname(__file__),
                                                'template.py')
                    else:
                        sim.cfg.saveJson = True
                        oldName = sim.cfg.filename
                        sim.cfg.filename = 'model_output'
                        sim.saveData()
                        sim.cfg.filename = oldName
                        template = os.path.join(os.path.dirname(__file__),
                                                'template2.py')
                    copyfile(template, './init.py')

                    cp = subprocess.run(
                        ["mpiexec", "-n", str(args['cores']), "nrniv",
                         "-mpi", "-python", "init.py"],
                        capture_output=True)
                    print(cp.stdout.decode() + cp.stderr.decode())
                    if cp.returncode != 0:
                        return utils.getJSONError(
                            "Error while simulating the NetPyNE model",
                            cp.stderr.decode())
                    sim.load('model_output.json')
                    self.geppetto_model = self.model_interpreter.getGeppettoModel(
                        sim)
                    netpyne_model = sim

                else:
                    logging.info("Starting simulation")

                    if not args.get('usePrevInst', False):
                        logging.debug('Instantiating single thread simulation')
                        netpyne_model = self.instantiateNetPyNEModel()
                        self.geppetto_model = self.model_interpreter.getGeppettoModel(
                            netpyne_model)

                    logging.debug('Running single thread simulation')
                    netpyne_model = self.simulateNetPyNEModel()

                return json.loads(
                    GeppettoModelSerializer.serialize(self.geppetto_model))
        except Exception:
            return utils.getJSONError(
                "Error while simulating the NetPyNE model", sys.exc_info())
Example #4
import neuron
from netpyne import sim


def run():
    # Folder that contains the compiled x86_64 mechanisms folder
    NETPYNE_WORKDIR_PATH = "../../../"
    neuron.load_mechanisms(NETPYNE_WORKDIR_PATH)

    # read cfg and netParams from command line arguments if available; otherwise use default
    simConfig, netParams = sim.readCmdLineArgs(simConfigDefault="cfg.py",
                                               netParamsDefault="netParams.py")

    # Create network and run simulation
    sim.createSimulate(netParams=netParams, simConfig=simConfig)
    sim.saveData()
Example #5
    def simulateModel(self):
        """

        Simulates model

        Returns
        -------
        SimConfig
            generated SimConfig

        """
        sim.setupRecording()
        sim.simulate()
        sim.saveData()
        return sim
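For context, `simulateModel` only covers the tail of the workflow; it assumes the network has already been instantiated. Below is a minimal sketch of the preceding steps (taken from the step-by-step examples later on, e.g. Example #11), followed by what this method then does; `sim.simulate()` is assumed here to wrap `runSim()` plus `gatherData()`.

from netpyne import sim

cfg, netParams = sim.readCmdLineArgs(simConfigDefault='cfg.py',
                                     netParamsDefault='netParams.py')

sim.initialize(simConfig=cfg, netParams=netParams)  # set cfg and net params
sim.net.createPops()       # instantiate network populations
sim.net.createCells()      # instantiate network cells
sim.net.connectCells()     # create connections
sim.net.addStims()         # add network stimulation

# ...which is the state simulateModel() expects before it calls:
sim.setupRecording()
sim.simulate()             # assumed wrapper for runSim() + gatherData()
sim.saveData()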
Example #6
    def exportModel(self, args):
        try:
            with redirect_stdout(sys.__stdout__):
                if not args['netCells']:
                    sim.initialize(netParams=self.netParams,
                                   simConfig=self.simConfig)
                sim.cfg.filename = args['fileName']
                include = [
                    el for el in specs.SimConfig().saveDataInclude
                    if el in args.keys() and args[el]
                ]
                if args['netCells']: include += ['netPops']
                sim.cfg.saveJson = True
                sim.saveData(include)
                sim.cfg.saveJson = False
            return utils.getJSONReply()
        except Exception:
            return utils.getJSONError(
                "Error while exporting the NetPyNE model", sys.exc_info())
Example #7
    def exportModel(self, args):
        try:
            with redirect_stdout(sys.__stdout__):
                if not args['netCells']:
                    sim.initialize(netParams=self.netParams,
                                   simConfig=self.simConfig)
                sim.cfg.filename = args['fileName']
                include = [
                    el for el in specs.SimConfig().saveDataInclude
                    if el in args.keys() and args[el]
                ]
                if args['netCells']: include += ['netPops']
                sim.cfg.saveJson = True
                sim.saveData(include)
                sim.cfg.saveJson = False

                with open(f"{sim.cfg.filename}_data.json") as json_file:
                    data = json.load(json_file)
                    return data

        except Exception:
            message = "Error while exporting the NetPyNE model"
            logging.exception(message)
            return utils.getJSONError(message, sys.exc_info())
Example #8
"""
Usage:  python init.py  # Run simulation, optionally plot a raster

MPI usage:  mpiexec -n 4 nrniv -python -mpi init.py
"""

from netpyne import sim

cfg, netParams = sim.readCmdLineArgs(simConfigDefault='cfg.py',
                                     netParamsDefault='netParams.py')

# sim.createSimulateAnalyze(netParams, cfg)
sim.initialize(
    simConfig=cfg,
    netParams=netParams)  # create network object and set cfg and net params
sim.net.createPops()  # instantiate network populations
sim.net.createCells()  # instantiate network cells based on defined populations
sim.net.connectCells()  # create connections between cells based on params
sim.net.addStims()  # add network stimulation
sim.setupRecording()  # setup variables to record for each cell (spikes, V traces, etc)
sim.runSim()  # run parallel Neuron simulation
sim.gatherData()  # gather spiking data and cell info from each node

# distributed saving (to avoid errors with large output data)
#sim.saveDataInNodes()
#sim.gatherDataFromFiles(saveMerged=True)

sim.saveData()  # save params, cell info and sim output to file
sim.analysis.plotData()
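The two commented-out lines above point at the distributed-saving variant. Below is a minimal sketch of that path, assuming the same cfg/netParams files and that `sim.saveDataInNodes()` writes per-node files which `sim.gatherDataFromFiles()` (shown in full in Example #13) later merges.

from netpyne import sim

cfg, netParams = sim.readCmdLineArgs(simConfigDefault='cfg.py',
                                     netParamsDefault='netParams.py')

sim.initialize(simConfig=cfg, netParams=netParams)
sim.net.createPops()
sim.net.createCells()
sim.net.connectCells()
sim.net.addStims()
sim.setupRecording()
sim.runSim()

# each rank writes its own data file instead of gathering everything on rank 0 ...
sim.saveDataInNodes()
# ... and rank 0 then merges the per-node files, optionally saving a single merged file
sim.gatherDataFromFiles(saveMerged=True)

sim.analysis.plotData()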
Example #9
    def simulateNetPyNEModel(self):
        with redirect_stdout(sys.__stdout__):
            sim.setupRecording()
            sim.simulate()
            sim.saveData()
        return sim
Example #10
#import matplotlib; matplotlib.use('Agg')  # to avoid graphics error in servers
from netpyne import sim

cfg, netParams = sim.readCmdLineArgs()

sim.initialize(
    simConfig=cfg,
    netParams=netParams)  # create network object and set cfg and net params
sim.net.createPops()  # instantiate network populations
sim.net.createCells()  # instantiate network cells based on defined populations
sim.net.connectCells()  # create connections between cells based on params
sim.net.addStims()  # add network stimulation
sim.setupRecording()  # setup variables to record (spikes, V traces, etc)
# sim.runSim()                  # run parallel Neuron simulation
# sim.gatherData()              # gather spiking data and cell info from each node
sim.saveData()  # save params, cell info and sim output to file
#sim.analysis.plotData()       # plot spike raster etc

# connDict = {}

# for pop in sim.net.pops.keys():

#     print("Population: ", pop)
#     connDict[pop] = {}

#     popGids = sim.net.pops[pop].cellGids

#     for cellGid in popGids:

#         cell = sim.net.cells[cellGid]
#         allConns = cell.conns
Example #11
# create network object and set cfg and net params	
sim.initialize(simConfig = cfg, netParams = netParams)

# instantiate network populations 
sim.net.createPops()

# instantiate network cells based on defined populations
sim.net.createCells()

# create connections between cells based on params
sim.net.connectCells()

# add network stimulation
sim.net.addStims()

# setup variables to record for each cell (spikes, V traces, etc)								
sim.setupRecording()

# run parallel Neuron simulation 
sim.runSim()

# gather spiking data and cell info from each node
sim.gatherData()

# save params, cell info and sim output to file (pickle,mat,txt,etc)
sim.saveData()

# plot spike raster
sim.analysis.plotData()

Example #12
# the counters i, j, k and the firing-rate arrays frinh, frl23, frl5 are assumed
# to be defined earlier in the original script
for x in range(len(sim.net.cells)):
    if 'inInh' in sim.net.cells[x].tags['pop']:
        sim.net.cells[x].params['interval'] = 1000 / frinh[i]
        i += 1
    if 'inL23exc' in sim.net.cells[x].tags['pop']:
        sim.net.cells[x].params['interval'] = 1000 / frl23[j]
        j += 1
    if 'inL5exc' in sim.net.cells[x].tags['pop']:
        sim.net.cells[x].params['interval'] = 1000 / frl5[k]
        k += 1
# %%
sim.setupRecording()  # setup variables to record for each cell (spikes, V traces, etc)
sim.runSim()  # run parallel Neuron simulation
sim.gatherData()  # gather spiking data and cell info from each node
sim.saveData()  # save params, cell info and sim output to file (pickle, mat, txt, etc)
sim.analysis.plotData()  # plot spike raster
#%% Save raster for each population
sim.analysis.plotRaster(include=[('L23exc'), ('inL23exc')],
                        spikeHist='overlay',
                        spikeHistBin=10,
                        saveData='raster_TMSall_single_L23.pkl')
sim.analysis.plotRaster(include=[('L5exc'), ('inL5exc')],
                        spikeHist='overlay',
                        spikeHistBin=10,
                        saveData='raster_TMSall_single_L5.pkl')
sim.analysis.plotRaster(include=[('Inh'), ('inInh')],
                        spikeHist='overlay',
                        spikeHistBin=10,
                        saveData='raster_TMSall_single_Inh.pkl')
#sim.analysis.plotTraces(include = [('L23exc'),('L5exc'),('Inh')],saveData = 'gsyn_TMS.pkl')
Example #13
def gatherDataFromFiles(gatherLFP=True,
                        saveFolder=None,
                        simLabel=None,
                        sim=None,
                        fileType='pkl',
                        saveMerged=False):
    """
    Function to gather data from multiple files (from distributed or interval saving)

    Parameters
    ----------
    gatherLFP : bool
        Whether or not to gather LFP data.
        **Default:** ``True`` gathers LFP data if available.
        **Options:** ``False`` does not gather LFP data.

    saveFolder : str
        Name of the directory where data files are located.
        **Default:** ``None`` attempts to auto-locate the data directory.

    """

    import os

    if not sim:
        from netpyne import sim

    if getattr(sim, 'rank', None) is None:
        sim.initialize()
    sim.timing('start', 'gatherTime')

    if sim.rank == 0:

        fileType = fileType.lower()
        if fileType not in ['pkl', 'json']:
            print(
                f"Could not gather data from '.{fileType}' files. Only .pkl and .json are supported so far."
            )
            return False

        if not simLabel:
            simLabel = sim.cfg.simLabel

        if not saveFolder:
            saveFolder = sim.cfg.saveFolder

        nodeDataDir = os.path.join(saveFolder, simLabel + '_node_data')

        print(f"\nSearching for .{fileType} node files in {nodeDataDir} ...")

        simLabels = [
            f.replace(f'_node_0.{fileType}', '')
            for f in os.listdir(nodeDataDir)
            if f.endswith(f'_node_0.{fileType}')
        ]

        if len(simLabels) == 0:
            print(f"Could not gather data from files. No node files found.")
            return False

        mergedFiles = []
        for simLabel in simLabels:

            allSimData = Dict()
            allCells = []
            allPops = ODict()

            print('\nGathering data from files for simulation: %s ...' %
                  (simLabel))

            simDataVecs = ['spkt', 'spkid', 'stims'] + list(
                sim.cfg.recordTraces.keys())
            singleNodeVecs = ['t']

            if sim.cfg.recordDipolesHNN:
                _aggregateDipoles()
                simDataVecs.append('dipole')

            fileData = {'simData': sim.simData}
            fileList = sorted([
                f for f in os.listdir(nodeDataDir)
                if (f.startswith(simLabel +
                                 '_node') and f.endswith(f'.{fileType}'))
            ])

            for ifile, file in enumerate(fileList):

                print('  Merging data file: %s' % (file))

                with open(os.path.join(nodeDataDir, file), 'rb') as openFile:
                    if fileType == 'pkl':
                        data = pickle.load(openFile)
                    elif fileType == 'json':
                        import json
                        data = json.load(openFile)

                    if 'cells' in data.keys():
                        if fileType == 'pkl':
                            allCells.extend([
                                cell.__getstate__() for cell in data['cells']
                            ])
                        else:
                            allCells.extend(data['cells'])

                    if 'pops' in data.keys():
                        loadedPops = data['pops']
                        if fileType == 'pkl':
                            for popLabel, pop in loadedPops.items():
                                allPops[popLabel] = pop['tags']
                        elif fileType == 'json':
                            # if populations order is not preserved (which is inherently the case for JSON), need to sort them again
                            loadedPops = list(loadedPops.items())

                            def sort(popKeyAndValue):
                                # the assumption while sorting is that populations order corresponds to cell gids in this population
                                cellGids = popKeyAndValue[1]['cellGids']
                                if len(cellGids) > 0:
                                    return cellGids[0]
                                else:
                                    return -1

                            loadedPops.sort(key=sort)

                            for popLabel, pop in loadedPops:
                                allPops[popLabel] = pop['tags']

                    if 'simConfig' in data.keys():
                        setup.setSimCfg(data['simConfig'])
                    if 'net' in data and gatherLFP:
                        if 'recXElectrode' in data['net']:
                            xElectrode = data['net']['recXElectrode']
                            if not isinstance(xElectrode, RecXElectrode):
                                xElectrode = RecXElectrode.fromJSON(xElectrode)
                            sim.net.recXElectrode = xElectrode

                    nodePopsCellGids = {
                        popLabel: list(pop['cellGids'])
                        for popLabel, pop in data['pops'].items()
                    }

                    if ifile == 0 and gatherLFP and 'LFP' in data['simData']:
                        lfpData = data['simData']['LFP']
                        if not isinstance(lfpData, np.ndarray):
                            lfpData = np.array(lfpData)
                            data['simData']['LFP'] = lfpData

                        allSimData['LFP'] = np.zeros(lfpData.shape)
                        if 'LFPPops' in data['simData']:
                            allSimData['LFPPops'] = {
                                p: np.zeros(lfpData.shape)
                                for p in data['simData']['LFPPops'].keys()
                            }

                    for key, value in data['simData'].items():

                        if key in simDataVecs:

                            if isinstance(value, dict):
                                for key2, value2 in value.items():
                                    if isinstance(value2, dict):
                                        allSimData[key].update(
                                            Dict({key2: Dict()}))
                                        for stim, value3 in value2.items():
                                            allSimData[key][key2].update(
                                                {stim: list(value3)})
                                    elif key == 'dipole':
                                        allSimData[key][key2] = np.add(
                                            allSimData[key][key2],
                                            value2.as_numpy())
                                    else:
                                        allSimData[key].update(
                                            {key2: list(value2)})
                            else:
                                allSimData[key] = list(
                                    allSimData[key]) + list(value)

                        elif gatherLFP and key == 'LFP':
                            allSimData['LFP'] += np.array(value)

                        elif gatherLFP and key == 'LFPPops':
                            for p in value:
                                allSimData['LFPPops'][p] += np.array(value[p])

                        elif key == 'dipoleSum':
                            if key not in allSimData.keys():
                                allSimData[key] = value
                            else:
                                allSimData[key] += value

                        elif key not in singleNodeVecs:
                            allSimData[key].update(value)

                    if file == fileList[0]:
                        for key in singleNodeVecs:
                            allSimData[key] = list(fileData['simData'][key])
                        allPopsCellGids = {
                            popLabel: []
                            for popLabel in nodePopsCellGids
                        }
                    else:
                        for popLabel, popCellGids in nodePopsCellGids.items():
                            allPopsCellGids[popLabel].extend(popCellGids)

                    mergedFiles.append(file)

            if len(allSimData['spkt']) > 0:
                allSimData['spkt'], allSimData['spkid'] = zip(
                    *sorted(zip(allSimData['spkt'], allSimData['spkid'])))
                allSimData['spkt'], allSimData['spkid'] = list(
                    allSimData['spkt']), list(allSimData['spkid'])

            sim.allSimData = allSimData
            sim.net.allCells = sorted(allCells, key=lambda k: k['gid'])
            for popLabel, pop in allPops.items():
                pop['cellGids'] = sorted(allPopsCellGids[popLabel])
            sim.net.allPops = allPops

    ## Print statistics
    sim.pc.barrier()
    if sim.rank != 0:
        sim.pc.barrier()
    else:
        sim.timing('stop', 'gatherTime')
        if sim.cfg.timing:
            print(('  Done; gather time = %0.2f s.' %
                   sim.timingData['gatherTime']))

        if saveMerged:
            print('\nSaving merged data into single file ...')
            saved = sim.saveData()

            if len(saved) > 0:
                # if single file saved successfully, clean up node data
                for file in mergedFiles:
                    path = os.path.join(nodeDataDir, file)
                    os.remove(path)

        print('\nAnalyzing...')

        sim.totalSpikes = len(sim.allSimData['spkt'])
        sim.totalSynapses = sum(
            [len(cell['conns']) for cell in sim.net.allCells])
        if sim.cfg.createPyStruct:
            if sim.cfg.compactConnFormat:
                preGidIndex = sim.cfg.compactConnFormat.index(
                    'preGid') if 'preGid' in sim.cfg.compactConnFormat else 0
                sim.totalConnections = sum([
                    len(set([conn[preGidIndex] for conn in cell['conns']]))
                    for cell in sim.net.allCells
                ])
            else:
                sim.totalConnections = sum([
                    len(set([conn['preGid'] for conn in cell['conns']]))
                    for cell in sim.net.allCells
                ])
        else:
            sim.totalConnections = sim.totalSynapses
        sim.numCells = len(sim.net.allCells)

        if sim.totalSpikes > 0:
            sim.firingRate = float(
                sim.totalSpikes) / sim.numCells / sim.cfg.duration * 1e3
        else:
            sim.firingRate = 0
        if sim.numCells > 0:
            sim.connsPerCell = sim.totalConnections / float(sim.numCells)
            sim.synsPerCell = sim.totalSynapses / float(sim.numCells)
        else:
            sim.connsPerCell = 0
            sim.synsPerCell = 0

        print(('  Cells: %i' % (sim.numCells)))
        print(('  Connections: %i (%0.2f per cell)' %
               (sim.totalConnections, sim.connsPerCell)))
        if sim.totalSynapses != sim.totalConnections:
            print(('  Synaptic contacts: %i (%0.2f per cell)' %
                   (sim.totalSynapses, sim.synsPerCell)))
        print(('  Spikes: %i (%0.2f Hz)' % (sim.totalSpikes, sim.firingRate)))

        if 'runTime' in sim.timingData:
            print(('  Simulated time: %0.1f s; %i workers' %
                   (sim.cfg.duration / 1e3, sim.nhosts)))
            print(('  Run time: %0.2f s' % (sim.timingData['runTime'])))

            if sim.cfg.printPopAvgRates and not sim.cfg.gatherOnlySimData:
                trange = sim.cfg.printPopAvgRates if isinstance(
                    sim.cfg.printPopAvgRates, list) else None
                sim.allSimData['popRates'] = sim.analysis.popAvgRates(
                    tranges=trange)

            if 'plotfI' in sim.cfg.analysis:
                sim.analysis.calculatefI()

            sim.allSimData['avgRate'] = sim.firingRate
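A minimal usage sketch for the function above, assuming a previous MPI run saved per-node files with `sim.saveDataInNodes()` (as hinted in Example #8) under `<saveFolder>/<simLabel>_node_data/`; the folder and label default to the values in `sim.cfg` when left as `None`.

from netpyne import sim

# merge per-node .pkl files and write one combined output file
result = sim.gatherDataFromFiles(gatherLFP=True, fileType='pkl', saveMerged=True)

if result is not False:  # the function returns False if no node files were found
    print('Gathered %d spikes from %d cells' %
          (len(sim.allSimData['spkt']), len(sim.net.allCells)))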
Example #14
    del sim.net.pops['external_virtual_100']
    # remove axon
    for k in sim.net.params.cellParams:
        try:
            del sim.net.params.cellParams[k]['secs']['axon_0']
            del sim.net.params.cellParams[k]['secs']['axon_1']
            for c in sim.net.cells:
                del c.secs['axon_0']
                del c.secs['axon_1']
        except:
            pass
    # remove conns
    for c in sim.net.cells:
        c.conns = []
    # save
    sim.saveData(filename='sonata_300cells')


# save json with psection
if saveJsonPsection:
    import json
    data = {}
    remove = ['cell', 'regions', 'species', 'point_processes', 'hoc_internal_name', 'name']  # , 'morphology']
    removeMorph = ['parent', 'trueparent']
    for icell, c in enumerate(sim.net.cells):
        try:
            data[icell] = {}
            for isec, sec in enumerate(c.secs.values()):
                name = str(sec['hObj'].name()).split('.')[-1]
                data[icell][name] = sec['hObj'].psection()
                for x in remove:
Example #15
    def _prepare_simulation_files(self,
                                  experiment: model.Experiment = None,
                                  use_prev_inst: bool = False) -> str:
        """Prepares template files and netpyne model files for a single simulation """
        exp = copy.deepcopy(experiment)
        # Remove parameter & trials for single run
        exp.params = []
        exp.trials = []

        save_folder_path = os.path.join(constants.NETPYNE_WORKDIR_PATH,
                                        constants.EXPERIMENTS_FOLDER, exp.name)
        try:
            os.makedirs(save_folder_path)
        except OSError:
            raise

        if use_prev_inst:
            sim.cfg.saveJson = True
            oldName = sim.cfg.filename
            sim.cfg.filename = constants.MODEL_OUTPUT_FILENAME

            # workaround for issue with empty LFP dict when calling saveData()
            if 'LFP' in sim.allSimData:
                del sim.allSimData['LFP']

            # TODO: store in experiments folder!
            sim.saveData()
            sim.cfg.filename = oldName
            template_name = constants.TEMPLATE_FILENAME_SINGLE_RUN_INSTANTIATED
        else:
            # Create netParams and SimConfig
            self.netParams.save(
                os.path.join(save_folder_path, experiments.NET_PARAMS_FILE))

            simCfg = copy.copy(self.simConfig)
            # filename and simLabel must be set to define the output filename
            simCfg.saveJson = True
            simCfg.filename = 'model_output'
            simCfg.simLabel = 'model_output'
            simCfg.saveDataInclude = [
                "simData", "simConfig", "netParams", "net"
            ]
            simCfg.save(
                os.path.join(save_folder_path, experiments.SIM_CONFIG_FILE))

            template_name = constants.TEMPLATE_FILENAME_SINGLE_RUN

        # Create Experiment Config
        config_dict = dataclasses.asdict(exp)
        config_dict["runCfg"] = dataclasses.asdict(self.run_config)
        experiment_config = os.path.join(save_folder_path,
                                         experiments.EXPERIMENT_FILE)
        json.dump(config_dict,
                  open(experiment_config, 'w'),
                  default=str,
                  sort_keys=True,
                  indent=4)

        # Copy Template
        template = os.path.join(os.path.dirname(__file__), "templates",
                                template_name)
        copyfile(
            template,
            os.path.join(save_folder_path, constants.SIMULATION_SCRIPT_NAME))

        return save_folder_path
Example #16
def generate_and_run(simulation,
                     simulator,
                     network=None,
                     return_results=False,
                     base_dir=None,
                     target_dir=None,
                     num_processors=1):
    """
    Generates the network in the specified simulator and runs, if appropriate
    """

    if network is None:
        network = load_network_json(simulation.network)

    print_v("Generating network %s and running in simulator: %s..." %
            (network.id, simulator))

    if simulator == 'NEURON':

        _generate_neuron_files_from_neuroml(network,
                                            dir_for_mod_files=target_dir)

        from neuromllite.NeuronHandler import NeuronHandler

        nrn_handler = NeuronHandler()

        for c in network.cells:
            if c.neuroml2_source_file:
                src_dir = os.path.dirname(
                    os.path.abspath(c.neuroml2_source_file))
                nrn_handler.executeHoc('load_file("%s/%s.hoc")' %
                                       (src_dir, c.id))

        generate_network(network, nrn_handler, generate_network, base_dir)
        if return_results:
            raise NotImplementedError(
                "Reloading results not supported in Neuron yet...")

    elif simulator.lower() == 'sonata':  # Will not "run" obviously...

        from neuromllite.SonataHandler import SonataHandler

        sonata_handler = SonataHandler()

        generate_network(network,
                         sonata_handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with Sonata...")

    elif simulator.lower().startswith('graph'):  # Will not "run" obviously...

        from neuromllite.GraphVizHandler import GraphVizHandler, engines

        try:
            if simulator[-1].isalpha():
                engine = engines[simulator[-1]]
                level = int(simulator[5:-1])
            else:
                engine = 'dot'
                level = int(simulator[5:])

        except Exception as e:
            print_v("Error parsing: %s: %s" % (simulator, e))
            print_v(
                "Graphs of the network structure can be generated at many levels of detail (1-6, required) and laid out using GraphViz engines (d - dot (default); c - circo; n - neato; f - fdp), so use: -graph3c, -graph2, -graph4f etc."
            )
            return

        handler = GraphVizHandler(level, engine=engine, nl_network=network)

        generate_network(network,
                         handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with GraphViz...")

    elif simulator.lower().startswith('matrix'):  # Will not "run" obviously...

        from neuromllite.MatrixHandler import MatrixHandler

        try:
            level = int(simulator[6:])
        except:
            print_v("Error parsing: %s" % simulator)
            print_v(
                "Matrices of the network structure can be generated at many levels of detail (1-n, required), so use: -matrix1, -matrix2, etc."
            )
            return

        handler = MatrixHandler(level, nl_network=network)

        generate_network(network,
                         handler,
                         always_include_props=True,
                         base_dir=base_dir)

        print_v("Done with MatrixHandler...")

    elif simulator.startswith('PyNN'):

        #_generate_neuron_files_from_neuroml(network)
        simulator_name = simulator.split('_')[1].lower()

        from neuromllite.PyNNHandler import PyNNHandler

        pynn_handler = PyNNHandler(simulator_name, simulation.dt, network.id)

        syn_cell_params = {}
        for proj in network.projections:

            synapse = network.get_child(proj.synapse, 'synapses')
            post_pop = network.get_child(proj.postsynaptic, 'populations')

            if not post_pop.component in syn_cell_params:
                syn_cell_params[post_pop.component] = {}
            for p in synapse.parameters:
                post = ''
                if synapse.pynn_receptor_type == "excitatory":
                    post = '_E'
                elif synapse.pynn_receptor_type == "inhibitory":
                    post = '_I'
                syn_cell_params[post_pop.component][
                    '%s%s' % (p, post)] = synapse.parameters[p]

        cells = {}
        for c in network.cells:
            if c.pynn_cell:
                cell_params = {}
                if c.parameters:
                    for p in c.parameters:
                        cell_params[p] = evaluate(c.parameters[p],
                                                  network.parameters)

                dont_set_here = [
                    'tau_syn_E', 'e_rev_E', 'tau_syn_I', 'e_rev_I'
                ]
                for d in dont_set_here:
                    if d in c.parameters:
                        raise Exception(
                            'Synaptic parameters like %s should be set '
                            'in individual synapses, not in the list of '
                            'parameters associated with the cell' % d)
                if c.id in syn_cell_params:
                    cell_params.update(syn_cell_params[c.id])
                print_v("Creating cell with params: %s" % cell_params)
                exec('cells["%s"] = pynn_handler.sim.%s(**cell_params)' %
                     (c.id, c.pynn_cell))

                if c.pynn_cell != 'SpikeSourcePoisson':
                    exec(
                        "cells['%s'].default_initial_values['v'] = cells['%s'].parameter_space['v_rest'].base_value"
                        % (c.id, c.id))

        pynn_handler.set_cells(cells)

        receptor_types = {}
        for s in network.synapses:
            if s.pynn_receptor_type:
                receptor_types[s.id] = s.pynn_receptor_type

        pynn_handler.set_receptor_types(receptor_types)

        for input_source in network.input_sources:
            if input_source.pynn_input:
                pynn_handler.add_input_source(input_source)

        generate_network(network,
                         pynn_handler,
                         always_include_props=True,
                         base_dir=base_dir)

        for pid in pynn_handler.populations:
            pop = pynn_handler.populations[pid]
            if 'all' in simulation.recordTraces or pop.label in simulation.recordTraces:
                if pop.can_record('v'):
                    pop.record('v')

        pynn_handler.sim.run(simulation.duration)
        pynn_handler.sim.end()

        traces = {}
        events = {}

        if 'NeuroML' not in simulator:
            from neo.io import PyNNTextIO

            for pid in pynn_handler.populations:
                pop = pynn_handler.populations[pid]

                if 'all' in simulation.recordTraces or pop.label in simulation.recordTraces:

                    filename = "%s.%s.v.dat" % (simulation.id, pop.label)
                    all_columns = []
                    print_v("Writing data for %s to %s" %
                            (pop.label, filename))
                    for i in range(len(pop)):
                        if pop.can_record('v'):
                            ref = '%s[%i]' % (pop.label, i)
                            traces[ref] = []
                            data = pop.get_data('v', gather=False)
                            for segment in data.segments:
                                vm = segment.analogsignals[0].transpose()[i]

                                if len(all_columns) == 0:
                                    tt = np.array([
                                        t * simulation.dt / 1000.
                                        for t in range(len(vm))
                                    ])
                                    all_columns.append(tt)
                                vm_si = [float(v / 1000.) for v in vm]
                                traces[ref] = vm_si
                                all_columns.append(vm_si)

                            times_vm = np.array(all_columns).transpose()

                    np.savetxt(filename, times_vm, delimiter='\t', fmt='%s')

        if return_results:
            _print_result_info(traces, events)
            return traces, events

    elif simulator == 'NetPyNE':

        if target_dir is None:
            target_dir = './'

        _generate_neuron_files_from_neuroml(network,
                                            dir_for_mod_files=target_dir)

        from netpyne import specs
        from netpyne import sim
        # Note NetPyNE from this branch is required: https://github.com/Neurosim-lab/netpyne/tree/neuroml_updates
        from netpyne.conversion.neuromlFormat import NetPyNEBuilder

        import pprint
        pp = pprint.PrettyPrinter(depth=6)

        netParams = specs.NetParams()
        simConfig = specs.SimConfig()
        netpyne_handler = NetPyNEBuilder(netParams,
                                         simConfig=simConfig,
                                         verbose=True)

        generate_network(network, netpyne_handler, base_dir=base_dir)

        netpyne_handler.finalise()

        simConfig = specs.SimConfig()
        simConfig.tstop = simulation.duration
        simConfig.duration = simulation.duration
        simConfig.dt = simulation.dt
        simConfig.seed = simulation.seed
        simConfig.recordStep = simulation.dt

        simConfig.recordCells = ['all']
        simConfig.recordTraces = {}

        for pop in netpyne_handler.popParams.values():
            if 'all' in simulation.recordTraces or pop.id in simulation.recordTraces:
                for i in pop['cellsList']:
                    id = pop['pop']
                    index = i['cellLabel']
                    simConfig.recordTraces['v_%s_%s' % (id, index)] = {
                        'sec': 'soma',
                        'loc': 0.5,
                        'var': 'v',
                        'conds': {
                            'pop': id,
                            'cellLabel': index
                        }
                    }

        simConfig.saveDat = True

        print_v("NetPyNE netParams: ")
        pp.pprint(netParams.todict())
        #print_v("NetPyNE simConfig: ")
        #pp.pprint(simConfig.todict())

        sim.initialize(
            netParams,
            simConfig)  # create network object and set cfg and net params

        sim.net.createPops()
        cells = sim.net.createCells()  # instantiate network cells based on defined populations

        for proj_id in netpyne_handler.projection_infos.keys():
            projName, prePop, postPop, synapse, ptype = netpyne_handler.projection_infos[
                proj_id]
            print_v("Creating connections for %s (%s): %s->%s via %s" %
                    (projName, ptype, prePop, postPop, synapse))

            preComp = netpyne_handler.pop_ids_vs_components[prePop]

            for conn in netpyne_handler.connections[projName]:

                pre_id, pre_seg, pre_fract, post_id, post_seg, post_fract, delay, weight = conn

                #connParam = {'delay':delay,'weight':weight,'synsPerConn':1, 'sec':post_seg, 'loc':post_fract, 'threshold':threshold}
                connParam = {
                    'delay': delay,
                    'weight': weight,
                    'synsPerConn': 1,
                    'sec': post_seg,
                    'loc': post_fract
                }

                if ptype == 'electricalProjection':

                    if weight != 1:
                        raise Exception(
                            'Cannot yet support inputs where weight !=1!')
                    connParam = {
                        'synsPerConn': 1,
                        'sec': post_seg,
                        'loc': post_fract,
                        'gapJunction': True,
                        'weight': weight
                    }
                else:
                    connParam = {
                        'delay': delay,
                        'weight': weight,
                        'synsPerConn': 1,
                        'sec': post_seg,
                        'loc': post_fract
                    }
                    #'threshold': threshold}

                connParam['synMech'] = synapse

                if post_id in sim.net.gid2lid:  # check if postsyn is in this node's list of gids
                    sim.net._addCellConn(connParam, pre_id, post_id)

        stims = sim.net.addStims()  # add external stimulation to cells (IClamps etc)
        simData = sim.setupRecording()  # setup variables to record for each cell (spikes, V traces, etc)
        sim.runSim()  # run parallel Neuron simulation
        sim.gatherData()  # gather spiking data and cell info from each node
        sim.saveData()  # save params, cell info and sim output to file (pickle, mat, txt, etc)

        if return_results:
            raise NotImplementedError(
                "Reloading results not supported in NetPyNE yet...")

    elif simulator in ('jNeuroML', 'jNeuroML_NEURON', 'jNeuroML_NetPyNE'):

        from pyneuroml.lems import generate_lems_file_for_neuroml
        from pyneuroml import pynml

        lems_file_name = 'LEMS_%s.xml' % simulation.id

        nml_file_name, nml_doc = generate_neuroml2_from_network(
            network, base_dir=base_dir, target_dir=target_dir)
        included_files = ['PyNN.xml']

        for c in network.cells:
            if c.lems_source_file:
                included_files.append(c.lems_source_file)
        '''
        if network.cells:
            for c in network.cells:
                included_files.append(c.neuroml2_source_file)
        '''
        if network.synapses:
            for s in network.synapses:
                if s.lems_source_file:
                    included_files.append(s.lems_source_file)

        print_v("Generating LEMS file prior to running in %s" % simulator)

        pops_plot_save = []
        pops_spike_save = []
        gen_plots_for_quantities = {}
        gen_saves_for_quantities = {}

        for p in network.populations:

            if simulation.recordTraces and ('all' in simulation.recordTraces or
                                            p.id in simulation.recordTraces):
                pops_plot_save.append(p.id)

            if simulation.recordSpikes and ('all' in simulation.recordSpikes or
                                            p.id in simulation.recordSpikes):
                pops_spike_save.append(p.id)

            if simulation.recordRates and ('all' in simulation.recordRates
                                           or p.id in simulation.recordRates):
                size = evaluate(p.size, network.parameters)
                for i in range(size):
                    quantity = '%s/%i/%s/r' % (p.id, i, p.component)
                    gen_plots_for_quantities['%s_%i_r' % (p.id, i)] = [quantity]
                    gen_saves_for_quantities['%s_%i.r.dat' % (p.id, i)] = [quantity]

            if simulation.recordVariables:
                for var in simulation.recordVariables:
                    to_rec = simulation.recordVariables[var]
                    if ('all' in to_rec or p.id in to_rec):
                        size = evaluate(p.size, network.parameters)
                        for i in range(size):
                            quantity = '%s/%i/%s/%s' % (p.id, i, p.component, var)
                            gen_plots_for_quantities['%s_%i_%s' % (p.id, i, var)] = [quantity]
                            gen_saves_for_quantities['%s_%i.%s.dat' % (p.id, i, var)] = [quantity]

        generate_lems_file_for_neuroml(
            simulation.id,
            nml_file_name,
            network.id,
            simulation.duration,
            simulation.dt,
            lems_file_name,
            target_dir=target_dir if target_dir else '.',
            nml_doc=nml_doc,  # use this if the nml doc has already been loaded (to avoid delay in reload)
            include_extra_files=included_files,
            gen_plots_for_all_v=False,
            plot_all_segments=False,
            gen_plots_for_quantities=gen_plots_for_quantities,  # dict with displays vs lists of quantity paths
            gen_plots_for_only_populations=pops_plot_save,  # list of populations, all pops if = []
            gen_saves_for_all_v=False,
            save_all_segments=False,
            gen_saves_for_only_populations=pops_plot_save,  # list of populations, all pops if = []
            gen_saves_for_quantities=gen_saves_for_quantities,  # dict with file names vs lists of quantity paths
            gen_spike_saves_for_all_somas=False,
            gen_spike_saves_for_only_populations=pops_spike_save,  # list of populations, all pops if = []
            gen_spike_saves_for_cells={},  # dict with file names vs lists of quantity paths
            spike_time_format='ID_TIME',
            copy_neuroml=True,
            lems_file_generate_seed=12345,
            report_file_name='report.%s.txt' % simulation.id,
            simulation_seed=simulation.seed if simulation.seed else 12345,
            verbose=True)

        lems_file_name = _locate_file(lems_file_name, target_dir)

        if simulator == 'jNeuroML':
            results = pynml.run_lems_with_jneuroml(
                lems_file_name,
                nogui=True,
                load_saved_data=return_results,
                reload_events=return_results)
        elif simulator == 'jNeuroML_NEURON':
            results = pynml.run_lems_with_jneuroml_neuron(
                lems_file_name,
                nogui=True,
                load_saved_data=return_results,
                reload_events=return_results)
        elif simulator == 'jNeuroML_NetPyNE':
            results = pynml.run_lems_with_jneuroml_netpyne(
                lems_file_name,
                nogui=True,
                verbose=True,
                load_saved_data=return_results,
                reload_events=return_results,
                num_processors=num_processors)

        print_v("Finished running LEMS file %s in %s (returning results: %s)" %
                (lems_file_name, simulator, return_results))

        if return_results:
            traces, events = results
            _print_result_info(traces, events)
            return results  # traces, events =
Example #17
###############################################################################
# EXECUTION CODE (via netpyne)
###############################################################################
from netpyne import sim

# Create network and run simulation
sim.initialize(                       # create network object and set cfg and net params
    simConfig = simConfig,   # pass simulation config and network params as arguments
    netParams = netParams)   
sim.net.createPops()                      # instantiate network populations
sim.net.createCells()                     # instantiate network cells based on defined populations
sim.net.connectCells()                    # create connections between cells based on params
sim.setupRecording()                  # setup variables to record for each cell (spikes, V traces, etc)
sim.runSim()                          # run parallel Neuron simulation  
sim.gatherData()                      # gather spiking data and cell info from each node
sim.saveData()                        # save params, cell info and sim output to file (pickle,mat,txt,etc)
sim.analysis.plotData()                   # plot spike raster


# ###############################################################################
# # INTERACTING WITH INSTANTIATED NETWORK
# ###############################################################################

# modify conn weights
sim.net.modifyConns({'conds': {'label': 'hop->hop'}, 'weight': 0.5})

sim.runSim()                          # run parallel Neuron simulation  
sim.gatherData()                      # gather spiking data and cell info from each node
sim.saveData()                        # save params, cell info and sim output to file (pickle,mat,txt,etc)
sim.analysis.plotData()                   # plot spike raster
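Note that both `sim.saveData()` calls in this example write to the same output filename taken from the sim config, so the second run can overwrite the first. A minimal sketch of one way to keep both outputs, assuming `sim.cfg.filename` controls the output name as in Examples #3 and #15:

# rerun with modified connections, saving under a different name
sim.net.modifyConns({'conds': {'label': 'hop->hop'}, 'weight': 0.5})

sim.runSim()
sim.gatherData()
sim.cfg.filename = sim.cfg.filename + '_modified'  # avoid overwriting the first run's output
sim.saveData()
sim.analysis.plotData()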
Example #18
"""
Example of saving different network components to file
"""

from netpyne import sim

from netParams import netParams
from cfg import cfg

sim.createSimulateAnalyze(netParams, cfg)

# Saving different network components to file
sim.cfg.saveJson = True

# save network params (rules)
sim.saveData(include=['netParams'], filename='out_netParams')

# save network instance
sim.saveData(include=['net'], filename='out_netInstance')

# save network params and instance together
sim.saveData(include=['netParams', 'net'],
             filename='out_netParams_netInstance')

# save sim config
sim.saveData(include=['simConfig'], filename='out_simConfig')

# save sim output data
sim.saveData(include=['simData'], filename='out_simData')

# save network instance with compact conn format (list instead of dict)
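The snippet is cut off after this final comment. For completeness, here is a minimal sketch of saving several components into one file, reusing only `include` keys that already appear in the examples above (the output filename is illustrative):

# save simulation output together with the parameters that produced it
sim.saveData(include=['simConfig', 'netParams', 'net', 'simData'],
             filename='out_all')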
Example #19
    def _run_in_same_process(self):
        logging.debug('Running single core simulation')

        sim.setupRecording()
        sim.simulate()
        sim.saveData()