def _run_microcircuit(plot_filename, conf):
    import plotting
    import helper_functions
    import logging
    # stdlib and numpy imports used below (module-level imports are not shown
    # in this excerpt)
    import glob
    import os
    import time
    import numpy as np

    simulator = conf['simulator']
    # only the NEST backend is supported here, i.e. simulator == 'nest'
    import pyNN.nest as sim

    # prepare simulation
    logging.basicConfig()

    # extract parameters from config file
    master_seed = conf['params_dict']['nest']['master_seed']
    layers = conf['layers']
    pops = conf['pops']
    plot_spiking_activity = conf['plot_spiking_activity']
    raster_t_min = conf['raster_t_min']
    raster_t_max = conf['raster_t_max']
    frac_to_plot = conf['frac_to_plot']
    record_corr = conf['params_dict']['nest']['record_corr']
    tau_max = conf['tau_max']

    # Numbers of neurons from which to record spikes
    n_rec = helper_functions.get_n_rec(conf)

    sim.setup(**conf['simulator_params'][simulator])

    if simulator == 'nest':
        n_vp = sim.nest.GetKernelStatus('total_num_virtual_procs')
        if sim.rank() == 0:
            print 'n_vp: ', n_vp
            print 'master_seed: ', master_seed
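        # NEST 2.x seeding convention: one seed for the global RNG
        # ('grng_seed') plus one seed per virtual process ('rng_seeds'),
        # all derived from the master seed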
        sim.nest.SetKernelStatus({'print_time': False,
                                  'dict_miss_is_error': False,
                                  'grng_seed': master_seed,
                                  'rng_seeds': range(master_seed + 1,
                                                     master_seed + n_vp + 1),
                                  # output from the recording devices is written
                                  # to 'data_path' rather than to the CWD
                                  'data_path': conf['system_params']['output_path']})

    import network

    # collected output files, as (filename, MIME type) tuples
    results = []

    # create network
    start_netw = time.time()
    n = network.Network(sim)

    # contains the GIDs of the spike detectors and voltmeters needed for
    # retrieving filenames later
    device_list = n.setup(sim, conf)

    end_netw = time.time()
    if sim.rank() == 0:
        print 'Creating the network took ', end_netw - start_netw, ' s'

    # simulate
    if sim.rank() == 0:
        print "Simulating..."
    start_sim = time.time()
    sim.run(conf['simulator_params'][simulator]['sim_duration'])
    end_sim = time.time()
    if sim.rank() == 0:
        print 'Simulation took ', end_sim - start_sim, ' s'

    # extract filename from device_list (spikedetector/voltmeter),
    # gid of neuron and thread. merge outputs from all threads
    # into a single file which is then added to the task output.
    for dev in device_list:
        label = sim.nest.GetStatus(dev)[0]['label']
        gid = sim.nest.GetStatus(dev)[0]['global_id']
        # use the file extension to distinguish between spike and voltage
        # output
        extension = sim.nest.GetStatus(dev)[0]['file_extension']
        if extension == 'gdf':  # spikes
            data = np.empty((0, 2))
        elif extension == 'dat':  # voltages
            data = np.empty((0, 3))
        for thread in xrange(conf['simulator_params']['nest']['threads']):
            filenames = glob.glob(conf['system_params']['output_path']
                                  + '%s-*%d-%d.%s' % (label, gid, thread, extension))
            assert len(filenames) == 1, \
                'Expected exactly one output file per device and thread, ' \
                'found %d. Use a clean output directory.' % len(filenames)
            data = np.vstack([data, np.loadtxt(filenames[0])])
            # delete original files
            os.remove(filenames[0])
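        # sort the merged events by time (column 1 holds the spike/recording
        # time for both .gdf and .dat files)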
        order = np.argsort(data[:, 1])
        data = data[order]
        outputfile_name = 'collected_%s-%d.%s' % (label, gid, extension)
        outputfile = open(outputfile_name, 'w')
        # the output file should have the same format as the output from NEST,
        # i.e., [int, float] for spikes and [int, float, float] for voltages;
        # hence we write it line by line and assign the corresponding filetype
        if extension == 'gdf':  # spikes
            for line in data:
                outputfile.write('%d\t%.3f\n' % (line[0], line[1]))
            outputfile.close()
            filetype = 'application/vnd.juelich.nest.spike_times'

        elif extension == 'dat':  # voltages
            for line in data:
                outputfile.write(
                    '%d\t%.3f\t%.3f\n' % (line[0], line[1], line[2]))
            outputfile.close()
            filetype = 'application/vnd.juelich.nest.analogue_signal'

        res = (outputfile_name, filetype)
        results.append(res)

    if record_corr and simulator == 'nest':
        start_corr = time.time()
        if sim.nest.GetStatus(n.corr_detector, 'local')[0]:
            print 'getting count_covariance on rank ', sim.rank()
            cov_all = sim.nest.GetStatus(
                n.corr_detector, 'count_covariance')[0]
            delta_tau = sim.nest.GetStatus(n.corr_detector, 'delta_tau')[0]

            cov = {}
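            # cov_all holds count covariances for non-negative lags only, so
            # the reversed (target, source) entry is concatenated with the
            # (source, target) entry (skipping lag 0 once) to obtain the
            # covariance over the full lag range -tau_max..+tau_max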
            for target_layer in np.sort(layers.keys()):
                for target_pop in pops:
                    target_index = conf['structure'][target_layer][target_pop]
                    cov[target_index] = {}
                    for source_layer in np.sort(layers.keys()):
                        for source_pop in pops:
                            source_index = conf['structure'][
                                source_layer][source_pop]
                            cov[target_index][source_index] = \
                                np.array(list(
                                    cov_all[target_index][source_index][::-1])
                                + list(cov_all[source_index][target_index][1:]))

            f = open(conf['system_params'][
                     'output_path'] + '/covariances.dat', 'w')
            print >>f, 'tau_max: ', tau_max
            print >>f, 'delta_tau: ', delta_tau
            print >>f, 'simtime: ', conf['simulator_params'][
                simulator]['sim_duration'], '\n'

            for target_layer in np.sort(layers.keys()):
                for target_pop in pops:
                    target_index = conf['structure'][target_layer][target_pop]
                    for source_layer in np.sort(layers.keys()):
                        for source_pop in pops:
                            source_index = conf['structure'][
                                source_layer][source_pop]
                            print >>f, target_layer, target_pop, '-', source_layer, source_pop
                            print >>f, 'n_events_target: ', sim.nest.GetStatus(
                                n.corr_detector, 'n_events')[0][target_index]
                            print >>f, 'n_events_source: ', sim.nest.GetStatus(
                                n.corr_detector, 'n_events')[0][source_index]
                            for i in xrange(len(cov[target_index][source_index])):
                                print >>f, cov[target_index][source_index][i]
                            print >>f, ''
            f.close()

            # add file covariances.dat into bundle
            res_cov = ('covariances.dat',
                       'text/plain')
            results.append(res_cov)

        end_corr = time.time()
        print "Writing covariances took ", end_corr - start_corr, " s"

    if plot_spiking_activity and sim.rank() == 0:
        plotting.plot_raster_bars(raster_t_min, raster_t_max, n_rec,
                                  frac_to_plot, n.pops,
                                  conf['system_params']['output_path'],
                                  plot_filename, conf)
        res_plot = (plot_filename, 'image/png')
        results.append(res_plot)

    sim.end()

    return results
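
A minimal driver sketch for the function above (not part of the original example). It assumes the configuration is loaded from a hypothetical YAML file whose top-level keys match those read in _run_microcircuit ('simulator', 'params_dict', 'layers', 'pops', 'system_params', ...); the config file name and the plot filename are placeholders.

import yaml


def main():
    # hypothetical config file; its keys must match those accessed above
    with open('microcircuit_config.yaml') as f:
        conf = yaml.safe_load(f)
    results = _run_microcircuit('spiking_activity.png', conf)
    for filename, filetype in results:
        print('%s (%s)' % (filename, filetype))


if __name__ == '__main__':
    main()
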
Example #2
def printTimer(message):
    # the listing starts mid-function: this def line and the rank guard are
    # reconstructed (the helper's name is assumed), mirroring printMessage
    # below; time, sim, logging and init_logging come from module-level
    # imports that are not shown in this excerpt
    global rank, currentTimer
    if rank == 0:
        string1 = "\033[0;46m" + (message + ": ").ljust(30) + "\033[m"
        string2 = "\033[1;46m" + ("%5.2f" % (time.time() - currentTimer) + " seconds").rjust(30) + "\033[m"
        print(string1 + string2)
        currentTimer = time.time()


def printMessage(message):
    global rank
    if rank == 0:
        print("\033[2;46m" + (message).ljust(60) + "\033[m")


###################### MAIN BODY ###########################
## Rank for MPI ##
numberOfNodes = sim.num_processes()
rank = sim.rank()

# Log to stderr, only warnings, errors, critical
init_logging(None, num_processes=numberOfNodes, rank=rank, level=logging.WARNING)

## Start message ##
if rank == 0:
    print("\033[1;45m" + (("Lattice Simulation").rjust(38)).ljust(60) + "\033[m")
    print("\033[0;44m" + ("MPI_Rank: %d  " % rank + " MPI_Size: %d " % numberOfNodes).ljust(60) + "\033[m")


## Timer ##
currentTimer = time.time()
totalTimer = time.time()
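
# Hypothetical usage of the helpers defined above (not part of the original
# listing): printMessage announces a step, printTimer prints the time elapsed
# since the previous call and resets currentTimer, e.g.
#
#   printMessage("Building network")
#   ...  # create populations and projections
#   printTimer("Network construction")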

## Default global parameters ##
Example #3
def printTimer(message):
    # def line and rank guard reconstructed as in Example #2 above (the
    # listing starts mid-function; the helper's name is assumed)
    global rank, currentTimer
    if rank == 0:
        string1 = "\033[0;46m" + (message + ": ").ljust(30) + "\033[m"
        string2 = "\033[1;46m" + ("%5.2f" % (time.time() - currentTimer) + " seconds").rjust(30) + "\033[m"
        print(string1 + string2)
        currentTimer = time.time()

def printMessage(message):
    global rank
    if rank == 0:
        print("\033[2;46m" + (message).ljust(60) + "\033[m")


###################### MAIN BODY ###########################
## Rank for MPI ##
global numberOfNodes, rank
numberOfNodes = sim.num_processes()
rank = sim.rank()

# Log to 'sim.log', everything from DEBUG level upwards
init_logging('sim.log', num_processes=numberOfNodes, rank=rank, level=logging.DEBUG)

## Start message ##
if rank == 0:
    print("\033[1;45m" + (("Lattice Simulation").rjust(38)).ljust(60) + "\033[m")
    print("\033[0;44m" + ("MPI_Rank: %d  " % rank + " MPI_Size: %d " % numberOfNodes).ljust(60) + "\033[m")


## Timer ##
global currentTimer, totalTimer
currentTimer = time.time()
totalTimer = time.time()