    def test_script(self):
        """
        test that tests the printing of v from a pre determined recording
        :return:
        """
        synfire_run.do_run(n_neurons, max_delay=14, time_step=1,
                           neurons_per_core=1, delay=1.7, run_times=[50],
                           spike_path=current_spike_file_path,
                           gsyn_path_exc=current_gsyn_file_path,
                           v_path=current_v_file_path, end_before_print=False)

        spikes_read = synfire_run.get_output_pop_spikes_neo()
        v_read = synfire_run.get_output_pop_voltage_neo()
        gsyn_read = synfire_run.get_output_pop_gsyn_exc_neo()

        io = PickleIO(filename=current_spike_file_path)
        spikes_saved = io.read()[0]
        io = PickleIO(filename=current_v_file_path)
        v_saved = io.read()[0]
        io = PickleIO(filename=current_gsyn_file_path)
        gsyn_saved = io.read()[0]

        neo_compare.compare_blocks(spikes_read, spikes_saved)
        neo_compare.compare_blocks(v_read, v_saved)
        neo_compare.compare_blocks(gsyn_read, gsyn_saved)

    def test_print_voltage(self):
        """
        test that tests the printing of v from a pre determined recording
        :return:
        """
        synfire_run.do_run(n_neurons, max_delay=max_delay, time_step=timestep,
                           neurons_per_core=neurons_per_core, delay=delay,
                           run_times=[runtime], v_path=current_v_file_path)
        v_read = synfire_run.get_output_pop_voltage_neo()

        io = PickleIO(filename=current_v_file_path)
        v_saved = io.read()[0]
        neo_compare.compare_blocks(v_read, v_saved)
        os.remove(current_v_file_path)
Example 3
    def test_va_benchmark(self):

        try:
            exc_spikes = do_run()
        # The system is intentionally overloaded, so the run may time out
        except SpinnmanTimeoutException as ex:
            raise SkipTest(ex)
        spike_count = neo_convertor.count_spikes(exc_spikes)
        print(spike_count)
        # CB Jan 14 2019: Result varies between runs
        self.assertLessEqual(2558, spike_count)
        self.assertGreaterEqual(2559, spike_count)
        io = PickleIO(filename=neo_path)
        recorded_spikes = io.read()[0]
        neo_compare.compare_blocks(exc_spikes, recorded_spikes)
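For reference, a baseline file such as neo_path can be produced by writing the Block from a known-good run with PickleIO; a minimal sketch under that assumption (not necessarily the project's actual tooling):

# Sketch: write the excitatory spike Block once, so later runs can be
# compared against it with neo_compare.compare_blocks as above.
from neo.io import PickleIO
PickleIO(filename=neo_path).write(exc_spikes)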
Example 4
    def test_get_gsyn(self):
        synfire_run.do_run(n_neurons,
                           max_delay=max_delay,
                           time_step=timestep,
                           neurons_per_core=neurons_per_core,
                           delay=delay,
                           run_times=[runtime],
                           gsyn_path_exc=gsyn_path)
        spikes = synfire_run.get_output_pop_spikes_numpy()
        gsyn = synfire_run.get_output_pop_gsyn_exc_neo()

        self.assertEqual(12, len(spikes))
        spike_checker.synfire_spike_checker(spikes, n_neurons)
        io = PickleIO(filename=gsyn_path)
        gsyn_saved = io.read()[0]
        neo_compare.compare_blocks(gsyn, gsyn_saved)
        os.remove(gsyn_path)
Example 5
 def test_get_gsyn(self):
     try:
         synfire_run.do_run(n_neurons, max_delay=max_delay,
                            time_step=timestep,
                            neurons_per_core=neurons_per_core, delay=delay,
                            run_times=[runtime], gsyn_path_exc=gsyn_path)
         spikes = synfire_run.get_output_pop_spikes_numpy()
         g_syn = synfire_run.get_output_pop_gsyn_exc_numpy()
         spike_checker.synfire_spike_checker(spikes, n_neurons)
         io = PickleIO(filename=gsyn_path)
         gsyn2_neo = io.read()[0]
         gsyn2_numpy = neo_convertor.convert_data(
             gsyn2_neo, run=0, name="gsyn_exc")
         self.assertTrue(numpy.allclose(g_syn, gsyn2_numpy))
         os.remove(gsyn_path)
     except SpinnmanTimeoutException as ex:
         # The system is intentionally overloaded, so the run may time out
         raise SkipTest(ex)
Example 6
    def record_v(self):
        sim.setup(timestep=1)
        simtime = 100
        input = sim.Population(1, sim.SpikeSourceArray(spike_times=[0, 30]),
                               label="input")
        pop = sim.Population(32, sim.IF_curr_exp(), label="pop")
        sim.Projection(input, pop, sim.AllToAllConnector(),
                       synapse_type=sim.StaticSynapse(weight=5, delay=1))
        pop.record("v")
        sim.run(simtime)

        neo = pop.get_data("all")
        pop.write_data(pickle_path, "all")
        io = PickleIO(filename=pickle_path)
        saved = io.read()[0]
        neo_compare.compare_blocks(neo, saved)
        assert len(neo.segments[0].spiketrains) == 0
        assert len(neo.segments[0].filter(name="v")) > 0
        assert len(neo.segments[0].filter(name="gsyn_exc")) == 0

        v_neo = pop.get_data("v")
        pop.write_data(pickle_path, "v")
        io = PickleIO(filename=pickle_path)
        v_saved = io.read()[0]
        neo_compare.compare_blocks(v_neo, v_saved)
        neo_compare.compare_blocks(v_neo, neo)

        with self.assertRaises(ConfigurationException):
            pop.get_data("spikes")
        with self.assertRaises(ConfigurationException):
            pop.get_data("gsyn_exc")
        with self.assertRaises(ConfigurationException):
            pop.write_data(pickle_path, "spikes")
        with self.assertRaises(ConfigurationException):
            pop.write_data(pickle_path, "gsyn_exc")
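Pulling the recorded voltages out of the Block returned above uses the same filter call as the asserts; a minimal sketch (assumes record_v has run and neo still holds the Block from pop.get_data("all")):

# filter(name="v") returns a list of AnalogSignals; take the first one.
v_signal = neo.segments[0].filter(name="v")[0]
print(v_signal.times[:5])          # timestamps carrying units
print(v_signal.magnitude[:5, 0])   # voltages of the first neuron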
Example 7
    def test_print_voltage(self):
        """
        test that tests the printing of v from a pre determined recording
        :return:
        """
        try:
            synfire_run.do_run(n_neurons,
                               max_delay=max_delay,
                               time_step=timestep,
                               neurons_per_core=neurons_per_core,
                               delay=delay,
                               run_times=[runtime],
                               v_path=current_v_file_path)
            v_read = synfire_run.get_output_pop_voltage_neo()

            io = PickleIO(filename=current_v_file_path)
            v_saved = io.read()[0]
            neo_compare.compare_blocks(v_read, v_saved)
            os.remove(current_v_file_path)
        except SpinnmanTimeoutException as ex:
            # The system is intentionally overloaded, so the run may time out
            raise SkipTest(ex)

    def test_print_spikes(self):
        try:
            synfire_run.do_run(n_neurons,
                               time_step=timestep,
                               max_delay=max_delay,
                               delay=delay,
                               neurons_per_core=neurons_per_core,
                               run_times=[runtime],
                               spike_path=spike_path)
            spikes = synfire_run.get_output_pop_spikes_neo()

            try:
                io = PickleIO(filename=spike_path)
                read_in_spikes = io.read()[0]

                neo_compare.compare_blocks(spikes, read_in_spikes)
            except UnicodeDecodeError:
                raise SkipTest(
                    "https://github.com/NeuralEnsemble/python-neo/issues/529")

        except SpinnmanTimeoutException as ex:
            # The system is intentionally overloaded, so the run may time out
            raise SkipTest(ex)
Example 9
    def test_script(self):
        """
        test that tests the printing of v from a pre determined recording
        :return:
        """
        try:
            n_neurons = 20  # number of neurons in each population
            current_file_path = os.path.dirname(os.path.abspath(__file__))
            current_spike_file_path = os.path.join(current_file_path,
                                                   "spikes.pickle")
            current_v_file_path = os.path.join(current_file_path, "v.pickle")
            current_gsyn_file_path = os.path.join(current_file_path,
                                                  "gsyn.pickle")
            synfire_run.do_run(n_neurons, max_delay=14, time_step=0.1,
                               neurons_per_core=1, delay=1.7, run_times=[50],
                               spike_path=current_spike_file_path,
                               gsyn_path_exc=current_gsyn_file_path,
                               v_path=current_v_file_path)

            spikes_read = synfire_run.get_output_pop_spikes_neo()
            v_read = synfire_run.get_output_pop_voltage_neo()
            gsyn_read = synfire_run.get_output_pop_gsyn_exc_neo()

            io = PickleIO(filename=current_spike_file_path)
            spikes_saved = io.read()[0]
            io = PickleIO(filename=current_v_file_path)
            v_saved = io.read()[0]
            io = PickleIO(filename=current_gsyn_file_path)
            gsyn_saved = io.read()[0]

            neo_compare.compare_blocks(spikes_read, spikes_saved)
            neo_compare.compare_blocks(v_read, v_saved)
            neo_compare.compare_blocks(gsyn_read, gsyn_saved)

        except SpinnmanTimeoutException as ex:
            # System sometimes times out
            raise SkipTest(ex)
Example 10
 def _get_io(filename):
     """ Return a Neo IO instance, guessing the type based on the filename\
         suffix.
     """
     logger.debug("Creating Neo IO for filename {}", filename)
     directory = os.path.dirname(filename)
     utility_calls.check_directory_exists_and_create_if_not(directory)
     extension = os.path.splitext(filename)[1]
     if extension in ('.txt', '.ras', '.v', '.gsyn'):
         raise IOError(
             "ASCII-based formats are not currently supported for output"
             " data. Try using the file extension '.pkl' or '.h5'")
     elif extension in ('.h5', ):
         return NeoHdf5IO(filename=filename)
     elif extension in ('.pkl', '.pickle'):
         return PickleIO(filename=filename)
     elif extension == '.mat':
         return NeoMatlabIO(filename=filename)
     else:  # function to be improved later
         raise Exception("file extension %s not supported" % extension)
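A minimal usage sketch for the helper above (the path and the data_block name are placeholders): the '.pkl' extension routes to PickleIO, which then writes out a Neo Block.

# Sketch only: '.pkl' maps to PickleIO in the extension check above.
io = _get_io("results/output.pkl")
io.write(data_block)   # data_block is assumed to be an existing neo.Block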
Example 11
    def test__issue_285(self):
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch([0, 10, 20], [2, 2, 2], ["a", "b", "c"], units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
Example 12
    def test__issue_285(self):
        # Spiketrain
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch(np.array([0, 10, 20]),
                      np.array([2, 2, 2]),
                      np.array(["a", "b", "c"]),
                      units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
        os.remove('blk.pkl')

        # Epoch
        epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
                      durations=[10, 5, 7] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
        epoch.segment = Segment()
        blk = Block()
        seg = Segment()
        seg.epochs.append(epoch)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.epochs[0].segment, Segment)
        os.remove('blk.pkl')

        # Event
        event = Event(np.arange(0, 30, 10) * pq.s,
                      labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
        event.segment = Segment()

        blk = Block()
        seg = Segment()
        seg.events.append(event)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.events[0].segment, Segment)
        os.remove('blk.pkl')

        # IrregularlySampledSignal
        signal = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                          units='mV',
                                          time_units='ms')
        signal.segment = Segment()

        blk = Block()
        seg = Segment()
        seg.irregularlysampledsignals.append(signal)
        blk.segments.append(seg)
        blk.segments[0].block = blk

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment,
                              Segment)
        os.remove('blk.pkl')
Example 13
    def test__issue_285(self):
        # Spiketrain
        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
        unit = Unit()
        train.unit = unit
        unit.spiketrains.append(train)

        epoch = Epoch([0, 10, 20], [2, 2, 2], ["a", "b", "c"], units="ms")

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        seg.epochs.append(epoch)
        epoch.segment = seg
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
        self.assertIsInstance(r_seg.epochs[0], Epoch)
        os.remove('blk.pkl')
        # Epoch
        train = Epoch(times=np.arange(0, 30, 10) * pq.s,
                      durations=[10, 5, 7] * pq.ms,
                      labels=np.array(['btn0', 'btn1', 'btn2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)
        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # Event
        train = Event(np.arange(0, 30, 10) * pq.s,
                      labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
        train.segment = Segment()
        unit = Unit()
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        os.remove('blk.pkl')
        # IrregularlySampledSignal
        train = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                         units='mV', time_units='ms')
        train.segment = Segment()
        unit = Unit()
        train.channel_index = ChannelIndex(1)
        unit.spiketrains.append(train)

        blk = Block()
        seg = Segment()
        seg.spiketrains.append(train)
        blk.segments.append(seg)
        blk.segments[0].block = blk

        reader = PickleIO(filename="blk.pkl")
        reader.write(blk)

        reader = PickleIO(filename="blk.pkl")
        r_blk = reader.read_block()
        r_seg = r_blk.segments[0]
        self.assertIsInstance(r_seg.spiketrains[0].segment, Segment)
        self.assertIsInstance(r_seg.spiketrains[0].channel_index, ChannelIndex)
        os.remove('blk.pkl')
Example 14
def main(p, file, save_path):
    pre = 10*pq.ms
    post = 10*pq.ms
    fid = PIO(file)
    blk = fid.read_block()
    FR,ISI,contact_trains = get_contact_sliced_trains(blk,pre=pre,post=post)
    binsize = 2*pq.ms
    for unit in blk.channel_indexes[-1].units:
        root = blk.annotations['ratnum'] + blk.annotations['whisker'] + 'c{}'.format(unit.name[-1])
        trains = contact_trains[unit.name]
        all_isi = np.array([])
        CV_array = np.array([])
        LV_array = np.array([])
        for interval in ISI[unit.name]:
            all_isi = np.concatenate([all_isi,interval])
            if np.all(np.isfinite(interval)):
                CV_array = np.concatenate([CV_array,[cv(interval)]])
                LV_array = np.concatenate([LV_array,[lv(interval)]])

        all_isi = all_isi * interval.units
        CV = np.mean(CV_array)
        LV = np.mean(LV_array)

        ## calculate data for PSTH
        b,durations = get_binary_trains(contact_trains[unit.name])
        b_times = np.where(b)[1] * pq.ms  # interval.units
        b_times-=pre
        PSTH, t_edges = np.histogram(
            b_times,
            bins=np.arange(-np.array(pre),
                           np.max(durations) + np.array(post),
                           float(binsize)))
        plt.bar(t_edges[:-1],
                PSTH.astype('f8')/len(durations)/binsize*1000,
                width=float(binsize),
                align='edge',
                alpha=0.8
                )

        ax = plt.gca()
        thresh = 500 * pq.ms
        ax.set_xlim(-15, int(thresh))
        ax.set_xlabel('Time after contact (ms)')
        ax.set_ylabel('Spikes per second')
        ax.set_title('PSTH for: {}'.format(root))

        plt.savefig(os.path.join(save_path,root+'_PSTH.svg'))
        plt.close('all')
        # ============================================

        # PLOT ISIs
        plt.figure()
        thresh = 100 * pq.ms
        finite_isi = all_isi[np.logical_and(np.isfinite(all_isi),
                                            all_isi < thresh)]
        if len(finite_isi) == 0:
            return
        ax = sns.distplot(finite_isi,
                          bins=np.arange(0, 100, 1),
                          kde_kws={'color': 'k', 'lw': 3, 'alpha': 0.5,
                                   'label': 'KDE'})
        ax.set_xlabel('ISI '+all_isi.dimensionality.latex)
        ax.set_ylabel('Percentage of all ISIs')

        a_inset = plt.axes([.55, .5, .2, .2], facecolor='w')
        a_inset.grid(color='k',linestyle=':',alpha=0.4)
        a_inset.axvline(CV,color='k',lw=0.5)
        a_inset.set_title('CV = {:0.2f}\nLV = {:0.2f}'.format(CV,LV))
        a_inset.set_xlabel('CV')
        a_inset.set_ylabel('# of Contacts')
        sns.distplot(CV_array,color='g',kde=False)
        ax.set_title('ISI distribution for {}'.format(root))
        plt.savefig(os.path.join(save_path, root + '_ISI.svg'))
        plt.close('all')
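A plausible driver for main() above, mirroring the glob loop in the next example (the directory paths are placeholders, not taken from the original code):

# Sketch: run main() over every pickled Block in a data directory.
import glob
import os

data_dir = '/path/to/deflection_trials/data'
fig_dir = '/path/to/deflection_trials/figs'
for f in glob.glob(os.path.join(data_dir, '*.pkl')):
    main(data_dir, f, fig_dir)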
Example 15
from spikeAnalysis import *
from bayes_analyses import *
from neo_utils import *
from mechanics import *
from neo.io import PickleIO as PIO
import os
import glob

# Explicit imports for names used below (they may also be provided by the
# star imports above)
import numpy as np
import quantities as pq
import elephant.kernels
from elephant.statistics import instantaneous_rate
import matplotlib.pyplot as plt

p = r'C:\Users\guru\Box Sync\__VG3D\deflection_trials\data'
p_save = r'C:\Users\guru\Box Sync\__VG3D\deflection_trials\figs'
files = glob.glob(os.path.join(p, '*.pkl'))
for f in files:
    print(os.path.basename(f))
    fid = PIO(f)
    blk = fid.read_block()
    for unit in blk.channel_indexes[-1].units:
        cell_num = int(unit.name[-1])
        root = get_root(blk, cell_num)
        M = get_var(blk)[0]
        M = replace_NaNs(M)
        sp = concatenate_sp(blk)
        st = sp[unit.name]
        kernel = elephant.kernels.GaussianKernel(5 * pq.ms)
        r = np.array(
            instantaneous_rate(st, sampling_period=pq.ms,
                               kernel=kernel)).ravel()
        Mdot = get_deriv(M)

        fig = plt.figure()
        axy = fig.add_subplot(121)
        axz = fig.add_subplot(122)
Example 16
folder ='./output_data/'
voltage = '_v'
excitation = '_g_exc'
inhibition = '_g_inh'
orientation = '_orientation'
format = '.pickle'

filename_voltage = folder + voltage + format
filename_excitation = folder + excitation + format
filename_inhibition = folder + inhibition + format
filename_orientation = folder + str(contrast) + orientation + '.npy'

# ============== Save cell dynamical variables  =================

if save_voltage_and_conductances:
    from neo.io import PickleIO

    io = PickleIO(filename=filename_voltage)
    cortical_neurons_exc.write_data(io, variables=['v'])

    io = PickleIO(filename=filename_excitation)
    cortical_neurons_exc.write_data(io, variables=['gsyn_exc'])

    io = PickleIO(filename=filename_inhibition)
    cortical_neurons_exc.write_data(io, variables=['gsyn_inh'])

# ============== Save orientation analysis =================

if save_orientation_response:
    np.save(filename_orientation, np.vstack((orientation_space,rate)))
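Reading one of the saved files back is the mirror of the writes above; a minimal sketch (assumes the simulation has run and filename_voltage now exists on disk):

# PickleIO.read() returns a list of Blocks; the membrane voltages are the
# AnalogSignal named 'v' in the first segment.
from neo.io import PickleIO
block = PickleIO(filename=filename_voltage).read()[0]
v_signal = block.segments[0].filter(name='v')[0]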
if __name__ == '__main__':
    # Delayed imports so unit tests do not need them
    from pyNN.utility.plotting import Figure
    import matplotlib.pyplot as plt

    synfire_run.do_run(n_neurons,
                       max_delay=max_delay,
                       time_step=timestep,
                       neurons_per_core=neurons_per_core,
                       delay=delay,
                       run_times=[runtime],
                       gsyn_path_exc=gsyn_path)
    spikes = synfire_run.get_output_pop_spikes_numpy()
    g_syn = synfire_run.get_output_pop_gsyn_exc_numpy()
    spike_checker.synfire_spike_checker(spikes, n_neurons)
    io = PickleIO(filename=gsyn_path)
    gsyn2_neo = io.read()[0]
    gsyn2_numpy = neo_convertor.convert_data(gsyn2_neo, run=0, name="gsyn_exc")
    print(len(spikes))
    Figure(SpynnakerPanel(spikes,
                          yticks=True,
                          xticks=True,
                          markersize=4,
                          xlim=(0, runtime)),
           SpynnakerPanel(gsyn2_neo, yticks=True),
           title="TestPrintGsyn".format(delay),
           annotations="generated by {}".format(__file__))
    plt.show()
    os.remove(gsyn_path)