Example 1
def initiate_neurons(raw_data):
    """Instantiate one NeuroVis object per PMd unit in the raw data."""
    neuron_list = []

    for i in range((raw_data['alldays'][0]['PMd_units'][0][:]).shape[0]):
        spike_times = get_spike_time(raw_data, i + 1)

        # instantiate neuron
        neuron = NeuroVis(spike_times, name='PMd %d' % (i + 1))
        neuron_list.append(neuron)

    return neuron_list
Example 2
    y_coords = np.reshape(y_coords, (y_coords.shape[0], 1))
    temp_depths = np.sum(np.dot(temp_chan_amps, y_coords), axis=1) / (np.sum(
        temp_chan_amps, axis=1))

    sorted_spike_depths = temp_depths[sorted_spike_templates]

    # create neurons and find region

    accumulator = 0

    for idx, count in enumerate(counts_per_cluster):

        if count > 0:

            spike_times = sorted_spikes[accumulator:accumulator + count]
            neuron = NeuroVis(spiketimes=spike_times, name='%d' % (idx))
            cluster_depth = np.mean(
                sorted_spike_depths[accumulator:accumulator + count])

            # assign the neuron to a brain region based on its cluster depth
            if name == 'frontal':

                if 0 < cluster_depth < 1550:
                    striatum.append(neuron)
                elif 1550 < cluster_depth < 3840:
                    motor_ctx.append(neuron)

            elif name == 'posterior':

                if 0 < cluster_depth < 1634:
                    thalamus.append(neuron)
                elif 1634 < cluster_depth < 2797:
Example 3
def test_neurovis():

    num_spikes = 500
    num_trials = 10

    binsize = 100
    window = [-500, 1500]

    rand_spiketimes = np.sort(num_trials * np.random.rand(num_spikes))

    neuron = NeuroVis(spiketimes=rand_spiketimes)

    df = pd.DataFrame()

    event = 'realCueTime'
    condition_num = 'responseNum'
    condition_bool = 'responseBool'

    start_times = rand_spiketimes[0::int(num_spikes / num_trials)]
    df['trialStart'] = start_times

    df[event] = df['trialStart'] + np.random.rand(num_trials)

    event_times = ((start_times[:-1] + start_times[1:]) / 2).tolist()
    event_times.append(start_times[-1] + np.random.rand())

    df[event] = event_times

    df[condition_num] = np.random.rand(num_trials)
    df[condition_bool] = df[condition_num] < 0.5

    raster = neuron.get_raster(event=event,
                               conditions=condition_bool,
                               df=df,
                               plot=True,
                               binsize=binsize,
                               window=window)

    assert_equal(raster['event'], event)
    assert_equal(raster['conditions'], condition_bool)
    assert_equal(raster['binsize'], binsize)
    assert_equal(raster['window'], window)

    total_trials = 0

    for cond_id in raster['data'].keys():

        assert_true(cond_id in df[condition_bool])
        assert_equal(raster['data'][cond_id].shape[1],
                     (window[1] - window[0]) / binsize)
        total_trials += raster['data'][cond_id].shape[0]

    assert_equal(total_trials, num_trials)

    psth = neuron.get_psth(event=event,
                           conditions=condition_bool,
                           df=df,
                           plot=True,
                           binsize=binsize,
                           window=window)

    assert_equal(psth['window'], window)
    assert_equal(psth['binsize'], binsize)
    assert_equal(psth['event'], event)
    assert_equal(psth['conditions'], condition_bool)

    for cond_id in psth['data'].keys():

        assert_true(cond_id in df[condition_bool])
        assert_equal(psth['data'][cond_id]['mean'].shape[0],
                     (window[1] - window[0]) / binsize)
        assert_equal(psth['data'][cond_id]['sem'].shape[0],
                     (window[1] - window[0]) / binsize)

    spikecounts = neuron.get_spikecounts(event=event,
                                         df=df,
                                         window=[0, num_trials])
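
    # A follow-up check one might add here (a sketch, assuming
    # get_spikecounts returns one spike count per row of df):
    assert_equal(spikecounts.shape[0], num_trials)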
Example 4
fpath = os.path.join('data_structure_ANM218457_20131006.mat')
mat = scipy.io.loadmat(fpath)

########################################################
#
# 2 Get Spike Times
# --------------------

neuron_n = 9
spike_times = mat['obj']['eventSeriesHash'][0][0]['value'][0][0][0][neuron_n -
                                                                    1][0][0][1]
spike_times = [i[0] for i in spike_times]

# instantiate neuron
neuron = NeuroVis(spike_times, neuron_n)
print('neuron %d has a firing rate of %0.2f spikes per second' %
      (neuron_n, neuron.firingrate))

########################################################
#
# Let's use all the goodness of ``pandas`` to define all our conditions.
# Here, we will create a set of extra columns in the data frame that will
# be useful for selecting and plotting PSTHs for specific conditions. We
# aim to follow the principles outlined in `Hadley Wickham's white paper
# on Tidy Data <http://vita.had.co.nz/papers/tidy-data.pdf>`__.

########################################################
#
# 3 Get Event Times
# --------------------
Example 5
def test_popvis():

    np.random.seed()

    num_spikes = 500
    num_trials = 10

    binsize = 100
    window = [-500, 1500]

    num_neurons = 10
    neuron_list = list()

    for i in range(num_neurons):
        # sort so that the trial-start slicing below yields increasing times
        rand_spiketimes = np.sort(num_trials * np.random.rand(num_spikes))
        neuron_list.append(NeuroVis(rand_spiketimes))

    pop = PopVis(neuron_list)

    df = pd.DataFrame()

    event = 'realCueTime'
    condition_num = 'responseNum'
    condition_bool = 'responseBool'

    start_times = rand_spiketimes[0::int(num_spikes/num_trials)]

    df['trialStart'] = start_times

    df[event] = df['trialStart'] + np.random.rand(num_trials)

    event_times = ((start_times[:-1] + start_times[1:]) / 2).tolist()
    event_times.append(start_times[-1] + np.random.rand())

    df[event] = event_times

    df[condition_num] = np.random.rand(num_trials)
    df[condition_bool] = df[condition_num] < 0.5

    all_psth = pop.get_all_psth(event=event, conditions=condition_bool, df=df,
                                plot=True, binsize=binsize, window=window)

    assert_equal(all_psth['window'], window)
    assert_equal(all_psth['binsize'], binsize)
    assert_equal(all_psth['event'], event)
    assert_equal(all_psth['conditions'], condition_bool)

    for cond_id in all_psth['data'].keys():

        assert_true(cond_id in df[condition_bool])
        assert_equal(all_psth['data'][cond_id].shape[0],
                     num_neurons)
        assert_equal(all_psth['data'][cond_id].shape[1],
                     (window[1] - window[0]) / binsize)

    assert_raises(ValueError, pop.plot_heat_map, all_psth,
                  sortby=list(range(num_trials-1)))

    pop.plot_heat_map(all_psth, sortby=list(range(num_trials)))
    pop.plot_heat_map(all_psth, sortby='rate')
    pop.plot_heat_map(all_psth, sortby='latency')
    pop.plot_heat_map(all_psth, sortorder='ascend')

    pop.plot_population_psth(all_psth=all_psth)
Example 6
print('events:', reach_data['events'].keys())
print('features:', reach_data['features'].keys())
print('number of PMd neurons:', len(reach_data['neurons_PMd']))
print('number of M1 neurons:', len(reach_data['neurons_M1']))

########################################################
# Part I: NeuroVis
# -----------------------------
#
#
# Instantiate Example PMd Neuron
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

neuron_number = 91
spike_times = reach_data['neurons_PMd'][neuron_number - 1]
neuron_PMd = NeuroVis(spike_times, name='PMd %d' % neuron_number)

########################################################
#
# Raster plot and PSTH aligned to target onset
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

neuron_PMd.get_raster(event='targetOnTime', df=reach_data['events'])

########################################################

neuron_PMd.get_psth(event='targetOnTime', df=reach_data['events'])
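
########################################################
#
# ``get_psth`` also accepts the ``binsize`` and ``window`` arguments seen
# in the tests above; here is a sketch with a narrower alignment window
# (the values below are purely illustrative).

neuron_PMd.get_psth(event='targetOnTime', df=reach_data['events'],
                    window=[-200, 800], binsize=20)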

########################################################
# Let's put the data into a DataFrame
#
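
########################################################
#
# A hedged sketch of one way to do that, assuming every entry of
# ``reach_data['events']`` is a one-dimensional array with one value per
# trial (the available keys were printed above):

import pandas as pd

events_df = pd.DataFrame(reach_data['events'])  # hypothetical variable name
print(events_df.head())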
Example 7
def test_neurovis():

    np.random.seed(1738)

    num_spikes = 500
    num_trials = 10

    binsize = 100
    window = [-500, 1500]

    rand_spiketimes = np.sort(num_trials * np.random.rand(num_spikes))

    neuron = NeuroVis(spiketimes=rand_spiketimes)

    df = pd.DataFrame()

    event = 'realCueTime'
    condition_num = 'responseNum'
    condition_bool = 'responseBool'

    start_times = rand_spiketimes[0::int(num_spikes/num_trials)]
    df['trialStart'] = start_times

    df[event] = df['trialStart'] + np.random.rand(num_trials)

    event_times = ((start_times[:-1] + start_times[1:]) / 2).tolist()
    event_times.append(start_times[-1] + np.random.rand())

    df[event] = event_times

    df[condition_num] = np.random.rand(num_trials)
    df[condition_bool] = df[condition_num] < 0.5

    for cond in [None, condition_bool]:

        raster = neuron.get_raster(event=event, conditions=cond,
                                   df=df, plot=True, binsize=binsize,
                                   window=window)

        neuron.plot_raster(raster, cond_name=raster['conditions'])

        assert_equal(raster['event'], event)
        assert_equal(raster['conditions'], cond)
        assert_equal(raster['binsize'], binsize)
        assert_equal(raster['window'], window)

    total_trials = 0

    for cond_id in raster['data'].keys():

        assert_true(cond_id in df[condition_bool])
        assert_equal(raster['data'][cond_id].shape[1],
                     (window[1] - window[0]) / binsize)
        total_trials += raster['data'][cond_id].shape[0]

    assert_equal(total_trials, num_trials)

    psth = neuron.get_psth(event=event, conditions=condition_bool, df=df,
                           plot=True, binsize=binsize, window=window)

    neuron.plot_psth(psth=psth, ylim=np.random.randn(2).tolist())

    assert_equal(psth['window'], window)
    assert_equal(psth['binsize'], binsize)
    assert_equal(psth['event'], event)
    assert_equal(psth['conditions'], condition_bool)

    for cond_id in psth['data'].keys():

        assert_true(cond_id in df[condition_bool])
        assert_equal(psth['data'][cond_id]['mean'].shape[0],
                     (window[1] - window[0]) / binsize)
        assert_equal(psth['data'][cond_id]['sem'].shape[0],
                     (window[1] - window[0]) / binsize)

    neuron.get_spikecounts(event=event, df=df, window=[0, num_trials])