def cut_data(start, end):
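    # Return a copy of the [start, end) time-point window of the AP binary file.
    # Assumes module-level globals: binary_data_filename, number_of_points (total
    # time points, used only for the progress printout) and copy (e.g. numpy.copy).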
    print('Cut {}'.format(str(start / number_of_points)))
    raw_data = ns_funcs.load_binary_amplifier_data(
        binary_data_filename,
        number_of_channels=const.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)
    data = copy(raw_data[:, int(start):int(end)])
    del raw_data
    return data
Example #2
# Load data
event_dataframes = ns_funcs.load_events_dataframes(events_folder,
                                                   sync_funcs.event_types)
ev_video = event_dataframes['ev_video']

template_info = pd.read_pickle(join(kilosort_folder, 'template_info.df'))
spike_info = pd.read_pickle(
    join(kilosort_folder, 'spike_info_after_cortex_sorting.df'))

spike_rates = np.load(spike_rates_per_video_frame_filename)

distances_rat_to_poke_all_frames = np.load(
    join(patterned_vs_non_patterned_folder,
         'distances_rat_to_poke_all_frames.npy'))

lfps = ns_funcs.load_binary_amplifier_data(
    lfps_filename, const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)

trial_pokes_timepoints = np.load(trial_pokes_timepoints_filename)
non_trial_pokes_timepoints = np.load(non_trial_pokes_timepoints_filename)

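# Window around each event: in seconds, in AP time points, and in down-sampled LFP time points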
window_time = 8
window_timepoints = int(window_time * const.SAMPLING_FREQUENCY)
window_downsampled = int(window_timepoints / const.LFP_DOWNSAMPLE_FACTOR)

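# Event types to compare: trial pokes ('tp') vs non-trial pokes ('ntp')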
possible_events = {
    'tp': trial_pokes_timepoints,
    'ntp': non_trial_pokes_timepoints
}

lfp_probe_positions = np.empty(const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)
lfp_probe_positions[np.arange(const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)] = \
    np.arange(const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)  # hypothetical completion: index-based position per LFP channel
Example #3
# FOLDER NAMES ----------------------------------------------------
date = 5
kilosort_folder = join(const.base_save_folder, const.rat_folder,
                       const.date_folders[date], 'Analysis', 'Kilosort')
data_folder = join(const.base_save_folder, const.rat_folder,
                   const.date_folders[date], 'Data')

binary_data_filename = join(const.base_save_folder, const.rat_folder,
                            const.date_folders[date], 'Data',
                            'Amplifier_APs.bin')
tsne_folder = join(const.base_save_folder, const.rat_folder,
                   const.date_folders[date], 'Analysis', 'Tsne')
barnes_hut_exe_dir = r'E:\Software\Develop\Source\Repos\spikesorting_tsne_bhpart\Barnes_Hut\win\x64\Release'

raw_data = ns_funcs.load_binary_amplifier_data(
    binary_data_filename,
    number_of_channels=const.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)

sampling_freq = 20000

denoised_folder = join(const.base_save_folder, const.rat_folder,
                       const.date_folders[date], 'Analysis', 'Denoised')

denoised_data_filename = join(denoised_folder, 'Data',
                              'Amplifier_APs_Denoised.bin')

# -----------------------------------------------------------------

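# Split the channels into (presumably) cortical, hippocampal and thalamic groups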
data_cor = raw_data[1050:, :]
data_hyp = raw_data[850:1050, :]
data_th = raw_data[370:850, :]
Example #4
tsne_folder = join(analysis_folder, const.decimation_type_folder, 'TSNE')
tsne_exe_dir = r'E:\Software\Develop\Source\Repos\spikesorting_tsne_bhpart\Barnes_Hut\win\x64\Release'

sampling_freq = const.SAMPLING_FREQUENCY

# </editor-fold>
# ----------------------------------------------------------------------------------------------------------------------

# ----------------------------------------------------------------------------------------------------------------------
# <editor-fold desc="Create the new data set">

group_channels = const.group_channels

raw_data = ns_funcs.load_binary_amplifier_data(
    binary_data_filename, const.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE,
    const.BINARY_FILE_ENCODING)

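# Process the recording in chunks so the full binary never has to sit in memory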
number_of_chunks = 1000
chunk_size = raw_data.shape[1] / number_of_chunks

decimated_data = np.memmap(decimated_data_filename,
                           const.BINARY_FILE_ENCODING,
                           mode='w+',
                           shape=raw_data.shape,
                           order='F')

for c in np.arange(number_of_chunks):
    start = int(c * chunk_size)
    end = int(start + chunk_size)
    data_chunk = raw_data[:, start:end]
    # Hypothetical completion: copy each chunk into the output memmap
    # (the real pipeline presumably decimates data_chunk before writing)
    decimated_data[:, start:end] = data_chunk
Example #5
data_folder = join(const.base_save_folder, const.rat_folder,
                   const.date_folders[date_folder], 'Data')
denoised_data_folder = join(const.base_save_folder, const.rat_folder,
                            const.date_folders[date_folder], 'Analysis',
                            'Denoised', 'Data')
spikes_folder = join(const.base_save_folder, const.rat_folder,
                     const.date_folders[date_folder], 'Analysis', 'Denoised',
                     'Kilosort')

events_folder = join(data_folder, "events")

time_points_buffer = 5200

ap_data = ns_funcs.load_binary_amplifier_data(
    join(data_folder, 'Amplifier_APs.bin'),
    number_of_channels=const.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)

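# Reshape the continuous recordings into consecutive panes of time_points_buffer
# samples each, giving arrays of shape (panes, channels, time points)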
ap_data_panes = np.swapaxes(
    np.reshape(ap_data,
               (ap_data.shape[0], int(ap_data.shape[1] / time_points_buffer),
                time_points_buffer)), 0, 1)

ap_den_data = ns_funcs.load_binary_amplifier_data(
    join(denoised_data_folder, 'Amplifier_APs_Denoised.bin'),
    number_of_channels=const.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)
ap_den_data_panes = np.swapaxes(
    np.reshape(
        ap_den_data,
        (ap_den_data.shape[0],
         int(ap_den_data.shape[1] / time_points_buffer),
         time_points_buffer)), 0, 1)
Example #6
data_folder = join(const.base_save_folder, const.rat_folder,
                   const.date_folders[date_folder], 'Data')

events_folder = join(data_folder, "events")

analysis_folder = join(const.base_save_folder, const.rat_folder,
                       const.date_folders[date_folder], 'Analysis')
kilosort_folder = join(analysis_folder, 'Kilosort')
results_folder = join(analysis_folder, 'Results')
events_definitions_folder = join(results_folder, 'EventsDefinitions')
lfp_average_data_folder = join(results_folder, 'Lfp', 'Averages')

ap_data_filename = join(data_folder, 'Amplifier_APs.bin')

# Load data

ap_data = ns_funcs.load_binary_amplifier_data(
    ap_data_filename, const_comm.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)

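# Event time points: 's' = pokes of successful trials, 'tb' = ball touches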
trials = {
    's':
    np.load(
        join(events_definitions_folder,
             'events_pokes_of_successful_trial.npy')),
    'tb':
    np.load(join(events_definitions_folder, 'events_touch_ball.npy'))
}

if date_folder != 6:
    minimum_delay = 5
    nst = np.load(
        join(events_definitions_folder,
             'events_pokes_of_non_successful_trial.npy'))  # hypothetical filename
Example #7
analysis_folder = join(const.base_save_folder, const.rat_folder,
                       const.date_folders[date_folder], 'Analysis')
results_folder = join(analysis_folder, 'Results')
spike_lfp_folder = join(results_folder, 'SpikeLfpCorrelations',
                        'RandomSpikesAlongRecording')
spike_lfp_images_folder = join(spike_lfp_folder, 'SingleNeuronImages')

template_info = pd.read_pickle(join(kilosort_folder, 'template_info.df'))

spike_info = pd.read_pickle(
    join(kilosort_folder, 'spike_info_after_cleaning.df'))

lfps_file = join(data_folder, 'Amplifier_LFPs_Downsampled_x4.bin')
lfps = ns_funcs.load_binary_amplifier_data(
    lfps_file,
    number_of_channels=const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)

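# Intrinsic mode functions (IMFs) from the empirical mode decomposition (EMD) of the LFP channels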
imfs_file = join(analysis_folder, 'Lfp', 'EMD', 'imfs.bin')
imfs = emd.load_memmaped_imfs(imfs_file, const.NUMBER_OF_IMFS,
                              const.NUMBER_OF_LFP_CHANNELS_IN_BINARY_FILE)

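# Number of random spikes to sample per neuron and the half window (presumably in time points) around each spike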
num_of_spikes = 4000
half_time_window = 2000

# </editor-fold>

# -------------------------------------------------
# <editor-fold desc="CREATE TIMES OF RANDOM SPIKES FOR ALL HIGH FIRING NEURONS">

neurons_with_high_frs = template_info[
    template_info['firing rate'] > 1]  # hypothetical completion: column name and threshold are assumptions
Example #8
event_dataframes = ns_funcs.load_events_dataframes(events_folder,
                                                   sync_funcs.event_types)
file_to_save_to = join(kilosort_folder,
                       'firing_rate_with_video_frame_window.npy')
template_info = pd.read_pickle(join(kilosort_folder, 'template_info.df'))

spike_info = pd.read_pickle(
    join(kilosort_folder, 'spike_info_after_cleaning.df'))

video_frame_spike_rates_filename = join(
    kilosort_folder, 'firing_rate_with_video_frame_window.npy')
spike_rates = np.load(video_frame_spike_rates_filename)

raw_data_filename = join(data_folder, 'Amplifier_APs.bin')
raw_data = ns_funcs.load_binary_amplifier_data(
    raw_data_filename,
    number_of_channels=const_comm.NUMBER_OF_AP_CHANNELS_IN_BINARY_FILE)

# </editor-fold>
# -------------------------------------------------
# <editor-fold desc="GET TIMES AND FRAMES AROUND DIFFERENT TRIALS">

trials = {
    's':
    np.load(
        join(events_definitions_folder,
             'events_pokes_of_successful_trial.npy')),
    'tb':
    np.load(join(events_definitions_folder, 'events_touch_ball.npy'))
}
Example #9
binary_a = r'Exp2and3_2017_03_28T18_48_25_Amp_S16_LP3p5KHz_mV.bin'
number_of_channels_in_binary_file = 1440


'''
template_markings_v = np.load(join(base_folder_v, 'template_marking.npy'))
spike_templates_v = np.load(join(base_folder_v, 'spike_templates.npy'))
spike_times_v = np.load(join(base_folder_v, 'spike_times.npy'))
templates_clean_index_v = np.argwhere(template_markings_v)
spikes_clean_index_v = np.squeeze(np.argwhere(np.in1d(spike_templates_v, templates_clean_index_v)))
spike_templates_clean_v = spike_templates_v[spikes_clean_index_v]
spike_times_clean_v = spike_times_v[spikes_clean_index_v]
'''

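# Load the Kilosort output from base_folder_a and keep only the spikes whose template was marked as good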
channel_map = np.squeeze(np.load(join(base_folder_a, 'channel_map.npy')))
template_markings_a = np.load(join(base_folder_a, 'template_marking.npy'))
spike_templates_a = np.load(join(base_folder_a, 'spike_templates.npy'))
spike_times_a = np.load(join(base_folder_a, 'spike_times.npy'))
templates_clean_index_a = np.argwhere(template_markings_a)
spikes_clean_index_a = np.squeeze(np.argwhere(np.in1d(spike_templates_a, templates_clean_index_a)))
spike_templates_clean_a = spike_templates_a[spikes_clean_index_a]
spike_times_clean_a = spike_times_a[spikes_clean_index_a]


binary_file_a = join(base_folder_a, binary_a)
raw_extracellular_data_a = nf.load_binary_amplifier_data(binary_file_a)




Example #10
import numpy as np
import matplotlib.pyplot as plt
from os.path import join
import BrainDataAnalysis.neuroseeker_specific_functions as nsf
from matplotlib.widgets import Button

data_folder = r'D:\Data\George\Neuroseeker Chronic Rat 22_1\2017_05_22\16_49_50'  # first day
data_file = r'2017_05_22T16_49_50_Amp_S16_HP500Hz_LP3p5KHz_mV.bin'

data_folder = r'D:\Data\George\Neuroseeker Chronic Rat 22_1\2017_06_06\13_34_05'  # last day
data_file = r'2017_06_06T13_34_05_Amp_S16_HP500Hz_LP3p5KHz_mV.bin'

data_folder = r'D:\Data\George\Neuroseeker Chronic Rat 22_1\2017_06_02\13_20_21'
data_file = r'InBehaviour_2017-06-02T13_20_21_Amp_S16_mV.bin'
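# Only the last data_folder / data_file pair above is actually used; the earlier
# pairs are alternative recording sessions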

full_data = nsf.load_binary_amplifier_data(join(data_folder, data_file))

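# Build the set of channels to drop (LFP rows, known-bad ranges, reference channels)
# and keep everything else as good_channels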
lfp_channels = np.arange(9, 1440, 20)

bad_channels = np.concatenate(
    (np.arange(720, 760), np.arange(1200, 1240), np.arange(1320, 1440)))

channels_to_remove = np.concatenate(
    (lfp_channels, bad_channels, nsf.references)).astype(int)
channels_to_remove = np.sort(channels_to_remove)

good_channels = np.arange(nsf.number_of_channels_in_binary_file)
good_channels = np.delete(good_channels, channels_to_remove).astype(int)

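# Region boundaries along the probe (values presumably in um of probe depth)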
brain_regions = {
    'Parietal_Cortex': 8000,