Example 1
def process_a_dir(dir_name):
    print('')
    print('All folders in {} will be processed.'.format(dir_name))
    # the date parameter is taken from the directory path, split on the platform-specific separator
    if prm.get_is_windows():
        prm.set_date(dir_name.rsplit('\\', 2)[-2])
    if prm.get_is_ubuntu():
        prm.set_date(dir_name.rsplit('/', 2)[-2])

    prm.set_filepath(dir_name)
    file_utility.set_continuous_data_path(prm)

    dead_channels.get_dead_channel_ids(prm)  # read dead_channels.txt
    file_utility.create_folder_structure(prm)

    if prm.get_is_tetrode_by_tetrode() is True:
        print('------------------------------------------')
        print('I am making one mda file for each tetrode.')
        print('------------------------------------------')
        make_sorting_database.create_sorting_folder_structure_separate_tetrodes(prm)
        convert_open_ephys_to_mda.convert_continuous_to_mda(prm)
        print('All 4 tetrodes were converted to separate mda files.')
        print('*****************************************************')

    if prm.get_is_all_tetrodes_together() is True:
        print('-------------------------------------------------------------------------')
        print('I am converting all channels into one mda file. This will take some time.')
        print('-------------------------------------------------------------------------')
        make_sorting_database.create_sorting_folder_structure(prm)
        convert_open_ephys_to_mda.convert_all_tetrodes_to_mda(prm)
        print('The big mda file is created, it is in Electrophysiology/Spike_sorting/all_tetrodes/data')
        print('***************************************************************************************')

    if prm.is_vr is True:
        vr_process_movement.save_or_open_movement_arrays(prm)
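
As a hedged illustration of how the example above might be driven (the entry-point loop and the folder list below are assumptions, not part of the repository code), a caller could process several recordings like this:

# Hypothetical driver (illustration only): the folder list is an assumption.
recording_folders = [
    'C:\\recordings\\mouse1_day1\\',
    'C:\\recordings\\mouse1_day2\\',
]
for dir_name in recording_folders:
    process_a_dir(dir_name)  # convert the recording and save its movement arrays
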
Example 2
def convert_continuous_to_mda(prm):
    file_utility.create_folder_structure(prm)
    # make_sorting_database.create_sorting_folder_structure(prm)
    number_of_tetrodes = prm.get_num_tetrodes()
    folder_path = prm.get_filepath()
    spike_data_path = prm.get_spike_path() + '\\'
    continuous_file_name = prm.get_continuous_file_name()

    # create the per-tetrode folder structure if the output does not exist yet
    if os.path.isfile(spike_data_path + 't1_' + prm.get_date() +
                      '\\raw.mda') is False:
        file_utility.create_ephys_folder_structure(prm)

    for tetrode in range(number_of_tetrodes):
        channel_data_all = []
        for channel in range(4):
            # continuous files are numbered tetrode * 4 + 1 to tetrode * 4 + 4
            file_path = folder_path + continuous_file_name + str(
                tetrode * 4 + channel + 1) + '.continuous'
            channel_data = open_ephys_IO.get_data_continuous(prm, file_path)
            channel_data_all.append(channel_data)

        recording_length = len(channel_data_all[0])
        channels_tetrode = np.zeros((4, recording_length))

        for ch in range(4):
            channels_tetrode[ch, :] = channel_data_all[ch]
        # write the four stacked channels as a 16-bit integer mda file
        mdaio.writemda16i(
            channels_tetrode, spike_data_path + 't' + str(tetrode + 1) + '_' +
            prm.get_date() + '_continuous\\data\\raw.mda')
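
convert_continuous_to_mda reads four channels per tetrode: the 0-based tetrode index t maps to the continuous files numbered t * 4 + 1 through t * 4 + 4. A minimal sketch of that mapping, assuming the '100_CH' prefix used by convert_all_tetrodes_to_mda below (normally the prefix comes from prm.get_continuous_file_name()):

# Illustration only: print which .continuous files belong to each tetrode.
continuous_file_name = '100_CH'  # assumed prefix for this sketch
for tetrode in range(4):
    files = [continuous_file_name + str(tetrode * 4 + channel + 1) + '.continuous'
             for channel in range(4)]
    print('tetrode', tetrode + 1, ':', files)
# tetrode 1 uses 100_CH1 to 100_CH4, and so on up to 100_CH16 for tetrode 4.
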
Example 3
def convert_spk_to_mda(prm):
    file_utility.create_folder_structure(prm)
    folder_path = prm.get_filepath()
    spike_data_path = prm.get_spike_path() + '\\'
    number_of_tetrodes = prm.get_num_tetrodes()
    samples_per_spike = prm.get_waveform_size()

    # only convert if the first tetrode's output does not exist yet
    if os.path.isfile(spike_data_path + 't1_' + prm.get_date() +
                      '\\raw.nt1.mda') is False:
        file_utility.create_ephys_folder_structure(prm)

        for tetrode in range(number_of_tetrodes):
            file_path = folder_path + 'TT' + str(tetrode) + '.spikes'
            waveforms, timestamps = open_ephys_IO.get_data_spike(
                folder_path, file_path, 'TT' + str(tetrode + 1))
            np.save(
                spike_data_path + 't' + str(tetrode + 1) + '_' +
                prm.get_date() + '\\TT' + str(tetrode + 1) + '_timestamps',
                timestamps
            )  # todo: this is shifted by 10 seconds relative to light and location!

            padded_array = get_padded_array(waveforms, samples_per_spike)

            # spike waveforms for this tetrode
            mdaio.writemda16i(
                padded_array, spike_data_path + 't' + str(tetrode + 1) + '_' +
                prm.get_date() + '\\raw.nt' + str(tetrode + 1) + '.mda')
            # peak indices saved as the event times for this tetrode
            peak_indices = get_peak_indices(waveforms, samples_per_spike)
            mdaio.writemda32i(
                peak_indices, spike_data_path + 't' + str(tetrode + 1) + '_' +
                prm.get_date() + '\\event_times.nt' + str(tetrode + 1) +
                '.mda')

            # spike timestamps for this tetrode
            mdaio.writemda32(
                timestamps, spike_data_path + 't' + str(tetrode + 1) + '_' +
                prm.get_date() + '\\timestamps.nt' + str(tetrode + 1) + '.mda')
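
For each tetrode the example writes three files into t<N>_<date>: raw.nt<N>.mda (padded waveforms), event_times.nt<N>.mda (peak indices) and timestamps.nt<N>.mda (spike times). A hedged sketch that checks those outputs exist, composing the paths exactly as the code above does (check_spk_mda_outputs itself is hypothetical, not repository code):

import os

# Hypothetical check: verify the per-tetrode outputs written by convert_spk_to_mda.
def check_spk_mda_outputs(prm):
    spike_data_path = prm.get_spike_path() + '\\'
    for tetrode in range(prm.get_num_tetrodes()):
        base = spike_data_path + 't' + str(tetrode + 1) + '_' + prm.get_date() + '\\'
        for name in ('raw.nt', 'event_times.nt', 'timestamps.nt'):
            path = base + name + str(tetrode + 1) + '.mda'
            if not os.path.isfile(path):
                print('Missing expected output:', path)
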
Example 4
def save_or_open_movement_arrays(prm):
    file_utility.create_folder_structure(prm)

    # flag corrupted (zero-byte) cached .npy files in the recording folder
    os.chdir(prm.get_filepath())
    for file in os.listdir(prm.get_filepath()):
        if file.endswith(".npy") and os.path.getsize(file) == 0:
            print('---FILE ERROR: The size of ' + file +
                  ' is 0, something is wrong.---')

    # compute the movement arrays, or load them from the cache if they already exist
    location, speed, velocity = cached_calculate_movement(prm)
    moves_indices, stationary_indices = cached_stationary_movement(prm, speed)
    os.chdir('..')
    return
Example 5
def cached_stationary_movement(prm, speed):
    file_utility.create_folder_structure(prm)

    data_path = prm.get_behaviour_data_path()

    # compute and save the arrays on the first run, otherwise load the cached copies
    if os.path.isfile(data_path +
                      "\\stationary.npy") is False or os.path.isfile(
                          data_path + "\\moves.npy") is False:
        stationary, moves = moves_or_stationary(prm, speed)
        np.save(data_path + "\\stationary", stationary)
        np.save(data_path + "\\moves", moves)
    else:
        stationary = np.load(data_path + "\\stationary.npy")
        moves = np.load(data_path + "\\moves.npy")
    return moves, stationary
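
Example 5 uses a compute-once, load-later pattern: if the .npy files are missing, the arrays are computed and saved with np.save; otherwise they are loaded with np.load. The same pattern written as a small generic helper (illustration only, not a function from the repository):

import os
import numpy as np

# Illustrative helper: compute an array once, cache it as .npy, reuse it afterwards.
def compute_or_load(path, compute):
    if not os.path.isfile(path + '.npy'):
        array = compute()
        np.save(path, array)  # np.save adds the .npy extension
        return array
    return np.load(path + '.npy')
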
Example 6
def convert_all_tetrodes_to_mda(prm):
    file_utility.create_folder_structure(prm)
    make_sorting_database.organize_files_for_ms(prm)
    number_of_tetrodes = prm.get_num_tetrodes()
    folder_path = prm.get_filepath()
    spike_data_path = prm.get_spike_path() + '\\'

    path = spike_data_path + 'all_tetrodes\\data\\raw.mda'

    # read the first channel to determine the recording length and size the array
    file_path = folder_path + '100_CH' + str(1) + '.continuous'
    first_ch = open_ephys_IO.get_data_continuous(prm, file_path)
    recording_length = len(first_ch)
    channels_all = np.zeros((number_of_tetrodes * 4, recording_length))
    channels_all[0, :] = first_ch

    # read the remaining channels; the bound follows the tetrode count rather than assuming 16 channels
    for channel in range(1, number_of_tetrodes * 4):
        file_path = folder_path + '100_CH' + str(channel + 1) + '.continuous'
        channel_data = open_ephys_IO.get_data_continuous(prm, file_path)
        channels_all[channel, :] = channel_data

    mdaio.writemda16i(channels_all, path)
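
The resulting raw.mda holds one row per channel (number_of_tetrodes * 4 rows by recording_length columns). Assuming the mdaio module used above also provides readmda (as the MountainLab Python tools do), a quick shape check might look like this (check_all_tetrodes_mda is hypothetical):

# Hypothetical sanity check: read the combined file back and report its shape.
# Assumes mdaio.readmda is available next to the writemda* functions used above.
def check_all_tetrodes_mda(prm):
    path = prm.get_spike_path() + '\\all_tetrodes\\data\\raw.mda'
    data = mdaio.readmda(path)
    print('raw.mda shape (channels, samples):', data.shape)
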