Code example #1
import warnings

import numpy as np

# `oe` (the Open Ephys I/O module) and `ft` (a file-tools helper providing int2str)
# are imported elsewhere in the source project.


def load_events(file_path, channels=None):
    """
    return time stamps in seconds of each digital channel

    :param file_path:
    :param channels: name of channels
    :return: dictionary, {channel: {'rise':[timestamps of rising events in seconds],
                                    'fall':[timestamps of falling events in seconds}}
    """

    print('\n')

    if file_path[-7:] != '.events':
        raise LookupError('The input file: ' + file_path + ' is not a .events file!')

    with open(file_path) as f:
        header = oe.readHeader(f)
    fs = float(header['sampleRate'])

    events = oe.loadEvents(file_path)

    detected_channel_number = int(max(events['channel']) + 1)
    real_channels = ['ch_' + ft.int2str(c, 3) for c in range(detected_channel_number)]

    if channels is not None:
        if detected_channel_number != len(channels):
            warning_msg = '\nThe number of digital channels detected: ' + str(detected_channel_number) + \
                          ' does not match input channel number: ' + str(len(channels))
            warnings.warn(warning_msg)

        if len(channels) <= detected_channel_number:
            real_channels[0:len(channels)] = channels
        else:
            real_channels = channels[0:detected_channel_number]

    output = {}

    for i, ch in enumerate(real_channels):
        output[ch] = {'rise': [], 'fall': []}

        # Divide sample indices by the sampling rate first, then downcast, to avoid
        # the precision loss of casting large integer timestamps to float32.
        rise = events['timestamps'][np.logical_and(events['channel'] == i, events['eventId'] == 1)]
        output[ch]['rise'] = (rise / fs).astype(np.float32)

        fall = events['timestamps'][np.logical_and(events['channel'] == i, events['eventId'] == 0)]
        output[ch]['fall'] = (fall / fs).astype(np.float32)

    print('events loaded.\n')

    return output
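A hypothetical call might look like the sketch below; the file path and channel names are placeholders, not values from the original project.

events = load_events('/data/session_01/all_channels.events',
                     channels=['frame_trigger', 'photodiode'])
frame_onsets = events['frame_trigger']['rise']  # rising-edge times in seconds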
Code example #2
File: lfp_plot.py  Project: smasa1112/make_program
        # Extract the lowpass-filtered waveform
        waveform = glia.nucleus.Waveform(lowpass(tmp["data"], 30000, 450, 600,
                                                 10, 30),
                                         rate=glia.Frequency(30000))
        # tmp contains the keys ['header', 'timestamps', 'data', 'recordingNumber'];
        # 'timestamps' are sample points recorded at the sampling rate
        timestamps = tmp["timestamps"][0]
        # Zero-pad the file index to two digits.
        glia.save(f"./waveform/{dirname}/{num:02d}.lfp", waveform)
        del tmp, waveform
        gc.collect()
        num += 1

    events = OpenEphys.loadEvents(pathname + "/all_channels.events")
    # events contains both the onset and offset of each trigger, so keep only the onsets
    e_tmp = (events["timestamps"] - timestamps)[0::2]
    trigger = []
    # Store the trigger times in trigger on a millisecond scale
    for i in (e_tmp / 30):
        trigger.append(glia.millisecond(i))
    # Convert to a numpy array
    trigger = np.array(trigger)
    glia.save(f"./waveform/{dirname}/{dirname}.trigger", trigger)
    print((events["timestamps"] - timestamps)[0::2])

    # At this point each .lfp file holds the recorded voltage from one channel,
    # and the .trigger file holds the trigger timestamps.

    # Read the stimulation pattern from a JSON file and store it in a .stimorder file
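The [0::2] stride above assumes that rising and falling edges strictly alternate, starting with a rise. A sketch of a more explicit alternative, selecting rising edges via the eventId field as in code example #1 (this is not the project's own code):

rise_mask = np.asarray(events["eventId"]) == 1
onsets = np.asarray(events["timestamps"])[rise_mask] - timestamps
trigger = np.array([glia.millisecond(t) for t in onsets / 30])  # 30 samples per ms at 30 kHz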
Code example #3
    DataFiles = glob.glob(''.join([RecFolder,'/*.continuous']))
    DataFiles.sort()


#%% Generate TTLs

os.makedirs('DataArrays', exist_ok=True)
DirList = glob.glob('OpenEphysFiles/*'); DirList.sort()

for RecFolder in DirList:
    DataFiles = glob.glob(''.join([RecFolder,'/*.continuous']))
    EventsFile = ''.join([RecFolder,'/all_channels.events'])
    DataFiles.sort()
    
    TempData = OpenEphys.loadContinuous(DataFiles[0]) # Any ch would work ;)
    Events = OpenEphys.loadEvents(EventsFile)
                                                                                
    # Draw TTL
    print('Drawing TTL channel...')                                              
    if len(Events['timestamps']) > 100: # arbitrary value, I never use < 100 stimulation pulses
        TTLSound = [0] * len(TempData['data'])
        for EvTS in range(len(Events['timestamps'])):
            if Events['eventType'][EvTS] == 3: # if event is a TTL
                if Events['eventId'][EvTS] == 1: # if TTL is on
                    # Set samples between this TTL onset and the next event to 1.
                    # TempTimestamps is assumed to hold the per-sample timestamps of
                    # TempData, defined earlier in the source script; numpy is assumed
                    # imported as np.
                    Start = int(np.where(TempTimestamps == Events['timestamps'][EvTS])[0][0])
                    End = int(np.where(TempTimestamps == Events['timestamps'][EvTS + 1])[0][0])
                    TTLSound[Start:End] = [1] * (End - Start)
    else:
        TTLSound = [0] * len(TempData['data'])
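The event loop above can also be driven by boolean masks over the arrays returned by OpenEphys.loadEvents. A minimal sketch, under the same assumption that TempTimestamps holds the per-sample timestamps of TempData and that numpy is imported as np:

EvTimes = np.asarray(Events['timestamps'])
OnIdx = np.where((np.asarray(Events['eventType']) == 3) &
                 (np.asarray(Events['eventId']) == 1))[0]

TTLSound = np.zeros(len(TempData['data']), dtype=int)
for On in OnIdx:
    if On + 1 >= len(EvTimes):
        break  # no following event to close this pulse
    Start = np.searchsorted(TempTimestamps, EvTimes[On])
    End = np.searchsorted(TempTimestamps, EvTimes[On + 1])
    TTLSound[Start:End] = 1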
Code example #4
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 11 13:26:20 2018

@author: behrenslab
"""

import data_import as di
import OpenEphys as op
import Esync as es
import numpy as np

session = di.Session(
    '/home/behrenslab/ephys/2018-06-05-reversal_learning_3_tasks_recording/m483-2018-06-07-161545.txt'
)

rsync_events = [
    event.time for event in session.events if event.name == 'Rsync'
]
rsync_ephys = op.loadEvents(
    '/home/behrenslab/ephys/2018-06-07_16-15-43/all_channels.events')
rsync_timestamps = rsync_ephys['timestamps']
rsync_timestamps = np.array(rsync_timestamps)[::2]  # keep every other event (rising edges only)

rsync_events = np.array(rsync_events)
rsync_timestamps = rsync_timestamps / 30  # samples -> milliseconds at 30 kHz
aligner = es.Rsync_aligner(rsync_events, rsync_timestamps, plot=True)
times_B = aligner.A_to_B(rsync_events)      # A = pyControl times, B = ephys times
times_A = aligner.B_to_A(rsync_timestamps)

pycontrol_to_ephys = aligner.A_to_B
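With the aligner fitted, pycontrol_to_ephys maps any time on the pyControl clock into the ephys timebase. A hypothetical use, where the event name 'poke_1' is a placeholder:

poke_times = np.array([event.time for event in session.events if event.name == 'poke_1'])
poke_times_ephys = pycontrol_to_ephys(poke_times)  # same units as rsync_timestamps (samples / 30)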
Code example #5
                                  ".lfp") and os.path.isfile(
                                      "./waveform/" + filename[4:-4] +
                                      ".trig"):
     print("Load lfp, trig")
 else:
     wave = []
     for j in channel_map:
         tmp = OpenEphys.loadContinuous(filename + "/101_CH" + str(j) +
                                        ".continuous")
         # Output the lowpass-filtered waveform in the glia Waveform format
         wave.append(
             glia.nucleus.Waveform(lowpass(tmp["data"], 30000, 450, 600, 10,
                                           30),
                                   rate=glia.Frequency(30000)))
         timestamp = tmp["timestamps"][0]
 events = OpenEphys.loadEvents(filename + "/all_channels.events")
 e_tmp = (events["timestamps"] - timestamp)[0::2]
 trigger = []
 for i in (e_tmp / 30):
     trigger.append(glia.millisecond(i))
 trigger = np.array(trigger)
 glia.save("./waveform/" + filename[4:] + ".lfp", wave)
 glia.save("./waveform/" + filename[4:] + ".trig", trigger)
 if not reload and os.path.isfile("./waveform/" + filename[4:-4] +
                                  ".stimorder"):
     print("Load stimorder")
 else:
     with open("json/" + jsonfile, encoding="utf-8") as json_fp:
         stim_pattern = json.load(json_fp)
 # Store the stimulation pattern in a pandas DataFrame
 table = pandas.DataFrame(stim_pattern)
Code example #6
for file_ephys in files_ephys:
    if file_ephys in m486:
        match_ephys = re.search(r'\d{4}-\d{2}-\d{2}', file_ephys)
        date_ephys = match_ephys.group()  # 'YYYY-MM-DD' date string
        for file_behaviour in files_behaviour:
            match_behaviour = re.search(r'\d{4}-\d{2}-\d{2}', file_behaviour)
            date_behaviour = match_behaviour.group()  # 'YYYY-MM-DD' date string
            if date_ephys == date_behaviour:
                behaviour_path = behaviour_filename+'/'+file_behaviour
                behaviour_session = di.Session(behaviour_path)
                ephys_path = ephys_data_folder+'/'+file_ephys + '/' + subject
                print(behaviour_path)
                print(ephys_path)
                ephys_events = op.loadEvents(os.path.join(ephys_path,'all_channels.events'))  
                data_folder = file_ephys
                check_if_kilosort_exists = os.listdir(ephys_path)
                check_if_npy_exists = os.listdir(spikes_df_csv_out_folder)
                file_ephys_npy = file_ephys +'.npy'
                if file_ephys_npy not in check_if_npy_exists:
                    # For multiple files in one session add the requirement of cluster_groups.csv file 
                    for file in check_if_kilosort_exists:
                        if fnmatch.fnmatch(file, '*.csv'):
                            # Get offset in samples of Kilosort spike data relative to TTL events.
                            with open(os.path.join(ephys_path,'messages.events')) as f:
                                message = f.read()
                
                            recording_start_sample, sampling_rate = [
                                int(x) for x in message.split('start time: ')[1].split('Hz')[0].split('@')]
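The final parse assumes messages.events contains text of the form 'start time: <sample>@<rate>Hz'. A minimal illustration on a made-up message string:

example_message = "... start time: 23051264@30000Hz ..."  # made-up contents
start_sample, rate = [
    int(x) for x in example_message.split('start time: ')[1].split('Hz')[0].split('@')]
# start_sample == 23051264, rate == 30000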