def getLFPData(probeBase, syncDataset, num_channels=384):
    probeTTLDir = os.path.join(probeBase, r'events\\Neuropix-PXI-100.0\\TTL_1')
    lfp_data_dir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.1')

    lfp_data_file = os.path.join(lfp_data_dir, 'continuous.dat')
    if not os.path.exists(lfp_data_file):
        print('Could not find LFP data at ' + lfp_data_file)
        return None, None

    # Memory-map the raw int16 LFP data and reshape to (samples, channels)
    lfp_data = np.memmap(lfp_data_file, dtype='int16', mode='r')
    lfp_data_reshape = np.reshape(lfp_data, [int(lfp_data.size / num_channels), -1])

    time_stamps = np.load(os.path.join(lfp_data_dir, 'lfp_timestamps.npy'))

    # Get barcodes from the sync file
    bRising, bFalling = get_sync_line_data(syncDataset, channel=0)
    bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)

    # Get barcodes from the probe event files (event timestamps are 30 kHz sample indices)
    channel_states = np.load(os.path.join(probeTTLDir, 'channel_states.npy'))
    event_times = np.load(os.path.join(probeTTLDir, 'event_timestamps.npy'))

    beRising = event_times[channel_states > 0] / 30000.
    beFalling = event_times[channel_states < 0] / 30000.
    be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)

    # Compute time shift between ephys and sync
    shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
        bs_t, bs, be_t, be, 0, 30000)

    time_stamps_shifted = (time_stamps / p_sampleRate) - shift

    return lfp_data_reshape, time_stamps_shifted
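
# Usage sketch (illustrative, not part of the original module). The probe directory is a
# hypothetical path, and syncDataset is assumed to have been loaded elsewhere with the
# repo's sync file loader; only the return handling follows from getLFPData above.
def example_load_lfp(syncDataset):
    probeBase = r'D:\example_session\example_session_probeA_sorted'  # hypothetical path
    lfp, lfp_times = getLFPData(probeBase, syncDataset, num_channels=384)
    if lfp is not None:
        # lfp is (samples, channels); lfp_times is in seconds on the sync (master) clock
        print('LFP shape:', lfp.shape)
        print('first/last LFP timestamps (s): %.3f, %.3f' % (lfp_times[0], lfp_times[-1]))
    return lfp, lfp_times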
def plot_barcode_intervals(probe_dirs, syncDataset, FIG_SAVE_DIR, prefix=''):
    fig, ax = plt.subplots()
    fig.suptitle('Sync Barcode Intervals')

    bs_t, bs = probeSync.get_sync_barcodes(syncDataset)
    ax.plot(np.diff(bs_t), 'k')
    ax.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))

    pfig, pax = plt.subplots(1, 2)
    pfig.set_size_inches([8, 4])
    pfig.suptitle('Probe Barcode Intervals')

    for ip, probe in enumerate(probe_dirs):
        p_name = probe.split('_')[-2][-1]
        be_t, be = probeSync.get_ephys_barcodes(probe)
        shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
            bs_t, bs, be_t, be, 0, 30000)

        pax[0].plot(np.diff(be_t), probe_color_dict[p_name])
        pax[0].set_title('uncorrected')
        pax[1].plot(np.diff(be_t) * (30000. / p_sampleRate), probe_color_dict[p_name])
        pax[1].set_title('corrected')

    pax[0].yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
    pax[1].yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
    pax[0].legend([probe.split('_')[-2][-1] for probe in probe_dirs])

    save_figure(fig, os.path.join(FIG_SAVE_DIR, prefix + 'Sync_barcode_intervals.png'))
    save_figure(pfig, os.path.join(FIG_SAVE_DIR, prefix + 'Probe_barcode_intervals.png'))
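
# plot_barcode_intervals relies on two module-level helpers that are not shown in this
# listing: probe_color_dict (a mapping from probe letter to a matplotlib color spec) and
# save_figure. Minimal stand-ins, given here only as assumptions about their shape:
probe_color_dict = {'A': 'g', 'B': 'r', 'C': 'b', 'D': 'c', 'E': 'm', 'F': 'y'}

def save_figure(fig, save_path):
    # write a figure to disk, creating the destination directory if it does not exist
    os.makedirs(os.path.dirname(save_path), exist_ok=True)
    fig.savefig(save_path)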
def getUnitData(probeBase, syncDataset):
    probeSpikeDir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.0')

    # Get barcodes/times from probe events and sync file
    be_t, be = get_ephys_barcodes(probeBase)
    bs_t, bs = get_sync_barcodes(syncDataset)

    # Compute time shift between ephys and sync
    shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
        bs_t, bs, be_t, be, 0, 30000)

    # Get unit spike times
    units = load_spike_info(probeSpikeDir, p_sampleRate, shift)

    return units
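
# Usage sketch (illustrative): looping these per-probe loaders over a session. The
# directory layout is hypothetical, syncDataset is assumed to be loaded elsewhere, and
# the structure of the returned units object is whatever load_spike_info produces.
def example_load_session(syncDataset):
    probe_bases = {
        'A': r'D:\example_session\example_session_probeA_sorted',  # hypothetical paths
        'B': r'D:\example_session\example_session_probeB_sorted',
    }
    units_by_probe = {}
    lfp_by_probe = {}
    for p, base in probe_bases.items():
        units_by_probe[p] = getUnitData(base, syncDataset)
        lfp_by_probe[p] = getLFPData(base, syncDataset)  # (lfp_array, timestamps) or (None, None)
    return units_by_probe, lfp_by_probe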
def probe_sync_report(probe_dirs, syncDataset, FIG_SAVE_DIR, prefix=''):
    bs_t, bs = probeSync.get_sync_barcodes(syncDataset)

    alignment_dict = {}
    for ip, probe in enumerate(probe_dirs):
        p_name = probe.split('_')[-2][-1]
        alignment_dict[p_name] = {}

        be_t, be = probeSync.get_ephys_barcodes(probe)
        shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
            bs_t, bs, be_t, be, 0, 30000)

        alignment_dict[p_name]['shift'] = float(shift)
        alignment_dict[p_name]['sample_rate'] = float(p_sampleRate)

    save_file = os.path.join(FIG_SAVE_DIR, prefix + 'probe_sync_registration.json')
    save_json(alignment_dict, save_file)
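
# The JSON written by probe_sync_report can be read back to convert raw 30 kHz probe
# sample indices into sync (master clock) time, mirroring the
# (timestamps / sample_rate) - shift expression used in getLFPData. The directory and
# sample index below are illustrative, and this assumes save_json writes standard JSON.
def example_apply_registration(fig_save_dir, probe='A', raw_sample_index=1500000):
    import json
    with open(os.path.join(fig_save_dir, 'probe_sync_registration.json'), 'r') as f:
        alignment = json.load(f)
    t_sync = raw_sample_index / alignment[probe]['sample_rate'] - alignment[probe]['shift']
    print('sample %d on probe %s occurs at %.4f s on the sync clock'
          % (raw_sample_index, probe, t_sync))
    return t_sync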
def getLFPData(dataDir, pid, syncDataset, probePXIDict, probeGen='3b', num_channels=384):
    if '3a' in probeGen:
        lfp_data_dir = glob.glob(os.path.join(dataDir, '*probe' + pid + '_sorted',
                                              'continuous', 'Neuropix-3a-100.1'))[0]
        events_dir = glob.glob(os.path.join(dataDir, '*probe' + pid + '_sorted',
                                            'events', 'Neuropix-3a-100.0', 'TTL_1'))[0]
    elif '3b' in probeGen:
        probeDirName = 'Neuropix-PXI-' + probePXIDict[pid]
        lfp_data_dir = os.path.join(dataDir, probeDirName + '-LFP')
        events_dir = os.path.join(dataDir, 'events', probeDirName, 'TTL_1')
    elif probeGen == 'pipeline':
        baseString = os.path.basename(dataDir)
        probeBase = os.path.join(dataDir, baseString + '_probe' + pid + '_sorted')
        lfp_data_dir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.1')
        events_dir = os.path.join(probeBase, r'events\\Neuropix-PXI-100.0\\TTL_1')

    lfp_data_file = os.path.join(lfp_data_dir, 'continuous.dat')
    if not os.path.exists(lfp_data_file):
        print('Could not find LFP data at ' + lfp_data_file)
        return None, None

    lfp_data = np.memmap(lfp_data_file, dtype='int16', mode='r')
    lfp_data_reshape = np.reshape(lfp_data, [int(lfp_data.size / num_channels), -1])

    time_stamps = np.load(os.path.join(lfp_data_dir, 'lfp_timestamps.npy'))

    # Get barcodes from sync file
    if 'barcode' in syncDataset.line_labels:
        bRising, bFalling = get_sync_line_data(syncDataset, 'barcode')
    elif 'barcodes' in syncDataset.line_labels:
        bRising, bFalling = get_sync_line_data(syncDataset, 'barcodes')
    bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)

    # Get barcodes from ephys data
    if '03122019' in dataDir and 'slot3' in events_dir:
        # files on slot3 for this day saved extra bytes at beginning,
        # must skip them to get the right time stamps
        channel_states = np.load(
            r"Z:\03122019_416656\events\Neuropix-PXI-slot2-probe1\TTL_1\channel_states.npy")
        event_times_file = open(os.path.join(events_dir, 'event_timestamps.npy'), 'rb')
        event_times_file.seek(8 * 22 + 1)
        event_times = np.fromfile(event_times_file, dtype='<u8')[:channel_states.size]
        lfp_data_reshape = lfp_data_reshape[:time_stamps.size]
    elif '06122019' in dataDir:
        good_channel_states = np.load(
            r"Z:\06122019_423745\events\Neuropix-PXI-slot3-probe1\TTL_1\channel_states.npy")
        good_event_times = np.load(
            r"Z:\06122019_423745\events\Neuropix-PXI-slot3-probe1\TTL_1\event_timestamps.npy")
        channel_states = np.load(
            os.path.join(events_dir, 'channel_states.npy'))[:good_channel_states.size]
        event_times = np.load(
            os.path.join(events_dir, 'event_timestamps.npy'))[:good_event_times.size]
    else:
        channel_states = np.load(os.path.join(events_dir, 'channel_states.npy'))
        event_times = np.load(os.path.join(events_dir, 'event_timestamps.npy'))

    beRising = event_times[channel_states > 0] / 30000.
    beFalling = event_times[channel_states < 0] / 30000.
    be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)

    if '03212019' in dataDir:
        be_t = be_t[5:]
        be = be[5:]

    # Compute time shift between ephys and sync
    shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
        bs_t, bs, be_t, be, 0, 30000)

    if '03212019' in dataDir:
        shift = -3.6950408520530686

    time_stamps_shifted = (time_stamps / p_sampleRate) - shift

    return lfp_data_reshape, time_stamps_shifted
def getUnitData(dataDir, syncDataset, probeID, probePXIDict, probeGen='3b'):
    if probeGen == '3a':
        probeDir = glob.glob(os.path.join(dataDir, '*Probe' + probeID + '_sorted'))[0]
        probeTTLDir = os.path.join(probeDir, 'events\\Neuropix-3a-100.0\\TTL_1')
        probeSpikeDir = os.path.join(probeDir, 'continuous\\Neuropix-3a-100.0')
    elif probeGen == '3b':
        eventsDir = os.path.join(dataDir, 'events')
        probeTTLDir = os.path.join(
            os.path.join(eventsDir, 'Neuropix-PXI-' + probePXIDict[probeID]), 'TTL_1')
        probeSpikeDir = os.path.join(
            dataDir, 'Neuropix-PXI-' + probePXIDict[probeID] + '-AP_sortingResults')
    elif probeGen == 'pipeline':
        baseString = os.path.basename(dataDir)
        probeBase = os.path.join(dataDir, baseString + '_probe' + probeID + '_sorted')
        probeTTLDir = os.path.join(probeBase, r'events\\Neuropix-PXI-100.0\\TTL_1')
        probeSpikeDir = os.path.join(probeBase, r'continuous\\Neuropix-PXI-100.0')

    print(probeTTLDir)
    print(probeSpikeDir)

    # Get barcodes from sync file
    if 'barcode' in syncDataset.line_labels:
        bRising, bFalling = get_sync_line_data(syncDataset, 'barcode')
    elif 'barcodes' in syncDataset.line_labels:
        bRising, bFalling = get_sync_line_data(syncDataset, 'barcodes')
    bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)

    # Get barcodes from ephys data
    if '03122019' in dataDir and 'slot3' in probeTTLDir:
        # files on slot3 for this day saved extra bytes at beginning,
        # must skip them to get the right time stamps
        channel_states = np.load(
            r"Z:\03122019_416656\events\Neuropix-PXI-slot2-probe1\TTL_1\channel_states.npy")
        event_times_file = open(os.path.join(probeTTLDir, 'event_timestamps.npy'), 'rb')
        event_times_file.seek(8 * 22 + 1)
        event_times = np.fromfile(event_times_file, dtype='<u8')[:channel_states.size]
    elif '06122019' in dataDir:
        good_channel_states = np.load(
            r"Z:\06122019_423745\events\Neuropix-PXI-slot3-probe1\TTL_1\channel_states.npy")
        good_event_times = np.load(
            r"Z:\06122019_423745\events\Neuropix-PXI-slot3-probe1\TTL_1\event_timestamps.npy")
        channel_states = np.load(
            os.path.join(probeTTLDir, 'channel_states.npy'))[:good_channel_states.size]
        event_times = np.load(
            os.path.join(probeTTLDir, 'event_timestamps.npy'))[:good_event_times.size]
    else:
        channel_states = np.load(os.path.join(probeTTLDir, 'channel_states.npy'))
        event_times = np.load(os.path.join(probeTTLDir, 'event_timestamps.npy'))

    beRising = event_times[channel_states > 0] / 30000.
    beFalling = event_times[channel_states < 0] / 30000.
    be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)

    if '03212019' in dataDir:
        be_t = be_t[5:]
        be = be[5:]

    # Compute time shift between ephys and sync
    shift, p_sampleRate, m_endpoints = ecephys.get_probe_time_offset(
        bs_t, bs, be_t, be, 0, 30000)

    if '03212019' in dataDir:
        shift = -3.6950408520530686
    # be_t_shifted = (be_t/(p_sampleRate/30000)) - shift  # just to check that the shift and scale are right

    # Get unit spike times
    units = load_spike_info(probeSpikeDir, p_sampleRate, shift)

    return units
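
# Usage sketch (illustrative) for the generation-aware loaders above. The probePXIDict
# mapping from probe letter to PXI device name and the session directory are assumptions;
# the real mapping depends on the rig configuration (the hardcoded paths above suggest
# names like 'slot2-probe1').
def example_load_3b_probe(syncDataset):
    dataDir = r'D:\example_3b_session'  # hypothetical session directory
    probePXIDict = {'A': 'slot2-probe1', 'B': 'slot2-probe2'}  # illustrative mapping only
    units = getUnitData(dataDir, syncDataset, 'A', probePXIDict, probeGen='3b')
    lfp, lfp_times = getLFPData(dataDir, 'A', syncDataset, probePXIDict, probeGen='3b')
    return units, lfp, lfp_times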
                                'Neuropix-PXI-' + pxiDict[probeLabel]), 'TTL_1')

    # get barcodes from sync file
    bRising, bFalling = get_sync_line_data(syncDataset, 'barcode')
    bs_t, bs = ecephys.extract_barcodes_from_times(bRising, bFalling)

    # get barcodes from ephys data
    channel_states = np.load(os.path.join(probeEventsDir, 'channel_states.npy'))
    event_times = np.load(os.path.join(probeEventsDir, 'event_timestamps.npy'))

    beRising = event_times[channel_states > 0] / 30000.
    beFalling = event_times[channel_states < 0] / 30000.
    be_t, be = ecephys.extract_barcodes_from_times(beRising, beFalling)

    # compute time shift between ephys and sync
    probeShift, probeSampleRate, m_endpoints = ecephys.get_probe_time_offset(
        bs_t, bs, be_t, be, 0, 30000)

    # unit data
    kilosortData = {
        key: np.load(os.path.join(probeSpikeDataDir, key + '.npy'))
        for key in ('spike_clusters', 'spike_times', 'templates', 'spike_templates',
                    'channel_positions', 'amplitudes')
    }
    clusterIDs = pd.read_csv(os.path.join(probeSpikeDataDir, 'cluster_KSLabel.tsv'), sep='\t')
    unitIDs = np.unique(kilosortData['spike_clusters'])

    unitData = {}
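
# The fragment above ends just as the per-unit dictionary is being built. A hedged sketch
# of how such a loop might continue is below: spike sample indices are converted to sync
# time with the same samples / sample_rate - shift relation used elsewhere in this module.
# The field names stored in unitData are illustrative, not necessarily what the original
# code used, and the column names assume the standard phy cluster_KSLabel.tsv layout.
def example_build_unit_data(kilosortData, clusterIDs, unitIDs, probeSampleRate, probeShift):
    unitData = {}
    for u in unitIDs:
        in_unit = kilosortData['spike_clusters'].flatten() == u
        spike_samples = kilosortData['spike_times'][in_unit].flatten()
        unitData[u] = {
            # seconds on the sync (master) clock
            'times': spike_samples / probeSampleRate - probeShift,
            'amplitudes': kilosortData['amplitudes'][in_unit].flatten(),
            # KSLabel ('good'/'mua') from cluster_KSLabel.tsv
            'label': clusterIDs.loc[clusterIDs['cluster_id'] == u, 'KSLabel'].values,
        }
    return unitData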