def filter(self):
    """
    Chop the continuous session recording into chunks corresponding to events.

    Chunks are selected either by explicit ``self.start_offsets`` (when
    non-empty) or by the eegoffsets of ``self.events`` that belong to this
    session's dataroot.

    :return: TimeSeriesX object with the chopped session
    """
    use_explicit_offsets = len(self.start_offsets) > 0

    if use_explicit_offsets:
        start_offsets = self.start_offsets
        chopping_axis_name = 'start_offsets'
        chopping_axis_data = start_offsets
    else:
        evs = self.events[self.events.eegfile == self.session_data.attrs['dataroot']]
        start_offsets = evs.eegoffset
        chopping_axis_name = 'events'
        chopping_axis_data = evs

    samplerate = float(self.session_data['samplerate'])
    offset_time_array = self.session_data['offsets']

    event_chunk_size, start_point_shift = self.get_event_chunk_size_and_start_point_shift(
        eegoffset=start_offsets[0],
        samplerate=samplerate,
        offset_time_array=offset_time_array)

    # Common time axis for every chunk, anchored at start_time - buffer_time.
    event_time_axis = np.arange(event_chunk_size) * (1.0 / samplerate) + (
        self.start_time - self.buffer_time)

    data_list = []
    for i, eegoffset in enumerate(start_offsets):
        # First sample at or after this event's offset, shifted to the chunk start.
        start_chop_pos = np.where(offset_time_array >= eegoffset)[0][0]
        start_chop_pos += start_point_shift

        selector_array = np.arange(start=start_chop_pos,
                                   stop=start_chop_pos + event_chunk_size)

        chunk = self.session_data.isel(time=selector_array)
        chunk['time'] = event_time_axis
        chunk['start_offsets'] = [i]
        data_list.append(chunk)

    ev_concat_data = xr.concat(data_list, dim='start_offsets')
    # Relabel the concatenation axis to the logical chopping axis.
    ev_concat_data = ev_concat_data.rename({'start_offsets': chopping_axis_name})
    ev_concat_data[chopping_axis_name] = chopping_axis_data

    ev_concat_data['samplerate'] = samplerate
    ev_concat_data.attrs['start_time'] = self.start_time
    ev_concat_data.attrs['end_time'] = self.end_time
    ev_concat_data.attrs['buffer_time'] = self.buffer_time

    return TimeSeriesX(ev_concat_data)
def compute_features_recalls_normalization_params(session_list, use_session_chopper_for_wavelets=False):
    """
    Compute z-scored wavelet-power features, recall labels, and per-session
    z-scoring parameters for the given sessions.

    Relies on the module-level ``base_events`` record array.
    NOTE(review): assumed to expose ``session`` and ``recalled`` fields and an
    ``eegfile``-style event schema — confirm against the module that defines it.

    :param session_list: iterable of session identifiers to process
    :param use_session_chopper_for_wavelets: when True, compute wavelets one
        session at a time via the session chopper and concatenate them along
        the 'events' axis; otherwise compute wavelets for all selected events
        in one call
    :return: tuple ``(features, recalls, z_score_params_dict)`` where
        ``features`` is an (events x features) array, ``recalls`` is a 1-D int
        array of recall labels, and ``z_score_params_dict`` maps each session
        to a ``ZScoreParams(mean=..., std=...)`` namedtuple
    """
    z_score_params_dict = OrderedDict()

    # Select events belonging to any of the requested sessions.
    # Bug fix: np.bool / np.int were removed in NumPy >= 1.24 (AttributeError);
    # the builtin types are the documented replacements and behave identically.
    sessions_mask = np.zeros(base_events.shape[0], dtype=bool)
    for session in session_list:
        sessions_mask = sessions_mask | (base_events.session == session)
    sessions_evs = base_events[sessions_mask]

    if use_session_chopper_for_wavelets:
        pow_wavelet_list = []
        for session in session_list:
            session_mask = sessions_evs.session == session
            single_session_evs = sessions_evs[session_mask]
            pow_wavelet = compute_event_wavelets_from_session_wavelets(single_session_evs)
            pow_wavelet_list.append(pow_wavelet)
        pow_wavelet = xr.concat(pow_wavelet_list, dim='events')
    else:
        pow_wavelet = compute_wavelets(sessions_evs)

    # -------------- TAKING LOG10 (in place, to avoid an extra full-size copy)
    np.log10(pow_wavelet.data, out=pow_wavelet.data)

    features_list = []
    recalls_list = []
    for session in session_list:
        session_mask = sessions_evs.session == session
        single_session_evs = sessions_evs[session_mask]

        # Axis 2 is assumed to be the events axis — TODO confirm against
        # compute_wavelets' output layout.
        log_session_wavelet = pow_wavelet[:, :, session_mask, :]

        session_zscore_mean_powers, mean, std = compute_zscored_features(
            log_pow_wavelet=log_session_wavelet)

        recalls_list.append(single_session_evs.recalled.astype(int))
        features_list.append(session_zscore_mean_powers)

        # packaging into namedtuple for saving
        z_score_params_dict[session] = ZScoreParams(mean=mean, std=std)

    features = np.concatenate(features_list, axis=0)
    recalls = np.concatenate(recalls_list, axis=0)

    return features, recalls, z_score_params_dict
def read_events_data(self):
    """
    Read EEG data for individual events.

    Reads each underlying session file once, chops it into per-event epochs,
    restores the caller's original event ordering, and drops events whose data
    could not be read (setting ``self.removed_corrupt_events`` and
    ``self.event_ok_mask_sorted`` when that happens).

    :return: TimeSeriesX object (channels x events x time) with data for
        individual events
    """
    self.event_ok_mask_sorted = None  # reset self.event_ok_mask_sorted
    evs = self.events

    raw_readers, original_dataroots = self.__create_base_raw_readers()

    # used for restoring original order of the events
    ordered_indices = np.arange(len(evs))
    event_indices_list = []
    events = []

    ts_array_list = []
    event_ok_mask_list = []

    # Cleanup: dropped the unused enumerate counter and the unused
    # start_extend_time = time.time() timing local from the original.
    for raw_reader, dataroot in zip(raw_readers, original_dataroots):
        ts_array, read_ok_mask = raw_reader.read()
        # An event is OK only if every channel was read successfully.
        event_ok_mask_list.append(np.all(read_ok_mask, axis=0))

        ind = np.atleast_1d(evs.eegfile == dataroot)
        event_indices_list.append(ordered_indices[ind])
        events.append(evs[ind])
        ts_array_list.append(ts_array)

    event_indices_array = np.hstack(event_indices_list)
    event_indices_restore_sort_order_array = event_indices_array.argsort()

    eventdata = xr.concat(ts_array_list, dim='start_offsets')

    samplerate = float(eventdata['samplerate'])
    tdim = np.arange(eventdata.shape[-1]) * (1.0 / samplerate) + (
        self.start_time - self.buffer_time)
    cdim = eventdata['channels']
    edim = np.concatenate(events).view(np.recarray).copy()

    attrs = eventdata.attrs.copy()

    # constructing TimeSeries object
    eventdata = TimeSeriesX(eventdata.data,
                            dims=['channels', 'events', 'time'],
                            coords={'channels': cdim,
                                    'events': edim,
                                    'time': tdim,
                                    'samplerate': samplerate})
    eventdata.attrs = attrs

    # restoring original order of the events
    eventdata = eventdata[:, event_indices_restore_sort_order_array, :]

    event_ok_mask = np.hstack(event_ok_mask_list)
    event_ok_mask_sorted = event_ok_mask[event_indices_restore_sort_order_array]

    # removing bad events
    if np.any(~event_ok_mask_sorted):
        self.removed_corrupt_events = True
        self.event_ok_mask_sorted = event_ok_mask_sorted
        eventdata = eventdata[:, event_ok_mask_sorted, :]

    return eventdata
def read_events_data(self):
    """
    Read EEG data for individual events.

    Reads each underlying session file once, chops it into per-event epochs,
    restores the caller's original event ordering, and drops events whose data
    could not be read (setting ``self.removed_corrupt_events`` and
    ``self.event_ok_mask_sorted`` when that happens).

    :return: TimeSeriesX object (channels x events x time) with data for
        individual events
    """
    self.event_ok_mask_sorted = None  # reset self.event_ok_mask_sorted
    evs = self.events

    raw_readers, original_dataroots = self.__create_base_raw_readers()

    # used for restoring original order of the events
    ordered_indices = np.arange(len(evs))
    event_indices_list = []
    events = []

    ts_array_list = []
    event_ok_mask_list = []

    # Cleanup: dropped the unused enumerate counter and the unused
    # start_extend_time = time.time() timing local from the original.
    for raw_reader, dataroot in zip(raw_readers, original_dataroots):
        ts_array, read_ok_mask = raw_reader.read()
        # An event is OK only if every channel was read successfully.
        event_ok_mask_list.append(np.all(read_ok_mask, axis=0))

        ind = np.atleast_1d(evs.eegfile == dataroot)
        event_indices_list.append(ordered_indices[ind])
        events.append(evs[ind])
        ts_array_list.append(ts_array)

    event_indices_array = np.hstack(event_indices_list)
    event_indices_restore_sort_order_array = event_indices_array.argsort()

    eventdata = xr.concat(ts_array_list, dim='start_offsets')

    samplerate = float(eventdata['samplerate'])
    tdim = np.arange(eventdata.shape[-1]) * (1.0 / samplerate) + (
        self.start_time - self.buffer_time)
    cdim = eventdata['channels']
    edim = np.concatenate(events).view(np.recarray).copy()

    attrs = eventdata.attrs.copy()

    # constructing TimeSeries object
    eventdata = TimeSeriesX(eventdata.data,
                            dims=['channels', 'events', 'time'],
                            coords={'channels': cdim,
                                    'events': edim,
                                    'time': tdim,
                                    'samplerate': samplerate})
    eventdata.attrs = attrs

    # restoring original order of the events
    eventdata = eventdata[:, event_indices_restore_sort_order_array, :]

    event_ok_mask = np.hstack(event_ok_mask_list)
    event_ok_mask_sorted = event_ok_mask[event_indices_restore_sort_order_array]

    # removing bad events
    if np.any(~event_ok_mask_sorted):
        self.removed_corrupt_events = True
        self.event_ok_mask_sorted = event_ok_mask_sorted
        eventdata = eventdata[:, event_ok_mask_sorted, :]

    return eventdata