def features_one_file(f): file_name = os.path.basename(f).split(".")[0] treatment, animal = file_name.split("_") pol = polygram_from_pkl(f) #eegs = decompose_signal(pol["EEG_parietal_cereb"], levels=[1,2,3,4,5,6]) eegs = decompose_signal(pol["EEG_parietal_frontal"], levels=[1,2,3,4,5,6]) emgs = decompose_signal(pol["EMG_REF"],[1,2,3, 4], keep_a=False) # pol2 = eegs.merge(pol["EEG_parietal_frontal"]) pol2 = pol2.merge(emgs) pol2 = pol2.merge(pol["EMG_REF"]) pol2 = pol2.merge(pol["vigilance_state"]) ##normalise pol2 = pol2.map_signal_channels(lambda c : (c - np.mean(c))/ np.std(c)) feature_factory = [ PowerFeatures(), HjorthFeatures(), # NonLinearFeatures(), # # # FIXME skip for now -> speed EntropyFeatures(), FractalFeatures(), VigilState(),] all_rows = [] print "processing " + f old_p = 0 for t, w in pol2.iter_window(WINDOW_SIZE, WINDOW_LAG): dfs = [] for c in w.channels: for ff in feature_factory: feature_vec = ff.make_vector(c) if not feature_vec is None: dfs.append(feature_vec) p = int(100 * t/ pol2.duration.total_seconds()) if p != old_p: print f, p, "%" old_p = p row = pd.concat(dfs, axis=1) row.index = [t] all_rows.append(row) tmp_df = pd.concat(all_rows) tmp_df["animal"] = animal tmp_df["treatment"] = treatment return tmp_df
def features_one_file(f):
    """Build a per-window feature DataFrame from one polygram pickle.

    NOTE(review): this is a duplicate definition of ``features_one_file``;
    in a single module the later definition shadows the earlier one.
    """
    # File name convention: "<treatment>_<animal>.pkl".
    file_name = os.path.basename(f).split(".")[0]
    treatment, animal = file_name.split("_")
    pol = polygram_from_pkl(f)
    # eegs = decompose_signal(pol["EEG_parietal_cereb"], levels=[1,2,3,4,5,6])
    # Wavelet decomposition: 6 EEG levels; 4 EMG levels without the
    # approximation coefficients (keep_a=False).
    eegs = decompose_signal(pol["EEG_parietal_frontal"], levels=[1, 2, 3, 4, 5, 6])
    emgs = decompose_signal(pol["EMG_REF"], [1, 2, 3, 4], keep_a=False)
    # NOTE(review): the line initialising pol2 below is commented out, so the
    # next statement references pol2 before assignment (NameError) — confirm
    # whether it should be re-enabled.
    # pol2 = eegs.merge(pol["EEG_parietal_frontal"])
    pol2 = pol2.merge(emgs)
    pol2 = pol2.merge(pol["EMG_REF"])
    pol2 = pol2.merge(pol["vigilance_state"])
    # Normalise: z-score every signal channel.
    pol2 = pol2.map_signal_channels(lambda c: (c - np.mean(c)) / np.std(c))
    feature_factory = [
        PowerFeatures(),
        HjorthFeatures(),
        # NonLinearFeatures(),  # FIXME skip for now -> speed
        EntropyFeatures(),
        FractalFeatures(),
        VigilState(),
    ]
    all_rows = []
    print "processing " + f
    old_p = 0
    for t, w in pol2.iter_window(WINDOW_SIZE, WINDOW_LAG):
        # Extractors may return None for inapplicable channels; keep the rest.
        dfs = []
        for c in w.channels:
            for ff in feature_factory:
                feature_vec = ff.make_vector(c)
                if not feature_vec is None:
                    dfs.append(feature_vec)
        # Progress report, printed only on whole-percent steps.
        p = int(100 * t / pol2.duration.total_seconds())
        if p != old_p:
            print f, p, "%"
            old_p = p
        row = pd.concat(dfs, axis=1)
        row.index = [t]
        all_rows.append(row)
    tmp_df = pd.concat(all_rows)
    tmp_df["animal"] = animal
    tmp_df["treatment"] = treatment
    return tmp_df
def data_for_one_file(file, channel_name, dfs):
    """Accumulate per-window periodograms of one channel, grouped by state.

    :param file: path to a polygram pickle readable by polygram_from_pkl
    :param channel_name: name of the signal channel to analyse
    :param dfs: dict mapping vigilance-state label -> list of periodograms;
                mutated in place and also returned for convenience.
    :return: the (updated) ``dfs`` dict.
    """
    pol = polygram_from_pkl(file)
    for t, w in pol.iter_window(WINDOW_SIZE, WINDOW_LAG):
        eeg = w[channel_name]
        ann = w["vigilance_state"]
        # Keep only windows whose annotation probabilities are all positive.
        # FIX: the original `ann.probas.all() > 0` compared the *boolean*
        # result of all() against 0; the element-wise form below states the
        # intended condition directly (equivalent for non-negative probas).
        if (ann.probas > 0).all():
            y = ann.values[0]
            periodo = make_periodogram(eeg)
            # setdefault replaces the try/append/except-KeyError grouping idiom.
            dfs.setdefault(y, []).append(periodo)
    return dfs
def data_for_one_file(file, channel_name, dfs):
    """Group per-window periodograms of ``channel_name`` by vigilance state.

    ``dfs`` maps a vigilance-state label to the list of periodograms seen so
    far; it is updated in place and returned.
    """
    polygram = polygram_from_pkl(file)
    for _t, window in polygram.iter_window(WINDOW_SIZE, WINDOW_LAG):
        signal = window[channel_name]
        annotation = window["vigilance_state"]
        # Skip windows whose annotation probabilities are not all non-zero.
        if not annotation.probas.all() > 0:
            continue
        label = annotation.values[0]
        periodogram = make_periodogram(signal)
        # Append under the state label, creating the bucket on first use.
        dfs.setdefault(label, []).append(periodogram)
    return dfs
from pyrem.time_series import Signal
from pyrem.polygram import Polygram
import pylab as pl
import pandas as pd
import numpy as np

# NOTE(review): exploratory/scratch script (Python 2 — uses print statements).
# `polygram_from_pkl` and `decompose_signal` are called below but are not
# imported in this block — presumably provided elsewhere in the project;
# verify before running standalone.

# DATA_FILE_PATTERN=
# df = pd.read_csv("/tmp/telc4_res.csv")
# an = Annotation(df["pred"], 0.2, df["conf_preds"], name="prediction")
# pol1 = Polygram([an])

# Load one recording and show the EEG wavelet bands next to the raw channel
# over a ten-minute slice.
pol = polygram_from_pkl("/data/pyrem/Ellys/pkls/TelC_4.pkl")
# pol = pol1.merge(pol)
z = decompose_signal(pol["EEG_parietal_frontal"], [1,2,3,4,5,6])
z = z.merge(pol["EEG_parietal_frontal"])
z["16h45m":"16h55m"].show()

# Power in the level-6 detail coefficients: square the cD_6 band, smooth with
# a 25-sample moving average, and take log10.
dwtp = decompose_signal(pol["EEG_parietal_frontal"], [6], keep_a=False)
t = dwtp[0] ** 2
N=25  # moving-average window length (samples)
tt = np.log10(np.convolve(t, np.ones((N,))/N,"same"))
t = Signal(tt,t.fs,name="Power in cD_6")
print t.duration