def DTW_features(pcg, transitions, interval='RR', constraint='sakoe_chiba', k=0.1, norm="resample", pre=None, downsample_rate=2, suf=None, sigma=None):
    """Statistics of DTW distances between a recording's heart-cycle intervals.

    Computes a pairwise DTW distance matrix over all segments of the requested
    interval type, then summarizes (a) the distances from every segment to the
    medoid segment and (b) the distances between consecutive segments.

    Parameters
    ----------
    pcg : array-like
        Raw PCG signal; preprocessed here via ``dtw_preprocess``.
    transitions : array-like
        Heart-state transition points used to segment the signal.
    interval : str
        Segment type to extract ('RR', 'S1', 'Sys', 'S2', 'Dia').
    constraint, k :
        DTW band constraint name and width, forwarded to ``dtw_distances``.
    norm : str
        'resample' resizes every interval to ``ALENGTH[interval]`` before DTW;
        'path' normalizes each DTW distance by its warping-path length instead.
        Any other value raises ``ValueError``.
    pre : str or None
        Preprocessing mode forwarded to ``dtw_preprocess``.
    downsample_rate : int
        Each interval is resampled to ``len(i) // downsample_rate`` samples.
    suf : unused
        Kept for signature compatibility with the other feature extractors.
    sigma : float or None
        If given (and non-zero), distances are replaced by negated affinities
        ``-affinity(dist, sigma)`` before the statistics are computed.

    Returns
    -------
    np.ndarray
        8 features: mean, std, Q1, Q2, Q3 of medoid distances, then
        Q1, Q2, Q3 of contiguous (i, i+1) distances.
    """
    k = float(k)
    if sigma is not None:
        sigma = float(sigma)
    pcg = dtw_preprocess(pcg, pre=pre)
    # Only the 'resample' normalization resizes intervals at extraction time.
    resize = ALENGTH[interval] if norm == 'resample' else None
    intervals = get_intervals(pcg, transitions, interval=interval, resize=resize)
    intervals = [resample(i, len(i) // downsample_rate) for i in intervals]
    if norm not in ['path', 'resample']:
        raise ValueError("Invalid normalization {0}".format(norm))
    path_norm = norm == 'path'
    dist_matrix = dtw_distances(intervals, n_jobs=-1, constraint=constraint,
                                k=k, path_norm=path_norm, normalize=True)
    # Replace non-finite entries so the medoid search below is well defined.
    dist_matrix = finite_matrix(dist_matrix)
    # Medoid = segment with the smallest total distance to all others.
    # NOTE: chosen on the raw distances, before any affinity transform.
    medoid_index = np.argmin(np.sum(dist_matrix, axis=0))
    if sigma:
        dist_matrix = -affinity(dist_matrix, sigma)
    medoid_distances = dist_matrix[:, medoid_index]
    # Drop any remaining infinite distances before computing statistics.
    medoid_distances = medoid_distances[np.isfinite(medoid_distances)]
    m_MDTW, s_MDTW = _mean_std(medoid_distances)
    Q1_MDTW, Q2_MDTW, Q3_MDTW = np.percentile(medoid_distances, [25, 50, 75])
    # Distances between temporally adjacent segments (superdiagonal).
    contiguous_distances = np.array(
        [dist_matrix[i, i + 1] for i in np.arange(len(dist_matrix) - 1)])
    contiguous_distances = contiguous_distances[np.isfinite(
        contiguous_distances)]
    Q1_CDTW, Q2_CDTW, Q3_CDTW = np.percentile(contiguous_distances,
                                              [25, 50, 75])
    features = np.array(
        [m_MDTW, s_MDTW, Q1_MDTW, Q2_MDTW, Q3_MDTW, Q1_CDTW, Q2_CDTW, Q3_CDTW])
    return features
def random_choice(pcgs, states, records, size):
    """Pick ``size`` records at random and one random interval index for each.

    Returns a pair ``(chosen_records, interval_indexes)`` suitable for
    ``build_templates``: for every sampled record, one index into its list of
    extracted intervals is drawn uniformly at random.
    """
    chosen = list(np.random.choice(records, size, replace=False))
    picks = []
    for rec in chosen:
        signal = pcgs[rec]
        trans = get_transitions(states[rec])
        segments = get_intervals(signal, trans)
        picks.append(np.random.choice(range(len(segments))))
    return chosen, picks
def cinter_DTW_features(pcg, transitions, templates='random_templates', pre=None, constraint='sakoe_chiba', k=0.1, suf=None, sigma=None):
    """Mean and std of DTW distances to stored templates, per interval type.

    For each interval type ('RR', 'S1', 'Sys', 'S2', 'Dia'), compares up to 50
    of the recording's segments against the corresponding template set loaded
    from ``TEMPLATE_FOLDER + templates`` and summarizes the distance matrix.

    Parameters
    ----------
    pcg : array-like
        Raw PCG signal; preprocessed here via ``dtw_preprocess``.
    transitions : array-like
        Heart-state transition points used to segment the signal.
    templates : str
        Template file name (relative to ``TEMPLATE_FOLDER``) mapping each
        interval name to a list of template segments.
    pre : str or None
        Preprocessing mode forwarded to ``dtw_preprocess``.
    constraint, k :
        DTW band constraint name and width, forwarded to ``dtw_distances``.
    suf : unused
        Kept for signature compatibility with the other feature extractors.
    sigma : float or None
        If given (and non-zero), each distance matrix is replaced by negated
        affinities ``-affinity(dist, sigma)`` before summarizing.

    Returns
    -------
    list
        10 features: the 5 per-interval means followed by the 5 per-interval
        standard deviations.
    """
    k = float(k)
    if sigma is not None:
        sigma = float(sigma)
    templates = custom_loadmat(TEMPLATE_FOLDER + templates)
    # Fix: the names attribute was mistakenly attached to inter_DTW_features
    # (copy-paste); it must describe THIS function's output.
    cinter_DTW_features.names = [
        "%s_d%02d" % (q, i) for q in ['mean', 'std']
        for i, _ in enumerate(templates)
    ]
    pcg = dtw_preprocess(pcg, pre=pre)
    distances = []
    for interval in ['RR', 'S1', 'Sys', 'S2', 'Dia']:
        templates_i = templates[interval]
        intervals = get_intervals(pcg, transitions, interval=interval,
                                  resize=ALENGTH[interval] // interDTW_down)
        # Cap at 50 segments to bound the DTW cost.
        intervals = [
            resample(i, ALENGTH[interval] // interDTW_down) for i in intervals
        ][:50]
        dist_matrix = dtw_distances(intervals, templates_i, n_jobs=-1,
                                    constraint=constraint, k=k)
        dist_matrix = finite_matrix(dist_matrix)
        if sigma:
            dist_matrix = -affinity(dist_matrix, sigma)
        distances.append(dist_matrix)
    RR_mean, S1_mean, Sys_mean, S2_mean, Dia_mean = [
        np.mean(d) for d in distances
    ]
    RR_std, S1_std, Sys_std, S2_std, Dia_std = [np.std(d) for d in distances]
    features = [
        RR_mean, S1_mean, Sys_mean, S2_mean, Dia_mean,
        RR_std, S1_std, Sys_std, S2_std, Dia_std
    ]
    return features
def build_templates(pcgs, states, records, indexes):
    """Collect one template segment per record for every interval type.

    For each ``(record, index)`` pair, extracts the index-th interval of every
    type ('RR', 'S1', 'Sys', 'S2', 'Dia'), resized by ``interDTW_down``, and
    groups the results by interval name.

    Returns a plain dict mapping interval name -> list of template segments.
    """
    collected = defaultdict(list)
    interval_names = ['RR', 'S1', 'Sys', 'S2', 'Dia']
    for rec, idx in tqdm(zip(records, indexes), total=len(records)):
        signal = pcgs[rec]
        for name in interval_names:
            trans = get_transitions(states[rec])
            segment = get_intervals(signal, trans, interval=name,
                                    resize=ALENGTH[name] // interDTW_down)[idx]
            collected[name].append(segment)
    return dict(collected)
def mfcc_coefs(pcg, transitions, interval='RR'):
    """MFCCs over the concatenated, resized intervals of one type.

    Segments the normalized PCG into intervals of the requested type, resizes
    each to ``ALENGTH[interval]``, concatenates them, and computes 13 MFCCs
    with a non-overlapping window exactly one interval long (winstep == winlen),
    i.e. one coefficient frame per interval.
    """
    fs = 1000
    normalized = _normalize(pcg)
    segments = get_intervals(normalized, transitions, interval=interval,
                             resize=ALENGTH[interval])
    joined = np.concatenate(segments)
    window_sec = ALENGTH[interval] / fs
    coeffs = mfcc(joined, samplerate=fs, winlen=window_sec,
                  winstep=window_sec, numcep=13, nfilt=26, nfft=1024)
    return coeffs
def inter_DTW_features(pcg, transitions, interval='RR', templates='random_templates.mat', pre=None, constraint='sakoe_chiba', k=0.1, suf=None, sigma=None):
    """Per-template mean and std of DTW distances for one interval type.

    Compares every segment of the requested interval type against each stored
    template and returns, per template, the mean and standard deviation of the
    distances (columns of the segments-x-templates distance matrix).

    Parameters
    ----------
    pcg : array-like
        Raw PCG signal; preprocessed here via ``dtw_preprocess``.
    transitions : array-like
        Heart-state transition points used to segment the signal.
    interval : str
        Segment type to extract ('RR', 'S1', 'Sys', 'S2', 'Dia').
    templates : str
        Template file name (relative to ``TEMPLATE_FOLDER``); only the entry
        for ``interval`` is used.
    pre : str or None
        Preprocessing mode forwarded to ``dtw_preprocess``.
    constraint, k :
        DTW band constraint name and width, forwarded to ``dtw_distances``.
    suf : unused
        Kept for signature compatibility with the other feature extractors.
    sigma : float or None
        If given (and non-zero), distances are replaced by negated affinities
        ``-affinity(dist, sigma)`` before summarizing.

    Returns
    -------
    np.ndarray
        Concatenation of the per-template means followed by the per-template
        standard deviations.
    """
    k = float(k)
    if sigma is not None:
        sigma = float(sigma)
    templates = custom_loadmat(TEMPLATE_FOLDER + templates)[interval]
    inter_DTW_features.names = [
        "%s_d%02d" % (q, i) for q in ['mean', 'std']
        for i, _ in enumerate(templates)
    ]
    pcg = dtw_preprocess(pcg, pre=pre)
    intervals = get_intervals(pcg, transitions, interval=interval,
                              resize=ALENGTH[interval] // interDTW_down)
    dist_matrix = dtw_distances(intervals, templates, n_jobs=-1,
                                constraint=constraint, k=k)
    dist_matrix = finite_matrix(dist_matrix)
    if sigma:
        dist_matrix = -affinity(dist_matrix, sigma)
    mean_dtw = np.mean(dist_matrix, axis=0)
    # Fix: was np.mean, duplicating the mean features; the 'std' half of the
    # feature vector must actually be the standard deviation.
    std_dtw = np.std(dist_matrix, axis=0)
    features = np.concatenate([mean_dtw, std_dtw])
    return features
def wavelet_features(pcg, transitions, interval='RR', wavelet='rbio3.9', level=3, suf=None):
    """Mean/std of per-band wavelet energies across a recording's intervals.

    Each interval is zero-padded to a multiple of ``2**level``, decomposed
    with a discrete wavelet transform, and the squared L2 norm (energy) of
    each of the four coefficient bands is recorded. Returns the mean and std
    of each band's energy over all intervals.

    Parameters
    ----------
    pcg : array-like
        Raw PCG signal; normalized here via ``_normalize``.
    transitions : array-like
        Heart-state transition points used to segment the signal.
    interval : str
        Segment type to extract ('RR', 'S1', 'Sys', 'S2', 'Dia').
    wavelet : str
        PyWavelets wavelet name.
    level : int
        Decomposition depth; must be 3 — ``pywt.wavedec`` returns exactly
        ``level + 1`` coefficient arrays and they are unpacked into four here.
    suf : unused
        Kept for signature compatibility with the other feature extractors.

    Returns
    -------
    list
        [m_cA, std_cA, m_cD1, std_cD1, m_cD2, std_cD2, m_cD3, std_cD3].
    """
    level = int(level)
    pcg = _normalize(pcg)
    intervals = get_intervals(pcg, transitions, interval=interval)
    block = 2 ** level
    cAs, cD1s, cD2s, cD3s = [], [], [], []
    for x in intervals:
        # Fix: pad up to the next multiple of 2**level. The old expression
        # (2**level - len(x) % 2**level) appended a full extra block of zeros
        # when len(x) was already aligned; (-len(x)) % block is 0 in that case.
        x = np.concatenate((x, np.zeros(-len(x) % block)))
        cA, cD1, cD2, cD3 = pywt.wavedec(x, wavelet, level=level)
        cAs.append(np.linalg.norm(cA) ** 2)
        cD1s.append(np.linalg.norm(cD1) ** 2)
        cD2s.append(np.linalg.norm(cD2) ** 2)
        cD3s.append(np.linalg.norm(cD3) ** 2)
    m_cA, std_cA = _mean_std(cAs)
    m_cD1, std_cD1 = _mean_std(cD1s)
    m_cD2, std_cD2 = _mean_std(cD2s)
    m_cD3, std_cD3 = _mean_std(cD3s)
    return [m_cA, std_cA, m_cD1, std_cD1, m_cD2, std_cD2, m_cD3, std_cD3]
pre = 'env' fpcgs = {r: dtw_preprocess(pcgs[r], pre=pre) for r in tqdm(records)} if arguments['-o']: filename = arguments['-o'] savemat(filename, fpcgs) print('Saved to {0}'.format(filename)) elif arguments['medoids']: filename = filename.replace('PCG', 'medoids') inters = { r: get_intervals(pcgs[r], get_transitions(states[r]), interval='RR', resize=ALENGTH['RR'] // interDTW_down) for r in tqdm(records) } medoid_indexes = [ dtw_medoid(inters[r], **dtw_params) for r in tqdm(records) ] medoids = {r: inters[r][m] for r, m in zip(records, medoid_indexes)} if arguments['-o']: filename = arguments['-o'] savemat(filename, medoids) print('Saved to {0}'.format(filename))