#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ALM Copyright (C) 2019  Addison Bohannon
"""

import numpy as np
import matplotlib.pyplot as plt
from experiments.utility import load_results
from MulticoreTSNE import MulticoreTSNE as TSNE
import time

SLEEP_CLASS = ['Awake', 'N1', 'N2', 'N3', 'REM']

# Load per-epoch mixing coefficients for subject 8; keep the first start's fit.
_, mixing_coef, labels = load_results('S8-mvar.pickle')
mixing_coef = mixing_coef[0]
# Derive shapes from the data instead of hard-coding (5, 10), matching the
# companion VAR script's style.
# NOTE(review): assumes mixing_coef is 2-D (epochs x components) — confirm.
num_classes = len(SLEEP_CLASS)
avg_mixing_coef = np.zeros((num_classes, mixing_coef.shape[1]))
class_label = np.zeros(num_classes)
# Per-class mean coefficient vectors, later appended as labeled "centroids".
for i, label in enumerate(np.unique(labels)):
    class_label[i] = label
    avg_mixing_coef[i, :] = np.mean(mixing_coef[labels == label], axis=0)
# The combined arrays do not depend on perplexity — hoisted out of the loop.
mixing_coef_combined = np.concatenate((mixing_coef, avg_mixing_coef),
                                      axis=0)
labels_combined = np.concatenate((labels, class_label), axis=0)
# visualize with different tsne perplexities which roughly equates to the number of neighbors in a cluster.
perplexities = [10, 50, 100, 200, 500, 1000]
for perplexity in perplexities:
    tsne = TSNE(perplexity=perplexity, n_jobs=40)
    print("Running TSNE Fit...")
    start_time = time.time()
    dataCombined = tsne.fit_transform(mixing_coef_combined)
# Beispiel #2
# 0
        # Fragment (enclosing loops lost in extraction): convert each stacked
        # AR component estimate back to coefficient form and score it against
        # the generative components D.
        # NOTE(review): NUM_COMPS, MODEL_ORD, SIG_DIM, Dis, D, loss, palm_error,
        # unstack_ar_coef and ar_comp_dist come from the missing surrounding code.
        Dis_pred = np.zeros([NUM_COMPS, MODEL_ORD, SIG_DIM, SIG_DIM])
        for j in range(NUM_COMPS):
            Dis_pred[j] = unstack_ar_coef(Dis[j])
        # Only the distance is kept; the two other return values are discarded.
        d_loss, _, _ = ar_comp_dist(D, Dis_pred)
        loss.append(d_loss)
    palm_error.append(loss)

###################
# save results
###################
# save_results([palm_error, palm_likelihood], 'performance.pickle')

###################
# load results
###################
palm_error, palm_likelihood = load_results('performance.pickle')

# Side-by-side plots: negative log likelihood and component error per iteration.
fig, axs = plt.subplots(1, 2)
fig.set_size_inches(8.5, 5.5)
axs[0].set_xlabel('Iteration', fontsize=12)
axs[1].set_xlabel('Iteration', fontsize=12)
axs[0].set_ylabel('Negative Log Likelihood', fontsize=12)
axs[1].set_ylabel('Component Error', fontsize=12)
# NOTE(review): `colors` is not defined in this fragment — presumably an
# iterable of per-run colors defined in the missing surrounding code; verify.
for likelihood, color in zip(palm_likelihood, colors):
    plt_palm0, = axs[0].plot(likelihood, color=color, zorder=4, linewidth=3.0)
for error, color in zip(palm_error, colors):
    plt_palm0, = axs[1].plot(error, color=color, zorder=4, linewidth=3.0)
axs[0].set_facecolor("#f2f3f4")
# NOTE(review): this call is truncated by the extraction splice; also the
# `b=` keyword was renamed `visible=` in Matplotlib 3.5 — update when the
# full call is recovered.
axs[0].grid(b=True,
            which='major',
            linestyle="-",
    # Fragment: the enclosing `def` line was lost in extraction. Performs
    # `swaps` random entry swaps on matrix R — appears to be a randomization
    # that exchanges column partners of two row entries (confirm against the
    # original source).
    # NOTE(review): R, i, j, k, swaps and nr (presumably numpy.random) come
    # from the missing surrounding code; i, j look like paired row/column
    # index arrays of length k.
    for _ in range(swaps):
        for _ in range(10):
            # Draw two distinct entry indices.
            e1, e2 = nr.randint(k, size=2)
            while e1 == e2:
                e1, e2 = nr.randint(k, size=2)
            a = i[e1]
            b = j[e1]
            c = i[e2]
            d = j[e2]
            # Require four distinct row/column indices so the swap is well defined.
            if a != c and a != d and b != c and b != d:
                break
        # NOTE(review): if all 10 attempts clash, the last (possibly invalid)
        # pair is still swapped below — likely a latent bug in the original.
        R[a, d], R[a, b] = R[a, b], R[a, d]
        R[c, b], R[c, d] = R[c, d], R[c, b]
    return R

# Compute directed-transfer-function (DTF) connectivity per component,
# band-limited to the delta/theta/alpha ranges.
components, _, _ = load_results('S8-mvar.pickle')
components = components[0]
alpha_connectivity = np.zeros([NUM_COMPONENTS, SIGNAL_DIM, SIGNAL_DIM])
beta_connectivity, delta_connectivity, theta_connectivity = np.zeros_like(alpha_connectivity), np.zeros_like(alpha_connectivity), np.zeros_like(alpha_connectivity)


def _band_connectivity(dtf, frequencies, freq_min, freq_max):
    """Return significant edges of the RMS DTF over [freq_min, freq_max].

    Replaces three copy-pasted per-band expressions with one helper.
    """
    in_band = np.array([dtf[k] ** 2 for k, freq in enumerate(frequencies)
                        if freq_min <= freq <= freq_max])
    return keep_significant_edges(np.sqrt(np.mean(in_band, axis=0)))


for j, component_j in enumerate(components):
    transfer_function, frequencies = transfer_function_from_filter(unstack_ar_coef(component_j), SAMPLING_RATE, fft_len=FFT_LEN)
    # Zero the diagonal so self-transfer does not dominate the row norms.
    transfer_function[:, np.arange(SIGNAL_DIM), np.arange(SIGNAL_DIM)] = 0
    # Normalize each row to unit norm per frequency (directed transfer function).
    dtf = np.abs(transfer_function) / sl.norm(transfer_function, axis=-1, keepdims=True)
    delta_connectivity[j] = _band_connectivity(dtf, frequencies, DELTA_MIN, DELTA_MAX)
    theta_connectivity[j] = _band_connectivity(dtf, frequencies, THETA_MIN, THETA_MAX)
    alpha_connectivity[j] = _band_connectivity(dtf, frequencies, ALPHA_MIN, ALPHA_MAX)
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import StratifiedKFold
from experiments.utility import load_results, save_results

SUBJS = np.arange(1, 11)
INNER_N_SPLITS = 5
OUTER_N_SPLITS = 5
NUM_STARTS = 5

# outer_score[subject, fold]: test accuracy of the classifier trained on the
# best start; inner_score[start, fold] is reused (fully overwritten) per outer fold.
outer_score = np.zeros([len(SUBJS), OUTER_N_SPLITS])
inner_score = np.zeros([NUM_STARTS, INNER_N_SPLITS])


def _new_classifier():
    """Fresh multinomial logistic-regression classifier (saga, balanced)."""
    return LogisticRegression(multi_class='multinomial', solver='saga', class_weight='balanced')


# Nested cross-validation per subject: the inner loop selects the best of
# NUM_STARTS coefficient sets; the outer loop scores that selection.
for subj_idx, subj in enumerate(SUBJS):
    inner_skcv = StratifiedKFold(n_splits=INNER_N_SPLITS)
    outer_skcv = StratifiedKFold(n_splits=OUTER_N_SPLITS)
    _, subj_coef, subj_labels = load_results('S' + str(subj) + '-mvar.pickle')
    # StratifiedKFold only needs labels; features are passed as zeros.
    outer_folds = outer_skcv.split(np.zeros_like(subj_labels), subj_labels)
    for fold, (train_idx, test_idx) in enumerate(outer_folds):
        train_labels = subj_labels[train_idx]
        test_labels = subj_labels[test_idx]
        inner_folds = inner_skcv.split(np.zeros_like(train_labels), train_labels)
        for inner_fold, (fit_idx, val_idx) in enumerate(inner_folds):
            # Inner indices are relative to the outer training set — compose
            # them through train_idx to index the full coefficient array.
            for start, coef in enumerate(subj_coef):
                clf = _new_classifier()
                clf.fit(coef[train_idx[fit_idx]], train_labels[fit_idx])
                inner_score[start, inner_fold] = clf.score(coef[train_idx[val_idx]], train_labels[val_idx])
        # Pick the start with the best mean inner validation accuracy.
        best_start = np.argmax(np.mean(inner_score, axis=1))
        clf = _new_classifier()
        clf.fit(subj_coef[best_start][train_idx], train_labels)
        outer_score[subj_idx, fold] = clf.score(subj_coef[best_start][test_idx], test_labels)
# Mean test accuracy per subject across outer folds.
score = np.mean(outer_score, axis=1)
    
###################
# Beispiel #5
# 0
        # Fragment (surrounding loops lost in extraction): fit an ALM model
        # for each candidate number of components and record the negative log
        # likelihood for the model-misspecification experiment.
        # NOTE(review): NUM_COMPONENTS, num_comps_gen, nll_num_comps, sample,
        # j, data, MODEL_ORDER, PENALTY_PARAM, NUM_STARTS and Alm come from
        # the missing surrounding code.
        for i, num_comps_fit in enumerate(NUM_COMPONENTS):
            print('Generative number of components: ' + str(num_comps_gen) + ', Fitted number of components: '
                  + str(num_comps_fit))
            alm_model = Alm(solver='palm', tol=1e-3)
            # Only the third return value (stored as the NLL) is kept.
            _, _, nll_num_comps[sample, i, j], _ = alm_model.fit(data, MODEL_ORDER, num_comps_fit, PENALTY_PARAM,
                                                                 num_starts=NUM_STARTS)

###################
# save results
###################
# save_results([nll_model_ord, nll_num_comps], 'model_misspec.pickle')

###################
# load results
###################
nll_model_ord, nll_num_comps = load_results('model_misspec.pickle')

# Heat maps of mean NLL: generative vs fitted model order / number of components.
fig, axs = plt.subplots(1, 2)
plt.subplots_adjust(wspace=0.55)
image = []
axs[0].set_xlabel('Gen. model order', fontsize=12)
# One tick per tested model order. The original used len(MODEL_ORDER) + 1,
# producing one more tick than labels — newer Matplotlib raises ValueError
# when FixedLocator tick and label counts differ.
axs[0].set_xticks(np.arange(len(MODEL_ORDER)))
axs[0].set_xticklabels(MODEL_ORDER, fontsize=12)
axs[0].set_ylabel('Fitted model order', fontsize=12)
axs[0].set_yticks(np.arange(len(MODEL_ORDER)))
axs[0].set_yticklabels(MODEL_ORDER, fontsize=12)
# Mean NLL over samples; origin='lower' puts the smallest orders bottom-left.
image.append(axs[0].imshow(np.mean(nll_model_ord, axis=0), origin='lower', cmap=plt.cm.Blues))
fig.colorbar(image[-1], ax=axs[0], fraction=0.046, pad=0.04)

axs[1].set_xlabel('Gen. num. of comps.', fontsize=12)
axs[1].set_xticks(np.arange(len(NUM_COMPONENTS)))
# Beispiel #6
# 0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ALM Copyright (C) 2019  Addison Bohannon
"""

import numpy as np
import matplotlib.pyplot as plt
from experiments.utility import load_results
from MulticoreTSNE import MulticoreTSNE as TSNE
import time

SLEEP_CLASS = ['Awake', 'N1', 'N2', 'N3', 'REM']

# Load per-epoch VAR coefficients for subject 8 and flatten each to a vector.
ar_coef, labels = load_results('S8-var.pickle')
ar_coef = np.reshape(ar_coef, [len(ar_coef), -1])
num_obs, num_features = ar_coef.shape
# Per-class mean coefficient vectors, appended as labeled "centroids".
avg_ar_coef = np.zeros((len(SLEEP_CLASS), num_features))
class_label = np.zeros(len(SLEEP_CLASS))
for i, label in enumerate(np.unique(labels)):
    class_label[i] = label
    avg_ar_coef[i, :] = np.mean(ar_coef[labels == label], axis=0)
# The combined arrays do not depend on perplexity — hoisted out of the loop.
ar_coef_combined = np.concatenate((ar_coef, avg_ar_coef), axis=0)
labels_combined = np.concatenate((labels, class_label), axis=0)
# visualize with different tsne perplexities which roughly equates to the number of neighbors in a cluster.
perplexities = [10, 50, 100, 200, 500, 1000]
for perplexity in perplexities:
    tsne = TSNE(perplexity=perplexity, n_jobs=40)
    print("Running TSNE Fit...")
    start_time = time.time()
    dataCombined = tsne.fit_transform(ar_coef_combined)
# Beispiel #7
# 0
    # Fragment (enclosing loop lost in extraction): for each (number of
    # observations, observation length) pair, fit least-squares VAR
    # coefficients and record the error to the generative coefficients D.
    # NOTE(review): NUM_OBS, OBS_LEN, x, MODEL_ORDER, D, error, iteration,
    # package_observations and sl (presumably scipy.linalg) come from the
    # missing surrounding code.
    for i, n_i in enumerate(NUM_OBS):
        for j, m_i in enumerate(OBS_LEN):
            _, XtY, XtX = package_observations(x[:n_i, :m_i, :], MODEL_ORDER)
            # Solve the normal equations per observation; assume_a='pos'
            # asserts XtX is symmetric positive definite.
            D_ls = np.array([sl.solve(XtX_i, XtY_i, assume_a='pos') for XtX_i, XtY_i in zip(XtX, XtY)])
#            nll[iteration, :, i, j] = np.array(L_palm)
            error[iteration, i, j] = np.mean(sl.norm(D[:n_i] - D_ls, ord='fro', axis=(1, 2)))

###################
# save results
###################
#save_results(error, 'n_vs_m-var.pickle')

###################
# load results
###################
_, error = load_results('n_vs_m.pickle')
# ALM errors are summed over components; normalize to a per-component error.
error /= NUM_COMPONENTS
# error /= np.sqrt(NUM_COMPONENTS)
error_var = load_results('n_vs_m-var.pickle')

# Shared color scale so the two heat maps are directly comparable.
vmax = np.maximum(np.max(error), np.max(error_var))

fig, axs = plt.subplots(1, 2)
plt.subplots_adjust(wspace=0.55)
images = []
for i in range(2):
    axs[i].set_xlabel('Len. of real.', fontsize=12)
    # One tick per tested value. The original used len(...) + 1, producing one
    # more tick than labels — newer Matplotlib raises ValueError when
    # FixedLocator tick and label counts differ.
    axs[i].set_xticks(np.arange(len(OBS_LEN)))
    axs[i].set_xticklabels(OBS_LEN, fontsize=8)
    axs[i].set_ylabel('Num. of real.', fontsize=12)
    axs[i].set_yticks(np.arange(len(NUM_OBS)))
# Beispiel #8
# 0
            # Fragment (enclosing loops lost in extraction): score each
            # PALM-estimated component set against the generative components D
            # and store the per-start errors.
            # NOTE(review): D_palm, D, error, iteration, i, j,
            # unstack_ar_coef and ar_comp_dist come from the missing code.
            error_palm = []
            for D_k in D_palm:
                D_pred = [unstack_ar_coef(Dj) for Dj in D_k]
                # Only the distance is kept; other return values discarded.
                d_loss, _, _ = ar_comp_dist(D, D_pred)
                error_palm.append(d_loss)
            error[iteration, :, i, j] = np.array(error_palm)

###################
# save results
###################
# save_results([nll, error], 'n_vs_m.pickle')

###################
# load results
###################
nll, error = load_results('n_vs_m.pickle')

# Errors are summed over components; normalize to a per-component error.
error /= NUM_COMPONENTS

fig, axs = plt.subplots(1, 2)
plt.subplots_adjust(wspace=0.55)
images = []
for i in range(2):
    axs[i].set_xlabel('Len. of real.', fontsize=12)
    # One tick per tested value. The original used len(...) + 1, producing one
    # more tick than labels — newer Matplotlib raises ValueError when
    # FixedLocator tick and label counts differ.
    axs[i].set_xticks(np.arange(len(OBS_LEN)))
    axs[i].set_xticklabels(OBS_LEN, fontsize=8)
    axs[i].set_ylabel('Num. of real.', fontsize=12)
    axs[i].set_yticks(np.arange(len(NUM_OBS)))
    axs[i].set_yticklabels(NUM_OBS, fontsize=12)
# axs[0].set_title('Avg. min. error', fontsize=12, pad=12)
# axs[1].set_title('Avg. std. error', fontsize=12, pad=12)