Example #1
import numpy as np  # fourth_record() and util are project-specific helpers assumed to be in scope

def load_data_fourth_record():
    i, p = fourth_record()  # i: interictal, p: preictal

    # arbitrarily assign the one-hot label (1, 0) to preictal and (0, 1) to interictal
    pre = np.array((1, 0)).reshape(1, 2)
    inter = np.array((0, 1)).reshape(1, 2)
    # the data set contains 630 interictal and 431 preictal examples
    prem = np.repeat(pre, len(p), axis=0)
    interm = np.repeat(inter, len(i), axis=0)

    labels = np.concatenate((prem, interm), axis=0)
    data = np.concatenate((p, i), axis=0)

    # shuffle the examples, keeping only the first 22 channels of each
    idx = np.random.permutation(data.shape[0])
    x, y = data[idx, :22, :], labels[idx, :]

    # x = util.normalize(x)
    # magnitude of the FFT over the channel and time axes, then normalize
    x = np.abs(np.fft.fftn(x, axes=(1, 2)))
    x = util.normalize(x)

    # 50 / 25 / 25 split into training, validation, and test sets
    x_training = x[:len(x) // 2, :, :]
    y_training = y[:len(y) // 2, :]

    x_val = x[len(x) // 2:len(x) // 2 + len(x) // 4, :, :]
    y_val = y[len(y) // 2:len(y) // 2 + len(y) // 4, :]

    x_test = x[len(x) // 2 + len(x) // 4:len(x), :, :]
    y_test = y[len(y) // 2 + len(y) // 4:len(y), :]

    return x_training, y_training, x_val, y_val, x_test, y_test
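
A minimal usage sketch for the loader above (a hypothetical call, assuming fourth_record() and the project's util module are importable):

x_tr, y_tr, x_val, y_val, x_te, y_te = load_data_fourth_record()
print(x_tr.shape, y_tr.shape)  # e.g. x_tr: (530, 22, <time samples>), y_tr: (530, 2) given the 1061 examples noted above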
Example #2
def pca_test():
    d = [5, 10, 15, 20, 30, 40, 50, 75, 100]
    acc = np.zeros(len(d))
    sen = np.zeros(len(d))
    spe = np.zeros(len(d))

    for i, di in enumerate(d):
        print(f"Given d = {di}:")
    
        # normalize, reduce to di principal components, then fit and score the k-NN model
        tr = util.normalize(xt_f, axs=1)
        te = util.normalize(xte_f, axs=1)

        tr = PCA.pca(tr, di)
        te = PCA.pca(te, di)

        knn.fit_model(tr, yt_f)
        acc[i], spe[i], sen[i] = knn.test(te, yte_f, verbose=True, pp=2)
        print()  # blank line between runs
Example #3
def p_test():
    pii = [1.5, 2, 2.5, 3, 3.5]
    acc = np.zeros(len(pii))
    sen = np.zeros(len(pii))
    spe = np.zeros(len(pii))

    for i, pi in enumerate(pii):
        print(f"Given p = {pi}:")
        knn.set_k(1)
        
        tr = util.normalize(xt_f, axs=1)
        te = util.normalize(xte_f, axs=1)

        tr = PCA.pca(tr, 10)
        te = PCA.pca(te, 10)

        knn.fit_model(tr, yt_f)
        acc[i], spe[i], sen[i] = knn.test(te, yte_f, verbose=True, pp=pi)
        print()  # blank line between runs
Example #4
def k_test():
    ks = [1, 3, 5, 7, 9]
    acc = np.zeros(len(ks))
    sen = np.zeros(len(ks))
    spe = np.zeros(len(ks))

    for i, ki in enumerate(ks):
        print(f"Given k = {ki}:")
        knn.set_k(ki)
        
        tr = util.normalize(xt_f, axs=1)
        te = util.normalize(xte_f, axs=1)

        tr = PCA.pca(tr, 10)
        te = PCA.pca(te, 10)

        knn.fit_model(tr, yt_f)
        acc[i], spe[i], sen[i] = knn.test(te, yte_f, verbose=True, pp=2)
        print()  # blank line between runs
Example #5
        
knn.set_k(1)
tr = util.normalize(xt_f, axs=1)
te = util.normalize(xte_f, axs=1)

tr = PCA.pca(tr, 10)
te = PCA.pca(te, 10)

knn.fit_model(tr, yt_f)
knn.test(te, yte_f, verbose=True, pp=3)


# import matplotlib.pyplot as plt

# plt.plot(p,acc, label="acc")
# plt.plot(p,spe, label="spe")
# plt.plot(p,sen, label="sen")
# plt.legend(loc = "upper right")
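
If one of the sweeps is changed to return its values and metrics (as in the sketches above), the commented-out plot can be enabled along these lines (matplotlib assumed; ks, acc, spe, sen are placeholders for whatever values and metrics were collected):

import matplotlib.pyplot as plt

plt.plot(ks, acc, label="acc")
plt.plot(ks, spe, label="spe")
plt.plot(ks, sen, label="sen")
plt.legend(loc="upper right")
plt.xlabel("swept hyperparameter")
plt.ylabel("score")
plt.show()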
Example #6
          C = 320, #10
          N = 927)

ss.add(svm)

ss.set_model_parameters(learning_rate = 0.00001, #0.001
                        rho = 0.975,
                        optimizer = "SGD+Momentum",
                        loss = "SoftMargin") 



xtf, ytf, xvf, yvf, xtef, ytef = util.extract_linear_features(xt, yt, xv, yv, xte, yte)

# normalize the feature vectors, then append a bias term
xtf = util.normalize(xtf, axs=1)
xvf = util.normalize(xvf, axs=1)
xtef = util.normalize(xtef, axs=1)

xtf = util.add_bias_term(xtf)
xvf = util.add_bias_term(xvf)
xtef = util.add_bias_term(xtef)

ss.fit_model(xtf, ytf,    # for training
             xvf, yvf,    # for validation
             epoch=1000,
             verbose=True,
             perEpochVerbose=100,
             weightSavePerEpoch=201)

# ss.load_weights(9801)
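
For reference, the "SoftMargin" loss named above corresponds to the standard hinge loss of a soft-margin linear SVM. A minimal NumPy sketch of the loss and its subgradient for a linear scorer X @ w with labels y in {-1, +1} (an illustration only, not the project's ss/svm implementation; C plays the role of the penalty constant set to 320 above):

import numpy as np

def soft_margin_loss_and_grad(w, X, y, C=320.0):
    # hinge loss: 0.5*||w||^2 + C * mean over examples of max(0, 1 - y * (X @ w))
    margins = 1.0 - y * (X @ w)
    active = margins > 0                      # examples that violate the margin
    loss = 0.5 * np.dot(w, w) + C * np.mean(np.maximum(0.0, margins))
    # subgradient: w from the L2 term, minus C * mean of y_i * x_i over the active examples
    grad = w - C * (X[active].T @ y[active]) / len(y)
    return loss, grad

With "SGD+Momentum", the corresponding update would keep a velocity v = rho * v - learning_rate * grad and apply w += v, which is what the rho = 0.975 and learning_rate = 0.00001 settings above would control.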