def reduce_dims(X=None,
                y=None,
                data_path=None,
                algorithm='pca',
                perplexity=50,
                labels=['M', 'R', 'HC', 'V', 'PO']):
    """Visualize the data in a 2-D latent space with reduced dimensions.

    Parameters
    ----------
    X : ndarray of shape (n_samples, d1, d2), optional
        Input samples; ignored when ``data_path`` is given.
        (assumes each sample is a 2-D matrix — TODO confirm upstream shape)
    y : array-like, optional
        Per-sample labels forwarded to ``plot_latent_space``.
    data_path : str, optional
        When given, ``X`` and ``y`` are loaded via ``load_tma_data``.
    algorithm : {'pca', 'tsne'}
        Dimensionality-reduction method.
    perplexity : int
        t-SNE perplexity; only used when ``algorithm == 'tsne'``.
    labels : list of str
        Class names forwarded to ``plot_latent_space``.

    Raises
    ------
    ValueError
        If no data is supplied, or ``algorithm`` is not recognized.
    """
    if data_path is not None:
        X, y = load_tma_data(data_path)
    if X is None:
        # previously this fell through to an AttributeError on X.reshape
        raise ValueError("either X or data_path must be provided")

    # flatten each (d1, d2) sample into a single feature vector
    X = X.reshape(X.shape[0], X.shape[1] * X.shape[2])

    print("Number of Training Samples : ", X.shape[0])

    # standardize the data (zero mean, unit variance per feature)
    X = StandardScaler().fit_transform(X)

    # reduce the dimensionality of the data
    if algorithm == 'pca':
        X_reduced = PCA(n_components=2).fit_transform(X)
    elif algorithm == 'tsne':
        X_reduced = TSNE(n_components=2,
                         perplexity=perplexity,
                         random_state=0,
                         verbose=True).fit_transform(X)
    else:
        # previously an unknown algorithm left X_reduced unbound (NameError)
        raise ValueError("unknown algorithm: {!r}".format(algorithm))

    # plot the latent space
    # NOTE(review): removed four trailing statements that referenced names
    # never defined in this scope (X_test, X1i, X2i, y_train, y_test, i);
    # they were dead code from an unrelated snippet and would always raise
    # NameError after the plot was shown.
    plot_latent_space(X_reduced, y, labels)

# Short aliases for the train/test splits built above.
# NOTE(review): X_train/y_train/X_test/y_test are defined earlier in the file
# (outside this view) — confirm before reordering anything here.
X1, y1 = X_train, y_train
X2, y2 = X_test, y_test

# Optional diagnostic: embed the combined train+test set in 2-D with t-SNE
# and plot it, colored by class.
if visualizeLatentSpace:
    X = np.vstack((X1, X2))
    y = np.hstack((y1, y2))
    print(X.shape)
    print(y.shape)
    # assumes each sample flattens to 8*8 = 64 features — TODO confirm
    Xr = TSNE(n_components=2,
              perplexity=50).fit_transform(X.reshape(X.shape[0], 8 * 8))
    plot_latent_space(Xr, y, labels=class_labels)

## shuffle (fixed seed so the split order is reproducible)
X1, y1 = shuffle(X1, y1, random_state=0)
X2, y2 = shuffle(X2, y2, random_state=0)

## create graph dataset (project helper; converts arrays to graph samples)
Xg1 = createGraphDataset(X1, y1)
Xg2 = createGraphDataset(X2, y2)

## define model
model = Net()

## define the optimizer
# lr is presumably a hyperparameter defined earlier in the file — verify
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
## ---- Example #3 (original snippet separator read "예제 #3" / "0" — a
## scraping artifact that was not valid Python) ----
            
            GL = DiffusionGraphLearn(
                    X,
                    p=p, 
                    beta_1=beta_1,
                    beta_2=beta_2,
                    verbosity=True
            )
            W = GL.findGraphLaplacian()
            W /= np.max(W)
            # W[W<0.4] = 0
            Ws = np.vstack((Ws, np.expand_dims(W, 0)))
            y.append(i)

# Project the stacked 8x8 matrices to 2-D with PCA and plot them by label.
# NOTE(review): Ws and y are accumulated in the loop above, which is truncated
# in this view — confirm their construction before modifying this.
Xr = PCA(n_components=2).fit_transform(Ws.reshape(Ws.shape[0], 8*8))
plot_latent_space(Xr, y, labels=labels)

# Load two recording sessions per class, preprocess them, and (further below,
# past this view) build per-class graph representations for visualization.
# NOTE(review): the for-loop body continues beyond the last visible line.
if combinedReduceDims:
    # onset indices per class/session; columns named like "M_2", "M_3"
    df = pd.read_csv(onsets_file, delimiter=',')
    Ws = np.empty((0, 8, 8))  # accumulator for 8x8 matrices
    y = []
    labels = ["M", "R", "HC", "V", "PO"]
    for i in range(len(labels)):
        # two sessions per class, file suffixes _2 and _3
        file1 = os.path.join(data_path, "{}_2.csv".format(labels[i]))
        file2 = os.path.join(data_path, "{}_3.csv".format(labels[i]))
        # NOTE(review): `id` shadows the builtin; presumably a subject or
        # channel identifier defined elsewhere in the file — confirm
        data1 = read_from_csv(id, file1)
        data1 = preprocess(data1, fs, lpfreq=1)  # fs: sampling rate — verify
        data2 = read_from_csv(id, file2)
        data2 = preprocess(data2, fs, lpfreq=1)
        onsets1 = df[['{}_2'.format(labels[i])]].values.squeeze()
        onsets2 = df[['{}_3'.format(labels[i])]].values.squeeze()