labels_dict, window_sz, pca=False, noise_percentage=0.2, noise=False)  # NOTE(review): tail of a call whose opening is above this chunk — left untouched

# Split the prepared data into train/test sets; `datasets[0]` is the training
# pair and `datasets[1]` the test pair (x, y).
datasets = LoadData.load_data_multi_samples(dataset_matrix_r, label_vector_r, n_components, n_classes, samples)
train_set_x, train_set_y = datasets[0][0], datasets[0][1]
test_set_x, test_set_y = datasets[1][0], datasets[1][1]

# Fit the scaler on the training split only, then reuse it on the test split —
# presumably to avoid leaking test statistics into preprocessing (TODO confirm
# PCA.scale_fit/scale_transform semantics).
train_set_x, scaler = PCA.scale_fit(train_set_x)
test_set_x = PCA.scale_transform(test_set_x, scaler)

# Partition the training data into mini-batches of 65 samples each.
batches = LoadData.batch_creation(train_set_x, train_set_y, batch_size=65)

# Drop the large raw/intermediate arrays now that the train/test splits exist,
# freeing memory before the model tensors below are allocated.
del datasets, dataset_matrix, label_vector, dataset_matrix_r, label_vector_r, dataset_mat, dataset_dict, labels_mat, labels_dict

# Mode-0/1/2 unfoldings of the training tensor (set_tensor_matricization is a
# project helper; assumes train_set_x is a 3-way tensor — TODO confirm).
x_0 = set_tensor_matricization(train_set_x, mode=0)
x_1 = set_tensor_matricization(train_set_x, mode=1)
x_2 = set_tensor_matricization(train_set_x, mode=2)

# Uninitialized factor weights, one per tensor mode, plus the classifier head.
# W_0/W_1: (hidden, rank, window_sz); W_2: (hidden, rank, n_components);
# V: (n_classes, hidden) output projection; b: zero-initialized class bias.
W_0 = torch.empty(hidden, rank, window_sz)  # Initialize with xavier (below)
W_1 = torch.empty(hidden, rank, window_sz)
W_2 = torch.empty(hidden, rank, n_components)
V = torch.empty(n_classes, hidden)
b = torch.zeros(n_classes)

# Xavier/Glorot uniform initialization of the factor weights.
# NOTE(review): xavier_uniform_(W_2) and init of V are not visible in this
# chunk — presumably they follow immediately after; verify downstream.
nn.init.xavier_uniform_(W_0)
nn.init.xavier_uniform_(W_1)