def test_aligned_update_array_error(aligned_iris, aligned_iris_relations):
    """update() must reject a *list* of new datasets: it accepts one slice
    at a time, so passing the remaining slices en masse raises ValueError."""
    data, target = aligned_iris
    neighbor_counts = [15] * 5
    model = AlignedUMAP(n_neighbors=neighbor_counts[:3])
    model.fit(data[:3], relations=aligned_iris_relations[:2])
    with pytest.raises(ValueError):
        model.update(
            data[3:],
            relations=aligned_iris_relations[2:],
            n_neighbors=neighbor_counts[3:],
        )
def test_aligned_update(aligned_iris, aligned_iris_relations):
    """Fit AlignedUMAP on the first three iris slices, update with the fourth,
    and check that each embedding preserves >= 65% of 10-NN neighborhoods."""
    data, target = aligned_iris
    small_aligned_model = AlignedUMAP()
    small_aligned_model.fit(data[:3], relations=aligned_iris_relations[:2])
    small_aligned_model.update(data[3], relations=aligned_iris_relations[2])
    # Renamed loop variable: the original `slice` shadowed the builtin.
    for i, data_slice in enumerate(data[:4]):
        data_dmat = pairwise_distances(data_slice)
        true_nn = np.argsort(data_dmat, axis=1)[:, :10]
        embd_dmat = pairwise_distances(small_aligned_model.embeddings_[i])
        embd_nn = np.argsort(embd_dmat, axis=1)[:, :10]
        assert nn_accuracy(true_nn, embd_nn) >= 0.65
def aligned_iris_model(aligned_iris, aligned_iris_relations):
    """Build and return an AlignedUMAP model fitted on all iris slices."""
    data, target = aligned_iris
    fitted = AlignedUMAP().fit(data, relations=aligned_iris_relations)
    return fitted
# ################################ EXTRACT EMBEDDINGS ######################### to_embed_weights = [] for layer in range(3): stacked_w = np.vstack([ total_weights[optimizer][layer] for optimizer in range(len(target_optimizers)) ]) stacked_w = StandardScaler().fit_transform(stacked_w) to_embed_weights.append(stacked_w) rela = [{key: key for key in range(to_embed_weights[0].shape[0])} for i in range(2)] mapper = AlignedUMAP(n_components=2, metric='manhattan', n_neighbors=30).fit(to_embed_weights, relations=rela) emb_space_sizes = [] for emb in mapper.embeddings_: emb_space_sizes.append([ np.append(emb.min(0), np.array(total_train_losses).flatten().min()), np.append(emb.max(0), np.array(total_train_losses).flatten().max()) ]) # ################################ SAVE ANIMATIONS ############################ for index, opt_name in enumerate(target_optimizers.keys()):