def main(_): """Creates the toy dataset main.""" # Prepares dataset width, height = toy_helper.create_dataset() # Loads dataset x, y, concept = toy_helper.load_xyconcept() x_train = x[:n0, :] x_val = x[n0:, :] y_train = y[:n0, :] y_val = y[n0:, :] # Loads model _, _, feature_dense_model = toy_helper.load_model( x_train, y_train, x_val, y_val, pretrain=False) toy_helper.create_feature(x, width, height, feature_dense_model) # Runs after create_feature toy_helper.create_cluster(concept)
def main(_):
  """Sweeps the number of concepts for concept discovery and the ACE/PCA baselines."""
  n_concept = 5
  n_cluster = 5
  n = 60000
  n0 = int(n * 0.8)
  pretrain = True
  # Loads data.
  x, y, concept = toy_helper.load_xyconcept(n, pretrain)
  if not pretrain:
    x_train = x[:n0, :]
    x_val = x[n0:, :]
    y_train = y[:n0, :]
    y_val = y[n0:, :]
  # Loads features produced by the dataset-creation step.
  all_feature_dense = np.load('all_feature_dense.npy')
  f_train = all_feature_dense[:n0, :]
  f_val = all_feature_dense[n0:, :]
  # Loads model.
  if not pretrain:
    dense2, predict, _ = toy_helper.load_model(
        x_train, y_train, x_val, y_val, pretrain=pretrain)
  else:
    dense2, predict, _ = toy_helper.load_model(_, _, _, _, pretrain=pretrain)
  # Loads concept clusters.
  concept_arraynew = np.load('concept_arraynew.npy')
  concept_arraynew2 = np.load('concept_arraynew2.npy')

  for n_concept in range(1, 10):
    print(n_concept)
    # Discovers concepts with the true clusters.
    finetuned_model_pr = ipca.ipca_model(concept_arraynew2, dense2, predict,
                                         f_train, y_train, f_val, y_val,
                                         n_concept, comp1=True)
    num_epoch = 5
    for _ in range(num_epoch):
      finetuned_model_pr.fit(f_train, y_train,
                             batch_size=50,
                             epochs=10,
                             verbose=True,
                             validation_data=(f_val, y_val))
    # Evaluates group accuracy and gets the concept matrix.
    _, _ = ipca.get_groupacc(finetuned_model_pr, concept_arraynew2, f_train,
                             f_val, concept, n_concept, n_cluster, n0,
                             verbose=False)

    # Discovers concepts with the self-discovered clusters.
    finetuned_model_pr = ipca.ipca_model(concept_arraynew, dense2, predict,
                                         f_train, y_train, f_val, y_val,
                                         n_concept, comp1=True)
    num_epoch = 5
    for _ in range(num_epoch):
      finetuned_model_pr.fit(f_train, y_train,
                             batch_size=50,
                             epochs=10,
                             verbose=True,
                             validation_data=(f_val, y_val))
    _, _ = ipca.get_groupacc(finetuned_model_pr, concept_arraynew, f_train,
                             f_val, concept, n_concept, n_cluster, n0,
                             verbose=False)

  for n_concept in range(1, 10):
    print(n_concept)
    # ACE baseline with the self-discovered clusters.
    concept_matrix_ace = toy_helper.get_ace_concept(concept_arraynew, dense2,
                                                    predict, f_val, n_concept)
    finetuned_model_pr_ace = ipca.ipca_model(concept_arraynew, dense2, predict,
                                             f_train, y_train, f_val, y_val,
                                             n_concept, verbose=True, epochs=0,
                                             metric='accuracy')
    finetuned_model_pr_ace.layers[-5].set_weights([concept_matrix_ace])
    print(finetuned_model_pr_ace.evaluate(f_val, y_val))
    _, _ = ipca.get_groupacc(finetuned_model_pr_ace, concept_arraynew, f_train,
                             f_val, concept, n_concept, n_cluster, n0,
                             verbose=False)

    # ACE baseline with the true clusters.
    concept_matrix_ace2 = toy_helper.get_ace_concept(concept_arraynew2, dense2,
                                                     predict, f_val, n_concept)
    finetuned_model_pr_ace2 = ipca.ipca_model(concept_arraynew2, dense2,
                                              predict, f_train, y_train, f_val,
                                              y_val, n_concept, verbose=True,
                                              epochs=0, metric='accuracy')
    finetuned_model_pr_ace2.layers[-5].set_weights([concept_matrix_ace2])
    print(finetuned_model_pr_ace2.evaluate(f_val, y_val))
    _, _ = ipca.get_groupacc(finetuned_model_pr_ace2, concept_arraynew2,
                             f_train, f_val, concept, n_concept, n_cluster, n0,
                             verbose=False)

    # PCA baseline.
    concept_matrix_pca = toy_helper.get_pca_concept(f_train, n_concept)
    finetuned_model_pr_pca = ipca.ipca_model(concept_arraynew, dense2, predict,
                                             f_train, y_train, f_val, y_val,
                                             n_concept, verbose=True, epochs=0,
                                             metric='accuracy')
    finetuned_model_pr_pca.layers[-5].set_weights([concept_matrix_pca])
    print(finetuned_model_pr_pca.evaluate(f_val, y_val))
    _, _ = ipca.get_groupacc(finetuned_model_pr_pca, concept_arraynew, f_train,
                             f_val, concept, n_concept, n_cluster, n0,
                             verbose=False)
def main(_):
  """Discovers concepts with the pretrained model, then saves and plots the concept matrices."""
  n_concept = 5
  n_cluster = 5
  n = 60000
  n0 = int(n * 0.8)
  pretrain = True
  # Loads data.
  x, y, concept = toy_helper.load_xyconcept(n, pretrain)
  if not pretrain:
    x_train = x[:n0, :]
    x_val = x[n0:, :]
    y_train = y[:n0, :]
    y_val = y[n0:, :]
  # Loads features produced by the dataset-creation step.
  all_feature_dense = np.load('all_feature_dense.npy')
  f_train = all_feature_dense[:n0, :]
  f_val = all_feature_dense[n0:, :]
  # Loads model.
  if not pretrain:
    dense2, predict, _ = toy_helper.load_model(
        x_train, y_train, x_val, y_val, pretrain=pretrain)
  else:
    dense2, predict, _ = toy_helper.load_model(_, _, _, _, pretrain=pretrain)
  # Loads concept clusters.
  concept_arraynew = np.load('concept_arraynew.npy')
  concept_arraynew2 = np.load('concept_arraynew2.npy')

  # Discovers concepts with the true clusters.
  finetuned_model_pr = ipca.ipca_model(concept_arraynew2, dense2, predict,
                                       f_train, y_train, f_val, y_val,
                                       n_concept)
  num_epoch = 5
  for _ in range(num_epoch):
    finetuned_model_pr.fit(
        f_train,
        y_train,
        batch_size=50,
        epochs=10,
        verbose=True,
        validation_data=(f_val, y_val))
  # Evaluates group accuracy and gets the concept matrix.
  concept_matrix, _ = ipca.get_groupacc(
      finetuned_model_pr, concept_arraynew2, f_train, f_val, concept,
      n_concept, n_cluster, n0, verbose=False)
  # Saves the concept matrix.
  with open('concept_matrix_sup.pickle', 'wb') as handle:
    pickle.dump(concept_matrix, handle, protocol=pickle.HIGHEST_PROTOCOL)
  # Plots nearest neighbors.
  feature_sp1 = np.load('feature_sp1.npy')
  segment_sp1 = np.load('segment_sp1.npy')
  feature_sp1_1000 = feature_sp1[:1000]
  segment_sp1_1000 = segment_sp1[:1000]
  ipca.plot_nearestneighbor(concept_matrix, feature_sp1_1000, segment_sp1_1000)

  # Discovers concepts with the self-discovered clusters.
  finetuned_model_pr = ipca.ipca_model(concept_arraynew, dense2, predict,
                                       f_train, y_train, f_val, y_val,
                                       n_concept)
  num_epoch = 5
  for _ in range(num_epoch):
    finetuned_model_pr.fit(
        f_train,
        y_train,
        batch_size=50,
        epochs=10,
        verbose=True,
        validation_data=(f_val, y_val))
  concept_matrix, _ = ipca.get_groupacc(
      finetuned_model_pr, concept_arraynew, f_train, f_val, concept, n_concept,
      n_cluster, n0, verbose=False)
  # Saves the concept matrix.
  with open('concept_matrix_unsup.pickle', 'wb') as handle:
    pickle.dump(concept_matrix, handle, protocol=pickle.HIGHEST_PROTOCOL)
  # Plots nearest neighbors.
  ipca.plot_nearestneighbor(concept_matrix, feature_sp1_1000, segment_sp1_1000)
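Each of the three entry points takes a single unused argument, which is consistent with absl's app.run convention. The launcher and import block below is a minimal sketch of what each script would additionally need, assuming toy_helper.py and ipca.py are importable from the same directory; the absl wiring and the exact import set are assumptions (for example, pickle is only needed by the last script), not something the snippets themselves confirm.

import pickle

import numpy as np
from absl import app

import ipca
import toy_helper

# ... main(_) as defined above ...

if __name__ == '__main__':
  # app.run parses flags and passes the leftover argv as the single,
  # unused positional argument that main(_) ignores.
  app.run(main)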