def eval_model(embedding=None, metamodel=None):
    # hyperparameters, dataset, and dir_path are expected to be bound in the
    # enclosing scope where this helper is defined.
    model = metamodel
    if model is None:
        model = MetaModel(hyperparameters)
        if embedding is None:
            model.populate_with_nasnet_metacells()
        else:
            model.populate_from_embedding(embedding)
    model.build_model(dataset.images_shape)
    model.evaluate(dataset, 1, dir_path)
    model.save_metadata(dir_path)
    model.save_model(dir_path)
    model.generate_graph(dir_path)
    model.clear_model()
    tf.keras.backend.clear_session()
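# A minimal driver sketch, assuming eval_model is defined at module level so
# its free variables can be bound via globals. The directory name
# 'eval_model_test' is hypothetical.
def run_eval_model_example():
    global hyperparameters, dataset, dir_path
    hyperparameters = Hyperparameters()
    dataset = ImageDataset.get_cifar10()
    dir_path = os.path.join(evo_dir, 'eval_model_test')
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    eval_model()  # no embedding: populates with random NASNet-style metacells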
def test_model_accuracy_from_embedding(dir_name, embedding):
    dir_path = os.path.join(evo_dir, dir_name)
    # dataset = ImageDataset.get_cifar10_reduced()
    dataset = ImageDataset.get_cifar10()
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    hyperparameters = Hyperparameters()

    # Build a model directly from the given architecture embedding, train and
    # evaluate it on CIFAR-10, then save all artifacts to dir_path.
    model = MetaModel(hyperparameters)
    model.populate_from_embedding(embedding)
    model.build_model(dataset.images_shape)
    model.evaluate(dataset)
    model.save_model(dir_path)
    model.generate_graph(dir_path)
    model.save_metadata(dir_path)
    model.clear_model()
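# A convenience-wrapper sketch: evaluate several embeddings in one call. Each
# entry is assumed to be in whatever format populate_from_embedding expects,
# and the 'embedding_{i}' directory naming is hypothetical.
def test_many_embeddings(embeddings):
    for i, emb in enumerate(embeddings):
        test_model_accuracy_from_embedding(f'embedding_{i}', emb)
        tf.keras.backend.clear_session()  # release graph state between builds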
def test_accuracy_at_different_train_amounts():
    dir_path = os.path.join(evo_dir, 'test_accuracy_epochs')
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    hyperparameters = Hyperparameters()
    hyperparameters.parameters['POPULATION_SIZE'] = 32
    hyperparameters.parameters['ROUNDS'] = 0
    hyperparameters.parameters['TRAIN_EPOCHS'] = 1
    hyperparameters.parameters['TRAIN_ITERATIONS'] = 16

    dataset = ImageDataset.get_cifar10()

    # Resume support: count the models already evaluated in dir_path (ignoring
    # 'small' variants and .png graphs) and only run the remainder.
    existing_sims = [x for x in os.listdir(dir_path) if 'small' not in x and '.png' not in x]
    num_already_done = len(existing_sims)
    num_remaining = hyperparameters.parameters['POPULATION_SIZE'] - num_already_done
    total_todo = hyperparameters.parameters['POPULATION_SIZE']

    population = []
    for round_num in range(num_remaining):
        print(f'Evaluating model {round_num + 1 + num_already_done} of {total_todo}')
        new_candidate = MetaModel(hyperparameters)
        new_candidate.populate_with_nasnet_metacells()
        # Rename here rather than at construction: all models in the initial
        # population are created within microseconds of each other, so names
        # derived at __init__ time could collide.
        new_candidate.model_name = 'evo_' + str(time.time())
        new_candidate.build_model(dataset.images_shape)
        new_candidate.evaluate(dataset)
        new_candidate.save_model(dir_path)
        # new_candidate.metrics.metrics['accuracy'].extend([x + round_num for x in range(4)])
        new_candidate.save_metadata(dir_path)
        population.append(new_candidate)
        new_candidate.clear_model()
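# Even a time.time()-based name can collide if two models are created within
# the timer's resolution. A collision-proof alternative sketch using only the
# standard library (uuid is an assumption, not otherwise used here):
import uuid

def unique_model_name(prefix: str = 'evo') -> str:
    # uuid4 is unique per call regardless of clock resolution
    return f'{prefix}_{uuid.uuid4().hex}'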
def test_nth_in_dir(dir_name, n: int):
    dir_path = os.path.join(evo_dir, dir_name)
    data_path = os.path.join(dir_path, 'results.json')

    with open(data_path, 'r') as fl:
        data = json.load(fl)

    performances = [performance(x) for x in data['accuracies']]
    performances_with_indexes = [(performances[i], data['embeddings'][i]) for i in range(len(performances))]

    num_cells = len(performances[0])  # should be 2 (normal and reduction cell)
    # Rank every candidate independently per cell, then take the n-th entry
    # from each ranking and splice those embeddings together.
    pwi_per_cell = [performances_with_indexes.copy() for _ in range(num_cells)]
    for i in range(num_cells):
        pwi_per_cell[i].sort(key=lambda x: x[0][i])
    selected_embeddings = [x[n][1] for x in pwi_per_cell]
    combined_embeddings = combine_embeddings(selected_embeddings[0], selected_embeddings[1])
    print(combined_embeddings)

    hyperparameters = Hyperparameters()
    hyperparameters.parameters['TRAIN_EPOCHS'] = 2
    hyperparameters.parameters['TRAIN_ITERATIONS'] = 16
    # hyperparameters.parameters['SGDR_EPOCHS_PER_RESTART'] = hyperparameters.parameters['TRAIN_ITERATIONS'] * hyperparameters.parameters['TRAIN_EPOCHS']  # effectively turns SGDR into plain cosine annealing

    dataset = ImageDataset.get_cifar10()

    metamodel = MetaModel(hyperparameters)
    metamodel.populate_from_embedding(combined_embeddings)
    metamodel.build_model(dataset.images_shape)
    metamodel.evaluate(dataset)
    metamodel.save_metadata(dir_path)
    metamodel.save_model(dir_path)
    metamodel.clear_model()
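# A toy illustration (made-up data) of the per-cell ranking above: two
# candidates, two cells, lower score treated as better by the sort.
def _nth_selection_demo(n: int = 0):
    performances = [(0.3, 0.1), (0.2, 0.4)]  # hypothetical per-cell scores
    embeddings = ['emb_a', 'emb_b']          # placeholder embeddings
    pwi = list(zip(performances, embeddings))
    per_cell = [sorted(pwi, key=lambda x: x[0][i]) for i in range(2)]
    # n=0 selects emb_b for cell 0 (0.2 < 0.3) and emb_a for cell 1 (0.1 < 0.4)
    return [cell[n][1] for cell in per_cell]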