def test_greedy_searcher_sp(_, _1, _2, _3):
    """Greedy searcher given multiple generators records one model per search call."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    searcher = GreedySearcher(
        3,
        (28, 28, 3),
        verbose=False,
        path=TEST_TEMP_DIR,
        metric=Accuracy,
        loss=classification_loss,
        generators=[CnnGenerator, CnnGenerator],
    )
    # Two rounds of search should yield exactly two history entries.
    for _round in range(2):
        searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == 2
def test_greedy_searcher_sp(_, _1, _2, _3):
    """Each GreedySearcher.search() call appends exactly one entry to history."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    generators = [CnnGenerator, CnnGenerator]
    searcher = GreedySearcher(3, (28, 28, 3),
                              verbose=False,
                              path=TEST_TEMP_DIR,
                              metric=Accuracy,
                              loss=classification_loss,
                              generators=generators)
    searcher.search(train_data, test_data)
    searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == 2
def test_model_trainer_classification():
    """ModelTrainer completes a short classification training run without error."""
    model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
    train_data, test_data = get_classification_data_loaders()
    trainer = ModelTrainer(model,
                           train_data=train_data,
                           test_data=test_data,
                           metric=Accuracy,
                           loss_function=classification_loss,
                           verbose=True)
    trainer.train_model(max_iter_num=3)
def test_bayesian_searcher(_, _1):
    """Bayesian search records one trained model per search() invocation."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(default_test_path)
    generator = Searcher(3, (28, 28, 3),
                         verbose=False,
                         path=default_test_path,
                         metric=Accuracy,
                         loss=classification_loss)
    # Shrink the search so the test stays fast.
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    for _step in range(2):
        generator.search(train_data, test_data)
    clean_dir(default_test_path)
    assert len(generator.history) == 2
def test_bayesian_searcher_sp(_, _1, _2, _3):
    """BayesianSearcher with two CNN generators yields one history entry per search."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    searcher = BayesianSearcher(
        3,
        (28, 28, 3),
        verbose=False,
        path=TEST_TEMP_DIR,
        metric=Accuracy,
        loss=classification_loss,
        generators=[CnnGenerator, CnnGenerator],
    )
    # Keep the search space tiny for speed.
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    for _iteration in range(2):
        searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == 2
def test_bayesian_searcher_sp(_, _1, _2, _3):
    """Searcher given explicit generators records one history entry per search call."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    searcher = Searcher(3, (28, 28, 3),
                        verbose=False,
                        path=TEST_TEMP_DIR,
                        metric=Accuracy,
                        loss=classification_loss,
                        generators=[CnnGenerator, CnnGenerator])
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    searcher.search(train_data, test_data)
    searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == 2
def test_model_trainer_classification():
    """Short classification training run in a temp dir completes and cleans up."""
    model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
    train_data, test_data = get_classification_data_loaders()
    trainer = ModelTrainer(model,
                           train_data=train_data,
                           test_data=test_data,
                           metric=Accuracy,
                           loss_function=classification_loss,
                           verbose=True,
                           path=TEST_TEMP_DIR)
    trainer.train_model(max_iter_num=3)
    clean_dir(TEST_TEMP_DIR)
def test_out_of_memory(_, _1): train_data, test_data = get_classification_data_loaders() clean_dir(TEST_TEMP_DIR) searcher = Searcher(3, (28, 28, 3), verbose=False, path=TEST_TEMP_DIR, metric=Accuracy, loss=classification_loss) Constant.N_NEIGHBOURS = 1 Constant.T_MIN = 0.8 for _ in range(4): searcher.search(train_data, test_data) clean_dir(TEST_TEMP_DIR) assert len(searcher.history) == 0
def test_grid_searcher(_, _1, _2):
    """Grid search visits every point in its search grid exactly once.

    Fix: removed a leftover debugging ``print`` and cached the repeated
    ``get_search_dimensions()`` call instead of recomputing it.
    """
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    searcher = GridSearcher(3, (28, 28, 3),
                            verbose=True,
                            path=TEST_TEMP_DIR,
                            metric=Accuracy,
                            loss=classification_loss,
                            generators=[CnnGenerator, CnnGenerator])
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    # One search() call per grid point.
    dimension_count = len(searcher.get_search_dimensions())
    for _point in range(dimension_count):
        searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == len(searcher.search_dimensions)
def test_grid_searcher(_, _1, _2):
    """Grid search produces one history entry per search-grid dimension.

    Fix: dropped the stray debugging ``print`` and hoisted the repeated
    ``get_search_dimensions()`` lookup out of the loop header.
    """
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    searcher = GridSearcher(3, (28, 28, 3),
                            verbose=True,
                            path=TEST_TEMP_DIR,
                            metric=Accuracy,
                            loss=classification_loss,
                            generators=[CnnGenerator, CnnGenerator])
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    total_points = len(searcher.get_search_dimensions())
    for _grid_point in range(total_points):
        searcher.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(searcher.history) == len(searcher.search_dimensions)
def test_exception_handling(_, _2):
    """Exceptions raised during trials are swallowed and leave history empty."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    # Configure a tiny search so failures surface quickly.
    Constant.N_NEIGHBOURS = 2
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(
        3,
        (28, 28, 3),
        verbose=True,
        path=TEST_TEMP_DIR,
        metric=Accuracy,
        loss=classification_loss,
        generators=[CnnGenerator, ResNetGenerator],
    )
    for _trial in range(3):
        generator.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(generator.history) == 0
def test_model_trainer_timout():
    """A 1-second timeout aborts a long training run with TimeoutError."""
    model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
    train_data, test_data = get_classification_data_loaders()
    trainer = ModelTrainer(model,
                           train_data=train_data,
                           test_data=test_data,
                           metric=Accuracy,
                           loss_function=classification_loss,
                           verbose=True,
                           path=TEST_TEMP_DIR)
    # 300 iterations cannot finish within one second.
    with pytest.raises(TimeoutError):
        trainer.train_model(max_iter_num=300, timeout=1)
    clean_dir(TEST_TEMP_DIR)
def test_out_of_memory(_, _2):
    """Simulated OOM failures leave the Bayesian searcher's history empty."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(3, (28, 28, 3),
                                 verbose=True,
                                 path=TEST_TEMP_DIR,
                                 metric=Accuracy,
                                 loss=classification_loss,
                                 generators=[CnnGenerator, ResNetGenerator])
    generator.search(train_data, test_data)
    generator.search(train_data, test_data)
    generator.search(train_data, test_data)
    clean_dir(TEST_TEMP_DIR)
    assert len(generator.history) == 0
def test_model_trainer_classification():
    """Train a short classification run on the TensorFlow backend.

    Fix: the backend switch is now wrapped in ``try``/``finally`` so the
    module-global ``Backend.backend`` is restored to ``torch`` even when
    training raises — previously a failure here would leak the TensorFlow
    backend into every subsequent test.
    """
    Backend.backend = tensorflow
    try:
        model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
        train_data, test_data = get_classification_data_loaders()
        ModelTrainer(model,
                     train_data=train_data,
                     test_data=test_data,
                     metric=Accuracy,
                     loss_function=Backend.classification_loss,
                     verbose=True,
                     path=TEST_TEMP_DIR).train_model(max_iter_num=3)
    finally:
        # Always restore the default backend and temp dir.
        Backend.backend = torch
        clean_dir(TEST_TEMP_DIR)
def test_bayesian_searcher(_, _1):
    """Two searches append exactly two entries to the searcher history."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(default_test_path)
    generator = Searcher(
        3,
        (28, 28, 3),
        verbose=False,
        path=default_test_path,
        metric=Accuracy,
        loss=classification_loss,
    )
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    generator.search(train_data, test_data)
    generator.search(train_data, test_data)
    clean_dir(default_test_path)
    assert len(generator.history) == 2
def test_max_acq(_, _2):
    """Acquisition maximization never proposes duplicate architectures."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = Searcher(3, (28, 28, 3),
                         verbose=False,
                         path=TEST_TEMP_DIR,
                         metric=Accuracy,
                         loss=classification_loss,
                         generators=[CnnGenerator, ResNetGenerator])
    for _trial in range(3):
        generator.search(train_data, test_data)
    # Every pair of explored descriptors must be distinct.
    descriptors = generator.descriptors
    for i, first in enumerate(descriptors):
        for second in descriptors[i + 1:]:
            assert edit_distance(first, second) != 0.0
    clean_dir(TEST_TEMP_DIR)
def test_max_acq(_, _2):
    """BayesianSearcher acquisition never revisits an identical architecture."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(
        3,
        (28, 28, 3),
        verbose=False,
        path=TEST_TEMP_DIR,
        metric=Accuracy,
        loss=classification_loss,
        generators=[CnnGenerator, ResNetGenerator],
    )
    for _trial in range(3):
        generator.search(train_data, test_data)
    # All explored descriptors must be pairwise non-identical.
    for idx, left in enumerate(generator.descriptors):
        for right in generator.descriptors[idx + 1:]:
            assert edit_distance(left, right) != 0.0
    clean_dir(TEST_TEMP_DIR)
def test_max_acq(_, _1):
    """Search never proposes two architectures at edit distance zero."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(default_test_path)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = Searcher(3, (28, 28, 3),
                         verbose=False,
                         path=default_test_path,
                         metric=Accuracy,
                         loss=classification_loss)
    for _trial in range(3):
        generator.search(train_data, test_data)
    descriptors = generator.descriptors
    for i, a in enumerate(descriptors):
        for b in descriptors[i + 1:]:
            assert edit_distance(a, b, 1) != 0
    clean_dir(default_test_path)
def test_export_json(_, _1):
    """Exporting search results to JSON writes all networks and the search tree.

    Fix: the exported file is now read with a ``with`` block instead of the
    leaking ``json.load(open(...))``, which never closed its file handle.
    """
    import json

    train_data, test_data = get_classification_data_loaders()
    clean_dir(default_test_path)
    generator = Searcher(3, (28, 28, 3),
                         verbose=False,
                         path=default_test_path,
                         metric=Accuracy,
                         loss=classification_loss)
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    for _trial in range(3):
        generator.search(train_data, test_data)
    file_path = os.path.join(default_test_path, 'test.json')
    generator.export_json(file_path)
    # Read back the export and verify its structure; close the file promptly.
    with open(file_path, 'r') as json_file:
        data = json.load(json_file)
    assert len(data['networks']) == 3
    assert len(data['tree']['children']) == 2
    clean_dir(default_test_path)
    assert len(generator.history) == 3
def test_export_json(_, _1, _2):
    """JSON export contains every searched network plus the search tree.

    Fix: replaced the file-handle-leaking ``json.load(open(...))`` with a
    ``with`` context manager so the exported file is closed deterministically.
    """
    import json

    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    generator = Searcher(3, (28, 28, 3),
                         verbose=False,
                         path=TEST_TEMP_DIR,
                         metric=Accuracy,
                         loss=classification_loss,
                         generators=[CnnGenerator])
    Constant.N_NEIGHBOURS = 1
    Constant.T_MIN = 0.8
    for _trial in range(3):
        generator.search(train_data, test_data)
    file_path = os.path.join(TEST_TEMP_DIR, 'test.json')
    generator.export_json(file_path)
    # Verify the round-tripped structure; the handle closes with the block.
    with open(file_path, 'r') as json_file:
        data = json.load(json_file)
    assert len(data['networks']) == 3
    assert len(data['tree']['children']) == 2
    clean_dir(TEST_TEMP_DIR)
    assert len(generator.history) == 3
def test_max_acq(_, _1):
    """No two descriptors explored by the search are identical."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(default_test_path)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = Searcher(
        3,
        (28, 28, 3),
        verbose=False,
        path=default_test_path,
        metric=Accuracy,
        loss=classification_loss,
    )
    generator.search(train_data, test_data)
    generator.search(train_data, test_data)
    generator.search(train_data, test_data)
    for position, current in enumerate(generator.descriptors):
        for other in generator.descriptors[position + 1:]:
            assert edit_distance(current, other, 1) != 0
    clean_dir(default_test_path)
def test_bayesian_searcher(_, _1, _2):
    """Set up classification data and a clean temp dir for the search test."""
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
def test_bayesian_searcher_sp(_, _1, _2, _3):
    """Load the classification data loaders used by the searcher test."""
    train_data, test_data = get_classification_data_loaders()
def test_greedy_searcher(_, _1, _2):
    """Load the classification data loaders used by the greedy-search test."""
    train_data, test_data = get_classification_data_loaders()
def test_model_trainer_classification():
    """A three-iteration classification training run completes without error."""
    model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
    train_data, test_data = get_classification_data_loaders()
    trainer = ModelTrainer(model, train_data, test_data, Accuracy,
                           classification_loss, True)
    trainer.train_model(max_iter_num=3)
def test_model_trainer_classification():
    """Smoke-test ModelTrainer on a generated CNN for three iterations."""
    model = CnnGenerator(3, (28, 28, 3)).generate().produce_model()
    train_data, test_data = get_classification_data_loaders()
    ModelTrainer(
        model, train_data, test_data, Accuracy, classification_loss, True
    ).train_model(max_iter_num=3)
def test_out_of_memory(_, _2):
    """Load the classification data loaders used by the OOM test."""
    train_data, test_data = get_classification_data_loaders()
def test_max_acq(_, _2):
    """Load the classification data loaders used by the acquisition test."""
    train_data, test_data = get_classification_data_loaders()