Code Example #1
File: test_search.py Project: Saiuz/autokeras
def test_max_acq(_, _2):
    # `_` and `_2` receive the patched mocks; the @patch decorators are
    # omitted in this listing (see Code Example #6 for the decorated version).
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(3, (28, 28, 3), verbose=False, path=TEST_TEMP_DIR, metric=Accuracy,
                                 loss=classification_loss, generators=[CnnGenerator, ResNetGenerator])
    for _ in range(3):
        generator.search(train_data, test_data)
    for index1, descriptor1 in enumerate(generator.descriptors):
        for descriptor2 in generator.descriptors[index1 + 1:]:
            assert edit_distance(descriptor1, descriptor2) != 0.0

    clean_dir(TEST_TEMP_DIR)
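The assertion loop above checks that repeated searches never produce two identical architectures. Below is a minimal sketch of that check as a standalone helper; the helper name and the `distance` parameter are illustrative, not part of autokeras, and `edit_distance` is assumed to be imported as in the test above.

def all_descriptors_distinct(descriptors, distance=edit_distance):
    # Pairwise comparison: a distance of zero would mean two search
    # iterations produced the same architecture descriptor.
    for index, first in enumerate(descriptors):
        for second in descriptors[index + 1:]:
            if distance(first, second) == 0.0:
                return False
    return True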
Code Example #2
File: test_search.py Project: tl-yang/autokeras
def test_max_acq(_, _2):
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = Searcher(3, (28, 28, 3), verbose=False, path=TEST_TEMP_DIR, metric=Accuracy,
                         loss=classification_loss, generators=[CnnGenerator, ResNetGenerator])
    for _ in range(3):
        generator.search(train_data, test_data)
    for index1, descriptor1 in enumerate(generator.descriptors):
        for descriptor2 in generator.descriptors[index1 + 1:]:
            assert edit_distance(descriptor1, descriptor2) != 0.0

    clean_dir(TEST_TEMP_DIR)
Code Example #3
File: search.py Project: wh-forker/autokeras
def same_graph(des1, des2):
    # Two descriptors are considered the same graph when their edit distance is zero.
    return edit_distance(des1, des2, 1) == 0
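A hypothetical way same_graph could be used to drop duplicate candidates; the deduplicate helper below is an illustrative sketch, not autokeras code.

def deduplicate(descriptors):
    # Keep one representative per distinct graph, treating an edit
    # distance of zero (same_graph) as graph equality.
    unique = []
    for descriptor in descriptors:
        if not any(same_graph(descriptor, kept) for kept in unique):
            unique.append(descriptor)
    return unique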
Code Example #4
File: search.py Project: shoreshgithub/autokeras
def contain(descriptors, target_descriptor):
    # True if target_descriptor is already present in descriptors, i.e. its
    # edit distance to some existing descriptor is below a small tolerance.
    for descriptor in descriptors:
        if edit_distance(descriptor, target_descriptor, 1) < 1e-5:
            return True
    return False
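A sketch of how contain might gate a freshly generated candidate so an architecture that was already explored is not trained again; the function name accept_candidate and its arguments are illustrative assumptions, not part of autokeras.

def accept_candidate(searched_descriptors, candidate):
    # Reject a candidate whose descriptor is effectively identical
    # (edit distance under the 1e-5 tolerance) to one already searched.
    if contain(searched_descriptors, candidate):
        return False
    searched_descriptors.append(candidate)
    return True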
Code Example #5
File: search.py Project: karolmajek/autokeras
def same_graph(des1, des2):
    return edit_distance(des1, des2, 1) == 0
Code Example #6
@patch('torch.multiprocessing.get_context', side_effect=MockProcess)
@patch('autokeras.search.ModelTrainer.train_model', side_effect=mock_train)
def test_max_acq(_, _2):
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(3, (28, 28, 3), verbose=False, path=TEST_TEMP_DIR, metric=Accuracy,
                                 loss=classification_loss, generators=[CnnGenerator, ResNetGenerator])
    for _ in range(3):
        generator.search(train_data, test_data)
    for index1, descriptor1 in enumerate(generator.descriptors):
        for descriptor2 in generator.descriptors[index1 + 1:]:
            assert edit_distance(descriptor1, descriptor2) != 0.0

    clean_dir(TEST_TEMP_DIR)


@patch('torch.multiprocessing.get_context', side_effect=MockProcess)
@patch('autokeras.search.ModelTrainer.train_model', side_effect=mock_out_of_memory_train)
def test_out_of_memory(_, _2):
    train_data, test_data = get_classification_data_loaders()
    clean_dir(TEST_TEMP_DIR)
    Constant.N_NEIGHBOURS = 2
    Constant.SEARCH_MAX_ITER = 0
    Constant.T_MIN = 0.8
    Constant.BETA = 1
    generator = BayesianSearcher(3, (28, 28, 3), verbose=True, path=TEST_TEMP_DIR, metric=Accuracy,
                                 loss=classification_loss, generators=[CnnGenerator, ResNetGenerator])