Example #1
0
def test_task():
    # End-to-end smoke test of the regression image-predictor workflow:
    # fit -> predict -> save/load -> evaluate -> feature extraction,
    # including the numpy (as_pandas=False) output paths.
    train_data, _, eval_data = Task.Dataset.from_folders(
        'https://autogluon.s3.amazonaws.com/datasets/shopee-iet.zip')
    available_models = Task.list_models()
    regressor = Task(problem_type='regression')
    regressor.fit(
        train_data,
        hyperparameters={'epochs': 3, 'batch_size': 8},
        hyperparameter_tune_kwargs={'num_trials': 2},
    )
    batch_pred = regressor.predict(eval_data)
    first_image = eval_data.iloc[0]['image']
    one_pred = regressor.predict(first_image)
    regressor.save('regressor.ag')
    restored = Task.load('regressor.ag')
    summary = restored.fit_summary()
    score = restored.evaluate(eval_data)
    # evaluate() must also accept a raw pandas DataFrame
    score = restored.evaluate(pd.DataFrame(eval_data))
    assert score < 2, f'{score} too bad'
    features = restored.predict_feature(eval_data)
    one_pred_again = restored.predict(first_image)
    assert isinstance(one_pred_again, pd.Series)
    assert one_pred_again.equals(one_pred)
    # numpy output paths
    features_np = restored.predict_feature(eval_data, as_pandas=False)
    one_pred_np = restored.predict(first_image, as_pandas=False)
    assert np.array_equal(one_pred_again.to_numpy(), one_pred_np)
Example #2
0
def test_task():
    # Full classifier round-trip: fit -> predict/predict_proba ->
    # save/load -> evaluate -> feature extraction -> numpy parity checks.
    train_data, _, eval_data = Task.Dataset.from_folders(
        'https://autogluon.s3.amazonaws.com/datasets/shopee-iet.zip')
    available_models = Task.list_models()
    clf = Task()
    clf.fit(
        train_data,
        num_trials=2,
        hyperparameters={'epochs': 1, 'early_stop_patience': 3},
    )
    batch_pred = clf.predict(eval_data)
    first_image = eval_data.iloc[0]['image']
    one_pred = clf.predict(first_image)
    one_proba = clf.predict_proba(first_image)
    clf.save('classifier.ag')
    restored = Task.load('classifier.ag')
    summary = restored.fit_summary()
    acc = restored.evaluate(eval_data)
    # evaluate() must also accept a raw pandas DataFrame
    acc = restored.evaluate(pd.DataFrame(eval_data))
    assert acc[-1] > 0.2, f'{acc} too bad'
    proba = restored.predict_proba(eval_data)
    features = restored.predict_feature(eval_data)
    one_pred_again = restored.predict(first_image)
    assert isinstance(one_pred_again, pd.Series)
    assert one_pred_again.equals(one_pred)
    # numpy output paths must agree with their pandas counterparts
    proba_np = restored.predict_proba(eval_data, as_pandas=False)
    assert np.array_equal(proba.to_numpy(), proba_np)
    features_np = restored.predict_feature(eval_data, as_pandas=False)
    one_pred_np = restored.predict(first_image, as_pandas=False)
    assert np.array_equal(one_pred_again.to_numpy(), one_pred_np)
Example #3
0
def test_task():
    # Detector round-trip on a tiny VOC dataset: fit, predict, then verify
    # that a saved-and-reloaded detector reproduces the original predictions.
    voc = Task.Dataset.from_voc('https://autogluon.s3.amazonaws.com/datasets/tiny_motorbike.zip')
    train_split, _, test_split = voc.random_split()

    detector = Task()
    detector.fit(
        train_split,
        num_trials=1,
        hyperparameters={'batch_size': 4, 'epochs': 5, 'early_stop_max_value': 0.2},
    )
    preds = detector.predict(test_split)
    print('test result', preds)
    detector.save('detector.ag')
    reloaded = Task.load('detector.ag')
    summary = reloaded.fit_summary()
    mean_ap = reloaded.evaluate(test_split)
    preds_after_reload = reloaded.predict(test_split)
    assert preds_after_reload.equals(preds)
Example #4
0
def test_task():
    # Same detector round-trip as the direct-kwarg variant, but num_trials
    # is supplied through hyperparameter_tune_kwargs instead.
    voc = Task.Dataset.from_voc('https://autogluon.s3.amazonaws.com/datasets/tiny_motorbike.zip')
    train_split, _, test_split = voc.random_split()

    detector = Task()
    detector.fit(
        train_split,
        hyperparameters={'batch_size': 4, 'epochs': 5, 'early_stop_max_value': 0.2},
        hyperparameter_tune_kwargs={'num_trials': 1},
    )
    preds = detector.predict(test_split)
    detector.save('detector.ag')
    reloaded = Task.load('detector.ag')
    summary = reloaded.fit_summary()
    mean_ap = reloaded.evaluate(test_split)
    preds_after_reload = reloaded.predict(test_split)
    assert preds_after_reload.equals(preds), f'{preds_after_reload} != \n {preds}'
    # numpy output path
    preds_after_reload = reloaded.predict(test_split, as_pandas=False)
def test_task():
    # Classifier smoke test exercising fit()'s shortcut keyword arguments
    # (epochs / num_trials passed directly instead of via dicts).
    train_data, _, eval_data = Task.Dataset.from_folders('https://autogluon.s3.amazonaws.com/datasets/shopee-iet.zip')
    available_models = Task.list_models()
    clf = Task()
    clf.fit(train_data, epochs=1, num_trials=2)
    batch_pred = clf.predict(eval_data)
    first_image = eval_data.iloc[0]['image']
    one_pred = clf.predict(first_image)
    one_proba = clf.predict_proba(first_image)
    print('test result', batch_pred)
    clf.save('classifier.ag')
    restored = Task.load('classifier.ag')
    summary = restored.fit_summary()
    acc = restored.evaluate(eval_data)
    proba = restored.predict_proba(eval_data)
    features = restored.predict_feature(eval_data)
    # a reloaded predictor must agree with the pre-save predictor
    one_pred_again = restored.predict(first_image)
    assert one_pred_again.equals(one_pred)