Example #1
# Assumed imports (not part of the original snippet; same API as Example #6 below).
import os
import autogluon.core as ag
from autogluon.vision import ImageClassification as task
from mxnet import optimizer as optim

def task_dog_breed_identification(data_path, dataset):
    images_path = os.path.join(data_path, dataset, 'images_all')
    label_path = os.path.join(data_path, dataset, 'labels.csv')
    test_path = os.path.join(data_path, dataset, 'test')
    load_dataset = task.Dataset(images_path, label_file=label_path)

    # Wrap MXNet's NAG optimizer so its hyperparameters become part of the search space.
    @ag.obj(learning_rate=ag.space.Real(0.3, 0.5),
            momentum=ag.space.Real(0.90, 0.95),
            wd=ag.space.Real(1e-6, 1e-4, log=True),
            multi_precision=False)
    class NAG(optim.NAG):
        pass

    classifier = task.fit(dataset=load_dataset,
                          net=ag.Categorical('standford_dog_resnext101_64x4d',
                                             'standford_dog_resnet152_v1'),
                          optimizer=NAG(),
                          epochs=20,
                          final_fit_epochs=180,
                          num_trials=40,
                          ngpus_per_trial=8,
                          batch_size=48,
                          verbose=False,
                          ensemble=1)

    # Predict on the test split and write a Kaggle-style probability CSV.
    test_dataset = task.Dataset(test_path, train=False, crop_ratio=0.65)
    inds, probs, probs_all = classifier.predict(test_dataset,
                                                set_prob_thresh=0.001)
    ag.utils.generate_prob_csv(test_dataset,
                               probs_all,
                               custom='./submission.csv')
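
The @ag.obj decorator above wraps MXNet's NAG optimizer so that its constructor arguments become a search space instead of fixed values; task.fit then samples a learning rate, momentum, and weight decay for every trial. A minimal sketch of the same pattern, assuming the autogluon.core and MXNet optimizer APIs used in these examples:

import autogluon.core as ag
from mxnet import optimizer as optim

# The decorated class is not a concrete optimizer yet: task.fit(optimizer=TunableSGD())
# draws learning_rate and momentum from the declared ranges for each trial.
@ag.obj(learning_rate=ag.space.Real(1e-3, 1e-1, log=True),
        momentum=ag.space.Real(0.85, 0.95))
class TunableSGD(optim.SGD):
    pass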
Example #2
def test_tricks(combination):
    dataset = 'shopee-iet'
    data_dir = './'
    download_shopee(data_dir, dataset)

    target = config_choice(dataset, data_dir, combination)
    classifier = task.fit(dataset=task.Dataset(target['dataset']),
                          net=target['net'],
                          optimizer=target['optimizer'],
                          epochs=target['epochs'],
                          ngpus_per_trial=target['ngpus_per_trial'],
                          num_trials=target['num_trials'],
                          batch_size=target['batch_size'],
                          verbose=True,
                          search_strategy='random',
                          tricks=target['tricks'],
                          lr_config=target['lr_config'],
                          plot_results=True)

    test_dataset = task.Dataset(target['dataset'].replace('train', 'test/BabyPants'),
                                train=False,
                                scale_ratio_choice=[0.7, 0.8, 0.875])
    inds, probs, probs_all = classifier.predict(test_dataset,
                                                set_prob_thresh=0.001)
    print(inds[0], probs[0], probs_all[0])

    print('Top-1 val acc: %.3f' % classifier.results['best_reward'])
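
download_shopee and config_choice are helpers defined elsewhere in the test script; config_choice returns a dict whose keys mirror the task.fit arguments above. A hypothetical sketch of its shape (illustrative only; the real helper may differ):

import os
import autogluon.core as ag

def config_choice(dataset, data_dir, combination):
    # Hypothetical return value; every key is consumed by the task.fit call above.
    # 'combination' selects among several such presets in the real script.
    return {
        'dataset': os.path.join(data_dir, dataset, 'train'),
        'net': ag.Categorical('resnet50_v1b', 'resnet101_v1d'),
        'optimizer': 'nag',  # e.g. a string name or an ag.obj-wrapped optimizer as in Example #1
        'epochs': 20,
        'ngpus_per_trial': 1,
        'num_trials': 3,
        'batch_size': 64,
        'tricks': True,
        'lr_config': ag.space.Dict(lr_mode='cosine', warmup_epochs=5),
    }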
Example #3
def test_ensemble():
    dataset = task.Dataset(name='FashionMNIST')
    test_dataset = task.Dataset(name='FashionMNIST', train=False)
    classifier = task.fit(dataset,
                          epochs=1,
                          ngpus_per_trial=1,
                          verbose=False,
                          ensemble=2)
    test_acc = classifier.evaluate(test_dataset)
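
ensemble=2 requests an ensemble of two models for the final fit (the counterpart of ensemble=1 in Example #1). The test accuracy is computed but never reported; a one-line follow-up, assuming evaluate returns top-1 accuracy as a float (as Example #7 logs it):

    # Report the result (not in the original snippet).
    print('FashionMNIST test accuracy: %.3f' % test_acc)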
Example #4
def test_classifier_save_load():
    dataset = task.Dataset(name='FashionMNIST')
    test_dataset = task.Dataset(name='FashionMNIST', train=False)
    classifier = task.fit(dataset,
                          epochs=1,
                          ngpus_per_trial=1,
                          verbose=False)
    classifier.save('classifier.ag')
    classifier2 = task.Classifier.load('classifier.ag')
    test_acc = classifier2.evaluate(test_dataset)
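
A natural follow-up is to confirm that the reloaded classifier scores the same as the one that was saved; a minimal sketch, assuming evaluate is deterministic for a fixed model and dataset:

    acc_before = classifier.evaluate(test_dataset)
    acc_after = classifier2.evaluate(test_dataset)
    print('accuracy before save: %.3f, after load: %.3f' % (acc_before, acc_after))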
Example #5
def main():
    opt = parse_args()
    if not os.path.exists(opt.dataset):
        os.mkdir(opt.dataset)
    dataset_path = os.path.join(opt.data_dir, opt.dataset)

    local_path = os.path.dirname(__file__)
    output_directory = os.path.join(opt.dataset, 'checkpoint/')
    filehandler = logging.FileHandler(os.path.join(opt.dataset, 'summary.log'))
    streamhandler = logging.StreamHandler()
    logger = logging.getLogger('')
    logger.setLevel(logging.INFO)
    logger.addHandler(filehandler)
    logger.addHandler(streamhandler)
    logging.info(opt)

    target = config_choice(opt.data_dir, opt.dataset)
    load_dataset = task.Dataset(target['dataset'])
    classifier = task.fit(dataset=load_dataset,
                          output_directory=output_directory,
                          net=target['net'],
                          optimizer=target['optimizer'],
                          tricks=target['tricks'],
                          lr_config=target['lr_config'],
                          resume=opt.resume,
                          epochs=opt.num_epochs,
                          ngpus_per_trial=opt.ngpus_per_trial,
                          num_trials=opt.num_trials,
                          batch_size=opt.batch_size,
                          verbose=True,
                          plot_results=True)

    summary = classifier.fit_summary(output_directory=opt.dataset, verbosity=4)
    logging.info('Top-1 val acc: %.3f' % classifier.results['best_reward'])
    logger.info(summary)

    if opt.submission:
        test_dataset = task.Dataset(os.path.join(opt.data_dir, opt.dataset, 'test'), train=False)
        inds, probs, probs_all, value = predict_details(test_dataset, classifier, load_dataset)
        generate_csv_submission(dataset_path, opt.dataset, local_path, inds, probs_all, value, opt.custom)
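
parse_args, config_choice, predict_details, and generate_csv_submission are defined elsewhere in the script. A hypothetical parse_args showing the options main() relies on (names taken from the usage above; defaults are illustrative):

import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='AutoGluon image classification with CSV submission')
    parser.add_argument('--data_dir', type=str, default='./data')
    parser.add_argument('--dataset', type=str, default='dog-breed-identification')
    parser.add_argument('--num_epochs', type=int, default=120)
    parser.add_argument('--num_trials', type=int, default=30)
    parser.add_argument('--ngpus_per_trial', type=int, default=8)
    parser.add_argument('--batch_size', type=int, default=48)
    parser.add_argument('--resume', action='store_true')
    parser.add_argument('--submission', action='store_true')
    parser.add_argument('--custom', type=str, default='./submission.csv')
    return parser.parse_args()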
Example #6
import math
import autogluon.core as ag
from autogluon.mxnet.optimizer import SGD
from autogluon.extra.model_zoo import EfficientNet
from autogluon.vision import ImageClassification as task


@ag.obj(
    width_coefficient=ag.Categorical(1.1, 1.2),
    depth_coefficient=ag.Categorical(1.1, 1.2),
)
class EfficientNetB1(EfficientNet):
    def __init__(self, width_coefficient, depth_coefficient):
        input_factor = math.sqrt(2.0 / (width_coefficient**2) /
                                 depth_coefficient)
        input_size = math.ceil((224 * input_factor) / 32) * 32
        super().__init__(width_coefficient=width_coefficient,
                         depth_coefficient=depth_coefficient,
                         input_size=input_size)


results = task.fit('imagenet',
                   net=EfficientNetB1(),
                   search_strategy='grid',
                   optimizer=SGD(learning_rate=1e-1, momentum=0.9, wd=1e-4),
                   batch_size=32)

print(results)
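
EfficientNetB1 derives its input resolution from the compound-scaling coefficients: input_factor = sqrt(2 / width_coefficient^2 / depth_coefficient), and the scaled 224-pixel base size is rounded up to a multiple of 32. For the grid above, width = depth = 1.1 yields input_size = 288 while width = depth = 1.2 yields 256, which can be checked directly:

import math

def derived_input_size(width_coefficient, depth_coefficient):
    # Same arithmetic as EfficientNetB1.__init__ above.
    input_factor = math.sqrt(2.0 / (width_coefficient ** 2) / depth_coefficient)
    return math.ceil((224 * input_factor) / 32) * 32

print(derived_input_size(1.1, 1.1))  # 288
print(derived_input_size(1.2, 1.2))  # 256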
Example #7
    # Fragment of a training entry point: network_list, dataset, optimizer,
    # lr_config, and args are provided by the surrounding script.
    if len(network_list) > 1:
        nets = ag.Categorical(*network_list)
    else:
        nets = network_list[0]

    classifier = task.fit(dataset,
                          net=nets,
                          optimizer=optimizer,
                          search_strategy='skopt',
                          search_options={
                              'base_estimator': 'GP',
                              'acq_func': 'EI'
                          },
                          epochs=args.epochs,
                          batch_size=args.batch_size,
                          metric='accuracy',
                          lr_config=lr_config,
                          verbose=True,
                          plot_results=True,
                          visualizer='tensorboard',
                          ngpus_per_trial=args.ngpus_per_trial,
                          num_trials=args.num_trials,
                          final_fit_epochs=args.final_fit_epochs,
                          use_pretrained=True,
                          batch_norm=False,
                          output_directory=os.environ["SM_OUTPUT_DATA_DIR"])

    test_dataset = create_dataset(os.environ["SM_CHANNEL_TEST"], train=False)
    test_acc = classifier.evaluate(test_dataset)
    logging.info('Test accuracy: %f' % test_acc)
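
search_strategy='skopt' hands configuration selection to scikit-optimize: base_estimator 'GP' fits a Gaussian-process surrogate to the trial results seen so far, and acq_func 'EI' proposes the next configuration by expected improvement, in contrast to the random and grid strategies used in the earlier examples. Roughly what that loop looks like in scikit-optimize itself (a sketch, not AutoGluon's scheduler code):

from skopt import Optimizer

# One tunable dimension (a log-uniform learning rate) as an illustration.
opt = Optimizer(dimensions=[(1e-3, 1e-1, 'log-uniform')],
                base_estimator='GP', acq_func='EI')
candidate = opt.ask()              # next configuration to try
opt.tell(candidate, 1.0 - 0.87)    # report a loss, e.g. 1 - validation accuracy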