Example #1
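# Kaggle dog-breed-identification example; assumes module-level imports of os,
# autogluon as ag, ImageClassification as task, and an optimizer module
# exposing NAG (e.g. mxnet's optimizer imported as `optim`).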
def task_dog_breed_identification(data_path, dataset):
    images_path = os.path.join(data_path, dataset, 'images_all')
    label_path = os.path.join(data_path, dataset, 'labels.csv')
    test_path = os.path.join(data_path, dataset, 'test')
    load_dataset = task.Dataset(images_path, label_file=label_path)

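    # Wrap the NAG optimizer so its learning rate, momentum, and weight decay
    # become searchable hyperparameters.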
    @ag.obj(learning_rate=ag.space.Real(0.3, 0.5),
            momentum=ag.space.Real(0.90, 0.95),
            wd=ag.space.Real(1e-6, 1e-4, log=True),
            multi_precision=False)
    class NAG(optim.NAG):
        pass

    classifier = task.fit(dataset=load_dataset,
                          net=ag.Categorical('standford_dog_resnext101_64x4d',
                                             'standford_dog_resnet152_v1'),
                          optimizer=NAG(),
                          epochs=20,
                          final_fit_epochs=180,
                          num_trials=40,
                          ngpus_per_trial=8,
                          batch_size=48,
                          verbose=False,
                          ensemble=1)

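    # Predict on the test images with a 0.001 probability threshold and export
    # the class probabilities to ./submission.csv.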
    test_dataset = task.Dataset(test_path, train=False, crop_ratio=0.65)
    inds, probs, probs_all = classifier.predict(test_dataset,
                                                set_prob_thresh=0.001)
    ag.utils.generate_prob_csv(test_dataset,
                               probs_all,
                               custom='./submission.csv')
Example #2
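# Sweep trick combinations on the Shopee-IET dataset with scikit-optimize
# ('skopt') search; download_shopee, config_choice, and tricks_combination are
# helpers defined elsewhere in the original script.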
def test_tricks(test_trials):
    dataset = 'shopee-iet'
    data_dir = './'
    download_shopee(data_dir, dataset)

    for i in range(test_trials):
        print('Test:{}\n'.format(i))
        target = config_choice(dataset, data_dir, tricks_combination[i])
        classifier = task.fit(dataset=task.Dataset(target['dataset']),
                              net=target['net'],
                              optimizer=target['optimizer'],
                              epochs=target['epochs'],
                              ngpus_per_trial=target['ngpus_per_trial'],
                              num_trials=target['num_trials'],
                              batch_size=target['batch_size'],
                              verbose=True,
                              search_strategy='skopt',
                              tricks=target['tricks'],
                              lr_config=target['lr_config'],
                              plot_results=True)

        # test_dataset = task.Dataset(target['dataset'].replace('images', 'test'))
        # inds, probs, probals_all= classifier.predict(test_dataset)
        # print(inds[0], probs[0], probals_all[0])

        print('Top-1 val acc: %.3f' % classifier.results['best_reward'])
Example #3
# Same trick sweep as Example #2, but with random search and a test-time
# prediction step over several crop scales.
def test_tricks(test_trials):
    dataset = 'shopee-iet'
    data_dir = './'
    download_shopee(data_dir, dataset)

    for i in range(test_trials):
        print('Test:{}\n'.format(i))
        target = config_choice(dataset, data_dir, tricks_combination[i])
        classifier = task.fit(dataset=task.Dataset(target['dataset']),
                              net=target['net'],
                              optimizer=target['optimizer'],
                              epochs=target['epochs'],
                              ngpus_per_trial=target['ngpus_per_trial'],
                              num_trials=target['num_trials'],
                              batch_size=target['batch_size'],
                              verbose=True,
                              search_strategy='random',
                              tricks=target['tricks'],
                              lr_config=target['lr_config'],
                              plot_results=True)

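        # Predict on the test/BabyPants folder using several crop scale ratios
        # and a 0.001 probability threshold.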
        test_dataset = task.Dataset(target['dataset'].replace(
            'train', 'test/BabyPants'),
                                    train=False,
                                    scale_ratio_choice=[0.7, 0.8, 0.875])
        inds, probs, probs_all = classifier.predict(test_dataset,
                                                    set_prob_thresh=0.001)
        print(inds[0], probs[0], probs_all[0])

        print('Top-1 val acc: %.3f' % classifier.results['best_reward'])
Example #4
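# Fit on FashionMNIST for one epoch, save the classifier to disk, reload it,
# and evaluate the reloaded copy on the test split.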
def test_classifier_save_load():
    dataset = task.Dataset(name='FashionMNIST')
    test_dataset = task.Dataset(name='FashionMNIST', train=False)
    classifier = task.fit(dataset, epochs=1, ngpus_per_trial=1, verbose=False)
    classifier.save('classifier.ag')
    classifier2 = task.Classifier.load('classifier.ag')
    test_acc = classifier2.evaluate(test_dataset)
Example #5
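# Fit on FashionMNIST with a two-model ensemble and evaluate test accuracy.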
def test_ensemble():
    dataset = task.Dataset(name='FashionMNIST')
    test_dataset = task.Dataset(name='FashionMNIST', train=False)
    classifier = task.fit(dataset,
                          epochs=10,
                          ngpus_per_trial=1,
                          verbose=False,
                          ensemble=2)
    test_acc = classifier.evaluate(test_dataset)
Example #6
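# Predict on the 'test' subfolder and map the predicted class indices back to
# class names via the training dataset.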
def predict_details(data_path, classifier, load_dataset):
    test_dataset = os.path.join(data_path, 'test')
    inds, probs, probs_all = classifier.predict(task.Dataset(test_dataset))
    value = []
    target_dataset = load_dataset.init()
    for i in inds:
        value.append(target_dataset.classes[i])
    return inds, probs, probs_all, value
Example #7
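# Script entry point: set up logging, run HPO with the settings returned by
# config_choice, log a fit summary, and optionally build a Kaggle submission.
# parse_args, config_choice, and predict_details are helpers defined elsewhere
# in the original script.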
def main():
    opt = parse_args()
    if not os.path.exists(opt.dataset):
        os.mkdir(opt.dataset)
    dataset_path = os.path.join(opt.data_dir, opt.dataset)

    local_path = os.path.dirname(__file__)
    output_directory = os.path.join(opt.dataset, 'checkpoint/')
    filehandler = logging.FileHandler(os.path.join(opt.dataset, 'summary.log'))
    streamhandler = logging.StreamHandler()
    logger = logging.getLogger('')
    logger.setLevel(logging.INFO)
    logger.addHandler(filehandler)
    logger.addHandler(streamhandler)
    logging.info(opt)

    target = config_choice(opt.data_dir, opt.dataset)
    load_dataset = task.Dataset(target['dataset'])
    classifier = task.fit(dataset=load_dataset,
                          output_directory=output_directory,
                          net=target['net'],
                          optimizer=target['optimizer'],
                          tricks=target['tricks'],
                          lr_config=target['lr_config'],
                          resume=opt.resume,
                          epochs=opt.num_epochs,
                          ngpus_per_trial=opt.ngpus_per_trial,
                          num_trials=opt.num_trials,
                          batch_size=opt.batch_size,
                          verbose=True,
                          plot_results=True)

    summary = classifier.fit_summary(output_directory=opt.dataset, verbosity=4)
    logging.info('Top-1 val acc: %.3f' % classifier.results['best_reward'])
    logger.info(summary)

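    # Optionally predict on the test split and generate a submission CSV.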
    if opt.submission:
        test_dataset = task.Dataset(os.path.join(opt.data_dir, opt.dataset,
                                                 'test'),
                                    train=False)
        inds, probs, probs_all, value = predict_details(
            test_dataset, classifier, load_dataset)
        ag.utils.generate_csv_submission(dataset_path, opt.dataset, local_path,
                                         inds, probs_all, value, opt.custom)
Example #8
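# SageMaker training entry point: in a multi-host job only the rank-0 host
# runs the HPO, passing the remaining hosts as remote workers via dist_ip_addrs.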
def train(args):
    print("args {}".format(args))
    is_distributed = len(args.hosts) > 1
    dist_ip_addrs = []
    if is_distributed:
        host_rank = args.hosts.index(args.current_host)
        ag.sagemaker_setup()
        if host_rank > 0:
            print('Host rank {} exit.'.format(host_rank))
            return

        # Only reached on the rank-0 host: pass the remaining hosts to
        # AutoGluon as remote workers.
        dist_ip_addrs = args.hosts
        dist_ip_addrs.pop(host_rank)
    dataset = task.Dataset(os.path.join(args.data_dir, 'train'))
    ngpus_per_trial = 1 if args.num_gpus > 0 else 0

    classifier = task.fit(dataset,
                          epochs=args.epochs,
                          num_trials=args.num_trials,
                          ngpus_per_trial=ngpus_per_trial,
                          verbose=True,
                          dist_ip_addrs=dist_ip_addrs,
                          output_directory=args.model_dir)
Example #9
import autogluon as ag
from autogluon import ImageClassification as task

dataset = task.Dataset('train')
test_dataset = task.Dataset('va', train=False)
print(ag.get_gpu_count())

time_limits = 8 * 60 * 60
epochs = 150
output = 'checkpoint/'
resume = True

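# Bayesian search via scikit-optimize: a random-forest surrogate ('RF') with
# expected-improvement ('EI') acquisition, one trial, and an 8-hour budget.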
classifier = task.fit(dataset,
                      search_strategy='skopt',
                      search_options={
                          'base_estimator': 'RF',
                          'acq_func': 'EI'
                      },
                      time_limits=time_limits,
                      epochs=epochs,
                      ngpus_per_trial=1,
                      num_trials=1,
                      output_directory=output,
                      verbose=True,
                      resume=resume,
                      plot_results=True)

print('Top-1 val acc: %.3f' %
      classifier.results[classifier.results['reward_attr']])
classifier.save('checkpoint/model.pth')
Example #10
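# Notebook-style example: install dependencies, build datasets from the
# unzipped archive, and fall back to the built-in FashionMNIST dataset when no
# GPU is available.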
"""# Import Libraries"""

!pip install --upgrade mxnet-cu100
!pip install autogluon

import autogluon as ag
from autogluon import ImageClassification as task

"""# Create AutoGluon Dataset"""

!unzip shop.zip

dataset = task.Dataset('data/train')

test_dataset = task.Dataset('data/test', train=False)

if ag.get_gpu_count() == 0:
    dataset = task.Dataset(name='FashionMNIST')
    test_dataset = task.Dataset(name='FashionMNIST', train=False)

"""# Use AutoGluon to Fit Models"""

classifier = task.fit(dataset,
                      epochs=5,
                      ngpus_per_trial=1,
                      verbose=False)

print('Top-1 val acc: %.3f' % classifier.results['best_reward'])
Example #11
import math
import autogluon as ag
from autogluon import ImageClassification as task


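# Declare an EfficientNet variant whose width and depth coefficients are
# categorical search dimensions; the input resolution is rescaled so overall
# compute stays roughly constant across candidates.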
@ag.obj(
    width_coefficient=ag.space.Categorical(1.1, 1.2),
    depth_coefficient=ag.space.Categorical(1.1, 1.2),
)
class EfficientNetB1(ag.nas.EfficientNet):
    def __init__(self, width_coefficient, depth_coefficient):
        input_factor = math.sqrt(2.0 / (width_coefficient**2) /
                                 depth_coefficient)
        input_size = math.ceil((224 * input_factor) / 32) * 32
        super().__init__(width_coefficient=width_coefficient,
                         depth_coefficient=depth_coefficient,
                         input_size=input_size)


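# Grid-search the declared coefficient space on ImageNet, training each
# candidate with plain SGD.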
results = task.fit('imagenet',
                   net=EfficientNetB1(),
                   search_strategy='grid',
                   optimizer=ag.optimizer.SGD(learning_rate=1e-1,
                                              momentum=0.9,
                                              wd=1e-4),
                   batch_size=32)

print(results)
ag.done()
Example #12
# `model`, X_test, and y_test come from earlier notebook cells
import numpy as np
from sklearn.metrics import accuracy_score

y_pred = np.rint(model.predict(X_test))
print(accuracy_score(y_test, y_pred))

# %%
"""
# Experiment with AutoGluon: AutoML Toolkit for Deep Learning
"""

# %%
import autogluon as ag
from autogluon import ImageClassification as task


# %%
dataset = task.Dataset('./data')

# %%
#time_limits = 10 * 60 # 10mins
#classifier = task.fit(dataset, time_limits=time_limits, ngpus_per_trial=1)
classifier = task.fit(dataset,
                      epochs=10,
                      ngpus_per_trial=1,
                      verbose=True)

# %%
print('Top-1 val acc: %.3f' % classifier.results['best_reward'])

