Example #1
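# Evaluate a DARTS cell architecture on CIFAR using the eval settings in confs/darts_cifar.yaml.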
from FastAutoAugment.darts.darts_micro_builder import DartsMicroBuilder
from FastAutoAugment.common.common import common_init
from FastAutoAugment.nas.evaluate import eval_arch

if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/darts_cifar.yaml',
        param_args=['--common.experiment_name', 'darts_cifar_eval'])

    conf_eval = conf['nas']['eval']

    # evaluate architecture using eval settings
    eval_arch(conf_eval, micro_builder=DartsMicroBuilder())

    exit(0)
Example #2
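# Evaluate a DARTS cell architecture on ImageNet using the eval settings in confs/imagenet_darts.yaml.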
from FastAutoAugment.common.common import common_init
from FastAutoAugment.darts.darts_micro_builder import DartsMicroBuilder
from FastAutoAugment.nas.evaluate import eval_arch

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/imagenet_darts.yaml',
                       param_args=['--common.experiment_name', 'darts_imagenet_eval'])

    conf_eval = conf['nas']['eval']

    micro_builder = DartsMicroBuilder()
    eval_arch(conf_eval, micro_builder=micro_builder)

    exit(0)

Example #3
# Excerpt from a DAWNBench-style training script; imports (torch, data, Trainer,
# CheckPoint, Net, common_init) are omitted from this snippet.
def train_test(conf_eval):
    # region config
    conf_loader = conf_eval['loader']
    save_filename = conf_eval['save_filename']
    conf_checkpoint = conf_eval['checkpoint']
    resume = conf_eval['resume']
    conf_train = conf_eval['trainer']
    # endregion

    device = torch.device(conf_eval['device'])
    checkpoint = CheckPoint(conf_checkpoint,
                            resume) if conf_checkpoint is not None else None
    model = Net().to(device)

    # get data
    train_dl, _, test_dl = data.get_data(conf_loader)
    assert train_dl is not None and test_dl is not None

    trainer = Trainer(conf_train, model, device, checkpoint, False)
    trainer.fit(train_dl, test_dl)


if __name__ == '__main__':
    conf = common_init(config_filepath='confs/dawnbench.yaml',
                       param_args=['--common.experiment_name', 'dawn_net'])

    conf_eval = conf['nas']['eval']

    # train and test the network using the eval settings
    train_test(conf_eval)

    exit(0)
Example #4
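# Sample a random cell architecture (no search involved) and save its description for later evaluation.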
from FastAutoAugment.common.common import common_init
from FastAutoAugment.random_arch.random_micro_builder import RandomMicroBuilder
from FastAutoAugment.nas import nas_utils

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/random_cifar.yaml',
                       param_args=['--common.experiment_name', 'random_cifar_search'])

    # region config
    conf_search = conf['nas']['search']
    conf_model_desc = conf_search['model_desc']
    final_desc_filename = conf_search['final_desc_filename']
    # endregion

    # create a model description and save it to yaml
    # NOTE: there is no search here as the models are just randomly sampled
    model_desc = nas_utils.create_macro_desc(conf_model_desc,
                                             aux_tower=False,
                                             template_model_desc=None)
    micro_builder = RandomMicroBuilder()
    micro_builder.build(model_desc, 0)

    # save model description to the location specified by the search config
    model_desc.save(final_desc_filename)

    exit(0)
Example #5
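# Train and evaluate a model with the fa_reduced_cifar10 augmentation policy applied through the data loader.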
import time
from FastAutoAugment.common.common import get_logger, common_init, expdir_abspath
from FastAutoAugment.data_aug.train import train_and_eval

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/aug_train_cifar.yaml',
                       param_args=[
                           "--autoaug.loader.aug", "fa_reduced_cifar10",
                           "--common.experiment_name", "autoaug_train"
                       ])
    logger = get_logger()

    t = time.time()
    save_path = expdir_abspath('model.pth')

    # result = train_and_eval(conf, val_ratio=conf['val_ratio'], val_fold=conf['val_fold'],
    #                         save_path=save_path, only_eval=conf['only_eval'], metric='test')

    # TODO: Will fail if val_ratio=0 since we are not using latest training infrastructure
    # TODO: Move val_ratio, val_fold, metric to config file
    result = train_and_eval(conf,
                            val_ratio=0.2,
                            val_fold=0,
                            save_path=save_path,
                            only_eval=False,
                            metric='test')
    elapsed = time.time() - t

    logger.info(f'training done. elapsed={elapsed:.1f}s, result={result}')
Example #6
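# End-to-end runner: launches the search script and then the eval script as subprocesses,
# skipping either stage if its output already exists and resume is enabled.
# NOTE: imports (argparse, os, shutil, subprocess, Path, utils, common_init,
# get_conf_common) are omitted from this excerpt.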
def main():
    # accept search and eval scripts to run
    # config file can be supplied using --config
    parser = argparse.ArgumentParser(description='NAS E2E Runs')
    parser.add_argument('--search-script', type=str,
                        default='scripts/petridish/cifar_search.py',
                        help='Search script to run')
    parser.add_argument('--eval-script', type=str,
                        default='scripts/petridish/cifar_eval.py',
                        help='Eval script to run')
    parser.add_argument('--exp_prefix', type=str, default='petridish',
                        help='Experiment prefix to use')
    args, extra_args = parser.parse_known_args()

    # load config to some of the settings like logdir
    conf = common_init(use_args=True)
    logdir = get_conf_common()['logdir']
    assert logdir

    # get script, resume flag and experiment dir for search
    search_script = args.search_script
    resume = conf['nas']['search']['resume']
    search_script = utils.full_path(search_script.strip())
    experiment_name = args.exp_prefix + '_' + Path(search_script).stem
    experiment_dir = os.path.join(logdir, experiment_name)

    # see if search has already produced the output
    final_desc_filepath = os.path.join(experiment_dir, conf['nas']['search']['final_desc_filename'])
    if not resume or not os.path.exists(final_desc_filepath):
        print(f'Starting {search_script}...')
        result = subprocess.run(
            ['python', search_script,
             '--config', conf.config_filepath,
             '--config-defaults', conf.config_defaults_filepath,
             '--common.experiment_name', experiment_name])
        print(f'Script {search_script} returned {result.returncode}')
        if result.returncode != 0:
            exit(result.returncode)
    else:
        print(f'Search is skipped because file {final_desc_filepath} already exists')

    # get script, resume flag and experiment dir for eval
    eval_script = args.eval_script
    resume = conf['nas']['eval']['resume']
    eval_script = utils.full_path(eval_script.strip())
    experiment_name = args.exp_prefix + '_' + Path(eval_script).stem
    experiment_dir = os.path.join(logdir, experiment_name)

    # if eval has already produced the output, skip eval run
    model_filepath = os.path.join(experiment_dir, conf['nas']['eval']['save_filename'])
    if not resume or not os.path.exists(model_filepath):
        # copy output of search to eval folder
        # TODO: take final_desc_filename from eval config
        os.makedirs(experiment_dir, exist_ok=True)
        shutil.copy2(final_desc_filepath, experiment_dir)

        print(f'Starting {eval_script}...')
        result = subprocess.run(
            ['python', eval_script,
             '--config', conf.config_filepath,
             '--config-defaults', conf.config_defaults_filepath,
             '--common.experiment_name', experiment_name])
        print(f'Script {eval_script} returned {result.returncode}')
        if result.returncode != 0:
            exit(result.returncode)
    else:
        print(f'Eval is skipped because file {model_filepath} already exists')
    print('Search and eval done.')
    exit(0)


if __name__ == '__main__':
    main()
Example #7
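# Run the augmentation policy search with the settings in confs/wresnet40x2_cifar10_b512.yaml.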
from FastAutoAugment.common.common import common_init
from FastAutoAugment.data_aug.search import search

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/wresnet40x2_cifar10_b512.yaml')
    search(conf)

Example #8
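# Run Petridish architecture search on CIFAR with the standard ArchTrainer.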
from FastAutoAugment.nas.arch_trainer import ArchTrainer
from FastAutoAugment.common.common import common_init
from FastAutoAugment.nas import search
from FastAutoAugment.petridish.petridish_micro_builder import PetridishMicroBuilder

if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/petridish_cifar.yaml',
        param_args=['--common.experiment_name', 'petridish_cifar_search'])

    # region config
    conf_search = conf['nas']['search']
    # endregion

    micro_builder = PetridishMicroBuilder()
    trainer_class = ArchTrainer

    search.search_arch(conf_search, micro_builder, trainer_class)

    exit(0)
Example #9
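# Build a Petridish eval model from a saved final_model_desc.yaml (overriding n_cells to 14)
# and print a layer-by-layer summary for a batch of 64 CIFAR-sized (3x32x32) inputs.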
from FastAutoAugment.nas.model_desc import ModelDesc
from FastAutoAugment.common.common import common_init
from FastAutoAugment.nas.model import Model
from FastAutoAugment.petridish.petridish_micro_builder import PetridishMicroBuilder
from FastAutoAugment.nas.nas_utils import create_macro_desc

from FastAutoAugment.common.model_summary import summary

conf = common_init(
    config_filepath='confs/petridish_cifar.yaml',
    param_args=['--common.experiment_name', 'petridish_run2_seed42_eval'])

conf_eval = conf['nas']['eval']
conf_model_desc = conf_eval['model_desc']

conf_model_desc['n_cells'] = 14
template_model_desc = ModelDesc.load('final_model_desc.yaml')
model_desc = create_macro_desc(conf_model_desc, aux_tower=True,
                               template_model_desc=template_model_desc)

mb = PetridishMicroBuilder()
mb.register_ops()
model = Model(model_desc, droppath=False, affine=False)
#model.cuda()
summary(model, [64, 3, 32, 32])

exit(0)