def _init_conf(self, is_search_or_eval:bool, clean_expdir:bool)->Config:
    """Build the common Config for this experiment.

    The experiment name is derived from ``is_search_or_eval``; everything
    else is delegated to ``common.common_init``.
    """
    experiment_name = self.get_expname(is_search_or_eval)
    return common.common_init(
        config_filepath=self.config_filename,
        param_args=['--common.experiment_name', experiment_name, ],
        clean_expdir=clean_expdir)
def test_petridish_zero_model():
    """Smoke test: a freshly built petridish CIFAR model runs one forward pass."""
    conf = common_init(config_filepath='confs/petridish_cifar.yaml')
    desc_conf = conf['nas']['search']['model_desc']
    desc = ModelDescBuilder().build(desc_conf)
    model = Model(desc, False, True)
    logits, aux_logits = model(torch.rand((1, 3, 32, 32)))
    # 10-way CIFAR logits, no aux tower output expected
    assert isinstance(logits, torch.Tensor) and logits.shape == (1, 10) and aux_logits is None
def test_darts_zero_model():
    """Smoke test: a darts CIFAR macro model runs one forward pass."""
    conf = common_init(config_filepath='confs/darts_cifar.yaml')
    desc_conf = conf['nas']['search']['model_desc']
    builder = MacroBuilder(desc_conf, aux_tower=False)
    desc = builder.build()
    model = Model(desc, False, True)
    logits, aux_logits = model(torch.rand((1, 3, 32, 32)))
    # 10-way CIFAR logits, no aux tower output expected
    assert isinstance(logits, torch.Tensor) and logits.shape == (1, 10) and aux_logits is None
def _init(self, suffix: str) -> Config:
    """Initialize the common config, optionally stacking the toy config on top.

    The experiment name is '<base_name>_<suffix>'.
    """
    filepaths = [self.config_filename]
    if self.toy_config_filename:
        filepaths.append(self.toy_config_filename)
    exp_name = f'{self.base_name}_{suffix}'
    return common_init(config_filepath=';'.join(filepaths),
                       param_args=['--common.experiment_name', exp_name, ])
def main():
    """Wrap the CIFAR-10 training set in MetaDataset and print every item."""
    conf = common_init(config_filepath='confs/algos/resnet.yaml')
    provider = Cifar10Provider(conf['dataset'])
    tr_transform, te_transform = provider.get_transforms()
    # load raw datasets without transforms; the transform is applied by MetaDataset
    ds_train, ds_test = provider.get_datasets(load_train=True, load_test=True,
                                              transform_train=None,
                                              transform_test=None)
    ds_train = MetaDataset(ds_train, transform=tr_transform)
    for sample, label, meta in ds_train:
        print(sample, label, meta)
def main():
    """Train one model sampled from the NASBench-101 dataset using eval settings."""
    dataset = Nasbench101Dataset('~/dataroot/nasbench_ds/nasbench_full.pkl')
    conf = common_init(config_filepath='confs/algos/nasbench101.yaml')
    conf_eval = conf['nas']['eval']
    # model index 5; 401277 is same model as example
    model = dataset.create_model(5)
    loaders = data.get_data(conf_eval['loader'])
    Trainer(conf_eval['trainer'], model).fit(loaders)
"""Entry point: run petridish architecture search on CIFAR."""
from archai.nas.arch_trainer import ArchTrainer
from archai.common.common import common_init
from archai.nas import search
from archai.petridish.petridish_micro_builder import PetridishMicroBuilder

if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/petridish_cifar.yaml',
        param_args=['--common.experiment_name', 'petridish_cifar_search'])
    conf_search = conf['nas']['search']
    search.search_arch(conf_search, PetridishMicroBuilder(), ArchTrainer)
    exit(0)
def _run_script(script_path: str, conf, experiment_name: str) -> None:
    """Launch one search/eval script as a subprocess; exit the process on failure."""
    print(f'Starting {script_path}...')
    result = subprocess.run([
        'python', script_path, '--config', conf.config_filepath,
        '--config-defaults', conf.config_defaults_filepath,
        '--common.experiment_name', experiment_name
    ])
    print(f'Script {script_path} returned {result.returncode}')
    if result.returncode != 0:
        exit(result.returncode)

def main():
    """Run NAS search followed by eval end-to-end.

    Each stage is skipped when resume is enabled and its output file already
    exists. The search and eval scripts are launched as subprocesses that
    inherit this run's config files.
    """
    # accept search and eval scripts to run
    # config file can be supplied using --config
    parser = argparse.ArgumentParser(description='NAS E2E Runs')
    parser.add_argument('--search-script', type=str,
                        default='scripts/darts/cifar_search.py',
                        help='Search script to run')
    parser.add_argument('--eval-script', type=str,
                        default='scripts/darts/cifar_eval.py',
                        help='Eval script to run')
    parser.add_argument('--exp_prefix', type=str, default='darts',
                        help='Experiment prefix to use')
    args, extra_args = parser.parse_known_args()

    # load config to get some of the settings like logdir
    conf = common_init(use_args=True)
    logdir = get_conf_common()['logdir']
    assert logdir

    # --- search stage ---
    search_script = utils.full_path(args.search_script.strip())
    resume = conf['nas']['search']['resume']
    experiment_name = args.exp_prefix + '_' + Path(search_script).stem
    experiment_dir = os.path.join(logdir, experiment_name)

    # see if search has already produced the output
    final_desc_filepath = os.path.join(
        experiment_dir, conf['nas']['search']['final_desc_filename'])
    if not resume or not os.path.exists(final_desc_filepath):
        _run_script(search_script, conf, experiment_name)
    else:
        print(
            f'Search is skipped because file {final_desc_filepath} already exists'
        )

    # --- eval stage ---
    eval_script = utils.full_path(args.eval_script.strip())
    resume = conf['nas']['eval']['resume']
    experiment_name = args.exp_prefix + '_' + Path(eval_script).stem
    experiment_dir = os.path.join(logdir, experiment_name)

    # if eval has already produced the output, skip eval run
    model_filepath = os.path.join(experiment_dir,
                                  conf['nas']['eval']['save_filename'])
    if not resume or not os.path.exists(model_filepath):
        # copy output of search to eval folder
        # TODO: take final_desc_filename from eval config
        os.makedirs(experiment_dir, exist_ok=True)
        shutil.copy2(final_desc_filepath, experiment_dir)
        _run_script(eval_script, conf, experiment_name)
    else:
        print(f'Eval is skipped because file {model_filepath} already exists')

    print('Search and eval done.')
    exit(0)
"""Entry point: data-augmentation policy search (WideResNet-40x2 / CIFAR-10)."""
from archai.common.common import common_init
from archai.data_aug.search import search

if __name__ == '__main__':
    search(common_init(config_filepath='confs/wresnet40x2_cifar10_b512.yaml'))
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""Build a 14-cell petridish eval model from a saved description and print its summary."""

from archai.nas.model_desc import ModelDesc
from archai.common.common import common_init
from archai.nas.model import Model
from archai.algos.petridish.petridish_model_desc_builder import PetridishModelBuilder
from archai.common.model_summary import summary

conf = common_init(config_filepath='confs/petridish_cifar.yaml',
                   param_args=['--common.experiment_name',
                               'petridish_run2_seed42_eval'])
conf_eval = conf['nas']['eval']
conf_model_desc = conf_eval['model_desc']
# force the full-size eval network
conf_model_desc['n_cells'] = 14

template_model_desc = ModelDesc.load('$expdir/final_model_desc.yaml')
model_builder = PetridishModelBuilder()
model_desc = model_builder.build(conf_model_desc, template=template_model_desc)
# NOTE: removed an unused second PetridishModelBuilder() instantiation here

model = Model(model_desc, droppath=False, affine=False)
summary(model, [64, 3, 32, 32])

exit(0)
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""Measure data-loader iteration throughput for the darts eval loader."""

import logging

from archai.datasets import data
from archai.common import utils
from archai.common.timing import MeasureTime, print_all_timings, print_timing, get_timing
from archai.common.common import logger, common_init

conf = common_init(config_filepath='confs/algos/darts.yaml',
                   param_args=['--common.experiment_name', 'restnet_test'])
conf_eval = conf['nas']['eval']
conf_loader = conf_eval['loader']
# large batches and no cutout: we time raw loading, not training
conf_loader['train_batch'] = 512
conf_loader['test_batch'] = 4096
conf_loader['cutout'] = 0

train_dl, _, test_dl = data.get_data(conf_loader)

@MeasureTime
def iter_dl(dl):
    """Iterate a loader, moving each batch to GPU; return total sample count."""
    dummy = 0.0
    # BUG FIX: previously iterated the global train_dl regardless of the
    # `dl` argument, making it impossible to time test_dl
    for x, y in dl:
        x = x.cuda()
        y = y.cuda()
        dummy += len(x)
    return dummy
"""Entry point: evaluate the darts architecture on CIFAR."""
from archai.darts.darts_micro_builder import DartsMicroBuilder
from archai.common.common import common_init
from archai.nas.evaluate import eval_arch

if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/darts_cifar.yaml',
        param_args=['--common.experiment_name', 'darts_cifar_eval'])
    # evaluate architecture using eval settings
    eval_arch(conf['nas']['eval'], micro_builder=DartsMicroBuilder())
    exit(0)
"""Entry point: train with a fixed augmentation policy and report results."""

import json
import os
import time

from archai.common.common import logger, common_init, expdir_abspath
from archai.data_aug.train import train_and_eval

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/aug_train_cifar.yaml',
                       param_args=["--autoaug.loader.aug", "fa_reduced_cifar10",
                                   "--common.experiment_name", "autoaug_train"])

    t = time.time()
    save_path = expdir_abspath('model.pth')

    # TODO: Will fail if val_ratio=0 since we are not using latest training infrastructure
    # TODO: Move val_ratio, val_fold, metric to config file
    result = train_and_eval(conf, val_ratio=0.2, val_fold=0,
                            save_path=save_path, only_eval=False,
                            metric='test')
    elapsed = time.time() - t

    logger.info('training done.')
    # FIX: elapsed was computed but never reported
    logger.info('elapsed time: %.3f s' % elapsed)
    logger.info('model: %s' % conf['autoaug']['model'])
    logger.info('augmentation: %s' % conf['autoaug']['loader']['aug'])
"""Train and test a ResNet-34 on CIFAR-100 using archai's Trainer."""

import torch

from archai import cifar10_models
from archai.common.trainer import Trainer
from archai.common.config import Config
from archai.common.common import common_init
from archai.datasets import data


def train_test(conf_eval: Config):
    """Build a ResNet-34 on GPU 0 and run training/evaluation per config."""
    # create model
    model = cifar10_models.resnet34().to(torch.device('cuda', 0))

    # data, then training
    loaders = data.get_data(conf_eval['loader'])
    Trainer(conf_eval['trainer'], model).fit(loaders)


if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/algos/resnet.yaml;confs/datasets/cifar100.yaml')
    train_test(conf['nas']['eval'])
"""Train and test a ResNet-34 on CIFAR using archai's Trainer."""

import torch

from archai import cifar10_models
from archai.common.trainer import Trainer
from archai.common.config import Config
from archai.common.common import common_init
from archai.datasets import data


def train_test(conf_eval: Config):
    """Build a ResNet-34 on GPU 0 and run training/evaluation per config."""
    conf_loader = conf_eval['loader']
    conf_trainer = conf_eval['trainer']

    # create model
    Net = cifar10_models.resnet34
    model = Net().to(torch.device('cuda', 0))

    # get data
    # NOTE(review): aligned with the sibling resnet/cifar100 script, which
    # passes the loader bundle from data.get_data to Trainer.fit as a single
    # argument instead of unpacking (train_dl, _, test_dl) — confirm this
    # file targets the same Trainer API version
    data_loaders = data.get_data(conf_loader)

    # train!
    trainer = Trainer(conf_trainer, model)
    trainer.fit(data_loaders)


if __name__ == '__main__':
    conf = common_init(config_filepath='confs/algos/resnet.yaml')
    conf_eval = conf['nas']['eval']
    train_test(conf_eval)
def get_filepath(suffix):
    """Init common config under experiment 'test_basename_<suffix>' and
    return the resolved path of 'somefile.txt' inside the experiment dir."""
    exp_name = f'test_basename_{suffix}'
    common_init(config_filepath='confs/algos/darts.yaml',
                param_args=['--common.experiment_name', exp_name])
    # '$expdir' is expanded by utils.full_path to the experiment directory
    return utils.full_path(os.path.join('$expdir', 'somefile.txt'))
"""Entry point: evaluate the darts architecture on ImageNet."""
from archai.common.common import common_init
from archai.darts.darts_micro_builder import DartsMicroBuilder
from archai.nas.evaluate import eval_arch

if __name__ == '__main__':
    conf = common_init(
        config_filepath='confs/imagenet_darts.yaml',
        param_args=['--common.experiment_name', 'darts_imagenet_eval'])
    eval_arch(conf['nas']['eval'], micro_builder=DartsMicroBuilder())
    exit(0)
"""Entry point: sample a random architecture and save its description.

NOTE: there is no search here as the models are just randomly sampled.
"""
from archai.common.common import common_init
from archai.random_arch.random_micro_builder import RandomMicroBuilder
from archai.nas import nas_utils

if __name__ == '__main__':
    conf = common_init(config_filepath='confs/random_cifar.yaml',
                       param_args=['--common.experiment_name',
                                   'random_cifar_search'])
    conf_search = conf['nas']['search']

    # create macro skeleton, then fill in randomly built cells
    desc = nas_utils.create_macro_desc(conf_search['model_desc'],
                                       aux_tower=False,
                                       template_model_desc=None)
    RandomMicroBuilder().build(desc, 0)

    # save model to location specified by eval config
    desc.save(conf_search['final_desc_filename'])
    exit(0)