def run(dtrain, dtest, epochs=10, verbose=False):
    """Train the multi-input edge-dropout MPCC collection on MNIST.

    dtrain / dtest are the communication-dropout rates applied at train
    and test time respectively. Returns the 'y' value (objective) of the
    final optimization trace.
    """
    cfg = AttrDict(
        save_dir="_models",
        iters=epochs,
        epochs=epochs,
        bootstrap_epochs=1,
        ncams=1,
        verbose=verbose,
    )
    coll = Collection(
        'multiinput_edge_dropout_mpcc_{}'.format(cfg.ncams),
        cfg.save_dir,
        nepochs=cfg.epochs,
        verbose=cfg.verbose,
    )
    coll.set_model_family(
        MultiInputEdgeDropoutFamily,
        ninputs=cfg.ncams,
        resume=False,
        merge_function="max_pool_concat",
        drop_comm_train=dtrain,
        drop_comm_test=dtest,
        input_dims=1,
        output_dims=10,
    )
    train_set, test_set = get_mnist()
    coll.add_trainset(train_set)
    coll.add_testset(test_set)
    coll.set_searchspace(
        nfilters_embeded=[3],
        nlayers_embeded=[2],
        nfilters_cloud=[3],
        nlayers_cloud=[2],
        lr=[1e-3],
        branchweight=[.1],
        ent_T=[100],
    )
    # currently optimize based on the validation accuracy of the main model
    traces = coll.train(niters=cfg.iters, bootstrap_nepochs=cfg.bootstrap_epochs)
    return traces[-1]['y']
def train_model(args, model_type, nfilters):
    """Train one model family on CIFAR-10 and return the best accuracy
    found across its optimization traces (first element of max_acc)."""
    dataset_train, dataset_test = chainer.datasets.get_cifar10(ndim=3)
    # dataset_train, dataset_test = chainer.datasets.get_mnist(ndim=3)
    collection = Collection(model_type, args.save_dir,
                            nepochs=args.epochs, verbose=args.verbose)
    collection.set_model_family(
        model_dict[model_type],
        ninputs=1,
        resume=False,
        input_dims=dataset_train._datasets[0].shape[1],
        merge_function="max_pool_concat",
        drop_comm_train=0.5,
        drop_comm_test=0.5,
    )
    collection.add_trainset(dataset_train)
    collection.add_testset(dataset_test)
    collection.set_searchspace(
        nfilters_embeded=[nfilters],
        nlayers_embeded=[2],
        nfilters_cloud=[32],
        nlayers_cloud=[2],
        lr=[1e-3],
        branchweight=[.1],
        ent_T=[100],
    )
    traces = collection.train(niters=args.iters,
                              bootstrap_nepochs=args.bootstrap_epochs)
    return max_acc(traces)[0]
def train_model(args, model_type, nfilters):
    """Train `model_type` on MNIST and return the best accuracy.

    args must provide save_dir, epochs, verbose, iters and
    bootstrap_epochs (the argparse namespace used elsewhere in this
    project). Returns the first element of max_acc over the traces.
    """
    trainer = Collection(model_type, args.save_dir, nepochs=args.epochs,
                         verbose=args.verbose)
    trainer.set_model_family(model_dict[model_type])
    train, test = chainer.datasets.get_mnist(ndim=3)
    # NOTE: removed an unused `data_shape = train._datasets[0].shape[1:]`
    # local — it was never read and reached into the private _datasets attr.
    trainer.add_trainset(train)
    trainer.add_testset(test)
    trainer.set_searchspace(nfilters_embeded=[nfilters],
                            nlayers_embeded=[2],
                            lr=[1e-3])
    res = trainer.train(niters=args.iters,
                        bootstrap_nepochs=args.bootstrap_epochs)
    return max_acc(res)[0]
# NOTE(review): truncated, newline-stripped chunk — opens with the tail of a
# best-accuracy helper (returns (acc, best_idx); its `def` is out of view) and
# ends mid-call inside mnist.set_searchspace(pretrain_nepochs=[20], ...).
# Kept byte-identical; reformat once the full chunk is visible.
# Visible behavior: argparse flags (-s/--save_dir, --iters, -e/--epochs,
# -b/--bootstrap_epochs, -v/--verbose); builds a Collection named
# 'binary_base_fixed_cifar10' with BinaryBaseFixedFamily, loads CIFAR-10
# (ndim=3), registers train/test sets.
acc = max(acc, t['y']) best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=80) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() mnist = Collection('binary_base_fixed_cifar10', args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) mnist.set_model_family(BinaryBaseFixedFamily) #train, test = chainer.datasets.get_mnist(ndim=3) train, test = chainer.datasets.get_cifar10(ndim=3) #from chainer.datasets.sub_dataset import SubDataset #train = SubDataset(train, 0, 500) #test = SubDataset(train, 0, 500) mnist.add_trainset(train) mnist.add_testset(test) mnist.set_searchspace(pretrain_nepochs=[20],
# NOTE(review): truncated, newline-stripped chunk — opens with the final
# `return acc, best_idx` of an out-of-view helper and ends in a run of
# commented-out debug lines. Kept byte-identical.
# Visible behavior: argparse with -n/--ncams parsed from a comma-separated
# string into a list of ints; Collection named
# 'binary_multiinput_increasing_mpcc_<concatenated cam ids>' (via reduce);
# MultiInputEdgeFamily with ninputs=len(ncams), batchsize=700,
# merge_function="max_pool_concat"; data from get_mvmc_flatten_eval(ncams).
return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=100) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=100) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=str, default="0") args = parser.parse_args() args.ncams = [int(i) for i in args.ncams.split(",")] mnist = Collection('binary_multiinput_increasing_mpcc_{}'.format( reduce(lambda x, y: str(x) + str(y), args.ncams)), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily, ninputs=len(ncams), batchsize=700, merge_function="max_pool_concat") train, test = get_mvmc_flatten_eval(ncams) #print("train",len(train)) #print("test",len(test)) #import sys #sys.exit(0) #train, test = chainer.datasets.get_cifar10(ndim=3)
# NOTE(review): truncated, newline-stripped chunk — opens with the tail of a
# best-accuracy helper (its `def` is out of view); the script body clearly
# continues past the visible end. Kept byte-identical.
# Visible behavior: argparse with -n/--ncams as an int (default 6);
# Collection named 'multiinput_<ncams>'; MultiInputFamily with
# ninputs=ncams; data from get_mvmc_flatten(range(ncams));
# train/test sets registered.
acc = max(acc, t['y']) best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=40) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=int, default=6) args = parser.parse_args() mnist = Collection('multiinput_{}'.format(args.ncams), args.save_dir, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputFamily, ninputs=ncams) train, test = get_mvmc_flatten(range(ncams)) #train, test = chainer.datasets.get_cifar10(ndim=3) #print(train[1]) #print(len(train[0])) #from chainer.datasets.sub_dataset import SubDataset #train = SubDataset(train, 0, 500) #test = SubDataset(train, 0, 500) mnist.add_trainset(train) mnist.add_testset(test)
# NOTE(review): truncated, newline-stripped chunk — opens with the final
# `return acc, best_idx` of an out-of-view helper and ends in commented-out
# debug lines. Kept byte-identical.
# Visible behavior: -n/--ncams defaults to "0,1,2,3,4,5", split into ints;
# Collection named 'binary_multiinput_weights_mpcc_<last cam id>'
# (note: name uses args.ncams[-1] only); MultiInputEdgeFamily with
# ninputs=len(ncams), batchsize=700, merge_function="max_pool_concat";
# data from get_mvmc_flatten_eval(ncams).
return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=100) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=100) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=str, default="0,1,2,3,4,5") args = parser.parse_args() args.ncams = [int(i) for i in args.ncams.split(",")] mnist = Collection('binary_multiinput_weights_mpcc_{}'.format(args.ncams[-1]), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily, ninputs=len(ncams), batchsize=700, merge_function="max_pool_concat") train, test = get_mvmc_flatten_eval(ncams) #print("train",len(train)) #print("test",len(test)) #import sys #sys.exit(0) #train, test = chainer.datasets.get_cifar10(ndim=3)
# NOTE(review): truncated, newline-stripped chunk — opens with the tail of a
# best-accuracy helper (its `def` is out of view); script continues past the
# visible end (ends right after set_chooser). Kept byte-identical.
# Visible behavior: standard argparse flags; MNIST (ndim=3); Collection
# 'simple_hybrid' with SimpleHybridFamily; search space sweeps ent_T over
# [0., 0.005, 0.001, 0.1, 100] with fixed filters/layers/lr/branchweight;
# uses deepopt.chooser.GridChooser for the search.
acc = max(acc, t['y']) best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=20) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() train, test = chainer.datasets.get_mnist(ndim=3) hybrid = Collection('simple_hybrid', args.save_dir, nepochs=args.epochs, verbose=args.verbose) hybrid.set_model_family(SimpleHybridFamily) hybrid.add_trainset(train) hybrid.add_testset(test) hybrid.set_searchspace( nfilters_embeded=[64], nlayers_embeded=[2], nfilters_cloud=[64], nlayers_cloud=[2], lr=[1e-3], branchweight=[.5], ent_T=[0., 0.005, 0.001, 0.1, 100] ) hybrid.set_chooser(deepopt.chooser.GridChooser())
# NOTE(review): truncated, newline-stripped chunk — opens with the final
# `return acc, best_idx` of an out-of-view helper and ends in commented-out
# debug lines. Kept byte-identical.
# Visible behavior: -n/--ncams "0,1,2,3,4,5" split into ints; Collection
# named 'exp_mem_percent_<concatenated cam ids>' (via reduce);
# MultiInputEdgeFamily with ninputs=len(ncams), batchsize=700,
# merge_function="max_pool_concat"; data from get_mvmc_flatten_eval(ncams).
return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=100) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=100) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=str, default="0,1,2,3,4,5") args = parser.parse_args() args.ncams = [int(i) for i in args.ncams.split(",")] mnist = Collection('exp_mem_percent_{}'.format( reduce(lambda x, y: str(x) + str(y), args.ncams)), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily, ninputs=len(ncams), batchsize=700, merge_function="max_pool_concat") train, test = get_mvmc_flatten_eval(ncams) #print("train",len(train)) #print("test",len(test)) #import sys #sys.exit(0) #train, test = chainer.datasets.get_cifar10(ndim=3)
# NOTE(review): truncated, newline-stripped chunk — opens mid-helper
# (`best_idx = i` / `return acc, best_idx`; the `def` is out of view) and
# ends in commented-out debug lines. Kept byte-identical.
# Visible behavior: -n/--ncams (default "0") split into ints; name suffix is
# "_"-joined cam ids; Collection
# 'binary_multiinput_individual_mpcc_1_nolocal<suffix>'; MultiInputEdgeFamily
# with ninputs=len(ncams), batchsize=700, merge_function="max_pool_concat";
# data from get_mvmc_flatten_eval(ncams).
best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=100) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=100) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=str, default="0") args = parser.parse_args() args.ncams = [int(i) for i in args.ncams.split(",")] suffix = "_".join([str(i) for i in args.ncams]) mnist = Collection('binary_multiinput_individual_mpcc_1_nolocal{}'.format(suffix), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily,ninputs=len(ncams),batchsize=700,merge_function="max_pool_concat") train, test = get_mvmc_flatten_eval(ncams) #print("train",len(train)) #print("test",len(test)) #import sys #sys.exit(0) #train, test = chainer.datasets.get_cifar10(ndim=3) #print(train[1]) #print(len(train[0])) #from chainer.datasets.sub_dataset import SubDataset #train = SubDataset(train, 0, 500) #test = SubDataset(train, 0, 500)
# NOTE(review): truncated, newline-stripped chunk — opens mid-helper
# (`best_idx = i` / `return acc, best_idx`; the `def` is out of view) and
# ends inside a run of commented-out debug lines. Kept byte-identical.
# Visible behavior: -n/--ncams as an int (default 6); Collection
# 'multiinput_edge_mpmp_<ncams>'; MultiInputEdgeFamily with ninputs=ncams,
# batchsize=400, merge_function="max_pool" (plain max-pool merge, unlike the
# max_pool_concat variants elsewhere in this file); data from
# get_mvmc_flatten(range(ncams)).
best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=40) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=int, default=6) args = parser.parse_args() mnist = Collection('multiinput_edge_mpmp_{}'.format(args.ncams), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily, ninputs=ncams, batchsize=400, merge_function="max_pool") train, test = get_mvmc_flatten(range(ncams)) #train, test = chainer.datasets.get_cifar10(ndim=3) #print(train[1]) #print(len(train[0])) #from chainer.datasets.sub_dataset import SubDataset #train = SubDataset(train, 0, 500)
# NOTE(review): truncated, newline-stripped chunk — project imports followed
# by a script start; it ends inside a commented-out set_searchspace block and
# clearly continues past the visible end. Kept byte-identical.
# Visible behavior: imports Collection, SimpleHybridFamily, BinaryFamily,
# visualize and deepopt.chooser; standard argparse flags; Collection 'binary'
# with BinaryFamily on MNIST (ndim=3); train/test sets registered; the
# search-space call is entirely commented out here.
from elaas.elaas import Collection from elaas.family.simple import SimpleHybridFamily from elaas.family.binary import BinaryFamily from visualize import visualize import deepopt.chooser parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=20) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() mnist = Collection('binary', args.save_dir, nepochs=args.epochs, verbose=args.verbose) mnist.set_model_family(BinaryFamily) train, test = chainer.datasets.get_mnist(ndim=3) mnist.add_trainset(train) mnist.add_testset(test) # mnist.set_searchspace( # nfilters_embeded=[5, 10], # nlayers_embeded=[1, 2], # nfilters_cloud=[5], # nlayers_cloud=[1], # lr=[1e-3], # branchweight=[.1], # ent_T=[0.1, 0.2, 0.4] # )
# NOTE(review): truncated, newline-stripped chunk — opens with the tail of a
# best-accuracy helper (its `def` is out of view) and ends mid-call inside
# mnist.set_searchspace(nfilters_embeded=[64], ...). Kept byte-identical.
# Visible behavior: standard argparse flags; Collection
# 'float_float_cifar10' with FloatFloatFamily on CIFAR-10 (ndim=3);
# train/test sets registered.
acc = max(acc, t['y']) best_idx = i return acc, best_idx parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=40) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=2) parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() mnist = Collection('float_float_cifar10', args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) mnist.set_model_family(FloatFloatFamily) #train, test = chainer.datasets.get_mnist(ndim=3) train, test = chainer.datasets.get_cifar10(ndim=3) #from chainer.datasets.sub_dataset import SubDataset #train = SubDataset(train, 0, 500) #test = SubDataset(train, 0, 500) mnist.add_trainset(train) mnist.add_testset(test) mnist.set_searchspace(nfilters_embeded=[64],
# NOTE(review): truncated, newline-stripped chunk — a script body that ends
# in commented-out debug lines and clearly continues past the visible end.
# Kept byte-identical.
# Visible behavior: adds -m/--merge (default "max_pool_concat") on top of the
# usual flags; -n/--ncams "0,1,2,3,4,5" split into ints; Collection named
# 'binary_combining_<merge>_<concatenated cam ids>' (via reduce);
# MultiInputEdgeFamily with ninputs=len(ncams), batchsize=700 and the chosen
# merge function; data from get_mvmc_flatten_eval(ncams).
parser = argparse.ArgumentParser(description='Hybrid Example') parser.add_argument('-s', '--save_dir', default='_models') parser.add_argument('--iters', type=int, default=100) parser.add_argument('-e', '--epochs', type=int, default=100) parser.add_argument('-b', '--bootstrap_epochs', type=int, default=100) parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-n', '--ncams', type=str, default="0,1,2,3,4,5") parser.add_argument('-m', '--merge', type=str, default="max_pool_concat") args = parser.parse_args() merge = args.merge args.ncams = [int(i) for i in args.ncams.split(",")] mnist = Collection('binary_combining_{}_{}'.format( merge, reduce(lambda x, y: str(x) + str(y), args.ncams)), args.save_dir, input_dims=3, nepochs=args.epochs, verbose=args.verbose) ncams = args.ncams mnist.set_model_family(MultiInputEdgeFamily, ninputs=len(ncams), batchsize=700, merge_function=merge) train, test = get_mvmc_flatten_eval(ncams) #print("train",len(train)) #print("test",len(test)) #import sys #sys.exit(0) #train, test = chainer.datasets.get_cifar10(ndim=3)