def get_data(manifest, manifest_root, batch_size, subset_pct, rng_seed):
    '''
    Loads the training and validation sets using the aeon loader

    manifest(list): Manifest files for training and validation
    manifest_root(string): Root directory of the manifest files
    batch_size(int): Mini-batch size
    subset_pct(float): Subset percentage of the data to use (0-100)
    rng_seed(int): Seed for the random number generator
    '''
    assert 'train' in manifest[1], "Missing train manifest"
    assert 'test' in manifest[0], "Missing validation manifest"

    train_set = make_train_loader(manifest[1], manifest_root, batch_size,
                                  subset_pct, rng_seed)
    valid_set = make_validation_loader(manifest[0], manifest_root, batch_size,
                                       subset_pct)

    return train_set, valid_set
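# --- Hedged usage sketch (not part of the original script) ---
# One way get_data might be called; the manifest file names, data root, and
# batch size below are illustrative assumptions only. The asserts above expect
# the validation manifest (with 'test' in its name) at index 0 and the
# training manifest at index 1.
manifest_files = ['cifar10-test-index.csv', 'cifar10-train-index.csv']
train_set, valid_set = get_data(manifest_files, '/data/cifar10',
                                batch_size=128, subset_pct=100, rng_seed=0)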
parser.add_argument('--deconv', action='store_true',
                    help='save visualization data from deconvolution')
parser.add_argument('--subset_pct', type=float, default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

model, cost = create_network()
rseed = 0 if args.rng_seed is None else args.rng_seed

# setup data provider
assert 'train' in args.manifest, "Missing train manifest"
assert 'val' in args.manifest, "Missing validation manifest"

train = make_alexnet_train_loader(args.manifest['train'], args.manifest_root,
                                  model.be, args.subset_pct, rseed)
valid = make_validation_loader(args.manifest['val'], args.manifest_root,
                               model.be, args.subset_pct)

sched_weight = Schedule([10], change=0.1)
opt = GradientDescentMomentum(0.01, 0.9, wdecay=0.0005, schedule=sched_weight)

# configure callbacks
valmetric = TopKMisclassification(k=5)
callbacks = Callbacks(model, eval_set=valid, metric=valmetric,
                      **args.callback_args)

if args.deconv:
    callbacks.add_deconv_callback(train, valid)

model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost,
          callbacks=callbacks)
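# --- Hedged follow-up sketch (assumption, not in the original script) ---
# Evaluating the trained model on the validation set after fitting.
# TopKMisclassification is assumed to return [logloss, top-1 error, top-5
# error], as in other neon ImageNet examples.
mets = model.eval(valid, metric=valmetric)
print('LogLoss: %.2f, Accuracy: %.1f%% (Top-1), %.1f%% (Top-5)'
      % (mets[0], (1.0 - mets[1]) * 100, (1.0 - mets[2]) * 100))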
parser = NeonArgparser(__doc__, default_config_files=config_files)
parser.add_argument('--depth', type=int, default=2,
                    help='depth of each stage (network depth will be 9n+2)')
parser.add_argument('--subset_pct', type=float, default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

random_seed = args.rng_seed if args.rng_seed else 0

# Check that the proper manifest sets have been supplied
assert 'train' in args.manifest, "Missing train manifest"
assert 'val' in args.manifest, "Missing validation manifest"

model, cost = create_network(args.depth)

# setup data provider
train = make_train_loader(args.manifest['train'], args.manifest_root, model.be,
                          args.subset_pct, random_seed)
test = make_validation_loader(args.manifest['val'], args.manifest_root, model.be,
                              args.subset_pct)

# tune batch norm parameters on subset of train set with no augmentations
tune_set = make_tuning_loader(args.manifest['train'], args.manifest_root, model.be)

# configure callbacks
callbacks = Callbacks(model, eval_set=test, metric=Misclassification(),
                      **args.callback_args)
callbacks.add_callback(BatchNormTuneCallback(tune_set), insert_pos=0)

# begin training
opt = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001,
                              schedule=Schedule([82, 124], 0.1))
model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost,
          callbacks=callbacks)
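# --- Hedged illustration of the learning rate schedule (assumption) ---
# Schedule([82, 124], 0.1) is understood to multiply the base learning rate by
# 0.1 at epochs 82 and 124. A hypothetical helper reproducing that stepwise
# behaviour for a given epoch:
def stepwise_lr(base_lr, steps, change, epoch):
    # count how many scheduled step epochs have already been reached
    n_drops = sum(1 for s in steps if epoch >= s)
    return base_lr * (change ** n_drops)

# e.g. with base_lr=0.1: epochs 0-81 -> 0.1, 82-123 -> 0.01, 124+ -> 0.001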
train_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'train.cfg')
config_files = [train_config] if os.path.exists(train_config) else []

parser = NeonArgparser(__doc__, default_config_files=config_files)
parser.add_argument('--subset_pct', type=float, default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

model, cost = create_network_lrn()
rseed = 0 if args.rng_seed is None else args.rng_seed

# setup data provider
assert 'train' in args.manifest, "Missing train manifest"
assert 'val' in args.manifest, "Missing validation manifest"

train = make_alexnet_train_loader(args.manifest['train'], args.manifest_root,
                                  model.be, args.subset_pct, rseed,
                                  dtype=args.datatype)
valid = make_validation_loader(args.manifest['val'], args.manifest_root,
                               model.be, args.subset_pct, dtype=args.datatype)

weight_sched = Schedule(20, 0.1)
opt_gdm = GradientDescentMomentum(0.01, 0.9, wdecay=0.0005, schedule=weight_sched,
                                  stochastic_round=args.rounding)
opt_biases = GradientDescentMomentum(0.02, 0.9, schedule=weight_sched,
                                     stochastic_round=args.rounding)
opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})

# configure callbacks
valmetric = TopKMisclassification(k=5)
callbacks = Callbacks(model, eval_set=valid, metric=valmetric,
                      **args.callback_args)

model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost,
          callbacks=callbacks)
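# --- Hedged note on the per-layer optimizer mapping (assumption) ---
# The MultiOptimizer keys above are understood to name layer classes: 'Bias'
# layers use opt_biases, while every other layer falls back to 'default'. A
# hypothetical extension giving BatchNorm layers their own optimizer (purely
# illustrative, not used in this script) might look like:
opt_bn = GradientDescentMomentum(0.01, 0.9, schedule=weight_sched,
                                 stochastic_round=args.rounding)
opt_extended = MultiOptimizer({'default': opt_gdm,
                               'Bias': opt_biases,
                               'BatchNorm': opt_bn})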