Code example #1
Votes: 0
File: i1k_msra.py — Project: DailyActie/AI_DL_FM-neon
                    type=float,
                    default=100,
                    help='subset of training dataset to use (percentage)')
args = parser.parse_args()

# Build the MSRA (ResNet-style) network; depth/bottleneck come from the CLI.
# NOTE(review): create_network is defined elsewhere in this file — presumably
# returns a neon Model and a cost object; confirm against the full source.
model, cost = create_network(args.depth, args.bottleneck)
# Default to a fixed seed of 0 when no RNG seed was supplied on the CLI.
rseed = 0 if args.rng_seed is None else args.rng_seed

# setup data provider
# Both manifest files must be supplied before any loaders are built.
assert 'train' in args.manifest, "Missing train manifest"
assert 'val' in args.manifest, "Missing validation manifest"
# Training loader is seeded so shuffling/subsetting is reproducible.
train = make_msra_train_loader(args.manifest['train'], args.manifest_root,
                               model.be, args.subset_pct, rseed)
valid = make_validation_loader(args.manifest['val'], args.manifest_root,
                               model.be, args.subset_pct)
# Extra loader over the training manifest, used below to re-tune batch-norm
# statistics (see BatchNormTuneCallback).
tune = make_tuning_loader(args.manifest['train'], args.manifest_root, model.be)

# SGD with momentum 0.9 and weight decay 1e-4; learning rate starts at 0.1
# and is multiplied by 0.1 at epochs 30 and 60.
weight_sched = Schedule([30, 60], 0.1)
opt = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001, schedule=weight_sched)

# configure callbacks
# Evaluate top-5 misclassification on the validation set each epoch.
valmetric = TopKMisclassification(k=5)
callbacks = Callbacks(model,
                      eval_set=valid,
                      metric=valmetric,
                      **args.callback_args)
# insert_pos=0 runs the batch-norm tuning pass before the other callbacks.
callbacks.add_callback(BatchNormTuneCallback(tune), insert_pos=0)

model.fit(train,
          optimizer=opt,
          num_epochs=args.epochs,
Code example #2
Votes: 0
File: train.py — Project: rlugojr/neon
# Command-line setup: extend the standard neon parser with model-specific flags.
arg_parser = NeonArgparser(__doc__, default_config_files=config_files)
arg_parser.add_argument('--depth', type=int, default=2,
                        help='depth of each stage (network depth will be 9n+2)')
arg_parser.add_argument('--subset_pct', type=float, default=100,
                        help='subset of training dataset to use (percentage)')
cli_args = arg_parser.parse_args()
# Fall back to a fixed seed of 0 when no (truthy) seed was given.
seed = cli_args.rng_seed or 0

# Check that the proper manifest sets have been supplied
for key, message in (('train', "Missing train manifest"),
                     ('val', "Missing validation manifest")):
    assert key in cli_args.manifest, message

net, loss = create_network(cli_args.depth)

# Data providers: seeded, augmented training set; held-out validation set;
# and an un-augmented training subset used to re-estimate batch-norm stats.
train_set = make_train_loader(cli_args.manifest['train'], cli_args.manifest_root,
                              net.be, cli_args.subset_pct, seed)
val_set = make_validation_loader(cli_args.manifest['val'], cli_args.manifest_root,
                                 net.be, cli_args.subset_pct)
bn_tune_set = make_tuning_loader(cli_args.manifest['train'], cli_args.manifest_root,
                                 net.be)

# Track top-1 error on the validation set; the batch-norm tuning callback is
# inserted at position 0 so it runs ahead of the other callbacks.
run_callbacks = Callbacks(net, eval_set=val_set, metric=Misclassification(),
                          **cli_args.callback_args)
run_callbacks.add_callback(BatchNormTuneCallback(bn_tune_set), insert_pos=0)

# Train with SGD + momentum; the learning rate starts at 0.1 and is scaled
# by 0.1 at epochs 82 and 124, with weight decay 1e-4.
lr_schedule = Schedule([82, 124], 0.1)
sgd = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001, schedule=lr_schedule)
net.fit(train_set, optimizer=sgd, num_epochs=cli_args.epochs,
        cost=loss, callbacks=run_callbacks)