print('Full model training')

# --- Train model -----------------------------------------------------------
# Build the LR-schedule callbacks and the epoch count implied by the schedule.
# Every non-None CLI argument is forwarded so each schedule can pick out the
# options it understands.
callbacks, num_epochs = utils.get_lr_schedule(
    args.lr_schedule, data_generator.num_train, args.batch_size,
    schedule_args = { arg_name : arg_val
                      for arg_name, arg_val in vars(args).items()
                      if arg_val is not None })

if args.log_dir:
    # Start TensorBoard logging from a clean slate: wipe any stale run data.
    if os.path.isdir(args.log_dir):
        shutil.rmtree(args.log_dir, ignore_errors = True)
    callbacks.append(keras.callbacks.TensorBoard(log_dir = args.log_dir,
                                                 write_graph = False))

if args.snapshot:
    snapshot_kwargs = {}
    if args.snapshot_best:
        # args.snapshot_best names the metric to monitor; only the best
        # checkpoint according to that metric is kept.
        snapshot_kwargs['save_best_only'] = True
        snapshot_kwargs['monitor'] = args.snapshot_best
    # With multiple GPUs, checkpoint the underlying template model instead of
    # the parallelized wrapper so the saved weights are reloadable standalone.
    callbacks.append(
        keras.callbacks.ModelCheckpoint(args.snapshot, **snapshot_kwargs)
        if args.gpus <= 1
        else utils.TemplateModelCheckpoint(model, args.snapshot, **snapshot_kwargs))

if args.max_decay > 0:
    # Per-batch LR decay chosen so that after all training steps the learning
    # rate has been divided by max_decay (Keras decays as lr/(1+decay*step)).
    decay = (1.0 / args.max_decay - 1) / (
        (data_generator.num_train // args.batch_size)
        * (args.epochs if args.epochs else num_epochs))
else:
    decay = 0.0

if args.cls_weight > 0:
    # Joint objective: embedding loss plus a weighted auxiliary softmax
    # classification head ('prob').
    par_model.compile(
        optimizer = keras.optimizers.SGD(lr = args.sgd_lr, decay = decay,
                                         momentum = 0.9,
                                         nesterov = args.nesterov,
                                         clipnorm = args.clipgrad),
        loss = { embedding_layer_name : loss,
                 'prob' : 'categorical_crossentropy' },
        loss_weights = { embedding_layer_name : 1.0,
                         'prob' : args.cls_weight },
        metrics = { embedding_layer_name : metric, 'prob' : 'accuracy' })
else:
    # Embedding loss only.
    par_model.compile(
        optimizer = keras.optimizers.SGD(lr = args.sgd_lr, decay = decay,
                                         momentum = 0.9,
                                         nesterov = args.nesterov,
                                         clipnorm = args.clipgrad),
        loss = loss,
        metrics = [metric])
schedule_args={ arg_name: arg_val for arg_name, arg_val in vars(args).items() if arg_val is not None }) if args.log_dir: if os.path.isdir(args.log_dir): shutil.rmtree(args.log_dir, ignore_errors=True) callbacks.append( keras.callbacks.TensorBoard(log_dir=args.log_dir, write_graph=False)) if args.snapshot: callbacks.append( keras.callbacks.ModelCheckpoint(args.snapshot) if args.gpus <= 1 else utils.TemplateModelCheckpoint(model, args.snapshot)) if args.max_decay > 0: decay = (1.0 / args.max_decay - 1) / ( (data_generator.num_train // args.batch_size) * (args.epochs if args.epochs else num_epochs)) else: decay = 0.0 par_model.compile(optimizer=keras.optimizers.SGD(lr=args.sgd_lr, decay=decay, momentum=0.9, nesterov=args.nesterov, clipnorm=args.clipgrad), loss='categorical_crossentropy', metrics=['accuracy'])