def run_training(
    model_f,
    lr_f,
    name,
    epochs,
    batch_size,
    train_data,
    val_data,
    img,
    img_size,
    mixed_float=True,
    notebook=True,
):
    """
    val_data : (X_val, Y_val) tuple
    """
    if mixed_float:
        policy = mixed_precision.Policy('mixed_float16')
        mixed_precision.set_global_policy(policy)

    st = time.time()

    # NOTE: the network input is fixed at 200x200; img_size is used only by
    # the dataset pipeline below.
    inputs = keras.Input((200, 200, 3))
    mymodel = AdiposeModel(inputs, model_f)
    loss = keras.losses.BinaryCrossentropy(from_logits=True)
    mymodel.compile(
        optimizer='adam',
        loss=loss,
        metrics=[
            keras.metrics.BinaryAccuracy(threshold=0.5),
        ],
    )

    logdir = 'logs/fit/' + name
    tensorboard_callback = tf.keras.callbacks.TensorBoard(
        log_dir=logdir,
        histogram_freq=1,
        profile_batch='3,5',
        update_freq='epoch',
    )
    lr_callback = keras.callbacks.LearningRateScheduler(lr_f, verbose=1)

    savedir = 'savedmodels/' + name + '/{epoch}'
    save_callback = keras.callbacks.ModelCheckpoint(
        savedir, save_weights_only=True, verbose=1)

    if notebook:
        tqdm_callback = TqdmNotebookCallback(
            metrics=['loss', 'binary_accuracy'], leave_inner=False)
    else:
        tqdm_callback = TqdmCallback()

    train_ds = create_train_dataset(img, train_data, img_size, batch_size)
    val_ds = create_train_dataset(img, val_data, img_size, batch_size, True)

    image_callback = ValFigCallback(val_ds, logdir)

    mymodel.fit(
        x=train_ds,
        epochs=epochs,
        steps_per_epoch=len(train_data) // batch_size,
        callbacks=[
            tensorboard_callback,
            lr_callback,
            save_callback,
            tqdm_callback,
            image_callback,
        ],
        verbose=0,
        validation_data=val_ds,
        validation_steps=10,
    )

    print('Took {} seconds'.format(time.time() - st))
    mymodel.evaluate(val_ds, steps=1000)
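
# The learning-rate schedule `lr_f` is left to the caller in all of these
# training routines. The sketch below is a hypothetical example, not part of
# the original project: keras.callbacks.LearningRateScheduler calls the
# schedule once per epoch with the epoch index and the current learning rate,
# and uses the returned float as the new learning rate.
def example_lr_schedule(epoch, lr):
    """Hypothetical step decay: start at 1e-3, reduce 10x every 20 epochs.

    The current learning rate `lr` is ignored; the value is derived from the
    epoch index alone.
    """
    return 1e-3 * (0.1 ** (epoch // 20))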
def run_training(
    model_f,
    lr_f,
    name,
    epochs,
    batch_size,
    steps_per_epoch,
    vid_dir,
    edge_dir,
    train_vid_names,
    val_vid_names,
    frame_size,
    flow_map_size,
    interpolate_ratios,
    patch_size,
    overlap,
    edge_model_f,
    mixed_float=True,
    notebook=True,
    profile=False,
    edge_model_path=None,
    amodel_path=None,
    load_model_path=None,
):
    """
    patch_size, frame_size and flow_map_size are all in (WIDTH, HEIGHT) format.

    Either load_model_path (full cyclic model) or both edge_model_path and
    amodel_path (individual components) must be provided.
    """
    if ((edge_model_path is None) or (amodel_path is None))\
            and (load_model_path is None):
        raise ValueError('Need a path to load model')

    if mixed_float:
        policy = mixed_precision.Policy('mixed_float16')
        mixed_precision.set_global_policy(policy)

    st = time.time()

    a_model = anime_model(model_f, interpolate_ratios, flow_map_size)
    e_model = EdgeModel([patch_size[1], patch_size[0], 3], edge_model_f)

    # Load component weights first (if given)...
    if amodel_path is not None:
        a_model.load_weights(amodel_path).expect_partial()
        print('*' * 50)
        print(f'Anime model loaded from : {amodel_path}')
        print('*' * 50)
    if edge_model_path is not None:
        e_model.load_weights(edge_model_path).expect_partial()
        print('*' * 50)
        print(f'Edge model loaded from : {edge_model_path}')
        print('*' * 50)

    c_model = AnimeModelCyclic(
        a_model,
        e_model,
        (patch_size[1], patch_size[0]),
        overlap,
    )
    # ...then the full cyclic model weights, which take precedence.
    if load_model_path is not None:
        c_model.load_weights(load_model_path)
        print('*' * 50)
        print(f'Cyclic model loaded from : {load_model_path}')
        print('*' * 50)

    c_model.compile(optimizer='adam')

    logdir = 'logs/fit/' + name
    if profile:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch='3,5',
            update_freq='epoch',
        )
    else:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch=0,
            update_freq='epoch',
        )
    lr_callback = keras.callbacks.LearningRateScheduler(lr_f, verbose=1)

    savedir = 'savedmodels/' + name + '/{epoch}'
    save_callback = keras.callbacks.ModelCheckpoint(
        savedir, save_weights_only=True, verbose=1)

    if notebook:
        tqdm_callback = TqdmNotebookCallback(metrics=['loss'],
                                             leave_inner=False)
    else:
        tqdm_callback = TqdmCallback()

    train_ds = create_train_dataset(vid_dir,
                                    edge_dir,
                                    train_vid_names,
                                    frame_size,
                                    batch_size,
                                    parallel=6)
    val_ds = create_train_dataset(vid_dir,
                                  edge_dir,
                                  val_vid_names,
                                  frame_size,
                                  batch_size,
                                  val_data=True,
                                  parallel=4)

    image_callback = ValFigCallback(val_ds, logdir)

    c_model.fit(
        x=train_ds,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        callbacks=[
            tensorboard_callback,
            lr_callback,
            save_callback,
            tqdm_callback,
            image_callback,
        ],
        verbose=0,
        validation_data=val_ds,
        validation_steps=50,
    )

    delta = time.time() - st
    hours, remain = divmod(delta, 3600)
    minutes, seconds = divmod(remain, 60)
    print(
        f'Took {hours:.0f} hours {minutes:.0f} minutes {seconds:.2f} seconds')
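
# Hedged usage sketch for the cyclic training routine above. All values are
# hypothetical placeholders (my_model_f, my_edge_model_f, the name lists, the
# paths and the numeric settings are not from the original project); only the
# parameter names and the (WIDTH, HEIGHT) conventions come from the function
# signature and its docstring.
#
# run_training(
#     model_f=my_model_f,
#     lr_f=example_lr_schedule,       # e.g. the step-decay sketch above
#     name='cyclic_run_01',
#     epochs=50,
#     batch_size=8,
#     steps_per_epoch=1000,
#     vid_dir='data/videos',
#     edge_dir='data/edges',
#     train_vid_names=train_names,
#     val_vid_names=val_names,
#     frame_size=(320, 180),          # (WIDTH, HEIGHT)
#     flow_map_size=(80, 45),         # (WIDTH, HEIGHT)
#     interpolate_ratios=[0.5],
#     patch_size=(256, 256),          # (WIDTH, HEIGHT)
#     overlap=32,
#     edge_model_f=my_edge_model_f,
#     edge_model_path='savedmodels/edge_run/10',
#     amodel_path='savedmodels/anime_run/10',
# )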
def run_training(
    encoder_f,
    box_f,
    lr_f,
    name,
    epochs,
    batch_size,
    steps_per_epoch,
    img,
    data,
    val_data,
    img_size,
    mixed_float=True,
    notebook=True,
):
    """
    val_data : (X_val, Y_val) tuple
    """
    if mixed_float:
        policy = mixed_precision.Policy('mixed_float16')
        mixed_precision.set_global_policy(policy)

    st = time.time()

    inputs = {
        'image': keras.Input((img_size[0], img_size[1], 3)),
        'pos': keras.Input((2,)),
    }
    mymodel = BoxModel(inputs, encoder_f, box_f)
    loss = keras.losses.MeanSquaredError()
    mymodel.compile(
        optimizer='adam',
        loss=loss,
        # metrics=[
        #     'mse',
        # ],
    )

    logdir = 'logs/fit/' + name
    tensorboard_callback = tf.keras.callbacks.TensorBoard(
        log_dir=logdir,
        histogram_freq=1,
        profile_batch='3,5',
        update_freq='epoch',
    )
    lr_callback = keras.callbacks.LearningRateScheduler(lr_f, verbose=1)

    savedir = 'savedmodels/' + name + '/{epoch}'
    save_callback = keras.callbacks.ModelCheckpoint(
        savedir, save_weights_only=True, verbose=1)

    if notebook:
        # Only the loss is tracked; the model is compiled without extra
        # metrics.
        tqdm_callback = TqdmNotebookCallback(metrics=['loss'],
                                             leave_inner=False)
    else:
        tqdm_callback = TqdmCallback()

    # if augment:
    train_ds = create_train_dataset(img, data, img_size, batch_size)

    mymodel.fit(
        x=train_ds,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        validation_data=val_data,
        callbacks=[
            tensorboard_callback,
            lr_callback,
            save_callback,
            tqdm_callback,
        ],
        verbose=0,
    )
    # else:
    #     mymodel.fit(
    #         x=X_train,
    #         y=Y_train,
    #         epochs=epochs,
    #         batch_size=batch_size,
    #         callbacks=[
    #             tensorboard_callback,
    #             lr_callback,
    #             save_callback,
    #             tqdm_callback,
    #         ],
    #         verbose=0,
    #         validation_data=val_data,
    #     )

    print('Took {} seconds'.format(time.time() - st))
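
# Shape sketch for the (X_val, Y_val) tuple that the routine above passes to
# model.fit as validation_data. This is an assumption derived only from the
# named keras.Input entries in that function; N_VAL and the target array
# Y_val (whatever BoxModel regresses, e.g. box coordinates) are hypothetical
# placeholders, not definitions from the original project.
#
# import numpy as np
# N_VAL = 64
# X_val = {
#     'image': np.zeros((N_VAL, img_size[0], img_size[1], 3), np.float32),
#     'pos': np.zeros((N_VAL, 2), np.float32),
# }
# Y_val = np.zeros((N_VAL, 4), np.float32)   # hypothetical target shape
# val_data = (X_val, Y_val)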
def run_training(
    model_f,
    lr_f,
    name,
    epochs,
    batch_size,
    steps_per_epoch,
    train_vid_paths,
    val_vid_paths,
    test_vid_paths,
    frame_size,
    interpolate_ratios,
    mixed_float=True,
    notebook=True,
    profile=False,
    load_model_path=None,
):
    if mixed_float:
        policy = mixed_precision.Policy('mixed_float16')
        mixed_precision.set_global_policy(policy)

    st = time.time()

    # 6 input channels: presumably two RGB frames stacked along the channel
    # axis. frame_size is (WIDTH, HEIGHT), hence the index swap here.
    inputs = keras.Input((frame_size[1], frame_size[0], 6))
    mymodel = AnimeModel(inputs, model_f, interpolate_ratios)
    if load_model_path:
        mymodel.load_weights(load_model_path)
        print(f'Loaded from : {load_model_path}')
    loss = keras.losses.MeanAbsoluteError()
    mymodel.compile(
        optimizer='adam',
        loss=loss,
    )

    logdir = 'logs/fit/' + name
    if profile:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch='3,5',
            update_freq=steps_per_epoch,
        )
    else:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch=0,
            update_freq=steps_per_epoch,
        )
    lr_callback = keras.callbacks.LearningRateScheduler(lr_f, verbose=1)

    savedir = 'savedmodels/' + name + '/{epoch}'
    save_callback = keras.callbacks.ModelCheckpoint(
        savedir, save_weights_only=True, verbose=1)

    if notebook:
        tqdm_callback = TqdmNotebookCallback(metrics=['loss'],
                                             leave_inner=False)
    else:
        tqdm_callback = TqdmCallback()

    train_ds = create_train_dataset(train_vid_paths, frame_size, batch_size)
    val_ds = create_train_dataset(val_vid_paths, frame_size, batch_size, True)

    image_callback = ValFigCallback(val_ds, logdir)

    mymodel.fit(
        x=train_ds,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        callbacks=[
            tensorboard_callback,
            lr_callback,
            save_callback,
            tqdm_callback,
            image_callback,
        ],
        verbose=0,
        validation_data=val_ds,
        validation_steps=10,
    )

    delta = time.time() - st
    hours, remain = divmod(delta, 3600)
    minutes, seconds = divmod(remain, 60)
    print(
        f'Took {hours:.0f} hours {minutes:.0f} minutes {seconds:.2f} seconds')
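
# Hedged usage sketch for the frame-interpolation routine above. All values
# are hypothetical placeholders (my_model_f and the path lists are not from
# the original project); only the parameter names and the (WIDTH, HEIGHT)
# frame_size convention come from the function itself.
#
# run_training(
#     model_f=my_model_f,
#     lr_f=example_lr_schedule,       # e.g. the step-decay sketch above
#     name='anime_interp_01',
#     epochs=100,
#     batch_size=16,
#     steps_per_epoch=2000,
#     train_vid_paths=train_paths,
#     val_vid_paths=val_paths,
#     test_vid_paths=test_paths,      # accepted but unused by this routine
#     frame_size=(320, 180),          # (WIDTH, HEIGHT)
#     interpolate_ratios=[0.5],
#     profile=False,
# )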
def run_training(
    backbone_f,
    lr_f,
    name,
    epochs,
    steps_per_epoch,
    # batch_size,
    intermediate_filters,
    kernel_size,
    stride,
    rfcn_window,
    anchor_ratios,
    anchor_scales,
    class_names,
    bbox_sizes,
    train_dir,
    val_dir,
    img_size,
    frozen_layers=[],
    mixed_float=True,
    notebook=True,
    load_model_path=None,
    profile=False,
):
    """
    img_size: (WIDTH, HEIGHT)
    frozen_layers: zero or more of
        ['rfcn', 'rfcn_cls', 'rfcn_reg',
         'rpn', 'rpn_inter', 'rpn_cls', 'rpn_reg',
         'backbone']
    """
    if mixed_float:
        policy = mixed_precision.Policy('mixed_float16')
        mixed_precision.set_global_policy(policy)

    st = time.time()

    inputs = keras.Input((img_size[0], img_size[1], 3))
    # One extra class index is reserved for the background class.
    if class_names is None:
        num_classes = 1
    else:
        num_classes = len(class_names) + 1
    mymodel = ObjectDetector(backbone_f, intermediate_filters, kernel_size,
                             stride, img_size, num_classes, rfcn_window,
                             anchor_ratios, anchor_scales)
    if load_model_path:
        mymodel.load_weights(load_model_path).expect_partial()
        print('loaded from : ' + load_model_path)

    # -------------------- Freeze R-FCN
    if 'rfcn' in frozen_layers:
        print('#################### Freezing rfcn layers')
        mymodel.rfcn_cls_conv.trainable = False
        mymodel.rfcn_reg_conv.trainable = False
    else:
        if 'rfcn_cls' in frozen_layers:
            print('#################### Freezing rfcn_cls layers')
            mymodel.rfcn_cls_conv.trainable = False
        if 'rfcn_reg' in frozen_layers:
            print('#################### Freezing rfcn_reg layers')
            mymodel.rfcn_reg_conv.trainable = False
    # -------------------- Freeze RPN
    if 'rpn' in frozen_layers:
        print('#################### Freezing rpn layers')
        mymodel.rpn_inter_conv.trainable = False
        mymodel.rpn_cls_conv.trainable = False
        mymodel.rpn_reg_conv.trainable = False
    else:
        if 'rpn_inter' in frozen_layers:
            print('#################### Freezing rpn_inter layers')
            mymodel.rpn_inter_conv.trainable = False
        if 'rpn_cls' in frozen_layers:
            print('#################### Freezing rpn_cls layers')
            mymodel.rpn_cls_conv.trainable = False
        if 'rpn_reg' in frozen_layers:
            print('#################### Freezing rpn_reg layers')
            mymodel.rpn_reg_conv.trainable = False
    # -------------------- Freeze Backbone
    if 'backbone' in frozen_layers:
        print('#################### Freezing backbone layers')
        mymodel.backbone_model.trainable = False

    mymodel.compile(optimizer='adam')

    logdir = 'logs/fit/' + name
    if profile:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch='7,9',
            update_freq='epoch',
        )
    else:
        tensorboard_callback = tf.keras.callbacks.TensorBoard(
            log_dir=logdir,
            histogram_freq=1,
            profile_batch=0,
            update_freq='epoch',
        )
    lr_callback = keras.callbacks.LearningRateScheduler(lr_f, verbose=1)

    savedir = 'savedmodels/' + name + '/{epoch}'
    save_callback = keras.callbacks.ModelCheckpoint(
        savedir, save_weights_only=True, verbose=1)

    if notebook:
        tqdm_callback = TqdmNotebookCallback(metrics=['loss'],
                                             leave_inner=False)
    else:
        tqdm_callback = TqdmCallback()

    train_ds = create_train_dataset(
        train_dir,
        img_size,
        class_names,
        bbox_sizes,
        buffer_size=1000,
    )
    val_ds = create_train_dataset(
        val_dir,
        img_size,
        class_names,
        bbox_sizes,
        buffer_size=100,
        val_data=True,
    )

    image_callback = ValFigCallback(val_ds, logdir)

    mymodel.fit(
        x=train_ds,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        # steps_per_epoch=10,
        callbacks=[
            tensorboard_callback,
            lr_callback,
            save_callback,
            tqdm_callback,
            image_callback,
        ],
        verbose=0,
        # validation_data=val_ds,
        # validation_steps=100,
    )

    delta = time.time() - st
    hours, remain = divmod(delta, 3600)
    minutes, seconds = divmod(remain, 60)
    print(datetime.now().strftime('%Y/%m/%d %H:%M:%S'))
    print(
        f'Took {hours:.0f} hours {minutes:.0f} minutes {seconds:.2f} seconds')
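
# Hedged usage sketch for a fine-tuning stage of the detector training above.
# All values are hypothetical placeholders (the backbone builder, class names,
# anchor settings, bbox_sizes and directories are not from the original
# project); the parameter names, the (WIDTH, HEIGHT) img_size convention and
# the accepted frozen_layers keys come from the function and its docstring.
# Note that the background class is added internally (num_classes =
# len(class_names) + 1), so class_names lists foreground classes only.
#
# run_training(
#     backbone_f=my_backbone_f,
#     lr_f=example_lr_schedule,        # e.g. the step-decay sketch above
#     name='detector_finetune_01',
#     epochs=30,
#     steps_per_epoch=1000,
#     intermediate_filters=256,
#     kernel_size=3,
#     stride=16,
#     rfcn_window=3,
#     anchor_ratios=[0.5, 1.0, 2.0],
#     anchor_scales=[64, 128, 256],
#     class_names=['cell'],
#     bbox_sizes=my_bbox_sizes,
#     train_dir='data/train',
#     val_dir='data/val',
#     img_size=(1024, 768),            # (WIDTH, HEIGHT)
#     frozen_layers=['backbone', 'rpn'],
#     load_model_path='savedmodels/detector_pretrain/20',
# )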