def test_trainable_all(self):
    def helper(model):
        model.load_for_transfer_learning(
            dataset=self.dataset,
            trainable_option=constants.TRAINABLE_ALL,
            for_training=False,
            save_weights=False)
        for block in model.train_model.layers[1:]:
            sub_model = model.train_model.get_layer(block.name)
            for layer in sub_model.layers:
                if 'conv' not in layer.name:
                    continue
                self.assertEqual(layer.trainable, True)

    helper(YOLOv3(tiny=False))
    helper(YOLOv3(tiny=True))
def test_trainable_last_conv(self):
    def helper(model):
        model.load_for_transfer_learning(
            dataset=self.dataset,
            trainable_option=constants.TRAINABLE_LAST_CONV,
            for_training=False)
        for block in model.train_model.layers[1:]:
            sub_model = model.train_model.get_layer(block.name)
            conv_layers = sum([
                True for layer in sub_model.layers if 'conv' in layer.name
            ])
            conv_layer = 0
            for layer in sub_model.layers:
                if 'conv' not in layer.name:
                    continue
                conv_layer += 1
                if 'last_layers' in block.name and conv_layer == conv_layers:
                    self.assertEqual(layer.trainable, True)
                else:
                    self.assertEqual(layer.trainable, False)

    helper(YOLOv3(tiny=False))
    helper(YOLOv3(tiny=True))
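# Standalone illustration (plain Keras, not this repo's API) of the freezing
# pattern the test above checks for TRAINABLE_LAST_CONV: every Conv2D except
# the last one ends up frozen. The model and layer names below are made up for
# the sketch.
import tensorflow as tf


def freeze_all_but_last_conv(model: tf.keras.Model) -> None:
    conv_names = [layer.name for layer in model.layers
                  if isinstance(layer, tf.keras.layers.Conv2D)]
    last_conv = conv_names[-1] if conv_names else None
    for layer in model.layers:
        layer.trainable = (layer.name == last_conv)


demo = tf.keras.Sequential([
    tf.keras.layers.Conv2D(8, 3, input_shape=(32, 32, 3), name='conv_a'),
    tf.keras.layers.Conv2D(16, 3, name='conv_b'),
    tf.keras.layers.GlobalAveragePooling2D(name='gap'),
    tf.keras.layers.Dense(10, name='head'),
])
freeze_all_but_last_conv(demo)
# Only the last convolution remains trainable.
assert [layer.trainable for layer in demo.layers] == [False, True, False, False]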
def main(_argv):
    model = YOLOv3(tiny=FLAGS.tiny)
    if FLAGS.weights_path:
        model.load_models(Dataset(FLAGS.dataset_name), for_training=False)
        model.inference_model.load_weights(FLAGS.weights_path)
        print(constants.C_OKBLUE, "Weights from", FLAGS.weights_path,
              'loaded successfully', constants.C_ENDC)
    else:
        model.load_original_yolov3()
        print(constants.C_OKBLUE,
              "Successfully loaded weights from the original paper",
              constants.C_ENDC)

    detect(model, FLAGS.dataset_name, FLAGS.img_path, FLAGS.title,
           FLAGS.output_path)
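# main() above reads FLAGS in the absl style (def main(_argv), FLAGS.*). A
# minimal sketch of how the flag definitions and entry point would typically
# look; the flag names come from the usage above, while the defaults and help
# texts are assumptions.
from absl import app, flags

FLAGS = flags.FLAGS
flags.DEFINE_boolean('tiny', False, 'Use the tiny YOLOv3 variant')
flags.DEFINE_string('weights_path', None,
                    'Checkpoint to load; original YOLOv3 weights if empty')
flags.DEFINE_string('dataset_name', 'COCO', 'Dataset whose classes are used')
flags.DEFINE_string('img_path', None, 'Image to run detection on')
flags.DEFINE_string('title', '', 'Title for the output figure')
flags.DEFINE_string('output_path', None, 'Where to save the annotated image')

if __name__ == '__main__':
    app.run(main)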
def load_model_and_db(batch_size):
    model = YOLOv3(tiny=FLAGS.tiny)
    dataset = Dataset(FLAGS.dataset_name, FLAGS.tiny)
    dataset.load_datasets(model.image_res, model.anchors, model.masks,
                          batch_size)
    # When loading the model, the folders to save the checkpoints, figures and
    # logs are created.
    if FLAGS.trainable == 'none':
        model.load_models(dataset=dataset, for_training=True, plot_model=False)
    else:
        model.load_for_transfer_learning(dataset,
                                         trainable_option=FLAGS.trainable)

    optimizer = tf.keras.optimizers.Adam(lr=FLAGS.lr)
    # model.train_model.load_weights(
    #     '/home/brechard/models_results/testing/20191017_180805_COCO/checkpoints/YOLOv3_final.ckpt')
    loss = model.get_loss()
    model.train_model.compile(optimizer=optimizer, loss=loss,
                              run_eagerly=False, metrics=['accuracy'])
    return dataset, model
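# A rough sketch of how the pair returned by load_model_and_db() might be used
# for training. The attribute names dataset.train and dataset.validation are
# hypothetical stand-ins for whatever tf.data pipelines load_datasets()
# prepared; they are not confirmed by the code above.
def train(epochs, batch_size):
    dataset, model = load_model_and_db(batch_size)
    history = model.train_model.fit(
        dataset.train,                        # hypothetical training pipeline
        validation_data=dataset.validation,   # hypothetical validation pipeline
        epochs=epochs)
    return model, history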
def test_transfer_learning(self):
    def helper(original_model, test_model, tiny):
        model_blocks = [l.name for l in original_model.layers[1:]]
        if tiny:
            model_blocks = [
                'Tiny_DarkNet', 'tiny_layer', 'last_layers_512',
                'Concatenate_128', 'last_layers_256'
            ]
        for i, block in enumerate(model_blocks):
            sub_original_model = original_model.get_layer(block)
            sub_test_model = test_model.get_layer(block)
            if type(sub_original_model) != tf.keras.Model:
                continue
            conv_layers = sum([
                True for layer in sub_original_model.layers
                if 'conv' in layer.name
            ])
            conv_layer = 0
            for i_in, layer in enumerate(sub_original_model.layers):
                weights_equal = True
                if 'input' in layer.name:
                    continue
                if layer.name.startswith('conv2d'):
                    conv_layer += 1
                    if conv_layer == conv_layers and 'last_layers' in block:
                        # This is one of the last convolutional layers, so its
                        # weights should differ from the original ones.
                        weights_equal = False
                for n_weights, weights in enumerate(layer.weights):
                    original_weights = weights.numpy().flatten()
                    test_weights = sub_test_model.layers[i_in].weights[
                        n_weights].numpy().flatten()
                    if weights_equal:
                        self.assertEqual(
                            sum(original_weights != test_weights), 0)
                    else:
                        self.assertNotEqual(
                            sum(original_weights[:len(test_weights)] !=
                                test_weights), 0)

    original_tiny_model = YOLOv3(tiny=True)
    original_tiny_model.load_original_yolov3(for_training=False)
    original_full_model = YOLOv3(tiny=False)
    original_full_model.load_original_yolov3(for_training=False)

    tiny_test_model = YOLOv3(tiny=True)
    full_test_model = YOLOv3(tiny=False)
    trainable_options = [
        constants.TRAINABLE_ALL, constants.TRAINABLE_FEATURES,
        constants.TRAINABLE_LAST_CONV, constants.TRAINABLE_LAST_BLOCK
    ]
    for trainable_option in trainable_options:
        tiny_test_model.load_for_transfer_learning(
            dataset=self.dataset, trainable_option=trainable_option,
            for_training=False, save_weights=False)
        full_test_model.load_for_transfer_learning(
            dataset=self.dataset, trainable_option=trainable_option,
            for_training=False, save_weights=False)
        helper(original_tiny_model.train_model, tiny_test_model.train_model,
               True)
        helper(original_full_model.train_model, full_test_model.train_model,
               False)
import os

from constants import PROJECT_PATH
from data.dataset import Dataset
from helpers import natural_keys
from models.detection.predict import detect
from models.detection.yolov3 import YOLOv3

weights = None
checkpoint_dir = '/home/brechard/models/YOLOv3/20191031_163044_COCO/checkpoints/'
dataset_name = checkpoint_dir.split('_')[-1].split('/')[0]
dataset = Dataset(dataset_name)
model = YOLOv3(tiny=True)
model.load_models(dataset=dataset, for_training=False)
# jajas.check_losses(None, "RANDOM INITIALIZATION")
print()
image_path = PROJECT_PATH + 'data/external/datasets/COCO/train/000000000257.jpg'

for checkpoint in sorted(os.listdir(checkpoint_dir), key=natural_keys):
    new_weights = checkpoint.split('.ckpt')[0]
    if new_weights == weights or 'ckpt' not in checkpoint:
        continue
    else:
        weights = new_weights
    print('Use weights', weights)
    title = 'Epoch = ' + weights.split('-')[0].split('_')[-1] + '. Model loss = ' + \
            checkpoint.split('.ckpt')[0].split('-')[-1]
    # Assumption: load this checkpoint into the inference model and save the
    # annotated image next to it, following the detect(model, dataset_name,
    # img_path, title, output_path) signature used elsewhere.
    model.inference_model.load_weights(checkpoint_dir + weights + '.ckpt')
    detect(model, dataset_name, image_path, title,
           checkpoint_dir + weights + '.jpg')
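# The loop above relies on natural_keys from helpers to sort checkpoints so
# that, e.g., epoch 10 comes after epoch 2. A common implementation of such a
# natural-sort key looks like the sketch below (an assumption, not necessarily
# the repo's exact helper).
import re


def natural_keys(text):
    # Split the filename into text and integer chunks so numbers compare
    # numerically instead of lexicographically.
    return [int(chunk) if chunk.isdigit() else chunk
            for chunk in re.split(r'(\d+)', text)]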