Example #1
0
    def load_models(self):
        """Deserialize the RPN / DET / HOI sub-models from cfg.my_shared_weights.

        Each enabled stage first registers its custom loss functions (and,
        for DET, the custom RoiPoolingConv layer) with Keras, because
        load_model() must be able to resolve those names while rebuilding
        the saved graph.
        """
        cfg = self.cfg

        def _load_shared(name):
            # Shared by all three branches: validate the weights path once,
            # deserialize, and tag the model with the stage name.
            assert os.path.exists(cfg.my_shared_weights
                                  ), 'invalid path: %s' % cfg.my_shared_weights
            model = load_model(cfg.my_shared_weights)
            model.name = name
            return model

        if self.do_rpn:
            print('Loading RPN model...')
            loss_cls = losses.rpn_loss_cls(cfg.nb_anchors)
            loss_rgr = losses.rpn_loss_regr(cfg.nb_anchors)

            get_custom_objects().update({"rpn_loss_cls_fixed_num": loss_cls})
            get_custom_objects().update({"rpn_loss_regr_fixed_num": loss_rgr})

            self.model_rpn = _load_shared('rpn')
            # NOTE(review): index 17 is assumed to be a frozen shared layer —
            # confirm against the architecture if the backbone changes.
            assert not self.model_rpn.layers[17].get_config()['trainable']

        if self.do_det:
            print('Loading DET model...')
            loss_cls = losses.class_loss_cls
            loss_rgr = losses.class_loss_regr(cfg.nb_object_classes - 1)

            get_custom_objects().update({"class_loss_cls": loss_cls})
            get_custom_objects().update(
                {"class_loss_regr_fixed_num": loss_rgr})

            # The DET graph contains a custom layer as well as custom losses.
            get_custom_objects().update(
                {"RoiPoolingConv": layers.RoiPoolingConv})

            self.model_det = _load_shared('det')

        if self.do_hoi:
            print('Loading HOI model...')
            loss_cls = losses.hoi_loss_cls
            get_custom_objects().update({"hoi_loss_cls_fixed_num": loss_cls})

            self.model_hoi = _load_shared('hoi')
Example #2
0
    def compile_models(self):
        """Compile the active stage's model with the configured optimizer.

        In 'test' mode compilation is skipped entirely. Exactly one of
        do_rpn / do_det / do_hoi is expected to be active; if several are
        set, the later branch wins (hoi > det > rpn), matching the original
        priority order — only one model is ever compiled per call.

        Raises:
            ValueError: if no stage flag is set (previously this fell
                through to an opaque NameError on the compile line).
        """
        if self.mode == 'test':
            return

        cfg = self.cfg

        print('Compiling models...')
        if cfg.optimizer == 'adam':
            print('   Opt.:', 'Adam')
            opt = Adam(lr=cfg.init_lr)
        else:
            print('   Opt.:', 'SGD')
            opt = SGD(lr=cfg.init_lr, momentum=0.9, decay=0.0, nesterov=False)
        print('   Learning rate:', cfg.init_lr)

        model = None
        my_losses = None
        my_metrics = None
        if self.do_rpn:
            if cfg.rpn_uniform_sampling:
                print('   Uniform anchor sampling')
            else:
                print('   Non-Uniform anchor sampling')
            model = self.model_rpn
            my_losses = [
                losses.rpn_loss_cls(cfg.nb_anchors),
                losses.rpn_loss_regr(cfg.nb_anchors)
            ]
            my_metrics = None
        if self.do_det:
            model = self.model_det
            my_losses = [
                losses.class_loss_cls,
                losses.class_loss_regr(cfg.nb_object_classes - 1)
            ]
            my_metrics = {'det_out_class': 'categorical_accuracy'}
        if self.do_hoi:
            model = self.model_hoi
            # Categorical HOI uses plain cross-entropy; otherwise the custom
            # weighted HOI loss parameterized by cfg.wp.
            loss = 'categorical_crossentropy' if cfg.do_categorical_hoi else losses.hoi_loss_cls(
                cfg.wp)
            print('   -HOI loss', loss)
            my_losses = [loss]
            my_metrics = ['categorical_accuracy'
                          ] if cfg.do_categorical_hoi else None

        if model is None:
            raise ValueError(
                'compile_models: no stage enabled (do_rpn / do_det / do_hoi '
                'are all False)')
        model.compile(optimizer=opt, loss=my_losses, metrics=my_metrics)
Example #3
0
# NOTE(review): this fragment begins mid-statement — the matching 'try:' is
# outside this excerpt. A bare 'except:' swallows every exception type
# (including KeyboardInterrupt); it should name the expected error class.
except:
    print(
        'Could not load pretrained model weights. Weights can be found in the keras application folder \
		https://github.com/fchollet/keras/tree/master/keras/applications')

# Separate Adam optimizers for the RPN and the classifier head, both at a
# small fine-tuning learning rate.
optimizer = Adam(lr=1e-5)
optimizer_classifier = Adam(lr=1e-5)
# RPN trains against one loss per output: objectness classification and
# anchor-box regression.
model_rpn.compile(
    optimizer=optimizer,
    loss=[losses.rpn_loss_cls(num_anchors),
          losses.rpn_loss_regr(num_anchors)])
# Classifier head: class cross-entropy plus per-class box regression
# (background class excluded from the regression count).
model_classifier.compile(
    optimizer=optimizer_classifier,
    loss=[
        losses.class_loss_cls,
        losses.class_loss_regr(len(classes_count) - 1)
    ],
    metrics={'dense_class_{}'.format(len(classes_count)): 'accuracy'})
# model_all appears to be compiled only so combined weights can be
# saved/loaded; 'mae' here is presumably never optimized — verify.
model_all.compile(optimizer='sgd', loss='mae')

# Training-loop bookkeeping.
epoch_length = 1000
num_epochs = int(options.num_epochs)
iter_num = 0

# NOTE(review): this rebinding shadows the 'losses' module used above — any
# later losses.* call in the original file would hit the ndarray instead.
losses = np.zeros((epoch_length, 5))
rpn_accuracy_rpn_monitor = []
rpn_accuracy_for_epoch = []
start_time = time.time()

best_loss = np.Inf
    # NOTE(review): orphaned indented line — its enclosing block (likely a
    # resume-from-checkpoint branch) is outside this excerpt.
    model_classifier.load_weights(C.model_path, by_name=True)

# this is a model that holds both the RPN and the classifier, used to load/save weights for the models
model_all = Model([img_input, roi_input], rpn[:2] + classifier)
# print("MODEL RPN")
# model_rpn.summary()
# print("MODEL CLASSIFIER")
# model_classifier.summary()
# print("MODEL ALL")
# model_all.summary()
# Define the optimizers: Adam at a fine-tuning rate for RPN and classifier.
optimizer_rpn = optimizers.Adam(lr=1e-5)
optimizer_classifier = optimizers.Adam(lr=1e-5)
model_rpn.compile(optimizer=optimizer_rpn, loss=[losses.rpn_loss_cls(num_anchors), losses.rpn_loss_regr(num_anchors)])
model_classifier.compile(optimizer=optimizer_classifier,
                         loss=[losses.class_loss_cls, losses.class_loss_regr(len(classes_count) - 1)],
                         metrics={'dense_class_{}'.format(len(classes_count)): 'accuracy'})
# Per the comment above, model_all exists for weight load/save; 'mae' is
# presumably a placeholder loss — verify it is never trained directly.
model_all.compile(optimizer='sgd', loss='mae')

# Training-loop bookkeeping.
epoch_length = 1000
num_epochs = int(options.num_epochs)
iter_num = 0

# NOTE(review): rebinding 'losses' shadows the losses module used above.
losses = np.zeros((epoch_length, 5))
rpn_accuracy_rpn_monitor = []
rpn_accuracy_for_epoch = []
start_time = time.time()

best_loss = np.Inf

# Invert the class mapping (index -> class name) for decoding predictions.
class_mapping_inv = {v: k for k, v in class_mapping.items()}
Example #5
0
	# NOTE(review): fragment starts inside an unseen indented block.
	# Commented-out resume-from-record bookkeeping kept from the original
	# script (reads per-loss history out of a second record dataframe).
	#r_loss_rpn_regr2 = record2_df['loss_rpn_regr']
	#r_loss_class_cls2 = record2_df['loss_class_cls']
	#r_loss_class_regr2 = record2_df['loss_class_regr']
	#r_curr_loss2 = record2_df['curr_loss']
	#r_elapsed_time2 = record2_df['elapsed_time']
	#r_mAP2 = record2_df['mAP']

	# print('Already train %dK batches' % (len(record_df)))

# Learning rates for RPN & classifier
optimizer = tf.keras.optimizers.Adam(lr = 1e-5)
optimizer_classifier = tf.keras.optimizers.Adam(lr = 1e-5)
#optimizer_all = tf.keras.optimizers.SGD(lr = 1e-5)

# One loss per RPN output (objectness cls + box regr); classifier adds a
# per-class accuracy metric on its softmax output layer.
model_rpn.compile(optimizer = optimizer, loss = [losses.rpn_loss_cls(num_anchors), losses.rpn_loss_regr(num_anchors)])
model_classifier.compile(optimizer = optimizer_classifier, loss = [losses.class_loss_cls, losses.class_loss_regr(len(classes_count) - 1)], metrics = {'dense_class_{}'.format(len(classes_count)): 'accuracy'})

#model_all.compile(optimizer = optimizer_all, loss = 'mae')
model_all.compile(optimizer = 'sgd', loss = 'mae')

#total_epochs = len(record_df)
#r_epochs = len(record_df)

#total_epochs2 = len(record2_df)
#r_epochs2 = len(record2_df)
##

##
epoch_length = 1000  # Iterations = (Dataset / Batch-Size = 1)
num_epochs = 65  # Epochs
iter_num = 0  # Index -> iteration
    # NOTE(review): this fragment is indented — its enclosing function is
    # outside this excerpt.
    # Synthetic placeholder tensors standing in for real data.
    # NOTE(review): the 50000/10000 leading dimensions look swapped between
    # the *_train and *_test pairs (im_test has 50000 while bb_test has
    # 10000) — verify the intended split sizes against the caller.
    temp_im_test = np.ones((50000, 10, 10))
    temp_im_train = np.ones((10000, 10, 10))
    temp_bb_train = np.ones((50000, 10, 36))
    temp_bb_test = np.ones((10000, 10, 36))
    temp_regr_train = np.ones((50000, 2, 2, 36))
    temp_regr_test = np.ones((10000, 2, 2, 36))
    temp_class_train = np.ones((50000, 2, 2, 9))
    temp_class_test = np.ones((10000, 2, 2, 9))

    model = ROIModel(10)

    # Four-headed model: classifier cls/regr plus RPN regr/cls, each head
    # with its own custom loss and an accuracy metric.
    model.compile(
        optimizer=optimizers.RMSprop(1e-3),
        loss={
            "output_1": custom_losses.class_loss_cls(),
            "output_2": custom_losses.class_loss_regr(9),
            "output_3": custom_losses.rpn_loss_regr(num_anchors),
            "output_4": custom_losses.rpn_loss_cls(num_anchors),
        },
        metrics={
            "output_1": 'accuracy',
            "output_2": 'accuracy',
            "output_3": 'accuracy',
            "output_4": 'accuracy',
        },
    )

    # Timestamped TensorBoard run directory for this training session.
    logdir = "logs/fit/" + datetime.now().strftime("%Y%m%d-%H%M%S")
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=logdir)

    # test_image = np.array(train_images[0]).reshape(1,32,32,3)