# Example #1
    def __init__(self, parameter, model):
        """Set up training state for an SSD-style trainer.

        Moves the model to the configured device, builds an SGD optimizer,
        the multibox loss, and (when milestones are given) a multi-step
        learning-rate scheduler.

        Args:
            parameter: dict with keys 'n_epoch', 'device', 'milestones',
                'learning_rate', 'weight_decay', 'momentum', 'gamma'.
            model: network to train; moved to ``parameter['device']``.
        """
        self.n_epoch = parameter['n_epoch']
        self.device = parameter['device']
        self.model = model.to(self.device)
        self.milestones = parameter['milestones']
        learning_rate = parameter['learning_rate']
        weight_decay = parameter['weight_decay']
        momentum = parameter['momentum']
        gamma = parameter['gamma']

        # BUG FIX: torch.optim.SGD's positional order is
        # (params, lr, momentum, dampening, weight_decay, ...). The original
        # call passed weight_decay positionally after momentum, so it landed
        # in the `dampening` slot and no weight decay was applied. Use
        # keyword arguments so each value reaches the intended parameter.
        self.optimizer = optim.SGD(self.model.parameters(),
                                   lr=learning_rate,
                                   momentum=momentum,
                                   weight_decay=weight_decay)
        self.criteria = MultiboxLoss()
        # Scheduler is optional: only created when milestones are provided.
        if self.milestones is not None:
            self.scheduler = optim.lr_scheduler.MultiStepLR(
                self.optimizer, self.milestones, gamma)
        self.DataCoder = DataCoder()
# --- dataset paths and model construction (Keras SSD300 example) ---
num_classes = len(classes) + 1  # object classes plus one background class
root_prefix = '../datasets/VOCdevkit/VOC2007/'
ground_data_prefix = root_prefix + 'Annotations/'  # Pascal VOC XML annotations
image_prefix = root_prefix + 'JPEGImages/'
image_shape = (300, 300, 3)  # SSD300 input: 300x300 RGB images
model = mini_SSD300(image_shape, num_classes=num_classes)
plot(model, to_file='mini_SSD300.png')  # write an architecture diagram to disk


def class_accuracy(y_true, y_pred):
    """Categorical accuracy computed only on the class-confidence slice.

    SSD targets pack 4 box-offset values first; the next ``num_classes``
    entries along the last axis are the one-hot class scores, which is
    the only part compared here.
    """
    class_slice = slice(4, 4 + num_classes)
    true_classes = y_true[:, :, class_slice]
    pred_classes = y_pred[:, :, class_slice]
    return categorical_accuracy(true_classes, pred_classes)


# Compile with the SSD multibox loss (hard-negative mining at a 2:1
# negative-to-positive ratio) and track per-class accuracy.
multibox_loss = MultiboxLoss(num_classes, neg_pos_ratio=2.0).compute_loss
model.compile(optimizer=Adam(lr=3e-4),
              loss=multibox_loss,
              metrics=[class_accuracy])
# Generate the model's prior (anchor) boxes once, up front.
box_creator = PriorBoxCreator(model)
prior_boxes = box_creator.create_boxes()
# Parse Pascal VOC XML annotations into a {image_key: boxes} mapping.
ground_truth_manager = XMLParser(ground_data_prefix,
                                 background_id=None,
                                 class_names=classes)
ground_truth_data = ground_truth_manager.get_data()
print('Number of ground truth samples:', len(ground_truth_data.keys()))
# 80/20 train/validation split over the annotated image keys.
train_keys, validation_keys = split_data(ground_truth_data, training_ratio=.8)

# Matches ground-truth boxes to priors; scale factors are the usual SSD
# box-encoding variances for (cx, cy, w, h).
prior_box_manager = PriorBoxManager(prior_boxes,
                                    box_scale_factors=[.1, .1, .2, .2],
                                    num_classes=num_classes)
# Example #3
# Checkpoint weights after every epoch (file name encodes epoch and
# validation loss) and apply the user-supplied `schedule` for LR decay.
callbacks = [
    keras.callbacks.ModelCheckpoint(
        './checkpoints/weights.{epoch:02d}-{val_loss:.2f}.hdf5',
        verbose=1,
        save_weights_only=True),
    keras.callbacks.LearningRateScheduler(schedule)
]

# In[10]:

base_lr = 3e-4  # initial learning rate for the optimizer
# NOTE(review): the name `optim` shadows a torch.optim-style module import
# elsewhere in this file — harmless within this snippet, but worth renaming.
optim = keras.optimizers.Adam(lr=base_lr)
# optim = keras.optimizers.RMSprop(lr=base_lr)
# optim = keras.optimizers.SGD(lr=base_lr, momentum=0.9, decay=decay, nesterov=True)
model.compile(optimizer=optim,
              loss=MultiboxLoss(NUM_CLASSES, neg_pos_ratio=2.0).compute_loss)

# In[11]:
#
#nb_epoch = 30
#history = model.fit_generator(gen.generate(True), gen.train_batches,
#                              nb_epoch, verbose=1,
#                              callbacks=callbacks,
#                              validation_data=gen.generate(False),
#                              nb_val_samples=gen.val_batches,
#                              nb_worker=1)
#

nb_epoch = 30
# NOTE(review): this fit_generator call is truncated in the captured
# source — the remaining arguments (epochs, callbacks, validation data)
# are missing. Confirm against the original script before running.
history = model.fit_generator(gen.flow(mode='train'),
                              len(train_keys),
# Example #4
 def compile_model(self):
     """Compile the wrapped model with Adam and the multibox loss.

     Uses a fixed learning rate of 1e-5 (an earlier variant used 1e-4).
     """
     optimizer = Adam(lr=0.00001)
     loss = MultiboxLoss(self.__class_num, self.__batch_size).loss
     self.__model.compile(optimizer=optimizer, loss=loss)