Example #1
    def train(self, shallow, optimizer, epochs, callbacks, chk_period=-1, chk_monitor=None):
        if chk_monitor is None:
            chk_monitor = self.chk_mon
        shallow_path = common.shallow_path(shallow.name, self.trainset_name, self.feat_net_name, ext=False)

        # work on a copy so the caller's callback list is left untouched
        my_callbacks = callbacks[:]
        if chk_period > 0:
            # periodic weight snapshot every chk_period epochs
            name = shallow_path + '.weights.{epoch:02d}.h5'
            checkpoint = ModelCheckpoint(name, monitor=chk_monitor, save_weights_only=True, period=chk_period)
            my_callbacks.append(checkpoint)

        # best-so-far weights, overwritten whenever val_loss improves
        bestpoint = ModelCheckpoint(shallow_path + '.weights.best.h5', monitor='val_loss', save_best_only=True, save_weights_only=True)
        my_callbacks.append(bestpoint)
        model = shallow.model()
        if model is None:
            raise RuntimeError("You have to initialize the model with init() before training the network")
        model.compile(optimizer=copy.deepcopy(optimizer), loss=self.loss, metrics=self.metric)
        #model.summary()
        #print("Valid split: " + str(valid_split))
        model.fit(self.trainset.data, self.trainset.getLabelsVec(),
                  validation_data=self.valdata, validation_split=self.validsplit,
                  nb_epoch=epochs, batch_size=self.batch_size,
                  callbacks=my_callbacks,
                  shuffle=True)

        save_model_json(model, shallow_path + '.json')
        model.save_weights(shallow_path + '.weights.last.h5')
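Below is a minimal, self-contained sketch of the checkpointing scheme this train() method wires up: one periodic weights file every chk_period epochs, one best-on-val_loss file, and a final *.weights.last.h5 written after fit(). It mirrors the same Keras-1-era API used above (nb_epoch, ModelCheckpoint's period argument); the toy model, data and file names are illustrative assumptions only.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import ModelCheckpoint
from keras.optimizers import SGD

# toy data and a tiny head, just enough to exercise the callbacks
X = np.random.rand(128, 20)
y = np.random.randint(0, 2, size=(128, 1))

model = Sequential([Dense(16, input_dim=20, activation='relu'),
                    Dense(1, activation='sigmoid')])
model.compile(optimizer=SGD(lr=0.01, momentum=0.9, nesterov=True),
              loss='binary_crossentropy', metrics=['accuracy'])

callbacks = [
    # one weights file per epoch (the 'period' argument controls the interval)
    ModelCheckpoint('toy.weights.{epoch:02d}.h5', save_weights_only=True, period=1),
    # single best-so-far file, overwritten whenever val_loss improves
    ModelCheckpoint('toy.weights.best.h5', monitor='val_loss',
                    save_best_only=True, save_weights_only=True),
]

model.fit(X, y, validation_split=0.2, nb_epoch=5, batch_size=32,
          callbacks=callbacks, shuffle=True)
model.save_weights('toy.weights.last.h5')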

Example #2
    def for_resnet50():

        early_stopping = EarlyStopping('val_loss',
                                       min_delta=0.01,
                                       patience=7,
                                       verbose=1)
        reduceLR = ReduceLROnPlateau(monitor='val_loss',
                                     factor=0.1,
                                     patience=2,
                                     verbose=1,
                                     epsilon=0.01,
                                     cooldown=0,
                                     min_lr=0)
        callbacks = [early_stopping, reduceLR]

        # first pass: train the plain head from scratch
        A = new_model(in_shape, out_shape)
        optimizer = SGD(lr=0.01, momentum=0.9, decay=1e-6, nesterov=True)
        addestra(A,
                 "A_5ep",
                 optimizer,
                 100,
                 callbacks,
                 chk_period=1,
                 loss_in_name=True)

        shallow_path = common.shallow_path("A_5ep",
                                           trainset_name,
                                           feat_net,
                                           ext=False)
        early_stopping = EarlyStopping('val_loss',
                                       min_delta=0.001,
                                       patience=10,
                                       verbose=1)
        reduceLR = ReduceLROnPlateau('val_loss',
                                     factor=0.1,
                                     patience=4,
                                     verbose=1,
                                     epsilon=0.0001)
        callbacks = [early_stopping, reduceLR]

        # second pass: rebuild the head with lf enabled, seed it from the best
        # first-pass checkpoint and fine-tune with a ten times smaller learning rate
        LF = new_model(in_shape, out_shape, lf=True, lf_decay=0.03)
        LF.load_weights(shallow_path + '.weights.best.h5', by_name=True)
        optimizer = SGD(lr=0.001, momentum=0.9, decay=1e-6, nesterov=True)
        addestra(LF,
                 "LF_FT_A",
                 optimizer,
                 epochs=100,
                 callbacks=callbacks,
                 chk_period=1)
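Both stages of for_resnet50() follow one recipe: train a first head, keep its best checkpoint, then build a second model and seed it through load_weights(..., by_name=True) before fine-tuning at a lower learning rate. Here is a small, self-contained illustration of that by-name weight transfer; the layer names and shapes are made up for the example.

from keras.models import Sequential
from keras.layers import Dense

# stage 1: source model; by_name matching keys on these layer names
src = Sequential([Dense(32, input_dim=10, activation='relu', name='fc1'),
                  Dense(2, activation='softmax', name='out')])
src.save_weights('stage1.weights.best.h5')

# stage 2: a different architecture that reuses the 'fc1' name; only layers
# with matching names receive weights, the others keep their fresh initialization
dst = Sequential([Dense(32, input_dim=10, activation='relu', name='fc1'),
                  Dense(64, activation='relu', name='fc_extra'),
                  Dense(2, activation='softmax', name='out2')])
dst.load_weights('stage1.weights.best.h5', by_name=True)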
Example #3
    def load(self, shallow, weight_index, alt_net_load=None, alt_name_load=None, extra_name=''):
        # resolve the name the weights were saved under; an alternative net or
        # name can override the shallow network's own name
        if alt_net_load is not None:
            name = alt_net_load.name + extra_name
        elif alt_name_load is not None:
            name = alt_name_load + extra_name
        else:
            name = shallow.name + extra_name
        shallow_path = common.shallow_path(name, self.trainset_name, self.feat_net_name, ext=False)
        model = shallow.model()
        if model is None:
            raise RuntimeError("You have to initialize the model with init() before loading the weights")
        model.load_weights(shallow_path + '.weights.' + str(weight_index) + '.h5', by_name=True)
        shallow.weights_loaded_from = name
        shallow.weights_loaded_index = weight_index
        return self
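load() rebuilds the file name through the same common.shallow_path scheme used in train(), then reads <path>.weights.<weight_index>.h5, so weight_index can be 'best', 'last' or a zero-padded epoch number. Since train() also writes the architecture to <path>.json, a saved head can be restored outside this class roughly as sketched below; the file names are hypothetical, and this assumes save_model_json stores the output of model.to_json().

from keras.models import model_from_json

# hypothetical file names following the naming scheme used in these examples
with open('A_5ep.json') as f:
    model = model_from_json(f.read())
model.load_weights('A_5ep.weights.best.h5', by_name=True)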

Example #4
    def for_resnet50_HL():

        early_stopping = EarlyStopping('val_acc',
                                       min_delta=0.01,
                                       patience=5,
                                       verbose=1)
        callbacks = [early_stopping]

        # first pass: head with a single Hidden(8000, 0.5) layer, trained from scratch
        HL8K = new_model(in_shape, out_shape, hiddens=[Hidden(8000, 0.5)])
        optimizer = SGD(lr=0.01, momentum=0.9, decay=1e-6, nesterov=True)
        addestra(HL8K,
                 "HL8K",
                 optimizer,
                 5,
                 callbacks,
                 chk_period=1,
                 loss_in_name=True)

        shallow_path = common.shallow_path("HL8K",
                                           trainset_name,
                                           feat_net,
                                           ext=False)
        early_stopping = EarlyStopping('val_loss',
                                       min_delta=0.001,
                                       patience=10,
                                       verbose=1)
        reduceLR = ReduceLROnPlateau('val_loss',
                                     factor=0.1,
                                     patience=4,
                                     verbose=1,
                                     epsilon=0.0001)
        callbacks = [early_stopping, reduceLR]

        # fine-tune: rebuild the head with lf enabled, seed it from the best
        # HL8K checkpoint and continue with a much smaller learning rate
        LF = new_model(in_shape,
                       out_shape,
                       hiddens=[Hidden(8000, 0.5)],
                       lf=True,
                       lf_decay=0.03)
        LF.load_weights(shallow_path + '.weights.best.h5', by_name=True)
        optimizer = SGD(lr=0.0001, momentum=0.9, decay=1e-6, nesterov=True)
        addestra(LF,
                 "LF_FT_HL8K",
                 optimizer,
                 epochs=100,
                 callbacks=callbacks,
                 chk_period=1)
Example #5
    def addestra(model, name, optimizer, epochs, callbacks, chk_period=-1, loss_in_name=False):
        shallow_path = common.shallow_path(name, trainset_name, feat_net, ext=False)

        if chk_period > 0:
            # periodic weight snapshot every chk_period epochs, with val_loss
            # optionally embedded in the file name
            chk_name = shallow_path + '.weights.{epoch:02d}' + ('-{val_loss:.2f}.h5' if loss_in_name else '.h5')
            checkpoint = ModelCheckpoint(chk_name, monitor='val_loss', save_weights_only=True, period=chk_period)
            callbacks.append(checkpoint)

        # best-so-far weights, overwritten whenever val_loss improves
        bestpoint = ModelCheckpoint(shallow_path + '.weights.best.h5', monitor='val_loss', save_best_only=True, save_weights_only=True)
        callbacks.append(bestpoint)

        model.compile(optimizer=optimizer, loss=LOSS, metrics=METRIC)
        #model.summary()
        #print("Valid split: " + str(valid_split))
        model.fit(trainset.data, trainset.getLabelsVec(), nb_epoch=epochs, batch_size=BATCH, callbacks=callbacks,
                  shuffle=True, validation_data=valid_data, validation_split=valid_split)


        save_model_json(model, shallow_path + '.json')
        model.save_weights(shallow_path + '.weights.last.h5')
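Unlike the train() method in Example #1, addestra() takes its configuration from module-level names (LOSS, METRIC, BATCH, trainset, valid_data, valid_split, trainset_name, feat_net) plus the common and save_model_json helpers. The sketch below shows the kind of bindings it expects; every value is an illustrative assumption, not the project's actual configuration.

# illustrative module-level configuration read by addestra(); real values live elsewhere
LOSS = 'categorical_crossentropy'   # passed to model.compile(loss=...)
METRIC = ['accuracy']               # passed to model.compile(metrics=...)
BATCH = 32                          # batch_size for model.fit
valid_split = 0.2                   # validation fraction when no explicit validation set is given
valid_data = None                   # or an (X_val, y_val) tuple that overrides valid_split
trainset_name = 'dataset_A'         # used by common.shallow_path to build file names
feat_net = 'resnet50'               # feature-extractor name, also part of the file names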