# Example #1 (dataset marker; original header text: 示例#1 / 0)
    def train(self, data, labels, hyperparams=None):
        """Train the model on class-imbalanced binary-labeled data.

        Parameters
        ----------
        data : list
            Input samples, passed straight through to ``self.model.fit``.
        labels : list of int
            Integer labels (0/1); converted to one-hot vectors via
            ``self._int_to_binary`` before fitting.
        hyperparams : dict, optional
            Extra settings. Recognized key: ``"validation"`` — a
            ``(val_data, val_labels)`` pair used as validation data.

        Returns
        -------
        None

        Raises
        ------
        ValueError
            If ``labels`` contains no positive (non-zero) entries, since
            the class-balance weight would be undefined.
        """
        # Avoid the mutable-default-argument pitfall (was `hyperparams=dict()`).
        if hyperparams is None:
            hyperparams = {}

        # Class-balance weight: ratio of negative to positive samples,
        # fed to the weighted loss to counter class imbalance.
        one_occurence = np.count_nonzero(labels)
        if one_occurence == 0:
            raise ValueError(
                "labels must contain at least one positive (non-zero) entry")
        zero_occurence = len(labels) - one_occurence
        self.alpha = zero_occurence / one_occurence

        self.model.compile(optimizer=Adam(),
                           loss=weighted_categorical_crossentropy(self.alpha),
                           metrics=['accuracy'])

        binary_labels = self._int_to_binary(labels)
        if "validation" in hyperparams:
            val_data = hyperparams["validation"][0]
            val_labels = self._int_to_binary(hyperparams["validation"][1])
            self.model.fit(data,
                           binary_labels,
                           epochs=200,
                           batch_size=32,
                           validation_data=(val_data, val_labels))
        else:
            self.model.fit(data, binary_labels, epochs=200, batch_size=32)
        return
ith = f"{prefix}bsize8_w1_spe{steps_per_epoch}_ep{nb_epochs}"
log_name = f"../logs/{ith}.log"
csv_logger = keras.callbacks.CSVLogger(log_name)
# `{epoch:02d}` is left unexpanded for Keras to fill in at checkpoint time.
ckpt_filepath = (f"../weights/{prefix}finetune_map_model_bsize8_w1_spe"
                 f"{steps_per_epoch}_ep{{epoch:02d}}.hdf5")
model_ckpt = keras.callbacks.ModelCheckpoint(ckpt_filepath, period=2)
myvalidation_steps = 5

train_datagen = SynthMap_DataGenerator_Centerline_Localheight(
    image_root_path=image_root_path, batch_size=8, seed=3333, mode='training')

# Model heads: text/non-text map, centerline map, plus one regression output.
model.compile(adam,
              loss=[
                  weighted_categorical_crossentropy(weights1),
                  weighted_categorical_crossentropy(weights2), regress_loss1
              ])
callbacks = [csv_logger, model_ckpt]

# Training runs without a validation generator; progress goes to the CSV log
# and periodic checkpoints above.
model.fit_generator(iter(train_datagen),
                    steps_per_epoch=steps_per_epoch,
                    epochs=nb_epochs,
                    callbacks=callbacks)

model.save(f"../weights/finetune_map_model_{ith}.hdf5")
# NOTE(review): the triple-quoted block below is dead example code kept as a
# bare string literal — a no-op at runtime. Left byte-identical here; consider
# deleting it or moving it to real documentation.
'''
示例#3
0
weights2 = np.array([1., weight_ratio]) # probability map for background and centerline

adam = keras.optimizers.Adam(lr=0.0001)
ith = prefix + 'bsize8_w1_spe'+str(steps_per_epoch)+'_ep' + str(nb_epochs)
log_name = '../logs/'+ ith + '.log'
csv_logger = keras.callbacks.CSVLogger(log_name)
ckpt_filepath = '../weights/'+ prefix + 'finetune_map_model_bsize8_w1_spe'+str(steps_per_epoch)+'_ep{epoch:02d}.hdf5'
model_ckpt = keras.callbacks.ModelCheckpoint(ckpt_filepath,period = 10)
myvalidation_steps = 5


train_datagen = SynthMap_DataGenerator_Centerline_Localheight_Dynamic(image_root_path = image_root_path, fonts_path=fonts_path,GB_path=GB_path,batch_size= 8,  seed = 3333, mode = 'training',overlap=True,showPicDir='../dynamicPics/')

#output: t/nt, centerline,
#model.compile(adam, loss = [weighted_categorical_crossentropy(weights1),weighted_categorical_crossentropy(weights2)  ,regress_loss2,regress_loss1,regress_loss2])
model.compile(adam, loss = [weighted_categorical_crossentropy(weights1),weighted_categorical_crossentropy(weights2)  ,regress_loss1])
callbacks = [csv_logger,model_ckpt]

#model.fit_generator(train_datagen, steps_per_epoch = 250, epochs = 100, validation_data = val_datagen, validation_steps = myvalidation_steps, callbacks = callbacks, shuffle=True,max_queue_size=5 )

#model.fit_generator(train_datagen, steps_per_epoch = steps_per_epoch, epochs = nb_epochs,  callbacks = callbacks, shuffle=True,max_queue_size=5 )

model.fit_generator(iter(train_datagen), steps_per_epoch = steps_per_epoch, epochs = nb_epochs,  callbacks = callbacks)

model.save('../weights/finetune_map_model_' + ith + '.hdf5')


'''
#model.load_weights('weights/pretrain_ugg_model_29_bsize8_w10_ep50.hdf5')
# Load pretrained weights before fine-tuning.
# NOTE(review): path prefix 'weights/' differs from the '../weights/' prefix
# used elsewhere in this file — confirm the intended working directory.
model.load_weights('weights/pretrain_ugg_model_32_bsize8_w1_ep100_inited.hdf5')
#model.compile('adam', loss = 'binary_crossentropy')