Example #1
try:
    model.load_weights('working_model.hdf5')
    print("...Previous weight data...")
except:
    print("...New weight data...")
    pass

# Freeze all but the last 25 layers so only the head is fine-tuned
for layer in model.layers[:-25]:
    layer.trainable = False

print(model.summary())

train_file_path = './DB/cctv_test_real/'
tiger_train = TextImageGenerator(train_file_path, img_w, img_h, batch_size,
                                 downsample_factor)
tiger_train.build_data("train")

valid_file_path = './DB/cctv_test_real/'
tiger_val = TextImageGenerator(valid_file_path, img_w, img_h, val_batch_size,
                               downsample_factor)
tiger_val.build_data("val")
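TextImageGenerator is a project-specific helper rather than a Keras class, and these snippets only show how it is called. The stub below is a minimal sketch of the interface those calls assume (the constructor arguments, build_data, next_batch, and the n sample count); the label handling, input names, and batch layout are illustrative assumptions, not the project's actual implementation.

import os
import numpy as np

class TextImageGenerator:
    """Assumed interface: loads labeled text images from a directory and
    yields (inputs, dummy_targets) batches for CTC training."""

    def __init__(self, dirpath, img_w, img_h, batch_size, downsample_factor):
        self.dirpath = dirpath
        self.img_w, self.img_h = img_w, img_h
        self.batch_size = batch_size
        self.downsample_factor = downsample_factor
        self.samples = []   # (image_path, label_text) pairs
        self.n = 0          # sample count, used for steps_per_epoch below

    def build_data(self, *args, **kwargs):
        # The three examples call this differently ("train", no argument,
        # Train=True), so the split handling is left open here.
        for fname in os.listdir(self.dirpath):
            label = os.path.splitext(fname)[0]   # label assumed to be encoded in the filename
            self.samples.append((os.path.join(self.dirpath, fname), label))
        self.n = len(self.samples)

    def next_batch(self, max_label_len=10):
        # Infinite generator expected by fit_generator: inputs are keyed by
        # layer name, and the 'ctc' target is a dummy because the loss is
        # computed inside the model.
        while True:
            for start in range(0, self.n, self.batch_size):
                batch = self.samples[start:start + self.batch_size]
                size = len(batch)
                images = np.zeros((size, self.img_w, self.img_h, 1))  # placeholder; real code would load and normalize the files
                labels = np.zeros((size, max_label_len))
                input_length = np.full((size, 1), self.img_w // self.downsample_factor)
                label_length = np.array([[len(lbl)] for _, lbl in batch])
                yield ({'the_input': images,
                        'the_labels': labels,
                        'input_length': input_length,
                        'label_length': label_length},
                       {'ctc': np.zeros((size,))})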

ada = Adadelta()

early_stop = EarlyStopping(monitor='loss',
                           min_delta=0.001,
                           patience=4,
                           mode='min',
                           verbose=1)
#checkpoint = ModelCheckpoint(filepath='Epochs/LSTM+BN5--{epoch:02d}--{val_loss:.3f}.hdf5', monitor='loss', verbose=1, mode='min', period=1)
checkpoint = ModelCheckpoint(filepath='working_model.hdf5',
                             monitor='loss', verbose=1, mode='min', period=1)
Example #2
K.set_learning_phase(0)

# # Model description and training

model = get_Model(training=True)

try:
    model.load_weights('LSTM+BN4--26--0.011.hdf5')
    print("...Previous weight data...")
except:
    print("...New weight data...")
    pass

train_file_path = './DB/train/'  # this directory contains only the prepared images, no other subfolders
tiger_train = TextImageGenerator(train_file_path, img_w, img_h, batch_size, downsample_factor)
tiger_train.build_data()

valid_file_path = './DB/test/'  # this directory contains only the prepared images, no other subfolders
tiger_val = TextImageGenerator(valid_file_path, img_w, img_h, val_batch_size, downsample_factor)
tiger_val.build_data()

ada = Adadelta()

early_stop = EarlyStopping(monitor='loss', min_delta=0.001, patience=4, mode='min', verbose=1)
checkpoint = ModelCheckpoint(filepath='LSTM+BN5--{epoch:02d}--{val_loss:.3f}.hdf5', monitor='loss', verbose=1, mode='min', period=1)
# the loss calc occurs elsewhere, so use a dummy lambda func for the loss
model.compile(loss={'ctc': lambda y_true, y_pred: y_pred}, optimizer=ada)
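The dummy lambda loss only works because the model's 'ctc' output already is the CTC loss value. None of these snippets show how get_Model builds that output, but the usual Keras pattern, sketched here under the assumption that the model follows it, wraps K.ctc_batch_cost in a Lambda layer fed by the softmax output, the labels, and the two length tensors (the backbone below is a toy stand-in, not the project's architecture):

from keras import backend as K
from keras.layers import Dense, Input, Lambda
from keras.models import Model

def ctc_lambda_func(args):
    # K.ctc_batch_cost returns one CTC loss value per sample, so the model's
    # 'ctc' output already *is* the loss; the identity lambda passed to
    # compile() above simply forwards it.
    y_pred, labels, input_length, label_length = args
    return K.ctc_batch_cost(labels, y_pred, input_length, label_length)

# Toy stand-in for the real CNN+BiLSTM backbone, just to show the wiring.
num_classes, max_label_len, timesteps = 37, 10, 32
image_input = Input(name='the_input', shape=(timesteps, 64))
softmax_out = Dense(num_classes, activation='softmax', name='softmax')(image_input)

labels = Input(name='the_labels', shape=(max_label_len,))
input_length = Input(name='input_length', shape=(1,))
label_length = Input(name='label_length', shape=(1,))

loss_out = Lambda(ctc_lambda_func, output_shape=(1,), name='ctc')(
    [softmax_out, labels, input_length, label_length])
training_model = Model(inputs=[image_input, labels, input_length, label_length],
                       outputs=loss_out)
training_model.compile(loss={'ctc': lambda y_true, y_pred: y_pred}, optimizer='adadelta')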

# captures output of softmax so we can decode the output during visualization
model.fit_generator(generator=tiger_train.next_batch(),
                    steps_per_epoch=int(tiger_train.n / batch_size),
                    epochs=30,  # illustrative value
                    callbacks=[checkpoint, early_stop],
                    validation_data=tiger_val.next_batch(),
                    validation_steps=int(tiger_val.n / val_batch_size))
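The comment above about capturing the softmax output is not followed by any capture or decoding code in this snippet. A common way to do it, shown here as an assumption rather than the project's actual visualization code, is to build a backend function up to the softmax layer and run greedy CTC decoding with K.ctc_decode (the layer names 'softmax' and 'the_input' follow the wiring sketch above and may differ in the real model):

import numpy as np
from keras import backend as K

# Backend function from the image input to the per-timestep class probabilities.
capture_softmax = K.function([model.get_layer('the_input').input],
                             [model.get_layer('softmax').output])

def decode_batch(image_batch):
    # Greedy CTC decoding of one batch of images into label index sequences.
    y_pred = capture_softmax([image_batch])[0]
    input_lengths = np.full(y_pred.shape[0], y_pred.shape[1])
    decoded, _ = K.ctc_decode(y_pred, input_length=input_lengths, greedy=True)
    return K.get_value(decoded[0])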
Example #3
K.set_learning_phase(0)

model = get_Model(training=True)

try:
    model.load_weights('../Pretrained_weights/Model--20--12.879.hdf5')
    print("...Previous Weights found...")
except:
    print('...Using New Weights...')
    pass

train_file_path = '../data/train/'
tiger_train = TextImageGenerator(train_file_path, img_w, img_h, batch_size,
                                 downsample_factor)
print('Building Data')
tiger_train.build_data(Train=True)
print('Done Building Data')

valid_file_path = '../data/val/'
tiger_val = TextImageGenerator(valid_file_path, img_w, img_h, val_batch_size,
                               downsample_factor)
tiger_val.build_data(Train=False)

# sgd = SGD(lr=0.02,
#               decay=1e-6,
#               momentum=0.9,
#               nesterov=True)
ada = Adam(lr=0.001)
early_stop = EarlyStopping(monitor='loss',
                           min_delta=0.001,
                           patience=4,