# In[20]:

# Python 3: `reload` is no longer a builtin.
from importlib import reload

import utils; reload(utils)
from utils import DataGenerator

NUM_TRAIN_PAIRS = 150000
NUM_VAL_PAIRS = 10000
BATCH_SIZE = 128

datagen = DataGenerator(X_train, y_train,
                        num_train_pairs=NUM_TRAIN_PAIRS,
                        num_val_pairs=NUM_VAL_PAIRS,
                        X_val=X_val[val_train],
                        y_val=y_val[val_train],
                        train_alphabet_to_index=train_alphabet_to_index,
                        val_alphabet_to_index=val_train_index,
                        batch_sz=BATCH_SIZE,
                        verbose=True)

# Light augmentation: small rotations, shifts, and shears keep the
# characters legible while still varying the training pairs.
datagen.create_data_transformer(rotation_range=10,
                                width_shift_range=0.01,
                                height_shift_range=0.01,
                                shear_range=0.01)

STEPS_PER_EPOCH = NUM_TRAIN_PAIRS // BATCH_SIZE
VALIDATION_STEPS = NUM_VAL_PAIRS // BATCH_SIZE

from keras.optimizers import Adam
from keras.callbacks import LearningRateScheduler

learning_rate = 5e-5
adam = Adam(learning_rate)
# Exponential decay: lr * 0.985**epoch (e.g. ~4.3e-5 by epoch 10).
scheduler = LearningRateScheduler(lambda epoch: learning_rate * pow(0.985, epoch))

siamese_net.compile(loss='binary_crossentropy', optimizer=adam, metrics=['accuracy'])
siamese_net.load_weights(INIT_WEIGHTS)
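# A minimal sketch of how the pieces above could be wired into training.
# `datagen.next_train()` and `datagen.next_val()` are hypothetical names:
# substitute whatever batch-generator interface utils.DataGenerator
# actually exposes.
siamese_net.fit_generator(datagen.next_train(),
                          steps_per_epoch=STEPS_PER_EPOCH,
                          epochs=50,  # placeholder epoch budget
                          validation_data=datagen.next_val(),
                          validation_steps=VALIDATION_STEPS,
                          callbacks=[scheduler])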
# In[50]:

import utils; reload(utils)
from utils import TripletGenerator

NUM_TRAIN_TRIPLETS = 300000
NUM_VAL_TRIPLETS = 10000
BATCH_SIZE = 200

datagen = TripletGenerator(X_train, y_train, X_val, y_val,
                           num_train_triplets=NUM_TRAIN_TRIPLETS,
                           num_val_triplets=NUM_VAL_TRIPLETS,
                           train_alphabet_to_index=train_alphabet_to_index,
                           test_alphabet_to_index=test_alphabet_to_index,
                           batch_sz=BATCH_SIZE,
                           random_transform=True)


# In[18]:

datagen.create_data_transformer()


# In[32]:

from keras.callbacks import ReduceLROnPlateau, ModelCheckpoint, EarlyStopping

# Halve the learning rate after 3 epochs without val_loss improvement.
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3,
                              verbose=1, min_lr=1e-8)
# Stop if one-shot accuracy fails to improve by at least 1e-4 for 25 epochs.
early_stopping = EarlyStopping(monitor='oneshot_acc', min_delta=1e-4,
                               patience=25, verbose=0, mode='auto')
# Keep only the weights with the best one-shot accuracy seen so far.
checkpointer = ModelCheckpoint(filepath=CHECKPOINTED_WEIGHTS, verbose=1,
                               save_best_only=True, monitor='oneshot_acc')
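# A minimal sketch of passing all three callbacks to a fit call. Note that
# 'oneshot_acc' is not a built-in Keras metric, so EarlyStopping and
# ModelCheckpoint can only monitor it if an earlier callback writes an
# 'oneshot_acc' entry into the epoch logs. `triplet_net`,
# `datagen.next_train()`, and `datagen.next_val()` are hypothetical names
# for illustration.
triplet_net.fit_generator(datagen.next_train(),
                          steps_per_epoch=NUM_TRAIN_TRIPLETS // BATCH_SIZE,
                          epochs=100,  # placeholder epoch budget
                          validation_data=datagen.next_val(),
                          validation_steps=NUM_VAL_TRIPLETS // BATCH_SIZE,
                          callbacks=[reduce_lr, early_stopping, checkpointer])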