def train_model():
    """Build, train and evaluate a siamese similarity network on image pairs.

    Data comes from ``generator_fsl.create_generators()`` (train/val/test
    generators plus the test labels — assumed; confirm against that module).

    Returns:
        tuple: (history, evaluate, prediction, test_labels) where
            history    -- Keras ``History`` object from ``fit``,
            evaluate   -- list of loss/metric values from ``evaluate``,
            prediction -- int32 array of 0/1 class predictions on the test set,
            test_labels -- labels passed through from the generator module.
    """
    def create_base_model():
        """ImageNet-pretrained ResNet50 backbone mapped to a 128-d embedding."""
        conv_base = ResNet50(include_top=False,
                             weights='imagenet',
                             input_shape=(224, 224, 3))

        x = conv_base.output
        x = tf.keras.layers.Dropout(0.5)(x)
        embedding = GlobalAveragePooling2D()(x)
        embedding = Dense(128)(embedding)
        return Model(conv_base.input, embedding)

    def SiameseNetwork(base_model):
        """
        Create the siamese model structure using the supplied base and head model.

        The shared ``base_model`` embeds both images; the head concatenates the
        two embeddings and produces a sigmoid similarity score in [0, 1].
        """
        input_a = Input(shape=(224, 224, 3), name="image1")
        input_b = Input(shape=(224, 224, 3), name="image2")

        processed_a = base_model(input_a)
        processed_b = base_model(input_b)

        head = Concatenate()([processed_a, processed_b])
        head = Dense(1)(head)
        head = Activation(activation='sigmoid')(head)
        return Model([input_a, input_b], head)

    train_ds, val_ds, test_ds, test_labels = generator_fsl.create_generators()

    base_model = create_base_model()
    siamese_network = SiameseNetwork(base_model)

    lr_schedule = tfa.optimizers.ExponentialCyclicalLearningRate(
        initial_learning_rate=1e-8,
        maximal_learning_rate=1e-6,
        step_size=240,
    )
    # BUG FIX: the cyclical schedule above was created but never used — Adam was
    # built with a fixed learning_rate=1e-8. Pass the schedule to the optimizer.
    opt = Adam(learning_rate=lr_schedule)

    siamese_network.compile(optimizer=opt,
                            loss='binary_crossentropy',
                            metrics=['accuracy', 'RootMeanSquaredError'])

    history = siamese_network.fit(train_ds,
                                  epochs=100,
                                  steps_per_epoch=50,
                                  validation_data=val_ds,
                                  validation_steps=20)

    # BUG FIX: functional `Model` has no `predict_classes` (Sequential-only and
    # removed in TF 2.6); threshold the sigmoid outputs at 0.5 instead.
    prediction = (siamese_network.predict(test_ds) > 0.5).astype("int32")
    evaluate = siamese_network.evaluate(test_ds, steps=32)

    return history, evaluate, prediction, test_labels
# --- Example #2 ---
def test_siamese():
    """
    Test that all components of the siamese network work correctly by executing
    a training run against generated data.

    A random 3-feature, 5-class dataset is generated; the training loss is
    measured before and after training and is asserted to have decreased.
    """

    num_classes = 5
    input_shape = (3, )
    epochs = 1000

    # Generate some data
    x_train = np.random.rand(100, 3)
    y_train = np.random.randint(num_classes, size=100)

    x_test = np.random.rand(30, 3)
    y_test = np.random.randint(num_classes, size=30)

    # Define base and head model
    def create_base_model(input_shape):
        """Tiny dense embedding network: Dense(4) -> BatchNorm -> ReLU."""
        model_input = Input(shape=input_shape)

        embedding = Dense(4)(model_input)
        embedding = BatchNormalization()(embedding)
        embedding = Activation(activation='relu')(embedding)

        return Model(model_input, embedding)

    def create_head_model(embedding_shape):
        """Head that scores a pair of embeddings with a sigmoid output."""
        embedding_a = Input(shape=embedding_shape)
        embedding_b = Input(shape=embedding_shape)

        head = Concatenate()([embedding_a, embedding_b])
        head = Dense(4)(head)
        head = BatchNormalization()(head)
        head = Activation(activation='sigmoid')(head)

        head = Dense(1)(head)
        head = BatchNormalization()(head)
        head = Activation(activation='sigmoid')(head)

        return Model([embedding_a, embedding_b], head)

    # Create siamese neural network
    base_model = create_base_model(input_shape)
    head_model = create_head_model(base_model.output_shape)
    siamese_network = SiameseNetwork(base_model, head_model)

    # Prepare siamese network for training
    # BUG FIX: the optimizer class is `Adam`, not `adam` — the lowercase name
    # is not a public callable in the Keras 2 API.
    siamese_network.compile(loss='binary_crossentropy',
                            optimizer=keras.optimizers.Adam())

    # Evaluate network before training to establish a baseline
    # BUG FIX: `evaluate_generator` takes a generator (and no `y`/`batch_size`
    # arguments); use `evaluate`, matching the post-training call below.
    score_before = siamese_network.evaluate(x_train, y_train, batch_size=64)

    # Train network
    siamese_network.fit(x_train,
                        y_train,
                        validation_data=(x_test, y_test),
                        batch_size=64,
                        epochs=epochs)

    # Evaluate network
    score_after = siamese_network.evaluate(x_train, y_train, batch_size=64)

    # Ensure that the training loss score improved as a result of the training
    assert (score_before > score_after)
# --- Example #3 ---
import numpy as np
from siamese import SiameseNetwork, create_pairs, compute_accuracy

# Set constants
BATCH_SIZE = 128   # mini-batch size for pair training
N_EPOCHS = 20      # number of training epochs
CLASS_DIM = 120    # number of distinct classes in the label set

# Construct input data
# NOTE(review): files saved with np.save despite the ".np" extension — the
# paths are hard-coded to this book's working directory.
with open('/book/working/data/inter_emb.np', 'rb') as f:
    X_train = np.array(np.load(f), dtype=np.float32)
with open('/book/working/data/labels.np', 'rb') as f:
    y_train = np.array(np.load(f), dtype=np.int8)
    # One index array per class: positions of all samples with label i.
    # NOTE(review): int8 tops out at 127, so CLASS_DIM=120 only just fits —
    # verify the cast if the class count ever grows.
    digit_indices = [np.where(y_train == i)[0] for i in range(CLASS_DIM)]
# Build positive/negative training pairs from the per-class indices
# (pair-construction semantics live in the external `siamese` module).
tr_pairs, tr_y = create_pairs(X_train, digit_indices, CLASS_DIM)

# Construct Siamese network
# SiameseNetwork() returns both the pair-scoring model and the shared base
# embedding network (base_network is unused below but kept for later reuse).
model, base_network = SiameseNetwork()
model.fit([tr_pairs[:, 0], tr_pairs[:, 1]],
          tr_y,
          batch_size=BATCH_SIZE,
          epochs=N_EPOCHS)

# Compute final accuracy on training set
y_pred = model.predict([tr_pairs[:, 0], tr_pairs[:, 1]])
tr_acc = compute_accuracy(tr_y, y_pred)
print('* Accuracy on training set: %0.2f%%' % (100 * tr_acc))

# Save
model.save('/book/working/models/siamese.h5')
# --- Example #4 ---
                        optimizer=keras.optimizers.adam(),
                        metrics=['accuracy'])

# Where ModelCheckpoint writes the best-so-far siamese weights.
siamese_checkpoint_path = "./siamese_checkpoint"

# Stop early if validation accuracy stalls; keep only the best checkpoint.
# NOTE(review): 'val_acc' is the pre-Keras-2.3 metric name — newer Keras
# reports 'val_accuracy'; confirm against the version in use.
siamese_callbacks = [
    EarlyStopping(monitor='val_acc', patience=10, verbose=0),
    ModelCheckpoint(siamese_checkpoint_path,
                    monitor='val_acc',
                    save_best_only=True,
                    verbose=0)
]

# Train on raw (x, y) labels — pair construction is presumably handled inside
# the SiameseNetwork wrapper defined earlier in this snippet; verify there.
siamese_network.fit(x_train,
                    y_train,
                    validation_data=(x_test, y_test),
                    batch_size=1000,
                    epochs=epochs,
                    callbacks=siamese_callbacks)

# Restore the best weights found during training before reusing the embedding.
siamese_network.load_weights(siamese_checkpoint_path)
# Last output tensor of the (now-trained) shared base network.
embedding = base_model.outputs[-1]

# Convert integer labels to one-hot for the classification fine-tune below.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# Add softmax layer to the pre-trained embedding network
# NOTE(review): despite the comment, the activation used is sigmoid, not
# softmax — with one-hot targets a softmax would be the conventional choice.
embedding = Dense(num_classes)(embedding)
embedding = BatchNormalization()(embedding)
embedding = Activation(activation='sigmoid')(embedding)

# Classifier that reuses the siamese-trained base as a feature extractor.
model = Model(base_model.inputs[0], embedding)