import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.layers import (Input, Dense, Dropout, Activation,
                                     Concatenate, GlobalAveragePooling2D)
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

import generator_fsl  # project-specific module (assumed importable) that builds the data generators


def train_model():
    def create_base_model():
        conv_base = ResNet50(include_top=False,
                             weights='imagenet',
                             input_shape=(224, 224, 3))

        # Optionally freeze the convolutional base so only the new layers train
        # conv_base.trainable = False
        x = conv_base.output
        x = tf.keras.layers.Dropout(0.5)(x)
        embedding = GlobalAveragePooling2D()(x)
        embedding = Dense(128)(embedding)  # 128-dimensional embedding per image
        return Model(conv_base.input, embedding)

    def SiameseNetwork(base_model):
        """
        Create the siamese model structure: the shared base model embeds both
        images, and a small dense head scores the pair for similarity.
        """
        input_a = Input(shape=(224, 224, 3), name="image1")
        input_b = Input(shape=(224, 224, 3), name="image2")

        processed_a = base_model(input_a)
        processed_b = base_model(input_b)

        head = Concatenate()([processed_a, processed_b])
        head = Dense(1)(head)
        head = Activation(activation='sigmoid')(head)
        return Model([input_a, input_b], head)

    train_ds, val_ds, test_ds, test_labels = generator_fsl.create_generators()

    base_model = create_base_model()
    siamese_network = SiameseNetwork(base_model)

    # siamese_network.save("test.h5")
    lr_schedule = tfa.optimizers.ExponentialCyclicalLearningRate(
        initial_learning_rate=1e-8,
        maximal_learning_rate=1e-6,
        step_size=240,
    )
    opt = Adam(learning_rate=lr_schedule)

    siamese_network.compile(optimizer=opt,
                            loss='binary_crossentropy',
                            metrics=['accuracy', 'RootMeanSquaredError'])

    history = siamese_network.fit(train_ds,
                                  epochs=100,
                                  steps_per_epoch=50,
                                  validation_data=val_ds,
                                  validation_steps=20)

    # Model.predict_classes only exists on Sequential (and was removed in TF >= 2.6);
    # threshold the sigmoid output instead.
    prediction = (siamese_network.predict(test_ds) > 0.5).astype("int32")
    evaluate = siamese_network.evaluate(test_ds, steps=32)

    return history, evaluate, prediction, test_labels
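
A minimal usage sketch for `train_model`, assuming it is called from module-level driver code; the thresholded-prediction comparison assumes `test_labels` is an array-like of 0/1 pair labels returned by `generator_fsl.create_generators()`:

import numpy as np

# Hypothetical driver code: run training once and inspect the outcome.
history, evaluation, prediction, test_labels = train_model()

# Per-epoch metrics recorded by Keras during fit()
print("final training accuracy:", history.history["accuracy"][-1])

# evaluate() returns [loss, accuracy, root_mean_squared_error] per the compile() call above
print("test evaluation:", evaluation)

# Compare thresholded pair predictions against the held-out labels
test_accuracy = np.mean(prediction.ravel() == np.asarray(test_labels).ravel())
print("test pair accuracy:", test_accuracy)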
Example #2
import numpy as np
from tensorflow import keras
from tensorflow.keras.layers import Input, Dense, BatchNormalization, Activation, Concatenate
from tensorflow.keras.models import Model

# `SiameseNetwork` below is the pair-training wrapper class from the external
# `siamese` helper package (exact import path assumed):
# from siamese import SiameseNetwork


def test_siamese():
    """
    Test that all components of the siamese network work correctly by executing
    a training run against generated data.
    """

    num_classes = 5
    input_shape = (3, )
    epochs = 1000

    # Generate some data
    x_train = np.random.rand(100, 3)
    y_train = np.random.randint(num_classes, size=100)

    x_test = np.random.rand(30, 3)
    y_test = np.random.randint(num_classes, size=30)

    # Define base and head model
    def create_base_model(input_shape):
        model_input = Input(shape=input_shape)

        embedding = Dense(4)(model_input)
        embedding = BatchNormalization()(embedding)
        embedding = Activation(activation='relu')(embedding)

        return Model(model_input, embedding)

    def create_head_model(embedding_shape):
        # output_shape includes the batch dimension, so strip it for the Input layers
        embedding_a = Input(shape=embedding_shape[1:])
        embedding_b = Input(shape=embedding_shape[1:])

        head = Concatenate()([embedding_a, embedding_b])
        head = Dense(4)(head)
        head = BatchNormalization()(head)
        head = Activation(activation='sigmoid')(head)

        head = Dense(1)(head)
        head = BatchNormalization()(head)
        head = Activation(activation='sigmoid')(head)

        return Model([embedding_a, embedding_b], head)

    # Create siamese neural network
    base_model = create_base_model(input_shape)
    head_model = create_head_model(base_model.output_shape)
    siamese_network = SiameseNetwork(base_model, head_model)

    # Prepare siamese network for training
    siamese_network.compile(loss='binary_crossentropy',
                            optimizer=keras.optimizers.Adam())

    # Evaluate network before training to establish a baseline
    score_before = siamese_network.evaluate(x_train,
                                            y_train,
                                            batch_size=64)

    # Train network
    siamese_network.fit(x_train,
                        y_train,
                        validation_data=(x_test, y_test),
                        batch_size=64,
                        epochs=epochs)

    # Evaluate network
    score_after = siamese_network.evaluate(x_train, y_train, batch_size=64)

    # Ensure that the training loss improved as a result of training
    assert score_before > score_after
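
If this check lives in its own module it can be collected by pytest or run directly; a minimal standalone runner sketch (nothing here beyond the call to `test_siamese` is part of the original):

# Hypothetical standalone runner for the smoke test above.
if __name__ == "__main__":
    test_siamese()
    print("siamese smoke test passed: training loss improved")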