Example 1
0
    def validate_base_model_creation(self, feature_model_name, module_path):
        """Checks that building the named base model calls the patched module exactly once.

        Arguments:
            feature_model_name {string} -- The name of the base model to instantiate.
            module_path {string} -- The import path to patch while the model is built.
        """
        #Arrange
        model_handler = BaseModel(feature_model_name, input_shape)

        #Act & Assert
        with mock_patch(module_path) as patched_module:
            model_handler.base_model()
            patched_module.assert_called_once()
Example 2
0
def siamese_triplet(base_model_name, input_shape, learning_rate, feature_dims):
    """It creates a siamese triplet model using the input as a base model.

    Arguments:
        base_model_name {string} -- A string containing the name of a base model.
        input_shape {(int, int, int))} -- A tuple to indicate the shape of inputs.
        learning_rate {float} -- A float value to control speed of learning.
        feature_dims {int} -- An integer indicating the dimensions of the feature vector.

    Returns:
        {A Model object} -- A keras model.
    """
    #Shared feature extractor derived from the named base model
    feature_model = BaseModel(base_model_name, input_shape).base_model()

    #One input layer per branch of the triplet
    branch_names = ['Anchor', 'Positive', 'Negative']
    branch_inputs = [Input(shape = input_shape, name = branch_name) for branch_name in branch_names]

    #Run all three branches through the same shared extractor
    anchor_features, positive_features, negative_features = [feature_model(branch_input) for branch_input in branch_inputs]

    #Triplet loss layer combining the three feature vectors into a single scalar
    X = Lambda(triplet_loss, output_shape = (1, ))([anchor_features, positive_features, negative_features])

    #Assemble and compile the model with an Adam optimizer
    model = Model(inputs = branch_inputs, outputs = [X], name = 'Siamese Triplet')
    model.compile(loss = 'mae', optimizer = Adam(lr = learning_rate), metrics = ['accuracy'])
    model.summary()

    return model
Example 3
0
 def test_invalid_base_model_name(self):
     """An unrecognized base model name must raise a ValueError."""
     #Arrange Act & Assert
     with self.assertRaises(ValueError):
         BaseModel('nanana', input_shape).base_model()
Example 4
0
def siamese_network(base_model_name, input_shape, learning_rate, feature_dims):
    """It creates a siamese network model using the input as a base model.

    Arguments:
        base_model_name {string} -- A string containing the name of a base model.
        input_shape {(int, int, int))} -- A tuple to indicate the shape of inputs.
        learning_rate {float} -- A float value to control speed of learning.
        feature_dims {int} -- An integer indicating the dimensions of the feature vector.

    Returns:
        {A Model object} -- A keras model.
    """
    #Shared feature extractor derived from the named base model
    feature_model = BaseModel(base_model_name, input_shape).base_model()

    #Siamese inputs, one per branch
    anchor_input = Input(shape = input_shape, name = 'Anchor')
    sample_input = Input(shape = input_shape, name = 'Sample')

    #Feature vectors produced by the shared extractor
    anchor_features = feature_model(anchor_input)
    sample_features = feature_model(sample_input)

    #Pairwise combinations of the two feature vectors
    feature_pair = [anchor_features, sample_features]
    lambda_product = Lambda(lambda pair : pair[0] * pair[1])(feature_pair)
    lambda_add = Lambda(lambda pair : pair[0] + pair[1])(feature_pair)
    lambda_abs = Lambda(lambda pair : K.abs(pair[0] - pair[1]))(feature_pair)
    lambda_eucledian_dist = Lambda(lambda diff: K.square(diff))(lambda_abs)

    #Width of the extracted feature vector, reused by the reshape layers
    feature_width = feature_model.output_shape[1]

    #Layer specifications for the comparison head
    layer_specifications = [
        #Concatenate and reshape lambda outputs
        LayerSpecification(LayerType.Concatenate),
        LayerSpecification(LayerType.Reshape, (4, feature_width, 1)),

        #Convolution layer #1
        LayerSpecification(LayerType.Conv2D, 32, (4, 1), activation = 'relu', padding = 'valid'),
        LayerSpecification(LayerType.Reshape, (feature_width, 32, 1)),

        #Convolution layer #2
        LayerSpecification(LayerType.Conv2D, 1, (1, 32), activation = 'linear', padding = 'valid'),

        #Flatten
        LayerSpecification(LayerType.Flatten),

        #Output unit
        LayerSpecification(LayerType.Dense, 1, activation = 'sigmoid', use_bias = True)
    ]

    #Output of the comparison head applied to the four lambda outputs
    model_specification = ModelSpecification(layer_specifications)
    X = model_specification.get_specification([lambda_product, lambda_add, lambda_abs, lambda_eucledian_dist])

    #Assemble and compile the model with an Adam optimizer
    model = Model(inputs = [anchor_input, sample_input], outputs = [X], name = 'Siamese Model')
    model.compile(loss = 'binary_crossentropy', optimizer = Adam(lr = learning_rate), metrics = ['accuracy'])
    model.summary()

    return model