# Example 1
def create_model(shape):
    """Build a modified LeNet-5 as a Keras ``Sequential`` model.

    Modified Architecture -- ConvNet --> Pool --> ConvNet --> Pool --> (
    Flatten) --> FullyConnected --> FullyConnected --> Softmax

    :param shape: -- shape of the images of the dataset
    :return: result -- a ``Sequential`` instance named ``'lenet5'``
    """
    # Name the model through the constructor instead of assigning the
    # private ``_name`` attribute, which is not part of the public API.
    result = Sequential(name='lenet5')

    # Layer 1: 6 filters of 5x5, stride 1, then 2x2 max-pooling.
    result.add(
        Conv2D(filters=6,
               kernel_size=5,
               strides=1,
               activation='relu',
               input_shape=shape,
               name='convolution_1'))
    result.add(MaxPooling2D(pool_size=2, strides=2, name='max_pool_1'))

    # Layer 2: 16 filters of 5x5, stride 1, then 2x2 max-pooling.
    result.add(
        Conv2D(filters=16,
               kernel_size=5,
               strides=1,
               activation='relu',
               name='convolution_2'))
    result.add(MaxPooling2D(pool_size=2, strides=2, name='max_pool_2'))

    # Layer 3: flatten feature maps, then first fully-connected layer.
    result.add(Flatten(name='flatten'))
    result.add(Dense(units=120, activation='relu', name='fully_connected_1'))

    # Layer 4: second fully-connected layer.
    result.add(Dense(units=84, activation='relu', name='fully_connected_2'))

    # Output: 10-way softmax classifier.
    result.add(Dense(units=10, activation='softmax', name='output'))
    return result
# Example 2
    def OptmizedCNNv1(input_shape=(28, 28, 1), classes=10):
        """Build an optimized LeNet-style CNN.

        Modified Architecture -- ConvNet --> MaxPool --> ConvNet -->
        MaxPool --> (Flatten) --> FullyConnected --> FullyConnected -->
        Softmax

        :param input_shape: shape of one input sample. Previously ignored:
            the input shape was hard-coded to ``(28, 28, 1)``.
        :param classes: number of output classes. Previously ignored: the
            output width was hard-coded to 10.
        :return: a Keras ``Sequential`` model named ``'OptimizedCNNv1'``
        """
        model = Sequential(
            [
                # Layer 1
                Conv2D(
                    filters=6,
                    kernel_size=3,
                    strides=1,
                    activation="relu",
                    input_shape=input_shape,  # was hard-coded (28, 28, 1)
                    name="convolution1",
                ),
                MaxPooling2D(pool_size=2, strides=2, name="maxpool1"),
                # Layer 2
                Conv2D(
                    filters=16,
                    kernel_size=3,
                    strides=1,
                    activation="relu",
                    name="convolution2",
                ),
                MaxPooling2D(pool_size=2, strides=2, name="maxpool2"),
                # Layer 3
                Flatten(name="flatten"),
                Dense(units=120, activation="relu", name="fullyconnected1"),
                # Layer 4
                Dense(units=84, activation="relu", name="fullyconnected2"),
                # Output: width follows ``classes`` (was hard-coded to 10).
                Dense(units=classes, activation="softmax", name="output"),
            ],
            # Name via the constructor instead of the private ``_name``.
            name="OptimizedCNNv1",
        )

        return model
# Example 3
def init_convlrs_model(conv_size, lrs_size, lrs_levels, conv_time=False, lrs_diff=True, lrs_time=True, recursive_tensors=True, name_only=False, input_shape=None, num_classes=None, lrs_norm='BN'):
    """Build a Conv1D-preprocessed, stacked-LRS sequence classifier.

    :param conv_size: number of filters in each Conv1D preprocessing layer
    :param lrs_size: width of each LRS layer
    :param lrs_levels: number of levels in each LRS layer
    :param conv_time: prepend a ``Time()`` channel before each Conv1D layer
    :param lrs_diff: apply ``Difference()`` before each LRS layer
    :param lrs_time: prepend a ``Time()`` channel before each LRS layer
    :param recursive_tensors: forwarded to ``LRS``
    :param name_only: if True, return only the generated model-name string
    :param input_shape: shape of one input sequence, ``(time, features)``
    :param num_classes: number of softmax output classes
    :param lrs_norm: 'BN', 'TBN' or 'LN' -- normalization after each
        sequence-to-sequence LRS block
    :return: the model-name string if ``name_only`` else a ``Sequential``
    """
    conv_name = 'TConv' if conv_time else 'Conv'

    lrs_time_tag = 'T' if lrs_time else ''
    lrs_diff_tag = 'D' if lrs_diff else ''
    lrs_recurrent_tag = 'R' if recursive_tensors else ''
    lrs_name = '{}{}{}LRS{}'.format(lrs_time_tag, lrs_diff_tag, lrs_recurrent_tag, lrs_norm)

    model_name = '{}{}_H{}_W{}'.format(conv_name, lrs_name, conv_size, lrs_size)

    if name_only:
        return model_name

    num_sig_layers = 3

    # Name via the constructor instead of assigning the private ``_name``.
    model = Sequential(name=model_name)

    model.add(InputLayer(input_shape=input_shape))

    # Three Conv1D preprocessing stages with kernel sizes 8, 5 and 3
    # (previously three copy-pasted stanzas).
    for kernel_size in (8, 5, 3):
        if conv_time:
            model.add(Time())
        model.add(Conv1D(conv_size, kernel_size, padding='same', kernel_initializer='he_uniform'))
        model.add(BatchNormalization(axis=-1))
        model.add(Activation('relu'))

    # Sequence-to-sequence LRS blocks (all but the last).
    for i in range(num_sig_layers - 1):
        if lrs_time:
            model.add(Time())
        if lrs_diff:
            model.add(Difference())
        model.add(LRS(lrs_size, lrs_levels, return_sequences=True, recursive_tensors=recursive_tensors))
        # NOTE(review): the ``input_shape[0] - i - 1`` target length assumes
        # each Difference() pass shortens the sequence by one; this looks
        # wrong when lrs_diff=False -- confirm against the LRS/Difference
        # layer implementations.
        model.add(Reshape((input_shape[0] - i - 1, lrs_levels, lrs_size)))
        if lrs_norm == 'BN':
            model.add(BatchNormalization(axis=[-2]))
        elif lrs_norm == 'TBN':
            model.add(BatchNormalization(axis=[-2, -1]))
        elif lrs_norm == 'LN':
            model.add(LayerNormalization(axis=[-3, -1]))
        model.add(Reshape((input_shape[0] - i - 1, lrs_levels * lrs_size)))

    # Final sequence-to-vector LRS block.
    if lrs_time:
        model.add(Time())
    if lrs_diff:
        model.add(Difference())
    model.add(LRS(lrs_size, lrs_levels, return_sequences=False, recursive_tensors=recursive_tensors))
    model.add(Reshape((lrs_levels, lrs_size)))
    model.add(BatchNormalization(axis=1))
    model.add(Reshape((lrs_levels * lrs_size,)))

    model.add(Dense(num_classes, activation='softmax'))

    return model
# Example 4
def init_convlrs2_model(input_shape,
                        num_levels,
                        num_hidden,
                        num_classes,
                        difference=True,
                        add_time=True,
                        recursive_tensors=True,
                        reverse=False,
                        layer_norm=False):
    """Build a Conv1D-preprocessed, stacked-LRS sequence classifier (v2).

    :param input_shape: shape of one input sequence, ``(time, features)``
    :param num_levels: number of levels in each LRS layer
    :param num_hidden: width of the Conv1D layers; per-LRS width is
        ``num_hidden / num_levels``
    :param num_classes: number of softmax output classes
    :param difference: apply ``Difference()`` before each LRS layer
    :param add_time: prepend a ``Time()`` channel before each layer group
    :param recursive_tensors: forwarded to ``LRS``
    :param reverse: forwarded to ``LRS``
    :param layer_norm: use LayerNormalization instead of BatchNormalization
        after each LRS block
    :return: a Keras ``Sequential`` model
    """
    sig_layers = 3
    sig_hidden = int(num_hidden / num_levels)

    layers = [InputLayer(input_shape=input_shape)]

    # Three Conv1D preprocessing stages with kernel sizes 8, 5 and 3.
    for kernel_size in (8, 5, 3):
        if add_time:
            layers.append(Time())
        layers.append(Conv1D(num_hidden, kernel_size, padding='same'))
        layers.append(BatchNormalization(axis=-1))
        layers.append(Activation('relu'))

    def norm_layer(bn_axis):
        # Configured normalization for a post-LRS block.
        if layer_norm:
            return LayerNormalization(axis=-1, center=False, scale=False)
        return BatchNormalization(axis=bn_axis, center=False, scale=False)

    # Sequence-to-sequence LRS blocks (all but the last).
    for _ in range(sig_layers - 1):
        if add_time:
            layers.append(Time())
        if difference:
            layers.append(Difference())
        layers.append(
            LRS(sig_hidden,
                num_levels,
                return_sequences=True,
                reverse=reverse,
                recursive_tensors=recursive_tensors))
        # Split the flat feature axis into (levels, hidden) to normalize.
        layers.append(Reshape((input_shape[0], num_levels, sig_hidden)))
        layers.append(norm_layer([1, 2]))
        layers.append(Reshape((input_shape[0], sig_hidden * num_levels)))

    # Final sequence-to-vector LRS block.
    if add_time:
        layers.append(Time())
    if difference:
        layers.append(Difference())
    layers.append(
        LRS(sig_hidden,
            num_levels,
            reverse=reverse,
            recursive_tensors=recursive_tensors))
    # NOTE(review): the dimension order here is (hidden, levels), the
    # reverse of the (levels, hidden) order used inside the loop above --
    # confirm this is intended.
    layers.append(Reshape((sig_hidden, num_levels)))
    layers.append(norm_layer(1))
    layers.append(Reshape((sig_hidden * num_levels,)))

    layers.append(Dense(num_classes, activation='softmax'))

    model = Sequential(layers)

    diff_tag = 'D' if difference else ''
    norm_tag = 'LN' if layer_norm else 'BN'

    model._name = 'Conv{}LRS{}2_M{}_H{}'.format(diff_tag, norm_tag, num_levels,
                                                num_hidden)

    return model
# Example 5
def init_ls2t_model(preprocess_size,
                    ls2t_size,
                    ls2t_order,
                    ls2t_depth,
                    preprocess='conv',
                    preprocess_time=True,
                    ls2t_diff=True,
                    ls2t_time=True,
                    recursive_tensors=True,
                    name_only=False,
                    input_shape=None,
                    num_classes=None):
    """Build a (Conv1D- or Dense-) preprocessed, stacked-LS2T classifier.

    :param preprocess_size: width of each preprocessing layer
    :param ls2t_size: width of each LS2T layer
    :param ls2t_order: truncation order of the LS2T layers
    :param ls2t_depth: number of stacked LS2T blocks
    :param preprocess: 'conv', 'dense', or anything else for no
        preprocessing stack at all
    :param preprocess_time: prepend ``Time()`` before each preprocess layer
    :param ls2t_diff: apply ``Difference()`` before each LS2T layer
    :param ls2t_time: prepend ``Time()`` before each LS2T layer
    :param recursive_tensors: forwarded as ``recursive_weights`` to LS2T
    :param name_only: if True, return only the generated model-name string
    :param input_shape: shape of one input sequence, ``(time, features)``
    :param num_classes: number of softmax output classes
    :return: the model-name string if ``name_only`` else a ``Sequential``
    """
    preprocess = preprocess.lower()

    if preprocess != 'conv' and preprocess != 'dense':
        preprocess_name = ''
    else:
        preprocess_name = 'Conv' if preprocess == 'conv' else 'Dense'

    ls2t_recurrent_tag = 'R' if recursive_tensors else ''
    ls2t_name = 'LS2T{}'.format(ls2t_recurrent_tag)

    model_name = '{}{}_H{}_N{}_M{}_D{}'.format(preprocess_name, ls2t_name,
                                               preprocess_size, ls2t_size,
                                               ls2t_order, ls2t_depth)

    if name_only:
        return model_name

    # Name via the constructor instead of assigning the private ``_name``.
    model = Sequential(name=model_name)

    model.add(InputLayer(input_shape=input_shape))
    model.add(Masking(mask_value=0.))

    if preprocess_name != '':
        # Three preprocessing stages with kernel sizes 8, 5 and 3
        # (previously three copy-pasted stanzas).
        for kernel_size in (8, 5, 3):
            if preprocess_time:
                model.add(seq2tens.layers.Time())
            if preprocess == 'conv':
                model.add(
                    Conv1D(preprocess_size,
                           kernel_size,
                           padding='same',
                           kernel_initializer='he_uniform'))
            else:
                # Consistency fix: the original third 'dense' stage used a
                # bare Dense while the first two used TimeDistributed(Dense).
                # On 3-D input both apply the same Dense to every timestep,
                # so wrapping is behavior-equivalent and uniform.
                model.add(
                    TimeDistributed(
                        Dense(preprocess_size,
                              kernel_initializer='he_uniform')))
            model.add(BatchNormalization(axis=-1))
            model.add(Activation('relu'))

    # Sequence-to-sequence LS2T blocks (all but the last).
    for _ in range(ls2t_depth - 1):
        if ls2t_time:
            model.add(seq2tens.layers.Time())
        if ls2t_diff:
            model.add(seq2tens.layers.Difference())
        model.add(
            seq2tens.layers.LS2T(ls2t_size,
                                 ls2t_order,
                                 return_sequences=True,
                                 recursive_weights=recursive_tensors))
        # Split the flat feature axis into (order, size) to normalize
        # across the order dimension.
        model.add(Reshape((input_shape[0], ls2t_order, ls2t_size)))
        model.add(BatchNormalization(axis=[-2]))
        model.add(Reshape((input_shape[0], ls2t_order * ls2t_size)))

    # Final sequence-to-vector LS2T block.
    if ls2t_time:
        model.add(seq2tens.layers.Time())
    if ls2t_diff:
        model.add(seq2tens.layers.Difference())
    model.add(
        seq2tens.layers.LS2T(ls2t_size,
                             ls2t_order,
                             return_sequences=False,
                             recursive_weights=recursive_tensors))
    model.add(Reshape((ls2t_order, ls2t_size)))
    model.add(BatchNormalization(axis=1))
    model.add(Reshape((ls2t_order * ls2t_size,)))

    model.add(Dense(num_classes, activation='softmax'))

    return model