# Example 1
def cnn_lstm_attention_model(step, input_dim):
    """Build a Conv1D -> LSTM -> attention binary classifier.

    Args:
        step: number of time steps per input sequence.
        input_dim: number of features at each time step.

    Returns:
        An uncompiled Keras Model mapping a (step, input_dim) sequence
        to a single sigmoid probability.
    """
    seq_input = Input(shape=(step, input_dim))

    # kernel_size=1 makes the convolution a per-timestep feature mixer
    # ahead of the recurrent layer.
    features = Conv1D(filters=64, kernel_size=1, activation='elu')(seq_input)
    print("Conv1D 111", K.int_shape(features))
    features = Dropout(0.3)(features)

    # return_sequences=True keeps every time step so the attention
    # block can weight across the whole sequence.
    recurrent = LSTM(64, return_sequences=True)(features)
    recurrent = Dropout(0.2)(recurrent)
    print("lstm out", K.int_shape(recurrent))

    # attention_3d_block2 is a project helper defined elsewhere in this
    # file/project; presumably it re-weights the LSTM outputs over time.
    attended = attention_3d_block2(recurrent)
    attended = Flatten()(attended)

    prob = Dense(1, activation='sigmoid')(attended)
    return Model(inputs=[seq_input], outputs=prob)
# Example 2
def cnn_lstm_step4(step, input_dim):
    """Build a two-stage Conv1D -> LSTM -> attention binary classifier.

    Args:
        step: number of time steps per input sequence.
        input_dim: number of features at each time step.

    Returns:
        An uncompiled Keras Model mapping a (step, input_dim) sequence
        to a single sigmoid probability.
    """
    seq_input = Input(shape=(step, input_dim))

    # Two stacked pointwise convolutions (kernel_size=1) mix features
    # per time step: 64 channels, then a 32-channel bottleneck.
    features = Conv1D(filters=64, kernel_size=1, activation='elu')(seq_input)
    features = Conv1D(filters=32, kernel_size=1, activation='elu')(features)
    print("Conv1D 111", K.int_shape(features))
    features = Dropout(0.3)(features)

    # Full sequence output is required by the attention block below.
    recurrent = LSTM(64, return_sequences=True, activation='elu')(features)
    recurrent = Dropout(0.2)(recurrent)
    print("lstm out", K.int_shape(recurrent))

    # attention_3d_block2 is a project helper defined elsewhere;
    # presumably it re-weights the LSTM outputs over time steps.
    attended = attention_3d_block2(recurrent)
    attended = Flatten()(attended)

    prob = Dense(1, activation='sigmoid')(attended)
    return Model(inputs=[seq_input], outputs=prob)