Example #1
    def __init__(self,
                 num_capsule,
                 dim_capsule,
                 routings=3,
                 kernel_size=(9, 1),
                 share_weights=True,
                 activation='default',
                 **kwargs):
        # num_capsule / dim_capsule: number of output capsules and their size
        # routings: iterations of dynamic routing
        super(Capsule_bojone, self).__init__(**kwargs)
        self.num_capsule = num_capsule
        self.dim_capsule = dim_capsule
        self.routings = routings
        self.kernel_size = kernel_size
        self.share_weights = share_weights
        # 'default' selects the capsule squash nonlinearity;
        # anything else is treated as a regular Keras activation
        if activation == 'default':
            self.activation = squash_bojone
        else:
            self.activation = L.Activation(activation)
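
The constructor falls back to squash_bojone, whose body is not shown in this listing. Assuming it follows the standard capsule "squash" from Sabour et al. (scale each vector by ||x|| / (1 + ||x||^2) so norms land in [0, 1) while the direction is preserved), a minimal sketch could be:

from keras import backend as K

def squash_bojone(x, axis=-1):
    # squared norm along the capsule dimension; epsilon guards x = 0
    s_squared_norm = K.sum(K.square(x), axis, keepdims=True) + K.epsilon()
    # ||x|| / (1 + ||x||^2): keeps direction, bounds the norm below 1
    scale = K.sqrt(s_squared_norm) / (1.0 + s_squared_norm)
    return scale * x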
Example #2
    def build_model(self, inputs, outputs):
        # pick the recurrent cell class from its configured name
        if self.rnn_unit == "LSTM":
            layer_cell = L.LSTM
        elif self.rnn_unit == "CuDNNLSTM":
            layer_cell = L.CuDNNLSTM
        elif self.rnn_unit == "CuDNNGRU":
            layer_cell = L.CuDNNGRU
        else:
            layer_cell = L.GRU

        x = L.Activation(self.activate_mid)(outputs)
        # spatial dropout over the embedding: drops whole feature channels
        x = L.SpatialDropout1D(self.dropout_spatial)(x)

        # self.rnn_units (an integer hidden width per direction) is assumed
        # here; self.rnn_unit above is the cell-type name, not a layer size.
        # CuDNN cells are tanh-only and reject an `activation` kwarg, so it
        # is passed only to the plain LSTM/GRU cells.
        rnn_kwargs = dict(units=self.rnn_units,
                          return_sequences=True,
                          kernel_regularizer=keras.regularizers.l2(self.l2),
                          recurrent_regularizer=keras.regularizers.l2(self.l2))
        if layer_cell in (L.LSTM, L.GRU):
            rnn_kwargs['activation'] = 'relu'

        lstm_0_output = L.Bidirectional(layer_cell(**rnn_kwargs),
                                        name="bi_lstm_0")(x)
        lstm_1_output = L.Bidirectional(layer_cell(**rnn_kwargs),
                                        name="bi_lstm_1")(lstm_0_output)
        x = L.Concatenate()([lstm_1_output, lstm_0_output, x])
        x = AttentionWeightedAverage(name='attlayer',
                                     return_attention=False)(x)
        x = L.Dropout(self.dropout)(x)
        x = L.Flatten()(x)
        # mid dense layer; width = max(label, 64), capped at embed_size
        x = L.Dense(units=min(max(self.label, 64), self.embed_size),
                    activation=self.activate_mid)(x)
        x = L.Dropout(self.dropout)(x)
        # final dense layer: project down to the label dimension
        self.outputs = L.Dense(units=self.label,
                               activation=self.activate_end)(x)
        self.model = M.Model(inputs=inputs, outputs=self.outputs)
        self.model.summary(line_length=132)
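
build_model also depends on a custom AttentionWeightedAverage layer that the listing does not include. As an assumption (the name matches the DeepMoji-style attention pooling layer), a minimal sketch would learn one score per timestep, softmax the scores over time, and average the sequence with them; masking is omitted here:

from keras import backend as K
from keras import initializers
from keras.layers import Layer

class AttentionWeightedAverage(Layer):
    # pools (batch, time, features) into (batch, features) via a learned
    # softmax attention over timesteps; masking is omitted in this sketch
    def __init__(self, return_attention=False, **kwargs):
        self.return_attention = return_attention
        super(AttentionWeightedAverage, self).__init__(**kwargs)

    def build(self, input_shape):
        # one scoring weight per feature channel
        self.W = self.add_weight(name='att_W',
                                 shape=(int(input_shape[2]), 1),
                                 initializer=initializers.get('uniform'))
        super(AttentionWeightedAverage, self).build(input_shape)

    def call(self, x):
        logits = K.squeeze(K.dot(x, self.W), axis=-1)        # (batch, time)
        weights = K.softmax(logits)                          # attention over time
        pooled = K.sum(x * K.expand_dims(weights), axis=1)   # (batch, features)
        if self.return_attention:
            return [pooled, weights]
        return pooled

    def compute_output_shape(self, input_shape):
        # shape given for return_attention=False, as used in build_model
        return (input_shape[0], input_shape[2])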