Example #1
    def __init__(self,
                 scope_name,
                 size,
                 param_attr=None,
                 bias_attr=None,
                 is_reverse=False,
                 gate_activation='sigmoid',
                 candidate_activation='tanh',
                 h_0=None,
                 origin_mode=False,
                 init_size=None):
        super(DynamicGRU, self).__init__(scope_name)

        # GRUUnit's `size` argument is 3 * hidden size (update gate, reset gate, candidate).
        self.gru_unit = GRUUnit(
            self.full_name(),
            size * 3,
            param_attr=param_attr,
            bias_attr=bias_attr,
            activation=candidate_activation,
            gate_activation=gate_activation,
            origin_mode=origin_mode)

        self.size = size
        self.h_0 = h_0
        self.is_reverse = is_reverse
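
The constructor above only wires up the GRU cell; a minimal sketch of a matching forward pass is shown below. It assumes `import paddle.fluid as fluid`, that `inputs` has shape `[batch, seq_len, size * 3]`, and that `h_0` was provided; the per-step slicing and reshaping is illustrative, not copied from the original example.

    def forward(self, inputs):
        # Walk the sequence one step at a time, feeding each slice into the GRU cell.
        hidden = self.h_0
        res = []
        for i in range(inputs.shape[1]):
            if self.is_reverse:
                i = inputs.shape[1] - 1 - i
            step = fluid.layers.slice(inputs, axes=[1], starts=[i], ends=[i + 1])
            step = fluid.layers.reshape(step, [-1, step.shape[2]])
            # GRUUnit returns (hidden, reset_hidden_pre, gate); only hidden is carried forward.
            hidden, _, _ = self.gru_unit(step, hidden)
            res.append(fluid.layers.reshape(hidden, [-1, 1, hidden.shape[1]]))
        if self.is_reverse:
            res = res[::-1]
        return fluid.layers.concat(res, axis=1)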
Example #2
    def __init__(self, decoder_size, num_classes):
        super(GRUDecoderWithAttention, self).__init__()
        self.simple_attention = SimpleAttention(decoder_size)

        # Both projections map into the 3 * decoder_size space expected by GRUUnit.
        self.fc_1_layer = Linear(
            Config.encoder_size * 2, decoder_size * 3, bias_attr=False)
        self.fc_2_layer = Linear(
            decoder_size, decoder_size * 3, bias_attr=False)
        self.gru_unit = GRUUnit(
            size=decoder_size * 3, param_attr=None, bias_attr=None)
        self.out_layer = Linear(
            decoder_size, num_classes + 2, bias_attr=None, act='softmax')

        self.decoder_size = decoder_size
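
For context, one decoding step with these layers could look roughly like the sketch below. The `SimpleAttention` call signature (`encoder_vec, encoder_proj, hidden_mem`) and the single-step `step` interface are assumptions for illustration, not part of the original listing.

    def step(self, current_word, encoder_vec, encoder_proj, hidden_mem):
        # Attend over the encoder states, combine the context with the current word
        # embedding, and advance the GRU state by one step.
        context = self.simple_attention(encoder_vec, encoder_proj, hidden_mem)
        decoder_inputs = self.fc_1_layer(context) + self.fc_2_layer(current_word)
        hidden, _, _ = self.gru_unit(decoder_inputs, hidden_mem)
        return self.out_layer(hidden), hidden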
Example #3
    def __init__(self, scope_name, decoder_size, num_classes):
        super(GRUDecoderWithAttention, self).__init__(scope_name)
        self.simple_attention = SimpleAttention(self.full_name(), decoder_size)

        self.fc_1_layer = FC(self.full_name(),
                             size=decoder_size * 3,
                             bias_attr=False)
        self.fc_2_layer = FC(self.full_name(),
                             size=decoder_size * 3,
                             bias_attr=False)
        self.gru_unit = GRUUnit(
            self.full_name(),
            size=decoder_size * 3,
            param_attr=None,
            bias_attr=None)
        self.out_layer = FC(self.full_name(),
                            size=num_classes + 2,
                            bias_attr=None,
                            act='softmax')

        self.decoder_size = decoder_size
Example #4
    def __init__(self, name_scope, size):
        super(DynamicGRU, self).__init__(name_scope)

        self.gru_unit = GRUUnit(self.full_name(), size * 3)