Example #1
    def call(self, x):
        # store the whole sequence so we can "attend" to it at each timestep
        self.x_seq = x

        self._uxpb = _time_distributed_dense(self.x_seq,
                                             self.U_a,
                                             b=self.b_a,
                                             input_dim=self.input_dim,
                                             timesteps=self.timesteps,
                                             output_dim=self.units)

        return super(AttentionDecoder, self).call(x)
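
For context, `_time_distributed_dense` is a private helper from older Keras versions (copied into many attention implementations) that applies the same dense projection independently at every timestep by folding the time axis into the batch axis. A minimal NumPy sketch of the equivalent computation; the function and variable names here are illustrative, not the Keras backend code:

    import numpy as np

    def time_distributed_dense_sketch(x, w, b=None):
        # x: (batch, timesteps, input_dim), w: (input_dim, output_dim)
        batch, timesteps, input_dim = x.shape
        out = x.reshape(-1, input_dim) @ w        # fold time into batch, project once
        if b is not None:
            out = out + b                         # broadcast bias over all timesteps
        return out.reshape(batch, timesteps, -1)  # restore (batch, timesteps, output_dim)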
Example #2
    def call(self, x):
        # store the whole sequence so we can "attend" to it at each timestep
        self.x_seq = x

        # apply a dense layer over the time dimension of the sequence;
        # do it here because it doesn't depend on any previous steps,
        # therefore we can save computation time:
        self._uxpb = _time_distributed_dense(self.x_seq, self.U_a, b=self.b_a,
                                             input_dim=self.input_dim,
                                             timesteps=self.timesteps,
                                             output_dim=self.units)

        return super(AttentionDecoder, self).call(x)
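
The saving the comment refers to comes from the Bahdanau-style energy e_tj = v_a^T tanh(W_a s_{t-1} + U_a h_j + b_a): the term U_a h_j + b_a depends only on the encoder sequence, not on the decoder state, so it can be computed once in `call` and reused at every decoding step. A hedged sketch of how the precomputed `_uxpb` would enter the per-step attention weights; the `v_a`/`W_a` names follow the layer's usual weight naming, and the real per-step logic lives in the layer's `step` method, which these examples do not show:

    import numpy as np

    def attention_weights_sketch(s_prev, uxpb, w_a, v_a):
        # s_prev: (batch, units) previous decoder state s_{t-1}
        # uxpb:   (batch, timesteps, units) precomputed U_a h_j + b_a
        wa_s = (s_prev @ w_a)[:, None, :]        # (batch, 1, units), broadcast over time
        e = np.tanh(wa_s + uxpb) @ v_a           # energies e_tj, shape (batch, timesteps)
        e -= e.max(axis=1, keepdims=True)        # stabilize the softmax
        a = np.exp(e)
        return a / a.sum(axis=1, keepdims=True)  # attention weights over the timesteps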
Example #3
File: selftatt.py  Project: sersoage/NLP
    def call(self, x):

        self.x_seq = x

        # apply a dense layer over the time dimension of the sequence
        self._uxpb = _time_distributed_dense(self.x_seq,
                                             self.U_a,
                                             b=self.b_a,
                                             input_dim=self.input_dim,
                                             timesteps=self.timesteps,
                                             output_dim=self.units)

        return super(AttentionDecoder, self).call(x)
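
All three snippets follow the same pattern: precompute the input-dependent term once, then defer to the parent recurrent layer, whose scan loop calls the layer's `step` function at each timestep. For reference, a hypothetical wiring of such a decoder, assuming the common `AttentionDecoder(units, output_dim)` constructor these examples appear to come from; the import path is an assumption and varies by project:

    from keras.models import Sequential
    from keras.layers import LSTM
    from models.custom_recurrents import AttentionDecoder  # assumed path, adjust per project

    timesteps, features, n_labels = 20, 64, 10   # illustrative dimensions

    model = Sequential()
    model.add(LSTM(150, input_shape=(timesteps, features), return_sequences=True))
    model.add(AttentionDecoder(150, n_labels))   # call() above runs when the layer is applied
    model.compile(loss='categorical_crossentropy', optimizer='adam')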