def run_transducer(current_block, transducer_width):
    # `split_logits` and `softmax` are assumed to be defined in the enclosing scope
    # apply softmax on the correct outputs: the first `transducer_width` entries
    # of the selected block, normalized along axis 2
    transducer_out = softmax(
        split_logits[current_block][0:transducer_width], axis=2)
    return transducer_out
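Every example on this page calls a `softmax` helper that is defined elsewhere in its project and not shown here. The sketch below is a minimal stand-in so the snippets can be run; the numerically stable NumPy implementation and the `axis` keyword are illustrative assumptions, not code from any of the original projects.

import numpy as np

def softmax(x, axis=-1):
    # shift by the maximum along `axis` for numerical stability
    shifted = x - np.max(x, axis=axis, keepdims=True)
    exps = np.exp(shifted)
    # normalize so the entries along `axis` sum to 1
    return exps / np.sum(exps, axis=axis, keepdims=True)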
Example #2
def forward(X, W1, b1, W2, b2):
    # hidden layer: affine transform followed by tanh activation
    Z = np.tanh(X.dot(W1) + b1)
    # output layer: softmax class probabilities; the hidden activations are returned too
    return softmax(Z.dot(W2) + b2), Z
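As a usage sketch only (the shapes, the random initialization, and the reliance on the `softmax` stand-in above are assumptions, not part of the original example), the two-layer forward pass can be exercised like this:

import numpy as np

rng = np.random.default_rng(0)
X = rng.standard_normal((4, 3))                    # 4 samples, 3 input features
W1, b1 = rng.standard_normal((3, 5)), np.zeros(5)  # hidden layer with 5 units
W2, b2 = rng.standard_normal((5, 2)), np.zeros(2)  # output layer with 2 classes
probs, Z = forward(X, W1, b1, W2, b2)
print(probs.shape)        # (4, 2)
print(probs.sum(axis=1))  # each row of class probabilities sums to 1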
Example #3

def forward(self, X):
    # Z = relu(X.dot(self.W1) + self.b1)
    # hidden layer: tanh activation (the commented-out line above is a relu
    # alternative left in the original snippet)
    Z = np.tanh(X.dot(self.W1) + self.b1)
    return softmax(Z.dot(self.W2) + self.b2), Z
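This method reads its weights from the instance, but the enclosing class is not part of the snippet. The wrapper below is a hypothetical sketch of what that class might look like; the class name `ANN`, the layer sizes, and the initialization scheme are all assumptions for illustration.

import numpy as np

class ANN:
    def __init__(self, D, M, K):
        # D input features, M hidden units, K output classes (hypothetical sizes)
        self.W1 = np.random.randn(D, M) / np.sqrt(D)
        self.b1 = np.zeros(M)
        self.W2 = np.random.randn(M, K) / np.sqrt(M)
        self.b2 = np.zeros(K)

    def forward(self, X):
        # same forward pass as the snippet above, using the `softmax` stand-in
        Z = np.tanh(X.dot(self.W1) + self.b1)
        return softmax(Z.dot(self.W2) + self.b2), Z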
Example #4

def forward(X, W, b):
    # single-layer classifier: softmax over the affine scores
    return softmax(X.dot(W) + b)
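Again purely as an illustrative sketch (shapes and initialization are assumptions, and the `softmax` stand-in from the top of the page is reused), the single-layer forward pass returns class probabilities, and hard predictions follow from an argmax:

import numpy as np

rng = np.random.default_rng(1)
X = rng.standard_normal((6, 10))   # 6 samples, 10 features
W = rng.standard_normal((10, 3))   # 3 classes
b = np.zeros(3)
probs = forward(X, W, b)           # shape (6, 3); each row sums to 1
y_pred = np.argmax(probs, axis=1)  # predicted class index per sample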
Example #5
def forward(self, X):
    # single-layer classifier as a method; W and b live on the instance
    return softmax(X.dot(self.W) + self.b)