def forward(network, x):
    """Run a forward pass through a 3-layer network; the output layer is linear."""
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']

    a1 = np.dot(x, W1) + b1   # first hidden layer pre-activation
    z1 = sigmoid(a1)
    a2 = np.dot(z1, W2) + b2  # second hidden layer pre-activation
    z2 = sigmoid(a2)
    a3 = np.dot(z2, W3) + b3  # output layer pre-activation
    y = identity(a3)          # identity activation: output passes through unchanged
    return y
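For context, a minimal sketch that makes the snippet above runnable. The `sigmoid` and `identity` helpers, the layer sizes, and the random weights are assumptions for illustration, not part of the original source:

import numpy as np

def sigmoid(a):
    # logistic activation, applied element-wise
    return 1.0 / (1.0 + np.exp(-a))

def identity(a):
    # identity activation: return pre-activations unchanged
    return a

# hypothetical 2-3-2-1 network; the weights here are random placeholders
rng = np.random.default_rng(0)
network = {
    'W1': rng.normal(size=(2, 3)), 'b1': np.zeros(3),
    'W2': rng.normal(size=(3, 2)), 'b2': np.zeros(2),
    'W3': rng.normal(size=(2, 1)), 'b3': np.zeros(1),
}
y = forward(network, np.array([1.0, 0.5]))  # -> array of shape (1,)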
Example #2
def predict(network, x):
    """Forward pass for classification: same layers as above, but softmax on the output."""
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']

    a1 = np.dot(x, W1) + b1
    z1 = sigmoid(a1)

    a2 = np.dot(z1, W2) + b2
    z2 = sigmoid(a2)

    a3 = np.dot(z2, W3) + b3
    y = softmax(a3)  # normalize the outputs into class probabilities

    return y
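The `softmax` helper is not shown in the snippet; a common, numerically stable definition (an assumption here, not necessarily the original author's code) shifts by the maximum before exponentiating so that `np.exp` cannot overflow:

import numpy as np

def softmax(a):
    # subtracting the max leaves the result unchanged but keeps exp() finite
    c = np.max(a)
    exp_a = np.exp(a - c)
    return exp_a / np.sum(exp_a)  # probabilities summing to 1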
Example #3
    def get_probability_visible(self, visibles, hidden):
        """
        Returns the probability of setting the visible units to 1, given
        the hidden units and the history of visible units.

        Assumes visibles[0] = V{t-1}, visibles[1] = V{t-2}, etc.
        Then hidden = H{t}.
        """

        # P(V_t = 1 | H_t, history) = sigmoid(W H_t + B_V(history))
        B_V = self.get_bias_visible(visibles)
        return sigmoid(np.dot(self.weights, hidden) + B_V)
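The `get_bias_visible` helper lives outside this snippet. In a conditional (temporal) RBM, the dynamic visible bias is typically the static bias plus autoregressive contributions from the visible history; a sketch under that assumption, where the attribute names `bias_visible` and `autoregressive_weights` are hypothetical:

    def get_bias_visible(self, visibles):
        # dynamic bias: B_V = b + sum_k A_k V{t-k}
        # (CRBM-style decomposition; the attribute names are assumptions)
        B_V = self.bias_visible.copy()
        for A_k, v_k in zip(self.autoregressive_weights, visibles):
            B_V += np.dot(A_k, v_k)
        return B_V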
Example #4
    def get_probability_hidden(self, visibles, hiddens):
        """
        Returns the probability of setting the hidden units to 1, given the
        history of hidden and visible units.

        Assumes visibles[0] = V{t}, visibles[1] = V{t-1}, ...
        and hiddens[0] = H{t-1}, hiddens[1] = H{t-2}, ...

        visibles should have one more entry than hiddens.
        """

        # P(H_t = 1 | V_t, history) = sigmoid(W' V_t + B_H(V{t-m...t-1}, H{t-m...t-1}))
        B_H = self.get_bias_hidden(visibles[1:], hiddens)
        return sigmoid(np.dot(self.weights.transpose(), visibles[0]) + B_H)
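As with the visible case, `get_bias_hidden` is external to the snippet. Under the same conditional-RBM reading, the dynamic hidden bias would combine the static hidden bias with weighted contributions from both the visible and hidden histories; the attribute names below are hypothetical:

    def get_bias_hidden(self, visibles, hiddens):
        # dynamic bias: B_H = c + sum_k B_k V{t-k} + sum_k D_k H{t-k}
        # (CRBM-style decomposition; the attribute names are assumptions)
        B_H = self.bias_hidden.copy()
        for B_k, v_k in zip(self.visible_to_hidden_weights, visibles):
            B_H += np.dot(B_k, v_k)
        for D_k, h_k in zip(self.hidden_to_hidden_weights, hiddens):
            B_H += np.dot(D_k, h_k)
        return B_H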