Example #1
    def _mean_visibles_theano(self, h, v):
        """
        Computes the probabilities P(v=1|h).
        
        Parameters
        ----------
        h : array-like, shape (n_samples, n_groups, n_components)
            values of the hidden layer.
        v : The input original Visible Nodes
        Returns
        -------
        v: array-like,shape (n_samples, n_features)        
        """
        # Sum the 'full' convolutions of every hidden group with its filter,
        # adding the visible bias for each group.
        activations = np.array([
            convTheano(h[:, i, :], self.components_[i], border='full') +
            self.intercept_visible_ for i in range(self.n_groups)
        ]).sum(axis=0)

        visibles = np.array(v)
        windowSize = self.window_size
        visualSize = int(sqrt(v.shape[1]))
        # Side length of the interior block that gets overwritten below.
        innerSize = visualSize - 2 * windowSize + 2
        n_sample = v.shape[0]
        innerV = logistic_sigmoid(activations)
        # Keep only the innerSize x innerSize interior of the reconstruction...
        innerV = innerV.reshape(
            n_sample, visualSize,
            visualSize)[:, windowSize - 1:visualSize - windowSize + 1,
                        windowSize - 1:visualSize - windowSize + 1]
        visibles = visibles.reshape(n_sample, visualSize, visualSize)

        # ...and write it over the corresponding block of the original
        # visibles, so the border keeps its input values.
        visibles[:, windowSize - 1:visualSize - windowSize + 1,
                 windowSize - 1:visualSize - windowSize + 1] = innerV
        visibles = visibles.reshape(n_sample, -1)

        return visibles
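
For reference, a minimal NumPy/SciPy sketch of the same interior-overwrite reconstruction is shown below, with scipy.signal.convolve2d standing in for convTheano(..., border='full') and scipy.special.expit for logistic_sigmoid. The helper name mean_visibles_numpy and the argument layout are illustrative assumptions, not part of the original code.

import numpy as np
from scipy.signal import convolve2d
from scipy.special import expit  # stand-in for logistic_sigmoid


def mean_visibles_numpy(h, v, filters, visible_bias, window_size):
    """Sketch of the interior-overwrite reconstruction with plain NumPy/SciPy."""
    n_samples = v.shape[0]
    visual_size = int(np.sqrt(v.shape[1]))
    hidden_size = visual_size - window_size + 1
    lo, hi = window_size - 1, visual_size - window_size + 1

    visibles = v.reshape(n_samples, visual_size, visual_size).copy()
    for s in range(n_samples):
        # Sum the 'full' convolutions of each hidden group with its filter;
        # the bias is added once per group, mirroring the snippet above.
        act = sum(
            convolve2d(h[s, g].reshape(hidden_size, hidden_size),
                       filters[g].reshape(window_size, window_size),
                       mode='full') + visible_bias
            for g in range(len(filters)))
        # Overwrite only the interior block; the border keeps the input values.
        visibles[s, lo:hi, lo:hi] = expit(act)[lo:hi, lo:hi]
    return visibles.reshape(n_samples, -1)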
Example #2
def testConvTheanoMulti():
    # Benchmark: per-image convTheano calls in a loop versus a single
    # batched call over all images and kernels.
    visibleNodes = np.ones((3, 28 * 28))
    h = np.arange(5 * 24 * 24)
    h = h.reshape(5, 24 * 24)
    h = np.array((h, 2 * h, 3 * h))
    current = time.time()
    for _ in range(1000):
        a = np.array(
            [convTheano(visibleNodes[i, :], h[i, :, :]) for i in range(3)])
    print(time.time() - current)
    current = time.time()
    for _ in range(1000):
        b = convTheano(visibleNodes, h.reshape(3 * 5, 24 * 24))
    print(time.time() - current)
    return a, b
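
The snippets do not show convTheano itself; judging by how its results are indexed (shape (n_images, n_kernels, out_features) for multi-kernel calls), a plausible NumPy/SciPy reference looks like the sketch below. The name conv_reference is hypothetical, and whether convTheano flips the kernel (a true convolution, as Theano's conv2d does) is an assumption.

import numpy as np
from scipy.signal import convolve2d


def conv_reference(images, kernels, border='valid'):
    """Hypothetical reference for convTheano: convolve every flattened image
    with every flattened kernel; returns shape (n_images, n_kernels, -1)."""
    img_size = int(np.sqrt(images.shape[1]))
    ker_size = int(np.sqrt(kernels.shape[1]))
    return np.array([
        [convolve2d(img.reshape(img_size, img_size),
                    ker.reshape(ker_size, ker_size),
                    mode=border).ravel()
         for ker in kernels]
        for img in images])

Under this reading, b above appears to hold every image-kernel pair, while a picks out only the pairs where image i meets its own five kernels.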
Example #3
    def _mean_hiddens_theano(self, v):
        """Computes the probabilities P(h=1|v).
        
        Parameters
        ----------
        v : array-like, shape (n_samples, n_features)
            Values of the visible layer.

        Returns
        -------
        h : array-like, shape (n_samples, n_groups, n_components)
            Corresponding mean field values for the hidden layer.
            
        """
        # Convolve the visibles with every filter group (convTheano's default
        # border mode).
        activationsWithoutIntercept = convTheano(v, self.components_)
        # Add each group's hidden bias to its activation map.  Note that
        # stacking over groups gives shape (n_groups, n_samples, n_components);
        # the reshapes below relabel this as (n_samples, n_groups,
        # n_components), which only matches the documented layout when
        # n_samples == 1 or n_groups == 1 (otherwise a transpose is needed).
        activations = np.array([
            activationsWithoutIntercept[:, i, :] + self.intercept_hidden_[i]
            for i in range(self.n_groups)
        ])
        n_samples = v.shape[0]
        return logistic_sigmoid(
            activations.reshape(n_samples * self.n_groups,
                                self.n_components)).reshape(
                                    n_samples, self.n_groups,
                                    self.n_components)
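
The reshape round-trip above can be avoided by adding the per-group bias with broadcasting, which also keeps the documented (n_samples, n_groups, n_components) axis order explicit. The sketch below assumes intercept_hidden_ is indexable per group (a vector or an (n_groups, n_components) array); the helper name is hypothetical, and scipy.special.expit stands in for logistic_sigmoid, which it matches numerically.

import numpy as np
from scipy.special import expit  # numerically identical to logistic_sigmoid


def mean_hiddens_broadcast(activations_without_intercept, intercept_hidden):
    """Sketch: add each group's bias by broadcasting, then squash elementwise.

    activations_without_intercept : (n_samples, n_groups, n_components)
    intercept_hidden              : (n_groups,) or (n_groups, n_components)
    """
    bias = np.asarray(intercept_hidden)
    if bias.ndim == 1:
        bias = bias[None, :, None]   # (n_groups,) -> broadcast over samples/components
    else:
        bias = bias[None, :, :]      # (n_groups, n_components) -> broadcast over samples
    # The (n_samples, n_groups, n_components) axis order is preserved
    # throughout, with no reshape round-trip.
    return expit(activations_without_intercept + bias)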
Example #4
def testConvTheano(border='valid'):
    # Two 6x6 checkerboard images convolved with a 3x3 all-ones kernel and a
    # 3x3 delta kernel.
    a = np.array((1, 0, 1, 0, 1, 0))
    b = np.array((0, 1, 0, 1, 0, 1))
    A = np.array((a, b, a, b, a, b))
    B = np.array((b, a, b, a, b, a))
    c = np.array((1, 1, 1))
    C = np.array((c, c, c))
    D = np.zeros((3, 3))
    D[1, 1] = 1
    Z = np.array((A, B))
    # print(Z.shape)
    Y = np.array((C, D))
    # print(Y.shape)
    return convTheano(Z.reshape(2, -1), Y.reshape(2, -1), border=border)
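
Under the same assumption about convTheano as above (every image paired with every kernel, kernel flipped as in a true convolution), the expected result can be computed directly with SciPy; the helper name is hypothetical.

import numpy as np
from scipy.signal import convolve2d


def expected_testConvTheano(border='valid'):
    a = np.array((1, 0, 1, 0, 1, 0))
    b = np.array((0, 1, 0, 1, 0, 1))
    A = np.array((a, b, a, b, a, b))     # 6x6 checkerboard
    B = np.array((b, a, b, a, b, a))     # complementary checkerboard
    C = np.ones((3, 3))                  # all-ones kernel: 3x3 window sums
    D = np.zeros((3, 3))
    D[1, 1] = 1                          # delta kernel: centre crop in 'valid' mode
    return np.array([[convolve2d(img, ker, mode=border).ravel()
                      for ker in (C, D)]
                     for img in (A, B)])

With the all-ones kernel each 'valid' output entry is a 3x3 window sum (4 or 5 on a checkerboard); with the delta kernel it is the flattened 4x4 centre crop of the image.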
Example #5
    def _gradience_theano(self, v, mean_h):
        """Compute the weight gradient given v and the hidden means.

        This is used to obtain Grad0k; in that case the hidden means passed
        in should be the initial probabilities Ph0k.

        Parameters
        ----------
        v : array-like, shape (n_samples, n_features)
            Values of the visible layer.
        mean_h : array-like, shape (n_samples, n_groups, n_components)
            Mean-field values of the hidden layer.

        Returns
        -------
        Grad : array-like, shape (n_groups, weight_windowSize * weight_windowSize)
        """
        #weights = np.array([convTheano(v[i,:],mean_h[i,:,:]) for i in range(v.shape[0])]).sum(axis = 0)
        # Convolve every visible sample with every hidden map in one batched
        # call; after the reshape, axis 0 indexes visible samples and axis 1
        # indexes hidden samples.
        tempWeights = convTheano(
            v,
            mean_h.reshape(v.shape[0] * self.n_groups,
                           self.n_components)).reshape(v.shape[0], v.shape[0],
                                                       self.n_groups, -1)
        #tempWeights = np.array([tempWeights[i,i,:,:] for i in range(v.shape[0])])
        # Only the first sample's slice is used here; the commented-out line
        # above would instead pair each sample with its own hidden maps.
        tempWeights = tempWeights[0]
        weights = tempWeights.sum(axis=0)
        return weights
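
The commented-out lines describe a per-sample pairing (sample i's visibles with sample i's hidden means). A plain NumPy/SciPy sketch of that pairing is shown below, with convolve2d standing in for convTheano and a hypothetical helper name and argument layout.

import numpy as np
from scipy.signal import convolve2d


def gradient_loop(v, mean_h, visual_size, hidden_size):
    """Sketch of the per-sample pairing: convolve sample i's visibles with
    sample i's hidden means only, then sum over samples.

    v      : (n_samples, visual_size**2)
    mean_h : (n_samples, n_groups, hidden_size**2)
    Returns (n_groups, window_size**2), window_size = visual_size - hidden_size + 1.
    """
    n_samples, n_groups = mean_h.shape[:2]
    window_size = visual_size - hidden_size + 1
    grad = np.zeros((n_groups, window_size * window_size))
    for i in range(n_samples):
        img = v[i].reshape(visual_size, visual_size)
        for g in range(n_groups):
            hid = mean_h[i, g].reshape(hidden_size, hidden_size)
            # A 'valid' convolution of the image with its hidden map gives one
            # window_size x window_size patch per group.
            grad[g] += convolve2d(img, hid, mode='valid').ravel()
    return grad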