Example #1
	def fit(self, X):
		n_samples, n_feats = X.shape
		n_train_batches = n_samples // self.batch_size  # whole minibatches per epoch (remainder is dropped)
		v_X = share_data(X)

		self.da_ = DenoisingAutoEncoderFormula(n_visible = n_feats, 
									n_hidden = self.n_hidden, 
									corruption_level = self.corruption_level)
		index = T.lscalar('index')
		cost = self.da_.cost()
		gparams = T.grad(cost, self.da_.params)
		# plain SGD updates: param <- param - learning_rate * gradient
		updates = [(param, param - self.learning_rate*gparam) 
					for (param, gparam) in zip(self.da_.params, gparams)]
		# compile one training step; `givens` substitutes the minibatch slice selected by `index`
		train_model = theano.function(inputs = [index], 
							outputs = cost,
							updates = updates,
							givens = {
								self.da_.X: v_X[index*self.batch_size:(index+1)*self.batch_size]
						})
		for epoch in xrange(self.n_epochs):
			costs = [train_model(i) for i in xrange(n_train_batches)]
			if self.verbose:
				print 'training epoch %d, recall cost %f ' % (
					epoch, np.mean(costs))
		return self
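
Note: every fit and transform in these examples calls a share_data helper that is not part of the snippets. Judging from how its result is sliced inside `givens` and later evaluated with .eval(), it presumably copies the NumPy array into a Theano shared variable in floatX. A minimal sketch under that assumption:

import numpy as np
import theano

def share_data(X):
    # assumed helper: put the data into a Theano shared variable (floatX)
    # so minibatch slices can be taken symbolically through `givens`
    return theano.shared(np.asarray(X, dtype=theano.config.floatX),
                         borrow=True)
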
Example #2
	def fit(self, X):
		n_samples, n_feats = X.shape
		n_train_batches = n_samples // self.batch_size
		v_X = share_data(X)

		self.ca_ = ContractiveAutoEncoderFormula(n_visible = n_feats, 
									n_hidden = self.n_hidden, 
									batch_size = self.batch_size,
									contraction_level = self.contraction_level)
		index = T.lscalar('index')
		cost = self.ca_.cost()
		gparams = T.grad(cost, self.ca_.params)
		updates = [(param, param - self.learning_rate*gparam) 
					for (param, gparam) in zip(self.ca_.params, gparams)]
		train_model = theano.function(inputs = [index], 
							outputs = [T.mean(self.ca_.L_rec), self.ca_.L_jacob],
							updates = updates,
							givens = {
								self.ca_.X: v_X[index*self.batch_size:(index+1)*self.batch_size]
						})
		for epoch in xrange(self.n_epochs):
			recall, jacob = zip(*[train_model(i) for i in xrange(n_train_batches)])
			if self.verbose:
				print 'training epoch %d, recall cost %f, jacobian norm %f ' % (
					epoch, np.mean(recall), np.mean(np.sqrt(jacob))
				)
		return self
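
For orientation, a hypothetical usage sketch of the contractive variant. The class name ContractiveAutoEncoder and its constructor arguments are assumptions inferred from the attributes referenced in fit (n_hidden, batch_size, learning_rate, contraction_level, n_epochs, verbose); only fit returning self and the transform method are shown in the snippets themselves:

import numpy as np

X = np.random.rand(1000, 784).astype('float32')
cae = ContractiveAutoEncoder(n_hidden=100, batch_size=20, learning_rate=0.1,
                             contraction_level=0.1, n_epochs=15, verbose=True)
codes = cae.fit(X).transform(X)   # hidden representation, shape (1000, 100)
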
Example #3
    def fit(self, X):
        n_samples, n_feats = X.shape
        n_train_batches = n_samples // self.batch_size
        v_X = share_data(X)

        self.ca_ = ContractiveAutoEncoderFormula(
            n_visible=n_feats,
            n_hidden=self.n_hidden,
            batch_size=self.batch_size,
            contraction_level=self.contraction_level)
        index = T.lscalar('index')
        cost = self.ca_.cost()
        gparams = T.grad(cost, self.ca_.params)
        updates = [(param, param - self.learning_rate * gparam)
                   for (param, gparam) in zip(self.ca_.params, gparams)]
        train_model = theano.function(
            inputs=[index],
            outputs=[T.mean(self.ca_.L_rec), self.ca_.L_jacob],
            updates=updates,
            givens={
                self.ca_.X:
                v_X[index * self.batch_size:(index + 1) * self.batch_size]
            })
        for epoch in xrange(self.n_epochs):
            recall, jacob = zip(
                *[train_model(i) for i in xrange(n_train_batches)])
            if self.verbose:
                print 'training epoch %d, recall cost %f, jacobian norm %f ' % (
                    epoch, np.mean(recall), np.mean(np.sqrt(jacob)))
        return self
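
The gradient and update construction is the same in every fit above: T.grad differentiates the scalar cost with respect to each shared parameter, and the updates list tells theano.function to overwrite each parameter with param - learning_rate * gradient on every call. A self-contained toy illustration of that pattern (not taken from the repo):

import theano
import theano.tensor as T

w = theano.shared(0.0, name='w')              # a single shared "parameter"
x = T.dscalar('x')
cost = (w * x - 1.0) ** 2                     # toy quadratic cost
step = theano.function([x], cost,
                       updates=[(w, w - 0.1 * T.grad(cost, w))])
for _ in range(100):
    step(2.0)                                 # each call moves w toward 0.5
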
Example #4
    def fit(self, X):
        n_samples, n_feats = X.shape
        n_train_batches = n_samples // self.batch_size
        v_X = share_data(X)

        self.da_ = DenoisingAutoEncoderFormula(
            n_visible=n_feats,
            n_hidden=self.n_hidden,
            corruption_level=self.corruption_level)
        index = T.lscalar('index')
        cost = self.da_.cost()
        gparams = T.grad(cost, self.da_.params)
        updates = [(param, param - self.learning_rate * gparam)
                   for (param, gparam) in zip(self.da_.params, gparams)]
        train_model = theano.function(
            inputs=[index],
            outputs=cost,
            updates=updates,
            givens={
                self.da_.X:
                v_X[index * self.batch_size:(index + 1) * self.batch_size]
            })
        for epoch in xrange(self.n_epochs):
            cost = [train_model(i) for i in xrange(n_train_batches)]
            if self.verbose:
                print 'training epoch %d, recall cost %f ' % (epoch,
                                                              np.mean(cost))
        return self
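
The snippets treat DenoisingAutoEncoderFormula.cost() as a black box. For context, here is a sketch of the cost such a class typically builds: a standard denoising autoencoder with binomial corruption of the input and a cross-entropy reconstruction term. The names W, b_h, b_v, the tied-weight decoder, and the rng argument (a theano.tensor.shared_randomstreams.RandomStreams instance) are assumptions, not taken from the repo:

import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

def denoising_cost_sketch(X, W, b_h, b_v, corruption_level, rng):
    # rng: a RandomStreams(seed) instance supplied by the caller
    # zero out each input unit with probability `corruption_level`
    mask = rng.binomial(size=X.shape, n=1, p=1 - corruption_level,
                        dtype=X.dtype)
    h = T.nnet.sigmoid(T.dot(X * mask, W) + b_h)      # encode corrupted input
    z = T.nnet.sigmoid(T.dot(h, W.T) + b_v)           # decode with tied weights
    # mean cross-entropy between the clean input and its reconstruction
    return T.mean(-T.sum(X * T.log(z) + (1 - X) * T.log(1 - z), axis=1))
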
Example #5
	def transform(self, X):
		v_X = share_data(X)
		# evaluate the symbolic encoding and return it as a NumPy array
		return self.ca_.hidden_value(v_X).eval()
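
transform goes through share_data again and forces evaluation with .eval(), which compiles the symbolic expression and returns a NumPy array of hidden activations. What hidden_value presumably builds is the usual sigmoid encoder; a sketch under that assumption (W and b_h are assumed parameter names):

import theano.tensor as T

def hidden_value_sketch(X, W, b_h):
    # assumed encoder: affine map followed by a logistic nonlinearity
    return T.nnet.sigmoid(T.dot(X, W) + b_h)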