Example #1
    def testGradient(self, X, y):
        '''
		Tests the analytical gradient computation by comparing it with the numerical gradients

		Arguments
		X		: data matrix in the form [input dim., number of samples]
		y		: labels in the form [1, number of samples]
		
		Returns
		result	: 0 if passed, -1 if failed
		'''
        assert self.isInitialized, 'ERROR:DeepNetwork:testGradient: The instance is not properly initialized'

        if self.debug:
            print 'DEBUG:DeepNetwork:testGradient: Testing gradient computation...'

        result = 0

        theta_list = self.getNetworkParameters()
        theta = self.unstackParameters(theta_list)

        grad = self.computeGradient(theta, X, y)

        numGrad = AuxFunctions.computeNumericalGradient(func=self.computeCost,
                                                        params=theta,
                                                        args=(X, y))

        errorGrad = np.sqrt(np.sum((grad - numGrad)**2))

        if errorGrad < 1e-4:
            if self.debug:
                print 'DEBUG:DeepNetwork:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:DeepNetwork:testGradient:Gradient check PASSED!'
                print

            result = 0

        else:
            if self.debug:
                print 'DEBUG:DeepNetwork:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:DeepNetwork:testGradient:Gradient check FAILED!'
                print

            result = -1

        return result
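All of the examples in this listing delegate the numerical side of the check to AuxFunctions.computeNumericalGradient, whose implementation is not shown. Below is a minimal sketch of what such a helper typically looks like, assuming a central-difference scheme, a flat 1-D float array of parameters, and the call signature used above (func, params, args); the actual AuxFunctions module may differ in details.

import numpy as np

def computeNumericalGradient(func, params, args, epsilon=1e-4):
    # Central-difference approximation (sketch, not the original AuxFunctions code):
    # numGrad[i] ~ (J(theta + eps*e_i) - J(theta - eps*e_i)) / (2*eps)
    numGrad = np.zeros_like(params)
    perturb = np.zeros_like(params)
    for i in range(params.size):
        perturb[i] = epsilon
        costPlus = func(params + perturb, *args)    # e.g. computeCost(theta, X, y)
        costMinus = func(params - perturb, *args)
        numGrad[i] = (costPlus - costMinus) / (2.0 * epsilon)
        perturb[i] = 0.0
    return numGrad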
Example #2
    def testGradient(self, X, y):
        '''
		Tests the analytical gradient computation by comparing it with the numerical gradients

		Arguments
		X		: data matrix in the form [number of parameters, number of samples]
		y		: labels in the form [1, number of samples]
		
		Returns
		result	: 0 if passed, -1 if failed
		'''
        assert self.isInitialized, 'ERROR:Linreg:testGradient: The instance is not properly initialized'
        assert X.shape[0] == self.nParams, \
            'ERROR:Linreg:testGradient: Dimensions of given data do not match with the number of parameters'

        if self.debug:
            print 'DEBUG:Linreg:testGradient: Testing gradient computation... '

        result = 0

        grad = self.computeGradient(self.theta, X, y)

        numGrad = AuxFunctions.computeNumericalGradient(func=self.computeCost,
                                                        params=self.theta,
                                                        args=(X, y))

        errorGrad = np.sqrt(np.sum((grad - numGrad)**2))

        if errorGrad < 1e-4:
            if self.debug:
                print 'DEBUG:Linreg:testGradient: Gradient error: ', errorGrad
                print 'DEBUG:Linreg:testGradient: Gradient check PASSED!'
                print

            result = 0
        else:
            if self.debug:
                print 'DEBUG:Linreg:testGradient: Gradient error: ', errorGrad
                print 'DEBUG:Linreg:testGradient: Gradient check FAILED!'
                print

            result = -1

        return result
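A quick way to exercise testGradient is to run it on a tiny random problem. The constructor call below is hypothetical (the actual Linreg signature is not shown in these examples), but the array shapes follow the docstring: X is [number of parameters, number of samples] and y is [1, number of samples].

import numpy as np

nParams, nSamples = 5, 20
X = np.random.randn(nParams, nSamples)
y = np.random.randn(1, nSamples)

model = Linreg(nParams, debug=True)  # hypothetical constructor signature
assert model.testGradient(X, y) == 0, 'Analytical and numerical gradients disagree'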
Example #3
	def testGradient(self, X, y):
		'''
		Tests the analytical gradient computation by comparing it with the numerical gradients

		Arguments
		X		: data matrix in the form [input dim., number of samples]
		y		: labels in the form [1, number of samples]
		
		Returns
		result	: 0 if passed, -1 if failed
		'''
		assert self.isInitialized, 'ERROR:CNN:testGradient: The instance is not properly initialized'
		
		if self.debug: print 'DEBUG:CNN:testGradient: Testing gradient computation...'
		
		result = 0

		weights, biases = self.getNetworkParameters()

		params = self.rollParameters(weights, biases)

		grad = self.computeGradient(params, X, y)

		numGrad = AuxFunctions.computeNumericalGradient(func=self.computeCost, params=params, args=(X, y))

		errorGrad = np.sqrt(np.sum((grad - numGrad)**2))
		
		if errorGrad < 1e-4:
			if self.debug:
				print 'DEBUG:CNN:testGradient:Gradient error: ', errorGrad
				print 'DEBUG:CNN:testGradient:Gradient check PASSED!'
				print

			result = 0
		else:
			if self.debug:
				print 'DEBUG:CNN:testGradient:Gradient error: ', errorGrad
				print 'DEBUG:CNN:testGradient:Gradient check FAILED!'
				print

			result = -1
			
		return result
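Note that this kind of check is expensive: a finite-difference approximation evaluates the cost at least once per parameter (twice per parameter for central differences), so for a CNN it is usually run only on a very small network with a handful of samples during development, never as part of training.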
Example #4
    def testGradient(self, X):
        '''
		Tests the analytical gradient computation by comparing it with the numerical gradients

		Arguments
		X		: data matrix in the form [input dim., number of samples]
		
		Returns
		result	: 0 if passed, -1 if failed
		'''
        assert self.isInitialized, 'ERROR:SparseCoding:testGradient: The instance is not properly initialized'

        if self.debug:
            print 'DEBUG:SparseCoding:testGradient:Checking weight gradient...'

        result = 0

        grad = self.computeWeightGradient(self.weights_vec, self.features_vec,
                                          X)

        numGrad = AuxFunctions.computeNumericalGradient(
            func=self.computeWeightCost,
            params=self.weights_vec,
            args=(self.features_vec, X))

        errorGrad = np.sqrt(np.sum((grad - numGrad)**2))

        if errorGrad < 1e-4:
            if self.debug:
                print 'DEBUG:SparseCoding:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:SparseCoding:testGradient:Gradient check PASSED!'
                print
        else:
            if self.debug:
                print 'DEBUG:SparseCoding:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:SparseCoding:testGradient:Gradient check FAILED!'
                print

            result = -1

        if self.debug:
            print 'DEBUG:SparseCoding:testGradient:Checking feature gradient...'

        grad = self.computeFeatureGradient(self.features_vec, self.weights_vec,
                                           X)

        numGrad = AuxFunctions.computeNumericalGradient(
            func=self.computeFeatureCost,
            params=self.features_vec,
            args=(self.weights_vec, X))

        errorGrad = np.sqrt(np.sum((grad - numGrad)**2))

        if errorGrad < 1e-4:
            if self.debug:
                print 'DEBUG:SparseCoding:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:SparseCoding:testGradient:Gradient check PASSED!'
                print
        else:
            if self.debug:
                print 'DEBUG:SparseCoding:testGradient:Gradient error: ', errorGrad
                print 'DEBUG:SparseCoding:testGradient:Gradient check FAILED!'
                print

            result = -1

        return result