# Example #1
 def testOneDim(self):
     """Fit y = 2x + 3 on 1-D input and require MSE below 0.3.

     Checks that the linear regressor recovers a simple one-dimensional
     linear relationship from 1000 noiseless samples.
     """
     random.seed(42)
     # BUG FIX: random.seed() seeds only Python's stdlib RNG; the data
     # below is drawn from NumPy's global RNG, so seed that one too to
     # make the test deterministic.
     np.random.seed(42)
     X = np.random.rand(1000)
     y = 2 * X + 3
     regressor = skflow.TensorFlowLinearRegressor()
     regressor.fit(X, y)
     score = mean_squared_error(regressor.predict(X), y)
     self.assertLess(score, 0.3, "Failed with score = {0}".format(score))
# Example #2
 def testMultiRegression(self):
     """Fit a two-output target (pi*sin x, pi*cos x) and require MSE < 10."""
     random.seed(42)
     state = np.random.RandomState(1)
     # 100 sorted inputs drawn uniformly from [-100, 100).
     inputs = np.sort(200 * state.rand(100, 1) - 100, axis=0)
     flat = inputs.ravel()
     # Shape (100, 2): one column per regression target.
     targets = np.column_stack((np.pi * np.sin(flat), np.pi * np.cos(flat)))
     model = skflow.TensorFlowLinearRegressor(learning_rate=0.01)
     model.fit(inputs, targets)
     score = mean_squared_error(model.predict(inputs), targets)
     self.assertLess(score, 10, "Failed with score = {0}".format(score))
# Example #3
 def testBoston(self):
     """Full-batch linear regression on the Boston housing data; MSE < 150."""
     random.seed(42)
     housing = datasets.load_boston()
     # batch_size equal to the sample count makes each step a full pass.
     n_samples = housing.data.shape[0]
     model = skflow.TensorFlowLinearRegressor(
         batch_size=n_samples, steps=500, learning_rate=0.001)
     model.fit(housing.data, housing.target)
     predictions = model.predict(housing.data)
     score = mean_squared_error(housing.target, predictions)
     self.assertLess(score, 150, "Failed with score = {0}".format(score))
# Example #4
 def testLinearRegression(self):
     """Recover known random weights and bias of a noisy linear model.

     Generates y = X.w + bias + small Gaussian noise, fits with plain SGD,
     and checks the learned weights_ and bias_ match the ground truth.
     """
     rng = np.random.RandomState(67)
     N = 1000
     n_weights = 10
     self.bias = 2
     self.X = rng.uniform(-1, 1, (N, n_weights))
     self.weights = 10 * rng.randn(n_weights)
     self.y = np.dot(self.X, self.weights)
     # Additive observation noise plus a (slightly jittered) constant bias.
     self.y += rng.randn(len(self.X)) * 0.05 + rng.normal(self.bias, 0.01)
     regressor = skflow.TensorFlowLinearRegressor(optimizer="SGD")
     regressor.fit(self.X, self.y)
     # Have to flatten weights since they come in (X, 1) shape
     self.assertAllClose(self.weights,
                         regressor.weights_.flatten(),
                         rtol=0.01)
     # BUG FIX: a bare `assert` is stripped under `python -O` and reports
     # nothing on failure; use a unittest assertion with the identical
     # comparison instead.
     self.assertTrue(abs(self.bias - regressor.bias_) < 0.1)