Example #1
 def testOneDim(self):
     """Linear regression on a 1-D feature recovers y = 2x + 3 (MSE < 0.3)."""
     # Seed NumPy's RNG: the data below comes from np.random, which the
     # original's stdlib `random.seed(42)` call did not affect, so the
     # test data was not actually reproducible.
     np.random.seed(42)
     X = np.random.rand(1000)
     y = 2 * X + 3
     regressor = skflow.TensorFlowLinearRegressor()
     regressor.fit(X, y)
     score = mean_squared_error(regressor.predict(X), y)
     self.assertLess(score, 0.3, "Failed with score = {0}".format(score))
Example #2
 def testMultiRegression(self):
     """Linear regression with a two-column target: pi*sin(x) and pi*cos(x)."""
     rand = np.random.RandomState(1)
     # 100 sorted samples drawn uniformly from [-100, 100), shape (100, 1).
     X = np.sort(200 * rand.rand(100, 1) - 100, axis=0)
     # Stack the two targets as columns of a (100, 2) array.
     y = np.column_stack((np.pi * np.sin(X).ravel(),
                          np.pi * np.cos(X).ravel()))
     regressor = skflow.TensorFlowLinearRegressor()
     regressor.fit(X, y)
     mse = mean_squared_error(regressor.predict(X), y)
     self.assertLess(mse, 10, "Failed with score = {0}".format(mse))
Example #3
 def testBoston(self):
     """Full-batch linear regression on the Boston housing dataset."""
     random.seed(42)
     boston = datasets.load_boston()
     n_samples = boston.data.shape[0]
     # Full-batch training: the batch size equals the dataset size.
     regressor = skflow.TensorFlowLinearRegressor(
         batch_size=n_samples, steps=500, learning_rate=0.001)
     regressor.fit(boston.data, boston.target)
     mse = mean_squared_error(boston.target,
                              regressor.predict(boston.data))
     self.assertLess(mse, 150, "Failed with score = {0}".format(mse))
Example #4
 def testLinearRegression(self):
     """Recovers known random weights and bias from noisy linear data."""
     rng = np.random.RandomState(67)
     n_samples = 1000
     n_weights = 10
     self.bias = 2
     self.X = rng.uniform(-1, 1, (n_samples, n_weights))
     self.weights = 10 * rng.randn(n_weights)
     self.y = np.dot(self.X, self.weights)
     # Add small Gaussian noise plus a (nearly constant) bias term.
     self.y += rng.randn(len(self.X)) * 0.05 + rng.normal(self.bias, 0.01)
     regressor = skflow.TensorFlowLinearRegressor()
     regressor.fit(self.X, self.y)
     # Have to flatten weights since they come in (X, 1) shape
     self.assertAllClose(self.weights, regressor.weights_.flatten(), rtol=0.01)
     # Use a unittest assertion rather than a bare `assert`, which is
     # silently stripped when Python runs with the -O flag.
     self.assertLess(abs(self.bias - regressor.bias_), 0.1)
Example #5
# Alternative models tried earlier and kept for reference; only the
# TensorFlowLinearRegressor below is active.
# param_grid = {
#     'n_estimators': [500, 1000, 3000],
#     'max_depth': [4, 6],
#     'min_samples_leaf': [3, 5, 9, 17],
#     'learning_rate': [0.1, 0.05, 0.02, 0.01],
#     'max_features': [1.0, 0.3, 0.1],
#     'loss': ['ls', 'lad', 'huber']
# }

# regressor = skflow.TensorFlowDNNRegressor(hidden_units=[10, 10, 10],
#     steps=20000, learning_rate=0.01, batch_size=13)

# regressor = SVR(kernel='rbf', C=1000, gamma='auto')

# Active model: linear regression trained for 2000 steps with batch size 13.
regressor = skflow.TensorFlowLinearRegressor(steps=2000,
                                             learning_rate=0.01,
                                             batch_size=13)

# Timestamp taken before training -- presumably used later to report
# elapsed time (the matching end timestamp is outside this chunk).
ts_a = datetime.datetime.now()

# Train and Predict

regressor.fit(X_train, y_train)
# NOTE(review): X_test is passed through scaler.transform() here but
# X_train is fed to fit() and predict() as-is -- confirm X_train was
# already scaled upstream, otherwise train and test data are on
# different scales.
score = metrics.mean_squared_error(regressor.predict(scaler.transform(X_test)),
                                   y_test)
X_ty = regressor.predict(X_train)
score1 = metrics.mean_squared_error(X_ty, y_train)

print('Test MSE: {0:f}'.format(score))
print('Train MSE: {0:f}'.format(score1))
Example #6
import skflow

# Load the Boston housing data: numeric features and a continuous target.
boston = datasets.load_boston()
X, y = boston.data, boston.target

# Hold out 25% of the samples for evaluation.
X_train, X_test, y_train, y_test = cross_validation.train_test_split(
    X, y, test_size=0.25, random_state=33)

# Standardize features: fit the scaler on the training split only, then
# apply the same transform to the test split.
ss = preprocessing.StandardScaler()
X_train = ss.fit_transform(X_train)
X_test = ss.transform(X_test)

# Train with skflow's TensorFlowLinearRegressor.
tf_lr = skflow.TensorFlowLinearRegressor(steps=10000,
                                         learning_rate=0.01,
                                         batch_size=50)
tf_lr.fit(X_train, y_train)
tf_lr_y_predict = tf_lr.predict(X_test)

# Train with skflow's TensorFlowDNNRegressor (two hidden layers: 100, 40).
tf_dnn_regressor = skflow.TensorFlowDNNRegressor(hidden_units=[100, 40],
                                                 steps=10000,
                                                 learning_rate=0.01,
                                                 batch_size=50)
tf_dnn_regressor.fit(X_train, y_train)
tf_dnn_regressor_y_predict = tf_dnn_regressor.predict(X_test)

# Train a RandomForestRegressor for comparison.  The original comment
# attributed it to skflow, but it presumably comes from scikit-learn --
# verify against the import lines above this chunk.
rfr = RandomForestRegressor()
rfr.fit(X_train, y_train)