Example #1
 def testOneDim(self):
     random.seed(42)
     x = np.random.rand(1000)
     y = 2 * x + 3
     feature_columns = learn.infer_real_valued_columns_from_input(x)
     regressor = learn.LinearRegressor(feature_columns=feature_columns)
     regressor.fit(x, y, max_steps=100)
     score = mean_squared_error(y, regressor.predict(x))
     self.assertLess(score, 1.0, "Failed with score = {0}".format(score))
Example #2
 def testBoston(self):
   random.seed(42)
   boston = datasets.load_boston()
   regressor = learn.LinearRegressor(
       feature_columns=learn.infer_real_valued_columns_from_input(boston.data))
   regressor.fit(boston.data, boston.target, max_steps=500)
   score = mean_squared_error(
       boston.target, np.array(list(regressor.predict(boston.data))))
   self.assertLess(score, 150, "Failed with score = {0}".format(score))
Example #3
 def testMultiRegression(self):
     random.seed(42)
     rng = np.random.RandomState(1)
     x = np.sort(200 * rng.rand(100, 1) - 100, axis=0)
     y = np.array([np.pi * np.sin(x).ravel(), np.pi * np.cos(x).ravel()]).T
     regressor = learn.LinearRegressor(
         feature_columns=learn.infer_real_valued_columns_from_input(x),
         target_dimension=2)
     regressor.fit(x, y, steps=100)
     score = mean_squared_error(regressor.predict(x), y)
     self.assertLess(score, 10, "Failed with score = {0}".format(score))
Example #4
 def testLinearRegression(self):
     rng = np.random.RandomState(67)
     n = 1000
     n_weights = 10
     bias = 2
     x = rng.uniform(-1, 1, (n, n_weights))
     weights = 10 * rng.randn(n_weights)
     y = np.dot(x, weights)
     y += rng.randn(len(x)) * 0.05 + rng.normal(bias, 0.01)
     regressor = learn.LinearRegressor(
         feature_columns=learn.infer_real_valued_columns_from_input(x),
         optimizer="SGD")
     regressor.fit(x, y, steps=200)
     # Have to flatten weights since they come in (x, 1) shape.
     self.assertAllClose(weights, regressor.weights_.flatten(), rtol=0.01)
Example #5
from sklearn import datasets, metrics, preprocessing, cross_validation
import numpy as np

boston = datasets.load_boston()
X, y = boston.data, boston.target

X_train, X_test, y_train, y_test = cross_validation.train_test_split(
    X, y, test_size=0.25, random_state=33)
# Standardize the data features
scaler = preprocessing.StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

#@@ skflow is said to have been merged into TensorFlow's learn module, hence the code below
#@@ but the code below still has issues
import tensorflow.contrib.learn.python.learn as learn
import tensorflow as tf
# Use the skflow-style LinearRegressor
tf_lr = learn.LinearRegressor(
    feature_columns=learn.infer_real_valued_columns_from_input(X_train),
    optimizer=tf.train.GradientDescentOptimizer(learning_rate=0.01))
#@@ optimizer sets the optimizer; with the default learning rate of 0.2 the gradients explode
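#@@ a minimal alternative sketch: the optimizer argument also accepts any other
#@@ tf.train optimizer instance, e.g. an explicit FTRL optimizer (the 0.01 learning
#@@ rate below is an illustrative value, not the library default):
# tf_lr = learn.LinearRegressor(
#     feature_columns=learn.infer_real_valued_columns_from_input(X_train),
#     optimizer=tf.train.FtrlOptimizer(learning_rate=0.01))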

tf_lr.fit(X_train, y_train, steps=1000, batch_size=50)
tf_lr_y_predict = tf_lr.predict(X_test)
tf_lr_y_predict = np.array(list(tf_lr_y_predict))

print('mean absolute error:', metrics.mean_absolute_error(y_test, tf_lr_y_predict),
      '\n')
print('mean squared error:',
      metrics.mean_squared_error(y_test, tf_lr_y_predict), '\n')
print('R-squared value:', metrics.r2_score(y_test, tf_lr_y_predict))
#
# boston = datasets.load_boston()
#
# X, y = boston.data, boston.target
#
# X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.25, random_state=33)
#
# scaler = preprocessing.StandardScaler()
# X_train = scaler.fit_transform(X_train)
# X_test = scaler.transform(X_test)
#
# feature_columns = learn.infer_real_valued_columns_from_input(X_train)
# tf_lr = learn.LinearRegressor(feature_columns=feature_columns)
# tf_lr.fit(X_train, y_train, steps=10000, batch_size=50)
#
# tf_lr_y_predict = tf_lr.predict(X_test)

# print(metrics.mean_absolute_error(tf_lr_y_predict, y_test))

import tensorflow.contrib.learn.python.learn as learn
from sklearn import datasets, metrics, preprocessing

boston = datasets.load_boston()
x = preprocessing.StandardScaler().fit_transform(boston.data)
feature_columns = learn.infer_real_valued_columns_from_input(x)
regressor = learn.LinearRegressor(feature_columns=feature_columns)
regressor.fit(x, boston.target, steps=200, batch_size=32)
boston_predictions = list(regressor.predict(x, as_iterable=True))
score = metrics.mean_squared_error(boston_predictions, boston.target)
print("MSE: %f" % score)