Example #1
  def testExportMonitorRegressionSignature(self):

    def _regression_signature(examples, unused_features, predictions):
      signatures = {}
      signatures['regression'] = (
          tf.contrib.session_bundle.exporter.regression_signature(examples,
                                                                  predictions))
      return signatures['regression'], signatures

    random.seed(42)
    x = np.random.rand(1000)
    y = 2 * x + 3
    cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
    regressor = learn.LinearRegressor(feature_columns=cont_features)
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(
        every_n_steps=1,
        export_dir=export_dir,
        exports_to_keep=1,
        signature_fn=_regression_signature)
    regressor.fit(x, y, steps=10, monitors=[export_monitor])

    self.assertTrue(tf.gfile.Exists(export_dir))
    self.assertFalse(tf.gfile.Exists(export_dir + '00000000/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('regression_signature'))
Example #2
    def testExportMonitorInputFeature(self):
        random.seed(42)
        input_feature_key = 'my_example_key'

        def _export_input_fn():
            return {
                input_feature_key: tf.placeholder(dtype=tf.string,
                                                  shape=(1, )),
                _X_KEY: tf.random_uniform(shape=(1, ),
                                          minval=0.0,
                                          maxval=1000.0)
            }, None

        export_dir = tempfile.mkdtemp() + 'export/'
        monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=export_dir,
            input_fn=_export_input_fn,
            input_feature_key=input_feature_key,
            exports_to_keep=2,
            signature_fn=export.generic_signature_fn)
        regressor = learn.LinearRegressor(feature_columns=[_X_COLUMN])
        regressor.fit(input_fn=_training_input_fn,
                      steps=10,
                      monitors=[monitor])
        self._assert_export(monitor, export_dir, 'generic_signature')
Example #3
    def testExportMonitorInputFeatureKeyNone(self):
        random.seed(42)
        input_feature_key = 'my_example_key'

        def _export_input_fn():
            return {
                input_feature_key:
                None,
                _X_KEY:
                random_ops.random_uniform(shape=(1, ),
                                          minval=0.0,
                                          maxval=1000.0)
            }, None

        monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=tempfile.mkdtemp() + 'export/',
            input_fn=_export_input_fn,
            input_feature_key=input_feature_key,
            exports_to_keep=2,
            signature_fn=export.generic_signature_fn)
        regressor = learn.LinearRegressor(feature_columns=[_X_COLUMN])
        with self.assertRaisesRegexp(ValueError, 'examples cannot be None'):
            regressor.fit(input_fn=_training_input_fn,
                          steps=10,
                          monitors=[monitor])
Example #4
    def testExportMonitorRegressionSignature(self):
        def _regression_signature(examples, unused_features, predictions):
            signatures = {}
            signatures['regression'] = (exporter.regression_signature(
                examples, predictions))
            return signatures['regression'], signatures

        random.seed(42)
        x = np.random.rand(1000)
        y = 2 * x + 3
        cont_features = [feature_column.real_valued_column('', dimension=1)]
        regressor = learn.LinearRegressor(feature_columns=cont_features)
        export_dir = os.path.join(tempfile.mkdtemp(), 'export')
        export_monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=export_dir,
            exports_to_keep=1,
            signature_fn=_regression_signature)
        regressor.fit(x, y, steps=10, monitors=[export_monitor])

        self.assertTrue(gfile.Exists(export_dir))
        with self.assertRaises(errors.NotFoundError):
            saver.checkpoint_exists(
                os.path.join(export_dir, '00000000', 'export'))
        self.assertTrue(
            saver.checkpoint_exists(
                os.path.join(export_dir, '00000010', 'export')))
        # Validate the signature
        signature = self._get_default_signature(
            os.path.join(export_dir, '00000010', 'export.meta'))
        self.assertTrue(signature.HasField('regression_signature'))
Example #5
def experiment_fn(output_dir):
    return tflearn.Experiment(
        tflearn.LinearRegressor(feature_columns=feature_cols,
                                model_dir=output_dir),
        train_input_fn=get_train(),
        eval_input_fn=get_valid(),
        eval_metrics={
            'rmse':
            tflearn.MetricSpec(
                metric_fn=metrics.streaming_root_mean_squared_error)
        })
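In use, an experiment_fn like this is typically handed to tf.contrib.learn's learn_runner, which constructs the Experiment and drives training and evaluation. A minimal sketch, assuming feature_cols, get_train, and get_valid are defined elsewhere; the output path is a placeholder:

from tensorflow.contrib.learn.python.learn import learn_runner

# learn_runner calls experiment_fn(output_dir) and runs the resulting Experiment.
learn_runner.run(experiment_fn, output_dir='/tmp/linear_experiment')  # hypothetical path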
Example #6
 def testExportMonitor_EstimatorProvidesSignature(self):
     random.seed(42)
     x = np.random.rand(1000)
     y = 2 * x + 3
     cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
     regressor = learn.LinearRegressor(feature_columns=cont_features)
     export_dir = tempfile.mkdtemp() + 'export/'
     export_monitor = learn.monitors.ExportMonitor(every_n_steps=1,
                                                   export_dir=export_dir,
                                                   exports_to_keep=2)
     regressor.fit(x, y, steps=10, monitors=[export_monitor])
     self._assert_export(export_monitor, export_dir, 'regression_signature')
Example #7
 def testExportMonitor(self):
   random.seed(42)
   x = np.random.rand(1000)
   y = 2 * x + 3
   regressor = learn.LinearRegressor()
   export_dir = tempfile.mkdtemp() + 'export/'
   export_monitor = learn.monitors.ExportMonitor(every_n_steps=1,
                                                 export_dir=export_dir,
                                                 exports_to_keep=1)
   regressor.fit(x, y, steps=10,
                 monitors=[export_monitor])
   self.assertTrue(tf.gfile.Exists(export_dir))
   self.assertFalse(tf.gfile.Exists(export_dir + '00000000/export'))
   self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
Example #8
 def testExportMonitor(self):
     random.seed(42)
     x = np.random.rand(1000)
     y = 2 * x + 3
     cont_features = [feature_column.real_valued_column('', dimension=1)]
     export_dir = tempfile.mkdtemp() + 'export/'
     export_monitor = learn.monitors.ExportMonitor(
         every_n_steps=1,
         export_dir=export_dir,
         exports_to_keep=2,
         signature_fn=export.generic_signature_fn)
     regressor = learn.LinearRegressor(feature_columns=cont_features)
     regressor.fit(x, y, steps=10, monitors=[export_monitor])
     self._assert_export(export_monitor, export_dir, 'generic_signature')
Example #9
def part2():
    global boston, x_data, y_data
    NUM_STEPS = 200
    MINIBATCH_SIZE = 506

    feature_columns = learn.infer_real_valued_columns_from_input(x_data)

    reg = learn.LinearRegressor(
        feature_columns=feature_columns,
        optimizer=tf.train.GradientDescentOptimizer(learning_rate=0.1))

    reg.fit(x_data, boston.target, steps=NUM_STEPS, batch_size=MINIBATCH_SIZE)

    MSE = reg.evaluate(x_data, boston.target, steps=1)

    print(MSE)
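Once fitted, per-sample predictions can be pulled from the regressor; a small sketch following the predict pattern used elsewhere in these examples:

    # Yields one prediction per input row; list() materializes the generator.
    predictions = list(reg.predict(x_data, as_iterable=True))
    print(predictions[:5])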
Example #10
def part4():
    global boston, x_data, y_data
    import pandas as pd
    import numpy as np
    N = 10000

    weight = np.random.randn(N) * 5 + 70
    spec_id = np.random.randint(0, 3, N)
    bias = [0.9, 1, 1.1]
    height = np.array(
        [weight[i] / 100 + bias[b] for i, b in enumerate(spec_id)])
    spec_name = ['Goblin', 'Human', 'ManBear']
    spec = [spec_name[s] for s in spec_id]

    df = pd.DataFrame({'Species': spec, 'Weight': weight, 'Height': height})

    from tensorflow.contrib import layers
    Weight = layers.real_valued_column("Weight")
    Species = layers.sparse_column_with_keys(column_name="Species",
                                             keys=spec_name)
    reg = learn.LinearRegressor(feature_columns=[Weight, Species])

    def input_fn(df):
        feature_cols = {}
        feature_cols['Weight'] = tf.constant(df['Weight'].values)

        feature_cols['Species'] = tf.SparseTensor(
            indices=[[i, 0] for i in range(df['Species'].size)],
            values=df['Species'].values,
            dense_shape=[df['Species'].size, 1])

        labels = tf.constant(df['Height'].values)

        return feature_cols, labels

    reg.fit(input_fn=lambda: input_fn(df), steps=50000)

    w_w = reg.get_variable_value('linear/Weight/weight')
    print(f"Estimation for Weight: {w_w}")

    v = reg.get_variable_names()
    print(f"Classes: {v}")

    s_w = reg.get_variable_value('linear/Species/weights')
    b = reg.get_variable_value('linear/bias_weight')
    print(f"Estimation for Species: {s_w + b}")
Example #11
  def testExportMonitor(self):
    random.seed(42)
    x = np.random.rand(1000)
    y = 2 * x + 3
    regressor = learn.LinearRegressor()
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(every_n_steps=1,
                                                  export_dir=export_dir,
                                                  exports_to_keep=1)
    regressor.fit(x, y, steps=10,
                  monitors=[export_monitor])

    self.assertTrue(tf.gfile.Exists(export_dir))
    self.assertFalse(tf.gfile.Exists(export_dir + '00000000/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('generic_signature'))
Example #12
    def testExportMonitorInputFeatureKeyNoneNoFeatures(self):
        random.seed(42)
        input_feature_key = 'my_example_key'

        def _export_input_fn():
            return {input_feature_key: None}, None

        monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=tempfile.mkdtemp() + 'export/',
            input_fn=_export_input_fn,
            input_feature_key=input_feature_key,
            exports_to_keep=2,
            signature_fn=export.generic_signature_fn)
        regressor = learn.LinearRegressor(feature_columns=[_X_COLUMN])
        with self.assertRaisesRegexp(ValueError,
                                     'features or examples must be defined'):
            regressor.fit(input_fn=_training_input_fn,
                          steps=10,
                          monitors=[monitor])
Example #13
    def testExportMonitorInputFeatureKeyNoFeatures(self):
        random.seed(42)
        input_feature_key = 'my_example_key'

        def _export_input_fn():
            return {
                input_feature_key: tf.placeholder(dtype=tf.string, shape=(1, ))
            }, None

        monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=tempfile.mkdtemp() + 'export/',
            input_fn=_export_input_fn,
            input_feature_key=input_feature_key,
            exports_to_keep=2,
            signature_fn=export.generic_signature_fn)
        regressor = learn.LinearRegressor(feature_columns=[_X_COLUMN])
        with self.assertRaisesRegexp(KeyError, _X_KEY):
            regressor.fit(input_fn=_training_input_fn,
                          steps=10,
                          monitors=[monitor])
Example #14
  def testExportMonitor_EstimatorProvidesSignature(self):
    random.seed(42)
    x = np.random.rand(1000)
    y = 2 * x + 3
    cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
    regressor = learn.LinearRegressor(feature_columns=cont_features)
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(
        every_n_steps=1, export_dir=export_dir, exports_to_keep=2)
    regressor.fit(x, y, steps=10,
                  monitors=[export_monitor])

    self.assertTrue(tf.gfile.Exists(export_dir))
    # Only the written checkpoints are exported.
    self.assertTrue(tf.gfile.Exists(export_dir + '00000001/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    self.assertEquals(export_monitor.last_export_dir,
                      six.b(os.path.join(export_dir, '00000010')))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('regression_signature'))
Example #15
    def testExportMonitorInputFeatureKeyMissing(self):
        random.seed(42)

        def _serving_input_fn():
            return {
                _X_KEY:
                random_ops.random_uniform(shape=(1, ),
                                          minval=0.0,
                                          maxval=1000.0)
            }, None

        input_feature_key = 'my_example_key'
        monitor = learn.monitors.ExportMonitor(
            every_n_steps=1,
            export_dir=os.path.join(tempfile.mkdtemp(), 'export'),
            input_fn=_serving_input_fn,
            input_feature_key=input_feature_key,
            exports_to_keep=2,
            signature_fn=export.generic_signature_fn)
        regressor = learn.LinearRegressor(feature_columns=[_X_COLUMN])
        with self.assertRaisesRegexp(KeyError, input_feature_key):
            regressor.fit(input_fn=_training_input_fn,
                          steps=10,
                          monitors=[monitor])
Example #16
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 17 16:58:55 2018
使用内置的LinearRegressor,DNN以及Scikit-learn中的集成回归模型对“美国波士顿房价数据进行回归预测”
skflow已整合到tensorflow中
@author: mjw
"""
from sklearn import datasets, metrics, preprocessing, cross_validation
boston = datasets.load_boston()
x, y = boston.data, boston.target
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
    x, y, test_size=0.25, random_state=33)
scaler = preprocessing.StandardScaler()
x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)

import tensorflow.contrib.learn as learn  # replaces the old skflow import
# tf_lr = learn.LinearRegressor(steps=10000, learning_rate=0.01, batch_size=50)  # old skflow API
tf_lr = learn.LinearRegressor(
    feature_columns=learn.infer_real_valued_columns_from_input(x_train))
tf_lr.fit(x_train, y_train, steps=10000, batch_size=50)
tf_lr_y_predict = list(tf_lr.predict(x_test, as_iterable=True))

print('--------Tensorflow linear regressor on boston dataset--------')
print('MAE', metrics.mean_absolute_error(y_test, tf_lr_y_predict))  # mean absolute error
print('MSE', metrics.mean_squared_error(y_test, tf_lr_y_predict))  # mean squared error
print('R-squared value', metrics.r2_score(y_test, tf_lr_y_predict))
#Linear Classifier
iris = datasets.load_iris()
feature_columns = learn.infer_real_valued_columns_from_input(iris.data)
classifier = learn.LinearClassifier(n_classes=3,
                                    feature_columns=feature_columns)
classifier.fit(iris.data, iris.target, steps=200, batch_size=32)
iris_predictions = list(classifier.predict(iris.data, as_iterable=True))
score = metrics.accuracy_score(iris.target, iris_predictions)
print("Accuracy: %f" % score)

#Linear Regression
boston = datasets.load_boston()
x = preprocessing.StandardScaler().fit_transform(boston.data)
feature_columns = learn.infer_real_valued_columns_from_input(x)
regressor = learn.LinearRegressor(feature_columns=feature_columns)
regressor.fit(x, boston.target, steps=200, batch_size=32)
boston_predictions = list(regressor.predict(x, as_iterable=True))
score = metrics.mean_squared_error(boston_predictions, boston.target)
print("MSE: %f" % score)

# Deep Neural Network
iris = datasets.load_iris()
feature_columns = learn.infer_real_valued_columns_from_input(iris.data)
classifier = learn.DNNClassifier(hidden_units=[10, 20, 10],
                                 n_classes=3,
                                 feature_columns=feature_columns)
classifier.fit(iris.data, iris.target, steps=200, batch_size=32)
iris_predictions = list(classifier.predict(iris.data, as_iterable=True))
score = metrics.accuracy_score(iris.target, iris_predictions)
print("Accuracy: %f" % score)
def build_estimator(model_dir):
    """Build an Estimator"""

    # Sparse base Columns
    gender = tf.contrib.layers.sparse_column_with_keys(column_name="sex",
                                                       keys=["F", "M"])
    unknown = tf.contrib.layers.sparse_column_with_keys(column_name="unknown",
                                                        keys=['Y', 'N'])
    action = tf.contrib.layers.sparse_column_with_keys(column_name="action",
                                                       keys=['Y', 'N'])
    adventure = tf.contrib.layers.sparse_column_with_keys(
        column_name="adventure", keys=['Y', 'N'])
    animation = tf.contrib.layers.sparse_column_with_keys(
        column_name="animation", keys=['Y', 'N'])
    children = tf.contrib.layers.sparse_column_with_keys(
        column_name="children", keys=['Y', 'N'])
    comedy = tf.contrib.layers.sparse_column_with_keys(column_name="comedy",
                                                       keys=['Y', 'N'])
    crime = tf.contrib.layers.sparse_column_with_keys(column_name="crime",
                                                      keys=['Y', 'N'])
    documentary = tf.contrib.layers.sparse_column_with_keys(
        column_name="documentary", keys=['Y', 'N'])
    drama = tf.contrib.layers.sparse_column_with_keys(column_name="drama",
                                                      keys=['Y', 'N'])
    fantasy = tf.contrib.layers.sparse_column_with_keys(column_name="fantasy",
                                                        keys=['Y', 'N'])
    filmnoir = tf.contrib.layers.sparse_column_with_keys(
        column_name="filmnoir", keys=['Y', 'N'])
    horror = tf.contrib.layers.sparse_column_with_keys(column_name="horror",
                                                       keys=['Y', 'N'])
    musical = tf.contrib.layers.sparse_column_with_keys(column_name="musical",
                                                        keys=['Y', 'N'])
    mystery = tf.contrib.layers.sparse_column_with_keys(column_name="mystery",
                                                        keys=['Y', 'N'])
    romance = tf.contrib.layers.sparse_column_with_keys(column_name="romance",
                                                        keys=['Y', 'N'])
    scifi = tf.contrib.layers.sparse_column_with_keys(column_name="scifi",
                                                      keys=['Y', 'N'])
    thriller = tf.contrib.layers.sparse_column_with_keys(
        column_name="thriller", keys=['Y', 'N'])
    war = tf.contrib.layers.sparse_column_with_keys(column_name="war",
                                                    keys=['Y', 'N'])
    western = tf.contrib.layers.sparse_column_with_keys(column_name="western",
                                                        keys=['Y', 'N'])
    occupation = tf.contrib.layers.sparse_column_with_hash_bucket(
        column_name="occupation", hash_bucket_size=1000)

    # Continuous Columns
    age = tf.contrib.layers.real_valued_column("age")
    time_diff = tf.contrib.layers.real_valued_column("time_diff")

    # Transformations
    age_buckets = tf.contrib.layers.bucketized_column(
        age, boundaries=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])

    # Wide Columns and Deep Columns
    wide_columns = [
        gender, occupation, age_buckets, unknown, action, adventure, animation,
        children, comedy, crime, documentary, drama, fantasy, filmnoir, horror,
        musical, mystery, romance, scifi, thriller, war, western,
        tf.contrib.layers.crossed_column([gender, occupation],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, action],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, adventure],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, animation],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, children],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, crime],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, drama],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, fantasy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, horror],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, musical],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, mystery],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, romance],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, scifi],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([gender, thriller],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, gender],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, action],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, adventure],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, animation],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, children],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, crime],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, drama],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, fantasy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, horror],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, musical],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, mystery],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, romance],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, scifi],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, thriller],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, gender, romance],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, action, adventure],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, animation, children],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, animation, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, children, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column(
            [age_buckets, animation, children, comedy],
            hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column(
            [age_buckets, crime, action, adventure],
            hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, drama, mystery],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([age_buckets, scifi, thriller],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([action, adventure],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([animation, children],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([animation, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([children, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([animation, children, comedy],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([crime, action, adventure],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([drama, mystery],
                                         hash_bucket_size=int(1e4)),
        tf.contrib.layers.crossed_column([scifi, thriller],
                                         hash_bucket_size=int(1e4))
    ]

    deep_columns = [
        tf.contrib.layers.embedding_column(gender, dimension=8),
        tf.contrib.layers.embedding_column(occupation, dimension=8),
        tf.contrib.layers.embedding_column(unknown, dimension=8),
        tf.contrib.layers.embedding_column(action, dimension=8),
        tf.contrib.layers.embedding_column(adventure, dimension=8),
        tf.contrib.layers.embedding_column(animation, dimension=8),
        tf.contrib.layers.embedding_column(children, dimension=8),
        tf.contrib.layers.embedding_column(comedy, dimension=8),
        tf.contrib.layers.embedding_column(crime, dimension=8),
        tf.contrib.layers.embedding_column(documentary, dimension=8),
        tf.contrib.layers.embedding_column(drama, dimension=8),
        tf.contrib.layers.embedding_column(fantasy, dimension=8),
        tf.contrib.layers.embedding_column(filmnoir, dimension=8),
        tf.contrib.layers.embedding_column(horror, dimension=8),
        tf.contrib.layers.embedding_column(musical, dimension=8),
        tf.contrib.layers.embedding_column(mystery, dimension=8),
        tf.contrib.layers.embedding_column(romance, dimension=8),
        tf.contrib.layers.embedding_column(scifi, dimension=8),
        tf.contrib.layers.embedding_column(thriller, dimension=8),
        tf.contrib.layers.embedding_column(war, dimension=8),
        tf.contrib.layers.embedding_column(western, dimension=8), time_diff,
        age
    ]

    # # Optimizers
    # linear_optimizer = tf.train.FtrlOptimizer(learning_rate=0.1,
    # 	l1_regularization_strength=0.01, l2_regularization_strength=0.01)

    # dnn_optimizer = tf.train.ProximalAdagradOptimizer(learning_rate=0.1,
    # 	l1_regularization_strength=0.001, l2_regularization_strength=0.001)

    if FLAGS.model_type == "wide":
        m = tflearn.LinearRegressor(model_dir=model_dir,
                                    feature_columns=wide_columns)
        # m = tflearn.LinearClassifier(model_dir=model_dir, feature_columns=wide_columns)
    elif FLAGS.model_type == "deep":
        m = tflearn.DNNRegressor(model_dir=model_dir,
                                 feature_columns=deep_columns,
                                 hidden_units=[64, 32, 16])
        # m = tflearn.DNNClassifier(model_dir=model_dir, feature_columns=deep_columns, hidden_units=[100, 50])
    elif FLAGS.model_type == "logistic":
        m = tflearn.LogisticRegressor()
    else:
        m = tflearn.DNNLinearCombinedRegressor(
            model_dir=model_dir,
            linear_feature_columns=wide_columns,
            dnn_feature_columns=deep_columns,
            dnn_hidden_units=[64, 32, 16])
        # m = tflearn.DNNLinearCombinedClassifier(model_dir=model_dir, linear_feature_columns=wide_columns,
        # 	dnn_feature_columns=deep_columns, dnn_hidden_units=[100, 50])

    return m
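A hedged sketch of how build_estimator might be driven; train_input_fn, eval_input_fn, the model directory, and the step counts are placeholders rather than part of the original:

# Hypothetical driver code: the input functions are assumed to exist elsewhere.
m = build_estimator(model_dir='/tmp/movie_model')
m.fit(input_fn=train_input_fn, steps=200)
results = m.evaluate(input_fn=eval_input_fn, steps=1)
print(results)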
Example #19
randomized_data = data.sample(frac=1)
training_examples = randomized_data.head(training_size)[FEATURES]
training_targets = randomized_data.head(training_size)[[TARGET]]
validation_examples = randomized_data.tail(verification_size)[FEATURES]
validation_targets = randomized_data.tail(verification_size)[[TARGET]]

STEPS = 5000
BATCH_SIZE = 5
periods = 1

feature_columns = [
    layers.sparse_column_with_keys(column_name="sex", keys=["M", "F", "I"])
] + ([layers.real_valued_column(name) for name in REAL_VALUED_FEATURES])

linear_regressor = learn.LinearRegressor(
    optimizer=tensorflow.train.GradientDescentOptimizer(0.05),
    feature_columns=feature_columns)


def input_fn(features, target=None):
    """Input builder function."""
    # Creates a dictionary mapping from each continuous feature column name (k) to
    # the values of that column stored in a constant Tensor.
    continuous_cols = {
        k: tensorflow.constant(features[k].values)
        for k in REAL_VALUED_FEATURES
    }
    # Creates a dictionary mapping from each categorical feature column name (k)
    # to the values of that column stored in a tf.SparseTensor.
    categorical_cols = {
        k: tensorflow.SparseTensor(
            indices=[[i, 0] for i in range(features[k].size)],
            values=features[k].values,
            dense_shape=[features[k].size, 1])
        for k in ["sex"]
    }
    # Merges the two dictionaries into one and returns the target, if given.
    feature_cols = dict(continuous_cols)
    feature_cols.update(categorical_cols)
    if target is None:
        return feature_cols
    return feature_cols, tensorflow.constant(target.values)
Example #20

def input_fn(df):
    # Maps the dense Weight column and the sparse Species column to tensors,
    # following the same pattern as the input_fn in Example #10.
    feature_cols = {
        'Weight': tf.constant(df['Weight'].values),
        'Species': tf.SparseTensor(
            indices=[[i, 0] for i in range(df['Species'].size)],
            values=df['Species'].values,
            dense_shape=[df['Species'].size, 1])
    }
    measured_heights = tf.constant(df['Height'].values)
    return feature_cols, measured_heights


Weight = layers.real_valued_column("Weight")

Species = layers.sparse_column_with_keys(
    column_name="Species", keys=['Goblin','Human','ManBears'])


reg = learn.LinearRegressor(feature_columns=[Weight, Species])


reg.fit(input_fn=lambda: input_fn(table_species_weight_height), steps=25000)  # originally steps=50000


regressor_var_names = reg.get_variable_names()
# Print regressor_var_names to see where the arguments for get_variable_value() come from.

w_w = reg.get_variable_value('linear/Weight/weight')
Example #21

def travel_input_fn(df):
    # Maps each continuous feature column to a constant Tensor and returns
    # the average travel time as the regression target.
    return {
        'route_quality': tf.constant(df['route_quality'].values),
        'wind_direction': tf.constant(df['wind_direction'].values),
        'wind_speed': tf.constant(df['wind_speed'].values),
        'temperature': tf.constant(df['temperature'].values),
        'precipitation': tf.constant(df['precipitation'].values),
        'weekend': tf.constant(df['weekend'].values),
        'time_of_day': tf.constant(df['time_of_day'].values)
    }, tf.constant(df['avg_travel_time'].values)


route_quality = layers.real_valued_column('route_quality')
wind_direction = layers.real_valued_column('wind_direction')
wind_direction_range = layers.bucketized_column(
    wind_direction, boundaries=[0, 45, 90, 135, 180, 225, 270, 315, 360])
wind_speed = layers.real_valued_column('wind_speed')
temperature = layers.real_valued_column('temperature')
precipitation = layers.real_valued_column('precipitation')
weekend = layers.real_valued_column('weekend')
time_of_day = layers.real_valued_column('time_of_day')
regressor = learn.LinearRegressor(feature_columns=[
    route_quality, wind_direction_range, wind_speed, temperature,
    precipitation, weekend, time_of_day
])


def travel_input_fn_training():
    return travel_input_fn(training_travel_df)


def travel_input_fn_test():
    return travel_input_fn(test_travel_df)


for i in range(10):
    regressor.fit(input_fn=travel_input_fn_training, steps=10)
    eval_rst = regressor.evaluate(input_fn=travel_input_fn_training, steps=1)
    print(eval_rst)
Example #22

# Using the high-level abstractions in tensorflow.contrib.learn to do linear regression.

import tensorflow as tf
from tensorflow.contrib import learn
from sklearn import datasets, metrics, preprocessing

boston = datasets.load_boston()
x_data = preprocessing.StandardScaler().fit_transform(boston.data)
y_data = boston.target

NUM_STEPS = 200
MINIBATCH_SIZE = 506
# infer_real_valued_columns_from_input takes a matrix of samples x features and
# returns a list of FeatureColumn objects.
# NOTE: this function is deprecated and will be removed in future versions;
# specify feature columns explicitly instead (see the sketch after this example).
feature_columns = learn.infer_real_valued_columns_from_input(x_data)

reg = learn.LinearRegressor(
    feature_columns=feature_columns,
    optimizer=tf.train.GradientDescentOptimizer(learning_rate=0.1))

reg.fit(x_data, boston.target, steps=NUM_STEPS, batch_size=MINIBATCH_SIZE)

MSE = reg.evaluate(x_data, boston.target, steps=1)

print(MSE)
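As the deprecation note above says, the inferred columns can also be spelled out by hand; a minimal sketch of the explicit equivalent for this dense matrix (a single unnamed real-valued column whose dimension equals the number of features):

from tensorflow.contrib import layers

# Explicit stand-in for infer_real_valued_columns_from_input on a dense matrix.
explicit_feature_columns = [
    layers.real_valued_column('', dimension=x_data.shape[1])
]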
Example #23
STEPS_PER_EPOCH = 100
EPOCHS = 500
BATCH_SIZE = 100

hidden_layers = [16, 16, 16, 16, 16]
dropout = 0.1

#logging.info('Saving to %s' % MODEL_PATH)

# Validation and Test Configuration
validation_metrics = {"MSE": tf.contrib.metrics.streaming_mean_squared_error}

# Building the Network
regressor = skflow.LinearRegressor(
    feature_columns=feature_columns,
    label_dimension=1,
    # hidden_units=hidden_layers,
    # dropout=dropout,
)

# Train it
# Fit the regressor (this is where the magic happens!)
regressor.fit(input_fn=training_input_fn, steps=STEPS_PER_EPOCH * EPOCHS)
# That's it -----------------------------
# Start TensorBoard in a terminal:
#     tensorboard --logdir='./DNNRegressors/'
# Now open a browser and visit localhost:6006

# This is just for fun and educational purposes:
# Evaluate the regressor every 10th epoch