  def testLinearRegression(self):
    my_seed = 42
    config = run_config.RunConfig(tf_random_seed=my_seed)
    boston = base.load_boston()
    columns = [feature_column.real_valued_column('', dimension=13)]

    # Train the same regressor twice, in two separately seeded graphs, and
    # check that the resulting weights, biases and predictions are identical.

    with ops.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      variables.create_global_step()
      regressor1 = linear.LinearRegressor(
          optimizer=_NULL_OPTIMIZER, feature_columns=columns, config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)

    with ops.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      variables.create_global_step()
      regressor2 = linear.LinearRegressor(
          optimizer=_NULL_OPTIMIZER, feature_columns=columns, config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)

    self.assertAllClose(regressor1.weights_, regressor2.weights_)
    self.assertAllClose(regressor1.bias_, regressor2.bias_)
    self.assertAllClose(
        list(regressor1.predict(
            boston.data, as_iterable=True)),
        list(regressor2.predict(
            boston.data, as_iterable=True)),
        atol=1e-05)
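# The `_NULL_OPTIMIZER` used above is not defined anywhere in this listing.
# A minimal sketch, assuming a TF 1.x optimizer that simply applies no
# updates, so both identically seeded runs keep their initial variables:
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.training import optimizer


class _NullOptimizer(optimizer.Optimizer):
  """Optimizer that performs no variable updates."""

  def __init__(self):
    super(_NullOptimizer, self).__init__(use_locking=False, name='Null')

  def _apply_dense(self, grad, var):
    return control_flow_ops.no_op()

  def _apply_sparse(self, grad, var):
    return control_flow_ops.no_op()

  def _prepare(self):
    pass


_NULL_OPTIMIZER = _NullOptimizer()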
  def testUntrained(self):
    boston = base.load_boston()
    est = estimator.SKCompat(estimator.Estimator(model_fn=linear_model_fn))
    with self.assertRaises(learn.NotFittedError):
      _ = est.score(x=boston.data, y=boston.target.astype(np.float64))
    with self.assertRaises(learn.NotFittedError):
      est.predict(x=boston.data)
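# `linear_model_fn` (and the params-taking `linear_model_params_fn` used
# further below) is referenced throughout these tests but never shown. A
# minimal sketch, assuming the contrib.learn model_fn contract of returning
# (predictions, loss, train_op) and the contrib helpers named below:
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import models


def linear_model_fn(features, labels, mode):
  # Accept both the plain-array and the dictionary input forms used above.
  if isinstance(features, dict):
    (_, features), = features.items()
  if isinstance(labels, dict):
    (_, labels), = labels.items()
  prediction, loss = models.linear_regression_zero_init(features, labels)
  train_op = optimizers.optimize_loss(
      loss,
      variables.get_global_step(),
      optimizer='Adagrad',
      learning_rate=0.1)
  return prediction, loss, train_op
# A `linear_model_params_fn` variant would take an extra `params` argument and
# read params['learning_rate'] instead of the hard-coded 0.1.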
  def testPredictInputFnWithQueue(self):
    est = estimator.Estimator(model_fn=linear_model_fn)
    boston = base.load_boston()
    est.fit(input_fn=boston_input_fn, steps=1)
    input_fn = functools.partial(boston_input_fn_with_queue, num_epochs=2)
    output = list(est.predict(input_fn=input_fn))
    self.assertEqual(len(output), boston.target.shape[0] * 2)
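# `boston_input_fn_with_queue` is referenced above but not defined in this
# listing. A plausible sketch, assuming _BOSTON_INPUT_DIM = 13 and the
# epoch-limited boston_input_fn defined later: it returns the same tensors
# while registering a dummy queue runner so prediction exercises the
# queue-handling code path.
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.training import queue_runner_impl

_BOSTON_INPUT_DIM = 13  # Number of features per Boston housing example.


def boston_input_fn_with_queue(num_epochs=None):
  features, labels = boston_input_fn(num_epochs=num_epochs)

  # Create a minimal queue runner so there is a queue to start and join.
  fake_queue = data_flow_ops.FIFOQueue(30, dtypes.int32)
  queue_runner = queue_runner_impl.QueueRunner(fake_queue,
                                               [constant_op.constant(0)])
  queue_runner_impl.add_queue_runner(queue_runner)
  return features, labels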
  def testContinueTrainingDictionaryInput(self):
    boston = base.load_boston()
    output_dir = tempfile.mkdtemp()
    est = estimator.Estimator(model_fn=linear_model_fn, model_dir=output_dir)
    boston_input = {'input': boston.data}
    float64_target = {'labels': boston.target.astype(np.float64)}
    est.fit(x=boston_input, y=float64_target, steps=50)
    scores = est.evaluate(
        x=boston_input,
        y=float64_target,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    del est
    # Create another estimator object with the same output dir.
    est2 = estimator.Estimator(model_fn=linear_model_fn, model_dir=output_dir)

    # Check we can evaluate and predict.
    scores2 = est2.evaluate(
        x=boston_input,
        y=float64_target,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    self.assertAllClose(scores2['MSE'], scores['MSE'])
    predictions = np.array(list(est2.predict(x=boston_input)))
    other_score = _sklearn.mean_squared_error(predictions,
                                              float64_target['labels'])
    self.assertAllClose(other_score, scores['MSE'])
  def testContinueTraining(self):
    boston = base.load_boston()
    output_dir = tempfile.mkdtemp()
    est = estimator.SKCompat(
        estimator.Estimator(
            model_fn=linear_model_fn, model_dir=output_dir))
    float64_labels = boston.target.astype(np.float64)
    est.fit(x=boston.data, y=float64_labels, steps=50)
    scores = est.score(
        x=boston.data,
        y=float64_labels,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    del est
    # Create another estimator object with the same output dir.
    est2 = estimator.SKCompat(
        estimator.Estimator(
            model_fn=linear_model_fn, model_dir=output_dir))

    # Check we can evaluate and predict.
    scores2 = est2.score(
        x=boston.data,
        y=float64_labels,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    self.assertAllClose(scores['MSE'], scores2['MSE'])
    predictions = np.array(list(est2.predict(x=boston.data)))
    other_score = _sklearn.mean_squared_error(predictions, float64_labels)
    self.assertAllClose(scores['MSE'], other_score)

    # Check we can keep training.
    est2.fit(x=boston.data, y=float64_labels, steps=100)
    scores3 = est2.score(
        x=boston.data,
        y=float64_labels,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    self.assertLess(scores3['MSE'], scores['MSE'])
  def testBostonDNN(self):
    boston = base.load_boston()
    feature_columns = [feature_column.real_valued_column("", dimension=13)]
    regressor = dnn.DNNRegressor(
        feature_columns=feature_columns,
        hidden_units=[10, 20, 10],
        config=run_config.RunConfig(tf_random_seed=1))
    regressor.fit(boston.data,
                  boston.target,
                  steps=300,
                  batch_size=boston.data.shape[0])
    weights = ([regressor.get_variable_value("dnn/hiddenlayer_0/weights")] +
               [regressor.get_variable_value("dnn/hiddenlayer_1/weights")] +
               [regressor.get_variable_value("dnn/hiddenlayer_2/weights")] +
               [regressor.get_variable_value("dnn/logits/weights")])
    self.assertEqual(weights[0].shape, (13, 10))
    self.assertEqual(weights[1].shape, (10, 20))
    self.assertEqual(weights[2].shape, (20, 10))
    self.assertEqual(weights[3].shape, (10, 1))

    biases = ([regressor.get_variable_value("dnn/hiddenlayer_0/biases")] +
              [regressor.get_variable_value("dnn/hiddenlayer_1/biases")] +
              [regressor.get_variable_value("dnn/hiddenlayer_2/biases")] +
              [regressor.get_variable_value("dnn/logits/biases")])
    self.assertEqual(biases[0].shape, (10,))
    self.assertEqual(biases[1].shape, (20,))
    self.assertEqual(biases[2].shape, (10,))
    self.assertEqual(biases[3].shape, (1,))
def boston_input_fn(num_epochs=None):
  boston = base.load_boston()
  features = input_lib.limit_epochs(
      array_ops.reshape(
          constant_op.constant(boston.data), [-1, _BOSTON_INPUT_DIM]),
      num_epochs=num_epochs)
  labels = array_ops.reshape(constant_op.constant(boston.target), [-1, 1])
  return features, labels
def boston_eval_fn():
  boston = base.load_boston()
  n_examples = len(boston.target)
  features = array_ops.reshape(
      constant_op.constant(boston.data), [n_examples, _BOSTON_INPUT_DIM])
  labels = array_ops.reshape(
      constant_op.constant(boston.target), [n_examples, 1])
  return array_ops.concat([features, features], 0), array_ops.concat(
      [labels, labels], 0)
def boston_input_fn():
  boston = base.load_boston()
  features = math_ops.cast(
      array_ops.reshape(constant_op.constant(boston.data), [-1, 13]),
      dtypes.float32)
  labels = math_ops.cast(
      array_ops.reshape(constant_op.constant(boston.target), [-1, 1]),
      dtypes.float32)
  return features, labels
  def testWithModelFnOps(self):
    """Test for model_fn that returns `ModelFnOps`."""
    est = estimator.Estimator(model_fn=linear_model_fn_with_model_fn_ops)
    boston = base.load_boston()
    est.fit(input_fn=boston_input_fn, steps=1)
    input_fn = functools.partial(boston_input_fn, num_epochs=1)
    scores = est.evaluate(input_fn=input_fn, steps=1)
    self.assertIn('loss', scores.keys())
    output = list(est.predict(input_fn=input_fn))
    self.assertEqual(len(output), boston.target.shape[0])
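# A minimal sketch of what the `linear_model_fn_with_model_fn_ops` helper used
# above could look like (an assumption; the original is not shown here): the
# same linear model as `linear_model_fn`, but wrapped in the structured
# `ModelFnOps` return type instead of a bare tuple.
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import models
from tensorflow.contrib.learn.python.learn.estimators import model_fn


def linear_model_fn_with_model_fn_ops(features, labels, mode):
  """Same as linear_model_fn, but returns `ModelFnOps`."""
  prediction, loss = models.linear_regression_zero_init(features, labels)
  train_op = optimizers.optimize_loss(
      loss,
      variables.get_global_step(),
      optimizer='Adagrad',
      learning_rate=0.1)
  return model_fn.ModelFnOps(
      mode=mode, predictions=prediction, loss=loss, train_op=train_op)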
def _get_regression_input_fns():
  boston = base.load_boston()
  data = boston.data.astype(np.float32)
  labels = boston.target.astype(np.int32)

  train_input_fn = numpy_io.numpy_input_fn(
      x=data, y=labels, batch_size=506, num_epochs=None, shuffle=False)

  predict_input_fn = numpy_io.numpy_input_fn(
      x=data[:1,], y=None, batch_size=1, num_epochs=1, shuffle=False)
  return train_input_fn, predict_input_fn
  def testPredictConstInputFn(self):
    est = estimator.Estimator(model_fn=linear_model_fn)
    boston = base.load_boston()
    est.fit(input_fn=boston_input_fn, steps=1)

    def input_fn():
      features = array_ops.reshape(
          constant_op.constant(boston.data), [-1, _BOSTON_INPUT_DIM])
      labels = array_ops.reshape(constant_op.constant(boston.target), [-1, 1])
      return features, labels

    output = list(est.predict(input_fn=input_fn))
    self.assertEqual(len(output), boston.target.shape[0])
  def testBostonAll(self):
    boston = base.load_boston()
    est = estimator.SKCompat(estimator.Estimator(model_fn=linear_model_fn))
    float64_labels = boston.target.astype(np.float64)
    est.fit(x=boston.data, y=float64_labels, steps=100)
    scores = est.score(
        x=boston.data,
        y=float64_labels,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    predictions = np.array(list(est.predict(x=boston.data)))
    other_score = _sklearn.mean_squared_error(predictions, boston.target)
    self.assertAllClose(scores['MSE'], other_score)
    self.assertTrue('global_step' in scores)
    self.assertEqual(100, scores['global_step'])
  def testBostonAllDictionaryInput(self):
    boston = base.load_boston()
    est = estimator.Estimator(model_fn=linear_model_fn)
    boston_input = {'input': boston.data}
    float64_target = {'labels': boston.target.astype(np.float64)}
    est.fit(x=boston_input, y=float64_target, steps=100)
    scores = est.evaluate(
        x=boston_input,
        y=float64_target,
        metrics={'MSE': metric_ops.streaming_mean_squared_error})
    predictions = np.array(list(est.predict(x=boston_input)))
    other_score = _sklearn.mean_squared_error(predictions, boston.target)
    self.assertAllClose(other_score, scores['MSE'])
    self.assertTrue('global_step' in scores)
    self.assertEqual(scores['global_step'], 100)
  def testDNNRegression(self):
    my_seed = 42
    config = run_config.RunConfig(tf_random_seed=my_seed)
    boston = base.load_boston()
    columns = [feature_column.real_valued_column('', dimension=13)]

    with ops.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      variables.create_global_step()
      regressor1 = dnn.DNNRegressor(
          hidden_units=[10],
          feature_columns=columns,
          optimizer=_NULL_OPTIMIZER,
          config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)

    with ops.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      variables.create_global_step()
      regressor2 = dnn.DNNRegressor(
          hidden_units=[10],
          feature_columns=columns,
          optimizer=_NULL_OPTIMIZER,
          config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)

    weights1 = ([regressor1.get_variable_value('dnn/hiddenlayer_0/weights')] +
                [regressor1.get_variable_value('dnn/logits/weights')])
    weights2 = ([regressor2.get_variable_value('dnn/hiddenlayer_0/weights')] +
                [regressor2.get_variable_value('dnn/logits/weights')])
    for w1, w2 in zip(weights1, weights2):
      self.assertAllClose(w1, w2)

    biases1 = ([regressor1.get_variable_value('dnn/hiddenlayer_0/biases')] +
               [regressor1.get_variable_value('dnn/logits/biases')])
    biases2 = ([regressor2.get_variable_value('dnn/hiddenlayer_0/biases')] +
               [regressor2.get_variable_value('dnn/logits/biases')])
    for b1, b2 in zip(biases1, biases2):
      self.assertAllClose(b1, b2)
    self.assertAllClose(
        list(regressor1.predict_scores(
            boston.data, as_iterable=True)),
        list(regressor2.predict_scores(
            boston.data, as_iterable=True)),
        atol=1e-05)
  def testRegression(self):
    """Tests multi-class classification using matrix data as input."""

    hparams = tensor_forest.ForestHParams(
        num_trees=3,
        max_nodes=1000,
        num_classes=1,
        num_features=13,
        regression=True,
        split_after_samples=20)

    regressor = random_forest.TensorForestEstimator(hparams.fill())

    boston = base.load_boston()
    data = boston.data.astype(np.float32)
    labels = boston.target.astype(np.int32)

    regressor.fit(x=data, y=labels, steps=100, batch_size=50)
    regressor.evaluate(x=data, y=labels, steps=10)
# -*- coding: utf-8 -*-

from tensorflow.contrib.learn.python.learn.datasets import base

iris_data, iris_label = base.load_iris()
house_data, house_label = base.load_boston()
  def testEstimatorParams(self):
    boston = base.load_boston()
    est = estimator.SKCompat(
        estimator.Estimator(
            model_fn=linear_model_params_fn, params={'learning_rate': 0.01}))
    est.fit(x=boston.data, y=boston.target, steps=100)
  def testPredict(self):
    est = estimator.Estimator(model_fn=linear_model_fn)
    boston = base.load_boston()
    est.fit(input_fn=boston_input_fn, steps=1)
    output = list(est.predict(x=boston.data, batch_size=10))
    self.assertEqual(len(output), boston.target.shape[0])