Example 1
    def testWithFeatureColumns(self):
        head_fn = head_lib._multi_class_head_with_softmax_cross_entropy_loss(
            n_classes=3,
            loss_reduction=losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)

        hparams = tensor_forest.ForestHParams(num_trees=3,
                                              max_nodes=1000,
                                              num_classes=3,
                                              num_features=4,
                                              split_after_samples=20,
                                              inference_tree_paths=True)

        est = random_forest.CoreTensorForestEstimator(
            hparams.fill(),
            head=head_fn,
            feature_columns=[core_feature_column.numeric_column('x', shape=4)])

        iris = base.load_iris()
        data = {'x': iris.data.astype(np.float32)}
        labels = iris.target.astype(np.int32)

        input_fn = numpy_io.numpy_input_fn(x=data,
                                           y=labels,
                                           batch_size=150,
                                           num_epochs=None,
                                           shuffle=False)

        est.train(input_fn=input_fn, steps=100)
        res = est.evaluate(input_fn=input_fn, steps=1)

        self.assertEqual(1.0, res['accuracy'])
        self.assertAllClose(0.55144483, res['loss'])
Example 2
 def testIrisDNN(self):
   iris = base.load_iris()
   feature_columns = [feature_column.real_valued_column("", dimension=4)]
   classifier = dnn.DNNClassifier(
       feature_columns=feature_columns,
       hidden_units=[10, 20, 10],
       n_classes=3,
       config=run_config.RunConfig(tf_random_seed=1))
   classifier.fit(iris.data, iris.target, max_steps=200)
   variable_names = classifier.get_variable_names()
   self.assertEqual(
       classifier.get_variable_value("dnn/hiddenlayer_0/weights").shape,
       (4, 10))
   self.assertEqual(
       classifier.get_variable_value("dnn/hiddenlayer_1/weights").shape,
       (10, 20))
   self.assertEqual(
       classifier.get_variable_value("dnn/hiddenlayer_2/weights").shape,
       (20, 10))
   self.assertEqual(
       classifier.get_variable_value("dnn/logits/weights").shape, (10, 3))
   self.assertIn("dnn/hiddenlayer_0/biases", variable_names)
   self.assertIn("dnn/hiddenlayer_1/biases", variable_names)
   self.assertIn("dnn/hiddenlayer_2/biases", variable_names)
   self.assertIn("dnn/logits/biases", variable_names)
Example 3
    def testAdditionalOutputs(self):
        """Tests multi-class classification using matrix data as input."""
        hparams = tensor_forest.ForestHParams(num_trees=1,
                                              max_nodes=100,
                                              num_classes=3,
                                              num_features=4,
                                              split_after_samples=20,
                                              inference_tree_paths=True)
        classifier = random_forest.CoreTensorForestEstimator(
            hparams.fill(), keys_column='keys', include_all_in_serving=True)

        iris = base.load_iris()
        data = iris.data.astype(np.float32)
        labels = iris.target.astype(np.int32)

        input_fn = numpy_io.numpy_input_fn(
            x={
                'x': data,
                'keys': np.arange(len(iris.data)).reshape(150, 1)
            },
            y=labels,
            batch_size=10,
            num_epochs=1,
            shuffle=False)

        classifier.train(input_fn=input_fn, steps=100)
        predictions = list(classifier.predict(input_fn=input_fn))
        # Check that each prediction contains the keys, tree paths and
        # prediction variance.
        for pred in predictions:
            self.assertTrue('keys' in pred)
            self.assertTrue('tree_paths' in pred)
            self.assertTrue('prediction_variance' in pred)
Example 4
def _iris_data_input_fn():
  # Converts iris data to a logistic regression problem.
  iris = base.load_iris()
  ids = np.where((iris.target == 0) | (iris.target == 1))
  features = constant_op.constant(iris.data[ids], dtype=dtypes.float32)
  labels = constant_op.constant(iris.target[ids], dtype=dtypes.float32)
  labels = array_ops.reshape(labels, labels.get_shape().concatenate(1))
  return features, labels
Example 5
 def _input_fn():
     iris = base.load_iris()
     return {
         'feature': constant_op.constant(iris.data,
                                         dtype=dtypes.float32)
     }, constant_op.constant(iris.target,
                             shape=[150],
                             dtype=dtypes.int32)
Example 6
 def testMultiClass_NpMatrixData(self):
     """Tests multi-class classification using numpy matrix data as input."""
     iris = base.load_iris()
     train_x = iris.data
     train_y = iris.target
     classifier = debug.DebugClassifier(n_classes=3)
     classifier.fit(x=train_x, y=train_y, steps=200)
     scores = classifier.evaluate(x=train_x, y=train_y, steps=1)
     self._assertInRange(0.0, 1.0, scores['accuracy'])
Example 7
 def testDNNDropout0(self):
   # Dropout prob == 0.
   iris = base.load_iris()
   feature_columns = [feature_column.real_valued_column("", dimension=4)]
   classifier = dnn.DNNClassifier(
       feature_columns=feature_columns,
       hidden_units=[10, 20, 10],
       n_classes=3,
       dropout=0.0,
       config=run_config.RunConfig(tf_random_seed=1))
   classifier.fit(iris.data, iris.target, max_steps=200)
Example 8
def _get_classification_input_fns():
    iris = base.load_iris()
    data = iris.data.astype(np.float32)
    labels = iris.target.astype(np.int32)

    train_input_fn = numpy_io.numpy_input_fn(x=data,
                                             y=labels,
                                             batch_size=150,
                                             num_epochs=None,
                                             shuffle=False)

    predict_input_fn = numpy_io.numpy_input_fn(x=data[:1],
                                               y=None,
                                               batch_size=1,
                                               num_epochs=1,
                                               shuffle=False)
    return train_input_fn, predict_input_fn
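A usage sketch (assumed, mirroring Examples 1 and 3): the pair of input functions is consumed by an estimator such as CoreTensorForestEstimator, one for training and one for prediction; hparams here stands in for a ForestHParams object built as in those examples.

    train_input_fn, predict_input_fn = _get_classification_input_fns()
    est = random_forest.CoreTensorForestEstimator(hparams.fill())
    est.train(input_fn=train_input_fn, steps=100)
    predictions = list(est.predict(input_fn=predict_input_fn))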
Example 9
def iris_input_multiclass_fn():
    iris = base.load_iris()
    return {
        'feature': constant_op.constant(iris.data, dtype=dtypes.float32)
    }, constant_op.constant(iris.target, shape=(150, 1), dtype=dtypes.int32)
Example 10
def prepare_iris_data_for_logistic_regression():
    # Converts iris data to a logistic regression problem.
    iris = base.load_iris()
    ids = np.where((iris.target == 0) | (iris.target == 1))
    return base.Dataset(data=iris.data[ids], target=iris.target[ids])
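A consumption sketch (assumed, following the pattern of Example 6): the two-class Dataset returned above can be fed directly to an estimator's fit/evaluate interface as numpy arrays.

    dataset = prepare_iris_data_for_logistic_regression()
    classifier = debug.DebugClassifier(n_classes=2)
    classifier.fit(x=dataset.data, y=dataset.target, steps=200)
    scores = classifier.evaluate(x=dataset.data, y=dataset.target, steps=1)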