Example #1
0
    def test_column_order(self):
        """Weight variables come out in the same (apparently name-sorted)
        order no matter how the feature columns are ordered by the caller."""
        price_a = fc.numeric_column('price_a')
        price_b = fc.numeric_column('price_b')
        wire_cast = fc.categorical_column_with_hash_bucket('wire_cast', 4)
        # Both orderings must produce identically ordered collections.
        for column_order in ([price_a, wire_cast, price_b],
                             [wire_cast, price_b, price_a]):
            with ops.Graph().as_default() as graph:
                input_features = {
                    'price_a': [[1.]],
                    'price_b': [[3.]],
                    'wire_cast':
                    sparse_tensor.SparseTensor(values=['omar'],
                                               indices=[[0, 0]],
                                               dense_shape=[1, 1])
                }
                fc.make_linear_model(input_features, column_order,
                                     weight_collections=['my-vars'])
                collected = graph.get_collection('my-vars')
                self.assertIn('price_a', collected[0].name)
                self.assertIn('price_b', collected[1].name)
                self.assertIn('wire_cast', collected[2].name)
Example #2
0
 def test_dense_trainable_false(self):
     """With trainable=False no variable lands in TRAINABLE_VARIABLES."""
     price = fc.numeric_column('price')
     with ops.Graph().as_default() as graph:
         fc.make_linear_model(
             {'price': constant_op.constant([[1.], [5.]])}, [price],
             trainable=False)
         self.assertEqual(
             [], graph.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
Example #3
0
 def test_raises_if_duplicate_name(self):
     """Two feature columns sharing one name must be rejected."""
     duplicates = [fc.numeric_column('a'), fc.numeric_column('a')]
     with self.assertRaisesRegexp(
             ValueError, 'Duplicate feature column name found for columns'):
         fc.make_linear_model(features={'a': [[0]]},
                              feature_columns=duplicates)
Example #4
0
 def test_dense_trainable_default(self):
     """By default both the bias and the column weights are trainable."""
     price = fc.numeric_column('price')
     with ops.Graph().as_default() as graph:
         fc.make_linear_model(
             {'price': constant_op.constant([[1.], [5.]])}, [price])
         trainable = graph.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
         self.assertIn(get_linear_model_bias(), trainable)
         self.assertIn(get_linear_model_column_var(price), trainable)
Example #5
0
 def test_sparse_trainable_false(self):
     """trainable=False with a sparse column creates no trainable vars."""
     wire_cast = fc.categorical_column_with_hash_bucket('wire_cast', 4)
     with ops.Graph().as_default() as graph:
         sparse_input = sparse_tensor.SparseTensor(values=['omar'],
                                                   indices=[[0, 0]],
                                                   dense_shape=[1, 1])
         fc.make_linear_model({'wire_cast': sparse_input}, [wire_cast],
                              trainable=False)
         self.assertEqual(
             [], graph.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
Example #6
0
 def test_dense_collection(self):
     """weight_collections receives both the bias and the dense weights."""
     price = fc.numeric_column('price')
     with ops.Graph().as_default() as graph:
         fc.make_linear_model(
             {'price': constant_op.constant([[1.], [5.]])}, [price],
             weight_collections=['my-vars'])
         collected = graph.get_collection('my-vars')
         self.assertIn(get_linear_model_bias(), collected)
         self.assertIn(get_linear_model_column_var(price), collected)
Example #7
0
 def test_sparse_collection(self):
     """weight_collections receives the bias and sparse-column weights."""
     wire_cast = fc.categorical_column_with_hash_bucket('wire_cast', 4)
     with ops.Graph().as_default() as graph:
         sparse_input = sparse_tensor.SparseTensor(values=['omar'],
                                                   indices=[[0, 0]],
                                                   dense_shape=[1, 1])
         fc.make_linear_model({'wire_cast': sparse_input}, [wire_cast],
                              weight_collections=['my-vars'])
         collected = graph.get_collection('my-vars')
         self.assertIn(get_linear_model_bias(), collected)
         self.assertIn(get_linear_model_column_var(wire_cast), collected)
Example #8
0
 def test_raises_if_shape_mismatch(self):
     """Feeding [2, 1] data into a shape-2 column fails when evaluated."""
     price = fc.numeric_column('price', shape=2)
     with ops.Graph().as_default():
         predictions = fc.make_linear_model(
             {'price': constant_op.constant([[1.], [5.]])}, [price])
         # The mismatch surfaces only at run time, inside the session.
         with _initialized_session():
             with self.assertRaisesRegexp(Exception,
                                          'requested shape has 4'):
                 predictions.eval()
Example #9
0
    def test_should_be_dense_or_categorical_column(self):
        """Columns implementing neither _DenseColumn nor _CategoricalColumn
        must be rejected by make_linear_model."""

        class NotSupportedColumn(fc._FeatureColumn):
            """Bare _FeatureColumn that supports neither interface."""

            @property
            def name(self):
                return 'NotSupportedColumn'

            def _transform_feature(self, transformation_cache):
                pass

            @property
            def _parse_example_config(self):
                pass

        with self.assertRaisesRegexp(
                ValueError,
                'must be either a _DenseColumn or _CategoricalColumn'):
            fc.make_linear_model(features={'a': [[0]]},
                                 feature_columns=[NotSupportedColumn()])
Example #10
0
 def test_dense_multi_dimension(self):
     """A shape-2 dense column produces a [2, 1] weight variable."""
     price = fc.numeric_column('price', shape=2)
     with ops.Graph().as_default():
         predictions = fc.make_linear_model(
             {'price': constant_op.constant([[1., 2.], [5., 6.]])}, [price])
         price_weights = get_linear_model_column_var(price)
         with _initialized_session() as sess:
             # Weights are zero-initialized.
             self.assertAllClose([[0.], [0.]], price_weights.eval())
             sess.run(price_weights.assign([[10.], [100.]]))
             # Row 0: 1*10 + 2*100 = 210; row 1: 5*10 + 6*100 = 650.
             self.assertAllClose([[210.], [650.]], predictions.eval())
Example #11
0
 def test_dense_bias(self):
     """Predictions equal price * weight + bias."""
     price = fc.numeric_column('price')
     with ops.Graph().as_default():
         predictions = fc.make_linear_model(
             {'price': constant_op.constant([[1.], [5.]])}, [price])
         bias = get_linear_model_bias()
         price_weights = get_linear_model_column_var(price)
         with _initialized_session() as sess:
             # Bias starts at zero.
             self.assertAllClose([0.], bias.eval())
             sess.run(price_weights.assign([[10.]]))
             sess.run(bias.assign([5.]))
             # 1*10 + 5 = 15; 5*10 + 5 = 55.
             self.assertAllClose([[15.], [55.]], predictions.eval())
Example #12
0
 def test_dense_multi_dimension_multi_output(self):
     """units=3 with a shape-2 column yields a [2, 3] weight matrix."""
     price = fc.numeric_column('price', shape=2)
     with ops.Graph().as_default():
         predictions = fc.make_linear_model(
             {'price': constant_op.constant([[1., 2.], [5., 6.]])}, [price],
             units=3)
         bias = get_linear_model_bias()
         price_weights = get_linear_model_column_var(price)
         with _initialized_session() as sess:
             # Bias and weights are zero-initialized.
             self.assertAllClose([0., 0., 0.], bias.eval())
             self.assertAllClose([[0., 0., 0.], [0., 0., 0.]],
                                 price_weights.eval())
             sess.run(price_weights.assign([[1., 2., 3.],
                                            [10., 100., 1000.]]))
             sess.run(bias.assign([2., 3., 4.]))
             # e.g. output[0][0] = 1*1 + 2*10 + 2 = 23.
             self.assertAllClose([[23., 205., 2007.], [67., 613., 6019.]],
                                 predictions.eval())
Example #13
0
 def test_sparse_combiner(self):
     """sparse_combiner='mean' averages the weights of active buckets."""
     wire_cast = fc.categorical_column_with_hash_bucket('wire_cast', 4)
     with ops.Graph().as_default():
         sparse_input = sparse_tensor.SparseTensor(
             values=['omar', 'stringer', 'marlo'],  # hashed to = [2, 0, 3]
             indices=[[0, 0], [1, 0], [1, 1]],
             dense_shape=[2, 2])
         predictions = fc.make_linear_model({'wire_cast': sparse_input},
                                            [wire_cast],
                                            sparse_combiner='mean')
         bias = get_linear_model_bias()
         wire_weights = get_linear_model_column_var(wire_cast)
         with _initialized_session() as sess:
             sess.run(wire_weights.assign([[10.], [100.], [1000.],
                                           [10000.]]))
             sess.run(bias.assign([5.]))
             # Row 0: 1000 + 5; row 1: (10 + 10000) / 2 + 5 = 5010.
             self.assertAllClose([[1005.], [5010.]], predictions.eval())
Example #14
0
 def test_dense_multi_column(self):
     """Each dense column gets its own independent weight variable."""
     price1 = fc.numeric_column('price1', shape=2)
     price2 = fc.numeric_column('price2')
     with ops.Graph().as_default():
         predictions = fc.make_linear_model(
             {
                 'price1': constant_op.constant([[1., 2.], [5., 6.]]),
                 'price2': constant_op.constant([[3.], [4.]])
             }, [price1, price2])
         bias = get_linear_model_bias()
         price1_weights = get_linear_model_column_var(price1)
         price2_weights = get_linear_model_column_var(price2)
         with _initialized_session() as sess:
             # Everything starts zero-initialized.
             self.assertAllClose([0.], bias.eval())
             self.assertAllClose([[0.], [0.]], price1_weights.eval())
             self.assertAllClose([[0.]], price2_weights.eval())
             self.assertAllClose([[0.], [0.]], predictions.eval())
             sess.run(price1_weights.assign([[10.], [100.]]))
             sess.run(price2_weights.assign([[1000.]]))
             sess.run(bias.assign([7.]))
             # Row 0: 1*10 + 2*100 + 3*1000 + 7 = 3217.
             self.assertAllClose([[3217.], [4657.]], predictions.eval())
Example #15
0
 def test_sparse_multi_output(self):
     """units=3 with a 4-bucket sparse column yields a [4, 3] weight matrix."""
     wire_cast = fc.categorical_column_with_hash_bucket('wire_cast', 4)
     with ops.Graph().as_default():
         sparse_input = sparse_tensor.SparseTensor(
             values=['omar', 'stringer', 'marlo'],  # hashed to = [2, 0, 3]
             indices=[[0, 0], [1, 0], [1, 1]],
             dense_shape=[2, 2])
         predictions = fc.make_linear_model({'wire_cast': sparse_input},
                                            [wire_cast], units=3)
         bias = get_linear_model_bias()
         wire_weights = get_linear_model_column_var(wire_cast)
         with _initialized_session() as sess:
             # Bias and weights are zero-initialized.
             self.assertAllClose([0., 0., 0.], bias.eval())
             self.assertAllClose([[0.] * 3] * 4, wire_weights.eval())
             sess.run(wire_weights.assign([[10., 11., 12.],
                                           [100., 110., 120.],
                                           [1000., 1100., 1200.],
                                           [10000., 11000., 12000.]]))
             sess.run(bias.assign([5., 6., 7.]))
             self.assertAllClose(
                 [[1005., 1106., 1207.], [10015., 11017., 12019.]],
                 predictions.eval())
Example #16
0
 def test_should_be_feature_column(self):
     """Anything that is not a _FeatureColumn must be rejected."""
     with self.assertRaisesRegexp(ValueError, 'must be a _FeatureColumn'):
         fc.make_linear_model({'a': [[0]]}, 'NotSupported')
Example #17
0
 def test_does_not_support_dict_columns(self):
     """Passing feature_columns as a dict must raise a ValueError."""
     with self.assertRaisesRegexp(
             ValueError,
             'Expected feature_columns to be iterable, found dict.'):
         fc.make_linear_model({'a': [[0]]},
                              {'a': fc.numeric_column('a')})