Example #1
    def test_partitioned_dynamic_embedding_lookup_2D_input(
            self, sigma_dimension):
        emb_dim = 5 + sigma_dimension
        config = test_util.default_de_config(emb_dim, [1] * emb_dim)
        emb, sigma = sparse_features._partitioned_dynamic_embedding_lookup(
            [['input1', ''], ['input2', 'input3']],
            config,
            5,
            sigma_dimension,
            'feature_name1_%d' % sigma_dimension,
            service_address=self._kbs_address)

        if sigma_dimension > 0:
            self.assertEqual((2, 2, sigma_dimension), sigma.shape)
            self.assertEqual((2, 2, 5), emb.shape)
            self.assertAllClose(
                [[[1] * sigma_dimension, [0] * sigma_dimension],
                 [[1] * sigma_dimension, [1] * sigma_dimension]],
                sigma.numpy())
            self.assertAllClose([[[1] * 5, [0] * 5], [[1] * 5, [1] * 5]],
                                emb.numpy())
        else:
            self.assertAllClose([[[1] * 5, [0] * 5], [[1] * 5, [1] * 5]],
                                emb.numpy())
            self.assertIsNone(sigma)
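
The assertions imply that the lookup produces one emb_dim = 5 + sigma_dimension vector per key, split into a 5-dim embedding and a sigma tail, with the empty-string key mapping to zeros. Below is a minimal NumPy sketch of that split; the slicing rule is an assumption inferred from the asserted shapes, not taken from the op's implementation:

import numpy as np

sigma_dimension = 2                          # one of the parameterized cases
full = np.ones((2, 2, 5 + sigma_dimension))  # one vector per (row, key) pair
full[0, 1] = 0                               # the '' key yields all zeros
emb, sigma = full[..., :5], full[..., 5:]    # assumed split point
print(emb.shape, sigma.shape)                # (2, 2, 5) (2, 2, 2)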
Example #2
    def test_compute_sampled_logits_grad(self):
        cs_config = cs_config_builder.build_candidate_sampler_config(
            cs_config_builder.negative_sampler(unique=True,
                                               algorithm='UNIFORM'))
        de_config = test_util.default_de_config(3, cs_config=cs_config)

        # Add a few embeddings into knowledge bank.
        de_ops.dynamic_embedding_update(['key1', 'key2', 'key3'],
                                        tf.constant([[1.0, 2.0, 3.0],
                                                     [4.0, 5.0, 6.0],
                                                     [7.0, 8.0, 9.0]]),
                                        de_config,
                                        'emb',
                                        service_address=self._kbs_address)

        # A simple one-layer NN model.
        # Input data: x = [[1, 2], [3, 4]].
        # Weights from the input to the logit output layer: W = [[1, 2, 3], [4, 5, 6]].
        # Input activation at the output layer: i = x * W = [[9, 12, 15], [19, 26, 33]].
        # The logits are i * E^T, where E holds the embeddings of the output
        # keys, i.e., E = [[1, 2, 3], [4, 5, 6], [7, 8, 9]].
        # The logits therefore become [[78, 186, 294], [170, 404, 638]].
        #
        # If we define the loss to be L = tf.reduce_sum(logits), then
        # dL/dE = sum_by_key(i) = [[28, 38, 48], [28, 38, 48], [28, 38, 48]].
        # So the expected new embeddings become
        # E - 0.1 * dL/dE = [[-1.8, -1.8, -1.8], [1.2, 1.2, 1.2], [4.2, 4.2, 4.2]].
        weights = tf.Variable([[1, 2, 3], [4, 5, 6]], dtype=tf.float32)
        inputs = tf.constant([[1.0, 2.0], [3.0, 4.0]])

        with tf.GradientTape() as tape:
            logits, _, _, _, _ = cs_ops.compute_sampled_logits(
                [['key1', ''], ['key2', 'key3']],
                tf.matmul(inputs, weights),
                3,
                de_config,
                'emb',
                service_address=self._kbs_address)
            loss = tf.reduce_sum(logits)

        # Compute the gradient; this also applies the gradient descent update
        # to the embeddings in the knowledge bank.
        grads = tape.gradient(loss, weights)

        # Look up the embeddings updated by the knowledge bank.
        updated_embedding = de_ops.dynamic_embedding_lookup(
            ['key1', 'key2', 'key3'],
            de_config,
            'emb',
            service_address=self._kbs_address)
        self.assertAllClose(
            updated_embedding,
            [[-1.8, -1.8, -1.8], [1.2, 1.2, 1.2], [4.2, 4.2, 4.2]])

        # The gradient w.r.t. the weights W is dL/dW = x^T * dL/di, where each
        # row of dL/di = sum_over_keys(E) = [12, 15, 18]. Since the column sums
        # of x are [4, 6], dL/dW = [[4], [6]] * [12, 15, 18]
        #                        = [[48, 60, 72], [72, 90, 108]].
        self.assertAllClose(grads, [[48, 60, 72], [72, 90, 108]])
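
The arithmetic in the comments can be verified outside the test with plain NumPy. This standalone sketch reproduces the forward pass and both gradients by hand; all names here are illustrative, not part of the library:

import numpy as np

x = np.array([[1.0, 2.0], [3.0, 4.0]])             # inputs
W = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])   # input-to-logit weights
E = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])  # embeddings

i = x @ W                                # [[9, 12, 15], [19, 26, 33]]
logits = i @ E.T                         # [[78, 186, 294], [170, 404, 638]]

# L = sum(logits): every key's gradient accumulates i over the batch.
dL_dE = np.tile(i.sum(axis=0), (3, 1))   # three rows of [28, 38, 48]
print(E - 0.1 * dL_dE)                   # [[-1.8 ...], [1.2 ...], [4.2 ...]]

# dL/dW = x^T @ dL/di, where dL/di sums E over the keys for each example.
dL_di = np.tile(E.sum(axis=0), (2, 1))   # two rows of [12, 15, 18]
print(x.T @ dL_di)                       # [[48, 60, 72], [72, 90, 108]]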
Example #3
    def test_single_feature_lookup_1D(self, sigma_dimension):
        emb_dim = 5 + sigma_dimension
        config = test_util.default_de_config(emb_dim, [1] * emb_dim)
        fea_embed = sparse_features.SparseFeatureEmbedding(
            config, {'fea': (5, sigma_dimension)},
            op_name='single_feature_%d' % sigma_dimension,
            service_address=self._kbs_address)
        embed, _, _, embed_map = fea_embed.lookup(['input1', 'input2'])
        if sigma_dimension > 0:
            self.assertEqual((2, 5), embed.shape)
        else:
            self.assertAllClose([[1] * 5, [1] * 5], embed)
        self.assertEqual(['fea'], list(embed_map.keys()))
        self.assertEqual((2, 5), embed_map['fea'].shape)
        self.assertEqual(['fea'], list(fea_embed._variable_map.keys()))
Example #4
    def test_embed_single_feature_1D_input(self, sigma_dimension):
        emb_dim = 5 + sigma_dimension
        config = test_util.default_de_config(emb_dim, [1] * emb_dim)
        emb, vc, sigma, input_embed, variables = sparse_features.embed_single_feature(
            ['input1', 'input2'],
            config,
            5,
            sigma_dimension,
            'feature_name2_%d' % sigma_dimension,
            service_address=self._kbs_address)

        if sigma_dimension > 0:
            self.assertIsNotNone(variables)
            self.assertEqual((2, 5), emb.shape)
            self.assertEqual(5, vc.shape)
            self.assertEqual((2, 1), sigma.shape)
            self.assertEqual((2, 5), input_embed.shape)
        else:
            self.assertAllClose([[1] * 5, [1] * 5], emb.numpy())
            self.assertIsNone(vc)
            self.assertIsNone(sigma)
            self.assertAllClose([[1] * 5, [1] * 5], input_embed)

        # Lookup again with given variables. Checks all values are the same.
        new_emb, new_vc, new_sigma, new_input_embed, variables = (
            sparse_features.embed_single_feature(
                ['input1', 'input2'],
                config,
                5,
                sigma_dimension,
                'feature_name2_%d' % sigma_dimension,
                variables=variables,
                service_address=self._kbs_address))
        if sigma_dimension > 0:
            self.assertIsNotNone(variables)
        self.assertAllClose(emb.numpy(), new_emb.numpy())
        if vc is not None:
            self.assertAllClose(vc.numpy(), new_vc.numpy())
        if sigma is not None:
            self.assertAllClose(sigma.numpy(), new_sigma.numpy())
        self.assertAllClose(input_embed.numpy(), new_input_embed.numpy())
Example #5
    def test_brute_force_topk(self):
        cs_config = cs_config_builder.build_candidate_sampler_config(
            cs_config_builder.brute_force_topk_sampler('DOT_PRODUCT'))
        de_config = test_util.default_de_config(2, cs_config=cs_config)
        # Add a few embeddings into knowledge bank.
        de_ops.dynamic_embedding_update(['key1', 'key2', 'key3'],
                                        tf.constant([[2.0, 4.0], [4.0, 8.0],
                                                     [8.0, 16.0]]),
                                        de_config,
                                        'emb',
                                        service_address=self._kbs_address)

        keys, logits = cs_ops.top_k([[1.0, 2.0], [-1.0, -2.0]],
                                    3,
                                    de_config,
                                    'emb',
                                    service_address=self._kbs_address)
        self.assertAllEqual(
            keys.numpy(),
            [[b'key3', b'key2', b'key1'], [b'key1', b'key2', b'key3']])
        self.assertAllClose(logits.numpy(), [[40, 20, 10], [-10, -20, -40]])
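
The expected ordering follows directly from the dot products between each query and the stored embeddings. A hypothetical NumPy check of the DOT_PRODUCT ranking:

import numpy as np

queries = np.array([[1.0, 2.0], [-1.0, -2.0]])
embeddings = np.array([[2.0, 4.0], [4.0, 8.0], [8.0, 16.0]])  # key1..key3
keys = np.array(['key1', 'key2', 'key3'])

scores = queries @ embeddings.T        # [[10, 20, 40], [-10, -20, -40]]
order = np.argsort(-scores, axis=1)    # column indices, descending by score
print(keys[order])                     # [['key3' 'key2' 'key1']
                                       #  ['key1' 'key2' 'key3']]
print(np.take_along_axis(scores, order, axis=1))  # [[40, 20, 10], [-10, -20, -40]]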
Example #6
    def test_multiple_feature_lookup_2D_without_sigma(self):
        config = test_util.default_de_config(5, [1] * 5)
        fea_embed = sparse_features.SparseFeatureEmbedding(
            config, {
                'fea1': (5, 0),
                'fea2': (5, 0)
            },
            op_name='multiple_feature3',
            service_address=self._kbs_address)
        embed, _, _, embed_map = fea_embed.lookup({
            'fea1': [['input1', ''], ['input2', '']],
            'fea2': [['input3', 'input5'], ['input4', 'input6']]
        })
        self.assertAllClose([[1] * 10, [1] * 10], embed.numpy())
        self.assertLen(embed_map.keys(), 2)
        self.assertIn('fea1', embed_map.keys())
        self.assertIn('fea2', embed_map.keys())
        self.assertEqual((2, 2, 5), embed_map['fea1'].shape)
        self.assertEqual((2, 2, 5), embed_map['fea2'].shape)
        self.assertLen(fea_embed._variable_map.keys(), 2)
        self.assertIn('fea1', fea_embed._variable_map.keys())
        self.assertIn('fea2', fea_embed._variable_map.keys())
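
The all-ones result for embed, despite the '' keys in fea1 contributing zero embeddings, is consistent with a masked average over the key axis followed by per-feature concatenation. The pooling rule in this sketch is inferred from the assertions above, not confirmed by the library:

import numpy as np

def masked_average(embed, mask):
    # embed: (batch, num_keys, dim); mask: (batch, num_keys), 0 for '' keys.
    total = (embed * mask[..., None]).sum(axis=1)
    count = np.maximum(mask.sum(axis=1, keepdims=True), 1)
    return total / count

fea1 = np.ones((2, 2, 5))
fea1[:, 1] = 0                              # second key of each row is ''
mask1 = np.array([[1.0, 0.0], [1.0, 0.0]])
fea2 = np.ones((2, 2, 5))                   # all four keys are valid
mask2 = np.ones((2, 2))

embed = np.concatenate(
    [masked_average(fea1, mask1), masked_average(fea2, mask2)], axis=1)
print(embed)                                # two rows of ten 1.0s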
Example #7
    def test_embed_single_feature_2D_input(self, sigma_dimension):
        emb_dim = 5 + sigma_dimension
        config = test_util.default_de_config(emb_dim, [1] * emb_dim)
        emb, vc, sigma, input_embed, var = sparse_features.embed_single_feature(
            [['input1', ''], ['input2', 'input3']],
            config,
            5,
            sigma_dimension,
            'feature_name3_%d' % sigma_dimension,
            service_address=self._kbs_address)

        if sigma_dimension > 0:
            self.assertIsNotNone(var)
            self.assertEqual((2, 5), emb.shape)
            self.assertEqual(5, vc.shape)
            self.assertEqual((2, 2), sigma.shape)
            self.assertEqual((2, 2, 5), input_embed.shape)
        else:
            self.assertAllClose([[1] * 5, [1] * 5], emb)
            self.assertIsNone(vc)
            self.assertIsNone(sigma)
            self.assertEqual((2, 2, 5), input_embed.shape)
Example #8
    def setUp(self):
        super(DynamicEmbeddingNeighborCacheTest, self).setUp()
        self._config = test_util.default_de_config(2)
        self._service_server = test_util.start_kbs_server()
Example #9
    def setUp(self):
        super(DynamicMemoryOpsTest, self).setUp()
        self._config = test_util.default_de_config(2)
        self._service_server = test_util.start_kbs_server()
        self._kbs_address = 'localhost:%d' % self._service_server.port()
        context.clear_all_collection()
Example #10
    def setUp(self):
        super(FeatureEmbeddingTest, self).setUp()
        self._config = test_util.default_de_config(2)
        self._service_server = test_util.start_kbs_server()
        self._kbs_address = 'localhost:%d' % self._service_server.port()
Example #11
    def test_compute_sampled_logits(self):
        cs_config = cs_config_builder.build_candidate_sampler_config(
            cs_config_builder.negative_sampler(unique=True,
                                               algorithm='UNIFORM'))
        de_config = test_util.default_de_config(3, cs_config=cs_config)

        # Add a few embeddings into knowledge bank.
        de_ops.dynamic_embedding_update(['key1', 'key2', 'key3'],
                                        tf.constant([[1.0, 2.0, 3.0],
                                                     [4.0, 5.0, 6.0],
                                                     [7.0, 8.0, 9.0]]),
                                        de_config,
                                        'emb',
                                        service_address=self._kbs_address)

        # Sample logits.
        logits, labels, keys, mask, weights = cs_ops.compute_sampled_logits(
            [['key1', ''], ['key2', 'key3']],
            tf.constant([[2.0, 4.0, 1], [-2.0, -4.0, 1]]),
            3,
            de_config,
            'emb',
            service_address=self._kbs_address)

        # Expected results:
        # - Example one returns one positive key {'key1'} and two negative keys
        #   {'key2', 'key3'}.
        # - Example two returns two positive keys {'key2', 'key3'} and one
        #   negative key {'key1'}.
        expected_weights = {
            b'key1': [1, 2, 3],
            b'key2': [4, 5, 6],
            b'key3': [7, 8, 9]
        }
        expected_labels = [{
            b'key1': 1,
            b'key2': 0,
            b'key3': 0
        }, {
            b'key1': 0,
            b'key2': 1,
            b'key3': 1
        }]
        # Logit for example one:
        # - 'key1': [2, 4, 1] * [1, 2, 3] = 13
        # - 'key2': [2, 4, 1] * [4, 5, 6] = 34
        # - 'key3': [2, 4, 1] * [7, 8, 9] = 55
        # Logit for example two:
        # - 'key1': [-2, -4, 1] * [1, 2, 3] = -7
        # - 'key2': [-2, -4, 1] * [4, 5, 6] = -22
        # - 'key3': [-2, -4, 1] * [7, 8, 9] = -37
        expected_logits = [{
            b'key1': 13,
            b'key2': 34,
            b'key3': 55
        }, {
            b'key1': -7,
            b'key2': -22,
            b'key3': -37
        }]
        # Check the mask, keys, weights, labels, and logits.
        for b in range(2):
            self.assertEqual(1, mask.numpy()[b])

            for key in {b'key1', b'key2', b'key3'}:
                self.assertIn(key, keys.numpy()[b])
            for i in range(3):
                key = keys.numpy()[b][i]
                self.assertAllClose(expected_weights[key],
                                    weights.numpy()[b][i])
                self.assertAllClose(expected_labels[b][key],
                                    labels.numpy()[b][i])
                self.assertAllClose(expected_logits[b][key],
                                    logits.numpy()[b][i])
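
The per-key logits in the comments are plain dot products of each input row with the stored embeddings; a standalone NumPy check:

import numpy as np

inputs = np.array([[2.0, 4.0, 1.0], [-2.0, -4.0, 1.0]])
E = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])  # key1..key3

print(inputs @ E.T)   # [[ 13.  34.  55.]
                      #  [ -7. -22. -37.]]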