def test_set_slot_to_optimizer(self):
    embed_name = "test_emb"
    # Use fixed, distinct indices so the set/get round-trip below is
    # deterministic (an uninitialized np.ndarray could contain duplicates).
    indices = np.array([0, 1], dtype=np.int32)
    embed_values = np.ndarray([2, 2], dtype=np.float32)
    slot_values = {
        "m": np.ndarray([2, 2], dtype=np.float32),
        "v": np.ndarray([2, 2], dtype=np.float32),
    }

    params = Parameters()
    params.embedding_params[embed_name] = EmbeddingTable(embed_name, 8)
    for slot in ["m", "v"]:
        slot_table_name = get_slot_table_name(embed_name, slot)
        params.embedding_params[slot_table_name] = EmbeddingTable(
            slot_table_name, 2, "0.0", True
        )
        # Write the expected slot values into the slot table so that the
        # lookup performed by `_get_slot_and_set_to_optimizer` returns
        # them instead of the table's zero initializer.
        params.embedding_params[slot_table_name].set(
            indices, slot_values[slot]
        )

    opt = Adam()
    opt_wrapper = OptimizerWrapper(opt, None, params.get_embedding_param)
    opt_wrapper._init_thread_local()

    opt_wrapper._tls._unique_ids_all_layers[embed_name] = indices
    opt_wrapper._create_embedding_variable(embed_name, embed_values)
    opt_wrapper._get_slot_and_set_to_optimizer(embed_name)

    self.assertEqual(len(opt._slots), 1)
    opt_slots = list(opt._slots.values())[0]
    self.assertEqual(sorted(opt_slots.keys()), ["m", "v"])
    for name in ["m", "v"]:
        self.assertTrue(
            np.allclose(opt_slots[name].numpy(), slot_values[name])
        )
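# For reference, a sketch of the structure `_get_slot_and_set_to_optimizer`
# builds inside the Keras optimizer in the test above (hedged: the exact key
# of `opt._slots` is an optimizer-internal variable key, shown here as a
# placeholder):
#
#   opt._slots == {
#       "<var_key>": {
#           "m": <tf.Variable, shape [2, 2]>,  # one row per unique id
#           "v": <tf.Variable, shape [2, 2]>,
#       }
#   }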
def test_delete_variables(self):
    params = Parameters()
    embed_layers = ["test_1", "test_2"]
    slot_names = ["m", "v"]
    dim = 8
    for layer in embed_layers:
        params.embedding_params[layer] = EmbeddingTable(layer, dim)
        for slot in slot_names:
            slot_key = get_slot_table_name(layer, slot)
            params.embedding_params[slot_key] = EmbeddingTable(
                slot_key, dim, "0.0", True
            )

    opt = Adam()
    opt_wrapper = OptimizerWrapper(
        opt, None, params.get_embedding_param, params.set_embedding_param
    )
    opt_wrapper._init_thread_local()
    for name in embed_layers:
        opt_wrapper._tls._unique_ids_all_layers[name] = np.ndarray(
            [2], np.int32
        )
        opt_wrapper._create_embedding_variable(
            name, np.ndarray([2, dim], np.float32)
        )
        opt_wrapper._get_slot_and_set_to_optimizer(name)

    # 2 embedding layers x 2 slots ("m", "v") = 4 weights in the optimizer.
    self.assertTrue(len(opt._weights) == 4)
    self.assertTrue(len(opt._slots) == 2)
    for slot_dict in opt._slots.values():
        self.assertTrue(len(slot_dict) == 2)

    opt_wrapper._delete_slots_and_weights_in_optimizer()
    self.assertTrue(len(opt._weights) == 0)
    self.assertTrue(len(opt._slots) == 0)
def test_update_embedding_param(self):
    params = Parameters()
    for name in ["test_1", "test_2"]:
        params.embedding_params[name] = EmbeddingTable(name, 8)
        slot_key = get_slot_table_name(name, "momentum")
        params.embedding_params[slot_key] = EmbeddingTable(
            slot_key, 8, "0.0", True
        )

    indices = {
        "test_1": np.array([1, 5]),
        "test_2": np.array([10]),
    }
    embed_vars = {
        "test_1": tf.Variable(np.random.rand(2, 8).astype(np.float32)),
        "test_2": tf.Variable(np.random.rand(1, 8).astype(np.float32)),
    }
    slot_vars = {
        "test_1": {
            "momentum": tf.Variable(
                np.random.rand(2, 8).astype(np.float32)
            )
        },
        "test_2": {
            "momentum": tf.Variable(
                np.random.rand(1, 8).astype(np.float32)
            )
        },
    }

    opt = SGD(momentum=0.1)
    opt_wrapper = OptimizerWrapper(
        opt, None, None, params.set_embedding_param
    )
    opt_wrapper._tls._unique_ids_all_layers = indices
    opt_wrapper._tls._embed_variables = embed_vars
    opt_wrapper._tls._slot_variables = slot_vars
    opt_wrapper._update_embedding_param()

    for name in ["test_1", "test_2"]:
        self.assertTrue(
            np.allclose(
                embed_vars[name].numpy(),
                params.get_embedding_param(name, indices[name]),
            )
        )

        slot = "momentum"
        slot_table_name = get_slot_table_name(name, slot)
        self.assertTrue(
            np.allclose(
                slot_vars[name][slot].numpy(),
                params.get_embedding_param(slot_table_name, indices[name]),
            )
        )
def _test_correctness(self, optimizer_class, X, Y, seed, **opt_kwargs):
    """Tests the correctness of a specific TensorFlow optimizer."""
    _model_file = get_module_file_path(
        os.path.dirname(os.path.realpath(__file__)),
        "embedding_test_module.KerasEmbeddingModel",
    )
    model_module = load_module(_model_file).__dict__

    # train the reference model with a native TensorFlow optimizer
    dim = 4
    weights = self._random_init_model_weight(
        [(4, dim), (4, dim), (72, 1), (1,)], seed
    )
    loss_fn = model_module["loss"]
    model1 = model_module["KerasEmbeddingModel"](4, dim, weights)
    opt1 = optimizer_class(**opt_kwargs)
    _train(model1, opt1, X, Y, loss_fn, random_seed=seed)

    # train the ElasticDL model with the same optimizer wrapped in
    # OptimizerWrapper
    model2 = model_module["EdlEmbeddingModel"](dim, weights[2:])
    opt2 = optimizer_class(**opt_kwargs)

    embedding_weight_names = [
        layer.embedding_weight_name
        for layer in find_layer(model2, Embedding)
    ]

    # create a Parameters object and initialize the embedding vectors
    params = Parameters()
    for weight_name, embed_value in zip(
        embedding_weight_names, weights[:2]
    ):
        embed_table = EmbeddingTable(weight_name, dim)
        embed_table.set(range(len(embed_value)), embed_value)
        params.embedding_params[weight_name] = embed_table

    _train_edl_embedding_with_optimizer_wrapper(
        model2, opt2, X, Y, loss_fn, params, random_seed=seed
    )

    # compare the trained parameters
    wrong_msg = (
        "The updated parameters of OptimizerWrapper and TensorFlow "
        "optimizer %s differ." % opt1.get_config()["name"]
    )

    for layer1, layer2 in zip(model1.layers, model2.layers):
        if "embedding" in layer2.name:
            w1 = layer1.weights[0].numpy()
            w2 = params.get_embedding_param(
                layer2.embedding_weight_name, range(4)
            )
            self.assertTrue(np.isclose(w1, w2).all(), msg=wrong_msg)
        else:
            for w1, w2 in zip(layer1.weights, layer2.weights):
                self.assertTrue(
                    np.isclose(w1.numpy(), w2.numpy()).all(), msg=wrong_msg
                )
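# A typical call into the helper above might look like the following sketch
# (X, Y, and the seed are supplied by the concrete test case; any extra
# keyword arguments are forwarded to the optimizer constructor):
#
#   self._test_correctness(tf.keras.optimizers.SGD, X, Y, seed=1, momentum=0.5)
#   self._test_correctness(tf.keras.optimizers.Adam, X, Y, seed=1)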
def test_worker_pull_embedding(self):
    model_def = "mnist_functional_api.mnist_functional_api.custom_model"
    self._create_pserver(model_def, 2)
    arguments = [
        "--worker_id", 0,
        "--job_type", elasticdl_pb2.TRAINING,
        "--minibatch_size", self._batch_size,
        "--model_zoo", self._model_zoo_path,
        "--model_def", model_def,
        "--distribution_strategy", DistributionStrategy.PARAMETER_SERVER,
    ]
    args = parse_worker_args(arguments)
    worker = Worker(args, ps_channels=self._channels)

    # Test looking up embedding vectors that do not exist yet; the
    # pserver initializes them on first access.
    layers = ["test-2", "test-2-slot"]
    ids = [3, 5, 1, 6, 10, 2, 1, 2, 4, 7, 9]
    embedding_table_args = [
        (layers[0], 8, "uniform", False),
        (layers[1], 8, 3.3, True),
    ]

    # initialize the embedding table objects on each pserver
    for pserver in self._pservers:
        for layer, table_args in zip(layers, embedding_table_args):
            pserver.parameters.embedding_params[layer] = EmbeddingTable(
                *table_args
            )

    result_dict = {}
    for layer in layers:
        embedding = worker.pull_embedding_vectors(layer, ids)
        result_dict[layer] = embedding

    for layer in layers:
        expected_result = []
        for embedding_id in ids:
            # ids are sharded across pservers by int_to_id
            ps_id = int_to_id(embedding_id, len(self._pservers))
            table = self._pservers[ps_id].parameters.embedding_params[
                layer
            ]
            expected_result.append(table.get([embedding_id]))
        expected_result = np.concatenate(expected_result)
        self.assertTrue(np.allclose(expected_result, result_dict[layer]))
def create_slot_params(self, slot_names, init_values):
    embed_layer_names = list(self.embedding_params.keys())
    for layer_name in embed_layer_names:
        for slot_name in slot_names:
            key = get_slot_table_name(layer_name, slot_name)
            if key in self.embedding_params:
                raise ValueError(
                    "An embedding layer has unexpected name %s" % key
                )
            self.embedding_params[key] = EmbeddingTable(
                key,
                self.embedding_params[layer_name].dim,
                init_values[slot_name],
                True,
            )
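# A minimal usage sketch for `create_slot_params` (the layer name below is
# hypothetical; `get_slot_table_name` determines the actual slot-table keys):
#
#   params = Parameters()
#   params.embedding_params["user_embed"] = EmbeddingTable("user_embed", 16)
#   params.create_slot_params(["m", "v"], {"m": 0.0, "v": 0.0})
#   # For each (layer, slot) pair there is now one EmbeddingTable with the
#   # layer's dim (16), the slot's init value (0.0), and is_slot=True.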
def test_create_embedding_table_for_slots(self):
    slot_name = "momentum"
    init_value = 3.5
    table = EmbeddingTable(
        get_slot_table_name(self.name, slot_name),
        dim=self.dim,
        initializer=init_value,
        is_slot=True,
    )
    self.assertIsNotNone(table)
    self.assertEqual(table.name, get_slot_table_name(self.name, slot_name))
    self.assertEqual(table.dim, self.dim)

    # test that lookup initializes the slot vector with the constant
    # value (two-sided closeness check, not just an upper bound)
    embedding = table.get([2])
    self.assertTrue(np.isclose(embedding, init_value).all())
def test_save_parameters_to_checkpoint_file(self):
    with tempfile.TemporaryDirectory() as tempdir:
        checkpoint_saver = CheckpointSaver(
            checkpoint_dir=os.path.join(tempdir, "ckpt/"),
            checkpoint_steps=5,
            keep_checkpoint_max=3,
            include_evaluation=False,
        )
        pserver_servicer = PserverServicer(
            parameters=Parameters(),
            grads_to_wait=0,
            optimizer="optimizer",
            checkpoint_saver=checkpoint_saver,
            ps_id=0,
            num_ps_pods=1,
        )
        model_params = {
            "v0": tf.Variable([[1, 1, 1], [1, 1, 1]]),
            "v1": tf.Variable([[2, 2, 2], [2, 2, 2]]),
        }

        server_params = pserver_servicer._parameters
        for var_name, var_value in model_params.items():
            server_params.non_embedding_params[var_name] = var_value

        embedding_table = EmbeddingTable(
            name="embedding_0", dim=3, initializer="random_uniform"
        )
        server_params.embedding_params["embedding_0"] = embedding_table
        server_params.set_embedding_param(
            name="embedding_0",
            indices=np.array([0, 1]),
            values=np.array([[1, 1, 1], [2, 2, 2]]),
        )

        for _ in range(100):
            pserver_servicer._parameters.version += 1
            pserver_servicer._save_params_to_checkpoint_if_needed()

        # With checkpoint_steps=5 and keep_checkpoint_max=3, only the
        # three most recent checkpoints survive.
        self.assertEqual(len(os.listdir(checkpoint_saver._directory)), 3)
        self.assertEqual(
            sorted(os.listdir(checkpoint_saver._directory)),
            ["version-100", "version-90", "version-95"],
        )
        self.assertEqual(
            os.listdir(checkpoint_saver._directory + "/version-100"),
            ["variables-0-of-1.ckpt"],
        )
def _mock_model_parameters(self, model):
    params = Parameters()
    for weight in model.trainable_variables:
        if "embedding" in weight.name:
            embedding_table = EmbeddingTable(
                name=weight.name,
                dim=weight.shape[1],
                initializer="RandomUniform",
            )
            embedding_table.set(
                np.arange(weight.shape[0]), np.ones(weight.shape)
            )
            params.embedding_params[weight.name] = embedding_table
        else:
            params.non_embedding_params[weight.name] = tf.ones(
                weight.shape
            )
    params.version = 100
    return params
def test_restore_parameters_from_checkpoint(self):
    checkpoint_dir = "elasticdl/python/tests/testdata/ps_ckpt"
    checkpoint_saver = CheckpointSaver(checkpoint_dir, 0, 0, False)
    params = Parameters()
    table = EmbeddingTable("embedding", 2, "random_uniform")
    table.set([0, 1, 2, 3], np.ones((4, 2), dtype=np.float32))
    params.embedding_params["embedding"] = table
    params.non_embedding_params["dense/kernel:0"] = tf.Variable(
        [[1.0], [1.0]]
    )
    params.non_embedding_params["dense/bias:0"] = tf.Variable([1.0])
    params.version = 100
    model_pb = params.to_model_pb()
    checkpoint_saver.save(100, model_pb, False)

    checkpoint_dir_for_init = checkpoint_dir + "/version-100"
    args = PserverArgs(
        ps_id=0,
        num_ps_pods=2,
        model_zoo=_test_model_zoo_path,
        model_def="test_module.custom_model",
        checkpoint_dir_for_init=checkpoint_dir_for_init,
    )
    pserver_0 = ParameterServer(args)

    # With two ps pods, each pserver restores only its own shard of the
    # checkpoint: embedding ids are partitioned round-robin and
    # non-embedding variables by name.
    embedding_table = pserver_0.parameters.embedding_params["embedding"]
    self.assertEqual(
        list(embedding_table.embedding_vectors.keys()), [0, 2]
    )
    self.assertEqual(
        list(pserver_0.parameters.non_embedding_params.keys()),
        ["dense/kernel:0"],
    )
    self.assertTrue(
        np.array_equal(
            pserver_0.parameters.non_embedding_params[
                "dense/kernel:0"
            ].numpy(),
            np.array([[1], [1]], dtype=int),
        )
    )
    self.assertEqual(pserver_0.parameters.version, 100)

    args = PserverArgs(
        ps_id=1,
        num_ps_pods=2,
        model_zoo=_test_model_zoo_path,
        model_def="test_module.custom_model",
        checkpoint_dir_for_init=checkpoint_dir_for_init,
    )
    pserver_1 = ParameterServer(args)
    embedding_table = pserver_1.parameters.embedding_params["embedding"]
    self.assertEqual(
        list(embedding_table.embedding_vectors.keys()), [1, 3]
    )
    self.assertEqual(
        list(pserver_1.parameters.non_embedding_params.keys()),
        ["dense/bias:0"],
    )
    self.assertTrue(
        np.array_equal(
            pserver_1.parameters.non_embedding_params[
                "dense/bias:0"
            ].numpy(),
            np.array([1], dtype=int),
        )
    )
    self.assertEqual(pserver_1.parameters.version, 100)
def _test_async_correctness(
    self,
    grads_and_vars_batches,
    embed_values,
    expected_non_embed_values,
    expected_embed_values=None,
):
    """Checks the correctness of async OptimizerWrapper.

    This function creates many threads that call
    `OptimizerWrapper.apply_gradients` simultaneously.

    Args:
        grads_and_vars_batches: A python list of `grads_and_vars`.
            Every thread takes one `grads_and_vars` and calls
            `apply_gradients`.
        embed_values: A python dictionary of
            `(layer_name, embedding table)`.
        expected_non_embed_values: A python list of expected
            non-embedding values after applying gradients.
        expected_embed_values: A python dictionary of expected embedding
            values after applying gradients. None means there is no need
            to check the embedding values.
    """
    thread_num = len(grads_and_vars_batches)
    input_dims = {}
    embed_var_n = len(embed_values)
    params = Parameters()
    for layer, values in embed_values.items():
        embed_dim = values.shape[1]
        input_dims[layer] = values.shape[0]
        embed_table = EmbeddingTable(layer, embed_dim)
        embed_table.set(range(input_dims[layer]), values)
        params.embedding_params[layer] = embed_table

    opt = SGD(0.1)
    opt_wrapper = OptimizerWrapper(
        opt,
        True,
        lookup_embedding_func=params.get_embedding_param,
        update_embedding_func=params.set_embedding_param,
    )

    # call optimizer_wrapper.apply_gradients asynchronously
    def _apply_gradients(opt_wrapper, grads_and_vars):
        # sleep 1s so that all threads are inside this call before any
        # of them applies its gradients
        time.sleep(1)
        opt_wrapper.apply_gradients(grads_and_vars)

    executor = ThreadPoolExecutor(max_workers=thread_num)
    tasks = [
        executor.submit(_apply_gradients, opt_wrapper, grads_and_vars)
        for grads_and_vars in grads_and_vars_batches
    ]
    _ = [task.result() for task in tasks]

    # check the updated results of the non-embedding variables
    non_embed_vars = [
        var for grad, var in grads_and_vars_batches[0][:-embed_var_n]
    ]
    for var, expected_value in zip(
        non_embed_vars, expected_non_embed_values
    ):
        self.assertTrue(np.isclose(var.numpy(), expected_value).all())

    # `expected_embed_values=None` means there is no need to check the
    # embedding table
    if not expected_embed_values:
        return

    # check the updated results of the embedding table; with async
    # updates, any of the listed thread-interleaving outcomes is
    # acceptable
    for layer, expected_values in expected_embed_values.items():
        value = params.get_embedding_param(layer, range(input_dims[layer]))
        self.assertTrue(
            any(
                np.isclose(value, expected).all()
                for expected in expected_values
            )
        )
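# A sketch of how a test case might drive `_test_async_correctness` (values
# and shapes are hypothetical; each inner list is one thread's
# `grads_and_vars`, with the embedding entries ordered last so that the
# `[:-embed_var_n]` slice above isolates the non-embedding pairs):
#
#   embed_values = {"test_emb": np.ones((4, 2), dtype=np.float32)}
#   grads_and_vars_batches = [batch_0, batch_1]  # one batch per thread
#   self._test_async_correctness(
#       grads_and_vars_batches, embed_values, expected_non_embed_values
#   )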
def setUp(self):
    self.name = "embedding_1"
    self.dim = 10
    self.initializer = "uniform"
    self.table = EmbeddingTable(self.name, self.dim, self.initializer)