def load_ops_fn():
    """Returns the load ops for AdaGrad embedding tables.

    Returns:
      A list of ops to load embedding and slot variables from CPU to TPU.
    """
    load_op_list = []
    # One load op per host: zip pairs each host's shard id with that host's
    # table/accumulator variables (truncates at num_hosts).
    for host_id, table_variable, accumulator_variable in (zip(
            range(num_hosts), table_variables, accumulator_variables)):
        # Colocate so the load op runs where the CPU-held variable lives.
        with ops.colocate_with(table_variable):
            load_parameters_op = (
                tpu_ops.load_tpu_embedding_adagrad_parameters(
                    parameters=table_variable,
                    accumulators=accumulator_variable,
                    table_name=table,
                    num_shards=num_hosts,
                    shard_id=host_id))
        load_op_list.append(load_parameters_op)
    return load_op_list
    def create_variables_and_ops(self, table, slot_variable_names, num_hosts,
                                 table_config, table_variables):
        """Creates AdaGrad accumulator variables and per-host load/retrieve ops.

        Args:
          table: the embedding table identifier; passed through to the TPU ops
            as `table_name` (presumably the table's name string — confirm
            against callers).
          slot_variable_names: object whose `accumulator` attribute names the
            accumulator slot variable.
          num_hosts: number of hosts; also the number of TPU shards.
          table_config: configuration providing `vocabulary_size` and
            `dimension` for the table.
          table_variables: per-host CPU variables holding the embedding table.

        Returns:
          A tuple of (slot_variables, load_ops, retrieve_ops) where load_ops
          push CPU values to the TPU shards and retrieve_ops pull them back.
        """
        accumulator_variables = _create_partitioned_variables(
            name=slot_variable_names.accumulator,
            num_hosts=num_hosts,
            vocabulary_size=table_config.vocabulary_size,
            embedding_dimension=table_config.dimension,
            collections=[ops.GraphKeys.GLOBAL_VARIABLES],
            initializer=init_ops.constant_initializer(
                self._optimization_parameters.initial_accumulator))
        slot_variables = AdagradSlotVariable(accumulator_variables)

        load_ops, retrieve_ops = [], []
        for shard_id, params_var, accum_var in zip(
                range(num_hosts), table_variables, accumulator_variables):
            # Place both directions of transfer next to the CPU variable.
            with ops.colocate_with(params_var):
                load_op = tpu_ops.load_tpu_embedding_adagrad_parameters(
                    parameters=params_var,
                    accumulators=accum_var,
                    table_name=table,
                    num_shards=num_hosts,
                    shard_id=shard_id)
                fetched_params, fetched_accum = (
                    tpu_ops.retrieve_tpu_embedding_adagrad_parameters(
                        table_name=table,
                        num_shards=num_hosts,
                        shard_id=shard_id))
                # Copy the fetched shard back into the CPU-held variables.
                retrieve_op = control_flow_ops.group(
                    state_ops.assign(params_var, fetched_params),
                    state_ops.assign(accum_var, fetched_accum))

            load_ops.append(load_op)
            retrieve_ops.append(retrieve_op)
        return slot_variables, load_ops, retrieve_ops
  def create_variables_and_ops(self, table, slot_variable_names, num_hosts,
                               table_config, table_variables):
    """Creates the AdaGrad accumulator slot variables and sharded TPU ops.

    Args:
      table: the embedding table identifier, forwarded to the TPU ops as
        `table_name` (presumably the table's name string — confirm against
        callers).
      slot_variable_names: object whose `accumulator` attribute names the
        accumulator slot variable.
      num_hosts: number of hosts, which is also the TPU shard count.
      table_config: provides `vocabulary_size` and `dimension` for the table.
      table_variables: per-host CPU variables holding the embedding table.

    Returns:
      A (slot_variables, load_ops, retrieve_ops) tuple; load_ops push CPU
      values onto the TPU shards, retrieve_ops copy them back.
    """
    initializer = init_ops.constant_initializer(
        self._optimization_parameters.initial_accumulator)
    accumulators = _create_partitioned_variables(
        name=slot_variable_names.accumulator,
        num_hosts=num_hosts,
        vocabulary_size=table_config.vocabulary_size,
        embedding_dimension=table_config.dimension,
        collections=[ops.GraphKeys.GLOBAL_VARIABLES],
        initializer=initializer)
    slot_variables = AdagradSlotVariable(accumulators)

    load_ops = []
    retrieve_ops = []
    # Pair shard ids with each host's table/accumulator variables.
    shard_triples = zip(range(num_hosts), table_variables, accumulators)
    for shard_id, table_var, accum_var in shard_triples:
      # Colocate the transfers with the CPU-held variable.
      with ops.colocate_with(table_var):
        load_ops.append(
            tpu_ops.load_tpu_embedding_adagrad_parameters(
                parameters=table_var,
                accumulators=accum_var,
                table_name=table,
                num_shards=num_hosts,
                shard_id=shard_id))
        new_table, new_accum = (
            tpu_ops.retrieve_tpu_embedding_adagrad_parameters(
                table_name=table,
                num_shards=num_hosts,
                shard_id=shard_id))
        # Write the retrieved shard back into the CPU variables.
        retrieve_ops.append(
            control_flow_ops.group(
                state_ops.assign(table_var, new_table),
                state_ops.assign(accum_var, new_accum)))
    return slot_variables, load_ops, retrieve_ops