async def create_tuple(self, elements):
    """Creates a tuple on the target executor and records the call in the trace."""
    # Strip the tracing wrappers before delegating to the wrapped executor.
    inner_elements = anonymous_tuple.map_structure(lambda e: e.value, elements)
    delegated = await self._target.create_tuple(inner_elements)
    result = TracingExecutorValue(self, self._get_new_value_index(), delegated)
    # Record the input value indices alongside the index of the new value.
    element_indices = anonymous_tuple.map_structure(lambda e: e.index, elements)
    self._trace.append(('create_tuple', element_indices, result.index))
    return result
# --- Example #2 ---
def _unwrap_execution_context_value(val):
    """Recursively removes wrapping from `val` under anonymous tuples."""
    # Descend into anonymous tuples element by element.
    if isinstance(val, anonymous_tuple.AnonymousTuple):
        return anonymous_tuple.map_structure(_unwrap_execution_context_value,
                                             val)
    # Peel one layer of wrapping and recurse in case wrappers are nested.
    if isinstance(val, ExecutionContextValue):
        return _unwrap_execution_context_value(val.value)
    # Anything else is already unwrapped.
    return val
# --- Example #3 ---
def _validate_value_type_and_encoders(value_type, encoders, encoder_type):
  """Validates if `value_type` and `encoders` are compatible."""
  if not isinstance(encoders, _ALLOWED_ENCODERS):
    # `encoders` is a container, so `value_type` must be an instance of
    # `tff.NamedTupleType` whose structure mirrors that container.
    if not isinstance(value_type, computation_types.NamedTupleType):
      raise TypeError('`value_type` is not compatible with the expected input '
                      'of the `encoders`.')
    # Validate each (encoder, type) pair position-wise across the structure.
    anonymous_tuple.map_structure(
        lambda e, v: _validate_encoder(e, v, encoder_type),
        anonymous_tuple.from_container(encoders, recursive=True), value_type)
  else:
    # A single encoder requires `value_type` to be an instance of
    # `tff.TensorType`.
    if not isinstance(value_type, computation_types.TensorType):
      raise ValueError(
          '`value_type` and `encoders` do not have the same structure.')
    _validate_encoder(encoders, value_type, encoder_type)
  def test_federated_zip_at_clients_unnamed(self):

    @computations.federated_computation
    def comp():
      # Zip two all-equal client-placed values into one federated tuple.
      client_values = [
          intrinsics.federated_value(10, placements.CLIENTS),
          intrinsics.federated_value(20, placements.CLIENTS),
      ]
      return intrinsics.federated_zip(client_values)

    self.assertEqual(str(comp.type_signature), '( -> {<int32,int32>}@CLIENTS)')
    output = comp()
    self.assertIsInstance(output, list)
    # NOTE(review): presumably the test execution context runs 12 clients,
    # one list element per client — confirm against the context setup.
    self.assertLen(output, 12)
    for client_value in output:
      self.assertEqual(
          str(anonymous_tuple.map_structure(lambda x: x.numpy(), client_value)),
          '<10,20>')
# --- Example #5 ---
  def test_with_one_arg_tf_comp_in_two_arg_fed_comp(self):
    executor = lambda_executor.LambdaExecutor(eager_executor.EagerExecutor())
    event_loop = asyncio.get_event_loop()

    @computations.tf_computation(tf.int32, tf.int32)
    def add_numbers(x, y):
      return x + y

    @computations.federated_computation(tf.int32, tf.int32)
    def comp(x, y):
      return add_numbers(x, x), add_numbers(x, y), add_numbers(y, y)

    # Embed the computation and its two arguments in the executor, then
    # package the arguments as an unnamed two-element tuple.
    comp_val = event_loop.run_until_complete(executor.create_value(comp))
    ten = event_loop.run_until_complete(executor.create_value(10, tf.int32))
    twenty = event_loop.run_until_complete(executor.create_value(20, tf.int32))
    arg = event_loop.run_until_complete(
        executor.create_tuple(
            anonymous_tuple.AnonymousTuple([(None, ten), (None, twenty)])))
    call = event_loop.run_until_complete(executor.create_call(comp_val, arg))
    result = event_loop.run_until_complete(call.compute())
    # Expected: <10+10, 10+20, 20+20>.
    self.assertEqual(
        str(anonymous_tuple.map_structure(lambda x: x.numpy(), result)),
        '<20,30,40>')
# --- Example #6 ---
 async def create_struct(self, elements):
     """Creates a struct on the target executor and wraps the result."""
     # Unwrap each element before handing the structure to the target.
     unwrapped = anonymous_tuple.map_structure(lambda e: e.value, elements)
     inner = await self._target.create_struct(unwrapped)
     return SizingExecutorValue(self, inner)
def create_binary_operator_with_upcast(
    type_signature: computation_types.Type,
    operator: Callable[[Any, Any], Any]) -> pb.Computation:
  """Creates TF computation upcasting its argument and applying `operator`.

  Args:
    type_signature: Value convertible to `computation_types.NamedTupleType`,
      with two elements, both of the same type or the second able to be upcast
      to the first, as explained in `apply_binary_operator_with_upcast`, and
      both containing only tuples and tensors in their type tree.
    operator: Callable defining the operator.

  Returns:
    A `building_blocks.CompiledComputation` encapsulating a function which
    upcasts the second element of its argument and applies the binary
    operator.

  Raises:
    TypeError: If `type_signature` is not a two-element `NamedTupleType`, or
      if it contains a `SequenceType` anywhere in its type tree.
  """

  py_typecheck.check_callable(operator)
  type_signature = computation_types.to_type(type_signature)
  type_analysis.check_tensorflow_compatible_type(type_signature)
  # A binary operator by definition takes exactly two operands, modeled here
  # as a two-element named tuple.
  if not isinstance(
      type_signature,
      computation_types.NamedTupleType) or len(type_signature) != 2:
    raise TypeError('To apply a binary operator, we must by definition have an '
                    'argument which is a `NamedTupleType` with 2 elements; '
                    'asked to create a binary operator for type: {t}'.format(
                        t=type_signature))
  # Sequences cannot be stamped into a plain TF graph below, so reject them
  # up front with a clear message.
  if type_analysis.contains_types(type_signature,
                                  computation_types.SequenceType):
    raise TypeError(
        'Applying binary operators in TensorFlow is only '
        'supported on Tensors and NamedTupleTypes; you '
        'passed {t} which contains a SequenceType.'.format(t=type_signature))

  def _pack_into_type(to_pack, type_spec):
    """Pack Tensor value `to_pack` into the nested structure `type_spec`."""
    if isinstance(type_spec, computation_types.NamedTupleType):
      # Recursively broadcast the same tensor into every leaf of the tuple.
      elem_iter = anonymous_tuple.iter_elements(type_spec)
      return anonymous_tuple.AnonymousTuple([
          (elem_name, _pack_into_type(to_pack, elem_type))
          for elem_name, elem_type in elem_iter
      ])
    elif isinstance(type_spec, computation_types.TensorType):
      return tf.broadcast_to(to_pack, type_spec.shape)
    # NOTE(review): falls through returning None for any other type; the
    # tensorflow-compatible check above presumably rules that out — confirm.

  with tf.Graph().as_default() as graph:
    # Stamp both operands as graph placeholders; the stamping order here
    # determines the order of the parameter bindings assembled below.
    first_arg, operand_1_binding = tensorflow_utils.stamp_parameter_in_graph(
        'x', type_signature[0], graph)
    operand_2_value, operand_2_binding = tensorflow_utils.stamp_parameter_in_graph(
        'y', type_signature[1], graph)
    # Upcast the second operand to the first operand's structure only when
    # the types differ.
    if type_signature[0].is_equivalent_to(type_signature[1]):
      second_arg = operand_2_value
    else:
      second_arg = _pack_into_type(operand_2_value, type_signature[0])

    if isinstance(type_signature[0], computation_types.TensorType):
      result_value = operator(first_arg, second_arg)
    elif isinstance(type_signature[0], computation_types.NamedTupleType):
      # Apply the operator leaf-wise across both (now structurally equal)
      # operand structures.
      result_value = anonymous_tuple.map_structure(operator, first_arg,
                                                   second_arg)
    else:
      raise TypeError('Encountered unexpected type {t}; can only handle Tensor '
                      'and NamedTupleTypes.'.format(t=type_signature[0]))

  result_type, result_binding = tensorflow_utils.capture_result_from_graph(
      result_value, graph)

  # Assemble the serialized computation: its function type, the parameter
  # binding (the two stamped operands), and the captured result binding.
  type_signature = computation_types.FunctionType(type_signature, result_type)
  parameter_binding = pb.TensorFlow.Binding(
      tuple=pb.TensorFlow.NamedTupleBinding(
          element=[operand_1_binding, operand_2_binding]))
  tensorflow = pb.TensorFlow(
      graph_def=serialization_utils.pack_graph_def(graph.as_graph_def()),
      parameter=parameter_binding,
      result=result_binding)
  return pb.Computation(
      type=type_serialization.serialize_type(type_signature),
      tensorflow=tensorflow)