Example #1
 def foo(x):
     return intrinsics.federated_aggregate(x, Accumulator(0, 0),
                                           accumulate, merge, report)
Example #2
 def foo(x):
   return intrinsics.federated_aggregate(x, build_federated_zero(),
                                         accumulate, merge, report)
Example #3
 def foo(x):
   val = intrinsics.federated_aggregate(x, build_empty_accumulator(),
                                        accumulate, merge, report)
   self.assertIsInstance(val, value_base.Value)
   return val
Example #4
def federated_aggregate_keras_metric(
    metrics: Union[tf.keras.metrics.Metric,
                   Sequence[tf.keras.metrics.Metric]], federated_values):
  """Aggregates variables a keras metric placed at CLIENTS to SERVER.

  Args:
    metrics: a single `tf.keras.metrics.Metric` or a `Sequence` of metrics. The
      order must match the order of variables in `federated_values`.
    federated_values: a single federated value, or a `Sequence` of federated
      values. The values must all have `tff.CLIENTS` placement. If the value is
      a `Sequence`, its order must match the order of `metrics`.

  Returns:
    The result of performing a federated sum on federated_values, then assigning
    the aggregated values into the variables of the corresponding
    `tf.keras.metrics.Metric` and calling `tf.keras.metrics.Metric.result`. The
    resulting structure has `tff.SERVER` placement.
  """
  member_types = tf.nest.map_structure(lambda t: t.type_signature.member,
                                       federated_values)

  @computations.tf_computation
  def zeros_fn():
    # `member_types` is a (potentially nested) `tff.StructType`, which is a
    # `structure.Struct`.
    return structure.map_structure(lambda v: tf.zeros(v.shape, dtype=v.dtype),
                                   member_types)

  zeros = zeros_fn()

  @computations.tf_computation(member_types, member_types)
  def accumulate(accumulators, variables):
    return tf.nest.map_structure(tf.add, accumulators, variables)

  @computations.tf_computation(member_types, member_types)
  def merge(a, b):
    return tf.nest.map_structure(tf.add, a, b)

  @computations.tf_computation(member_types)
  def report(accumulators):
    """Insert `accumulators` back into the keras metric to obtain result."""

    def finalize_metric(metric: tf.keras.metrics.Metric, values):
      # Note: the following call requires that `type(metric)` have a
      # no-argument `__init__` method, which restricts the types of metrics
      # that can be used. This is somewhat limiting, but the pattern of using
      # default arguments and exporting the values in `get_config()` (see
      # `tf.keras.metrics.TopKCategoricalAccuracy`) works well.
      #
      # If `type(metric)` is a subclass of another `tf.keras.metrics.Metric`,
      # the arguments passed to `__init__` must include the arguments expected
      # by the superclass and specified in the superclass's `get_config()`.
      keras_metric = None
      try:
        # This is some trickery to reconstruct a metric object in the current
        # scope, so that the `tf.Variable`s get created when we desire.
        keras_metric = type(metric).from_config(metric.get_config())
      except TypeError as e:
        # Re-raise the error with a more helpful message, but keep the previous
        # stack trace.
        raise TypeError(
            'Caught exception trying to call `{t}.from_config()` with '
            'config {c}. Confirm that {t}.__init__() has an argument for '
            'each member of the config.\nException: {e}'.format(
                t=type(metric), c=metric.get_config(), e=e))

      assignments = []
      for v, a in zip(keras_metric.variables, values):
        assignments.append(v.assign(a))
      with tf.control_dependencies(assignments):
        return keras_metric.result()

    if isinstance(metrics, tf.keras.metrics.Metric):
      # Only a single metric to aggregate.
      return finalize_metric(metrics, accumulators)
    else:
      # Otherwise map over all the metrics.
      return collections.OrderedDict([
          (name, finalize_metric(metric, values))
          for metric, (name, values) in zip(metrics, accumulators.items())
      ])

  return intrinsics.federated_aggregate(federated_values, zeros, accumulate,
                                        merge, report)
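The `finalize_metric` helper above depends on rebuilding a metric from its config so that fresh `tf.Variable`s are created in the current scope. A minimal standalone sketch of that pattern in plain Keras, outside any TFF computation (the aggregated accumulator values are made up for illustration):

import tensorflow as tf

metric = tf.keras.metrics.SparseCategoricalAccuracy()
# Rebuild an identical metric; this is where the new `tf.Variable`s get created.
rebuilt = type(metric).from_config(metric.get_config())
# Assign aggregated accumulator values into the fresh variables, then report.
aggregated = [tf.constant(3.0), tf.constant(4.0)]  # (total, count) for this metric
for variable, value in zip(rebuilt.variables, aggregated):
  variable.assign(value)
print(rebuilt.result().numpy())  # 3.0 / 4.0 = 0.75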
Example #5
def federated_sample(value, max_num_samples=100):
    """Aggregation to produce uniform sample of at most `max_num_samples` values.

  Each client value is assigned a random number when it is examined during each
  accumulation. Each accumulate and merge only keeps the top N values based
  on the random number. Report drops the random numbers and only returns the
  at most N values sampled from the accumulated client values using standard
  reservoir sampling (https://en.wikipedia.org/wiki/Reservoir_sampling), where
  N is user provided `max_num_samples`.

  Args:
    value: A `tff.Value` placed on the `tff.CLIENTS`.
    max_num_samples: The maximum number of samples to collect from client
      values. If fewer clients than the defined max sample size participated in
      the round of computation, the actual number of samples will equal the
      number of clients in the round.

  Returns:
    At most `max_num_samples` samples of the value from the `tff.CLIENTS`.
  """
    _validate_value_on_clients(value)
    member_type = value.type_signature.member
    accumulator_type = _get_accumulator_type(member_type)
    zeros = _zeros_for_sample(member_type)

    @tf.function
    def fed_concat_expand_dims(a, b):
        b = tf.expand_dims(b, axis=0)
        return tf.concat([a, b], axis=0)

    @tf.function
    def fed_concat(a, b):
        return tf.concat([a, b], axis=0)

    @tf.function
    def fed_gather(value, indices):
        return tf.gather(value, indices)

    def apply_sampling(accumulators, rands):
        size = tf.shape(rands)[0]
        k = tf.minimum(size, max_num_samples)
        indices = tf.math.top_k(rands, k=k).indices
        # TODO(b/121288403): Special-casing anonymous tuple shouldn't be needed.
        if isinstance(member_type, computation_types.NamedTupleType):
            return anonymous_tuple.map_structure(
                lambda v: fed_gather(v, indices),
                accumulators), fed_gather(rands, indices)
        return fed_gather(accumulators, indices), fed_gather(rands, indices)

    @computations.tf_computation(accumulator_type, value.type_signature.member)
    def accumulate(current, value):
        """Accumulates samples through concatenation."""
        rands = fed_concat_expand_dims(current.rands,
                                       tf.random.uniform(shape=()))
        # TODO(b/121288403): Special-casing anonymous tuple shouldn't be needed.
        if isinstance(member_type, computation_types.NamedTupleType):
            accumulators = anonymous_tuple.map_structure(
                fed_concat_expand_dims,
                _ensure_anonymous_tuple(current.accumulators),
                _ensure_anonymous_tuple(value))
        else:
            accumulators = fed_concat_expand_dims(current.accumulators, value)

        accumulators, rands = apply_sampling(accumulators, rands)
        return _Samples(accumulators, rands)

    @computations.tf_computation(accumulator_type, accumulator_type)
    def merge(a, b):
        """Merges accumulators through concatenation."""
        # TODO(b/121288403): Special-casing anonymous tuple shouldn't be needed.
        if isinstance(accumulator_type, computation_types.NamedTupleType):
            samples = anonymous_tuple.map_structure(fed_concat,
                                                    _ensure_anonymous_tuple(a),
                                                    _ensure_anonymous_tuple(b))
        else:
            samples = fed_concat(a, b)
        accumulators, rands = apply_sampling(samples.accumulators,
                                             samples.rands)
        return _Samples(accumulators, rands)

    @computations.tf_computation(accumulator_type)
    def report(value):
        return value.accumulators

    return intrinsics.federated_aggregate(value, zeros, accumulate, merge,
                                          report)
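The sampling step inside `accumulate` and `merge` reduces to keeping the elements whose random keys land in the top `k`. A short plain-TensorFlow sketch of that core idea (the values and sample size are made up for illustration):

import tensorflow as tf

accumulators = tf.constant([10, 20, 30, 40, 50])
rands = tf.random.uniform(shape=[5])         # one random key per retained value
k = tf.minimum(tf.shape(rands)[0], 3)        # keep at most 3 samples
indices = tf.math.top_k(rands, k=k).indices  # indices of the winning keys
sampled = tf.gather(accumulators, indices)   # a uniform sample of 3 values
print(sampled.numpy())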
Example #6
 def comp():
     value = intrinsics.federated_value(10, placement_literals.CLIENTS)
     return intrinsics.federated_aggregate(value, 0, add_int, add_int,
                                           add_five)
Example #7
 def comp():
     tens = intrinsics.federated_value(10, placements.CLIENTS)
     return intrinsics.federated_aggregate(tens, 0, add_int, add_int,
                                           add_five)
Example #8
 def comp():
     x = intrinsics.federated_value(10, placements.CLIENTS)
     return intrinsics.federated_aggregate(x, 0, add_numbers,
                                           add_numbers,
                                           add_one_because_why_not)
Example #9
  def next_fn(global_state, value, weight=None):
    """Defines next_fn for StatefulAggregateFn."""
    # Weighted aggregation is not supported.
    # TODO(b/140236959): Add an assertion that weight is None here, so the
    # contract of this method is better established. Will likely cause some
    # downstream breaks.
    del weight

    #######################################
    # Define local tf_computations

    # TODO(b/129567727): Make most of these tf_computations polymorphic
    # so type manipulation isn't needed.

    global_state_type = initialize_fn.type_signature.result

    @computations.tf_computation(global_state_type)
    def derive_sample_params(global_state):
      return query.derive_sample_params(global_state)

    @computations.tf_computation(derive_sample_params.type_signature.result,
                                 value.type_signature.member)
    def preprocess_record(params, record):
      # TODO(b/123092620): Once TFF passes the expected container type (instead
      # of AnonymousTuple), we shouldn't need this.
      record = from_tff_result_fn(record)

      return query.preprocess_record(params, record)

    # TODO(b/123092620): We should have the expected container type here.
    value_type = value_type_fn(value)

    tensor_specs = type_utils.type_to_tf_tensor_specs(value_type)

    @computations.tf_computation
    def zero():
      return query.initial_sample_state(tensor_specs)

    sample_state_type = zero.type_signature.result

    @computations.tf_computation(sample_state_type,
                                 preprocess_record.type_signature.result)
    def accumulate(sample_state, preprocessed_record):
      return query.accumulate_preprocessed_record(sample_state,
                                                  preprocessed_record)

    @computations.tf_computation(sample_state_type, sample_state_type)
    def merge(sample_state_1, sample_state_2):
      return query.merge_sample_states(sample_state_1, sample_state_2)

    @computations.tf_computation(merge.type_signature.result)
    def report(sample_state):
      return sample_state

    @computations.tf_computation(sample_state_type, global_state_type)
    def post_process(sample_state, global_state):
      result, new_global_state = query.get_noised_result(
          sample_state, global_state)
      return new_global_state, result

    #######################################
    # Orchestration logic

    sample_params = intrinsics.federated_map(derive_sample_params, global_state)
    client_sample_params = intrinsics.federated_broadcast(sample_params)
    preprocessed_record = intrinsics.federated_map(
        preprocess_record, (client_sample_params, value))
    agg_result = intrinsics.federated_aggregate(preprocessed_record, zero(),
                                                accumulate, merge, report)

    return intrinsics.federated_map(post_process, (agg_result, global_state))
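The `tf_computation`s above are thin wrappers around the methods of a `tensorflow_privacy` `DPQuery`. A rough single-process sketch of the same call sequence, assuming a `GaussianSumQuery` and the older two-value `get_noised_result` interface that the code above targets (parameters are illustrative):

import tensorflow as tf
import tensorflow_privacy

query = tensorflow_privacy.GaussianSumQuery(l2_norm_clip=1.0, stddev=0.1)
global_state = query.initial_global_state()
# derive_sample_params, then broadcast to clients.
params = query.derive_sample_params(global_state)
# `zero`: an empty sample state built from tensor specs.
sample_state = query.initial_sample_state(tf.TensorSpec([2], tf.float32))
# `preprocess_record` and `accumulate`, once per client record.
for record in [tf.constant([1.0, 0.0]), tf.constant([0.0, 2.0])]:
  preprocessed = query.preprocess_record(params, record)
  sample_state = query.accumulate_preprocessed_record(sample_state, preprocessed)
# `post_process`: noise the aggregate and update the global state.
result, global_state = query.get_noised_result(sample_state, global_state)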
Example #10
 def foo(x):
     return intrinsics.federated_aggregate(
         x, collections.OrderedDict([('sum', 0), ('n', 0)]), accumulate,
         merge, report)
Example #11
 def foo(x):
   return intrinsics.federated_aggregate(x, build_empty_accumulator(),
                                         accumulate, merge, report)
Example #12
 def comp():
   client_vals = intrinsics.federated_value(
       collections.OrderedDict([('a', (10, 2.0))]), placements.CLIENTS)
   zeros = collections.OrderedDict([('a', (0, 0.0))])
   return intrinsics.federated_aggregate(client_vals, zeros, add_int,
                                         add_int, add_five)