Example #1
    def test_record_new_flags(self):
        with utils_impl.record_new_flags() as hparam_flags:
            flags.DEFINE_string('exp_name', 'name',
                                'Unique name for the experiment.')
            flags.DEFINE_float('learning_rate', 0.1,
                               'Optimizer learning rate.')

        self.assertCountEqual(hparam_flags, ['exp_name', 'learning_rate'])
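
The examples above rely on record_new_flags yielding the names of the flags defined inside the with block. A minimal sketch of that idea (the helper below and its internals are an assumption for illustration, not the actual utils_impl implementation):

import contextlib

from absl import flags

@contextlib.contextmanager
def record_new_flags_sketch():
  """Yields a list filled with the names of flags defined inside the block."""
  existing_flags = set(flags.FLAGS)  # Flag names registered before the block.
  new_flag_names = []
  yield new_flag_names
  # Any name not registered before the block must have been defined inside it.
  new_flag_names.extend(set(flags.FLAGS) - existing_flags)
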
Example #2
    def test_record_new_flags(self):
        with utils_impl.record_new_flags() as hparam_flags:
            flags.DEFINE_string('exp_name', 'name',
                                'Unique name for the experiment.')
            flags.DEFINE_integer('random_seed', 0,
                                 'Random seed for the experiment.')

        self.assertCountEqual(hparam_flags, ['exp_name', 'random_seed'])
Example #3
  def test_convert_flag_names_to_odict(self):
    with utils_impl.record_new_flags() as hparam_flags:
      flags.DEFINE_integer('flag1', 1, 'This is the first flag.')
      flags.DEFINE_float('flag2', 2.0, 'This is the second flag.')

    hparam_odict = utils_impl.lookup_flag_values(hparam_flags)
    expected_odict = collections.OrderedDict(flag1=1, flag2=2.0)

    self.assertEqual(hparam_odict, expected_odict)
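
Example #3 pairs the recorded names with lookup_flag_values to build an OrderedDict of hyperparameter values. Conceptually, that lookup can be sketched as follows (an assumption for illustration, not the actual utils_impl code):

import collections

from absl import flags

def lookup_flag_values_sketch(flag_names):
  # Map each recorded flag name to the value currently held by absl's FLAGS.
  return collections.OrderedDict(
      (name, flags.FLAGS[name].value) for name in flag_names)
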
Example #4
from absl import flags
from absl import logging
import pandas as pd
import tensorflow as tf
import tensorflow_federated as tff

from tensorflow_federated.python.research.optimization.shakespeare import dataset
from tensorflow_federated.python.research.optimization.shakespeare import models
from tensorflow_federated.python.research.optimization.shared import keras_callbacks
from tensorflow_federated.python.research.optimization.shared import keras_metrics
from tensorflow_federated.python.research.optimization.shared import optimizer_utils
from tensorflow_federated.python.research.utils import utils_impl

FLAGS = flags.FLAGS

with utils_impl.record_new_flags() as hparam_flags:
    optimizer_utils.define_optimizer_flags('centralized')
    flags.DEFINE_string(
        'experiment_name', None,
        'Name of the experiment. Part of the name of the output '
        'directory.')
    flags.DEFINE_integer('num_epochs', 60, 'Number of epochs to train.')
    flags.DEFINE_integer('batch_size', 10,
                         'Size of batches for training and eval.')
    flags.DEFINE_boolean('shuffle_train_data', True,
                         'Whether to shuffle the training data.')

flags.DEFINE_string(
    'root_output_dir', '/tmp/tff/optimization/shakespeare/centralized',
    'The top-level output directory for experiment runs. --experiment_name '
    'will be appended, and the directory will contain tensorboard logs and '
    'metrics CSVs.')
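
In a trainer like this, the recorded hparam_flags are typically consumed later, after flag parsing, to log or save the run's hyperparameters. A sketch of that follow-up (the main function below is illustrative and not part of this example):

def main(_):
  # lookup_flag_values (see Example #3) maps the recorded flag names to an
  # OrderedDict of their parsed values.
  hparam_dict = utils_impl.lookup_flag_values(hparam_flags)
  logging.info('Hyperparameters for this run: %s', hparam_dict)
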
Example #5
  flags.DEFINE_float('clip', 0.05, 'Initial clip.')
  flags.DEFINE_float('noise_multiplier', None,
                     'Noise multiplier. If None, no DP is used.')
  flags.DEFINE_float('adaptive_clip_learning_rate', 0,
                     'Adaptive clip learning rate.')
  flags.DEFINE_float('target_unclipped_quantile', 0.5,
                     'Target unclipped quantile.')
  flags.DEFINE_float(
      'clipped_count_budget_allocation', 0.1,
      'Fraction of privacy budget to allocate for clipped counts.')
  flags.DEFINE_boolean(
      'per_vector_clipping', False, 'Use per-vector clipping '
      'to independently clip each weight tensor instead of the '
      'entire model.')

with utils_impl.record_new_flags() as training_loop_flags:
  flags.DEFINE_integer('total_rounds', 200, 'Number of total training rounds.')
  flags.DEFINE_string(
      'experiment_name', None, 'The name of this experiment. Will be appended '
      'to --root_output_dir to separate experiment results.')
  flags.DEFINE_string('root_output_dir', '/tmp/differential_privacy/',
                      'Root directory for writing experiment output.')
  flags.DEFINE_boolean(
      'write_metrics_with_bz2', True, 'Whether to use bz2 '
      'compression when writing output metrics to a csv file.')
  flags.DEFINE_integer(
      'rounds_per_eval', 1,
      'How often to evaluate the global model on the validation dataset.')
  flags.DEFINE_integer('rounds_per_checkpoint', 50,
                       'How often to checkpoint the global model.')
  flags.DEFINE_integer(