Example #1
  def test_conv_dropout_shape(self):
    # Batch of 3 random single-channel 28x28 images, the input shape the
    # convolutional model expects.
    image = tf.random.normal([3, 28, 28, 1])
    model = models.create_conv_dropout_model(only_digits=False)
    logits = model(image)

    self.assertIsNotNone(logits)
    # With only_digits=False the model outputs 62 classes (10 digits plus
    # 52 upper- and lower-case letters).
    self.assertEqual(logits.shape, [3, 62])
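
The test above is a single method from a larger test class; the surrounding harness is not shown. A minimal sketch of such a harness, assuming a tf.test.TestCase subclass and a hypothetical import path for the local models module, plus a companion check for the 10-class digits-only variant:

import tensorflow as tf

from emnist import models  # Hypothetical import path; the original test imports its own models module.


class EmnistModelsTest(tf.test.TestCase):

  # test_conv_dropout_shape from above belongs in this class.

  def test_conv_dropout_digits_shape(self):
    # The digits-only variant classifies 10 classes instead of 62.
    image = tf.random.normal([3, 28, 28, 1])
    model = models.create_conv_dropout_model(only_digits=True)
    logits = model(image)
    self.assertEqual(logits.shape, [3, 10])


if __name__ == '__main__':
  tf.test.main()
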
def create_compiled_keras_model():
  """Create compiled keras model."""
  if FLAGS.training_model == 'cnn':
    model = models.create_conv_dropout_model(only_digits=FLAGS.only_digits)
  elif FLAGS.training_model == 'orig_cnn':
    model = models.create_original_fedavg_cnn_model(
        only_digits=FLAGS.only_digits)
  elif FLAGS.training_model == '2nn':
    model = models.create_two_hidden_layer_model(only_digits=FLAGS.only_digits)
  elif FLAGS.training_model == 'resnet':
    model = models.create_resnet(num_blocks=9, only_digits=FLAGS.only_digits)
  else:
    raise ValueError('Model {} is not supported.'.format(FLAGS.training_model))

  model.compile(
      loss=tf.keras.losses.sparse_categorical_crossentropy,
      optimizer=tf.keras.optimizers.SGD(
          learning_rate=FLAGS.learning_rate, momentum=FLAGS.momentum),
      metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
  return model
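
create_compiled_keras_model reads four absl flags: training_model, only_digits, learning_rate, and momentum. A minimal sketch of matching flag definitions follows; the flag names mirror the FLAGS.* lookups above, while the defaults and help strings are illustrative assumptions rather than values from the original script.

from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_enum('training_model', 'cnn', ['cnn', 'orig_cnn', '2nn', 'resnet'],
                  'Which Keras model architecture to build.')
flags.DEFINE_boolean('only_digits', True,
                     'If True, use the 10-class digits-only EMNIST variant.')
flags.DEFINE_float('learning_rate', 0.01, 'Learning rate for SGD.')
flags.DEFINE_float('momentum', 0.9, 'Momentum for SGD.')
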
def run_experiment():
    """Runs the training experiment."""
    emnist_train, emnist_test = tff.simulation.datasets.emnist.load_data(
        only_digits=True)

    example_tuple = collections.namedtuple('Example', ['x', 'y'])

    def element_fn(element):
        return example_tuple(
            # The expand_dims adds a channel dimension.
            x=tf.expand_dims(element['pixels'], -1),
            y=element['label'])

    # Pool the examples from every simulated client into one centralized
    # training dataset.
    all_train = emnist_train.create_tf_dataset_from_all_clients().map(
        element_fn)
    all_train = all_train.shuffle(TOTAL_EXAMPLES).repeat().batch(BATCH_SIZE)

    all_test = emnist_test.create_tf_dataset_from_all_clients().map(element_fn)
    all_test = all_test.batch(BATCH_SIZE)

    # Number of batches per epoch over the pooled training set.
    train_data_elements = int(TOTAL_EXAMPLES / BATCH_SIZE)

    model = models.create_conv_dropout_model(only_digits=True)
    model.compile(
        loss=tf.keras.losses.sparse_categorical_crossentropy,
        optimizer=tf.keras.optimizers.SGD(
            learning_rate=0.01,
            momentum=0.9,
            decay=0.2 / train_data_elements,
            nesterov=True),
        metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])

    # Train on the pooled dataset, validating on the pooled test set each epoch.
    model.fit(all_train,
              steps_per_epoch=train_data_elements,
              epochs=25,
              verbose=1,
              validation_data=all_test)
    score = model.evaluate(all_test, verbose=0)
    print('Final test loss: %.4f' % score[0])
    print('Final test accuracy: %.4f' % score[1])
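
run_experiment also depends on module-level setup that is not shown: the imports, the local models module, and the TOTAL_EXAMPLES and BATCH_SIZE constants. A minimal sketch of that setup follows; the import path for models and the constant values are placeholders, not taken from the original script.

import collections

import tensorflow as tf
import tensorflow_federated as tff

from emnist import models  # Hypothetical import path for the local models module.

# Placeholder values; the original script defines these constants itself.
TOTAL_EXAMPLES = 300000
BATCH_SIZE = 100


if __name__ == '__main__':
  run_experiment()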