Example #1
    def _model_fit(self,
                   strategy,
                   steps_per_execution=1,
                   validation_data=None,
                   x=None,
                   steps_per_epoch=10,
                   run_eagerly=False,
                   with_normalization_layer=False,
                   callbacks=None):
        if callbacks is None:
            callbacks = []

        model, default_callbacks = self._model_compile(
            strategy, steps_per_execution, run_eagerly,
            with_normalization_layer)
        callbacks += default_callbacks

        def dataset_fn(input_context):
            del input_context
            x = random_ops.random_uniform((10, 10))
            y = random_ops.random_uniform((10,))
            return dataset_ops.DatasetV2.from_tensor_slices(
                (x, y)).shuffle(10).repeat().batch(2)

        x = x or dataset_creator.DatasetCreator(dataset_fn)
        validation_data = (validation_data
                           or dataset_creator.DatasetCreator(dataset_fn))

        model.fit(x,
                  epochs=10,
                  steps_per_epoch=steps_per_epoch,
                  callbacks=callbacks,
                  validation_data=validation_data,
                  validation_steps=steps_per_epoch)
        return model
Example #2
    def _model_fit(self,
                   strategy,
                   steps_per_execution=1,
                   validation_data=None,
                   x=None,
                   steps_per_epoch=10,
                   run_eagerly=False,
                   with_normalization_layer=False,
                   callbacks=None,
                   use_lookup_layer=False):
        if callbacks is None:
            callbacks = []

        model, default_callbacks = self._model_compile(
            strategy, steps_per_execution, run_eagerly,
            with_normalization_layer, use_lookup_layer)
        callbacks += default_callbacks

        x = x or dataset_creator.DatasetCreator(
            self._get_dataset_fn(use_lookup_layer))
        validation_data = (validation_data or dataset_creator.DatasetCreator(
            self._get_dataset_fn(use_lookup_layer)))

        model.fit(x,
                  epochs=10,
                  steps_per_epoch=steps_per_epoch,
                  callbacks=callbacks,
                  validation_data=validation_data,
                  validation_steps=steps_per_epoch)
        return model
Example #3
    def testModelPredictWithDatasetCreator(self, strategy):
        if isinstance(
                strategy,
                collective_all_reduce_strategy.CollectiveAllReduceStrategy):
            self.skipTest("b/189223991")

        def _dataset_fn(input_context):
            del input_context
            x = constant_op.constant([1., 2., 3., 1., 5., 1.])
            return dataset_ops.DatasetV2.from_tensor_slices(x).repeat().batch(
                2)

        _, predictions = self._model_predict(
            strategy,
            steps=3,
            test_data=dataset_creator.DatasetCreator(_dataset_fn),
        )

        # Check the first (index 0), fourth (index 3), and last predictions:
        # the first, fourth, and last inputs to `model.predict` are identical,
        # so their predictions should match.
        self.assertTrue(
            all(predictions[0] == predictions[i] for i in [0, 3, 5]))

        self.assertFalse(
            all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
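The variants above all wrap a `dataset_fn` in a `DatasetCreator` and hand it to `model.fit`. As a minimal, self-contained sketch of the same pattern against the public TF 2.x API (assuming TF >= 2.5, where the class is exposed as `tf.keras.utils.experimental.DatasetCreator`; note that `steps_per_epoch` must be given explicitly, since the repeated dataset has no natural epoch boundary):

import tensorflow as tf

def dataset_fn(input_context):
    del input_context  # unused outside a distribution strategy
    x = tf.random.uniform((10, 10))
    y = tf.random.uniform((10,))
    return tf.data.Dataset.from_tensor_slices(
        (x, y)).shuffle(10).repeat().batch(2)

model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
model.compile(tf.keras.optimizers.SGD(), loss="mse")

# The dataset repeats forever, so Keras cannot infer an epoch boundary:
# steps_per_epoch is mandatory when fitting with a DatasetCreator.
model.fit(tf.keras.utils.experimental.DatasetCreator(dataset_fn),
          epochs=2,
          steps_per_epoch=5,
          verbose=0)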
Example #4
    def test_dataset_creator_usage_in_parameter_server_model_fit(self):
        cluster_def = multi_worker_test_base.create_in_process_cluster(
            num_workers=2, num_ps=1, rpc_layer="grpc")
        cluster_def["chief"] = [
            "localhost:%d" % multi_worker_test_base.pick_unused_port()
        ]
        strategy = parameter_server_strategy_v2.ParameterServerStrategyV2(
            SimpleClusterResolver(ClusterSpec(cluster_def), rpc_layer="grpc"))
        with strategy.scope():
            model = sequential.Sequential([core_layers.Dense(10)])
        model.compile(gradient_descent.SGD(), loss="mse")

        def dataset_fn(input_context):
            global_batch_size = 64
            batch_size = input_context.get_per_replica_batch_size(
                global_batch_size)
            dataset = dataset_ops.DatasetV2.from_tensors(([1.], [1.])).repeat()
            dataset = dataset.shard(input_context.num_input_pipelines,
                                    input_context.input_pipeline_id)
            dataset = dataset.batch(batch_size)
            dataset = dataset.prefetch(2)
            return dataset

        history = model.fit(dataset_creator.DatasetCreator(dataset_fn),
                            epochs=10,
                            steps_per_epoch=10,
                            verbose=0)
        self.assertLen(history.history["loss"], 10)
Example #5
  def _model_fit(self,
                 steps_per_execution=1,
                 validation_data=None,
                 x=None,
                 steps_per_epoch=10,
                 run_eagerly=False,
                 with_normalization_layer=False):
    model, callbacks = self._model_compile(steps_per_execution, run_eagerly,
                                           with_normalization_layer)

    def dataset_fn(input_context):
      del input_context
      x = random_ops.random_uniform((10, 10))
      y = random_ops.random_uniform((10,))
      return dataset_ops.DatasetV2.from_tensor_slices(
          (x, y)).shuffle(10).repeat().batch(2)

    x = x or dataset_creator.DatasetCreator(dataset_fn)

    model.fit(
        x,
        epochs=10,
        steps_per_epoch=steps_per_epoch,
        verbose=0,
        callbacks=callbacks,
        validation_data=validation_data)
    return model
Example #6
  def test_dataset_creator(self):
    with self.assertRaisesRegex(
        TypeError, "`dataset_fn` for `DatasetCreator` must be a `callable`."):
      dataset_creator.DatasetCreator(2)

    dataset_fn = lambda: 3
    with self.assertRaisesRegex(
        TypeError, "The `callable` provided to `DatasetCreator` must return "
        "a Dataset."):
      dataset_creator.DatasetCreator(dataset_fn)()

    dataset_fn = lambda: dataset_ops.DatasetV2.from_tensor_slices([1, 1])
    got = dataset_creator.DatasetCreator(dataset_fn)()
    self.assertEqual(
        next(iter(got)),
        next(iter(dataset_ops.DatasetV2.from_tensor_slices([1, 1]))))
Example #7
    def test_dataset_creator_input_options_with_cluster_coordinator(self):
        dataset_fn = lambda _: dataset_ops.DatasetV2.from_tensor_slices([1, 1])
        input_options = distribute_lib.InputOptions(
            experimental_fetch_to_device=True,
            experimental_per_replica_buffer_size=2)
        x = dataset_creator.DatasetCreator(dataset_fn,
                                           input_options=input_options)
        strategy = self._get_parameter_server_strategy()
        with strategy.scope():
            model = sequential.Sequential([core_layers.Dense(10)])
            model._cluster_coordinator = cluster_coordinator.ClusterCoordinator(
                strategy)
            data_handler = data_adapter.get_data_handler(x,
                                                         steps_per_epoch=2,
                                                         model=model)

        iter_rv = iter(data_handler._dataset)._values[0]
        iter_rv._rebuild_on(model._cluster_coordinator._cluster.workers[0])
        distributed_iterator = iter_rv._get_values()

        # Ensuring the resulting `DistributedIterator` has the right options.
        self.assertTrue(
            distributed_iterator._options.experimental_fetch_to_device)
        self.assertEqual(
            distributed_iterator._options.experimental_per_replica_buffer_size,
            2)
Example #8
    def test_dataset_creator_model_fit_without_strategy(self):
        model = sequential.Sequential([core_layers.Dense(10)])
        model.compile(gradient_descent.SGD(), loss="mse")

        history = model.fit(dataset_creator.DatasetCreator(
            self._get_dataset_fn()),
                            epochs=10,
                            steps_per_epoch=10,
                            verbose=0)
        self.assertLen(history.history["loss"], 10)
Example #9
  def test_dataset_creator_model_fit_without_strategy(self, use_input_options):
    model = sequential.Sequential([core_layers.Dense(10)])
    model.compile(gradient_descent.SGD(), loss="mse")

    input_options = distribute_lib.InputOptions() if use_input_options else None
    history = model.fit(
        dataset_creator.DatasetCreator(self._get_dataset_fn(), input_options),
        epochs=10,
        steps_per_epoch=10,
        verbose=0)
    self.assertLen(history.history["loss"], 10)
Example #10
  def testModelFitAndPredict(self, strategy):
    def fit_dataset_fn(input_context):
      del input_context
      x = random_ops.random_uniform((10, 1))
      y = random_ops.random_uniform((10,))
      return dataset_ops.DatasetV2.from_tensor_slices(
          (x, y)).shuffle(10).repeat().batch(2)

    x = dataset_creator.DatasetCreator(fit_dataset_fn)
    validation_data = dataset_creator.DatasetCreator(fit_dataset_fn)

    model = self._model_fit(strategy, x=x, validation_data=validation_data)
    _, predictions = self._model_predict(strategy, model, steps=3)

    # Check the first (index 0), fourth (index 3), and last predictions:
    # the first, fourth, and last inputs to `model.predict` are identical,
    # so their predictions should match.
    self.assertTrue(all(predictions[0] == predictions[i] for i in [0, 3, 5]))

    self.assertFalse(
        all(predictions[0] == predictions[i] for i in [0, 1, 2, 4]))
Example #11
    def _model_fit(self,
                   strategy,
                   steps_per_execution=1,
                   validation_data=None,
                   x=None,
                   y=None,
                   shuffle=True,
                   batch_size=None,
                   steps_per_epoch=10,
                   run_eagerly=False,
                   with_normalization_layer=False,
                   callbacks=None,
                   use_lookup_layer=False):
        if callbacks is None:
            callbacks = []

        model, default_callbacks = self._model_compile(
            strategy, steps_per_execution, run_eagerly,
            with_normalization_layer, use_lookup_layer)
        callbacks += default_callbacks

        if x is None:
            x = dataset_creator.DatasetCreator(
                self._get_dataset_fn(use_lookup_layer))

        if validation_data is None:
            validation_data = dataset_creator.DatasetCreator(
                self._get_dataset_fn(use_lookup_layer))

        model.fit(x,
                  y,
                  shuffle=shuffle,
                  batch_size=batch_size,
                  epochs=10,
                  steps_per_epoch=steps_per_epoch,
                  callbacks=callbacks,
                  validation_data=validation_data,
                  validation_steps=steps_per_epoch)
        return model
Example #12
    def test_dataset_creator_usage_in_parameter_server_model_fit(self):
        cluster_def = multi_worker_test_base.create_in_process_cluster(
            num_workers=2, num_ps=1, rpc_layer="grpc")
        strategy = parameter_server_strategy_v2.ParameterServerStrategyV2(
            SimpleClusterResolver(ClusterSpec(cluster_def), rpc_layer="grpc"))
        with strategy.scope():
            model = sequential.Sequential([core_layers.Dense(10)])
        model.compile(gradient_descent.SGD(), loss="mse")

        history = model.fit(dataset_creator.DatasetCreator(
            self._get_dataset_fn()),
                            epochs=10,
                            steps_per_epoch=10,
                            verbose=0)
        self.assertLen(history.history["loss"], 10)
Example #13
    def testModelFitwithStepsPerEpochNegativeOne(self, strategy):
        def dataset_fn(input_context):
            del input_context
            x = random_ops.random_uniform((10, 10))
            y = random_ops.random_uniform((10,))
            return dataset_ops.DatasetV2.from_tensor_slices(
                (x, y)).shuffle(10).batch(2)

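        # The dataset above is finite (no `.repeat()`). The test asserts that
        # coordinator-based (parameter server) strategies surface an
        # out-of-range or cancelled error for `steps_per_epoch=-1`, while
        # other strategies simply run until the dataset is exhausted.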
        if strategy._should_use_with_coordinator:
            with self.assertRaises(
                (errors.OutOfRangeError, errors.CancelledError)):
                self._model_fit(
                    strategy,
                    steps_per_epoch=-1,
                    x=dataset_creator.DatasetCreator(dataset_fn),
                    validation_data=dataset_creator.DatasetCreator(dataset_fn),
                )
        else:
            self._model_fit(
                strategy,
                steps_per_epoch=-1,
                x=dataset_creator.DatasetCreator(dataset_fn),
                validation_data=dataset_creator.DatasetCreator(dataset_fn),
            )
Example #14
  def test_dataset_creator_input_options(self):
    dataset_fn = lambda _: dataset_ops.DatasetV2.from_tensor_slices([1, 1])
    input_options = distribute_lib.InputOptions(
        experimental_fetch_to_device=True,
        experimental_per_replica_buffer_size=2)
    x = dataset_creator.DatasetCreator(dataset_fn, input_options=input_options)
    with collective_all_reduce_strategy.CollectiveAllReduceStrategy().scope():
      data_handler = data_adapter.get_data_handler(
          x,
          steps_per_epoch=2,
          model=sequential.Sequential([core_layers.Dense(10)]))

    # Ensuring the resulting `DistributedDatasetsFromFunction` has the right
    # options.
    self.assertTrue(data_handler._dataset._options.experimental_fetch_to_device)
    self.assertEqual(
        data_handler._dataset._options.experimental_per_replica_buffer_size, 2)
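Constructing such an options-carrying `DatasetCreator` with the public API is straightforward. The two `experimental_*` fields below mirror the ones exercised in the tests above, which assert that they survive the trip through Keras's data handler; a minimal sketch:

import tensorflow as tf

input_options = tf.distribute.InputOptions(
    experimental_fetch_to_device=True,
    experimental_per_replica_buffer_size=2)

x = tf.keras.utils.experimental.DatasetCreator(
    lambda _: tf.data.Dataset.from_tensor_slices([1, 1]),
    input_options=input_options)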
Example #15
    def _model_evaluate(self,
                        strategy,
                        steps_per_execution=1,
                        x=None,
                        y=None,
                        batch_size=None,
                        steps=10,
                        run_eagerly=False,
                        with_normalization_layer=False,
                        callbacks=None):
        if callbacks is None:
            callbacks = []

        model, default_callbacks = self._model_compile(
            strategy,
            steps_per_execution,
            run_eagerly,
            with_normalization_layer,
        )
        callbacks += default_callbacks

        def dataset_fn(input_context):
            del input_context
            x = random_ops.random_uniform((10, 10))
            y = random_ops.random_uniform((10, 1))
            return dataset_ops.DatasetV2.from_tensor_slices(
                (x, y)).shuffle(10).repeat().batch(8)

        if x is None:
            x = dataset_creator.DatasetCreator(dataset_fn)

        model.evaluate(x=x,
                       y=y,
                       steps=steps,
                       callbacks=callbacks,
                       batch_size=batch_size)
        return model
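`model.evaluate` accepts a `DatasetCreator` the same way `fit` does, with `steps` playing the role of `steps_per_epoch`. A minimal public-API sketch with the shapes copied from the test above:

import tensorflow as tf

def dataset_fn(input_context):
    del input_context  # unused outside a distribution strategy
    x = tf.random.uniform((10, 10))
    y = tf.random.uniform((10, 1))
    return tf.data.Dataset.from_tensor_slices(
        (x, y)).shuffle(10).repeat().batch(8)

model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
model.compile(tf.keras.optimizers.SGD(), loss="mse")

# The repeated dataset is unbounded, so the step count must be explicit.
model.evaluate(tf.keras.utils.experimental.DatasetCreator(dataset_fn),
               steps=10)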