Example #1
    def from_trial(
        trial_inst: det.Trial,
        context: det.TrialContext,
        env: det.EnvContext,
        workloads: workload.Stream,
        load_path: Optional[pathlib.Path],
        rendezvous_info: det.RendezvousInfo,
        hvd_config: horovod.HorovodContext,
    ) -> det.TrialController:
        check.is_instance(
            context, keras.TFKerasTrialContext,
            "TFKerasTrialController needs a TFKerasTrialContext")
        context = cast(keras.TFKerasTrialContext, context)

        check.is_instance(trial_inst, TFKerasTrial,
                          "TFKerasTrialController needs a TFKerasTrial")
        trial = cast(TFKerasTrial, trial_inst)

        session = TFKerasTrialController._configure_session(
            env, hvd_config, trial.session_config())

        training_data = keras._adapt_data_from_data_loader(
            input_data=trial.build_training_data_loader(),
            batch_size=context.get_per_slot_batch_size(),
        )

        validation_data = keras._adapt_data_from_data_loader(
            input_data=trial.build_validation_data_loader(),
            batch_size=context.get_per_slot_batch_size(),
        )

        trial.build_model()
        check.is_not_none(context.model, "Please call wrap_model(...).")

        check.is_not_none(context.compile_args,
                          "Please call model.compile(...).")
        compile_args = cast(inspect.BoundArguments, context.compile_args)

        TFKerasTrialController.compile_model(context=context,
                                             compile_args=compile_args,
                                             env=env,
                                             hvd_config=hvd_config)

        tf_keras_callbacks = trial.keras_callbacks()

        return TFKerasTrialController(
            context.model,
            session,
            keras.TFKerasTrainConfig(training_data, validation_data,
                                     tf_keras_callbacks),
            context,
            env,
            workloads,
            load_path,
            rendezvous_info,
            hvd_config,
        )
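
The hooks invoked above (build_model() with wrap_model() and compile(), the two data-loader builders, session_config(), and keras_callbacks()) define the contract a user-defined trial has to satisfy. Below is a minimal, hypothetical sketch of such a trial; the layer sizes, optimizer, XOR data, and the choice of a batched tf.data.Dataset as the loader return type are assumptions for illustration, not the library's prescribed pattern.

    # Hypothetical TFKerasTrial satisfying the hooks called by from_trial() above.
    # Layer sizes, optimizer, and the in-memory XOR data are placeholders.
    from typing import List

    import numpy as np
    import tensorflow as tf
    from determined.keras import TFKerasTrial, TFKerasTrialContext


    class XORTrial(TFKerasTrial):
        def __init__(self, context: TFKerasTrialContext) -> None:
            self.context = context

        def build_model(self) -> tf.keras.Model:
            model = tf.keras.Sequential(
                [tf.keras.layers.Dense(8, activation="relu"), tf.keras.layers.Dense(1, activation="sigmoid")]
            )
            # wrap_model() registers the model on the context (checked via context.model above);
            # compile() is intercepted so its arguments end up in context.compile_args.
            model = self.context.wrap_model(model)
            model.compile(optimizer="sgd", loss="binary_crossentropy")
            return model

        def build_training_data_loader(self) -> tf.data.Dataset:
            # Returning a batched tf.data.Dataset is an assumption about the accepted loader
            # types; Sequences and array pairs are the other forms named in fit()'s docstring.
            x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
            y = np.array([[0], [1], [1], [0]], dtype=np.float32)
            return tf.data.Dataset.from_tensor_slices((x, y)).batch(
                self.context.get_per_slot_batch_size()
            )

        def build_validation_data_loader(self) -> tf.data.Dataset:
            return self.build_training_data_loader()

        def keras_callbacks(self) -> List[tf.keras.callbacks.Callback]:
            return []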
Example #2
    def from_trial(
        cls: Type["TFKerasTrialController"],
        trial_inst: det.Trial,
        context: det.TrialContext,
        env: det.EnvContext,
        workloads: Optional[workload.Stream] = None,
    ) -> det.TrialController:
        check.is_instance(
            context, keras.TFKerasTrialContext, "TFKerasTrialController needs a TFKerasTrialContext"
        )
        context = cast(keras.TFKerasTrialContext, context)

        check.is_instance(trial_inst, TFKerasTrial, "TFKerasTrialController needs a TFKerasTrial")
        trial = cast(TFKerasTrial, trial_inst)

        # Keras only supports horovod backend for distributed training
        session = cls._configure_session(
            env, trial.session_config(), use_horovod=context.distributed.size > 1
        )

        training_data = keras._adapt_data_from_data_loader(
            input_data=trial.build_training_data_loader(),
            batch_size=context.get_per_slot_batch_size(),
        )

        validation_data = keras._adapt_data_from_data_loader(
            input_data=trial.build_validation_data_loader(),
            batch_size=context.get_per_slot_batch_size(),
        )

        trial.build_model()
        check.is_not_none(context.model, "Please call wrap_model(...).")

        check.is_not_none(context.compile_args, "Please call model.compile(...).")
        compile_args = cast(inspect.BoundArguments, context.compile_args)

        cls.compile_model(context=context, compile_args=compile_args, env=env)

        tf_keras_callbacks = trial.keras_callbacks()

        return cls(
            context.model,
            session,
            keras.TFKerasTrainConfig(training_data, validation_data, tf_keras_callbacks),
            trial,
            context,
            env,
            workloads,
        )
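
Both variants feed the result of trial.session_config() into _configure_session(); in this variant horovod is enabled only when context.distributed.size is greater than one. A hedged sketch of the session_config() hook follows; the tf.compat.v1.ConfigProto return type and the import path are assumptions, and the other required trial hooks are omitted.

    # Hedged sketch of a session_config() override as consumed by _configure_session() above.
    # The ConfigProto return type is an assumption; build_model() and the data-loader
    # builders are omitted for brevity.
    import tensorflow as tf
    from determined.keras import TFKerasTrial


    class SessionConfigTrial(TFKerasTrial):
        def session_config(self) -> tf.compat.v1.ConfigProto:
            # Grow GPU memory on demand instead of reserving it all at startup.
            gpu_options = tf.compat.v1.GPUOptions(allow_growth=True)
            return tf.compat.v1.ConfigProto(gpu_options=gpu_options)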
Example #3
            def fit_generator(wrapper, *args: Any, **kwargs: Any) -> None:
                if not self.compile_args:
                    raise errors.InvalidExperimentException(
                        "Must call .compile before calling .fit_generator().")

                fit_generator_args = inspect.signature(model.fit_generator).bind(*args, **kwargs)
                fit_generator_args.apply_defaults()

                training_data = fit_generator_args.arguments["generator"]

                if fit_generator_args.arguments["validation_data"] is None:
                    raise errors.InvalidExperimentException(
                        "Determined requires validation_data in the call to fit_generator()."
                    )

                validation_data = keras._adapt_data_from_data_loader(
                    input_data=fit_generator_args.arguments["validation_data"],
                    batch_size=self.env.per_slot_batch_size,
                )

                self.train_config = TFKerasTrainConfig(
                    training_data=training_data,
                    validation_data=validation_data,
                    callbacks=fit_generator_args.arguments["callbacks"],
                )

                self.configure_fit(
                    verbose=fit_generator_args.arguments["verbose"],
                    class_weight=fit_generator_args.arguments["class_weight"],
                    shuffle=fit_generator_args.arguments["shuffle"],
                    workers=fit_generator_args.arguments["workers"],
                    use_multiprocessing=fit_generator_args.arguments["use_multiprocessing"],
                    max_queue_size=fit_generator_args.arguments["max_queue_size"],
                )

                if train_fn:
                    train_fn()
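
On the user side, the wrapper above requires validation_data to be passed explicitly to fit_generator(). A hedged usage sketch follows; XORSequence and launch_training are hypothetical names, and the model is assumed to have been wrapped via context.wrap_model() and compiled beforehand.

    # Hypothetical usage of the fit_generator() wrapper above.
    import numpy as np
    import tensorflow as tf


    class XORSequence(tf.keras.utils.Sequence):
        def __len__(self) -> int:
            return 1

        def __getitem__(self, index: int):
            x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
            y = np.array([[0], [1], [1], [0]], dtype=np.float32)
            return x, y


    def launch_training(model: tf.keras.Model) -> None:
        # `model` is assumed to be wrapped and compiled so the compile_args check passes.
        model.fit_generator(
            XORSequence(),
            validation_data=XORSequence(),  # omitting this raises InvalidExperimentException above
            workers=1,
            use_multiprocessing=False,
        )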
Example #4
            def fit(wrapper, *args: Any, **kwargs: Any) -> None:
                """Communicate a model, data, and other training configuration with the harness.

                Parameters:
                    the same as tf.keras.Model.fit except for this function only handles the
                    following cases of data

                    x: Input data. It could be:
                        1) A Numpy array (or array-like), or a list of arrays (in case the model
                        has multiple inputs).
                        2) A dict mapping input names to the corresponding array, if the model
                        has named inputs.
                        3) A tf.data dataset. Should return a tuple of either (inputs, targets) or
                        (inputs, targets, sample_weights).
                        4) A keras.utils.Sequence returning (inputs, targets) or (inputs, targets,
                        sample weights).

                    y: Target data. Like the input data x, it could be either Numpy array(s).
                        If x is a dataset or keras.utils.Sequence instance, y should not be
                        specified(since targets will be obtained from x).

                    validation_data: Data on which to evaluate the loss and any model metrics
                        at the end of each epoch. The model will not be trained on this data.
                        validation_data will override validation_split. validation_data could be:
                        1) tuple (x_val, y_val) of Numpy arrays
                        2) tuple (x_val, y_val, val_sample_weights) of Numpy arrays
                        3) dataset For the first two cases, batch_size must be provided.
                        For the last case, validation_steps could be provided.
                """
                if not self.compile_args:
                    raise errors.InvalidExperimentException(
                        "Must call .compile before calling .fit()."
                    )

                fit_args = inspect.signature(model.fit).bind(*args, **kwargs)
                fit_args.apply_defaults()

                training_data = keras._adapt_data_from_fit_args(
                    x=fit_args.arguments["x"],
                    y=fit_args.arguments["y"],
                    sample_weight=fit_args.arguments["sample_weight"],
                    batch_size=self.env.per_slot_batch_size,
                )

                if fit_args.arguments["validation_data"] is None:
                    raise errors.InvalidExperimentException(
                        "Determined requires validation_data in the call to fit()."
                    )

                validation_data = keras._adapt_data_from_data_loader(
                    input_data=fit_args.arguments["validation_data"],
                    batch_size=self.env.per_slot_batch_size,
                )

                self.train_config = TFKerasTrainConfig(
                    training_data=training_data,
                    validation_data=validation_data,
                    callbacks=fit_args.arguments["callbacks"],
                )

                self.configure_fit(
                    verbose=fit_args.arguments["verbose"],
                    shuffle=fit_args.arguments["shuffle"],
                    class_weight=fit_args.arguments["class_weight"],
                    workers=fit_args.arguments["workers"],
                    use_multiprocessing=fit_args.arguments["use_multiprocessing"],
                    max_queue_size=fit_args.arguments["max_queue_size"],
                )

                if train_fn:
                    train_fn()
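
The docstring above lists the accepted forms of x and validation_data. Below is a hedged sketch of one of them, a batched tf.data dataset for both arguments; run_fit and the XOR data are hypothetical, and the model is again assumed to be wrapped and compiled so the compile_args check passes.

    # Hypothetical usage of the fit() wrapper above with tf.data datasets; the wrapper
    # raises unless validation_data is supplied.
    import numpy as np
    import tensorflow as tf


    def run_fit(model: tf.keras.Model) -> None:
        x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
        y = np.array([[0], [1], [1], [0]], dtype=np.float32)
        train_ds = tf.data.Dataset.from_tensor_slices((x, y)).batch(2)
        val_ds = tf.data.Dataset.from_tensor_slices((x, y)).batch(2)
        model.fit(train_ds, validation_data=val_ds, shuffle=False, verbose=1)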
def test_adapt_invalid_data_type() -> None:
    seqs = utils.make_xor_data_sequences()
    test = keras._adapt_data_from_data_loader(seqs[1], batch_size=1)
    with pytest.raises(det.errors.InvalidDataTypeException) as err:
        keras._adapt_data_from_data_loader((None, test), batch_size=1)
    # The assertion must sit outside the pytest.raises block; inside it, nothing after the
    # raising call ever executes.
    assert err is not None