Example 1
def run_experiment(run_config):
    """Train, evaluate, and export the model using the
    tf.estimator.train_and_evaluate API.

    Args:
        run_config: tf.estimator.RunConfig handed to the model factory
            selected by metadata.TASK_TYPE.
    """

    train_input_fn = input.generate_input_fn(
        file_names_pattern=HYPER_PARAMS.train_files,
        mode=tf.estimator.ModeKeys.TRAIN,
        num_epochs=HYPER_PARAMS.num_epochs,
        batch_size=HYPER_PARAMS.train_batch_size)

    eval_input_fn = input.generate_input_fn(
        file_names_pattern=HYPER_PARAMS.eval_files,
        mode=tf.estimator.ModeKeys.EVAL,
        batch_size=HYPER_PARAMS.eval_batch_size)

    exporter = tf.estimator.FinalExporter(
        'estimator',
        input.SERVING_FUNCTIONS[HYPER_PARAMS.export_format],
        as_text=False  # change to True to export the model as readable text
    )

    # Derive the step count from train_size/num_epochs when both are given;
    # otherwise fall back to the explicitly supplied HYPER_PARAMS.train_steps.
    steps_computed = (HYPER_PARAMS.train_size is not None
                      and HYPER_PARAMS.num_epochs is not None)
    if steps_computed:
        train_steps = (HYPER_PARAMS.train_size / HYPER_PARAMS.train_batch_size) * \
                      HYPER_PARAMS.num_epochs
    else:
        train_steps = HYPER_PARAMS.train_steps

    train_spec = tf.estimator.TrainSpec(train_input_fn,
                                        max_steps=int(train_steps))

    eval_spec = tf.estimator.EvalSpec(
        eval_input_fn,
        steps=HYPER_PARAMS.eval_steps,
        exporters=[exporter],
        name='estimator-eval',
        throttle_secs=HYPER_PARAMS.eval_every_secs,
    )

    print("* experiment configurations")
    print("===========================")
    print("Train size: {}".format(HYPER_PARAMS.train_size))
    print("Epoch count: {}".format(HYPER_PARAMS.num_epochs))
    print("Train batch size: {}".format(HYPER_PARAMS.train_batch_size))
    # BUG FIX: the label previously checked only train_size, so a run with
    # train_size set but num_epochs unset used the supplied train_steps yet
    # was mislabeled "computed". Reuse the same predicate as the computation.
    print("Training steps: {} ({})".format(
        int(train_steps),
        "computed" if steps_computed else "supplied"))
    print("Evaluate every: {} seconds".format(HYPER_PARAMS.eval_every_secs))
    print("===========================")

    # Pick the estimator factory that matches the task declared in metadata.
    if metadata.TASK_TYPE == "classification":
        estimator = model.create_classifier(config=run_config)
    elif metadata.TASK_TYPE == "regression":
        estimator = model.create_regressor(config=run_config)
    else:
        estimator = model.create_estimator(config=run_config)

    # train and evaluate
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
Example 2
def experiment():
    """Assemble train/eval input pipelines and run estimator training."""

    # Training pipeline reads the TFRecords with the training batch
    # size and time step from the shared config object.
    training_inputs = generate_input_fn(
        is_train=True,
        tfrecords_path=config.tfrecords_path,
        batch_size=config.batch_size,
        time_step=config.time_step)

    # Evaluation pipeline uses its own batch size and time step.
    evaluation_inputs = generate_input_fn(
        is_train=False,
        tfrecords_path=config.tfrecords_path,
        batch_size=config.batch_size_eval,
        time_step=config.time_step_eval)

    runner = Estimator(
        train_input_fn=training_inputs,
        eval_input_fn=evaluation_inputs,
        model_fn=model_fn)

    runner.train()
Example 3
def run_experiment(run_config):
    """Train, evaluate, and export the model via tf.estimator.train_and_evaluate,
    then export an additional eval SavedModel for TensorFlow Model Analysis.

    Args:
        run_config: tf.estimator.RunConfig handed to the model factory
            selected by metadata.TASK_TYPE.
    """

    train_input_fn = input.generate_input_fn(
        file_names_pattern=HYPER_PARAMS.train_files,
        mode=tf.estimator.ModeKeys.TRAIN,
        num_epochs=HYPER_PARAMS.num_epochs,
        batch_size=HYPER_PARAMS.train_batch_size)

    eval_input_fn = input.generate_input_fn(
        file_names_pattern=HYPER_PARAMS.eval_files,
        mode=tf.estimator.ModeKeys.EVAL,
        batch_size=HYPER_PARAMS.eval_batch_size)

    exporter = tf.estimator.FinalExporter(
        'estimator',
        input.SERVING_FUNCTIONS[HYPER_PARAMS.export_format],
        as_text=False  # change to True to export the model as readable text
    )

    # Derive the step count from train_size/num_epochs when both are given;
    # otherwise fall back to the explicitly supplied HYPER_PARAMS.train_steps.
    steps_computed = (HYPER_PARAMS.train_size is not None
                      and HYPER_PARAMS.num_epochs is not None)
    if steps_computed:
        train_steps = (HYPER_PARAMS.train_size / HYPER_PARAMS.train_batch_size) * \
                      HYPER_PARAMS.num_epochs
    else:
        train_steps = HYPER_PARAMS.train_steps

    train_spec = tf.estimator.TrainSpec(train_input_fn,
                                        max_steps=int(train_steps))

    eval_spec = tf.estimator.EvalSpec(
        eval_input_fn,
        steps=HYPER_PARAMS.eval_steps,
        exporters=[exporter],
        throttle_secs=HYPER_PARAMS.eval_every_secs,
    )

    print("* experiment configurations")
    print("===========================")
    print("Train size: {}".format(HYPER_PARAMS.train_size))
    print("Epoch count: {}".format(HYPER_PARAMS.num_epochs))
    print("Train batch size: {}".format(HYPER_PARAMS.train_batch_size))
    # BUG FIX: the label previously checked only train_size, so a run with
    # train_size set but num_epochs unset used the supplied train_steps yet
    # was mislabeled "computed". Reuse the same predicate as the computation.
    print("Training steps: {} ({})".format(
        int(train_steps),
        "computed" if steps_computed else "supplied"))
    print("Evaluate every {} seconds".format(HYPER_PARAMS.eval_every_secs))
    print("===========================")

    # Pick the estimator factory that matches the task declared in metadata.
    if metadata.TASK_TYPE == "classification":
        estimator = model.create_classifier(config=run_config)
    elif metadata.TASK_TYPE == "regression":
        estimator = model.create_regressor(config=run_config)
    else:
        estimator = model.create_estimator(config=run_config)

    # train and evaluate
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)

    # This is the export for the Tensorflow Model Analysis tool.
    # TFMA receivers only exist for the CSV and EXAMPLE export formats.
    if HYPER_PARAMS.export_format in ['CSV', 'EXAMPLE']:
        eval_receiver_fn = input.TFMA_SERVING_FUNCTIONS[
            HYPER_PARAMS.export_format]
        tfma_export.export_eval_savedmodel(
            estimator=estimator,
            export_dir_base=os.path.join(estimator.model_dir, "tfma_export"),
            eval_input_receiver_fn=eval_receiver_fn)
    else:
        tf.logging.info("TFMA doesn't yet support a JSON input receiver. "
                        "The TFMA export will not be created.")