Example #1
0
 def _estimator_fn(model_dir, exogenous_feature_columns):
     """Build a seeded `TimeSeriesRegressor` around an LSTM-backed ARModel.

     Args:
       model_dir: Directory for the estimator's checkpoints.
       exogenous_feature_columns: Feature columns for exogenous inputs,
         forwarded to the underlying `ARModel`.

     Returns:
       An `estimators.TimeSeriesRegressor` using `_SeedRunConfig`.
     """
     # Factory producing the LSTM prediction model with a fixed unit count.
     lstm_factory = functools.partial(ar_model.LSTMPredictionModel, num_units=10)
     model = ar_model.ARModel(
         periodicities=10,
         input_window_size=10,
         output_window_size=6,
         num_features=1,
         exogenous_feature_columns=exogenous_feature_columns,
         prediction_model_factory=lstm_factory)
     return estimators.TimeSeriesRegressor(
         model=model,
         config=_SeedRunConfig(),
         model_dir=model_dir)
Example #2
0
  def test_one_shot_prediction_head_export(self):
    """End-to-end export test for `OneShotPredictionHead`.

    Trains an LSTM-backed AR model with a 2-D numeric and a categorical
    exogenous feature, then exports and serves it two ways:
      1. a raw-tensor serving signature (numpy arrays fed directly);
      2. a tf.Example-parsing signature built with
         `build_one_shot_parsing_serving_input_receiver_fn`.
    Both exports are checked to produce a "mean" prediction of shape
    (2, 15, 5): batch of 2, 15 predicted steps, 5 features.
    """

    def _new_temp_dir():
      # Fresh directory per call so the model dir and the two exports below
      # do not collide.
      return os.path.join(tf.compat.v1.test.get_temp_dir(), str(ops.uid()))
    model_dir = _new_temp_dir()
    categorical_column = tf.feature_column.categorical_column_with_hash_bucket(
        key="categorical_exogenous_feature", hash_bucket_size=16)
    exogenous_feature_columns = [
        tf.feature_column.numeric_column(
            "2d_exogenous_feature", shape=(2,)),
        tf.feature_column.embedding_column(
            categorical_column=categorical_column, dimension=10)]
    estimator = ts_estimators.TimeSeriesRegressor(
        model=ar_model.ARModel(
            periodicities=10,
            input_window_size=10,
            output_window_size=6,
            num_features=5,
            exogenous_feature_columns=exogenous_feature_columns,
            prediction_model_factory=functools.partial(
                ar_model.LSTMPredictionModel, num_units=10)),
        head_type=ts_head_lib.OneShotPredictionHead,
        model_dir=model_dir)

    def train_input_fn():
      # One batch (leading axis of size 1) of 16 timesteps; VALUES is the
      # 0..15 ramp tiled across the 5 features.
      num_range = tf.range(16, dtype=tf.dtypes.int64)
      features = {
          feature_keys.TrainEvalFeatures.TIMES:
              tf.compat.v1.expand_dims(num_range, axis=0),
          feature_keys.TrainEvalFeatures.VALUES:
              tf.compat.v1.expand_dims(
                  tf.tile(num_range[:, None], [1, 5]), axis=0),
          "2d_exogenous_feature":
              tf.ones([1, 16, 2]),
          "categorical_exogenous_feature":
              tf.compat.v1.expand_dims(
                  tf.tile(["strkey"], [16])[:, None], axis=0)
      }
      return features

    estimator.train(input_fn=train_input_fn, steps=5)
    result = estimator.evaluate(input_fn=train_input_fn, steps=1)
    self.assertIn("average_loss", result)
    # The one-shot head should not expose model state in evaluation results.
    self.assertNotIn(feature_keys.State.STATE_TUPLE, result)
    # Export #1: raw-tensor serving signature.
    input_receiver_fn = estimator.build_raw_serving_input_receiver_fn()
    export_location = estimator.export_saved_model(_new_temp_dir(),
                                                   input_receiver_fn)
    graph = tf.Graph()
    with graph.as_default():
      with tf.compat.v1.Session() as session:
        signatures = tf.compat.v1.saved_model.load(
            session, [tf.saved_model.SERVING], export_location)
        # Exactly one signature (PREDICT), taking the filtering features plus
        # both exogenous features.
        self.assertEqual([feature_keys.SavedModelLabels.PREDICT],
                         list(signatures.signature_def.keys()))
        predict_signature = signatures.signature_def[
            feature_keys.SavedModelLabels.PREDICT]
        six.assertCountEqual(
            self,
            [feature_keys.FilteringFeatures.TIMES,
             feature_keys.FilteringFeatures.VALUES,
             "2d_exogenous_feature",
             "categorical_exogenous_feature"],
            predict_signature.inputs.keys())
        # 35 TIMES but only 20 VALUES: the remaining 15 steps are predicted.
        # Exogenous features must cover all 35 steps.
        features = {
            feature_keys.TrainEvalFeatures.TIMES: numpy.tile(
                numpy.arange(35, dtype=numpy.int64)[None, :], [2, 1]),
            feature_keys.TrainEvalFeatures.VALUES: numpy.tile(numpy.arange(
                20, dtype=numpy.float32)[None, :, None], [2, 1, 5]),
            "2d_exogenous_feature": numpy.ones([2, 35, 2]),
            "categorical_exogenous_feature": numpy.tile(numpy.array(
                ["strkey"] * 35)[None, :, None], [2, 1, 1])
        }
        feeds = {
            graph.as_graph_element(input_value.name): features[input_key]
            for input_key, input_value in predict_signature.inputs.items()}
        fetches = {output_key: graph.as_graph_element(output_value.name)
                   for output_key, output_value
                   in predict_signature.outputs.items()}
        output = session.run(fetches, feed_dict=feeds)
        self.assertEqual((2, 15, 5), output["mean"].shape)
    # Build a parsing input function, then make a tf.Example for it to parse.
    export_location = estimator.export_saved_model(
        _new_temp_dir(),
        estimator.build_one_shot_parsing_serving_input_receiver_fn(
            filtering_length=20, prediction_length=15))
    graph = tf.Graph()
    with graph.as_default():
      with tf.compat.v1.Session() as session:
        example = example_pb2.Example()
        times = example.features.feature[feature_keys.TrainEvalFeatures.TIMES]
        values = example.features.feature[feature_keys.TrainEvalFeatures.VALUES]
        # Same layout as the raw export: 35 times, values only for the first
        # 20 steps (the filtering_length above).
        times.int64_list.value.extend(range(35))
        for i in range(20):
          values.float_list.value.extend(
              [float(i) * 2. + feature_number
               for feature_number in range(5)])
        real_feature = example.features.feature["2d_exogenous_feature"]
        categortical_feature = example.features.feature[
            "categorical_exogenous_feature"]
        # Exogenous features span all 35 steps, including the prediction
        # window.
        for i in range(35):
          real_feature.float_list.value.extend([1, 1])
          categortical_feature.bytes_list.value.append(b"strkey")
        # Serialize the tf.Example for feeding to the Session
        examples = [example.SerializeToString()] * 2
        signatures = tf.compat.v1.saved_model.load(
            session, [tf.saved_model.SERVING], export_location)
        predict_signature = signatures.signature_def[
            feature_keys.SavedModelLabels.PREDICT]
        # The parsing signature takes a single input: serialized tf.Examples.
        ((_, input_value),) = predict_signature.inputs.items()
        feeds = {graph.as_graph_element(input_value.name): examples}
        fetches = {output_key: graph.as_graph_element(output_value.name)
                   for output_key, output_value
                   in predict_signature.outputs.items()}
        output = session.run(fetches, feed_dict=feeds)
        self.assertEqual((2, 15, 5), output["mean"].shape)
Example #3
0
def _train_on_generated_data(
    generate_fn, generative_model, train_iterations, seed,
    learning_rate=0.1, ignore_params_fn=lambda _: (),
    derived_param_test_fn=lambda _: (),
    train_input_fn_type=input_pipeline.WholeDatasetInputFn,
    train_state_manager=state_management.PassthroughStateManager()):
  """The training portion of parameter recovery tests.

  Generates data from `generative_model` at known ("true") parameter values,
  evaluates the model's loss with the true parameters fed in, then trains a
  fresh estimator on the generated data with a fixed random seed so results
  are reproducible.

  NOTE(review): the `train_state_manager` default is a single instance shared
  across calls (evaluated once at definition time) — confirm that is intended
  before reusing this helper concurrently.

  Args:
    generate_fn: Callable taking the model and returning a tuple of
      (time_series_reader, {parameter_tensor: true_value}).
    generative_model: Model used both to generate the data and to train on it.
    train_iterations: Number of training steps (`max_steps` for training).
    seed: Graph-level random seed; also returned as the estimator config's
      `tf_random_seed`.
    learning_rate: Learning rate for the Adam optimizer.
    ignore_params_fn: Callable taking the model and returning parameters to
      exclude (returned to the caller as `ignore_params`).
    derived_param_test_fn: Callable taking the model and returning derived
      parameter tensors to evaluate at the true parameter values.
    train_input_fn_type: Input-function class, constructed with
      `time_series_reader=...`, used for training.
    train_state_manager: State manager passed to the training estimator.

  Returns:
    A tuple of (ignore_params, true_parameters, true_transformed_params,
    trained_loss, true_param_loss, saving_hook, true_parameter_eval_graph).
  """
  random_seed.set_random_seed(seed)
  # Generate the series in its own graph so generation ops do not leak into
  # the evaluation or training graphs below.
  generate_graph = ops.Graph()
  with generate_graph.as_default():
    with session.Session(graph=generate_graph):
      generative_model.initialize_graph()
      time_series_reader, true_parameters = generate_fn(generative_model)
      # Re-key by tensor name so the values can be fed into tensors of the
      # separately-constructed evaluation graph.
      true_parameters = {
          tensor.name: value for tensor, value in true_parameters.items()}
  eval_input_fn = input_pipeline.WholeDatasetInputFn(time_series_reader)
  eval_state_manager = state_management.PassthroughStateManager()
  # Second graph: compute the loss with the true parameter values fed in,
  # giving a reference point for the trained loss.
  true_parameter_eval_graph = ops.Graph()
  with true_parameter_eval_graph.as_default():
    generative_model.initialize_graph()
    ignore_params = ignore_params_fn(generative_model)
    feature_dict, _ = eval_input_fn()
    eval_state_manager.initialize_graph(generative_model)
    feature_dict[TrainEvalFeatures.VALUES] = math_ops.cast(
        feature_dict[TrainEvalFeatures.VALUES], generative_model.dtype)
    model_outputs = eval_state_manager.define_loss(
        model=generative_model,
        features=feature_dict,
        mode=estimator_lib.ModeKeys.EVAL)
    with session.Session(graph=true_parameter_eval_graph) as sess:
      variables.global_variables_initializer().run()
      # Queue runners feed the input pipeline; stop/join them before leaving
      # the session.
      coordinator = coordinator_lib.Coordinator()
      queue_runner_impl.start_queue_runners(sess, coord=coordinator)
      true_param_loss = model_outputs.loss.eval(feed_dict=true_parameters)
      true_transformed_params = {
          param: param.eval(feed_dict=true_parameters)
          for param in derived_param_test_fn(generative_model)}
      coordinator.request_stop()
      coordinator.join()

  # Presumably records the named tensors' values during training for later
  # comparison; fires once near the end (step train_iterations - 1).
  saving_hook = _SavingTensorHook(
      tensors=true_parameters.keys(),
      every_n_iter=train_iterations - 1)

  class _RunConfig(estimator_lib.RunConfig):
    # Pin the estimator's random seed to the data-generation seed so the
    # whole test is deterministic.

    @property
    def tf_random_seed(self):
      return seed

  estimator = estimators.TimeSeriesRegressor(
      model=generative_model,
      config=_RunConfig(),
      state_manager=train_state_manager,
      optimizer=adam.AdamOptimizer(learning_rate))
  train_input_fn = train_input_fn_type(time_series_reader=time_series_reader)
  # Train to max_steps, then evaluate once on the full dataset.
  trained_loss = (estimator.train(
      input_fn=train_input_fn,
      max_steps=train_iterations,
      hooks=[saving_hook]).evaluate(
          input_fn=eval_input_fn, steps=1))["loss"]
  logging.info("Final trained loss: %f", trained_loss)
  logging.info("True parameter loss: %f", true_param_loss)
  return (ignore_params, true_parameters, true_transformed_params,
          trained_loss, true_param_loss, saving_hook,
          true_parameter_eval_graph)