Example #1
  def create_model(self):
    """Creates recommendation model based on params.

    Returns:
      Keras model.
    """
    return _rm.RecommendationModel(self.params)
Example #2
 def test_model_train_cnn(self):
     input_config = self._create_test_input_config(
         input_config_pb2.EncoderType.CNN)
     model_config = self._create_test_model_config()
     test_model = recommendation_model.RecommendationModel(
         input_config=input_config, model_config=model_config)
     batch_size = 4
     input_context_movie_id = tf.keras.layers.Input(shape=(10, ),
                                                    dtype=tf.int32,
                                                    batch_size=batch_size,
                                                    name='context_movie_id')
     input_context_movie_rating = tf.keras.layers.Input(
         shape=(10, ),
         dtype=tf.float32,
         batch_size=batch_size,
         name='context_movie_rating')
     input_label_movie_id = tf.keras.layers.Input(shape=(1, ),
                                                  dtype=tf.int32,
                                                  batch_size=batch_size,
                                                  name='label_movie_id')
     inputs = {
         'context_movie_id': input_context_movie_id,
         'context_movie_rating': input_context_movie_rating,
         'label_movie_id': input_label_movie_id
     }
     logits = test_model(inputs)
     self.assertAllEqual([batch_size, 20], logits.shape.as_list())
Example #3
  def create_model(self):
    """Creates recommendation model based on params.

    Returns:
      Keras model.
    """
    return _model.RecommendationModel(self.input_spec, self.model_hparams)
Example #4
def export(checkpoint_path, export_dir, params, max_history_length):
    """Export savedmodel."""
    model = recommendation_model.RecommendationModel(params)
    checkpoint = tf.train.Checkpoint(model=model)
    checkpoint.restore(checkpoint_path).run_restore_ops()
    signatures = {
        tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
        model.serve.get_concrete_function(input_context=tf.TensorSpec(
            shape=[max_history_length], dtype=tf.dtypes.int32, name='context'))
    }
    tf.saved_model.save(model, export_dir=export_dir, signatures=signatures)
    return export_dir
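Purely as an illustrative follow-up (not part of the original example), the SavedModel produced by export() above could be loaded back and queried roughly as follows; the 'context' input key is assumed from the TensorSpec name registered in the signature, and the history is zero-padded to max_history_length:

import tensorflow as tf

loaded = tf.saved_model.load(export_dir)
serving_fn = loaded.signatures[tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
# Zero-pad the watch history up to the fixed context length expected by the signature.
history = [1, 2, 3, 4, 5]
context = tf.constant(history + [0] * (max_history_length - len(history)), dtype=tf.int32)
outputs = serving_fn(context=context)
print(outputs)  # dict of predictions, e.g. top prediction ids and scores as in Example #5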
Example #5
 def test_model_serve(self):
     config = {
         "context_embedding_dim": 128,
         "label_embedding_dim": 32,
         "hidden_layer_dim_ratios": [1, 0.5, 0.25],
         "item_vocab_size": 16,
         "encoder_type": "bow",
         "num_predictions": 10
     }
     test_model = recommendation_model.RecommendationModel(config)
     input_context = tf.constant([1, 2, 3, 4, 5])
     outputs = test_model.serve(input_context)
     self.assertAllEqual([10],
                         outputs["top_prediction_ids"].shape.as_list())
     self.assertAllEqual([10],
                         outputs["top_prediction_scores"].shape.as_list())
Example #6
def build_keras_model(input_config: input_config_pb2.InputConfig,
                      model_config: model_config_class.ModelConfig):
  """Construct and compile recommendation keras model.

  Construct recommendation model according to input config and model config.
  Compile the model with optimizer, loss function and eval metrics.

  Args:
    input_config: The configuration object (input_config_pb2.InputConfig) that
      holds parameters for model input feature processing.
    model_config: A ModelConfig object that holds parameters to set up the
      model architecture.

  Returns:
    The compiled keras model.
  """
  model = recommendation_model.RecommendationModel(
      input_config=input_config, model_config=model_config)
  compile_model(model, model_config.eval_top_k, FLAGS.learning_rate,
                FLAGS.gradient_clip_norm)
  return model
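compile_model itself is not shown in this example. A minimal sketch of what such a helper could look like is given below; the Adagrad optimizer, the from-logits sparse cross-entropy loss, and the metric names are assumptions for illustration, not the library's actual implementation:

import tensorflow as tf

def compile_model(model, eval_top_k, learning_rate, gradient_clip_norm):
  """Hypothetical helper: compiles the model with optimizer, loss and top-K metrics."""
  optimizer = tf.keras.optimizers.Adagrad(learning_rate=learning_rate,
                                          clipnorm=gradient_clip_norm)
  metrics = [
      tf.keras.metrics.SparseTopKCategoricalAccuracy(k=k, name='top_%d_accuracy' % k)
      for k in eval_top_k
  ]
  model.compile(
      optimizer=optimizer,
      loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
      metrics=metrics)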
Example #7
 def test_model_train(self):
     config = {
         "context_embedding_dim": 128,
         "label_embedding_dim": 32,
         "hidden_layer_dim_ratios": [1, 0.5, 0.25],
         "item_vocab_size": 16,
         "encoder_type": "bow"
     }
     batch_size = 128
     test_model = recommendation_model.RecommendationModel(config)
     input_context = tf.keras.layers.Input(shape=(None, ),
                                           dtype=tf.int32,
                                           batch_size=batch_size,
                                           name="context")
     input_label = tf.keras.layers.Input(shape=(1, ),
                                         dtype=tf.int32,
                                         batch_size=batch_size,
                                         name="label")
     inputs = {"context": input_context, "label": input_label}
     logits = test_model(inputs)
     self.assertAllEqual([batch_size, config["item_vocab_size"] + 1],
                         logits.shape.as_list())
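Reusing config and test_model from the snippet above, a quick way to exercise the model eagerly (outside the Keras functional graph used in the test) is to push a synthetic batched tf.data pipeline through it; the random data below is only meant to confirm the [batch_size, item_vocab_size + 1] logits shape:

import tensorflow as tf

vocab_size = config["item_vocab_size"]
dataset = tf.data.Dataset.from_tensor_slices({
    "context": tf.random.uniform([256, 10], maxval=vocab_size + 1, dtype=tf.int32),
    "label": tf.random.uniform([256, 1], maxval=vocab_size + 1, dtype=tf.int32),
}).batch(128, drop_remainder=True)

for batch in dataset.take(1):
  logits = test_model(batch)
  print(logits.shape)  # expected: (128, vocab_size + 1)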
Example #8
def save_model(checkpoint_path: str, export_dir: str,
               input_config: input_config_pb2.InputConfig,
               model_config: model_config_class.ModelConfig):
  """Export to savedmodel.

  Args:
    checkpoint_path: The path to the checkpoint that the model will be exported
      based on.
    export_dir: The directory to export models to.
    input_config: The input config of the model.
    model_config: The configuration to set up the model.
  """
  model = recommendation_model.RecommendationModel(
      input_config=input_config,
      model_config=model_config)
  checkpoint = tf.train.Checkpoint(model=model)
  checkpoint.restore(checkpoint_path).run_restore_ops()
  input_specs = input_pipeline.get_serving_input_specs(input_config)
  signatures = {
      tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
          model.serve.get_concrete_function(**input_specs)
  }
  tf.saved_model.save(model, export_dir=export_dir, signatures=signatures)
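As a hedged sanity check (not part of the original example), the artifact written by save_model() could be reloaded and its default serving signature inspected; the exact feature names depend on input_pipeline.get_serving_input_specs and are not shown here:

import tensorflow as tf

loaded = tf.saved_model.load(export_dir)
serving_fn = loaded.signatures[tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
print(serving_fn.structured_input_signature)  # expected input names, shapes and dtypes
print(serving_fn.structured_outputs)          # outputs, e.g. top prediction ids and scores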
Example #9
def build_keras_model(params, learning_rate, gradient_clip_norm):
    """Construct and compile recommendation keras model."""
    model = recommendation_model.RecommendationModel(params)
    compile_model(model, params, learning_rate, gradient_clip_norm)
    return model