Example #1
        def _pack_obj(obj):
            """Recursively packs objects.
            """
            try:
                return copy.deepcopy(obj)
            except TypeError:
                pass  # is this a Keras serializable?
            try:
                model_metadata = saving_utils.model_metadata(obj)
                training_config = model_metadata["training_config"]
                model = serialize(obj)
                weights = obj.get_weights()
                return SavedKerasModel(
                    cls=obj.__class__,
                    model=model,
                    weights=weights,
                    training_config=training_config,
                )
            except (TypeError, AttributeError):
                pass  # try manually packing the object
            if hasattr(obj, "__dict__"):
                for key, val in obj.__dict__.items():
                    obj.__dict__[key] = _pack_obj(val)
                return obj
            if isinstance(obj, (list, tuple)):
                obj_type = type(obj)
                new_obj = obj_type([_pack_obj(o) for o in obj])
                return new_obj

            return obj
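The inverse operation is not shown in this snippet. A minimal sketch of an unpacker, assuming a `deserialize` counterpart to `serialize` and the `SavedKerasModel` fields used above (both project-specific names), might look like:

def _unpack_obj(obj):
    # Hypothetical inverse of _pack_obj: rebuild Keras models from the
    # SavedKerasModel wrapper created above.
    if isinstance(obj, SavedKerasModel):
        restored = deserialize(obj.model)  # assumed inverse of serialize()
        if obj.training_config is not None:
            restored.compile(
                **saving_utils.compile_args_from_training_config(
                    obj.training_config))
        restored.set_weights(obj.weights)
        return restored
    if hasattr(obj, "__dict__"):
        for key, val in obj.__dict__.items():
            obj.__dict__[key] = _unpack_obj(val)
    return obj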
Example #2
def save_optimizer_weights(model, filepath, overwrite=True, **kwargs):
    """Save only the optimizer state of `model` to an HDF5 file."""
    if not isinstance(filepath, h5py.File):
        # If file exists and should not be overwritten.
        if not overwrite and os.path.isfile(filepath):
            proceed = hdf5_format.ask_to_proceed_with_overwrite(filepath)
            if not proceed:
                return
        f = h5py.File(filepath, mode='w')
        opened_new_file = True
    else:
        f = filepath
        opened_new_file = False
    try:
        model_metadata = saving_utils.model_metadata(
            model, include_optimizer=True, require_config=False)
        for k, v in model_metadata.items():
            if isinstance(v, (dict, list, tuple)):
                f.attrs[k] = json.dumps(
                    v, default=serialization.get_json_type).encode('utf8')
            else:
                f.attrs[k] = v
        if not isinstance(model.optimizer, optimizers.TFOptimizer):
            hdf5_format.save_optimizer_weights_to_hdf5_group(f, model.optimizer)
        f.flush()
    finally:
        if opened_new_file:
            f.close()
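The matching load path is not shown. A minimal sketch, assuming the file layout written above and TensorFlow's `hdf5_format.load_optimizer_weights_from_hdf5_group` helper:

def load_optimizer_weights(model, filepath):
    # Sketch: restore the optimizer state written by save_optimizer_weights().
    # The optimizer's slot variables must already exist (e.g. after a
    # training step) before set_weights can be applied.
    with h5py.File(filepath, mode='r') as f:
        weight_values = hdf5_format.load_optimizer_weights_from_hdf5_group(f)
    model.optimizer.set_weights(weight_values)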
Example #3
def _python_properties_internal(self):
    metadata = super(ModelSavedModelSaver,
                     self)._python_properties_internal()
    metadata.update(
        saving_utils.model_metadata(self.obj,
                                    include_optimizer=True,
                                    require_config=False))
    return metadata
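For context, `saving_utils.model_metadata` returns a plain dict, so the `metadata.update(...)` call above merges its keys into the saver's metadata. A quick way to inspect those keys with the legacy `tf.keras` internals:

import tensorflow as tf
from tensorflow.python.keras.saving import saving_utils

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer="adam", loss="mse")

metadata = saving_utils.model_metadata(model, include_optimizer=True,
                                       require_config=False)
print(sorted(metadata))
# Typically: ['backend', 'keras_version', 'model_config', 'training_config']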
Example #4
    def _write_array(
        self,
        include_optimizer: bool,
        serialized_weights: bytes,
        serialized_optimizer_weights: bytes,
        meta: Optional[Meta],
    ) -> None:
        """Write Tensorflow model to a TileDB array."""
        assert self.model
        # TODO: Change timestamp when issue in core is resolved
        with tiledb.open(self.uri,
                         "w",
                         timestamp=current_milli_time(),
                         ctx=self.ctx) as tf_model_tiledb:
            if isinstance(self.model, (Functional, Sequential)):
                tf_model_tiledb[:] = {
                    "model_weights": np.array([serialized_weights]),
                    "optimizer_weights":
                    np.array([serialized_optimizer_weights]),
                }
            else:
                # Insert weights and optimizer
                layer_names = []
                weight_names = []
                weight_values = []
                for layer in sorted(self.model.layers, key=attrgetter("name")):
                    weights = layer.trainable_weights + layer.non_trainable_weights
                    weight_values.append(
                        pickle.dumps(backend.batch_get_value(weights)))
                    weight_names.append(
                        pickle.dumps([w.name.encode("utf8") for w in weights]))
                    layer_names.append(layer.name)

                tf_model_tiledb[:] = {
                    "layer_name":
                    np.array(layer_names),
                    "weight_values":
                    np.array(weight_values),
                    "weight_names":
                    np.array(weight_names),
                    # TODO (TeamML) Fix array scheme to avoid optimizer_weight repetition. Nullable
                    "optimizer_weights":
                    np.array([
                        serialized_optimizer_weights
                        for _ in range(len(self.model.layers))
                    ]),
                }

            # Insert all model metadata
            model_metadata = saving_utils.model_metadata(
                self.model, include_optimizer)
            for key, value in model_metadata.items():
                tf_model_tiledb.meta[key] = json.dumps(
                    value, default=json_utils.get_json_type).encode("utf8")

            self.update_model_metadata(array=tf_model_tiledb, meta=meta)
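The read path is not part of this example. A minimal sketch, assuming the same `uri` and attribute names, a Functional/Sequential model, and that the weight blobs were produced with `pickle.dumps`:

import json
import pickle
import tiledb

with tiledb.open(uri) as tf_model_tiledb:  # same `uri` as above
    weights_blob = tf_model_tiledb[:]["model_weights"][0]
    weights = pickle.loads(weights_blob)  # assumes pickled weight lists
    model_config = json.loads(tf_model_tiledb.meta["model_config"])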
Example #5
    def _python_properties_internal(self):
        metadata = super(ModelSavedModelSaver,
                         self)._python_properties_internal()
        # Network stateful property is dependent on the child layers.
        metadata.pop('stateful')
        metadata['is_graph_network'] = self.obj._is_graph_network  # pylint: disable=protected-access

        metadata.update(
            saving_utils.model_metadata(self.obj,
                                        include_optimizer=True,
                                        require_config=False))
        return metadata
Example #6
def pack_keras_model(model_obj, protocol):
    """Pickle a Keras Model.

    Arguments:
        model_obj: an instance of a Keras Model.
        protocol: pickle protocol version, ignored.

    Returns:
        A `(unpack_keras_model, args)` tuple following the pickle protocol.
    """
    model_metadata = saving_utils.model_metadata(model_obj)
    training_config = model_metadata.get("training_config", None)
    model = serialize_layer(model_obj)
    weights = model_obj.get_weights()
    return (unpack_keras_model, (model, training_config, weights))
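`unpack_keras_model` is referenced but not shown. A minimal sketch, assuming a `deserialize_layer` inverse of `serialize_layer`:

def unpack_keras_model(model, training_config, weights):
    # Sketch: rebuild the model from its serialized config, restore the
    # compile arguments, then restore the weights.
    restored_model = deserialize_layer(model)
    if training_config is not None:
        restored_model.compile(
            **saving_utils.compile_args_from_training_config(training_config))
    restored_model.set_weights(weights)
    return restored_model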
Example #7
    def _write_array(
        self,
        model: Model,
        include_optimizer: bool,
        serialized_weights: bytes,
        serialized_optimizer_weights: bytes,
        meta: Optional[dict],
    ):
        """
        Writes Tensorflow model to a TileDB array.
        """
        with tiledb.open(self.uri, "w") as tf_model_tiledb:
            # Insert weights and optimizer
            tf_model_tiledb[:] = {
                "model_weights": np.array([serialized_weights]),
                "optimizer_weights": np.array([serialized_optimizer_weights]),
            }

            # Insert all model metadata
            model_metadata = saving_utils.model_metadata(
                model, include_optimizer)
            for key, value in model_metadata.items():
                try:
                    tf_model_tiledb.meta[key] = json.dumps(
                        value, default=json_utils.get_json_type).encode("utf8")
                except Exception:
                    logging.warning(
                        "JSON serialization failed for metadata key '%s'", key)

            # Add Python version to array's metadata
            tf_model_tiledb.meta["python_version"] = platform.python_version()

            # Add extra metadata given by the user to array's metadata
            if meta:
                for key, value in meta.items():
                    try:
                        tf_model_tiledb.meta[key] = json.dumps(
                            value,
                            default=json_utils.get_json_type).encode("utf8")
                    except Exception:
                        logging.warning(
                            "JSON serialization failed for metadata key '%s'",
                            key)
Example #8
def pack_keras_model(model_obj, protocol):
    """Pickle a Keras Model.

    Arguments:
        model_obj: an instance of a Keras Model.
        protocol: pickle protocol version, ignored.

    Returns:
        A `(unpack_keras_model, args)` tuple following the pickle protocol.
    """
    if not isinstance(model_obj, Model):
        raise TypeError("`model_obj` must be an instance of a Keras Model")
    # pack up model
    model_metadata = saving_utils.model_metadata(model_obj)
    training_config = model_metadata.get("training_config", None)
    model = serialize(model_obj)
    weights = model_obj.get_weights()
    return (unpack_keras_model, (model, training_config, weights))
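Because the return value follows the standard `(callable, args)` reduce contract, the round trip can be exercised directly, given a compiled Keras `model` and an `unpack_keras_model` as sketched under Example #6:

import pickle

reconstructor, args = pack_keras_model(model, pickle.HIGHEST_PROTOCOL)
restored = reconstructor(*args)  # invokes unpack_keras_model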
Example #9
def _clone_prebuilt_model(build_fn):
    """Clones and compiles a pre-built model when build_fn is an existing
            Keras model instance.

    Arguments:
        build_fn : instance of Keras Model.

    Returns: copy of the input model with no training.
    """
    model = clone_model(build_fn)
    # clone_model does not copy over compilation parameters, so restore them manually
    model_metadata = saving_utils.model_metadata(build_fn)
    if "training_config" in model_metadata:
        training_config = model_metadata["training_config"]
    else:
        raise ValueError("To use %s as `build_fn`, you must compile "
                         "it first." % build_fn)

    model.compile(
        **saving_utils.compile_args_from_training_config(training_config))

    return model
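A minimal usage sketch; the model must be compiled first, otherwise the `ValueError` above is raised:

import tensorflow as tf

original = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(3,))])
original.compile(optimizer="adam", loss="mse")

fresh = _clone_prebuilt_model(original)  # same topology and compile args,
                                         # freshly initialized weights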
Example #10
def save_model_to_hdf5(model,
                       filepath,
                       overwrite=True,
                       include_optimizer=True):
    """Saves a model to a HDF5 file.

  The saved model contains:
      - the model's configuration (topology)
      - the model's weights
      - the model's optimizer's state (if any)

  Thus the saved model can be reinstantiated in
  the exact same state, without any of the code
  used for model definition or training.

  Arguments:
      model: Keras model instance to be saved.
      filepath: One of the following:
          - String, path where to save the model
          - `h5py.File` object where to save the model
      overwrite: Whether we should overwrite any existing
          model at the target location, or instead
          ask the user with a manual prompt.
      include_optimizer: If True, save optimizer's state together.

  Raises:
      ImportError: if h5py is not available.
  """

    if h5py is None:
        raise ImportError('`save_model` requires h5py.')

    # TODO(psv) Add warning when we save models that contain non-serializable
    # entities like metrics added using `add_metric` and losses added using
    # `add_loss.`
    if len(model.weights) != len(model._undeduplicated_weights):
        logging.warning(
            'Found duplicated `Variable`s in Model\'s `weights`. '
            'This is usually caused by `Variable`s being shared by '
            'Layers in the Model. These `Variable`s will be treated '
            'as separate `Variable`s when the Model is restored. To '
            'avoid this, please save with `save_format="tf"`.')

    if not isinstance(filepath, h5py.File):
        # If file exists and should not be overwritten.
        if not overwrite and os.path.isfile(filepath):
            proceed = ask_to_proceed_with_overwrite(filepath)
            if not proceed:
                return

        f = h5py.File(filepath, mode='w')
        opened_new_file = True
    else:
        f = filepath
        opened_new_file = False

    try:
        model_metadata = saving_utils.model_metadata(model, include_optimizer)
        for k, v in model_metadata.items():
            if isinstance(v, (dict, list, tuple)):
                f.attrs[k] = json.dumps(
                    v, default=serialization.get_json_type).encode('utf8')
            else:
                f.attrs[k] = v

        model_weights_group = f.create_group('model_weights')
        model_layers = model.layers
        save_weights_to_hdf5_group(model_weights_group, model_layers)

        # TODO(b/128683857): Add integration tests between tf.keras and external
        # Keras, to avoid breaking TF.js users.
        if (include_optimizer and model.optimizer
                and not isinstance(model.optimizer, optimizers.TFOptimizer)):
            save_optimizer_weights_to_hdf5_group(f, model.optimizer)

        f.flush()
    finally:
        if opened_new_file:
            f.close()
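Typical usage mirrors `tf.keras.models.save_model` with an HDF5 target; for example:

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(2,))])
model.compile(optimizer="adam", loss="mse")

save_model_to_hdf5(model, "model.h5")
restored = tf.keras.models.load_model("model.h5")  # the standard loader reads this format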
Example #11
def __reduce__(self):
    model_metadata = saving_utils.model_metadata(self)
    training_config = model_metadata.get("training_config", None)
    model = serialize(self)
    weights = self.get_weights()
    return (unpack, (model, training_config, weights))
Example #12
def _reduce_tf_model(model):
    model_metadata = saving_utils.model_metadata(model)
    training_config = model_metadata.get("training_config", None)
    weights = model.get_weights()
    model = serialize(model)
    return TensorflowDispatcher._make_model, (model, training_config, weights)