Example #1
def deserialize(name, custom_objects=None):
    """Returns activation function given a string identifier.

    Args:
      name: The name of the activation function.
      custom_objects: Optional `{function_name: function_obj}`
        dictionary listing user-provided activation functions.

    Returns:
        Corresponding activation function.

    For example:

    >>> tf.keras.activations.deserialize('linear')
    <function linear at 0x1239596a8>
    >>> tf.keras.activations.deserialize('sigmoid')
    <function sigmoid at 0x123959510>
    >>> tf.keras.activations.deserialize('abcd')
    Traceback (most recent call last):
    ...
    ValueError: Unknown activation function:abcd

    Raises:
        ValueError: `Unknown activation function` if the input string does not
        denote any defined TensorFlow activation function.
    """
    activation_functions = {}
    current_module = sys.modules[__name__]

    # We put 'current_module' after 'activation_layers' to prefer the local one
    # if there is a collision.
    generic_utils.populate_dict_with_module_objects(
        activation_functions,
        (activation_layers, current_module),
        obj_filter=callable,
    )

    return generic_utils.deserialize_keras_object(
        name,
        module_objects=activation_functions,
        custom_objects=custom_objects,
        printable_module_name="activation function",
    )
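
A hedged usage sketch for the function above: it shows the string-to-callable lookup for a built-in name and for a user-provided function passed via `custom_objects`. The `swish_like` function is a hypothetical example introduced only for illustration, not a Keras built-in.

import tensorflow as tf

# Built-in lookup: the string 'relu' resolves to the relu callable.
relu_fn = tf.keras.activations.deserialize('relu')
print(relu_fn(tf.constant([-1.0, 2.0])).numpy())  # [0. 2.]

# User-provided function resolved through custom_objects.
def swish_like(x):
    return x * tf.nn.sigmoid(x)

custom_fn = tf.keras.activations.deserialize(
    'swish_like', custom_objects={'swish_like': swish_like})
print(custom_fn is swish_like)  # True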
Example #2
def populate_deserializable_objects():
    """Populates dict ALL_OBJECTS with every built-in layer.
  """
    global LOCAL
    if not hasattr(LOCAL, 'ALL_OBJECTS'):
        LOCAL.ALL_OBJECTS = {}
        LOCAL.GENERATED_WITH_V2 = None

    if (LOCAL.ALL_OBJECTS
            and LOCAL.GENERATED_WITH_V2 == tf.__internal__.tf2.enabled()):
        # Objects dict is already generated for the proper TF version:
        # do nothing.
        return

    LOCAL.ALL_OBJECTS = {}
    LOCAL.GENERATED_WITH_V2 = tf.__internal__.tf2.enabled()

    base_cls = base_layer.Layer
    generic_utils.populate_dict_with_module_objects(
        LOCAL.ALL_OBJECTS,
        ALL_MODULES,
        obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls))

    # Overwrite certain V1 objects with V2 versions
    if tf.__internal__.tf2.enabled():
        generic_utils.populate_dict_with_module_objects(
            LOCAL.ALL_OBJECTS,
            ALL_V2_MODULES,
            obj_filter=lambda x: inspect.isclass(x) and issubclass(
                x, base_cls))

    # These deserialization aliases are added for backward compatibility,
    # as in TF 1.13, "BatchNormalizationV1" and "BatchNormalizationV2"
    # were used as class name for v1 and v2 version of BatchNormalization,
    # respectively. Here we explicitly convert them to their canonical names.
    LOCAL.ALL_OBJECTS[
        'BatchNormalizationV1'] = normalization.BatchNormalization
    LOCAL.ALL_OBJECTS[
        'BatchNormalizationV2'] = normalization_v2.BatchNormalization

    # Prevent circular dependencies.
    from keras import models  # pylint: disable=g-import-not-at-top
    from keras.premade.linear import LinearModel  # pylint: disable=g-import-not-at-top
    from keras.premade.wide_deep import WideDeepModel  # pylint: disable=g-import-not-at-top
    from keras.feature_column.sequence_feature_column import SequenceFeatures  # pylint: disable=g-import-not-at-top

    LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
    LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
    LOCAL.ALL_OBJECTS['Functional'] = models.Functional
    LOCAL.ALL_OBJECTS['Model'] = models.Model
    LOCAL.ALL_OBJECTS['SequenceFeatures'] = SequenceFeatures
    LOCAL.ALL_OBJECTS['Sequential'] = models.Sequential
    LOCAL.ALL_OBJECTS['LinearModel'] = LinearModel
    LOCAL.ALL_OBJECTS['WideDeepModel'] = WideDeepModel

    if tf.__internal__.tf2.enabled():
        from keras.feature_column.dense_features_v2 import DenseFeatures  # pylint: disable=g-import-not-at-top
        LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
    else:
        from keras.feature_column.dense_features import DenseFeatures  # pylint: disable=g-import-not-at-top
        LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures

    # Merge layers, function versions.
    LOCAL.ALL_OBJECTS['add'] = merge.add
    LOCAL.ALL_OBJECTS['subtract'] = merge.subtract
    LOCAL.ALL_OBJECTS['multiply'] = merge.multiply
    LOCAL.ALL_OBJECTS['average'] = merge.average
    LOCAL.ALL_OBJECTS['maximum'] = merge.maximum
    LOCAL.ALL_OBJECTS['minimum'] = merge.minimum
    LOCAL.ALL_OBJECTS['concatenate'] = merge.concatenate
    LOCAL.ALL_OBJECTS['dot'] = merge.dot
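
For context, this registry is consumed lazily by the layer module's `serialize`/`deserialize` entry points. Below is a simplified, hedged sketch of that consumer, reusing the module-level names (`LOCAL`, `generic_utils`) from the snippet above; the real Keras function handles a few additional cases.

def deserialize(config, custom_objects=None):
    """Returns a layer instance (or registered callable) described by `config`."""
    populate_deserializable_objects()  # build LOCAL.ALL_OBJECTS on first use
    return generic_utils.deserialize_keras_object(
        config,
        module_objects=LOCAL.ALL_OBJECTS,  # registry populated above
        custom_objects=custom_objects,
        printable_module_name='layer')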
Example #3
def populate_deserializable_objects():
    """Populates dict ALL_OBJECTS with every built-in layer."""
    global LOCAL
    if not hasattr(LOCAL, "ALL_OBJECTS"):
        LOCAL.ALL_OBJECTS = {}
        LOCAL.GENERATED_WITH_V2 = None

    if (
        LOCAL.ALL_OBJECTS
        and LOCAL.GENERATED_WITH_V2 == tf.__internal__.tf2.enabled()
    ):
        # Objects dict is already generated for the proper TF version:
        # do nothing.
        return

    LOCAL.ALL_OBJECTS = {}
    LOCAL.GENERATED_WITH_V2 = tf.__internal__.tf2.enabled()

    base_cls = base_layer.Layer
    generic_utils.populate_dict_with_module_objects(
        LOCAL.ALL_OBJECTS,
        ALL_MODULES,
        obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls),
    )

    # Overwrite certain V1 objects with V2 versions
    if tf.__internal__.tf2.enabled():
        generic_utils.populate_dict_with_module_objects(
            LOCAL.ALL_OBJECTS,
            ALL_V2_MODULES,
            obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls),
        )

    # These deserialization aliases are added for backward compatibility,
    # as in TF 1.13, "BatchNormalizationV1" and "BatchNormalizationV2"
    # were used as class name for v1 and v2 version of BatchNormalization,
    # respectively. Here we explicitly convert them to their canonical names.
    LOCAL.ALL_OBJECTS[
        "BatchNormalizationV1"
    ] = batch_normalization_v1.BatchNormalization
    LOCAL.ALL_OBJECTS[
        "BatchNormalizationV2"
    ] = batch_normalization.BatchNormalization

    # Prevent circular dependencies.
    from keras import models
    from keras.feature_column.sequence_feature_column import (
        SequenceFeatures,
    )
    from keras.premade_models.linear import (
        LinearModel,
    )
    from keras.premade_models.wide_deep import (
        WideDeepModel,
    )

    LOCAL.ALL_OBJECTS["Input"] = input_layer.Input
    LOCAL.ALL_OBJECTS["InputSpec"] = input_spec.InputSpec
    LOCAL.ALL_OBJECTS["Functional"] = models.Functional
    LOCAL.ALL_OBJECTS["Model"] = models.Model
    LOCAL.ALL_OBJECTS["SequenceFeatures"] = SequenceFeatures
    LOCAL.ALL_OBJECTS["Sequential"] = models.Sequential
    LOCAL.ALL_OBJECTS["LinearModel"] = LinearModel
    LOCAL.ALL_OBJECTS["WideDeepModel"] = WideDeepModel

    if tf.__internal__.tf2.enabled():
        from keras.feature_column.dense_features_v2 import (
            DenseFeatures,
        )

        LOCAL.ALL_OBJECTS["DenseFeatures"] = DenseFeatures
    else:
        from keras.feature_column.dense_features import (
            DenseFeatures,
        )

        LOCAL.ALL_OBJECTS["DenseFeatures"] = DenseFeatures

    # Merging layers, function versions.
    LOCAL.ALL_OBJECTS["add"] = merging.add
    LOCAL.ALL_OBJECTS["subtract"] = merging.subtract
    LOCAL.ALL_OBJECTS["multiply"] = merging.multiply
    LOCAL.ALL_OBJECTS["average"] = merging.average
    LOCAL.ALL_OBJECTS["maximum"] = merging.maximum
    LOCAL.ALL_OBJECTS["minimum"] = merging.minimum
    LOCAL.ALL_OBJECTS["concatenate"] = merging.concatenate
    LOCAL.ALL_OBJECTS["dot"] = merging.dot
Example #4
def populate_deserializable_objects():
    """Populates dict ALL_OBJECTS with every built-in initializer."""
    global LOCAL
    if not hasattr(LOCAL, "ALL_OBJECTS"):
        LOCAL.ALL_OBJECTS = {}
        LOCAL.GENERATED_WITH_V2 = None

    if (LOCAL.ALL_OBJECTS
            and LOCAL.GENERATED_WITH_V2 == tf.__internal__.tf2.enabled()):
        # Objects dict is already generated for the proper TF version:
        # do nothing.
        return

    LOCAL.ALL_OBJECTS = {}
    LOCAL.GENERATED_WITH_V2 = tf.__internal__.tf2.enabled()

    # Compatibility aliases (need to exist in both V1 and V2).
    LOCAL.ALL_OBJECTS["ConstantV2"] = initializers_v2.Constant
    LOCAL.ALL_OBJECTS["GlorotNormalV2"] = initializers_v2.GlorotNormal
    LOCAL.ALL_OBJECTS["GlorotUniformV2"] = initializers_v2.GlorotUniform
    LOCAL.ALL_OBJECTS["HeNormalV2"] = initializers_v2.HeNormal
    LOCAL.ALL_OBJECTS["HeUniformV2"] = initializers_v2.HeUniform
    LOCAL.ALL_OBJECTS["IdentityV2"] = initializers_v2.Identity
    LOCAL.ALL_OBJECTS["LecunNormalV2"] = initializers_v2.LecunNormal
    LOCAL.ALL_OBJECTS["LecunUniformV2"] = initializers_v2.LecunUniform
    LOCAL.ALL_OBJECTS["OnesV2"] = initializers_v2.Ones
    LOCAL.ALL_OBJECTS["OrthogonalV2"] = initializers_v2.Orthogonal
    LOCAL.ALL_OBJECTS["RandomNormalV2"] = initializers_v2.RandomNormal
    LOCAL.ALL_OBJECTS["RandomUniformV2"] = initializers_v2.RandomUniform
    LOCAL.ALL_OBJECTS["TruncatedNormalV2"] = initializers_v2.TruncatedNormal
    LOCAL.ALL_OBJECTS["VarianceScalingV2"] = initializers_v2.VarianceScaling
    LOCAL.ALL_OBJECTS["ZerosV2"] = initializers_v2.Zeros

    # Out of an abundance of caution we also include these aliases that have
    # a non-zero probability of having been included in saved configs in the past.
    LOCAL.ALL_OBJECTS["glorot_normalV2"] = initializers_v2.GlorotNormal
    LOCAL.ALL_OBJECTS["glorot_uniformV2"] = initializers_v2.GlorotUniform
    LOCAL.ALL_OBJECTS["he_normalV2"] = initializers_v2.HeNormal
    LOCAL.ALL_OBJECTS["he_uniformV2"] = initializers_v2.HeUniform
    LOCAL.ALL_OBJECTS["lecun_normalV2"] = initializers_v2.LecunNormal
    LOCAL.ALL_OBJECTS["lecun_uniformV2"] = initializers_v2.LecunUniform

    if tf.__internal__.tf2.enabled():
        # For V2, entries are generated automatically based on the content of
        # initializers_v2.py.
        v2_objs = {}
        base_cls = initializers_v2.Initializer
        generic_utils.populate_dict_with_module_objects(
            v2_objs,
            [initializers_v2],
            obj_filter=lambda x: inspect.isclass(x) and issubclass(
                x, base_cls),
        )
        for key, value in v2_objs.items():
            LOCAL.ALL_OBJECTS[key] = value
            # Functional aliases.
            LOCAL.ALL_OBJECTS[generic_utils.to_snake_case(key)] = value
    else:
        # V1 initializers.
        v1_objs = {
            "Constant": tf.compat.v1.constant_initializer,
            "GlorotNormal": tf.compat.v1.glorot_normal_initializer,
            "GlorotUniform": tf.compat.v1.glorot_uniform_initializer,
            "Identity": tf.compat.v1.initializers.identity,
            "Ones": tf.compat.v1.ones_initializer,
            "Orthogonal": tf.compat.v1.orthogonal_initializer,
            "VarianceScaling": tf.compat.v1.variance_scaling_initializer,
            "Zeros": tf.compat.v1.zeros_initializer,
            "HeNormal": initializers_v1.HeNormal,
            "HeUniform": initializers_v1.HeUniform,
            "LecunNormal": initializers_v1.LecunNormal,
            "LecunUniform": initializers_v1.LecunUniform,
            "RandomNormal": initializers_v1.RandomNormal,
            "RandomUniform": initializers_v1.RandomUniform,
            "TruncatedNormal": initializers_v1.TruncatedNormal,
        }
        for key, value in v1_objs.items():
            LOCAL.ALL_OBJECTS[key] = value
            # Functional aliases.
            LOCAL.ALL_OBJECTS[generic_utils.to_snake_case(key)] = value

    # More compatibility aliases.
    LOCAL.ALL_OBJECTS["normal"] = LOCAL.ALL_OBJECTS["random_normal"]
    LOCAL.ALL_OBJECTS["uniform"] = LOCAL.ALL_OBJECTS["random_uniform"]
    LOCAL.ALL_OBJECTS["one"] = LOCAL.ALL_OBJECTS["ones"]
    LOCAL.ALL_OBJECTS["zero"] = LOCAL.ALL_OBJECTS["zeros"]