def test_serialization_v2(self):
    """TF-v2 activation callables must round-trip through serialize/deserialize."""
    activation_map = {tf.math.softmax: 'softmax'}
    for candidate, expected_name in activation_map.items():
        resolved = activations.get(candidate)
        serialized = activations.serialize(resolved)
        restored = activations.deserialize(serialized)
        # The deserialized function should resolve back to the canonical name.
        assert restored.__name__ == expected_name
def get(identifier: Union[str, Callable]) -> Callable:
    """Resolve ``identifier`` to an activation callable.

    Tries the stock Keras lookup first; on failure, falls back to
    deserializing string identifiers against this module's globals so
    that activations defined here (but unknown to core Keras) resolve.

    Args:
        identifier: An activation name (str) or an activation callable.

    Returns:
        The resolved activation callable.

    Raises:
        ValueError: If ``identifier`` is neither a known name nor a callable.
    """
    try:
        return keras_get(identifier)
    except ValueError:
        if isinstance(identifier, str):
            # Names not known to core Keras may still be defined in this module.
            return deserialize(identifier, custom_objects=globals())
        # Fix: raise with a single formatted message instead of the
        # tuple-style ValueError('Could not interpret:', identifier),
        # which renders as a two-element args tuple.
        raise ValueError(f'Could not interpret: {identifier!r}')
def from_config(cls, config, custom_objects=None):
    """Rebuild the combined model from a serialized ``config`` dict.

    Pops the sub-model and activation entries out of ``config`` so the
    remaining keys can be forwarded verbatim to the constructor.
    """
    linear_model = layer_module.deserialize(
        config.pop('linear_model'), custom_objects)
    dnn_model = layer_module.deserialize(
        config.pop('dnn_model'), custom_objects)
    activation = activations.deserialize(
        config.pop('activation', None), custom_objects=custom_objects)
    return cls(
        linear_model=linear_model,
        dnn_model=dnn_model,
        activation=activation,
        **config)
def test_serialization():
    """Every named activation must round-trip through serialize/deserialize."""
    names = ('softmax', 'relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid',
             'linear', 'softplus', 'softsign', 'selu')
    for name in names:
        expected = getattr(activations, name)
        resolved = activations.get(name)
        assert resolved == expected
        # Serialize then deserialize; the result must be the same callable.
        restored = activations.deserialize(activations.serialize(resolved))
        assert restored == expected
def test_serialization(self):
    """Every named activation must round-trip through serialize/deserialize."""
    names = (
        "softmax",
        "relu",
        "elu",
        "tanh",
        "sigmoid",
        "hard_sigmoid",
        "linear",
        "softplus",
        "softsign",
        "selu",
        "gelu",
        "relu6",
    )
    for name in names:
        expected = getattr(activations, name)
        resolved = activations.get(name)
        assert resolved == expected
        # Serialize then deserialize; the result must be the same callable.
        restored = activations.deserialize(activations.serialize(resolved))
        assert restored == expected