def from_config(cls, config):
    """Reconstruct the instance from its serialized config.

    Args:
        config: Dict with "inputs" and "outputs" keys, each holding a list
            of lists of serialized preprocessor configs.

    Returns:
        A new instance with every nested preprocessor deserialized.
    """
    def _revive_groups(groups):
        # Deserialize each preprocessor inside each group.
        return [
            [preprocessors_module.deserialize(item) for item in group]
            for group in groups
        ]

    return cls(
        inputs=_revive_groups(config["inputs"]),
        outputs=_revive_groups(config["outputs"]),
    )
def test_multi_label_deserialize_without_error():
    """A MultiLabelEncoder survives a serialize/deserialize round trip."""
    data = tf.data.Dataset.from_tensor_slices([1, 2]).batch(32)
    original = encoders.MultiLabelEncoder()

    revived = preprocessors.deserialize(preprocessors.serialize(original))

    # MultiLabelEncoder.transform passes the dataset through unchanged.
    assert revived.transform(data) is data
def test_softmax_deserialize_without_error():
    """A SoftmaxPostprocessor survives a serialize/deserialize round trip."""
    data = tf.data.Dataset.from_tensor_slices([1, 2]).batch(32)
    original = postprocessors.SoftmaxPostprocessor()

    revived = preprocessors.deserialize(preprocessors.serialize(original))

    # SoftmaxPostprocessor.transform passes the dataset through unchanged.
    assert revived.transform(data) is data
def test_one_hot_encoder_deserialize_transforms_to_np():
    """A deserialized OneHotEncoder still emits width-3 one-hot batches."""
    original = encoders.OneHotEncoder(["a", "b", "c"])
    original.fit(np.array(["a", "b", "a"]))

    revived = preprocessors.deserialize(preprocessors.serialize(original))
    dataset = tf.data.Dataset.from_tensor_slices([["a"], ["c"], ["b"]]).batch(2)
    encoded = revived.transform(dataset)

    for batch in encoded:
        # Trailing dimension must equal the vocabulary size (3).
        assert batch.shape[1:] == [3]
def from_config(cls, config):
    """Rebuild the instance, restoring its encoding layer and vocabularies.

    Args:
        config: Dict containing "column_types", "column_names",
            "encoding_layer" (serialized), and "encoding_vocab" (one saved
            vocabulary per encoding sub-layer).

    Returns:
        The reconstructed instance with vocabularies re-applied.
    """
    obj = cls(
        column_types=config["column_types"],
        column_names=config["column_names"],
    )
    obj.layer = preprocessors.deserialize(config["encoding_layer"])
    # Re-apply each saved vocabulary to its matching non-None sub-layer.
    for sub_layer, vocab in zip(
        obj.layer.encoding_layers, config["encoding_vocab"]
    ):
        if sub_layer is not None:
            sub_layer.set_vocabulary(vocab)
    return obj
def from_config(cls, config):
    """Rebuild the instance, deserializing the nested preprocessor first.

    Args:
        config: Serialized config dict containing a "preprocessor" entry.

    Returns:
        An instance produced by the parent class's ``from_config``.
    """
    # Work on a shallow copy: the original code wrote the deserialized
    # object back into the caller's dict, mutating shared serialization
    # state as a side effect.
    config = dict(config)
    config["preprocessor"] = preprocessors.deserialize(
        config["preprocessor"])
    return super().from_config(config)