def get_config(self):
    """Return a JSON-serializable config for this object.

    Both ``self.inputs`` and ``self.outputs`` are lists of lists of
    preprocessors; each preprocessor is serialized in place, preserving
    the nested grouping.
    """

    def _serialize_groups(groups):
        # Serialize every preprocessor while keeping the list-of-lists shape.
        return [
            [preprocessors_module.serialize(item) for item in group]
            for group in groups
        ]

    return {
        "inputs": _serialize_groups(self.inputs),
        "outputs": _serialize_groups(self.outputs),
    }
def test_multi_label_deserialize_without_error():
    """Serialize/deserialize round-trip of MultiLabelEncoder keeps it usable."""
    original = encoders.MultiLabelEncoder()
    config = preprocessors.serialize(original)
    restored = preprocessors.deserialize(config)

    dataset = tf.data.Dataset.from_tensor_slices([1, 2]).batch(32)
    # MultiLabelEncoder.transform is a pass-through: same dataset object back.
    assert restored.transform(dataset) is dataset
def test_softmax_deserialize_without_error():
    """Serialize/deserialize round-trip of SoftmaxPostprocessor keeps it usable."""
    original = postprocessors.SoftmaxPostprocessor()
    config = preprocessors.serialize(original)
    restored = preprocessors.deserialize(config)

    dataset = tf.data.Dataset.from_tensor_slices([1, 2]).batch(32)
    # The postprocessor's transform is a pass-through: same dataset object back.
    assert restored.transform(dataset) is dataset
def test_one_hot_encoder_deserialize_transforms_to_np():
    """A fitted OneHotEncoder still one-hot encodes after a serialize round-trip."""
    original = encoders.OneHotEncoder(["a", "b", "c"])
    original.fit(np.array(["a", "b", "a"]))
    restored = preprocessors.deserialize(preprocessors.serialize(original))

    dataset = tf.data.Dataset.from_tensor_slices([["a"], ["c"], ["b"]]).batch(2)
    # Each batch should be one-hot encoded over the 3-label vocabulary.
    for batch in restored.transform(dataset):
        assert batch.shape[1:] == [3]
def get_config(self): vocab = [] for encoding_layer in self.layer.encoding_layers: if encoding_layer is None: vocab.append([]) else: vocab.append(encoding_layer.get_vocabulary()) return { "column_types": self.column_types, "column_names": self.column_names, "encoding_layer": preprocessors.serialize(self.layer), "encoding_vocab": vocab, }
def get_config(self): config = super().get_config() config.update( {"preprocessor": preprocessors.serialize(self.preprocessor)}) return config