def test_normalize():
    """Running the Normalization preprocessor should produce a tf.data.Dataset."""
    train_data = common.generate_data(dtype='dataset')
    validation_data = common.generate_data(dtype='dataset')
    result = run_preprocessor(
        preprocessor_module.Normalization(),
        train_data,
        validation_data,
        dtype=tf.float32)
    assert isinstance(result, tf.data.Dataset)
def build(self, hp, inputs=None):
    """Assemble the image pipeline: optional preprocessing, then one conv backbone.

    The backbone kind, and whether to normalize/augment, come from the
    instance attributes when set, otherwise from hyperparameter choices.
    """
    output_node = nest.flatten(inputs)[0]

    block_type = self.block_type or hp.Choice(
        'block_type', ['resnet', 'xception', 'vanilla'], default='resnet')

    # Fixed attribute values win; unset (None) ones become tunable choices.
    normalize = self.normalize
    if normalize is None:
        normalize = hp.Choice('normalize', [True, False], default=True)
    augment = self.augment
    if augment is None:
        augment = hp.Choice('augment', [True, False], default=True)

    if normalize:
        output_node = preprocessor.Normalization()(output_node)
    if augment:
        output_node = preprocessor.ImageAugmentation(
            seed=self.seed)(output_node)

    sub_block_name = self.name + '_' + block_type
    backbones = {
        'resnet': block.ResNetBlock,
        'xception': block.XceptionBlock,
        'vanilla': block.ConvBlock,
    }
    # .get preserves the original fall-through: an unknown block_type
    # leaves output_node untouched rather than raising.
    backbone_cls = backbones.get(block_type)
    if backbone_cls is not None:
        output_node = backbone_cls(name=sub_block_name)(output_node)
    return output_node
def test_normalize():
    """Exercise the full Normalization lifecycle: fit, (de)serialize, transform."""
    normalizer = preprocessor.Normalization()
    samples = np.random.rand(100, 32, 32, 3)
    dataset = tf.data.Dataset.from_tensor_slices(samples)

    # Fit: accumulate statistics over the dataset, then finalize them.
    normalizer.set_hp(kerastuner.HyperParameters())
    for batch in dataset:
        normalizer.update(batch)
    normalizer.finalize()

    # Round-trip the config and the learned weights.
    normalizer.set_config(normalizer.get_config())
    weights = normalizer.get_weights()
    normalizer.clear_weights()
    normalizer.set_weights(weights)

    # Eager transform of each element.
    for batch in dataset:
        normalizer.transform(batch)

    # Graph-mode transform via Dataset.map and py_function.
    def map_func(x):
        return tf.py_function(normalizer.transform,
                              inp=[x],
                              Tout=(tf.float64, ))

    transformed = dataset.map(map_func)
    for _ in transformed:
        pass
    assert isinstance(transformed, tf.data.Dataset)