Example #1
    def test_tfdataset_with_tf_data_dataset_which_requires_table(self):

        keys = [1, 0, -1]
        dataset = tf.data.Dataset.from_tensor_slices([1, 2, -1, 5] * 40)
        # Map each element through a static lookup table; keys not in the
        # table fall back to the default value of 100.
        table = tf.contrib.lookup.HashTable(
            initializer=tf.contrib.lookup.KeyValueTensorInitializer(
                keys=keys, values=list(reversed(keys))),
            default_value=100)
        dataset = dataset.map(table.lookup)

        def transform(x):
            float_x = tf.to_float(x)
            return float_x, 1

        dataset = dataset.map(transform)
        # Wrap the tf.data pipeline in a distributed TFDataset for training.
        dataset = TFDataset.from_tf_data_dataset(dataset, batch_size=16)
        seq = tf.keras.Sequential([
            tf.keras.layers.Flatten(input_shape=()),
            tf.keras.layers.Dense(10, activation="softmax")
        ])
        seq.compile(optimizer=tf.keras.optimizers.RMSprop(),
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])
        model = KerasModel(seq)
        model.fit(dataset)
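The fitted model can also be evaluated, mirroring the pattern in Example #4 below. A minimal sketch, reusing the same table and transform; rebuilding the pipeline with batch_per_thread (the evaluation-side batching knob) is an assumption based on Example #4:

    # Sketch: rebuild the pipeline and evaluate (assumed, per Example #4).
    eval_ds = tf.data.Dataset.from_tensor_slices([1, 2, -1, 5] * 40)
    eval_ds = eval_ds.map(table.lookup).map(transform)
    eval_ds = TFDataset.from_tf_data_dataset(eval_ds, batch_per_thread=16)
    model.evaluate(eval_ds)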
Example #2
    def input_function():
        # dict(data_df) turns a pandas DataFrame into a {column: values}
        # feature dict; the tuple carries features only (no labels).
        ds = tf.data.Dataset.from_tensor_slices((dict(data_df), ))
        ds = TFDataset.from_tf_data_dataset(
            dataset=ds,
            batch_size=batch_size,
            batch_per_thread=batch_per_thread)
        return ds
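Here `data_df`, `batch_size`, and `batch_per_thread` come from the enclosing scope. A minimal sketch of inputs that would satisfy this snippet (all names and values hypothetical):

    import pandas as pd

    # Hypothetical feature frame; dict(data_df) yields {"age": ..., "fare": ...}.
    data_df = pd.DataFrame({"age": [22.0, 38.0, 26.0],
                            "fare": [7.25, 71.28, 7.92]})
    batch_size = 32       # total training batch size across the cluster
    batch_per_thread = 4  # per-partition batch size for inference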
Example #3
    def input_fn():
        def map_func(data):
            # tfds "mnist" elements are dicts with 'image' and 'label' keys.
            image = data['image']
            label = data['label']
            one_hot_label = tf.one_hot(label, depth=10)
            noise = tf.random.normal(shape=(NOISE_DIM, ), mean=0.0, stddev=1.0)
            generator_inputs = (noise, one_hot_label)
            # Rescale pixel values from [0, 255] to [-1, 1] for the GAN.
            discriminator_inputs = ((tf.to_float(image) / 255.0) - 0.5) * 2
            return (generator_inputs, discriminator_inputs)

        ds = tfds.load("mnist", split="train")
        ds = ds.map(map_func)
        dataset = TFDataset.from_tf_data_dataset(ds, batch_size=56)
        return dataset
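`NOISE_DIM` is taken from the enclosing scope and must match the generator's expected noise size. A minimal sketch of calling this function (the value is hypothetical):

    import tensorflow as tf
    import tensorflow_datasets as tfds

    NOISE_DIM = 96  # hypothetical; must match the generator's input size
    dataset = input_fn()  # elements: ((noise, one_hot_label), scaled_image)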
Example #4
    def test_tfdataset_with_tf_data_dataset(self):
        # 102 random 28x28x1 "images" with integer labels in [0, 10).
        dataset = tf.data.Dataset.from_tensor_slices(
            (np.random.randn(102, 28, 28, 1),
             np.random.randint(0, 10, size=(102, ))))
        dataset = dataset.map(lambda feature, label:
                              (tf.to_float(feature), label))
        dataset = TFDataset.from_tf_data_dataset(dataset, batch_size=16)
        seq = tf.keras.Sequential([
            tf.keras.layers.Flatten(input_shape=(28, 28, 1)),
            tf.keras.layers.Dense(10, activation="softmax")
        ])

        seq.compile(optimizer=tf.keras.optimizers.RMSprop(),
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])
        model = KerasModel(seq)
        model.fit(dataset)
        # Evaluation uses batch_per_thread (per-partition batch size)
        # instead of batch_size.
        dataset = tf.data.Dataset.from_tensor_slices(
            (np.random.randn(102, 28, 28, 1),
             np.random.randint(0, 10, size=(102, ))))
        dataset = dataset.map(lambda feature, label:
                              (tf.to_float(feature), label))
        dataset = TFDataset.from_tf_data_dataset(dataset, batch_per_thread=16)
        model.evaluate(dataset)
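The same model can also produce predictions from a feature-only dataset. A minimal sketch, assuming tfpark's `KerasModel.predict` accepts a TFDataset the way `evaluate` does:

    # Sketch: predict on features only (no labels needed at inference).
    pred_ds = tf.data.Dataset.from_tensor_slices(np.random.randn(102, 28, 28, 1))
    pred_ds = pred_ds.map(tf.to_float)
    pred_ds = TFDataset.from_tf_data_dataset(pred_ds, batch_per_thread=16)
    predictions = model.predict(pred_ds)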
Example #5
    def test_tfdataset_with_tf_data_dataset_which_contains_bool(self):
        dataset = tf.data.Dataset.from_tensor_slices(
            (np.random.randn(102, 28, 28, 1),
             np.random.randint(0, 10, size=(102, )),
             # Use the builtin bool: np.bool is removed in newer NumPy.
             np.ones(shape=(102, 28, 28, 1), dtype=bool)))
        dataset = TFDataset.from_tf_data_dataset(dataset, batch_size=16)

        # TFDataset exposes the batched tensors so a graph can be built
        # directly on them.
        feature, labels, mask = dataset.tensors

        float_mask = tf.to_float(mask)
        masked_feature = tf.to_float(feature) * float_mask
        flatten = tf.layers.flatten(masked_feature)
        logits = tf.layers.dense(flatten, 10)
        loss = tf.reduce_mean(
            tf.losses.sparse_softmax_cross_entropy(logits=logits,
                                                   labels=labels))
        opt = TFOptimizer.from_loss(loss, Adam())
        opt.optimize()
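This example relies on TF 1.x-era APIs plus Analytics Zoo's TFOptimizer; note that `Adam` here is BigDL's optimizer, not Keras's. A minimal sketch of the assumed imports and an explicit end trigger (import paths per the Analytics Zoo docs, treated as assumptions):

    from zoo.tfpark import TFDataset, TFOptimizer
    from bigdl.optim.optimizer import Adam, MaxEpoch

    opt = TFOptimizer.from_loss(loss, Adam())
    opt.optimize(end_trigger=MaxEpoch(5))  # train for five epochs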
Example #6
    def input_function():
        ds = tf.data.Dataset.from_tensor_slices((dict(data_df), label_df))
        # Shuffle with a buffer of 1000 elements before batching.
        ds = ds.shuffle(1000)
        ds = TFDataset.from_tf_data_dataset(dataset=ds,
                                            batch_size=batch_size)
        return ds
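An input function like this is normally handed to an Estimator-style trainer rather than called directly. A minimal sketch, assuming Analytics Zoo's `TFEstimator.from_model_fn`/`train` API (the import path and `model_fn` are assumptions; `model_fn` follows the standard `tf.estimator` contract):

    from zoo.tfpark.estimator import TFEstimator

    # model_fn: (features, labels, mode) -> tf.estimator.EstimatorSpec
    estimator = TFEstimator.from_model_fn(model_fn)
    estimator.train(input_function, steps=100)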