def _input_fn() -> tf.data.Dataset:
    # `shuffle`, `batch_size`, and `context` come from the enclosing scope.
    data, labels = xor_data()
    dataset = tf.data.Dataset.from_tensor_slices((data, labels))
    dataset = context.wrap_dataset(dataset)
    if shuffle:
        dataset = dataset.shuffle(1000)

    def map_dataset(x, y):
        # Key the features by name so the model's "input" layer can find them.
        return {"input": x}, y

    dataset = dataset.batch(batch_size)
    dataset = dataset.map(map_dataset)

    return dataset
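
`_input_fn` closes over `shuffle`, `batch_size`, and `context`, which are supplied by an enclosing scope that is not shown. A minimal sketch of such a wrapper, assuming a factory named `make_input_fn` and the standard four-row XOR truth table for `xor_data` (both names and details are assumptions, not from the original):

import numpy as np
import tensorflow as tf


def xor_data():
    # Assumed helper: the four XOR truth-table rows and their labels.
    data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
    labels = np.array([0, 1, 1, 0], dtype=np.float32)
    return data, labels


def make_input_fn(context, batch_size, shuffle=True):
    # Hypothetical factory: returns the zero-argument callable that
    # estimator-style APIs expect; _input_fn closes over context,
    # batch_size, and shuffle exactly as in the fragment above.
    def _input_fn():
        data, labels = xor_data()
        dataset = tf.data.Dataset.from_tensor_slices((data, labels))
        dataset = context.wrap_dataset(dataset)
        if shuffle:
            dataset = dataset.shuffle(1000)
        return dataset.batch(batch_size).map(lambda x, y: ({"input": x}, y))

    return _input_fn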
def make_dataset() -> tf.data.Dataset:
    data, labels = xor_data()
    ds = tf.data.Dataset.from_tensor_slices((data, labels))
    return ds
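
`make_dataset` returns the raw, unbatched XOR dataset, so callers are expected to batch it themselves; a brief usage sketch (eager execution assumed):

ds = make_dataset().batch(4)
for features, labels in ds:
    print(features.numpy(), labels.numpy())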
Example #3
            "learning_rate": 0.1,
            "global_batch_size": 4,
            "trial_type": "default",
        }
    }

    context = init(
        config=config, local=args.local, test=args.test, context_dir=str(pathlib.Path.cwd())
    )

    model = Sequential()
    model.add(Dense(context.get_hparam("hidden_size"), activation="sigmoid", input_shape=(2,)))
    model.add(Dense(1))

    if args.use_dataset:
        data, labels = utils.xor_data()

        # Wrap each tf.data.Dataset through the context before batching.
        train = context.wrap_dataset(tf.data.Dataset.from_tensor_slices((data, labels)))
        train = train.batch(context.get_hparam("global_batch_size"))
        valid = context.wrap_dataset(tf.data.Dataset.from_tensor_slices((data, labels)))
        valid = valid.batch(context.get_hparam("global_batch_size"))
    else:
        # Fall back to Keras Sequence objects built over the same XOR data.
        train, valid = utils.make_xor_data_sequences(batch_size=4)

    model = context.wrap_model(model)
    model.compile(
        SGD(learning_rate=context.get_hparam("learning_rate")),
        binary_crossentropy,
        metrics=[categorical_error],
    )
    model.fit(x=train, steps_per_epoch=100, validation_data=valid, workers=0)
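
The `args` object above carries `local`, `test`, and `use_dataset` attributes, which would typically come from an argparse parser; a minimal sketch with flag names inferred from the attribute names (not confirmed by the original):

import argparse

parser = argparse.ArgumentParser(description="Train a small Keras model on XOR data.")
parser.add_argument("--local", action="store_true", help="run training locally rather than on a cluster")
parser.add_argument("--test", action="store_true", help="exercise a short test run of the training loop")
parser.add_argument("--use-dataset", action="store_true", help="feed tf.data datasets instead of Keras Sequences")
args = parser.parse_args()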