Code Example #1
def test_sequence_offset() -> None:
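    """_SequenceWithOffset should shift every batch index by batch_offset, wrapping around at the end."""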
    seq = utils.make_xor_data_sequences()[0]
    offset_seq = keras._SequenceWithOffset(seq, batch_offset=1)
    assert len(seq) == len(offset_seq)

    for i in range(len(offset_seq)):
        a = offset_seq[i]
        b = seq[(i + 1) % len(seq)]
        assert len(a) == len(b)
        for j in range(len(a)):
            assert np.equal(a[j], b[j]).all()
Code Example #2
def test_adapt_sequence() -> None:
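    """_adapt_keras_data should wrap a Sequence in a SequenceAdapter and return an existing adapter unchanged."""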
    seqs = utils.make_xor_data_sequences()
    train = keras._adapt_keras_data(seqs[0], batch_size=1)
    assert isinstance(train, keras.SequenceAdapter)
    test = keras._adapt_keras_data(seqs[1], batch_size=1)
    assert isinstance(test, keras.SequenceAdapter)
    assert seqs[0] is train._sequence._sequence
    assert seqs[1] is test._sequence._sequence

    assert train is keras._adapt_keras_data(train, batch_size=1)
    assert test is keras._adapt_keras_data(test, batch_size=1)
Code Example #3
            "global_batch_size": 4,
            "trial_type": "default",
        }
    }

    context = init(
        config=config, local=args.local, test=args.test, context_dir=str(pathlib.Path.cwd())
    )

    model = Sequential()
    model.add(Dense(context.get_hparam("hidden_size"), activation="sigmoid", input_shape=(2,)))
    model.add(Dense(1))

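    # Build the training and validation data either as Determined-wrapped tf.data.Datasets
    # or as plain Keras Sequences, depending on args.use_dataset.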
    if args.use_dataset:
        data, labels = utils.xor_data()

        train = context.wrap_dataset(tf.data.Dataset.from_tensor_slices((data, labels)))
        train = train.batch(context.get_hparam("global_batch_size"))
        valid = context.wrap_dataset(tf.data.Dataset.from_tensor_slices((data, labels)))
        valid = valid.batch(context.get_hparam("global_batch_size"))
    else:
        train, valid = utils.make_xor_data_sequences(batch_size=4)

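    # Wrap the model with the Determined context before compiling and training it.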
    model = context.wrap_model(model)
    model.compile(
        SGD(lr=context.get_hparam("learning_rate")),
        binary_crossentropy,
        metrics=[categorical_error],
    )
    model.fit(x=train, steps_per_epoch=100, validation_data=valid, workers=0)
Code Example #4
def build_validation_data_loader(self) -> keras.InputData:
    _, test = make_xor_data_sequences(batch_size=4)
    # workers=0 loads validation batches in the main process instead of background workers.
    return keras.SequenceAdapter(test, workers=0)
Code Example #5
def build_training_data_loader(self) -> keras.InputData:
    train, _ = make_xor_data_sequences(batch_size=4)
    return keras.SequenceAdapter(train, workers=0)
Code Example #6
def test_adapt_invalid_data_type() -> None:
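    """Passing a (None, Sequence) tuple to _adapt_data_from_data_loader should raise InvalidDataTypeException."""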
    seqs = utils.make_xor_data_sequences()
    test = keras._adapt_data_from_data_loader(seqs[1], batch_size=1)
    with pytest.raises(det.errors.InvalidDataTypeException) as err:
        keras._adapt_data_from_data_loader((None, test), batch_size=1)
    assert err is not None
Code Example #7
import numpy as np
from tensorflow.keras.utils import Sequence  # base class for Empty below (assuming the TF-bundled Keras)

import determined as det
from determined import keras
from determined_common import check
from tests.experiment import utils  # noqa: I100


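# A Keras Sequence that contains zero batches.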
class Empty(Sequence):
    def __getitem__(self, index: int) -> list:
        return []

    def __len__(self) -> int:
        return 0


SEQ = utils.make_xor_data_sequences()[0]
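# Parameter combinations, presumably (workers, use_multiprocessing, sequence), shared by the multithreading/multiprocessing tests.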
MULTITHREADING_MULTIPROCESS_SUITE = [
    (0, False, SEQ),
    (1, False, SEQ),
    (1, True, SEQ),
    (2, False, SEQ),
    (2, True, SEQ),
]


def test_sequence_offset() -> None:
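    """_SequenceWithOffset should shift every batch index by batch_offset, wrapping around at the end."""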
    seq = utils.make_xor_data_sequences()[0]
    offset_seq = keras._SequenceWithOffset(seq, batch_offset=1)
    assert len(seq) == len(offset_seq)

    for i in range(len(offset_seq)):
Code Example #8
def build_validation_data_loader(self) -> keras.InputData:
    _, test = make_xor_data_sequences(batch_size=4)
    # A plain Keras Sequence is itself a valid keras.InputData return value.
    return test
Code Example #9
def build_training_data_loader(self) -> keras.InputData:
    train, _ = make_xor_data_sequences(batch_size=4)
    return train