# Example 1
def test_multiple_monitoring_datasets():
    """Test that DefaultTrainingAlgorithm accepts a dict of multiple
    monitoring datasets and can run setup/train without crashing."""

    BATCH_SIZE = 2
    BATCHES = 3
    NUM_FEATURES = 4
    dim = 3
    m = 10

    # NOTE: the original seed was written as [2014, 02, 25]; leading-zero
    # octal literals are a SyntaxError in Python 3 (PEP 3127), so the
    # components are plain decimal integers here.
    rng = np.random.RandomState([2014, 2, 25])
    X = rng.randn(m, dim)
    Y = rng.randn(m, dim)

    train = DenseDesignMatrix(X=X)
    test = DenseDesignMatrix(X=Y)

    # Two named monitoring datasets exercises the multi-dataset code path.
    algorithm = DefaultTrainingAlgorithm(
        batch_size=BATCH_SIZE,
        batches_per_iter=BATCHES,
        monitoring_dataset={'train': train, 'test': test})

    # S3C configured with frozen hyperparameters (min == max, lr == 0)
    # so the model itself is inert; only the algorithm plumbing is tested.
    model = S3C(nvis=NUM_FEATURES, nhid=1,
                irange=.01, init_bias_hid=0., init_B=1.,
                min_B=1., max_B=1., init_alpha=1.,
                min_alpha=1., max_alpha=1., init_mu=0.,
                m_step=Grad_M_Step(learning_rate=0.),
                e_step=E_Step(h_new_coeff_schedule=[1.]))

    algorithm.setup(model=model, dataset=train)
    algorithm.train(dataset=train)
# Example 2
def test_counting():
    """Check that the monitor counts exactly the batches and examples
    that DefaultTrainingAlgorithm was configured to process."""
    BATCH_SIZE = 2
    BATCHES = 3
    NUM_FEATURES = 4
    num_examples = BATCHES * BATCH_SIZE

    ds = DummyDataset(num_examples=num_examples, num_features=NUM_FEATURES)
    trainer = DefaultTrainingAlgorithm(
        batch_size=BATCH_SIZE, batches_per_iter=BATCHES)

    # Inert S3C: clamped hyperparameters and zero learning rate mean the
    # model does no real learning -- we only care about the counters.
    inert_model = S3C(nvis=NUM_FEATURES, nhid=1, irange=.01,
                      init_bias_hid=0., init_B=1., min_B=1., max_B=1.,
                      init_alpha=1., min_alpha=1., max_alpha=1.,
                      init_mu=0., m_step=Grad_M_Step(learning_rate=0.),
                      e_step=E_Step(h_new_coeff_schedule=[1.]))

    trainer.setup(model=inert_model, dataset=ds)
    trainer.train(dataset=ds)

    seen = inert_model.monitor.get_batches_seen()
    if not (seen == BATCHES):
        raise AssertionError('Should have seen '+str(BATCHES) +
                ' batches but saw '+str(inert_model.monitor.get_batches_seen()))

    assert inert_model.monitor.get_examples_seen() == num_examples
    assert isinstance(inert_model.monitor.get_examples_seen(),
                      py_integer_types)
    assert isinstance(inert_model.monitor.get_batches_seen(),
                      py_integer_types)
# Example 3
    def set_training_criteria(self,
                              learning_rate=0.05,
                              batch_size=10,
                              max_epochs=10):
        """Build the training algorithm for this object.

        Monitors on ``self.datasets`` and stops after ``max_epochs``
        epochs.  NOTE(review): ``learning_rate`` is accepted but unused
        here, matching the original signature -- confirm with callers.
        """
        stop_after = EpochCounter(max_epochs)
        self.training_alg = DefaultTrainingAlgorithm(
            batch_size=batch_size,
            monitoring_dataset=self.datasets,
            termination_criterion=stop_after)
# Example 4
def test_train_batch():
    """Smoke test: train_batch on an RBM can be called without crashing."""

    m = 1
    dim = 2
    # NOTE: the original seed was [2014, 03, 17]; a leading-zero octal
    # literal is a SyntaxError in Python 3 (PEP 3127), so plain decimal
    # integers are used here.
    rng = np.random.RandomState([2014, 3, 17])
    X = rng.randn(m, dim)
    train = DenseDesignMatrix(X=X)

    rbm = RBM(nvis=dim, nhid=3)
    trainer = DefaultTrainingAlgorithm(batch_size=1, batches_per_iter=10)
    trainer.setup(rbm, train)
    trainer.train(train)
# Example 5
def test_unspecified_batch_size():
    """Test that omitting the batch size raises NoBatchSizeError at setup."""

    m = 1
    dim = 2
    # NOTE: the original seed was [2014, 03, 17]; a leading-zero octal
    # literal is a SyntaxError in Python 3 (PEP 3127), so plain decimal
    # integers are used here.
    rng = np.random.RandomState([2014, 3, 17])
    X = rng.randn(m, dim)
    train = DenseDesignMatrix(X=X)

    rbm = RBM(nvis=dim, nhid=3)
    trainer = DefaultTrainingAlgorithm()
    try:
        trainer.setup(rbm, train)
    except NoBatchSizeError:
        # Expected path: setup must reject a missing batch size.
        return
    raise AssertionError("Missed the lack of a batch size")