def test_multiple_monitoring_datasets():
    # Tests that DefaultTrainingAlgorithm can take multiple
    # monitoring datasets (here keyed 'train' and 'test').
    BATCH_SIZE = 2
    BATCHES = 3
    NUM_FEATURES = 4
    dim = 3
    m = 10

    # Fix: the seed was written as [2014, 02, 25]; leading-zero integer
    # literals are a SyntaxError on Python 3 (octal needs the 0o prefix).
    # Plain 2 and 25 seed RandomState identically.
    rng = np.random.RandomState([2014, 2, 25])
    X = rng.randn(m, dim)
    Y = rng.randn(m, dim)

    train = DenseDesignMatrix(X=X)
    test = DenseDesignMatrix(X=Y)

    algorithm = DefaultTrainingAlgorithm(
        batch_size=BATCH_SIZE,
        batches_per_iter=BATCHES,
        monitoring_dataset={'train': train, 'test': test})

    # NOTE(review): the model is built with nvis=NUM_FEATURES (4) while the
    # datasets above have dim=3 columns -- this looks inconsistent; confirm
    # against S3C / dataset handling whether the mismatch is intentional.
    model = S3C(nvis=NUM_FEATURES, nhid=1,
                irange=.01, init_bias_hid=0.,
                init_B=1., min_B=1., max_B=1.,
                init_alpha=1., min_alpha=1., max_alpha=1.,
                init_mu=0.,
                m_step=Grad_M_Step(learning_rate=0.),
                e_step=E_Step(h_new_coeff_schedule=[1.]))

    algorithm.setup(model=model, dataset=train)
    algorithm.train(dataset=train)
def test_counting():
    # After one train() call the monitor should report exactly the number
    # of batches and examples that were presented, as Python integers.
    # NOTE(review): this file appears to define test_counting more than
    # once; under pytest only the last definition runs -- confirm intended.
    batch_size = 2
    num_batches = 3
    num_features = 4
    num_examples = num_batches * batch_size

    dataset = DummyDataset(num_examples=num_examples,
                           num_features=num_features)
    algorithm = DefaultTrainingAlgorithm(batch_size=batch_size,
                                         batches_per_iter=num_batches)
    model = S3C(nvis=num_features, nhid=1, irange=.01, init_bias_hid=0.,
                init_B=1., min_B=1., max_B=1.,
                init_alpha=1., min_alpha=1., max_alpha=1., init_mu=0.,
                m_step=Grad_M_Step(learning_rate=0.),
                e_step=E_Step(h_new_coeff_schedule=[1.]))

    algorithm.setup(model=model, dataset=dataset)
    algorithm.train(dataset=dataset)

    batches_seen = model.monitor.get_batches_seen()
    if batches_seen != num_batches:
        raise AssertionError('Should have seen ' + str(num_batches) +
                             ' batches but saw ' + str(batches_seen))

    assert model.monitor.get_examples_seen() == num_examples
    assert isinstance(model.monitor.get_examples_seen(), py_integer_types)
    assert isinstance(model.monitor.get_batches_seen(), py_integer_types)
def test_counting():
    """Check the monitor's batch/example counters after training.

    Trains an S3C model for BATCHES batches of BATCH_SIZE examples and
    verifies the monitor saw exactly that many batches and examples,
    reported as Python integer types.

    NOTE(review): test_counting seems to be defined multiple times in
    this file; only the final definition is collected -- verify intent.
    """
    BATCH_SIZE = 2
    BATCHES = 3
    NUM_FEATURES = 4
    num_examples = BATCHES * BATCH_SIZE

    dataset = DummyDataset(num_examples=num_examples,
                           num_features=NUM_FEATURES)
    algorithm = DefaultTrainingAlgorithm(batch_size=BATCH_SIZE,
                                         batches_per_iter=BATCHES)

    # All S3C hyperparameters are pinned (min == max, learning_rate 0)
    # so the model itself is inert; only the counting matters here.
    model = S3C(
        nvis=NUM_FEATURES,
        nhid=1,
        irange=0.01,
        init_bias_hid=0.0,
        init_B=1.0,
        min_B=1.0,
        max_B=1.0,
        init_alpha=1.0,
        min_alpha=1.0,
        max_alpha=1.0,
        init_mu=0.0,
        m_step=Grad_M_Step(learning_rate=0.0),
        e_step=E_Step(h_new_coeff_schedule=[1.0]),
    )

    algorithm.setup(model=model, dataset=dataset)
    algorithm.train(dataset=dataset)

    seen = model.monitor.get_batches_seen()
    if seen != BATCHES:
        raise AssertionError(
            "Should have seen " + str(BATCHES) +
            " batches but saw " + str(seen)
        )

    assert model.monitor.get_examples_seen() == num_examples
    assert isinstance(model.monitor.get_examples_seen(), py_integer_types)
    assert isinstance(model.monitor.get_batches_seen(), py_integer_types)
def test_train_batch():
    # Just tests that train_batch can be called without crashing.
    m = 1
    dim = 2

    # Fix: the seed was written as [2014, 03, 17]; a leading-zero integer
    # literal is a SyntaxError on Python 3 (octal requires 0o). Plain 3
    # seeds RandomState identically.
    rng = np.random.RandomState([2014, 3, 17])
    X = rng.randn(m, dim)
    train = DenseDesignMatrix(X=X)

    rbm = RBM(nvis=dim, nhid=3)
    trainer = DefaultTrainingAlgorithm(batch_size=1, batches_per_iter=10)
    trainer.setup(rbm, train)
    trainer.train(train)
def test_counting():
    # After one train() call the monitor must report exactly BATCHES
    # batches and BATCHES * BATCH_SIZE examples.
    # NOTE(review): this file defines test_counting more than once; only
    # the last definition is collected by pytest -- confirm intended.
    BATCH_SIZE = 2
    BATCHES = 3
    NUM_FEATURES = 4
    num_examples = BATCHES * BATCH_SIZE

    dataset = DummyDataset(num_examples=num_examples,
                           num_features=NUM_FEATURES)
    algorithm = DefaultTrainingAlgorithm(batch_size=BATCH_SIZE,
                                         batches_per_iter=BATCHES)
    model = S3C(nvis=NUM_FEATURES, nhid=1, irange=.01, init_bias_hid=0.,
                init_B=1., min_B=1., max_B=1.,
                init_alpha=1., min_alpha=1., max_alpha=1., init_mu=0.,
                m_step=Grad_M_Step(learning_rate=0.),
                e_step=E_Step(h_new_coeff_schedule=[1.]))

    algorithm.setup(model=model, dataset=dataset)
    algorithm.train(dataset=dataset)

    assert model.monitor.get_batches_seen() == BATCHES
    assert model.monitor.get_examples_seen() == num_examples
    # Fix: the other versions of this test check py_integer_types, not
    # bare int -- on Python 2 the counters may be long, so isinstance
    # against int is fragile. Align with the sibling tests.
    assert isinstance(model.monitor.get_examples_seen(), py_integer_types)
    assert isinstance(model.monitor.get_batches_seen(), py_integer_types)