def test_minibatch_score_trainer():
    """Training with a MinibatchScore must never score chunks bigger than cut_size."""
    inpt = np.random.random((100, 10))
    target = np.random.random((100, 2))
    inpt, target = theano_floatx(inpt, target)
    datasets = {
        'train': (inpt, target),
        'val': (inpt, target),
        'test': (inpt, target),
    }

    cut_size = 10

    class CheckedMlp(mlp.Mlp):
        # Override score to verify the minibatch splitting actually happens.
        def score(self, X, Z):
            assert X.shape[0] <= cut_size
            return super(CheckedMlp, self).score(X, Z)

    model = CheckedMlp(10, [100], 2, ['tanh'], 'identity', 'squared',
                       max_iter=10)

    mb_score = MinibatchScore(cut_size, [0, 0])
    trainer = Trainer(model, datasets, score=mb_score,
                      pause=lambda info: True,
                      stop=lambda info: False)
    trainer.val_key = 'val'

    # One pause is enough to trigger scoring once.
    for _ in trainer.iter_fit(inpt, target):
        break
def test_minibatch_score_trainer():
    """Minibatch scoring of an auto encoder must respect the cut size."""
    inpt = np.random.random((100, 10))
    inpt, = theano_floatx(inpt)

    cut_size = 10

    class CheckedAutoEncoder(autoencoder.AutoEncoder):
        # Override score to verify chunks handed in are never too large.
        def score(self, X):
            assert X.shape[0] <= cut_size
            return super(CheckedAutoEncoder, self).score(X)

    model = CheckedAutoEncoder(10, [100], ['tanh'], 'identity', 'squared',
                               tied_weights=True, max_iter=10)

    mb_score = MinibatchScore(cut_size, [0])
    trainer = Trainer(model, score=mb_score,
                      pause=lambda info: True,
                      stop=lambda info: False)
    trainer.eval_data = {'val': (inpt,)}
    trainer.val_key = 'val'

    # A single pause suffices to exercise the scoring path once.
    for _ in trainer.iter_fit(inpt):
        break
def test_training_continuation():
    """A signal-based stop criterion must halt the trainer after one iteration."""
    # Signals are not usable this way on Windows.
    if sys.platform == 'win32':
        raise SkipTest()

    # Build model and data for the test.
    inpt = np.random.random((100, 10))
    target = np.random.random((100, 2))
    inpt, target = theano_floatx(inpt, target)
    datasets = {
        'train': (inpt, target),
        'val': (inpt, target),
        'test': (inpt, target),
    }

    optimizer = 'rmsprop', {'step_rate': 0.0001}
    model = mlp.Mlp(10, [2], 2, ['tanh'], 'identity', 'squared',
                    max_iter=10, optimizer=optimizer)

    # Stop either on the signal or after five iterations, whichever first.
    stopper = climin.stops.OnSignal()
    stop_crit = climin.stops.Any(
        [stopper, climin.stops.AfterNIterations(5)])

    trainer = Trainer(model, datasets, stop=stop_crit,
                      pause=climin.stops.always)
    trainer.val_key = 'val'

    # Send the stop signal to ourselves on the very first pause; training
    # must therefore end with exactly one iteration done.
    for info in trainer.iter_fit(inpt, target):
        os.kill(os.getpid(), stopper.sig)

    assert info['n_iter'] == 1
def test_training_continuation():
    """A signal-based stop criterion must halt the trainer after one iteration."""
    # Make model and data for the test.
    X = np.random.random((10, 2))
    X, = theano_floatx(X)
    optimizer = 'gd'
    m = autoencoder.AutoEncoder(2, [2], ['tanh'], 'identity', 'squared',
                                tied_weights=True, max_iter=10,
                                optimizer=optimizer)

    # Stop on the signal or after five iterations, whichever comes first.
    stopper = climin.stops.OnSignal()
    stops = climin.stops.Any([stopper, climin.stops.AfterNIterations(5)])

    t = Trainer(m, stop=stops, pause=climin.stops.always)
    t.val_key = 'val'
    t.eval_data = {'val': (X,)}

    # Send the stop signal to our own process on the first pause; the
    # trainer must therefore finish with exactly one iteration done.
    for info in t.iter_fit(X):
        os.kill(os.getpid(), stopper.sig)

    assert info['n_iter'] == 1
def test_training_continuation():
    """A signal-based stop criterion must halt the trainer after one iteration."""
    # Make model and data for the test.
    X = np.random.random((10, 2))
    X, = theano_floatx(X)
    optimizer = 'gd'
    m = autoencoder.AutoEncoder(2, [2], ['tanh'], 'identity', 'squared',
                                tied_weights=True, max_iter=10,
                                optimizer=optimizer)

    # Stop on the signal or after five iterations, whichever comes first.
    stopper = climin.stops.OnSignal()
    stops = climin.stops.Any([stopper, climin.stops.AfterNIterations(5)])

    t = Trainer(m, stop=stops, pause=climin.stops.always)
    t.val_key = 'val'
    t.eval_data = {'val': (X,)}

    # Send the stop signal to our own process on the first pause; the
    # trainer must therefore finish with exactly one iteration done.
    for info in t.iter_fit(X):
        os.kill(os.getpid(), stopper.sig)

    assert info['n_iter'] == 1
def test_minibatch_score_trainer():
    """Training with a MinibatchScore must never score chunks bigger than cut_size."""
    inpt = np.random.random((100, 10))
    target = np.random.random((100, 2))
    inpt, target = theano_floatx(inpt, target)
    datasets = {
        'train': (inpt, target),
        'val': (inpt, target),
        'test': (inpt, target),
    }

    cut_size = 10

    class CheckedMlp(mlp.Mlp):
        # Override score to verify the minibatch splitting actually happens.
        def score(self, X, Z):
            assert X.shape[0] <= cut_size
            return super(CheckedMlp, self).score(X, Z)

    model = CheckedMlp(10, [100], 2, ['tanh'], 'identity', 'squared',
                       max_iter=10)

    mb_score = MinibatchScore(cut_size, [0, 0])
    trainer = Trainer(model, datasets, score=mb_score,
                      pause=lambda info: True,
                      stop=lambda info: False)
    trainer.val_key = 'val'

    # One pause is enough to trigger scoring once.
    for _ in trainer.iter_fit(inpt, target):
        break
def test_training_continuation():
    """A signal-based stop criterion must halt the trainer after one iteration."""
    # Signals are not usable this way on Windows.
    if sys.platform == 'win32':
        raise SkipTest()

    # Build model and data for the test.
    inpt = np.random.random((100, 10))
    target = np.random.random((100, 2))
    inpt, target = theano_floatx(inpt, target)
    datasets = {
        'train': (inpt, target),
        'val': (inpt, target),
        'test': (inpt, target),
    }

    optimizer = 'rmsprop', {'step_rate': 0.0001}
    model = mlp.Mlp(10, [2], 2, ['tanh'], 'identity', 'squared',
                    max_iter=10, optimizer=optimizer)

    # Stop either on the signal or after five iterations, whichever first.
    stopper = climin.stops.OnSignal()
    stop_crit = climin.stops.Any(
        [stopper, climin.stops.AfterNIterations(5)])

    trainer = Trainer(model, datasets, stop=stop_crit,
                      pause=climin.stops.always)
    trainer.val_key = 'val'

    # Send the stop signal to ourselves on the very first pause; training
    # must therefore end with exactly one iteration done.
    for info in trainer.iter_fit(inpt, target):
        os.kill(os.getpid(), stopper.sig)

    assert info['n_iter'] == 1
def test_minibatch_score_trainer():
    """Minibatch scoring of an auto encoder must respect the cut size."""
    inpt = np.random.random((100, 10))
    inpt, = theano_floatx(inpt)

    cut_size = 10

    class CheckedAutoEncoder(autoencoder.AutoEncoder):
        # Override score to verify chunks handed in are never too large.
        def score(self, X):
            assert X.shape[0] <= cut_size
            return super(CheckedAutoEncoder, self).score(X)

    model = CheckedAutoEncoder(10, [100], ['tanh'], 'identity', 'squared',
                               tied_weights=True, max_iter=10)

    mb_score = MinibatchScore(cut_size, [0])
    trainer = Trainer(model, score=mb_score,
                      pause=lambda info: True,
                      stop=lambda info: False)
    trainer.eval_data = {'val': (inpt,)}
    trainer.val_key = 'val'

    # A single pause suffices to exercise the scoring path once.
    for _ in trainer.iter_fit(inpt):
        break