def test_module_output_not_1d(self, net_cls, data):
    """A module emitting (n, 1) output is accepted by the net."""
    from skorch.toy import make_classifier

    # the output will not be squeezed
    single_unit_module = make_classifier(
        input_units=20,
        output_units=1,
    )
    classifier = net_cls(single_unit_module, max_epochs=1)
    classifier.fit(*data)  # does not raise
def classifier_module():
    """Return a simple classifier module class."""
    from skorch.toy import make_classifier

    module_kwargs = {
        'input_units': 20,
        'hidden_units': 10,
        'num_hidden': 2,
        'dropout': 0.5,
    }
    return make_classifier(**module_kwargs)
def test_module_output_2d_raises(self, net_cls, data):
    """A module producing (n, 2) output makes fit raise a ValueError."""
    from skorch.toy import make_classifier

    two_column_module = make_classifier(
        input_units=20,
        output_units=2,
    )
    classifier = net_cls(two_column_module, max_epochs=1)

    with pytest.raises(ValueError) as exc:
        classifier.fit(*data)

    # the error message spells out both the expected and the actual shape
    expected = ("Expected module output to have shape (n,) or "
                "(n, 1), got (128, 2) instead")
    assert exc.value.args[0] == expected
def train():
    """Fit a small classifier on synthetic data with the debug callbacks attached.

    Uses module-level names (``make_classification``, ``np``,
    ``NeuralNetClassifier``, ``make_classifier``, ``TriggerKeyError``,
    ``PrintMemory``) that must be in scope at call time.
    """
    X, y = make_classification(1000, 20, n_informative=10, random_state=0)
    # torch expects float32 features and int64 class labels
    X, y = X.astype(np.float32), y.astype(np.int64)

    net = NeuralNetClassifier(
        make_classifier(input_units=20),
        max_epochs=10,
        lr=0.1,
        callbacks=[TriggerKeyError(), PrintMemory()],
        device='cuda',
    )
    return net.fit(X, y)
def test_passes_kwargs_to_neuralnet_optimizer(
        self, filtered_optimizer, filter_requires_grad):
    """optimizer__* kwargs reach the underlying SGD through the filtered wrapper."""
    from skorch import NeuralNetClassifier
    from skorch.toy import make_classifier

    module_cls = make_classifier(
        input_units=1,
        num_hidden=0,
        output_units=1,
    )

    # wrapping the optimizer this way is deprecated, hence the warning
    with pytest.warns(DeprecationWarning):
        wrapped_opt = filtered_optimizer(torch.optim.SGD, filter_requires_grad)

    net = NeuralNetClassifier(
        module_cls, optimizer=wrapped_opt, optimizer__momentum=0.9)
    net.initialize()

    optimizer = net.optimizer_
    assert isinstance(optimizer, torch.optim.SGD)
    assert len(optimizer.param_groups) == 1
    assert optimizer.param_groups[0]['momentum'] == 0.9
def test_make_classifier(self):
    """The module built by make_classifier ends in a Softmax layer."""
    from skorch.toy import make_classifier

    module = make_classifier()()
    final_layer = module.sequential[-1]
    assert isinstance(final_layer, nn.Softmax)