def setUpClass(self):
    """Load MNIST once and train a two-layer dense softmax classifier.

    Caches the trained model and its fit metrics on the class so the
    individual tests can assert against a single (expensive) training run.
    """
    self.X, self.y = get_mnist()
    self.model = Model(
        lr=0.01,
        n_epoch=3,
        loss=SCCE(),
        metrics=['loss', 'accuracy'],
        optimizer=RMSProp(),
    )
    # Hidden ReLU layer over the 784 flat pixels, softmax output over 10 digits.
    for layer in (
        Dense(200, inshape=784, activation=ReLU()),
        Dense(10, activation=Softmax()),
    ):
        self.model.add_layer(layer)
    self.fit_metrics = self.model.fit(self.X, self.y)
class TestMNIST(unittest.TestCase):
    """End-to-end check: a dense network reaches >90% accuracy on MNIST."""

    @classmethod
    def setUpClass(cls):
        """Train the model once; all tests read the cached fit metrics.

        Uses ``cls`` (not ``self``) — this is a classmethod, and the
        conventional name keeps it consistent with the other test classes
        in this file.
        """
        cls.X, cls.y = get_mnist()
        cls.model = Model(lr=0.01, n_epoch=3, loss=SCCE(),
                          metrics=['loss', 'accuracy'], optimizer=RMSProp())
        cls.model.add_layer(Dense(200, inshape=784, activation=ReLU()))
        cls.model.add_layer(Dense(10, activation=Softmax()))
        cls.fit_metrics = cls.model.fit(cls.X, cls.y)

    def test_training_accuracy_above_ninety(self):
        self.assertGreater(self.fit_metrics['train']['accuracy'], 0.9)

    def test_validation_accuracy_above_ninety(self):
        self.assertGreater(self.fit_metrics['val']['accuracy'], 0.9)
def setUpClass(self):
    """Subsample MNIST and train a small convnet once for the class.

    Caches the trained model and fit metrics so every test shares one
    training run.
    """
    self.X, self.y = get_mnist()
    # get_mnist() appears to return flat (N, 784) rows (the dense example
    # feeds it to Dense(inshape=784) directly), but the Convolution layer
    # below declares inshape=(None, 1, 28, 28). Reshape and scale pixels to
    # [0, 1], matching the standalone convnet script. TODO confirm the
    # actual get_mnist() output shape.
    self.X = self.X.reshape((-1, 1, 28, 28)) / 255.0
    # One shared permutation keeps X and y aligned; the previous version
    # re-seeded the RNG and permuted each array separately, which only
    # worked because the RNG state happened to be identical both times.
    np.random.seed(100)
    idx = np.random.permutation(len(self.X))[:1000]
    self.X = self.X[idx]
    self.y = self.y[idx]
    self.model = Model(lr=0.001, n_epoch=100, batch_size=3, loss=SCCE(),
                       metrics=['loss', 'accuracy'], optimizer=SGD())
    self.model.add_layer(Convolution(1, (3, 3), inshape=(None, 1, 28, 28)))
    self.model.add_layer(MeanPooling((2, 2)))
    self.model.add_layer(Convolution(2, (4, 4)))
    self.model.add_layer(MeanPooling((2, 2)))
    self.model.add_layer(Flatten())
    self.model.add_layer(Dense(10, activation=Softmax()))
    self.fit_metrics = self.model.fit(self.X, self.y)
def setUpClass(cls):
    """Train a tiny two-layer sigmoid network on the XOR truth table.

    Runs once per class; tests exercise the fitted model afterwards.
    """
    # XOR inputs and targets.
    cls.X_train = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    cls.Y_train = np.array([[0], [1], [1], [0]])
    cls.model = Model(progress=False, validation_split=0, batch_size=4,
                      metrics=['loss', 'accuracy'], log_interval=5000)
    hidden = Dense(3, inshape=2, activation=Sigmoid())
    output = Dense(1, activation=Sigmoid())
    cls.model.add_layer(hidden)
    cls.model.add_layer(output)
    cls.model.compile()
    cls.model.fit(cls.X_train, cls.Y_train)
class TestMNISTWithConvnet(unittest.TestCase):
    """End-to-end check: a small convnet reaches >80% accuracy on 1000 MNIST samples."""

    @classmethod
    def setUpClass(cls):
        """Subsample MNIST, train the convnet once, cache the fit metrics.

        Uses ``cls`` (not ``self``) — this is a classmethod, consistent
        with the XOR test class in this file.
        """
        cls.X, cls.y = get_mnist()
        # get_mnist() appears to return flat (N, 784) rows (the dense test
        # feeds it to Dense(inshape=784) directly), but Convolution below
        # declares inshape=(None, 1, 28, 28). Reshape and scale pixels to
        # [0, 1], matching the standalone convnet script. TODO confirm the
        # actual get_mnist() output shape.
        cls.X = cls.X.reshape((-1, 1, 28, 28)) / 255.0
        # One shared permutation keeps X and y aligned; the previous
        # double seed-and-permute trick relied on identical RNG state twice.
        np.random.seed(100)
        idx = np.random.permutation(len(cls.X))[:1000]
        cls.X = cls.X[idx]
        cls.y = cls.y[idx]
        cls.model = Model(lr=0.001, n_epoch=100, batch_size=3, loss=SCCE(),
                          metrics=['loss', 'accuracy'], optimizer=SGD())
        cls.model.add_layer(Convolution(1, (3, 3), inshape=(None, 1, 28, 28)))
        cls.model.add_layer(MeanPooling((2, 2)))
        cls.model.add_layer(Convolution(2, (4, 4)))
        cls.model.add_layer(MeanPooling((2, 2)))
        cls.model.add_layer(Flatten())
        cls.model.add_layer(Dense(10, activation=Softmax()))
        cls.fit_metrics = cls.model.fit(cls.X, cls.y)

    # NOTE(review): the method names say "ninety" but the threshold is 0.8;
    # names are kept to avoid changing the discovered test identifiers.
    def test_training_accuracy_above_ninety(self):
        self.assertGreater(self.fit_metrics['train']['accuracy'], 0.8)

    def test_validation_accuracy_above_ninety(self):
        self.assertGreater(self.fit_metrics['val']['accuracy'], 0.8)
from slugnet.activation import ReLU, Softmax
from slugnet.layers import Dense, Dropout
from slugnet.loss import SoftmaxCategoricalCrossEntropy as SCCE
from slugnet.model import Model
from slugnet.optimizers import RMSProp
from slugnet.data.mnist import get_mnist

# Example: train a dropout-regularized dense classifier on MNIST.
X, y = get_mnist()

model = Model(
    lr=0.01,
    n_epoch=3,
    loss=SCCE(),
    metrics=['loss', 'accuracy'],
    optimizer=RMSProp(),
)

# Hidden ReLU layer over the 784 flat pixels, dropout for regularization,
# softmax output over the 10 digit classes.
layers = [
    Dense(200, inshape=784, activation=ReLU()),
    Dropout(0.5),
    Dense(10, activation=Softmax()),
]
for layer in layers:
    model.add_layer(layer)

model.fit(X, y)
# FIX: this script called np.random.* without ever importing numpy,
# so it raised NameError at runtime.
import numpy as np

from slugnet.activation import ReLU, Softmax
from slugnet.layers import Convolution, Dense, MeanPooling, Flatten
from slugnet.loss import SoftmaxCategoricalCrossEntropy as SCCE
from slugnet.model import Model
from slugnet.optimizers import SGD
from slugnet.data.mnist import get_mnist

# Example: train a small convnet on a 1000-sample subset of MNIST.
X, y = get_mnist()
# Reshape the flat pixel rows into (N, channels, height, width) for the
# Convolution layer and scale pixels to [0, 1].
X = X.reshape((-1, 1, 28, 28)) / 255.0

# One shared permutation keeps X and y aligned; re-seeding and permuting
# each array separately only worked because the RNG state was identical
# both times.
np.random.seed(100)
idx = np.random.permutation(len(X))[:1000]
X = X[idx]
y = y[idx]

model = Model(lr=0.001, n_epoch=100, batch_size=3, loss=SCCE(),
              metrics=['loss', 'accuracy'], optimizer=SGD())
model.add_layer(Convolution(1, (3, 3), inshape=(None, 1, 28, 28)))
model.add_layer(MeanPooling((2, 2)))
model.add_layer(Convolution(2, (4, 4)))
model.add_layer(MeanPooling((2, 2)))
model.add_layer(Flatten())
model.add_layer(Dense(10, activation=Softmax()))
model.fit(X, y)