def main():
    """Trivial smoke test: fit a random linear map with a small MLP.

    Generates data from Y = X @ A.T + bias, then trains the model twice:
    once on the whole batch, once sample-by-sample.
    """
    # Synthetic linear-regression data.
    X = np.random.randn(100, 5)
    A = np.random.randn(10, 5)
    bias = np.random.randn(10)
    Y = np.dot(A, X.T).T + bias
    print(np.dot(A, X.T).T.shape)

    model = nn.Sequential()
    model.add(nn.Linear(5, 10))
    model.add(nn.Tanh(10))
    model.add(nn.Linear(10, 10))
    model.add(nn.MSE(10))

    print("Batch mode")
    # FIX: original used Python-2-only `xrange`, which is a NameError on
    # Python 3 (the multi-arg print() calls indicate Python 3 is intended);
    # `range` has identical iteration semantics.
    for i in range(10000):
        print("Loss", model.forward(X, Y))
        model.backward(alpha=0.1)

    print("Single mode")
    for i in range(10000):
        for j in range(X.shape[0]):
            print("Loss", model.forward(X[j], Y[j]))
            model.backward(alpha=0.1)
def main():
    """Train a tiny 2-5-1 MLP on the XOR truth table.

    Uses a fixed seed so runs are reproducible; MSE is the training metric.
    """
    np.random.seed(1)

    net = nn.Sequential()
    net.add(nn.Linear(2, 5))
    net.add(nn.Sigmoid())
    net.add(nn.Linear(5, 1))
    # NOTE: an output Sigmoid was deliberately left disabled in the original:
    #model.add(nn.Sigmoid())
    net.set_metric(nn.MSE())

    inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    targets = np.array([[0], [1], [1], [0]])
    net.fit(inputs, targets, 5000, 1)
import nn

# Build a 784 -> 100 -> 10 fully-connected network with sigmoid
# activations; the MSE layer is attached as the cost and the container
# is finalized with make().
network = nn.Container()
for layer in (
    nn.Reshape((1, 784)),
    nn.Linear(784, 100),
    nn.Sigmoid(),
    nn.Linear(100, 10),
    nn.Sigmoid(),
):
    network.add(layer)
network.add(nn.MSE(), cost=True)  # cost=True marks this layer as the loss
network.make()
args = parse()

# Dataset selection — the same generator produces both the train and the
# test split (so for 'xor' the two splits are identical).
dataset_makers = {
    'linear': lambda: generate_linear(n=100),
    'xor': generate_XOR_easy,
}
if args.dataset not in dataset_makers:
    raise RuntimeError('Dataset Not Found')
make_data = dataset_makers[args.dataset]
X_train, Y_train = make_data()
X_test, Y_test = make_data()

net = Net()

# Loss function.
criterion_makers = {
    'mse': nn.MSE,
    'crossentropy': nn.CrossEntropy,
}
if args.criterion not in criterion_makers:
    raise RuntimeError('Criterion Not Found')
criterion = criterion_makers[args.criterion]()

# Optimizer — SGD takes a momentum term, Adagrad does not.
if args.optimizer == 'sgd':
    optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=args.momentum)
elif args.optimizer == 'adagrad':
    optimizer = optim.Adagrad(net.parameters(), lr=args.lr)
else:
    raise RuntimeError('Optimizer Not Found')

model = Model(net, criterion, optimizer)
train_history = model.train(X_train, Y_train, epochs=args.epochs)
test_history = model.test(X_test, Y_test)