import numpy as np

import Layers

# Synthetic linear-regression problem: targets are generated as
# train_y = train_x @ ans_w + ans_b, so a perfect fit recovers (ans_w, ans_b).
train_x = np.random.uniform(-5, 5, (100, 10))
ans_w = np.random.uniform(-5, 5, (10, 1))
ans_b = 3
train_y = train_x.dot(ans_w) + ans_b

# Randomly initialised parameters to be learned.
w = np.random.uniform(-5, 5, (10, 1))
b = np.random.uniform(-5, 5, (1, 1))

learning_rate = 0.001
# Renamed from `epoch` so the epoch count no longer shares a name with the
# loop counter below (and to match the `epochs` convention used by the other
# training scripts in this file).
epochs = 20000

keys = ['w', 'b']
layers = {}
layers['w'] = Layers.MulLayer(w)
layers['b'] = Layers.AddLayer(b)
lastlayer = Layers.MSELayer(train_y)  # loss layer holding the regression targets
grads = {}

for epoch in range(epochs):
    x = train_x
    y = train_y
    # Forward pass through the affine pipeline: x -> x*w -> x*w + b.
    for key in keys:
        x = layers[key].forward(x)
    loss = lastlayer.forward(x)
    if epoch % 100 == 0:
        print("err : ", loss)
    # NOTE(review): this chunk ends here — only the loss-layer backward is
    # visible; the backward pass through `layers` and the parameter updates
    # (presumably via `grads` and `learning_rate`) continue past this view.
    dout = lastlayer.backward()
# NOTE(review): this chunk starts mid-script — `one_hot` and `x_train` are
# defined in an earlier, unseen part of the file.
y_train = one_hot

# initialize parameters
num_classes = y_train.shape[1]
hidden = 50

keys = ['w1', 'b1', 'ReLU', 'w2', 'b2']

params = {}
params['w1'] = np.random.uniform(-1, 1, (x_train.shape[1], hidden))
params['b1'] = np.random.uniform(-1, 1, (hidden))
params['w2'] = np.random.uniform(-1, 1, (hidden, num_classes))
params['b2'] = np.random.uniform(-1, 1, (num_classes))

# Two-layer network: affine -> activation -> affine, softmax on top.
layers = {}
layers['w1'] = Layers.MulLayer(params['w1'])
layers['b1'] = Layers.AddLayer(params['b1'])
# NOTE(review): the key says 'ReLU' but the layer constructed is ELU —
# confirm which activation is actually intended.
layers['ReLU'] = Layers.ELU()
layers['w2'] = Layers.MulLayer(params['w2'])
layers['b2'] = Layers.AddLayer(params['b2'])
lastlayer = Layers.SoftmaxLayer()
grads = {}

# initialize hyperparameters
learning_rate = 0.01
epochs = 10000
batch_size = 512

for epoch in range(epochs):
    # Draw a random mini-batch of row indices (sampling with replacement).
    batch_mask = np.random.choice(x_train.shape[0], batch_size)
    # NOTE(review): chunk is cut off inside the loop body here.
    x = x_train[batch_mask]
# NOTE(review): this chunk starts mid-script — `one_hot`, `x_train`, and the
# `y_train` used below come from an earlier, unseen part of the file.
# Hold out the last 10k samples of the 60k set as the test split.
y_test = one_hot[50000:60000]
x_test = x_train[50000:60000]
x_train = x_train[:50000]

# initialize parameters
num_classes = y_train.shape[1]
hidden = 50  # NOTE(review): appears unused in this chunk — confirm

# Reshape flat pixel rows to NCHW (1 channel, 28x28) for the conv layer.
x_train = x_train.reshape(x_train.shape[0], 1, 28, 28)
x_test = x_test.reshape(x_test.shape[0], 1, 28, 28)
print(x_train.shape)

# Conv -> ReLU -> Dropout -> Flatten -> affine(10) -> ReLU -> Dropout -> Softmax.
model = Model()
model.addlayer(ConvLayer(32, (3, 3)), name='conv1')
model.addlayer(Layers.ReLU(), name='relu1')
model.addlayer(Layers.Dropout(), name='dropout1')
model.addlayer(Layers.Flatten(), name='flatten')
model.addlayer(Layers.MulLayer(10), name="w1")
model.addlayer(Layers.AddLayer(10), name='b1')
model.addlayer(Layers.ReLU(), name='relu3')
model.addlayer(Layers.Dropout(0.5), name='dropout3')
model.addlayer(Layers.SoftmaxLayer(), name='softmax')

optimizer = Optimizer.Adam(batch_size=32)
model.train(x_train, y_train, optimizer, 10000, 0.01)
model.save()

# Report accuracy on both splits.
print("--TRAIN EVAL--")
model.eval(x_train, y_train)
print("--TEST EVAL--")
model.eval(x_test, y_test)