def main():
    """Train a 1-D regression net with a step-decayed learning rate, then plot predictions."""
    #return testLr()
    X, y = preparDataSet(N=200)

    net = RegressionNet()  #RegressionNet(hidden=10)
    #print(net)
    optimizer = optimizerTorch(net.parameters(), lr=1e-1)
    criterion = lossFunction()  # mean squared error

    # Halve the learning rate every 200 epochs.
    scheduler = torch.optim.lr_scheduler.LambdaLR(
        optimizer, lr_lambda=lambda epoch: 0.5**(epoch // 200))

    EPOCHS = 800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        loss = criterion(pred, y)
        #print('loss=',type(loss),loss)
        loss.backward()
        optimizer.step()
        scheduler.step()

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss),4)},run in {round(time.time()-t,4)}s'
            print(log)
            #print('epoch:',epoch, 'lr:',optimizer.param_groups[0]["lr"])

    predict(net)
def main():
    """Train a 2-feature regression net, then sample two predictions."""
    X, y = preparDataSetMul(N=2000, gamma=0.00001)

    net = RegressionNet2(input=2, hidden=20, hiddenlayers=2)  #RegressionNet(hidden=10)
    print(net)
    optimizer = optimizerTorch(net.parameters(), lr=1e-3)
    criterion = lossFunction()  # mean squared error

    # Decay factor 0.8 every 100 epochs. NOTE: scheduler.step() is commented
    # out below, so the learning rate actually stays constant during training.
    scheduler = torch.optim.lr_scheduler.LambdaLR(
        optimizer, lr_lambda=lambda epoch: 0.8**(epoch // 100))

    EPOCHS = 2800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        #print(pred)
        loss = criterion(pred, y)
        #print('loss=', float(loss))
        loss.backward()
        optimizer.step()
        #scheduler.step()

        if epoch % 100 == 0:
            lr = optimizer.param_groups[0]["lr"]
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss),4)},lr={float(lr)},run in {round(time.time()-t,4)}s'
            print(log)

    predict(net, a=[0.1, 0.2])
    predict(net, a=[0.3, 0.1])
def main():
    """Fit a small classifier on the Iris spreadsheet and report training accuracy."""
    #file = r'./db/fucDatasetClf_2F_MClass_1000.csv'
    #X,_,y,_ = getCsvDataset(file)
    path = r'./db/Iris.xlsx'
    X, _, y, _ = getExcelDataset(path)
    y = y.type(torch.long)  # CrossEntropyLoss expects integer class targets
    descpritDataset(X, y)
    #print(X.shape)

    features = X.shape[1]
    labels = np.unique(y)
    print('features=', features, 'labels=', labels)

    epoches = 800
    lr = 1e-2
    criterion = nn.CrossEntropyLoss()
    #criterion = nn.NLLLoss()

    net = ClassifierNet(input=features, output=len(labels), hidden=20)
    summary(net, (1, features))

    optimizer = optimizerTorch(net.parameters(), lr=lr)
    trainNet(net, X, y, optimizer, EPOCHS=epoches, lossFuc=criterion)
    accuracy(net, X, y)
def main():
    """Resume-train a CNN classifier, checkpointing and logging every epoch."""
    trainset, testset, train, test = prepareData(batch_size=20000)
    curEpoch, curLoss = 0, 0
    weightsDir = r'./res/weights/'

    #net = ClassifierNet(input=28*28, output=10, hidden=20) #Fc
    #net = ClassifierCNN_Net(10) #cnn
    #net = ClassifierCNN_Net2(10) #Sequential cnn
    net = ClassifierCNN_Net3(10)
    optimizer = optimizerTorch(net.parameters(), lr=1e-3)
    criterion = nn.CrossEntropyLoss()  #nn.NLLLoss

    resume = True  # continue training from the last checkpoint
    if resume:
        net, optimizer, curEpoch, curLoss = load_model(net, optimizer, weightsDir)
    print(net)
    #return

    print('training start...')
    loss_history = []
    acc_history = []
    EPOCHS = 30
    #optimizerDesc(optimizer)
    for epoch in range(EPOCHS):
        t = time.time()
        for X, y in trainset:
            #print('X.shape=',X.shape, X.dtype)
            #print('y.shape=',y.shape, y.dtype)
            net.zero_grad()
            output = net(X)
            loss = criterion(output, y)
            loss.backward()
            optimizer.step()

        total = curEpoch + epoch  # epoch index across all training runs
        #acc,_ = accuracy2(net,train)
        acc = accuracy(net, trainset)
        acc_history.append(round(acc, 4))
        # NOTE: this is only the loss of the final batch of the epoch.
        loss_history.append(float(loss))
        log = f'epoch[{epoch+1}/{EPOCHS}][total:{total+1}], loss={round(float(loss),4)}, accuracy={round(acc,4)}, run in {round(time.time()-t,4)}s'
        print(log)
        saveModel(net, optimizer, total, loss, weightsDir)

    #plotLossAndAcc(loss_history, acc_history)
    plotFromLog(r'./log/log.txt')
    evaluateModel(net, trainset, train)
def main():
    """Train a fully-connected MNIST-style classifier and save the weights."""
    #testLayers()
    trainset, testset, _, _ = prepareData(batch_size=10)

    #net = Net()
    net = ClassifierNet(input=28 * 28, output=10, hidden=20)
    #print(net)
    optimizer = optimizerTorch(net.parameters(), lr=1e-4)
    EPOCHS = 10
    #lr = float(optimizer.param_groups[0]["lr"])
    #print('lr=',lr)

    # x = torch.rand(28,28)
    # x = x.view(-1, 28*28)
    # outPut = net(x)
    # print(outPut)

    for epoch in range(EPOCHS):
        t = time.time()
        for X, y in trainset:
            #print('X.shape=',X.shape)
            #print('y.shape=',y.shape)
            net.zero_grad()
            # Flatten each image to a 784-long vector for the dense layers.
            output = net(X.view(-1, 28 * 28))
            loss = F.cross_entropy(output, y)
            loss.backward()
            optimizer.step()

        log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss),4)},run in {round(time.time()-t,4)}s'
        print(log)
        writeLog(log + '\n')

    accuracy(net, trainset)
    saveModel(net, optimizer, epoch, loss, r'./res/')
def main():
    """Train a 1-D regression net with a fixed learning rate, then plot predictions."""
    X, y = preparDataSet(N=200)

    net = RegressionNet()  #RegressionNet(hidden=10)
    #print(net)
    optimizer = optimizerTorch(net.parameters(), lr=1e-2)
    criterion = lossFunction()  # mean squared error

    EPOCHS = 800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        loss = criterion(net(X), y)
        #print('loss=',type(loss),loss)
        loss.backward()
        optimizer.step()

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss),4)},run in {round(time.time()-t,4)}s'
            print(log)

    predict(net)
def main():
    """Train a 1-D regression net while animating the fit, saving frames to a GIF.

    Renders a scatter of the data and the current prediction curve every epoch,
    collects a subsampled set of frames, and writes them to ./res/curve.gif.
    """
    #testLayers()
    X, y = preparDataSet(gamma=0.1)
    net = RegressionNet(hidden=50)
    optimizer = optimizerTorch(net.parameters(), lr=1e-2)
    lossFuc = lossFunction()

    my_images = []  # rendered frames for the GIF
    fig, ax = plt.subplots()

    EPOCHS = 400
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        #print(X.shape, pred.shape, y.shape)
        loss = lossFuc(pred, y)
        loss.backward()
        optimizer.step()

        # plot and show learning process
        plt.cla()
        ax.set_title('Regression Analysis', fontsize=12)
        ax.set_xlabel('X /Feature /Independent variable', fontsize=10)
        ax.set_ylabel('Y /Label /Dependent variable', fontsize=10)
        #ax.set_xlim(-1.05, 1.5)
        #ax.set_ylim(-0.25, 1.25)
        ax.scatter(X.data.numpy(), y.data.numpy(), color="orange")
        ax.plot(X.data.numpy(), pred.data.numpy(), 'g-', lw=3)
        ax.text(0.75, 0.16, 'Epoch = %d' % epoch,
                transform=ax.transAxes,
                fontdict={'size': 10, 'color': 'red'})
        ax.text(0.75, 0.12, 'Loss = %.4f' % loss.data.numpy(),
                transform=ax.transAxes,
                fontdict={'size': 10, 'color': 'red'})

        # Used to return the plot as an image array
        # (https://ndres.me/post/matplotlib-animated-gifs-easily/)
        fig.canvas.draw()  # draw the canvas, cache the renderer
        # Grab the rendered canvas as an (H, W, 3) uint8 RGB array.
        # FIX: tostring_rgb() was deprecated in matplotlib 3.8 and removed in
        # 3.10; fall back to the RGBA buffer and drop the alpha channel so this
        # keeps working on both old and current matplotlib.
        if hasattr(fig.canvas, 'tostring_rgb'):
            image = np.frombuffer(fig.canvas.tostring_rgb(), dtype='uint8')
            image = image.reshape(fig.canvas.get_width_height()[::-1] + (3, ))
        else:
            image = np.asarray(fig.canvas.buffer_rgba())[..., :3].copy()

        # Subsample frames so long runs keep the GIF a manageable size.
        if EPOCHS < 200 or (EPOCHS < 500 and epoch % 2 == 0) or (epoch % 4 == 0):
            my_images.append(image)

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss),4)},run in {round(time.time()-t,4)}s'
            print(log)
            #plt.show()
            #break

    # save images as a gif
    imageio.mimsave('./res/curve.gif', my_images, fps=20)