def main():
    #return testLr()

    X, y = preparDataSet(N=200)
    net = RegressionNet()  #RegressionNet(hidden=10)
    #print(net)

    optimizer = optimizerTorch(net.parameters(), lr=1e-1)
    lossFuc = lossFunction()  # mean squared error
    lambda1 = lambda epoch: 0.5**(epoch // 200)  # decay lr by 0.5 every 200 epochs
    scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda1)

    EPOCHS = 800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        loss = lossFuc(pred, y)
        #print('loss=',type(loss),loss)
        loss.backward()
        optimizer.step()
        scheduler.step()

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss), 4)}, run in {round(time.time() - t, 4)}s'
            print(log)
            #print('epoch:',epoch, 'lr:',optimizer.param_groups[0]["lr"])

    predict(net)
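The helpers used above (preparDataSet, RegressionNet, optimizerTorch, lossFunction, predict) are defined elsewhere in the repository and are not shown on this page. Below is a minimal sketch of what they might look like, assuming a 1-D noisy regression dataset, a single-hidden-layer MLP, a plain torch.optim optimizer and an MSE criterion; every name, shape and target function in the sketch is an assumption, not the original implementation.

# Hypothetical stand-ins for the helpers assumed by the examples on this page.
import time

import torch
import torch.nn as nn


def preparDataSet(N=200, gamma=0.05):
    # Assumed 1-D regression data: y = x^2 plus Gaussian noise of scale gamma.
    X = torch.unsqueeze(torch.linspace(-1, 1, N), dim=1)  # shape (N, 1)
    y = X.pow(2) + gamma * torch.randn(X.size())          # shape (N, 1)
    return X, y


class RegressionNet(nn.Module):
    # Assumed one-hidden-layer MLP for 1-D regression.
    def __init__(self, hidden=20):
        super().__init__()
        self.hidden = nn.Linear(1, hidden)
        self.out = nn.Linear(hidden, 1)

    def forward(self, x):
        return self.out(torch.relu(self.hidden(x)))


# The examples only require a torch.optim optimizer and an MSE criterion,
# so these aliases are a guess at what optimizerTorch / lossFunction resolve to.
optimizerTorch = torch.optim.SGD
lossFunction = nn.MSELoss


def predict(net, a=None):
    # Hypothetical evaluation helper: run the trained net on a probe input.
    x = torch.tensor([[0.5]] if a is None else [a], dtype=torch.float32)
    with torch.no_grad():
        print('predict', x.tolist(), '->', net(x).tolist())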
def main():
    X, y = preparDataSetMul(N=2000, gamma=0.00001)

    net = RegressionNet2(input=2, hidden=20, hiddenlayers=2)
    print(net)

    optimizer = optimizerTorch(net.parameters(), lr=1e-3)
    lossFuc = lossFunction()  # mean squared error
    lambda1 = lambda epoch: 0.8**(epoch // 100)  # decay lr by 0.8 every 100 epochs
    scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda1)

    EPOCHS = 2800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        #print(pred)
        loss = lossFuc(pred, y)
        #print('loss=', float(loss))
        loss.backward()
        optimizer.step()
        #scheduler.step()

        if epoch % 100 == 0:
            lr = optimizer.param_groups[0]["lr"]
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss), 4)}, lr={float(lr)}, run in {round(time.time() - t, 4)}s'
            print(log)

    predict(net, a=[0.1, 0.2])
    predict(net, a=[0.3, 0.1])
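This example additionally relies on preparDataSetMul and RegressionNet2, which are also not shown here. A possible sketch follows, under the assumption of two input features and a stack of identical hidden layers; the target function and layer sizes are illustrative only.

# Hypothetical stand-ins for preparDataSetMul and RegressionNet2.
import torch
import torch.nn as nn


def preparDataSetMul(N=2000, gamma=0.00001):
    # Assumed two-feature regression data with a small Gaussian noise term.
    X = torch.rand(N, 2)                                   # shape (N, 2)
    y = X[:, :1] * X[:, 1:] + gamma * torch.randn(N, 1)    # shape (N, 1)
    return X, y


class RegressionNet2(nn.Module):
    # Assumed MLP with a configurable number of identical hidden layers.
    def __init__(self, input=2, hidden=20, hiddenlayers=2):
        super().__init__()
        layers = [nn.Linear(input, hidden), nn.ReLU()]
        for _ in range(hiddenlayers - 1):
            layers += [nn.Linear(hidden, hidden), nn.ReLU()]
        layers.append(nn.Linear(hidden, 1))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        return self.model(x)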
def trainNet(net, X, y, optimizer, EPOCHS=400, lossFuc=lossFunction()):
    losses = []
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        #print('X.shape, pred.shape, y.shape=', X.shape, pred.shape, y.shape)
        #print('pred.dtype, y,dtype=', pred.dtype, y.dtype)
        loss = lossFuc(pred, y)
        loss.backward()
        optimizer.step()

        if epoch % (EPOCHS // 10) == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss), 4)}, run in {round(time.time() - t, 4)}s'
            print(log)

        losses.append(float(loss))
    return losses
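trainNet factors the training loop of the previous examples into a reusable function that also records the loss per epoch. A hypothetical usage, built on the sketched helpers above, trains a small net and plots the returned loss curve:

# Possible usage of trainNet with the sketched helpers above; the plotting
# part only illustrates one way to use the returned list of losses.
from matplotlib import pyplot as plt

X, y = preparDataSet(N=200)
net = RegressionNet(hidden=20)
optimizer = optimizerTorch(net.parameters(), lr=1e-2)

losses = trainNet(net, X, y, optimizer, EPOCHS=400)

plt.plot(losses)
plt.xlabel('epoch')
plt.ylabel('MSE loss')
plt.show()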
def main():
    X, y = preparDataSet(N=200)
    net = RegressionNet()  #RegressionNet(hidden=10)
    #print(net)

    optimizer = optimizerTorch(net.parameters(), lr=1e-2)
    lossFuc = lossFunction()  # mean squared error

    EPOCHS = 800
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()
        pred = net(X)
        loss = lossFuc(pred, y)
        #print('loss=',type(loss),loss)
        loss.backward()
        optimizer.step()

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss), 4)}, run in {round(time.time() - t, 4)}s'
            print(log)

    predict(net)
def main():
    #testLayers()
    X, y = preparDataSet(gamma=0.1)
    net = RegressionNet(hidden=50)

    optimizer = optimizerTorch(net.parameters(), lr=1e-2)
    lossFuc = lossFunction()

    my_images = []
    fig, ax = plt.subplots()

    EPOCHS = 400
    for epoch in range(EPOCHS):
        t = time.time()

        net.zero_grad()

        pred = net(X)
        #print(X.shape, pred.shape, y.shape)
        loss = lossFuc(pred, y)
        loss.backward()
        optimizer.step()

        # plot and show learning process
        plt.cla()
        ax.set_title('Regression Analysis', fontsize=12)
        ax.set_xlabel('X /Feature /Independent variable', fontsize=10)
        ax.set_ylabel('Y /Label /Dependent variable', fontsize=10)
        #ax.set_xlim(-1.05, 1.5)
        #ax.set_ylim(-0.25, 1.25)
        ax.scatter(X.data.numpy(), y.data.numpy(), color="orange")
        ax.plot(X.data.numpy(), pred.data.numpy(), 'g-', lw=3)
        ax.text(0.75, 0.16, 'Epoch = %d' % epoch, transform=ax.transAxes,
                fontdict={'size': 10, 'color': 'red'})
        ax.text(0.75, 0.12, 'Loss = %.4f' % loss.data.numpy(), transform=ax.transAxes,
                fontdict={'size': 10, 'color': 'red'})

        # Used to return the plot as an image array
        # (https://ndres.me/post/matplotlib-animated-gifs-easily/)
        fig.canvas.draw()  # draw the canvas, cache the renderer
        image = np.frombuffer(fig.canvas.tostring_rgb(), dtype='uint8')
        image = image.reshape(fig.canvas.get_width_height()[::-1] + (3, ))

        if EPOCHS < 200 or (EPOCHS < 500 and epoch % 2 == 0) or epoch % 4 == 0:
            my_images.append(image)

        if epoch % 50 == 0:
            log = f'epoch[{epoch+1}/{EPOCHS}] loss={round(float(loss), 4)}, run in {round(time.time() - t, 4)}s'
            print(log)
        #plt.show()
        #break

    # save images as a gif
    imageio.mimsave('./res/curve.gif', my_images, fps=20)
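One caveat: FigureCanvas.tostring_rgb, used above to grab each frame, is deprecated on recent Matplotlib releases. A sketch of an RGBA-based replacement for the frame-capture step, assuming an Agg-style canvas; the figure and list names here are placeholders mirroring the loop above:

# Self-contained sketch of frame capture via buffer_rgba instead of tostring_rgb.
import numpy as np
from matplotlib import pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1], 'g-')

frames = []
fig.canvas.draw()                             # render the current figure state
frame = np.asarray(fig.canvas.buffer_rgba())  # (H, W, 4) uint8 RGBA array
frames.append(frame)                          # imageio generally accepts RGBA frames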