Example #1
    # Read one labelled file and split each line into whitespace-separated tokens.
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            data = [line.split() for line in f.readlines()]
    # The original fallback repeats the same read; the bare except is narrowed to Exception here.
    except Exception:
        with open(file_path, 'r', encoding='utf-8') as f:
            data = [line.split() for line in f.readlines()]
    # Collect the sentences and tag every line from this file with the current class label.
    sentences.extend(data)
    labels.extend([label] * len(data))
    label += 1
print("\n--------Building essays completed!----------\n")
# Eassy and Architecture are project modules: Eassy wraps the sentences and labels,
# E2V() computes the essay vectors announced below, and split() returns a train/test split.
essays = Eassy.Eassy(sentences, labels)
essays.E2V()
print("\n--------Calculating essay vectors completed!----------\n")
train_x, test_x, train_y, test_y = essays.split()

rnn = Architecture.Representation(64, 64, 64)

# loss function
loss_fun = torch.nn.CrossEntropyLoss()
opt = torch.optim.Adam(rnn.parameters(), lr=0.01)
print("\n--------Starting train!----------\n")

acclist = []
for epoch in range(200):
    lab = []
    pred = []
    for essay, label in zip(train_x, train_y):
        # Add two leading singleton dimensions and cast to float, presumably to match the
        # network's expected input shape; wrap the label as a 1-element tensor.
        essay = torch.unsqueeze(essay, 0)
        essay = torch.unsqueeze(essay, 0)
        essay = essay.float()
        label = torch.tensor([label])
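The fragment cuts off before the forward and backward pass. A minimal sketch of how the per-essay step could continue, assuming rnn returns class logits of shape [1, num_classes]; this is an illustration, not part of the original listing:

        # Hypothetical continuation of the inner loop: forward pass, loss, and update.
        out = rnn(essay)                      # assumed to produce [1, num_classes] logits
        loss = loss_fun(out, label)           # CrossEntropyLoss expects integer class indices
        opt.zero_grad()                       # clear gradients left over from the previous step
        loss.backward()
        opt.step()
        pred.append(int(torch.argmax(out, dim=1)))
        lab.append(int(label))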
Example #2
            # Acc is a project helper (not shown in this fragment) that scores predictions against labels.
            acc = Acc(pred, label)
            accList[step].append(acc)
            print("\n<<-----epoch:{0} batch:{1}  | batch_x_size:{2}  | loss:{3:.3f}  | acc:{4:.3f}----->>\n".format(epoch, step, batch_x.size(), loss, acc))
            # Clear stale gradients before backpropagating (assumed missing from the fragment), then update.
            opt.zero_grad()
            loss.backward()
            opt.step()
    # "iter" is assumed to be the epoch-count argument passed to train(); note that it shadows the built-in.
    # The label keywords are placeholders so that plt.legend() has entries to display.
    plt.plot(range(1, iter + 1), accList[0], marker='*', mec='b', mfc='w', label='accList[0]')
    plt.plot(range(1, iter + 1), accList[1], marker='s', mec='g', mfc='w', label='accList[1]')
    plt.xlabel("epoch")
    plt.ylabel("acc")
    plt.legend()
    plt.show()
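Acc and accList are used above but never defined in this fragment. A minimal sketch of such an accuracy helper, assuming pred and label are sequences of predicted and ground-truth class indices (the signature and behaviour are assumptions, not the original code):

def Acc(pred, label):
    # Fraction of positions where the predicted class index matches the ground truth.
    pred = numpy.asarray(pred)
    label = numpy.asarray(label)
    return float((pred == label).sum()) / max(len(label), 1)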

if __name__ == "__main__":
    train_loader,dataLen_train,test_loader,dataLen_test = load_data()
    print("dataLen: ",dataLen_train)
    model = Architecture.Representation(5, dataLen_train)

    loss_fun = torch.nn.CrossEntropyLoss()
    opt = torch.optim.Adam(model.parameters(), lr=0.001)
    train(train_loader,300)
    # Evaluation: switch to eval mode once, then score each test batch.
    model.eval()
    for step, (batch_x, batch_y) in enumerate(test_loader):
        # The original passed the whole test_loader to the model; the batch tensor is used instead.
        out = model(batch_x)
        out = out.view(-1, out.shape[2])
        # print(out)
        pred = torch.max(out, 1)[1].data.numpy()
        print(pred)
        batch_y = batch_y.view(-1)
        loss = loss_fun(out, batch_y)
        label = numpy.asarray(batch_y, dtype=int, order=None)
        print(label)
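For reference, a hedged sketch of how an overall test accuracy could be accumulated instead of printing raw arrays per batch; it reuses model, test_loader, torch, and numpy from the script above and is not part of the original listing:

correct = 0
total = 0
for batch_x, batch_y in test_loader:
    out = model(batch_x)
    out = out.view(-1, out.shape[2])
    pred = torch.max(out, 1)[1].data.numpy()
    label = numpy.asarray(batch_y.view(-1), dtype=int)
    correct += int((pred == label).sum())
    total += len(label)
print("test accuracy: {:.3f}".format(correct / max(total, 1)))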