Example #1
import torch

from eval.score import f1


def pred(dev_iter, net, device=None):
    """Run the model on dev_iter and return the F1 score of its predictions."""
    if device is None:
        # Default to the device the model parameters already live on
        device = list(net.parameters())[0].device
    label = []
    label_true = []
    net = net.to(device)
    with torch.no_grad():
        net.eval()  # evaluation mode: no dropout, no batch-norm updates
        for X, Y, z in dev_iter:
            label.extend(
                torch.argmax(net(X.to(device)), dim=1).cpu().numpy().tolist())
            label_true.extend(Y.numpy().tolist())
        net.train()  # restore training mode
    return f1(label, label_true, classifications=2)
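
The f1 helper imported from eval.score is not shown on this page. Below is a minimal sketch, assuming it returns a macro-averaged F1 over the given number of classes from two flat label lists (per-class F1 is unchanged when the two lists are swapped, so the different argument order used in Example #2 would yield the same value):

def f1(pred_labels, true_labels, classifications=2):
    # Sketch only: macro-averaged F1 over 'classifications' classes.
    scores = []
    for c in range(classifications):
        tp = sum(1 for p, t in zip(pred_labels, true_labels) if p == c and t == c)
        fp = sum(1 for p, t in zip(pred_labels, true_labels) if p == c and t != c)
        fn = sum(1 for p, t in zip(pred_labels, true_labels) if p != c and t == c)
        precision = tp / (tp + fp) if tp + fp else 0.0
        recall = tp / (tp + fn) if tp + fn else 0.0
        scores.append(2 * precision * recall / (precision + recall)
                      if precision + recall else 0.0)
    return sum(scores) / classifications
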
Example #2
net.embedding.weight.data.copy_(
    load_pretrained_embedding(vocab.itos, glove_vocab))
net.embedding.weight.requires_grad = True  # pretrained embeddings loaded above; True lets them be fine-tuned during training
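# load_pretrained_embedding used above is not defined in this snippet. A minimal
# sketch of what the call assumes: a torchtext-style GloVe vocab with .stoi and
# .vectors, copied row by row into an embedding matrix (words missing from the
# pretrained vocab keep a zero vector); the exact signature is an assumption.
def load_pretrained_embedding(words, pretrained_vocab):
    embed = torch.zeros(len(words), pretrained_vocab.vectors[0].shape[0])
    oov_count = 0  # words without a pretrained vector
    for i, word in enumerate(words):
        try:
            idx = pretrained_vocab.stoi[word]
            embed[i, :] = pretrained_vocab.vectors[idx]
        except KeyError:
            oov_count += 1
    if oov_count > 0:
        print('{} out-of-vocabulary words'.format(oov_count))
    return embed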

#####
num_epochs = 10

optimizer = torch.optim.Adam(net.parameters())
loss = torch.nn.CrossEntropyLoss()  # softmax + cross-entropy in one op
train(train_iter, net, loss, optimizer, device, num_epochs)
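# train(...) called above is not shown in this snippet. A minimal sketch of a
# loop consistent with that call; the (X, Y, Z) batch layout mirrors the dev
# loop below, everything else is an assumption:
def train(train_iter, net, loss, optimizer, device, num_epochs):
    net = net.to(device)
    for epoch in range(num_epochs):
        total_loss, n = 0.0, 0
        for X, Y, Z in train_iter:
            y_hat = net(X.to(device), Z)   # same forward call as the dev loop below
            l = loss(y_hat, Y.to(device))
            optimizer.zero_grad()
            l.backward()
            optimizer.step()
            total_loss += l.item() * Y.shape[0]
            n += Y.shape[0]
        print('epoch {}, loss {:.4f}'.format(epoch + 1, total_loss / n))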
# Test on the dev set
label = []
label_true = []
net = net.to(device)
net.eval()  # evaluation mode, as in Example #1
with torch.no_grad():
    for X, Y, Z in dev_iter:
        label.extend(
            torch.argmax(net(X.to(device), Z), dim=1).cpu().numpy().tolist())
        label_true.extend(Y.numpy().tolist())
# Accuracy: fraction of dev examples predicted correctly
k = 0
for i, j in zip(label, label_true):
    if i == j:
        k = k + 1
print(k / len(label_true))  # the dev set here has 1067 examples
# Evaluation metric: F1 score
from eval.score import f1
print('f1_score is: {}'.format(f1(label_true, label, 2)))
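
As a sanity check (scikit-learn is not used in the original snippet), the accuracy and an F1 score can also be computed with sklearn.metrics, assuming label and label_true are flat lists of 0/1 integers:

from sklearn.metrics import accuracy_score, f1_score

print('accuracy:', accuracy_score(label_true, label))
print('f1      :', f1_score(label_true, label))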