Ejemplo n.º 1
0
def evaluateWordSort(model, epoch):
    """Decode a small validation batch and print predicted word orderings.

    Runs the pointer model with teacher forcing disabled so the printed
    output reflects pure model predictions, then shows, per example, the
    reference order, the predicted order, their difference, and the words
    arranged according to the predicted indices.
    """
    print('Epoch [{}] -- Evaluate'.format(epoch))

    x_val, y_val = batch(4)
    # teacher_force_ratio=0. -> decode entirely from the model's own output
    predictions, _ = model(x_val, y_val, teacher_force_ratio=0.)
    predictions = predictions.permute(1, 0)

    for row in range(predictions.size(0)):
        print("=============================================")
        print(
            "yref",
            y_val[row],
            predictions[row],
            y_val[row] - predictions[row],
        )

        words = convertToWordSingle(x_val[row])
        print("orig", words)
        order = predictions[row].numpy()
        print("[", end="")
        for idx in order:
            print(words[idx] + ", ", end="")

        print("]")
Ejemplo n.º 2
0
def evaluateWordSort(model, epoch):
    """Greedily decode one validation example, step by step.

    Feeds the model its own predictions autoregressively (no teacher
    forcing) and prints the growing predicted sequence next to the
    reference targets after every step.

    Args:
        model: seq2seq model called as model(src_tensor, trg_tensor),
            returning logits; last dim is argmax'd — presumably
            (trg_len, batch, vocab), TODO confirm against the model.
        epoch: epoch number, used only for the log line.
    """
    print('Epoch [{}] -- Evaluate'.format(epoch))

    model.eval()
    x_val, y_val = batch(1)

    trg_seq_len, batch_size = y_val.shape

    sentence_tensor = x_val

    # Decoder input starts with the start token (id 30); each decoded
    # token is appended as its own single-element row.
    outputs = [[30]]

    for i in range(trg_seq_len - 1):
        trg_tensor = torch.LongTensor(outputs).to(device)

        with torch.no_grad():
            output = model(sentence_tensor, trg_tensor)

        # Greedy pick from the distribution at the last time step.
        best_guess = output.argmax(2)[-1, :].item()
        outputs.append([best_guess])

        print(outputs, y_val)
Ejemplo n.º 3
0
def train(pNet, optimizer, epoch, clip=1.):
    """Run one training epoch (STEPS_PER_EPOCH batches) for the pointer net."""
    print('Epoch [{}] -- Train'.format(epoch))
    for step in range(STEPS_PER_EPOCH):
        inputs, targets = batch(BATCH_SIZE)

        optimizer.zero_grad()
        _, loss = pNet(inputs, targets)
        loss.backward()
        # Clip the gradient norm before stepping to keep training stable.
        torch.nn.utils.clip_grad_norm_(pNet.parameters(), clip)
        optimizer.step()

        if (step + 1) % 100 == 0:
            print('Epoch [{}] loss: {}'.format(epoch, loss.item()))
Ejemplo n.º 4
0
def train(model, optimizer, epoch, clip=1.):
    """Train single epoch of the transformer sorter.

    Runs STEPS_PER_EPOCH batches of standard teacher-forced seq2seq
    training with cross-entropy loss and gradient-norm clipping.

    Args:
        model: seq2seq model called as model(src, trg); its output's last
            dim is reshaped as the class dim — presumably
            (trg_len, batch, vocab), TODO confirm against the model.
        optimizer: torch optimizer over model.parameters().
        epoch: epoch number, used only for logging.
        clip: maximum gradient norm (default 1.).
    """
    model.train()
    print('Epoch [{}] -- Train'.format(epoch))
    criterion = nn.CrossEntropyLoss()

    for step in range(STEPS_PER_EPOCH):
        optimizer.zero_grad()

        inp_data, target = batch(32)

        # Teacher forcing: feed target[:-1] as decoder input so the model
        # predicts the sequence shifted by one (target[1:]).
        trg = target[:-1, :]
        output = model(inp_data, trg)

        # Flatten (seq, batch, vocab) -> (seq*batch, vocab) and targets
        # to (seq*batch,) for CrossEntropyLoss.
        output = output.reshape(-1, output.shape[2])
        target = target[1:].reshape(-1)

        loss = criterion(output, target)

        loss.backward()
        nn.utils.clip_grad_norm_(model.parameters(), clip)
        optimizer.step()

        if (step + 1) % 100 == 0:
            print('Epoch [{}] loss: {}'.format(epoch, loss.item()))
Ejemplo n.º 5
0
from dataGen.dataGenerator import batch, convertToWordsBatch, convertToWordSingle
#from data import batch

# Smoke-test the word data generator: sample a batch, inspect tensors,
# and round-trip them through the word-conversion helpers.
print("=========Generating words================")
X, Y = batch(5)
for tensor in (X, Y):
    print(tensor)
print(X.shape)

print(X.numpy().shape)

Xa = convertToWordsBatch(X.numpy())
print("--------------------***************")
print(Xa)

for sample in X:
    print(">>", convertToWordSingle(sample))
Ejemplo n.º 6
0
from dataGen.dataGenerator import batch, convertToWordsBatch, convertToWordSingle
#from data import batch

# Sanity check for the data generator: draw a batch of word sequences
# and print both the batch-level and per-sample word conversions.
print("=========Generating words 1================")
X, Y = batch(4)
print(X)
print(Y)
print(X.shape)

Xa = convertToWordsBatch(X)
print("--------------------***************")
print(Xa)

for sample in X:
    converted = convertToWordSingle(sample)
    print(">>", converted)