Example #1
def training_batch(batch_index, model, sess, batches):
    # Run one optimization step per batch, visiting batches in the
    # order given by batch_index. `data` is the project's data-loading
    # module, imported elsewhere in the file.
    for index in batch_index:
        user_input, num_idx, item_input, labels = data.batch_gen(batches, index)
        # [:, None] adds a trailing axis so each feed matches a
        # placeholder of shape (batch_size, 1).
        feed_dict = {model.user_input: user_input,
                     model.num_idx: num_idx[:, None],
                     model.item_input: item_input[:, None],
                     model.labels: labels[:, None]}
        sess.run([model.loss, model.optimizer], feed_dict)
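
For context, a minimal driver for this helper might look like the sketch below. This is an assumption, not part of the original snippet: `model`, `batches`, and the `data` module are taken to come from the surrounding project, and shuffling `batch_index` is a common way to randomize batch order each epoch.

# Hypothetical driver: model, batches, and data are assumed from the
# surrounding project; only training_batch itself is from above.
import numpy as np
import tensorflow as tf

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    batch_index = np.random.permutation(len(batches[1]))  # shuffle batch order
    training_batch(batch_index, model, sess, batches)     # one training epoch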
Example #2
def training_loss(model, sess, batches):
    # Average the loss over all batches. Only model.loss is fetched,
    # so no weights are updated here.
    train_loss = 0.0
    num_batch = len(batches[1])
    for index in range(num_batch):
        user_input, num_idx, item_input, labels = data.batch_gen(batches, index)
        feed_dict = {model.user_input: user_input,
                     model.num_idx: num_idx[:, None],
                     model.item_input: item_input[:, None],
                     model.labels: labels[:, None]}
        train_loss += sess.run(model.loss, feed_dict)
    return train_loss / num_batch
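
Putting the two TensorFlow helpers together, a training run would alternate weight updates with a pass that only fetches the loss. The loop below is a minimal sketch under that assumption; `num_epochs` and the session setup are hypothetical, not part of the original snippets.

# Hypothetical epoch loop combining training_batch and training_loss.
for epoch in range(num_epochs):                         # num_epochs assumed
    batch_index = np.random.permutation(num_batch)
    training_batch(batch_index, model, sess, batches)   # updates weights
    epoch_loss = training_loss(model, sess, batches)    # no optimizer op
    print("epoch %d: mean loss %.4f" % (epoch + 1, epoch_loss))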
Example #3

import torch
import torch.optim as optim

def training_loss(model, batches):
    # PyTorch variant: runs one optimization step per batch and
    # returns the mean loss over the epoch.
    train_loss = 0.0
    num_batch = len(batches[1])
    # Note: constructing the optimizer here resets Adam's moment
    # estimates on every call; building it once outside the function
    # and passing it in would preserve optimizer state across epochs.
    optimizer = optim.Adam(model.parameters(), lr=0.01)

    for index in range(num_batch):
        user_input, num_idx, item_input, labels = data.batch_gen(
            batches, index)

        user_input = torch.Tensor(user_input)
        num_idx = torch.Tensor(num_idx)
        item_input = torch.Tensor(item_input)
        labels = torch.Tensor(labels)

        optimizer.zero_grad()
        loss = model(user_input, num_idx, item_input, labels)
        print("batch %d : loss %f" % (index + 1, loss.item()))
        train_loss += loss.item()  # .item() detaches the scalar from the graph
        loss.backward()
        optimizer.step()
    return train_loss / num_batch
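
Unlike the TensorFlow pair above, this variant both trains and reports, so one call corresponds to one epoch. A minimal usage sketch, assuming the model's forward pass returns a scalar loss tensor; `num_epochs` is a hypothetical name, not from the original snippet.

# Hypothetical usage of the PyTorch variant.
for epoch in range(num_epochs):          # num_epochs assumed
    mean_loss = training_loss(model, batches)
    print("epoch %d: mean loss %.4f" % (epoch + 1, mean_loss))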