Example #1
import torch
import numpy as np
from baseline.utils import lookup_sentence, get_version
from torch.autograd import Variable
import torch.autograd
import torch.nn as nn
import torch.nn.functional
import math
import copy

PYT_MAJOR_VERSION = get_version(torch)


def sequence_mask(lengths):
    lens = lengths.cpu()
    max_len = torch.max(lens)
    # 1 x T
    row = torch.arange(0, max_len.item()).type_as(lens).view(1, -1)
    # B x 1
    col = lens.view(-1, 1)
    # Broadcast to B x T: position index < length marks valid (unpadded) steps
    mask = row < col
    return mask
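
# A quick usage sketch of sequence_mask (the inputs here are assumptions,
# not from the source):
lengths = torch.tensor([3, 1, 4])
mask = sequence_mask(lengths)
# mask is 3 x 4 (boolean, or 0/1 bytes on older PyTorch); row i carries
# lengths[i] leading True values:
# [[ True,  True,  True, False],
#  [ True, False, False, False],
#  [ True,  True,  True,  True]]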


def classify_bt(model, batch_time):
    tensor = torch.from_numpy(batch_time) if type(batch_time) == np.ndarray else batch_time
    # The model is assumed to return log-probabilities, hence the exp()
    probs = model(torch.autograd.Variable(tensor, requires_grad=False).cuda()).exp().data
    probs.div_(torch.sum(probs))  # NB: normalizes over the whole batch, not per row
    results = []
    batchsz = probs.size(0)
    # Truncated in the source; assumed completion pairing each label with its probability:
    for b in range(batchsz):
        outcomes = [(model.labels[id_i], prob_i) for id_i, prob_i in enumerate(probs[b])]
        results.append(outcomes)
    return results
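

# Usage sketch with a hypothetical model (everything below is an assumption:
# classify_bt needs a CUDA model that returns log-probabilities and exposes
# a .labels list):
class DummyClassifier(nn.Module):
    def __init__(self):
        super(DummyClassifier, self).__init__()
        self.labels = ['negative', 'positive']
        self.output = nn.Linear(100, 2)

    def forward(self, x):
        # classify_bt calls .exp(), so return log-probabilities
        return torch.nn.functional.log_softmax(self.output(x), dim=-1)

if torch.cuda.is_available():
    model = DummyClassifier().cuda()
    batch = np.zeros((1, 100), dtype=np.float32)  # one example, 100 features
    print(classify_bt(model, batch))  # [[('negative', p0), ('positive', p1)]]
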
Example #2
import math
import copy
import numpy as np
import torch
import torch.autograd
import torch.nn as nn
import torch.nn.functional as F
from baseline.utils import lookup_sentence, get_version, Offsets


PYT_MAJOR_VERSION = get_version(torch)


def sequence_mask(lengths, max_len=-1):
    lens = lengths.cpu()
    if max_len < 0:
        max_len = torch.max(lens).item()
    # 1 x T
    row = torch.arange(0, max_len).type_as(lens).view(1, -1)
    # B x 1
    col = lens.view(-1, 1)
    # Broadcast to B x T: position index < length marks valid (unpadded) steps
    mask = row < col
    return mask
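
# Sketch (assumed inputs): an explicit max_len pads the mask wider than the
# longest sequence in the batch:
lengths = torch.tensor([3, 1])
mask = sequence_mask(lengths, max_len=5)
# 2 x 5: [[ True,  True,  True, False, False],
#         [ True, False, False, False, False]]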


class VariationalDropout(nn.Module):
    """Inverted dropout that applies the same mask at each time step."""

    def __init__(self, p=0.5):
        """Variational Dropout

        :param p: float, the probability of dropping a unit
        """
        super(VariationalDropout, self).__init__()
        self.p = p