Example #1
class ModuleAssociate(torch.nn.Module):
    def __init__(self):
        super(ModuleAssociate, self).__init__()

        # H1 = Variable(torch.randn(num_words, 100))
        # H2 = Variable(torch.randn(num_words, 100))

        self.C1 = ModuleC(100, 100)
        self.C2 = ModuleC(100, 100)

    def forward(self, oo):
        # Run the input through each projection head
        ll = self.C1.forward(oo)
        ll2 = self.C2.forward(oo)

        # ll is Nx100, so ll @ ll^T gives the NxN matrix we need
        A = torch.mm(ll, ll.transpose(0, 1))
        # ll2 is Nx100, so ll2 @ ll2^T is likewise NxN
        B = torch.mm(ll2, ll2.transpose(0, 1))
        # Flatten A into an (N*N)-dimensional vector
        A_f = A.view(A.size(0) * A.size(0))
        # Flatten B into an (N*N)-dimensional vector
        B_f = B.view(B.size(0) * B.size(0))
        # Expand dimensions to facilitate concatenation
        A_f = A_f.unsqueeze(1)
        B_f = B_f.unsqueeze(1)
        R = torch.cat((A_f, B_f), dim=1)

        return R

        # return self.linear3(x)
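
A quick way to sanity-check the shapes this forward pass produces is to run the same pairing logic on random tensors. The sketch below is only a minimal, standalone check, with ll and ll2 standing in for the Nx100 outputs of the two ModuleC heads (ModuleC itself is not shown in this snippet):

import torch

N = 4
ll = torch.randn(N, 100)   # stand-in for self.C1(oo)
ll2 = torch.randn(N, 100)  # stand-in for self.C2(oo)

A = torch.mm(ll, ll.transpose(0, 1))    # NxN
B = torch.mm(ll2, ll2.transpose(0, 1))  # NxN
A_f = A.view(N * N).unsqueeze(1)        # (N*N, 1)
B_f = B.view(N * N).unsqueeze(1)        # (N*N, 1)
R = torch.cat((A_f, B_f), dim=1)        # (N*N, 2)

assert R.shape == (N * N, 2)
# Row k = i*N + j of R holds the pair of scores (A[i, j], B[i, j])
assert torch.equal(R[1 * N + 2], torch.stack((A[1, 2], B[1, 2])))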
Example #2
    def __init__(self):
        super(ModuleAssociate, self).__init__()

        # H1 = Variable(torch.randn(num_words, 100))
        # H2 = Variable(torch.randn(num_words, 100))

        self.C1 = ModuleC(100, 100)
        self.C2 = ModuleC(100, 100)
Example #3
    def __init__(self):
        super(ComputationGraphTableNeighborParse, self).__init__()
        self.k = 8
        self.D_in = 300 + self.k

        self.A = ModuleA(self.D_in, 100)
        self.B = ModuleB()
        self.B2 = ModuleB2(100, 100, 100)
        self.C = ModuleC(100, 2)
        self.D = ModuleD(100, 100)
        # self.Cat = ModuleCollect(100, self.N)
        self.iterations = 1
Example #4
    def __init__(self):
        super(SimpleDocProcModel, self).__init__()
        self.k = 8
        self.D_in = 300 + self.k

        self.A = ModuleA(self.D_in, 100)
        self.B = ModuleB()
        self.B2 = ModuleB2(100, 100, 100)
        self.C = ModuleC(100, 2)
        self.D = ModuleD(100, 100)
        # self.Cat = ModuleCollect(100, self.N)
        self.iterations = 1
Example #5
class ComputationGraphTableNeighborParse(torch.nn.Module):
    def __init__(self):
        super(ComputationGraphTableNeighborParse, self).__init__()
        self.k = 8
        self.D_in = 300 + self.k

        self.A = ModuleA(self.D_in, 100)
        self.B = ModuleB()
        self.B2 = ModuleB2(100, 100, 100)
        self.C = ModuleC(100, 2)
        self.D = ModuleD(100, 100)
        # self.Cat = ModuleCollect(100, self.N)
        self.iterations = 1

    def set_iterations(self, iterations):
        self.iterations = iterations

    def concat(self, x, indices, indices_not_found, num_words):
        # Gather the features of each word's 5 neighbours and lay them out as
        # one 100-dim block per neighbour in a (num_words, 500) tensor.
        y = Variable(torch.zeros(num_words, 100 * 5)).cuda()
        y[:, 0:100] = x[indices[:, 0]]
        y[:, 100:200] = x[indices[:, 1]]
        y[:, 200:300] = x[indices[:, 2]]
        y[:, 300:400] = x[indices[:, 3]]
        y[:, 400:500] = x[indices[:, 4]]
        # Zero out the entries whose neighbour indices were not found
        y[indices_not_found] = 0

        return y

    def forward(self, indices, indices_not_found, vv, num_words):

        # Initial per-word encoding and a zero hidden state
        uu = self.A.forward(vv)
        hh = Variable(torch.zeros(num_words, 100)).cuda()

        # Iteratively refine the per-word features: gather each word's neighbour
        # features, run them through the recurrent blocks, and feed the updated
        # hidden state back in for the next pass.
        for i in range(self.iterations):
            ww = self.concat(uu, indices, indices_not_found, num_words)
            bb = self.B.forward(ww, hh)
            oo, hh = self.B2.forward(bb)
            ll = self.C.forward(oo)
            uu = self.D.forward(hh)

        return ll
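
The concat helper above is the easiest part to get wrong, so here is a small CPU-only sketch of the same gather-and-zero pattern at toy sizes. The Variable wrapper and .cuda() calls are dropped, and indices_not_found is assumed to be a boolean mask over words (its exact shape is not visible in this snippet):

import torch

num_words, feat = 3, 100
x = torch.randn(num_words, feat)                        # per-word features, e.g. the output of self.A or self.D
indices = torch.randint(0, num_words, (num_words, 5))   # 5 neighbour ids per word
indices_not_found = torch.zeros(num_words, dtype=torch.bool)
indices_not_found[2] = True                             # pretend word 2 has no valid neighbours

y = torch.zeros(num_words, feat * 5)
for n in range(5):
    y[:, n * feat:(n + 1) * feat] = x[indices[:, n]]    # advanced indexing gathers whole rows
y[indices_not_found] = 0                                # zero the rows flagged as not found

assert y.shape == (num_words, 500)
assert torch.equal(y[0, :feat], x[indices[0, 0]])
assert torch.all(y[2] == 0)

Inside forward, this (num_words, 500) tensor is rebuilt from the refreshed features uu on every iteration, so each pass sees neighbour information computed in the previous pass.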
Example #6
class ModuleAssociate2(torch.nn.Module):
    def __init__(self):
        super(ModuleAssociate2, self).__init__()
        self.num_features = 30

        self.P = ModuleC(100, 30)
        self.C = ModuleC(60, 2)

    def expand_way_1(self, A, num_words):
        # Row k = i*num_words + j of the result holds the features of word i
        return (A.view(1, num_words, self.num_features)
                .expand(num_words, 1, num_words, self.num_features)
                .transpose(0, 2)
                .contiguous()
                .view(num_words * num_words, self.num_features))

    def expand_way_2(self, A, num_words):
        # Row k = i*num_words + j of the result holds the features of word j
        return A.expand(num_words, num_words, self.num_features).contiguous().view(num_words * num_words, self.num_features)

    def concat_each(self, A, num_words):
        # For every ordered pair of words (i, j), concatenate their feature vectors
        A1 = self.expand_way_1(A, num_words)
        A2 = self.expand_way_2(A, num_words)
        return torch.cat((A1, A2), dim=1)

    def forward(self, oo, num_words):
        oo = self.P(oo)
        each_concatenated = self.concat_each(oo, num_words)
        return self.C.forward(each_concatenated)
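
The two expand_way_* views are dense, so a standalone check makes their effect explicit: for every ordered pair of words (i, j), row k = i*num_words + j of the concatenated tensor holds word i's features followed by word j's. This is a toy-sized sketch of the same view/expand operations, not the module itself:

import torch

num_words, num_features = 3, 4
A = torch.randn(num_words, num_features)

# Same chain as expand_way_1: row k = i*num_words + j -> features of word i
A1 = (A.view(1, num_words, num_features)
      .expand(num_words, 1, num_words, num_features)
      .transpose(0, 2)
      .contiguous()
      .view(num_words * num_words, num_features))
# Same expand as expand_way_2: row k = i*num_words + j -> features of word j
A2 = A.expand(num_words, num_words, num_features).contiguous().view(num_words * num_words, num_features)

pairs = torch.cat((A1, A2), dim=1)   # (num_words*num_words, 2*num_features)

i, j = 1, 2
assert torch.equal(pairs[i * num_words + j], torch.cat((A[i], A[j])))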
Example #7
    def __init__(self):
        super(ModuleAssociate2, self).__init__()
        self.num_features = 30

        self.P = ModuleC(100, 30)
        self.C = ModuleC(60, 2)