Example #1
0
    def __getitem__(self, args):
        """Index the tensor, with extra support for VarBase integer-array indices.

        Non-iterable indices (int, slice, ...) and iterables whose first
        element is not a ``dygraph.core.VarBase`` are delegated to the base
        class and re-wrapped as ``Tensor``.  When ``args`` starts with a
        VarBase, its values are treated as coordinate arrays and gathered
        with ``torch.take``: a 2-tuple ``(rows, cols)`` selects
        ``self[rows[i], cols[i]]`` for each ``i``; any other tuple arity is
        rejected.

        Raises:
            NotImplementedError: for a tuple of VarBase indices whose length
                is not exactly 2.
        """
        # collections.abc is the supported home for runtime isinstance
        # checks; the typing aliases are deprecated for this purpose.
        from collections.abc import Iterable
        if not isinstance(args, Iterable):
            # Plain scalar-style index: defer to the base implementation.
            return Tensor(super(Tensor, self).__getitem__(args))
        if isinstance(args[0], dygraph.core.VarBase):
            if isinstance(args, tuple):
                if len(args) == 2:
                    # Pairwise (row, col) gather: self[rows[i], cols[i]].
                    return torch.take(
                        self,
                        list(
                            zip(args[0].numpy().astype(int).tolist(),
                                args[1].numpy().astype(int).tolist())))
                else:
                    # BUG FIX: the original raised a bare string, which is
                    # itself a TypeError at runtime ("exceptions must derive
                    # from BaseException") and swallows the message.
                    raise NotImplementedError(
                        "not support more than 2 axis array indexing")
            else:
                # Single VarBase index (non-tuple iterable): flat gather.
                return torch.take(self, args[0].numpy().astype(int).tolist())

        return Tensor(super(Tensor, self).__getitem__(args))
Example #2
0
 def forward(self, x, y):
     """Run ``x`` through ``self.main`` and select each row's entry at ``y``.

     The network output is flattened to shape (batch, num_domains), then
     ``out[i, y[i]]`` is gathered for every sample ``i`` via ``porch.take``.

     Returns:
         The gathered per-sample values ``s`` (one scalar per batch row).
     """
     out = self.main(x)
     out = porch.Tensor(out)
     out = out.view(out.size(0), -1)  # (batch, num_domains)
     # BUG FIX (dead code): the original built an unused ``idx`` tensor
     # left over from a commented-out ``out[idx, y]`` line; removed.
     # Gather out[i, y[i]] for each sample i.
     s = porch.take(
         out, list(zip(range(y.shape[0]),
                       y.numpy().astype(int).tolist())))
     return s
Example #3
0
    def finetune(self, z, y):
        """Map ``z`` through the shared trunk and pick the branch output for ``y``.

        Returns a tuple ``(s, h, out)``: the per-sample selection ``s``
        (``out[i, y[i]]`` for each row ``i``), the shared representation
        ``h``, and the full stack ``out`` of shape
        (batch, num_domains, style_dim).
        """
        h = self.shared(z)
        branch_outputs = [branch(h) for branch in self.unshared]
        out = porch.stack(branch_outputs, dim=1)  # (batch, num_domains, style_dim)

        # Pair each row index with its target label: (i, y[i]).
        labels = y.numpy().astype(int).tolist()
        coords = list(zip(range(y.size(0)), labels))
        s = porch.take(out, coords)
        return s, h, out
Example #4
0
    def forward(self, x, y):
        """Run ``x`` through the shared trunk and all heads, select per ``y``.

        The shared features are flattened, every head in ``self.unshared``
        is applied, the results are stacked to
        (batch, num_domains, style_dim), and ``out[i, y[i]]`` is gathered
        for each sample ``i``.
        """
        h = self.shared(x)
        h = porch.varbase_to_tensor(h)
        h = h.view(h.size(0), -1)
        head_outputs = [head(h) for head in self.unshared]
        out = porch.stack(head_outputs, dim=1)  # (batch, num_domains, style_dim)
        # Gather out[i, y[i]] for every batch row i.
        labels = y.numpy().astype(int).tolist()
        s = porch.take(out, list(zip(range(y.shape[0]), labels)))
        return s