def forward(self, input):
    # Hybrid variant: sum the learned prediction with the self.Baum estimate.
    # The Baum mask is discarded; a constant 1.0 is returned in its place.
    BaumOut, mask = self.Baum(input)
    abc = self.features(self.input_norm(input))
    a = abc[:, 0, :, :].contiguous()
    b = abc[:, 1, :, :].contiguous()
    c = abc[:, 2, :, :].contiguous()
    return abc2A(a, b, c) + BaumOut, 1.0
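# Note on the hybrid variant above: unlike the "+ 1." heads further below, it has
# no identity offset, so the learned term is a pure residual on top of the Baum
# estimate. A tiny self-contained check of that elementwise-sum semantics
# (the (B, 2, 2) shapes and the stand-in tensors are assumptions, not taken
# from this code):
import torch

B = 4
BaumOut = torch.eye(2).expand(B, 2, 2)           # stand-in Baum estimate
residual = torch.zeros(B, 2, 2)                  # a zero-output features head
assert torch.equal(BaumOut + residual, BaumOut)  # forward reduces to the Baum estimate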
def forward(self, x):
    # Move the weighting kernel to the input's device.
    if x.is_cuda:
        self.gk = self.gk.cuda()
    else:
        self.gk = self.gk.cpu()
    # Image gradients via padded convolutions.
    gx = self.gx(F.pad(x, (1, 1, 0, 0), 'replicate'))
    gy = self.gy(F.pad(x, (0, 0, 1, 1), 'replicate'))
    # Weighted entries of the second-moment (structure) matrix,
    # averaged over each patch.
    gk = self.gk.unsqueeze(0).unsqueeze(0).expand_as(gx)
    a1 = (gx * gx * gk).view(x.size(0), -1).mean(dim=1)
    b1 = (gx * gy * gk).view(x.size(0), -1).mean(dim=1)
    c1 = (gy * gy * gk).view(x.size(0), -1).mean(dim=1)
    a, b, c, l1, l2 = self.invSqrt(a1, b1, c1)
    # Mask out overly anisotropic patches (eigenvalue ratio above 6).
    rat1 = l1 / l2
    mask = (torch.abs(rat1) <= 6.).float().view(-1)
    return abc2A(a, b, c), mask
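# The method above relies on invSqrt, which is not defined in this section
# (abc2A is sketched after the last variant below). A minimal sketch of what
# invSqrt might look like, under these assumptions: it takes the three
# components of a batch of symmetric 2x2 matrices [[a, b], [b, c]], returns
# the components of the inverse matrix square root plus its two eigenvalues
# for the anisotropy mask, and normalizes to unit determinant. It is written
# here as a free function, while the module above binds it as self.invSqrt.
import torch

def invSqrt(a, b, c, eps=1e-12):
    # Closed-form inverse square root of the symmetric PSD 2x2 matrix
    # M = [[a, b], [b, c]], per batch element:
    # sqrt(M) = (M + s*I) / t with s = sqrt(det M), t = sqrt(tr M + 2s),
    # hence M^(-1/2) = (1 / (s * t)) * [[c + s, -b], [-b, a + s]].
    s = torch.sqrt(torch.clamp(a * c - b * b, min=eps))
    t = torch.sqrt(a + c + 2. * s)
    denom = s * t + eps
    new_a = (c + s) / denom
    new_b = -b / denom
    new_c = (a + s) / denom
    # Eigenvalues of M^(-1/2), used for the eigenvalue-ratio mask above.
    disc = torch.sqrt((new_a - new_c) ** 2 + 4. * new_b ** 2)
    l1 = 0.5 * (new_a + new_c + disc)  # larger eigenvalue
    l2 = 0.5 * (new_a + new_c - disc)  # smaller eigenvalue
    # Normalize the shape matrix to unit determinant (assumption).
    d = torch.sqrt(l1 * l2 + eps)
    return new_a / d, new_b / d, new_c / d, l1 / d, l2 / d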
def forward(self, input):
    abc = self.features(self.input_norm(input))
    return abc2A(abc[:, 0, :, :] + 1., abc[:, 1, :, :], abc[:, 2, :, :] + 1.)
def forward(self, input):
    abc = self.features(self.input_norm(input))
    # The +1 offsets on the diagonal bias the prediction toward the identity matrix.
    a = abc[:, 0, :, :].contiguous() + 1.0
    b = abc[:, 1, :, :].contiguous() + 0.0
    c = abc[:, 2, :, :].contiguous() + 1.0
    return abc2A(a, b, c), 1.0
def forward(self, input):
    abc = self.features(self.input_norm(input))
    return abc2A(abc[:, 0, :, :].contiguous() + 1.,
                 abc[:, 1, :, :].contiguous(),
                 abc[:, 2, :, :].contiguous() + 1.)
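# Every forward above funnels its prediction through abc2A. A plausible
# sketch, assuming it simply packs the three per-element scalars into a
# batch of symmetric 2x2 affine shape matrices [[a, b], [b, c]]:
import torch

def abc2A(a, b, c):
    # (B,) or (B, H, W) scalars -> (B', 2, 2) symmetric matrices.
    row1 = torch.cat([a.contiguous().view(-1, 1, 1),
                      b.contiguous().view(-1, 1, 1)], dim=2)
    row2 = torch.cat([b.contiguous().view(-1, 1, 1),
                      c.contiguous().view(-1, 1, 1)], dim=2)
    return torch.cat([row1, row2], dim=1)

# Under this reading, the "+ 1." offsets in the last three variants mean a
# zero-output network yields the identity matrix, i.e. an unmodified circular
# shape, so training starts from a sensible default.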