Example #1
import torch
import torch.nn as nn
import torch.nn.functional as F
import kornia


class DepthLoss(nn.Module):  # class name assumed; the snippet only shows forward()
    def forward(self, pred, target):
        assert pred.dim() == target.dim(), "inconsistent dimensions"

        # Supervise only pixels with valid (positive) ground-truth depth.
        mask = (target > 0).detach()

        # L1 data term over the valid pixels.
        res = pred - target
        self.loss = res[mask].abs().mean()

        # Multi-scale gradient term: match x/y spatial gradients of
        # prediction and target at four progressively coarser scales.
        gradl = 0
        for scale in range(1, 5):
            pred_r = F.interpolate(pred, scale_factor=1 / scale,
                                   mode='bilinear', align_corners=False)
            target_r = F.interpolate(target, scale_factor=1 / scale,
                                     mode='bilinear', align_corners=False)
            mask_r = (target_r > 0).detach()

            pgrads: torch.Tensor = kornia.spatial_gradient(pred_r, order=1)  # BxCx2xHxW
            tgrads: torch.Tensor = kornia.spatial_gradient(target_r, order=1)

            # Take the absolute difference per direction so that x- and
            # y-errors cannot cancel, and average over valid pixels only.
            diff = (pgrads[:, :, 0] - tgrads[:, :, 0]).abs() + \
                   (pgrads[:, :, 1] - tgrads[:, :, 1]).abs()
            gradl += diff[mask_r].mean()

        self.loss += 0.5 * gradl
        return self.loss
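
A minimal usage sketch, assuming the `forward` above belongs to the `DepthLoss` module shown (the class name is not in the original) and that `pred`/`target` are Bx1xHxW depth maps where zeros mark invalid pixels:

criterion = DepthLoss()
pred = torch.rand(2, 1, 64, 64, requires_grad=True)  # dummy predictions
target = torch.rand(2, 1, 64, 64)                    # dummy ground truth
target[target < 0.1] = 0                             # simulate missing depth
loss = criterion(pred, target)
loss.backward()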
Example #2

import matplotlib.pyplot as plt
import numpy as np
import torch
import torchvision
import kornia


def imshow(input: torch.Tensor):
    # Lay the batch out on a 2-column grid and display it with matplotlib.
    out: torch.Tensor = torchvision.utils.make_grid(input, nrow=2, padding=1)
    out_np: np.ndarray = kornia.tensor_to_image(out)
    plt.imshow(out_np)
    plt.axis('off')
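
#############################
# The calls below assume `x_rgb` (a Bx3xHxW float tensor in [0, 1]) and
# `x_gray` (its Bx1xHxW grayscale version) were prepared earlier. A minimal
# sketch of one way to build them; 'img.png' is a placeholder path.
x_rgb: torch.Tensor = torchvision.io.read_image('img.png')[None].float() / 255.0
x_gray: torch.Tensor = kornia.color.rgb_to_grayscale(x_rgb)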


#############################
# Show original
imshow(x_rgb)

#################################
# Compute the 1st order derivatives
grads: torch.Tensor = kornia.spatial_gradient(x_gray, order=1)  # BxCx2xHxW
grads_x = grads[:, :, 0]  # BxCxHxW: derivative along x
grads_y = grads[:, :, 1]  # BxCxHxW: derivative along y

#################################
# Show first derivatives in x
imshow(grads_x)

#################################
# Show first derivatives in y
imshow(grads_y)

#################################
# Sobel Edges
# Once we have the gradients in the two directions, we can compute the
# Sobel edges. However, kornia already provides this operator.
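
#################################
# A short sketch of both routes (assuming the tensors above): the edge
# magnitude built by hand from `grads_x`/`grads_y`, and kornia's built-in
# Sobel operator, which returns the per-pixel gradient magnitude directly.
edges_manual: torch.Tensor = torch.sqrt(grads_x ** 2 + grads_y ** 2 + 1e-8)
x_sobel: torch.Tensor = kornia.sobel(x_gray)
imshow(x_sobel)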