def backward(ctx, graddist, gradidx):
    """Backward pass for the assignment-based EMD autograd Function.

    Args:
        ctx: autograd context holding (xyz1, xyz2, assignment) saved in forward.
        graddist: upstream gradient w.r.t. the per-point distances.
        gradidx: upstream gradient w.r.t. the assignment indices (unused —
            integer indices are not differentiable).

    Returns:
        (gradxyz1, gradxyz2, None, None) — gradients for the four forward
        inputs. NOTE(review): the CUDA kernel is only handed gradxyz1, so
        gradxyz2 is returned as zeros — presumably the gradient w.r.t. xyz2
        is intentionally not propagated; confirm against the extension.
    """
    xyz1, xyz2, assignment = ctx.saved_tensors
    graddist = graddist.contiguous()
    # zeros_like keeps device and dtype consistent with the saved inputs
    # instead of hard-coding device='cuda' (default device, float32); a
    # freshly allocated tensor is already contiguous, so the original
    # .contiguous() calls were no-ops and are dropped.
    gradxyz1 = torch.zeros_like(xyz1)
    gradxyz2 = torch.zeros_like(xyz2)
    emd.backward(xyz1, xyz2, gradxyz1, graddist, assignment)
    return gradxyz1, gradxyz2, None, None
def backward(ctx, gradcost):
    """Backward pass for the match-based EMD autograd Function.

    Args:
        ctx: autograd context holding (xyz1, xyz2, match) saved in forward.
        gradcost: upstream gradient w.r.t. the EMD cost.
            NOTE(review): gradcost is never applied to the computed
            gradients — presumably the kernel assumes an all-ones upstream
            gradient. Callers that scale the loss should verify this.

    Returns:
        (gradxyz1, gradxyz2): gradients w.r.t. the two input point clouds.
    """
    xyz1, xyz2, match = ctx.saved_tensors
    # zeros_like allocates the output buffers on the same device and with
    # the same dtype as the inputs, instead of building float32 CPU zeros
    # and copying them to the default CUDA device via .cuda().
    gradxyz1 = torch.zeros_like(xyz1)
    gradxyz2 = torch.zeros_like(xyz2)
    emd.backward(xyz1, xyz2, gradxyz1, gradxyz2, match)
    return gradxyz1, gradxyz2