def forward(self, idxs, locs, data):
    # Only the index tensor is needed to route gradients in backward.
    self.save_for_backward(idxs,)
    nlocs = locs.new(*locs.size())
    ndata = data.new(*data.size())
    if locs.is_cuda:
        # The CUDA kernel reports failure through its return value.
        if not _extc.spnc_reorder_data(locs, data, idxs, nlocs, ndata,
                                       self.reverse):
            raise Exception("Cuda error")
    else:
        _ext.spn_reorder_data(locs, data, idxs, nlocs, ndata, self.reverse)
    return nlocs, ndata
def backward(self, grad_locs, grad_data):
    idxs, = self.saved_tensors
    nlocs = grad_locs.new(*grad_locs.size())
    ndata = grad_data.new(*grad_data.size())
    if grad_locs.is_cuda:
        # Apply the opposite reordering (1 - self.reverse) so gradients flow
        # back to their pre-permutation positions. The spnc_* CUDA kernels
        # live in _extc, matching forward.
        if not _extc.spnc_reorder_data(grad_locs, grad_data, idxs, nlocs,
                                       ndata, 1 - self.reverse):
            raise Exception("Cuda error")
    else:
        _ext.spn_reorder_data(grad_locs, grad_data, idxs, nlocs, ndata,
                              1 - self.reverse)
    # idxs is an integer index tensor, so its "gradient" is just zeros.
    return idxs.new(*idxs.size()).fill_(0), nlocs, ndata
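
# For reference, a pure-PyTorch sketch of what the reorder kernels above are
# assumed to do: permute locs and data along the particle dimension (dim 1)
# according to idxs, and invert that permutation when reverse is set. These
# semantics are inferred from the call signature, not confirmed against the
# extension source, and the name reorder_data_reference is hypothetical.
import torch

def reorder_data_reference(idxs, locs, data, reverse=False):
    if reverse:
        # argsort of a permutation yields its inverse, undoing the reorder.
        idxs = torch.argsort(idxs, dim=1)
    # Gather rows of locs (B x N x D) and data (B x N x C) using idxs (B x N).
    nlocs = torch.gather(locs, 1, idxs.unsqueeze(-1).expand_as(locs))
    ndata = torch.gather(data, 1, idxs.unsqueeze(-1).expand_as(data))
    return nlocs, ndata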