def forward(ctx, out, src, index, dim):
    """Scatter-divide `src` into `out` along `dim`, in place.

    `out` is mutated by the kernel, so it is flagged dirty for autograd;
    the tensors needed by the backward pass are stashed on `ctx`.
    """
    kernel = get_func('scatter_div', src)
    kernel(src, index, out, dim)
    # `out` is an input that the kernel modified in place.
    ctx.mark_dirty(out)
    ctx.dim = dim
    ctx.save_for_backward(out, src, index)
    return out
def backward(ctx, grad_out, grad_arg):
    """Route `grad_out` back to the `src` slots recorded in `arg`.

    Only the `src` argument (input position 1) can require grad; the
    `out`, `index` and `dim` positions always get None.
    """
    index, arg = ctx.saved_tensors
    if not ctx.needs_input_grad[1]:
        return None, None, None, None
    grad_src = grad_out.new_zeros(index.size())
    kernel = get_func('index_backward', grad_out)
    kernel(grad_out, index, arg, grad_src, ctx.dim)
    return None, grad_src, None, None
def forward(ctx, out, src, index, dim):
    """Scatter-min `src` into `out` along `dim`, recording argmin positions.

    Returns `(out, arg)` where `arg` holds, per output slot, the source
    index that produced the minimum (-1 for slots never written to).
    """
    # Allocate `arg` via `index` so it shares device and dtype; -1 marks
    # untouched slots.
    arg = index.new_full(out.size(), -1)
    kernel = get_func('scatter_min', src)
    kernel(src, index, out, arg, dim)
    # The kernel mutated the `out` input in place.
    ctx.mark_dirty(out)
    ctx.save_for_backward(index, arg)
    ctx.dim = dim
    return out, arg
def forward(ctx, out, src, index, dim):
    """Scatter-max `src` into `out` along `dim`, recording argmax positions.

    Returns `(out, arg)` where `arg` holds, per output slot, the source
    index that produced the maximum (-1 for slots never written to).
    """
    # Allocate `arg` via `index.new_full` so it lives on the same device
    # with the same (long) dtype as `index` — `torch.zeros(...)` would
    # silently allocate on the CPU and break CUDA inputs.
    arg = index.new_full(out.size(), -1)
    func = get_func('scatter_max', src)
    func(src, index, out, arg, dim)
    # `out` is an input tensor mutated in place by the kernel; autograd
    # must be told, matching the sibling scatter_min forward.
    ctx.mark_dirty(out)
    ctx.dim = dim
    ctx.save_for_backward(index, arg)
    return out, arg