def forward(self, input1, input2):
    """Correlate ``input1`` with ``input2`` via the compiled CUDA/C++ extension.

    Hyper-parameters (kernel size, patch size, padding, patch dilation,
    stride) are read from attributes already stored on ``self``; each is an
    ``(h, w)`` pair.  Both inputs are stashed for the backward pass.

    Returns the raw tensor produced by ``correlation.forward``.
    """
    # Keep the originals around for gradient computation.
    self.save_for_backward(input1, input2)

    # Unpack every (height, width) hyper-parameter pair.
    kernel_h, kernel_w = self.kernel_size
    patch_h, patch_w = self.patch_size
    pad_h, pad_w = self.padding
    dil_patch_h, dil_patch_w = self.dilation_patch
    stride_h, stride_w = self.stride

    # Delegate the heavy lifting to the compiled extension.
    return correlation.forward(
        input1, input2,
        kernel_h, kernel_w,
        patch_h, patch_w,
        pad_h, pad_w,
        dil_patch_h, dil_patch_w,
        stride_h, stride_w,
    )
def forward(self, input1, input2):
    """Half-precision correlation forward pass.

    The compiled ``correlation`` kernel is only invoked with
    ``torch.cuda.FloatTensor`` inputs here: any input that is not already a
    CUDA float tensor is up-cast before the call, and the result is down-cast
    to ``torch.cuda.HalfTensor`` before being returned.

    NOTE(review): the *original* (possibly half-precision) tensors are saved
    for backward, not the float32 copies — confirm the backward pass expects
    that.
    """
    self.save_for_backward(input1, input2)

    kH, kW = self.kernel_size
    patchH, patchW = self.patch_size
    padH, padW = self.padding
    dilation_patchH, dilation_patchW = self.dilation_patch
    dH, dW = self.stride

    # Bug fix: the original guard inspected only input1's type and then cast
    # both tensors, so a half input2 paired with a float input1 reached the
    # float32-only kernel un-cast.  Check each input independently.
    if input1.type() != 'torch.cuda.FloatTensor':
        input1 = input1.type('torch.cuda.FloatTensor')
    if input2.type() != 'torch.cuda.FloatTensor':
        input2 = input2.type('torch.cuda.FloatTensor')

    output = correlation.forward(input1, input2,
                                 kH, kW, patchH, patchW,
                                 padH, padW,
                                 dilation_patchH, dilation_patchW,
                                 dH, dW)

    # Always hand callers a half tensor, matching this wrapper's contract.
    if output.type() != 'torch.cuda.HalfTensor':
        output = output.type('torch.cuda.HalfTensor')
    return output
def forward(ctx, input1, input2, kernel_size=1, patch_size=1, stride=1, padding=0, dilation_patch=1):
    """New-style autograd forward: correlate ``input1`` with ``input2``.

    Scalar hyper-parameters are normalized to ``(h, w)`` pairs with
    ``_pair`` and cached on ``ctx`` (alongside the saved inputs) so the
    backward pass can reuse them.  Returns the tensor produced by
    ``correlation.forward``.
    """
    ctx.save_for_backward(input1, input2)

    # Normalize every hyper-parameter to an (h, w) pair and remember it.
    ctx.kernel_size = _pair(kernel_size)
    ctx.patch_size = _pair(patch_size)
    ctx.padding = _pair(padding)
    ctx.dilation_patch = _pair(dilation_patch)
    ctx.stride = _pair(stride)

    kH, kW = ctx.kernel_size
    patchH, patchW = ctx.patch_size
    padH, padW = ctx.padding
    dilH, dilW = ctx.dilation_patch
    dH, dW = ctx.stride

    return correlation.forward(
        input1, input2,
        kH, kW,
        patchH, patchW,
        padH, padW,
        dilH, dilW,
        dH, dW,
    )