def mapped_transposed_conv_test(conv_layer, weight, input, oh, ow, sample_map, interp_weights=None, cuda=False):
    """Run a timed forward/backward pass and a numerical gradient check on a
    mapped transposed convolution layer.

    Args:
        conv_layer: the mapped transposed convolution module under test
        weight: weight tensor to install into ``conv_layer.weight``
        input: input tensor; ``requires_grad`` is forced on for gradcheck
        oh, ow: output height and width passed through to the layer
        sample_map: sampling map tensor for the mapped convolution
        interp_weights: optional interpolation-weight tensor (may be None)
        cuda: when True, move tensors to the GPU before running

    Returns:
        (output, forward_time, backward_time, gradcheck_result)
    """
    # gradcheck needs the input to participate in autograd
    input.requires_grad = True
    if cuda:
        input = input.cuda()
        sample_map = sample_map.cuda()
        # Fix: interp_weights was previously left on the CPU here (unlike in
        # mapped_resample_test), causing a device mismatch when it is provided.
        if interp_weights is not None:
            interp_weights = interp_weights.cuda()

    # Install the layer parameters; bias is derived from the weight's
    # output-channel dimension via the project's `params` helper.
    bias = params.bias(weight.shape[1])
    conv_layer.weight.data = weight if not cuda else weight.cuda()
    conv_layer.bias.data = bias if not cuda else bias.cuda()

    # Timed forward pass
    output, forward_time = time_cuda(
        conv_layer, [input, oh, ow, sample_map, interp_weights])

    # Timed backward pass
    _, backward_time = time_cuda(output.backward, [params.gradients(output)])

    # Numerical gradient check of the full layer
    gradcheck_result = gradcheck(
        conv_layer, (input, oh, ow, sample_map, interp_weights))

    # Return the report
    return output, forward_time, backward_time, gradcheck_result
def mapped_resample_test(resample_layer, input, sample_map, interp_weights=None, cuda=False):
    """Time a forward and backward pass through a mapped resampling layer and
    verify its gradients numerically.

    Args:
        resample_layer: the mapped resampling module under test
        input: input tensor; ``requires_grad`` is forced on for gradcheck
        sample_map: sampling map tensor for the resampling operation
        interp_weights: optional interpolation-weight tensor (may be None)
        cuda: when True, move tensors to the GPU before running

    Returns:
        (output, forward_time, backward_time, gradcheck_result)
    """
    # The gradient check requires an input tracked by autograd.
    input.requires_grad = True

    # Optionally migrate everything to the GPU before timing.
    if cuda:
        input = input.cuda()
        sample_map = sample_map.cuda()
        interp_weights = None if interp_weights is None else interp_weights.cuda()

    # Timed forward pass through the layer.
    layer_args = [input, sample_map, interp_weights]
    output, forward_time = time_cuda(resample_layer, layer_args)

    # Timed backward pass using the project's gradient generator.
    _, backward_time = time_cuda(output.backward, [params.gradients(output)])

    # Numerical gradient check over the same argument tuple.
    gradcheck_result = gradcheck(resample_layer, tuple(layer_args))

    # Bundle the measurements into a single report.
    return output, forward_time, backward_time, gradcheck_result