Example #1
import torch
import torch.nn as nn
from model.FunctionLayers import PyramidROIAlign
from model import Utils
from torchvision.models.resnet import ResNet, Bottleneck


grads = Utils.GradSaver()


# ResNet
class ResBlock(nn.Module):
    """
    Construct Residual block used in the ResNet.
    """
    def __init__(self, in_channels, filters, stride=1, res_conv=False, train_bn=True):
        """
        in_channels: the channel number of input tensor
        filters: [n_filter1, n_filter2, n_filter3], the filter number of the three conv blocks
        stride: the stride of the first conv1x1 (including shortcut)
        res_conv: bool, whether conv1x1 is used in the shortcut
        """
        super().__init__()
        self.res_conv = res_conv

        self.conv1 = nn.Sequential(nn.Conv2d(in_channels, filters[0], kernel_size=1, stride=stride),
                                   nn.BatchNorm2d(filters[0], track_running_stats=train_bn),
                                   nn.ReLU())
        self.conv2 = nn.Sequential(nn.Conv2d(filters[0], filters[1], kernel_size=3, padding=1),
                                   nn.BatchNorm2d(filters[1], track_running_stats=train_bn),
                                   nn.ReLU())
        # The snippet is truncated here; the third conv block, the optional
        # conv1x1 shortcut, and forward() below are reconstructed from the docstring.
        self.conv3 = nn.Sequential(nn.Conv2d(filters[1], filters[2], kernel_size=1),
                                   nn.BatchNorm2d(filters[2], track_running_stats=train_bn))
        if res_conv:
            self.shortcut = nn.Sequential(nn.Conv2d(in_channels, filters[2], kernel_size=1, stride=stride),
                                          nn.BatchNorm2d(filters[2], track_running_stats=train_bn))
        self.relu = nn.ReLU()

    def forward(self, x):
        out = self.conv3(self.conv2(self.conv1(x)))
        shortcut = self.shortcut(x) if self.res_conv else x
        return self.relu(out + shortcut)
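
A minimal usage sketch (the shapes and filter counts here are assumptions for illustration, not from the original repo): build one block with a conv1x1 shortcut and pass a dummy batch through it to check the shape change.

# Hypothetical usage example; filter sizes chosen only to show the shape change.
block = ResBlock(in_channels=64, filters=[64, 64, 256], stride=1, res_conv=True)
dummy = torch.randn(2, 64, 56, 56)   # (batch, channels, height, width)
out = block(dummy)
print(out.shape)                     # torch.Size([2, 256, 56, 56])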
Example #2
    # 1. Visualize an ops.roi_align crop with matplotlib.
    # rua = ops.roi_align(images, box, output_size=[60,40])
    # rua = rua.squeeze(dim=1).cpu()
    # plt.imshow(rua[0, 0])
    # plt.show()
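    # A self-contained version of the demo above ('images' and 'box' are not
    # defined in this snippet, so the values below are assumptions):
    # from torchvision import ops
    # import matplotlib.pyplot as plt
    # images = torch.rand(1, 3, 240, 160)                      # dummy batch (N, C, H, W)
    # box = [torch.tensor([[0., 0., 160., 240.]])]             # one box per image, (x1, y1, x2, y2)
    # crop = ops.roi_align(images, box, output_size=[60, 40])  # -> (1, 3, 60, 40)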

    # 2. Check that torch.argmax gives the same result on CPU and GPU.
    # numpy_data = np.zeros([40000, 1], dtype=float)
    # cpu_data = torch.tensor(numpy_data)
    # gpu_data = cpu_data.cuda()
    #
    # print('gpu: ', torch.argmax(gpu_data, dim=1))
    # print('cpu: ', torch.argmax(cpu_data, dim=1))

    # 3. Save intermediate gradients with backward hooks and print them.
    grad_saver = Utils.GradSaver()
    size = 10

    # Note: x is an intermediate tensor (the result of 0.1 * ones, after .cuda()),
    # so the hook below fires on it during backward even though it is not a leaf.
    x = 0.1 * torch.ones([size, size], dtype=torch.float32,
                         requires_grad=True).cuda()
    y = torch.ones([size, size], dtype=torch.float32,
                   requires_grad=False).cuda()
    z = torch.nn.functional.binary_cross_entropy(x, y)

    # save_grad('x_grad') returns a hook (a closure) that stores the incoming
    # gradient under the key 'x_grad' once backward() reaches this tensor.
    x.register_hook(grad_saver.save_grad('x_grad'))
    z.register_hook(grad_saver.save_grad('z_grad'))
    z.backward()

    grad_saver.print_grad('x_grad')
    grad_saver.print_grad('z_grad')
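
model.Utils.GradSaver itself is not shown in these snippets. Below is a minimal sketch of what it plausibly looks like, inferred only from its usage above (save_grad(name) returns a hook, print_grad(name) prints the stored tensor); this is an assumption, not the repo's actual code.

# Hypothetical reconstruction of model.Utils.GradSaver, based only on how it is used above.
class GradSaver:
    def __init__(self):
        self.grads = {}

    def save_grad(self, name):
        # Return a closure; autograd calls it with the gradient during backward().
        def hook(grad):
            self.grads[name] = grad
        return hook

    def print_grad(self, name):
        print(name, self.grads[name])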