    # Constructor of a Generalized R-CNN-style detector. This is a method of
    # an nn.Module subclass; the enclosing class definition is omitted in
    # this excerpt.
    def __init__(self, is_train=True):
        super().__init__()

        # Normalization: at inference time, fold per-channel mean/std
        # normalization into a fixed affine layer, since
        # (x - mean) / std == x * (1 / std) + (-mean / std).
        if not is_train:
            self.Norm = ops.AffineChannel2d(3)
            self.Norm.weight.data = torch.from_numpy(
                1. / np.array(cfg.PIXEL_STDS)).float()
            self.Norm.bias.data = torch.from_numpy(
                -1. * np.array(cfg.PIXEL_MEANS) /
                np.array(cfg.PIXEL_STDS)).float()

        # Backbone
        conv_body = registry.BACKBONES[cfg.BACKBONE.CONV_BODY]
        self.Conv_Body = conv_body()
        self.dim_in = self.Conv_Body.dim_out
        self.spatial_scale = self.Conv_Body.spatial_scale

        # Feature Pyramid Network
        if cfg.MODEL.FPN_ON:
            fpn_body = registry.FPN_BODY[cfg.FPN.BODY]
            self.Conv_Body_FPN = fpn_body(self.dim_in, self.spatial_scale)
            self.dim_in = self.Conv_Body_FPN.dim_out
            self.spatial_scale = self.Conv_Body_FPN.spatial_scale
        else:
            # Without FPN, keep only the deepest backbone stage.
            self.dim_in = self.dim_in[-1:]
            self.spatial_scale = self.spatial_scale[-1:]

        # Region Proposal Network
        if cfg.MODEL.RPN_ON:
            self.RPN = build_rpn(self.dim_in)

        # RoI Head
        if cfg.MODEL.FASTER_ON:
            if cfg.MODEL.CASCADE_ON:
                self.Cascade_RCNN = CascadeRCNN(self.dim_in,
                                                self.spatial_scale)
            else:
                self.Fast_RCNN = FastRCNN(self.dim_in, self.spatial_scale)

        if cfg.MODEL.MASK_ON:
            self.Mask_RCNN = MaskRCNN(self.dim_in, self.spatial_scale)

        if cfg.MODEL.KEYPOINT_ON:
            self.Keypoint_RCNN = KeypointRCNN(self.dim_in, self.spatial_scale)

        if cfg.MODEL.PARSING_ON:
            self.Parsing_RCNN = ParsingRCNN(self.dim_in, self.spatial_scale)

        if cfg.MODEL.UV_ON:
            self.UV_RCNN = UVRCNN(self.dim_in, self.spatial_scale)

        self._init_modules()
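
# For reference, a minimal sketch of what an AffineChannel2d op computes; this
# is a hypothetical stand-in for ops.AffineChannel2d (whose definition is not
# shown in this excerpt): a per-channel scale-and-shift, y = x * w + b,
# broadcast over N, H, W.
class AffineChannel2dSketch(nn.Module):
    def __init__(self, num_features):
        super().__init__()
        # One scale and one shift per channel.
        self.weight = nn.Parameter(torch.ones(num_features))
        self.bias = nn.Parameter(torch.zeros(num_features))

    def forward(self, x):
        # x: (N, C, H, W); weight/bias: (C,), reshaped for broadcasting.
        return x * self.weight.view(1, -1, 1, 1) + self.bias.view(1, -1, 1, 1)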

# Example 2
def convert_bn2affine_model(module,
                            process_group=None,
                            channel_last=False,
                            merge=True):
    """
    This function is learned from the NVIDIA/apex.
    It can be seen here:
    https://github.com/NVIDIA/apex/blob/master/apex/parallel/sync_batchnorm.py

    Recursively traverse module and its children to replace all instances of
    ``torch.nn.modules.batchnorm._BatchNorm`` with `ops.AffineChannel2d`.
    """
    mod = module
    if isinstance(module,
                  torch.nn.modules.batchnorm._BatchNorm) and not isinstance(
                      module, ops.MixtureBatchNorm2d):
        # Start from BN's learned affine parameters (gamma, beta).
        mod = ops.AffineChannel2d(module.num_features)
        mod.weight.data = module.weight.data.clone().detach()
        mod.bias.data = module.bias.data.clone().detach()
        freeze_params(mod)  # freeze affine params
        if merge:
            # Fold the running statistics into the affine parameters:
            # BN(x) = gamma * (x - mu) / sqrt(var + eps) + beta
            #       = new_gamma * x + new_beta
            gamma = module.weight.data.clone().detach().cpu().numpy()
            beta = module.bias.data.clone().detach().cpu().numpy()
            mu = module.running_mean.data.clone().detach().cpu().numpy()
            var = module.running_var.data.clone().detach().cpu().numpy()
            eps = module.eps

            new_gamma = gamma / np.sqrt(var + eps)  # new bn.weight
            new_beta = beta - gamma * mu / np.sqrt(var + eps)  # new bn.bias

            mod.weight.data = torch.from_numpy(new_gamma)
            mod.bias.data = torch.from_numpy(new_beta)
    # Recurse into the children, attaching converted subtrees to the
    # (possibly replaced) parent module.
    for name, child in module.named_children():
        mod.add_module(
            name,
            convert_bn2affine_model(child,
                                    process_group=process_group,
                                    channel_last=channel_last,
                                    merge=merge))
    del module
    return mod
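
# Usage sketch for the converter above (an illustrative assumption, not part
# of the original code): with merge=True, eval-mode outputs should be
# preserved, since eval-mode BN is exactly a fixed per-channel affine map.
if __name__ == "__main__":
    import torchvision

    model = torchvision.models.resnet18(pretrained=True).eval()
    x = torch.randn(1, 3, 224, 224)
    with torch.no_grad():
        ref = model(x)
        model = convert_bn2affine_model(model, merge=True)
        out = model(x)
    print(torch.allclose(ref, out, atol=1e-5))  # expected: True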

# Example 3
def make_norm(c, norm='bn', eps=1e-5, an_k=10):
    """Build a normalization layer for ``c`` channels, selected by name."""
    if norm == 'bn':
        return nn.BatchNorm2d(c, eps=eps)
    elif norm == 'affine':
        # Frozen per-channel scale/shift (e.g. BN folded for inference).
        return ops.AffineChannel2d(c)
    elif norm == 'gn':
        # Use 32 groups when the channel count allows it.
        group = 32 if c >= 32 else c
        assert c % group == 0
        return nn.GroupNorm(group, c, eps=eps)
    elif norm == 'an_bn':
        # Attentive normalization (BN variant) with an_k mixture components.
        return ops.MixtureBatchNorm2d(c, an_k)
    elif norm == 'an_gn':
        group = 32 if c >= 32 else c
        assert c % group == 0
        return ops.MixtureGroupNorm(c, group, an_k)
    elif norm == 'none':
        return None
    else:
        # Fall back to plain BN for unrecognized norm names.
        return nn.BatchNorm2d(c, eps=eps)
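
# Usage sketch: make_norm lets the normalization type be a config choice
# inside conv blocks. conv_norm_relu below is a hypothetical helper, not part
# of the original code.
def conv_norm_relu(c_in, c_out, norm='gn'):
    layers = [nn.Conv2d(c_in, c_out, kernel_size=3, padding=1, bias=False)]
    norm_layer = make_norm(c_out, norm=norm)
    if norm_layer is not None:  # norm='none' returns None
        layers.append(norm_layer)
    layers.append(nn.ReLU(inplace=True))
    return nn.Sequential(*layers)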