def __init__(self, stride, scale, ratios):
    """Set up the proposal layer: precompute the anchor set for one stride.

    Args:
        stride: feature-map stride of the backbone (stored for later use).
        scale: anchor scale(s) forwarded to ``generate_anchors``.
        ratios: anchor aspect ratio(s) forwarded to ``generate_anchors``.
    """
    super(proposal_layer, self).__init__()
    # Fix: the original wrapped ratios as numpy.array(numpy.array(ratios));
    # the inner call is redundant (np.array of an ndarray is a no-op copy).
    # NOTE(review): unlike the sibling layers this one does not call
    # .float() on the anchors -- preserved to avoid changing the dtype
    # callers currently receive.
    self.anchors = torch.from_numpy(
        generate_anchors(scales=numpy.array(scale),
                         ratios=numpy.array(ratios)))
    self.anchor_num = self.anchors.shape[0]
    self.stride = stride
    def __init__(self, feat_stride, scales, ratios):
        """Precompute the anchor templates used by the siamese proposal layer.

        Args:
            feat_stride: stride of the backbone feature map.
            scales: anchor scales forwarded to ``generate_anchors``.
            ratios: anchor aspect ratios forwarded to ``generate_anchors``.
        """
        super(_SiamProposalLayer, self).__init__()

        self._feat_stride = feat_stride
        # Build the base anchor array once, then hand it to torch as float.
        base_anchors = generate_anchors(scales=np.array(scales),
                                        ratios=np.array(ratios))
        self._anchors = torch.from_numpy(base_anchors).float()
        self._num_anchors = self._anchors.size(0)
# --- Example no. 3 (scrape-site separator; original vote count: 0) ---
 def __init__(self, feat_stride, scales, ratios, classes, n_classes):
     """Set up the aggregation RPN layer and its anchor templates.

     Args:
         feat_stride: stride of the backbone feature map.
         scales: anchor scales forwarded to ``generate_anchors``.
         ratios: anchor aspect ratios forwarded to ``generate_anchors``.
         classes: class labels handled by the layer.
         n_classes: number of classes.
     """
     super(_RPNAggregationLayer, self).__init__()
     self._feat_stride = feat_stride
     self.classes = classes
     self.n_classes = n_classes
     # Precompute the anchor templates as a float tensor.
     base_anchors = generate_anchors(scales=np.array(scales),
                                     ratios=np.array(ratios))
     self._anchors = torch.from_numpy(base_anchors).float()
     self._num_anchors = self._anchors.size(0)
 def __init__(self, stride, scale, ratios, allowed_border, name):
     """Set up the anchor-target layer: store config and precompute anchors.

     Args:
         stride: feature-map stride of the backbone.
         scale: anchor scale(s) forwarded to ``generate_anchors``.
         ratios: anchor aspect ratio(s) forwarded to ``generate_anchors``.
         allowed_border: how far anchors may extend past the image border.
         name: identifier for this layer instance.
     """
     super(anchor_target_layer, self).__init__()
     self.stride = stride
     self.scale = scale
     self.name = name
     # One-time anchor template construction, stored as a float tensor.
     base_anchors = generate_anchors(scales=numpy.array(scale),
                                     ratios=numpy.array(ratios))
     self.anchors = torch.from_numpy(base_anchors).float()
     self.anchor_num = self.anchors.shape[0]
     self.allowed_border = allowed_border
# --- Example no. 5 (scrape-site separator; original vote count: 0) ---
    def __init__(self, feat_stride, scales, ratios):
        """Prepare the siamese anchor-target layer: anchors plus border slack.

        Args:
            feat_stride: stride of the backbone feature map.
            scales: anchor scales forwarded to ``generate_anchors``.
            ratios: anchor aspect ratios forwarded to ``generate_anchors``.
        """
        super(_SiamAnchorTargetLayer, self).__init__()

        self._feat_stride = feat_stride
        self._scales = scales
        base_anchors = generate_anchors(scales=np.array(scales),
                                        ratios=np.array(ratios))
        self._anchors = torch.from_numpy(base_anchors).float()
        self._num_anchors = self._anchors.size(0)

        # allow boxes to sit over the edge by a small amount (default is 0)
        self._allowed_border = 0
    def __init__(self, feat_stride, scales, ratios, use_gpu=False):
        """Proposal layer setup: precompute anchors and record the NMS device.

        Args:
            feat_stride: backbone feature stride (e.g. [16]).
            scales: anchor scales forwarded to ``generate_anchors``.
            ratios: anchor aspect ratios forwarded to ``generate_anchors``.
            use_gpu: when truthy, NMS is flagged to run on the GPU.
        """
        super(_ProposalLayer, self).__init__()

        self._feat_stride = feat_stride   # [16]
        self._anchors = torch.from_numpy(
            generate_anchors(scales=np.array(scales),
                             ratios=np.array(ratios))).float()
        self._num_anchors = self._anchors.size(0)
        self.use_gpu = use_gpu
        # Fix: the original if/else merely copied the flag branch-by-branch;
        # bool() yields the exact True/False the conditional produced, even
        # for truthy non-bool inputs.
        self.nms_gpu = bool(use_gpu)
# --- Example no. 7 (scrape-site separator; original vote count: 0) ---
    def __init__(self,
                 classes,
                 class_agnostic,
                 feat_name='efficientnet-b2',
                 feat_list=('conv3', 'conv4', 'conv5', 'conv6', 'conv7'),
                 pretrained=True,
                 D_bifpn=3,
                 W_bifpn=88):
        """Build the EfficientDet model: BiFPN neck, RetinaNet-style head, anchors.

        Args:
            classes: class set forwarded to the base detector constructor.
            class_agnostic: whether box regression is class-agnostic.
            feat_name: backbone identifier (default EfficientNet-B2).
            feat_list: backbone feature levels fed into the BiFPN.
            pretrained: whether to load pretrained backbone weights.
            D_bifpn: number of stacked BiFPN layers.
            W_bifpn: BiFPN channel width.
        """
        super(EfficientDet, self).__init__(classes, class_agnostic, feat_name,
                                           feat_list, pretrained)

        # Neck: fuse the backbone feature pyramid into 5 levels of W_bifpn channels.
        self.ED_BIFPN = BIFPN(in_channels=self.FeatExt.get_list_features(),
                              out_channels=W_bifpn,
                              stack=D_bifpn,
                              num_outs=5)
        # Head: classification + box regression over the BiFPN outputs.
        self.ED_retinahead = retinaHeader(num_classes=self.n_classes,
                                          in_channels=W_bifpn,
                                          class_agnostic=self.class_agnostic)

        # Anchor configuration comes from the global config object.
        self._anchor_scales = cfg.RCNN_COMMON.ANCHOR_SCALES
        self._anchor_ratios = cfg.RCNN_COMMON.ANCHOR_RATIOS
        self._feat_stride = cfg.RCNN_COMMON.FEAT_STRIDE

        # Precompute one anchor template set per feature stride.
        self._anchors = []
        self._num_anchors = []
        for i in self._feat_stride:
            # NOTE(review): `i` is the stride VALUE from _feat_stride, not a
            # positional index. Indexing the per-level scale/ratio lists with
            # it only works if cfg keys those lists by stride value; if they
            # are positional lists this should be enumerate() -- confirm.
            if isinstance(self._anchor_scales[0], list):
                scales = self._anchor_scales[i]
            else:
                scales = self._anchor_scales
            if isinstance(self._anchor_ratios[0], list):
                ratios = self._anchor_ratios[i]
            else:
                ratios = self._anchor_ratios
            # base_size equals the stride, so anchor sizes grow with the level.
            anchor = torch.from_numpy(
                generate_anchors(base_size=i,
                                 scales=np.array(scales),
                                 ratios=np.array(ratios))).float()
            self._anchors.append(anchor)
            self._num_anchors.append(anchor.size(0))

        # Weight init: Kaiming for convs; BatchNorm weights to 1, biases to 0.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                weight_kaiming_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        # self.freeze_bn()

        self.iter_counter = 0