Example #1
    def __init__(self,
                 down_conv_nn,
                 use_xyz,
                 bn=True,
                 activation=nn.ReLU(),
                 *args,
                 **kwargs):
        super(RSConvMapper, self).__init__()

        self._down_conv_nn = down_conv_nn
        self._use_xyz = use_xyz

        self.nn = nn.ModuleDict()

        if len(self._down_conv_nn) == 2:  # First layer
            self._first_layer = True
            f_in, f_intermediate, f_out = self._down_conv_nn[0]
            self.nn["features_nn"] = pt_utils.SharedMLP(self._down_conv_nn[1],
                                                        bn=bn)

        else:
            self._first_layer = False
            f_in, f_intermediate, f_out = self._down_conv_nn

        self.nn["mlp_msg"] = pt_utils.SharedMLP([f_in, f_intermediate, f_out],
                                                bn=bn)

        self.nn["norm"] = Seq(*[nn.BatchNorm2d(f_out), activation])

        self._f_out = f_out
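
Throughout these examples, pt_utils.SharedMLP([c0, c1, ...]) behaves as a stack of 1x1 convolutions shared across all points; the module trees printed in Example #12 below show the resulting Conv2d / BatchNorm2d / ReLU layout. A minimal torch-only sketch of that layout, for reference only (an approximation, not the pt_utils implementation):

import torch
import torch.nn as nn

def shared_mlp(channels, bn=True):
    # One 1x1 Conv2d (+ optional BatchNorm2d) + ReLU per consecutive channel pair,
    # applied point-wise over a (batch, channels, npoint, nsample) tensor.
    layers = []
    for c_in, c_out in zip(channels[:-1], channels[1:]):
        layers.append(nn.Conv2d(c_in, c_out, kernel_size=1, bias=not bn))
        if bn:
            layers.append(nn.BatchNorm2d(c_out))
        layers.append(nn.ReLU(inplace=True))
    return nn.Sequential(*layers)

grouped = torch.randn(2, 10, 128, 16)            # (B, C_in, npoint, nsample)
print(shared_mlp([10, 32, 64])(grouped).shape)   # torch.Size([2, 64, 128, 16])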
Example #2
    def __init__(self,
                 down_conv_nn,
                 channel_raising_nn,
                 use_xyz,
                 bn=True,
                 *args,
                 **kwargs):
        super(RSConvMapper, self).__init__()

        self._down_conv_nn = down_conv_nn
        self._channel_raising_nn = channel_raising_nn
        self._use_xyz = use_xyz

        if len(self._down_conv_nn) == 2:  # First layer
            self._first_layer = True
            f_in, f_intermediate, f_out = self._down_conv_nn[0]
            self.features_nn = pt_utils.SharedMLP(self._down_conv_nn[1], bn=bn)

        else:
            self._first_layer = False
            f_in, f_intermediate, f_out = self._down_conv_nn

        self.mlp_msg = pt_utils.SharedMLP([f_in, f_intermediate, f_out], bn=bn)

        self.mlp_out = nn.Conv1d(f_out,
                                 channel_raising_nn[-1],
                                 kernel_size=1)
Example #3
    def __init__(self,
                 npoint=None,
                 radii=None,
                 nsample=None,
                 down_conv_nn=None,
                 bn=True,
                 activation="LeakyReLU",
                 use_xyz=True,
                 **kwargs):
        assert len(radii) == len(nsample) == len(down_conv_nn)
        super(PointNetMSGDown,
              self).__init__(DenseFPSSampler(num_to_sample=npoint),
                             DenseRadiusNeighbourFinder(radii, nsample),
                             **kwargs)
        self.use_xyz = use_xyz
        self.npoint = npoint
        self.mlps = nn.ModuleList()
        for i in range(len(radii)):
            mlp_spec = down_conv_nn[i]
            if self.use_xyz:
                mlp_spec[0] += 3
            self.mlps.append(
                pt_utils.SharedMLP(mlp_spec,
                                   bn=bn,
                                   activation=get_activation(activation)))
Example #4
    def __init__(self,
                 *,
                 npoint: int,
                 radii: List[float],
                 nsamples: List[int],
                 mlps: List[List[int]],
                 bn: bool = True,
                 use_xyz: bool = True):
        super().__init__()

        assert len(radii) == len(nsamples) == len(mlps)

        self.npoint = npoint
        self.groupers = nn.ModuleList()
        self.mlps = nn.ModuleList()
        for i in range(len(radii)):
            radius = radii[i]
            nsample = nsamples[i]
            self.groupers.append(
                pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz)
                if npoint is not None else pointnet2_utils.GroupAll(use_xyz))
            mlp_spec = mlps[i]
            if use_xyz:
                mlp_spec[0] += 3

            self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
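
One detail shared by Examples #3 to #6: when use_xyz is true, the line mlp_spec[0] += 3 mutates the spec list supplied by the caller, so reusing the same mlps configuration to build a second module would widen the first layer twice. A small sketch of a defensive copy (a suggestion, not part of the original code):

mlps = [[1, 64, 128], [4, 96, 128]]
use_xyz = True

for spec in mlps:
    mlp_spec = list(spec)      # copy so the caller's configuration stays untouched
    if use_xyz:
        mlp_spec[0] += 3       # account for the concatenated xyz coordinates
    print(mlp_spec)            # [4, 64, 128] then [7, 96, 128]

print(mlps)                    # unchanged: [[1, 64, 128], [4, 96, 128]]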
Example #5
    def __init__(self,
                 radii,
                 nsamples,
                 mlps,
                 bn=True,
                 use_xyz=True,
                 vote=False):
        # type: (PointnetSAModuleMSG, List[float], List[int], List[List[int]], bool, bool, bool) -> None
        super(PointnetSAModuleMSG, self).__init__()
        # inherits the operations defined in _PointnetSAModuleBase

        assert len(radii) == len(nsamples) == len(mlps)

        self.groupers = nn.ModuleList()
        self.mlps = nn.ModuleList()
        for i in range(len(radii)):
            radius = radii[i]  # multi-scale: the ball query is run with several radii
            nsample = nsamples[i]
            if vote is False:
                self.groupers.append(
                    pointnet2_utils.QueryAndGroup(radius,
                                                  nsample,
                                                  use_xyz=use_xyz))
                # self.add_module(str(len(self)), module)
            else:  # with Hough voting, the grouper also returns scores
                self.groupers.append(
                    pointnet2_utils.QueryAndGroup_score(radius,
                                                        nsample,
                                                        use_xyz=use_xyz))

            mlp_spec = mlps[i]
            if use_xyz:  # using xyz coordinates adds 3 channels to the MLP input
                mlp_spec[0] += 3

            self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
Example #6
    def __init__(self,
                 radii,
                 nsamples,
                 mlps,
                 bn=True,
                 use_xyz=True,
                 vote=False):
        # type: (PointnetSAModuleMSG, List[float], List[int], List[List[int]], bool, bool, bool) -> None
        super(PointnetSAModuleMSG, self).__init__()

        assert len(radii) == len(nsamples) == len(mlps)

        self.groupers = nn.ModuleList()
        self.mlps = nn.ModuleList()
        for i in range(len(radii)):
            radius = radii[i]
            nsample = nsamples[i]
            if vote is False:
                self.groupers.append(
                    pointnet2_utils.QueryAndGroup(radius,
                                                  nsample,
                                                  use_xyz=use_xyz))
            else:
                self.groupers.append(
                    pointnet2_utils.QueryAndGroup_score(radius,
                                                        nsample,
                                                        use_xyz=use_xyz))

            mlp_spec = mlps[i]
            if use_xyz:
                mlp_spec[0] += 3

            self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
Example #7
    def __init__(self,
                 up_conv_nn,
                 bn=True,
                 bias=False,
                 activation="LeakyReLU",
                 **kwargs):
        super(DenseFPModule, self).__init__(None, **kwargs)

        self.nn = pt_utils.SharedMLP(up_conv_nn,
                                     bn=bn,
                                     activation=get_activation(activation))
Example #8
    def __init__(self,
                 nn,
                 aggr="max",
                 bn=True,
                 activation="LeakyReLU",
                 **kwargs):
        super(GlobalDenseBaseModule, self).__init__()
        self.nn = pt_utils.SharedMLP(nn,
                                     bn=bn,
                                     activation=get_activation(activation))
        if aggr.lower() not in ["mean", "max"]:
            raise Exception(
                "The aggregation provided is unrecognized {}".format(aggr))
        self._aggr = aggr.lower()
Example #9
    def __init__(self, npoint, radii, nsamples, mlps, bn=True):
        # type: (PointnetSAModuleMSGRRI, int, List[float], List[int], List[List[int]], bool) -> None
        super(PointnetSAModuleMSGRRI, self).__init__()

        assert len(radii) == len(nsamples) == len(mlps)

        self.npoint = npoint
        self.groupers = nn.ModuleList()
        self.mlps = nn.ModuleList()
        for i in range(len(radii)):
            radius = radii[i]
            nsample = nsamples[i]
            self.groupers.append(
                pointnet2_utils.QueryAndGroupRRI(radius, nsample))
            mlp_spec = mlps[i]

            self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
Example #10
    def __init__(self, input_channels=3, use_xyz=True, objective=False):
        super(Pointnet_Tracking, self).__init__()

        self.backbone_net = Pointnet_Backbone(input_channels, use_xyz)

        self.cosine = nn.CosineSimilarity(dim=1)

        self.mlp = pt_utils.SharedMLP([4 + 256, 256, 256, 256], bn=True)

        self.FC_layer_cla = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(1, activation=None))
        self.fea_layer = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, activation=None))
        self.vote_layer = (pt_utils.Seq(3 + 256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(3 + 256, activation=None))
        self.vote_aggregation = PointnetSAModule(radius=0.3,
                                                 nsample=16,
                                                 mlp=[1 + 256, 256, 256, 256],
                                                 use_xyz=use_xyz)
        self.num_proposal = 64
        self.FC_proposal = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(3 + 1 + 1, activation=None))
Example #11
    def __init__(self, mlp, bn=True):
        # type: (PointnetFPModule, List[int], bool) -> None
        super(PointnetFPModule, self).__init__()
        self.mlp = pt_utils.SharedMLP(mlp, bn=bn)
Example #12
    def __init__(self, input_channels=3, use_xyz=True, objective=False):
        super(Pointnet_Tracking, self).__init__()

        self.backbone_net = Pointnet_Backbone(input_channels, use_xyz)

        self.cosine = nn.CosineSimilarity(dim=1)

        self.mlp = pt_utils.SharedMLP([4 + 256, 256, 256, 256], bn=True)
        # SharedMLP(
        #   (layer0): Conv2d(
        #     (conv): Conv2d(260, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #     (normlayer): BatchNorm2d(
        #       (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (layer1): Conv2d(
        #     (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #     (normlayer): BatchNorm2d(
        #       (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (layer2): Conv2d(
        #     (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #     (normlayer): BatchNorm2d(
        #       (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        # )
        self.FC_layer_cla = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(1, activation=None))
        # Seq(
        #   (0): Conv1d(
        #     (conv): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        #     (normlayer): BatchNorm1d(
        #       (bn): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (1): Conv1d(
        #     (conv): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        #     (normlayer): BatchNorm1d(
        #       (bn): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (2): Conv1d(
        #     (conv): Conv1d(256, 1, kernel_size=(1,), stride=(1,))
        #   )
        # )
        self.fea_layer = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, activation=None))
        # Seq(
        # (0): Conv1d(
        #     (conv): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        #     (normlayer): BatchNorm1d(
        #     (bn): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        # )
        # (1): Conv1d(
        #     (conv): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
        # )
        # )
        self.vote_layer = (pt_utils.Seq(3 + 256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(3 + 256, activation=None))
        # Seq(
        #   (0): Conv1d(
        #     (conv): Conv1d(259, 256, kernel_size=(1,), stride=(1,), bias=False)
        #     (normlayer): BatchNorm1d(
        #       (bn): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (1): Conv1d(
        #     (conv): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        #     (normlayer): BatchNorm1d(
        #       (bn): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #     )
        #     (activation): ReLU(inplace=True)
        #   )
        #   (2): Conv1d(
        #     (conv): Conv1d(256, 259, kernel_size=(1,), stride=(1,))
        #   )
        # )
        self.vote_aggregation = PointnetSAModule(
            radius=0.3,
            nsample=16,
            mlp=[1 + 256, 256, 256, 256],
            use_xyz=use_xyz)  # aggregate the votes into candidate centre proposals
        # PointnetSAModule(
        #   (groupers): ModuleList(
        #     (0): QueryAndGroup()
        #   )
        #   (mlps): ModuleList(
        #     (0): SharedMLP(
        #       (layer0): Conv2d(
        #         (conv): Conv2d(260, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #         (normlayer): BatchNorm2d(
        #           (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #         )
        #         (activation): ReLU(inplace=True)
        #       )
        #       (layer1): Conv2d(
        #         (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #         (normlayer): BatchNorm2d(
        #           (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #         )
        #         (activation): ReLU(inplace=True)
        #       )
        #       (layer2): Conv2d(
        #         (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        #         (normlayer): BatchNorm2d(
        #           (bn): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        #         )
        #         (activation): ReLU(inplace=True)
        #       )
        #     )
        #   )
        # )
        self.num_proposal = 64  # used as the npoint argument in _PointnetSAModuleBase.forward
        self.FC_proposal = (pt_utils.Seq(256).conv1d(256, bn=True).conv1d(
            256, bn=True).conv1d(3 + 1 + 1, activation=None))
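
The pt_utils.Seq(...).conv1d(...) chains above are a fluent builder for the Conv1d / BatchNorm1d / ReLU stacks shown in the printed trees. For reference, a torch-only equivalent of FC_layer_cla that matches the printed structure (a sketch of the resulting module, not how pt_utils builds it internally):

import torch
import torch.nn as nn

fc_layer_cla = nn.Sequential(
    nn.Conv1d(256, 256, kernel_size=1, bias=False),
    nn.BatchNorm1d(256),
    nn.ReLU(inplace=True),
    nn.Conv1d(256, 256, kernel_size=1, bias=False),
    nn.BatchNorm1d(256),
    nn.ReLU(inplace=True),
    nn.Conv1d(256, 1, kernel_size=1),  # final layer: no bn, no activation
)

scores = fc_layer_cla(torch.randn(2, 256, 64))  # (batch, channels, num_points)
print(scores.shape)                             # torch.Size([2, 1, 64])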
Example #13
    def __init__(self, nn, **kwargs):
        super(GlobalDenseBaseModule, self).__init__()
        self.nn = pt_utils.SharedMLP(nn)
Example #14
    def __init__(self, up_conv_nn, bn=True, **kwargs):
        super(DenseFPModule, self).__init__(None, **kwargs)

        self.nn = pt_utils.SharedMLP(up_conv_nn, bn=bn)
Example #15
    def __init__(self, *, mlp: List[int], bn: bool = True):
        super().__init__()
        self.mlp = pt_utils.SharedMLP(mlp, bn=bn)