Example no. 1
    def __init__(self, in_channels, out_channels, radius, nsample, config):
        """A PosPool operator for local aggregation

        Args:
            in_channels: input channels.
            out_channels: output channels.
            radius: ball query radius
            nsample: neighborhood limit.
            config: config file
        """
        super(PosPool, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.radius = radius
        self.nsample = nsample
        self.position_embedding = config.pospool.position_embedding
        self.reduction = config.pospool.reduction
        self.output_conv = config.pospool.output_conv or (self.in_channels !=
                                                          self.out_channels)

        # Masked ball query + grouping; also returns the grouped relative
        # coordinates, normalized by the radius, for the position embedding.
        self.grouper = MaskedQueryAndGroup(radius,
                                           nsample,
                                           use_xyz=False,
                                           ret_grouped_xyz=True,
                                           normalize_xyz=True)
        # 1x1 output conv when a projection is requested or the channel counts
        # differ; otherwise only BatchNorm + LeakyReLU on the aggregated features.
        if self.output_conv:
            self.out_conv = nn.Sequential(
                nn.Conv1d(in_channels, out_channels, kernel_size=1,
                          bias=False),
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
        else:
            self.out_transform = nn.Sequential(
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
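For reference, a minimal construction sketch for the PosPool operator above. It assumes PosPool (and its MaskedQueryAndGroup dependency) can be imported from the project; the module path, the SimpleNamespace stand-in config, and the option values ('xyz', 'avg') are illustrative assumptions, not the project's actual defaults.

from types import SimpleNamespace

from local_aggregation_operators import PosPool  # placeholder module path, adjust to your project

# Stand-in config: only the fields read by PosPool.__init__ are provided.
config = SimpleNamespace(
    pospool=SimpleNamespace(
        position_embedding='xyz',  # assumed option name for the position encoding
        reduction='avg',           # assumed reduction over the grouped neighbors
        output_conv=False,         # a 1x1 output conv is still added when channels differ
    )
)

# 64 -> 128 channels forces self.output_conv to True even with output_conv=False above.
op = PosPool(in_channels=64, out_channels=128, radius=0.1, nsample=26, config=config)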
Example no. 2
    def __init__(self, in_channels, out_channels, radius, nsample, config):
        """A PseudoGrid operator for local aggregation

        Args:
            in_channels: input channels.
            out_channels: output channels.
            radius: ball query radius
            nsample: neighborhood limit.
            config: config file
        """
        super(PseudoGrid, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.radius = radius
        self.nsample = nsample
        self.KP_influence = config.pseudo_grid.KP_influence
        self.num_kernel_points = config.pseudo_grid.num_kernel_points
        self.convolution_mode = config.pseudo_grid.convolution_mode
        self.output_conv = config.pseudo_grid.output_conv or (
            self.in_channels != self.out_channels)

        # create kernel points
        KP_extent = config.pseudo_grid.KP_extent
        fixed_kernel_points = config.pseudo_grid.fixed_kernel_points
        density_parameter = config.density_parameter
        # Kernel-point extent derived from the ball-query radius; the kernel
        # points are generated inside a sphere of 1.5x that extent.
        self.extent = 2 * KP_extent * radius / density_parameter
        K_radius = 1.5 * self.extent
        K_points_numpy = create_kernel_points(K_radius,
                                              self.num_kernel_points,
                                              num_kernels=1,
                                              dimension=3,
                                              fixed=fixed_kernel_points)

        K_points_numpy = K_points_numpy.reshape((self.num_kernel_points, 3))
        self.register_buffer(
            'K_points',
            torch.from_numpy(K_points_numpy).type(torch.float32))

        self.grouper = MaskedQueryAndGroup(radius,
                                           nsample,
                                           use_xyz=False,
                                           ret_grouped_xyz=True,
                                           normalize_xyz=False)
        self.kernel_weights = weight_variable(
            [self.num_kernel_points, in_channels])

        if self.output_conv:
            self.out_conv = nn.Sequential(
                nn.Conv1d(in_channels, out_channels, kernel_size=1,
                          bias=False),
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
        else:
            self.out_transform = nn.Sequential(
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
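A similar hypothetical construction for PseudoGrid. Note that the constructor reads config.density_parameter from the top level and everything else from config.pseudo_grid; the numeric values and option strings below are assumptions chosen only to exercise the code path, and the module path is a placeholder.

from types import SimpleNamespace

from local_aggregation_operators import PseudoGrid  # placeholder module path

# Stand-in config covering every field read by PseudoGrid.__init__.
config = SimpleNamespace(
    density_parameter=5.0,  # read from the top level, not from pseudo_grid
    pseudo_grid=SimpleNamespace(
        KP_influence='linear',         # assumed influence mode for the kernel points
        num_kernel_points=15,          # assumed kernel size
        convolution_mode='sum',        # assumed aggregation mode
        output_conv=False,
        KP_extent=1.0,                 # assumed relative kernel-point extent
        fixed_kernel_points='center',  # assumed option for create_kernel_points
    ),
)

# in_channels == out_channels and output_conv=False, so out_transform (BN + LeakyReLU) is used.
op = PseudoGrid(in_channels=64, out_channels=64, radius=0.1, nsample=26, config=config)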
Example no. 3
    def __init__(self, in_channels, out_channels, radius, nsample, config):
        """A PointWiseMLP operator for local aggregation

        Args:
            in_channels: input channels.
            out_channels: output channels.
            radius: ball query radius
            nsample: neighborhood limit.
            config: config file
        """
        super(PointWiseMLP, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.nsample = nsample
        self.feature_type = config.pointwisemlp.feature_type
        self.feature_to_channels = {
            'dp_fj': 3 + in_channels,
            'fi_df': 2 * in_channels,
            'dp_fi_df': 3 + 2 * in_channels
        }
        self.feature_input_channels = self.feature_to_channels[
            self.feature_type]
        self.num_mlps = config.pointwisemlp.num_mlps
        self.reduction = config.pointwisemlp.reduction

        self.grouper = MaskedQueryAndGroup(radius,
                                           nsample,
                                           use_xyz=False,
                                           ret_grouped_xyz=True,
                                           normalize_xyz=True)

        # Stack of num_mlps (1x1 conv + BN + LeakyReLU) layers applied to the
        # grouped per-neighbor features; intermediate layers use a reduced width.
        self.mlps = nn.Sequential()
        if self.num_mlps == 1:
            self.mlps.add_module(
                'conv0',
                nn.Sequential(
                    nn.Conv2d(self.feature_input_channels,
                              self.out_channels,
                              kernel_size=1,
                              bias=False),
                    nn.BatchNorm2d(self.out_channels, eps=1e-3, momentum=0.01),
                    nn.LeakyReLU(inplace=True)))
        else:
            mfdim = max(self.in_channels // 2, 9)
            self.mlps.add_module(
                'conv0',
                nn.Sequential(
                    nn.Conv2d(self.feature_input_channels,
                              mfdim,
                              kernel_size=1,
                              bias=False),
                    nn.BatchNorm2d(mfdim, eps=1e-3, momentum=0.01),
                    nn.LeakyReLU(inplace=True)))
            for i in range(self.num_mlps - 2):
                self.mlps.add_module(
                    f'conv{i + 1}',
                    nn.Sequential(
                        nn.Conv2d(mfdim, mfdim, kernel_size=1, bias=False),
                        nn.BatchNorm2d(mfdim, eps=1e-3, momentum=0.01),
                        nn.LeakyReLU(inplace=True)))
            self.mlps.add_module(
                f'conv{self.num_mlps - 1}',
                nn.Sequential(
                    nn.Conv2d(mfdim,
                              self.out_channels,
                              kernel_size=1,
                              bias=False),
                    nn.BatchNorm2d(self.out_channels, eps=1e-3, momentum=0.01),
                    nn.LeakyReLU(inplace=True)))
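A hypothetical construction for PointWiseMLP. feature_type must be one of the keys of the channel table in __init__ ('dp_fj', 'fi_df', 'dp_fi_df'); with num_mlps=2 the stack is conv0 (input channels -> mfdim) followed by a final conv to out_channels. The option values and the module path are placeholders.

from types import SimpleNamespace

from local_aggregation_operators import PointWiseMLP  # placeholder module path

config = SimpleNamespace(
    pointwisemlp=SimpleNamespace(
        feature_type='dp_fi_df',  # one of 'dp_fj', 'fi_df', 'dp_fi_df'
        num_mlps=2,               # 2 layers: input -> mfdim -> out_channels
        reduction='max',          # assumed reduction over grouped neighbors
    )
)

op = PointWiseMLP(in_channels=64, out_channels=128, radius=0.1, nsample=26, config=config)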
Example no. 4
    def __init__(self, in_channels, out_channels, radius, nsample, config):
        """A AdaptiveWeight operator for local aggregation

        Args:
            in_channels: input channels.
            out_channels: output channels.
            radius: ball query radius
            nsample: neighborhood limit.
            config: config file
        """
        super(AdaptiveWeight, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.nsample = nsample
        self.weight_type = config.adaptive_weight.weight_type
        self.weight_to_channels = {
            'dp': 3,
            'df': in_channels,
            'fj': in_channels,
            'dp_df': 3 + in_channels,
            'dp_fj': 3 + in_channels,
            'fi_df': 2 * in_channels,
            'dp_fi_df': 3 + 2 * in_channels,
            'rscnn': 10
        }
        self.weight_input_channels = self.weight_to_channels[self.weight_type]
        self.num_mlps = config.adaptive_weight.num_mlps
        self.shared_channels = config.adaptive_weight.shared_channels
        self.weight_softmax = config.adaptive_weight.weight_softmax
        self.reduction = config.adaptive_weight.reduction
        self.output_conv = config.adaptive_weight.output_conv or (
            self.in_channels != self.out_channels)

        self.grouper = MaskedQueryAndGroup(radius,
                                           nsample,
                                           use_xyz=False,
                                           ret_grouped_xyz=True,
                                           normalize_xyz=True)

        # Point-wise MLP (1x1 convs) that predicts the adaptive aggregation
        # weights from the selected geometric/feature input.
        self.mlps = nn.Sequential()
        self.mlps.add_module(
            'conv0',
            nn.Conv2d(self.weight_input_channels,
                      self.in_channels // self.shared_channels,
                      kernel_size=1))
        for i in range(self.num_mlps - 1):
            self.mlps.add_module(f'relu{i}', nn.LeakyReLU(inplace=True))
            self.mlps.add_module(
                f'conv{i + 1}',
                nn.Conv2d(self.in_channels // self.shared_channels,
                          self.in_channels // self.shared_channels,
                          kernel_size=1))

        if self.output_conv:
            self.out_conv = nn.Sequential(
                nn.Conv1d(in_channels, out_channels, kernel_size=1,
                          bias=False),
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
        else:
            self.out_transform = nn.Sequential(
                nn.BatchNorm1d(out_channels, eps=1e-3, momentum=0.01),
                nn.LeakyReLU(inplace=True))
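Finally, a hypothetical construction for AdaptiveWeight. weight_type must be a key of weight_to_channels above, and shared_channels should divide in_channels evenly since the weight MLP operates on in_channels // shared_channels channels. The option values and the module path are placeholders, not the project's defaults.

from types import SimpleNamespace

from local_aggregation_operators import AdaptiveWeight  # placeholder module path

config = SimpleNamespace(
    adaptive_weight=SimpleNamespace(
        weight_type='dp_fj',   # any key of weight_to_channels, e.g. 'dp', 'df', 'rscnn', ...
        num_mlps=1,            # a single 1x1 conv predicts the weights
        shared_channels=1,     # 1 => one adaptive weight per input channel
        weight_softmax=False,  # assumed: whether weights are normalized with a softmax
        reduction='avg',       # assumed reduction over grouped neighbors
        output_conv=False,
    )
)

op = AdaptiveWeight(in_channels=64, out_channels=64, radius=0.1, nsample=26, config=config)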