def __init__(self, *, mlps: List[List[int]], radii: List[float], nsamples: List[int],
             post_mlp: List[int], bn: bool = True, use_xyz: bool = True,
             sample_uniformly: bool = False):
    super().__init__()

    assert len(mlps) == len(nsamples) == len(radii)

    self.post_mlp = pt_utils.SharedMLP(post_mlp, bn=bn)

    self.groupers = nn.ModuleList()
    self.mlps = nn.ModuleList()
    for i in range(len(radii)):
        radius = radii[i]
        nsample = nsamples[i]
        self.groupers.append(
            pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz,
                                          sample_uniformly=sample_uniformly))
        mlp_spec = mlps[i]
        if use_xyz:
            mlp_spec[0] += 3
        self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
def __init__(self, *, npoint: int, radii: List[float], nsamples: List[int],
             mlps: List[List[int]], bn: bool = True, use_xyz: bool = True,
             sample_uniformly: bool = False):
    super().__init__()

    assert len(radii) == len(nsamples) == len(mlps)

    self.npoint = npoint
    self.groupers = nn.ModuleList()
    self.mlps = nn.ModuleList()
    for i in range(len(radii)):
        radius = radii[i]
        nsample = nsamples[i]
        self.groupers.append(
            pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz,
                                          sample_uniformly=sample_uniformly)
            if npoint is not None else pointnet2_utils.GroupAll(use_xyz))
        mlp_spec = mlps[i]
        if use_xyz:
            mlp_spec[0] += 3
        self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
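# --------------------------------------------------------------------------
# Usage sketch (illustrative only). The enclosing class name is not shown in
# this excerpt; `PointnetSAModuleMSG` below is an assumed name. Note that with
# use_xyz=True the constructor adds 3 to each mlps[i][0] in place, so pass the
# raw per-point feature channel count:
#
#   sa_msg = PointnetSAModuleMSG(
#       npoint=512,
#       radii=[0.1, 0.2, 0.4],
#       nsamples=[16, 32, 64],
#       mlps=[[64, 64, 128], [64, 128, 128], [64, 128, 256]],
#       use_xyz=True)
# --------------------------------------------------------------------------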
def __init__(self, *, mlp: List[int], npoint: int = None, radius: float = None,
             nsample: int = None, bn: bool = True, use_xyz: bool = True):
    super().__init__()

    self.npoint = npoint
    self.groupers = nn.ModuleList()
    self.mlps = nn.ModuleList()
    if self.npoint is not None:
        assert radius is not None
        assert nsample is not None
        self.groupers.append(
            pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz))
    else:
        self.groupers.append(pointnet2_utils.GroupAll(use_xyz=use_xyz))
    if use_xyz:
        mlp[0] += 3
    self.mlps.append(pt_utils.SharedMLP(mlp, bn=bn))
def __init__(
        self,
        *,
        mlp: List[int],
        radius: float = None,
        nsample: int = None,
        bn: bool = True,
        use_xyz: bool = True,
        pooling: str = 'max',
        sigma: float = None,            # for RBF pooling
        normalize_xyz: bool = False,    # normalize local XYZ with radius
        sample_uniformly: bool = False,
        ret_unique_cnt: bool = False):
    super().__init__()

    self.radius = radius
    self.nsample = nsample
    self.pooling = pooling
    self.mlp_module = None
    self.use_xyz = use_xyz
    self.sigma = sigma
    if self.sigma is None:
        self.sigma = self.radius / 2
    self.normalize_xyz = normalize_xyz
    self.ret_unique_cnt = ret_unique_cnt

    self.grouper = pointnet2_utils.QueryAndGroup(
        radius, nsample, use_xyz=use_xyz, ret_grouped_xyz=True,
        normalize_xyz=normalize_xyz, sample_uniformly=sample_uniformly,
        ret_unique_cnt=ret_unique_cnt)

    mlp_spec = mlp
    if use_xyz and len(mlp_spec) > 0:
        mlp_spec[0] += 3
    self.mlp_module = pt_utils.SharedMLP(mlp_spec, bn=bn)
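# --------------------------------------------------------------------------
# Usage sketch (illustrative only; the enclosing class name is an assumption).
# When sigma is left as None it defaults to radius / 2 inside __init__, and
# with use_xyz=True the first MLP width is grown by 3 for the grouped XYZ:
#
#   sa_votes = PointnetSAModuleVotes(      # assumed class name
#       mlp=[256, 128, 128, 128],
#       radius=0.3,
#       nsample=16,
#       pooling='max',
#       normalize_xyz=True)
# --------------------------------------------------------------------------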
def __init__(self, *, mlp: List[int], bn: bool = True):
    """
    :param mlp: list of int
    :param bn: whether to use batchnorm
    """
    super().__init__()
    self.mlp = pt_utils.SharedMLP(mlp, bn=bn)
def __init__(self, npoint, radii, nsamples, mlps, bn=True, use_xyz=True):
    # type: (PointnetSAModuleMSGPN2, int, List[float], List[int], List[List[int]], bool, bool) -> None
    super(PointnetSAModuleMSGPN2, self).__init__()

    assert len(radii) == len(nsamples) == len(mlps)

    self.npoint = npoint
    self.groupers = nn.ModuleList()
    self.mlps = nn.ModuleList()
    self.out_mlps = nn.ModuleList()
    for i in range(len(radii)):
        radius = radii[i]
        nsample = nsamples[i]
        self.groupers.append(
            pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz)
            if npoint is not None else pointnet2_utils.GroupAll(use_xyz))
        mlp_spec = mlps[i]
        self.out_mlps.append(
            nn.Sequential(nn.Conv1d(mlp_spec[1], mlp_spec[-1], 1),
                          nn.BatchNorm1d(mlp_spec[-1]),
                          nn.ReLU(inplace=True)))
        mlp_spec = mlp_spec[0:2]
        if use_xyz:
            mlp_spec[0] += 3
        self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
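# --------------------------------------------------------------------------
# Usage sketch (illustrative only; the argument values are assumed). In this
# variant only mlp_spec[0:2] feeds the shared MLP (plus 3 channels for XYZ
# when use_xyz=True); the mlp_spec[1] -> mlp_spec[-1] mapping is handled by
# the 1x1 Conv1d in out_mlps:
#
#   sa_pn2 = PointnetSAModuleMSGPN2(
#       npoint=1024,
#       radii=[0.2, 0.4],
#       nsamples=[32, 64],
#       mlps=[[64, 128, 256], [64, 128, 256]])
# --------------------------------------------------------------------------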
def __init__(self, *, npoint: int, radii: List[float], nsamples: List[int],
             mlps: List[List[int]], bn: bool = True, use_xyz: bool = True,
             pool_method='max_pool', instance_norm=False):
    """
    :param npoint: int
    :param radii: list of float, list of radii to group with
    :param nsamples: list of int, number of samples in each ball query
    :param mlps: list of list of int, spec of the pointnet before the global pooling for each scale
    :param bn: whether to use batchnorm
    :param use_xyz:
    :param pool_method: max_pool / avg_pool
    :param instance_norm: whether to use instance_norm
    """
    super().__init__()

    assert len(radii) == len(nsamples) == len(mlps)

    self.npoint = npoint
    self.groupers = nn.ModuleList()
    self.mlps = nn.ModuleList()
    for i in range(len(radii)):
        radius = radii[i]
        nsample = nsamples[i]
        self.groupers.append(
            pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz)
            if npoint is not None else pointnet2_utils.GroupAll(use_xyz)
        )
        mlp_spec = mlps[i]
        if use_xyz:
            mlp_spec[0] += 3

        self.mlps.append(pt_utils.SharedMLP(mlp_spec, bn=bn, instance_norm=instance_norm))

    self.pool_method = pool_method
def __init__(
        self,
        *,
        mlp: List[int],
        npoint: int = None,
        radius: float = None,
        nsample: int = None,
        bn: bool = True,
        use_xyz: bool = True,
        pooling: str = 'max',
        sigma: float = None,            # for RBF pooling
        normalize_xyz: bool = False,    # normalize local XYZ with radius
        sample_uniformly: bool = False,
        ret_unique_cnt: bool = False,
        binary='None'):
    super().__init__()

    self.npoint = npoint
    self.radius = radius
    self.nsample = nsample
    self.pooling = pooling
    self.mlp_module = None
    self.use_xyz = use_xyz
    self.sigma = sigma
    if self.sigma is None:
        self.sigma = self.radius / 2
    self.normalize_xyz = normalize_xyz
    self.ret_unique_cnt = ret_unique_cnt

    if npoint is not None:
        self.grouper = pointnet2_utils.QueryAndGroup(
            radius, nsample, use_xyz=use_xyz, ret_grouped_xyz=True,
            normalize_xyz=normalize_xyz, sample_uniformly=sample_uniformly,
            ret_unique_cnt=ret_unique_cnt)
    else:
        self.grouper = pointnet2_utils.GroupAll(use_xyz, ret_grouped_xyz=True)

    mlp_spec = mlp
    if use_xyz and len(mlp_spec) > 0:
        mlp_spec[0] += 3
    if binary == 'None':
        self.mlp_module = pt_utils.SharedMLP(mlp_spec, bn=bn)
    elif binary == 'BiMLP':
        self.mlp_module = BiMLP(mlp_spec, BiLinear=BiLinearLSR)
    elif binary == 'BiSharedMLP':
        self.mlp_module = pt_utils.BiSharedMLP(mlp_spec, bn=bn, lsr=False)
    elif binary == 'BiLSRSharedMLP':
        self.mlp_module = pt_utils.BiSharedMLP(mlp_spec, bn=bn, lsr=True)
    else:
        raise NotImplementedError
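# --------------------------------------------------------------------------
# Usage sketch (illustrative only; class name and values assumed). The
# `binary` switch selects the point-wise MLP implementation: 'None' keeps the
# full-precision SharedMLP, while 'BiMLP', 'BiSharedMLP', and 'BiLSRSharedMLP'
# pick the binarized variants; any other string raises NotImplementedError:
#
#   sa_bin = PointnetSAModuleVotesBinary(  # assumed class name
#       mlp=[256, 128, 128, 128],
#       npoint=1024,
#       radius=0.3,
#       nsample=16,
#       binary='BiSharedMLP')
# --------------------------------------------------------------------------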
def __init__(
        self,
        *,
        mlp: List[int],
        npoint: int = None,
        radius: float = None,
        nsample: int = None,
        bn: bool = True,
        use_xyz: bool = True,
        pooling: str = 'max',
        normalize_xyz: bool = False,    # normalize local XYZ with radius
        sample_uniformly: bool = False,
        ret_unique_cnt: bool = False):
    super().__init__()

    self.npoint = npoint
    self.nsample = nsample
    self.pooling = pooling
    self.mlp_module = None
    self.use_xyz = use_xyz
    self.normalize_xyz = normalize_xyz
    self.ret_unique_cnt = ret_unique_cnt

    '''
    if npoint is not None:
        print("not used for plane")
    else:
    '''
    self.grouper1 = pointnet2_utils.QueryAndGroup(
        radius, nsample, use_xyz=use_xyz, ret_grouped_xyz=True,
        normalize_xyz=normalize_xyz, sample_uniformly=sample_uniformly,
        ret_unique_cnt=ret_unique_cnt)
    self.grouper2 = pointnet2_utils.GroupAll(use_xyz, ret_grouped_xyz=True)

    mlp_spec = mlp
    if use_xyz and len(mlp_spec) > 0:
        mlp_spec[0] += 3
    self.mlp_module1 = pt_utils.SharedMLP(mlp_spec, bn=bn)
    self.mlp_module2 = pt_utils.SharedMLP(mlp_spec, bn=bn)
def __init__(self, *, mlp: List[int], bn: bool = True, binary='None'):
    super().__init__()

    if binary == 'None':
        self.mlp = pt_utils.SharedMLP(mlp, bn=bn)
    elif binary == 'BiMLP':
        self.mlp = BiMLP(mlp, BiLinear=BiLinearLSR)
    elif binary == 'BiSharedMLP':
        self.mlp = pt_utils.BiSharedMLP(mlp, bn=bn, lsr=False)
    elif binary == 'BiLSRSharedMLP':
        self.mlp = pt_utils.BiSharedMLP(mlp, bn=bn, lsr=True)
    else:
        raise NotImplementedError
def __init__(
        self,
        *,
        mlp: List[int],
        npoint: int = None,
        split: int = 18,
        radius: float = None,
        nsample: int = None,
        bn: bool = True,
        use_xyz: bool = True,
        pooling: str = 'max',
        sigma: float = None,            # for RBF pooling
        normalize_xyz: bool = False,    # normalize local XYZ with radius
        sample_uniformly: bool = False,
        ret_unique_cnt: bool = False,
        same_idx: bool = False,
        use_feature: bool = True):
    super().__init__()

    self.npoint = npoint
    self.radius = radius
    self.split = split
    self.nsample = nsample
    self.pooling = pooling
    self.mlp_module = None
    self.use_xyz = use_xyz
    self.sigma = sigma
    if self.sigma is None:
        self.sigma = self.radius / 2
    self.normalize_xyz = normalize_xyz
    self.ret_unique_cnt = ret_unique_cnt
    self.same_idx = same_idx

    if npoint is not None:
        '''
        self.grouper = pointnet2_utils.PairwiseGroup(
            radius, nsample, use_xyz=use_xyz, ret_grouped_xyz=True,
            normalize_xyz=normalize_xyz, sample_uniformly=sample_uniformly,
            ret_unique_cnt=ret_unique_cnt, use_feature=use_feature)
        '''
        self.grouper = pointnet2_utils.QueryAndGroup(
            radius, nsample, use_xyz=use_xyz, ret_grouped_xyz=True,
            normalize_xyz=normalize_xyz, sample_uniformly=sample_uniformly,
            ret_unique_cnt=ret_unique_cnt, use_feature=use_feature, ret_idx=True)
    else:
        self.grouper = pointnet2_utils.GroupAll(use_xyz, ret_grouped_xyz=True)

    mlp_spec = mlp
    if use_feature and len(mlp_spec) > 0:
        mlp_spec[0] += mlp_spec[0]  # doubles the input channel count when grouped features are used
    if use_xyz and len(mlp_spec) > 0:
        mlp_spec[0] += 3
    # one shared MLP per split
    self.mlp_module = nn.ModuleList()
    for i in range(split):
        self.mlp_module.append(pt_utils.SharedMLP(mlp_spec, bn=bn))
def __init__(self, nsample, seed_feature_dim, cylinder_radius=0.05, hmin=-0.02,
             hmax_list=[0.01, 0.02, 0.03, 0.04]):
    super().__init__()
    self.nsample = nsample
    self.in_dim = seed_feature_dim
    self.cylinder_radius = cylinder_radius
    mlps = [self.in_dim, 64, 128, 256]

    self.groupers = []
    for hmax in hmax_list:
        self.groupers.append(CylinderQueryAndGroup(
            cylinder_radius, hmin, hmax, nsample, use_xyz=True))
    self.mlps = pt_utils.SharedMLP(mlps, bn=True)
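# --------------------------------------------------------------------------
# Usage sketch (illustrative only; class name assumed). One CylinderQueryAndGroup
# grouper is built per hmax in hmax_list, all sharing the same cylinder_radius
# and hmin, while a single SharedMLP ([seed_feature_dim, 64, 128, 256])
# processes the grouped features:
#
#   cyl_group = CylinderGroupModule(       # assumed class name
#       nsample=64,
#       seed_feature_dim=256,
#       cylinder_radius=0.05,
#       hmin=-0.02,
#       hmax_list=[0.01, 0.02, 0.03, 0.04])
# --------------------------------------------------------------------------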
def __init__(self, *, mlp: List[int], bn: bool = True):
    super().__init__()
    self.mlp = pt_utils.SharedMLP(mlp, bn=bn)