Example #1
    def __init__(self, num_class=2):
        super(FusionNet, self).__init__()

        self.color_moudle = Net(num_class=num_class, is_first_bn=True)
        self.depth_moudle = Net(num_class=num_class, is_first_bn=True)
        self.ir_moudle = Net(num_class=num_class, is_first_bn=True)

        self.color_SE = SEModule(512, reduction=16)
        self.depth_SE = SEModule(512, reduction=16)
        self.ir_SE = SEModule(512, reduction=16)

        self.bottleneck = nn.Sequential(
            nn.Conv2d(512 * 3, 512, kernel_size=1, padding=0),
            nn.BatchNorm2d(512), nn.ReLU(inplace=True))

        self.res_0 = self._make_layer(SEResNeXtBottleneck,
                                      planes=256,
                                      blocks=2,
                                      stride=2,
                                      groups=32,
                                      reduction=16,
                                      downsample_kernel_size=1,
                                      downsample_padding=0)
        self.res_1 = self._make_layer(SEResNeXtBottleneck,
                                      planes=512,
                                      blocks=2,
                                      stride=2,
                                      groups=32,
                                      reduction=16,
                                      downsample_kernel_size=1,
                                      downsample_padding=0)

        self.fc = nn.Sequential(nn.Dropout(0.5), nn.Linear(2048, 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))
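
The listing only shows constructors; none of the variants include their forward pass. As a rough sketch of how a fusion head wired like Example #1 is typically driven, the pass below squeeze-excites each modality's feature map, concatenates them, compresses with the 1x1 bottleneck, refines with the two residual stages, then pools and classifies. The feature-extractor call `forward_res3`, the tuple input, and the imports (`torch`, `torch.nn.functional as F`) are assumptions, not part of the original code.

    # Hypothetical forward pass for the variant above (not in the original listing).
    # Assumes each Net exposes a 512-channel feature map via a method such as
    # forward_res3, and that torch / torch.nn.functional (as F) are imported.
    def forward(self, x):
        color, depth, ir = x                                  # three modality tensors

        color_feas = self.color_moudle.forward_res3(color)    # [B, 512, h, w]
        depth_feas = self.depth_moudle.forward_res3(depth)    # [B, 512, h, w]
        ir_feas = self.ir_moudle.forward_res3(ir)             # [B, 512, h, w]

        # per-modality channel re-weighting
        color_feas = self.color_SE(color_feas)
        depth_feas = self.depth_SE(depth_feas)
        ir_feas = self.ir_SE(ir_feas)

        # concatenate along channels, fuse, and classify
        fea = torch.cat([color_feas, depth_feas, ir_feas], dim=1)   # [B, 1536, h, w]
        fea = self.bottleneck(fea)                                  # [B, 512, h, w]
        fea = self.res_0(fea)                                       # [B, 1024, h/2, w/2]
        fea = self.res_1(fea)                                       # [B, 2048, h/4, w/4]
        fea = F.adaptive_avg_pool2d(fea, 1).flatten(1)              # [B, 2048]
        return self.fc(fea)                                         # [B, num_class]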

Example #2
    def __init__(self, num_class=2, deploy=False, width_multiplier=[0.75, 0.75, 0.75, 2.5], num_blocks=[2, 4, 14, 1], override_groups_map=None):
        super(FusionNet, self).__init__()
        self.deploy = deploy
        self.cur_layer_idx = 1
        self.in_planes = 384
        self.override_groups_map = override_groups_map or dict()

        assert 0 not in self.override_groups_map

        self.color_moudle = Net(num_class=num_class, is_first_bn=True)
        self.depth_moudle = Net(num_class=num_class, is_first_bn=True)
        self.ir_moudle = Net(num_class=num_class, is_first_bn=True)


        self.color_SE = SEModule(128, reduction=16)
        self.depth_SE = SEModule(128, reduction=16)
        self.ir_SE = SEModule(128, reduction=16)

        self.res_0 = self._make_layer(BasicBlock, 384, 256, 2, stride=2)
        self.res_1 = self._make_layer(BasicBlock, 256, 512, 2, stride=2)

        # self.res_0 = self._make_RepVGG_layer(384, num_blocks[2], stride=2)
        # self.res_1 = self._make_RepVGG_layer(int(512 * width_multiplier[3]), num_blocks[3], stride=2)

        self.fc = nn.Sequential(nn.Dropout(0.5),
                                # nn.Linear(int(512 * width_multiplier[3]), 256),
                                nn.Linear(int(512), 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))
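
Every BasicBlock-based variant in this listing (Examples #2, #3, #5, #6) calls `self._make_layer(BasicBlock, inplanes, planes, blocks, stride=...)`, which is not shown. Below is a minimal sketch of the torchvision-style helper those calls appear to expect; the `BasicBlock` signature (expansion attribute, optional downsample) is assumed to match torchvision's ResNet. Example #1 instead uses the SENet-style `_make_layer` signature, which this sketch does not cover.

    # Plausible _make_layer helper (assumed, not shown in the listing); mirrors the
    # torchvision ResNet pattern: the first block may change stride/width and gets a
    # 1x1 downsample shortcut, the remaining blocks keep the shape.
    def _make_layer(self, block, inplanes, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion))

        layers = [block(inplanes, planes, stride, downsample)]
        for _ in range(1, blocks):
            layers.append(block(planes * block.expansion, planes))
        return nn.Sequential(*layers)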

Example #3
    def __init__(self, num_class=2, modality='fusion'):
        super(FusionNet, self).__init__()
        # Net is the net from model_baseline; it returns:
        # logit.shape: torch.Size([batch_size, 2])
        # logit.shape: torch.Size([batch_size, 300])
        # fea.shape: torch.Size([batch_size, 512])
        self.modality = modality
        if self.modality == 'fusion':
            self.color_moudle = Net(num_class=num_class, is_first_bn=True)
            self.depth_moudle = Net(num_class=num_class, is_first_bn=True)
            self.ir_moudle = Net(num_class=num_class, is_first_bn=True)

            # SEModule takes channels and reduction; channels must match the output width of the preceding network
            self.color_SE = SEModule(128, reduction=16)
            self.depth_SE = SEModule(128, reduction=16)
            self.ir_SE = SEModule(128, reduction=16)

            # build two layers in the ResNet style
            self.res_0 = self._make_layer(BasicBlock, 384, 256, 2, stride=2)
        else:
            self.color_moudle = Net(num_class=num_class, is_first_bn=True)
            self.color_SE = SEModule(128, reduction=16)
            self.res_0 = self._make_layer(BasicBlock, 128, 256, 2, stride=2)
        self.res_1 = self._make_layer(BasicBlock, 256, 512, 2, stride=2)

        self.fc = nn.Sequential(nn.Dropout(0.5), nn.Linear(512, 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))
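
Because this variant can also run on the color stream alone, its forward pass has to branch on `self.modality`. Below is a condensed, hypothetical sketch under the same assumptions as the earlier forward sketch (feature extractor named `forward_res3`, `torch`/`F` imported).

    # Hypothetical forward for the modality-aware variant: fuse the three SE-weighted
    # streams when modality == 'fusion', otherwise use the color stream alone.
    def forward(self, x):
        if self.modality == 'fusion':
            color, depth, ir = x
            fea = torch.cat(
                [self.color_SE(self.color_moudle.forward_res3(color)),
                 self.depth_SE(self.depth_moudle.forward_res3(depth)),
                 self.ir_SE(self.ir_moudle.forward_res3(ir))], dim=1)   # [B, 384, h, w]
        else:
            fea = self.color_SE(self.color_moudle.forward_res3(x))      # [B, 128, h, w]

        fea = self.res_0(fea)
        fea = self.res_1(fea)
        fea = F.adaptive_avg_pool2d(fea, 1).flatten(1)                   # [B, 512]
        return self.fc(fea)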

Example #4
    def __init__(self, num_class=2, deploy=False, width_multiplier=[0.75, 0.75, 0.75, 2.5], num_blocks=[2, 4, 14, 1], override_groups_map=None):
        super(FusionNet, self).__init__()
        self.deploy = deploy
        # width_multiplier=[1, 1, 1, 1]
        # num_blocks=[2, 2, 2, 2]
        self.cur_layer_idx = 1
        self.in_planes = int(256 * width_multiplier[2]) 
        self.override_groups_map = override_groups_map or dict()

        assert 0 not in self.override_groups_map
        
        self.color_module = Net(num_class=num_class, is_first_bn=True)
        self.depth_module = Net(num_class=num_class, is_first_bn=True)
        self.ir_module = Net(num_class=num_class, is_first_bn=True)

        self.color_SE = SEModule(96, reduction=8)
        self.depth_SE = SEModule(96, reduction=8)
        self.ir_SE = SEModule(96, reduction=8)

        self.bottleneck = nn.Sequential(nn.Conv2d(96*3, int(256 * width_multiplier[2]), kernel_size=1, padding=0),
                                         nn.BatchNorm2d(int(256 * width_multiplier[2])),
                                         nn.ReLU(inplace=True))

        self.res_0 = self._make_RepVGG_layer(int(256 * width_multiplier[2]), num_blocks[2], stride=2)
        self.res_1 = self._make_RepVGG_layer(int(512 * width_multiplier[3]), num_blocks[3], stride=2)

        self.fc = nn.Sequential(nn.Dropout(0.5),
                                nn.Linear(int(512 * width_multiplier[3]), 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))
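
The RepVGG variant builds its two stages with `_make_RepVGG_layer`, which is also missing from the listing. The sketch below follows the `_make_stage` pattern of the public RepVGG reference implementation, which the surrounding bookkeeping (`cur_layer_idx`, `in_planes`, `override_groups_map`, `deploy`) matches; `RepVGGBlock` is assumed to be imported from that code base.

    # Plausible _make_RepVGG_layer (assumed), mirroring _make_stage from the RepVGG
    # reference code: the first block downsamples, the rest run at stride 1, and
    # per-layer group counts come from override_groups_map.
    def _make_RepVGG_layer(self, planes, num_blocks, stride):
        strides = [stride] + [1] * (num_blocks - 1)
        blocks = []
        for s in strides:
            cur_groups = self.override_groups_map.get(self.cur_layer_idx, 1)
            blocks.append(RepVGGBlock(in_channels=self.in_planes, out_channels=planes,
                                      kernel_size=3, stride=s, padding=1,
                                      groups=cur_groups, deploy=self.deploy))
            self.in_planes = planes
            self.cur_layer_idx += 1
        return nn.Sequential(*blocks)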

Example #5
    def __init__(self, num_class=2):
        super(FusionNet, self).__init__()

        self.color_moudle = Net(num_class=num_class, is_first_bn=True)
        self.depth_moudle = Net(num_class=num_class, is_first_bn=True)
        self.ir_moudle = Net(num_class=num_class, is_first_bn=True)

        self.color_SE = SEModule(128, reduction=16)
        self.depth_SE = SEModule(128, reduction=16)
        self.ir_SE = SEModule(128, reduction=16)

        self.res_0 = self._make_layer(BasicBlock, 384, 256, 2, stride=2)
        self.res_1 = self._make_layer(BasicBlock, 256, 512, 2, stride=2)

        self.fc = nn.Sequential(nn.Dropout(0.5), nn.Linear(512, 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))

Example #6
    def __init__(self, num_class=2):
        super(FusionNet, self).__init__()

        self.color_moudle = Net(num_class=num_class, is_first_bn=True)
        self.depth_moudle = Net(num_class=num_class, is_first_bn=True)
        self.ir_moudle = Net(num_class=num_class, is_first_bn=True)

        self.color_SE = SEModule(512, reduction=16)
        self.depth_SE = SEModule(512, reduction=16)
        self.ir_SE = SEModule(512, reduction=16)

        self.bottleneck = nn.Sequential(
            nn.Conv2d(512 * 3, 128 * 3, kernel_size=1, padding=0),
            nn.BatchNorm2d(128 * 3), nn.ReLU(inplace=True))

        self.res_0 = self._make_layer(BasicBlock, 128 * 3, 256, 2, stride=2)
        self.res_1 = self._make_layer(BasicBlock, 256, 512, 2, stride=2)

        self.fc = nn.Sequential(nn.Dropout(0.5), nn.Linear(512, 256),
                                nn.ReLU(inplace=True),
                                nn.Linear(256, num_class))
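
All six variants share the same idea: one backbone per modality, SE re-weighting, channel-wise concatenation, a small residual (or RepVGG) trunk, and a dropout + two-layer classifier. Below is a quick shape check for this last variant, assuming the missing pieces (`Net`, `SEModule`, `BasicBlock`, and a forward pass like the sketches above) are in scope; the 112x112 crop size and 3-channel depth/IR inputs are assumptions, not taken from the listing.

# Hypothetical smoke test; input size and tuple input format are assumptions.
net = FusionNet(num_class=2)
color = torch.randn(4, 3, 112, 112)
depth = torch.randn(4, 3, 112, 112)
ir = torch.randn(4, 3, 112, 112)
logit = net((color, depth, ir))
print(logit.shape)   # expected: torch.Size([4, 2])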