Code example #1
File: objax.py  Project: sanmayphy/equivariant-MLP
    def __init__(self, rep_in, rep_out, group, ch=384, num_layers=3):
        super().__init__()
        self.rep_in = rep_in(group)    # specialize the input representation to the group
        self.rep_out = rep_out(group)  # specialize the output representation to the group
        self.G = group
        # Layer widths: input size followed by num_layers hidden layers of width ch.
        chs = [self.rep_in.size()] + num_layers * [ch]
        cout = self.rep_out.size()
        logging.info("Initing MLP")
        # Stack of Linear+swish blocks, then a final Linear projection to the output size.
        self.net = Sequential(
            *[Sequential(nn.Linear(cin, cout), swish) for cin, cout in zip(chs, chs[1:])],
            nn.Linear(chs[-1], cout)
        )
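To make the channel bookkeeping concrete, here is a minimal standalone sketch of the same pattern in plain PyTorch; the make_mlp helper and the sizes 12 and 3 are illustrative stand-ins for rep_in.size() and rep_out.size(), not part of the project:

import torch.nn as nn

def make_mlp(in_size, out_size, ch=384, num_layers=3):
    # Same widths as above: input size, then num_layers hidden layers of width ch.
    chs = [in_size] + num_layers * [ch]
    layers = []
    for cin, cout in zip(chs, chs[1:]):
        layers += [nn.Linear(cin, cout), nn.SiLU()]   # nn.SiLU is PyTorch's swish
    layers.append(nn.Linear(chs[-1], out_size))       # final projection to the output size
    return nn.Sequential(*layers)

net = make_mlp(12, 3)   # e.g. an input representation of size 12, output of size 3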
Code example #2
    def __init__(self,
                 num_blocks: list,
                 num_classes: int = 1000,
                 width_multiplier: Optional[list] = None,
                 override_groups_map=None,
                 deploy=False):
        super(RepVGG, self).__init__()

        assert len(width_multiplier) == 4

        self.deploy = deploy                                      # True: build blocks in their fused, inference-only form
        self.override_groups_map = override_groups_map or dict()  # optional per-layer grouped-conv settings

        assert 0 not in self.override_groups_map

        self.in_planes = min(64, int(64 * width_multiplier[0]))   # stem width, capped at 64 channels

        # Stem: a single stride-2 RepVGG block on the RGB input.
        self.stage0 = RepVGGBlock(in_channels=3,
                                  out_channels=self.in_planes,
                                  kernel_size=3,
                                  stride=2,
                                  padding=1,
                                  deploy=self.deploy)
        self.cur_layer_idx = 1    # running block index, used to look up per-layer group overrides
        # Four stages; each starts with a stride-2 block and widens by the per-stage multiplier.
        self.stage1 = self._make_stage(int(64 * width_multiplier[0]),
                                       num_blocks[0],
                                       stride=2)
        self.stage2 = self._make_stage(int(128 * width_multiplier[1]),
                                       num_blocks[1],
                                       stride=2)
        self.stage3 = self._make_stage(int(256 * width_multiplier[2]),
                                       num_blocks[2],
                                       stride=2)
        self.stage4 = self._make_stage(int(512 * width_multiplier[3]),
                                       num_blocks[3],
                                       stride=2)
        self.gap = F.average_pool_2d    # spatial average pooling before the classifier
        self.linear = nn.Linear(int(512 * width_multiplier[3]), num_classes)  # classifier head
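The constructor above relies on a _make_stage helper that is not included in this snippet. Below is a rough sketch of the bookkeeping such a helper typically does in RepVGG-style models (a stride list with one stride-2 block per stage, threading of in_planes, and group overrides keyed by the running layer index); it is not the project's actual code, and a plain nn.Conv2d stands in for RepVGGBlock:

import torch.nn as nn

def make_stage(in_planes, planes, num_blocks, stride,
               override_groups_map=None, layer_idx=1):
    # Standalone sketch: nn.Conv2d is a placeholder for the real RepVGGBlock.
    override_groups_map = override_groups_map or {}
    strides = [stride] + [1] * (num_blocks - 1)   # only the first block downsamples
    blocks = []
    for s in strides:
        groups = override_groups_map.get(layer_idx, 1)   # optional grouped conv for this layer
        blocks.append(nn.Conv2d(in_planes, planes, kernel_size=3,
                                stride=s, padding=1, groups=groups))
        in_planes = planes      # subsequent blocks see the widened channel count
        layer_idx += 1
    return nn.Sequential(*blocks), in_planes, layer_idx

stage, in_planes, layer_idx = make_stage(64, 64, num_blocks=2, stride=2)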
Code example #3
File: objax.py  Project: sanmayphy/equivariant-MLP
def MLPBlock(cin, cout):
    # Linear layer followed by a swish nonlinearity; a BatchNorm variant is left commented out.
    return Sequential(nn.Linear(cin, cout), swish)  # ,nn.BatchNorm0D(cout,momentum=.9),swish)#,