def initialize(self):
    """Build the detector: ResNet-50-style backbone, top-down lateral
    connections, and per-scale detection heads.

    NOTE(review): sub-layers are created in a fixed order; in layer
    libraries of this style the creation order usually determines variable
    registration / checkpoint-loading order — do not reorder these
    assignments without verifying weight loading still works.
    """
    # --- Backbone stem ---
    self.bn0 = M.BatchNorm()
    # 7x7/2 conv, 64 channels — classic ResNet stem (BN + ReLU, no bias).
    self.c1 = M.ConvLayer(7, 64, stride=2, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
    self.pool = M.MaxPool2D(3, 2)
    # --- Residual stages: unit counts (3, 4, 6, 3) match ResNet-50 ---
    self.stage1 = Stage(64, num_units=3, stride=1)
    self.stage2 = Stage(128, num_units=4, stride=2)
    self.stage3 = Stage(256, num_units=6, stride=2)
    self.stage4 = Stage(512, num_units=3, stride=2)
    # Final BN + ReLU applied to the deepest feature map (pre-activation style).
    self.bn1 = M.BatchNorm()
    self.act = M.Activation(M.PARAM_RELU)
    # --- Coarsest pyramid level (presumably stride 32, per head32 below) ---
    # 1x1 lateral conv to 256 channels, then context head + regression head.
    self.ssh_c3_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
    self.det3 = DETHead()
    self.head32 = RegressHead()
    # --- Middle pyramid level (presumably stride 16) ---
    # Lateral 1x1, upsample the coarser level 2x, then a 3x3 aggregation conv
    # (the merge itself — addition or concat — happens in forward, not here).
    self.ssh_c2_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
    self.ssh_c3_upsampling = M.NNUpSample(2)
    self.ssh_c2_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
    self.det2 = DETHead()
    self.head16 = RegressHead()
    # --- Finest pyramid level (presumably stride 8) ---
    self.ssh_m1_red_conv = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
    self.ssh_c2_upsampling = M.NNUpSample(2)
    self.ssh_c1_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
    self.det1 = DETHead()
    self.head8 = RegressHead()
def initialize(self):
    """Build an SSH-style context detection head.

    Three branches are created: a direct 3x3 conv (256 ch) and two stacked
    3x3 paths (128 ch each) that enlarge the receptive field; how the
    branch outputs are combined (presumably concatenated, then `self.act`)
    is decided in forward, not here.
    """
    # Branch 1: single 3x3 conv, 256 channels, BN, no activation
    # (activation is applied after merging — see self.act below).
    self.c11 = M.ConvLayer(3, 256, batch_norm=True)
    # Branch 2: 3x3 (ReLU) -> 3x3, 128 channels — larger receptive field.
    self.c21 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
    self.c22 = M.ConvLayer(3, 128, batch_norm=True)
    # Branch 3: another 3x3 (ReLU) -> 3x3 pair, 128 channels.
    # NOTE(review): c31 presumably chains off c21's output in forward
    # (standard SSH wiring) — confirm against the forward pass.
    self.c31 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
    self.c32 = M.ConvLayer(3, 128, batch_norm=True)
    # Shared ReLU applied to the merged branch outputs.
    self.act = M.Activation(M.PARAM_RELU)
def initialize(self, chn, stride=1, shortcut=False):
    """Build a pre-activation bottleneck residual unit (1x1 -> 3x3 -> 1x1).

    Args:
        chn: bottleneck width; the unit's output has chn*4 channels.
        stride: spatial stride, applied in the 3x3 conv (and in the
            shortcut projection when one is created).
        shortcut: when True, add a 1x1 projection conv so the skip path
            matches the chn*4 output channels / stride.
    """
    # Pre-activation: BN + ReLU before the first conv (ResNet-v2 style).
    self.bn0 = M.BatchNorm()
    self.act = M.Activation(M.PARAM_RELU)
    # 1x1 reduce -> 3x3 (carries the stride) -> 1x1 expand to chn*4.
    self.c1 = M.ConvLayer(1, chn, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
    self.c2 = M.ConvLayer(3, chn, stride=stride, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
    # Expansion conv has no BN/activation here — presumably the residual
    # sum (and any post-activation) happens in forward.
    self.c3 = M.ConvLayer(1, chn*4, usebias=False)
    self.shortcut = shortcut
    if shortcut:
        # Projection shortcut to match channels (chn*4) and stride.
        self.sc = M.ConvLayer(1, chn*4, stride=stride, usebias=False)