Example #1
	def initialize(self):
		self.c1 = M.ConvLayer(3, 64, pad='SAME_LEFT', stride=2, activation=M.PARAM_RELU, usebias=False, batch_norm=True)
		self.c2 = M.ConvLayer(3, 64, pad='SAME_LEFT', stride=2, activation=M.PARAM_RELU, usebias=False, batch_norm=True)
		self.layer1 = ResBlock(256, 1, 4)
		self.stage1 = Stage([32, 64], [1, 2], 4, 1)
		self.stage2 = Stage([32, 64, 128], [None, None, 2], 4, 4)
		self.stage3 = Stage([32, 64, 128, 256], [None,None,None,2], 4, 3)
Example #2
	def initialize(self, steps, inp, o):
		self.mods = nn.ModuleList()
		for i in range(steps):
			if i==(steps-1):
				self.mods.append(M.ConvLayer(3, o, stride=2, pad='SAME_LEFT', batch_norm=True, usebias=False))
			else:
				self.mods.append(M.ConvLayer(3, inp, stride=2, pad='SAME_LEFT', activation=M.PARAM_RELU, batch_norm=True, usebias=False))
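Every layer appended above uses stride=2, so a stack of `steps` such layers shrinks the spatial resolution by a factor of about 2**steps; only the final layer drops the ReLU and switches to the target channel count `o`. A quick sanity check of that size reduction, assuming 'SAME'-style padding (ceil division at every step):

steps, size = 3, 64
for _ in range(steps):
    size = (size + 1) // 2   # stride-2 conv with SAME padding: ceil(size / 2)
print(size)                  # 8 == 64 // 2**3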
Example #3
	def initialize(self, out, stride, shortcut=False):
		self.shortcut = shortcut
		self.c1 = M.ConvLayer(1, out//4, usebias=False, activation=M.PARAM_RELU, batch_norm=True)
		self.c2 = M.ConvLayer(3, out//4, usebias=False, activation=M.PARAM_RELU, pad='SAME_LEFT', stride=stride, batch_norm=True)
		self.c3 = M.ConvLayer(1, out, usebias=False, batch_norm=True)
		if shortcut:
			self.sc = M.ConvLayer(1, out, usebias=False, stride=stride, batch_norm=True)
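The M.ConvLayer calls above take only a kernel size and an output channel count, bundling the convolution with its batch norm and activation. For orientation, here is a minimal plain torch.nn sketch of the same 1-3-1 bottleneck structure; the explicit `inchn` argument and the forward pass are assumptions, since the original example only shows `initialize`:

from torch import nn

class Bottleneck(nn.Module):
    def __init__(self, inchn, out, stride, shortcut=False):
        super().__init__()
        self.shortcut = shortcut
        # 1x1 reduce, 3x3 spatial (optionally strided), 1x1 expand
        self.c1 = nn.Sequential(nn.Conv2d(inchn, out // 4, 1, bias=False),
                                nn.BatchNorm2d(out // 4), nn.ReLU())
        self.c2 = nn.Sequential(nn.Conv2d(out // 4, out // 4, 3, stride, 1, bias=False),
                                nn.BatchNorm2d(out // 4), nn.ReLU())
        self.c3 = nn.Sequential(nn.Conv2d(out // 4, out, 1, bias=False),
                                nn.BatchNorm2d(out))
        if shortcut:
            # projection shortcut to match channels and stride
            self.sc = nn.Sequential(nn.Conv2d(inchn, out, 1, stride, bias=False),
                                    nn.BatchNorm2d(out))

    def forward(self, x):
        y = self.c3(self.c2(self.c1(x)))
        identity = self.sc(x) if self.shortcut else x
        return y + identity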
Example #4
    def initialize(self, outchn, stride):
        self.stride = stride
        self.outchn = outchn
        self.bn0 = M.BatchNorm()
        self.c1 = M.ConvLayer(3,
                              outchn,
                              activation=M.PARAM_PRELU,
                              usebias=False,
                              batch_norm=True)
        self.c2 = M.ConvLayer(3,
                              outchn,
                              stride=stride,
                              usebias=False,
                              batch_norm=True)

        # se module
        #self.c3 = M.ConvLayer(1, outchn//16, activation=M.PARAM_PRELU)
        #self.c4 = M.ConvLayer(1, outchn, activation=M.PARAM_SIGMOID)

        # shortcut
        self.sc = M.ConvLayer(1,
                              outchn,
                              stride=stride,
                              usebias=False,
                              batch_norm=True)
Example #5
    def initialize(self, num_layer, num_chn, multiplier, step):
        # step: how many mixed ops to place in one layer
        # num_layer is not the number of conv layers, but the number of layer blocks
        self.num_layer = num_layer
        self.step = step

        self.c1 = M.ConvLayer(5,
                              64,
                              stride=2,
                              activation=M.PARAM_RELU,
                              batch_norm=True,
                              usebias=False)

        cells = nn.ModuleList()
        connections = []
        for i in range(num_layer):
            in_size = min(i + 1, 4)
            out_size = min(i + 2, 4)

            cell, conn = build_cells(step, multiplier, num_chn, in_size,
                                     out_size)
            cells.append(cell)
            connections.append(conn)

        self.cells = cells
        self.connections = connections

        self.down1 = FuseDown(3, 64, 512)
        self.down2 = FuseDown(2, 128, 512)
        self.down3 = FuseDown(1, 256, 512)
        self.down4 = M.ConvLayer(1, 512, batch_norm=True, usebias=False)

        self.final_conv = M.ConvLayer(1, 512, batch_norm=True, usebias=False)
Example #6
    def initialize(self, ksize, filters, stride, expand):
        self.outchn = filters
        self.expand = expand
        self.stride = stride
        outchn = filters * expand
        self.bn0 = M.BatchNorm()
        self.c0 = M.ConvLayer(1,
                              outchn,
                              usebias=False,
                              batch_norm=True,
                              activation=M.PARAM_PRELU)
        self.c1 = M.DWConvLayer(ksize,
                                1,
                                stride=stride,
                                usebias=False,
                                batch_norm=True,
                                activation=M.PARAM_PRELU)

        # se
        self.se1 = M.ConvLayer(1, outchn // 8, activation=M.PARAM_PRELU)
        self.se2 = M.ConvLayer(1, outchn, activation=M.PARAM_SIGMOID)

        self.c2 = M.ConvLayer(1, filters, batch_norm=True, usebias=False)

        self.sc = M.ConvLayer(1,
                              filters,
                              stride=stride,
                              batch_norm=True,
                              usebias=False)
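se1 and se2 form a squeeze-and-excitation pair: a 1x1 reduction conv with PReLU followed by a 1x1 expansion conv with a sigmoid gate. The example only shows initialize(), but such layers are typically applied to globally pooled features and used to rescale the main branch, roughly as in this sketch:

import torch.nn.functional as F

def se_gate(x, se1, se2):
    # x: (N, C, H, W); squeeze spatial dims, compute per-channel weights, rescale
    w = F.adaptive_avg_pool2d(x, 1)   # (N, C, 1, 1)
    w = se2(se1(w))                   # 1x1 reduce (PReLU), then 1x1 expand (sigmoid)
    return x * w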
Example #7
	def initialize(self):
		self.c11 = M.ConvLayer(3, 256, batch_norm=True)
		self.c21 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
		self.c22 = M.ConvLayer(3, 128, batch_norm=True)
		self.c31 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
		self.c32 = M.ConvLayer(3, 128, batch_norm=True)
		self.act = M.Activation(M.PARAM_RELU)
Example #8
    def initialize(self, step, multiplier, c_pp, c_p, c, rate):
        self.multiplier = multiplier
        self.step = step
        if c_pp != -1:
            self.preprocess0 = M.ConvLayer(1,
                                           c,
                                           batch_norm=True,
                                           usebias=False)
        if rate == 2:
            self.preprocess1 = FactReduce(c)
        elif rate == 0:
            self.preprocess1 = FactIncrease(c)
        else:
            self.preprocess1 = M.ConvLayer(1,
                                           c,
                                           activation=M.PARAM_RELU,
                                           batch_norm=True,
                                           usebias=False)

        self._ops = nn.ModuleList()
        for i in range(step):
            for j in range(2 + i):
                if c_pp == -1 and j == 0:
                    self._ops.append(None)
                else:
                    self._ops.append(MixedOp(c, 1))
        self.conv_last = M.ConvLayer(1,
                                     c,
                                     activation=M.PARAM_RELU,
                                     batch_norm=True,
                                     usebias=False)
Example #9
	def initialize(self, outchns, strides):
		self.trans = nn.ModuleList()
		for i,(o,s) in enumerate(zip(outchns,strides)):
			if o is None or s is None:
				self.trans.append(None)
			else:
				self.trans.append(M.ConvLayer(3, o, stride=s, pad='SAME_LEFT', activation=M.PARAM_RELU, usebias=False, batch_norm=True))
Example #10
	def initialize(self, chn, stride=1, shortcut=False):
		self.bn0 = M.BatchNorm()
		self.act = M.Activation(M.PARAM_RELU)
		self.c1 = M.ConvLayer(1, chn, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
		self.c2 = M.ConvLayer(3, chn, stride=stride, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
		self.c3 = M.ConvLayer(1, chn*4, usebias=False)
		self.shortcut = shortcut
		if shortcut:
			self.sc = M.ConvLayer(1, chn*4, stride=stride, usebias=False)
Example #11
	def initialize(self, upsample_layers, upsample_chn):
		self.prevlayers = nn.ModuleList()
		#self.uplayer = M.DeConvLayer(3, upsample_chn, stride=2, activation=M.PARAM_PRELU, batch_norm=True, usebias=False)
		self.uplayer = M.ConvLayer(3, upsample_chn*4, activation=M.PARAM_PRELU, usebias=False)
		self.d2s = DepthToSpace(2)
		self.postlayers = nn.ModuleList()
		for i in range(upsample_layers):
			self.prevlayers.append(M.ConvLayer(3, upsample_chn, activation=M.PARAM_PRELU, batch_norm=True, usebias=False))
		for i in range(upsample_layers):
			self.postlayers.append(M.ConvLayer(3, upsample_chn, activation=M.PARAM_PRELU, batch_norm=True, usebias=False))
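self.uplayer outputs upsample_chn*4 channels because DepthToSpace(2) folds each group of four channels into a 2x2 spatial neighbourhood, turning an (N, 4*C, H, W) tensor into (N, C, 2*H, 2*W). torch.nn.PixelShuffle performs the same kind of rearrangement (channel-ordering conventions may differ):

import torch
from torch import nn

x = torch.randn(1, 4 * 32, 16, 16)
print(nn.PixelShuffle(2)(x).shape)   # torch.Size([1, 32, 32, 32])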
Example #12
 def initialize(self, num_layers, channel, density_level):
     self.layers = nn.ModuleList()
     for i in range(num_layers):
         self.layers.append(
             M.ConvLayer(3,
                         channel,
                         activation=M.PARAM_PRELU,
                         batch_norm=True,
                         usebias=False))
     self.layers.append(M.ConvLayer(1, density_level))
Example #13
	def initialize(self, unit_list, chn_list, multiplier, step):
		self.step = step 

		self.c1 = M.ConvLayer(5, 64, stride=2, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
		self.c2 = M.ConvLayer(3, 64, stride=1, activation=M.PARAM_RELU, batch_norm=True, usebias=False)

		self.stage1 = Stage(unit_list[0], chn_list[0], multiplier, step, reduce_size=chn_list[1])
		self.stage2 = Stage(unit_list[1], chn_list[1], multiplier, step, reduce_size=chn_list[2])
		self.stage3 = Stage(unit_list[2], chn_list[2], multiplier, step, reduce_size=chn_list[3])
		self.stage4 = Stage(unit_list[3], chn_list[3], multiplier, step, reduce_size=None)
Example #14
	def initialize(self, density_num_layers, density_channels, density_level, gcn_layers, gcn_channels, head_layernum, head_chn, upsample_layers, upsample_chn):
		self.backbone = hrnet.Body()
		self.upsample = UpSample(upsample_layers, upsample_chn)
		self.head = Head(head_layernum, head_chn)
		self.head2 = Head(head_layernum, head_chn)
		# self.head_density = Head(head_layernum, head_chn)
		# self.density_branch = DensityBranch(density_num_layers, density_channels, config.num_pts*2)
		# self.id_branch = DensityBranch(density_num_layers, density_channels, config.id_featdim * config.num_pts)
		# self.density_branch = M.ConvLayer(1, config.num_pts)
		self.c1 = M.ConvLayer(1, config.num_pts)
		self.c2 = M.ConvLayer(1, config.num_pts)
Example #15
 def initialize(self, num_pts, out_size):
     self.dekr = DEKR(num_pts)
     self.sampling = SamplingLayer(out_size)
     self.pre_conv = M.ConvLayer(3,
                                 32,
                                 activation=M.PARAM_RELU,
                                 batch_norm=True,
                                 usebias=False)
     self.refine_blocks = nn.ModuleList()
     self.refine_blocks.append(AdaptConvBlk(32))
     self.refine_blocks.append(AdaptConvBlk(32))
     self.final_conv = M.ConvLayer(1, num_pts + 1)
     self.num_pts = num_pts
Example #16
    def initialize(self, head_layernum, head_chn, upsample_layers,
                   upsample_chn):
        self.backbone = hrnet.Body()
        self.upsample = UpSample(upsample_layers, upsample_chn)
        self.head = Head(head_layernum, head_chn)
        self.head2 = Head(head_layernum, head_chn)
        self.head3 = Head(head_layernum, head_chn)
        self.head4 = Head(head_layernum, head_chn)

        self.c1 = M.ConvLayer(1, config.num_pts)
        self.c2 = M.ConvLayer(1, config.num_pts)
        self.c3 = M.ConvLayer(1, 1)
        self.c4 = M.ConvLayer(1, config.num_pts - 1)
Example #17
	def initialize(self, num_pts):
		self.backbone = Body()

		self.transition_hmap = M.ConvLayer(1, 32, usebias=False, batch_norm=True, activation=M.PARAM_RELU)
		self.head_hmap = ResBasicBlock(32, 1)
		self.conv_hmap = M.ConvLayer(1, num_pts+1)

		self.transition_off = M.ConvLayer(1, 15*num_pts, usebias=False, batch_norm=True, activation=M.PARAM_RELU)
		self.reg_blks_off = nn.ModuleList()
		self.convs_off = nn.ModuleList()
		for _ in range(num_pts):
			self.reg_blks_off.append(AdaptConvBlk())
			self.reg_blks_off.append(AdaptConvBlk())
			self.convs_off.append(M.ConvLayer(1, 2))
Example #18
    def initialize(self,
                   ksizes,
                   channels,
                   strides,
                   expansions,
                   repeats,
                   finalType='E'):
        self.finalType = finalType

        self.c0 = M.ConvLayer(3,
                              32,
                              batch_norm=True,
                              usebias=False,
                              activation=M.PARAM_PRELU)

        self.body = nn.ModuleList()
        for i, (k, c, s, e, r) in enumerate(
                zip(ksizes, channels, strides, expansions, repeats)):
            stage = nn.ModuleList()
            for j in range(r):
                stage.append(MBBlock(k, c, s if j == 0 else 1, e))
            self.body.append(stage)

        self.phi = Phi(512)
        self.lmf = LMF(ratio=0.9)
Example #19
 def initialize(self, out, stride):
     self.c1 = M.ConvLayer(5,
                           out,
                           stride=stride,
                           dilation_rate=2,
                           batch_norm=True,
                           usebias=False)
Example #20
	def initialize(self, channel_list, blocknum_list, embedding_size, embedding_bn=True):
		self.c1 = M.ConvLayer(3, channel_list[0], activation=M.PARAM_PRELU)
		# self.u1 = ResBlock_v1(channel_list[1], stride=2)
		self.stage1 = Stage(channel_list[1], blocknum_list[0])
		self.stage2 = Stage(channel_list[2], blocknum_list[1])
		self.stage3 = Stage(channel_list[3], blocknum_list[2])
		self.stage4 = Stage(channel_list[4], blocknum_list[3])
		self.bn1 = M.BatchNorm()
		self.fc1 = M.Dense(512)
Example #21
 def initialize(self, outchns):
     self.c1 = FuseUp(outchns[0])
     self.c2 = FuseUp(outchns[0])
     self.c3 = FuseUp(outchns[0])
     self.c_all = M.ConvLayer(3,
                              outchns[0] * 4,
                              activation=M.PARAM_RELU,
                              batch_norm=True,
                              usebias=False)
Example #22
 def initialize(self, head_layernum, head_chn):
     self.layers = nn.ModuleList()
     for i in range(head_layernum):
         self.layers.append(
             M.ConvLayer(3,
                         head_chn,
                         activation=M.PARAM_PRELU,
                         batch_norm=True,
                         usebias=False))
Example #23
	def initialize(self, step, multiplier, c):
		self.multiplier = multiplier
		self.step = step

		self._ops = nn.ModuleList()
		for i in range(step):
			for j in range(1+i):
				self._ops.append(MixedOp(c, 1))
		self.conv_last = M.ConvLayer(1, c, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
Example #24
	def initialize(self, channel_list, blocknum_list, embedding_size, embedding_bn=True):
		self.c1 = M.ConvLayer(3, channel_list[0], 1, usebias=False, activation=M.PARAM_PRELU, batch_norm=True)
		# self.u1 = ResBlock_v1(channel_list[1], stride=2)
		self.stage1 = Stage(channel_list[1], blocknum_list[0])
		self.stage2 = Stage(channel_list[2], blocknum_list[1])
		self.stage3 = Stage(channel_list[3], blocknum_list[2])
		self.stage4 = Stage(channel_list[4], blocknum_list[3])
		self.bn1 = M.BatchNorm()
		print('Embedding_size:', embedding_size)
		self.fc1 = M.Dense(embedding_size, usebias=False)
Example #25
 def initialize(self, outchn, stride):
     self.stride = stride
     self.outchn = outchn
     self.c1 = M.ConvLayer(1,
                           outchn,
                           batch_norm=True,
                           usebias=False,
                           activation=M.PARAM_RELU)
     self.c2 = M.ConvLayer(3,
                           outchn,
                           batch_norm=True,
                           stride=stride,
                           usebias=False,
                           activation=M.PARAM_RELU)
     self.c3 = M.ConvLayer(1, outchn * 4, batch_norm=True, usebias=False)
     self.sc = M.ConvLayer(1,
                           outchn * 4,
                           batch_norm=True,
                           stride=stride,
                           usebias=False)
Example #26
	def initialize(self):
		self.bn0 = M.BatchNorm()
		self.c1 = M.ConvLayer(7, 64, stride=2, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
		self.pool = M.MaxPool2D(3, 2)
		self.stage1 = Stage(64, num_units=3, stride=1)
		self.stage2 = Stage(128, num_units=4, stride=2)
		self.stage3 = Stage(256, num_units=6, stride=2)
		self.stage4 = Stage(512, num_units=3, stride=2)
		self.bn1 = M.BatchNorm()
		self.act = M.Activation(M.PARAM_RELU)

		self.ssh_c3_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
		self.det3 = DETHead()
		self.head32 = RegressHead()

		self.ssh_c2_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
		self.ssh_c3_upsampling = M.NNUpSample(2)
		self.ssh_c2_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
		self.det2 = DETHead()
		self.head16 = RegressHead()

		self.ssh_m1_red_conv = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
		self.ssh_c2_upsampling = M.NNUpSample(2)
		self.ssh_c1_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
		self.det1 = DETHead()
		self.head8 = RegressHead()
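The lateral 1x1 convs, NNUpSample layers, and 3x3 aggregation convs implement a top-down feature-pyramid merge between backbone stages. The forward pass is not part of this example; the helper below is only one plausible way the pieces combine, and the cropping step is an assumption:

def merge_level(lateral_conv, upsample, aggr_conv, fine_feat, coarse_feat):
    lateral = lateral_conv(fine_feat)                     # 1x1 lateral projection
    up = upsample(coarse_feat)                            # 2x nearest-neighbour upsample
    up = up[:, :, :lateral.shape[2], :lateral.shape[3]]   # crop if sizes were odd
    return aggr_conv(lateral + up)                        # 3x3 smoothing conv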
Example #27
 def initialize(self, channel_list, blocknum_list):
     self.c1 = M.ConvLayer(7,
                           channel_list[0],
                           stride=2,
                           usebias=False,
                           batch_norm=True,
                           activation=M.PARAM_RELU)
     self.maxpool = M.MaxPool2D(3, 2)
     self.stage1 = Stage(channel_list[1], blocknum_list[0], stride=1)
     self.stage2 = Stage(channel_list[2], blocknum_list[1], stride=2)
     self.stage3 = Stage(channel_list[3], blocknum_list[2], stride=2)
     self.stage4 = Stage(channel_list[4], blocknum_list[3], stride=2)
     self.fc1 = M.Dense(1000)
Example #28
 def initialize(self):
     self.fc = nn.Linear(384, 192)
     self.block = Block(dim=192,
                        num_heads=3,
                        mlp_ratio=1,
                        qkv_bias=True,
                        qk_scale=None,
                        drop=0.0,
                        attn_drop=0.0,
                        drop_path=0.0,
                        norm_layer=nn.LayerNorm)
     self.upsample = UpSample(1, 32)
     self.conv = M.ConvLayer(1, 1)
Example #29
    def initialize(self, channel_list, blocknum_list, drop_prob):
        self.c1 = M.ConvLayer(3,
                              channel_list[0],
                              usebias=False,
                              batch_norm=True,
                              activation=M.PARAM_PRELU)
        self.stage1 = Stage(channel_list[1], blocknum_list[0], drop_prob)
        self.stage2 = Stage(channel_list[2], blocknum_list[1], drop_prob)
        self.stage3 = Stage(channel_list[3], blocknum_list[2], drop_prob)
        self.stage4 = Stage(channel_list[4], blocknum_list[3], drop_prob)

        self.bn1 = M.BatchNorm()
        self.fc1 = M.Dense(512, usebias=False, batch_norm=True)
Example #30
 def initialize(self, chn, frac, prob):
     self.c1 = M.ConvLayer(3,
                           chn,
                           activation=M.PARAM_PRELU,
                           usebias=False,
                           batch_norm=True)
     self.frac = frac
     if frac == 0:
         self.frac1 = M.ConvLayer(3,
                                  chn,
                                  activation=M.PARAM_PRELU,
                                  usebias=False,
                                  batch_norm=True)
         self.frac2 = M.ConvLayer(3,
                                  chn,
                                  activation=M.PARAM_PRELU,
                                  usebias=False,
                                  batch_norm=True)
     else:
         self.frac1 = FracUnit(chn, frac - 1, prob)
         self.merge = Merge(prob)
         self.frac2 = FracUnit(chn, frac - 1, prob)
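Because every FracUnit with frac > 0 builds two sub-units at frac - 1, the construction expands fractally: the number of innermost (frac == 0) units doubles with each extra level. A quick count of that growth:

def count_base_units(frac):
    # each level above 0 spawns two sub-units one level down
    return 1 if frac == 0 else 2 * count_base_units(frac - 1)

print(count_base_units(3))   # 8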