Example No. 1
    def initialize(self, step, multiplier, c_pp, c_p, c, rate):
        self.multiplier = multiplier
        self.step = step
        if c_pp != -1:
            self.preprocess0 = M.ConvLayer(1,
                                           c,
                                           batch_norm=True,
                                           usebias=False)
        if rate == 2:
            self.preprocess1 = FactReduce(c)
        elif rate == 0:
            self.preprocess1 = FactIncrease(c)
        else:
            self.preprocess1 = M.ConvLayer(1,
                                           c,
                                           activation=M.PARAM_RELU,
                                           batch_norm=True,
                                           usebias=False)

        self._ops = nn.ModuleList()
        # each step i contributes (2 + i) candidate connections, so a cell holds 2+3+...+(step+1) mixed ops (14 when step == 4)
        for i in range(step):
            for j in range(2 + i):
                if c_pp == -1 and j == 0:
                    self._ops.append(None)
                else:
                    self._ops.append(MixedOp(c, 1))
        self.conv_last = M.ConvLayer(1,
                                     c,
                                     activation=M.PARAM_RELU,
                                     batch_norm=True,
                                     usebias=False)
Example No. 2
	def initialize(self):
		self.c1 = M.ConvLayer(3, 64, pad='SAME_LEFT', stride=2, activation=M.PARAM_RELU, usebias=False, batch_norm=True)
		self.c2 = M.ConvLayer(3, 64, pad='SAME_LEFT', stride=2, activation=M.PARAM_RELU, usebias=False, batch_norm=True)
		self.layer1 = ResBlock(256, 1, 4)
		self.stage1 = Stage([32, 64], [1, 2], 4, 1)
		self.stage2 = Stage([32, 64, 128], [None, None, 2], 4, 4)
		self.stage3 = Stage([32, 64, 128, 256], [None, None, None, 2], 4, 3)
Example No. 3
    def initialize(self, num_layer, num_chn, multiplier, step):
        # step: the number of mixed ops placed in one layer
        # num_layer is not the number of conv layers, but the number of layer blocks
        self.num_layer = num_layer
        self.step = step

        self.c1 = M.ConvLayer(5,
                              64,
                              stride=2,
                              activation=M.PARAM_RELU,
                              batch_norm=True,
                              usebias=False)

        cells = nn.ModuleList()
        connections = []
        for i in range(num_layer):
            in_size = min(i + 1, 4)
            out_size = min(i + 2, 4)

            cell, conn = build_cells(step, multiplier, num_chn, in_size,
                                     out_size)
            cells.append(cell)
            connections.append(conn)

        self.cells = cells
        self.connections = connections

        self.down1 = FuseDown(3, 64, 512)
        self.down2 = FuseDown(2, 128, 512)
        self.down3 = FuseDown(1, 256, 512)
        self.down4 = M.ConvLayer(1, 512, batch_norm=True, usebias=False)

        self.final_conv = M.ConvLayer(1, 512, batch_norm=True, usebias=False)
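A purely illustrative way to build this search network, tying together the two knobs described in the comments above (step = mixed ops per cell, num_layer = number of cell blocks). It assumes the M framework forwards constructor arguments to initialize; the class name, argument values, and input shape are placeholders, not values from the source repo:

    import torch

    net = CellSearchNet(num_layer=12, num_chn=64, multiplier=4, step=4)  # hypothetical class name
    out = net(torch.zeros(1, 3, 256, 256))  # the 5x5 stem conv halves the resolution, then the cell blocks run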
Example No. 4
	def initialize(self, out, stride, shortcut=False):
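		# bottleneck residual unit: 1x1 reduce to out//4, strided 3x3, 1x1 expand to out, optional 1x1 projection shortcut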
		self.shortcut = shortcut
		self.c1 = M.ConvLayer(1, out//4, usebias=False, activation=M.PARAM_RELU, batch_norm=True)
		self.c2 = M.ConvLayer(3, out//4, usebias=False, activation=M.PARAM_RELU, pad='SAME_LEFT', stride=stride, batch_norm=True)
		self.c3 = M.ConvLayer(1, out, usebias=False, batch_norm=True)
		if shortcut:
			self.sc = M.ConvLayer(1, out, usebias=False, stride=stride, batch_norm=True)
Example No. 5
	def initialize(self, steps, inp, o):
		self.mods = nn.ModuleList()
		for i in range(steps):
			if i==(steps-1):
				self.mods.append(M.ConvLayer(3, o, stride=2, pad='SAME_LEFT', batch_norm=True, usebias=False))
			else:
				self.mods.append(M.ConvLayer(3, inp, stride=2, pad='SAME_LEFT', activation=M.PARAM_RELU, batch_norm=True, usebias=False))
Example No. 6
    def initialize(self, outchn, stride):
        self.stride = stride
        self.outchn = outchn
        self.bn0 = M.BatchNorm()
        self.c1 = M.ConvLayer(3,
                              outchn,
                              activation=M.PARAM_PRELU,
                              usebias=False,
                              batch_norm=True)
        self.c2 = M.ConvLayer(3,
                              outchn,
                              stride=stride,
                              usebias=False,
                              batch_norm=True)

        # se module
        #self.c3 = M.ConvLayer(1, outchn//16, activation=M.PARAM_PRELU)
        #self.c4 = M.ConvLayer(1, outchn, activation=M.PARAM_SIGMOID)

        # shortcut
        self.sc = M.ConvLayer(1,
                              outchn,
                              stride=stride,
                              usebias=False,
                              batch_norm=True)
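One plausible forward wiring for this unit, shown only to illustrate how the pieces above fit together (the snippet does not include the repo's actual forward):

    def forward(self, x):
        # main branch: BN, 3x3 PReLU conv, strided 3x3 conv
        branch = self.c2(self.c1(self.bn0(x)))
        # 1x1 projection shortcut matches channels and stride before the residual add
        return branch + self.sc(x)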
Example No. 7
 def initialize(self,
                num_kpts,
                temp_length,
                input_dimension=3,
                output_dimension=3,
                output_pts=None):
     self.num_kpts = num_kpts
     self.temp_length = temp_length
     self.output_dimension = output_dimension
     self.output_pts = num_kpts if output_pts is None else output_pts
     self.input_dimension = input_dimension
     self.c1 = M.ConvLayer1D(3,
                             1024,
                             stride=3,
                             activation=M.PARAM_PRELU,
                             pad='VALID',
                             batch_norm=True,
                             usebias=False)
     self.r1 = ResBlock1D(k=3)
     self.r2 = ResBlock1D(k=3)
     self.r3 = ResBlock1D(k=3)
     self.r4 = ResBlock1D(k=3)
     # self.r3 = ResBlock1D(k=3, dilation=3)
     # self.c5 = M.ConvLayer1D(9, 256, activation=M.PARAM_PRELU, pad='VALID', batch_norm=True, usebias=False)
     self.c4 = M.ConvLayer1D(1, self.output_pts * self.output_dimension)
Example No. 8
 def initialize(self, num_heads, dim_per_head, drop):
     self.att = MultiHeadAtt(num_heads, dim_per_head, drop)
     self.l1 = M.Dense(dim_per_head * num_heads * 4)
     self.l2 = M.Dense(dim_per_head * num_heads)
     self.ln1 = M.LayerNorm(1)
     self.ln2 = M.LayerNorm(1)
     self.drop = drop
Example No. 9
	def initialize(self, dim, num_heads, attn_drop):
		self.num_heads = num_heads
		self.head_dim = dim // num_heads
		self.scale = self.head_dim ** -0.5 
		self.attn_drop = attn_drop

		self.qkv = M.Dense(dim*3, usebias=True)
		self.proj = M.Dense(dim)
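A standard scaled-dot-product forward that matches the fields defined above (an illustrative sketch, not taken from the source repo; it assumes import torch.nn.functional as F and an input of shape (B, N, dim)):

	def forward(self, x):
		B, N, C = x.shape
		# project to q, k, v and split into heads: (3, B, heads, N, head_dim)
		qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)
		q, k, v = qkv[0], qkv[1], qkv[2]
		attn = (q @ k.transpose(-2, -1)) * self.scale  # (B, heads, N, N)
		attn = F.dropout(attn.softmax(dim=-1), p=self.attn_drop, training=self.training)
		x = (attn @ v).transpose(1, 2).reshape(B, N, C)
		return self.proj(x)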
Example No. 10
	def initialize(self, channel_list, blocknum_list, embedding_size, embedding_bn=True):
		self.c1 = M.ConvLayer(3, channel_list[0], activation=M.PARAM_PRELU)
		# self.u1 = ResBlock_v1(channel_list[1], stride=2)
		self.stage1 = Stage(channel_list[1], blocknum_list[0])
		self.stage2 = Stage(channel_list[2], blocknum_list[1])
		self.stage3 = Stage(channel_list[3], blocknum_list[2])
		self.stage4 = Stage(channel_list[4], blocknum_list[3])
		self.bn1 = M.BatchNorm()
		self.fc1 = M.Dense(512)
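		# note: the embedding_size and embedding_bn arguments are not referenced in this initializer; fc1 is fixed at 512 units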
Example No. 11
	def initialize(self, outchns, strides):
		self.trans = nn.ModuleList()
		for i,(o,s) in enumerate(zip(outchns,strides)):
			if o is None or s is None:
				self.trans.append(None)
			else:
				# stride-1 and strided transitions currently use the same 3x3 conv
				self.trans.append(M.ConvLayer(3, o, stride=s, pad='SAME_LEFT', activation=M.PARAM_RELU, usebias=False, batch_norm=True))
Example No. 12
	def initialize(self, unit_list, chn_list, multiplier, step):
		self.step = step 

		self.c1 = M.ConvLayer(5, 64, stride=2, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
		self.c2 = M.ConvLayer(3, 64, stride=1, activation=M.PARAM_RELU, batch_norm=True, usebias=False)

		self.stage1 = Stage(unit_list[0], chn_list[0], multiplier, step, reduce_size=chn_list[1])
		self.stage2 = Stage(unit_list[1], chn_list[1], multiplier, step, reduce_size=chn_list[2])
		self.stage3 = Stage(unit_list[2], chn_list[2], multiplier, step, reduce_size=chn_list[3])
		self.stage4 = Stage(unit_list[3], chn_list[3], multiplier, step, reduce_size=None)
Example No. 13
 def initialize(self, outchn=512, dilation=1, k=3):
     self.bn = M.BatchNorm()
     self.c1 = M.ConvLayer1D(k,
                             outchn,
                             dilation_rate=dilation,
                             activation=M.PARAM_PRELU,
                             batch_norm=True,
                             usebias=False,
                             pad='VALID')
     self.c2 = M.ConvLayer1D(3, outchn, pad='VALID')
Example No. 14
	def initialize(self, channel_list, blocknum_list, embedding_size, embedding_bn=True):
		self.c1 = M.ConvLayer(3, channel_list[0], 1, usebias=False, activation=M.PARAM_PRELU, batch_norm=True)
		# self.u1 = ResBlock_v1(channel_list[1], stride=2)
		self.stage1 = Stage(channel_list[1], blocknum_list[0])
		self.stage2 = Stage(channel_list[2], blocknum_list[1])
		self.stage3 = Stage(channel_list[3], blocknum_list[2])
		self.stage4 = Stage(channel_list[4], blocknum_list[3])
		self.bn1 = M.BatchNorm()
		print('Embedding_size:', embedding_size)
		self.fc1 = M.Dense(embedding_size, usebias=False)
Example No. 15
	def initialize(self, upsample_layers, upsample_chn):
		self.prevlayers = nn.ModuleList()
		#self.uplayer = M.DeConvLayer(3, upsample_chn, stride=2, activation=M.PARAM_PRELU, batch_norm=True, usebias=False)
		self.uplayer = M.ConvLayer(3, upsample_chn*4, activation=M.PARAM_PRELU, usebias=False)
		self.d2s = DepthToSpace(2)
		self.postlayers = nn.ModuleList()
		for i in range(upsample_layers):
			self.prevlayers.append(M.ConvLayer(3, upsample_chn, activation=M.PARAM_PRELU, batch_norm=True, usebias=False))
		for i in range(upsample_layers):
			self.postlayers.append(M.ConvLayer(3, upsample_chn, activation=M.PARAM_PRELU, batch_norm=True, usebias=False))
Example No. 16
 def initialize(self, num_layers, channel, density_level):
     self.layers = nn.ModuleList()
     for i in range(num_layers):
         self.layers.append(
             M.ConvLayer(3,
                         channel,
                         activation=M.PARAM_PRELU,
                         batch_norm=True,
                         usebias=False))
     self.layers.append(M.ConvLayer(1, density_level))
Example No. 17
 def initialize(self, num_layers, channel, final_chn):
     self.layers = nn.ModuleList()
     for i in range(num_layers):
         self.layers.append(
             M.Dense(
                 channel,
                 activation=M.PARAM_PRELU,
                 usebias=False,
                 batch_norm=True))  # Laplacian normalization is applied in a previous step
     self.layers.append(M.Dense(final_chn))
Example No. 18
	def initialize(self, num_kpts, temp_length):
		self.num_kpts = num_kpts
		self.temp_length = temp_length
		self.c1 = M.ConvLayer1D(3, 1024, stride=3, activation=M.PARAM_PRELU, pad='VALID', batch_norm=True, usebias=False)
		self.r1 = ResBlock1D(k=3)
		self.r2 = ResBlock1D(k=3)
		self.r3 = ResBlock1D(k=3)
		self.r4 = ResBlock1D(k=3)
		# self.r3 = ResBlock1D(k=3, dilation=3)
		# self.c5 = M.ConvLayer1D(9, 256, activation=M.PARAM_PRELU, pad='VALID', batch_norm=True, usebias=False)
		self.c4 = M.ConvLayer1D(1, num_kpts*3)
Example No. 19
	def initialize(self, density_num_layers, density_channels, density_level, gcn_layers, gcn_channels, head_layernum, head_chn, upsample_layers, upsample_chn):
		self.backbone = hrnet.Body()
		self.upsample = UpSample(upsample_layers, upsample_chn)
		self.head = Head(head_layernum, head_chn)
		self.head2 = Head(head_layernum, head_chn)
		# self.head_density = Head(head_layernum, head_chn)
		# self.density_branch = DensityBranch(density_num_layers, density_channels, config.num_pts*2)
		# self.id_branch = DensityBranch(density_num_layers, density_channels, config.id_featdim * config.num_pts)
		# self.density_branch = M.ConvLayer(1, config.num_pts)
		self.c1 = M.ConvLayer(1, config.num_pts)
		self.c2 = M.ConvLayer(1, config.num_pts)
Example No. 20
    def initialize(self, D=8, W=256, skips=[4]):
        # use_viewdirs = True 
        self.layers = nn.ModuleList()
        for i in range(D):
            self.layers.append(M.Dense(W, activation=M.PARAM_RELU))
        
        self.skips = skips

        self.alpha_fc = M.Dense(1)
        self.bottleneck = M.Dense(256)
        self.hidden = M.Dense(W//2)
        self.out_fc = M.Dense(3)
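This layout matches a NeRF-style MLP: D hidden Dense layers with a skip connection at the indices in skips, a density head (alpha_fc), a bottleneck, and an RGB head (out_fc). A purely illustrative forward under that reading, not taken from the source repo, assuming point features pts and view directions viewdirs as inputs:

    def forward(self, pts, viewdirs):
        h = pts
        for i, layer in enumerate(self.layers):
            h = layer(h)
            if i in self.skips:
                # skip connection: re-inject the raw input features
                h = torch.cat([pts, h], dim=-1)
        alpha = self.alpha_fc(h)  # density
        b = self.bottleneck(h)
        h = self.hidden(torch.cat([b, viewdirs], dim=-1))
        rgb = self.out_fc(h)
        return torch.cat([rgb, alpha], dim=-1)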
Example No. 21
    def initialize(self, head_layernum, head_chn, upsample_layers,
                   upsample_chn):
        self.backbone = hrnet.Body()
        self.upsample = UpSample(upsample_layers, upsample_chn)
        self.head = Head(head_layernum, head_chn)
        self.head2 = Head(head_layernum, head_chn)
        self.head3 = Head(head_layernum, head_chn)
        self.head4 = Head(head_layernum, head_chn)

        self.c1 = M.ConvLayer(1, config.num_pts)
        self.c2 = M.ConvLayer(1, config.num_pts)
        self.c3 = M.ConvLayer(1, 1)
        self.c4 = M.ConvLayer(1, config.num_pts - 1)
Example No. 22
    def initialize(self, channel_list, blocknum_list, drop_prob):
        self.c1 = M.ConvLayer(3,
                              channel_list[0],
                              usebias=False,
                              batch_norm=True,
                              activation=M.PARAM_PRELU)
        self.stage1 = Stage(channel_list[1], blocknum_list[0], drop_prob)
        self.stage2 = Stage(channel_list[2], blocknum_list[1], drop_prob)
        self.stage3 = Stage(channel_list[3], blocknum_list[2], drop_prob)
        self.stage4 = Stage(channel_list[4], blocknum_list[3], drop_prob)

        self.bn1 = M.BatchNorm()
        self.fc1 = M.Dense(512, usebias=False, batch_norm=True)
Example No. 23
 def initialize(self, num_pts, out_size):
     self.dekr = DEKR(num_pts)
     self.sampling = SamplingLayer(out_size)
     self.pre_conv = M.ConvLayer(3,
                                 32,
                                 activation=M.PARAM_RELU,
                                 batch_norm=True,
                                 usebias=False)
     self.refine_blocks = nn.ModuleList()
     self.refine_blocks.append(AdaptConvBlk(32))
     self.refine_blocks.append(AdaptConvBlk(32))
     self.final_conv = M.ConvLayer(1, num_pts + 1)
     self.num_pts = num_pts
Example No. 24
 def initialize(self, channel_list, blocknum_list):
     self.c1 = M.ConvLayer(7,
                           channel_list[0],
                           stride=2,
                           usebias=False,
                           batch_norm=True,
                           activation=M.PARAM_RELU)
     self.maxpool = M.MaxPool2D(3, 2)
     self.stage1 = Stage(channel_list[1], blocknum_list[0], stride=1)
     self.stage2 = Stage(channel_list[2], blocknum_list[1], stride=2)
     self.stage3 = Stage(channel_list[3], blocknum_list[2], stride=2)
     self.stage4 = Stage(channel_list[4], blocknum_list[3], stride=2)
     self.fc1 = M.Dense(1000)
Example No. 25
	def initialize(self, num_pts):
		self.backbone = Body()

		self.transition_hmap = M.ConvLayer(1, 32, usebias=False, batch_norm=True, activation=M.PARAM_RELU)
		self.head_hmap = ResBasicBlock(32, 1)
		self.conv_hmap = M.ConvLayer(1, num_pts+1)

		self.transition_off = M.ConvLayer(1, 15*num_pts, usebias=False, batch_norm=True, activation=M.PARAM_RELU)
		self.reg_blks_off = nn.ModuleList()
		self.convs_off = nn.ModuleList()
		for _ in range(num_pts):
			self.reg_blks_off.append(AdaptConvBlk())
			self.reg_blks_off.append(AdaptConvBlk())
			self.convs_off.append(M.ConvLayer(1, 2))
Example No. 26
 def initialize(self, outchn=1024, k=3):
     self.k = k
     # self.bn = M.BatchNorm()
     self.c1 = M.ConvLayer1D(k,
                             outchn,
                             stride=k,
                             activation=M.PARAM_PRELU,
                             batch_norm=True,
                             usebias=False,
                             pad='VALID')
     self.c2 = M.ConvLayer1D(1,
                             outchn,
                             activation=M.PARAM_PRELU,
                             batch_norm=True,
                             usebias=False,
                             pad='VALID')
Example No. 27
    def __init__(self, modelpath, nms=0.4):
        model = mnet.Detector()
        model = model.eval()
        x = torch.from_numpy(np.ones([1, 3, 640, 640]).astype(np.float32))
        _ = model(x)
        M.Saver(model).restore(modelpath)
        model.cuda()
        self.model = model

        self.nms_threshold = nms

        self.fpn_keys = []
        self._feat_stride_fpn = [32, 16, 8]

        for s in self._feat_stride_fpn:
            self.fpn_keys.append('stride%s' % s)

        self._anchors_fpn = dict(
            zip(self.fpn_keys, generate_anchors_fpn(dense_anchor=False)))
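        # move each stride's anchor template onto the GPU as a float32 tensor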
        for k in self._anchors_fpn:
            v = self._anchors_fpn[k].astype(np.float32)
            v = torch.from_numpy(v).cuda()
            self._anchors_fpn[k] = v

        self._num_anchors = dict(
            zip(self.fpn_keys,
                [anchors.shape[0] for anchors in self._anchors_fpn.values()]))
Example No. 28
    def __init__(self, modelpath, nms=0.4, worker=2):
        model = retina_resnet.Detector()
        model = model.eval()
        x = torch.from_numpy(np.ones([1, 3, 640, 640]).astype(np.float32))
        _ = model(x)
        M.Saver(model).restore(modelpath)
        model.cuda()
        if isinstance(config.gpus, list):
            if len(config.gpus) > 1:
                print('Using multiple gpus:', config.gpus)
                model = torch.nn.DataParallel(model, device_ids=config.gpus)
        self.model = model

        self.nms_threshold = nms

        self.fpn_keys = []
        self._feat_stride_fpn = [32, 16, 8]

        for s in self._feat_stride_fpn:
            self.fpn_keys.append('stride%s' % s)

        self._anchors_fpn = dict(
            zip(self.fpn_keys, generate_anchors_fpn(dense_anchor=False)))
        for k in self._anchors_fpn:
            v = self._anchors_fpn[k].astype(np.float32)
            v = torch.from_numpy(v).cuda()
            self._anchors_fpn[k] = v

        self._num_anchors = dict(
            zip(self.fpn_keys,
                [anchors.shape[0] for anchors in self._anchors_fpn.values()]))
        self.worker = worker
Example No. 29
 def forward(self, x):
     x = M.activation(x, M.PARAM_RELU)
     x1 = self.c1(x)
     x2 = self.c2(x[:, 1:, 1:, :])
     x = torch.cat([x1, x2], dim=1)
     x = self.bn(x)
     return x
Example No. 30
 def initialize(self, out, stride):
     self.c1 = M.ConvLayer(5,
                           out,
                           stride=stride,
                           dilation_rate=2,
                           batch_norm=True,
                           usebias=False)