# Stacks conv + leaky-ReLU pairs, one per entry in the list arguments, with an optional trailing max-pool
def block(idx, inmaps, outmaps, sizeconv, strideconv, initscheme, actInPlace, sizepool=2, stridepool=2,
          addMaxpool=True):
    assert len(inmaps) == len(outmaps) == len(sizeconv) == len(strideconv) == len(idx), \
        "all list arguments must have the same length"

    seq = Sequential()

    for i in range(len(inmaps)):
        seq.append(Conv2D(
            inmaps=inmaps[i], outmaps=outmaps[i], size=sizeconv[i], pad=sizeconv[i] // 2, stride=strideconv[i],
            initscheme=initscheme, dilation=1, useBias=True, name="conv%s" % idx[i]
        ))
        seq.append(Activation(leakyRelu, inplace=actInPlace, args=(0.01, )))

    if addMaxpool:
        seq.append(MaxPool2D(size=sizepool, stride=stridepool, name="conv%s_pool" % idx[-1]))

    return seq
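# A minimal usage sketch for block (this example is not from the original source):
# a two-conv VGG-style stage. All list arguments must share the same length;
# idx supplies the per-layer name suffixes, so the layers here are named
# "conv1_1", "conv1_2" and the pool "conv1_2_pool".
def blockUsageExample():
    stage = block(
        idx=["1_1", "1_2"], inmaps=[3, 64], outmaps=[64, 64],
        sizeconv=[3, 3], strideconv=[1, 1],
        initscheme="none", actInPlace=True
    )
    return stage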
def convBN(inmaps, outmaps, size, stride, pad, name, actInplace, bnInplace, scheme, typ="bn"):
    block = Sequential()

    if typ == "bn":
        names = ["conv_%s" % name, "bn_%s" % name, "relu_%s" % name]
    elif typ == "v3":
        names = ["%s_conv2d" % name, "%s_batchnorm" % name, "%s_relu" % name]
    else:
        raise ValueError("Unrecognized convBN type")

    block.append(Conv2D(inmaps, outmaps, size, stride, pad, useBias=False, initscheme=scheme, name=names[0]))
    block.append(BatchNorm2D(outmaps, inplace=bnInplace, name=names[1]))
    block.append(Activation(relu, inplace=actInplace, name=names[2]))

    return block
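# A minimal usage sketch for convBN (not from the original source): a 3x3
# conv + batchnorm + ReLU unit. typ="bn" yields Inception-BN style names
# ("conv_2a", "bn_2a", "relu_2a"); typ="v3" would yield Inception-V3 style
# names ("2a_conv2d", "2a_batchnorm", "2a_relu") instead.
def convBNUsageExample():
    unit = convBN(inmaps=64, outmaps=128, size=3, stride=1, pad=1, name="2a",
                  actInplace=True, bnInplace=True, scheme="none", typ="bn")
    return unit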
def residMiniBlock(inmaps, outmaps, size, stride, pad, blockname, mininame, addAct, actInplace, bnInplace,
                   initscheme):
    block = Sequential()

    block.append(Conv2D(
        inmaps, outmaps, size, stride=stride, pad=pad, useBias=False, initscheme=initscheme,
        name="res%s_branch%s" % (blockname, mininame)
    ))
    block.append(BatchNorm2D(outmaps, name="bn%s_branch%s" % (blockname, mininame), inplace=bnInplace))

    if addAct:
        block.append(Activation(relu, inplace=actInplace, name="res%s_branch%s_relu" % (blockname, mininame)))

    return block
def buildSmallBranch(inplace=True, num=1):
    branch = Sequential()

    branch.append(Conv2D(128, 128, 3, pad=1, initscheme="none", name="conv5_1_CPM_L%d" % num))
    branch.append(Activation(relu, inplace=inplace, name="relu5_1_CPM_L%d" % num))

    branch.append(Conv2D(128, 128, 3, pad=1, initscheme="none", name="conv5_2_CPM_L%d" % num))
    branch.append(Activation(relu, inplace=inplace, name="relu5_2_CPM_L%d" % num))

    branch.append(Conv2D(128, 128, 3, pad=1, initscheme="none", name="conv5_3_CPM_L%d" % num))
    branch.append(Activation(relu, inplace=inplace, name="relu5_3_CPM_L%d" % num))

    branch.append(Conv2D(128, 512, 1, initscheme="none", name="conv5_4_CPM_L%d" % num))
    branch.append(Activation(relu, inplace=inplace, name="relu5_4_CPM_L%d" % num))

    # num=1 -> 19 * 2 = 38 part affinity field maps, num=2 -> 19 keypoint heatmaps
    branch.append(Conv2D(512, 19 * (3 - num), 1, initscheme="none", name="conv5_5_CPM_L%d" % num))

    return branch
def buildBranch(fHeight, sentlength, branchMaps, embsize):
    seq = Sequential()

    seq.append(Conv2D(1, outmaps=branchMaps, size=(fHeight, embsize)))
    seq.append(MaxPool2D(size=(sentlength - fHeight + 1, 1)))
    seq.append(Reshape((-1, branchMaps)))

    return seq
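# A shape walkthrough for buildBranch (hypothetical numbers, not from the
# original source): with 20 tokens, 128-dim embeddings and filter height 3,
# the conv produces a (20 - 3 + 1) x 1 = 18 x 1 map per filter, the max-pool
# collapses it to 1 x 1, and Reshape leaves a (batchsize, branchMaps) matrix.
def buildBranchShapeExample():
    branch = buildBranch(fHeight=3, sentlength=20, branchMaps=100, embsize=128)
    return branch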
def trainHardTest(optCls, dtype, *args, **kwargs):
    from PuzzleLib.Containers.Sequential import Sequential

    from PuzzleLib.Modules.Conv2D import Conv2D
    from PuzzleLib.Modules.BatchNorm2D import BatchNorm2D
    from PuzzleLib.Modules.Activation import Activation, relu
    from PuzzleLib.Modules.Cast import Cast

    from PuzzleLib.Cost.MSE import MSE

    seq = Sequential()

    seq.append(Conv2D(4, 8, 5, pad=1))
    seq.append(BatchNorm2D(8))
    seq.append(Activation(relu))

    seq.append(Conv2D(8, 16, 5, pad=1))

    # switch the modules appended so far to the requested compute dtype,
    # then cast activations back to float32 so the MSE cost runs in float32
    seq.calcMode(dtype)
    seq.append(Cast(intype=dtype, outtype=np.float32))

    optimizer = optCls(*args, **kwargs)
    optimizer.setupOn(seq, useGlobalState=True)

    mse = MSE()

    data = gpuarray.to_gpu(np.random.randn(4, 4, 5, 5).astype(dtype))
    target = gpuarray.to_gpu(np.random.randn(4, 16, 1, 1).astype(np.float32))

    for i in range(200):
        error, grad = mse(seq(data), target)

        optimizer.zeroGradParams()
        seq.backward(grad)
        optimizer.update()

        if (i + 1) % 5 == 0:
            print("Iteration #%d error: %s" % (i + 1, error))
def rnnTest():
    batchsize, inmaps, inh, inw = 4, 2, 3, 3
    outmaps, hsize = 4, 1

    seq = Sequential(name="rnn")

    seq.append(Conv2D(inmaps, outmaps, 3, pad=1))
    seq.append(Activation(relu))

    seq.append(Reshape(shape=(batchsize, outmaps, inh * inw)))
    seq.append(SwapAxes(0, 1))

    seq.append(RNN(inh * inw, hsize, layers=2, direction="bi", mode="tanh", getSequences=True,
                   hintBatchSize=batchsize))
    seq.append(SwapAxes(0, 1))

    data = gpuarray.to_gpu(np.random.randn(batchsize, inmaps, inh, inw).astype(np.float32))

    engine = buildRTEngine(seq, data.shape, savepath="../TestData", dtype=DataType.float32)

    outdata = seq(data)
    enginedata = engine(data)

    assert np.allclose(outdata.get(), enginedata.get())
def loadInceptionBN(modelpath, actInplace=False, bnInplace=False, initscheme="none", name="Inception-BN-0126"):
    net = Sequential(name=name)

    net.append(Conv2D(3, 64, 7, stride=2, pad=3, useBias=False, initscheme=initscheme, name="conv_1"))
    net.append(BatchNorm2D(64, inplace=bnInplace, name="bn_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu_1"))
    net.append(MaxPool2D(3, 2, pad=1, name="pool_1"))

    net.append(Conv2D(64, 64, 1, useBias=False, initscheme=initscheme, name="conv_2_red"))
    net.append(BatchNorm2D(64, inplace=bnInplace, name="bn_2_red"))
    net.append(Activation(relu, inplace=actInplace, name="relu_2_red"))

    net.append(Conv2D(64, 192, 3, pad=1, useBias=False, initscheme=initscheme, name="conv_2"))
    net.append(BatchNorm2D(192, inplace=bnInplace, name="bn_2"))
    net.append(Activation(relu, inplace=actInplace, name="relu_2"))
    net.append(MaxPool2D(3, 2, pad=1, name="pool_2"))

    act, bn = actInplace, bnInplace

    net.extend(bnBlock(192, [64], [64, 64], [64, 96, 96], [32], act=act, bn=bn, scheme=initscheme, name="3a"))
    net.extend(bnBlock(256, [64], [64, 96], [64, 96, 96], [64], act=act, bn=bn, scheme=initscheme, name="3b"))
    net.extend(bnShrinkBlock(320, [128, 160], [64, 96, 96], bn=bn, act=act, scheme=initscheme, name="3c"))

    net.extend(bnBlock(576, [224], [64, 96], [96, 128, 128], [128], act=act, bn=bn, scheme=initscheme, name="4a"))
    net.extend(bnBlock(576, [192], [96, 128], [96, 128, 128], [128], act=act, bn=bn, scheme=initscheme, name="4b"))
    net.extend(bnBlock(576, [160], [128, 160], [128, 160, 160], [128], act=act, bn=bn, scheme=initscheme, name="4c"))
    net.extend(bnBlock(608, [96], [128, 192], [160, 192, 192], [128], act=act, bn=bn, scheme=initscheme, name="4d"))
    net.extend(bnShrinkBlock(608, [128, 192], [192, 256, 256], act=act, bn=bn, scheme=initscheme, name="4e"))

    net.extend(bnBlock(1056, [352], [192, 320], [160, 224, 224], [128], act=act, bn=bn, scheme=initscheme, name="5a"))
    net.extend(bnBlock(1024, [352], [192, 320], [192, 224, 224], [128], act=act, bn=bn, scheme=initscheme, name="5b"))

    net.append(AvgPool2D(7, 1, name="global_pool"))
    net.append(Flatten(name="flatten"))
    net.append(Linear(1024, 1000, initscheme=initscheme, name="fc1"))
    net.append(SoftMax(name="softmax"))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
def loadInceptionV3(modelpath, actInplace=False, bnInplace=False, initscheme="none", name="Inception-7-0001"):
    net = Sequential(name=name)

    net.append(Conv2D(3, 32, 3, stride=2, useBias=False, initscheme=initscheme, name="conv_conv2d"))
    net.append(BatchNorm2D(32, name="conv_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_relu"))

    net.append(Conv2D(32, 32, 3, useBias=False, initscheme=initscheme, name="conv_1_conv2d"))
    net.append(BatchNorm2D(32, name="conv_1_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_1_relu"))

    net.append(Conv2D(32, 64, 3, pad=1, useBias=False, initscheme=initscheme, name="conv_2_conv2d"))
    net.append(BatchNorm2D(64, name="conv_2_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_2_relu"))
    net.append(MaxPool2D(3, 2, name="pool"))

    net.append(Conv2D(64, 80, 1, useBias=False, initscheme=initscheme, name="conv_3_conv2d"))
    net.append(BatchNorm2D(80, name="conv_3_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_3_relu"))

    net.append(Conv2D(80, 192, 3, useBias=False, initscheme=initscheme, name="conv_4_conv2d"))
    net.append(BatchNorm2D(192, name="conv_4_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_4_relu"))
    net.append(MaxPool2D(3, 2, name="pool1"))

    act, bn = actInplace, bnInplace

    net.extend(bnBlock(192, [64], [48, 64], [64, 96, 96], [32], "mixed", act, bn, initscheme, 5, 2, "v3"))
    net.extend(bnBlock(256, [64], [48, 64], [64, 96, 96], [64], "mixed_1", act, bn, initscheme, 5, 2, "v3"))
    net.extend(bnBlock(288, [64], [48, 64], [64, 96, 96], [64], "mixed_2", act, bn, initscheme, 5, 2, "v3"))
    net.extend(bnShrinkBlock(288, [384], [64, 96, 96], "mixed_3", act, bn, initscheme, False, 0, "v3"))

    net.extend(factorBlock(768, [192], [128, 128, 192], [128, 128, 128, 128, 192], [192], "mixed_4", act, bn, initscheme))
    net.extend(factorBlock(768, [192], [160, 160, 192], [160, 160, 160, 160, 192], [192], "mixed_5", act, bn, initscheme))
    net.extend(factorBlock(768, [192], [160, 160, 192], [160, 160, 160, 160, 192], [192], "mixed_6", act, bn, initscheme))
    net.extend(factorBlock(768, [192], [192, 192, 192], [192, 192, 192, 192, 192], [192], "mixed_7", act, bn, initscheme))

    net.extend(v3ShrinkBlock(768, [192, 320], [192, 192, 192, 192], "mixed_8", act, bn, initscheme))

    net.extend(expandBlock(1280, [320], [384, 384, 384], [448, 384, 384, 384], [192], "mixed_9", act, bn, initscheme, pool="avg"))
    net.extend(expandBlock(2048, [320], [384, 384, 384], [448, 384, 384, 384], [192], "mixed_10", act, bn, initscheme, pool="max"))

    net.append(AvgPool2D(8, 1, name="global_pool"))
    net.append(Flatten(name="flatten"))
    net.append(Linear(2048, 1008, name="fc1"))
    net.append(SoftMax(name="softmax"))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
def loadResNet(modelpath, layers, actInplace=False, bnInplace=False, initscheme="none", name=None):
    if layers == "50":
        if name is None:
            name = "ResNet-50"

        level3names = ["3%s" % alpha for alpha in string.ascii_lowercase[1:4]]
        level4names = ["4%s" % alpha for alpha in string.ascii_lowercase[1:6]]

    elif layers == "101":
        if name is None:
            name = "ResNet-101"

        level3names = ["3b%s" % num for num in range(1, 4)]
        level4names = ["4b%s" % num for num in range(1, 23)]

    elif layers == "152":
        if name is None:
            name = "ResNet-152"

        level3names = ["3b%s" % num for num in range(1, 8)]
        level4names = ["4b%s" % num for num in range(1, 36)]

    else:
        raise ValueError("Unsupported ResNet layers mode")

    net = Sequential(name=name)

    net.append(Conv2D(3, 64, 7, stride=2, pad=3, name="conv1", initscheme=initscheme, useBias=False))
    net.append(BatchNorm2D(64, name="bn_conv1", inplace=bnInplace))
    net.append(Activation(relu, inplace=actInplace, name="conv1_relu"))
    net.append(MaxPool2D(3, 2, name="pool1"))

    net.extend(residBlock(64, 64, 1, "2a", True, actInplace, bnInplace, initscheme))
    net.extend(residBlock(256, 64, 1, "2b", False, actInplace, bnInplace, initscheme))
    net.extend(residBlock(256, 64, 1, "2c", False, actInplace, bnInplace, initscheme))

    net.extend(residBlock(256, 128, 2, "3a", True, actInplace, bnInplace, initscheme))

    # use a distinct loop variable so the net's name parameter is not shadowed
    for blockname in level3names:
        net.extend(residBlock(512, 128, 1, blockname, False, actInplace, bnInplace, initscheme))

    net.extend(residBlock(512, 256, 2, "4a", True, actInplace, bnInplace, initscheme))

    for blockname in level4names:
        net.extend(residBlock(1024, 256, 1, blockname, False, actInplace, bnInplace, initscheme))

    net.extend(residBlock(1024, 512, 2, "5a", True, actInplace, bnInplace, initscheme))
    net.extend(residBlock(2048, 512, 1, "5b", False, actInplace, bnInplace, initscheme))
    net.extend(residBlock(2048, 512, 1, "5c", False, actInplace, bnInplace, initscheme))

    net.append(AvgPool2D(7, 1))
    net.append(Flatten())
    net.append(Linear(2048, 1000, initscheme=initscheme, name="fc1000"))
    net.append(SoftMax())

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
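# A hedged usage sketch for loadResNet (not from the original source; the
# gpuarray import path and the "xavier" init scheme are assumptions): build a
# randomly initialized ResNet-50 and run one 224x224 image through it. Pass a
# real weights path instead of None to use pretrained parameters.
def loadResNetUsageExample():
    import numpy as np
    from PuzzleLib.Backend import gpuarray  # assumed backend import path

    net = loadResNet(None, layers="50", initscheme="xavier")

    data = gpuarray.to_gpu(np.random.randn(1, 3, 224, 224).astype(np.float32))
    probs = net(data)  # (1, 1000) class probabilities after SoftMax

    return probs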
def loadVGG(modelpath, layers, poolmode="max", initscheme="none", withLinear=True, actInplace=False, name=None):
    if poolmode == "avg":
        pool = AvgPool2D
    elif poolmode == "max":
        pool = MaxPool2D
    else:
        raise ValueError("Unsupported pool mode")

    if layers not in {"11", "16", "19"}:
        raise ValueError("Unsupported VGG layers mode")

    if name is None and layers == "11":
        name = "VGG_ILSVRC_11_layers"
    elif name is None and layers == "16":
        name = "VGG_ILSVRC_16_layers"
    elif name is None and layers == "19":
        name = "VGG_ILSVRC_19_layers"

    layers = int(layers)
    net = Sequential(name=name)

    net.append(Conv2D(3, 64, 3, pad=1, initscheme=initscheme, name="conv1_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu1_1"))

    if layers > 11:
        net.append(Conv2D(64, 64, 3, pad=1, initscheme=initscheme, name="conv1_2"))
        net.append(Activation(relu, inplace=actInplace, name="relu1_2"))

    net.append(pool(2, 2, name="pool1"))

    net.append(Conv2D(64, 128, 3, pad=1, initscheme=initscheme, name="conv2_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu2_1"))

    if layers > 11:
        net.append(Conv2D(128, 128, 3, pad=1, initscheme=initscheme, name="conv2_2"))
        net.append(Activation(relu, inplace=actInplace, name="relu2_2"))

    net.append(pool(2, 2, name="pool2"))

    net.append(Conv2D(128, 256, 3, pad=1, initscheme=initscheme, name="conv3_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu3_1"))
    net.append(Conv2D(256, 256, 3, pad=1, initscheme=initscheme, name="conv3_2"))
    net.append(Activation(relu, inplace=actInplace, name="relu3_2"))

    if layers > 11:
        net.append(Conv2D(256, 256, 3, pad=1, initscheme=initscheme, name="conv3_3"))
        net.append(Activation(relu, inplace=actInplace, name="relu3_3"))

    if layers > 16:
        net.append(Conv2D(256, 256, 3, pad=1, initscheme=initscheme, name="conv3_4"))
        net.append(Activation(relu, inplace=actInplace, name="relu3_4"))

    net.append(pool(2, 2, name="pool3"))

    net.append(Conv2D(256, 512, 3, pad=1, initscheme=initscheme, name="conv4_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu4_1"))
    net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv4_2"))
    net.append(Activation(relu, inplace=actInplace, name="relu4_2"))

    if layers > 11:
        net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv4_3"))
        net.append(Activation(relu, inplace=actInplace, name="relu4_3"))

    if layers > 16:
        net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv4_4"))
        net.append(Activation(relu, inplace=actInplace, name="relu4_4"))

    net.append(pool(2, 2, name="pool4"))

    net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv5_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu5_1"))
    net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv5_2"))
    net.append(Activation(relu, inplace=actInplace, name="relu5_2"))

    if layers > 11:
        net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv5_3"))
        net.append(Activation(relu, inplace=actInplace, name="relu5_3"))

    if layers > 16:
        net.append(Conv2D(512, 512, 3, pad=1, initscheme=initscheme, name="conv5_4"))
        net.append(Activation(relu, inplace=actInplace, name="relu5_4"))

    net.append(pool(2, 2, name="pool5"))

    if withLinear:
        net.append(Flatten())

        insize = int(np.prod(net.dataShapeFrom((1, 3, 224, 224))))
        net.append(Linear(insize, 4096, initscheme=initscheme, name="fc6"))
        net.append(Activation(relu, inplace=actInplace, name="relu6"))

        net.append(Linear(4096, 4096, initscheme=initscheme, name="fc7"))
        net.append(Activation(relu, inplace=actInplace, name="relu7"))

        net.append(Linear(4096, 1000, initscheme=initscheme, name="fc8"))
        net.append(SoftMax())

    if modelpath is not None:
        net.load(modelpath)

    return net
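# A hedged usage sketch for loadVGG (not from the original source; the
# "xavier" init scheme is an assumption): with withLinear=False the fully
# connected head is dropped, leaving a convolutional feature extractor whose
# output for a 224x224 input is a (batch, 512, 7, 7) map after pool5.
def loadVGGUsageExample():
    net = loadVGG(None, layers="16", withLinear=False, initscheme="xavier")
    print(net.dataShapeFrom((1, 3, 224, 224)))  # expected (1, 512, 7, 7)
    return net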
def buildBranch(inmaps=185, inplace=True, num=1, stage=2):
    branch = Sequential()

    branch.append(Conv2D(inmaps, 128, 7, pad=3, initscheme="none", name="Mconv1_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu1_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 128, 7, pad=3, initscheme="none", name="Mconv2_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu2_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 128, 7, pad=3, initscheme="none", name="Mconv3_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu3_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 128, 7, pad=3, initscheme="none", name="Mconv4_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu4_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 128, 7, pad=3, initscheme="none", name="Mconv5_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu5_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 128, 1, initscheme="none", name="Mconv6_stage%d_L%d" % (stage, num)))
    branch.append(Activation(relu, inplace=inplace, name="Mrelu6_stage%d_L%d" % (stage, num)))

    branch.append(Conv2D(128, 19 * (3 - num), 1, initscheme="none", name="Mconv7_stage%d_L%d" % (stage, num)))

    return branch
def loadNet(name="", inplace=True, modelpath=None):
    net = Sequential(name)

    net.append(Conv2D(3, 64, 3, pad=1, initscheme="none", name="conv1_1"))
    net.append(Activation(relu, name="relu1_1", inplace=inplace))
    net.append(Conv2D(64, 64, 3, pad=1, initscheme="none", name="conv1_2"))
    net.append(Activation(relu, name="relu1_2", inplace=inplace))
    net.append(MaxPool2D(name="pool1_stage1"))

    net.append(Conv2D(64, 128, 3, pad=1, initscheme="none", name="conv2_1"))
    net.append(Activation(relu, name="relu2_1", inplace=inplace))
    net.append(Conv2D(128, 128, 3, pad=1, initscheme="none", name="conv2_2"))
    net.append(Activation(relu, name="relu2_2", inplace=inplace))
    net.append(MaxPool2D(name="pool2_stage1"))

    net.append(Conv2D(128, 256, 3, pad=1, initscheme="none", name="conv3_1"))
    net.append(Activation(relu, name="relu3_1", inplace=inplace))
    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_2"))
    net.append(Activation(relu, name="relu3_2", inplace=inplace))
    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_3"))
    net.append(Activation(relu, name="relu3_3", inplace=inplace))
    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_4"))
    net.append(Activation(relu, name="relu3_4", inplace=inplace))
    net.append(MaxPool2D(name="pool3_stage1"))

    net.append(Conv2D(256, 512, 3, pad=1, initscheme="none", name="conv4_1"))
    net.append(Activation(relu, name="relu4_1", inplace=inplace))
    net.append(Conv2D(512, 512, 3, pad=1, initscheme="none", name="conv4_2"))
    net.append(Activation(relu, name="relu4_2", inplace=inplace))

    net.append(Conv2D(512, 256, 3, pad=1, initscheme="none", name="conv4_3_CPM"))
    net.append(Activation(relu, name="relu4_3_CPM"))
    net.append(Conv2D(256, 128, 3, pad=1, initscheme="none", name="conv4_4_CPM"))
    net.append(Activation(relu, name="relu4_4_CPM"))

    block2 = buildSmallBlock(inplace=inplace)
    block3 = buildBigBlock(stage=2, prenet=block2, inplace=inplace)
    block4 = buildBigBlock(stage=3, prenet=block3, inplace=inplace)
    block5 = buildBigBlock(stage=4, prenet=block4, inplace=inplace)
    block6 = buildBigBlock(stage=5, prenet=block5, inplace=inplace)

    net.extend(block6)

    net.append(Replicate(2))
    net.append(Parallel().append(buildBranch(stage=6, num=2, inplace=inplace)).append(
        buildBranch(stage=6, num=1, inplace=inplace)
    ))
    net.append(Concat(axis=1))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
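# A hedged shape sketch for loadNet (not from the original source): the final
# Concat joins the stage-6 num=2 branch (19 * (3 - 2) = 19 maps) with the
# num=1 branch (19 * (3 - 1) = 38 maps), so the output has 57 channels at 1/8
# of the input resolution after the three stride-2 pools.
def loadNetShapeExample():
    net = loadNet(name="pose")
    print(net.dataShapeFrom((1, 3, 368, 368)))  # expected (1, 57, 46, 46)
    return net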
def loadNiNImageNet(modelpath, poolmode="max", actInplace=False, initscheme="none", name="CaffeNet"):
    if poolmode == "avg":
        pool = AvgPool2D
    elif poolmode == "max":
        pool = MaxPool2D
    else:
        raise ValueError("Unsupported pool mode")

    net = Sequential(name=name)

    net.append(Conv2D(3, 96, 11, stride=4, initscheme=initscheme, name="conv1"))
    net.append(Activation(relu, inplace=actInplace, name="relu0"))
    net.append(Conv2D(96, 96, 1, stride=1, initscheme=initscheme, name="cccp1"))
    net.append(Activation(relu, inplace=actInplace, name="relu1"))
    net.append(Conv2D(96, 96, 1, stride=1, initscheme=initscheme, name="cccp2"))
    net.append(Activation(relu, inplace=actInplace, name="relu2"))
    net.append(pool(3, 2, name="pool1"))

    net.append(Conv2D(96, 256, 5, stride=1, pad=2, initscheme=initscheme, name="conv2"))
    net.append(Activation(relu, inplace=actInplace, name="relu3"))
    net.append(Conv2D(256, 256, 1, stride=1, initscheme=initscheme, name="cccp3"))
    net.append(Activation(relu, inplace=actInplace, name="relu4"))
    net.append(Conv2D(256, 256, 1, stride=1, initscheme=initscheme, name="cccp4"))
    net.append(Activation(relu, inplace=actInplace, name="relu5"))
    net.append(pool(3, 2, name="pool2"))

    net.append(Conv2D(256, 384, 3, stride=1, pad=1, initscheme=initscheme, name="conv3"))
    net.append(Activation(relu, inplace=actInplace, name="relu6"))
    net.append(Conv2D(384, 384, 1, stride=1, initscheme=initscheme, name="cccp5"))
    net.append(Activation(relu, inplace=actInplace, name="relu7"))
    net.append(Conv2D(384, 384, 1, stride=1, initscheme=initscheme, name="cccp6"))
    net.append(Activation(relu, inplace=actInplace, name="relu8"))
    net.append(pool(3, 2, name="pool3"))

    net.append(Conv2D(384, 1024, 3, stride=1, pad=1, initscheme=initscheme, name="conv4-1024"))
    net.append(Activation(relu, inplace=actInplace, name="relu9"))
    net.append(Conv2D(1024, 1024, 1, stride=1, initscheme=initscheme, name="cccp7-1024"))
    net.append(Activation(relu, inplace=actInplace, name="relu10"))
    net.append(Conv2D(1024, 1000, 1, stride=1, initscheme=initscheme, name="cccp8-1024"))
    net.append(Activation(relu, inplace=actInplace, name="relu11"))

    net.append(AvgPool2D(5, 1, name="pool4"))
    net.append(Flatten())
    net.append(SoftMax())

    if modelpath is not None:
        net.load(modelpath)

    return net