Example #1
0
def tower(towername,
          names,
          maps,
          sizes,
          strides,
          pads,
          act,
          bn,
          scheme,
          typ="bn"):
    """Assemble a sequential "tower" of pooling and conv+BN levels.

    Each entry of ``names`` becomes one level.  Levels whose prefixed name
    contains "pool" are built as pooling layers; every other level becomes
    a conv+BN stage named "<towername>_<name>".  The last level is always
    built without an activation.
    """
    towerblock = Sequential()
    lastidx = len(names) - 1

    for idx, levelname in enumerate(names):
        prefixed = "%s_%s" % (towername, levelname)

        if "pool" in prefixed:
            # NOTE(review): pooling layers keep the *unprefixed* name,
            # unlike conv levels — confirm this asymmetry is intended.
            towerblock.append(
                pool2D(sizes[idx], strides[idx], pads[idx], name=levelname))
            continue

        # The final conv level is built without an activation.
        useact = act if idx != lastidx else False
        towerblock.extend(
            convBN(maps[idx], maps[idx + 1], sizes[idx], strides[idx],
                   pads[idx], prefixed, useact, bn, scheme, typ))

    return towerblock
Example #2
0
def residBlock(inmaps, hmaps, stride, blockname, convShortcut, actInplace,
               bnInplace, initscheme):
    """Build a bottleneck residual block (1x1 -> 3x3 -> 1x1 plus shortcut).

    The main branch is the classic three-stage bottleneck (suffixes
    2a/2b/2c, expansion factor 4); the shortcut is either a projection
    conv (when ``convShortcut`` is set) or the identity.  Branch outputs
    are summed and passed through a final ReLU.
    """
    # Main bottleneck branch: (inmaps, outmaps, size, stride, pad, suffix, act).
    mainspecs = [
        (inmaps, hmaps, 1, stride, 0, "2a", True),
        (hmaps, hmaps, 3, 1, 1, "2b", True),
        (hmaps, 4 * hmaps, 1, 1, 0, "2c", False),
    ]

    mainbranch = Sequential()
    for fin, fout, size, st, pad, suffix, withact in mainspecs:
        mainbranch.extend(
            residMiniBlock(fin, fout, size, st, pad, blockname, suffix,
                           withact, actInplace, bnInplace, initscheme))

    # Shortcut path: projection conv when shapes change, identity otherwise.
    skip = Sequential()
    if convShortcut:
        skip.extend(
            residMiniBlock(inmaps, 4 * hmaps, 1, stride, 0, blockname, "1",
                           False, actInplace, bnInplace, initscheme))
    else:
        skip.append(Identity())

    block = Sequential()
    block.append(Replicate(2))
    block.append(Parallel().append(mainbranch).append(skip))
    block.append(Add())
    block.append(Activation(relu, inplace=actInplace))

    return block
Example #3
0
def loadInceptionBN(modelpath,
                    actInplace=False,
                    bnInplace=False,
                    initscheme="none",
                    name="Inception-BN-0126"):
    """Build the Inception-BN (Inception v2-style) image classifier.

    Args:
        modelpath: path to pretrained weights, or None to skip loading.
        actInplace: build ReLU activations in-place.
        bnInplace: build batch-norm layers in-place.
        initscheme: weight-initialisation scheme for conv/linear layers.
        name: name of the resulting Sequential container.

    Returns:
        A Sequential network ending in a 1000-way softmax.
    """
    net = Sequential(name=name)

    # Stem: 7x7/2 conv -> BN -> ReLU -> 3x3/2 max-pool.
    net.append(
        Conv2D(3,
               64,
               7,
               stride=2,
               pad=3,
               useBias=False,
               initscheme=initscheme,
               name="conv_1"))
    net.append(BatchNorm2D(64, inplace=bnInplace, name="bn_1"))
    net.append(Activation(relu, inplace=actInplace, name="relu_1"))

    net.append(MaxPool2D(3, 2, pad=1, name="pool_1"))

    # 1x1 channel reduction before the 3x3 conv.
    net.append(
        Conv2D(64,
               64,
               1,
               useBias=False,
               initscheme=initscheme,
               name="conv_2_red"))
    net.append(BatchNorm2D(64, inplace=bnInplace, name="bn_2_red"))
    net.append(Activation(relu, inplace=actInplace, name="relu_2_red"))

    net.append(
        Conv2D(64,
               192,
               3,
               pad=1,
               useBias=False,
               initscheme=initscheme,
               name="conv_2"))
    net.append(BatchNorm2D(192, inplace=bnInplace, name="bn_2"))
    net.append(Activation(relu, inplace=actInplace, name="relu_2"))

    net.append(MaxPool2D(3, 2, pad=1, name="pool_2"))

    # Inception stages; per-branch channel counts are the network spec,
    # so they are deliberately kept as literals.
    act, bn = actInplace, bnInplace

    # Stage 3: two regular blocks plus one downsampling ("shrink") block.
    net.extend(
        bnBlock(192, [64], [64, 64], [64, 96, 96], [32],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="3a"))
    net.extend(
        bnBlock(256, [64], [64, 96], [64, 96, 96], [64],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="3b"))
    net.extend(
        bnShrinkBlock(320, [128, 160], [64, 96, 96],
                      bn=bn,
                      act=act,
                      scheme=initscheme,
                      name="3c"))

    # Stage 4: four regular blocks plus a shrink block.
    net.extend(
        bnBlock(576, [224], [64, 96], [96, 128, 128], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="4a"))
    net.extend(
        bnBlock(576, [192], [96, 128], [96, 128, 128], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="4b"))
    net.extend(
        bnBlock(576, [160], [128, 160], [128, 160, 160], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="4c"))
    net.extend(
        bnBlock(608, [96], [128, 192], [160, 192, 192], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="4d"))
    net.extend(
        bnShrinkBlock(608, [128, 192], [192, 256, 256],
                      act=act,
                      bn=bn,
                      scheme=initscheme,
                      name="4e"))

    # Stage 5.
    net.extend(
        bnBlock(1056, [352], [192, 320], [160, 224, 224], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="5a"))
    net.extend(
        bnBlock(1024, [352], [192, 320], [192, 224, 224], [128],
                act=act,
                bn=bn,
                scheme=initscheme,
                name="5b"))

    # Classifier head: global 7x7 average pool -> FC -> softmax.
    net.append(AvgPool2D(7, 1, name="global_pool"))
    net.append(Flatten(name="flatten"))
    net.append(Linear(1024, 1000, initscheme=initscheme, name="fc1"))
    net.append(SoftMax(name="softmax"))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
Example #4
0
def loadInceptionV3(modelpath,
                    actInplace=False,
                    bnInplace=False,
                    initscheme="none",
                    name="Inception-7-0001"):
    """Build the Inception v3 image classifier.

    Args:
        modelpath: path to pretrained weights, or None to skip loading.
        actInplace: build ReLU activations in-place.
        bnInplace: in-place flag forwarded to the inception blocks only.
            NOTE(review): the stem BatchNorm2D layers below do not receive
            it — confirm whether that is intended.
        initscheme: weight-initialisation scheme for conv layers.
        name: name of the resulting Sequential container.

    Returns:
        A Sequential network ending in a 1008-way softmax.
    """
    net = Sequential(name=name)

    # Stem: three 3x3 convs (first strided), each conv -> BN -> ReLU.
    net.append(
        Conv2D(3,
               32,
               3,
               stride=2,
               useBias=False,
               initscheme=initscheme,
               name="conv_conv2d"))
    net.append(BatchNorm2D(32, name="conv_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_relu"))

    net.append(
        Conv2D(32,
               32,
               3,
               useBias=False,
               initscheme=initscheme,
               name="conv_1_conv2d"))
    net.append(BatchNorm2D(32, name="conv_1_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_1_relu"))

    net.append(
        Conv2D(32,
               64,
               3,
               pad=1,
               useBias=False,
               initscheme=initscheme,
               name="conv_2_conv2d"))
    net.append(BatchNorm2D(64, name="conv_2_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_2_relu"))

    net.append(MaxPool2D(3, 2, name="pool"))

    # 1x1 reduction then 3x3 expansion before the second pool.
    net.append(
        Conv2D(64,
               80,
               1,
               useBias=False,
               initscheme=initscheme,
               name="conv_3_conv2d"))
    net.append(BatchNorm2D(80, name="conv_3_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_3_relu"))

    net.append(
        Conv2D(80,
               192,
               3,
               useBias=False,
               initscheme=initscheme,
               name="conv_4_conv2d"))
    net.append(BatchNorm2D(192, name="conv_4_batchnorm"))
    net.append(Activation(relu, inplace=actInplace, name="conv_4_relu"))

    net.append(MaxPool2D(3, 2, name="pool1"))

    # Inception blocks; branch channel counts are the published network
    # spec, kept as literals on purpose.
    act, bn = actInplace, bnInplace

    # 35x35 blocks ("mixed" .. "mixed_2") plus a grid-reduction block.
    net.extend(
        bnBlock(192, [64], [48, 64], [64, 96, 96], [32], "mixed", act, bn,
                initscheme, 5, 2, "v3"))
    net.extend(
        bnBlock(256, [64], [48, 64], [64, 96, 96], [64], "mixed_1", act, bn,
                initscheme, 5, 2, "v3"))
    net.extend(
        bnBlock(288, [64], [48, 64], [64, 96, 96], [64], "mixed_2", act, bn,
                initscheme, 5, 2, "v3"))
    net.extend(
        bnShrinkBlock(288, [384], [64, 96, 96], "mixed_3", act, bn, initscheme,
                      False, 0, "v3"))

    # 17x17 factorized (7x1 / 1x7) blocks plus a reduction block.
    net.extend(
        factorBlock(768, [192], [128, 128, 192], [128, 128, 128, 128, 192],
                    [192], "mixed_4", act, bn, initscheme))
    net.extend(
        factorBlock(768, [192], [160, 160, 192], [160, 160, 160, 160, 192],
                    [192], "mixed_5", act, bn, initscheme))
    net.extend(
        factorBlock(768, [192], [160, 160, 192], [160, 160, 160, 160, 192],
                    [192], "mixed_6", act, bn, initscheme))
    net.extend(
        factorBlock(768, [192], [192, 192, 192], [192, 192, 192, 192, 192],
                    [192], "mixed_7", act, bn, initscheme))
    net.extend(
        v3ShrinkBlock(768, [192, 320], [192, 192, 192, 192], "mixed_8", act,
                      bn, initscheme))

    # 8x8 expanded blocks; they differ only in their pooling branch.
    net.extend(
        expandBlock(1280, [320], [384, 384, 384], [448, 384, 384, 384], [192],
                    "mixed_9",
                    act,
                    bn,
                    initscheme,
                    pool="avg"))

    net.extend(
        expandBlock(2048, [320], [384, 384, 384], [448, 384, 384, 384], [192],
                    "mixed_10",
                    act,
                    bn,
                    initscheme,
                    pool="max"))

    # Classifier head: global 8x8 average pool -> FC -> softmax.
    net.append(AvgPool2D(8, 1, name="global_pool"))
    net.append(Flatten(name="flatten"))
    net.append(Linear(2048, 1008, name="fc1"))
    net.append(SoftMax(name="softmax"))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
Example #5
0
def loadMiniYolo(modelpath, numOutput, actInplace=False, initscheme="none"):
    """Build a YOLO-style convolutional network with an FC classifier head.

    Args:
        modelpath: path to pretrained weights, or None to skip loading.
        numOutput: size of the final linear layer (number of outputs).
        actInplace: build activations in-place.
        initscheme: weight-initialisation scheme for conv/linear layers.

    Returns:
        A Sequential network ending in a softmax over ``numOutput`` units.
    """
    net = Sequential(name="YOLONet")

    # Conv stages; each ``block`` builds the listed conv layers (and,
    # unless addMaxpool=False, a trailing max-pool).
    block0 = block(idx=["1"],
                   inmaps=[3],
                   outmaps=[64],
                   sizeconv=[7],
                   strideconv=[2],
                   initscheme=initscheme,
                   actInPlace=actInplace)
    net.extend(block0)

    block1 = block(idx=["2"],
                   inmaps=[64],
                   outmaps=[192],
                   sizeconv=[3],
                   strideconv=[1],
                   initscheme=initscheme,
                   actInPlace=actInplace)
    net.extend(block1)

    block2 = block(idx=["3", "4", "5", "6"],
                   inmaps=[192, 128, 256, 256],
                   outmaps=[128, 256, 256, 512],
                   sizeconv=[1, 3, 1, 3],
                   strideconv=[1, 1, 1, 1],
                   initscheme=initscheme,
                   actInPlace=actInplace)
    net.extend(block2)

    block3 = block(
        idx=["7", "8", "9", "10", "11", "12", "13", "14", "15", "16"],
        inmaps=[512, 256, 512, 256, 512, 256, 512, 256, 512, 512],
        outmaps=[256, 512, 256, 512, 256, 512, 256, 512, 512, 1024],
        sizeconv=[1, 3, 1, 3, 1, 3, 1, 3, 1, 3],
        strideconv=[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        initscheme=initscheme,
        actInPlace=actInplace)
    net.extend(block3)

    # Final conv stage keeps spatial resolution (no max-pool); layer 22
    # is strided instead.
    block4 = block(idx=["17", "18", "19", "20", "21", "22", "23", "24"],
                   inmaps=[1024, 512, 1024, 512, 1024, 1024, 1024, 1024],
                   outmaps=[512, 1024, 512, 1024, 1024, 1024, 1024, 1024],
                   sizeconv=[1, 3, 1, 3, 3, 3, 3, 3],
                   strideconv=[1, 1, 1, 1, 1, 2, 1, 1],
                   initscheme=initscheme,
                   actInPlace=actInplace,
                   addMaxpool=False)
    net.extend(block4)

    net.append(Flatten())
    # Flattened feature size for a 448x448 RGB input, derived from the
    # network built so far.
    insize = int(np.prod(net.dataShapeFrom((1, 3, 448, 448))))

    # NOTE(review): activation names lag the fc names by one (fc25 ->
    # fc_relu24, etc.); kept as-is since renaming would break checkpoint
    # loading — confirm whether intentional.
    net.append(Linear(insize, 512, initscheme=initscheme, name="fc25"))
    net.append(Activation(relu, inplace=actInplace, name="fc_relu24"))

    net.append(Linear(512, 4096, initscheme=initscheme, name="fc26"))
    net.append(Activation(relu, inplace=actInplace, name="fc_relu25"))

    net.append(Linear(4096, numOutput, initscheme=initscheme, name="fc27"))
    net.append(SoftMax())

    if modelpath is not None:
        net.load(modelpath)

    return net
Example #6
0
def loadResNet(modelpath,
               layers,
               actInplace=False,
               bnInplace=False,
               initscheme="none",
               name=None):
    """Build a ResNet-50/101/152 image classifier.

    Args:
        modelpath: path to pretrained weights, or None to skip loading.
        layers: one of "50", "101", "152" selecting the network depth.
        actInplace: build ReLU activations in-place.
        bnInplace: build batch-norm layers in-place.
        initscheme: weight-initialisation scheme for conv/linear layers.
        name: container name; defaults to "ResNet-<layers>".

    Returns:
        A Sequential network ending in a 1000-way softmax.

    Raises:
        ValueError: if ``layers`` is not one of the supported depths.
    """
    if layers == "50":
        if name is None:
            name = "ResNet-50"

        # ResNet-50 uses lettered block names (3b..3d, 4b..4f).
        level3names = ["3%s" % alpha for alpha in string.ascii_lowercase[1:4]]
        level4names = ["4%s" % alpha for alpha in string.ascii_lowercase[1:6]]

    elif layers == "101":
        if name is None:
            name = "ResNet-101"

        # Deeper variants use numbered block names (3b1.., 4b1..).
        level3names = ["3b%s" % num for num in range(1, 4)]
        level4names = ["4b%s" % num for num in range(1, 23)]

    elif layers == "152":
        if name is None:
            name = "ResNet-152"

        level3names = ["3b%s" % num for num in range(1, 8)]
        level4names = ["4b%s" % num for num in range(1, 36)]

    else:
        raise ValueError("Unsupported ResNet layers mode")

    net = Sequential(name=name)

    # Stem: 7x7/2 conv -> BN -> ReLU -> 3x3/2 max-pool.
    net.append(
        Conv2D(3,
               64,
               7,
               stride=2,
               pad=3,
               name="conv1",
               initscheme=initscheme,
               useBias=False))
    net.append(BatchNorm2D(64, name="bn_conv1", inplace=bnInplace))
    net.append(Activation(relu, inplace=actInplace, name="conv1_relu"))
    net.append(MaxPool2D(3, 2, name="pool1"))

    # Stage 2: first block uses a projection shortcut to match shapes.
    net.extend(
        residBlock(64, 64, 1, "2a", True, actInplace, bnInplace, initscheme))
    net.extend(
        residBlock(256, 64, 1, "2b", False, actInplace, bnInplace, initscheme))
    net.extend(
        residBlock(256, 64, 1, "2c", False, actInplace, bnInplace, initscheme))

    # Stage 3: strided downsampling block plus depth-dependent tail.
    net.extend(
        residBlock(256, 128, 2, "3a", True, actInplace, bnInplace, initscheme))

    # Fix: use a dedicated loop variable instead of reusing `name`, which
    # shadowed the function parameter.
    for blockname in level3names:
        net.extend(
            residBlock(512, 128, 1, blockname, False, actInplace, bnInplace,
                       initscheme))

    # Stage 4.
    net.extend(
        residBlock(512, 256, 2, "4a", True, actInplace, bnInplace, initscheme))

    for blockname in level4names:
        net.extend(
            residBlock(1024, 256, 1, blockname, False, actInplace, bnInplace,
                       initscheme))

    # Stage 5.
    net.extend(
        residBlock(1024, 512, 2, "5a", True, actInplace, bnInplace,
                   initscheme))
    net.extend(
        residBlock(2048, 512, 1, "5b", False, actInplace, bnInplace,
                   initscheme))
    net.extend(
        residBlock(2048, 512, 1, "5c", False, actInplace, bnInplace,
                   initscheme))

    # Classifier head: global 7x7 average pool -> FC -> softmax.
    net.append(AvgPool2D(7, 1))
    net.append(Flatten())
    net.append(Linear(2048, 1000, initscheme=initscheme, name="fc1000"))
    net.append(SoftMax())

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net
Example #7
0
def loadW2L(modelpath, inmaps, nlabels, initscheme=None, name="w2l"):
    """Build a Wav2Letter-style 1-D convolutional acoustic model.

    Args:
        modelpath: path to pretrained weights, or None to skip loading.
        inmaps: number of input feature channels.
        nlabels: number of output labels (channels of the final conv).
        initscheme: weight-initialisation scheme forwarded to convBlock.
        name: name of the resulting Sequential container.

    Returns:
        A Sequential network whose last conv projects to ``nlabels``
        channels without batch-norm/activation.
    """
    net = Sequential(name=name)

    # Regular stack, blocks conv1d_0 .. conv1d_15.  Each entry is
    # (outmaps, size, stride, pad, dropout); input channels chain from
    # the previous block (``inmaps`` for the first).  Replaces sixteen
    # copy-pasted convBlock calls from the original.
    specs = [
        (256, 11, 2, 5, 0.2),
        (256, 11, 1, 5, 0.2),
        (256, 11, 1, 5, 0.2),
        (256, 11, 1, 5, 0.2),
        (384, 13, 1, 6, 0.2),
        (384, 13, 1, 6, 0.2),
        (384, 13, 1, 6, 0.2),
        (512, 17, 1, 8, 0.2),
        (512, 17, 1, 8, 0.2),
        (512, 17, 1, 8, 0.2),
        (640, 21, 1, 10, 0.3),
        (640, 21, 1, 10, 0.3),
        (640, 21, 1, 10, 0.3),
        (768, 25, 1, 12, 0.3),
        (768, 25, 1, 12, 0.3),
        (768, 25, 1, 12, 0.3),
    ]

    curmaps = inmaps
    for i, (outmaps, size, stride, pad, dropout) in enumerate(specs):
        net.extend(
            convBlock(curmaps,
                      outmaps,
                      size=size,
                      stride=stride,
                      pad=pad,
                      dropout=dropout,
                      initscheme=initscheme,
                      name="conv1d_%d" % i))
        curmaps = outmaps

    # conv1d_16 is the only dilated block in the stack.
    net.extend(
        convBlock(768,
                  896,
                  size=29,
                  stride=1,
                  pad=28,
                  dropout=0.4,
                  initscheme=initscheme,
                  dilation=2,
                  name="conv1d_16"))

    # 1x1 expansion.
    net.extend(
        convBlock(896,
                  1024,
                  size=1,
                  stride=1,
                  pad=0,
                  dropout=0.4,
                  initscheme=initscheme,
                  name="conv1d_17"))

    # Final 1x1 projection to label logits: no dropout, no BN/activation.
    net.extend(
        convBlock(1024,
                  nlabels,
                  size=1,
                  stride=1,
                  pad=0,
                  dropout=0.0,
                  initscheme=initscheme,
                  bnAct=False,
                  name="conv1d_18"))

    if modelpath is not None:
        net.load(modelpath)

    return net
Example #8
0
def loadNet(name="", inplace=True, modelpath=None):
    """Build a VGG-style backbone followed by multi-stage CPM blocks.

    The "_CPM" names and two-branch output suggest a Convolutional Pose
    Machines / OpenPose-style network — TODO confirm against the caller.

    Args:
        name: name of the resulting Sequential container.
        inplace: build activations in-place where supported.
        modelpath: path to pretrained weights, or None to skip loading.

    Returns:
        A Sequential network whose two final parallel branches are
        concatenated along the channel axis.
    """
    net = Sequential(name)

    # VGG-style feature extractor (conv1_* .. conv4_2); initscheme is
    # hard-coded to "none" throughout this function.
    net.append(Conv2D(3, 64, 3, pad=1, initscheme="none", name="conv1_1"))
    net.append(Activation(relu, name="relu1_1", inplace=inplace))

    net.append(Conv2D(64, 64, 3, pad=1, initscheme="none", name="conv1_2"))
    net.append(Activation(relu, name="relu1_2", inplace=inplace))

    net.append(MaxPool2D(name="pool1_stage1"))

    net.append(Conv2D(64, 128, 3, pad=1, initscheme="none", name="conv2_1"))
    net.append(Activation(relu, name="relu2_1", inplace=inplace))

    net.append(Conv2D(128, 128, 3, pad=1, initscheme="none", name="conv2_2"))
    net.append(Activation(relu, name="relu2_2", inplace=inplace))

    net.append(MaxPool2D(name="pool2_stage1"))

    net.append(Conv2D(128, 256, 3, pad=1, initscheme="none", name="conv3_1"))
    net.append(Activation(relu, name="relu3_1", inplace=inplace))

    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_2"))
    net.append(Activation(relu, name="relu3_2", inplace=inplace))

    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_3"))
    net.append(Activation(relu, name="relu3_3", inplace=inplace))

    net.append(Conv2D(256, 256, 3, pad=1, initscheme="none", name="conv3_4"))
    net.append(Activation(relu, name="relu3_4", inplace=inplace))

    net.append(MaxPool2D(name="pool3_stage1"))

    net.append(Conv2D(256, 512, 3, pad=1, initscheme="none", name="conv4_1"))
    net.append(Activation(relu, name="relu4_1", inplace=inplace))

    net.append(Conv2D(512, 512, 3, pad=1, initscheme="none", name="conv4_2"))
    net.append(Activation(relu, name="relu4_2", inplace=inplace))

    # Channel-reducing CPM adaptation convs.  NOTE(review): these two
    # activations do not receive ``inplace`` — confirm whether intended.
    net.append(
        Conv2D(512, 256, 3, pad=1, initscheme="none", name="conv4_3_CPM"))
    net.append(Activation(relu, name="relu4_3_CPM"))

    net.append(
        Conv2D(256, 128, 3, pad=1, initscheme="none", name="conv4_4_CPM"))
    net.append(Activation(relu, name="relu4_4_CPM"))

    # Refinement stages 2-5; each big block wraps the previous one.
    block2 = buildSmallBlock(inplace=inplace)
    block3 = buildBigBlock(stage=2, prenet=block2, inplace=inplace)
    block4 = buildBigBlock(stage=3, prenet=block3, inplace=inplace)
    block5 = buildBigBlock(stage=4, prenet=block4, inplace=inplace)
    block6 = buildBigBlock(stage=5, prenet=block5, inplace=inplace)

    net.extend(block6)
    net.append(Replicate(2))

    # Final stage-6 branches (num=2 and num=1), run in parallel on the
    # replicated input and concatenated along channels.
    net.append(Parallel().append(buildBranch(stage=6, num=2,
                                             inplace=inplace)).append(
                                                 buildBranch(stage=6,
                                                             num=1,
                                                             inplace=inplace)))

    net.append(Concat(axis=1))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True)

    return net