# Example 1
def mixedTest():
	"""Draw a board for a Sequential net that embeds a two-input Graph."""
	from PuzzleLib.Containers import Graph, Sequential
	from PuzzleLib.Modules import Linear, Split, Concat, Activation, relu

	# Graph part: two linear inputs; the split output is reused piecewise.
	inp1 = Linear(100, 50, name="v1").node()
	split = Split(axis=1, sections=(20, 20, 10), name="h1").node(inp1)

	inp2 = Linear(100, 50, name="v2").node()
	merged = Concat(axis=1, name="h2").node((split, [1, 2]), inp2)
	activated = Activation(relu, name="h3").node(merged)

	out = Concat(axis=1, name="h4").node((split, 0), activated)
	graph = Graph(inputs=[inp1, inp2], outputs=out)

	# Sequential part: split the data in two and feed both halves to the graph.
	net = Sequential()
	net.append(Linear(10, 200))
	net.append(Split(axis=1, sections=(100, 100)))
	net.append(graph)
	net.append(Activation(relu))

	drawBoard(net, filename="./TestData/mixed.gv", view=False, modulesOnly=False)
# Example 2
def splitTest():
    """Check that a Split + Parallel net agrees with its OpenVINO engine."""
    batchsize, maps, height, width = 2, 6, 4, 5

    net = Sequential(name="split")
    net.append(Split(axis=1, sections=(2, 4)))
    net.append(Parallel().append(SoftMax()).append(SoftMax()))

    hostData = np.random.randn(batchsize, maps, height, width).astype(np.float32)
    data = gpuarray.to_gpu(hostData)

    engine = buildVINOEngine(net, data.shape, savepath="../TestData")

    outdata = net(data)
    enginedata = engine(data)

    # Both nets return one tensor per split section; compare them pairwise.
    for ref, out in zip(outdata, enginedata):
        assert np.allclose(ref.get(), out.get())
# Example 3
def onHostTest():
    """Run Calculator inference over a large host-resident dataset."""
    from PuzzleLib.Containers import Sequential
    from PuzzleLib.Modules import Conv2D, MaxPool2D, Activation, relu, Flatten, Linear

    # 50k random images kept in host memory; macro-batched onto the device.
    data = np.random.randn(50000, 3, 28, 28).astype(np.float32)

    net = Sequential()
    for module in (
            Conv2D(3, 16, 9), MaxPool2D(), Activation(relu),
            Conv2D(16, 32, 5), MaxPool2D(), Activation(relu),
            Flatten(), Linear(3 * 3 * 32, 10)):
        net.append(module)

    calc = Calculator(net)
    calc.onBatchFinish = lambda calculator: print("Finished batch #%d" %
                                                  calculator.currBatch)

    calc.calcFromHost(
        data,
        onMacroBatchFinish=lambda calculator: print(
            "Finished mb #%d" % calculator.currMacroBatch))
# Example 4
def onHostTest():
    """Validate a small conv net against a large host-resident dataset."""
    from PuzzleLib.Containers import Sequential
    from PuzzleLib.Modules import Conv2D, MaxPool2D, Activation, relu, Flatten, Linear

    from PuzzleLib.Cost.CrossEntropy import CrossEntropy

    # Synthetic dataset: 50k random images with random integer class labels.
    data = np.random.randn(50000, 3, 28, 28).astype(np.float32)
    dataTarget = np.random.randint(low=0, high=10,
                                   size=(50000, )).astype(np.int32)

    net = Sequential()
    for module in (
            Conv2D(3, 16, 9), MaxPool2D(), Activation(relu),
            Conv2D(16, 32, 5), MaxPool2D(), Activation(relu),
            Flatten(), Linear(3 * 3 * 32, 10)):
        net.append(module)

    cost = CrossEntropy()

    validator = Validator(net, cost)
    validator.validateFromHost(data, dataTarget)

    print("Validation error on big data: %s" % validator.error)
# Example 5
def buildNet():
    """Assemble a LeNet-5-like conv net for 10-class classification."""
    from PuzzleLib.Containers import Sequential
    from PuzzleLib.Modules import Conv2D, MaxPool2D, Activation, relu, Flatten, Linear

    net = Sequential(name="lenet-5-like")

    # Two conv/pool/relu blocks, then flatten into two fully-connected layers.
    for module in (
            Conv2D(1, 16, 3), MaxPool2D(), Activation(relu),
            Conv2D(16, 32, 4), MaxPool2D(), Activation(relu),
            Flatten(), Linear(32 * 5 * 5, 1024), Activation(relu),
            Linear(1024, 10)):
        net.append(module)

    return net
# Example 6
def matchTest():
	"""Verify a Graph-built MLP matches the equivalent Sequential one.

	Builds the same two-headed network twice (container API vs. graph API),
	copies the weights across, and checks forward and backward agreement.
	"""
	# Fix: Graph was used below but missing from this import, raising NameError.
	from PuzzleLib.Containers import Sequential, Parallel, Graph
	from PuzzleLib.Modules import Linear, Activation, sigmoid, Replicate, Concat

	seq = Sequential()
	seq.append(Linear(128, 64, name="linear-1"))
	seq.append(Activation(sigmoid))
	seq.append(Replicate(times=2))

	parallel = Parallel()
	parallel.append(Linear(64, 10, name="linear-2"))
	parallel.append(Linear(64, 5, name="linear-3"))
	seq.append(parallel)

	seq.append(Concat(axis=1))

	# Same topology expressed as an explicit computation graph.
	v1 = Linear(128, 64, name="linear-1").node()
	h1 = Activation(sigmoid).node(v1)

	h2 = Linear(64, 10, name="linear-2").node(h1)
	h3 = Linear(64, 5, name="linear-3").node(h1)

	h4 = Concat(axis=1).node(h2, h3)

	mlp = Graph(inputs=v1, outputs=h4)

	# Share weights so both nets compute the identical function.
	for modname in ("linear-1", "linear-2", "linear-3"):
		mlp.getByName(modname).W.set(seq.getByName(modname).W)
		mlp.getByName(modname).b.set(seq.getByName(modname).b)

	data = gpuarray.to_gpu(np.random.randn(32, 128).astype(np.float32))
	seq(data)
	mlp(data)

	assert np.allclose(seq.data.get(), mlp.data.get())

	grad = gpuarray.to_gpu(np.random.randn(32, 15).astype(np.float32))
	seq.backward(grad)
	mlp.backward(grad)

	assert np.allclose(seq.grad.get(), mlp.grad.get())
# Example 7
def buildNet():
    """Assemble a small MLP ending in two parallel linear heads, concatenated."""
    from PuzzleLib.Containers import Sequential, Parallel
    from PuzzleLib.Modules import Linear, Activation, relu, Replicate, Concat

    net = Sequential()

    # Two fully-connected layers with ReLU nonlinearities.
    net.append(Linear(20, 10, name="linear-1"))
    net.append(Activation(relu, name="relu-1"))
    net.append(Linear(10, 5, name="linear-2"))
    net.append(Activation(relu, name="relu-2"))

    # Duplicate the activations, run two linear heads, then merge the outputs.
    net.append(Replicate(times=2, name="repl"))

    heads = Parallel()
    heads.append(Linear(5, 2, name="linear-3-1"))
    heads.append(Linear(5, 3, name="linear-3-2"))
    net.append(heads)

    net.append(Concat(axis=1, name="concat"))

    return net
# Example 8
def buildNet():
    """Assemble a three-block conv net for 32x32 RGB inputs, 10 classes."""
    from PuzzleLib.Containers import Sequential
    from PuzzleLib.Modules import Conv2D, MaxPool2D, Activation, relu, Flatten, Linear

    net = Sequential()

    # Three conv/pool/relu blocks with gaussian init; only wscale/maps vary.
    for inmaps, outmaps, wscale in ((3, 32, 0.0001), (32, 32, 0.01), (32, 64, 0.01)):
        net.append(Conv2D(inmaps, outmaps, 5, pad=2, wscale=wscale, initscheme="gaussian"))
        net.append(MaxPool2D(3, 2))
        net.append(Activation(relu))

    net.append(Flatten())

    # Infer the flattened feature count by probing the net with a dummy shape.
    flatSize = net.dataShapeFrom((1, 3, 32, 32))[1]
    net.append(Linear(flatSize, 64, wscale=0.1, initscheme="gaussian"))
    net.append(Activation(relu))

    net.append(Linear(64, 10, wscale=0.1, initscheme="gaussian"))
    return net
# Example 9
def onHostTest():
    """Train a small conv net from a large host-resident dataset."""
    from PuzzleLib.Containers import Sequential
    from PuzzleLib.Modules import Conv2D, MaxPool2D, Activation, relu, Flatten, Linear

    from PuzzleLib.Cost.CrossEntropy import CrossEntropy
    from PuzzleLib.Optimizers.NesterovSGD import NesterovSGD

    # Synthetic dataset: 50k random images with random integer class labels.
    data = np.random.randn(50000, 3, 28, 28).astype(np.float32)
    dataTarget = np.random.randint(low=0, high=10,
                                   size=(50000, )).astype(np.int32)

    net = Sequential()
    for module in (
            Conv2D(3, 16, 9), MaxPool2D(), Activation(relu),
            Conv2D(16, 32, 5), MaxPool2D(), Activation(relu),
            Flatten(), Linear(3 * 3 * 32, 10)):
        net.append(module)

    cost = CrossEntropy()

    optimizer = NesterovSGD()
    optimizer.setupOn(net)

    def onMacroBatchFinish(train):
        # Report the running cost after each macro-batch is trained.
        print("Finished mb #%d, error=%s" %
              (train.currMacroBatch, train.cost.getMeanError()))

    trainer = Trainer(net, cost, optimizer)
    trainer.trainFromHost(data,
                          dataTarget,
                          onMacroBatchFinish=onMacroBatchFinish)
def loadNet(modelpath=None, name="OpenPoseFaceNet"):
    """Build the OpenPose face network and optionally load pretrained weights.

    The net is a VGG-style convolutional backbone ending in a 128-map CPM
    feature head (conv5_3_CPM), followed by six prediction stages.  Stage 1
    (branch4) maps the features to 71 output maps; each later stage
    concatenates the 128 backbone feature maps with the 71-map output of the
    previous stage (128 + 71 = 199 channels) and refines it with a stack of
    7x7/1x1 convolutions (the Mconv* layers).

    modelpath -- optional path to saved weights; when given, weights are
        loaded (assumeUniqueNames=True) and the net is switched to eval mode.
    name -- name of the Sequential container, also passed to net.load.
    Returns the assembled Sequential network.
    """
    net = Sequential(name=name)

    # --- VGG-style backbone: 3x3 convs + ReLU, downsampled by 2x2 max-pools.
    net.append(Conv2D(3, 64, 3, pad=1, name="conv1_1"))
    net.append(Activation(relu, name="conv1_1_re"))
    net.append(Conv2D(64, 64, 3, pad=1, name="conv1_2"))
    net.append(Activation(relu, name="conv1_2_re"))

    net.append(MaxPool2D(2, 2, name="pool1"))

    net.append(Conv2D(64, 128, 3, pad=1, name="conv2_1"))
    net.append(Activation(relu, name="conv2_1_re"))
    net.append(Conv2D(128, 128, 3, pad=1, name="conv2_2"))
    net.append(Activation(relu, name="conv2_2_re"))

    net.append(MaxPool2D(2, 2, name="pool2"))

    net.append(Conv2D(128, 256, 3, pad=1, name="conv3_1"))
    net.append(Activation(relu, name="conv3_1_re"))
    net.append(Conv2D(256, 256, 3, pad=1, name="conv3_2"))
    net.append(Activation(relu, name="conv3_2_re"))
    net.append(Conv2D(256, 256, 3, pad=1, name="conv3_3"))
    net.append(Activation(relu, name="conv3_3_re"))
    net.append(Conv2D(256, 256, 3, pad=1, name="conv3_4"))
    net.append(Activation(relu, name="conv3_4_re"))

    net.append(MaxPool2D(2, 2, name="pool3"))

    net.append(Conv2D(256, 512, 3, pad=1, name="conv4_1"))
    net.append(Activation(relu, name="conv4_1_re"))
    net.append(Conv2D(512, 512, 3, pad=1, name="conv4_2"))
    net.append(Activation(relu, name="conv4_2_re"))
    net.append(Conv2D(512, 512, 3, pad=1, name="conv4_3"))
    net.append(Activation(relu, name="conv4_3_re"))
    net.append(Conv2D(512, 512, 3, pad=1, name="conv4_4"))
    net.append(Activation(relu, name="conv4_4_re"))

    net.append(Conv2D(512, 512, 3, pad=1, name="conv5_1"))
    net.append(Activation(relu, name="conv5_1_re"))
    net.append(Conv2D(512, 512, 3, pad=1, name="conv5_2"))
    net.append(Activation(relu, name="conv5_2_re"))

    # CPM feature head: compress to the 128 feature maps shared by all stages.
    net.append(Conv2D(512, 128, 3, pad=1, name="conv5_3_CPM"))
    net.append(Activation(relu, name="conv5_3_CPM_re"))

    # Duplicate the features: one copy feeds the stage chain, one the shortcut.
    net.append(Replicate(2))

    # Shortcuts pass the raw 128-map features through unchanged; each branchN
    # (except branch4) starts with a Replicate so it can feed the next pair.
    shortcut0 = Sequential()
    shortcut0.append(Identity())

    branch0 = Sequential()
    branch0.append(Replicate(2))

    shortcut1 = Sequential()
    shortcut1.append(Identity())

    branch1 = Sequential()
    branch1.append(Replicate(2))

    shortcut2 = Sequential()
    shortcut2.append(Identity())

    branch2 = Sequential()
    branch2.append(Replicate(2))

    shortcut3 = Sequential()
    shortcut3.append(Identity())

    branch3 = Sequential()
    branch3.append(Replicate(2))

    shortcut4 = Sequential()
    shortcut4.append(Identity())

    # branch4 is stage 1: 128 feature maps -> 71 output maps.
    branch4 = Sequential()
    branch4.append(Conv2D(128, 512, 1, pad=0, name="conv6_1_CPM"))
    branch4.append(Activation(relu, name="conv6_1_CPM_re"))
    branch4.append(Conv2D(512, 71, 1, pad=0, name="conv6_2_CPM"))

    # Chain the stages inner-first: each branch embeds the previous branch and
    # its shortcut in a Parallel, so `net` ends up containing the whole chain.
    branches = [branch4, branch3, branch2, branch1, branch0, net]
    shortcuts = [shortcut4, shortcut3, shortcut2, shortcut1, shortcut0, None]

    for branchIdx, branch in enumerate(branches):
        if branchIdx == 0:
            continue  # branch4 (stage 1) has no predecessor to wrap

        branch.append(Parallel().append(branches[branchIdx - 1]).append(
            shortcuts[branchIdx - 1]))
        # 71 stage outputs + 128 shortcut features = 199 concatenated maps.
        branch.append(
            Concat(name="features_in_stage_%d" % (branchIdx + 1), axis=1))

        # Refinement stack: five 7x7 convs, a 1x1 mixer, then the 71-map head.
        branch.append(
            Conv2D(199, 128, 7, pad=3,
                   name="Mconv1_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv1_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 128, 7, pad=3,
                   name="Mconv2_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv2_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 128, 7, pad=3,
                   name="Mconv3_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv3_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 128, 7, pad=3,
                   name="Mconv4_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv4_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 128, 7, pad=3,
                   name="Mconv5_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv5_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 128, 1, pad=0,
                   name="Mconv6_stage%d" % (branchIdx + 1)))
        branch.append(
            Activation(relu, name="Mconv6_stage%d_re" % (branchIdx + 1)))
        branch.append(
            Conv2D(128, 71, 1, pad=0, name="Mconv7_stage%d" % (branchIdx + 1)))

    if modelpath is not None:
        net.load(modelpath, assumeUniqueNames=True, name=name)
        net.evalMode()

    return net