def calcTest():
	"""Check forward/backward shape propagation on a two-input graph that reroutes Split sections."""
	from PuzzleLib.Modules import Linear, Split, Concat, Activation, relu

	inp1 = Linear(100, 50, name="v1").node()
	split = Split(axis=1, sections=(20, 20, 10), name="h1").node(inp1)

	inp2 = Linear(100, 50, name="v2").node()

	# Sections 1 and 2 of the split (20 + 10 cols) are concatenated with the second input branch
	merged = Concat(axis=1, name="h2").node((split, [1, 2]), inp2)
	activated = Activation(relu, name="h3").node(merged)

	# Section 0 of the split (20 cols) rejoins the activated branch at the output
	outNode = Concat(axis=1, name="h4").node((split, 0), activated)

	mlp = Graph(inputs=[inp1, inp2], outputs=outNode)

	batch1 = gpuarray.to_gpu(np.random.randn(5, 100).astype(np.float32))
	batch2 = gpuarray.to_gpu(np.random.randn(5, 100).astype(np.float32))

	mlp.optimizeForShape([batch1.shape, batch2.shape])
	mlp([batch1, batch2])

	assert mlp.data.shape == (5, 100)
	assert mlp.dataShapeFrom([batch1.shape, batch2.shape]) == mlp.data.shape

	grad = gpuarray.to_gpu(np.random.randn(*mlp.data.shape).astype(np.float32))
	mlp.backward(grad)

	assert len(mlp.grad) == 2 and mlp.grad[0].shape == mlp.grad[1].shape == (5, 100)
	assert mlp.gradShapeFrom(grad.shape) == [gr.shape for gr in mlp.grad]
def unittest():
	"""Exercise Parallel forward/backward with a Concat, then slice the container and rerun."""
	from PuzzleLib.Containers.Sequential import Sequential
	from PuzzleLib.Modules import Linear, Activation, sigmoid, Identity, Concat

	wideData = gpuarray.to_gpu(np.random.randn(128, 128).astype(np.float32))
	smallData = gpuarray.to_gpu(np.random.randn(128, 16).astype(np.float32))
	midData = gpuarray.to_gpu(np.random.randn(128, 32).astype(np.float32))

	seq = Sequential()
	seq.append(Linear(128, 64))
	seq.append(Activation(sigmoid))

	# First branch transforms, the other two pass their inputs through untouched
	parallel = Parallel().append(seq).append(Identity()).append(Identity())
	concat = Concat(axis=1)

	parallel([wideData, smallData, midData])
	concat(parallel.data)

	# The identity branch's input must reappear verbatim in the concatenated output
	assert np.allclose(smallData.get(), concat.data.get()[:, 64:64 + 16])

	grad = gpuarray.to_gpu(np.random.randn(128, 112).astype(np.float32))
	concat.backward(grad)
	parallel.backward(concat.grad)

	assert np.allclose(grad.get()[:, 64:64 + 16], parallel.grad[1].get())

	# Slicing a Parallel keeps every other branch; the two identities collapse to one
	parallel = parallel[::2]
	parallel([wideData, midData])
def matchTest():
	"""Verify a hand-built Graph matches an equivalent Sequential/Parallel container exactly."""
	from PuzzleLib.Containers import Sequential, Parallel
	from PuzzleLib.Modules import Linear, Activation, sigmoid, Replicate, Concat

	seq = Sequential()
	seq.append(Linear(128, 64, name="linear-1"))
	seq.append(Activation(sigmoid))
	seq.append(Replicate(times=2))

	parallel = Parallel()
	parallel.append(Linear(64, 10, name="linear-2"))
	parallel.append(Linear(64, 5, name="linear-3"))

	seq.append(parallel)
	seq.append(Concat(axis=1))

	# The same topology expressed as a graph
	inNode = Linear(128, 64, name="linear-1").node()
	hidden = Activation(sigmoid).node(inNode)
	headA = Linear(64, 10, name="linear-2").node(hidden)
	headB = Linear(64, 5, name="linear-3").node(hidden)
	outNode = Concat(axis=1).node(headA, headB)

	mlp = Graph(inputs=inNode, outputs=outNode)

	# Copy the container's weights into the graph so both compute the same function
	for moduleName in ("linear-1", "linear-2", "linear-3"):
		mlp.getByName(moduleName).W.set(seq.getByName(moduleName).W)
		mlp.getByName(moduleName).b.set(seq.getByName(moduleName).b)

	batch = gpuarray.to_gpu(np.random.randn(32, 128).astype(np.float32))

	seq(batch)
	mlp(batch)
	assert np.allclose(seq.data.get(), mlp.data.get())

	grad = gpuarray.to_gpu(np.random.randn(32, 15).astype(np.float32))

	seq.backward(grad)
	mlp.backward(grad)
	assert np.allclose(seq.grad.get(), mlp.grad.get())
def buildGraph():
	"""Assemble a small diamond graph: shared linear+relu trunk feeding two parallel linears, concatenated."""
	from PuzzleLib.Containers import Graph
	from PuzzleLib.Modules import Linear, Activation, relu, Concat

	inp = Linear(20, 10, name="linear-1").node()
	trunk = Activation(relu, name="relu-1").node(inp)

	branchA = Linear(10, 5, name="linear-2").node(trunk)
	branchB = Linear(10, 5, name="linear-3").node(trunk)

	merged = Concat(axis=1, name="concat").node(branchA, branchB)
	return Graph(inputs=inp, outputs=merged)
def mixedTest():
	"""Render a board for a Sequential that embeds a two-input Graph between Split and Activation stages."""
	from PuzzleLib.Containers import Graph, Sequential
	from PuzzleLib.Modules import Linear, Split, Concat, Activation, relu

	inp1 = Linear(100, 50, name="v1").node()
	split = Split(axis=1, sections=(20, 20, 10), name="h1").node(inp1)

	inp2 = Linear(100, 50, name="v2").node()
	merged = Concat(axis=1, name="h2").node((split, [1, 2]), inp2)
	activated = Activation(relu, name="h3").node(merged)
	outNode = Concat(axis=1, name="h4").node((split, 0), activated)

	mlp = Graph(inputs=[inp1, inp2], outputs=outNode)

	# Embed the graph inside a plain Sequential pipeline
	seq = Sequential()
	seq.append(Linear(10, 200))
	seq.append(Split(axis=1, sections=(100, 100)))
	seq.append(mlp)
	seq.append(Activation(relu))

	drawBoard(seq, filename="./TestData/mixed.gv", view=False, modulesOnly=False)
def buildNet():
	"""Build a Sequential net: two linear+relu stages, then a replicated two-way parallel head joined by concat."""
	from PuzzleLib.Containers import Sequential, Parallel
	from PuzzleLib.Modules import Linear, Activation, relu, Replicate, Concat

	seq = Sequential()
	seq.append(Linear(20, 10, name="linear-1"))
	seq.append(Activation(relu, name="relu-1"))
	seq.append(Linear(10, 5, name="linear-2"))
	seq.append(Activation(relu, name="relu-2"))
	seq.append(Replicate(times=2, name="repl"))

	heads = Parallel()
	heads.append(Linear(5, 2, name="linear-3-1"))
	heads.append(Linear(5, 3, name="linear-3-2"))
	seq.append(heads)

	seq.append(Concat(axis=1, name="concat"))
	return seq
def loadNet(modelpath=None, name="OpenPoseFaceNet"):
	"""
	Build the OpenPose face network: a VGG-style convolutional backbone followed by an initial
	prediction stage and five stacked refinement stages, each concatenating the previous stage's
	71-channel output with the shared 128-channel backbone features.

	:param modelpath: optional path to saved weights; when given, they are loaded into the net.
	:param name: network name, also used to select the weight group when loading.
	:return: the assembled Sequential network.
	"""
	net = Sequential(name=name)

	# Backbone block 1: 3 -> 64 channels, then 2x2 max-pool
	net.append(Conv2D(3, 64, 3, pad=1, name="conv1_1"))
	net.append(Activation(relu, name="conv1_1_re"))
	net.append(Conv2D(64, 64, 3, pad=1, name="conv1_2"))
	net.append(Activation(relu, name="conv1_2_re"))
	net.append(MaxPool2D(2, 2, name="pool1"))

	# Backbone block 2: 64 -> 128 channels
	net.append(Conv2D(64, 128, 3, pad=1, name="conv2_1"))
	net.append(Activation(relu, name="conv2_1_re"))
	net.append(Conv2D(128, 128, 3, pad=1, name="conv2_2"))
	net.append(Activation(relu, name="conv2_2_re"))
	net.append(MaxPool2D(2, 2, name="pool2"))

	# Backbone block 3: 128 -> 256 channels
	net.append(Conv2D(128, 256, 3, pad=1, name="conv3_1"))
	net.append(Activation(relu, name="conv3_1_re"))
	net.append(Conv2D(256, 256, 3, pad=1, name="conv3_2"))
	net.append(Activation(relu, name="conv3_2_re"))
	net.append(Conv2D(256, 256, 3, pad=1, name="conv3_3"))
	net.append(Activation(relu, name="conv3_3_re"))
	net.append(Conv2D(256, 256, 3, pad=1, name="conv3_4"))
	net.append(Activation(relu, name="conv3_4_re"))
	net.append(MaxPool2D(2, 2, name="pool3"))

	# Backbone block 4: 256 -> 512 channels (no pooling after this block)
	net.append(Conv2D(256, 512, 3, pad=1, name="conv4_1"))
	net.append(Activation(relu, name="conv4_1_re"))
	net.append(Conv2D(512, 512, 3, pad=1, name="conv4_2"))
	net.append(Activation(relu, name="conv4_2_re"))
	net.append(Conv2D(512, 512, 3, pad=1, name="conv4_3"))
	net.append(Activation(relu, name="conv4_3_re"))
	net.append(Conv2D(512, 512, 3, pad=1, name="conv4_4"))
	net.append(Activation(relu, name="conv4_4_re"))

	# Backbone block 5, squeezed down to the 128-channel shared CPM feature map
	net.append(Conv2D(512, 512, 3, pad=1, name="conv5_1"))
	net.append(Activation(relu, name="conv5_1_re"))
	net.append(Conv2D(512, 512, 3, pad=1, name="conv5_2"))
	net.append(Activation(relu, name="conv5_2_re"))
	net.append(Conv2D(512, 128, 3, pad=1, name="conv5_3_CPM"))
	net.append(Activation(relu, name="conv5_3_CPM_re"))

	# Duplicate the shared features so the first stage and its shortcut both receive them
	net.append(Replicate(2))

	# Each stage pairs a "branch" (computation up to that stage) with a "shortcut"
	# (identity pass-through of the shared features feeding the next stage)
	shortcut0 = Sequential()
	shortcut0.append(Identity())
	branch0 = Sequential()
	branch0.append(Replicate(2))

	shortcut1 = Sequential()
	shortcut1.append(Identity())
	branch1 = Sequential()
	branch1.append(Replicate(2))

	shortcut2 = Sequential()
	shortcut2.append(Identity())
	branch2 = Sequential()
	branch2.append(Replicate(2))

	shortcut3 = Sequential()
	shortcut3.append(Identity())
	branch3 = Sequential()
	branch3.append(Replicate(2))

	shortcut4 = Sequential()
	shortcut4.append(Identity())

	# Innermost stage (stage 1): 1x1 convolutions producing the initial 71-channel maps
	branch4 = Sequential()
	branch4.append(Conv2D(128, 512, 1, pad=0, name="conv6_1_CPM"))
	branch4.append(Activation(relu, name="conv6_1_CPM_re"))
	branch4.append(Conv2D(512, 71, 1, pad=0, name="conv6_2_CPM"))

	# Stages are chained from the innermost (branch4, stage 1) outward to the full net (stage 6):
	# each later branch runs the previous branch in parallel with its shortcut, concatenates
	# (71 + 128 = 199 channels) and refines with 7x7 convolutions back down to 71 channels.
	branches = [branch4, branch3, branch2, branch1, branch0, net]
	shortcuts = [shortcut4, shortcut3, shortcut2, shortcut1, shortcut0, None]

	for branchIdx, branch in enumerate(branches):
		if branchIdx == 0:
			continue

		branch.append(Parallel().append(branches[branchIdx - 1]).append(shortcuts[branchIdx - 1]))
		branch.append(Concat(name="features_in_stage_%d" % (branchIdx + 1), axis=1))

		branch.append(Conv2D(199, 128, 7, pad=3, name="Mconv1_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv1_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 128, 7, pad=3, name="Mconv2_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv2_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 128, 7, pad=3, name="Mconv3_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv3_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 128, 7, pad=3, name="Mconv4_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv4_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 128, 7, pad=3, name="Mconv5_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv5_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 128, 1, pad=0, name="Mconv6_stage%d" % (branchIdx + 1)))
		branch.append(Activation(relu, name="Mconv6_stage%d_re" % (branchIdx + 1)))
		branch.append(Conv2D(128, 71, 1, pad=0, name="Mconv7_stage%d" % (branchIdx + 1)))

	if modelpath is not None:
		net.load(modelpath, assumeUniqueNames=True, name=name)
		# NOTE(review): reconstructed from flattened source — evalMode is assumed to apply only
		# after loading weights; confirm the original indentation placed it inside this branch
		net.evalMode()

	return net