Example #1
0
File: LeNet.py  Project: horcham/numpy-net
    def __init__(self, imagesize, flatten_size, num_labels, lr=1e-3):
        """Assemble the LeNet-style computation graph.

        Args:
            imagesize: (N, C, W, H) shape of the input batch.
            flatten_size: feature count produced by Flatten; depends on the
                input spatial size.
            num_labels: number of output classes.
            lr: learning rate attached to every trainable Variable.
        """
        self.N, self.C, self.W, self.H = imagesize
        self.num_labels = num_labels
        self.lr = lr

        self.graph = nn.Graph()
        # Input placeholder; fed with image batches at train/predict time.
        self.X = nn.Placeholder()

        # Trainable parameters: two 5x5 conv layers, then two FC layers.
        w_conv0 = nn.Variable(nn.UniformInit([5, 5, self.C, 32]), lr=lr)
        self.graph.add_var(w_conv0)
        b_conv0 = nn.Variable(nn.UniformInit([32, 1]), lr=lr)
        self.graph.add_var(b_conv0)
        w_conv1 = nn.Variable(nn.UniformInit([5, 5, 32, 64]), lr=lr)
        self.graph.add_var(w_conv1)
        b_conv1 = nn.Variable(nn.UniformInit([64, 1]), lr=lr)
        self.graph.add_var(b_conv1)
        w_fc0 = nn.Variable(nn.UniformInit([flatten_size, 1024]), lr=lr)
        self.graph.add_var(w_fc0)
        b_fc0 = nn.Variable(nn.UniformInit([1024, 1]), lr=lr)
        self.graph.add_var(b_fc0)
        w_fc1 = nn.Variable(nn.UniformInit([1024, num_labels]), lr=lr)
        self.graph.add_var(w_fc1)
        b_fc1 = nn.Variable(nn.UniformInit([num_labels, 1]), lr=lr)
        self.graph.add_var(b_fc1)

        # Stage 1: conv -> ReLU -> 2x2 max-pool.
        conv_a = nn.Op(nn.Conv2d(padding='valid'), self.X, [w_conv0, b_conv0])
        self.graph.add_op(conv_a)
        relu_a = nn.Layer(nn.ReluActivator(), conv_a)
        self.graph.add_layer(relu_a)

        pool_a = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), relu_a)
        self.graph.add_op(pool_a)

        # Stage 2: conv -> ReLU -> 2x2 max-pool.
        conv_b = nn.Op(nn.Conv2d(padding='valid'), pool_a, [w_conv1, b_conv1])
        self.graph.add_op(conv_b)
        relu_b = nn.Layer(nn.ReluActivator(), conv_b)
        self.graph.add_layer(relu_b)

        pool_b = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), relu_b)
        self.graph.add_op(pool_b)

        # Classifier head: flatten -> FC(1024) + ReLU + dropout -> FC(labels).
        flat = nn.Op(nn.Flatten(), pool_b)
        self.graph.add_op(flat)

        hidden = nn.Op(nn.Dot(), flat, [w_fc0, b_fc0])
        self.graph.add_op(hidden)
        hidden_relu = nn.Layer(nn.ReluActivator(), hidden)
        self.graph.add_layer(hidden_relu)
        hidden_drop = nn.Op(nn.Dropout(0.3), hidden_relu)
        self.graph.add_op(hidden_drop)

        # Final layer emits raw logits; the softmax loss handles activation.
        logits = nn.Op(nn.Dot(), hidden_drop, [w_fc1, b_fc1])
        self.graph.add_op(logits)

        self.graph.add_loss(nn.Loss(nn.Softmax()))
        self.graph.add_optimizer(nn.AdamOptimizer())
Example #2
0
File: VGG16.py  Project: horcham/numpy-net
    def __init__(self, imagesize, flatten_size, num_labels, lr=1e-4):
        """Build a VGG-style convolutional graph.

        Five conv stages (2, 2, 4, 3, 3 convolutions of 64/128/256/512/512
        3x3 'same' filters, each conv followed by ReLU), a 2x2/stride-2
        max-pool after every stage, then FC(4096) -> FC(4096) ->
        FC(num_labels) feeding a softmax loss.

        Args:
            imagesize: (N, C, W, H) shape of the input batch.
            flatten_size: feature count after the final pool + Flatten;
                depends on the input spatial size.
            num_labels: number of output classes.
            lr: learning rate attached to every trainable Variable.
        """
        self.N, self.C, self.W, self.H = imagesize
        self.num_labels = num_labels
        self.lr = lr

        self.graph = nn.Graph()
        # Input placeholder; fed with image batches at train/predict time.
        self.X = nn.Placeholder()

        # Stage 1: two 64-filter convs.
        # NOTE(review): conv weights use lr=self.lr while biases use lr=lr --
        # identical values here, but the mix looks unintentional; verify.
        W0 = nn.Variable(nn.UniformInit([3, 3, self.C, 64]), lr=self.lr)
        self.graph.add_var(W0)
        b0 = nn.Variable(nn.UniformInit([64, 1]), lr=lr)
        self.graph.add_var(b0)
        conv0 = nn.Op(nn.Conv2d(padding='same'), self.X, [W0, b0])
        self.graph.add_op(conv0)
        act0 = nn.Layer(nn.ReluActivator(), conv0)
        self.graph.add_layer(act0)

        W1 = nn.Variable(nn.UniformInit([3, 3, 64, 64]), lr=self.lr)
        self.graph.add_var(W1)
        b1 = nn.Variable(nn.UniformInit([64, 1]), lr=lr)
        self.graph.add_var(b1)
        conv1 = nn.Op(nn.Conv2d(padding='same'), act0, [W1, b1])
        self.graph.add_op(conv1)
        act1 = nn.Layer(nn.ReluActivator(), conv1)
        self.graph.add_layer(act1)

        pool1 = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), act1)
        self.graph.add_op(pool1)

        # Stage 2: two 128-filter convs.
        W2 = nn.Variable(nn.UniformInit([3, 3, 64, 128]), lr=self.lr)
        self.graph.add_var(W2)
        b2 = nn.Variable(nn.UniformInit([128, 1]), lr=lr)
        self.graph.add_var(b2)
        conv2 = nn.Op(nn.Conv2d(padding='same'), pool1, [W2, b2])
        self.graph.add_op(conv2)
        act2 = nn.Layer(nn.ReluActivator(), conv2)
        self.graph.add_layer(act2)

        W3 = nn.Variable(nn.UniformInit([3, 3, 128, 128]), lr=self.lr)
        self.graph.add_var(W3)
        b3 = nn.Variable(nn.UniformInit([128, 1]), lr=lr)
        self.graph.add_var(b3)
        conv3 = nn.Op(nn.Conv2d(padding='same'), act2, [W3, b3])
        self.graph.add_op(conv3)
        act3 = nn.Layer(nn.ReluActivator(), conv3)
        self.graph.add_layer(act3)

        pool3 = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), act3)
        self.graph.add_op(pool3)

        # Stage 3: four 256-filter convs.
        W4 = nn.Variable(nn.UniformInit([3, 3, 128, 256]), lr=self.lr)
        self.graph.add_var(W4)
        b4 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_var(b4)
        conv4 = nn.Op(nn.Conv2d(padding='same'), pool3, [W4, b4])
        self.graph.add_op(conv4)
        act4 = nn.Layer(nn.ReluActivator(), conv4)
        self.graph.add_layer(act4)

        W5 = nn.Variable(nn.UniformInit([3, 3, 256, 256]), lr=self.lr)
        self.graph.add_var(W5)
        b5 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_var(b5)
        conv5 = nn.Op(nn.Conv2d(padding='same'), act4, [W5, b5])
        self.graph.add_op(conv5)
        act5 = nn.Layer(nn.ReluActivator(), conv5)
        self.graph.add_layer(act5)

        W6 = nn.Variable(nn.UniformInit([3, 3, 256, 256]), lr=self.lr)
        self.graph.add_var(W6)
        b6 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_var(b6)
        conv6 = nn.Op(nn.Conv2d(padding='same'), act5, [W6, b6])
        self.graph.add_op(conv6)
        act6 = nn.Layer(nn.ReluActivator(), conv6)
        self.graph.add_layer(act6)

        W7 = nn.Variable(nn.UniformInit([3, 3, 256, 256]), lr=self.lr)
        self.graph.add_var(W7)
        b7 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_var(b7)
        conv7 = nn.Op(nn.Conv2d(padding='same'), act6, [W7, b7])
        self.graph.add_op(conv7)
        act7 = nn.Layer(nn.ReluActivator(), conv7)
        self.graph.add_layer(act7)

        pool8 = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), act7)
        self.graph.add_op(pool8)

        # Stage 4: three 512-filter convs.
        # (Layer numbering skips indices used by pooling stages; naming only.)
        W9 = nn.Variable(nn.UniformInit([3, 3, 256, 512]), lr=self.lr)
        self.graph.add_var(W9)
        b9 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b9)
        conv9 = nn.Op(nn.Conv2d(padding='same'), pool8, [W9, b9])
        self.graph.add_op(conv9)
        act9 = nn.Layer(nn.ReluActivator(), conv9)
        self.graph.add_layer(act9)

        W10 = nn.Variable(nn.UniformInit([3, 3, 512, 512]), lr=self.lr)
        self.graph.add_var(W10)
        b10 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b10)
        conv10 = nn.Op(nn.Conv2d(padding='same'), act9, [W10, b10])
        self.graph.add_op(conv10)
        act10 = nn.Layer(nn.ReluActivator(), conv10)
        self.graph.add_layer(act10)

        W11 = nn.Variable(nn.UniformInit([3, 3, 512, 512]), lr=self.lr)
        self.graph.add_var(W11)
        b11 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b11)
        conv11 = nn.Op(nn.Conv2d(padding='same'), act10, [W11, b11])
        self.graph.add_op(conv11)
        act11 = nn.Layer(nn.ReluActivator(), conv11)
        self.graph.add_layer(act11)

        pool12 = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), act11)
        self.graph.add_op(pool12)

        # Stage 5: three 512-filter convs.
        W13 = nn.Variable(nn.UniformInit([3, 3, 512, 512]), lr=self.lr)
        self.graph.add_var(W13)
        b13 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b13)
        conv13 = nn.Op(nn.Conv2d(padding='same'), pool12, [W13, b13])
        self.graph.add_op(conv13)
        act13 = nn.Layer(nn.ReluActivator(), conv13)
        self.graph.add_layer(act13)

        W14 = nn.Variable(nn.UniformInit([3, 3, 512, 512]), lr=self.lr)
        self.graph.add_var(W14)
        b14 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b14)
        conv14 = nn.Op(nn.Conv2d(padding='same'), act13, [W14, b14])
        self.graph.add_op(conv14)
        act14 = nn.Layer(nn.ReluActivator(), conv14)
        self.graph.add_layer(act14)

        W15 = nn.Variable(nn.UniformInit([3, 3, 512, 512]), lr=self.lr)
        self.graph.add_var(W15)
        b15 = nn.Variable(nn.UniformInit([512, 1]), lr=lr)
        self.graph.add_var(b15)
        conv15 = nn.Op(nn.Conv2d(padding='same'), act14, [W15, b15])
        self.graph.add_op(conv15)
        act15 = nn.Layer(nn.ReluActivator(), conv15)
        self.graph.add_layer(act15)

        pool16 = nn.Op(nn.MaxPooling(filter_h=2, filter_w=2, stride=2), act15)
        self.graph.add_op(pool16)

        # Classifier head: flatten then three FC layers.
        fla = nn.Op(nn.Flatten(), pool16)
        self.graph.add_op(fla)

        WFC0 = nn.Variable(nn.UniformInit([flatten_size, 4096]), lr=lr)
        self.graph.add_var(WFC0)
        bFC0 = nn.Variable(nn.UniformInit([4096, 1]), lr=lr)
        self.graph.add_var(bFC0)
        FC0 = nn.Op(nn.Dot(), fla, [WFC0, bFC0])
        self.graph.add_op(FC0)
        Fact0 = nn.Layer(nn.ReluActivator(), FC0)
        self.graph.add_layer(Fact0)

        WFC1 = nn.Variable(nn.UniformInit([4096, 4096]), lr=lr)
        self.graph.add_var(WFC1)
        bFC1 = nn.Variable(nn.UniformInit([4096, 1]), lr=lr)
        self.graph.add_var(bFC1)
        FC1 = nn.Op(nn.Dot(), Fact0, [WFC1, bFC1])
        self.graph.add_op(FC1)
        Fact1 = nn.Layer(nn.ReluActivator(), FC1)
        self.graph.add_layer(Fact1)

        WFC2 = nn.Variable(nn.UniformInit([4096, num_labels]), lr=lr)
        self.graph.add_var(WFC2)
        bFC2 = nn.Variable(nn.UniformInit([num_labels, 1]), lr=lr)
        self.graph.add_var(bFC2)
        FC2 = nn.Op(nn.Dot(), Fact1, [WFC2, bFC2])
        self.graph.add_op(FC2)
        # Final layer intentionally has no activation: FC2 emits raw logits
        # that feed the softmax loss directly.
        # Fact2 = nn.Layer(nn.ReluActivator(), FC2)
        # self.graph.add_layer(Fact2)

        self.graph.add_loss(nn.Loss(nn.Softmax()))
        self.graph.add_optimizer((nn.AdamOptimizer()))
Example #3
0
    def __init__(self, imagesize, flatten_size, num_labels, lr=1e-3):
        """Build an AlexNet-style graph: 5 conv layers + 4 FC layers.

        Args:
            imagesize: (N, C, W, H) shape of the input batch.
            flatten_size: feature count after the final pool + Flatten;
                depends on the input spatial size.
            num_labels: number of output classes.
            lr: learning rate attached to every trainable Variable.
        """
        self.N, self.C, self.W, self.H = imagesize
        self.num_labels = num_labels
        self.lr = lr

        self.graph = nn.Graph()
        # Input placeholder; fed with image batches at train/predict time.
        self.X = nn.Placeholder()

        # Conv 0: 11x11/96, stride 4, then 3x3/stride-2 max-pool.
        W0 = nn.Variable(nn.UniformInit([11, 11, self.C, 96]), lr=lr)
        b0 = nn.Variable(nn.UniformInit([96, 1]), lr=lr)
        self.graph.add_vars([W0, b0])
        conv0 = nn.Op(nn.Conv2d(padding='same', stride=4), self.X, [W0, b0])
        self.graph.add_op(conv0)
        act0 = nn.Layer(nn.ReluActivator(), conv0)
        self.graph.add_layer(act0)
        pool0 = nn.Op(nn.MaxPooling(filter_h=3, filter_w=3, stride=2), act0)
        self.graph.add_op(pool0)

        # Conv 1: 5x5/256, then max-pool.
        W1 = nn.Variable(nn.UniformInit([5, 5, 96, 256]), lr=lr)
        b1 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_vars([W1, b1])
        conv1 = nn.Op(nn.Conv2d(padding='same', stride=1), pool0, [W1, b1])
        self.graph.add_op(conv1)
        act1 = nn.Layer(nn.ReluActivator(), conv1)
        self.graph.add_layer(act1)
        pool1 = nn.Op(nn.MaxPooling(filter_h=3, filter_w=3, stride=2), act1)
        self.graph.add_op(pool1)

        # Conv 2-4: 5x5 convs at 384/384/256 filters, no pooling between.
        W2 = nn.Variable(nn.UniformInit([5, 5, 256, 384]), lr=lr)
        b2 = nn.Variable(nn.UniformInit([384, 1]), lr=lr)
        self.graph.add_vars([W2, b2])
        conv2 = nn.Op(nn.Conv2d(padding='same', stride=1), pool1, [W2, b2])
        self.graph.add_op(conv2)
        act2 = nn.Layer(nn.ReluActivator(), conv2)
        self.graph.add_layer(act2)

        W3 = nn.Variable(nn.UniformInit([5, 5, 384, 384]), lr=lr)
        b3 = nn.Variable(nn.UniformInit([384, 1]), lr=lr)
        self.graph.add_vars([W3, b3])
        conv3 = nn.Op(nn.Conv2d(padding='same', stride=1), act2, [W3, b3])
        self.graph.add_op(conv3)
        act3 = nn.Layer(nn.ReluActivator(), conv3)
        self.graph.add_layer(act3)

        W4 = nn.Variable(nn.UniformInit([5, 5, 384, 256]), lr=lr)
        b4 = nn.Variable(nn.UniformInit([256, 1]), lr=lr)
        self.graph.add_vars([W4, b4])
        conv4 = nn.Op(nn.Conv2d(padding='same', stride=1), act3, [W4, b4])
        self.graph.add_op(conv4)
        act4 = nn.Layer(nn.ReluActivator(), conv4)
        self.graph.add_layer(act4)

        pool5 = nn.Op(nn.MaxPooling(filter_h=3, filter_w=3, stride=2), act4)
        self.graph.add_op(pool5)

        fla = nn.Op(nn.Flatten(), pool5)
        self.graph.add_op(fla)

        # FC head. Fixed: the FC weights/biases were never registered with
        # the graph (no add_vars call), so the optimizer could not update
        # them; register each pair like the conv parameters above.
        WFC0 = nn.Variable(nn.UniformInit([flatten_size, 4096]), lr=lr)
        bFC0 = nn.Variable(nn.UniformInit([4096, 1]), lr=lr)
        self.graph.add_vars([WFC0, bFC0])
        FC0 = nn.Op(nn.Dot(), fla, [WFC0, bFC0])
        self.graph.add_op(FC0)
        Fact0 = nn.Layer(nn.ReluActivator(), FC0)
        self.graph.add_layer(Fact0)
        dp0 = nn.Op(nn.Dropout(0.3), Fact0)
        self.graph.add_op(dp0)

        WFC1 = nn.Variable(nn.UniformInit([4096, 4096]), lr=lr)
        bFC1 = nn.Variable(nn.UniformInit([4096, 1]), lr=lr)
        self.graph.add_vars([WFC1, bFC1])
        FC1 = nn.Op(nn.Dot(), dp0, [WFC1, bFC1])
        self.graph.add_op(FC1)
        Fact1 = nn.Layer(nn.ReluActivator(), FC1)
        self.graph.add_layer(Fact1)
        dp1 = nn.Op(nn.Dropout(0.3), Fact1)
        self.graph.add_op(dp1)

        WFC2 = nn.Variable(nn.UniformInit([4096, 1000]), lr=lr)
        bFC2 = nn.Variable(nn.UniformInit([1000, 1]), lr=lr)
        self.graph.add_vars([WFC2, bFC2])
        FC2 = nn.Op(nn.Dot(), dp1, [WFC2, bFC2])
        self.graph.add_op(FC2)
        Fact2 = nn.Layer(nn.ReluActivator(), FC2)
        self.graph.add_layer(Fact2)
        dp2 = nn.Op(nn.Dropout(0.3), Fact2)
        self.graph.add_op(dp2)

        # Output layer. Fixed: previously a ReLU + Dropout was applied to the
        # final logits before the softmax loss, which zeroes negative logits
        # and randomly drops classes; the sibling LeNet/VGG16 networks feed
        # the last FC straight into the loss, so do the same here.
        WFC3 = nn.Variable(nn.UniformInit([1000, num_labels]), lr=lr)
        bFC3 = nn.Variable(nn.UniformInit([num_labels, 1]), lr=lr)
        self.graph.add_vars([WFC3, bFC3])
        FC3 = nn.Op(nn.Dot(), dp2, [WFC3, bFC3])
        self.graph.add_op(FC3)

        self.graph.add_loss(nn.Loss(nn.Softmax()))
        self.graph.add_optimizer(nn.AdamOptimizer())
Example #4
0
        te_Y += label
    tr_X, te_X = np.vstack(tr_X), np.vstack(te_X)
    tr_Y, te_Y = np.array(tr_Y), np.array(te_Y)

    tr_X = nn.scaleallimage(tr_X, (112, 112))
    te_X = nn.scaleallimage(te_X, (112, 112))

    tr_X, te_X = tr_X.astype(np.float32) / 127.5 - 1.0, te_X.astype(
        np.float32) / 127.5 - 1.0
    return tr_X, tr_Y, te_X, te_Y


X_train, Y_train, X_test, Y_test = readdata()
print(X_train.shape, Y_train.shape, X_test.shape, Y_test.shape)

# Wrap the data as graph Variables; lr=0 keeps them frozen (not trained).
# Labels get a trailing axis added, then one-hot encoding.
X_train = nn.Variable(X_train, lr=0)
Y_train = nn.Variable(nn.onehot(np.expand_dims(Y_train, axis=1)), lr=0)

X_test = nn.Variable(X_test, lr=0)
Y_test = nn.Variable(nn.onehot(np.expand_dims(Y_test, axis=1)), lr=0)

# lenet = LeNet(X_train.value.shape, 512, 10)
# lenet.train(X_train, Y_train, X_test, Y_test, epochs=10, batchsize=10)

# res18 = ResNet18(X_train.value.shape, 25088, 10)
Example #5
0
    def __init__(self, imagesize, flatten_size, num_labels, lr=1e-3):
        """Build a ResNet-18-style graph.

        Layout: conv+BN+ReLU stem, then four stages of two residual blocks
        (64, 128, 256, 512 channels); the first block of each widening stage
        uses a projection shortcut. A 2x2 max-pool follows every stage, then
        Flatten and a single FC layer into a softmax loss.

        Fixes vs. the original:
        - spatial dims for the BatchNorm gamma/beta used ``self.W / 2`` etc.,
          which is float division on Python 3 and passes float dimensions to
          UniformInit; now integer ``//``.
        - the input placeholder was a local ``X``; stored as ``self.X`` so
          training code can feed it, consistent with the sibling networks.

        Args:
            imagesize: (N, C, W, H) shape of the input batch.
            flatten_size: feature count after the final pool + Flatten.
            num_labels: number of output classes.
            lr: learning rate attached to every trainable Variable.
        """
        self.N, self.C, self.W, self.H = imagesize
        self.num_labels = num_labels
        self.lr = lr

        self.graph = nn.Graph()
        self.X = nn.Placeholder()

        def _conv_params(c_in, c_out, w, h):
            """Weight, bias and BatchNorm gamma/beta for one 3x3 conv.

            Returned in creation order [W, b, gamma, beta]; the caller
            registers them with the graph via add_vars.
            """
            return [nn.Variable(nn.UniformInit([3, 3, c_in, c_out]), lr=lr),
                    nn.Variable(nn.UniformInit([c_out, 1]), lr=lr),
                    nn.Variable(nn.UniformInit([1, c_out, w, h]), lr=lr),
                    nn.Variable(nn.UniformInit([1, c_out, w, h]), lr=lr)]

        def _res_block(inp, c_in, c_out, w, h, shortcut=False):
            """Two-conv residual block; optional projection shortcut when
            the channel count changes (c_in != c_out)."""
            p1 = _conv_params(c_in, c_out, w, h)
            p2 = _conv_params(c_out, c_out, w, h)
            params = {'w1': p1[0], 'b1': p1[1],
                      'gamma1': p1[2], 'beta1': p1[3],
                      'w2': p2[0], 'b2': p2[1],
                      'gamma2': p2[2], 'beta2': p2[3]}
            if shortcut:
                ps = _conv_params(c_in, c_out, w, h)
                self.graph.add_vars(p1 + p2 + ps)
                block = nn.ResBlock(inp, params,
                                    {'w': ps[0], 'b': ps[1],
                                     'gamma': ps[2], 'beta': ps[3]})
            else:
                self.graph.add_vars(p1 + p2)
                block = nn.ResBlock(inp, params)
            self.graph.add_block(block)
            return block

        # Stem: conv -> BN -> ReLU at full resolution, 64 channels.
        p0 = _conv_params(self.C, 64, self.W, self.H)
        self.graph.add_vars(p0)
        conv0 = nn.Op(nn.Conv2d(), self.X, [p0[0], p0[1]])
        bn0 = nn.Op(nn.BatchNorm(p0[2], p0[3]), conv0)
        act0 = nn.Layer(nn.ReluActivator(), bn0)
        self.graph.add_ops([conv0, bn0])
        self.graph.add_layer(act0)

        # Stage 1: two 64-channel blocks at (W, H), then pool.
        B1 = _res_block(act0, 64, 64, self.W, self.H)
        B2 = _res_block(B1, 64, 64, self.W, self.H)
        pool2 = nn.Op(nn.MaxPooling(2, 2), B2)
        # NOTE(review): pooling ops are registered via add_layer here while
        # the sibling networks use add_op -- preserved as-is; verify intended.
        self.graph.add_layer(pool2)

        # Stage 2: 128 channels at (W//2, H//2); first block projects.
        w2, h2 = self.W // 2, self.H // 2
        B3 = _res_block(pool2, 64, 128, w2, h2, shortcut=True)
        B4 = _res_block(B3, 128, 128, w2, h2)
        pool4 = nn.Op(nn.MaxPooling(2, 2), B4)
        self.graph.add_layer(pool4)

        # Stage 3: 256 channels at (W//4, H//4).
        w4, h4 = self.W // 4, self.H // 4
        B5 = _res_block(pool4, 128, 256, w4, h4, shortcut=True)
        B6 = _res_block(B5, 256, 256, w4, h4)
        pool6 = nn.Op(nn.MaxPooling(2, 2), B6)
        self.graph.add_layer(pool6)

        # Stage 4: 512 channels at (W//8, H//8).
        w8, h8 = self.W // 8, self.H // 8
        B7 = _res_block(pool6, 256, 512, w8, h8, shortcut=True)
        B8 = _res_block(B7, 512, 512, w8, h8)
        pool9 = nn.Op(nn.MaxPooling(2, 2), B8)
        self.graph.add_layer(pool9)

        # Flatten and the final fully connected classifier (raw logits).
        fla = nn.Op(nn.Flatten(), pool9)
        self.graph.add_op(fla)

        WFC10 = nn.Variable(nn.UniformInit([flatten_size, num_labels]), lr=lr)
        self.graph.add_var(WFC10)
        bFC10 = nn.Variable(nn.UniformInit([num_labels, 1]), lr=lr)
        self.graph.add_var(bFC10)
        FC10 = nn.Op(nn.Dot(), fla, [WFC10, bFC10])
        self.graph.add_op(FC10)

        # Softmax cross-entropy loss, optimized with Adam.
        self.graph.add_loss(nn.Loss(nn.Softmax()))
        self.graph.add_optimizer(nn.AdamOptimizer())