Example #1
File: network.py  Project: lyuyanyii/CIFAR
def res_layer(inp, chl, stride=1, proj=False):
    # Bottleneck residual block (1x1 -> 3x3 -> 1x1 convolutions) with an
    # SE (squeeze-and-excitation) gate applied after each convolution.
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    chl //= 4
    name = inp.name
    # Global Average Pooling: one descriptor value per channel
    SE = inp.mean(axis=3).mean(axis=2)
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl,
                         nonlinearity=Sigmoid())
    # rescale every channel of the feature map by its sigmoid gate
    inp = inp * lay.dimshuffle(0, 1, 'x', 'x')
    chl *= 4
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    chl //= 4
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl,
                         nonlinearity=Sigmoid())
    inp = inp * lay.dimshuffle(0, 1, 'x', 'x')
    chl *= 4
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl,
                         nonlinearity=Sigmoid())
    inp = inp * lay.dimshuffle(0, 1, 'x', 'x')
    # shortcut addition followed by ReLU
    inp = arith.ReLU(inp + pre)
    return inp
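
All three SE blocks in Example #1 follow the same squeeze-excite-scale pattern. The snippet below is a minimal NumPy sketch of that pattern, written as my own illustration with random stand-in weights rather than code from the repository; the hidden width of the two dense layers varies across the examples (it equals the channel count in Examples #1 and #5 and a quarter of it in Example #2).

import numpy as np

# Minimal sketch of an SE gate with random stand-in weights (illustration only,
# not repository code): squeeze each channel to one value, run two small dense
# layers (ReLU then sigmoid), and rescale every channel by the resulting gate.
rng = np.random.default_rng(0)

def se_gate(feat, hidden_dim):
    n, c = feat.shape[:2]
    squeezed = feat.mean(axis=(2, 3))                   # global average pooling -> (N, C)
    w0 = rng.standard_normal((c, hidden_dim)) * 0.01    # stands in for fc0
    w1 = rng.standard_normal((hidden_dim, c)) * 0.01    # stands in for fc1
    hidden = np.maximum(squeezed @ w0, 0.0)             # ReLU
    gate = 1.0 / (1.0 + np.exp(-(hidden @ w1)))         # sigmoid, one weight per channel
    return feat * gate[:, :, None, None]                # broadcast like dimshuffle(0, 1, 'x', 'x')

x = rng.standard_normal((2, 64, 8, 8))
print(se_gate(x, 64).shape)                             # (2, 64, 8, 8)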
Example #2
File: network.py  Project: lyuyanyii/CIFAR
def res_layer(inp, chl, stride=1, proj=False, se=None):
    # Bottleneck residual block whose SE gate also sees the descriptor carried
    # over from the previous block (`se`); returns the new carry for the next block.
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    name = inp.name
    # Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    # fc0: compress this block's descriptor
    SE = FullyConnected(
        "fc0({})".format(name), SE, output_dim=chl // 4,
        nonlinearity=ReLU()
        )
    # concatenate with the descriptor carried in from the previous block
    if se is None:
        se = SE
    else:
        se = O.Concat([se, SE], axis=1)
    # fc1: sigmoid gate driven by the concatenated descriptor
    SE = FullyConnected(
        "fc1({})".format(name), se, output_dim=chl,
        nonlinearity=Sigmoid()
        )
    # recompress the concatenation to chl // 4 as the carry for the next block
    se = FullyConnected(
        "fc({})".format(se.name), se, output_dim=chl // 4,
        nonlinearity=ReLU()
        )
    inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
    inp = arith.ReLU(inp + pre)
    return inp, se
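
Example #2 differs from Example #1 in that each block also receives the compressed channel descriptor produced by the previous block, concatenates it with its own squeezed descriptor, and hands a recompressed version on to the next block. Below is a minimal NumPy sketch of that carry mechanism, my own illustration with random stand-in weights rather than repository code.

import numpy as np

# Sketch of the descriptor carry in Example #2 (illustration only, random weights).
rng = np.random.default_rng(0)

def dense(x, out_dim):
    # Stand-in for FullyConnected with freshly drawn random weights.
    return x @ (rng.standard_normal((x.shape[1], out_dim)) * 0.01)

def block(feat, carry, chl):
    squeezed = feat.mean(axis=(2, 3))                   # global average pooling -> (N, chl)
    desc = np.maximum(dense(squeezed, chl // 4), 0.0)   # fc0 + ReLU
    cat = desc if carry is None else np.concatenate([carry, desc], axis=1)
    gate = 1.0 / (1.0 + np.exp(-dense(cat, chl)))       # fc1 + sigmoid over the concatenation
    new_carry = np.maximum(dense(cat, chl // 4), 0.0)   # recompress the carry for the next block
    return feat * gate[:, :, None, None], new_carry

x, carry = rng.standard_normal((2, 64, 8, 8)), None
for _ in range(3):                                      # three consecutive blocks
    x, carry = block(x, carry, 64)
print(x.shape, carry.shape)                             # (2, 64, 8, 8) (2, 16)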
Example #3
def res_layer(inp, chl, stride=1, proj=False):
    # Bottleneck residual block: the first convolution is followed by a softmax
    # gate over `width` channel-rotated copies of the feature map, and the last
    # convolution by a standard sigmoid SE gate.
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    chl //= 4
    name = inp.name
    # Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    out_lay = 0
    width = 4
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl * width,
                         nonlinearity=Identity())
    # reshape to (batch, channel, width) logits and normalize over the width axis
    lay = lay.reshape(inp.shape[0], chl, width)
    lay = Softmax("softmax({})".format(name), lay, axis=2)
    for i in range(width):
        if i == 0:
            inp_lay = inp
        else:
            # rotate the channel axis by `width` positions
            inp_lay = O.Concat([inp[:, width:, :, :], inp[:, :width, :, :]],
                               axis=1)
        # accumulate the branches, weighted by their softmax coefficients
        out_lay = out_lay + inp_lay * lay[:, :, i].dimshuffle(0, 1, 'x', 'x')
    inp = out_lay

    chl *= 4
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl,
                         nonlinearity=Sigmoid())
    inp = inp * lay.dimshuffle(0, 1, 'x', 'x')

    inp = arith.ReLU(inp + pre)
    return inp
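
In Example #3 the gate on the first convolution is a softmax rather than a sigmoid: fc1 emits `width` logits per channel, the softmax normalizes them, and the block forms a weighted sum of the feature map and a channel-rotated copy of it. The snippet below is a minimal NumPy sketch of that mixture, my own illustration with random values rather than repository code.

import numpy as np

# Sketch of the softmax channel mixture in Example #3 (illustration only).
rng = np.random.default_rng(0)
n, chl, width = 2, 16, 4
feat = rng.standard_normal((n, chl, 8, 8))
logits = rng.standard_normal((n, chl, width))           # stands in for the reshaped fc1 output
weights = np.exp(logits) / np.exp(logits).sum(axis=2, keepdims=True)   # softmax over width

out = np.zeros_like(feat)
for i in range(width):
    branch = feat if i == 0 else np.concatenate(
        [feat[:, width:], feat[:, :width]], axis=1)     # rotate the channel axis by `width`
    out += branch * weights[:, :, i][:, :, None, None]  # accumulate the weighted branches
print(out.shape)                                        # (2, 16, 8, 8)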
Example #4
File: network.py  Project: lyuyanyii/CIFAR
def dense_block(inp, k, l):
    # DenseNet-style block: each of the l layers produces k new SE-gated feature
    # channels and concatenates them onto all features computed so far.
    lay = inp
    for i in range(l):
        cur_lay = bn_relu_conv(lay, 3, 1, 1, k, True, True)
        name = cur_lay.name
        # Global Average Pooling
        SE = cur_lay.mean(axis=3).mean(axis=2)
        SE = FullyConnected("fc0({})".format(name),
                            SE,
                            output_dim=k,
                            nonlinearity=ReLU())
        SE = FullyConnected("fc1({})".format(name),
                            SE,
                            output_dim=k,
                            nonlinearity=Sigmoid())
        cur_lay = cur_lay * SE.dimshuffle(0, 1, 'x', 'x')
        lay = Concat([lay, cur_lay], axis=1)
    return lay
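
Example #4 applies the same SE gate inside a DenseNet-style block; the part worth spelling out is the channel bookkeeping, since every iteration concatenates k freshly gated channels onto everything computed so far, so the input to the next bn_relu_conv keeps growing. A minimal NumPy sketch of that growth, with made-up shapes rather than repository code:

import numpy as np

# Sketch of the channel growth in Example #4's dense block (illustration only).
rng = np.random.default_rng(0)
k, l = 12, 4
lay = rng.standard_normal((2, 16, 8, 8))                # (N, C0, H, W) block input
for i in range(l):
    new_feat = rng.standard_normal((2, k, 8, 8))        # stands in for bn_relu_conv + SE gating
    lay = np.concatenate([lay, new_feat], axis=1)       # keep every earlier feature map
    print("after layer", i, "channels =", lay.shape[1]) # 28, 40, 52, 64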
Example #5
def res_layer(inp, chl):
    # Basic (non-bottleneck) residual block: two 3x3 convolutions, an SE gate on
    # the output, and an identity shortcut.
    pre = inp
    inp = conv_bn(inp, 3, 1, 1, chl, True)
    inp = conv_bn(inp, 3, 1, 1, chl, False)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    #fc0
    SE = FullyConnected("fc0({})".format(name),
                        SE,
                        output_dim=SE.partial_shape[1],
                        nonlinearity=ReLU())
    #fc1
    SE = FullyConnected("fc1({})".format(name),
                        SE,
                        output_dim=SE.partial_shape[1],
                        nonlinearity=Sigmoid())
    inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
    inp = arith.ReLU(inp + pre)
    return inp
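
Example #5 is the basic two-convolution variant with an identity shortcut, so its input and output shapes match and blocks can be stacked directly. A hypothetical usage sketch, assuming `conv_bn` and the `res_layer` above are available from the project's network.py (this stage builder is my own addition, not repository code):

def res_stage(inp, chl, n_blocks):
    # Bring the input to `chl` channels once, then stack identity-shortcut blocks;
    # each res_layer call preserves the tensor shape.
    lay = conv_bn(inp, 3, 1, 1, chl, True)
    for _ in range(n_blocks):
        lay = res_layer(lay, chl)
    return lay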