Example No. 1
def res_layer(inp, chl):
    pre = inp
    #inp = conv_bn(inp, 3, 1, 1, chl, True)
    #inp = conv_bn(inp, 3, 1, 1, chl, False)
    inp = den_lay(inp, chl)
    inp = arith.ReLU(inp)
    inp = den_lay(inp, chl)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 2
def relu_conv_bn(inp,
                 ker_shape,
                 stride,
                 padding,
                 out_chl,
                 isrelu=True,
                 isbn=True):
    global idx
    idx += 1
    if isrelu:
        inp = arith.ReLU(inp)
    inp = Conv2D("conv{}".format(idx),
                 inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl,
                 nonlinearity=Identity())
    if isbn:
        inp = BN("bn{}".format(idx), inp, eps=1e-9)
        inp = ElementwiseAffine("bnaff{}".format(idx),
                                inp,
                                shared_in_channels=False,
                                k=C(1),
                                b=C(0))
    return inp
Example No. 3
def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, has_relu, has_bn, has_conv = True):
	global idx
	idx += 1
	if has_bn:
		l1 = BN("bn{}".format(idx), inp, eps = 1e-9)
		l1 = ElementwiseAffine("bnaff{}".format(idx), l1, shared_in_channels = False, k = C(1), b = C(0))
	else:
		l1 = inp
	
	if has_relu:
		l2 = arith.ReLU(l1)
	else:
		l2 = l1
	
	if not has_conv:
		return l2

	l3 = Conv2D(
		"conv{}".format(idx), l2, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		#W = G(mean = 0, std = (1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
		#b = C(0),
		nonlinearity = Identity()
		)
	
	return l3
Example No. 4
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu, mode = None):
	global idx
	idx += 1
	print(inp.partial_shape, ker_shape, out_chl)
	if ker_shape == 1:
		W = ortho_group.rvs(out_chl)
		W = W[:, :inp.partial_shape[1]]
		W = W.reshape(W.shape[0], W.shape[1], 1, 1)
		W = ConstProvider(W)
		b = ConstProvider(np.zeros(out_chl))
	else:
		W = G(mean = 0, std = ((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5)
		b = C(0)
	l1 = Conv2D(
		"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		group = mode,
		W = W,
		b = b,
		nonlinearity = Identity()
		)
	l2 = BN("bn{}".format(idx), l1, eps = 1e-9)
	l2 = ElementwiseAffine("bnaff{}".format(idx), l2, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		l2 = arith.ReLU(l2)
	return l2, l1
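A quick NumPy check of the weight std used above, std = ((1 + isrelu) / (ker_shape**2 * c_in))**0.5: with a ReLU this is He initialization, which keeps the second moment constant across the layer. This is my own sketch, not part of the example; for ker_shape = 1 the convolution reduces to a channel matmul.

import numpy as np

rng = np.random.default_rng(0)
c_in, c_out, n = 64, 64, 100000
x = rng.standard_normal((n, c_in))

std = ((1 + 1) / (1**2 * c_in)) ** 0.5    # ker_shape = 1, isrelu = True
W = rng.normal(0, std, size=(c_in, c_out))
y = np.maximum(x @ W, 0)                  # 1x1 conv + ReLU

print((x**2).mean(), (y**2).mean())       # both are approximately 1.0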
Example No. 5
def res_layer(inp, chl, group, stride=1, proj=False, shift=None):
    pre = inp
    if group == 1:
        inp = conv_bn(inp, 1, stride, 0, chl // 4, True, group=group)
        inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
        inp = conv_bn(inp, 1, 1, 0, chl, False, group=group)
    else:
        """
		lay1 = conv_bn(inp, 3, 1, 1, chl // 4, True, group = group)
		inp = O.Concat([inp[:, shift * chl // 4 // group:, :, :], inp[:, :shift * chl // 4 // group, :, :]], axis = 1)
		lay2 = conv_bn(inp, 3, 1, 1, chl // 4, True, group = group)
		inp = lay1 + lay2
		"""
        inp = conv_bn(inp, 1, stride, 0, chl // 4, True, group=group)
        """
		subchl = chl // 4 // group
		inp = inp.reshape(inp.shape[0], group, subchl, inp.shape[2], inp.shape[3])
		inp = inp.dimshuffle(0, 2, 1, 3, 4)
		inp = inp.reshape(inp.shape[0], chl // 4, inp.shape[3], inp.shape[4])
		"""
        inp = conv_bn(inp, 3, 1, 1, chl // 4, True, group=group)
        inp = conv_bn(inp, 1, 1, 0, chl, False, group=group)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False, group=group)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 6
def res_layer(inp, chl, stride = 1, proj = False, se = None):
	pre = inp
	inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
	inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
	inp = conv_bn(inp, 1, 1, 0, chl, False)
	if proj:
		pre = conv_bn(pre, 1, stride, 0, chl, False)
	name = inp.name
	#Global Average Pooling
	SE = inp.mean(axis = 3).mean(axis = 2)
	#fc0
	SE = FullyConnected(
		"fc0({})".format(name), SE, output_dim = chl // 4,
		nonlinearity = ReLU()
		)
	#fc1
	if se is None:
		se = SE
	else:
		se = O.Concat([se, SE], axis = 1)
	SE = FullyConnected(
		"fc1({})".format(name), se, output_dim = chl,
		nonlinearity = Sigmoid()
		)
	se = FullyConnected(
		"fc({})".format(se.name), se, output_dim = chl // 4,
		nonlinearity = ReLU()
		)
	inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
	inp = arith.ReLU(inp + pre)
	return inp, se
Example No. 7
def res_layer(inp, chl, group, stride=1, proj=False, shift=None):
    pre = inp
    if group == 1:
        inp = conv_bn(inp, 1, stride, 0, chl // 4, True, group=group)
        inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
        inp = conv_bn(inp, 1, 1, 0, chl, False, group=group)
    else:
        """
		lay1 = conv_bn(inp, 3, 1, 1, chl // 4, True, group = group)
		inp = O.Concat([inp[:, shift * chl // 4 // group:, :, :], inp[:, :shift * chl // 4 // group, :, :]], axis = 1)
		lay2 = conv_bn(inp, 3, 1, 1, chl // 4, True, group = group)
		inp = lay1 + lay2
		"""
        inp = conv_bn(inp,
                      1,
                      stride,
                      0,
                      chl // 4,
                      True,
                      group=group,
                      shift=shift)
        inp = conv_bn(inp, 3, 1, 1, chl // 4, True, group=group, shift=shift)
        inp = conv_bn(inp, 1, 1, 0, chl, False, group=group, shift=shift)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False, group=group)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 8
def res_layer(inp, chl, stride = 1, proj = False):
	pre = inp
	inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
	#inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
	inp = den_layer(inp, chl // 4)
	inp = conv_bn(inp, 1, 1, 0, chl, False)
	if proj:
		pre = conv_bn(pre, 1, stride, 0, chl, False)
	name = inp.name
	#Global Average Pooling
	SE = inp.mean(axis = 3).mean(axis = 2)
	lay = FullyConnected(
		"fc0({})".format(name), SE, output_dim = chl,
		nonlinearity = ReLU()
		)
	#fc1
	lay = FullyConnected(
		"fc1({})".format(name), lay, output_dim = chl,
		nonlinearity = Sigmoid()
		)
	inp = inp * lay.dimshuffle(0, 1, 'x', 'x')
	inp = arith.ReLU(inp + pre)
	return inp
Example No. 9
def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, has_relu, has_bn, has_conv = True):
	global idx
	idx += 1
	if has_bn:
		l1 = BN("bn{}".format(idx), inp, eps = 1e-9)
		l1 = ElementwiseAffine("bnaff{}".format(idx), l1, shared_in_channels = False, k = C(1), b = C(0))
	else:
		l1 = inp
	
	if has_relu:
		l2 = arith.ReLU(l1)
	else:
		l2 = l1
	
	if not has_conv:
		return l2, None

	l3 = Conv2D(
		"conv{}".format(idx), l2, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		nonlinearity = Identity()
		)
	w = l3.inputs[1]
	assert ":W" in w.name
	
	return l3, w
Example No. 10
def res_layer(inp, chl, stride=1, proj=False):
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    width = 4
    lay = FullyConnected("fc0({})".format(name),
                         SE,
                         output_dim=chl // 4,
                         nonlinearity=ReLU())
    #fc1
    lay = FullyConnected("fc1({})".format(name),
                         lay,
                         output_dim=chl // 4 * width,
                         nonlinearity=Identity())
    lay = lay.reshape(inp.shape[0], chl // 4, width)
    lay = Softmax("softmax({})".format(name), lay, axis=2)
    for i in range(width):
        if i == 0:
            inp_lay = inp
        else:
            inp_lay = O.Concat([inp[:, width:, :, :], inp[:, :width, :, :]],
                               axis=1)
        inp_lay = inp_lay * lay[:, :, i].dimshuffle(0, 1, 'x', 'x')
    inp = O.ReLU(inp_lay)
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 11
def res_layer(inp, chl, stride=1, proj=False):
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 12
def den_lay(inp, chl):
    out = []
    stage = 8
    for i in range(stage):
        lay = conv_bn(inp, 3, 1, 1, chl // stage, False)
        out.append(lay)
        lay = arith.ReLU(lay)
        inp = O.Concat([inp, lay], axis=1)
    return O.Concat(out, axis=1)
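Channel bookkeeping for the dense block above, as a plain-Python hand-check (chl_in = 64 is an assumed input width): each stage widens the running Concat input by chl // stage channels, while the returned tensor concatenates only the stage outputs.

chl_in, chl, stage = 64, 64, 8
out_widths = []
for i in range(stage):
    out_widths.append(chl // stage)  # each stage's conv output, collected in out
    chl_in += chl // stage           # O.Concat widens the next conv's input
print(chl_in, sum(out_widths))       # 128 channels fed forward, 64 returned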
Example No. 13
def deconv_bn_relu(name, inp, kernel_shape = None, stride = None, padding = None, output_nr_channel = None, isbnrelu = True):
	lay = O.Deconv2DVanilla(name, inp, 
		kernel_shape = kernel_shape,
		stride = stride,
		padding = padding,
		output_nr_channel = output_nr_channel)
	if isbnrelu:
		lay = BN(name + "bn", lay, eps = 1e-9)
		lay = ElementwiseAffine(name + "bnaff", lay, shared_in_channels = False, k = C(1), b = C(0))
		lay = arith.ReLU(lay)
	return lay
Example No. 14
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
	global idx
	idx += 1
	l1 = Conv2D(
		"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		nonlinearity = Identity()
		)
	l2 = BN("bn{}".format(idx), l1, eps = 1e-9)
	l2 = ElementwiseAffine("bnaff{}".format(idx), l2, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		l2 = arith.ReLU(l2)
	return l2
Example No. 15
def res_block(inp, chl, n):
    stride = 2
    if chl == 16:
        stride = 1
    pre = inp
    inp = bn_relu_conv(inp, 3, stride, 1, chl, True, True)
    inp = bn_relu_conv(inp, 3, 1, 1, chl, True, True)
    inp = inp + bn_relu_conv(pre, 1, stride, 0, chl, True, True)
    inp = arith.ReLU(inp)

    for i in range(n - 1):
        inp = res_layer(inp, chl)

    return inp
Example No. 16
def res_block(inp, chl, n):
	stride = 2
	if chl == 16:
		stride = 1
	pre = inp
	inp = conv_bn(inp, 3, stride, 1, chl, True)
	inp = conv_bn(inp, 3, 1, 1, chl, False)
	inp = inp + conv_bn(pre, 1, stride, 0, chl, False)
	inp = arith.ReLU(inp)
	
	for i in range(n - 1):
		inp = res_layer(inp, chl)
	
	return inp
Example No. 17
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu, group = 1, shift = 0):
	global idx
	idx += 1
	if group == 1:
		l1 = Conv2D(
			"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
			output_nr_channel = out_chl,
			#W = G(mean = 0, std = ((1) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
			#b = C(0),
			nonlinearity = Identity()
			)
	else:
		if shift == 0:
			l1 = Conv2D(
				"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
				output_nr_channel = out_chl,
				#W = G(mean = 0, std = ((1) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
				#b = C(0),
				nonlinearity = Identity(),
				group = group,
				)
		else:
			shift = 1
			l1 = inp
			while shift != group:
				l11 = Conv2D(
					"conv{}_{}_1".format(idx, shift), l1, kernel_shape = ker_shape, stride = stride, padding = padding,
					output_nr_channel = out_chl,
					#W = G(mean = 0, std = ((1) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
					#b = C(0),
					nonlinearity = Identity(),
					group = group,
					)
				inp_chl = l1.partial_shape[1]
				l1 = O.Concat([l1[:, shift * inp_chl // group:, :, :], l1[:, :shift * inp_chl // group, :, :]], axis = 1)
				l12 = Conv2D(
					"conv{}_{}_2".format(idx, shift), l1, kernel_shape = ker_shape, stride = stride, padding = padding,
					output_nr_channel = out_chl,
					#W = G(mean = 0, std = ((1) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
					#b = C(0),
					nonlinearity = Identity(),
					group = group,
					)
				l1 = l11 + l12
				shift *= 2
	l2 = BN("bn{}".format(idx), l1, eps = 1e-9)
	l2 = ElementwiseAffine("bnaff{}".format(idx), l2, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		l2 = arith.ReLU(l2)
	return l2
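The shift branch above mixes groups by rotating the channel axis between grouped convolutions. A minimal NumPy check (my own sketch) that the Concat expression is exactly a channel rotation:

import numpy as np

x = np.arange(2 * 8 * 1 * 1).reshape(2, 8, 1, 1)  # (N, C, H, W) stand-in
group, shift = 4, 1
s = shift * x.shape[1] // group
rotated = np.concatenate([x[:, s:], x[:, :s]], axis=1)
assert (rotated == np.roll(x, -s, axis=1)).all()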
Example No. 18
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
	global idx
	idx += 1
	l1 = Conv2D(
		"encoder_conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		W = G(mean = 0, std = ((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
		nonlinearity = Identity()
		)
	l2 = BN("encoder_bn{}".format(idx), l1, eps = 1e-9)
	l2 = ElementwiseAffine("bnaff{}".format(idx), l2, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		l2 = arith.ReLU(l2)
	return l2, l1
Example No. 19
def res_layer(inp, chl, stride=1, proj=False):
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    name = inp.name
    inp = ElementwiseAffine("aff({})".format(name),
                            inp,
                            shared_in_channels=False,
                            k=C(0.5),
                            b=C(0))
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)
    inp = arith.ReLU(inp + pre)
    return inp
Example No. 20
def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, isrelu, isbn):
	global idx
	idx += 1
	if isbn:
		inp = BN("bn{}".format(idx), inp, eps = 1e-9)
		inp = ElementwiseAffine("bnaff{}".format(idx), inp, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		inp = arith.ReLU(inp)
	inp = Conv2D(
		"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		#W = G(mean = 0, std = ((1) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
		#b = C(0),
		nonlinearity = Identity()
		)
	return inp
Example No. 21
def conv_wn(inp, ker_shape, stride, padding, out_chl, isrelu):
	global idx
	idx += 1
	l1 = Conv2D(
		"conv{}".format(idx), inp, kernel_shape = ker_shape, stride = stride, padding = padding,
		output_nr_channel = out_chl,
		W = G(mean = 0, std = 0.05),
		nonlinearity = Identity()
		)
	W = l1.inputs[1]
	#l2 = BN("bn{}".format(idx), l1, eps = 1e-9)
	w = l1.inputs[1]
	assert ":W" in w.name
	w = (w**2).sum(axis = 3).sum(axis = 2).sum(axis = 1)**0.5
	l1 = l1 / w.dimshuffle('x', 0, 'x', 'x')
	l2 = ElementwiseAffine("bnaff{}".format(idx), l1, shared_in_channels = False, k = C(1), b = C(0))
	if isrelu:
		l2 = arith.ReLU(l2)
	return l2, l1, W
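What conv_wn normalizes, as a NumPy sketch with stand-in shapes (not the framework API): every output-channel filter is divided by its L2 norm. Because convolution is linear in W, dividing the layer output per channel, as the example does, is equivalent to normalizing the filters themselves.

import numpy as np

rng = np.random.default_rng(0)
W = rng.normal(0, 0.05, size=(32, 16, 3, 3))   # (out_chl, in_chl, kH, kW)
norms = np.sqrt((W ** 2).sum(axis=(1, 2, 3)))  # one L2 norm per output channel
W_hat = W / norms[:, None, None, None]
print(np.allclose((W_hat ** 2).sum(axis=(1, 2, 3)), 1.0))  # True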
Example No. 22
def res_block(inp, chl, n):
	lis_w = []
	stride = 2
	if chl == 16:
		stride = 1
	pre = inp
	inp, w = conv_bn(inp, 3, stride, 1, chl, True)
	lis_w.append(w)
	inp, w = conv_bn(inp, 3, 1, 1, chl, False)
	lis_w.append(w)
	res_path, w = conv_bn(pre, 1, stride, 0, chl, False)
	inp = inp + res_path
	lis_w.append(w)
	inp = arith.ReLU(inp)
	
	for i in range(n - 1):
		inp, lis_new = res_layer(inp, chl)
		lis_w += lis_new
	
	return inp, lis_w
Example No. 23
def res_layer(inp, chl):
    pre = inp
    inp = conv_bn(inp, 3, 1, 1, chl, True)
    inp = conv_bn(inp, 3, 1, 1, chl, False)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    #fc0
    SE = FullyConnected("fc0({})".format(name),
                        SE,
                        output_dim=SE.partial_shape[1],
                        nonlinearity=ReLU())
    #fc1
    SE = FullyConnected("fc1({})".format(name),
                        SE,
                        output_dim=SE.partial_shape[1],
                        nonlinearity=Sigmoid())
    inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
    inp = arith.ReLU(inp + pre)
    return inp
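The same squeeze-and-excitation gate in plain NumPy (my own sketch; W0 and W1 are random stand-ins for the fc0/fc1 weights): global average pool, a ReLU dense layer, a sigmoid dense layer, then a per-channel rescale of the feature map.

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal((2, 16, 8, 8))  # (N, C, H, W)
C = x.shape[1]
W0 = rng.standard_normal((C, C)) / C**0.5
W1 = rng.standard_normal((C, C)) / C**0.5

s = x.mean(axis=(2, 3))                 # squeeze: global average pooling
s = np.maximum(s @ W0, 0)               # fc0 + ReLU
s = 1.0 / (1.0 + np.exp(-(s @ W1)))     # fc1 + Sigmoid
y = x * s[:, :, None, None]             # excite: channel-wise gate
print(y.shape)                          # (2, 16, 8, 8)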
Example No. 24
def res_layer(inp, chl):
    pre = inp
    inp = conv_bn(inp, 3, 1, 1, chl, True)
    inp = conv_bn(inp, 3, 1, 1, chl, False)
    name = inp.name
    #Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    group = 1
    #fc0
    SE = FullyConnected("fc0({})".format(name),
                        SE,
                        output_dim=chl,
                        nonlinearity=ReLU())
    #fc1
    SE = FullyConnected("fc1({})".format(name),
                        SE,
                        output_dim=(chl // group)**2 * group,
                        nonlinearity=Sigmoid())
    # reshape the fc1 output into one (chl x chl) 1x1 kernel per sample
    SE = SE.reshape(inp.shape[0] * group, chl // group, chl // group, 1, 1)
    w = SE
    # normalize each output channel's mixing weights to sum to 1
    SE /= SE.sum(axis=4).sum(axis=3).sum(axis=2).dimshuffle(
        0, 1, "x", "x", "x")
    #inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
    # fold the batch into channels so one Conv2D applies per-sample kernels
    inp = inp.reshape(1, inp.shape[0] * inp.shape[1], inp.shape[2],
                      inp.shape[3])
    inp = Conv2D(
        "conv({})".format(name),
        inp,
        kernel_shape=1,
        stride=1,
        padding=0,
        #output_nr_channel = chl,
        W=SE,
        nonlinearity=Identity(),
        #group = group
    )
    inp = inp.reshape(pre.shape)
    inp = arith.ReLU(inp + pre)
    return inp, w
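Example No. 24 folds the batch into the channel axis so a single Conv2D can apply a different chl x chl mixing matrix to every sample. The same per-sample 1x1 convolution as a NumPy einsum (M is a random stand-in for the predicted, normalized SE tensor):

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal((2, 8, 4, 4))  # (N, C, H, W)
M = rng.standard_normal((2, 8, 8))     # one (C_out, C_in) matrix per sample
y = np.einsum("noc,nchw->nohw", M, x)  # per-sample 1x1 convolution
print(y.shape)                         # (2, 8, 4, 4)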
Example No. 25
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l10 = Conv2D("conv{}_0".format(idx),
                 inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) /
                          (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    l11 = Conv2D("conv{}_1".format(idx),
                 inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) /
                          (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    W = l11.inputs[1].owner_opr
    b = l11.inputs[2].owner_opr
    W.set_freezed()
    b.set_freezed()
    l1 = Concat([l10, l11], axis=1)
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx),
                           l2,
                           shared_in_channels=False,
                           k=C(1),
                           b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1
Example No. 26
def dfconv(inp, chl, isrelu, ker_shape = 3, stride = 1, padding = 1, dx = [-1, 0, 1], dy = [-1, 0, 1]):
	global idx
	name = "conv{}".format(idx)
	offsetlay = Conv2D(
		name + "conv1", inp, kernel_shape = 3, stride = 1, padding = 1,
		output_nr_channel = ker_shape**2,
		W = G(mean = 0, std = ((1) / (3**2 * inp.partial_shape[1]))**0.5),
		nonlinearity = Identity()
		)
	offsetlay = BN(name + "BN1", offsetlay, eps = 1e-9)
	offsetlay = arith.ReLU(offsetlay)
	offsetlay = Conv2D(
		name + "conv2", inp, kernel_shape = 3, stride = 1, padding = 1,
		output_nr_channel = ker_shape**2,
		W = G(mean = 0, std = ((1) / (3**2 * inp.partial_shape[1]))**0.5),
		nonlinearity = Identity()
		)
	offsetlay = BN(name + "BN2", offsetlay, eps = 1e-9)

	offsetx = inp.partial_shape[2] * Conv2D(
		name + "offsetx", offsetlay, kernel_shape = ker_shape, stride = stride, 
		padding = padding,
		output_nr_channel = ker_shape**2,
		W = G(mean = 0, std = (1 / (ker_shape**2 * inp.partial_shape[2]))**0.5),
		nonlinearity = Identity()
		)
	offsety = inp.partial_shape[3] * Conv2D(
		name + "offsety", offsetlay, kernel_shape = ker_shape, stride = stride, 
		padding = padding,
		output_nr_channel = ker_shape**2,
		W = G(mean = 0, std = (1 / (ker_shape**2 * inp.partial_shape[3]))**0.5),
		nonlinearity = Identity()
		)

	"""
	gamma = 0.0001
	ndim = ker_shape**2 * offsetx.partial_shape[2] * offsetx.partial_shape[3]
	offsetx = FullyConnected(
		name + "offsetx", offsetx, output_dim = ndim,
		W = G(mean = 0, std = (1 / ndim)**0.5),
		b = C(0),
		nonlinearity = Identity()
		)
	offsetx = offsetx.reshape(offsety.shape)
	offsety = FullyConnected(
		name + "offsety", offsety, output_dim = ndim,
		W = G(mean = 0, std = (1 / ndim)**0.5),
		b = C(0),
		nonlinearity = Identity()
		)
	offsety = offsety.reshape(offsetx.shape)
	"""
	
	outputs = []
	for sx in range(2):
		for sy in range(2):
			if sx == 0:
				ofx = Floor(offsetx)
				bilx = offsetx - ofx
			else:
				ofx = Ceil(offsetx)
				bilx = ofx - offsetx
			if sy == 0:
				ofy = Floor(offsety)
				bily = offsety - ofy
			else:
				ofy = Ceil(offsety)
				bily = ofy - offsety

			"""
			No padding
			padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
			padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
			arg_fea = Concat([padding1, inp, padding1], axis = 2)
			arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
			"""
			arg_fea = inp

			#one_mat = ConstProvider(np.ones((inp.partial_shape[2], inp.partial_shape[3])), dtype = np.int32)
			one_mat = ConstProvider(1, dtype = np.int32).add_axis(0).broadcast((ofx.partial_shape[2], ofx.partial_shape[3]))
			affx = (Cumsum(one_mat, axis = 0) - 1) * stride
			affy = (Cumsum(one_mat, axis = 1) - 1) * stride

			ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
			ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)
			one_mat = ConstProvider(np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
			#ofx[:, :ker_shape, :, :] -= 1
			#ofx[:, ker_shape*2:, :, :] += 1
			ofx += Concat([one_mat * i for i in dx], axis = 0).dimshuffle('x', 0, 1, 2)
			#ofy[:, ::3, :, :] -= 1
			#ofy[:, 2::3, :, :] += 1
			one_mat = ones((1, ofx.partial_shape[2], ofx.partial_shape[3]))
			one_mat = Concat([one_mat * i for i in dy], axis = 0)
			one_mat = Concat([one_mat] * ker_shape, axis = 0)
			ofy += one_mat.dimshuffle('x', 0, 1, 2)
			ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
			ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

			def DeformReshape(inp, ker_shape):
				inp = inp.reshape(inp.shape[0], ker_shape, ker_shape, inp.shape[2], inp.shape[3])
				inp = inp.dimshuffle(0, 3, 1, 4, 2)
				inp = inp.reshape(inp.shape[0], inp.shape[1] * inp.shape[2], inp.shape[3] * inp.shape[4])
				return inp

			ofx = DeformReshape(ofx, ker_shape)
			ofy = DeformReshape(ofy, ker_shape)
			bilx = DeformReshape(bilx, ker_shape)
			bily = DeformReshape(bily, ker_shape)

			of = ofx * arg_fea.shape[2] + ofy
			arg_fea = arg_fea.reshape(arg_fea.shape[0], arg_fea.shape[1], -1)
			of = of.reshape(ofx.shape[0], -1)
			of = of.dimshuffle(0, 'x', 1)
			#of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
			of = of.broadcast((of.shape[0], arg_fea.shape[1], of.shape[2]))
			arx = Linspace(0, arg_fea.shape[0], arg_fea.shape[0], endpoint = False)
			arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
			ary = Linspace(0, arg_fea.shape[1], arg_fea.shape[1], endpoint = False)
			ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
			of = of.add_axis(3)
			arx = arx.add_axis(3)
			ary = ary.add_axis(3)
			idxmap = Astype(Concat([arx, ary, of], axis = 3), np.int32)
			"""
			sample = []
			for i in range(arg_fea.partial_shape[0]):
				for j in range(arg_fea.partial_shape[1]):
					sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
			sample = Concat(sample, axis = 0)
			"""
			sample = IndexingRemap(arg_fea, idxmap).reshape(inp.shape[0], inp.shape[1], bilx.shape[1], -1)
			bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
			bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
			sample *= bilx * bily
			
			outputs.append(sample)
	
	output = outputs[0]
	for i in outputs[1:]:
		output += i
	
	return conv_bn(output, ker_shape, 3, 0, chl, isrelu)
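The four (sx, sy) branches in dfconv combine the floor/ceil neighbours of each fractional offset with bilinear weights. The underlying interpolation, as a standalone NumPy sketch for a single-channel map:

import numpy as np

def bilinear(img, px, py):
    # weighted sum of the four integer neighbours of (px, py)
    x0, y0 = int(np.floor(px)), int(np.floor(py))
    x1, y1 = x0 + 1, y0 + 1
    wx, wy = px - x0, py - y0
    return (img[x0, y0] * (1 - wx) * (1 - wy) + img[x1, y0] * wx * (1 - wy) +
            img[x0, y1] * (1 - wx) * wy + img[x1, y1] * wx * wy)

img = np.arange(16.0).reshape(4, 4)  # img[x, y] = 4*x + y
print(bilinear(img, 1.5, 2.25))      # 8.25, linear in both coordinates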
Example No. 27
def res_layer(inp, chl):
	pre = inp
	inp, w1 = conv_bn(inp, 3, 1, 1, chl, True)
	inp, w2 = conv_bn(inp, 3, 1, 1, chl, False)
	inp = arith.ReLU(inp + pre)
	return inp, [w1, w2]