def _concat_new_axis(t1, t2, t3, t4, axis=1):
    return Concat(
        [t1.add_axis(-1), t2.add_axis(-1), t3.add_axis(-1), t4.add_axis(-1)],
        axis=axis)
def dense_block(inp, k, l):
    lay = inp
    for i in range(l):
        cur_lay = bn_relu_conv(lay, 3, 1, 1, k, True, True)
        name = cur_lay.name
        group = k // 4
        # G.P.: global average pooling, then two FCs produce per-sample,
        # per-group (k/group x k/group) mixing matrices.
        SE = cur_lay.mean(axis=3).mean(axis=2)
        SE = FullyConnected("fc0({})".format(name), SE,
                            output_dim=(k // group)**2 * group,
                            nonlinearity=ReLU())
        SE = FullyConnected("fc1({})".format(name), SE,
                            output_dim=(k // group)**2 * group,
                            nonlinearity=Sigmoid())
        print(SE.name)
        SE = SE.reshape(cur_lay.shape[0] * group, k // group, k // group, 1, 1)
        # Fold the batch axis into channels so one grouped 1x1 conv applies a
        # different SE-generated weight matrix to every sample.
        preshape = cur_lay.shape
        cur_lay = cur_lay.reshape(1, cur_lay.shape[0] * cur_lay.shape[1],
                                  cur_lay.shape[2], cur_lay.shape[3])
        cur_lay = Conv2D("conv({})".format(name), cur_lay,
                         kernel_shape=1, stride=1, padding=0,
                         W=SE, nonlinearity=Identity())
        cur_lay = cur_lay.reshape(preshape)
        #cur_lay = cur_lay * SE.dimshuffle(0, 1, 'x', 'x')
        lay = Concat([lay, cur_lay], axis=1)
    return lay
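# Illustrative sketch (not part of the original code): the block above appears
# to fold the batch axis into the channel axis so that one 1x1 convolution with
# a 5-D (grouped) weight applies a different SE-generated mixing matrix to each
# sample. The same computation in plain NumPy, with made-up shapes:
import numpy as np

def per_sample_group_mix_demo():
    N, k, group, H, W = 2, 8, 2, 4, 4            # illustrative sizes
    x = np.random.randn(N, k, H, W)              # output of bn_relu_conv
    # one (k/group x k/group) matrix per sample and per group (fc0/fc1 output)
    w = np.random.randn(N * group, k // group, k // group)
    xg = x.reshape(N * group, k // group, H, W)  # fold batch into channels
    yg = np.einsum('gij,gjhw->gihw', w, xg)      # 1x1 conv == channel matmul
    return yg.reshape(N, k, H, W)                # unfold back to (N, k, H, W)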
def dense_block(inp, k, l):
    lay = inp
    lis_w = []
    for i in range(l):
        cur_lay, w = bn_relu_conv(lay, 3, 1, 1, k, True, True)
        lis_w.append(w)
        lay = Concat([lay, cur_lay], axis=1)
    return lay, lis_w
def dense_block(inp, k, l):
    lay = inp
    pre_lay = 0
    for i in range(l):
        cur_lay = pre_lay + bn_relu_conv(lay, 3, 1, 1, k, True, True)
        pre_lay = bn_relu_conv(cur_lay, 3, 1, 1, k, True, True)
        lay = Concat([lay, cur_lay], axis=1)
    return lay
def b_resize(name, inp, rate=0.8):
    #inp = ConstProvider([[[[1, 2], [3, 4]]]], dtype = np.float32)
    f_size = inp.partial_shape[2]
    l = int(f_size * rate)
    # Two crop intervals per axis: [0, l] and [f_size - l, f_size].
    s = [[0, l], [f_size - l, f_size]]
    # Batch and channel index grids for IndexingRemap.
    ar0 = Linspace(0, inp.shape[0], inp.shape[0], endpoint=False)
    ar0 = ar0.add_axis(1).add_axis(2).add_axis(3).broadcast(inp.shape).add_axis(4)
    ar1 = Linspace(0, inp.shape[1], inp.shape[1], endpoint=False)
    ar1 = ar1.add_axis(0).add_axis(2).add_axis(3).broadcast(inp.shape).add_axis(4)
    fmaps = [inp]
    for i in range(4):
        xx = s[i % 2]
        yy = s[i // 2]
        #x = Linspace(xx[0], xx[1], f_size, endpoint = False)
        #y = Linspace(yy[0], yy[1], f_size, endpoint = False)
        x = ConstProvider(np.linspace(xx[0], xx[1], f_size, endpoint=False))
        y = ConstProvider(np.linspace(yy[0], yy[1], f_size, endpoint=False))
        fx, fy = Floor(x), Floor(y)
        cx, cy = Ceil(x), Ceil(y)
        nfmaps = []
        # Bilinear resampling: gather from the four floor/ceil corners.
        for sx in range(2):
            for sy in range(2):
                ix = fx if sx == 0 else cx
                iy = fy if sy == 0 else cy
                bx = (cx - x + Equal(fx, cx) if sx == 0 else x - fx)
                by = (cy - y + Equal(fy, cy) if sy == 0 else y - fy)
                arx = ix.add_axis(0).add_axis(0).add_axis(3).broadcast(
                    inp.shape).add_axis(4)
                ary = iy.add_axis(0).add_axis(0).add_axis(0).broadcast(
                    inp.shape).add_axis(4)
                idxmap = Astype(Concat([ar0, ar1, arx, ary], axis=4), np.int32)
                sample = IndexingRemap(inp, idxmap)
                sample *= bx.dimshuffle('x', 'x', 0, 'x') * by.dimshuffle(
                    'x', 'x', 'x', 0)
                nfmaps.append(sample)
        fmap = nfmaps[0]
        for j in range(1, 4):
            fmap += nfmaps[j]
        fmaps.append(fmap)
    fmap = Concat(fmaps, axis=1)
    return fmap
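# Illustrative sketch (not part of the original code): b_resize samples each
# fractional coordinate from its floor/ceil neighbours; the Equal(fx, cx) term
# keeps the total weight at 1 when a coordinate is already an integer.
# The same weighting in 1-D NumPy:
import numpy as np

def bilinear_1d_demo():
    src = np.array([10.0, 20.0, 30.0, 40.0])
    x = np.array([0.25, 2.0, 2.75])              # fractional sample positions
    fx, cx = np.floor(x).astype(int), np.ceil(x).astype(int)
    w_floor = (cx - x) + (fx == cx)              # Equal(...) correction
    w_ceil = x - fx
    return w_floor * src[fx] + w_ceil * src[cx]  # -> [12.5, 30.0, 37.5]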
def dense_block(inp, k, l, j):
    lay = inp
    for i in range(l):
        idx = j * l + i + 1
        chl = lay.partial_shape[1]
        # Lowest set bit of the global layer index decides how many of the
        # most recent channels (p * k) this layer is allowed to see.
        p = idx & -idx
        p = max(0, chl - p * k)
        sh_lay = lay[:, p:, :, :]
        cur_lay = bn_relu_conv(sh_lay, 3, 1, 1, k, True, True)
        lay = Concat([lay, cur_lay], axis=1)
    return lay
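# Illustrative sketch (not part of the original code): `idx & -idx` isolates the
# lowest set bit of the 1-based layer index, so layer idx only reads the last
# (idx & -idx) * k channels of the concatenated features:
def lookback_demo(l=8, k=12):
    for idx in range(1, l + 1):
        print(idx, (idx & -idx) * k)   # 12, 24, 12, 48, 12, 24, 12, 96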
def dense_block(inp, k, l):
    lay = inp
    for i in range(l):
        chl = lay.partial_shape[1]
        # Channel shuffle: interleave the k-channel groups of all previous
        # layers before the grouped convolution.
        sh_lay = lay.reshape(lay.partial_shape[0], chl // k, k,
                             lay.partial_shape[2], lay.partial_shape[3])
        sh_lay = sh_lay.dimshuffle(0, 2, 1, 3, 4)
        sh_lay = sh_lay.reshape(lay.shape)
        cur_lay = bn_relu_conv(sh_lay, 3, 1, 1, k, True, True, group=k // 4)
        lay = Concat([lay, cur_lay], axis=1)
    return lay
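# Illustrative sketch (not part of the original code): the reshape / dimshuffle /
# reshape above is a channel shuffle that interleaves the k-channel groups of
# all previous layers. Equivalent NumPy:
import numpy as np

def channel_shuffle_demo():
    N, layers, k, H, W = 1, 3, 4, 2, 2
    lay = np.arange(N * layers * k * H * W).reshape(N, layers * k, H, W)
    return (lay.reshape(N, layers, k, H, W)
               .transpose(0, 2, 1, 3, 4)        # swap the layer and k axes
               .reshape(N, layers * k, H, W))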
def dense_block(inp, k, l):
    lay = inp
    for i in range(l):
        cur_lay = bn_relu_conv(lay, 3, 1, 1, k, True, True)
        name = cur_lay.name
        # G.P.: global average pooling followed by an SE gate.
        SE = cur_lay.mean(axis=3).mean(axis=2)
        SE = FullyConnected("fc0({})".format(name), SE, output_dim=k,
                            nonlinearity=ReLU())
        SE = FullyConnected("fc1({})".format(name), SE, output_dim=k,
                            nonlinearity=Sigmoid())
        cur_lay = cur_lay * SE.dimshuffle(0, 1, 'x', 'x')
        lay = Concat([lay, cur_lay], axis=1)
    return lay
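# Illustrative sketch (not part of the original code): the fc0/fc1 pair above is
# a standard squeeze-and-excitation gate on the new k channels. In NumPy, with
# random placeholder weights:
import numpy as np

def se_gate_demo():
    N, k, H, W = 2, 8, 4, 4
    cur = np.random.randn(N, k, H, W)
    W0, W1 = np.random.randn(k, k), np.random.randn(k, k)
    se = cur.mean(axis=(2, 3))                      # global average pool
    se = np.maximum(se @ W0, 0)                     # fc0 + ReLU
    se = 1.0 / (1.0 + np.exp(-(se @ W1)))           # fc1 + Sigmoid
    return cur * se[:, :, None, None]               # per-channel gate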
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l10 = Conv2D("conv{}_0".format(idx), inp,
                 kernel_shape=ker_shape, stride=stride, padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    l11 = Conv2D("conv{}_1".format(idx), inp,
                 kernel_shape=ker_shape, stride=stride, padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    # Freeze the weights and bias of the second branch.
    W = l11.inputs[1].owner_opr
    b = l11.inputs[2].owner_opr
    W.set_freezed()
    b.set_freezed()
    l1 = Concat([l10, l11], axis=1)
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2, shared_in_channels=False,
                           k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1
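# Note (assumption based on the formula above): the weight std follows the usual
# fan-in rule, sqrt(2 / fan_in) when a ReLU follows and sqrt(1 / fan_in)
# otherwise, with fan_in = ker_shape**2 * input channels. For example:
def init_std(ker_shape=3, in_chl=64, isrelu=True):
    return ((1 + int(isrelu)) / (ker_shape**2 * in_chl)) ** 0.5   # ~0.059 / ~0.042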
def dfconv(name, inp, chl, isrelu, ker_shape=3, stride=1, padding=1,
           dx=[-1, 0, 1], dy=[-1, 0, 1]):
    # Offset branch: a small conv + BN whose ker_shape**2 output channels give
    # one offset per kernel tap.
    inp = Conv2D(
        name + "conv", inp,
        kernel_shape=3, stride=1, padding=1,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
        nonlinearity=Identity())
    inp = BN(name + "BN", inp, eps=1e-9)
    global idx
    #idx += 1
    gamma = 0.001
    offsetx = inp.partial_shape[2] * Conv2D(
        "conv{}_offsetx".format(idx + 1), inp,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=gamma / (ker_shape**2 * inp.partial_shape[2])),
        nonlinearity=Identity())
    offsety = inp.partial_shape[3] * Conv2D(
        "conv{}_offsety".format(idx + 1), inp,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=gamma / (ker_shape**2 * inp.partial_shape[3])),
        nonlinearity=Identity())
    outputs = []
    # Bilinear sampling: accumulate the four floor/ceil corner contributions.
    for sx in range(2):
        for sy in range(2):
            if sx == 0:
                ofx = Floor(offsetx)
                bilx = offsetx - ofx
            else:
                ofx = Ceil(offsetx)
                bilx = ofx - offsetx
            if sy == 0:
                ofy = Floor(offsety)
                bily = offsety - ofy
            else:
                ofy = Ceil(offsety)
                bily = ofy - offsety
            """ No padding
            padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
            padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
            arg_fea = Concat([padding1, inp, padding1], axis = 2)
            arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
            """
            arg_fea = inp
            # Base sampling grid: (Cumsum(ones) - 1) * stride gives the row /
            # column pixel indices the learned offsets are added to.
            #one_mat = ConstProvider(np.ones((inp.partial_shape[2], inp.partial_shape[3])), dtype = np.int32)
            one_mat = ConstProvider(1, dtype=np.int32).add_axis(0).broadcast(
                (ofx.partial_shape[2], ofx.partial_shape[3]))
            affx = (Cumsum(one_mat, axis=0) - 1) * stride
            affy = (Cumsum(one_mat, axis=1) - 1) * stride
            ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
            ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)
            # Per-tap displacements dx / dy of the ker_shape x ker_shape window.
            one_mat = ConstProvider(
                np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
            #ofx[:, :ker_shape, :, :] -= 1
            #ofx[:, ker_shape*2:, :, :] += 1
            ofx += Concat([one_mat * i for i in dx], axis=0).dimshuffle('x', 0, 1, 2)
            #ofy[:, ::3, :, :] -= 1
            #ofy[:, 2::3, :, :] += 1
            one_mat = ConstProvider(
                np.ones((1, ofx.partial_shape[2], ofx.partial_shape[3])))
            one_mat = Concat([one_mat * i for i in dy], axis=0)
            one_mat = Concat([one_mat] * ker_shape, axis=0)
            ofy += one_mat.dimshuffle('x', 0, 1, 2)
            # Clamp sampling positions to the feature-map borders.
            ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
            ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

            def DeformReshape(inp, ker_shape):
                # (N, ker*ker, H, W) -> (N, H*ker, W*ker)
                inp = inp.reshape(inp.shape[0], ker_shape, ker_shape,
                                  inp.shape[2], inp.shape[3])
                inp = inp.dimshuffle(0, 3, 1, 4, 2)
                inp = inp.reshape(inp.shape[0], inp.shape[1] * inp.shape[2],
                                  inp.shape[3] * inp.shape[4])
                return inp

            ofx = DeformReshape(ofx, ker_shape)
            ofy = DeformReshape(ofy, ker_shape)
            bilx = DeformReshape(bilx, ker_shape)
            bily = DeformReshape(bily, ker_shape)
            # Flatten (row, col) into one index and gather with IndexingRemap.
            of = ofx * arg_fea.shape[2] + ofy
            arg_fea = arg_fea.reshape(arg_fea.shape[0], arg_fea.shape[1], -1)
            of = of.reshape(ofx.shape[0], -1)
            of = of.dimshuffle(0, 'x', 1)
            #of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
            of = of.broadcast((of.shape[0], arg_fea.shape[1], of.shape[2]))
            arx = Linspace(0, arg_fea.shape[0], arg_fea.shape[0], endpoint=False)
            arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
            ary = Linspace(0, arg_fea.shape[1], arg_fea.shape[1], endpoint=False)
            ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
            of = of.add_axis(3)
            arx = arx.add_axis(3)
            ary = ary.add_axis(3)
            idxmap = Astype(Concat([arx, ary, of], axis=3), np.int32)
            """
            sample = []
            for i in range(arg_fea.partial_shape[0]):
                for j in range(arg_fea.partial_shape[1]):
                    sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
            sample = Concat(sample, axis = 0)
            """
            sample = IndexingRemap(arg_fea, idxmap).reshape(
                inp.shape[0], inp.shape[1], bilx.shape[1], -1)
            bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            sample *= bilx * bily
            outputs.append(sample)
    output = outputs[0]
    for i in outputs[1:]:
        output += i
    return conv_bn(output, ker_shape, 3, 0, chl, isrelu)
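# Illustrative sketch (not part of the original code): both deformable blocks
# build their base sampling grid as (Cumsum(ones) - 1) * stride, i.e. the row
# and column pixel indices that the learned offsets are added to. In NumPy:
import numpy as np

def base_grid_demo(H=3, W=4, stride=2):
    ones = np.ones((H, W), dtype=np.int32)
    affx = (np.cumsum(ones, axis=0) - 1) * stride   # row index of each output cell
    affy = (np.cumsum(ones, axis=1) - 1) * stride   # column index of each output cell
    return affx, affy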
def dfpooling(name, inp, window=2, padding=0, dx=[0, 1], dy=[0, 1]):
    #inp = ConstProvider([[[[1, 2], [3, 4]]]], dtype = np.float32)
    """
    Add a new conv & BN to ensure the offset feature map has roughly unit variance.
    """
    ker_shape = window
    stride = window
    offsetlay = Conv2D(
        name + "conv", inp,
        kernel_shape=3, stride=1, padding=1,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (3**2 * inp.partial_shape[1]))**0.5),
        nonlinearity=Identity())
    #offsetlay = BN(name + "BN", offsetlay, eps = 1e-9)
    offsetx = Conv2D(
        name + "conv1x", offsetlay,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[2]))**0.5),
        nonlinearity=Identity())
    offsety = Conv2D(
        name + "conv1y", offsetlay,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[3]))**0.5),
        nonlinearity=Identity())
    offset = Concat([offsetx, offsety], axis=1)
    ndim = ker_shape**2 * offsetx.partial_shape[2] * offsetx.partial_shape[3] * 2
    offset = FullyConnected(
        name + "offset", offsetx,
        output_dim=ndim,
        W=G(mean=0, std=(1 / ndim)**2),
        #W = C(0),
        b=C(0),
        nonlinearity=Identity())
    offsetx = offset[:, :ndim // 2].reshape(offsetx.shape)
    offsety = offset[:, ndim // 2:].reshape(offsety.shape)
    """
    offsetx = FullyConnected(
        name + "offsetx", offsetx,
        output_dim = ndim,
        W = G(mean = 0, std = gamma / ndim),
        b = C(0),
        nonlinearity = Identity())
    offsetx = offsetx.reshape(offsety.shape)
    offsety = FullyConnected(
        name + "offsety", offsety,
        output_dim = ndim,
        W = G(mean = 0, std = gamma / ndim),
        b = C(0),
        nonlinearity = Identity())
    offsety = offsety.reshape(offsetx.shape)
    print(offsety.partial_shape)
    """
    #offsetx = ZeroGrad(offsetx)
    #offsety = ZeroGrad(offsety)
    outputs = []
    # Bilinear sampling: accumulate the four floor/ceil corner contributions.
    for sx in range(2):
        for sy in range(2):
            if sx == 0:
                ofx = Floor(offsetx)
                bilx = 1 - (offsetx - ofx)
            else:
                ofx = Ceil(offsetx)
                bilx = 1 - (ofx - offsetx)
            if sy == 0:
                ofy = Floor(offsety)
                bily = 1 - (offsety - ofy)
            else:
                ofy = Ceil(offsety)
                bily = 1 - (ofy - offsety)
            """ No padding
            padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
            padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
            arg_fea = Concat([padding1, inp, padding1], axis = 2)
            arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
            """
            arg_fea = inp
            # Base sampling grid the offsets are added to.
            #one_mat = ConstProvider(np.ones((inp.partial_shape[2], inp.partial_shape[3])), dtype = np.int32)
            one_mat = ConstProvider(1, dtype=np.int32).add_axis(0).broadcast(
                (ofx.shape[2], ofx.shape[3]))
            affx = (Cumsum(one_mat, axis=0) - 1) * stride
            affy = (Cumsum(one_mat, axis=1) - 1) * stride
            ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
            ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)
            # Per-tap displacements dx / dy of the pooling window.
            one_mat = ConstProvider(
                np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
            #ofx[:, :ker_shape, :, :] -= 1
            #ofx[:, ker_shape*2:, :, :] += 1
            ofx += Concat([one_mat * i for i in dx], axis=0).dimshuffle('x', 0, 1, 2)
            #ofy[:, ::3, :, :] -= 1
            #ofy[:, 2::3, :, :] += 1
            one_mat = ConstProvider(
                np.ones((1, ofx.partial_shape[2], ofx.partial_shape[3])))
            one_mat = Concat([one_mat * i for i in dy], axis=0)
            one_mat = Concat([one_mat] * ker_shape, axis=0)
            ofy += one_mat.dimshuffle('x', 0, 1, 2)
            # Clamp sampling positions to the feature-map borders.
            ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
            ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

            def DeformReshape(inp, ker_shape):
                # (N, ker*ker, H, W) -> (N, H*ker, W*ker)
                inp = inp.reshape(inp.shape[0], ker_shape, ker_shape,
                                  inp.shape[2], inp.partial_shape[3])
                inp = inp.dimshuffle(0, 3, 1, 4, 2)
                inp = inp.reshape(inp.shape[0], inp.shape[1] * inp.shape[2],
                                  inp.shape[3] * inp.shape[4])
                return inp

            ofx = DeformReshape(ofx, ker_shape)
            ofy = DeformReshape(ofy, ker_shape)
            bilx = DeformReshape(bilx, ker_shape)
            bily = DeformReshape(bily, ker_shape)
            # Flatten (row, col) into one index and gather with IndexingRemap.
            of = ofx * arg_fea.partial_shape[2] + ofy
            arg_fea = arg_fea.reshape(arg_fea.shape[0], arg_fea.shape[1], -1)
            of = of.reshape(ofx.shape[0], -1)
            of = of.dimshuffle(0, 'x', 1)
            #of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
            of = of.broadcast((of.shape[0], arg_fea.shape[1], of.shape[2]))
            arx = Linspace(0, arg_fea.shape[0], arg_fea.shape[0], endpoint=False)
            arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
            ary = Linspace(0, arg_fea.shape[1], arg_fea.shape[1], endpoint=False)
            ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
            of = of.add_axis(3)
            arx = arx.add_axis(3)
            ary = ary.add_axis(3)
            idxmap = Astype(Concat([arx, ary, of], axis=3), np.int32)
            """
            sample = []
            for i in range(arg_fea.partial_shape[0]):
                for j in range(arg_fea.partial_shape[1]):
                    sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
            sample = Concat(sample, axis = 0)
            """
            sample = IndexingRemap(arg_fea, idxmap).reshape(
                inp.shape[0], inp.shape[1], bilx.shape[1], -1)
            bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            sample *= bilx * bily
            outputs.append(sample)
    output = outputs[0]
    for i in outputs[1:]:
        output += i
    return Pooling2D(name, output, window=2, mode="AVERAGE")
def dense_block(inp, k, l, B):
    lay = inp
    for i in range(l):
        cur_lay = dense_layer(lay, k, B)
        lay = Concat([lay, cur_lay], axis=1)
    return lay
def dfpooling(name, inp, window=2, padding=0, dx=[0, 1], dy=[0, 1]):
    #inp = ConstProvider([[[[1, 2], [3, 4]]]], dtype = np.float32)
    ker_shape = window
    stride = window
    gamma = 0.1
    # Offsets are predicted by zero-initialised convs and scaled by gamma times
    # the spatial size.
    offsetx = gamma * inp.partial_shape[2] * Conv2D(
        name + "offsetx", inp,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=C(0), nonlinearity=Identity())
    offsety = gamma * inp.partial_shape[3] * Conv2D(
        name + "offsety", inp,
        kernel_shape=ker_shape, stride=stride, padding=padding,
        output_nr_channel=ker_shape**2,
        W=C(0), nonlinearity=Identity())
    outputs = []
    # Bilinear sampling: accumulate the four floor/ceil corner contributions;
    # the Equal(...) term keeps the weight at 1 when floor == ceil.
    for sx in range(2):
        for sy in range(2):
            if sx == 0:
                ofx = Floor(offsetx)
                bilx = offsetx - ofx + Equal(Floor(offsetx), Ceil(offsetx))
            else:
                ofx = Ceil(offsetx)
                bilx = ofx - offsetx
            if sy == 0:
                ofy = Floor(offsety)
                bily = offsety - ofy + Equal(Floor(offsety), Ceil(offsety))
            else:
                ofy = Ceil(offsety)
                bily = ofy - offsety
            """ No padding
            padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
            padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
            arg_fea = Concat([padding1, inp, padding1], axis = 2)
            arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
            """
            arg_fea = inp
            # Base sampling grid the offsets are added to.
            #one_mat = ConstProvider(np.ones((inp.partial_shape[2], inp.partial_shape[3])), dtype = np.int32)
            one_mat = ConstProvider(1, dtype=np.int32).add_axis(0).broadcast(
                (ofx.partial_shape[2], ofx.partial_shape[3]))
            affx = (Cumsum(one_mat, axis=0) - 1) * stride
            affy = (Cumsum(one_mat, axis=1) - 1) * stride
            ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
            ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)
            # Per-tap displacements dx / dy of the pooling window.
            one_mat = ConstProvider(
                np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
            #ofx[:, :ker_shape, :, :] -= 1
            #ofx[:, ker_shape*2:, :, :] += 1
            ofx += Concat([one_mat * i for i in dx], axis=0).dimshuffle('x', 0, 1, 2)
            #ofy[:, ::3, :, :] -= 1
            #ofy[:, 2::3, :, :] += 1
            one_mat = ConstProvider(
                np.ones((1, ofx.partial_shape[2], ofx.partial_shape[3])))
            one_mat = Concat([one_mat * i for i in dy], axis=0)
            one_mat = Concat([one_mat] * ker_shape, axis=0)
            ofy += one_mat.dimshuffle('x', 0, 1, 2)
            # Clamp sampling positions to the feature-map borders.
            ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
            ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

            def DeformReshape(inp, ker_shape):
                # (N, ker*ker, H, W) -> (N, H*ker, W*ker)
                inp = inp.reshape(inp.partial_shape[0], ker_shape, ker_shape,
                                  inp.partial_shape[2], inp.partial_shape[3])
                inp = inp.dimshuffle(0, 3, 1, 4, 2)
                inp = inp.reshape(inp.partial_shape[0],
                                  inp.partial_shape[1] * inp.partial_shape[2],
                                  inp.partial_shape[3] * inp.partial_shape[4])
                return inp

            ofx = DeformReshape(ofx, ker_shape)
            ofy = DeformReshape(ofy, ker_shape)
            bilx = DeformReshape(bilx, ker_shape)
            bily = DeformReshape(bily, ker_shape)
            # Flatten (row, col) into one index and gather with IndexingRemap.
            of = ofx * arg_fea.partial_shape[2] + ofy
            arg_fea = arg_fea.reshape(arg_fea.partial_shape[0],
                                      arg_fea.partial_shape[1], -1)
            of = of.reshape(ofx.partial_shape[0], -1)
            of = of.dimshuffle(0, 'x', 1)
            #of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
            of = of.broadcast((of.partial_shape[0], arg_fea.partial_shape[1],
                               of.partial_shape[2]))
            arx = Linspace(0, arg_fea.partial_shape[0], arg_fea.partial_shape[0],
                           endpoint=False)
            arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
            ary = Linspace(0, arg_fea.partial_shape[1], arg_fea.partial_shape[1],
                           endpoint=False)
            ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
            of = of.add_axis(3)
            arx = arx.add_axis(3)
            ary = ary.add_axis(3)
            idxmap = Astype(Concat([arx, ary, of], axis=3), np.int32)
            """
            sample = []
            for i in range(arg_fea.partial_shape[0]):
                for j in range(arg_fea.partial_shape[1]):
                    sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
            sample = Concat(sample, axis = 0)
            """
            sample = IndexingRemap(arg_fea, idxmap).reshape(
                inp.partial_shape[0], inp.partial_shape[1],
                bilx.partial_shape[1], -1)
            bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            sample *= bilx * bily
            outputs.append(sample)
    output = outputs[0]
    for i in outputs[1:]:
        output += i
    return Pooling2D(name, output, window=2, mode="AVERAGE")