def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, has_relu, has_bn,
                 has_conv=True, group=None):
    global idx
    idx += 1
    if has_bn:
        l1 = BN("bn{}".format(idx), inp, eps=1e-9)
        l1 = ElementwiseAffine("bnaff{}".format(idx), l1,
                               shared_in_channels=False, k=C(1), b=C(0))
    else:
        l1 = inp

    if has_relu:
        l2 = arith.ReLU(l1)
    else:
        l2 = l1

    if not has_conv:
        return l2

    if group is None:
        l3 = Conv2D(
            "conv{}".format(idx), l2,
            kernel_shape=ker_shape,
            stride=stride,
            padding=padding,
            output_nr_channel=out_chl,
            nonlinearity=Identity()
        )
    else:
        l3 = Conv2D(
            "conv{}".format(idx), l2,
            kernel_shape=ker_shape,
            stride=stride,
            padding=padding,
            output_nr_channel=out_chl,
            nonlinearity=Identity(),
            group=group,
        )
    return l3
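# A minimal sketch of the BN -> ReLU ordering that bn_relu_conv applies before
# its convolution, in plain NumPy so it runs without the framework. The eps
# value mirrors the snippet above; the random data and single-batch statistics
# are illustrative assumptions, and the framework-specific Conv2D step is omitted.
import numpy as np

x = np.random.randn(2, 16, 8, 8)                 # (N, C, H, W)
mean = x.mean(axis=(0, 2, 3), keepdims=True)     # per-channel batch mean
var = x.var(axis=(0, 2, 3), keepdims=True)       # per-channel batch variance
x_bn = (x - mean) / np.sqrt(var + 1e-9)          # BN with the same eps = 1e-9
x_out = np.maximum(x_bn, 0)                      # ReLU; conv would follow here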
def res_layer(inp, chl, stride=1, proj=False):
    pre = inp
    inp = conv_bn(inp, 1, stride, 0, chl // 4, True)
    chl //= 4

    name = inp.name
    # Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    width = 4
    lay = FullyConnected(
        "fc0({})".format(name), SE,
        output_dim=chl,
        nonlinearity=ReLU()
    )
    # fc1
    lay = FullyConnected(
        "fc1({})".format(name), lay,
        output_dim=chl * width,
        nonlinearity=Identity()
    )
    lay = lay.reshape(inp.shape[0], chl, width)
    lay = Softmax("softmax({})".format(name), lay, axis=2)
    for i in range(width):
        if i == 0:
            inp_lay = inp
        else:
            inp_lay = O.Concat(
                [inp[:, width:, :, :], inp[:, :width, :, :]], axis=1)
        inp_lay = inp_lay * lay[:, :, i].dimshuffle(0, 1, 'x', 'x')
        inp = inp_lay

    chl *= 4
    inp = conv_bn(inp, 3, 1, 1, chl // 4, True)
    inp = conv_bn(inp, 1, 1, 0, chl, False)
    if proj:
        pre = conv_bn(pre, 1, stride, 0, chl, False)

    name = inp.name
    # Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    lay = FullyConnected(
        "fc0({})".format(name), SE,
        output_dim=chl,
        nonlinearity=ReLU()
    )
    # fc1
    lay = FullyConnected(
        "fc1({})".format(name), lay,
        output_dim=chl * width,
        nonlinearity=Identity()
    )
    lay = lay.reshape(inp.shape[0], chl, width)
    lay = Softmax("softmax({})".format(name), lay, axis=2)
    for i in range(width):
        if i == 0:
            inp_lay = inp
        else:
            inp_lay = O.Concat(
                [inp[:, width:, :, :], inp[:, :width, :, :]], axis=1)
        inp_lay = inp_lay * lay[:, :, i].dimshuffle(0, 1, 'x', 'x')
        inp = inp_lay

    inp = arith.ReLU(inp + pre)
    return inp
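# The attention block above, rendered in NumPy: squeeze (global average pool),
# two FC layers, a softmax over `width` alternatives, then a per-channel gate.
# The FC weights are random stand-ins and the shapes are illustrative; only
# the gating arithmetic is meant to match the snippet.
import numpy as np

N, chl, H, W, width = 2, 8, 4, 4, 4
x = np.random.randn(N, chl, H, W)
SE = x.mean(axis=(2, 3))                                     # (N, chl) squeeze
fc0 = np.maximum(SE @ np.random.randn(chl, chl), 0)          # fc0 + ReLU
fc1 = (fc0 @ np.random.randn(chl, chl * width)).reshape(N, chl, width)
gate = np.exp(fc1) / np.exp(fc1).sum(axis=2, keepdims=True)  # softmax over width
y = x * gate[:, :, 0][:, :, None, None]                      # gate for alternative i = 0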
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu, group=1, shift=0):
    global idx
    idx += 1
    if group == 1:
        l1 = Conv2D(
            "conv{}".format(idx), inp,
            kernel_shape=ker_shape,
            stride=stride,
            padding=padding,
            output_nr_channel=out_chl,
            # W = G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
            # b = C(0),
            nonlinearity=Identity()
        )
    elif shift == 0:
        l1 = Conv2D(
            "conv{}".format(idx), inp,
            kernel_shape=ker_shape,
            stride=stride,
            padding=padding,
            output_nr_channel=out_chl,
            # W = G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
            # b = C(0),
            nonlinearity=Identity(),
            group=group,
        )
    else:
        # shifted grouped convolution: conv, rotate the channels, conv again,
        # add the pair, and double the rotation until it reaches `group`
        shift = 1
        l1 = inp
        while shift != group:
            l11 = Conv2D(
                "conv{}_{}_1".format(idx, shift), l1,
                kernel_shape=ker_shape,
                stride=stride,
                padding=padding,
                output_nr_channel=out_chl,
                nonlinearity=Identity(),
                group=group,
            )
            inp_chl = l1.partial_shape[1]
            l1 = O.Concat(
                [l1[:, shift * inp_chl // group:, :, :],
                 l1[:, :shift * inp_chl // group, :, :]],
                axis=1)
            l12 = Conv2D(
                "conv{}_{}_2".format(idx, shift), l1,
                kernel_shape=ker_shape,
                stride=stride,
                padding=padding,
                output_nr_channel=out_chl,
                nonlinearity=Identity(),
                group=group,
            )
            l1 = l11 + l12
            shift *= 2
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2
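# A plain-Python trace of the `shift` schedule above: the rotation amount
# doubles each pass (1, 2, 4, ...) until it reaches `group`, and each pass
# rotates the channel axis by shift * C // group between the paired convs.
# group and C values are illustrative.
group, C = 8, 32
shift = 1
while shift != group:
    print("rotate by", shift * C // group, "channels, conv before and after")
    shift *= 2
# -> rotations of 4, 8, 16 channels for group = 8, C = 32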
def make_network(minibatch_size=128):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 15, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    # lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    lay, conv = conv_bn(inp, 3, 1, 1, 16, True)
    out = [conv]
    for chl in [32, 64, 128]:
        for i in range(10):
            lay, conv = conv_bn(lay, 3, 1, 1, chl, True)
            out.append(conv)
        if chl != 128:
            lay = b_resize("pooling{}".format(chl), lay)
            lay = Pooling2D("pooling{}".format(chl), lay, window=2, mode="MAX")

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(1 / feature.partial_shape[1])**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred] + out)
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, has_relu, has_bn,
                 has_conv=True):
    global idx
    idx += 1
    if has_bn:
        l1 = BN("bn{}".format(idx), inp, eps=1e-9)
        l1 = ElementwiseAffine("bnaff{}".format(idx), l1,
                               shared_in_channels=False, k=C(1), b=C(0))
    else:
        l1 = inp

    if has_relu:
        l2 = arith.ReLU(l1)
    else:
        l2 = l1

    if not has_conv:
        return l2

    l3 = Conv2D(
        "conv{}".format(idx), l2,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        # W = G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
        # b = C(0),
        nonlinearity=Identity()
    )
    return l3
def relu_conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu=True, isbn=True):
    global idx
    idx += 1
    if isrelu:
        inp = arith.ReLU(inp)
    inp = Conv2D("conv{}".format(idx), inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl,
                 nonlinearity=Identity())
    if isbn:
        inp = BN("bn{}".format(idx), inp, eps=1e-9)
        inp = ElementwiseAffine("bnaff{}".format(idx), inp,
                                shared_in_channels=False, k=C(1), b=C(0))
    return inp
def make_network(minibatch_size=128):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    lay = conv_bn(inp, 3, 1, 1, 16, True)
    n = 3
    lis = [16, 32, 64]
    for i in lis:
        lay = res_block(lay, i, n)

    # global average pooling
    feature = lay.mean(axis=2).mean(axis=2)
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(2 / 64)**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def make_network(minibatch_size=128, debug=False):
    patch_size = 32
    inp = DataProvider("data",
                       shape=(minibatch_size, 3, patch_size, patch_size),
                       dtype=np.float32)
    label = DataProvider("label", shape=(minibatch_size, ), dtype=np.int32)

    lay = conv_bn(inp, 3, 1, 1, 16, True)
    n = 18
    lis = [16, 32, 64]
    for i in lis:
        lay = res_block(lay, i, n)

    # global average pooling
    # feature = lay.mean(axis=2).mean(axis=2)
    feature = Pooling2D("pooling", lay, window=8, stride=8, padding=0,
                        mode="AVERAGE")
    pred = Softmax("pred", FullyConnected(
        "fc0", feature,
        output_dim=10,
        nonlinearity=Identity()
    ))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)

    if debug:
        visitor = NetworkVisitor(network.loss_var)
        for i in visitor.all_oprs:
            print(i)
            print(i.partial_shape)
            print("input = ", i.inputs)
            print("output = ", i.outputs)
            print()

    return network
def dense_block(inp, k, l):
    lay = inp
    for i in range(l):
        cur_lay = bn_relu_conv(lay, 3, 1, 1, k, True, True)
        name = cur_lay.name
        group = k // 4
        # Global Pooling
        SE = cur_lay.mean(axis=3).mean(axis=2)
        SE = FullyConnected("fc0({})".format(name), SE,
                            output_dim=(k // group)**2 * group,
                            nonlinearity=ReLU())
        SE = FullyConnected("fc1({})".format(name), SE,
                            output_dim=(k // group)**2 * group,
                            nonlinearity=Sigmoid())
        print(SE.name)
        SE = SE.reshape(cur_lay.shape[0] * group, k // group, k // group, 1, 1)
        preshape = cur_lay.shape
        cur_lay = cur_lay.reshape(1, cur_lay.shape[0] * cur_lay.shape[1],
                                  cur_lay.shape[2], cur_lay.shape[3])
        cur_lay = Conv2D("conv({})".format(name), cur_lay,
                         kernel_shape=1,
                         stride=1,
                         padding=0,
                         W=SE,
                         nonlinearity=Identity())
        cur_lay = cur_lay.reshape(preshape)
        # cur_lay = cur_lay * SE.dimshuffle(0, 1, 'x', 'x')
        lay = Concat([lay, cur_lay], axis=1)
    return lay
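# Sketch of the trick above: a per-sample 1x1 convolution implemented by
# folding the batch into the channel axis and feeding the generated tensor SE
# in as the conv weight. Plain NumPy with einsum; the shapes and the
# simplification to group = 1 are illustrative assumptions.
import numpy as np

N, k, H, W = 2, 8, 4, 4
x = np.random.randn(N, k, H, W)
SE = np.random.rand(N, k, k)                 # stand-in for the generated weights
y = np.einsum('nij,njhw->nihw', SE, x)       # per-sample 1x1 conv
assert y.shape == (N, k, H, W)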
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu, mode=None):
    global idx
    idx += 1
    print(inp.partial_shape, ker_shape, out_chl)
    if ker_shape == 1:
        W = ortho_group.rvs(out_chl)
        W = W[:, :inp.partial_shape[1]]
        W = W.reshape(W.shape[0], W.shape[1], 1, 1)
        W = ConstProvider(W)
        b = ConstProvider(np.zeros(out_chl))
    else:
        W = G(mean=0,
              std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5)
        b = C(0)
    l1 = Conv2D(
        "conv{}".format(idx), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        group=mode,
        W=W,
        b=b,
        nonlinearity=Identity()
    )
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1
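# Quick check of the orthogonal initialization used above: scipy's
# ortho_group.rvs(n) draws an n x n orthogonal matrix, so any subset of its
# columns stays orthonormal. Sizes are illustrative.
import numpy as np
from scipy.stats import ortho_group

out_chl, in_chl = 8, 3
W = ortho_group.rvs(out_chl)[:, :in_chl]     # (out_chl, in_chl), as in the snippet
assert np.allclose(W.T @ W, np.eye(in_chl))  # columns remain orthonormal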
def make_network(minibatch_size=128):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    # lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    lay, conv = conv_bn(inp, 3, 1, 1, 16, True)
    out = [conv]
    for chl in [32 * 3, 64 * 3, 128 * 3]:
        for i in range(10):
            lay, conv1, conv2 = xcep_layer(lay, chl)
            out.append(conv1)
            out.append(conv2)
        if chl != 128 * 3:
            lay = Pooling2D("pooling{}".format(chl), lay, window=2, mode="MAX")

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")

    W = ortho_group.rvs(feature.partial_shape[1])
    W = W[:, :10]
    W = ConstProvider(W)
    b = ConstProvider(np.zeros((10, )))
    pred = Softmax("pred", FullyConnected(
        "fc0", feature,
        output_dim=10,
        W=W,
        b=b,
        nonlinearity=Identity()
    ))

    network = Network(outputs=[pred] + out)
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def make_network(minibatch_size=64):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    k, l = 20, (40 - 4) // 3
    lay = bn_relu_conv(inp, 3, 1, 1, k, False, False)
    for i in range(3):
        lay = transition(dense_block(lay, k, l), i)

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature, output_dim=10, nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)

    info = CInfo()
    info.get_complexity(network.outputs).as_table().show()
    return network
def make_network(minibatch_size=64):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    k, l = 24, (100 - 4) // 3
    for i in range(3):
        lay = transition(dense_block(lay, k, l, False), i)

    feature = lay
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(1 / feature.partial_shape[1])**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def make_network(minibatch_size=64):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    k, l = 12, (40 - 4) // 3
    for i in range(3):
        lay = transition(dense_block(lay, k, l), i)

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(1 / feature.partial_shape[1])**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def make_network(minibatch_size=128, debug=False):
    patch_size = 32
    inp = DataProvider("data",
                       shape=(minibatch_size, 3, patch_size, patch_size),
                       dtype=np.float32)
    label = DataProvider("label", shape=(minibatch_size, ), dtype=np.int32)

    lay, w = conv_bn(inp, 3, 1, 1, 16, True)
    lis_w = [w]
    n = 3
    lis = [16, 32, 64]
    for i in lis:
        lay, lis_new = res_block(lay, i, n)
        lis_w += lis_new

    # global average pooling
    # feature = lay.mean(axis=2).mean(axis=2)
    feature = Pooling2D("pooling", lay, window=8, stride=8, padding=0,
                        mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected(
            "fc0", feature,
            output_dim=10,
            # W = G(mean=0, std=(1 / 64)**0.5),
            # b = C(0),
            nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)

    # soft orthogonality penalty on the collected conv weights
    lmd = 1
    for w in lis_w:
        w = w.reshape(w.partial_shape[0], -1).dimshuffle(1, 0)
        w = w / ((w**2).sum(axis=0)).dimshuffle('x', 0)
        A = O.MatMul(w.dimshuffle(1, 0), w)
        network.loss_var += lmd * (
            (A - np.identity(A.partial_shape[0]))**2).mean()

    if debug:
        visitor = NetworkVisitor(network.loss_var)
        for i in visitor.all_oprs:
            print(i)
            print(i.partial_shape)
            print("input = ", i.inputs)
            print("output = ", i.outputs)
            print()

    return network
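# A NumPy sketch of the soft-orthogonality penalty added to the loss above:
# flatten each filter, normalize, and penalize the Gram matrix's distance from
# the identity. Note the snippet divides by the *squared* column norm; with
# unit normalization (sqrt, as below) the Gram diagonal is exactly 1. Shapes
# are illustrative assumptions.
import numpy as np

W = np.random.randn(16, 3 * 3 * 3)              # (out_chl, in_chl * kh * kw)
cols = W.T / np.linalg.norm(W.T, axis=0)        # unit-norm columns, one per filter
A = cols.T @ cols                               # (out_chl, out_chl) Gram matrix
penalty = ((A - np.eye(A.shape[0]))**2).mean()  # 0 iff the filters are orthonormal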
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l1 = Conv2D(
        "conv{}".format(idx), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        nonlinearity=Identity()
    )
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2
def make_network(minibatch_size=128, debug=False):
    patch_size = 32
    inp = DataProvider("data",
                       shape=(minibatch_size, 3, patch_size, patch_size),
                       dtype=np.float32)
    label = DataProvider("label", shape=(minibatch_size, ), dtype=np.int32)

    lay = conv_bn(inp, 3, 1, 1, 16 * 4 * 2, True)
    n = 4 * 3
    group = 8
    lis = [16 * 4, 32 * 4, 64 * 4]
    for i in range(len(lis)):
        lay = res_block(lay, lis[i], i, n, group)

    # global average pooling
    # feature = lay.mean(axis=2).mean(axis=2)
    feature = Pooling2D("pooling", lay, window=8, stride=8, padding=0,
                        mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected(
            "fc0", feature,
            output_dim=10,
            # W = G(mean=0, std=(1 / 64)**0.5),
            # b = C(0),
            nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)

    info = CInfo()
    info.get_complexity(network.outputs).as_table().show()

    """
    if debug:
        visitor = NetworkVisitor(network.loss_var)
        for i in visitor.all_oprs:
            print(i)
            print(i.partial_shape)
            print("input = ", i.inputs)
            print("output = ", i.outputs)
            print()
    """
    return network
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l1 = Conv2D(
        "encoder_conv{}".format(idx), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        W=G(mean=0,
            std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
        nonlinearity=Identity()
    )
    l2 = BN("encoder_bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1
def conv_bn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l10 = Conv2D("conv{}_0".format(idx), inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    l11 = Conv2D("conv{}_1".format(idx), inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl // 2,
                 W=G(mean=0,
                     std=((1 + int(isrelu)) / (ker_shape**2 * inp.partial_shape[1]))**0.5),
                 nonlinearity=Identity())
    # freeze the second branch: its W and b keep their random initialization
    W = l11.inputs[1].owner_opr
    b = l11.inputs[2].owner_opr
    W.set_freezed()
    b.set_freezed()
    l1 = Concat([l10, l11], axis=1)
    l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    l2 = ElementwiseAffine("bnaff{}".format(idx), l2,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1
def bn_relu_conv(inp, ker_shape, stride, padding, out_chl, isrelu, isbn):
    global idx
    idx += 1
    if isbn:
        inp = BN("bn{}".format(idx), inp, eps=1e-9)
        inp = ElementwiseAffine("bnaff{}".format(idx), inp,
                                shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        inp = arith.ReLU(inp)
    inp = Conv2D(
        "conv{}".format(idx), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        # W = G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
        # b = C(0),
        nonlinearity=Identity()
    )
    return inp
def make_network(minibatch_size=128):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    # lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    lay, conv = conv_bn(inp, 3, 1, 1, 16, True)
    out = [conv]
    for chl in [32, 64, 128]:
        for i in range(10):
            lay, conv = conv_bn(lay, 3, 1, 1, chl, True)
            out.append(conv)
        if chl != 128:
            lay = Pooling2D("pooling{}".format(chl), lay, window=2, mode="MAX")

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(1 / feature.partial_shape[1])**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred] + out)
    network.loss_var = CrossEntropyLoss(pred, label)

    # decorrelation penalty on the collected conv feature maps: treat each
    # channel as a vector over (N, H, W) and push the Gram matrix toward I
    # conv1 = out[0]
    # print(conv1.inputs[1].partial_shape)
    lmd = 0.01
    for conv_lay in out:
        w = conv_lay
        w = w.dimshuffle(1, 0, 2, 3)
        w = w.reshape(w.partial_shape[0], -1).dimshuffle(1, 0)
        w = w / ((w**2).sum(axis=0)).dimshuffle('x', 0)
        A = MatMul(w.dimshuffle(1, 0), w)
        # print(A.partial_shape)
        network.loss_var += lmd * (
            (A - np.identity(A.partial_shape[0]))**2).sum()

    return network
def make_network(minibatch_size=128, debug=False):
    patch_size = 32
    inp = DataProvider("data",
                       shape=(minibatch_size, 3, patch_size, patch_size),
                       dtype=np.float32)
    label = DataProvider("label", shape=(minibatch_size, ), dtype=np.int32)

    lay = conv_bn(inp, 3, 1, 1, 16, True)
    lis = [16, 32, 64]
    for i in range(len(lis)):
        # lay = res_block(lay, lis[i], i, n)
        for j in range(40):
            lay = conv_bn(lay, 3, 1, 1, lis[i], False)
        if i < len(lis) - 1:
            lay = conv_bn(lay, 2, 2, 0, lis[i + 1], True)

    # global average pooling
    feature = lay.mean(axis=2).mean(axis=2)
    pred = Softmax(
        "pred",
        FullyConnected(
            "fc0", feature,
            output_dim=10,
            # W = G(mean=0, std=(1 / 64)**0.5),
            # b = C(0),
            nonlinearity=Identity()))

    network = Network(outputs=[pred])
    # info = CInfo()
    # info.get_complexity(network.outputs).as_table().show()
    network.loss_var = CrossEntropyLoss(pred, label)

    """
    if debug:
        visitor = NetworkVisitor(network.loss_var)
        for i in visitor.all_oprs:
            print(i)
            print(i.partial_shape)
            print("input = ", i.inputs)
            print("output = ", i.outputs)
            print()
    """
    return network
def conv_norm(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    inp = Conv2D("conv{}".format(idx), inp,
                 kernel_shape=ker_shape,
                 stride=stride,
                 padding=padding,
                 output_nr_channel=out_chl,
                 nonlinearity=Identity())
    mean = inp.mean(axis=3).mean(axis=2)
    std = (((inp - mean.dimshuffle(0, 1, 'x', 'x'))**2)
           .mean(axis=3).mean(axis=2))**0.5
    inp = (inp - mean.dimshuffle(0, 1, 'x', 'x')) / std.dimshuffle(0, 1, 'x', 'x')
    if isrelu:
        inp = O.ReLU(inp)
    return inp
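# The normalization above is per-sample, per-channel (instance normalization
# without an eps). A NumPy equivalent on a toy tensor; the small eps here is
# an added safeguard that the snippet itself does not include.
import numpy as np

x = np.random.randn(2, 4, 8, 8)
mean = x.mean(axis=(2, 3), keepdims=True)
std = np.sqrt(((x - mean)**2).mean(axis=(2, 3), keepdims=True))
y = (x - mean) / (std + 1e-9)   # eps added for safety; the snippet divides by std directly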
def make_network(minibatch_size=128):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    idxmap = np.zeros((128, 3, 32, 32, 4), dtype=np.int32)
    sample = IndexingRemap(inp, idxmap)
    network = Network(outputs=[sample])
    sample = FullyConnected("fc", sample, output_dim=1)
    network.loss_var = sample.sum()
    return network
    # NOTE: everything below is unreachable as written; it is the dfconv-based
    # network that the IndexingRemap experiment above replaced.

    # lay = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    lay, conv = conv_bn(inp, 3, 1, 1, 32, True)
    out = [conv]
    """
    for chl in [32, 64, 128]:
        for i in range(10):
            lay, conv = conv_bn(lay, 3, 1, 1, chl, True)
            out.append(conv)
        if chl != 128:
            lay = dfpooling("pooling{}".format(chl), lay)
    """
    chl = 32
    for i in range(3):
        lay, conv = dfconv(lay, chl, True, i == 0)

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature,
                       output_dim=10,
                       W=G(mean=0, std=(1 / feature.partial_shape[1])**0.5),
                       b=C(0),
                       nonlinearity=Identity()))

    network = Network(outputs=[pred] + out)
    network.loss_var = CrossEntropyLoss(pred, label)
    return network
def conv_wn(inp, ker_shape, stride, padding, out_chl, isrelu):
    global idx
    idx += 1
    l1 = Conv2D(
        "conv{}".format(idx), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=out_chl,
        W=G(mean=0, std=0.05),
        nonlinearity=Identity()
    )
    W = l1.inputs[1]
    # l2 = BN("bn{}".format(idx), l1, eps=1e-9)
    w = l1.inputs[1]
    assert ":W" in w.name
    w = ((w**2).sum(axis=3).sum(axis=2).sum(axis=1))**0.5
    l1 = l1 / w.dimshuffle('x', 0, 'x', 'x')
    l2 = ElementwiseAffine("bnaff{}".format(idx), l1,
                           shared_in_channels=False, k=C(1), b=C(0))
    if isrelu:
        l2 = arith.ReLU(l2)
    return l2, l1, W
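# Weight normalization as in conv_wn, in NumPy: divide each output-channel
# filter by its L2 norm, which is equivalent to the division the snippet
# applies to the conv output. The following affine layer then owns the scale.
# Shapes and the 0.05 init follow the snippet; the data is illustrative.
import numpy as np

W = np.random.randn(16, 3, 3, 3) * 0.05        # (out, in, kh, kw), std 0.05 as above
norms = np.sqrt((W**2).sum(axis=(1, 2, 3)))    # one norm per output filter
W_hat = W / norms[:, None, None, None]         # unit-norm filters
assert np.allclose(np.sqrt((W_hat**2).sum(axis=(1, 2, 3))), 1)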
def make_network(minibatch_size=64):
    patch_size = 32
    inp = DataProvider("data", shape=(minibatch_size, 3, patch_size, patch_size))
    label = DataProvider("label", shape=(minibatch_size, ))

    lay, w = bn_relu_conv(inp, 3, 1, 1, 16, False, False)
    lis_w = [w]
    k, l = 12, (40 - 4) // 3
    for i in range(3):
        # lay = transition(dense_block(lay, k, l), i)
        lay, lis_new = dense_block(lay, k, l)
        lis_w += lis_new
        lay, lis_new = transition(lay, i)
        lis_w += lis_new

    # global average pooling
    print(lay.partial_shape)
    feature = lay.mean(axis=2).mean(axis=2)
    # feature = Pooling2D("glbpoling", lay, window=8, stride=8, mode="AVERAGE")
    pred = Softmax(
        "pred",
        FullyConnected("fc0", feature, output_dim=10, nonlinearity=Identity()))

    network = Network(outputs=[pred])
    network.loss_var = CrossEntropyLoss(pred, label)

    lmd = 0.01
    for w in lis_w:
        if w is None:
            continue
        print(w.partial_shape)
        w = w.reshape(w.partial_shape[0], -1).dimshuffle(1, 0)
        w = w / ((w**2).sum(axis=0)).dimshuffle('x', 0)
        A = O.MatMul(w.dimshuffle(1, 0), w)
        network.loss_var += lmd * (
            (A - np.identity(A.partial_shape[0]))**2).sum()

    return network
def res_layer(inp, chl):
    pre = inp
    inp = conv_bn(inp, 3, 1, 1, chl, True)
    inp = conv_bn(inp, 3, 1, 1, chl, False)
    name = inp.name

    # Global Average Pooling
    SE = inp.mean(axis=3).mean(axis=2)
    group = 1
    # fc0
    SE = FullyConnected("fc0({})".format(name), SE,
                        output_dim=chl,
                        nonlinearity=ReLU())
    # fc1
    SE = FullyConnected("fc1({})".format(name), SE,
                        output_dim=(chl // group)**2 * group,
                        nonlinearity=Sigmoid())
    SE = SE.reshape(inp.shape[0] * group, chl // group, chl // group, 1, 1)
    w = SE
    SE /= SE.sum(axis=4).sum(axis=3).sum(axis=2).dimshuffle(0, 1, 'x', 'x', 'x')
    # inp = inp * SE.dimshuffle(0, 1, 'x', 'x')
    inp = inp.reshape(1, inp.shape[0] * inp.shape[1], inp.shape[2], inp.shape[3])
    inp = Conv2D(
        "conv({})".format(name), inp,
        kernel_shape=1,
        stride=1,
        padding=0,
        # output_nr_channel = chl,
        W=SE,
        nonlinearity=Identity(),
        # group = group
    )
    inp = inp.reshape(pre.shape)
    inp = arith.ReLU(inp + pre)
    return inp, w
def dfconv(inp, chl, isrelu, name, ker_shape=3, stride=1, padding=1,
           dx=[-1, 0, 1], dy=[-1, 0, 1]):
    # `name` is added as a parameter: the original referenced an undefined
    # `name` in the layer names below.
    inp = Conv2D(
        name + "conv", inp,
        kernel_shape=3,
        stride=1,
        padding=1,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[1]))**0.5),
        nonlinearity=Identity()
    )
    inp = BN(name + "BN", inp, eps=1e-9)

    global idx
    # idx += 1
    gamma = 0.001
    offsetx = inp.partial_shape[2] * Conv2D(
        "conv{}_offsetx".format(idx + 1), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=gamma / (ker_shape**2 * inp.partial_shape[2])),
        nonlinearity=Identity()
    )
    offsety = inp.partial_shape[3] * Conv2D(
        "conv{}_offsety".format(idx + 1), inp,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=gamma / (ker_shape**2 * inp.partial_shape[3])),
        nonlinearity=Identity()
    )

    outputs = []
    for sx in range(2):
        for sy in range(2):
            # Bilinear weights: 1 minus the fractional distance to the chosen
            # corner. (The original weighted the floor corner by the fractional
            # part itself, which inverts the interpolation and zeroes out
            # integer offsets; this follows the corrected form in dfpooling.)
            if sx == 0:
                ofx = Floor(offsetx)
                bilx = 1 - (offsetx - ofx)
            else:
                ofx = Ceil(offsetx)
                bilx = 1 - (ofx - offsetx)
            if sy == 0:
                ofy = Floor(offsety)
                bily = 1 - (offsety - ofy)
            else:
                ofy = Ceil(offsety)
                bily = 1 - (ofy - offsety)
            """
            No padding
            padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
            padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
            arg_fea = Concat([padding1, inp, padding1], axis = 2)
            arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
            """
            arg_fea = inp

            # absolute sampling grid: offsets plus the regular stride-spaced grid
            one_mat = ConstProvider(1, dtype=np.int32).add_axis(0).broadcast(
                (ofx.partial_shape[2], ofx.partial_shape[3]))
            affx = (Cumsum(one_mat, axis=0) - 1) * stride
            affy = (Cumsum(one_mat, axis=1) - 1) * stride
            ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
            ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)

            # add the kernel tap displacements dx/dy
            one_mat = ConstProvider(
                np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
            ofx += Concat([one_mat * i for i in dx], axis=0).dimshuffle('x', 0, 1, 2)
            one_mat = ones((1, ofx.partial_shape[2], ofx.partial_shape[3]))
            one_mat = Concat([one_mat * i for i in dy], axis=0)
            one_mat = Concat([one_mat] * ker_shape, axis=0)
            ofy += one_mat.dimshuffle('x', 0, 1, 2)

            # clamp to the feature-map borders
            ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
            ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

            def DeformReshape(inp, ker_shape):
                inp = inp.reshape(inp.shape[0], ker_shape, ker_shape,
                                  inp.shape[2], inp.shape[3])
                inp = inp.dimshuffle(0, 3, 1, 4, 2)
                inp = inp.reshape(inp.shape[0], inp.shape[1] * inp.shape[2],
                                  inp.shape[3] * inp.shape[4])
                return inp

            ofx = DeformReshape(ofx, ker_shape)
            ofy = DeformReshape(ofy, ker_shape)
            bilx = DeformReshape(bilx, ker_shape)
            bily = DeformReshape(bily, ker_shape)

            # flatten (x, y) to x * W + y; assumes square feature maps
            of = ofx * arg_fea.shape[2] + ofy
            arg_fea = arg_fea.reshape(arg_fea.shape[0], arg_fea.shape[1], -1)
            of = of.reshape(ofx.shape[0], -1)
            of = of.dimshuffle(0, 'x', 1)
            # of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
            of = of.broadcast((of.shape[0], arg_fea.shape[1], of.shape[2]))
            arx = Linspace(0, arg_fea.shape[0], arg_fea.shape[0], endpoint=False)
            arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
            ary = Linspace(0, arg_fea.shape[1], arg_fea.shape[1], endpoint=False)
            ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
            of = of.add_axis(3)
            arx = arx.add_axis(3)
            ary = ary.add_axis(3)
            idxmap = Astype(Concat([arx, ary, of], axis=3), np.int32)
            """
            sample = []
            for i in range(arg_fea.partial_shape[0]):
                for j in range(arg_fea.partial_shape[1]):
                    sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
            sample = Concat(sample, axis = 0)
            """
            sample = IndexingRemap(arg_fea, idxmap).reshape(
                inp.shape[0], inp.shape[1], bilx.shape[1], -1)
            bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            sample *= bilx * bily
            outputs.append(sample)

    output = outputs[0]
    for i in outputs[1:]:
        output += i
    return conv_bn(output, ker_shape, 3, 0, chl, isrelu)
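# A compact NumPy rendering of the bilinear sampling that dfconv builds out of
# Floor/Ceil and the four corner terms: each fractional coordinate gathers its
# four neighbours, weighted (1 - fx)(1 - fy) and so on. Toy sizes; the border
# clamp mirrors the Max(Min(...)) above.
import numpy as np

img = np.arange(25, dtype=np.float64).reshape(5, 5)
px, py = 1.3, 2.6                               # fractional sample point
x0, y0 = int(np.floor(px)), int(np.floor(py))
x1, y1 = min(x0 + 1, 4), min(y0 + 1, 4)         # clamp to the border
fx, fy = px - x0, py - y0
val = (img[x0, y0] * (1 - fx) * (1 - fy) +
       img[x1, y0] * fx * (1 - fy) +
       img[x0, y1] * (1 - fx) * fy +
       img[x1, y1] * fx * fy)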
def dfpooling(name, inp, window=2, padding=0, dx=[0, 1], dy=[0, 1]):
    # inp = ConstProvider([[[[1, 2], [3, 4]]]], dtype=np.float32)
    """
    Add a new conv & bn to ensure that the feature map has variance 1.
    """
    ker_shape = window
    stride = window

    offsetlay = Conv2D(
        name + "conv", inp,
        kernel_shape=3,
        stride=1,
        padding=1,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (3**2 * inp.partial_shape[1]))**0.5),
        nonlinearity=Identity()
    )
    # offsetlay = BN(name + "BN", offsetlay, eps=1e-9)

    offsetx = Conv2D(
        name + "conv1x", offsetlay,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[2]))**0.5),
        nonlinearity=Identity()
    )
    offsety = Conv2D(
        name + "conv1y", offsetlay,
        kernel_shape=ker_shape,
        stride=stride,
        padding=padding,
        output_nr_channel=ker_shape**2,
        W=G(mean=0, std=(1 / (ker_shape**2 * inp.partial_shape[3]))**0.5),
        nonlinearity=Identity()
    )
    offset = Concat([offsetx, offsety], axis=1)

    ndim = ker_shape**2 * offsetx.partial_shape[2] * offsetx.partial_shape[3] * 2
    offset = FullyConnected(
        name + "offset", offsetx,
        output_dim=ndim,
        W=G(mean=0, std=(1 / ndim)**2),
        # W = C(0),
        b=C(0),
        nonlinearity=Identity()
    )
    offsetx = offset[:, :ndim // 2].reshape(offsetx.shape)
    offsety = offset[:, ndim // 2:].reshape(offsety.shape)
    """
    offsetx = FullyConnected(
        name + "offsetx", offsetx,
        output_dim = ndim,
        W = G(mean = 0, std = gamma / ndim),
        b = C(0),
        nonlinearity = Identity()
    )
    offsetx = offsetx.reshape(offsety.shape)
    offsety = FullyConnected(
        name + "offsety", offsety,
        output_dim = ndim,
        W = G(mean = 0, std = gamma / ndim),
        b = C(0),
        nonlinearity = Identity()
    )
    offsety = offsety.reshape(offsetx.shape)
    print(offsety.partial_shape)
    """
    # offsetx = ZeroGrad(offsetx)
    # offsety = ZeroGrad(offsety)

    outputs = []
    for sx in range(2):
        for sy in range(2):
            if sx == 0:
                ofx = Floor(offsetx)
                bilx = 1 - (offsetx - ofx)
            else:
                ofx = Ceil(offsetx)
                bilx = 1 - (ofx - offsetx)
            if sy == 0:
                ofy = Floor(offsety)
                bily = 1 - (offsety - ofy)
            else:
                ofy = Ceil(offsety)
                bily = 1 - (ofy - offsety)
            """
            No padding
            padding1 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], 1, inp.partial_shape[3])))
            padding2 = ConstProvider(np.zeros((inp.partial_shape[0], inp.partial_shape[1], inp.partial_shape[2] + 2, 1)))
            arg_fea = Concat([padding1, inp, padding1], axis = 2)
            arg_fea = Concat([padding2, arg_fea, padding2], axis = 3)
            """
            arg_fea = inp

            one_mat = ConstProvider(1, dtype=np.int32).add_axis(0).broadcast(
                (ofx.shape[2], ofx.shape[3]))
            affx = (Cumsum(one_mat, axis=0) - 1) * stride
            affy = (Cumsum(one_mat, axis=1) - 1) * stride
            ofx = ofx + affx.dimshuffle('x', 'x', 0, 1)
            ofy = ofy + affy.dimshuffle('x', 'x', 0, 1)

            one_mat = ConstProvider(
                np.ones((ker_shape, ofx.partial_shape[2], ofx.partial_shape[3])))
            ofx += Concat([one_mat * i for i in dx], axis=0).dimshuffle('x', 0, 1, 2)
            one_mat = ones((1, ofx.partial_shape[2], ofx.partial_shape[3]))
            one_mat = Concat([one_mat * i for i in dy], axis=0)
            one_mat = Concat([one_mat] * ker_shape, axis=0)
            ofy += one_mat.dimshuffle('x', 0, 1, 2)

            ofx = Max(Min(ofx, arg_fea.partial_shape[2] - 1), 0)
            ofy = Max(Min(ofy, arg_fea.partial_shape[3] - 1), 0)

            def DeformReshape(inp, ker_shape):
                inp = inp.reshape(inp.shape[0], ker_shape, ker_shape,
                                  inp.shape[2], inp.partial_shape[3])
                inp = inp.dimshuffle(0, 3, 1, 4, 2)
                inp = inp.reshape(inp.shape[0], inp.shape[1] * inp.shape[2],
                                  inp.shape[3] * inp.shape[4])
                return inp

            ofx = DeformReshape(ofx, ker_shape)
            ofy = DeformReshape(ofy, ker_shape)
            bilx = DeformReshape(bilx, ker_shape)
            bily = DeformReshape(bily, ker_shape)

            of = ofx * arg_fea.partial_shape[2] + ofy
            arg_fea = arg_fea.reshape(arg_fea.shape[0], arg_fea.shape[1], -1)
            of = of.reshape(ofx.shape[0], -1)
            of = of.dimshuffle(0, 'x', 1)
            # of = Concat([of] * arg_fea.partial_shape[1], axis = 1)
            of = of.broadcast((of.shape[0], arg_fea.shape[1], of.shape[2]))
            arx = Linspace(0, arg_fea.shape[0], arg_fea.shape[0], endpoint=False)
            arx = arx.add_axis(1).add_axis(2).broadcast(of.shape)
            ary = Linspace(0, arg_fea.shape[1], arg_fea.shape[1], endpoint=False)
            ary = ary.add_axis(0).add_axis(2).broadcast(of.shape)
            of = of.add_axis(3)
            arx = arx.add_axis(3)
            ary = ary.add_axis(3)
            idxmap = Astype(Concat([arx, ary, of], axis=3), np.int32)
            """
            sample = []
            for i in range(arg_fea.partial_shape[0]):
                for j in range(arg_fea.partial_shape[1]):
                    sample.append(arg_fea[i][j].ai[of[i][j]].dimshuffle('x', 0))
            sample = Concat(sample, axis = 0)
            """
            sample = IndexingRemap(arg_fea, idxmap).reshape(
                inp.shape[0], inp.shape[1], bilx.shape[1], -1)
            bilx = bilx.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            bily = bily.dimshuffle(0, 'x', 1, 2).broadcast(sample.shape)
            sample *= bilx * bily
            outputs.append(sample)

    output = outputs[0]
    for i in outputs[1:]:
        output += i
    return Pooling2D(name, output, window=2, mode="AVERAGE")
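# How the (arx, ary, of) index map above addresses a flattened feature map:
# after reshaping (N, C, H, W) to (N, C, H*W), the flat index of (x, y) is
# x * W + y, and NumPy's take_along_axis plays the role of IndexingRemap.
# Toy shapes; follows the square-map convention used above.
import numpy as np

N, C, H, W = 2, 3, 4, 4
fea = np.random.randn(N, C, H, W).reshape(N, C, -1)   # flatten the spatial axes
ofx = np.random.randint(0, H, size=(N, 6))            # integer sample rows
ofy = np.random.randint(0, W, size=(N, 6))            # integer sample cols
of = (ofx * W + ofy)[:, None, :]                      # flat index, x * W + y
sample = np.take_along_axis(fea, np.broadcast_to(of, (N, C, 6)), axis=2)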
conv3 = Conv2D("conv3", pooling1,
               kernel_shape=3,
               output_nr_channel=10,
               W=G(mean=0.0001, std=(1 / (5 * 3 * 3))**0.5),
               b=C(0),
               padding=(1, 1),
               nonlinearity=ReLU())
conv4 = Conv2D("conv4", conv3,
               kernel_shape=3,
               output_nr_channel=10,
               W=G(mean=0.0001, std=(1 / (10 * 3 * 3))**0.5),
               b=C(0),
               padding=(1, 1),
               nonlinearity=ReLU())
pooling2 = Pooling2D("pooling2", conv4, window=(2, 2), mode="max")
feature = pooling2.reshape((-1, 7 * 7 * 10))
fc1 = FC("fc1", feature,
         output_dim=100,
         W=G(mean=0.0001, std=(1 / 490)**0.5),
         b=C(0),
         nonlinearity=ReLU())
fc2 = FC("fc2", fc1,
         output_dim=10,
         W=G(mean=0, std=(1 / 100)**0.5),
         b=C(0),
         nonlinearity=Identity())
# output_mat = Exp(fc2) / Exp(fc2).sum(axis=1).dimshuffle(0, 'x')
pred = Softmax("pred", fc2)
label = DataProvider(name="label", shape=(minibatch_size, ), dtype=np.int32)
# loss = -Log(indexing_one_hot(output_mat, 1, label)).mean()
loss = CrossEntropyLoss(pred, label)
network = Network(pred, loss)
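# The commented-out lines above spell out Softmax and CrossEntropyLoss by
# hand; this is the same arithmetic in NumPy for one minibatch. The logits and
# labels are illustrative stand-ins for fc2 and the label provider.
import numpy as np

logits = np.random.randn(4, 10)                                    # fc2 output, (batch, classes)
label = np.array([3, 0, 7, 1])
pred = np.exp(logits) / np.exp(logits).sum(axis=1, keepdims=True)  # Softmax
loss = -np.log(pred[np.arange(4), label]).mean()                   # CrossEntropyLoss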