Example #1
def tf_net2deeper(model, weight, target_layer):
    # Load the teacher network and compute Net2DeeperNet weights for a new
    # layer to be inserted right after target_layer.
    n2n = Net2Net()
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        net = load_teacher_net(sess, model, weight)
        w1, b1 = get_weight_bias_of_layer(net, target_layer)
        # deeper() returns identity-like weights so the grown network initially
        # computes the same function as the teacher.
        new_w, new_b = n2n.deeper(w1, True)
    return new_w, new_b
Example #2
def tf_net2wider_rand(model, weight, target_layer, next_layer, new_width):
    # Widen target_layer to new_width units/filters with the random-padding
    # baseline (wider_rand) and adapt the weights of next_layer accordingly.
    n2n = Net2Net()
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        net = load_teacher_net(sess, model, weight)
        w1, b1 = get_weight_bias_of_layer(net, target_layer)
        w2, b2 = get_weight_bias_of_layer(net, next_layer)
        nw1, nb1, nw2 = n2n.wider_rand(w1, b1, w2, new_width)
    # The bias of next_layer is not affected by widening and is returned as-is.
    return nw1, nb1, nw2, b2
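
Both TensorFlow helpers above only compute the grown weight tensors; wiring them into a deeper or wider graph is left to the caller. A minimal call sketch, in which the model/weight file names, layer names, and the new width are hypothetical placeholders rather than values from the original code:

new_w, new_b = tf_net2deeper('teacher.json', 'teacher.h5', 'conv2')
nw1, nb1, nw2, b2 = tf_net2wider_rand('teacher.json', 'teacher.h5',
                                      'conv2', 'conv3', new_width=128)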
Example #3
def get_deeper_weights(ref_layer):
    '''
    Calculate the weights of a new, deeper layer from ref_layer using the
    Net2Net class. For convolutional layers the kernel axes have to be
    swapped into the order Net2Net expects:
        Keras conv kernel:   (OutChannel, InChannel, kH, kW)
        Net2Net conv kernel: (kH, kW, InChannel, OutChannel)
    '''
    params = ref_layer.get_weights()
    n2n = Net2Net()
    if is_convolutional(ref_layer):
        # (Out, In, kH, kW) -> (kH, kW, In, Out)
        weights = params[0].swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)
        new_w, new_b = n2n.deeper(weights, True)
        # (kH, kW, In, Out) -> (In, Out, kH, kW); the new layer has the same
        # number of input and output channels.
        new_w = new_w.swapaxes(0, 2).swapaxes(1, 3)
    else:
        # Dense weights can be passed to Net2Net as-is.
        weights = params[0]
        new_w, new_b = n2n.deeper(weights, True)
    return new_w, new_b
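
The chain of swapaxes calls in the convolutional branch amounts to a single transpose from (OutChannel, InChannel, kH, kW) to (kH, kW, InChannel, OutChannel). A small NumPy sketch (with arbitrarily chosen shapes) to check the permutation:

import numpy as np

w = np.arange(8 * 3 * 5 * 5).reshape(8, 3, 5, 5)        # (Out, In, kH, kW)
swapped = w.swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)
assert swapped.shape == (5, 5, 3, 8)                     # (kH, kW, In, Out)
assert np.array_equal(swapped, w.transpose(2, 3, 1, 0))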
Example #4
def net_to_deeper(model_teacher, model, l):
    # Collect the weights and biases of every weighted layer of the teacher.
    ws = []
    bs = []
    for layer_t in model_teacher.layers:
        params = layer_t.get_weights()
        if params:
            ws.append(params[0])
            bs.append(params[1])

    # Derive identity-like weights for a new layer placed after weighted
    # layer l (1-based), so the student starts out computing the same
    # function as the teacher.
    n2n = Net2Net()
    new_w, new_b = n2n.deeper(ws[l - 1], True)
    ws.insert(l, new_w)
    bs.insert(l, new_b)

    # Copy the extended weight list into the (already deeper) student model.
    i = 0
    for layer_s in model.layers:
        if layer_s.get_weights():
            layer_s.set_weights([ws[i], bs[i]])
            i += 1

    return model
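
A minimal usage sketch for net_to_deeper under assumed conditions (not part of the original snippet): the teacher has a single Dense layer, the student repeats that layer once, and the Net2Net deeper routine handles 2-D fully connected weights; the sizes are illustrative only.

from keras.models import Sequential
from keras.layers import Dense

teacher = Sequential([Dense(64, activation='relu', input_shape=(32,))])
# The student already contains the extra layer; net_to_deeper fills its weights.
student = Sequential([
    Dense(64, activation='relu', input_shape=(32,)),
    Dense(64, activation='relu'),
])
student = net_to_deeper(teacher, student, 1)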
Example #5
def net_to_wider(model_teacher, model, l, new_width_conv):
    # Collect the weights and biases of every weighted layer of the teacher.
    ws = []
    bs = []
    for layer in model_teacher.layers:
        params = layer.get_weights()
        if params:
            ws.append(params[0])
            bs.append(params[1])

    # Widen weighted layer l (0-based) to new_width_conv filters and adapt
    # the input weights of the following layer; its bias stays unchanged.
    w1 = ws[l]
    b1 = bs[l]
    w2 = ws[l + 1]
    n2n = Net2Net()
    nw1, nb1, nw2 = n2n.wider(w1, b1, w2, new_width_conv, True)
    ws[l] = nw1
    bs[l] = nb1
    ws[l + 1] = nw2

    # Copy the updated weight list into the (already wider) student model.
    i = 0
    for layer in model.layers:
        if layer.get_weights():
            layer.set_weights([ws[i], bs[i]])
            i += 1
    return model
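
A corresponding sketch for net_to_wider, again with assumed layer shapes (not from the original snippet) and assuming the Net2Net wider routine accepts kernels in the backend's (kH, kW, In, Out) layout, since the function passes Keras weights through unmodified: the teacher's first conv layer is widened from 16 to 32 filters, and the student is built with the wider layer so the transformed weights fit.

from keras.models import Sequential
from keras.layers import Conv2D

teacher = Sequential([
    Conv2D(16, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)),
    Conv2D(32, (3, 3), activation='relu', padding='same'),
])
student = Sequential([
    Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)),
    Conv2D(32, (3, 3), activation='relu', padding='same'),
])
student = net_to_wider(teacher, student, 0, 32)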