Example #1
0
def create_resnet(depth=18):
    """Build the original ResNet, where ReLU follows each addition layer.

    Args:
        depth: key into the module-level ``cfg`` table selecting the
            per-stage block counts (e.g. 18, 34, 50, 101, 152).

    Returns:
        a ``ffnet.FeedForwardNet`` for 3x224x224 inputs ending in a
        1000-way dense layer.
    """
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
    conf = cfg[depth]
    # Depths above 34 use the 3-conv bottleneck block; shallower nets use
    # the 2-conv basic block. Each plan tuple is
    # (in_channels, mid_channels, out_channels, stride) for one stage.
    if depth > 34:
        block = bottleneck
        plans = ((64, 64, 256, 1), (256, 128, 512, 2),
                 (512, 256, 1024, 2), (1024, 512, 2048, 2))
    else:
        block = basicblock
        plans = ((64, 64, 64, 1), (64, 128, 128, 2),
                 (128, 256, 256, 2), (256, 512, 512, 2))
    for sid, (cin, cmid, cout, stride) in enumerate(plans):
        stage(sid, net, conf[sid], cin, cmid, cout, stride, block)
    net.add(AvgPooling2D('avg', 7, 1, pad=0))
    net.add(Flatten('flat'))
    net.add(Dense('dense', 1000))
    return net
Example #2
0
def create_preact_resnet(depth=200):
    """Build a pre-activation ResNet: batchnorm and ReLU are moved to
    before the conv layers inside each block, with a final BN+ReLU pair
    before pooling.

    Args:
        depth: key into the module-level ``cfg`` table selecting the
            per-stage block counts.

    Returns:
        a ``ffnet.FeedForwardNet`` for 3x224x224 inputs ending in a
        1000-way dense layer.
    """
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
    conf = cfg[depth]
    # Each plan tuple is (in_channels, mid_channels, out_channels, stride);
    # depths above 34 use the bottleneck block, otherwise the basic block.
    if depth > 34:
        block = bottleneck
        plans = ((64, 64, 256, 1), (256, 128, 512, 2),
                 (512, 256, 1024, 2), (1024, 512, 2048, 2))
    else:
        block = basicblock
        plans = ((64, 64, 64, 1), (64, 128, 128, 2),
                 (128, 256, 256, 2), (256, 512, 512, 2))
    for sid, (cin, cmid, cout, stride) in enumerate(plans):
        stage(sid, net, conf[sid], cin, cmid, cout, stride, block,
              preact=True)
    # Pre-activation nets need one last BN+ReLU since blocks end on a conv.
    net.add(BatchNormalization('final-bn'))
    net.add(Activation('final-relu'))
    net.add(AvgPooling2D('avg', 7, 1, pad=0))
    net.add(Flatten('flat'))
    net.add(Dense('dense', 1000))
    return net
Example #3
0
def create_net(depth, nb_classes, batchnorm=False, use_cpu=False):
    """Build a VGG-style net: conv feature layers plus a 3-layer classifier.

    Args:
        depth: key into the module-level ``cfg`` table for the conv config.
        nb_classes: size of the final dense (output) layer.
        batchnorm: forwarded to ``create_layers`` to toggle batchnorm.
        use_cpu: if True, switch the layer engine to 'singacpp'.

    Returns:
        a ``ffnet.FeedForwardNet`` for 3x224x224 inputs.
    """
    if use_cpu:
        layer.engine = 'singacpp'
    net = ffnet.FeedForwardNet()
    net = create_layers(net, cfg[depth], (3, 224, 224), batchnorm)
    net.add(Flatten('flat'))
    # Classifier head: two FC+ReLU+Dropout stages, then the output layer.
    # Layer names mirror torchvision's VGG 'classifier.N' numbering.
    head = (
        Dense('dense/classifier.0', 4096),
        Activation('act/classifier.1'),
        Dropout('dropout/classifier.2'),
        Dense('dense/classifier.3', 4096),
        Activation('act/classifier.4'),
        Dropout('dropout/classifier.5'),
        Dense('dense/classifier.6', nb_classes),
    )
    for lyr in head:
        net.add(lyr)
    return net
Example #4
0
def add_transition(name, net, n_channels, last=False):
    """Append a DenseNet transition (BN + ReLU, then downsampling) to ``net``.

    Args:
        name: prefix for the layer names ('<name>/norm', '<name>/relu', ...).
        net: the feed-forward net being assembled (mutated in place).
        n_channels: output channels of the 1x1 conv (non-final transitions).
        last: if True, emit the final global-average-pool + flatten instead
            of the conv + 2x2 average-pool pair.
    """
    net.add(BatchNormalization('%s/norm' % name))
    relu = net.add(Activation('%s/relu' % name))
    if not last:
        # Regular transition: 1x1 conv to n_channels, then halve the
        # spatial size with a 2x2 average pool.
        net.add(
            Conv2D('%s/conv' % name,
                   n_channels,
                   1,
                   1,
                   pad=0,
                   use_bias=conv_bias))
        net.add(AvgPooling2D('%s/pool' % name, 2, 2, pad=0))
    else:
        # Final transition: pool over the whole remaining feature map
        # (kernel = spatial dims of the ReLU output), then flatten.
        net.add(
            AvgPooling2D('%s/pool' % name,
                         relu.get_output_sample_shape()[1:3],
                         pad=0))
        net.add(Flatten('flat'))
Example #5
0
def create_net(shape, weight_path='bvlc_googlenet.pickle'):
    """Build GoogLeNet (Inception v1) and load converted Caffe weights.

    Args:
        shape: input sample shape, e.g. (3, 224, 224).
        weight_path: pickle file with the pretrained parameters.

    Returns:
        the loaded ``ffnet.FeedForwardNet`` ending in 'loss3/classifier'.
    """
    net = ffnet.FeedForwardNet()
    net.add(Conv2D('conv1/7x7_s2', 64, 7, 2, pad=3, input_sample_shape=shape))
    relu1 = net.add(Activation('conv1/relu_7x7'))
    pool(net, relu1, 'pool1/3x3_s2', 3, 2)
    norm1 = net.add(LRN('pool1/norm1', 5, 0.0001, 0.75))
    reduce2 = conv(net, norm1, 'conv2', 64, 1, suffix='3x3_reduce')
    conv(net, reduce2, 'conv2', 192, 3, pad=1, suffix='3x3')
    norm2 = net.add(LRN('conv2/norm2', 5, 0.0001, 0.75))
    blk = pool(net, norm2, 'pool2/3x3_s2', 3, 2)

    # Inception specs: (name, 1x1, 3x3reduce, 3x3, 5x5reduce, 5x5, pool_proj),
    # grouped per stage with a max-pool between stages.
    for spec in (('inception_3a', 64, 96, 128, 16, 32, 32),
                 ('inception_3b', 128, 128, 192, 32, 96, 64)):
        blk = inception(net, blk, *spec)
    blk = pool(net, blk, 'pool3/3x3_s2', 3, 2)
    for spec in (('inception_4a', 192, 96, 208, 16, 48, 64),
                 ('inception_4b', 160, 112, 224, 24, 64, 64),
                 ('inception_4c', 128, 128, 256, 24, 64, 64),
                 ('inception_4d', 112, 144, 288, 32, 64, 64),
                 ('inception_4e', 256, 160, 320, 32, 128, 128)):
        blk = inception(net, blk, *spec)
    blk = pool(net, blk, 'pool4/3x3_s2', 3, 2)
    for spec in (('inception_5a', 256, 160, 320, 32, 128, 128),
                 ('inception_5b', 384, 192, 384, 48, 128, 128)):
        blk = inception(net, blk, *spec)
    net.add(AvgPooling2D('pool5/7x7_s1', 7, 1, pad=0))
    net.add(Dropout('drop', 0.4))
    net.add(Flatten('flat'))
    net.add(Dense('loss3/classifier', 1000))
    # prob=net.add(Softmax('softmax'))

    net.load(weight_path, use_pickle=True)
    print('total num of params %d' % (len(net.param_names())))
    # SINGA and Caffe have different layout for the weight matrix of the
    # dense layer, so transpose the classifier weight after loading.
    for key, val in zip(net.param_names(), net.param_values()):
        if key in ('loss3/classifier_weight', 'loss3/classifier/weight'):
            w = tensor.to_numpy(val)
            w = w.reshape(w.shape[::-1])
            val.copy_from_numpy(np.transpose(w))
    return net
Example #6
0
def create_wide_resnet(depth=50):
    """Build a wide ResNet: like the original ResNet except the bottleneck
    channel plan satisfies a <= b <= c.

    Args:
        depth: unused — the stage layout is fixed to the 50-layer plan
            (3, 4, 6, 3 blocks); kept for signature compatibility.

    Returns:
        a ``ffnet.FeedForwardNet`` for 3x224x224 inputs ending in a
        1000-way dense layer.
    """
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))

    # Plan tuples: (num_blocks, in_channels, mid_channels, out_channels, stride).
    plans = ((3, 64, 128, 256, 1),
             (4, 256, 256, 512, 2),
             (6, 512, 512, 1024, 2),
             (3, 1024, 1024, 2048, 2))
    for sid, (nblocks, cin, cmid, cout, stride) in enumerate(plans):
        stage(sid, net, nblocks, cin, cmid, cout, stride, bottleneck)

    net.add(AvgPooling2D('avg_pool', 7, 1, pad=0))
    # NOTE(review): 'flag' looks like a typo for 'flat' (cf. the other
    # builders) but is kept byte-identical in case checkpoints key on it.
    net.add(Flatten('flag'))
    net.add(Dense('dense', 1000))
    return net