Example 1
def create_net(num_classes=1001,
               sample_shape=(3, 299, 299),
               is_training=True,
               dropout_keep_prob=0.8,
               final_endpoint='InceptionV4/Mixed_7d',
               aux_endpoint='InceptionV4/Mixed_6e'):
    """Creates the Inception V4 model.

    Args:
        num_classes: number of predicted classes.
        sample_shape: input sample shape as (channels, height, width).
        is_training: whether the net is constructed for training.
        dropout_keep_prob: float, the fraction of activations to keep before
            the final layer.
        final_endpoint, aux_endpoint: see inception_v4_base().

    Returns:
        logits: the logits outputs of the model.
        end_points: the set of end_points from the inception model.
    """
    name = 'InceptionV4'
    net, end_points = inception_v4_base(sample_shape,
                                        final_endpoint=final_endpoint,
                                        aux_endpoint=aux_endpoint)
    # Auxiliary Head logits
    if aux_endpoint is not None:
        # 17 x 17 x 1024
        aux_logits = end_points[aux_endpoint + '-aux']
        blk = name + '/AuxLogits'
        net.add(
            AvgPooling2D('%s/AvgPool_1a_5x5' % blk,
                         5,
                         stride=3,
                         border_mode='VALID'), aux_logits)
        t = conv2d(net, '%s/Conv2d_1b_1x1' % blk, 128, 1)
        # 768 filters with the kernel spanning the full feature map -> 1x1 output
        conv2d(net,
               '%s/Conv2d_2a' % blk,
               768,
               t.get_output_sample_shape()[1:3],
               border_mode='VALID')
        net.add(Flatten('%s/flat' % blk))
        end_points[blk] = net.add(Dense('%s/Aux_logits' % blk, num_classes))

    # Final pooling and prediction
    # 8 x 8 x 1536
    blk = name + '/Logits'
    last_layer = end_points[final_endpoint]
    net.add(
        AvgPooling2D('%s/AvgPool_1a' % blk,
                     last_layer.get_output_sample_shape()[1:3],
                     border_mode='VALID'), last_layer)
    # 1 x 1 x 1536
    net.add(Dropout('%s/Dropout_1b' % blk, 1 - dropout_keep_prob))
    net.add(Flatten('%s/PreLogitsFlatten' % blk))
    # 1536
    end_points[blk] = net.add(Dense('%s/Logits' % blk, num_classes))
    return net, end_points
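
A minimal usage sketch, assuming this constructor sits in Apache SINGA's model-zoo module next to the inception_v4_base() and conv2d() helpers it calls:

# Hypothetical usage; requires Apache SINGA plus the rest of the module above.
net, end_points = create_net(num_classes=1001, is_training=False)
# end_points maps block names to layers, including the two heads:
print('InceptionV4/Logits' in end_points)      # True
print('InceptionV4/AuxLogits' in end_points)   # True (aux_endpoint was set)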
Example 2
def create_net(depth, nb_classes, batchnorm=False, use_cpu=False):
    '''VGG-style net: conv stages from cfg[depth] plus a 3-layer FC classifier.'''
    if use_cpu:
        layer.engine = 'singacpp'
    net = ffnet.FeedForwardNet()
    net = create_layers(net, cfg[depth], (3, 224, 224), batchnorm)
    net.add(Flatten('flat'))
    net.add(Dense('dense/classifier.0', 4096))
    net.add(Activation('act/classifier.1'))
    net.add(Dropout('dropout/classifier.2'))
    net.add(Dense('dense/classifier.3', 4096))
    net.add(Activation('act/classifier.4'))
    net.add(Dropout('dropout/classifier.5'))
    net.add(Dense('dense/classifier.6', nb_classes))
    return net
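
A hedged sketch of calling this VGG-style constructor; cfg and create_layers() live elsewhere in the same module, and the depth of 16 and the 1000-class head are assumptions for illustration:

# Hypothetical usage; needs the surrounding module's cfg and create_layers().
net = create_net(16, 1000, batchnorm=True, use_cpu=True)
for name in net.param_names():   # conv/dense weights and biases, in order
    print(name)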
Example 3
def create_preact_resnet(depth=200):
    '''Pre-activation ResNet: batchnorm and ReLU moved before the conv layer in each block.'''
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
    conf = cfg[depth]
    if depth > 34:
        stage(0, net, conf[0], 64, 64, 256, 1, bottleneck, preact=True)
        stage(1, net, conf[1], 256, 128, 512, 2, bottleneck, preact=True)
        stage(2, net, conf[2], 512, 256, 1024, 2, bottleneck, preact=True)
        stage(3, net, conf[3], 1024, 512, 2048, 2, bottleneck, preact=True)
    else:
        stage(0, net, conf[0], 64, 64, 64, 1, basicblock, preact=True)
        stage(1, net, conf[1], 64, 128, 128, 2, basicblock, preact=True)
        stage(2, net, conf[2], 128, 256, 256, 2, basicblock, preact=True)
        stage(3, net, conf[3], 256, 512, 512, 2, basicblock, preact=True)
    net.add(BatchNormalization('final-bn'))
    net.add(Activation('final-relu'))
    net.add(AvgPooling2D('avg', 7, 1, pad=0))
    net.add(Flatten('flat'))
    net.add(Dense('dense', 1000))
    return net
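
Both this constructor and the next one index cfg[depth] for the number of blocks per stage; cfg is defined elsewhere in the module. For the standard depths, the canonical counts from He et al. (an assumption about this particular module) would be:

# Assumed contents of cfg; these are the canonical ResNet stage sizes.
cfg = {18: (2, 2, 2, 2), 34: (3, 4, 6, 3), 50: (3, 4, 6, 3),
       101: (3, 4, 23, 3), 152: (3, 8, 36, 3), 200: (3, 24, 36, 3)}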
Example 4
def create_resnet(depth=18):
    '''Original ResNet, where there is a ReLU after the addition layer.'''
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))
    conf = cfg[depth]
    if depth > 34:
        stage(0, net, conf[0], 64, 64, 256, 1, bottleneck)
        stage(1, net, conf[1], 256, 128, 512, 2, bottleneck)
        stage(2, net, conf[2], 512, 256, 1024, 2, bottleneck)
        stage(3, net, conf[3], 1024, 512, 2048, 2, bottleneck)
    else:
        stage(0, net, conf[0], 64, 64, 64, 1, basicblock)
        stage(1, net, conf[1], 64, 128, 128, 2, basicblock)
        stage(2, net, conf[2], 128, 256, 256, 2, basicblock)
        stage(3, net, conf[3], 256, 512, 512, 2, basicblock)
    net.add(AvgPooling2D('avg', 7, 1, pad=0))
    net.add(Flatten('flat'))
    net.add(Dense('dense', 1000))
    return net
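
A hedged usage sketch; stage(), bottleneck and basicblock come from the surrounding module:

# Hypothetical usage; builds ResNet-18 over a fixed (3, 224, 224) input.
net = create_resnet(depth=18)
print('%d param tensors' % len(net.param_names()))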
Example 5
def create_net(shape, weight_path='bvlc_googlenet.pickle'):
    '''GoogLeNet over ImageNet, with params converted from the BVLC Caffe model.'''
    net = ffnet.FeedForwardNet()
    net.add(Conv2D('conv1/7x7_s2', 64, 7, 2, pad=3, input_sample_shape=shape))
    c1 = net.add(Activation('conv1/relu_7x7'))
    pool1 = pool(net, c1, 'pool1/3x3_s2', 3, 2)
    norm1 = net.add(LRN('pool1/norm1', 5, 0.0001, 0.75))
    c3x3r = conv(net, norm1, 'conv2', 64, 1, suffix='3x3_reduce')
    c3x3 = conv(net, c3x3r, 'conv2', 192, 3, pad=1, suffix='3x3')
    norm2 = net.add(LRN('conv2/norm2', 5, 0.0001, 0.75))
    pool2 = pool(net, norm2, 'pool2/3x3_s2', 3, 2)

    i3a = inception(net, pool2, 'inception_3a', 64, 96, 128, 16, 32, 32)
    i3b = inception(net, i3a, 'inception_3b', 128, 128, 192, 32, 96, 64)
    pool3 = pool(net, i3b, 'pool3/3x3_s2', 3, 2)
    i4a = inception(net, pool3, 'inception_4a', 192, 96, 208, 16, 48, 64)
    i4b = inception(net, i4a, 'inception_4b', 160, 112, 224, 24, 64, 64)
    i4c = inception(net, i4b, 'inception_4c', 128, 128, 256, 24, 64, 64)
    i4d = inception(net, i4c, 'inception_4d', 112, 144, 288, 32, 64, 64)
    i4e = inception(net, i4d, 'inception_4e', 256, 160, 320, 32, 128, 128)
    pool4 = pool(net, i4e, 'pool4/3x3_s2', 3, 2)
    i5a = inception(net, pool4, 'inception_5a', 256, 160, 320, 32, 128, 128)
    i5b = inception(net, i5a, 'inception_5b', 384, 192, 384, 48, 128, 128)
    pool5 = net.add(AvgPooling2D('pool5/7x7_s1', 7, 1, pad=0))
    drop5 = net.add(Dropout('drop', 0.4))
    flat = net.add(Flatten('flat'))
    dense = net.add(Dense('loss3/classifier', 1000))
    # prob=net.add(Softmax('softmax'))

    net.load(weight_path, use_pickle=True)
    print('total number of param tensors: %d' % len(net.param_names()))
    # SINGA and Caffe have different layout for the weight matrix of the dense
    # layer
    for key, val in zip(net.param_names(), net.param_values()):
        if key == 'loss3/classifier_weight' or key == 'loss3/classifier/weight':
            tmp = tensor.to_numpy(val)
            tmp = tmp.reshape(tmp.shape[::-1])
            val.copy_from_numpy(np.transpose(tmp))
    return net
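
The reshape-then-transpose at the end converts the classifier weight from Caffe's (out_features, in_features) row-major layout to SINGA's (in_features, out_features). A self-contained numpy sketch of why the trick works:

import numpy as np

# Caffe stores the dense weight as (out, in), row-major.
caffe_w = np.arange(6.0).reshape(2, 3)
# After net.load(), SINGA holds the same flat buffer under shape (in, out),
# so every value sits at the wrong index.
loaded = caffe_w.ravel().reshape(3, 2)
# Reinterpret the buffer under the reversed shape, then transpose:
tmp = loaded.reshape(loaded.shape[::-1])   # (2, 3): indices match the data again
fixed = np.transpose(tmp)                  # (3, 2): the correct (in, out) layout
assert np.array_equal(fixed, caffe_w.T)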
Example 6
def create_wide_resnet(depth=50):
    '''Similar to the original ResNet except that a <= b <= c for the
    bottleneck block; the stage sizes are hard-coded, so depth is unused.'''
    net = ffnet.FeedForwardNet()
    net.add(
        Conv2D('input-conv',
               64,
               7,
               2,
               pad=3,
               use_bias=False,
               input_sample_shape=(3, 224, 224)))
    net.add(BatchNormalization('input-bn'))
    net.add(Activation('input_relu'))
    net.add(MaxPooling2D('input_pool', 3, 2, pad=1))

    stage(0, net, 3, 64, 128, 256, 1, bottleneck)
    stage(1, net, 4, 256, 256, 512, 2, bottleneck)
    stage(2, net, 6, 512, 512, 1024, 2, bottleneck)
    stage(3, net, 3, 1024, 1024, 2048, 2, bottleneck)

    net.add(AvgPooling2D('avg_pool', 7, 1, pad=0))
    net.add(Flatten('flat'))
    net.add(Dense('dense', 1000))
    return net
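
A hedged usage sketch, relying on the module's stage() and bottleneck helpers:

# Hypothetical usage; the architecture is fixed at the (3, 4, 6, 3) layout.
net = create_wide_resnet()
print('%d param tensors' % len(net.param_names()))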