Example #1
def build_berry_alexnet(input_layer, num_classes, model_params={}):
    from berry import BerryModel
    from berry import layers

    nn = BerryModel()
    nn.add(input_layer)
    nn.add(layers.Convolution2D(
        nn.last, 96, 11, stride=4, pad='VALID', activation='relu',
        W_stddev=1e-2, b_val=0.1))
    nn.add(layers.MaxPooling2D(nn.last, 3, 2))
    nn.add(layers.Convolution2D(nn.last, 256, 5, pad='SAME', activation='relu',
                                W_stddev=1e-2, b_val=0.1))
    nn.add(layers.MaxPooling2D(nn.last, 3, 2))
    nn.add(layers.Convolution2D(nn.last, 384, 3, pad='SAME', activation='relu',
                                W_stddev=1e-2, b_val=0.1))
    nn.add(layers.Convolution2D(nn.last, 384, 3, pad='SAME', activation='relu',
                                W_stddev=1e-2, b_val=0.1))
    nn.add(layers.Convolution2D(nn.last, 256, 3, pad='SAME', activation='relu',
                                W_stddev=1e-2, b_val=0.1))
    nn.add(layers.MaxPooling2D(nn.last, 3, 2))
    nn.add(layers.Flatten(nn.last))
    nn.add(layers.Dense(
        nn.last, 4096, activation='relu', W_stddev=5e-3, b_val=0.1))
    nn.add(layers.Dropout(nn.last, .5))
    nn.add(layers.Dense(
        nn.last, 4096, activation='relu', W_stddev=5e-3, b_val=0.1))
    nn.add(layers.Dropout(nn.last, .5))
    nn.add(layers.Dense(
        nn.last, num_classes, activation='softmax', W_stddev=5e-3, b_val=0.1))
    return nn
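
The builder receives an already constructed input layer, chains berry layers together through nn.last, and returns the assembled BerryModel. A minimal usage sketch follows; make_input_layer is a hypothetical stand-in for whatever input constructor the project provides, since only BerryModel, nn.last and the .output attribute are actually visible in these examples.

# Hypothetical usage sketch -- make_input_layer is an assumed helper,
# not part of the berry API shown in the example above.
input_layer = make_input_layer(shape=(227, 227, 3))      # AlexNet-sized images
nn = build_berry_alexnet(input_layer, num_classes=1000)
logits = nn.last.output                                   # tensor of the final softmax layer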
Example #2
File: lenet.py Project: nagyistge/braid
def build_berry_lenet(input_layer, num_classes, model_params={}):
    from berry import layers, BerryModel

    nn = BerryModel()
    nn.add(input_layer)
    nn.add(
        layers.Convolution2D(nn.last,
                             20,
                             5,
                             pad='VALID',
                             activation='sigmoid',
                             W_stddev=1e-1))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))
    nn.add(
        layers.Convolution2D(nn.last,
                             50,
                             5,
                             pad='SAME',
                             W_stddev=1e-1,
                             activation='sigmoid'))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))
    nn.add(layers.Flatten(nn.last))
    nn.add(layers.Dense(nn.last, 500, activation='relu', W_stddev=5e-3))
    nn.add(
        layers.Dense(nn.last, num_classes, activation='softmax',
                     W_stddev=5e-3))
    return nn
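
For reference, assuming a 28x28x1 MNIST-style input (the code itself does not fix the input size), the shapes work out as follows: the first 5x5 'VALID' convolution yields 24x24x20, the 2x2 pooling halves that to 12x12x20, the 'SAME' convolution keeps 12x12 with 50 feature maps, the second pooling gives 6x6x50, and Flatten produces an 1800-dimensional vector feeding the 500-unit dense layer.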
Example #3
def inference(i):
    from collections import OrderedDict
    from berry import layers

    layer_dict = OrderedDict()
    layer_dict['input'] = i
    l = layers.Convolution2D(i,
                             96,
                             11,
                             stride=4,
                             pad='SAME',
                             activation='relu',
                             name='conv_1')
    layer_dict[l.name] = l
    l = layers.MaxPooling2D(l, 3, 2, name='pool_1')
    layer_dict[l.name] = l
    l = layers.Convolution2D(l,
                             256,
                             5,
                             pad='SAME',
                             activation='relu',
                             name='conv_2')
    layer_dict[l.name] = l
    l = layers.MaxPooling2D(l, 3, 2, name='pool_2')
    layer_dict[l.name] = l
    l = layers.Convolution2D(l,
                             384,
                             3,
                             pad='SAME',
                             activation='relu',
                             name='conv_3')
    layer_dict[l.name] = l
    l = layers.Convolution2D(l,
                             384,
                             3,
                             pad='SAME',
                             activation='relu',
                             name='conv_4')
    layer_dict[l.name] = l
    l = layers.Convolution2D(l,
                             256,
                             3,
                             pad='SAME',
                             activation='relu',
                             name='conv_5')
    layer_dict[l.name] = l
    l = layers.MaxPooling2D(l, 3, 2, name='pool_5')
    layer_dict[l.name] = l
    l = layers.Flatten(l, name='flat')
    layer_dict[l.name] = l
    l = layers.Dense(l, 4096, activation='relu', name='fc_1')
    layer_dict[l.name] = l
    l = layers.Dropout(l, 0.5, name='drop_1')
    layer_dict[l.name] = l
    l = layers.Dense(l, 4096, activation='relu', name='fc_2')
    layer_dict[l.name] = l
    l = layers.Dropout(l, 0.5, name='drop_2')
    layer_dict[l.name] = l
    l = layers.Dense(l, 50, activation='softmax', name='output')
    layer_dict[l.name] = l
    print(list(layer_dict.keys()))
    return layer_dict, l.output
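
Because every layer is registered in the OrderedDict under its name, intermediate activations can be retrieved after construction. A small sketch, assuming the stored layers expose the same .output attribute used on the final layer above:

layer_dict, probs = inference(input_layer)

# Look up an intermediate feature map by the name given at construction time.
# The assumption that intermediate layers also expose .output mirrors the use
# of l.output on the final layer in the example above.
conv3_features = layer_dict['conv_3'].output
print(list(layer_dict.keys()))   # ['input', 'conv_1', 'pool_1', ..., 'output']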
Example #4
File: test_vgg.py Project: nagyistge/braid
def inference(i, keep_prob):
    from berry import layers

    print(i.get_shape())
    l = layers.Convolution2D(i, 64, 3, stride=1, pad='SAME', activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l, 64, 3, stride=1, pad='SAME', activation='relu')
    print(l.output_shape)
    l = layers.MaxPooling2D(l, 2, 2)
    print(l.output_shape)

    l = layers.Convolution2D(l,
                             128,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             128,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.MaxPooling2D(l, 2, 2)
    print(l.output_shape)

    l = layers.Convolution2D(l,
                             256,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             256,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             256,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.MaxPooling2D(l, 2, 2)
    print(l.output_shape)

    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.MaxPooling2D(l, 2, 2)
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    print(l.output_shape)
    l = layers.Convolution2D(l,
                             512,
                             3,
                             stride=1,
                             pad='SAME',
                             activation='relu')
    l = layers.MaxPooling2D(l, 2, 2)
    print(l.output_shape)
    l = layers.Flatten(l)
    l = layers.Dense(l, 4096, activation='relu')
    l = layers.Dropout(l, keep_prob)
    l = layers.Dense(l, 4096, activation='relu')
    l = layers.Dropout(l, keep_prob)
    l = layers.Dense(l, 50, activation='softmax')
    return l.output
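
As a sanity check on the printed shapes: assuming a 224x224x3 input (the size documented for the VGG-style builder below), each of the five 2x2 max-pooling stages halves the spatial resolution, so output_shape should step through 224, 112, 56, 28, 14 and finally 7 before Flatten turns the 7x7x512 volume into the input of the 4096-unit dense layers.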
Example #5
def build_berry_vgg16(input_layer, num_classes, model_params={}):
    from berry import layers, BerryModel

    nn = BerryModel()
    nn.add(input_layer)
    # Expected input_shape = (224, 224, 3)
    nn.add(
        layers.Convolution2D(nn.last,
                             64,
                             5,
                             stride=2,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             64,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))

    nn.add(
        layers.Convolution2D(nn.last,
                             128,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             128,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))

    nn.add(
        layers.Convolution2D(nn.last,
                             256,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             256,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             256,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))

    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))

    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(
        layers.Convolution2D(nn.last,
                             512,
                             3,
                             pad='SAME',
                             activation='relu',
                             init='deepnet',
                             b_val=0.0))
    nn.add(layers.MaxPooling2D(nn.last, 2, 2))

    nn.add(layers.Flatten(nn.last))
    nn.add(
        layers.Dense(nn.last,
                     4096,
                     activation='relu',
                     W_stddev=1e-2,
                     b_val=0.0))
    nn.add(layers.Dropout(nn.last, 0.5))
    nn.add(
        layers.Dense(nn.last,
                     4096,
                     activation='relu',
                     W_stddev=1e-2,
                     b_val=0.0))
    nn.add(layers.Dropout(nn.last, 0.5))
    nn.add(
        layers.Dense(nn.last,
                     num_classes,
                     activation='softmax',
                     W_stddev=1e-3,
                     b_val=0.0))

    return nn