Example #1
from caffe import NetSpec, layers as L, params as P

def lenet(lmdbData, lmdbLabel, batch_size):
    # Build the network definition with the NetSpec API and return it as a
    # NetParameter protobuf message.
    n = NetSpec()

    # Data and labels are read from two separate LMDB sources; pixel values
    # are rescaled to [0, 1].
    n.data  = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdbData,
                     transform_param=dict(scale=1./255), ntop=1)
    n.label = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdbLabel,
                     transform_param=dict(scale=1./255), ntop=1)

    # Two convolution/pooling stages followed by two fully connected layers.
    n.conv1 = L.Convolution(n.data, kernel_size=4, num_output=200, weight_filler=dict(type='xavier'))
    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.conv2 = L.Convolution(n.pool1, kernel_size=3, num_output=50, weight_filler=dict(type='xavier'))
    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=1, pool=P.Pooling.MAX)
    n.fc1   = L.InnerProduct(n.pool2, num_output=200, weight_filler=dict(type='xavier'))
    n.relu1 = L.ReLU(n.fc1, in_place=True)
    n.score = L.InnerProduct(n.relu1, num_output=1200, weight_filler=dict(type='xavier'))

    # Euclidean loss implemented as a Python layer (class EuclideanLossLayer in pyloss.py).
    n.loss  = L.Python(n.score, n.label, module='pyloss', layer='EuclideanLossLayer')

    return n.to_proto()
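
A minimal usage sketch: lenet() only builds the NetParameter, so serializing it to a prototxt file is left to the caller. The LMDB paths, batch size, and output file name below are hypothetical.

# Hypothetical paths and batch size.
net_proto = lenet('train_data_lmdb', 'train_label_lmdb', batch_size=64)
with open('lenet_train.prototxt', 'w') as f:
    f.write(str(net_proto))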
Example #2
from caffe import NetSpec, layers as L

def deploy_tail(self, last_top):
    # Method of a net-builder class: emits the deploy-time tail, a Softmax
    # over the blob named by last_top (no loss layer at deploy time).
    n = NetSpec()
    n.score = L.Softmax(bottom=last_top)
    return n.to_proto()
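
A usage sketch, assuming this method belongs to some net-builder object (called builder here, hypothetically) and that 'fc8' is the name of the last top blob of the deploy body:

# Hypothetical: 'builder' is an instance of the class this method belongs to.
tail_proto = builder.deploy_tail('fc8')
with open('deploy_tail.prototxt', 'w') as f:
    f.write(str(tail_proto))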