Example #1
def test_num_entries_empty(self):

    # Create an empty LMDB: open the environment and commit an empty write
    # transaction so the database files exist on disk with zero records.
    path_lmdb_empty = os.path.join(self.dir_tmp, 'empty_lmdb')
    db = lmdb.open(path_lmdb_empty, map_size=int(1e12))
    with db.begin(write=True):
        pass
    db.close()

    assert_equal(0, r.num_entries(path_lmdb_empty))
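The later tests expect fixture LMDBs with two entries; they are presumably created along these lines. This is only a minimal sketch: write_dummy_lmdb, the '0', '1', ... key format, and the use of caffe_pb2.Datum are assumptions, not the project's actual setup code.

import lmdb
import numpy as np
from caffe.proto import caffe_pb2

def write_dummy_lmdb(path, num=2):
    # Write `num` Datum records keyed '0', '1', ... (dense numeric keys).
    db = lmdb.open(path, map_size=int(1e12))
    with db.begin(write=True) as txn:
        for i in range(num):
            dat = caffe_pb2.Datum()
            dat.channels, dat.height, dat.width = 1, 1, 1
            dat.data = np.uint8([i]).tobytes()
            dat.label = i
            txn.put(str(i), dat.SerializeToString())
    db.close()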
Example #2
def est_min_num_fwd_passes(fpath_net, mode_str):
    """
    Estimate the minimum number of forward passes needed to cover the data set.
    If there are multiple sources for the same mode, num_passes is based on the last one.

    fpath_net -- path to the network definition (prototxt)
    mode_str -- 'train' or 'test'

    return -- minimum no. of forward passes to cover the data set for the given mode
    """
    from proto.proto_utils import Parser
    import read_lmdb  # assumed local module providing num_entries()
    np = Parser().from_net_params_file(fpath_net)  # parsed net parameters (shadows the usual numpy alias)

    num_passes = 0

    for l in np.layer:
        # Only consider data layers whose LMDB source matches the requested mode.
        if 'data' in l.type.lower() and mode_str.lower() in l.data_param.source.lower():
            num_entries = read_lmdb.num_entries(l.data_param.source)
            num_passes = int(num_entries / l.data_param.batch_size)
            if num_entries % l.data_param.batch_size != 0:
                print("WARNING: db size is not a multiple of the batch size. Adding another fwd. pass.")
                num_passes += 1
            print("%d fwd. passes with batch size %d" % (num_passes, l.data_param.batch_size))

    return num_passes
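A quick usage sketch (the prototxt path below is a hypothetical placeholder; any net definition whose data layers point at LMDB sources containing 'train' or 'test' in their paths would work):

fpath_net = 'models/lenet_train_test.prototxt'  # hypothetical path
print("train: %d passes" % est_min_num_fwd_passes(fpath_net, 'train'))
print("test:  %d passes" % est_min_num_fwd_passes(fpath_net, 'test'))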
Example #3
def test_num_entries_non_num(self):

    # LMDB with non-numeric keys: plain counting still works, but requesting
    # a dense numeric key ordering raises ValueError.
    assert_equal(2, r.num_entries(self.path_lmdb_non_num))
    assert_equal(2, r.num_entries(self.path_lmdb_non_num, is_num_ord_dense=False))
    assert_raises(ValueError, r.num_entries, self.path_lmdb_non_num, is_num_ord_dense=True)
Example #4
def test_num_entries_rand_ord(self):

    # LMDB whose numeric keys are not a dense 0..N-1 sequence: plain counting
    # still returns 2, but the dense-numeric-ordering shortcut gives a different count.
    assert_equal(2, r.num_entries(self.path_lmdb_rand_ord))
    assert_equal(2, r.num_entries(self.path_lmdb_rand_ord, is_num_ord_dense=False))
    assert_not_equal(2, r.num_entries(self.path_lmdb_rand_ord, is_num_ord_dense=True))
Example #5
def test_num_entries(self):

    # LMDB with two entries and dense numeric keys: all modes agree on the count.
    assert_equal(2, r.num_entries(self.path_lmdb))
    assert_equal(2, r.num_entries(self.path_lmdb, is_num_ord_dense=True))
    assert_equal(2, r.num_entries(self.path_lmdb, is_num_ord_dense=False))
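The tests above pin down the expected behaviour of r.num_entries but not its implementation. Below is a minimal sketch consistent with those assertions, assuming the default is a plain record count via lmdb's stat() and that is_num_ord_dense=True infers the count from the last key of a dense, zero-based numeric key sequence (so non-numeric keys raise ValueError and non-dense keys give a different count):

import lmdb

def num_entries(path_lmdb, is_num_ord_dense=False):
    env = lmdb.open(path_lmdb, readonly=True, lock=False)
    try:
        if not is_num_ord_dense:
            # Plain record count from the environment statistics.
            return env.stat()['entries']
        # Assume keys are '0', '1', ..., possibly zero-padded; infer the count
        # from the last key without scanning. int() raises ValueError on
        # non-numeric keys.
        with env.begin() as txn:
            cur = txn.cursor()
            if not cur.last():
                return 0
            return int(cur.key()) + 1
    finally:
        env.close()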
Example #6
        # m is the Caffe phase (caffe.TRAIN == 0, caffe.TEST == 1), so it also
        # indexes the ['train', 'test'] suffix of the destination lmdb path.
        out[m] = infer_to_lmdb(caffe.Net(fpath_net, fpath_weights, m),
                               keys,
                               num_passes,
                               dst_prefix + '%s_' + ['train', 'test'][m] + '_lmdb')
    return out

if __name__ == '__main__':
    
    base_path = '/mnt/scratch/pierre/caffe_sandbox_tryouts/'
    
    def path_to(path):
        return base_path + path

    fpath_net = path_to('learning_curve/prototxt/net0_test.prototxt')
    fpath_weights = path_to('learning_curve/snapshots/net0_snapshot_iter_10000.caffemodel')
    fpath_db = path_to('inference/mnist_%s_train_lmdb')
    
    net = caffe.Net(fpath_net, fpath_weights, caffe.TRAIN)
    keys = ['fc2', 'fc1']
    x = infer_to_lmdb_cur(net, keys, 2, fpath_db)
    
    import os
    print 'Do lmdbs exist?'
    print [os.path.isdir(fpath_db % (k,)) for k in keys]
    print 'Number of entries in lmdbs:'
    print [read_lmdb.num_entries(fpath_db % (k,)) for k in keys]

    # Here you need to compute accuracy, roc, precision recall, confusion matrix
    
    pass
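On the accuracy / ROC / precision-recall / confusion-matrix TODO above: a minimal sketch using scikit-learn, assuming the ground-truth labels and per-class scores have already been read back from the inference lmdbs into arrays (the y_true / y_score values below are dummy placeholders):

import numpy as np
from sklearn.metrics import (accuracy_score, confusion_matrix,
                             precision_recall_curve, roc_curve)

y_true = np.array([0, 1, 1, 0])                # placeholder labels
y_score = np.array([[0.9, 0.1], [0.2, 0.8],
                    [0.4, 0.6], [0.7, 0.3]])   # placeholder per-class scores

y_pred = y_score.argmax(axis=1)
print 'accuracy:', accuracy_score(y_true, y_pred)
print 'confusion matrix:'
print confusion_matrix(y_true, y_pred)

# One-vs-rest curves for class 1:
fpr, tpr, _ = roc_curve(y_true, y_score[:, 1])
prec, rec, _ = precision_recall_curve(y_true, y_score[:, 1])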
Example #7
    fpath_net = expanduser('~/models/dark/mnist/t0/lenet_train_test.prototxt')
    fpath_weights = expanduser('~/models/dark/mnist/t0/lenet_iter_10000.caffemodel')
    
#     x = response_to_lmdb(fpath_net, fpath_weights,
#                      ['ip2', 'ip1'],
#                      expanduser('~/models/dark/mnist/t0/mnistX_'))

    net = caffe.Net(fpath_net, fpath_weights, caffe.TRAIN)
    keys = ['ip2', 'ip1']
    x = infer_to_lmdb_cur(net, keys, 2,
                      expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb'))
    
    print x
    
    import os
    print [os.path.isdir(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys]
    print [read_lmdb.num_entries(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys]
    #print [read_lmdb.read_values(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys]

#     with h5py.File(fpath, "w") as f:
#     
#         f['a'] = 0
#         
#         
#         f['b'] = [1, 2]
#         f['c'] = np.arange(3)
#         f['d'] = [np.array([[1,2],[4,5]], dtype=float), np.array([[1,2],[4, 5]], dtype=float)+10]
            
    #infer_to_h5(net, 1, ['accuracy'], fpath)
    
    pass
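To spot-check what was written (the commented-out read_values call above is the project helper; the following is a hedged stand-alone alternative using raw lmdb plus caffe.io, with the lmdb path assembled the same way as in this script):

import lmdb
from os.path import expanduser
from caffe.proto import caffe_pb2
from caffe.io import datum_to_array

path = expanduser('~/models/dark/mnist/t0/Xmnist_ip2_train_lmdb')
env = lmdb.open(path, readonly=True)
with env.begin() as txn:
    cur = txn.cursor()
    if cur.first():
        dat = caffe_pb2.Datum()
        dat.ParseFromString(cur.value())
        print 'first record shape:', datum_to_array(dat).shape
env.close()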