Example #1
    def test_num_entries_empty(self):

        path_lmdb_empty = os.path.join(self.dir_tmp, 'empty_lmdb')
        db = lmdb.open(path_lmdb_empty, map_size=int(1e12))
        with db.begin(write=True):
            pass  # commit an empty write transaction so the LMDB files exist on disk
        db.close()

        assert_equal(0, r.num_entries(path_lmdb_empty))
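
A minimal sketch of what such a num_entries helper could look like, assuming only the py-lmdb API (Environment.stat() reports an 'entries' count); the project's actual read_lmdb.num_entries may be implemented differently:

import lmdb

def num_entries_sketch(path_lmdb):
    """Return the number of key/value pairs stored in the LMDB at path_lmdb."""
    db = lmdb.open(path_lmdb, readonly=True)
    try:
        return db.stat()['entries']  # LMDB tracks the pair count in its stat block
    finally:
        db.close()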
Example #2
def shift_label_lmdb(path_src, path_dst):

    def func_data_decrement_0to255(value):
        """Decrement every value by 1, wrapping 0 around to 255."""
        import caffe
        _, x = read_lmdb.unpack_raw_datum(value)
        x[x == 0] = 256  # temporarily map 0 to 256 so the decrement wraps it to 255
        dat = caffe.io.array_to_datum(x - 1)  # 256 becomes 255
        return dat.SerializeToString()

    idxs_all = range(read_lmdb.num_entries(path_src))
    copy_lmdb.copy_samples_lmdb(path_src, path_dst, idxs_all,
                                func_data=func_data_decrement_0to255)
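
The inner function above relies on a wrap-around trick before the copy. A small standalone illustration of the same arithmetic on a hypothetical numpy array (not data from the project):

import numpy as np

x = np.array([0, 1, 2, 255], dtype=np.int32)
x[x == 0] = 256   # temporarily map 0 to 256
print(x - 1)      # [255   0   1 254]: 0 wraps to 255, everything else shifts down by one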
Example #3
def est_min_num_fwd_passes(fpath_net, mode_str):
    """
    if multiple source for same mode, base num_passes on last
    fpath_net -- path to network definition
    mode_str -- train or test?
    
    return
    minimum no. of forward passes to cover training set 
    """
    from nideep.proto.proto_utils import Parser
    np = Parser().from_net_params_file(fpath_net)
    
    num_passes = 0
    
    for l in np.layer:
        if 'data' in l.type.lower() and mode_str.lower() in l.data_param.source.lower():
            num_entries = read_lmdb.num_entries(l.data_param.source)
            num_passes = int(num_entries / l.data_param.batch_size)
            if num_entries % l.data_param.batch_size != 0:
                print("WARNING: db size not a multiple of batch size. Adding another fwd. pass.")
                num_passes += 1
            print("%d fwd. passes with batch size %d" % (num_passes, l.data_param.batch_size))
            
    return num_passes
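
The loop above amounts to a ceiling division of the database size by the batch size; a minimal equivalent of that arithmetic, with hypothetical numbers:

def min_fwd_passes(num_entries, batch_size):
    """Smallest number of batches that covers num_entries samples."""
    return (num_entries + batch_size - 1) // batch_size

assert min_fwd_passes(10000, 64) == 157    # 156 full batches plus one partial batch
assert min_fwd_passes(10000, 100) == 100   # divides evenly, no extra pass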
Example #4
    def num_entries(self):
        return read_lmdb.num_entries(self.p)
Example #5
    fpath_net = expanduser('~/models/dark/mnist/t0/lenet_train_test.prototxt')
    fpath_weights = expanduser('~/models/dark/mnist/t0/lenet_iter_10000.caffemodel')
    
#     x = response_to_lmdb(fpath_net, fpath_weights,
#                      ['ip2', 'ip1'],
#                      expanduser('~/models/dark/mnist/t0/mnistX_'))

    net = caffe.Net(fpath_net, fpath_weights, caffe.TRAIN)
    keys = ['ip2', 'ip1']
    x = infer_to_lmdb_cur(net, keys, 2,
                          expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb'))

    print(x)

    import os
    print([os.path.isdir(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys])
    print([read_lmdb.num_entries(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys])
    #print [read_lmdb.read_values(expanduser('~/models/dark/mnist/t0/Xmnist_%s_train_lmdb') % (k,)) for k in keys]

#     with h5py.File(fpath, "w") as f:
#     
#         f['a'] = 0
#         
#         
#         f['b'] = [1, 2]
#         f['c'] = np.arange(3)
#         f['d'] = [np.array([[1,2],[4,5]], dtype=float), np.array([[1,2],[4, 5]], dtype=float)+10]
            
    #infer_to_h5(net, 1, ['accuracy'], fpath)
    
    pass
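
The prints above only verify that the output LMDBs exist and how many entries they hold. A self-contained sketch of that kind of check, writing a few arrays to a scratch LMDB with caffe's datum helpers and counting them back; the path and sizes here are made up for illustration:

import lmdb
import numpy as np
import caffe

path_scratch = '/tmp/scratch_lmdb'
arrays = [np.random.rand(1, 3, 4) for _ in range(5)]

db = lmdb.open(path_scratch, map_size=int(1e9))
with db.begin(write=True) as txn:
    for i, x in enumerate(arrays):
        txn.put('{:010d}'.format(i).encode('ascii'),
                caffe.io.array_to_datum(x).SerializeToString())
db.close()

db = lmdb.open(path_scratch, readonly=True)
print(db.stat()['entries'])  # expect 5
db.close()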
Example #6
    def test_num_entries_non_num(self):

        assert_equal(2, r.num_entries(self.path_lmdb_non_num))
Example #7
    def test_num_entries_rand_ord(self):

        assert_equal(2, r.num_entries(self.path_lmdb_rand_ord))
Example #8
    def test_num_entries(self):

        assert_equal(2, r.num_entries(self.path_lmdb))