Code example #1
def prepare_s3dis_train_single_file(fn):
    # Assumes module-level imports (e.g. time) and project helpers such as
    # prepare_data and save_pkl, as in the other snippets in this section.
    in_fn = 'data/S3DIS/room_block_10_10/' + fn
    all_data = [[] for _ in xrange(5)]
    bg = time.time()

    # The eight prepare_data calls below enumerate all combinations of three
    # boolean flags (the 3rd-5th arguments); judging by the similar call in
    # prepare_subset_single_file (code example #12), these are presumably
    # augmentation switches such as axis swap and x/y flips.
    data = prepare_data(in_fn, True, True, False, False, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, True, True, False, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, True, False, True, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, True, True, True, True, True)
    for t in xrange(5):
        all_data[t] += data[t]

    data = prepare_data(in_fn, True, False, False, False, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, False, True, False, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, False, False, True, True, True)
    for t in xrange(5):
        all_data[t] += data[t]
    data = prepare_data(in_fn, True, False, True, True, True, True)
    for t in xrange(5):
        all_data[t] += data[t]

    out_fn = 'data/S3DIS/sampled_train_nolimits/' + fn
    save_pkl(out_fn, all_data)
    print 'done {} cost {} s'.format(fn, time.time() - bg)
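
A minimal driver sketch, not part of the original listing: assuming get_block_train_test_split from io_util (used in code example #2) returns the lists of room file names, the per-file preparation above can be run over the whole training split with a worker pool. The function name prepare_s3dis_train_all and the pool size are hypothetical.

from multiprocessing import Pool
from io_util import get_block_train_test_split

def prepare_s3dis_train_all(num_workers=4):
    # Run prepare_s3dis_train_single_file over every training room file.
    train_list, _ = get_block_train_test_split()
    pool = Pool(num_workers)
    pool.map(prepare_s3dis_train_single_file, train_list)
    pool.close()
    pool.join()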
Code example #2
def merge_train_by_area():
    # Merge the per-room sampled training files of each area into chunk files
    # of roughly 1000 blocks and record the chunk paths in a list file.
    from io_util import get_block_train_test_split
    train_list, test_list = get_block_train_test_split()
    random.shuffle(train_list)
    f = open('cached/s3dis_merged_train.txt', 'w')
    for ai in xrange(1, 7):
        cur_data = [[] for _ in xrange(5)]
        cur_idx = 0
        for fn in train_list:
            an = get_area(fn)
            if an != ai: continue
            data = read_pkl('data/S3DIS/sampled_train_new/' + fn)
            for i in xrange(5):
                cur_data[i] += data[i]

            if len(cur_data[0]) > 1000:
                save_pkl(
                    'data/S3DIS/merged_train_new/{}_{}.pkl'.format(
                        ai, cur_idx), cur_data)
                f.write('data/S3DIS/merged_train_new/{}_{}.pkl\n'.format(
                    ai, cur_idx))
                cur_idx += 1
                cur_data = [[] for _ in xrange(5)]

        if len(cur_data[0]) > 0:
            save_pkl(
                'data/S3DIS/merged_train_new/{}_{}.pkl'.format(ai, cur_idx),
                cur_data)
            f.write('data/S3DIS/merged_train_new/{}_{}.pkl\n'.format(
                ai, cur_idx))
            cur_idx += 1

        print 'area {} done'.format(ai)

    f.close()
Code example #3
def semantic3d_test_to_block():
    with open('cached/semantic3d_test_stems.txt','r') as f:
        lines=f.readlines()
        fns=[fn.strip('\n').split(' ')[0] for fn in lines]
        pns=[int(fn.strip('\n').split(' ')[1]) for fn in lines]


    for fn,pn in zip(fns,pns):
        all_data=[[] for _ in xrange(13)]
        for t in xrange(pn):
            fs=('data/Semantic3D.Net/pkl/test_presample/' + fn + '_{}.pkl'.format(t))
            points,labels=read_room_pkl(fs)
            # sstride, bsize, bstride, min_pn, covar_ds_stride, covar_nn_size
            # (and nr1/nr2/nr3, vc1/vc2, resample_ratio_*, max_pt_num below)
            # are module-level sampling parameters defined elsewhere in the
            # original source.
            xyzs, rgbs, covars, lbls = sample_block(points,labels,ds_stride=sstride,block_size=bsize,
                                                    block_stride=bstride,min_pn=min_pn,use_rescale=False,
                                                    use_flip=False,use_rotate=False,covar_ds_stride=covar_ds_stride,
                                                    covar_nn_size=covar_nn_size,gpu_gather=True)

            print 'block num {}'.format(len(xyzs))

            data =  normalize_block_hierarchy(xyzs, rgbs, covars, lbls,
                                              bsize=bsize, nr1=nr1, nr2=nr2, nr3=nr3,
                                              vc1=vc1, vc2=vc2,resample=True, jitter_color=True,
                                              resample_low=resample_ratio_low,
                                              resample_high=resample_ratio_high,
                                              max_pt_num=max_pt_num)
            for i in xrange(13):
                all_data[i]+=data[i]

        save_pkl('data/Semantic3D.Net/block/test/'+fn+'.pkl',all_data)
        print '{} done'.format(fn)
Code example #4
def merge_train_files():
    # Merge the sampled Semantic3D training blocks of each scene stem into
    # chunks (flushed once more than 300 blocks accumulate) and record each
    # stem with its chunk count.
    with open('cached/semantic3d_stems.txt','r') as f:
        stems=[line.split(' ')[0] for line in f.readlines()]
    with open('cached/semantic3d_train_pkl.txt','r') as f:
        fs=[line.strip('\n') for line in f.readlines()]

    of=open('cached/semantic3d_merged_train.txt','w')
    for s in stems:
        idx=0
        all_data=[[] for _ in xrange(4)]
        for f in fs:
            if not f.startswith(s):
                continue
            data=read_pkl('data/Semantic3D.Net/block/sampled/train/'+f)

            for i in xrange(4):
                all_data[i]+=data[i]

            if len(all_data[0])>300:
                print len(all_data[0])
                save_pkl('data/Semantic3D.Net/block/sampled/merged/'+s+'_{}.pkl'.format(idx),all_data)
                all_data=[[] for _ in xrange(4)]
                idx+=1

        if len(all_data[0])>0:
            save_pkl('data/Semantic3D.Net/block/sampled/merged/'+s+'_{}.pkl'.format(idx),all_data)
            idx+=1

        of.write('{} {}\n'.format(s,idx))
        print '{} done'.format(s)

    of.close()
Code example #5
def prepare_s3dis_train_single_file_no_aug(fn):
    in_fn = 'data/S3DIS/room_block_10_10/' + fn
    bg = time.time()
    data = prepare_data(in_fn, False, False, False, False, False, False, 256)

    out_fn = 'data/S3DIS/sampled_no_aug/' + fn
    save_pkl(out_fn, data)
    print 'done {} cost {} s'.format(fn, time.time() - bg)
Code example #6
def split_train_data(split_size):
    # Split the ScanNet training pickle into smaller pkl files holding
    # split_size rooms (and their labels) each.
    with open('data/ScanNet/scannet_train.pickle', 'rb') as f:
        points = cPickle.load(f)
        labels = cPickle.load(f)

    cur_size = 0
    idx = 0
    print 'total size {}'.format(len(points))
    while cur_size < len(points):
        save_pkl('data/ScanNet/train_split_{}.pkl'.format(idx), [
            points[cur_size:cur_size + split_size],
            labels[cur_size:cur_size + split_size]
        ])
        idx += 1
        cur_size += split_size
        print 'cur size {}'.format(cur_size)
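
A hypothetical invocation (the split size of 100 rooms is illustrative, not taken from the original code): a single call writes data/ScanNet/train_split_0.pkl, train_split_1.pkl, ... until every room is covered.

split_train_data(100)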
Code example #7
def modelnet_dataset_to_block():
    from io_util import read_model_h5
    train_list=['data/ModelNet40/ply_data_train{}.h5'.format(i) for i in xrange(5)]
    test_list=['data/ModelNet40/ply_data_test{}.h5'.format(i) for i in xrange(2)]
    # train_list2=['data/ModelNet40/ply_data_train{}.pkl'.format(i) for i in xrange(5)]
    # test_list2=['data/ModelNet40/ply_data_test{}.pkl'.format(i) for i in xrange(2)]

    # As written, only the first two training h5 files are converted.
    for fi,filename in enumerate(train_list[:2]):
        points,labels=read_model_h5(filename)
        data = normalize_model_hierarchy(points,False)
        app_data=[]
        app_data.append(labels)
        app_data+=data
        save_pkl('data/ModelNet40/ply_data_train{}.pkl'.format(fi),app_data)
        print len(app_data)
        print '{} done'.format(fi)
Code example #8
def semantic3d_presample_block():
    with open('cached/semantic3d_test_stems.txt','r') as f:
        fns=f.readlines()
        fns=[fn.strip('\n').split(' ')[0] for fn in fns]

    for fn in fns:
        fs=('data/Semantic3D.Net/pkl/test/' + fn + '.pkl')
        points,labels=read_room_pkl(fs)
        xyzs, rgbs, covars, lbls = sample_block(points,labels,ds_stride=0.03,block_size=50.0,
                                                block_stride=45.0,min_pn=128,use_rescale=False,
                                                use_flip=False,use_rotate=False,covar_ds_stride=0.01,
                                                covar_nn_size=0.1,gpu_gather=True)

        for t in xrange(len(xyzs)):
            points=np.concatenate([xyzs[t],rgbs[t]],axis=1)
            save_pkl('data/Semantic3D.Net/pkl/test_presample/' + fn + '_{}.pkl'.format(t),[points,lbls[t]])
Code example #9
def process():
    sess, pls, ops, feed_dict = build_session()
    with open('cached/semantic3d_test_stems.txt','r') as f:
        lines=f.readlines()
        fss=[fn.strip('\n').split(' ')[0] for fn in lines]

    # As written, only the third test stem is processed; the hard-coded
    # 'sg28_*' cache names below suggest that stem is the sg28 scene.
    for fs in fss[2:3]:
        sxyzs,_,sprobs=eval_room_probs(fs,sess, pls, ops, feed_dict)
        qxyzs,_=read_pkl('data/Semantic3D.Net/pkl/test/{}.pkl'.format(fs))
        qxyzs=np.ascontiguousarray(qxyzs[:,:3],np.float32)
        sxyzs=np.ascontiguousarray(sxyzs[:,:3],np.float32)
        sprobs=np.ascontiguousarray(sprobs,np.float32)
        qprobs=interpolate(sxyzs,sprobs,qxyzs)

        save_results(sxyzs,qxyzs,sprobs,qprobs,FLAGS.prefix,fs)

        save_pkl('cached/sg28_qxyzs.pkl',qxyzs)
        save_pkl('cached/sg28_qprobs.pkl',qprobs)
Code example #10
def process_test_data():
    with open('data/ScanNet/scannet_test.pickle', 'rb') as f:
        points = cPickle.load(f)
        labels = cPickle.load(f)

    room_num = len(points)
    bg = time.time()
    for i in xrange(room_num):
        if i % 10 == 0:
            print 'idx {} cost {} s'.format(i, time.time() - bg)
            bg = time.time()

        xyzs, covars, lbls = sample_block_scannet(points[i], labels[i],
                                                  ds_stride, block_size,
                                                  block_stride, min_pn, False,
                                                  False, False, covar_nn_size)
        data = normalize_block_scannet(xyzs, covars, lbls, block_size, nr1,
                                       nr2, nr3, vc1, vc2, False, rs_low,
                                       rs_high, pn_limits)

        save_pkl('data/ScanNet/sampled_test/test_{}.pkl'.format(i), data)
Code example #11
def process_one_file(fid):
    points, labels = read_pkl(
        'data/ScanNet/split/train_split_{}.pkl'.format(fid))

    room_num = len(points)
    all_data = [[] for _ in xrange(12)]
    idx = 0
    bg = time.time()
    for i in xrange(room_num):
        if i % 10 == 0:
            print 'idx {} cost {} s'.format(i, time.time() - bg)
            bg = time.time()

        # Five independently augmented sampling passes per room.
        for t in xrange(5):
            xyzs, covars, lbls = sample_block_scannet(points[i], labels[i],
                                                      ds_stride, block_size,
                                                      block_stride, min_pn,
                                                      True, True, True,
                                                      covar_nn_size)
            data = normalize_block_scannet(xyzs, covars, lbls, block_size, nr1,
                                           nr2, nr3, vc1, vc2, True, rs_low,
                                           rs_high, pn_limits)

            for s in xrange(len(data)):
                all_data[s] += data[s]

        if len(all_data[0]) > 300:
            save_pkl(
                'data/ScanNet/sampled_train/train_{}_{}.pkl'.format(fid, idx),
                all_data)
            idx += 1
            all_data = [[] for _ in xrange(12)]

    if len(all_data[0]) > 0:
        save_pkl('data/ScanNet/sampled_train/train_{}_{}.pkl'.format(fid, idx),
                 all_data)
        idx += 1
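
A possible driver for process_one_file, not part of the original listing (the glob pattern simply mirrors the path the function reads from): it counts the train_split_<fid>.pkl files under data/ScanNet/split/ and processes each file id in turn.

import glob

def process_all_train_splits():
    # One call per data/ScanNet/split/train_split_<fid>.pkl file on disk.
    split_num = len(glob.glob('data/ScanNet/split/train_split_*.pkl'))
    for fid in xrange(split_num):
        process_one_file(fid)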
Code example #12
def prepare_subset():
    train_list, test_list = get_block_train_test_split()
    train_list += test_list
    # Keep only the office rooms.
    file_list = [fn for fn in train_list if fn.split('_')[-2] == 'office']

    for fn in file_list:
        bg = time.time()
        path = 'data/S3DIS/room_block_10_10/' + fn
        flip_x = random.random() < 0.5
        flip_y = random.random() < 0.5
        swap = random.random() < 0.5
        all_data = [[] for _ in xrange(5)]
        for i in xrange(1):
            data = prepare_subset_single_file(path, 0.075, 1.5, 0.75, 128,
                                              True, swap, flip_x, flip_y, True,
                                              True)

            for k in xrange(5):
                all_data[k] += data[k]

        save_pkl('data/S3DIS/office_block/' + fn, all_data)
        print 'done {} cost {} s pn {}'.format(
            fn,
            time.time() - bg, np.mean([len(xyzs) for xyzs in all_data[0]]))
Code example #13
def prepare_s3dis_test_single_file(fn):
    in_fn = 'data/S3DIS/room_block_10_10/' + fn
    data = prepare_data(in_fn, False, False, False, False, False, False, 256)

    out_fn = 'data/S3DIS/sampled_test_nolimits/' + fn
    save_pkl(out_fn, data)
Code example #14
def semantic3d_sample_single_file_training_block(tfs):
    fs='data/Semantic3D.Net/block/train/'+tfs
    data=semantic3d_process_block_v2(fs)

    save_pkl('data/Semantic3D.Net/block/sampled/train/'+tfs,data)
    print '{} done'.format(tfs)
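
A sketch of a driver for the sampler above, not part of the original listing (the directory listing is an assumption; it simply enumerates the folder the function reads from): every pkl file under data/Semantic3D.Net/block/train/ is passed through semantic3d_sample_single_file_training_block.

import os

def semantic3d_sample_all_training_blocks():
    # Sample every raw training block file found in the input directory.
    for tfs in sorted(os.listdir('data/Semantic3D.Net/block/train/')):
        if tfs.endswith('.pkl'):
            semantic3d_sample_single_file_training_block(tfs)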