def test_presample():
    """Grid-downsample each presampled Semantic3D reduced-test chunk and dump it to txt for visual inspection."""
    # 17 presampled chunks of the MarketplaceFeldkirch reduced test scene
    for t in xrange(17):
        points,labels=read_pkl('data/Semantic3D.Net/pkl/test_presample/MarketplaceFeldkirch_Station4_rgb_intensity-reduced_{}.pkl'.format(t))
        print points.shape
        # 0.1 grid downsample; third arg False -- semantics of the flag not visible here, TODO confirm libPointUtil API
        idxs=libPointUtil.gridDownsampleGPU(points,0.1,False)
        points=points[idxs]
        output_points('test_result/{}.txt'.format(t), points)
Esempio n. 2
0
def test_train_block():
    """Sample and normalize blocks from the first 3 ScanNet train rooms, timing each, and dump labeled clouds plus per-level stats."""
    with open('data/ScanNet/scannet_train.pickle', 'rb') as f:
        points = cPickle.load(f)
        labels = cPickle.load(f)

    room_num = len(points)  # NOTE(review): unused below; only the first 3 rooms are processed
    for i in xrange(3):
        bg = time.time()
        # positional args: 0.02 sample stride, 3.0 block size, 1.5 block stride,
        # min 128 pts; the three True flags presumably enable augmentations -- TODO confirm
        xyzs, covars, lbls = sample_block_scannet(points[i], labels[i], 0.02,
                                                  3.0, 1.5, 128, True, True,
                                                  True, 0.1)
        cxyzs, dxyzs, covars, lbls, vlens, vlens_bgs, vcidxs, cidxs, nidxs, nidxs_bgs, nidxs_lens, block_mins=\
            normalize_block_scannet(xyzs,covars,lbls,3.0,0.1,0.3,1.0,0.15,0.5,True,0.8,1.0,10240)
        print 'single cost {} s'.format(time.time() - bg)

        # one random color per label id (21 entries -- presumably 20 classes + unannotated)
        colors = np.random.randint(0, 256, [21, 3])
        for t in xrange(len(cxyzs)):
            # cxyzs[t] holds the hierarchy levels of block t; + block_mins restores world coords
            output_points('test_result/{}_{}.txt'.format(i, t),
                          cxyzs[t][0] + block_mins[t], colors[lbls[t]])
            print 'point num {} {} {}'.format(cxyzs[t][0].shape[0],
                                              cxyzs[t][1].shape[0],
                                              cxyzs[t][2].shape[0])
            print 'mean neighbor size {} {} {}'.format(
                float(nidxs[t][0].shape[0]) / cxyzs[t][0].shape[0],
                float(nidxs[t][1].shape[0]) / cxyzs[t][1].shape[0],
                float(nidxs[t][2].shape[0]) / cxyzs[t][2].shape[0])
Esempio n. 3
0
def test_interpolate():
    """Sanity-check GPU probability interpolation: interpolate random corner values of the unit square onto random in-plane query points and dump the result."""
    # 4 source points: corners of the unit square in the z=0 plane
    sxyzs = np.asarray([
        [0.0, 0.0, 0.0],
        [0.0, 1.0, 0.0],
        [1.0, 0.0, 0.0],
        [1.0, 1.0, 0.0],
    ],
                       dtype=np.float32)
    # sxyzs=np.random.uniform(0.0,1.0,[128,3])
    sxyzs = np.asarray(sxyzs, dtype=np.float32)
    # random per-corner "probabilities" in color range so the output can be viewed directly
    sprobs = np.random.uniform(0.0, 255.0, [4, 3])
    print sprobs
    sprobs = np.asarray(sprobs, dtype=np.float32)

    # 1024 query points restricted to the same z=0 plane
    qxyzs = np.random.uniform(0.0, 1.0, [1024, 3])
    qxyzs[:, 2] = 0.0
    qxyzs = np.asarray(qxyzs, dtype=np.float32)

    # neighbor lists of each query point among the 4 sources (flattened CSR-style below)
    nidxs = libPointUtil.findNeighborInAnotherGPU(sxyzs, qxyzs, 4, 1)
    nidxs_lens = np.asarray([len(idxs) for idxs in nidxs], dtype=np.int32)
    nidxs_bgs = compute_nidxs_bgs(nidxs_lens)
    nidxs = np.concatenate(nidxs, axis=0)

    qprobs = libPointUtil.interpolateProbsGPU(sxyzs, qxyzs, sprobs, nidxs,
                                              nidxs_lens, nidxs_bgs, 2)
    print qprobs
    # write interpolated values as int colors for visual inspection
    output_points('test_result/int_rgbs.txt', qxyzs,
                  np.asarray(qprobs, np.int))
Esempio n. 4
0
def test_prepare_dataset():
    """Print value ranges of one prepared S3DIS block file and dump per-block color/label clouds for inspection."""
    points, nidxs, covars, rpoints, labels = read_block(
        '../data/S3DIS/folding/block/0_Area_1_conferenceRoom_1.h5')

    print 'points:'
    print np.min(points, axis=(0, 1))
    print np.max(points, axis=(0, 1))

    print 'nidixs'
    print np.min(nidxs, axis=(0, 1))
    print np.max(nidxs, axis=(0, 1))

    print 'rpoints'
    print np.min(rpoints, axis=(0, 1))
    print np.max(rpoints, axis=(0, 1))

    print 'covars'
    print np.min(np.sum(covars**2, axis=2)), np.max(np.sum(covars**2, axis=2))

    print 'labels'
    print np.min(labels), np.max(labels)

    from draw_util import output_points

    ccolors = get_class_colors()
    # (sic: 'block_rpoitns' typo preserved from original)
    for block_i, block_rpoitns in enumerate(rpoints):
        # columns 3: -- presumably normalized rgb in [-1,1]; *128+128 maps to byte range, TODO confirm
        colors = np.asarray(points[block_i, :, 3:] * 128 + 128, np.int)
        output_points('colors{}.txt'.format(block_i), block_rpoitns, colors)
        output_points('labels{}.txt'.format(block_i), block_rpoitns,
                      ccolors[labels[block_i], :])
def test_cluster_covar(xyz, covars, name):
    """Cluster per-point covariance descriptors into 5 groups and dump the points colored by cluster id."""
    from sklearn.cluster import KMeans
    from draw_util import output_points
    clusterer = KMeans(5)
    palette = np.random.randint(0, 256, [5, 3])
    cluster_ids = clusterer.fit_predict(covars)
    output_points('test_result/{}.txt'.format(name), xyz, palette[cluster_ids])
Esempio n. 6
0
def test_random_rotate_sample():
    """Sample randomly-rotated blocks from one S3DIS room, assert none is empty, and dump each block to txt."""
    from data_util import read_room_pkl, get_block_train_test_split, save_room_pkl
    from draw_util import output_points
    import time

    train_list, test_list = get_block_train_test_split()
    train_list += test_list

    idx = 0
    for fn in train_list[:1]:
        begin = time.time()
        # NOTE(review): reads a hard-coded room file, ignoring fn from the list
        points, labels = read_room_pkl('../data/S3DIS/room_block_10_10/' +
                                       '153_Area_4_hallway_3.pkl')
        # output_points('test.txt',points)

        # merge label 13 into 12 -- reason not visible here, TODO confirm class mapping
        labels[labels == 13] = 12
        block_points_list, block_labels_list = random_rotate_sample_block(
            points, labels, block_size=3.0, stride=1.5)
        for block_points in block_points_list:
            # print block_points.shape
            print fn
            assert block_points.shape[0] != 0

        fs = fn.split('.')[0]
        for pts, lbs in zip(block_points_list, block_labels_list):
            output_points('test_result/' + fs + '_' + str(idx) + '.txt', pts)
            idx += 1

        print 'cost {} s'.format(time.time() - begin)
Esempio n. 7
0
def test_one_epoch(ops,
                   pls,
                   sess,
                   saver,
                   testset,
                   epoch_num,
                   feed_dict,
                   summary_writer=None):
    """Run one evaluation epoch on a ScanNet test set.

    Feeds every batch through the session, accumulates loss/predictions,
    computes IoU/accuracy over non-zero (annotated) labels, logs the summary,
    and either saves a checkpoint (training mode) or prints per-class metrics
    (eval mode). Optionally dumps true/pred colored clouds per GPU batch.
    """
    begin_time = time.time()
    test_loss = []
    all_preds, all_labels = [], []
    colors = get_scannet_class_colors()
    for i, feed_in in enumerate(testset):
        _, batch_labels, block_mins = fill_feed_dict(feed_in, feed_dict, pls,
                                                     FLAGS.num_gpus)

        feed_dict[pls['is_training']] = False
        all_labels += batch_labels

        loss, logits = sess.run([ops['total_loss'], ops['logits']], feed_dict)
        # argmax over classes 1..N (column 0 excluded -- presumably 'unannotated'), +1 restores class ids
        preds = np.argmax(logits[:, 1:], axis=1) + 1
        test_loss.append(loss)
        all_preds.append(preds)

        # output labels and true
        if FLAGS.eval and FLAGS.eval_output:
            cur = 0  # running offset into the concatenated per-batch preds
            for k in xrange(FLAGS.num_gpus):
                xyzs = feed_dict[pls['cxyzs'][k][0]]
                lbls = feed_dict[pls['lbls'][k]]
                # restore world coordinates before dumping
                xyzs += block_mins[k]
                output_points('test_result/{}_{}_true.txt'.format(i, k), xyzs,
                              colors[lbls, :])
                output_points('test_result/{}_{}_pred.txt'.format(i, k), xyzs,
                              colors[preds[cur:cur + len(xyzs)], :])
                cur += len(xyzs)

    all_preds = np.concatenate(all_preds, axis=0)
    all_labels = np.concatenate(all_labels, axis=0)
    # label 0 is excluded from the metrics (unannotated points)
    mask = all_labels != 0
    all_preds = all_preds[mask]
    all_labels = all_labels[mask]
    iou, miou, oiou, acc, macc, oacc = compute_iou(all_labels - 1,
                                                   all_preds - 1, 20)

    test_loss = np.mean(np.asarray(test_loss))
    log_str(
        'mean iou {:.5} overall iou {:5} loss {:5} \n mean acc {:5} overall acc {:5} cost {:3} s'
        .format(miou, oiou, test_loss, macc, oacc,
                time.time() - begin_time), FLAGS.log_file)

    if not FLAGS.eval:
        checkpoint_path = os.path.join(FLAGS.save_dir,
                                       'model{}.ckpt'.format(epoch_num))
        saver.save(sess, checkpoint_path)
    else:
        # [1:] skips the excluded class-0 name to stay aligned with iou/acc
        names = get_scannet_class_names()[1:]
        for i in xrange(len(names)):
            print '{} iou {} acc {}'.format(names[i], iou[i], acc[i])
def test_labels():
    """Downsample every level-0 block of the 7th train scene and dump it colored by class label."""
    import os
    stems, _ = read_semantic3d_pkl_stems()
    from io_util import get_semantic3d_class_colors
    class_colors = get_semantic3d_class_colors()
    block_dir = 'data/Semantic3D.Net/block/train'
    for block_fn in os.listdir(block_dir):
        # only un-augmented ('_0') blocks of scene stems[6]
        if not (block_fn.startswith(stems[6]) and block_fn.endswith('_0.pkl')):
            continue
        points, labels = read_room_pkl(block_dir + '/' + block_fn)
        keep = libPointUtil.gridDownsampleGPU(points, 0.1, False)
        output_points('test_result/' + block_fn[:-4] + '.txt',
                      points[keep], class_colors[labels[keep], :])
def test_single_sample():
    """Process one Semantic3D block file into sub-blocks and dump each with rgb and label colors, reporting the max point count."""
    xyzs, rgbs, covars, lbls=\
        semantic3d_process_block_v2('data/Semantic3D.Net/block/train/bildstein_station1_xyz_intensity_rgb_9_2.pkl')
    from draw_util import get_semantic3d_class_colors
    colors=get_semantic3d_class_colors()
    max_pt_num=0
    for i in xrange(len(xyzs)):
        print xyzs[i].shape
        max_pt_num=max(xyzs[i].shape[0],max_pt_num)
        # rgbs presumably normalized to ~[-1,1]; 127*rgb+128 maps back to byte range -- TODO confirm
        output_points('test_result/{}_rgb.txt'.format(i),xyzs[i],127*rgbs[i]+128)
        output_points('test_result/{}_lbl.txt'.format(i),xyzs[i],colors[lbls[i],:])

    print 'max pn {}'.format(max_pt_num)
Esempio n. 10
0
def visual_room():
    """Stitch the blocks of every sampled 'office' room back into world coordinates and write one labeled cloud per room."""
    train_list, test_list = get_block_train_test_split()
    train_list += test_list
    office_fns = [fn for fn in train_list if fn.split('_')[-2] == 'office']
    from draw_util import get_class_colors, output_points
    label_colors = get_class_colors()
    for fn in office_fns:
        xyzs, rgbs, covars, labels, block_mins = read_pkl(
            'data/S3DIS/office_block/' + fn)
        # undo the per-block normalization by shifting each block back to its minimum corner
        for block_xyz, block_min in zip(xyzs, block_mins):
            block_xyz += block_min
        merged_xyz = np.concatenate(xyzs, axis=0)
        merged_lbl = np.concatenate(labels, axis=0)

        output_points('test_result/{}.txt'.format(fn), merged_xyz,
                      label_colors[merged_lbl])
Esempio n. 11
0
def test_fpfh():
    """Cluster FPFH descriptors of one room into 5 groups and dump clusters plus ground-truth labels as colored clouds."""
    import draw_util
    from sklearn.cluster import KMeans
    feats,labels=read_points_feats('/home/pal/project/PCDL/pc_seg/data/S3DIS/point/fpfh/0_Area_1_conferenceRoom_1.h5')
    print np.max(feats,axis=0)
    print np.min(feats,axis=0)
    print np.mean(feats,axis=0)
    kmeans=KMeans(n_clusters=5,n_jobs=-1)
    # columns 6: are the FPFH descriptor; columns 0:3 below are xyz -- layout of 3:6 not visible here
    preds=kmeans.fit_predict(feats[:,6:])
    for i in range(5):
        mask=preds==i
        color=np.random.randint(0,255,3)
        draw_util.output_points("kmeans{}.txt".format(i),feats[mask,0:3],color)

    colors=get_class_colors()
    draw_util.output_points("labels.txt",feats[:,:3],colors[labels,:])
Esempio n. 12
0
def test_radius_covar():
    """Compute radius-neighborhood covariance features on a downsampled room and visualize 8 KMeans clusters of them."""
    from data_util import downsample_random
    from draw_util import output_points
    points, labels = read_room_h5(
        '../data/S3DIS/room/0_Area_1_conferenceRoom_1.h5')
    print points.shape
    points, labels, _ = downsample_random(points, labels, 0.02)
    print points.shape
    output_points('test.txt', points)
    # covariance of each point's 0.1-radius neighborhood, L2-normalized per point
    covars = compute_radius_covars(points[:, :3], 0.1)
    covars /= np.sqrt(np.sum(covars**2, axis=1, keepdims=True))

    from sklearn.cluster import KMeans
    kmeans = KMeans(8, n_jobs=-1)
    pred = kmeans.fit_predict(covars)
    print pred.shape
    colors = np.random.randint(0, 255, [8, 3])
    output_points('cluster.txt', points, colors[pred, :])
Esempio n. 13
0
def test_block():
    """Merge the grid-downsampled level-0 blocks of each Semantic3D train scene and write one labeled and one plain cloud per scene."""
    import os
    scene_stems, _ = read_semantic3d_pkl_stems()
    from draw_util import get_semantic3d_class_colors
    label_colors = get_semantic3d_class_colors()
    block_dir = 'data/Semantic3D.Net/block/train'
    for stem in scene_stems:
        scene_points, scene_labels = [], []
        for block_fn in os.listdir(block_dir):
            # only the un-augmented ('_0') blocks of this scene
            if block_fn.startswith(stem) and block_fn.endswith('_0.pkl'):
                points, labels = read_room_pkl(block_dir + '/' + block_fn)
                keep = libPointUtil.gridDownsampleGPU(points, 0.1, False)
                scene_points.append(points[keep])
                scene_labels.append(labels[keep])

        merged_points = np.concatenate(scene_points, axis=0)
        merged_labels = np.concatenate(scene_labels, axis=0)
        output_points('test_result/' + stem + '_labels.txt', merged_points,
                      label_colors[merged_labels, :])
        output_points('test_result/' + stem + '_colors.txt', merged_points)
def generate_anchor(center_num=5):
    """Return `center_num` roughly uniform unit-sphere anchor directions, transposed to shape (3, center_num).

    A cached copy in cached/centers.txt is reused only when it contains exactly
    `center_num` rows; otherwise fresh anchors are generated by clustering
    random unit vectors, rotated into a canonical orientation, and re-cached.
    """
    cache_path = 'cached/centers.txt'
    if os.path.exists(cache_path):
        with open(cache_path, 'r') as f:
            cached = []
            for ln in f.readlines():
                fields = ln.strip('\n').split(' ')
                cached.append(
                    [float(fields[0]),
                     float(fields[1]),
                     float(fields[2])])

            cached = np.asarray(cached, dtype=np.float32)
        if cached.shape[0] == center_num:
            return cached.transpose()

    # sample ~uniform directions on the unit sphere and cluster them
    pts = np.random.uniform(-1.0, 1.0, [100000, 3])
    pts /= np.sqrt(np.sum(pts**2, axis=1, keepdims=True) + 1e-6)
    cluster_ids = KMeans(center_num).fit_predict(pts)
    centers = np.asarray(
        [np.mean(pts[cluster_ids == ci], axis=0) for ci in xrange(center_num)])

    # rotate about z so the first center's x component vanishes
    ang1 = -np.arctan2(centers[0, 0], centers[0, 1])
    cosv, sinv = np.cos(ang1), np.sin(ang1)
    rot_z = np.asarray([[cosv, -sinv, 0], [sinv, cosv, 0], [0, 0, 1]],
                       dtype=np.float64)
    centers, pts = np.dot(centers, rot_z), np.dot(pts, rot_z)

    # rotate about x to finish aligning the first center with a canonical axis
    ang2 = -(np.pi / 2 - np.arctan2(centers[0, 2], centers[0, 1]))
    cosv, sinv = np.cos(ang2), np.sin(ang2)
    rot_x = np.asarray([[1, 0, 0], [0, cosv, -sinv], [0, sinv, cosv]],
                       dtype=np.float64)
    centers, pts = np.dot(centers, rot_x), np.dot(pts, rot_x)

    output_points(cache_path, centers)

    return centers.transpose()
Esempio n. 15
0
def save_results(sxyzs, qxyzs, sprobs, qprobs, prefix, fs):
    """Write Semantic3D predictions: a submission-style .labels file for the
    dense (query) points plus two downsampled colored clouds for inspection.

    sxyzs/sprobs: sparse (subsampled) points and their class probabilities.
    qxyzs/qprobs: dense points and their class probabilities.
    prefix: output sub-directory under data/Semantic3D.Net; fs: file stem.
    """
    colors = get_semantic3d_class_colors()
    # argmax over classes 1..N (column 0 excluded), +1 restores class ids
    spreds = np.argmax(sprobs[:, 1:], axis=1) + 1
    qpreds = np.argmax(qprobs[:, 1:], axis=1) + 1

    # renamed from `dir`, which shadowed the builtin
    out_dir = 'data/Semantic3D.Net/{}'.format(prefix)
    if not os.path.exists(out_dir): os.mkdir(out_dir)
    with open('{}/{}.labels'.format(out_dir, fs), 'w') as f:
        for pred in qpreds:
            f.write('{}\n'.format(pred))

    # 0.3 grid downsample before dumping, purely to keep the txt files small
    idxs = libPointUtil.gridDownsampleGPU(sxyzs, 0.3, False)
    sxyzs = sxyzs[idxs]
    spreds = spreds[idxs]
    output_points('{}/{}_sparse.txt'.format(out_dir, fs), sxyzs, colors[spreds])

    idxs = libPointUtil.gridDownsampleGPU(qxyzs, 0.3, False)
    qxyzs = qxyzs[idxs]
    qpreds = qpreds[idxs]
    output_points('{}/{}_dense.txt'.format(out_dir, fs), qxyzs, colors[qpreds])
Esempio n. 16
0
def compare():
    """For 3 random sampled S3DIS rooms, report near-zero covariance counts and dump KMeans clusters of each block's covariances."""
    from draw_util import output_points
    from sklearn.cluster import KMeans
    train_list, test_list = get_block_train_test_split()
    random.shuffle(train_list)

    train_list_add = ['data/S3DIS/sampled_train/' + fn for fn in train_list]
    for fi, fs in enumerate(train_list_add[:3]):
        cxyzs, dxyzs, rgbs, covars, lbls, vlens, vlens_bgs, vcidxs, cidxs, nidxs, nidxs_bgs, nidxs_lens, block_mins = read_pkl(
            fs)

        for i in xrange(len(cxyzs[:10])):
            # count points whose covariance is effectively all-zero (degenerate neighborhoods)
            print np.sum(np.sum(np.abs(covars[i]), axis=1) < 1e-3)
            kmeans = KMeans(5)
            colors = np.random.randint(0, 256, [5, 3])
            preds = kmeans.fit_predict(covars[i])
            # cxyzs[i][0]: finest hierarchy level of block i
            output_points('test_result/{}_{}.txt'.format(fi, i), cxyzs[i][0],
                          colors[preds])

    print '//////////////////////////'
Esempio n. 17
0
def test_prepare_v2():
    """Print value ranges of one v2 prepared S3DIS block file, dump per-block clouds, and visualize KMeans clusters of the covariances."""
    points, covars, rpoints, labels = read_block_v2(
        '../data/S3DIS/folding/block_v2/5_Area_1_hallway_3.h5')

    print 'points:'
    print np.min(points, axis=(0, 1))
    print np.max(points, axis=(0, 1))

    print 'rpoints'
    print np.min(rpoints, axis=(0, 1))
    print np.max(rpoints, axis=(0, 1))

    print 'covars'
    print np.min(np.sum(covars**2, axis=2)), np.max(np.sum(covars**2, axis=2))

    print 'labels'
    print np.min(labels), np.max(labels)

    from draw_util import output_points

    ccolors = get_class_colors()
    # (sic: 'block_rpoitns' typo preserved from original)
    for block_i, block_rpoitns in enumerate(rpoints):
        # columns 3: -- presumably normalized rgb; *128+128 maps to byte range, TODO confirm
        colors = np.asarray(points[block_i, :, 3:] * 128 + 128, np.int)
        output_points('colors{}.txt'.format(block_i), block_rpoitns, colors)
        output_points('labels{}.txt'.format(block_i), block_rpoitns,
                      ccolors[labels[block_i], :])

    from sklearn.cluster import KMeans
    kmeans = KMeans(8, n_jobs=-1)
    # flatten 3x3 covariances to 9-dim feature vectors for clustering
    covars = np.reshape(covars, [-1, 9])
    pred = kmeans.fit_predict(covars)
    rpoints = np.reshape(rpoints, [-1, 3])
    colors = np.random.randint(0, 255, [8, 3])
    output_points('cluster.txt', rpoints, colors[pred, :])
Esempio n. 18
0
def test_block_train():
    """Dump each block of one sampled S3DIS room as a txt cloud colored by its rgb values, with world coordinates restored."""
    train_list, test_list = get_block_train_test_split()

    from draw_util import get_class_colors, output_points
    # (earlier exploratory code for per-room block statistics removed; see VCS history)

    xyzs, rgbs, covars, lbls, block_mins = read_pkl(
        'data/S3DIS/sampled_train_nolimits/{}'.format(
            '1_Area_1_conferenceRoom_2.pkl'))
    # rgbs are presumably normalized; *127+128 maps them back to byte range -- TODO confirm
    for block_i, block_xyz in enumerate(xyzs):
        output_points('test_result/{}.txt'.format(block_i),
                      block_xyz + block_mins[block_i],
                      rgbs[block_i] * 127 + 128)
Esempio n. 19
0
def cluster():
    """Restore a trained model, extract concatenated layer features for loaded blocks, KMeans-cluster them, and dump cluster/rgb clouds."""
    from sklearn.cluster import KMeans
    from draw_util import output_points
    ops, pls, feats_grad = build_model()
    xyzs, feats, labels = load_data()

    # concatenate the per-layer feature ops along the feature axis
    all_feats = tf.concat(ops, axis=1)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.allow_soft_placement = True
    config.log_device_placement = False
    sess = tf.Session(config=config)

    saver = tf.train.Saver(tf.trainable_variables())
    saver.restore(sess, 'model/gpn_edge_new_v2/model23.ckpt')

    kmeans = KMeans(n_clusters=5, n_jobs=-1)
    colors = np.random.randint(0, 256, [5, 3])
    for i in xrange(1):
        all_layer_feats = []
        for k in xrange(len(xyzs)):
            layer_feats = sess.run(all_feats,
                                   feed_dict={
                                       pls['xyzs']: xyzs[k],
                                       pls['feats']: feats[k],
                                       pls['labels']: labels[k],
                                   })
            all_layer_feats.append(layer_feats)

        all_layer_feats = np.concatenate(all_layer_feats, axis=0)
        print i, np.max(all_layer_feats, axis=0), np.mean(all_layer_feats)
        preds = kmeans.fit_predict(all_layer_feats)
        # cur_loc walks the concatenated predictions block by block
        cur_loc = 0
        for t in xrange(2):
            output_points('test_result/{}_{}_preds.txt'.format(i, t), xyzs[t],
                          colors[preds[cur_loc:cur_loc + len(xyzs[t])], :])
            # feats[t][:,:3]: presumably normalized rgb; *127+128 maps to byte range -- TODO confirm
            output_points('test_result/{}_{}_colors.txt'.format(i, t), xyzs[t],
                          feats[t][:, :3] * 127 + 128)
Esempio n. 20
0
def test_covar():
    """Sample blocks from the first S3DIS train room and visualize 5 KMeans clusters of each of the first 5 blocks' covariance features."""
    train_list, test_list = get_block_train_test_split()
    points, labels = read_pkl('data/S3DIS/room_block_10_10/' + train_list[0])
    # sstride/bsize/bstride come from module-level configuration
    xyzs, rgbs, covars, lbls = sample_block(points,
                                            labels,
                                            sstride,
                                            bsize,
                                            bstride,
                                            min_pn=512,
                                            use_rescale=False,
                                            swap=False,
                                            flip_x=False,
                                            flip_y=False,
                                            covar_ds_stride=0.075,
                                            covar_nn_size=0.15)

    from sklearn.cluster import KMeans
    from draw_util import output_points
    for block_i, block_xyz in enumerate(xyzs[:5]):
        clusterer = KMeans(5)
        palette = np.random.randint(0, 256, [5, 3])
        cluster_ids = clusterer.fit_predict(covars[block_i])
        output_points('test_result/{}.txt'.format(block_i), block_xyz,
                      palette[cluster_ids])
Esempio n. 21
0
def test_normalize():
    """Sample augmented blocks from one S3DIS room, normalize them, print shape/range stats, and dump label/rgb/neighborhood clouds."""
    from draw_util import output_points, get_class_colors
    from sklearn.cluster import KMeans
    colors = get_class_colors()

    train_list, test_list = get_block_train_test_split()
    random.shuffle(train_list)
    train_list = ['data/S3DIS/room_block_10_10/' + fn for fn in train_list]
    # filename=train_list[0]
    filename = 'data/S3DIS/room_block_10_10/49_Area_1_office_8.pkl'
    points, labels = read_room_pkl(filename)  # [n,6],[n,1]
    begin = time.time()
    xyzs, rgbs, covars, lbls = sample_block(points,
                                            labels,
                                            SAMPLE_STRIDE,
                                            BLOCK_SIZE,
                                            BLOCK_STRIDE,
                                            min_pn=2048,
                                            use_rescale=True,
                                            use_flip=True,
                                            use_rotate=True)
    print 'sample_block cost {} s'.format(time.time() - begin)

    # for j in xrange(len(xyzs)):
    #     print np.min(xyzs[j],axis=0),np.max(xyzs[j],axis=0)
    #     print np.min(rgbs[j],axis=0),np.max(rgbs[j],axis=0)
    #     print xyzs[j].shape,lbls[j].shape
    # output_points('test_result/label_init{}.txt'.format(j),xyzs[j],colors[lbls[j].flatten(),:])
    # output_points('test_result/lrgbs_init{}.txt'.format(j),xyzs[j],rgbs[j])

    xyzs, rgbs, covars, lbls, nidxs, nidxs_lens, nidxs_bgs, cidxs=\
        normalize_block(xyzs,rgbs,covars,lbls,0.2,True,0.8,1.0,True,2.5)

    for j in xrange(len(xyzs)):
        print xyzs[j].shape, rgbs[j].shape, covars[j].shape, lbls[
            j].shape, nidxs[j].shape, nidxs_lens[j].shape, nidxs_bgs[
                j].shape, cidxs[j].shape
        print np.min(xyzs[j], axis=0), np.max(xyzs[j], axis=0)
        print np.min(rgbs[j], axis=0), np.max(rgbs[j], axis=0)
        # average neighborhood size = total neighbor entries / point count
        print 'avg nn size: {}'.format(len(nidxs[j]) / float(len(xyzs[j])))
        # print xyzs[j].shape,lbls[j].shape
        output_points('test_result/label{}.txt'.format(j), xyzs[j],
                      colors[lbls[j].flatten(), :])
        output_points('test_result/lrgbs{}.txt'.format(j), xyzs[j],
                      np.asarray(rgbs[j] * 128 + 127, np.int32))

    # dump the neighborhood of each point of block 0 (nidxs is CSR-style: bgs/lens index into the flat list)
    for j in xrange(len(xyzs[0])):
        output_points(
            'test_result/nn{}.txt'.format(j),
            xyzs[0][nidxs[0][nidxs_bgs[0][j]:nidxs_bgs[0][j] +
                             nidxs_lens[0][j]], :])
Esempio n. 22
0
def test_sample():
    """Sample blocks from a random S3DIS room without augmentation, dump label/rgb clouds, cluster covariances, and report stats."""
    from draw_util import output_points, get_class_colors
    from sklearn.cluster import KMeans
    colors = get_class_colors()

    train_list, test_list = get_block_train_test_split()
    random.shuffle(train_list)
    train_list = ['data/S3DIS/room_block_10_10/' + fn for fn in train_list]
    filename = train_list[0]
    # filename='data/S3DIS/room_block_10_10/58_Area_2_auditorium_2.pkl'
    points, labels = read_room_pkl(filename)  # [n,6],[n,1]
    print np.min(points, axis=0)
    begin = time.time()
    # 0.075 sample stride, 1.5 block size, 1.5 block stride
    # NOTE(review): 2048/2 is integer division in py2 -> min_pn=1024
    xyzs, rgbs, covars, lbls = sample_block(points,
                                            labels,
                                            0.075,
                                            1.5,
                                            1.5,
                                            min_pn=2048 / 2)
    #use_rescale=True,use_flip=True,use_rotate=True)
    print 'sample_block cost {} s'.format(time.time() - begin)

    print np.min(np.concatenate(xyzs, axis=0), axis=0)
    kc = np.random.randint(0, 255, [5, 3])  # palette for the 5 covariance clusters
    for j in xrange(len(xyzs)):
        # print xyzs[j].shape,lbls[j].shape
        output_points('test_result/label{}.txt'.format(j), xyzs[j],
                      colors[lbls[j].flatten(), :])
        output_points('test_result/lrgbs{}.txt'.format(j), xyzs[j], rgbs[j])

    kmeans = KMeans(5)
    preds = kmeans.fit_predict(np.concatenate(covars, axis=0))
    output_points('test_result/kmeans.txt', np.concatenate(xyzs, axis=0),
                  kc[preds.flatten(), :])

    pt_num = [len(xyz) for xyz in xyzs]
    print 'avg pt num: {}'.format(np.mean(pt_num))
Esempio n. 23
0
    :return:
        covars: n,k,9
    '''

    nidxs=np.ascontiguousarray(nidxs,dtype=np.int32)
    points=np.ascontiguousarray(points,dtype=np.float32)
    covars=PointsUtil.ComputeCovars(points,nidxs,nn_size,gpu_index)

    return covars


if __name__=="__main__":
    from draw_util import output_points
    import time
    # load a dumped point cloud, recentre x/y, then voxelize it on GPU and dump the voxels
    points=np.loadtxt('/home/liuyuan/tmp/0_8_true.txt',dtype=np.float32)
    points[:,:2]+=0.5
    print np.min(points,axis=0)
    print np.max(points,axis=0)
    output_points('points.txt',points)

    bg=time.time()
    # duplicate the cloud to batch size 2 for the batched GPU voxelizer
    points=np.repeat(points[None,:,:],2,axis=0)
    # print points.shape

    voxels=PointsUtil.Points2VoxelBatchGPU(points,30)
    print 'cost {} s'.format(time.time()-bg)
    voxels=voxels[0]
    voxel_points= voxel2points(voxels)
    voxel_points=voxel_points.astype(np.float32)
    # normalize voxel coordinates to [0,1] per axis for viewing
    voxel_points[:,:3]/=np.max(voxel_points[:,:3],axis=0,keepdims=True)
    output_points('voxels.txt',voxel_points)
def test_one_epoch(ops,
                   pls,
                   sess,
                   saver,
                   testset,
                   epoch_num,
                   feed_dict,
                   summary_writer=None):
    """Run one S3DIS evaluation epoch.

    Unpacks each batch into per-GPU placeholders, accumulates loss and
    predictions, computes IoU/accuracy, logs the summary, and either saves a
    checkpoint (training mode) or prints per-class metrics (eval mode).
    Optionally dumps denormalized true/pred colored clouds per GPU batch.
    """
    begin_time = time.time()
    test_loss = []
    all_preds, all_labels = [], []
    colors = get_class_colors()
    weights = get_class_loss_weights()
    for i, feed_in in enumerate(testset):
        xyzs, rgbs, covars, lbls, nidxs, nidxs_lens, nidxs_bgs, cidxs, block_bgs, block_lens=\
            default_unpack_feats_labels(feed_in,FLAGS.num_gpus)

        # fill the per-GPU placeholders for this batch
        for k in xrange(FLAGS.num_gpus):
            feed_dict[pls['xyzs'][k]] = xyzs[k]
            feed_dict[pls['rgbs'][k]] = rgbs[k]
            feed_dict[pls['covars'][k]] = covars[k]
            feed_dict[pls['lbls'][k]] = lbls[k]
            feed_dict[pls['nidxs'][k]] = nidxs[k]
            feed_dict[pls['nidxs_lens'][k]] = nidxs_lens[k]
            feed_dict[pls['nidxs_bgs'][k]] = nidxs_bgs[k]
            feed_dict[pls['cidxs'][k]] = cidxs[k]
            all_labels.append(lbls[k])
            if FLAGS.weighted_loss:
                feed_dict[pls['weights'][k]] = weights[lbls[k]]

        feed_dict[pls['is_training']] = False

        if FLAGS.eval and FLAGS.num_monitor:
            loss, preds, summary = sess.run(
                [ops['total_loss'], ops['preds'], ops['summary']], feed_dict)
            summary_writer.add_summary(summary)
        else:
            loss, preds = sess.run([ops['total_loss'], ops['preds']],
                                   feed_dict)
        test_loss.append(loss)
        all_preds.append(preds)

        # output labels and true
        if FLAGS.eval and FLAGS.eval_output:
            cur = 0  # running offset into the concatenated per-batch preds
            for k in xrange(FLAGS.num_gpus):
                # undo the block normalization: x/y were scaled to [-1,1] over a
                # 3.0 block (so *1.5+1.5), z shifted/scaled by the block height
                restore_xyzs = xyzs[k]
                restore_xyzs[:, :2] = restore_xyzs[:, :2] * 1.5 + 1.5
                restore_xyzs[:, 2] += 1.0
                restore_xyzs[:, 2] *= block_lens[k][2] / 2
                restore_xyzs += block_bgs[k]
                output_points('test_result/{}_{}_true.txt'.format(i, k),
                              restore_xyzs, colors[lbls[k], :])
                output_points('test_result/{}_{}_pred.txt'.format(i, k),
                              restore_xyzs,
                              colors[preds[cur:cur + len(xyzs[k])], :])
                cur += len(xyzs[k])

        # in monitoring mode only a few batches are needed
        if FLAGS.eval and FLAGS.num_monitor and i >= 2:
            break

    all_preds = np.concatenate(all_preds, axis=0)
    all_labels = np.concatenate(all_labels, axis=0)

    test_loss = np.mean(np.asarray(test_loss))

    iou, miou, oiou, acc, macc, oacc = compute_iou(all_labels, all_preds)

    log_str(
        'mean iou {:.5} overall iou {:5} loss {:5} \n mean acc {:5} overall acc {:5} cost {:3} s'
        .format(miou, oiou, test_loss, macc, oacc,
                time.time() - begin_time), FLAGS.log_file)

    if not FLAGS.eval:
        checkpoint_path = os.path.join(FLAGS.save_dir,
                                       'model{}.ckpt'.format(epoch_num))
        saver.save(sess, checkpoint_path)
    else:
        names = get_class_names()
        for i in xrange(len(names)):
            print '{} iou {} acc {}'.format(names[i], iou[i], acc[i])
Esempio n. 25
0
        spreds = np.argmax(sprobs, axis=1) + 1

        if fi <= 5:
            idxs = libPointUtil.gridDownsampleGPU(sxyzs, 0.01, False)
            sxyzs = sxyzs[idxs]
            spreds = spreds[idxs]
            slbls = slbls[idxs]
            # output_points('test_result/{}spreds.txt'.format(fi),sxyzs,colors[spreds,:])
            # output_points('test_result/{}slabel.txt'.format(fi),sxyzs,colors[slbls,:])

            idxs = libPointUtil.gridDownsampleGPU(qxyzs, 0.01, False)
            qxyzs = qxyzs[idxs]
            qpreds = qpreds[idxs]
            labels = labels[idxs]
            points = points[idxs]
            output_points('test_result/{}qpreds.txt'.format(fi), qxyzs,
                          colors[qpreds, :])
            output_points('test_result/{}qlabel.txt'.format(fi), qxyzs,
                          colors[labels.flatten(), :])
            output_points('test_result/{}qcolor.txt'.format(fi), points)
        else:
            break

        labels = labels.flatten()
        qpreds = qpreds.flatten()
        mask = labels != 0
        qpreds = qpreds[mask]
        labels = labels[mask]

        fp, tp, fn = acc_val(labels - 1, qpreds - 1, fp, tp, fn, 20)

        print 'total cost {} s'.format(time.time() - begin)
Esempio n. 26
0
        qprobs = []
        for t in xrange(qrn):
            beg_idxs = t * rn
            end_idxs = min((t + 1) * rn, qn)
            qrprobs = interpolate(sxyzs, sprobs, qxyzs[beg_idxs:end_idxs])
            print 'interpolate {} done'.format(t)
            qprobs.append(qrprobs)

        qprobs = np.concatenate(qprobs, axis=0)
        qpreds = np.argmax(qprobs[:, 1:], axis=1) + 1

        colors = get_semantic3d_class_colors()
        spreds = np.argmax(sprobs[:, 1:], axis=1) + 1

        print 'total cost {} s'.format(time.time() - begin)

        with open('data/Semantic3D.Net/{}.labels'.format(fn), 'w') as f:
            for p in qpreds:
                f.write('{}\n'.format(p))

        idxs = libPointUtil.gridDownsampleGPU(sxyzs, 0.1, False)
        sxyzs = sxyzs[idxs]
        spreds = spreds[idxs]
        output_points('test_result/{}_sparse.txt'.format(fn), sxyzs,
                      colors[spreds, :])
        idxs = libPointUtil.gridDownsampleGPU(qxyzs, 0.1, False)
        qxyzs = qxyzs[idxs]
        qpreds = qpreds[idxs]
        output_points('test_result/{}_dense.txt'.format(fn), qxyzs,
                      colors[qpreds, :])
Esempio n. 27
0
    train_provider = Provider(train_list, 'train', 4, read_fn)
    test_provider = Provider(test_list, 'test', 4, read_fn)

    try:
        begin = time.time()
        i = 0
        for data in train_provider:
            i += 1
            cxyzs, rgbs, covars, lbls, = default_unpack_feats_labels(data, 4)
            for k in xrange(4):
                print len(cxyzs[k])

        print 'batch_num {}'.format(i * 4)
        print 'train set cost {} s'.format(time.time() - begin)

    finally:
        train_provider.close()
        test_provider.close()


if __name__ == "__main__":
    # data=read_pkl('data/ModelNet40/ply_data_test1.pkl')
    # print len(data)
    # test_read_semantic_dataset()
    # inspect value ranges of a cached batch and dump colored clouds
    cxyzs, rgbs, covars, lbls = read_pkl('tmp_data.pkl')
    for i in xrange(4):
        print np.min(lbls[i]), np.max(lbls[i])
        print np.min(cxyzs[i], axis=0), np.max(cxyzs[i], axis=0)
        print np.min(rgbs[i], axis=0), np.max(rgbs[i], axis=0)
        # rgb columns presumably normalized; *127+128 maps to byte range -- TODO confirm
        output_points('test_result/tmp{}.txt'.format(i), cxyzs[i],
                      rgbs[i][:, :3] * 127 + 128)
Esempio n. 28
0
def output_hierarchy(cxyz1,
                     cxyz2,
                     cxyz3,
                     rgbs,
                     lbls,
                     vlens1,
                     vlens2,
                     dxyz1,
                     dxyz2,
                     vc1,
                     vc2,
                     idx=0,
                     colors=s3dis_colors):
    """Dump the three levels of a point hierarchy as txt clouds for inspection.

    Writes the finest level with rgb and label colors, the voxel membership of
    levels 1/2 (points share a random color with their parent voxel), and the
    denormalized local offsets.

    NOTE(review): dxyz1/dxyz2 are denormalized IN PLACE (scaled by vc1/vc2 and
    shifted by the parent centers), so the caller's arrays are modified.
    """
    # finest level with its true colors and labels
    output_points('test_result/cxyz1_rgb_{}.txt'.format(idx), cxyz1, rgbs)
    output_points('test_result/cxyz1_lbl_{}.txt'.format(idx), cxyz1,
                  colors[lbls.flatten(), :])

    def voxel_ids(vlens):
        # expand per-voxel lengths into a per-point parent-voxel index
        ids = []
        for vi, vl in enumerate(vlens):
            ids.extend([vi] * vl)
        return ids

    # level-1 membership: each point of cxyz1 gets its parent voxel's color
    vidxs1 = voxel_ids(vlens1)
    palette1 = np.random.randint(0, 256, [vlens1.shape[0], 3])
    vidxs1 = np.asarray(vidxs1, np.int32)

    output_points('test_result/cxyz1_{}.txt'.format(idx), cxyz1,
                  palette1[vidxs1, :])
    output_points('test_result/cxyz2_{}.txt'.format(idx), cxyz2, palette1)

    # level-2 membership: each point of cxyz2 gets its parent voxel's color
    vidxs2 = voxel_ids(vlens2)
    palette2 = np.random.randint(0, 256, [vlens2.shape[0], 3])
    vidxs2 = np.asarray(vidxs2, np.int32)

    output_points('test_result/cxyz2a_{}.txt'.format(idx), cxyz2,
                  palette2[vidxs2, :])
    output_points('test_result/cxyz3a_{}.txt'.format(idx), cxyz3, palette2)

    def denormalize(dxyz, vlens, parents, scale):
        # scale each voxel's local offsets and shift them onto the parent center (in place)
        base = 0
        for vi, vl in enumerate(vlens):
            for off in xrange(vl):
                dxyz[base + off] *= scale
                dxyz[base + off] += parents[vi]
            base += vl

    denormalize(dxyz1, vlens1, cxyz2, vc1)
    output_points('test_result/dxyz1_{}.txt'.format(idx), dxyz1)

    denormalize(dxyz2, vlens2, cxyz3, vc2)
    output_points('test_result/dxyz2_{}.txt'.format(idx), dxyz2)