Example #1
import os

import numpy as np

# parse_args, get_conf, reset_tf_graph, PointNetAutoEncoder,
# load_all_point_clouds_under_folder, get_visible_points, and visualize are
# assumed to be imported from the surrounding project.
def main():
    params = parse_args()
    conf = get_conf(params)

    ckpt_path = os.path.join('./data/', params['experiment_name'], params['class_name'])
    epoch = conf.training_epochs

    reset_tf_graph()
    ae = PointNetAutoEncoder(conf.experiment_name, conf)
    ae.restore_model(ckpt_path, epoch, verbose=True)

    test_dir = './s3dis/Area_6/*/Annotations/{}_*.txt'
    all_pc_data = load_all_point_clouds_under_folder(
        test_dir,
        params['class_name'],
        n_points=conf.n_input[0],
        with_color=conf.input_color,
        n_threads=20,
        verbose=True)

    all_pc_data.shuffle_data()
    feed_pc, feed_model_names, _ = all_pc_data.next_batch(10)
    feed_pc_v, feed_pc_v_org = np.split(
        np.array([get_visible_points(x, org=True) for x in feed_pc]),
        2,
        axis=1)  # [normalized_pcs, original_pcs]

    feed_pc_v = np.array([x[0] for x in feed_pc_v])
    feed_pc_v_org = np.array([x[0] for x in feed_pc_v_org])

    ae_reconstructions, v_reconstructions, _ = ae.reconstruct([feed_pc, feed_pc_v], compute_loss=False)

    print('Finished inference')

    visualize(params['experiment_name'], params['class_name'], feed_pc[:, :, :3], ae_reconstructions, feed_pc_v_org[:, :, :3], v_reconstructions)
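
# Usage sketch (illustrative): the '{}' in test_dir is presumably filled with
# the class name by load_all_point_clouds_under_folder, e.g.
#   './s3dis/Area_6/*/Annotations/{}_*.txt'.format('chair')
#   -> './s3dis/Area_6/*/Annotations/chair_*.txt'
# A minimal entry point, assuming the snippet is a standalone script:
if __name__ == '__main__':
    main()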
Example #2
import numpy as np
import os.path as osp

# This snippet begins mid-file; the mapping below is shown from its final entry.
# It translates conf.target_pc_idx_type into a stored-data key.
nn_idx_dict = {
    # ... (earlier entries elided in the original snippet)
    'chamfer_nn_complete': 'chamfer_nn_idx_complete_test_set'
}
nn_idx = load_data(data_path, files, [nn_idx_dict[conf.target_pc_idx_type]])

correct_pred = None
if conf.correct_pred_only:
    pc_labels, pc_pred_labels = load_data(
        data_path, files, ['pc_label_test_set', 'pc_pred_labels_test_set'])
    correct_pred = (pc_labels == pc_pred_labels)

# load indices for attack
attack_pc_idx = np.load(osp.join(top_out_dir, flags.attack_pc_idx))
attack_pc_idx = attack_pc_idx[:, :conf.num_pc_for_attack]

# build classifier model and reload a saved model
reset_tf_graph()
classifier = PointNetClassifier(classifier_path,
                                flags.classifier_restore_epoch,
                                num_points=flags.num_points,
                                batch_size=10,
                                num_classes=flags.num_classes)

classes_for_attack = conf.class_names
classes_for_target = conf.class_names

for pc_class_name in pc_classes:
    if pc_class_name not in classes_for_attack:
        continue

    save_dir = create_dir(osp.join(output_path, pc_class_name))
Example #3
import os
import os.path as osp

import numpy as np

def main():
    args, params = parse_args()
    conf = get_conf(params)
    print(conf)

    if args.tiny:
        train_dir = './s3dis/Area_4/*/Annotations/{}_*.txt'
    else:
        train_dir = './s3dis/Area_[1-5]/*/Annotations/{}_*.txt'

    test_dir = './s3dis/Area_6/*/Annotations/{}_*.txt'

    reset_tf_graph()
    ae = PointNetAutoEncoder(conf.experiment_name, conf)

    buf_size = 1  # Line-buffered, so 'train_stats.txt' is flushed after every line of training stats.
    fout = open(osp.join(conf.train_dir, 'train_stats.txt'), 'a', buf_size)

    all_pc_data = load_all_point_clouds_under_folder(
        train_dir,
        params['class_name'],
        n_points=conf.n_input[0],
        with_color=conf.input_color,
        n_threads=20,
        verbose=True)

    print('Start training')
    train_stats = ae.train(all_pc_data, conf, log_file=fout)
    fout.close()

    print('Finished training')

    # Guard on a CLI flag: `visualize` names the plotting function below and is
    # always truthy. Assumes parse_args() exposes a boolean `visualize` option.
    if args.visualize:
        print('Start visualizing')
        conf.z_rotate = False
        conf.gauss_augment = None

        ckpt_path = os.path.join('./data/', params['experiment_name'],
                                 params['class_name'])
        epoch = conf.training_epochs

        reset_tf_graph()
        ae = PointNetAutoEncoder(conf.experiment_name, conf)
        ae.restore_model(ckpt_path, epoch, verbose=True)

        all_pc_data = load_all_point_clouds_under_folder(
            test_dir,
            params['class_name'],
            n_points=conf.n_input[0],
            with_color=conf.input_color,
            n_threads=20,
            verbose=True)
        all_pc_data.shuffle_data()

        feed_pc, feed_model_names, _ = all_pc_data.next_batch(10)
        feed_pc_v, feed_pc_v_org = np.split(
            np.array([get_visible_points(x, org=True) for x in feed_pc]),
            2,
            axis=1)  # [normalized_pcs, original_pcs]

        feed_pc_v = np.array([x[0] for x in feed_pc_v])
        feed_pc_v_org = np.array([x[0] for x in feed_pc_v_org])

        ae_reconstructions, v_reconstructions, _ = ae.reconstruct(
            [feed_pc, feed_pc_v], compute_loss=False)

        visualize(params['experiment_name'], params['class_name'],
                  feed_pc[:, :, :3], ae_reconstructions,
                  feed_pc_v_org[:, :, :3], v_reconstructions, 3)

        print('Finished visualizing')
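
# Shape sketch (illustrative) of the split-and-squeeze pattern above, assuming
# get_visible_points(x, org=True) returns the normalized and original clouds
# stacked along a new leading axis:
import numpy as np

stacked = np.zeros((10, 2, 1024, 3))          # batch of [normalized, original] pairs
norm_h, org_h = np.split(stacked, 2, axis=1)  # each half: (10, 1, 1024, 3)
norm = np.array([x[0] for x in norm_h])       # (10, 1024, 3); same as np.squeeze(norm_h, axis=1)
assert norm.shape == (10, 1024, 3)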
Example #4
import os

def main():
    params = parse_args()
    conf = get_conf(params)

    conf.gauss_augment = False
    conf.z_rotate = False

    ckpt_path = os.path.join('./data/', params['experiment_name'],
                             params['class_name'])
    epoch = conf.training_epochs

    reset_tf_graph()
    ae = PointNetAutoEncoder(conf.experiment_name, conf)
    ae.restore_model(ckpt_path, epoch, verbose=True)

    test_dir = './s3dis/Area_6/*/Annotations/{}_*.txt'
    all_pc_data = load_all_point_clouds_under_folder(
        test_dir,
        params['class_name'],
        n_points=conf.n_input[0],
        with_color=conf.input_color,
        n_threads=20,
        verbose=True)

    ae_recon, tl_recon, data_loss, labels, gt, gt_tl = ae.evaluate(
        all_pc_data, conf)  # gt_tl is left unnormalized so it can be compared with gt

    if conf.input_color:
        gt = gt[:, :, :3]

    ae_loss = conf.loss  # Which distance to use for the matchings.
    assert ae_loss in ['emd', 'chamfer']
    use_EMD = (ae_loss == 'emd')  # otherwise Chamfer is used

    batch_size = 100  # Pick a value that fits in GPU memory.
    normalize = True  # Matched distances are divided by the number of points in the point clouds.

    # Note: 'minimum_mathing_distance' is spelled this way in the metrics helper itself.
    ae_mmd, ae_matched_dists = minimum_mathing_distance(ae_recon,
                                                        gt,
                                                        batch_size,
                                                        normalize=normalize,
                                                        use_EMD=use_EMD)
    tl_mmd, tl_matched_dists = minimum_mathing_distance(tl_recon,
                                                        gt,
                                                        batch_size,
                                                        normalize=normalize,
                                                        use_EMD=use_EMD)

    ae_cov, ae_matched_ids = coverage(ae_recon,
                                      gt,
                                      batch_size,
                                      normalize=normalize,
                                      use_EMD=use_EMD)
    tl_cov, tl_matched_ids = coverage(tl_recon,
                                      gt,
                                      batch_size,
                                      normalize=normalize,
                                      use_EMD=use_EMD)

    ae_jsd = jsd_between_point_cloud_sets(ae_recon, gt, resolution=28)
    tl_jsd = jsd_between_point_cloud_sets(tl_recon, gt, resolution=28)

    result_file = os.path.join(
        'result',
        params['experiment_name'] + '_' + params['class_name'] + '.txt')

    with open(result_file, 'w') as f:
        f.write(f'ae_mmd: {ae_mmd}\n')
        f.write(f'tl_mmd: {tl_mmd}\n')
        # f.write(f'ae_matched_dists: {ae_matched_dists}\n')
        f.write(f'ae_cov: {ae_cov}\n')
        f.write(f'tl_cov: {tl_cov}\n')
        # f.write(f'ae_matched_ids: {ae_matched_ids}\n')
        f.write(f'ae_jsd: {ae_jsd}\n')
        f.write(f'tl_jsd: {tl_jsd}\n')
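
# For reference, a minimal NumPy sketch (an illustration, not the metrics-library
# implementation) of the symmetric Chamfer distance that the MMD and coverage
# metrics above are built on; `a` and `b` are (n_points, 3) arrays:
import numpy as np

def chamfer_distance(a, b):
    # Pairwise squared distances between the two point sets: shape (len(a), len(b)).
    d2 = np.sum((a[:, None, :] - b[None, :, :]) ** 2, axis=-1)
    # Average nearest-neighbor squared distance, in both directions.
    return d2.min(axis=1).mean() + d2.min(axis=0).mean()

# e.g. chamfer_distance(ae_recon[0], gt[0])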
Example #5
import time

import numpy as np
import tensorflow as tf
import os.path as osp

# `nn_distance`, `sort_dist_mat`, `point_clouds`, `flags`, `file_name_parts`,
# and `data_path` are assumed to come from the surrounding script.
def get_chamfer_nn():
    start_time = time.time()
    num_examples_all, num_points, _ = point_clouds.shape
    chamfer_batch_size = 10

    # build chamfer graph
    reset_tf_graph()
    source_pc_pl = tf.placeholder(tf.float32, shape=[None, num_points, 3])
    target_pc_pl = tf.placeholder(tf.float32, shape=[None, num_points, 3])
    dists_s_t, _, dists_t_s, _ = nn_distance(source_pc_pl, target_pc_pl)
    chamfer_dist = tf.reduce_mean(dists_s_t, axis=1) + tf.reduce_mean(
        dists_t_s, axis=1)

    sess = tf.Session()

    # compute chamfer distance matrix
    point_clouds_curr = point_clouds[flags.pc_start_idx:flags.pc_start_idx +
                                     flags.pc_batch_size]
    num_examples_curr = len(point_clouds_curr)
    chamfer_dist_mat_curr = -1 * np.ones([num_examples_all, num_examples_curr],
                                         dtype=np.float32)

    source_pc = np.tile(np.expand_dims(point_clouds_curr, axis=0),
                        [num_examples_all, 1, 1, 1])
    target_pc = np.tile(np.expand_dims(point_clouds, axis=1),
                        [1, num_examples_curr, 1, 1])

    for i in range(0, num_examples_all, chamfer_batch_size):
        for j in range(0, num_examples_curr, chamfer_batch_size):
            # Optional progress logging:
            # print('i %d out of %d, j %d out of %d' %
            #       (min(i + chamfer_batch_size, num_examples_all), num_examples_all,
            #        min(j + chamfer_batch_size, num_examples_curr), num_examples_curr))

            sources = source_pc[i:i + chamfer_batch_size,
                                j:j + chamfer_batch_size]
            targets = target_pc[i:i + chamfer_batch_size,
                                j:j + chamfer_batch_size]

            s_batch = np.reshape(sources, [-1, num_points, 3])
            t_batch = np.reshape(targets, [-1, num_points, 3])
            feed_dict = {source_pc_pl: s_batch, target_pc_pl: t_batch}
            dist_batch = sess.run(chamfer_dist, feed_dict=feed_dict)
            dist_batch_reshape = np.reshape(dist_batch, sources.shape[:2])
            chamfer_dist_mat_curr[i:i + chamfer_batch_size, j:j +
                                  chamfer_batch_size] = dist_batch_reshape

    assert chamfer_dist_mat_curr.min() >= 0, \
        'the chamfer_dist_mat_curr matrix was not filled correctly'

    # save current chamfer distance data
    chamfer_dist_mat_file_name = '_'.join(['chamfer_dist_mat_complete'] +
                                          file_name_parts[-3:])
    chamfer_dist_mat_file_path = osp.join(data_path,
                                          chamfer_dist_mat_file_name)
    if osp.exists(chamfer_dist_mat_file_path):
        chamfer_dist_mat = np.load(chamfer_dist_mat_file_path)
    else:
        chamfer_dist_mat = -1 * np.ones([num_examples_all, num_examples_all],
                                        dtype=np.float32)

    chamfer_dist_mat[:, flags.pc_start_idx:flags.pc_start_idx +
                     flags.pc_batch_size] = chamfer_dist_mat_curr
    np.save(chamfer_dist_mat_file_path, chamfer_dist_mat)

    duration = time.time() - start_time
    print('start index %d end index %d, out of size %d, duration (minutes): %.2f' %
          (flags.pc_start_idx,
           min(flags.pc_start_idx + flags.pc_batch_size, num_examples_all),
           num_examples_all, duration / 60.0))

    if chamfer_dist_mat.min() >= 0:
        # nearest neighbors indices
        chamfer_nn_idx = sort_dist_mat(chamfer_dist_mat)

        chamfer_nn_idx_file_name = '_'.join(['chamfer_nn_idx_complete'] +
                                            file_name_parts[-3:])
        chamfer_nn_idx_file_path = osp.join(data_path,
                                            chamfer_nn_idx_file_name)
        np.save(chamfer_nn_idx_file_path, chamfer_nn_idx)
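
# `sort_dist_mat` is defined elsewhere in the script. A plausible minimal
# version (an assumption, not the original code) orders, per point cloud, all
# other clouds by increasing Chamfer distance, dropping each cloud's self-match
# (column 0 after sorting, since the self-distance is 0):
import numpy as np

def sort_dist_mat(dist_mat):
    order = np.argsort(dist_mat, axis=1)  # ascending distances per row
    return order[:, 1:]                   # drop the self-match in column 0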