Example no. 1
                '''
                pose = [p[0], p[1], p[2], nlx, nly, nlz]
                '''
                pose = [
                    float(jp["pos"]["x"]),
                    float(jp["pos"]["y"]),
                    float(jp["pos"]["z"]),
                    float(jp["orn"]["x"]),
                    float(jp["orn"]["y"]),
                    float(jp["orn"]["z"])
                ]
                anno.append(pose)

    if enable_anno:
        np.save("output/{}_anno".format(DATASET), np.array(anno))
    np.save("output/{}_pcs".format(DATASET), pcs)
    np.save("output/{}_names".format(DATASET), np.array(names))

    reset_tf_graph()
    ae_configuration = MODEL_DIR + '/configuration'
    ae_conf = Conf.load(ae_configuration)
    ae_conf.encoder_args['verbose'] = False
    ae_conf.decoder_args['verbose'] = False
    ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf)

    ae.restore_model(MODEL_DIR, RESTORE_EPOCH, verbose=True)

    latent_codes = ae.get_latent_codes(pcs)

    print(latent_codes.shape)
    np.save("output/{}_latent".format(DATASET), np.array(latent_codes))
Example no. 2
    for idx, point_file in enumerate(pc_files[:]):
        cloud = PyntCloud.from_file(point_file)
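        # cloud.points is a pandas DataFrame; the assignment below assumes it holds exactly the x, y, z columns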
        test_pcs[idx, :, :] = cloud.points[:n_points]
    #print(test_pcs[0].shape)

    reset_tf_graph()
    ae_configuration = model_dir + '/configuration'
    ae_conf = Conf.load(ae_configuration)
    ae_conf.encoder_args['verbose'] = False
    ae_conf.decoder_args['verbose'] = False
    ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf)

    ae.restore_model(model_dir, restore_epoch, verbose=True)

    #latent_code = ae.transform(test_pcs[:1])
    latent_codes = ae.get_latent_codes(test_pcs)

    for pc_idx in range(len(latent_codes) - 1):
        a = latent_codes[pc_idx]
        b = latent_codes[pc_idx + 1]  #aug_latent_codes[0]
        diff = a - b
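        # a - step * diff == (1 - step) * a + step * b: walk linearly from code a toward code b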
        steps = np.linspace(0.0, 1.0, num=9)
        interpolations = []
        for step in steps[:-1]:
            interpolations.append(a - step * diff)

        reconstructions = ae.decode(interpolations)

        for inter_id, rec in enumerate(reconstructions):
            #plot_3d_point_cloud(rec[:, 0], rec[:, 1], rec[:, 2], in_u_sphere=True)
                points2file(
Example no. 3
    normalize=args.normalize_shape,
    file_names=file_names)
print 'Shape of DATA =', all_pc_data.point_clouds.shape

#######################
# Load pre-trained AE #
#######################
reset_tf_graph()
ae_conf = Conf.load(ae_configuration)
ae_conf.encoder_args['verbose'] = False
ae_conf.decoder_args['verbose'] = False
ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf)
ae.restore_model(ae_conf.train_dir, ae_epoch, verbose=True)

# Use AE to convert raw pointclouds to latent codes.
latent_codes = ae.get_latent_codes(all_pc_data.point_clouds)
latent_data = PointCloudDataSet(latent_codes)
print 'Shape of DATA =', latent_data.point_clouds.shape

#######################
# Set GAN parameters. #
#######################
use_wgan = True  # Wasserstein with gradient penalty, or not?
n_epochs = args.epochs  # Epochs to train.

plot_train_curve = True
save_gan_model = True
saver_step = np.hstack(
    [np.array([1, 5, 10]),
     np.arange(50, n_epochs + 1, 250)])
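
For reference, a short sketch (plain NumPy, with a hypothetical n_epochs of 1000 purely for illustration) of the checkpoint schedule the saver_step expression above produces:

import numpy as np

n_epochs = 1000  # hypothetical value, for illustration only
saver_step = np.hstack([np.array([1, 5, 10]),
                        np.arange(50, n_epochs + 1, 250)])
print(saver_step)  # -> [  1   5  10  50 300 550 800]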
Example no. 4
    te_pc, _, _ = all_te_pc_data.full_epoch_data()
    M = te_pc.shape[0]
    te_shape_lst.append(te_pc)
    for _ in range(M):
        te_lbl.append(i)

tr_pc = np.concatenate(tr_shape_lst)
tr_lbl = np.array(tr_lbl)
te_pc = np.concatenate(te_shape_lst)
te_lbl = np.array(te_lbl)

assert tr_pc.shape[0] == tr_lbl.shape[0]
assert te_pc.shape[0] == te_lbl.shape[0]

print("Gather latent vectors (train set)")
tr_latent = ae.get_latent_codes(tr_pc, batch_size=100)
print(tr_latent.shape)

tr_latent_save_path = os.path.join(conf.train_dir, 'MN_train_all_latent.npy')
tr_label_save_path = os.path.join(conf.train_dir, 'MN_train_all_label.npy')
np.save(tr_latent_save_path, tr_latent)
np.save(tr_label_save_path, tr_lbl)
print("Train latent vectors and labels save path:%s %s"\
      %(tr_latent_save_path, tr_label_save_path))

print("Gather latent vectors (test set)")
te_latent = ae.get_latent_codes(te_pc, batch_size=100)
print(te_latent.shape)

te_latent_save_path = os.path.join(conf.train_dir, 'MN_test_all_latent.npy')
te_label_save_path = os.path.join(conf.train_dir, 'MN_test_all_label.npy')
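
Assuming the test-side arrays end up at the two paths defined above (mirroring the train-side np.save calls), a minimal, hypothetical sketch of one common downstream use of such features is a linear SVM on the latent codes:

import numpy as np
from sklearn.svm import LinearSVC

# Load the feature/label arrays; the test-side files are assumed to be written
# the same way as the train-side ones.
tr_latent = np.load(tr_latent_save_path)
tr_lbl = np.load(tr_label_save_path)
te_latent = np.load(te_latent_save_path)
te_lbl = np.load(te_label_save_path)

# Linear SVM on the latent features; C=0.01 is an arbitrary illustrative value.
clf = LinearSVC(C=0.01)
clf.fit(tr_latent, tr_lbl)
print("Test accuracy: %.3f" % clf.score(te_latent, te_lbl))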