# Example 1
0
def prepare_data_loaders():
    """Build train/test DataLoaders of BPS-encoded ModelNet40 point clouds.

    On the first run, loads ModelNet40, normalizes the clouds, encodes
    them against a fixed grid basis ('deltas' cells -> 3 channels per
    basis point), reshapes to a 32x32x32x3 volume, and caches everything
    in ``BPS_CACHE_FILE``; later runs load straight from the cache.

    Returns:
        (tr_loader, te_loader): shuffled DataLoaders with batch size 64.
    """
    if not os.path.exists(BPS_CACHE_FILE):
        # First run: load the raw modelnet point clouds.
        xtr, ytr, xte, yte = load_modelnet40(root_data_dir=DATA_PATH)

        # Normalize the clouds so a single fixed basis covers all of them.
        xtr_normalized = bps.normalize(xtr)
        xte_normalized = bps.normalize(xte)

        print("converting data to BPS representation..")
        print("number of basis points: %d" % N_BPS_POINTS)
        print("BPS sampling radius: %f" % BPS_RADIUS)

        print("converting train..")
        xtr_bps = _encode_grid(xtr_normalized)

        print("converting test..")
        xte_bps = _encode_grid(xte_normalized)

        print("saving cache file for future runs..")
        np.savez(BPS_CACHE_FILE, xtr=xtr_bps, ytr=ytr, xte=xte_bps, yte=yte)

    else:
        print("loading converted data from cache..")
        data = np.load(BPS_CACHE_FILE)
        xtr_bps = data['xtr']
        ytr = data['ytr']
        xte_bps = data['xte']
        yte = data['yte']

    tr_loader = _make_loader(xtr_bps, ytr)
    te_loader = _make_loader(xte_bps, yte)

    return tr_loader, te_loader


def _encode_grid(clouds):
    """Encode normalized clouds with a grid BPS basis ('deltas' cells)
    and reshape the flat encoding into a (N, 32, 32, 32, 3) volume."""
    encoded = bps.encode(clouds,
                         bps_arrangement='grid',
                         n_bps_points=N_BPS_POINTS,
                         radius=BPS_RADIUS,
                         bps_cell_type='deltas')
    return encoded.reshape([-1, 32, 32, 32, 3])


def _make_loader(x_bps, y, batch_size=64):
    """Wrap encoded features and labels into a shuffled DataLoader.

    Moves the channel axis from last to position 1 for 3D-conv input.
    NOTE(review): transpose(0, 4, 2, 3, 1) only swaps axes 1 and 4 and
    leaves 2, 3 in place — confirm (0, 4, 1, 2, 3) was not intended.
    Labels arrive as (N, 1); column 0 is taken as the class index.
    """
    x = x_bps.transpose(0, 4, 2, 3, 1)
    dataset = pt.utils.data.TensorDataset(pt.Tensor(x),
                                          pt.Tensor(y[:, 0]).long())
    return pt.utils.data.DataLoader(dataset,
                                    batch_size=batch_size,
                                    shuffle=True)
# Example 2
0
def prepare_data_loaders(n_parts=2):
    """Build train/test DataLoaders of adaptively BPS-encoded ModelNet40.

    The cache file name embeds ``n_parts`` so encodings for different
    partition counts do not collide. On a cache miss the raw clouds are
    loaded, normalized, adaptively encoded ('dists' cells), and cached;
    later runs read straight from the cache.

    Args:
        n_parts: number of density partitions for aptbps.adaptive_encode
            (1 selects the un-prefixed cache file name).

    Returns:
        (train_loader, test_loader): shuffled DataLoaders, batch size 512.
    """
    if n_parts == 1:
        cache_file = os.path.join(DATA_PATH, 'aptbps_mlp_data.npz')
    else:
        cache_file = os.path.join(DATA_PATH,
                                  str(n_parts) + 'aptbps_mlp_data.npz')

    # BUG FIX: the existence check must target the same file we save to
    # and load from. It previously tested BPS_CACHE_FILE, so the
    # n_parts-specific cache written below was never detected on reruns.
    if not os.path.exists(cache_file):

        # load modelnet point clouds
        xtr, ytr, xte, yte = load_modelnet40(root_data_dir=DATA_PATH)

        # normalize so every cloud fits the fixed encoding basis
        xtr_normalized = bps.normalize(xtr)
        xte_normalized = bps.normalize(xte)

        print("converting data to BPS representation..")
        print("number of basis points: %d" % N_BPS_POINTS)
        print("BPS sampling radius: %f" % BPS_RADIUS)
        print("converting train..")
        xtr_bps = aptbps.adaptive_encode(xtr_normalized,
                                         n_parts=n_parts,
                                         n_bps_points=N_BPS_POINTS,
                                         bps_cell_type='dists',
                                         radius=BPS_RADIUS)
        print("converting test..")
        xte_bps = aptbps.adaptive_encode(xte_normalized,
                                         n_parts=n_parts,
                                         n_bps_points=N_BPS_POINTS,
                                         bps_cell_type='dists',
                                         radius=BPS_RADIUS)
        print("conversion finished. ")
        print("saving cache file for future runs..")

        np.savez(cache_file, xtr=xtr_bps, ytr=ytr, xte=xte_bps, yte=yte)

    else:
        print("loading converted data from cache..")
        data = np.load(cache_file)
        xtr_bps = data['xtr']
        ytr = data['ytr']
        xte_bps = data['xte']
        yte = data['yte']

    # labels arrive as (N, 1); take column 0 as the integer class index
    dataset_tr = pt.utils.data.TensorDataset(pt.Tensor(xtr_bps),
                                             pt.Tensor(ytr[:, 0]).long())
    train_loader = pt.utils.data.DataLoader(dataset_tr,
                                            batch_size=512,
                                            shuffle=True)

    dataset_te = pt.utils.data.TensorDataset(pt.Tensor(xte_bps),
                                             pt.Tensor(yte[:, 0]).long())
    test_loader = pt.utils.data.DataLoader(dataset_te,
                                           batch_size=512,
                                           shuffle=True)

    return train_loader, test_loader
# Example 3
0
# Frame callback for matplotlib's animation machinery: each frame rotates
# the camera azimuth a little further, so the model appears to spin.
def animate(i):
    # Fixed elevation, azimuth proportional to the frame index.
    ax.view_init(elev=ELEV, azim=i * ANIMATION_SPEED)
    # Re-apply the axis bounds every frame so the view stays framed.
    for set_limits, low, high in ((ax.set_xlim3d, -0.8, 0.8),
                                  (ax.set_ylim3d, -0.8, 0.8),
                                  (ax.set_zlim3d, -0.7, 0.7)):
        set_limits(low, high)
    return fig,


# Generate reference set: a fixed random basis of N_BPS_POINTS points in
# 3 dimensions, seeded with RANDOM_SEED so the basis is reproducible.
basis_set = aptbps.generate_random_basis(N_BPS_POINTS,
                                         n_dims=3,
                                         radius=BPS_RADIUS,
                                         random_seed=RANDOM_SEED)

# Load ModelNet40 train/test clouds and labels from the local data dir.
xtr, ytr, xte, yte = load_modelnet40(root_data_dir=DATA_PATH)

# Normalize (only the training clouds are used below)
xtr_normalized = aptbps.normalize(xtr)

# Point cloud to animate: a single training sample picked by index.
XTR_ID = 31

input_cloud = xtr_normalized[XTR_ID]
# Transpose to per-axis rows; assumes input_cloud is (n_points, 3) — TODO confirm.
values = input_cloud.T

# Unpack per-axis coordinate arrays for plotting.
x, y, z = values

# 1. NORMALIZED INPUT POINT CLOUD

# Create a figure and a 3D Axes