Example #1

import numpy as np

def test():
    # get_modalality_path, model and the *_dim globals come from the
    # surrounding module of this snippet
    p_l_pair_path, _ = get_modalality_path()
    context = p_l_pair_path[:, 1]  # language instruction for each pair

    # rebuild the two embedding branches and restore the jointly trained weights
    p_model, l_model, _, _, _, _ = model(pointcloud_dim, language_dim, trajectory_dim)
    p_model.load_weights('Weights/joint_pl_p_weights.h5', by_name=True)
    l_model.load_weights('Weights/joint_pl_l_weights.h5', by_name=True)

    pc_data = np.load('Processed_data/pc_data_248.npy')
    l_data = np.load('Processed_data/l_data_248.npy')

    pc_embedd = p_model.predict(pc_data)
    l_embedd = l_model.predict(l_data)

    n_pair = 248
    sim_matrix = np.zeros([n_pair, n_pair])
    for i in range(n_pair):
        pc_vector = pc_embedd[i]
        # unnormalised dot product against every language embedding
        sim_score = np.sum(pc_vector * l_embedd, axis=-1)
        sim_matrix[i] = sim_score
        print('context index:', i, 'magnitude:', np.linalg.norm(pc_vector))
        print('--------------', context[i])
        print('most relevant:', context[sim_score.argmax()])
        print('most irrelevant:', context[sim_score.argmin()])
    return
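The loop above scores one point-cloud embedding at a time against all 248 language embeddings. Since both embedding matrices are small, the whole similarity matrix can also be built in a single matrix product; a minimal sketch (similarity_matrix is a hypothetical helper name, pc_embedd and l_embedd are the arrays computed above):

import numpy as np

def similarity_matrix(pc_embedd, l_embedd):
    # (n_pair, dim) @ (dim, n_pair) -> (n_pair, n_pair); row i holds the same
    # scores as np.sum(pc_embedd[i] * l_embedd, axis=-1) in the loop above
    return pc_embedd @ l_embedd.T

A row-wise argmax of the result then recovers the 'most relevant' instruction for every point cloud at once.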
Example #2

import numpy as np

def test():
    _, p_l_t_pair_path = get_modalality_path()
    context = p_l_t_pair_path[:, 1]  # language instruction for each triple

    # rebuild the joint (pointcloud, language) branch and the trajectory branch
    _, _, _, _, p_l_embedding, traj_model = model(pointcloud_dim, language_dim,
                                                  trajectory_dim)
    p_l_embedding.load_weights('Weights/joint_pltau_pl_weights.h5', by_name=True)
    traj_model.load_weights('Weights/joint_pltau_tau_weights.h5', by_name=True)

    pc_data = np.load('Processed_data/pc_data_1225.npy')
    l_data = np.load('Processed_data/l_data_1225.npy')
    traj_data = np.load('Processed_data/traj_data_1225.npy')
    distance_matrix = np.load('traj_distance_matrix.npy')

    pl_embedd = p_l_embedding.predict([pc_data, l_data])
    traj_embedd = traj_model.predict(traj_data)

    n_pair = 1225
    sim_matrix = np.zeros([n_pair, n_pair])
    for i in range(n_pair):
        pl_vector = pl_embedd[i]
        # cosine similarity against every trajectory embedding
        sim_score = np.sum(pl_vector * traj_embedd, axis=-1) / np.linalg.norm(
            pl_vector) / np.linalg.norm(traj_embedd, axis=-1)
        sim_matrix[i] = sim_score
        print('traj index:', i, context[i])
        print('most relevant:', context[sim_score.argmax()])
        print('most irrelevant:', context[sim_score.argmin()], 'original:',
              context[distance_matrix[i].argmax()])  # most distant per precomputed distances
        print('-------------------------------------------------------------------------')
    return
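Both test() functions only print rankings and discard sim_matrix. If a single number is wanted instead, a recall@k score can be read off such a matrix; a hedged sketch (recall_at_k is a hypothetical helper), assuming row i's ground-truth match is column i, as in the index-aligned data preparation below:

import numpy as np

def recall_at_k(sim_matrix, k=5):
    # position of the ground-truth column i within row i when ranked by
    # descending similarity; recall@k is the fraction ranked in the top k
    order = np.argsort(-sim_matrix, axis=1)
    ranks = np.argmax(order == np.arange(len(sim_matrix))[:, None], axis=1)
    return float(np.mean(ranks < k))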
Example #3

import numpy as np

def training_data_preparation():
    ### prepare training data
    p_l_pair_path, p_l_t_pair_path = get_modalality_path()
    traj_paths = p_l_t_pair_path[:, 2]  # list of all trajectory files: 1225
    traj_samples = traj_paths.shape[0]
    x_train = np.zeros([traj_samples, trajectory_dim])  # values scaled to [-1, 1]

    for i in range(traj_samples):
        ### preprocess trajectory
        traj_path = traj_paths[i]
        traj_vector = preprocess_trajectory(traj_path)

        ### feed into modal data
        x_train[i] = traj_vector

    return x_train
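None of the preparation variants persists its result on its own; the commented-out np.save calls in the two variants that follow show the intended caching step, and the test() functions above np.load exactly those files. A minimal driver sketch for the trajectory variant (the variable name is illustrative, the path matches what test() loads):

import numpy as np

x_traj = training_data_preparation()  # shape (1225, trajectory_dim)
np.save('Processed_data/traj_data_1225.npy', x_traj)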
Example #4

import numpy as np

def training_data_preparation():
    ### prepare training data
    p_l_pair_path, p_l_t_pair_path = get_modalality_path()
    p_paths = p_l_pair_path[:, 0]  # list of all pointcloud paths: 248
    p_samples = p_paths.shape[0]

    x_train = np.zeros([p_samples, pointcloud_dim])

    for i in range(p_samples):
        # print(i)
        pc_path = p_paths[i]
        pc_vector = preprocess_pointcloud(pc_path)
        x_train[i] = pc_vector

    # np.save('Processed_data/pc_data_248.npy', x_train)
    # exit()
    return x_train
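preprocess_pointcloud is defined outside these snippets; all the loop above needs is one flat vector of length pointcloud_dim per file. A hypothetical sketch, assuming each file holds an (N, 3) array with a fixed N such that pointcloud_dim == N * 3:

import numpy as np

def preprocess_pointcloud(pc_path):
    # hypothetical implementation: centre the cloud, scale it into the
    # unit cube, and flatten to an (N * 3,) vector
    points = np.load(pc_path)               # assumed (N, 3)
    points = points - points.mean(axis=0)   # centre at the origin
    points = points / np.abs(points).max()  # scale into [-1, 1]
    return points.reshape(-1)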
Example #5

import numpy as np

def training_data_preparation():
    ### prepare training data
    p_l_pair_path, p_l_t_pair_path = get_modalality_path()
    context = p_l_pair_path[:, 1]  # list of all language instructions: 248 sentences
    tokenizer, reverse_tokenizer = language_tokenizer(context, num_words=language_dim)

    l_samples = context.shape[0]
    x_train = np.zeros([l_samples, language_dim])

    for i in range(l_samples):
        language = context[i]
        l_vector = preprocess_language(language, tokenizer)
        x_train[i] = l_vector

    # np.save('Processed_data/l_data_248.npy', x_train)
    # exit()
    return x_train
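language_tokenizer and preprocess_language are likewise defined elsewhere. Given num_words=language_dim and the fixed-length rows written into x_train, one plausible reading is a bag-of-words encoding via the Keras Tokenizer; a hypothetical sketch of both helpers under that assumption:

import numpy as np
from tensorflow.keras.preprocessing.text import Tokenizer

def language_tokenizer(context, num_words):
    # hypothetical: fit a word index on all instructions and keep an
    # index -> word map for decoding
    tokenizer = Tokenizer(num_words=num_words)
    tokenizer.fit_on_texts(list(context))
    reverse_tokenizer = {idx: word for word, idx in tokenizer.word_index.items()}
    return tokenizer, reverse_tokenizer

def preprocess_language(language, tokenizer):
    # hypothetical: one binary bag-of-words row of length num_words
    return tokenizer.texts_to_matrix([language], mode='binary')[0]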