Example #1
def generate_pointnav_dataset(config, num_episodes):
    # Build a bare simulator (no full habitat.Env) and pull episodes directly
    # from the PointNav episode generator.
    sim = make_sim(id_sim=config.SIMULATOR.TYPE, config=config.SIMULATOR)

    sim.seed(config.SEED)
    random.seed(config.SEED)
    generator = pointnav_generator.generate_pointnav_episode(
        sim=sim,
        shortest_path_success_distance=config.TASK.SUCCESS_DISTANCE,
        shortest_path_max_steps=config.ENVIRONMENT.MAX_EPISODE_STEPS,
    )
    episodes = []
    for i in range(num_episodes):
        print(f"Generating episode {i+1}/{num_episodes}")
        episode = next(generator)
        episodes.append(episode)

    dataset = habitat.Dataset()
    dataset.episodes = episodes
    return dataset


def test_pointnav_episode_generator():
    config = get_config(CFG_TEST)
    config.defrost()
    config.DATASET.SPLIT = "val"
    config.ENVIRONMENT.MAX_EPISODE_STEPS = 500
    config.freeze()
    if not PointNavDatasetV1.check_config_paths_exist(config.DATASET):
        pytest.skip("Test skipped as dataset files are missing.")
    with habitat.Env(config) as env:
        env.seed(config.SEED)
        random.seed(config.SEED)
        generator = pointnav_generator.generate_pointnav_episode(
            sim=env.sim,
            shortest_path_success_distance=config.TASK.SUCCESS_DISTANCE,
            shortest_path_max_steps=config.ENVIRONMENT.MAX_EPISODE_STEPS,
        )
        episodes = []
        for i in range(NUM_EPISODES):
            episode = next(generator)
            episodes.append(episode)

        for episode in pointnav_generator.generate_pointnav_episode(
            sim=env.sim,
            num_episodes=NUM_EPISODES,
            shortest_path_success_distance=config.TASK.SUCCESS_DISTANCE,
            shortest_path_max_steps=config.ENVIRONMENT.MAX_EPISODE_STEPS,
            geodesic_to_euclid_min_ratio=0,
        ):
            episodes.append(episode)

        assert len(episodes) == 2 * NUM_EPISODES
        env.episode_iterator = iter(episodes)

        for episode in episodes:
            check_shortest_path(env, episode)

        dataset = habitat.Dataset()
        dataset.episodes = episodes
        assert (
            dataset.to_json()
        ), "Generated episodes aren't json serializable."
Example #3
def generate_episodes():
    # -- Load scene files
    semantic_data = np.load(SCENE_INFO_PATH, allow_pickle=True).item()
    df_semantic = semantic_data["df_semantic"]
    df_objects = semantic_data["df_objects"]

    # filter classes
    selected_classes = MATCHING_CLASSES.keys()
    df_objects = df_objects[df_objects.class_name.apply(
        lambda x: x in selected_classes)]
    print("_____________CLASSES INFO_____________")
    print(df_objects.class_name.value_counts())
    print(f"Total count: {len(df_objects)}")
    print("_______________________________________")

    # -- Load config
    config = get_config(BASE_CFG_PATH)
    base_success = config.TASK.SUCCESS_DISTANCE

    env = habitat.Env(config)
    env.seed(config.SEED)
    random.seed(config.SEED)

    # Generate spiral coords
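    # NOTE: spiral() is assumed to be a project-local helper returning a list of
    # small planar offsets; the result is passed to the episode generator below
    # as spiral_coord.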
    spiral_shift = np.array(spiral(5, 0.001))

    dataset_episodes = []
    for room in df_objects.room.unique():
        print(f"Generating episodes for room {room}")
        # if room in ['office_1', 'office_2', 'room_2', 'frl_apartment_0', 'office_3',
        #        'frl_apartment_2', 'hotel_0', 'apartment_0', 'frl_apartment_5',
        #        'room_1', 'room_0', 'apartment_2', 'apartment_1']:
        #     continue

        scene_path = SCENE_MOCKUP_PATH.format(room)
        scene_df = df_objects[df_objects.room == room]

        config.defrost()
        config.SIMULATOR.SCENE = scene_path
        config.freeze()
        _random_episode(env, config)
        out = env.reset()

        # =====================================================================
        # Determine floor coord/s

        # Sample navigable points and keep their y (height) values; points on
        # small disconnected islands are rejected.
        pts = []
        while len(pts) < 300:
            pt = env.sim.sample_navigable_point()
            if env.sim.island_radius(pt) > ISLAND_RADIUS_LIMIT:
                pts.append(pt[1])
        # Cluster the sampled heights into floors: take the most frequent height
        # first, then keep only heights more than 2.3 m above the last floor.
        floor_coords = pd.Series(pts).value_counts().index.values
        floor_coord = []
        while len(floor_coords) > 0:
            floor_coord.append(floor_coords[0])
            floor_coords = floor_coords[floor_coords > (floor_coord[-1] + 2.3)]

        print(f"Room {room} has {len(floor_coord)} floors @ ({floor_coord})")
        print("objects", scene_df.class_name.unique())
        # =====================================================================

        for obj_idx, obj_row in scene_df.iterrows():
            print(f"-generating {NUM_EPISODES} for {obj_row['class_name']}")

            t_coord = obj_row["habitat_coord"]
            t_size = obj_row["habitat_size"]

            # Determine success distance
            h_to_obj = determine_height_to_object(t_coord, t_size, floor_coord)
            if h_to_obj is None:
                print(f"Coord under floor {obj_idx}, coord: {t_coord}")
                continue

            # Success distance: height from the floor up to the object, plus the
            # base success distance, plus half of the object's largest extent.
            success_distance = h_to_obj + base_success + max(t_size) / 2.0

            generator = generate_pointnav_episode(
                sim=env.sim,
                target=t_coord,
                shortest_path_success_distance=success_distance,
                shortest_path_max_steps=config.ENVIRONMENT.MAX_EPISODE_STEPS,
                geodesic_to_euclid_min_ratio=1.1,
                number_retries_per_target=50,
                geodesic_min_ratio_prob=0.5,
                floor_coord=floor_coord,
                spiral_coord=spiral_shift,
            )

            episodes = []

            for i in range(NUM_EPISODES):
                episode = next(generator)

                # Add arguments
                episode.room = room
                episode.t_coord = t_coord
                episode.t_size = t_size
                episode.class_name = obj_row['class_name']

                episodes.append(episode)

            for episode in episodes:
                check_shortest_path(env, episode)

            dataset_episodes += episodes

        # Checkpoint the accumulated episodes after each room.
        np.save("dataset_all", dataset_episodes)

    dataset = habitat.Dataset()
    dataset.episodes = dataset_episodes
    out_fld = f"{out_fld_base}/content/"
    out_fld_dummy = f"{out_fld_base}/"

    os.mkdir(out_fld)

    # dfm (a per-episode DataFrame) and model_ep (a template episode object) are
    # assumed to be defined earlier in the original script.
    for room in dfm.room.unique():
        dfmslice = dfm[dfm.room == room]

        dataset_elements = []
        for x in dfmslice.to_dict(orient="records"):
            # Copy the template episode, overwrite its attributes with this
            # record's fields, and store the episode object itself.
            p = copy.deepcopy(model_ep)
            p.__dict__ = x
            dataset_elements.append(p)

        dataset = habitat.Dataset()
        dataset.episodes = dataset_elements

        json_data = dataset.to_json()
        json_path = out_fld + f"{room}.json"
        print(f"1 gzip {json_path}")
        with open(json_path, "w") as f:
            f.write(json_data)
        os.system(f"gzip {json_path}")

    # Dummy file
    json_path = out_fld_dummy + f"{dataset_name}.json"
    print(f"2 gzip {json_path}")
    with open(json_path, "w") as f:
        f.write('{"episodes": []}')
    os.system(f"gzip {json_path}")