Example 1
def build_class_dataset(context):
    import concurrent.futures
    import ai2thor.controller
    import multiprocessing as mp
    # Use 'spawn' so each worker process gets a fresh interpreter (and its own
    # ai2thor controller) rather than a forked copy of this process.
    mp.set_start_method('spawn')

    controller = ai2thor.controller.Controller()
    executor = concurrent.futures.ProcessPoolExecutor(max_workers=4)
    futures = []

    # Submit one job per scene; class_dataset_images_for_scene is defined
    # elsewhere in the source and does the actual rendering.
    for scene in controller.scene_names():
        print("processing scene %s" % scene)
        futures.append(executor.submit(class_dataset_images_for_scene, scene))

    # Wait for every scene to finish, reporting each one as it completes.
    for f in concurrent.futures.as_completed(futures):
        scene = f.result()
        print("scene name complete: %s" % scene)
Example 2
                        type=int,
                        default=200,
                        help='number of viewpoints to sample per scene')
    parser.add_argument('--fov',
                        type=int,
                        default=60,
                        help='camera field of view')
    args = parser.parse_args()

    # Create the dataset directory layout: a train/ and a val/ split.
    os.mkdir(args.data_dir)
    os.mkdir(os.path.join(args.data_dir, 'train'))
    os.mkdir(os.path.join(args.data_dir, 'val'))

    controller = ai2thor.controller.Controller()
    controller.start()
    scene_names = controller.scene_names()
    # Strip the '_physics' suffix so names match the plain FloorPlan names below.
    scene_names = [s.replace('_physics', '') for s in scene_names]
    # Hold out the first two scenes of each room type (kitchens, living rooms,
    # bedrooms, bathrooms) for validation.
    validation_scenes = [
        'FloorPlan1', 'FloorPlan2', 'FloorPlan201', 'FloorPlan202',
        'FloorPlan301', 'FloorPlan302', 'FloorPlan401', 'FloorPlan402'
    ]

    for scene_name in scene_names:
        # Save each scene's images under val/ or train/ depending on the hold-out list.
        if scene_name in validation_scenes:
            save_dir = os.path.join(args.data_dir, 'val', scene_name)
        else:
            save_dir = os.path.join(args.data_dir, 'train', scene_name)
        os.mkdir(save_dir)

        navigable_points, centre = setup_scene(controller, scene_name)
        locations, rotations, horizons = random_viewpoints(
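
setup_scene and random_viewpoints are also defined elsewhere in the source, and the snippet above is cut off mid-call. As a rough sketch of the kind of sampling random_viewpoints is expected to perform, with a hypothetical signature (navigable points plus a viewpoint count, returning parallel lists of positions, yaw rotations, and camera horizons):

import random

def random_viewpoints(navigable_points, num_viewpoints, horizons=(-30, 0, 30)):
    # Hypothetical helper, not the original implementation: pick a reachable
    # position, a random yaw, and a camera pitch (horizon) per viewpoint.
    locations, rotations, pitches = [], [], []
    for _ in range(num_viewpoints):
        locations.append(random.choice(navigable_points))
        rotations.append(random.uniform(0.0, 360.0))
        pitches.append(random.choice(horizons))
    return locations, rotations, pitches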