Example No. 1
    def get_env(self):  # COCO
        """
        Create environment for COCO dataset generation according to dataset config file

        Returns:
            :return env: (object) Environment for dataset generation
        """
        env = RandomizedEnvWrapper(
            env=gym.make(
                config['env_name'],
                robot=config['robot'],
                render_on=True,
                gui_on=config['gui_on'],
                show_bounding_boxes_gui=config['show_bounding_boxes_gui'],
                changing_light_gui=config['changing_light_gui'],
                shadows_on=config['shadows_on'],
                color_dict=config['color_dict'],
                object_sampling_area=config['object_sampling_area'],
                num_objects_range=config['num_objects_range'],
                used_objects=used_objects,
                active_cameras=config['active_cameras'],
                camera_resolution=config['camera_resolution'],
                renderer=p.ER_BULLET_HARDWARE_OPENGL,  # pybullet hardware OpenGL renderer
                dataset=True,
            ),
            config_path=config['output_folder'] + '/config_dataset.json')
        p.setGravity(0, 0, -9.81)  # standard gravity along -z (pybullet)
        return env
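
For orientation, a minimal usage sketch for the COCO variant above. The `generator` instance, the loop length, and the module-level `config`/`used_objects` globals are assumptions rather than part of the original snippet; `reset(random_pos=True)` mirrors the call made in the VAE example further below.

# Hedged usage sketch: assumes `config` and `used_objects` are loaded at module
# level and that get_env() above is a method of some hypothetical `generator`.
env = generator.get_env()
env.reset(random_pos=True)                      # re-sample object placement
for _ in range(10):                             # arbitrary number of frames
    action = env.action_space.sample()          # standard gym action sampling
    observation, reward, done, info = env.step(action)
env.close()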
Example No. 2
class GeneratorVae:
    """
    Generator class for the image dataset used to train the VAE vision model
    """
    def __init__(self):
        self.object_settings = {"exported_object_classes": [], "exported_objects": []}
        self.env = None
        self.imsize = config["imsize"]  # image side in pixels; square images are the only format supported at the moment

    def get_env(self):
        """
        Create environment for VAE dataset generation according to dataset config file
        """
        self.env = RandomizedEnvWrapper(
            env=gym.make(
                config['env_name'],
                robot=config['robot'],
                render_on=True,
                gui_on=config['gui_on'],
                show_bounding_boxes_gui=config['show_bounding_boxes_gui'],
                changing_light_gui=config['changing_light_gui'],
                shadows_on=config['shadows_on'],
                color_dict=config['color_dict'],
                object_sampling_area=config['object_sampling_area'],
                num_objects_range=config['num_objects_range'],
                used_objects=used_objects,
                active_cameras=config['active_cameras'],
                camera_resolution=config['camera_resolution'],
                dataset=True,
            ),
            config_path=config['output_folder'] + '/config_dataset.json')
        p.setGravity(0, 0, -9.81)

    def collect_data(self, steps):
        """
        Collect data for the VAE dataset

        Parameters:
            :param steps: (int) Number of episodes initiated during dataset generation
        """
        data = np.zeros((steps, self.imsize, self.imsize, 3), dtype='f')
        for t in range(steps):
            self.env.reset(random_pos=True)
            self.env.render()
            action = [random.uniform(1, 2) for _ in range(6)]
            # action = [2, 2, 2, 2, 2, 2]
            self.env.robot.reset_random(action)
            # send the Kuka arms up
            observation, reward, done, info = self.env.step(action)
            img = observation['camera_data'][6]['image']
            img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
            img = cv2.resize(img[0:450, 100:500], (self.imsize, self.imsize))  # crop the workspace and scale to imsize
            cv2.imshow("image", img)
            cv2.waitKey(1)
            name = str(t + 7999).zfill(6)  # zero-padded file index, offset by 7999
            cv2.imwrite(os.path.join(dataset_pth, "img_{}.png".format(name)), img)
            data[t] = img
            print("Image {}/{}".format(t, steps))
        self.env.close()
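
A hedged end-to-end driver for the class above; the episode count is arbitrary, and `config`, `used_objects`, and `dataset_pth` are assumed to be defined at module level, exactly as collect_data expects.

# Hedged usage sketch for GeneratorVae.
generator = GeneratorVae()
generator.get_env()            # builds self.env (RandomizedEnvWrapper) and sets gravity
generator.collect_data(100)    # arbitrary episode count; writes img_*.png into dataset_pth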
Example No. 3
    def get_env(self):
        """
        Create environment for VAE dataset generation according to dataset config file
        """
        self.env = RandomizedEnvWrapper(
            env=gym.make(
                config['env_name'],
                robot=config['robot'],
                render_on=True,
                gui_on=config['gui_on'],
                show_bounding_boxes_gui=config['show_bounding_boxes_gui'],
                changing_light_gui=config['changing_light_gui'],
                shadows_on=config['shadows_on'],
                color_dict=config['color_dict'],
                object_sampling_area=config['object_sampling_area'],
                observation=config["observation"],
                used_objects=used_objects,
                task_objects=config["task_objects"],
                active_cameras=config['active_cameras'],
                camera_resolution=config['camera_resolution'],
                dataset=True,
            ),
            config_path=config['output_folder'] + '/config_dataset.json')
        p.setGravity(0, 0, -9.81)
Example No. 4
    def get_env(self):  # DOPE
        """
        Create environment for DOPE dataset generation according to dataset config file

        Returns:
            :return env: (object) Environment for dataset generation
        """
        env = RandomizedEnvWrapper(
            env=gym.make(
                config['env_name'],
                robot=config['robot'],
                render_on=True,
                gui_on=config['gui_on'],
                show_bounding_boxes_gui=config['show_bounding_boxes_gui'],
                changing_light_gui=config['changing_light_gui'],
                shadows_on=config['shadows_on'],
                color_dict=config['color_dict'],
                object_sampling_area=config['object_sampling_area'],
                num_objects_range=config['num_objects_range'],
                used_objects=used_objects,
                active_cameras=config['active_cameras'],
                camera_resolution=config['camera_resolution'],
                dataset=True,
            ),
            config_path=config['output_folder'] + '/config_dataset.json')
        p.setGravity(0, 0, -9.81)
        return env
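
All four snippets read a module-level `config` dict (the same values are re-exported next to the generated dataset as config_dataset.json) plus the `used_objects` and `dataset_pth` globals. The sketch below only lists the keys the snippets actually touch; every value shown is a placeholder assumption, not taken from the original configuration.

# Hypothetical shape of the module-level globals the snippets rely on.
config = {
    "env_name": "Gym-v0",                  # placeholder gym environment id
    "robot": "kuka",                       # placeholder robot name
    "gui_on": True,
    "show_bounding_boxes_gui": False,
    "changing_light_gui": False,
    "shadows_on": True,
    "color_dict": {},                      # object-to-colour mapping
    "object_sampling_area": [-0.7, 0.7, 0.3, 0.9, 0.0, 0.1],  # placeholder bounds
    "num_objects_range": [1, 4],
    "active_cameras": [0, 0, 0, 0, 0, 0, 1],  # camera 6 is the one read in collect_data
    "camera_resolution": [640, 480],
    "observation": {},                     # only read in Example No. 3
    "task_objects": [],                    # only read in Example No. 3
    "output_folder": "./dataset",
    "imsize": 128,                         # square image side used by GeneratorVae
}
used_objects = []                          # object set sampled into the scene
dataset_pth = config["output_folder"]      # where collect_data writes img_*.png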