def sample_from_continuous_space(self):
    """Sample continuous pick and place parameters in one shot.

    Returns a ``(pick_params, place_params, status)`` triple.  On failure
    the first two entries are ``None`` and *status* describes the failure.
    The robot's base pose and arm configuration are restored before
    returning, whether or not sampling succeeds.
    """
    assert len(self.robot.GetGrabbed()) == 0
    # Save the robot state so it can be restored after the simulated pick.
    robot_pose = utils.get_body_xytheta(self.robot)
    robot_config = self.robot.GetDOFValues()

    # Single-shot pick sampling (this variant has no retry loop; the dead
    # attempt counters that used to sit here have been removed).
    pick_cont_params, status = self.sample_pick_cont_parameters()
    if status != 'HasSolution':
        utils.set_robot_config(robot_pose)
        return None, None, status

    # Execute the pick in simulation so place sampling sees the held object.
    self.pick_op.continuous_parameters = pick_cont_params
    self.pick_op.execute()

    place_cont_params, status = self.sample_place_cont_parameters(pick_cont_params)

    # Revert to the state before sampling: put the object back, then restore
    # the arm configuration and base pose saved above.
    utils.one_arm_place_object(pick_cont_params)
    self.robot.SetDOFValues(robot_config)
    utils.set_robot_config(robot_pose)

    if status != 'HasSolution':
        # NOTE(review): the place-sampling status is collapsed to
        # "InfeasibleIK" regardless of the actual failure mode; sibling
        # implementations in this file return the raw status instead —
        # confirm whether callers depend on this exact string.
        return None, None, "InfeasibleIK"
    self.place_op.continuous_parameters = place_cont_params
    return pick_cont_params, place_cont_params, status
def is_collision_in_all_volumes(self, obj_being_moved):
    """Return True if *obj_being_moved* collides with any stored swept volume.

    For each recorded operator instance, re-executes the associated pick in
    simulation and checks the operator's low-level motion against the moved
    object; the pick is undone before returning in every code path.
    """
    for op_instance in self.op_instances:
        #print "Checking place collisions"
        # The robot must be empty-handed before re-executing a pick.
        assert len(self.problem_env.robot.GetGrabbed()) == 0
        obj_touched_before = op_instance.discrete_parameters['object']
        associated_pick = self.pick_used[op_instance]
        # The discrete parameter may be stored as a name; resolve to a body.
        if type(obj_touched_before) == str:
            obj_touched_before = self.problem_env.env.GetKinBody(obj_touched_before)
        # Re-enable the object so collision checking sees it.
        obj_touched_before.Enable(True)
        pick_param = associated_pick.continuous_parameters
        # utils.two_arm_pick_object(obj_touched_before, pick_param)
        associated_pick.execute()
        #print "Number of place swept volumes = ", len(self.op_instances)
        if self.is_collision_in_single_volume(op_instance.low_level_motion, obj_being_moved):
            # Undo the pick with the matching arm mode before reporting.
            if op_instance.type.find('one_arm') != -1:
                utils.one_arm_place_object(pick_param)
            else:
                utils.two_arm_place_object(pick_param)
            return True
        # NOTE(review): the no-collision path always undoes the pick with
        # two_arm_place_object, even though the collision path above
        # distinguishes one-arm operators — confirm op_instances here are
        # always two-arm, otherwise this looks inconsistent.
        utils.two_arm_place_object(pick_param)
    return False
def sample_from_continuous_space(self):
    """Sample continuous pick and place parameters with bounded retries.

    Returns a ``(pick_params, place_params, status)`` triple; on failure the
    first two entries are ``None``.  The robot's saved base pose and arm
    configuration are restored before returning.
    """
    saved_base_pose = utils.get_body_xytheta(self.robot)
    saved_dof_values = self.robot.GetDOFValues()

    # --- pick phase: retry until a solution, one IK failure, or four
    # --- infeasible base poses.
    pick_cont_params = None
    status = "NoSolution"
    ik_failures = 0
    base_failures = 0
    while True:
        pick_cont_params, status = self.sample_pick_cont_parameters()
        if status == 'HasSolution':
            ik_failures += 1
            break
        if status == 'InfeasibleBase':
            base_failures += 1
        elif status == 'InfeasibleIK':
            ik_failures += 1
        if ik_failures == 1 or base_failures == 4:
            break
        if pick_cont_params is not None:
            break

    if status != 'HasSolution':
        utils.set_robot_config(saved_base_pose)
        return None, None, status

    # Execute the pick so place sampling sees the object in the gripper.
    self.pick_op.continuous_parameters = pick_cont_params
    self.pick_op.execute()

    # --- place phase: same retry structure, but a single base failure
    # --- also terminates the loop.
    place_cont_params = None
    status = "NoSolution"
    ik_failures = 0
    base_failures = 0
    while True:
        place_cont_params, status = self.sample_place_cont_parameters(pick_cont_params)
        if status == 'HasSolution':
            ik_failures += 1
            break
        if status == 'InfeasibleBase':
            base_failures += 1
        elif status == 'InfeasibleIK':
            ik_failures += 1
        if ik_failures == 1 or base_failures == 1:
            break
        if place_cont_params is not None:
            break

    # Revert to the pre-sampling state: release the object, then restore
    # the arm configuration and base pose.
    utils.one_arm_place_object(pick_cont_params)
    self.robot.SetDOFValues(saved_dof_values)
    utils.set_robot_config(saved_base_pose)

    if status == 'HasSolution':
        self.place_op.continuous_parameters = place_cont_params
        return pick_cont_params, place_cont_params, status
    return None, None, status
def apply_operator_instance_and_get_reward(self, state, operator_instance, is_op_feasible):
    """Execute *operator_instance* in simulation and return its reward.

    Infeasible operators earn ``self.infeasible_reward``.  Two-arm places
    earn a task reward for moving objects into the home region (+1), a
    penalty for moving them back out to the loading region (-1.5), and 0
    otherwise.  Raises NotImplementedError for unknown operator types.
    """
    if not is_op_feasible:
        return self.infeasible_reward

    if operator_instance.type == 'two_arm_pick':
        two_arm_pick_object(operator_instance.discrete_parameters['object'],
                            self.robot,
                            operator_instance.continuous_parameters)
        reward = 0
    elif operator_instance.type == 'two_arm_place':
        object_held = self.robot.GetGrabbed()[0]
        previous_region = self.problem_env.get_region_containing(object_held)
        two_arm_place_object(object_held, self.robot,
                             operator_instance.continuous_parameters)
        current_region = self.problem_env.get_region_containing(object_held)
        # NOTE(review): this branch compares region objects directly while
        # the next compares region names — confirm both are intended.
        if current_region.name == 'home_region' and previous_region != current_region:
            reward = 1
        elif current_region.name == 'loading_region' and previous_region.name == 'home_region':
            reward = -1.5
        else:
            reward = 0  # placing it in the same region
    elif operator_instance.type == 'one_arm_pick':
        one_arm_pick_object(operator_instance.discrete_parameters['object'],
                            operator_instance.continuous_parameters)
        reward = 1
    elif operator_instance.type == 'one_arm_place':
        one_arm_place_object(operator_instance.discrete_parameters['object'],
                             operator_instance.continuous_parameters)
        # BUGFIX: `reward` was never assigned in this branch, so the final
        # return raised UnboundLocalError.  Default to 0 (no task reward).
        # TODO confirm the intended one-arm place reward with the authors.
        reward = 0
    else:
        raise NotImplementedError

    return reward
def execute(self):
    """Execute this operator instance in the simulation environment.

    Dispatches on ``self.type`` to the matching pick/place helper, using
    ``self.discrete_parameters`` (which object) and
    ``self.continuous_parameters`` (how to grasp/place it).
    Raises NotImplementedError for unknown operator types.
    """
    # Removed: unused `env = openravepy.RaveGetEnvironments()[0]` and a
    # bare `except: pdb.set_trace()` debugging trap that swallowed all
    # exceptions and blocked in production; failures now propagate.
    if self.type == 'two_arm_pick':
        obj_to_pick = utils.convert_to_kin_body(self.discrete_parameters['object'])
        params = self.continuous_parameters
        if 'q_goal' in params and isinstance(params['q_goal'], list) and len(params['q_goal']) > 1:
            # Several candidate goal configurations: pick with the first.
            two_arm_pick_object(obj_to_pick, {'q_goal': params['q_goal'][0]})
        else:
            two_arm_pick_object(obj_to_pick, params)
    elif self.type == 'two_arm_place':
        two_arm_place_object(self.continuous_parameters)
    elif self.type == 'two_arm_pick_two_arm_place':
        obj_to_pick = utils.convert_to_kin_body(self.discrete_parameters['object'])
        two_arm_pick_object(obj_to_pick, self.continuous_parameters['pick'])
        two_arm_place_object(self.continuous_parameters['place'])
    elif self.type == 'one_arm_pick':
        obj_to_pick = utils.convert_to_kin_body(self.discrete_parameters['object'])
        one_arm_pick_object(obj_to_pick, self.continuous_parameters)
    elif self.type == 'one_arm_place':
        one_arm_place_object(self.continuous_parameters)
    elif self.type == 'one_arm_pick_one_arm_place':
        obj_to_pick = utils.convert_to_kin_body(self.discrete_parameters['object'])
        one_arm_pick_object(obj_to_pick, self.continuous_parameters['pick'])
        one_arm_place_object(self.continuous_parameters['place'])
    else:
        raise NotImplementedError
def sample_cont_params(self):
    """Sample continuous pick and place parameters for the target object.

    If ``self.cached_picks`` is available, adapts a cached (pick, place)
    pair to the current scene by relocating the object and robot and
    re-checking collisions/containment; otherwise falls back to rejection
    sampling with bounded retries.  Returns ``(pick_params, place_params,
    status)`` where status is 'HasSolution', 'InfeasibleIK',
    'InfeasibleBase', or 'NoSolution'.
    """
    # todo refactor this function
    # Save robot state so the fallback path can restore it after sampling.
    robot_pose = utils.get_body_xytheta(self.robot)
    robot_config = self.robot.GetDOFValues()
    assert len(self.robot.GetGrabbed()) == 0
    if self.cached_picks is not None:
        # Pick one cached (transform, params) pair for pick and for place.
        (pick_tf, pick_params), (place_tf, place_params) = random.choice(zip(*self.cached_picks))
        pick_region = self.problem_env.get_region_containing(self.target_obj)
        place_region = self.place_op.discrete_parameters['region']
        # Deep-copy so mutations below don't corrupt the cache entries.
        pick_params = copy.deepcopy(pick_params)
        place_params = copy.deepcopy(place_params)
        #assert pick_params['operator_name'] == 'one_arm_pick'
        #assert place_params['operator_name'] == 'one_arm_pick'
        # Remember the object's current pose so it can be restored.
        old_tf = self.target_obj.GetTransform()
        old_pose = get_body_xytheta(self.target_obj).squeeze()
        self.pick_op.continuous_parameters = place_params
        self.target_obj.SetTransform(place_tf)
        #self.pick_op.execute()
        # Last three entries of q_goal are the base (x, y, theta) —
        # presumably; TODO confirm the q_goal layout.
        set_robot_config(self.pick_op.continuous_parameters['q_goal'][-3:])
        place_pose = self.place_generator.sample_from_uniform()
        place_base_pose = self.place_feasibility_checker.place_object_and_robot_at_new_pose(self.target_obj, place_pose, place_region)
        # Reject if the proposed placement collides.
        if self.problem_env.env.CheckCollision(self.problem_env.robot) or self.problem_env.env.CheckCollision(self.target_obj):
            self.target_obj.SetTransform(old_tf)
            return None, None, 'InfeasibleIK'

        def assert_region(region):
            # Debug helper (currently only referenced from commented-out
            # code): drops into pdb when the object is not in *region*.
            try:
                assert self.problem_env.get_region_containing(self.target_obj).name == region.name
                return True
            except Exception as e:
                print(e)
                set_color(self.target_obj, [1, 1, 1])
                import pdb
                pdb.set_trace()
                return False

        # Reject if the object ends up outside the requested place region.
        if not place_region.contains(self.target_obj.ComputeAABB()):
            self.target_obj.SetTransform(old_tf)
            return None, None, 'InfeasibleIK'
        #assert_region(place_region)
        # Rewrite the cached place parameters for the newly sampled pose.
        place_params['operator_name'] = 'one_arm_place'
        place_params['object_pose'] = place_pose
        place_params['action_parameters'] = place_pose
        place_params['base_pose'] = place_base_pose
        place_params['q_goal'][-3:] = place_base_pose
        self.place_op.continuous_parameters = place_params
        self.pick_op.continuous_parameters = pick_params  # is reference and will be changed later
        self.target_obj.SetTransform(pick_tf)
        #assert_region(pick_region)
        #self.pick_op.execute()
        set_robot_config(self.pick_op.continuous_parameters['q_goal'][-3:])
        pick_base_pose = self.place_feasibility_checker.place_object_and_robot_at_new_pose(self.target_obj, old_pose, pick_region)
        pick_params['q_goal'][-3:] = pick_base_pose
        #assert_region(pick_region)
        #self.target_obj.SetTransform(pick_tf)
        #self.pick_op.execute()
        #tf = np.dot(np.dot(old_tf, np.linalg.pinv(pick_tf)), self.problem_env.robot.GetTransform())
        #self.place_op.execute()
        #self.target_obj.SetTransform(old_tf)
        #self.problem_env.robot.SetTransform(tf)
        #pick_base_pose = get_body_xytheta(self.problem_env.robot).squeeze()
        #pick_params['q_goal'][-3:] = pick_base_pose
        bad = False
        # Validate the adapted pick by executing it from the original pose.
        self.target_obj.SetTransform(old_tf)
        self.pick_op.execute()
        #assert_region(pick_region)
        if self.problem_env.env.CheckCollision(self.problem_env.robot):
            release_obj()
            self.target_obj.SetTransform(old_tf)
            return None, None, 'InfeasibleIK'
        # Validate the adapted place the same way.
        self.place_op.execute()
        if self.problem_env.env.CheckCollision(self.problem_env.robot) or self.problem_env.env.CheckCollision(self.target_obj):
            self.target_obj.SetTransform(old_tf)
            return None, None, 'InfeasibleIK'
        if not self.place_op.discrete_parameters['region'].contains(self.target_obj.ComputeAABB()):
            self.target_obj.SetTransform(old_tf)
            return None, None, 'InfeasibleIK'
        #assert_region(place_region)
        #place_action = {
        #    'operator_name': 'one_arm_place',
        #    'q_goal': np.hstack([grasp_config, new_base_pose]),
        #    'base_pose': new_base_pose,
        #    'object_pose': place_parameters,
        #    'action_parameters': obj_pose,
        #    'g_config': grasp_config,
        #    'grasp_params': grasp_params,
        #}
        #pick_action = {
        #    'operator_name': operator_skeleton.type,
        #    'q_goal': np.hstack([grasp_config, pick_base_pose]),
        #    'grasp_params': grasp_params,
        #    'g_config': grasp_config,
        #    'action_parameters': pick_parameters,
        #}
        # Restore the object's original pose before returning success.
        self.target_obj.SetTransform(old_tf)
        #assert pick_params['operator_name'] == 'one_arm_pick'
        #assert place_params['operator_name'] == 'one_arm_place'
        return pick_params, place_params, 'HasSolution'
    # Fallback: rejection-sample a pick (up to 500 infeasible base poses,
    # or a single IK failure).
    pick_cont_params = None
    n_ik_attempts = 0
    n_base_attempts = 0
    status = "NoSolution"
    while pick_cont_params is None:
        pick_cont_params, status = self.sample_pick_cont_parameters()
        if status == 'InfeasibleBase':
            n_base_attempts += 1
        elif status == 'InfeasibleIK':
            n_ik_attempts += 1
        elif status == 'HasSolution':
            n_ik_attempts += 1
            break
        if n_ik_attempts == 1 or n_base_attempts == 500:
            break
    if status != 'HasSolution':
        utils.set_robot_config(robot_pose)
        return None, None, status
    # Execute the pick so place sampling sees the held object.
    self.pick_op.continuous_parameters = pick_cont_params
    self.pick_op.execute()
    # Rejection-sample a place with the same retry bounds.
    n_ik_attempts = 0
    n_base_attempts = 0
    status = "NoSolution"
    place_cont_params = None
    while place_cont_params is None:
        place_cont_params, status = self.sample_place_cont_parameters(pick_cont_params)
        if status == 'InfeasibleBase':
            n_base_attempts += 1
        elif status == 'InfeasibleIK':
            n_ik_attempts += 1
        elif status == 'HasSolution':
            n_ik_attempts += 1
            break
        if n_ik_attempts == 1 or n_base_attempts == 500:
            break
    # Revert to the state before sampling.
    utils.one_arm_place_object(pick_cont_params)
    self.robot.SetDOFValues(robot_config)
    utils.set_robot_config(robot_pose)
    if status != 'HasSolution':
        return None, None, status
    else:
        self.place_op.continuous_parameters = place_cont_params
        return pick_cont_params, place_cont_params, status
def generate_training_data_single():
    """Solve (or load) one planning problem instance and optionally replay it.

    Seeds the RNGs from the global ``config``, builds the mover domain,
    loads a cached solution pickle if one exists (unless ``config.plan``),
    otherwise plans from scratch and records metrics, then — unless
    ``config.dontsimulate`` — replays the plan step by step with collision
    checks, optionally visualizing in a qtcoin viewer.  (Python 2 code:
    uses ``print`` statements and ``raw_input``.)
    """
    # Problem-instance seeding (environment layout).
    np.random.seed(config.pidx)
    random.seed(config.pidx)
    if config.domain == 'two_arm_mover':
        mover = Mover(config.pidx, config.problem_type)
    elif config.domain == 'one_arm_mover':
        mover = OneArmMover(config.pidx)
    else:
        raise NotImplementedError

    # Planner seeding (search randomness), separate from the instance seed.
    np.random.seed(config.planner_seed)
    random.seed(config.planner_seed)
    mover.set_motion_planner(BaseMotionPlanner(mover, 'prm'))
    mover.seed = config.pidx

    # todo change the root node
    mover.init_saver = DynamicEnvironmentStateSaver(mover.env)

    # Results go under ./ on known dev machines, else the cluster path.
    hostname = socket.gethostname()
    if hostname in {'dell-XPS-15-9560', 'phaedra', 'shakey', 'lab', 'glaucus', 'luke-laptop-1'}:
        root_dir = './'
        #root_dir = '/home/beomjoon/Dropbox (MIT)/cloud_results/'
    else:
        root_dir = '/data/public/rw/pass.port/tamp_q_results/'
    solution_file_dir = root_dir + '/test_results/greedy_results_on_mover_domain/domain_%s/n_objs_pack_%d' \
                        % (config.domain, config.n_objs_pack)
    # Subdirectory encodes which heuristic/model configuration was used.
    if config.dont_use_gnn:
        solution_file_dir += '/no_gnn/'
    elif config.dont_use_h:
        solution_file_dir += '/gnn_no_h/loss_' + str(config.loss) + '/num_train_' + str(config.num_train) + '/'
    elif config.hcount:
        solution_file_dir += '/hcount/'
    elif config.hadd:
        solution_file_dir += '/gnn_hadd/loss_' + str(config.loss) + '/num_train_' + str(config.num_train) + '/'
    else:
        solution_file_dir += '/gnn/loss_' + str(config.loss) + '/num_train_' + str(config.num_train) + '/'

    solution_file_name = 'pidx_' + str(config.pidx) + \
                         '_planner_seed_' + str(config.planner_seed) + \
                         '_train_seed_' + str(config.train_seed) + \
                         '_domain_' + str(config.domain) + '.pkl'
    if not os.path.isdir(solution_file_dir):
        os.makedirs(solution_file_dir)

    solution_file_name = solution_file_dir + solution_file_name

    # Reuse a previously computed solution unless re-planning is forced.
    is_problem_solved_before = os.path.isfile(solution_file_name)
    if is_problem_solved_before and not config.plan:
        with open(solution_file_name, 'rb') as f:
            trajectory = pickle.load(f)
            success = trajectory.metrics['success']
            tottime = trajectory.metrics['tottime']
    else:
        # Plan from scratch and record metrics on the trajectory object.
        t = time.time()
        trajectory, num_nodes = get_problem(mover)
        tottime = time.time() - t
        success = trajectory is not None
        plan_length = len(trajectory.actions) if success else 0
        if not success:
            trajectory = Trajectory(mover.seed, mover.seed)
        trajectory.states = [s.get_predicate_evaluations() for s in trajectory.states]
        trajectory.state_prime = None
        trajectory.metrics = {
            'n_objs_pack': config.n_objs_pack,
            'tottime': tottime,
            'success': success,
            'plan_length': plan_length,
            'num_nodes': num_nodes,
        }

        #with open(solution_file_name, 'wb') as f:
        #    pickle.dump(trajectory, f)

    print 'Time: %.2f Success: %d Plan length: %d Num nodes: %d' % (tottime, success, trajectory.metrics['plan_length'], trajectory.metrics['num_nodes'])
    # NOTE(review): debugging breakpoint left in — blocks every run here.
    import pdb;pdb.set_trace()
    """
    print("time: {}".format(','.join(str(trajectory.metrics[m]) for m in [
        'n_objs_pack',
        'tottime',
        'success',
        'plan_length',
        'num_nodes',
    ])))
    """
    print('\n'.join(str(a.discrete_parameters.values()) for a in trajectory.actions))

    def draw_robot_line(env, q1, q2):
        # Draw a path segment at z=0.5 between two base configurations.
        return draw_line(env, list(q1)[:2] + [.5], list(q2)[:2] + [.5], color=(0, 0, 0, 1), width=3.)

    mover.reset_to_init_state_stripstream()
    if not config.dontsimulate:
        if config.visualize_sim:
            mover.env.SetViewer('qtcoin')
            set_viewer_options(mover.env)
            raw_input('Start?')
        handles = []
        # Replay each planned action, checking collisions along the way.
        for step_idx, action in enumerate(trajectory.actions):
            if action.type == 'two_arm_pick_two_arm_place':
                def check_collisions(q=None):
                    # Optionally move the base to q, then report any
                    # robot/object collision (pausing if visualizing).
                    if q is not None:
                        set_robot_config(q, mover.robot)
                    collision = False
                    if mover.env.CheckCollision(mover.robot):
                        collision = True
                    for obj in mover.objects:
                        if mover.env.CheckCollision(obj):
                            collision = True
                    if collision:
                        print('collision')
                        if config.visualize_sim:
                            raw_input('Continue after collision?')
                check_collisions()
                o = action.discrete_parameters['two_arm_place_object']
                pick_params = action.continuous_parameters['pick']
                place_params = action.continuous_parameters['place']
                # Sanity-check path continuity: flag any jump > 1 in (x, y).
                full_path = [get_body_xytheta(mover.robot)[0]] + pick_params['motion'] + [pick_params['q_goal']] + \
                            place_params['motion'] + [place_params['q_goal']]
                for i, (q1, q2) in enumerate(zip(full_path[:-1], full_path[1:])):
                    if np.linalg.norm(np.squeeze(q1)[:2] - np.squeeze(q2)[:2]) > 1:
                        print(i, q1, q2)
                        import pdb; pdb.set_trace()
                pickq = pick_params['q_goal']
                pickt = pick_params['motion']
                check_collisions(pickq)
                for q in pickt:
                    check_collisions(q)
                obj = mover.env.GetKinBody(o)
                if len(pickt) > 0:
                    for q1, q2 in zip(pickt[:-1], pickt[1:]):
                        handles.append(
                            draw_robot_line(mover.env, q1.squeeze(), q2.squeeze()))
                # set_robot_config(pickq, mover.robot)
                # if config.visualize_sim:
                #     raw_input('Continue?')
                # set_obj_xytheta([1000,1000,0], obj)
                two_arm_pick_object(obj, pick_params)
                if config.visualize_sim:
                    raw_input('Place?')
                o = action.discrete_parameters['two_arm_place_object']
                r = action.discrete_parameters['two_arm_place_region']
                placeq = place_params['q_goal']
                placep = place_params['object_pose']
                placet = place_params['motion']
                check_collisions(placeq)
                for q in placet:
                    check_collisions(q)
                obj = mover.env.GetKinBody(o)
                if len(placet) > 0:
                    for q1, q2 in zip(placet[:-1], placet[1:]):
                        handles.append(
                            draw_robot_line(mover.env, q1.squeeze(), q2.squeeze()))
                # set_robot_config(placeq, mover.robot)
                # if config.visualize_sim:
                #     raw_input('Continue?')
                # set_obj_xytheta(placep, obj)
                two_arm_place_object(place_params)
                check_collisions()
                if config.visualize_sim:
                    raw_input('Continue?')
            elif action.type == 'one_arm_pick_one_arm_place':
                def check_collisions(q=None):
                    # Same collision probe as the two-arm branch.
                    if q is not None:
                        set_robot_config(q, mover.robot)
                    collision = False
                    if mover.env.CheckCollision(mover.robot):
                        collision = True
                    for obj in mover.objects:
                        if mover.env.CheckCollision(obj):
                            collision = True
                    if collision:
                        print('collision')
                        if config.visualize_sim:
                            raw_input('Continue after collision?')
                check_collisions()
                pick_params = action.continuous_parameters['pick']
                place_params = action.continuous_parameters['place']
                one_arm_pick_object(mover.env.GetKinBody(action.discrete_parameters['object']), pick_params)
                check_collisions()
                if config.visualize_sim:
                    raw_input('Place?')
                one_arm_place_object(place_params)
                check_collisions()
                if config.visualize_sim:
                    raw_input('Continue?')
            else:
                raise NotImplementedError

        # Verify the goal: the first n_objs_pack objects must be inside
        # the domain's goal region.
        n_objs_pack = config.n_objs_pack
        if config.domain == 'two_arm_mover':
            goal_region = 'home_region'
        elif config.domain == 'one_arm_mover':
            goal_region = 'rectangular_packing_box1_region'
        else:
            raise NotImplementedError
        if all(mover.regions[goal_region].contains(o.ComputeAABB()) for o in mover.objects[:n_objs_pack]):
            print("successful plan length: {}".format(len(trajectory.actions)))
        else:
            print('failed to find plan')
        if config.visualize_sim:
            raw_input('Finish?')
    return