def to_blocks(tower):
    blocks = []
    for ix in range(tower.shape[0]):
        block = Object('block',
                       dimensions=tower[ix, 4:7].numpy().tolist(),
                       mass=tower[ix, 0].item(),
                       com=tower[ix, 1:4].numpy().tolist(),
                       color=(1, 0, 0))
        block.pose = Pose(Position(*tower[ix, 7:10].numpy().tolist()),
                          Quaternion(*tower[ix, 10:14].numpy().tolist()))
        blocks.append(block)
    return blocks
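# Usage sketch for to_blocks (not part of the original file): it assumes `torch`
# is available and that each row of `tower` follows the per-block vector layout
# implied by the indexing above: [0] mass, [1:4] com, [4:7] dimensions,
# [7:10] position, [10:14] quaternion. The single row below is hypothetical data.
import torch

example_tower = torch.tensor([[1.0,                   # mass
                               0.0, 0.0, 0.0,         # com
                               0.04, 0.04, 0.06,      # dimensions
                               0.5, 0.0, 0.03,        # position
                               0.0, 0.0, 0.0, 1.0]])  # quaternion (x, y, z, w)
example_blocks = to_blocks(example_tower)
print(example_blocks[0].dimensions, example_blocks[0].pose)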
def main(args):
    NOISE = 0.00005

    # Define real-world block params and initial poses.
    # TODO: Load blocks from pickle file.
    if args.use_vision:
        with open(args.blocks_file, 'rb') as handle:
            blocks = pickle.load(handle)
        block_init_xy_poses = None  # These will be initialized randomly but updated by the vision system.
    else:
        # block0 = Object('block0', Dimensions(.0381,.0318,.0635), 1.0, Position(0,0,0), Color(1,0,0))
        # block1 = Object('block1', Dimensions(.0381,.0587,.0635), 1.0, Position(0,0,0), Color(0,0,1))
        # block2 = Object('block2', Dimensions(.0635,.0381,.0746), 1.0, Position(0,0,0), Color(0,1,0))
        block0 = Object('block0', Dimensions(.0381, .0318, .05), 1.0, Position(0, 0, 0), Color(1, 0, 0))
        block1 = Object('block1', Dimensions(.0381, .0587, .06), 1.0, Position(0, 0, 0), Color(0, 0, 1))
        block2 = Object('block2', Dimensions(.0635, .0381, .05), 1.0, Position(0, 0, 0), Color(0, 1, 0))
        blocks = [block0, block1, block2]
        block_init_xy_poses = [Pose(Position(0.65, 0.3, 0), Quaternion(0, 0, 0, 1)),
                               Pose(Position(0.65, 0.15, 0), Quaternion(0, 0, 0, 1)),
                               Pose(Position(0.65, 0.0, 0), Quaternion(0, 0, 0, 1))]

    panda = PandaAgent(blocks, NOISE,
                       use_platform=False,
                       block_init_xy_poses=block_init_xy_poses,
                       teleport=False,
                       use_vision=args.use_vision,
                       use_action_server=args.use_action_server,
                       real=args.real)

    # For now, hard-code a tower; in the future it will be supplied by
    # active data collection or by a tower found through planning for evaluation.
    tower_blocks = copy.copy(blocks)
    if args.use_vision:
        tower_poses = [Pose(Position(0.5, -0.25, 0.0725), Quaternion(0, 0, 0, 1)),
                       Pose(Position(0.5, -0.25, 0.18), Quaternion(0, 0, 0, 1)),
                       Pose(Position(0.5, -0.25, 0.28), Quaternion(0, 0, 0, 1))]
    else:
        tower_poses = [Pose(Position(0.3, 0.25, .0318), Quaternion(0, 0, 0, 1)),
                       Pose(Position(0.3, 0.25, .0953), Quaternion(0, 0, 0, 1)),
                       Pose(Position(0.3, 0.25, .1643), Quaternion(0, 0, 0, 1))]
    tower = []
    for block, pose in zip(tower_blocks, tower_poses):
        block.set_pose(pose)
        block = get_rotated_block(block)  # NOTE: needed to set the rotations field of the block.
        tower.append(block)

    # ... and execute the resulting plan.
    if args.use_action_server:
        panda.simulate_tower_parallel(tower, base_xy=(0.5, -0.25), real=args.real, vis=True, T=2500)
    else:
        panda.simulate_tower(tower, base_xy=(0.5, -0.25), real=args.real, vis=True, T=2500)
def __init__(self, direction=None, timesteps=50, rot=None, delta=0.005, block=None):
    """
    PushAction moves the hand in the world by a fixed distance every timestep.
    We assume we will push the block in a direction through the object's geometric center.
    :param direction: A unit vector direction to push.
    :param timesteps: The number of timesteps to execute the action for.
    :param rot: The rotation the block will be pushed with (used to compute its height).
    :param delta: How far to move each timestep.
    :param block: The block to push (used to get its dimensions).
    """
    super(PushAction, self).__init__(T=timesteps)
    self.rot = rot
    self.timesteps = timesteps
    self.delta = delta
    self.tx = 0

    if direction is None:
        self.direction = self.get_random_dir()
    else:
        self.direction = direction

    _, _, block_height = np.abs(rot.apply(block.dimensions))
    table, leg = Object.platform()
    platform_height = table.dimensions.z + leg.dimensions.z
    self.push_start_pos = Position(x=0 - self.direction[0]*self.delta*20,
                                   y=0 - self.direction[1]*self.delta*20,
                                   z=platform_height + block_height/2 - self.direction[2]*self.delta*20)
def plot_constructability_over_time(logger):
    tower_keys = ['2block', '3block', '4block', '5block']
    tallest_stable_over_time = np.zeros((logger.args.max_acquisitions, 5))
    tp = TowerPlanner(stability_mode='contains')
    for tx in range(logger.args.max_acquisitions):
        acquired_data, _ = logger.load_acquisition_data(tx)
        # For each tower, figure out when it fell over.
        for kx, k in enumerate(tower_keys):
            towers = acquired_data[k]['towers']
            for ix in range(0, towers.shape[0]):
                height = 1
                for top_id in range(1, towers.shape[1]):
                    block_tower = [Object.from_vector(towers[ix, bx, :]) for bx in range(0, top_id+1)]
                    if not tp.tower_is_constructable(block_tower):
                        break
                    height += 1
                tallest_stable_over_time[tx, height-1] += 1

    max_x = 40 + 10*logger.args.max_acquisitions
    xs = np.arange(40, max_x, 10)
    w = 10
    plt.figure(figsize=(20, 10))
    plt.bar(xs, tallest_stable_over_time[:, 0], width=w, label=1)
    for kx in range(1, 5):
        plt.bar(xs, tallest_stable_over_time[:, kx],
                bottom=np.sum(tallest_stable_over_time[:, :kx], axis=1),
                width=w, label=kx+1)
    plt.xlabel('Acquisition Step')
    plt.ylabel('Height of tallest stable subtower')
    plt.legend()
    plt.savefig(logger.get_figure_path('tallest_breakdown.png'))
def random_block_set(args):
    dim_range = (args.block_min_dim, args.block_max_dim)
    block_set = [Object.random('obj_' + str(n), dim_range=dim_range) for n in range(args.n_blocks)]
    pkl_filename = 'block_set_' + str(args.n_blocks) + '.pkl'
    with open(pkl_filename, 'wb') as f:
        pickle.dump(block_set, f)
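# Usage sketch for random_block_set (not part of the original file): the function
# only needs an argparse-style namespace with n_blocks, block_min_dim, and
# block_max_dim; the values below are illustrative.
from argparse import Namespace

random_block_set(Namespace(n_blocks=10, block_min_dim=0.03, block_max_dim=0.08))
# Writes 'block_set_10.pkl' to the working directory.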
def get_random_pos(self, rot, block):
    """
    Sample a random offset from a single corner of the platform.
    :param rot: The rotation the block will be placed with.
    :param block: Used to get the dimensions of the block to place.
    """
    # Rotate the block by that rotation to get the dimensions in x, y.
    new_dims = np.abs(rot.apply(block.dimensions))
    # Sample a position within the block to place on the corner of the platform.
    place_pos = new_dims*(np.random.rand(3) - 0.5)
    table, _ = Object.platform()
    x, y, _ = place_pos + np.array(table.dimensions)/2
    return Position(x, y, 0)
def ros_to_tower(msg):
    """ Converts a list of ROS BodyInfo messages to a tower of Object variables """
    tower = []
    for ros_blk in msg:
        # Unpack the block information
        name = ros_blk.name
        dims = (ros_blk.dimensions.x, ros_blk.dimensions.y, ros_blk.dimensions.z)
        com = (ros_blk.com.x, ros_blk.com.y, ros_blk.com.z)
        mass = ros_blk.mass
        color = (ros_blk.color.r, ros_blk.color.g, ros_blk.color.b)
        blk = Object(name, dims, mass, com, color)

        # Now create a Pose namedtuple
        pose = ros_to_pose_tuple(ros_blk.pose)
        pos = Position(pose[0][0], pose[0][1], pose[0][2])
        rot = Quaternion(pose[1][0], pose[1][1], pose[1][2], pose[1][3])
        pose_named = Pose(pos=pos, orn=Quaternion(0, 0, 0, 1))
        blk.set_pose(pose_named)
        blk.rotation = rot
        tower.append(blk)
    return tower
def inspect_validation_set(fname):
    with open(fname, 'rb') as handle:
        val_towers = pickle.load(handle)

    tp = TowerPlanner(stability_mode='contains')
    towers = val_towers['5block']['towers']
    labels = val_towers['5block']['labels']
    # Check how many towers fall over at a lower block.
    for tower_vec, label in zip(towers, labels):
        if label == 0:
            tower = [Object.from_vector(tower_vec[bx, :]) for bx in range(0, tower_vec.shape[0])]
            print(tp.tower_is_constructable(tower[:1]))
def sample_unlabeled_data(n_samples, block_set=None, range_n_blocks=(2, 5)):
    """ Generate n_samples random towers. For now each sample can also have
    random blocks. We should change this later so that the blocks are fixed
    (i.e., chosen elsewhere) and we only sample the configuration.
    :param n_samples: Number of random towers to consider.
    :param block_set (optional): Blocks to use in towers. Generate new blocks if None.
    :return: Dict containing numpy arrays of the towers sorted by size.
    """
    # initialize a dictionary of lists to store the generated data
    sampled_towers = {}
    for i in range(range_n_blocks[0], range_n_blocks[1] + 1):
        k = f'{i}block'
        sampled_towers[k] = {}
        sampled_towers[k]['towers'] = []
        sampled_towers[k]['labels'] = []
        if block_set is not None:
            sampled_towers[k]['block_ids'] = []

    min_n_blocks = range_n_blocks[0]
    if block_set is not None:
        max_n_blocks = min(range_n_blocks[1], len(block_set))
    else:
        max_n_blocks = range_n_blocks[1]

    # sample random towers and add them to the lists in the dictionary
    for ix in range(n_samples):
        n_blocks = np.random.randint(min_n_blocks, max_n_blocks + 1)
        # get n_blocks, either from scratch or from the block set
        if block_set is not None:
            blocks = np.random.choice(block_set, n_blocks, replace=False)
        else:
            blocks = [Object.random(f'obj_{ix}') for ix in range(n_blocks)]
        # sample a new tower
        tower = sample_random_tower(blocks)
        rotated_tower = [get_rotated_block(b) for b in tower]
        # and save that tower in the sampled_towers dict
        sampled_towers['%dblock' % n_blocks]['towers'].append(vectorize(rotated_tower))
        if block_set is not None:
            block_ids = [block.get_id() for block in rotated_tower]
            sampled_towers['%dblock' % n_blocks]['block_ids'].append(block_ids)

    # convert all the sampled towers to numpy arrays
    for k in sampled_towers.keys():
        sampled_towers[k]['towers'] = np.array(sampled_towers[k]['towers'])
        sampled_towers[k]['labels'] = np.zeros((sampled_towers[k]['towers'].shape[0],))
        if block_set is not None:
            sampled_towers[k]['block_ids'] = np.array(sampled_towers[k]['block_ids'])

    return sampled_towers
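# Usage sketch for sample_unlabeled_data (not part of the original file),
# assuming Object.random behaves as used above. Towers are grouped by size
# under the keys '2block' through '5block'.
example_block_set = [Object.random(f'obj_{ix}') for ix in range(10)]
unlabeled = sample_unlabeled_data(500, block_set=example_block_set)
for key, data in unlabeled.items():
    print(key, data['towers'].shape, data['block_ids'].shape)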
def make_platform_world(p_block, action):
    """ Given a block, create a world that has a platform to push that block off of.
    :param p_block: The Object which to place on the platform.
    """
    platform_table, platform_leg = Object.platform()

    p_block.set_pose(Pose(ZERO_POS, Quaternion(*action.rot.as_quat())))
    block = get_rotated_block(p_block)
    block.set_pose(Pose(pos=Position(x=action.pos.x,
                                     y=action.pos.y,
                                     z=platform_table.get_pose().pos.z + platform_table.dimensions.z/2. + block.dimensions.z/2.),
                        orn=ZERO_ROT))

    return World([platform_table, block, platform_leg])
def check_validation_robustness(noise=0.001, n_attempts=10):
    """ Try adding noise to the placement of each block in the validation set
    to see how many of the towers are robust to placement noise.
    """
    with open('learning/data/validation_towers_robust.pkl', 'rb') as handle:
    #with open('learning/data/random_blocks_(x2000)_5blocks_uniform_mass.pkl', 'rb') as handle:
        val_towers = pickle.load(handle)

    robust = {k: 0 for k in val_towers.keys()}
    tp = TowerPlanner(stability_mode='contains')

    stable_towers = copy.deepcopy(val_towers)
    unstable_towers = copy.deepcopy(val_towers)

    for k in robust.keys():
        stable_indices = []
        unstable_indices = []
        for ix in range(0, val_towers[k]['towers'].shape[0]):
            stable = True
            if val_towers[k]['labels'][ix] == 0:
                continue
            for _ in range(n_attempts):
                tower = val_towers[k]['towers'][ix, :, :].copy()
                label = val_towers[k]['labels'][ix]
                tower[:, 7:9] += np.random.randn(2*tower.shape[0]).reshape(tower.shape[0], 2)*noise
                block_tower = [Object.from_vector(tower[kx, :]) for kx in range(tower.shape[0])]
                if tp.tower_is_constructable(block_tower) != label:
                    stable = False
            if stable:
                robust[k] += 1
                stable_indices.append(ix)
            else:
                unstable_indices.append(ix)

        stable_towers[k]['towers'] = stable_towers[k]['towers'][stable_indices, ...]
        stable_towers[k]['labels'] = stable_towers[k]['labels'][stable_indices]
        unstable_towers[k]['towers'] = unstable_towers[k]['towers'][unstable_indices, ...]
        unstable_towers[k]['labels'] = unstable_towers[k]['labels'][unstable_indices]

        # with open('learning/data/stable_val.pkl', 'wb') as handle:
        #     pickle.dump(stable_towers, handle)
        # with open('learning/data/unstable_val.pkl', 'wb') as handle:
        #     pickle.dump(unstable_towers, handle)
        print(k, ':', robust[k], '/', val_towers[k]['towers'].shape[0])
def check_stable_bases(logger):
    tower_keys = ['2block', '3block', '4block', '5block']
    tp = TowerPlanner(stability_mode='contains')
    for tx in range(0, 80):
        print(tx)
        towers, _ = logger.load_acquisition_data(tx)
        for k in tower_keys:
            print(k)
            for tower_vec in towers[k]['towers']:
                tower = [Object.from_vector(tower_vec[bx, :]) for bx in range(0, tower_vec.shape[0])]
                if tp.tower_is_constructable(tower[:-1]):
                    print('Stable Base')
                else:
                    print('Unstable Base')
def is_robust(orig_tower, n_attempts=10, noise=0.001):
    """ Perturb each block in the tower by 1mm multiple times and make sure the label does not change. """
    tp = TowerPlanner(stability_mode='contains')
    robust = True
    tower_vec = np.array([orig_tower[bx].vectorize() for bx in range(0, len(orig_tower))])
    label = tp.tower_is_constructable(orig_tower)
    for _ in range(n_attempts):
        tower = tower_vec.copy()
        tower[:, 7:9] += np.random.randn(2*tower.shape[0]).reshape(tower.shape[0], 2)*noise
        block_tower = [Object.from_vector(tower[kx, :]) for kx in range(tower.shape[0])]
        if tp.tower_is_constructable(block_tower) != label:
            robust = False
    return robust
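# Usage sketch for is_robust (not part of the original file), assuming the
# sample_random_tower and get_rotated_block helpers used elsewhere in this
# repository are importable here.
example_blocks = [Object.random(f'obj_{ix}') for ix in range(3)]
example_tower = [get_rotated_block(b) for b in sample_random_tower(example_blocks)]
print('robust to 1mm placement noise:', is_robust(example_tower, n_attempts=10, noise=0.001))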
def block_set_from_csv(args):
    block_set = []
    with open(args.csv_file) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for row_i, row in enumerate(csv_reader):
            if row_i > 0:
                id, dimensions, com, mass = row
                block = Object('obj_' + id,
                               Dimensions(*np.multiply(string_to_list(dimensions), .01)),  # cm --> m
                               float(mass) * .001,                                         # g --> kg
                               Position(*np.multiply(string_to_list(com), .01)),           # cm --> m
                               Color(*np.random.rand(3)))
                print(block.dimensions, block.mass, block.com)
                block_set.append(block)

    pkl_filename = args.csv_file[:-4] + '.pkl'
    with open(pkl_filename, 'wb') as f:
        pickle.dump(block_set, f)
def save_collected_tower_images(logger):
    tower_keys = ['2block', '3block', '4block', '5block']

    for tx in range(0, 20):
        towers, _ = logger.load_acquisition_data(tx)
        ix = np.random.randint(0, 10)
        print(towers['2block']['towers'].shape)
        start = 0
        for k in tower_keys:
            end = start + towers[k]['towers'].shape[0]
            if ix < end:
                tower = towers[k]['towers'][ix - start, :, :]
                block_tower = [Object.from_vector(tower[bx, :]) for bx in range(tower.shape[0])]
                label = towers[k]['labels'][ix - start]
                break
            start = end

        w = World(block_tower)
        env = Environment([w], vis_sim=False, vis_frames=True)
        env.step(vis_frames=True)

        view_matrix = p.computeViewMatrixFromYawPitchRoll(distance=0.3,
                                                          yaw=45,
                                                          pitch=-10,
                                                          roll=0,
                                                          upAxisIndex=2,
                                                          cameraTargetPosition=(0., 0., 0.25))
        aspect = 100. / 190.
        nearPlane = 0.01
        farPlane = 10
        fov = 90
        projection_matrix = p.computeProjectionMatrixFOV(fov, aspect, nearPlane, farPlane)
        image_data = p.getCameraImage(200, 380,
                                      shadow=1,
                                      viewMatrix=view_matrix,
                                      projectionMatrix=projection_matrix)
        w, h, im = image_data[:3]
        np_im = np.array(im, dtype=np.uint8).reshape(h, w, 4)[:, :, 0:3]
        #plt.imshow(np.array(np_im))
        plt.imsave(logger.get_figure_path('tower_%d_%d.png' % (tx, label)), np_im)
        env.disconnect()
def active(strategy, vis=False):
    hypotheses = get_all_hypotheses()
    tp = TowerPlanner(stability_mode='contains')
    for nx in range(1, MAX_N):
        # Generate a random set of 5 blocks.
        blocks = [Object.random(f'obj_{ix}') for ix in range(NUM_BLOCKS)]

        # Choose a tower to build.
        if strategy == 'random':
            num_blocks = np.random.randint(2, NUM_BLOCKS + 1)
            tower = sample_random_tower(blocks[:num_blocks])
            tower = [get_rotated_block(b) for b in tower]
            tower = [deepcopy(b) for b in tower]
        elif strategy == 'entropy':
            tower = find_entropy_tower(blocks, hypotheses)
        else:
            raise NotImplementedError()

        # Check for consistent models.
        valid_hypotheses = []
        for h in hypotheses:
            true = tp.tower_is_stable(tower)
            pred = h(tower)
            if true == pred:
                valid_hypotheses.append(h)
        hypotheses = valid_hypotheses

        # Visualize the chosen tower and print the updated hypothesis list.
        if vis:
            TeleportAgent.simulate_tower(tower, vis=True, T=300)
        print(hypotheses)

        # Check if true model found.
        if len(hypotheses) == 1:
            break
    return nx
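# Usage sketch for active (not part of the original file): each call returns the
# number of towers built before a single consistent hypothesis remains.
n_random = active('random', vis=False)
n_entropy = active('entropy', vis=False)
print('random:', n_random, 'entropy:', n_entropy)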
def get_labels(samples, exec_mode, agent, logger, xy_noise, save_tower=False, label_subtowers=False):
    """ Takes as input a dictionary from the get_subset function and augments it with stability labels.
    :param samples:
    :param exec_mode: str in ['simple-model', 'noisy-model', 'sim', 'real']
    :param agent: PandaAgent or None (if exec_mode == 'simple-model' or 'noisy-model')
    :return:
    """
    labeled_samples = {'%dblock' % k: {} for k in [2, 3, 4, 5]}
    for k in labeled_samples:
        labeled_samples[k]['towers'] = []
        labeled_samples[k]['block_ids'] = []
        labeled_samples[k]['labels'] = []

    block_placements = 0
    tp = TowerPlanner(stability_mode='contains')
    for k in samples.keys():
        n_towers, n_blocks, _ = samples[k]['towers'].shape
        labels = np.ones((n_towers,))

        for ix in range(0, n_towers):
            print(f'Collecting tower {ix+1}/{n_towers} for {k} towers...')
            # Add noise to blocks and convert tower to Block representation.
            block_tower = []
            for jx in range(n_blocks):
                vec_block = deepcopy(samples[k]['towers'][ix, jx, :])
                if exec_mode == 'noisy-model':
                    vec_block[7:9] += np.random.randn(2) * xy_noise
                block = Object.from_vector(vec_block)  # block is already rotated
                if 'block_ids' in samples[k].keys():
                    block.name = 'obj_' + str(samples[k]['block_ids'][ix, jx])
                block_tower.append(block)

            # Use tp to check for stability.
            if exec_mode == 'simple-model' or exec_mode == 'noisy-model':
                # iterate through each subtower until it falls (is not constructable)
                subtowers = [block_tower[:k_sub] for k_sub in list(range(2, len(block_tower) + 1))]
                for k_sub, subtower in enumerate(subtowers, 2):
                    if tp.tower_is_constructable(subtower):
                        label = 1.0
                    else:
                        label = 0.0

                    # add to labeled samples
                    labeled_samples['%dblock' % k_sub]['towers'].append(samples[k]['towers'][ix, :k_sub, :])
                    if 'block_ids' in labeled_samples['%dblock' % k_sub]:
                        labeled_samples['%dblock' % k_sub]['block_ids'].append(samples[k]['block_ids'][ix, :k_sub])
                    labeled_samples['%dblock' % k_sub]['labels'].append(label)

                    # save tower file
                    if save_tower:
                        if 'block_ids' in samples[k].keys():
                            logger.save_towers_data(samples[k]['towers'][ix, :k_sub, :],
                                                    samples[k]['block_ids'][ix, :k_sub],
                                                    label)
                        else:
                            logger.save_towers_data(samples[k]['towers'][ix, :k_sub, :],
                                                    None,
                                                    label)

                    # stop when tower falls
                    if label == 0.0:
                        block_placements += k_sub
                        labels[ix] = 0.0
                        break
            else:
                vis = True
                success = False
                real = (exec_mode == 'real')
                # if planning fails, reset and try again
                while not success:
                    success, label = agent.simulate_tower(block_tower, vis, real=real)
                    print(f"Received success: {success}, label: {label}")
                    if not success:
                        if real:
                            input('Resolve conflict causing planning to fail, then press enter to try again.')
                            if isinstance(agent, PandaClientAgent):
                                agent.restart_services()
                        else:
                            # in sim
                            input('Should reset sim. Not yet handled. Exit and restart training.')

                labels[ix] = label
                if 'block_ids' in samples[k].keys():
                    logger.save_towers_data(samples[k]['towers'][ix, :, :],
                                            samples[k]['block_ids'][ix, :],
                                            labels[ix])
                else:
                    logger.save_towers_data(samples[k]['towers'][ix, :, :],
                                            None,
                                            labels[ix])

        samples[k]['labels'] = labels

    if save_tower:
        # save block placement data
        logger.save_block_placement_data(block_placements)

    if label_subtowers:
        # vectorize labeled samples and return
        for ki, k in enumerate(labeled_samples, 2):
            if labeled_samples[k]['towers'] == []:
                labeled_samples[k]['towers'] = np.zeros((0, ki, 21))
                labeled_samples[k]['block_ids'] = np.zeros((0, ki))
                labeled_samples[k]['labels'] = np.zeros(0)
            labeled_samples[k]['towers'] = np.array(labeled_samples[k]['towers'])
            labeled_samples[k]['block_ids'] = np.array(labeled_samples[k]['block_ids'])
            labeled_samples[k]['labels'] = np.array(labeled_samples[k]['labels'])
        return labeled_samples
    else:
        return samples
parser.add_argument('--max-height', default=5, type=int, help='number of blocks in goal tower')
args = parser.parse_args()

if args.debug:
    import pdb
    pdb.set_trace()

if args.block_set_fname != '':
    with open(args.block_set_fname, 'rb') as f:
        block_set = pickle.load(f)
else:
    block_set = [Object.random(f'obj_{ix}') for ix in range(args.n_blocks)]

logger = ActiveExperimentLogger(args.exp_path)

pre = 'discrete_' if args.discrete else ''

## RUN SEQUENTIAL PLANNER
if args.method == 'sequential' or args.method == 'both':
    tower_sizes = [2, 3, 4, 5]
    tower_keys = [str(ts) + 'block' for ts in tower_sizes]
    max_height = args.max_height

    # Store regret for towers of each size.
    regrets = {k: [] for k in tower_keys}
    rewards = {k: [] for k in tower_keys}
    num_nodes = {k: [] for k in tower_keys}
    trees = []
def sample_sequential_data(block_set, dataset, n_samples):
    """ Generate n_samples random towers. Each tower has the property that its
    base (the tower without its last block) is stable. To ensure this, we start
    with all the stable towers plus base cases of single block towers.
    :param block_set: List of blocks that can be used in the towers.
    :param dataset: List of current towers that have been built.
    :param n_samples: Number of random towers to consider.
    :return: Dict containing numpy arrays of the towers sorted by size.
    """
    print('Generating data sequentially...')
    keys = ['2block', '3block', '4block', '5block']

    # initialize a dictionary of lists to store the generated data
    sampled_towers = {k: {} for k in keys}
    for k in keys:
        sampled_towers[k]['towers'] = []
        sampled_towers[k]['labels'] = []
        if block_set is not None:
            sampled_towers[k]['block_ids'] = []

    # Gather list of all stable towers. Towers should be of blocks that are rotated in "Block" format.
    stable_towers = []

    # Get all single block stable towers.
    for block in block_set:
        for orn in QUATERNIONS:
            new_block = deepcopy(block)
            new_block.pose = Pose(ZERO_POS, orn)
            rot_block = get_rotated_block(new_block)
            rot_block.pose = Pose((0., 0., rot_block.dimensions.z / 2.), (0, 0, 0, 1))
            stable_towers.append([rot_block])

    # Get all stable towers from the dataset.
    for k in keys[:3]:
        if dataset is None:
            break
        tower_tensors = unprocess(dataset.tower_tensors[k].cpu().numpy().copy())
        tower_labels = dataset.tower_labels[k]
        for ix, (tower_vec, tower_label) in enumerate(zip(tower_tensors, tower_labels)):
            if tower_label == 1:
                block_tower = []
                for bx in range(tower_vec.shape[0]):
                    block = Object.from_vector(tower_vec[bx, :])
                    if block_set is not None:
                        block.name = 'obj_' + str(int(dataset.tower_block_ids[k][ix, bx]))
                    block_tower.append(block)
                stable_towers.append(block_tower)

    # Maintain block info by block name.
    # TODO: this will NOT work if using random blocks
    block_lookup = {}
    for block in block_set:
        block_lookup[block.name] = block

    # Sample random towers by randomly choosing a stable base then trying to add a block.
    for ix in range(n_samples):
        # Choose a stable base.
        tower_ix = np.random.choice(np.arange(0, len(stable_towers)))
        base_tower = stable_towers[tower_ix]

        # Choose a block that's not already in the tower.
        remaining_blocks = {}
        for k in block_lookup:
            used = False
            for block in base_tower:
                if k == block.name:
                    used = True
            if not used:
                remaining_blocks[k] = block_lookup[k]
        # if we switch block sets during training then the remaining_blocks list
        # will be longer than block_set - len(base_tower)
        #assert(len(remaining_blocks) == len(block_set) - len(base_tower))
        new_block = deepcopy(np.random.choice(list(remaining_blocks.values())))

        # Choose an orientation.
        orn = QUATERNIONS[np.random.choice(np.arange(0, len(QUATERNIONS)))]
        new_block.pose = Pose(ZERO_POS, orn)
        rot_block = get_rotated_block(new_block)

        # Sample a displacement.
        base_dims = np.array(base_tower[-1].dimensions)[:2]
        new_dims = np.array(rot_block.dimensions)[:2]
        max_displacements_xy = (base_dims + new_dims) / 2.
        noise_xy = np.clip(0.5 * np.random.randn(2), -0.95, 0.95)
        rel_xy = max_displacements_xy * noise_xy

        # Calculate the new pose.
        base_pos = np.array(base_tower[-1].pose.pos)[:2]
        pos_xy = base_pos + rel_xy
        pos_z = np.sum([b.dimensions.z for b in base_tower]) + rot_block.dimensions.z / 2.
        rot_block.pose = Pose((pos_xy[0], pos_xy[1], pos_z), (0, 0, 0, 1))

        # Add block to tower.
        new_tower = base_tower + [rot_block]

        if False:
            w = World(new_tower)
            env = Environment([w], vis_sim=True, vis_frames=True)
            for tx in range(240):
                env.step(vis_frames=True)
                time.sleep(1 / 240.)
            env.disconnect()

        # Save that tower in the sampled_towers dict
        n_blocks = len(new_tower)
        sampled_towers['%dblock' % n_blocks]['towers'].append(vectorize(new_tower))
        # save block ids
        if block_set is not None:
            block_ids = [block.get_id() for block in new_tower]
            sampled_towers['%dblock' % n_blocks]['block_ids'].append(block_ids)

    # convert all the sampled towers to numpy arrays
    for k in keys:
        sampled_towers[k]['towers'] = np.array(sampled_towers[k]['towers'])
        if sampled_towers[k]['towers'].shape[0] == 0:
            sampled_towers[k]['towers'] = sampled_towers[k]['towers'].reshape((0, int(k[0]), 21))
        sampled_towers[k]['labels'] = np.zeros((sampled_towers[k]['towers'].shape[0],))
        if block_set is not None:
            sampled_towers[k]['block_ids'] = np.array(sampled_towers[k]['block_ids'])

    return sampled_towers
def sample_next_block(n_samples, bases={}, block_set=None):
    # if the dataset is empty, we are sampling the 2-block bases of the towers
    if bases == {}:
        return sample_unlabeled_data(n_samples, block_set=block_set, range_n_blocks=(2, 2))

    # if the dataset is non-empty, then for each tower in the dataset we need to sample
    # a bunch of options for the next block to be placed on top
    else:
        assert len(list(bases.keys())) == 1, 'expected all base towers to have the same height'
        base_n_blocks_key = list(bases.keys())[0]
        base_n_blocks = int(base_n_blocks_key.strip('block'))
        n_towers = bases[base_n_blocks_key]['towers'].shape[0]
        n_new_blocks_per_base = np.ceil(n_samples / n_towers).astype(int)

        new_towers = []
        new_block_ids = []
        for i in range(n_towers):
            # pull out some information about the tower we're working on
            current_tower = bases[base_n_blocks_key]['towers'][i]
            top_block = current_tower[-1]
            top_block_dims = top_block[4:7]
            top_block_pos = top_block[7:10]
            top_of_tower_height = top_block_pos[2] + top_block_dims[2] / 2

            # get n_new_blocks_per_base new blocks to add on top of the tower
            if block_set is None:
                new_top_blocks = [Object.random(f'obj_{ix}') for ix in range(n_new_blocks_per_base)]
            else:
                # we can't duplicate blocks, so only choose new top blocks from the
                # blocks that aren't already in the base
                block_ids_already_in_tower = list(bases[base_n_blocks_key]['block_ids'][i])
                remaining_blocks = [b for b in block_set
                                    if b.name.strip('obj_') not in block_ids_already_in_tower]
                new_top_blocks = sample_with_replacement(remaining_blocks, k=n_new_blocks_per_base)

                # save the block ids for each of the new towers
                new_top_block_ids = [b.name.strip('obj_') for b in new_top_blocks]
                new_block_ids_local = np.zeros([n_new_blocks_per_base, base_n_blocks + 1])
                new_block_ids_local[:, :-1] = block_ids_already_in_tower
                new_block_ids_local[:, -1] = new_top_block_ids
                new_block_ids.append(new_block_ids_local)

            # get random rotations for each block
            orns = sample_with_replacement(QUATERNIONS, k=n_new_blocks_per_base)
            # apply the rotations to each block
            rotated_blocks = []
            for orn, block in zip(orns, new_top_blocks):
                block.pose = Pose(ZERO_POS, orn)
                rotated_blocks.append(get_rotated_block(block))

            # figure out how far each block can be moved w/o losing contact w/ the block below
            dims_xy = np.array([rb.dimensions for rb in rotated_blocks])[:, :2]
            max_displacements_xy = (top_block_dims[:2] + dims_xy) / 2.
            # sample unscaled noise (clip because a random normal can exceed -1, 1)
            noise_xy = np.clip(0.5 * np.random.randn(n_new_blocks_per_base, 2), -0.95, 0.95)
            # and scale the noise by the max allowed displacement
            rel_xy = max_displacements_xy * noise_xy
            # and get the actual pos by adding the offset to the top block pos
            pos_xy = top_block_pos[:2] + rel_xy
            # calculate the height of each block
            pos_z = np.array([rb.dimensions.z / 2 + top_of_tower_height for rb in rotated_blocks])
            pos_xyz = np.hstack([pos_xy, pos_z[:, None]])
            for pos, orn, block in zip(pos_xyz, orns, new_top_blocks):
                block.pose = Pose(Position(*pos), orn)
                block.rotation = orn

            # create an array to hold all the new towers
            new_towers_local = np.zeros([n_new_blocks_per_base,
                                         current_tower.shape[0] + 1,
                                         current_tower.shape[1]])
            # add the existing base of the tower to the array
            new_towers_local[:, :-1] = current_tower
            # and add the new top block to each tower
            new_towers_local[:, -1] = vectorize(new_top_blocks)
            new_towers.append(new_towers_local)

        # package the new towers into a dict of the appropriate format. include
        # block_ids if we are using a block set
        new_towers = np.concatenate(new_towers)
        new_samples = {'towers': new_towers, 'labels': np.zeros(new_towers.shape[0])}
        if block_set is not None:
            new_samples['block_ids'] = np.concatenate(new_block_ids)
        return {f'{base_n_blocks+1}block': new_samples}
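# Usage sketch for sample_next_block (not part of the original file): towers are
# grown one block at a time, starting from 2-block bases. The block set below is
# illustrative; any list of Object instances named 'obj_<id>' should work.
example_block_set = [Object.random(f'obj_{ix}') for ix in range(10)]
bases = sample_next_block(100, bases={}, block_set=example_block_set)        # {'2block': ...}
extended = sample_next_block(100, bases=bases, block_set=example_block_set)  # {'3block': ...}
print(list(extended.keys()), extended['3block']['towers'].shape)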
def main(vis_tower=False):
    num_towers = 500
    pw_stable = True

    # create a vector of stability labels where half are unstable and half are stable
    stability_labels = np.zeros(num_towers * 2, dtype=int)
    stability_labels[num_towers:] = 1

    dataset = {}
    for num_blocks in range(3, 6):
        vectorized_towers = []

        for constructable in [False, True]:
            count = 0
            while count < num_towers:
                # print the information about the tower we are about to generate
                stability_type = "con" if constructable else "uncon"
                stability_type += "/pw_stable" if pw_stable else "/pw_unstable"
                print(f'{count}/{num_towers}\t{stability_type} {num_blocks}-block tower')

                # generate random blocks. Use the block set if specified. otherwise
                # generate new blocks from scratch. Save the block names if using blocks
                # from the block set
                blocks = [Object.random(f'obj_{ix}') for ix in range(num_blocks)]

                cog_stable = np.random.choice([True, False])
                tower = build_tower(blocks,
                                    constructable=constructable,
                                    pairwise_stable=pw_stable,
                                    cog_stable=cog_stable)

                if tower is None:
                    continue
                if not is_robust(tower):
                    print('Not Robust')
                    continue

                if vis_tower:
                    w = World(tower)
                    env = Environment([w], vis_sim=True, vis_frames=True)
                    print(stability_type)
                    input()
                    for tx in range(240):
                        env.step(vis_frames=False)
                        time.sleep(1 / 240.)
                    env.disconnect()

                count += 1
                # append the tower to the list
                vectorized_towers.append(vectorize(tower))

        data = {
            'towers': np.array(vectorized_towers),
            'labels': stability_labels,
        }
        dataset[f'{num_blocks}block'] = data

    # save the generated data
    filename = 'learning/data/validation_towers_robust.pkl'
    print('Saving to', filename)
    with open(filename, 'wb') as f:
        pickle.dump(dataset, f)
def plan(self, blocks, ensemble, reward_fn, args, num_blocks=None, n_tower=None):
    #n = len(blocks)
    #max_height = 0
    #max_tower = []

    # Step (1): Build dataset of potential towers.
    tower_vectors, tower_block_ids = self.generate_candidate_towers(blocks, args, num_blocks, n_tower)

    # Step (2): Get predictions for each tower.
    towers = np.array(tower_vectors)
    block_ids = np.array(tower_block_ids)
    if args.planning_model == 'learned':
        # Since we are only planning for towers of a single size,
        # always use the '2block' key for simplicity. The rest currently
        # need at least some data for the code to work.
        labels = np.zeros((towers.shape[0],))
        tower_dict = {}
        for k in self.tower_keys:
            tower_dict[k] = {}
            if k == '2block':
                tower_dict[k]['towers'] = towers
                tower_dict[k]['labels'] = labels
                tower_dict[k]['block_ids'] = block_ids
            else:
                tower_dict[k]['towers'] = towers[:5, ...]
                tower_dict[k]['labels'] = labels[:5]
                tower_dict[k]['block_ids'] = block_ids[:5, ...]

        tower_dataset = TowerDataset(tower_dict, augment=False)
        tower_sampler = TowerSampler(dataset=tower_dataset, batch_size=64, shuffle=False)
        tower_loader = DataLoader(dataset=tower_dataset, batch_sampler=tower_sampler)

        preds = []
        if hasattr(self.logger.args, 'sampler') and self.logger.args.sampler == 'sequential':
            for tensor, _ in tower_loader:
                sub_tower_preds = []
                for n_blocks in range(2, tensor.shape[1] + 1):
                    if torch.cuda.is_available():
                        tensor = tensor.cuda()
                    with torch.no_grad():
                        sub_tower_preds.append(ensemble.forward(tensor[:, :n_blocks, :]))
                sub_tower_preds = torch.stack(sub_tower_preds, dim=0)
                preds.append(sub_tower_preds.prod(dim=0))
        else:
            for tensor, _ in tower_loader:
                if torch.cuda.is_available():
                    tensor = tensor.cuda()
                with torch.no_grad():
                    preds.append(ensemble.forward(tensor))
        p_stables = torch.cat(preds, dim=0).mean(dim=1)

    elif args.planning_model == 'noisy-model':
        n_estimate = 10
        p_stables = np.zeros(len(tower_vectors))
        for ti, tower_vec in enumerate(tower_vectors):
            # estimate prob of constructability
            results = np.ones(n_estimate)
            all_stable = 1.
            for n in range(n_estimate):
                noisy_tower = []
                for block_vec in tower_vec:
                    noisy_block = deepcopy(block_vec)
                    noisy_block[7:9] += np.random.randn(2) * args.plan_xy_noise
                    noisy_tower.append(noisy_block)
                block_tower = [Object.from_vector(block) for block in noisy_tower]
                if not self.tp.tower_is_constructable(block_tower):
                    results[n] = 0.
                    all_stable = 0.
                    break
            p_stables[ti] = all_stable  # np.mean(results)

    elif args.planning_model == 'simple-model':
        p_stables = np.zeros(len(tower_vectors))
        for ti, tower_vec in enumerate(tower_vectors):
            block_tower = [Object.from_vector(block) for block in tower_vec]
            if self.tp.tower_is_constructable(block_tower):
                p_stables[ti] = 1.

    # Step (3): Find the tallest tower of a given height.
    max_reward, max_exp_reward, max_tower, max_stable = -100, -100, None, 0
    ground_truth = -100
    max_reward_block_ids = None
    for ix, (p, tower, tower_block_ids) in enumerate(zip(p_stables, towers, block_ids)):
        reward = reward_fn(tower)
        exp_reward = p * reward
        if exp_reward >= max_exp_reward:  # and p > 0.5:
            if exp_reward > max_exp_reward or (exp_reward == max_exp_reward and p > max_stable):
                max_tower = tower_vectors[ix]
                max_reward = reward
                max_exp_reward = exp_reward
                max_stable = p
                max_reward_block_ids = tower_block_ids

        # Check ground truth stability to find maximum reward.
        if self.tp.tower_is_constructable([Object.from_vector(tower[bx, :]) for bx in range(tower.shape[0])]) \
           and reward > ground_truth:
            ground_truth = reward

    if max_tower is None:
        print('None Found')
        max_tower = tower_vectors[0]
        max_reward = reward_fn(towers[0])

    return max_tower, max_reward, ground_truth, max_reward_block_ids
def augment(all_data, K_skip, translate=False, mirror=False, vis_tower=False):
    datasets = {}
    for k_block in all_data.keys():
        num_blocks = int(k_block.strip('block'))
        #print('Augmenting %d block towers...' % num_blocks)

        # load the tower data
        data = all_data[f'{num_blocks}block']
        towers = data['towers'][::K_skip, :]
        labels = data['labels'][::K_skip]
        if 'block_ids' in data.keys() and data['block_ids'].shape != (0,):
            block_ids = data['block_ids'][::K_skip, :]
        N, K, D = towers.shape

        # calculate the number of augmented towers that will be created
        N_angles = 4
        N_shift = 4 if translate else 1
        N_mirror = 3 if mirror else 1
        tower_multiplier = N_angles * N_mirror * N_shift
        N_towers_to_add = N * tower_multiplier

        # and create new arrays to store those towers
        augmented_towers = np.zeros((N_towers_to_add, K, D))
        augmented_labels = np.zeros(N_towers_to_add)
        augmented_block_ids = np.zeros((N_towers_to_add, K))

        for ix in range(N):
            #if ix % 1000 == 0:
            #    print(ix)

            original_tower = [Object.from_vector(towers[ix, jx, :]) for jx in range(num_blocks)]

            rot_towers = []
            for kx, z_rot in enumerate([0., np.pi/2., np.pi, 3*np.pi/2]):
                rot = R.from_rotvec([0., 0., z_rot])

                # rotate each block in the tower and add the new tower to the dataset
                rot_poses = rot.apply(np.array([b.pose.pos for b in original_tower]))
                rot_tower = []
                for bx in range(num_blocks):
                    rot_block = deepcopy(original_tower[bx])
                    new_pose = Pose(Position(*rot_poses[bx, :].tolist()),
                                    Quaternion(*rot.as_quat().tolist()))
                    # new_pose = Pose(Position(*rot.apply(block.pose.pos)),
                    #                 Quaternion(*rot.as_quat().tolist()))
                    rot_block.set_pose(new_pose)
                    orig_rot = R.from_quat(rot_block.rotation)
                    rot_block = get_rotated_block(rot_block)
                    rot_block.rotation = (rot*orig_rot).as_quat().tolist()
                    rot_tower.append(rot_block)
                    augmented_towers[tower_multiplier*ix + kx, bx, :] = rot_tower[bx].vectorize()

                rot_towers.append(rot_tower)
                augmented_labels[tower_multiplier*ix + kx] = labels[ix]
                if 'block_ids' in data.keys():
                    augmented_block_ids[tower_multiplier*ix + kx, :] = block_ids[ix, :]

                # translate the base block in the tower and add after the rotated blocks
                # if translate:
                #     dx, dy = np.random.uniform(-0.2, 0.2, 2)
                #     shifted_tower = augmented_towers[(4+N_shift)*ix + kx, :].copy()
                #     # Indices 7,8 correspond to the pose.
                #     # The CoM doesn't need to be shifted because it is relative.
                #     shifted_tower[:, 7] += dx
                #     shifted_tower[:, 8] += dy
                #     augmented_towers[tower_multiplier*ix + N_angles + kx, :, :] = shifted_tower
                #     augmented_labels[tower_multiplier*ix + N_angles + kx] = labels[ix]

            # worlds = [World(original_tower)] + [World(tower) for tower in rot_towers]
            # env = Environment(worlds, vis_sim=True, vis_frames=True)
            # env.step(vis_frames=True)
            # input('Next?')
            # env.disconnect()

            # mirror the COM about the COG and negate the relative position in x
            # and y for each block. Creates a new tower that is the mirror of the
            # original tower about the x and y axes
            if mirror:
                start_index = tower_multiplier*ix

                # pull out a vector of all the towers we just added. This will be of shape
                # [N_angles x num_blocks x D]
                rot_towers = augmented_towers[start_index: start_index+N_angles, ...]

                # create a vector that will mirror a tower when we multiply it
                # indices 1 and 7 correspond to the x coordinates of COM and relative position
                # indices 2 and 8 correspond to the y coordinates of COM and relative position
                mirror_in_x = np.ones([1, 1, D])
                mirror_in_y = np.ones([1, 1, D])
                mirror_in_x[..., [1, 7]] *= -1
                mirror_in_y[..., [2, 8]] *= -1

                # add the mirrored towers to the augmented towers dataset
                augmented_towers[start_index+N_angles*1: start_index+N_angles*2, ...] = rot_towers * mirror_in_x
                augmented_towers[start_index+N_angles*2: start_index+N_angles*3, ...] = rot_towers * mirror_in_y
                augmented_labels[start_index:start_index+N_angles*N_mirror] = labels[ix]
                if 'block_ids' in data.keys():
                    augmented_block_ids[start_index:start_index+N_angles*N_mirror, :] = block_ids[ix, :]

            if vis_tower:
                for i in range(tower_multiplier):
                    print('VISUALIZE', ix*tower_multiplier+i, N_towers_to_add)
                    new_tower = [Object.from_vector(augmented_towers[ix*tower_multiplier+i, jx, :])
                                 for jx in range(num_blocks)]
                    w = World(new_tower)
                    env = Environment([w], vis_sim=True, vis_frames=True)
                    for tx in range(60):
                        env.step(vis_frames=False)
                        time.sleep(1/240.)
                    env.disconnect()

        datasets[f'{num_blocks}block'] = {'towers': augmented_towers, 'labels': augmented_labels}
        if 'block_ids' in data.keys():
            datasets[f'{num_blocks}block']['block_ids'] = augmented_block_ids

    return datasets
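# Usage sketch for augment (not part of the original file): with mirror=True each
# input tower yields 4 rotations x 3 reflections (original, x-mirror, y-mirror),
# i.e. a 12x larger dataset. The pickle path matches the one written above.
with open('learning/data/validation_towers_robust.pkl', 'rb') as handle:
    all_data = pickle.load(handle)
augmented = augment(all_data, K_skip=1, mirror=True)
print(augmented['5block']['towers'].shape)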
""" Test the specified action by creating a test world and performing a random action. We also test here the the same action is applied across all particle worlds. """ if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--action-type', choices=['push', 'place'], required=True) parser.add_argument('--agent-type', choices=['teleport', 'panda'], required=True) args = parser.parse_args() # Create the block. true_com = Position(x=0., y=0., z=0.) block = Object(name='block', dimensions=Dimensions(x=0.05, y=0.1, z=0.05), mass=1, com=true_com, color=Color(r=1., g=0., b=0.)) # Test the action for all rotations. for r in rotation_group(): # Create the action. if args.action_type == 'push': action = PushAction(direction=None, timesteps=50, rot=r, block=block) elif args.action_type == 'place': action = PlaceAction(pos=None, rot=r) else:
def inspect_2block_towers(logger):
    """ In the full dataset, show the distribution of features. """
    tower_keys = ['2block', '3block', '4block', '5block']

    # dataset = logger.load_dataset(logger.args.max_acquisitions - 1)
    # print(dataset.tower_tensors['2block'].shape)
    # plt.hist(dataset.tower_tensors['2block'][:,1,8], bins=10)
    # plt.show()

    #ensemble = logger.get_ensemble(10)
    ensemble = logger.get_ensemble(logger.args.max_acquisitions - 1)

    unlabeled = sample_unlabeled_data(10000)
    preds = get_predictions(unlabeled, ensemble)
    bald_scores = bald(preds).numpy()
    print('Best BALD')
    ixs = np.argsort(bald_scores)[::-1][:10]
    print(bald_scores[ixs])
    input()

    tp = TowerPlanner(stability_mode='contains')

    preds2 = preds[:unlabeled['2block']['towers'].shape[0], :]
    bald_scores2 = bald_scores[:unlabeled['2block']['towers'].shape[0]]
    acquire_indices = np.argsort(bald_scores2)[::-1][:50]
    # for ix in range(preds2.shape[0]):
    #     print(np.around(preds2[ix,:].numpy(), 2), np.around(bald_scores2[ix], 3))
    print('-----')
    for ix in acquire_indices:
        d = decision_distance(unlabeled['2block']['towers'][ix, :, :])
        tower = unlabeled['2block']['towers'][ix, :, :]
        l = tp.tower_is_constructable([Object.from_vector(tower[bx, :]) for bx in range(tower.shape[0])])
        print(np.around(preds2[ix, :].numpy(), 4), np.around(bald_scores2[ix], 3), d, l)

    for ix in acquire_indices:
        unlabeled['2block']['towers'][ix, 1, 7:8] += 0.0
    new_preds = get_predictions(unlabeled, ensemble)
    print('-----')
    for ix in acquire_indices:
        d = decision_distance(unlabeled['2block']['towers'][ix, :, :])
        print(np.around(new_preds[ix, :].numpy(), 2))

    plt.hist(unlabeled['2block']['towers'][acquire_indices, 1, 0])
    plt.show()

    print('-----')
    start = 0
    for k in tower_keys:
        end = start + unlabeled[k]['towers'].shape[0]
        p, b = preds[start:end, :], bald_scores[start:end]
        informative = b[b > 0.3]
        print(p.shape, informative.shape)

    accs = {k: [] for k in tower_keys}
    with open('learning/data/random_blocks_(x2000)_5blocks_uniform_mass.pkl', 'rb') as handle:
        val_towers = pickle.load(handle)
    preds = get_predictions(val_towers, ensemble).mean(1).numpy()

    dists = []
    for ix in range(0, val_towers['2block']['towers'].shape[0]):
        d = decision_distance(val_towers['2block']['towers'][ix, :, :])
        dists.append(d)
    print(len(dists))
    plt.hist(dists, bins=100)
    plt.show()

    start = 0
    for k in tower_keys:
        end = start + val_towers[k]['towers'].shape[0]
        acc = ((preds[start:end] > 0.5) == val_towers[k]['labels']).mean()
        accs[k].append(acc)
        start = end
    print(accs)
def evaluate_planner(logger, blocks, reward_fn, fname, args, save_imgs=False, img_prefix=''):
    tower_keys = [str(ts)+'block' for ts in args.tower_sizes]
    tp = TowerPlanner(stability_mode='contains')
    ep = EnsemblePlanner(logger)

    # Store regret for towers of each size.
    regrets = {k: [] for k in tower_keys}
    rewards = {k: [] for k in tower_keys}

    if args.max_acquisitions is not None:
        eval_range = range(0, args.max_acquisitions, 10)
    elif args.acquisition_step is not None:
        eval_range = [args.acquisition_step]

    for tx in eval_range:
        print('Acquisition step:', tx)

        ensemble = logger.get_ensemble(tx)
        if torch.cuda.is_available():
            ensemble = ensemble.cuda()

        for k, size in zip(tower_keys, args.tower_sizes):
            print('Tower size', k)
            num_failures, num_pw_failures = 0, 0
            curr_regrets = []
            curr_rewards = []
            for t in range(0, args.n_towers):
                print('Tower number', t)
                if blocks is not None:
                    plan_blocks = np.random.choice(blocks, size, replace=False)
                    plan_blocks = copy.deepcopy(plan_blocks)
                else:
                    plan_blocks = [Object.random() for _ in range(size)]

                tower, reward, max_reward, tower_block_ids = ep.plan(plan_blocks,
                                                                     ensemble,
                                                                     reward_fn,
                                                                     args,
                                                                     num_blocks=size,
                                                                     n_tower=t)

                block_tower = []
                for vec_block, block_id in zip(tower, tower_block_ids):
                    block = Object.from_vector(vec_block)
                    block.name = 'obj_%d' % block_id
                    block_tower.append(block)

                # save tower info to /evaluation_towers
                if args.exec_mode is None:
                    if args.planning_model == 'noisy-model':
                        logger.save_evaluation_tower(block_tower, reward, max_reward, tx,
                                                     args.planning_model, args.problem,
                                                     noise=args.plan_xy_noise)
                    else:
                        logger.save_evaluation_tower(block_tower, reward, max_reward, tx,
                                                     args.planning_model, args.problem)

                # perturb tower if evaluating with noisy model
                if args.exec_mode == 'noisy-model':
                    block_tower = []
                    for vec_block, block_id in zip(tower, tower_block_ids):
                        vec_block[7:9] += np.random.randn(2)*args.exec_xy_noise
                        block = Object.from_vector(vec_block)
                        block.name = 'obj_%d' % block_id
                        block_tower.append(block)

                # build found tower
                if args.exec_mode == 'noisy-model' or args.exec_mode == 'simple-model':
                    if not tp.tower_is_constructable(block_tower):
                        reward = 0
                        num_failures += 1
                        if tp.tower_is_pairwise_stable(block_tower):
                            num_pw_failures += 1
                        else:
                            pairs = []
                            dists = []
                            for i in range(len(tower) - 1):
                                # check that each pair of blocks is stable individually
                                top = block_tower[i+1]
                                bottom = block_tower[i]
                                if not tp.pair_is_stable(bottom, top):
                                    pairs.append(False)
                                else:
                                    pairs.append(True)
                                top_rel_pos = np.array(top.pose.pos) - np.array(bottom.pose.pos)
                                top_rel_com = top_rel_pos + top.com
                                dists.append((np.abs(top_rel_com)*2 - bottom.dimensions)[:2])
                            #print('Pairs:', pairs, dists)
                    #print('PW Stable:', tp.tower_is_pairwise_stable(block_tower))
                    #print('Global Stable:', tp.tower_is_stable(block_tower))

                    if False and reward != 0:
                        print(reward, max_reward)
                        w = World(block_tower)
                        env = Environment([w], vis_sim=True, vis_frames=True)
                        input()
                        for tx in range(240):
                            env.step(vis_frames=True)
                            time.sleep(1/240.)
                        env.disconnect()

                # Note that in general max reward may not be the best possible due to sampling.
                #ground_truth = np.sum([np.max(b.dimensions) for b in blocks])
                #print(max_reward, ground_truth)

                # Compare heights and calculate regret.
                regret = (max_reward - reward)/max_reward
                #print(reward, max_reward)
                #print(regret)
                curr_regrets.append(regret)
                curr_rewards.append(reward)

            if args.exec_mode == 'noisy-model' or args.exec_mode == 'simple-model':
                regrets[k].append(curr_regrets)
                rewards[k].append(curr_rewards)

        if args.max_acquisitions is not None:
            if args.exec_mode == 'noisy-model' or args.exec_mode == 'simple-model':
                with open(logger.get_figure_path(fname+'_regrets.pkl'), 'wb') as handle:
                    pickle.dump(regrets, handle)
                with open(logger.get_figure_path(fname+'_rewards.pkl'), 'wb') as handle:
                    pickle.dump(rewards, handle)

    # if just ran for one acquisition step, output final regret and reward
    if args.acquisition_step is not None:
        if args.exec_mode == 'noisy-model' or args.exec_mode == 'simple-model':
            final_median_regret = np.median(regrets[k][0])
            final_upper75_regret = np.quantile(regrets[k][0], 0.75)
            final_lower25_regret = np.quantile(regrets[k][0], 0.25)

            final_median_reward = np.median(rewards[k][0])
            final_upper75_reward = np.quantile(rewards[k][0], 0.75)
            final_lower25_reward = np.quantile(rewards[k][0], 0.25)

            final_average_regret = np.average(regrets[k][0])
            final_std_regret = np.std(regrets[k][0])
            final_average_reward = np.average(rewards[k][0])
            final_std_reward = np.std(rewards[k][0])

            print('Final Median Regret: (%f) %f (%f)' % (final_lower25_regret, final_median_regret, final_upper75_regret))
            print('Final Median Reward: (%f) %f (%f)' % (final_lower25_reward, final_median_reward, final_upper75_reward))
            print('Final Average Regret: %f +/- %f' % (final_average_regret, final_std_regret))
            print('Final Average Reward: %f +/- %f' % (final_average_reward, final_std_reward))
def setup_panda_world(robot, blocks, xy_poses=None, use_platform=True):
    # Adjust robot position such that measurements match real robot reference frame
    robot_pose = numpy.eye(4)
    robot.set_transform(robot_pose)

    pddl_blocks = []
    full_urdf_folder = 'pb_robot/tmp_urdfs'
    if not os.path.exists(full_urdf_folder):
        os.makedirs(full_urdf_folder)

    for block in blocks:
        pb_block_fname = create_pb_robot_urdf(block, block.name + '.urdf')
        pddl_block = pb_robot.body.createBody(pb_block_fname)
        pddl_blocks.append(pddl_block)

    table_x_offset = 0.2
    floor_path = 'tamp/models/panda_table.urdf'
    shutil.copyfile(floor_path, 'pb_robot/models/panda_table.urdf')
    table_file = os.path.join('models', 'panda_table.urdf')
    pddl_table = pb_robot.body.createBody(table_file)
    pddl_table.set_point([table_x_offset, 0, 0])

    frame_path = 'tamp/models/panda_frame.urdf'
    shutil.copyfile(frame_path, 'pb_robot/models/panda_frame.urdf')
    frame_file = os.path.join('models', 'panda_frame.urdf')
    pddl_frame = pb_robot.body.createBody(frame_file)
    pddl_frame.set_point([table_x_offset + 0.762 - 0.0127, 0 + 0.6096 - 0.0127, 0])

    wall_path = 'tamp/models/walls.urdf'
    shutil.copyfile(wall_path, 'pb_robot/models/walls.urdf')
    wall_file = os.path.join('models', 'walls.urdf')
    pddl_wall = pb_robot.body.createBody(wall_file)
    pddl_wall.set_point([table_x_offset + 0.762 + 0.005, 0, 0])

    # Set the initial positions randomly on table.
    if xy_poses is None:
        storage_poses = [(-0.4, -0.45), (-0.4, -0.25),  # Left Corner
                         (-0.25, -0.5), (-0.4, 0.25),   # Back Center
                         (-0.4, 0.45), (-0.25, 0.5),    # Right Corner
                         (-0., -0.5), (0., -0.35),      # Left Side
                         (-0., 0.5), (0., 0.35)]        # Right Side
        print('Placing blocks in storage locations...')
        for ix, block in enumerate(pddl_blocks):
            x, y = storage_poses[ix]
            dimensions = numpy.array(block.get_dimensions()).reshape((3, 1))
            if ix < 6 and (ix not in [2, 5]):
                # Back storage should have long side along y-axis.
                for rot in all_rotations():
                    rot_dims = numpy.abs(rot.as_matrix() @ dimensions)[:, 0]
                    if rot_dims[1] >= rot_dims[0] and rot_dims[1] >= rot_dims[2]:
                        block.set_base_link_pose(((x, y, 0.), rot.as_quat()))
                        break
            else:
                # Side storage should have long side along x-axis.
                for rot in all_rotations():
                    rot_dims = numpy.abs(rot.as_matrix() @ dimensions)[:, 0]
                    if rot_dims[0] >= rot_dims[1] and rot_dims[0] >= rot_dims[2]:
                        block.set_base_link_pose(((x, y, 0.), rot.as_quat()))
                        break
            z = pb_robot.placements.stable_z(block, pddl_table)
            block.set_base_link_point([x, y, z])
    else:
        for i, (block, xy_pose) in enumerate(zip(pddl_blocks, xy_poses)):
            full_pose = Pose(Position(xy_pose.pos.x, xy_pose.pos.y, xy_pose.pos.z),
                             xy_pose.orn)
            block.set_base_link_pose(full_pose)

    # Setup platform.
    if use_platform:
        platform, leg = Object.platform()
        pb_platform_fname = create_pb_robot_urdf(platform, 'platform.urdf')
        pb_leg_fname = create_pb_robot_urdf(leg, 'leg.urdf')
        pddl_platform = pb_robot.body.createBody(pb_platform_fname)
        pddl_leg = pb_robot.body.createBody(pb_leg_fname)

        rotation = pb_robot.geometry.Euler(yaw=numpy.pi / 2)
        pddl_platform.set_base_link_pose(
            pb_robot.geometry.multiply(pb_robot.geometry.Pose(euler=rotation),
                                       pddl_platform.get_base_link_pose()))
        pddl_leg.set_base_link_pose(
            pb_robot.geometry.multiply(pb_robot.geometry.Pose(euler=rotation),
                                       pddl_leg.get_base_link_pose()))

        table_z = pddl_table.get_base_link_pose()[0][2]
        pddl_leg.set_base_link_point([0.7, -0.4, table_z + leg.dimensions.z / 2])
        pddl_platform.set_base_link_point([0.7, -0.4,
                                           table_z + leg.dimensions.z + platform.dimensions.z / 2.])
    else:
        pddl_platform = None
        pddl_leg = None

    return pddl_blocks, pddl_platform, pddl_leg, pddl_table, pddl_frame, pddl_wall