def cmd_create_launch(data_central, argv):
    '''Creates ROS launch files for all combinations of agents and robots.'''
    parser = OptionParser(prog='create-launch',
                          usage=cmd_create_launch.short_usage)
    parser.disable_interspersed_args()
    parser.add_option("-a", "--agent", dest='id_agent', help="Agent ID")
    parser.add_option("-r", "--robot", dest='id_robot', help="Robot ID")
    parser.add_option('-p', '--package', dest='package',
                      default='bootstrapping_adapter',
                      help='Which ROS package to put the launch file in '
                           '[%default].')
    # TODO: bag
    (options, args) = parser.parse_args(argv)
    check_no_spurious(args)

    bo_config = data_central.get_bo_config()

    if options.id_agent is None:
        id_agents = bo_config.agents.keys()
    else:
        id_agents = [options.id_agent]  # TODO: expand

    if options.id_robot is None:
        id_robots = bo_config.robots.keys()
    else:
        id_robots = [options.id_robot]  # TODO: expand

    for id_agent in id_agents:
        check_contained(id_agent, bo_config.agents)
    for id_robot in id_robots:
        check_contained(id_robot, bo_config.robots)

    from roslib import packages  # @UnresolvedImport
    out = packages.get_pkg_dir(options.package)
    outdir = os.path.join(out, 'launch', 'boot_olympics')

    root = os.path.realpath(data_central.root)

    for id_robot, id_agent in itertools.product(id_robots, id_agents):
        robot_ros_node = wrap_python_robot(bo_config.robots[id_robot], root)
        agent_ros_node = wrap_python_agent(bo_config.agents[id_agent], root)

        xml = create_launch_xml(agent_ros_node=agent_ros_node,
                                robot_ros_node=robot_ros_node,
                                agent_node_id='my_agent',
                                robot_node_id='my_robot',
                                namespace='boot_olympics',
                                bag=None,
                                output=None)

        basename = '%s-%s' % (id_robot, id_agent)
        filename = os.path.join(outdir, '%s.launch' % basename)
        make_sure_dir_exists(filename)
        with open(filename, 'w') as f:
            f.write('<!-- Created by %s on %s -->\n' % ('boot_olympics',
                                                        isodate()))
            f.write(xml)
        logger.info('Writing to %r.' % filename)
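
# Example usage (illustrative sketch, not part of the original source):
# generate launch files for every configured agent/robot combination.
# `data_central` is assumed to be constructed by the surrounding framework
# and to expose get_bo_config() and `root` as used above; `short_usage` is
# assumed to normally be attached by the command dispatcher, so it is set
# here explicitly.
def _example_create_all_launches(data_central):
    cmd_create_launch.short_usage = 'create-launch [options]'
    # No --agent/--robot given, so all agents and robots in the
    # configuration are used.
    cmd_create_launch(data_central, [])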
def __init__(self, id_agent, id_robot):
    self.id_agent = id_agent
    self.id_robot = id_robot
    self.id_episodes = set()
    self.num_observations = 0
    self.agent_state = None
    self.id_state = isodate()
def get_sensel_pgftable(V, what, desc='No description given'):
    s = StringIO()
    s.write('# Created by get_sensel_pgftable()\n')
    s.write('# %s \n' % isodate())
    s.write('# sensel: counter of the sensel from 0 to n-1.\n')
    s.write('# %s: %s \n' % (what, desc))
    s.write('# senseln: counter of the sensel normalized to [0, 1].\n')
    s.write('sensel senseln %s\n' % what)
    index = np.linspace(0, 1, V.size)
    for i, x in enumerate(V):
        # Column order matches the header: sensel, senseln, <what>.
        s.write('%d %g %g\n' % (i, index[i], x))
    return s.getvalue()
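
# Example usage (illustrative sketch, not part of the original module): write
# a per-sensel curve to a text file in the format produced above. Only numpy
# and get_sensel_pgftable() are needed; the names and file path are
# hypothetical.
def _example_write_sensel_table(filename='response.table.txt'):
    V = np.cos(np.linspace(0, 2 * np.pi, 32))  # one value per sensel
    table = get_sensel_pgftable(V, 'response', desc='Example response curve')
    with open(filename, 'w') as f:
        f.write(table)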
def save_report(data_central, report, filename, resources_dir=None,
                save_pickle=False, save_hdf=True,
                check_hdf_written_correctly=True):
    """ Saves the report as HTML to ``filename`` (an .html file), and
        optionally also as a pickle and/or HDF file alongside it. """
    report.text('report_date', isodate())
    # TODO: add other stuff
    ds = data_central.get_dir_structure()

    report.to_html(filename, resources_dir=resources_dir)
    ds.file_is_done(filename)

    if save_pickle:
        pickle_name = os.path.splitext(filename)[0] + '.pickle'
        with warn_long_time_writing(pickle_name):
            safe_pickle_dump(report, pickle_name, protocol=2)
        ds.file_is_done(pickle_name)

    if save_hdf:
        hdf_name = os.path.splitext(filename)[0] + '.rr1.h5'
        with warn_long_time_writing(hdf_name):
            report.to_hdf(hdf_name)

        if check_hdf_written_correctly:
            with warn_long_time_reading(hdf_name, logger=logger):
                r2 = report_from_hdf(hdf_name)
            if report != r2:
                logger.error('Report read back from HDF does not match '
                             'the original.')
                logger.error(report.format_tree())
                logger.error(r2.format_tree())
                raise Exception('HDF round-trip check failed for %r.'
                                % hdf_name)

        ds.file_is_done(hdf_name)
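
# Example usage (illustrative sketch): save a report in all supported
# formats. `data_central` and `report` are assumed to come from the
# surrounding framework (a DataCentral-like object and a report supporting
# text()/to_html()/to_hdf(), as used above); the output path is hypothetical.
def _example_save_report(data_central, report):
    save_report(data_central, report,
                filename='out/reports/example_report.html',
                save_pickle=True,
                save_hdf=True,
                check_hdf_written_correctly=True)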