def define_jobs_context(self, context):
    """Schedule Exp23 jobs: agent learning, diffeo videos, episode conversion, publishing."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir('default')

    # Diffeomorphism learners run concurrently (8 at a time); the cheap
    # statistics agent runs through the plain parallel recipe.
    recipe_agentlearn_by_parallel_concurrent(
        context, data_central, Exp23.explogs_learn, n=8,
        only_agents=['exp23_diffeof', 'exp23_diffeo_fast'])
    recipe_agentlearn_by_parallel(context, data_central, Exp23.explogs_learn,
                                  only_agents=['stats2'])

    from diffeo2dds_learn.programs.devel.save_video import video_visualize_diffeo_stream1_robot
    videos_dir = os.path.join(context.get_output_dir(), 'videos')
    for cc, id_robot in iterate_context_names(context, Exp23.robots):
        out = os.path.join(videos_dir, '%s-diffeo_stream1.mp4' % id_robot)
        cc.comp_config(video_visualize_diffeo_stream1_robot,
                       id_robot=id_robot, boot_root=boot_root, out=out)

    for id_robot in Exp23.robots:
        recipe_episodeready_by_convert2(context, boot_root, id_robot)

    jobs_publish_learning_agents_robots(context, boot_root,
                                        Exp23.agents, Exp23.robots)
def get_simple_scenarios(context):
    """Return the simple POMDP scenarios, each with a 'pomdp' instancing job attached."""
    pomdp_scenarios = [
        dict(id_pomdp='idec-test01', expected_ntrajectories=2,
             expected_nbits=0, expected_nstates=1),
        dict(id_pomdp='idec-test02', expected_ntrajectories=2,
             expected_nbits=0, expected_nstates=1),
        dict(id_pomdp='idec-test03', expected_ntrajectories=4,
             expected_nbits=1, expected_nstates=2),
        dict(id_pomdp='idec-test04', expected_ntrajectories=3,
             expected_nbits=0, expected_nstates=1),
    ]

    # Configuration lives in two packages; load both at once.
    from pkg_resources import resource_filename  # @UnresolvedImport
    dirs = [
        resource_filename("tmdp", "configs"),
        resource_filename("gridworld", "configs"),
    ]
    GlobalConfig.global_load_dirs(dirs)

    ts = []
    for scenario in pomdp_scenarios:
        # Attach the job that instances the POMDP for this scenario.
        scenario['pomdp'] = context.comp_config(instance_mdp, scenario['id_pomdp'])
        ts.append(scenario)
    return ts
def define_jobs_context(self, context):
    """Define learning and servoing jobs for the JBDS estimation experiments."""
    # ${B11_SRC} is an environment variable expanded by conf_tools.
    GlobalConfig.global_load_dir('${B11_SRC}/bvapps/bdse1')
    rm = context.get_report_manager()
    rm.set_html_resources_prefix('jbds-est')
    data_central = self.get_data_central()
    # Learning from real robot logs...
    jobs_learn_real(context, data_central,
                    real_robots=JBDSEstimation.real_robots,
                    explogs_learn=JBDSEstimation.explogs_learn)
    # ...and from freshly simulated episodes.
    jobs_learn_simulations(context, data_central,
                           simulated_robots=JBDSEstimation.simulated_robots,
                           num_sim_episodes=1000,
                           max_episode_len=30,
                           episodes_per_tranche=50,
                           explorer='expsw1')
    r = job_report_learn(context, JBDSEstimation.combs_estimation)
    context.add_report(r, 'learn_global')
    jobs_tex(context, JBDSEstimation.combs_estimation)
    # Servoing evaluation on the grid test logs; xy and theta results
    # are summarized by separate reports below.
    jobs_servo(context, data_central,
               combinations=JBDSEstimation.combs_servo,
               explogs_test=JBDSEstimation.grids)
    jobs_report_summary_servo_xy(context,
                                 combinations=JBDSEstimation.combs_servo,
                                 explogs_test=JBDSEstimation.grids_xy)
    r = report_summary_servo_theta(context,
                                   combinations=JBDSEstimation.combs_servo_th,
                                   explogs_test=JBDSEstimation.grids_th)
    context.add_report(r, 'servo_theta_global')
def define_jobs_context(self, context): from pkg_resources import resource_filename # @UnresolvedImport config_dir = resource_filename("yc1304.uzhturtle", "config") GlobalConfig.global_load_dir(config_dir) GlobalConfig.global_load_dir('${DATASET_UZHTURTLE}') rm = context.get_report_manager() rm.set_html_resources_prefix('uzh-turtle-stats') data_central = self.get_data_central() logs = list(self.get_explogs_by_tag('uzhturtle')) agents = ['stats2', 'bdse_e1_ss'] robots = ['uzhturtle_un1_cf1', 'uzhturtle_un1_cf1_third'] for c, id_explog in iterate_context_explogs(context, logs): explog = get_conftools_explogs().instance(id_explog) print id_explog, explog jobs_learn_parallel(context, data_central=data_central, explogs_learn=logs, agents=agents, robots=robots, episodes_per_tranche=2)
def get_output_signals(self):
    """Return the list of signal names this block outputs.

    Loads any configured config dirs, instances the raw log, and either
    exposes all of its signals (``config.signals is None``) or validates
    the user-supplied comma-separated subset.

    Raises:
        BadConfig: if ``config.signals`` is empty after splitting, or
            names a signal not present in the log.
    """
    dirnames = self.config.config_dirs.split(':')
    for d in dirnames:
        if d:  # skip empty entries from leading/trailing/double colons
            GlobalConfig.global_load_dir(d)
    rawlog = self.config.rawlog
    id_rawlog, self.rawlog = get_conftools_rawlogs().instance_smarter(rawlog)
    all_signals = self.rawlog.get_signals()
    if self.config.signals is None:
        # No explicit selection: expose every signal in the log.
        self.signals = list(all_signals.keys())
    else:
        # Materialize as a list: under Python 3, filter() returns an
        # always-truthy iterator, so the emptiness check below would
        # silently never fire.
        signal_list = [x for x in self.config.signals.split(',') if x]
        if not signal_list:
            msg = 'Bad format: %r.' % self.config.signals
            raise BadConfig(msg, self, 'signals')
        self.signals = []
        for s in signal_list:
            if s not in all_signals:
                msg = ('Signal %r not present in log %r (available: %r)'
                       % (s, id_rawlog, all_signals.keys()))
                raise BadConfig(msg, self, 'signals')
            self.signals.append(s)
    return self.signals
def main(self):
    """ROS entry point for the servo demo node.

    Loads configuration and the learned agent state, instances the robot,
    initializes servo-loop state, registers the control services, and
    enters the main loop via self.go().
    """
    # disable_signals=True: we handle shutdown ourselves.
    rospy.init_node('servo_demo', disable_signals=True)
    self.info('Started.')
    # Turn off contracts checking for runtime speed.
    contracts.disable_all()
    boot_root = rospy.get_param('~boot_root')
    boot_root = expand_environment(boot_root)
    config_dir = rospy.get_param('~config_dir')
    id_robot_learned = rospy.get_param('~id_robot_learn')
    self.info('loading %r' % config_dir)
    GlobalConfig.global_load_dir(config_dir)
    id_agent = rospy.get_param('~id_agent')
    self.id_robot = rospy.get_param('~id_robot')
    self.sleep = rospy.get_param('~sleep', 0.005)
    self.info('sleep: %s' % self.sleep)
    self.error_threshold = float(rospy.get_param('~error_threshold'))
    raise_if_no_state = rospy.get_param('~raise_if_no_state', True)
    data_central = DataCentral(boot_root)
    # Load the agent state learned for id_robot_learned — which may differ
    # from the robot we are actually running on (self.id_robot).
    ag_st = load_agent_state(data_central, id_agent, id_robot_learned,
                             reset_state=False,
                             raise_if_no_state=raise_if_no_state)
    self.agent, state = ag_st
    self.info('Loaded state: %s' % state)
    self.servo_agent = self.agent.get_servo()
    bo_config = get_boot_config()
    self.robot = bo_config.robots.instance(self.id_robot)
    self.boot_spec = self.robot.get_spec()
    self.publish_info_init()
    # --- servo-loop state ---
    self.y = None              # latest observation
    self.y_goal = None         # goal observation
    self.started_now = False
    self.stopped_now = False
    self.e0 = 1                # reference error (used for normalization)
    self.e = 1                 # current error
    self.last_boot_data = None
    self.state = STATE_WAIT
    self.info('Defining services')
    rospy.Service('set_goal', Empty, self.srv_set_goal)
    rospy.Service('start_servo', Empty, self.srv_start_servo)
    rospy.Service('stop_servo', Empty, self.srv_stop_servo)
    self.info('Finished initialization')
    self.count = 0
    self.go()
def jobs_comptests(context):
    """Register the comptests jobs for the example package."""
    # Load the package configuration first.
    from conf_tools import GlobalConfig
    GlobalConfig.global_load_dir("example_package.configs")

    # Importing the tests module registers the test cases (side effect).
    from . import unittests

    # Hand the loaded configuration to the comptests registrar.
    from comptests import jobs_registrar
    jobs_registrar(context, get_example_package_config())
def define_jobs_context(self, context):
    """Schedule the combined learn-in-parallel + servo jobs for Exp33."""
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir('default')
    jobs_learnp_and_servo(context,
                          data_central=data_central,
                          explogs_learn=Exp33.explogs_learn,
                          explogs_test=Exp33.explogs_test,
                          agents=Exp33.agents,
                          robots=Exp33.robots)
def define_jobs_context(self, context):
    """Exp32: concurrent learning with repetitions, conversion, publishing."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir('default')

    # Up to 8 concurrent learners, each log repeated at most 10 times.
    recipe_agentlearn_by_parallel_concurrent_reps(
        context, data_central, Exp32.explogs_learn, n=8, max_reps=10)

    for robot in Exp32.robots:
        recipe_episodeready_by_convert2(context, boot_root, robot)

    jobs_publish_learning_agents_robots(context, boot_root,
                                        Exp32.agents, Exp32.robots)
def define_jobs_context(self, context):
    """Exp27: repeated concurrent diffeo learning, stats learning, publishing."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir("default")

    # The heavy diffeo learner runs concurrently with repetitions;
    # the cheap statistics agents go through the plain parallel recipe.
    recipe_agentlearn_by_parallel_concurrent_reps(
        context, data_central, Exp27.explogs_learn,
        n=8, max_reps=20, only_agents=["exp23_diffeof"])
    recipe_agentlearn_by_parallel(context, data_central, Exp27.explogs_learn,
                                  only_agents=["stats2", "cmdstats"])

    for robot in Exp27.robots:
        recipe_episodeready_by_convert2(context, boot_root, robot)

    jobs_publish_learning_agents_robots(context, boot_root,
                                        Exp27.agents, Exp27.robots)
def comp_config_dynamic(self, f, *args, **kwargs) -> Promise:
    """ Defines jobs that will take a "context" argument to define more jobs.

        The current GlobalConfig state is captured and re-applied inside
        the job, so the config travels with the computation.
    """
    config_state = GlobalConfig.get_state()
    # so that compmake can use a good name (don't clobber a caller-supplied one)
    kwargs.setdefault('command_name', f.__name__)
    return self.comp_dynamic(wrap_state_dynamic, config_state, f, *args, **kwargs)
def comp_config(self, f, *args, **kwargs):
    """ Like comp, but we also automatically save the GlobalConfig state.

        The state is re-applied by wrap_state before f runs.
    """
    config_state = GlobalConfig.get_state()
    # so that compmake can use a good name; setdefault (rather than a plain
    # assignment) keeps a caller-supplied command_name, consistent with
    # comp_config_dynamic and the typed variant of this method.
    kwargs.setdefault('command_name', f.__name__)
    return self.comp(wrap_state, config_state, f, *args, **kwargs)
def comp_config_dynamic(self, f, *args, **kwargs):
    """ Defines jobs that will take a "context" argument to define more jobs.

        The current GlobalConfig state is captured and re-applied inside
        the job, so the config travels with the computation.
    """
    config_state = GlobalConfig.get_state()
    # so that compmake can use a good name (don't clobber a caller-supplied one)
    kwargs.setdefault("command_name", f.__name__)
    return self.comp_dynamic(wrap_state_dynamic, config_state, f, *args, **kwargs)
def define_jobs_context(self, context):
    """Instance nose and comptests jobs for all discovered modules.

    Honors CircleCI parallelism (CIRCLE_NODE_INDEX / CIRCLE_NODE_TOTAL) by
    partitioning the module list round-robin across workers, and optionally
    enables coverage collection.

    Raises:
        Exception: if no modules are found.
    """
    CompTests.global_output_dir = self.get_options().output
    self.info('Setting output dir to %s' % CompTests.global_output_dir)
    CompTests.output_dir_for_current_test = None
    GlobalConfig.global_load_dir('default')

    modules = self.get_modules()

    if self.options.circle:
        env = os.environ
        v_index, v_total = 'CIRCLE_NODE_INDEX', 'CIRCLE_NODE_TOTAL'
        if v_index in env and v_total in env:
            index = int(env[v_index])
            total = int(env[v_total])
            msg = 'Detected I am worker #%s of %d in CircleCI.' % (index, total)
            self.info(msg)
            # Round-robin partition: worker k takes modules k, k+total, ...
            mine = modules[index::total]
            msg = 'I am only doing these modules: %s, instead of %s' % (mine, modules)
            self.info(msg)
            modules = mine

    if not modules:
        raise Exception('No modules found.')  # XXX: what's the nicer way?

    options = self.get_options()
    do_coverage = options.coverage
    if do_coverage:
        import coverage
        coverage.process_startup()

    if not options.nonose:
        self.instance_nosetests_jobs(context, modules, do_coverage)
    # self.instance_nosesingle_jobs(context, modules)
    if not options.nocomp:
        self.instance_comptests_jobs(context, modules,
                                     create_reports=options.reports,
                                     do_coverage=do_coverage)
def define_jobs_context(self, context):
    """Instance nose and comptests jobs for all discovered modules."""
    GlobalConfig.global_load_dir('default')
    modules = self.get_modules()
    if not modules:
        # XXX: what's the nicer way?
        raise Exception('No modules found.')
    options = self.get_options()
    if not options.nonose:
        self.instance_nosetests_jobs(context, modules, options.coverage)
        # self.instance_nosesingle_jobs(context, modules)
    if not options.nocomp:
        self.instance_comptests_jobs(context, modules,
                                     create_reports=options.reports)
def define_jobs_context(self, context):
    """Exp29: learning, episode conversion, and per-(episode, robot) navigation maps."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir('default')

    recipe_agentlearn_by_parallel(context, data_central, Exp29.explogs_learn)

    for id_robot in Exp29.robots:
        recipe_episodeready_by_convert2(context, boot_root, id_robot)

    outdir = context.get_output_dir()
    for c, id_episode, id_robot in iterate_context_names_pair(
            context, Exp29.nmaps, Exp29.robots):
        jobs_navigation_map(c, outdir=outdir,
                            data_central=data_central,
                            id_robot=id_robot,
                            id_episode=id_episode)
def comp_config(self, f, *args, **kwargs) -> Promise:
    """ Like comp, but we also automatically save the GlobalConfig state.

        The state is re-applied by wrap_state before f runs.
    """
    config_state = GlobalConfig.get_state()
    # so that compmake can use a good name (don't clobber a caller-supplied one)
    kwargs.setdefault('command_name', f.__name__)
    return self.comp(wrap_state, config_state, f, *args, **kwargs)
def define_jobs_context(self, context):
    """Define turtle-plot jobs for every uzhturtle exploration log."""
    from pkg_resources import resource_filename  # @UnresolvedImport
    # Package-local config plus the dataset directory; the environment
    # variable is expanded by conf_tools.
    GlobalConfig.global_load_dir(resource_filename("yc1304.uzhturtle", "config"))
    GlobalConfig.global_load_dir("${DATASET_UZHTURTLE}")

    context.get_report_manager().set_html_resources_prefix("uzh-turtle-plots")

    data_central = self.get_data_central()
    id_robot = "uzhturtle_un1_cf1_third"
    recipe_episodeready_by_convert2(context, boot_root=self.get_boot_root())

    logs = list(self.get_explogs_by_tag("uzhturtle"))
    for c, id_explog in iterate_context_explogs(context, logs):
        jobs_turtleplot(c, data_central, id_robot, id_episode=id_explog)
def define_jobs_context(self, context):
    """Exp24: concurrent diffeo learning, stats learning, conversion, publishing."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    GlobalConfig.global_load_dir('default')

    # Heavy diffeo learners run 8-way concurrent; stats goes through the
    # plain parallel recipe.
    recipe_agentlearn_by_parallel_concurrent(
        context, data_central, Exp24.explogs_learn, n=8,
        only_agents=['exp23_diffeof', 'exp23_diffeo_fast'])
    recipe_agentlearn_by_parallel(context, data_central, Exp24.explogs_learn,
                                  only_agents=['stats2'])

    for id_robot in Exp24.robots:
        recipe_episodeready_by_convert2(context, boot_root, id_robot)

    jobs_publish_learning_agents_robots(context, boot_root,
                                        Exp24.agents, Exp24.robots)
def define_jobs_context(self, context):
    """Exp42: simulated-episode generation plus per-combination learning."""
    boot_root = self.get_boot_root()
    data_central = self.get_data_central()
    # for vehicles
    GlobalConfig.global_load_dir('${B11_SRC}/bvapps/bdse1')

    recipe_episodeready_by_convert2(context, boot_root)
    recipe_episodeready_by_simulation_tranches(
        context, data_central,
        explorer=Exp42.explorer,
        episodes=Exp42.simulated_episodes,
        max_episode_len=30,
        episodes_per_tranche=50)

    # One learning recipe per (robot, episodes) combination.
    for comb in Exp42.combinations:
        recipe_agentlearn_by_parallel(context, data_central, comb['episodes'],
                                      only_robots=[comb['id_robot']],
                                      intermediate_reports=False,
                                      episodes_per_tranche=50)

    jobs_publish_learning_agents_robots(context, boot_root,
                                        Exp42.agents, Exp42.robots)
def main(): from vehicles_cairo import (cairo_ref_frame, cairo_rototranslate, vehicles_cairo_display_png) # Instance robot object id_robot = 'r_cam_A' filename = 'test.png' resolution = 0.5 config = get_boot_config() cd1 = '/Users/andrea/scm/boot11_env/src/vehicles/src/vehicles/configs' cd2 = '/Users/andrea/scm/boot11_env/src/bvapps/bo_app1/config' GlobalConfig.global_load_dir('default') GlobalConfig.global_load_dir(cd1) GlobalConfig.global_load_dir(cd2) robot = config.robots.instance(id_robot) # @UndefinedVariable robot.new_episode() locations = get_grid(robot=robot, debug=True, world=robot.world, vehicle=robot.vehicle, resolution=resolution) poses = [f['pose'] for f in locations] # poses = elastic(poses, alpha=0.1, num_iterations=20) print('Converting to yaml...') robot.vehicle.set_pose(poses[0]) state = robot.to_yaml() pprint(state) print(yaml_dump(state)) def extra_draw_world(cr): for pose in poses: with cairo_rototranslate(cr, SE2_from_SE3(pose)): cairo_ref_frame(cr, l=0.5, x_color=[0, 0, 0], y_color=[0, 0, 0]) plotting_params = {} plotting_params['extra_draw_world'] = extra_draw_world print('Writing to: %r' % filename) vehicles_cairo_display_png(filename, width=800, height=800, sim_state=state, **plotting_params) print('... done')
def initial_setup(self):
    """Load every configured configuration directory."""
    GlobalConfig.global_load_dirs(self.get_config_dirs())
def define_jobs_context(self, context):
    """Define the dptr1 pipeline: diffeomorphism learning, distance/prediction
    statistics, visualization, and the planning test-case batches."""
    # Load defaults for everybody
    GlobalConfig.global_load_dir('default')
    # Load our specific config
    from pkg_resources import resource_filename  # @UnresolvedImport
    config_dir = resource_filename("dptr1", "configs")
    GlobalConfig.global_load_dir(config_dir)
    # So we can load objects we create
    GlobalConfig.global_load_dir(context.get_output_dir())
    # get_diffeo2ddslearn_config().load('default')

    # First, learn the diffeomorphisms
    stream = "orbit-pt256-80"
    estimator = 'test_ddsest_unc_refine2'  # "n35s"
    max_displ = 0.35
    learned = context.subtask(DDSLLearn, estimator=estimator, stream=stream,
                              max_displ=max_displ)
    rm = context.get_resource_manager()
    # Register the learned system under the id that later tasks expect.
    rm.set_resource(learned, 'discdds',
                    id_discdds='test_ddsest_unc_refine2_orbit-pt256-80')

    # dp plearn -s $stream -l n35s -c "clean *summarize*; parmake "
    # dp plearn -s $stream -l n35o -c "clean *summarize*; parmake "
    distances = "L1,L2,L1w,L2w,cL2,cL1,cD10,D10,cD20,D20,cD30,D30,N10,N20,N30,cN10,cN20,cN30"
    # dp dist-stats -o $out/dp-dist-stats -d $distances -s $stream -c "clean report*; parmake"
    context.subtask(DPDistStats, streams=stream, distances=distances)

    dds_orbit = "%s-%s" % (estimator, stream)
    # dp pred-stats -o $out/dp-pred-stats -d $distances -s $stream --dds $stream-n35s -c "clean report*; parmake"
    # dp pred-stats -o $out/dp-pred-stats -d $distances -s $stream --dds $stream-n35o -c "clean report*; parmake"
    context.subtask(DPPredStats, distances=distances, streams=stream,
                    dds=dds_orbit)

    # dp1 show-discdds-geo -o $out/dp-show-discdds-geo -t 0.04 dcl4r80
    dds_cl = 'sym-dcl4r-80'
    context.subtask(DPShowGeo, discdds=dds_cl, tolerance=0.4)
    # dp1 show-discdds-geo -o $out/dp-show-discdds-geo h1orbit-pt256-80-n35s
    context.subtask(DPShowGeo, discdds=dds_orbit, tolerance=0.3)
    # dp1 show-discdds-fill -o $out/dp-show-discdds-fill -v 0.5 h1orbit-pt256-80-n35s
    context.subtask(DPShowFill, min_visibility=0.5, discdds=dds_orbit)
    # dp1 show-discdds-fill -o $out/dp-show-discdds-fill -v 0.5 dpx1
    context.subtask(DPShowFill, min_visibility=0.5, discdds='sym-dpx1-80')
    # dp1 show-discdds-fill -o $out/dp-show-discdds-fill --collapse_threshold 0.5 -v 0.5 dtx1
    context.subtask(DPShowFill, min_visibility=0.5, discdds='sym-dtx1-80',
                    collapse_threshold=0.5)
    # dp1 show-discdds-fill -o $out/dp-show-discdds-fill -v 0.5 dcl4r80
    context.subtask(DPShowFill, min_visibility=0.5, discdds=dds_cl,
                    collapse_threshold=0.5)
    context.subtask(DPShowFill, min_visibility=0.5, discdds='sym-dpx3-80')
    context.subtask(DPShowFill, min_visibility=0.5, discdds='sym-dptcam3-80')

    # Planning test cases: one set per delay value 1..12.
    c_planning = context.child('planning')
    alltestcases = []
    for delay in range(1, 13):
        # XXX: I didn't want to change the name in the configuration
        pattern = 'tc_h1orbit-pt256-80-n35s_d%d' % (delay) + '_%03d'
        tcs = c_planning.subtask(DPLogCases, stream=stream, pattern=pattern,
                                 dds=dds_orbit, delay=delay)
        alltestcases.extend(tcs)
    # for id_tc in id_tc2job:
    #     testcases = get_conftools_testcases()
    #     testcases.add_spec(id_tc, 'generated', ['', {}])
    c_planning.checkpoint('testcases')

    # nice -n 10 dp batch -o out.tr1/dp-batch tr1_orbit_r80 -c "parmake n=6"
    # nice -n 10 dp batch -o out.tr1/dp-batch tr1_park_r80 -c "parmake n=4"
    # nice -n 10 dp batch -o out.tr1/dp-batch tr1_park_r128 -c "parmake n=4"
    batches = ['tr1_orbit_r80', 'tr1_park_r80', 'tr1_park_r128']
    c_planning.subtask(DPBatch, batches=",".join(batches),
                       alltestcases=alltestcases)
def initial_setup(self):
    """Load the configuration directory given on the command line."""
    config_dir = self.options.config
    GlobalConfig.global_load_dir(config_dir)
def initial_setup(self):
    """Load the configuration for all modules from the given directory."""
    directory = self.get_options().directory
    self.info('loading config: %r' % directory)
    GlobalConfig.global_load_dir(directory)
def initial_setup(self):
    """Load every configuration directory passed via the options."""
    dirs = self.get_options().config_dirs
    GlobalConfig.global_load_dirs(dirs)
from mocdp.dp_report.html import ast_to_html import os from contracts import contract from conf_tools import locate_files, GlobalConfig GlobalConfig.global_load_dir('mocdp') from reprep.utils.natsorting import natsorted files = [os.path.join('../../mocdp/', f) for f in [ 'examples/example-battery/battery.mcdp', 'examples/example-catalogue/catalogue1.mcdp', 'examples/example-catalogue/choose.mcdp', 'examples/example-catalogue/catalogue_plus_coproduct-compacter.mcdp', ]] files.extend([ 'big2.mcdp', ]) descriptions = { 'battery': """ ## Energetics + actuation This example shows co-design of **energetics** (choose the battery) and **actuation**. The **recursive co-design constraint** is that the actuators must generate lift to transport the battery, and the battery must provide power to the actuators.
def initial_setup(self):
    """Load the default rs2b configuration, plus an optional extra config dir."""
    rs2b_config = get_rs2b_config()
    rs2b_config.load(rs2b_config.get_default_dir())
    extra = self.options.config
    if extra != '':
        GlobalConfig.global_load_dir(extra)
def initial_setup(self):
    """Load config dirs and make numpy raise on any floating-point error."""
    GlobalConfig.global_load_dirs(self.get_config_dirs())
    np.seterr(all='raise')
from mocdp.dp_report.html import ast_to_html import os from contracts import contract from conf_tools import locate_files, GlobalConfig GlobalConfig.global_load_dir('mocdp') from reprep.utils.natsorting import natsorted files = [ os.path.join('../../mocdp/', f) for f in [ 'examples/example-battery/battery.mcdp', 'examples/example-catalogue/catalogue1.mcdp', 'examples/example-catalogue/choose.mcdp', 'examples/example-catalogue/catalogue_plus_coproduct-compacter.mcdp', ] ] files.extend([ 'big2.mcdp', ]) descriptions = { 'battery': """ ## Energetics + actuation This example shows co-design of **energetics** (choose the battery) and **actuation**. The **recursive co-design constraint** is that the actuators must generate lift to transport the battery, and the battery must provide