def rlearn(config, parser):  # @UnusedVariable
    """ Displays the learned DDS """
    t0 = time.time()
    # BUG FIX: optparse does not convert string defaults, so int-typed options
    # previously received str defaults ('4', '2') when the flag was omitted.
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("-r", "--nrefine", help="Number of time to refine learning",
                      type='int', default=2)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option("--sensels", default=None,
                      help="Sensel indices to use for debuging refining module")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show", default=None, help="Name of learners to report")

    options = parser.parse_options()

    # Report-only mode: make the reports for the given learners, then exit.
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()

    nthreads = options.nthreads
    nrefine = options.nrefine

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')

    # One compmake storage directory per (streams, learners) combination.
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine,
                options.sensels)

    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)

    rm.create_index_job()

    # BUG FIX: this log line used to sit after the return statements and was
    # therefore unreachable; log the elapsed setup time before dispatching.
    logger.info("Done after time: " + str(time.time() - t0) + ' seconds')

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def dp_predstats_main(config, parser):
    """ Computes prediction statistics for the given diffeosystems/streams. """
    parser.add_option("-o", "--output", default='out/dp-pred-stats',
                      help="Output directory")
    parser.add_option("-d", "--distances", default='*',
                      help="Comma-separated list of distances. Can use *.")
    parser.add_option("-S", "--dds",
                      help="Comma-separated list of diffeosystems.")
    parser.add_option("-s", "--streams",
                      help="Comma-separated list of streams.")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")

    options = parser.parse_options()

    # Both -s and -S are mandatory; fail early with a clear message.
    if not options.streams:
        msg = 'Please specify streams using -s.'
        raise UserError(msg)
    if not options.dds:
        msg = 'Please specify which discdds to use.'
        raise UserError(msg)

    distances = config.distances.expand_names(options.distances)
    streams = config.streams.expand_names(options.streams)
    # BUG FIX: the discdds names were expanded against config.streams;
    # use config.discdds, consistent with the other commands in this file.
    dds = config.discdds.expand_names(options.dds)

    logger.info('Using distances: %s' % distances)
    logger.info('Using streams: %s' % streams)
    logger.info('Using discdds: %s' % dds)

    outdir = '%s/%s' % (options.output, options.dds)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    rm = ReportManager(os.path.join(outdir, "reports"))

    # for id_discdds in dds:
    create_predstats_jobs(config=config, distances=distances,
                          id_discdds=dds, streams=streams, rm=rm, maxd=10)

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def dp_dist_stats(config, parser):  # @UnusedVariable
    """ Computes statistics for images distances for different plan steps. """
    parser.add_option("-o", "--output", default='out/dp-dist-stats',
                      help="Output directory")
    parser.add_option("-d", "--distances", default='*',
                      help="Comma-separated list of distances. Can use *.")
    parser.add_option("-s", "--streams", default='*',
                      help="Comma-separated list of streams. Can use *.")
    parser.add_option("-r", "--repeat", default=1, type='int',
                      help="Repeat many times.")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    # Expand wildcards and keep a natural (human) ordering.
    distances = natsorted(config.distances.expand_names(options.distances))
    streams = natsorted(config.streams.expand_names(options.streams))
    logger.info('Using distances: %s' % distances)
    logger.info('Using streams: %s' % streams)

    # One compmake storage directory per (streams, distances) combination.
    id_comb = ','.join(streams) + '-' + ','.join(distances)
    outdir = os.path.join(options.output, id_comb)
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, "reports"))
    create_diststats_jobs(config=config, distances=distances,
                          streams=streams, rm=rm, maxd=10)
    rm.create_index_job()

    # Batch mode when -c is given; interactive console otherwise.
    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def plearn(config, parser):
    """ Learn the diffeomorphisms in parallel. """
    # Default the thread count to the number of available cores.
    ncpus = multiprocessing.cpu_count()
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=ncpus)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-plearn/')
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    # One compmake storage directory per (streams, learners) combination.
    combination = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, combination)
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_plearn(config, rm, learners, streams, outdir, options.nthreads)
    rm.create_index_job()

    # Time and report the learning
    comp(learning_times_plearn, outdir, learners, streams, options.nthreads)

    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def main():
    """ Builds the time-use report jobs and runs them in batch mode. """
    report_manager = ReportManager('timeuse_reports')
    report_manager.add(comp(timeuse_report), 'timeuse', systems='all')
    report_manager.create_index_job()
    # Rebuild everything from scratch each time.
    return batch_command('clean; make')
def uncert(config, parser):
    """ Computes precision statistics of diffeo systems over test streams. """
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-precision/')
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-d", "--distances", default='L2,L2w',
                      help="Distances id to use for comparing the diffeo systems")
    parser.add_option("-l", "--length", default=9, type='int',
                      help="Max length of test cases")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    # Storage directory keyed by the list of diffeo systems.
    outdir = os.path.join(options.output, ",".join(dds))
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))

    max_delta = options.length
    distances = config.distances.expand_names(options.distances)

    store = create_stats_jobs(config, dds, streams, distances,
                              max_delta, outdir)
    records = comp(make_records, store)

    report = comp(report_stats, records, dds, streams, distances)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')
    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def uncert(config, parser):  # @UnusedVariable
    """ Computes uncertainty statistics for the given diffeo systems. """
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-uncert/')
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    # Storage directory keyed by the (dds, streams) combination.
    outdir = os.path.join(options.output,
                          ",".join(dds) + "-" + ",".join(streams))
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))

    max_delta = 9  # maximum plan length considered
    store = create_uncert_stats_jobs(config, dds, streams, max_delta, outdir)
    records = comp(make_records, store)

    report = comp(report_uncert_stats, records, dds)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')
    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def main():
    """ Runs online-planning test cases and collects/reports their stats. """
    logger.info('Starting online planning')
    parser = OptionParser()
    parser.add_option("-t", "--id_otc", help="", default=None)
    parser.add_option("-e", "--env", help="environment name", default='default')
    parser.add_option("-o", "--result", help="Result storage file", default=None)
    parser.add_option("-r", "--report", help="Specify output reports", default='')
    (options, _) = parser.parse_args()

    config = DiffeoplanConfigMaster()
    config.load('default:/home/adam/diffeo-data/')
    report_opt = options.report.split(',')
    set_current_config(config)

    if options.id_otc is not None:
        testcases = options.id_otc.split(',')
    else:
        testcases = []
    if options.result is not None:
        data_files = options.result.split(',')
    else:
        data_files = []

    # Load any previously saved statistics. A missing or corrupt file is
    # best-effort, not fatal.
    # BUG FIX: a bare 'except: pass' around the whole loop used to swallow
    # every error silently and the opened files were never closed.
    all_stats = []
    for data_file in data_files:
        try:
            with open(data_file, 'rb') as f:
                all_stats += pickle.load(f)
        except (IOError, OSError, EOFError, pickle.UnpicklingError) as e:
            logger.warn('Could not load stats from %r: %s' % (data_file, e))

    for tc in testcases:
        logger.info('Starting up tests with : %s' % tc)
        planning_module = config.online_testcases.instance(tc)
        all_stats += planning_module.run_all_tests(options.env)

    # BUG FIX: pickle.dump used to run unconditionally, crashing with
    # open(None) whenever -o was omitted; the file was also never closed.
    if options.result is not None:
        with open(options.result, 'wb') as f:
            pickle.dump(all_stats, f)

    rm = ReportManager('out/online')
    if 'vis' in report_opt:
        for i, stat in enumerate(all_stats):
            report = comp(report_tools.run_report, stat)
            kwargs = dict(env=stat.labels['env'])
            rm.add(report, 'online_run_' + str(i), **kwargs)

    # NOTE(review): yaml.load on a local config file; if it never contains
    # python-object tags, yaml.safe_load would be the safer choice -- confirm.
    with open('/home/adam/git/boot12env/src/surf12adam/orbit.online_report.yaml') as f:
        stats_file = yaml.load(f)

    # Keep only the report definitions the user asked for with -r.
    stats_def = [entry for entry in stats_file if entry['id'] in report_opt]

    kwargs = {'summary': 'summary'}
    report = comp(report_tools.stat_report, stats_def, all_stats)
    rm.add(report, 'summary_stats', **kwargs)

    kwargs = {'summary': 'empty'}
    report = comp(report_tools.empty_report)
    rm.add(report, 'empty', **kwargs)

    rm.create_index_job()
    compmake_console()
def main():
    """ Runs online-planning test cases and collects/reports their stats. """
    logger.info('Starting online planning')
    parser = OptionParser()
    parser.add_option("-t", "--id_otc", help="", default=None)
    parser.add_option("-e", "--env", help="environment name", default='default')
    parser.add_option("-o", "--result", help="Result storage file", default=None)
    parser.add_option("-r", "--report", help="Specify output reports", default='')
    (options, _) = parser.parse_args()

    config = DiffeoplanConfigMaster()
    config.load('default:/home/adam/diffeo-data/')
    report_opt = options.report.split(',')
    set_current_config(config)

    if options.id_otc is not None:
        testcases = options.id_otc.split(',')
    else:
        testcases = []
    if options.result is not None:
        data_files = options.result.split(',')
    else:
        data_files = []

    # Load any previously saved statistics. A missing or corrupt file is
    # best-effort, not fatal.
    # BUG FIX: a bare 'except: pass' around the whole loop used to swallow
    # every error silently and the opened files were never closed.
    all_stats = []
    for data_file in data_files:
        try:
            with open(data_file, 'rb') as f:
                all_stats += pickle.load(f)
        except (IOError, OSError, EOFError, pickle.UnpicklingError) as e:
            logger.warn('Could not load stats from %r: %s' % (data_file, e))

    for tc in testcases:
        logger.info('Starting up tests with : %s' % tc)
        planning_module = config.online_testcases.instance(tc)
        all_stats += planning_module.run_all_tests(options.env)

    # BUG FIX: pickle.dump used to run unconditionally, crashing with
    # open(None) whenever -o was omitted; the file was also never closed.
    if options.result is not None:
        with open(options.result, 'wb') as f:
            pickle.dump(all_stats, f)

    rm = ReportManager('out/online')
    if 'vis' in report_opt:
        for i, stat in enumerate(all_stats):
            report = comp(report_tools.run_report, stat)
            kwargs = dict(env=stat.labels['env'])
            rm.add(report, 'online_run_' + str(i), **kwargs)

    # NOTE(review): yaml.load on a local config file; if it never contains
    # python-object tags, yaml.safe_load would be the safer choice -- confirm.
    with open('/home/adam/git/boot12env/src/surf12adam/orbit.online_report.yaml') as f:
        stats_file = yaml.load(f)

    # Keep only the report definitions the user asked for with -r.
    stats_def = [entry for entry in stats_file if entry['id'] in report_opt]

    kwargs = {'summary': 'summary'}
    report = comp(report_tools.stat_report, stats_def, all_stats)
    rm.add(report, 'summary_stats', **kwargs)

    kwargs = {'summary': 'empty'}
    report = comp(report_tools.empty_report)
    rm.add(report, 'empty', **kwargs)

    rm.create_index_job()
    compmake_console()
def pixlearn(config, parser):
    """ Learns diffeomorphisms with sensels distributed over worker threads. """
    # BUG FIX: optparse does not convert string defaults, so default='4'
    # left options.nthreads a str when -n was omitted.
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("--distribute", type='str', default='random',
                      help="Strategy to distribute sensles to threads")
    parser.add_option("-s", "--id_stream", help="Which streams to use.",
                      default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-pixlearn/')
    parser.add_option("-l", "--id_learner", help="Learner config.", default="*")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")

    options = parser.parse_options()
    nthreads = options.nthreads

    id_learner = config.learners.expand_names(options.id_learner)
    id_stream = config.streams.expand_names(options.id_stream)

    id_comb = ",".join(id_stream) + "-" + ",".join(id_learner)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    rm = ReportManager(os.path.join(outdir, 'reports'))

    # BUG FIX: removed a leftover pdb.set_trace() that halted every run.
    if options.distribute == 'random':
        max_nsensels = 40 * 30  # assumes a 40x30 sensel grid -- TODO confirm
        # Assign every sensel index to a random thread.
        all_indicies = np.arange(max_nsensels)
        dist = np.random.randint(nthreads, size=max_nsensels)
        sensel_indexes = [all_indicies[dist == i] for i in range(nthreads)]
    elif options.distribute == 'demo4':
        # Four fixed 4x4 patches, one per thread.
        sensel_indexes = [[0, 1, 2, 3, 40, 41, 42, 43,
                           80, 81, 82, 83, 120, 121, 122, 123],
                          [340, 341, 342, 343, 380, 381, 382, 383,
                           420, 421, 422, 423, 460, 461, 462, 463],
                          [650, 651, 652, 653, 690, 691, 692, 693,
                           730, 731, 732, 733, 770, 771, 772, 773],
                          [1076, 1077, 1078, 1079, 1116, 1117, 1118, 1119,
                           1156, 1157, 1158, 1159, 1196, 1197, 1198, 1199]]
    else:
        # BUG FIX: an unknown strategy used to surface later as a NameError
        # on sensel_indexes; fail fast with a clear message instead.
        raise ValueError('Unknown distribute strategy: %r' % options.distribute)

    if len(id_learner) > 1 or len(id_stream) > 1:
        logger.warning('learners and streams after index 0 will be ignored')
    id_learner = id_learner[0]
    id_stream = id_stream[0]

    commands = [[256, 0, 0], [-256, 0, 0]]
    states = [[100]]

    #
    # Parallel part of code: one estimator job per thread, joined afterwards.
    #
    group = []
    for i in range(nthreads):
        estimator_i = comp(sensel_group_learn, config, id_learner, id_stream,
                           sensel_indexes[i])
        group.append(estimator_i)

        diffeo_system_i = comp(estimator_summarize, estimator_i,
                               commands, states, 'diffeo' + str(i))

        estimator_report = comp(report_estimator, 'estimator' + str(i),
                                estimator_i)
        rm.add(estimator_report, 'estimator' + str(i),
               id_learner=id_learner, id_stream=id_stream)

        diffeo_report = comp(report_dds, 'diffeo' + str(i), diffeo_system_i)
        rm.add(diffeo_report, 'diffeo' + str(i),
               id_learner=id_learner, id_stream=id_stream)

    estimator_main = comp(join_estimators, group)
    # NOTE(review): 'diffeo' + str(i) reuses the last loop index to name the
    # joined system; looks unintended, but kept for compatibility -- confirm.
    main_system = comp(estimator_summarize, estimator_main,
                       commands, states, 'diffeo' + str(i))
    diffeo_report = comp(report_dds, 'dds-%s-%s' % (id_stream, id_learner),
                         main_system,
                         job_id='learn-%s-%s-summarize-report' % (id_stream,
                                                                  id_learner))

    rm.add(comp(empty_report), 'empty')
    rm.add(diffeo_report, 'dds', id_learner=id_learner, id_stream=id_stream)

    rm.create_index_job()

    if options.command:
        # return batch_command(options.command)
        batch_command(options.command)
    else:
        compmake_console()
def rlearn(config, parser):  # @UnusedVariable
    """ Displays the learned DDS """
    t0 = time.time()
    # BUG FIX: optparse does not convert string defaults, so int-typed options
    # previously received str defaults ('4', '2') when the flag was omitted.
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("-r", "--nrefine", help="Number of time to refine learning",
                      type='int', default=2)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option("--sensels", default=None,
                      help="Sensel indices to use for debuging refining module")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show", default=None, help="Name of learners to report")

    options = parser.parse_options()

    # Report-only mode: make the reports for the given learners, then exit.
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()

    nthreads = options.nthreads
    nrefine = options.nrefine

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')

    # One compmake storage directory per (streams, learners) combination.
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine,
                options.sensels)

    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)

    rm.create_index_job()

    # BUG FIX: this log line used to sit after the return statements and was
    # therefore unreachable; log the elapsed setup time before dispatching.
    logger.info("Done after time: " + str(time.time() - t0) + ' seconds')

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def create_bench_jobs(config, algos, testcases, outdir):
    """ Builds the compmake job graph benchmarking each algorithm on each
        test case, plus the reporting/visualization jobs. """
    # dict(id_algo, id_tc, id_discdds, plan_length) => PlanningResults
    allplanning = StoreResults()
    # dict(id_algo, id_tc, id_discdds, plan_length) => resultstats
    allruns = StoreResults()
    # dict(id_algo, id_tc) => DiffeoPlanningAlgorithm
    algoinit = StoreResults()

    rm = ReportManager(outdir)

    comp_store(config, job_id='config')

    # Let's instantiate all test cases and sort them by discdds
    # so that we do only one initialization per algorithms
    id_discdds2testcases = defaultdict(lambda: {})
    alltc = {}  # id -> Promise TestCase
    for id_tc in testcases:
        alltc[id_tc] = comp(instantiate_testcase, comp_store(config), id_tc)
        # Do it once, now, to get its dds
        tc1 = config.testcases.instance(id_tc)
        id_discdds2testcases[tc1.id_discdds][id_tc] = tc1

    # Load discdds before, they might be automatically generated
    # as well so we want the generation to happen only once.
    discdds = {}  # id -> Promise DiffeoSystem
    for id_discdds in id_discdds2testcases:
        discdds[id_discdds] = comp(instantiate_discdds, comp_store(config),
                                   id_discdds)

    # for each algorithm
    for id_algo in algos:
        config.algos[id_algo]  # check it is in the configuration

        # for each dynamics
        for id_discdds, dds in discdds.items():
            job_id = 'init-%s-%s' % (id_algo, id_discdds)
            # initialize the algorithm for that dynamics
            algo = comp(init_algorithm, comp_store(config),
                        id_algo, id_discdds, discdds[id_discdds],
                        job_id=job_id)
            algoinit[dict(id_algo=id_algo, id_discdds=id_discdds)] = algo

            # for each test case in that dynamics
            for id_tc, tc in id_discdds2testcases[id_discdds].items():
                # run the planning
                job_id = 'plan-%s-%s' % (id_algo, id_tc)
                result = comp(run_planning, id_algo, id_tc, alltc[id_tc],
                              algo, job_id=job_id)

                # compute statistics
                result_stats = comp(run_planning_stats, result, dds,
                                    alltc[id_tc], job_id=job_id + '-stats')

                # Index results by the full attribute combination so the
                # report jobs below can group/slice them.
                attrs = dict(id_algo=id_algo, id_tc=id_tc,
                             id_discdds=tc.id_discdds,
                             true_plan_length=len(tc.true_plan))
                allruns[attrs] = result_stats
                allplanning[attrs] = result

    jobs_report_algo_init(config, rm, algoinit)
    jobs_report_tc(config, rm, testcases, alltc)
    jobs_report_dds(config, rm, discdds)

    # Reduce each run's results to a plain stats dict for the tables.
    allstats = StoreResults()
    for key, run in allruns.items():
        allstats[key] = comp(results2stats_dict, run,
                             job_id=comp_stage_job_id(run, 'statsdict'))

    jobs_tables(allstats, rm)
    jobs_visualization(config, allruns, rm)

    rm.create_index_job()
def idealize_uncert(config, parser):
    ''' Assumes a constant displacement over the whole sensor domain '''
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/idealized-dds/')
    options = parser.parse_options()

    id_discdds = options.dds
    dds = config.discdds.instance(id_discdds)

    outdir = os.path.join(options.output, id_discdds)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    # BUG FIX: this assignment was commented out although 'rm' is used by
    # every rm.add(...) below, so the command failed with a NameError.
    rm = ReportManager(os.path.join(outdir, 'reports'))

    # Ideal: idealized uncertainty for the system.
    id_iu_discdds = 'iu-' + id_discdds
    iu_dds = comp(_idealize_uncert, dds, job_id='idealize_uncert')
    comp(save_results, id_iu_discdds, outdir, iu_dds)
    diffeo_report = comp(report_dds, 'iu_dds-%s' % id_discdds, iu_dds)
    rm.add(diffeo_report, 'iu-dds', id_learner='idealized-uncertainty')

    # Relative: uncertainty updated with the relative length-score norm.
    # Each update works on its own shallow copy of the system.
    id_uur_discdds = 'uur-' + id_discdds
    dds_copyr = copy.copy(dds)
    uur_dds = comp(_update_uncert, dds_copyr, length_score_norm_relative,
                   job_id='update_uncert_relative')
    comp(save_results, id_uur_discdds, outdir, uur_dds,
         job_id='update_uncert_relative_save')
    diffeo_report = comp(report_dds, 'uur-dds-%s' % id_discdds, uur_dds,
                         job_id='update_uncert_relative_report')
    rm.add(diffeo_report, 'uur-dds', id_learner='updated-uncertainty-uur')

    # Absolute: uncertainty updated with the absolute length-score norm.
    dds_copya = copy.copy(dds)
    id_uua_discdds = 'uua-' + id_discdds
    uua_dds = comp(_update_uncert, dds_copya, length_score_norm,
                   job_id='update_uncert_absolute')
    comp(save_results, id_uua_discdds, outdir, uua_dds,
         job_id='update_uncert_absolute_save')
    diffeo_report = comp(report_dds, 'uua-dds-%s' % id_discdds, uua_dds,
                         job_id='update_uncert_absolute_report')
    rm.add(diffeo_report, 'uua-dds', id_learner='updated-uncertainty-uua')

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0