def rlearn(config, parser):  # @UnusedVariable
    """ Displays the learned DDS.

        Sets up the compmake jobs for refined learning (jobs_rlearn),
        plus a timing job, then either runs a batch command or drops
        into the interactive compmake console.
    """
    t0 = time.time()
    # NOTE: type='int' options must use int defaults; optparse does not
    # coerce string defaults (previously default='4' / '2' stayed strings).
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("-r", "--nrefine", help="Number of time to refine learning",
                      type='int', default=2)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option("--sensels", default=None,
                      help="Sensel indices to use for debuging refining module")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show", default=None, help="Name of learners to report")
    options = parser.parse_options()

    # Report-only mode: generate the report and exit immediately.
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()

    nthreads = options.nthreads
    nrefine = options.nrefine

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')

    # The output directory encodes the streams/learners combination.
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine,
                options.sensels)

    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)

    rm.create_index_job()

    # Log elapsed setup time before handing control over; previously this
    # line was after the returns and therefore unreachable.
    logger.info("Done after time: " + str(time.time() - t0) + ' seconds')

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def dp_predstats_main(config, parser):
    """ Computes prediction statistics for the given diffeo systems,
        distances, and streams, and generates the reports.
    """
    parser.add_option("-o", "--output", default='out/dp-pred-stats',
                      help="Output directory")
    parser.add_option("-d", "--distances", default='*',
                      help="Comma-separated list of distances. Can use *.")
    parser.add_option("-S", "--dds",
                      help="Comma-separated list of diffeosystems.")
    parser.add_option("-s", "--streams",
                      help="Comma-separated list of streams.")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    if not options.streams:
        msg = 'Please specify streams using -s.'
        raise UserError(msg)
    if not options.dds:
        msg = 'Please specify which discdds to use.'
        raise UserError(msg)

    distances = config.distances.expand_names(options.distances)
    streams = config.streams.expand_names(options.streams)
    # BUG FIX: discdds names were expanded against config.streams;
    # every sibling command uses config.discdds for --dds.
    dds = config.discdds.expand_names(options.dds)

    logger.info('Using distances: %s' % distances)
    logger.info('Using streams: %s' % streams)
    logger.info('Using discdds: %s' % dds)

    outdir = '%s/%s' % (options.output, options.dds)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, "reports"))

    # for id_discdds in dds:
    create_predstats_jobs(config=config, distances=distances,
                          id_discdds=dds, streams=streams, rm=rm, maxd=10)

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def dp_dist_stats(config, parser):  # @UnusedVariable
    """ Computes statistics for images distances for different plan steps. """
    parser.add_option("-o", "--output", default='out/dp-dist-stats',
                      help="Output directory")
    parser.add_option("-d", "--distances", default='*',
                      help="Comma-separated list of distances. Can use *.")
    parser.add_option("-s", "--streams", default='*',
                      help="Comma-separated list of streams. Can use *.")
    parser.add_option("-r", "--repeat", default=1, type='int',
                      help="Repeat many times.")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")

    options = parser.parse_options()

    # Expand wildcards and keep a natural (human-friendly) ordering.
    distances = natsorted(config.distances.expand_names(options.distances))
    streams = natsorted(config.streams.expand_names(options.streams))

    logger.info('Using distances: %s' % distances)
    logger.info('Using streams: %s' % streams)

    # One output directory per streams/distances combination.
    id_comb = ','.join(streams) + '-' + ','.join(distances)
    outdir = os.path.join(options.output, id_comb)

    # Compmake keeps its job database below the output directory.
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, "reports"))
    create_diststats_jobs(config=config, distances=distances,
                          streams=streams, rm=rm, maxd=10)
    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def dp_batch_main(config, parser):
    """ Runs batch planning experiments from batch configuration files. """
    parser.add_option("-o", "--output", default='out/dp-batch',
                      help="Output directory")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options, which = parser.parse()

    # With no positional arguments, run every configured set.
    if not which:
        todo = config.sets.keys()
        id_comb = 'all'
    else:
        todo = config.sets.expand_names(which)
        id_comb = "+".join(sorted(todo))

    logger.info('Batch sets to do: %s' % todo)

    outdir = os.path.join(options.output, 'set-%s' % id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    for id_set in todo:
        logger.info('Instantiating batch set %s' % id_set)
        spec = config.sets[id_set]
        try:
            algos = config.algos.expand_names(spec['algorithms'])
            testcases = config.testcases.expand_names(spec['testcases'])
            # Prefix all jobs of this set so they do not collide.
            comp_prefix('%s' % id_set)
            b_outdir = os.path.join(outdir, id_set)
            create_bench_jobs(config=config, algos=algos,
                              testcases=testcases, outdir=b_outdir)
        except Exception:
            # Was a bare `except:`; narrowing keeps KeyboardInterrupt et al.
            # out of the logging path. Still re-raised after logging.
            logger.error('Error while instantiating batch\n%s' % pformat(spec))
            raise

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def plearn(config, parser):
    """ Learn the diffeomorphisms in parallel. """
    # parser.add_option("-i", "--id_image", help="ID image.", default='lena')
    ncpus = multiprocessing.cpu_count()
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=ncpus)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-plearn/')
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    nthreads = options.nthreads
    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    # Directory name encodes the streams/learners combination.
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)
    use_filesystem(os.path.join(outdir, 'compmake'))
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_plearn(config, rm, learners, streams, outdir, nthreads)
    rm.create_index_job()

    # Time and report the learning
    comp(learning_times_plearn, outdir, learners, streams, nthreads)

    if options.command:
        return batch_command(options.command)
    compmake_console()
    return 0
def dp_bench_main(config, parser):  # @UnusedVariable
    """ Runs a set of planning experiments. """
    parser.add_option("-a", "--algorithms", default='*',
                      help="Comma-separated list of algorithms. Can use *.")
    # BUG FIX: help text said "algorithms" (copy-paste from -a).
    parser.add_option("-t", "--testcases", default='*',
                      help="Comma-separated list of testcases. Can use *.")
    parser.add_option("-o", "--output", default='out/dp-bench',
                      help="Output directory")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    algos = config.algos.expand_names(options.algorithms)
    testcases = config.testcases.expand_names(options.testcases)

    logger.info('Using %d algorithms: %s' % (len(algos), algos))
    logger.info('Using %d testcases.' % (len(testcases)))

    outdir = options.output

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    create_bench_jobs(config=config, algos=algos,
                      testcases=testcases, outdir=outdir)

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def main():
    """ Small driver exercising ReportManager with two test reports. """
    outdir = 'test/repman'
    use_filesystem(os.path.join(outdir, 'compmake'))

    rm = ReportManager(outdir)
    # Schedule both report jobs first, then register them.
    reports = [comp(make_rep1, name) for name in ('TestReport3', 'TestReport4')]
    rm.add(reports[0], 'rep3')
    rm.add(reports[1], 'rep4')
    rm.create_index_job()

    read_rc_files()
    compmake_console()
def main():
    """ Small driver exercising ReportManager with two test reports. """
    outdir = "test/repman"
    storage = os.path.join(outdir, "compmake")
    use_filesystem(storage)

    manager = ReportManager(outdir)
    first = comp(make_rep1, "TestReport3")
    second = comp(make_rep1, "TestReport4")
    manager.add(first, "rep3")
    manager.add(second, "rep4")
    manager.create_index_job()

    read_rc_files()
    compmake_console()
def uncert(config, parser):
    """ Computes distance statistics comparing the given diffeo systems
        against streams, up to a maximum test-case length, and creates
        the corresponding reports.
    """
    # Typo fix in user-facing help: "DDS sytem ." -> "DDS system."
    parser.add_option("-S", "--dds", help="DDS system.")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-precision/')
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-d", "--distances", default='L2,L2w',
                      help="Distances id to use for comparing the diffeo systems")
    parser.add_option("-l", "--length", default=9, type='int',
                      help="Max length of test cases")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    id_comb = ",".join(dds)
    outdir = os.path.join(options.output, id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    max_delta = options.length
    distances = config.distances.expand_names(options.distances)

    store = create_stats_jobs(config, dds, streams, distances,
                              max_delta, outdir)
    records = comp(make_records, store)

    report = comp(report_stats, records, dds, streams, distances)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')
    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def uncert(config, parser):  # @UnusedVariable
    """ Computes uncertainty statistics for the given diffeo systems
        over the given streams, and creates the reports.
    """
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    # Typo fix in user-facing help: "DDS sytem ." -> "DDS system."
    parser.add_option("-S", "--dds", help="DDS system.")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-uncert/')
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    dds = config.discdds.expand_names(options.dds)
    streams = config.streams.expand_names(options.streams)

    id_comb = ",".join(dds) + "-" + ",".join(streams)
    outdir = os.path.join(options.output, id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    max_delta = 9  # maximum plan length considered

    store = create_uncert_stats_jobs(config, dds, streams, max_delta, outdir)
    records = comp(make_records, store)

    report = comp(report_uncert_stats, records, dds)
    r0 = comp(empty_report)
    rm.add(report, 'main', id_dds='dds')
    rm.add(r0, 'empty')
    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def go(self):
    """ Entry point of the quickapp: builds the compmake context, lets the
        subclass define its jobs, then either runs them in batch mode or
        opens the interactive console.

        Returns 0 on success, QUICKAPP_COMPUTATION_ERROR if the batch
        command failed. Returns None when delegating to a quickapp parent.
    """
    # check that if we have a parent who is a quickapp,
    # then use its context
    qapp_parent = self.get_qapp_parent()
    if qapp_parent is not None:
        # self.info('Found parent: %s' % qapp_parent)
        qc = qapp_parent.child_context
        self.define_jobs_context(qc)
        return
    else:
        # self.info('Parent not found')
        pass

    # (dead code kept for reference: optional hard memory limits)
    # if False:
    #     import resource
    #     gbs = 5
    #     max_mem = long(gbs * 1000 * 1048576)
    #     resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
    #     resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

    options = self.get_options()

    # if self.get_qapp_parent() is None:
    # only do this if somebody didn't do it before
    if not options.contracts:
        msg = ('PyContracts disabled for speed. '
               'Use --contracts to activate.')
        self.logger.warning(msg)
        contracts.disable_all()

    output_dir = options.output

    # --reset wipes the previous output directory, best-effort.
    if options.reset:
        if os.path.exists(output_dir):
            self.logger.info('Removing output dir %r.' % output_dir)
            try:
                shutil.rmtree(output_dir)
            except OSError as e:
                # Directory not empty -- common enough on NFS filesystems
                # print('errno: %r' % e.errno)
                if e.errno == 39:
                    pass
                else:
                    raise

    # Compmake storage for results
    storage = os.path.join(output_dir, 'compmake')
    logger.debug('Creating storage in %s (compress = %s)'
                 % (storage, options.compress))
    db = StorageFilesystem(storage, compress=options.compress)
    currently_executing = ['root']
    # The original Compmake context
    oc = Context(db=db, currently_executing=currently_executing)
    # Our wrapper
    qc = CompmakeContext(cc=oc, parent=None, qapp=self,
                         job_prefix=None, output_dir=output_dir)
    read_rc_files(oc)

    # Let the subclass define its jobs; restore the comp prefix afterwards
    # so nested definitions do not leak their prefix.
    original = oc.get_comp_prefix()
    self.define_jobs_context(qc)
    oc.comp_prefix(original)
    merged = context_get_merge_data(qc)

    # Only create the index job if we have reports defined
    # or some branched context (which might create reports)
    has_reports = len(qc.get_report_manager().allreports) > 0
    has_branched = qc.has_branched()
    if has_reports or has_branched:
        # self.info('Creating reports')
        oc.comp_dynamic(_dynreports_create_index, merged)
    else:
        pass
        # self.info('Not creating reports.')

    ndefined = len(oc.get_jobs_defined_in_this_session())
    if ndefined == 0:
        # self.comp was never called
        msg = 'No jobs defined.'
        raise ValueError(msg)
    else:
        if options.console:
            oc.compmake_console()
            return 0
        else:
            cq = CacheQueryDB(oc.get_compmake_db())
            targets = cq.all_jobs()
            todo, done, ready = cq.list_todo_targets(targets)

            # Everything cached and no explicit command: nothing to do.
            if not todo and options.command is None:
                msg = "Note: there is nothing for me to do. "
                msg += '\n(Jobs todo: %s done: %s ready: %s)' % (
                    len(todo), len(done), len(ready))
                msg += """\
This application uses a cache system for the results.
This means that if you call it second time with the same arguments,
and if you do not change any input, it will not do anything."""
                self.warn(msg)
                return 0

            if options.command is None:
                command = 'make recurse=1'
            else:
                command = options.command

            try:
                _ = oc.batch_command(command)
                # print('qapp: ret0 = %s' % ret0)
            except CommandFailed:
                # print('qapp: CommandFailed')
                ret = QUICKAPP_COMPUTATION_ERROR
            except ShellExitRequested:
                # print('qapp: ShellExitRequested')
                ret = 0
            else:
                # print('qapp: else ret = 0')
                ret = 0
            return ret
def pixlearn(config, parser):
    """ Learns diffeomorphisms per-sensel, distributing sensels over a
        number of worker threads, then joins the estimators and reports.
    """
    # NOTE: optparse does not coerce string defaults; default must be int.
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("--distribute", type='str', default='random',
                      help="Strategy to distribute sensles to threads")
    parser.add_option("-s", "--id_stream", help="Which streams to use.",
                      default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-pixlearn/')
    parser.add_option("-l", "--id_learner", help="Learner config.",
                      default="*")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    options = parser.parse_options()

    nthreads = options.nthreads
    id_learner = config.learners.expand_names(options.id_learner)
    id_stream = config.streams.expand_names(options.id_stream)

    id_comb = ",".join(id_stream) + "-" + ",".join(id_learner)
    outdir = os.path.join(options.output, id_comb)
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    rm = ReportManager(os.path.join(outdir, 'reports'))

    # (removed leftover pdb.set_trace() debugging breakpoint)

    if options.distribute == 'random':
        # assumes a 40x30 sensel grid -- TODO confirm against the streams
        max_nsensels = 40 * 30
        # Assign each sensel index to a random thread.
        all_indices = np.array(range(max_nsensels))
        dist = np.random.randint(nthreads, size=max_nsensels)
        sensel_indexes = []
        for i in range(nthreads):
            sensel_indexes.append(all_indices[dist == i])
    elif options.distribute == 'demo4':
        # Four hand-picked 4x4 patches (hard-coded for the demo layout).
        sensel_indexes = [[0, 1, 2, 3, 40, 41, 42, 43,
                           80, 81, 82, 83, 120, 121, 122, 123],
                          [340, 341, 342, 343, 380, 381, 382, 383,
                           420, 421, 422, 423, 460, 461, 462, 463],
                          [650, 651, 652, 653, 690, 691, 692, 693,
                           730, 731, 732, 733, 770, 771, 772, 773],
                          [1076, 1077, 1078, 1079, 1116, 1117, 1118, 1119,
                           1156, 1157, 1158, 1159, 1196, 1197, 1198, 1199]]
    else:
        # Previously an unknown strategy crashed later with NameError.
        raise ValueError('Unknown --distribute strategy: %r'
                         % options.distribute)

    if len(id_learner) > 1 or len(id_stream) > 1:
        logger.warning('learners and streams after index 0 will be ignored')
    id_learner = id_learner[0]
    id_stream = id_stream[0]

    commands = [[256, 0, 0], [-256, 0, 0]]
    states = [[100]]

    #
    # Parallel part of code
    # Initiate parallel learning
    group = []
    for i in range(nthreads):
        estimator_i = comp(sensel_group_learn, config, id_learner, id_stream,
                           sensel_indexes[i])
        group.append(estimator_i)

        diffeo_system_i = comp(estimator_summarize, estimator_i,
                               commands, states, 'diffeo' + str(i))

        estimator_report = comp(report_estimator, 'estimator' + str(i),
                                estimator_i)
        rm.add(estimator_report, 'estimator' + str(i),
               id_learner=id_learner, id_stream=id_stream)

        diffeo_report = comp(report_dds, 'diffeo' + str(i), diffeo_system_i)
        rm.add(diffeo_report, 'diffeo' + str(i),
               id_learner=id_learner, id_stream=id_stream)

    # Join the per-thread estimators into the main system.
    estimator_main = comp(join_estimators, group)
    # NOTE(review): reuses the last loop index in the job name
    # ('diffeo' + str(i)) -- looks unintended but kept for compatibility.
    main_system = comp(estimator_summarize, estimator_main,
                       commands, states, 'diffeo' + str(i))
    diffeo_report = comp(report_dds, 'dds-%s-%s' % (id_stream, id_learner),
                         main_system,
                         job_id='learn-%s-%s-summarize-report'
                                % (id_stream, id_learner))

    rm.add(comp(empty_report), 'empty')
    rm.add(diffeo_report, 'dds', id_learner=id_learner, id_stream=id_stream)

    rm.create_index_job()

    if options.command:
        # return batch_command(options.command)
        batch_command(options.command)
    else:
        compmake_console()
def idealize_uncert(config, parser):
    ''' Assumes a constant displacement over the whole sensor domain '''
    parser.add_option("-S", "--dds", help="DDS sytem .")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/idealized-dds/')
    options = parser.parse_options()

    id_discdds = options.dds
    dds = config.discdds.instance(id_discdds)

    outdir = os.path.join(options.output, id_discdds)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()
    #
    # NOTE(review): the mangled source is ambiguous about whether this
    # assignment was commented out; rm is used below, so it must be live.
    rm = ReportManager(os.path.join(outdir, 'reports'))

    # Ideal: idealized-uncertainty variant of the system.
    id_iu_discdds = 'iu-' + id_discdds
    iu_dds = comp(_idealize_uncert, dds, job_id='idealize_uncert')
    comp(save_results, id_iu_discdds, outdir, iu_dds)
    diffeo_report = comp(report_dds, 'iu_dds-%s' % id_discdds, iu_dds)
    rm.add(diffeo_report, 'iu-dds', id_learner='idealized-uncertainty')

    # Relative: uncertainty updated with the relative length-score norm.
    id_uur_discdds = 'uur-' + id_discdds
    dds_copyr = copy.copy(dds)  # copy so updates don't touch the original
    uur_dds = comp(_update_uncert, dds_copyr, length_score_norm_relative,
                   job_id='update_uncert_relative')
    comp(save_results, id_uur_discdds, outdir, uur_dds,
         job_id='update_uncert_relative_save')
    diffeo_report = comp(report_dds, 'uur-dds-%s' % id_discdds, uur_dds,
                         job_id='update_uncert_relative_report')
    rm.add(diffeo_report, 'uur-dds', id_learner='updated-uncertainty-uur')

    # Absolute: uncertainty updated with the absolute length-score norm.
    dds_copya = copy.copy(dds)
    id_uua_discdds = 'uua-' + id_discdds
    uua_dds = comp(_update_uncert, dds_copya, length_score_norm,
                   job_id='update_uncert_absolute')
    comp(save_results, id_uua_discdds, outdir, uua_dds,
         job_id='update_uncert_absolute_save')
    diffeo_report = comp(report_dds, 'uua-dds-%s' % id_discdds, uua_dds,
                         job_id='update_uncert_absolute_report')
    rm.add(diffeo_report, 'uua-dds', id_learner='updated-uncertainty-uua')

    rm.create_index_job()

    if options.command:
        return batch_command(options.command)
    else:
        # pdb.set_trace()
        compmake_console()
        return 0
def rlearn(config, parser):  # @UnusedVariable
    """ Displays the learned DDS.

        Sets up the compmake jobs for refined learning (jobs_rlearn),
        plus a timing job, then either runs a batch command or drops
        into the interactive compmake console.
    """
    t0 = time.time()
    # NOTE: type='int' options must use int defaults; optparse does not
    # coerce string defaults (previously default='4' / '2' stayed strings).
    parser.add_option("-n", "--nthreads", help="Number of threads",
                      type='int', default=4)
    parser.add_option("-r", "--nrefine", help="Number of time to refine learning",
                      type='int', default=2)
    parser.add_option("-s", "--streams", help="Which streams to use.",
                      default="*")
    parser.add_option("-i", "--comb", default="default")
    parser.add_option("-l", "--learners", help="Learner config.", default="*")
    parser.add_option("-o", "--output", help="Output directory",
                      default='out/dp-rlearn/')
    parser.add_option("--sensels", default=None,
                      help="Sensel indices to use for debuging refining module")
    parser.add_option("-c", "--command",
                      help="Command to pass to compmake for batch mode")
    parser.add_option("--show", default=None, help="Name of learners to report")
    options = parser.parse_options()

    # Report-only mode: generate the report and exit immediately.
    if options.show is not None:
        diffeomorphism2d_continuous.make_report(options.show.split(','))
        sys.exit()

    nthreads = options.nthreads
    nrefine = options.nrefine

    learners = config.learners.expand_names(options.learners)
    streams = config.streams.expand_names(options.streams)

    if len(learners) > 1:
        logger.warn('Multiple learners are not supported for now')
    if len(streams) > 1:
        logger.warn('Multiple streams are not supported for now')

    # The output directory encodes the streams/learners combination.
    id_comb = ",".join(streams) + "-" + ",".join(learners)
    outdir = os.path.join(options.output, id_comb)

    # Compmake storage for results
    storage = os.path.join(outdir, 'compmake')
    use_filesystem(storage)
    read_rc_files()

    rm = ReportManager(os.path.join(outdir, 'reports'))
    jobs_rlearn(config, rm, learners, streams, outdir, nthreads, nrefine,
                options.sensels)

    # Time and report the learning
    comp(learning_times_rlearn, outdir, learners, streams, nthreads, nrefine)

    rm.create_index_job()

    # Log elapsed setup time before handing control over; previously this
    # line was after the returns and therefore unreachable.
    logger.info("Done after time: " + str(time.time() - t0) + ' seconds')

    if options.command:
        return batch_command(options.command)
    else:
        compmake_console()
        return 0
def go(self):
    """ Entry point (legacy Python-2 variant): sets up the global compmake
        database, lets the subclass define jobs, then runs batch mode or
        the interactive console.

        Returns 0 on success, QUICKAPP_COMPUTATION_ERROR on batch failure;
        returns None when delegating to a quickapp parent.
    """
    # check that if we have a parent who is a quickapp,
    # then use its context
    qapp_parent = self.get_qapp_parent()
    if qapp_parent is not None:
        # self.info('Found parent: %s' % qapp_parent)
        context = qapp_parent.child_context
        self.define_jobs_context(context)
        return
    else:
        # self.info('Parent not found')
        pass

    # Dead branch: optional hard memory limits (Python 2 literals).
    if False:
        import resource
        gbs = 5
        max_mem = long(gbs * 1000 * 1048576L)
        resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
        resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

    options = self.get_options()

    if self.get_qapp_parent() is None:
        # only do this if somebody didn't do it before
        if not options.contracts:
            msg = 'PyContracts disabled for speed. Use --contracts to activate.'
            self.logger.warning(msg)
            contracts.disable_all()

    warnings.warn('removed configuration below')  # (start)

    output_dir = options.output

    # Compmake storage for results (sets the process-global database).
    storage = os.path.join(output_dir, 'compmake')
    sf = StorageFilesystem(storage, compress=True)
    # sf = StorageFilesystem2(directory)
    # sf = MemoryCache(sf)
    set_compmake_db(sf)
    # use_filesystem(storage)
    read_rc_files()

    context = CompmakeContext(parent=None, qapp=self,
                              job_prefix=None, output_dir=output_dir)
    self.context = context

    # Preserve and restore the comp prefix around job definition.
    original = get_comp_prefix()
    self.define_jobs_context(context)
    comp_prefix(original)

    context.finalize_jobs()

    if context.n_comp_invocations == 0:
        # self.comp was never called
        msg = 'No jobs defined.'
        raise ValueError(msg)
    else:
        if not options.console:
            # batch_command returns a string on error, an int status
            # otherwise -- anything else is a programming error.
            batch_result = batch_command(options.command)
            if isinstance(batch_result, str):
                ret = QUICKAPP_COMPUTATION_ERROR
            elif isinstance(batch_result, int):
                if batch_result == 0:
                    ret = 0
                else:
                    # xxx: discarded information
                    ret = QUICKAPP_COMPUTATION_ERROR
            else:
                assert False
            return ret
        else:
            compmake_console()
            return 0
def go(self):
    """ Entry point: builds the compmake context, lets the subclass define
        its jobs, then runs options.command in batch mode or opens the
        interactive console.

        Returns 0 on success, QUICKAPP_COMPUTATION_ERROR if the batch
        command failed; returns None when delegating to a quickapp parent.
    """
    # check that if we have a parent who is a quickapp,
    # then use its context
    qapp_parent = self.get_qapp_parent()
    if qapp_parent is not None:
        # self.info('Found parent: %s' % qapp_parent)
        qc = qapp_parent.child_context
        self.define_jobs_context(qc)
        return
    else:
        # self.info('Parent not found')
        pass

    # Dead branch: optional hard memory limits (Python 2 literals).
    if False:
        import resource
        gbs = 5
        max_mem = long(gbs * 1000 * 1048576L)
        resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
        resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

    options = self.get_options()

    if self.get_qapp_parent() is None:
        # only do this if somebody didn't do it before
        if not options.contracts:
            msg = ('PyContracts disabled for speed. '
                   'Use --contracts to activate.')
            self.logger.warning(msg)
            contracts.disable_all()

    output_dir = options.output

    # --reset wipes any previous results.
    if options.reset:
        if os.path.exists(output_dir):
            self.logger.info('Removing output dir %r.' % output_dir)
            shutil.rmtree(output_dir)

    # Compmake storage for results
    storage = os.path.join(output_dir, 'compmake')
    db = StorageFilesystem(storage, compress=True)
    currently_executing = ['root']
    # The original Compmake context
    oc = Context(db=db, currently_executing=currently_executing)
    # Our wrapper
    qc = CompmakeContext(cc=oc, parent=None, qapp=self,
                         job_prefix=None, output_dir=output_dir)
    read_rc_files(oc)

    # Preserve and restore the comp prefix around job definition.
    original = oc.get_comp_prefix()
    self.define_jobs_context(qc)
    oc.comp_prefix(original)
    merged = context_get_merge_data(qc)

    # Only create the index job if we have reports defined
    # or some branched context (which might create reports)
    has_reports = len(qc.get_report_manager().allreports) > 0
    has_branched = qc.has_branched()
    if has_reports or has_branched:
        self.info('Creating reports')
        oc.comp_dynamic(_dynreports_create_index, merged)
    else:
        self.info('Not creating reports.')

    ndefined = len(oc.get_jobs_defined_in_this_session())
    if ndefined == 0:
        # self.comp was never called
        msg = 'No jobs defined.'
        raise ValueError(msg)
    else:
        if not options.console:
            try:
                oc.batch_command(options.command)
            except CommandFailed:
                ret = QUICKAPP_COMPUTATION_ERROR
            else:
                ret = 0
            return ret
        else:
            oc.compmake_console()
            return 0