def go(self):
    """Main entry point for the quickapp.

    If a quickapp parent exists, delegates job definition to the parent's
    child context and returns immediately. Otherwise it sets up a Compmake
    storage/context in ``options.output``, calls ``self.define_jobs_context``
    to define the jobs, optionally creates a dynamic report-index job, and
    finally either opens the interactive console or runs a batch command.

    Returns:
        None when delegating to a parent; otherwise 0 on success or
        QUICKAPP_COMPUTATION_ERROR if the batch command failed.

    Raises:
        ValueError: if define_jobs_context() defined no jobs at all.
    """
    # check that if we have a parent who is a quickapp,
    # then use its context
    qapp_parent = self.get_qapp_parent()
    if qapp_parent is not None:
        # self.info('Found parent: %s' % qapp_parent)
        qc = qapp_parent.child_context
        self.define_jobs_context(qc)
        return
    else:
        # self.info('Parent not found')
        pass

    # Dead experiment kept for reference: hard memory cap via resource limits.
    # if False:
    #     import resource
    #     gbs = 5
    #     max_mem = long(gbs * 1000 * 1048576)
    #     resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
    #     resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

    options = self.get_options()

    # if self.get_qapp_parent() is None:
    # only do this if somebody didn't do it before
    if not options.contracts:
        msg = ('PyContracts disabled for speed. '
               'Use --contracts to activate.')
        self.logger.warning(msg)
        contracts.disable_all()

    output_dir = options.output

    # --reset wipes the previous output directory before running.
    if options.reset:
        if os.path.exists(output_dir):
            self.logger.info('Removing output dir %r.' % output_dir)
            try:
                shutil.rmtree(output_dir)
            except OSError as e:
                # Directory not empty -- common enough on NFS filesystems
                # (errno 39 == ENOTEMPTY on Linux); tolerated, anything
                # else is re-raised.
                # print('errno: %r' % e.errno)
                if e.errno == 39:
                    pass
                else:
                    raise

    # Compmake storage for results
    storage = os.path.join(output_dir, 'compmake')
    logger.debug('Creating storage in %s (compress = %s)'
                 % (storage, options.compress))
    db = StorageFilesystem(storage, compress=options.compress)
    currently_executing = ['root']
    # The original Compmake context
    oc = Context(db=db, currently_executing=currently_executing)
    # Our wrapper
    qc = CompmakeContext(cc=oc, parent=None, qapp=self,
                         job_prefix=None, output_dir=output_dir)
    read_rc_files(oc)

    # Save and restore the comp prefix around the user's job definitions so
    # any prefix they set does not leak out of define_jobs_context().
    original = oc.get_comp_prefix()
    self.define_jobs_context(qc)
    oc.comp_prefix(original)

    merged = context_get_merge_data(qc)

    # Only create the index job if we have reports defined
    # or some branched context (which might create reports)
    has_reports = len(qc.get_report_manager().allreports) > 0
    has_branched = qc.has_branched()
    if has_reports or has_branched:
        # self.info('Creating reports')
        oc.comp_dynamic(_dynreports_create_index, merged)
    else:
        pass
        # self.info('Not creating reports.')

    ndefined = len(oc.get_jobs_defined_in_this_session())
    if ndefined == 0:
        # self.comp was never called
        msg = 'No jobs defined.'
        raise ValueError(msg)
    else:
        if options.console:
            # Interactive mode: hand control to the Compmake console.
            oc.compmake_console()
            return 0
        else:
            # Batch mode: if everything is cached and no explicit command
            # was given, inform the user and do nothing.
            cq = CacheQueryDB(oc.get_compmake_db())
            targets = cq.all_jobs()
            todo, done, ready = cq.list_todo_targets(targets)
            if not todo and options.command is None:
                msg = "Note: there is nothing for me to do. "
                msg += '\n(Jobs todo: %s done: %s ready: %s)' % (
                    len(todo), len(done), len(ready))
                msg += """\
This application uses a cache system for the results.
This means that if you call it second time with the same arguments,
and if you do not change any input, it will not do anything."""
                self.warn(msg)
                return 0

            if options.command is None:
                command = 'make recurse=1'
            else:
                command = options.command

            try:
                _ = oc.batch_command(command)
                # print('qapp: ret0 = %s' % ret0)
            except CommandFailed:
                # print('qapp: CommandFailed')
                ret = QUICKAPP_COMPUTATION_ERROR
            except ShellExitRequested:
                # A 'quit' issued by a command counts as a normal exit.
                # print('qapp: ShellExitRequested')
                ret = 0
            else:
                # print('qapp: else ret = 0')
                ret = 0

            return ret
class CompmakeTest(unittest.TestCase):
    """Base class for Compmake tests.

    Creates a temporary StorageFilesystem-backed Context per test and offers
    helpers to define jobs, run commands, and assert on job status.
    Subclasses override mySetUp() for their own initialization.
    """
    __metaclass__ = ABCMeta

    def setUp(self):
        # Fresh temp storage + context per test; removed in tearDown().
        self.root0 = mkdtemp()
        self.root = os.path.join(self.root0, 'compmake')
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)
        # don't use '\r'
        set_compmake_config('interactive', False)
        set_compmake_config('console_status', False)
        from compmake.constants import CompmakeConstants
        CompmakeConstants.debug_check_invariants = True
        self.mySetUp()

    def tearDown(self):
        # Flip to True while debugging to keep the temporary directory.
        if False:
            print('not deleting %s' % self.root0)
        else:
            rmtree(self.root0)

    # optional init
    # noinspection PyPep8Naming
    def mySetUp(self):
        pass

    # useful
    def comp(self, *args, **kwargs):
        """Shortcut for defining a job on the test's context."""
        return self.cc.comp(*args, **kwargs)

    @contract(job_id=str, returns=Job)
    def get_job(self, job_id):
        """Returns the Job object for job_id from the test DB."""
        db = self.cc.get_compmake_db()
        return get_job(job_id=job_id, db=db)

    def get_jobs(self, expression):
        """ Returns the list of jobs corresponding to the given expression. """
        return list(parse_job_list(expression, context=self.cc))

    def assert_cmd_success(self, cmds):
        """ Executes the (list of) commands and checks it was successful. """
        try:
            print('@ %s' % cmds)
            self.cc.batch_command(cmds)
        except MakeFailed as e:
            # Show details of the failed jobs before re-raising below
            # via check_consistency? No: MakeFailed is only reported here.
            print('Detected MakeFailed')
            print('Failed jobs: %s' % e.failed)
            for job_id in e.failed:
                self.cc.interpret_commands_wrap('details %s' % job_id)
        except CommandFailed:
            # msg = 'Command %r failed. (res=%s)' % (cmds, res)
            raise

        self.cc.interpret_commands_wrap('check_consistency raise_if_error=1')

    def assert_cmd_fail(self, cmds):
        """ Executes the (list of) commands and checks that it fails. """
        try:
            self.cc.batch_command(cmds)
        except CommandFailed:
            pass
        else:
            msg = 'Command %r did not fail.' % cmds
            raise Exception(msg)

    @contract(cmd_string=str)
    def assert_cmd_success_script(self, cmd_string):
        """ This runs the "compmake_main" script which recreates the DB
            and context from disk. """
        ret = compmake_main([self.root, '--nosysexit', '-c', cmd_string])
        self.assertEqual(ret, 0)

    # useful tests
    def assert_defined_by(self, job_id, expected):
        self.assertEqual(self.get_job(job_id).defined_by, expected)

    def assertEqualSet(self, a, b):
        self.assertEqual(set(a), set(b))

    @contract(expr=str)
    def assertJobsEqual(self, expr, jobs, ignore_dyn_reports=True):
        """Asserts that the jobs matching expr equal jobs (as sets)."""
        js = 'not-valid-yet'
        try:
            js = self.get_jobs(expr)
            if ignore_dyn_reports:
                js = [x for x in js if 'dynreports' not in x]
            self.assertEqualSet(js, jobs)
        except:  # deliberate: log context for any failure, then re-raise
            print('expr %r -> %s' % (expr, js))
            print('differs from %s' % jobs)
            raise

    def assertMakeFailed(self, func, nfailed, nblocked):
        """Runs func() and checks it raises MakeFailed with exactly
        nfailed failed jobs and nblocked blocked jobs."""
        try:
            func()
        except MakeFailed as e:
            if len(e.failed) != nfailed:
                msg = 'Expected %d failed, got %d: %s' % (
                    nfailed, len(e.failed), e.failed)
                raise Exception(msg)
            if len(e.blocked) != nblocked:
                msg = 'Expected %d blocked, got %d: %s' % (
                    nblocked, len(e.blocked), e.blocked)
                raise Exception(msg)
        except Exception as e:
            raise Exception('unexpected: %s' % e)
        else:
            # Bug fix: previously, if func() raised nothing at all the
            # helper silently passed even though a MakeFailed was expected.
            msg = 'Expected MakeFailed, but no exception was raised.'
            raise Exception(msg)

    def assert_job_uptodate(self, job_id, status):
        res = self.up_to_date(job_id)
        self.assertEqual(res, status, 'Want %r uptodate? %s' % (job_id, status))

    @contract(returns=bool)
    def up_to_date(self, job_id):
        """Returns whether job_id is up to date according to CacheQueryDB."""
        from compmake.jobs.uptodate import CacheQueryDB
        cq = CacheQueryDB(db=self.db)
        up, reason, timestamp = cq.up_to_date(job_id)
        print('up_to_date(%r): %s, %r, %s' % (job_id, up, reason, timestamp))
        return up
class CompmakeTest(unittest.TestCase):
    """Base class for Compmake tests.

    Creates a temporary StorageFilesystem-backed Context per test and offers
    helpers to define jobs, run commands, and assert on job status.
    Subclasses override mySetUp() for their own initialization.
    """
    __metaclass__ = ABCMeta

    def setUp(self):
        # Fresh temp storage + context per test; removed in tearDown().
        self.root0 = mkdtemp()
        self.root = os.path.join(self.root0, 'compmake')
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)
        # don't use '\r'
        set_compmake_config('interactive', False)
        set_compmake_config('console_status', False)
        from compmake.constants import CompmakeConstants
        CompmakeConstants.debug_check_invariants = True
        self.mySetUp()

    def tearDown(self):
        # Flip to True while debugging to keep the temporary directory.
        if False:
            print('not deleting %s' % self.root0)
        else:
            rmtree(self.root0)

    # optional init
    # noinspection PyPep8Naming
    def mySetUp(self):
        pass

    # useful mcdp_lang_tests
    def comp(self, *args, **kwargs):
        """Shortcut for defining a job on the test's context."""
        return self.cc.comp(*args, **kwargs)

    @contract(job_id=str, returns=Job)
    def get_job(self, job_id):
        """Returns the Job object for job_id from the test DB."""
        db = self.cc.get_compmake_db()
        return get_job(job_id=job_id, db=db)

    def get_jobs(self, expression):
        """ Returns the list of jobs corresponding to the given expression. """
        return list(parse_job_list(expression, context=self.cc))

    def assert_cmd_success(self, cmds):
        """ Executes the (list of) commands and checks it was successful. """
        try:
            print('@ %s' % cmds)
            self.cc.batch_command(cmds)
        except MakeFailed as e:
            # On make failure, print details of each failed job.
            print('Detected MakeFailed')
            print('Failed jobs: %s' % e.failed)
            for job_id in e.failed:
                self.cc.interpret_commands_wrap('details %s' % job_id)
        except CommandFailed:
            # msg = 'Command %r failed. (res=%s)' % (cmds, res)
            raise

        self.cc.interpret_commands_wrap('check_consistency raise_if_error=1')

    def assert_cmd_fail(self, cmds):
        """ Executes the (list of) commands and checks that it fails. """
        try:
            self.cc.batch_command(cmds)
        except CommandFailed:
            pass
        else:
            msg = 'Command %r did not fail.' % cmds
            raise Exception(msg)

    @contract(cmd_string=str)
    def assert_cmd_success_script(self, cmd_string):
        """ This runs the "compmake_main" script which recreates the DB
            and context from disk. """
        ret = compmake_main([self.root, '--nosysexit', '-c', cmd_string])
        self.assertEqual(ret, 0)

    # useful tests
    def assert_defined_by(self, job_id, expected):
        self.assertEqual(self.get_job(job_id).defined_by, expected)

    def assertEqualSet(self, a, b):
        self.assertEqual(set(a), set(b))

    @contract(expr=str)
    def assertJobsEqual(self, expr, jobs, ignore_dyn_reports=True):
        """Asserts that the jobs matching expr equal jobs (as sets)."""
        js = 'not-valid-yet'
        try:
            js = self.get_jobs(expr)
            if ignore_dyn_reports:
                js = [x for x in js if not 'dynreports' in x]
            self.assertEqualSet(js, jobs)
        except:  # deliberate: log context for any failure, then re-raise
            print('expr %r -> %s' % (expr, js))
            print('differs from %s' % jobs)
            raise

    def assertMakeFailed(self, func, nfailed, nblocked):
        """Runs func() and checks it raises MakeFailed with exactly
        nfailed failed jobs and nblocked blocked jobs.

        NOTE(review): if func() raises nothing at all, this silently
        passes even though a MakeFailed was expected -- likely a bug.
        """
        try:
            func()
        except MakeFailed as e:
            if len(e.failed) != nfailed:
                msg = 'Expected %d failed, got %d: %s' % (
                    nfailed, len(e.failed), e.failed)
                raise Exception(msg)
            if len(e.blocked) != nblocked:
                msg = 'Expected %d blocked, got %d: %s' % (
                    nblocked, len(e.blocked), e.blocked)
                raise Exception(msg)
        except Exception as e:
            raise Exception('unexpected: %s' % e)

    def assert_job_uptodate(self, job_id, status):
        res = self.up_to_date(job_id)
        self.assertEqual(res, status, 'Want %r uptodate? %s' % (job_id, status))

    @contract(returns=bool)
    def up_to_date(self, job_id):
        """Returns whether job_id is up to date according to CacheQueryDB."""
        from compmake.jobs.uptodate import CacheQueryDB
        cq = CacheQueryDB(db=self.db)
        up, reason, timestamp = cq.up_to_date(job_id)
        print('up_to_date(%r): %s, %r, %s' % (job_id, up, reason, timestamp))
        return up
def go(self):
    """Main entry point for the quickapp.

    If a quickapp parent exists, delegates job definition to the parent's
    child context and returns. Otherwise it creates a Compmake storage and
    context in ``options.output``, calls ``self.define_jobs_context`` to
    define the jobs, optionally creates a dynamic report-index job, and
    then either opens the interactive console or runs ``options.command``.

    Returns:
        None when delegating to a parent; otherwise 0 on success or
        QUICKAPP_COMPUTATION_ERROR if the batch command failed.

    Raises:
        ValueError: if define_jobs_context() defined no jobs at all.
    """
    # check that if we have a parent who is a quickapp,
    # then use its context
    qapp_parent = self.get_qapp_parent()
    if qapp_parent is not None:
        qc = qapp_parent.child_context
        self.define_jobs_context(qc)
        return

    # Removed dead `if False:` resource-limit experiment: it used the
    # Python-2-only `long()` builtin and a `1048576L` long literal, which
    # is a SyntaxError on Python 3 even inside a dead branch.

    options = self.get_options()

    if self.get_qapp_parent() is None:
        # only do this if somebody didn't do it before
        if not options.contracts:
            msg = ('PyContracts disabled for speed. '
                   'Use --contracts to activate.')
            self.logger.warning(msg)
            contracts.disable_all()

    output_dir = options.output

    # --reset wipes the previous output directory before running.
    if options.reset:
        if os.path.exists(output_dir):
            self.logger.info('Removing output dir %r.' % output_dir)
            try:
                shutil.rmtree(output_dir)
            except OSError as e:
                # "Directory not empty" (errno 39 == ENOTEMPTY on Linux) is
                # common on NFS; tolerate it, re-raise anything else.
                # Consistent with the other go() implementation in this file.
                if e.errno != 39:
                    raise

    # Compmake storage for results
    storage = os.path.join(output_dir, 'compmake')
    db = StorageFilesystem(storage, compress=True)
    currently_executing = ['root']
    # The original Compmake context
    oc = Context(db=db, currently_executing=currently_executing)
    # Our wrapper
    qc = CompmakeContext(cc=oc, parent=None, qapp=self,
                         job_prefix=None, output_dir=output_dir)
    read_rc_files(oc)

    # Save and restore the comp prefix around the user's job definitions so
    # any prefix they set does not leak out of define_jobs_context().
    original = oc.get_comp_prefix()
    self.define_jobs_context(qc)
    oc.comp_prefix(original)

    merged = context_get_merge_data(qc)

    # Only create the index job if we have reports defined
    # or some branched context (which might create reports)
    has_reports = len(qc.get_report_manager().allreports) > 0
    has_branched = qc.has_branched()
    if has_reports or has_branched:
        self.info('Creating reports')
        oc.comp_dynamic(_dynreports_create_index, merged)
    else:
        self.info('Not creating reports.')

    ndefined = len(oc.get_jobs_defined_in_this_session())
    if ndefined == 0:
        # self.comp was never called
        msg = 'No jobs defined.'
        raise ValueError(msg)

    if options.console:
        # Interactive mode: hand control to the Compmake console.
        oc.compmake_console()
        return 0

    try:
        oc.batch_command(options.command)
    except CommandFailed:
        ret = QUICKAPP_COMPUTATION_ERROR
    except ShellExitRequested:
        # A 'quit' issued by a command is a normal exit, not an error;
        # consistent with the other go() implementation in this file.
        ret = 0
    else:
        ret = 0
    return ret