def parmake_job2_new_process(args):
    """
    Starts the job in a new compmake process.

    Parameters:
        args: tuple ``(job_id, context)`` — the job to run and the compmake
            context providing the database.

    Returns:
        The result dict the child process pickled to ``out_result``.

    Raises:
        JobFailed: the child reported RET_CODE_JOB_FAILED; re-raised from
            the pickled result dict.
        CompmakeBug: the child exited with any other nonzero code.
    """
    (job_id, context) = args
    compmake_bin = which("compmake")

    db = context.get_compmake_db()
    storage = db.basepath  # XXX:
    where = os.path.join(storage, "parmake_job2_new_process")
    # Fix: the results pickle is written inside `where`, so that directory
    # (not merely `storage`) must exist before the child process runs.
    if not os.path.exists(where):
        try:
            os.makedirs(where)
        except OSError:
            # Best-effort: another worker may have created it concurrently.
            pass

    out_result = os.path.join(where, "%s.results.pickle" % job_id)
    out_result = os.path.abspath(out_result)
    cmd = [compmake_bin, storage]

    if not all_disabled():
        cmd += ["--contracts"]

    cmd += [
        "--status_line_enabled", "0",
        "--colorize", "0",
        "-c",
        "make_single out_result=%s %s" % (out_result, job_id),
    ]

    cwd = os.getcwd()
    cmd_res = system_cmd_result(
        cwd, cmd,
        display_stdout=False,
        display_stderr=False,
        raise_on_error=False,
        capture_keyboard_interrupt=False,
    )
    ret = cmd_res.ret

    if ret == CompmakeConstants.RET_CODE_JOB_FAILED:  # XXX:
        msg = "Job %r failed in external process" % job_id
        msg += indent(cmd_res.stdout, "stdout| ")
        msg += indent(cmd_res.stderr, "stderr| ")

        res = safe_pickle_load(out_result)
        os.unlink(out_result)
        result_dict_check(res)
        raise JobFailed.from_dict(res)
    elif ret != 0:
        msg = "Host failed while doing %r" % job_id
        msg += "\n cmd: %s" % " ".join(cmd)
        msg += "\n" + indent(cmd_res.stdout, "stdout| ")
        msg += "\n" + indent(cmd_res.stderr, "stderr| ")
        raise CompmakeBug(msg)  # XXX:

    res = safe_pickle_load(out_result)
    os.unlink(out_result)
    result_dict_check(res)
    return res
def get(self, timeout=0):  # @UnusedVariable
    """
    Retrieves the result of the finished job.

    Must be called only after readiness was announced, and at most once.
    Reads the return-code file, then the captured stdout/stderr, then
    unpickles the results file; always deletes all four files afterwards.

    Raises:
        CompmakeBug: called before ready / called twice / results file
            missing despite success.
        HostFailed: the retcode file does not contain an integer.
    """
    if not self.told_you_ready:
        raise CompmakeBug("I didnt tell you it was ready.")

    if self.already_read:
        msg = 'Compmake BUG: should not call twice.'
        raise CompmakeBug(msg)
    self.already_read = True

    assert os.path.exists(self.retcode)
    # Fix: use context managers so the file handles are closed
    # (the original open(...).read() calls leaked them).
    with open(self.retcode, 'r') as f:
        ret_str = f.read()
    try:
        # Parse as a sanity check that the child wrote a valid return code.
        ret = int(ret_str)
    except ValueError:
        msg = 'Could not interpret file %r: %r.' % (self.retcode, ret_str)
        raise HostFailed(host='localhost',
                         job_id=self.job_id,
                         reason=msg, bt='')

    try:
        with open(self.stderr, 'r') as f:
            stderr = f.read()
        with open(self.stdout, 'r') as f:
            stdout = f.read()

        stderr = 'Contents of %s:\n' % self.stderr + stderr
        stdout = 'Contents of %s:\n' % self.stdout + stdout

        if not os.path.exists(self.out_results):
            msg = 'job succeeded but no %r found' % self.out_results
            msg += '\n' + indent(stderr, 'stderr')
            msg += '\n' + indent(stdout, 'stdout')
            raise CompmakeBug(msg)

        res = safe_pickle_load(self.out_results)
        result_dict_raise_if_error(res)
        return res
    finally:
        # Clean up all temporary files regardless of success or failure.
        fs = [self.stderr, self.stdout, self.out_results, self.retcode]
        for filename in fs:
            if os.path.exists(filename):
                os.unlink(filename)
def aer_pipeline_transitions1_all(filename, name):
    """ Uses caches """
    base = os.path.splitext(filename)[0]
    cache_name = base + '.events-%s.pickle' % name

    if not os.path.exists(cache_name):
        # Cache miss: compute, persist, return.
        logger.debug('Cache not found %s' % cache_name)
        values = aer_pipeline_transitions1_all_slave(filename, name)
        safe_pickle_dump(values, cache_name)
        return values

    # Cache hit: load the previously computed values.
    logger.debug('Using cache %s ' % cache_name)
    return safe_pickle_load(cache_name)
def __getitem__(self, key):
    """
    Loads and returns the pickled value stored for `key`.

    Raises:
        CompmakeException: the key's file does not exist, or unpickling
            fails (with the traceback appended to the message).
    """
    if trace_queries:
        logger.debug('R %s' % str(key))

    self.check_existence()

    filename = self.filename_for_key(key)

    if not os.path.exists(filename):
        raise CompmakeException('Could not find key %r.' % key)

    try:
        return safe_pickle_load(filename)
    except Exception as e:
        msg = "Could not unpickle file %r." % (filename)
        logger.error(msg)
        logger.exception(e)
        # Fix: format_exc() takes no exception argument; passing `e` made it
        # the `limit` parameter (TypeError on Python 3). Now consistent with
        # the sibling __getitem__ implementation.
        msg += "\n" + traceback.format_exc()
        raise CompmakeException(msg)
def jobs_nosetests_single(context, module):
    """
    Collects the tests of `module` via nosetests in a temporary directory,
    then schedules one compmake job per collected test.
    """
    with create_tmp_dir() as cwd:
        out = os.path.join(cwd, '%s.pickle' % module)
        # Collect only; the xunitext plugin pickles the test list to `out`.
        cmd = [
            'nosetests',
            '--collect-only',
            '--with-xunitext',
            '--xunitext-file', out,
            '-v',
            '-s', module,
        ]
        system_cmd_result(cwd=cwd,
                          cmd=cmd,
                          display_stdout=True,
                          display_stderr=True,
                          raise_on_error=True)

        tests = safe_pickle_load(out)
        print('found %d mcdp_lang_tests ' % len(tests))

        for t in tests:
            context.comp(execute, t)
def __getitem__(self, key):
    """
    Loads and returns the pickled value stored for `key`.

    Raises:
        CompmakeBug: the key's file is missing, or unpickling fails
            (with the traceback appended to the message).
    """
    if trace_queries:
        logger.debug('R %s' % str(key))

    self.check_existence()

    filename = self.filename_for_key(key)

    if not os.path.exists(filename):
        msg = 'Could not find key %r.' % key
        msg += '\n file: %s' % filename
        raise CompmakeBug(msg)

    try:
        return safe_pickle_load(filename)
    except Exception as e:
        msg = ("Could not unpickle data for key %r. \n file: %s"
               % (key, filename))
        logger.error(msg)
        logger.exception(e)
        msg += "\n" + traceback.format_exc()
        raise CompmakeBug(msg)
def load_pickle(pickle):
    """Load and return the object pickled in the file at path `pickle`.

    NOTE(review): the parameter name shadows the stdlib ``pickle`` module;
    kept as-is for backward compatibility with keyword-argument callers.
    """
    return safe_pickle_load(pickle)
def parmake_job2_new_process(args):
    """
    Starts the job in a new compmake process.

    Parameters:
        args: tuple ``(job_id, context)`` — the job to run and the compmake
            context providing the database.

    Returns:
        The result dict the child process pickled to ``out_result``.

    Raises:
        JobFailed: the child reported RET_CODE_JOB_FAILED; re-raised from
            the pickled result dict.
        CompmakeBug: the child exited with any other nonzero code.
    """
    (job_id, context) = args
    compmake_bin = which('compmake')

    db = context.get_compmake_db()
    storage = db.basepath  # XXX:
    where = os.path.join(storage, 'parmake_job2_new_process')
    # Fix: the results pickle is written inside `where`, so that directory
    # (not merely `storage`) must exist before the child process runs.
    if not os.path.exists(where):
        try:
            os.makedirs(where)
        except OSError:
            # Best-effort: another worker may have created it concurrently.
            pass

    out_result = os.path.join(where, '%s.results.pickle' % job_id)
    out_result = os.path.abspath(out_result)
    cmd = [compmake_bin, storage]

    if not all_disabled():
        cmd += ['--contracts']

    cmd += [
        '--status_line_enabled', '0',
        '--colorize', '0',
        '-c',
        'make_single out_result=%s %s' % (out_result, job_id),
    ]

    cwd = os.getcwd()
    cmd_res = system_cmd_result(cwd, cmd,
                                display_stdout=False,
                                display_stderr=False,
                                raise_on_error=False,
                                capture_keyboard_interrupt=False)
    ret = cmd_res.ret

    if ret == CompmakeConstants.RET_CODE_JOB_FAILED:  # XXX:
        msg = 'Job %r failed in external process' % job_id
        msg += indent(cmd_res.stdout, 'stdout| ')
        msg += indent(cmd_res.stderr, 'stderr| ')

        res = safe_pickle_load(out_result)
        os.unlink(out_result)
        result_dict_check(res)
        raise JobFailed.from_dict(res)
    elif ret != 0:
        msg = 'Host failed while doing %r' % job_id
        msg += '\n cmd: %s' % " ".join(cmd)
        msg += '\n' + indent(cmd_res.stdout, 'stdout| ')
        msg += '\n' + indent(cmd_res.stderr, 'stderr| ')
        raise CompmakeBug(msg)  # XXX:

    res = safe_pickle_load(out_result)
    os.unlink(out_result)
    result_dict_check(res)
    return res