def consolidate_batch_results(result, batch_result):
    """Merge one worker's batched test results into the central result.

    :param result: the aggregate result object being built up
    :param batch_result: tuple of
        (output, testsRun, failures, errors, errorClasses) where
        output is the worker's captured stdout string,
        testsRun is the number of tests run,
        failures/errors are lists of (test-data, err) pairs, and
        errorClasses maps error class -> (storage, label, isfail)
    :return: batch_result's output string, or None if the batch was
        malformed (the malformation is recorded on ``result``)
    """
    try:
        output, testsRun, failures, errors, errorClasses = batch_result
    except ValueError:
        # Batch tuple has the wrong shape: record it as a failure on the
        # aggregate result instead of crashing the consolidation loop.
        failure.Failure(*sys.exc_info())(result)
        return
    result.testsRun += testsRun
    # TestLet wraps the pickled test metadata back into test-like objects.
    result.failures.extend([(TestLet(c_data), err)
                            for c_data, err in failures])
    result.errors.extend([(TestLet(c_data), err)
                          for c_data, err in errors])
    for key, (storage, label, isfail) in errorClasses.items():
        storage = [(TestLet(c_data), err) for c_data, err in storage]
        if key not in result.errorClasses:
            # BUG FIX: previously a default tuple was fabricated via
            # dict.get() but never stored back into result.errorClasses,
            # so entries for not-yet-seen error classes were silently
            # dropped. Register the entry before extending it.
            result.errorClasses[key] = ([], label, isfail)
        result.errorClasses[key][0].extend(storage)
    return output
def process_task(self, task, *args, **kwargs):
    """Run one queued test and return its outcome.

    :param task: tuple of (test address string, argument tuple or None)
    :type task: tuple
    :return: tuple of (test address — suffixed with the argument when one
        is present — and the populated result object)
    :rtype: tuple
    """
    test_addr, arg = task
    result = self.create_result_object()
    test = self.loader.loadTestsFromNames([test_addr])
    # FIX: placeholders were in a confusing order — the message read
    # "Worker runs test <worker_id> '<addr>'". Also use lazy %-args
    # instead of eager string interpolation.
    logging.info("Worker %s runs test '%s'", self.worker_id, test_addr)
    try:
        # TODO: figure out what this is about: appending the argument
        # presumably gives parametrized runs distinct reported addresses
        # — confirm against the consumer of the returned address.
        if arg is not None:
            test_addr = test_addr + str(arg)
        test(result)
        return test_addr, result
    except:
        # Intentionally bare: any exception (including KeyboardInterrupt
        # or a timeout raised into this worker) is converted into a
        # Failure on the result so the worker itself keeps running.
        failure.Failure(*sys.exc_info())(result)
        return test_addr, result
def consolidate(self, result, batch_result):
    """Fold a worker's batched result tuple into the aggregate result,
    echoing the worker's captured output to our stream."""
    log.debug("batch result is %s", batch_result)
    try:
        output, testsRun, failures, errors, errorClasses = batch_result
    except ValueError:
        # Tuple of the wrong arity: record the malformation and bail out.
        log.debug("result in unexpected format %s", batch_result)
        failure.Failure(*sys.exc_info())(result)
        return
    self.stream.write(output)
    result.testsRun += testsRun
    result.failures.extend(failures)
    result.errors.extend(errors)
    for err_cls, (batch_storage, label, is_failure) in errorClasses.items():
        # Ordinarily storage is result attribute
        # but it's only processed through the errorClasses
        # dict, so it's ok to fake it here
        target = result.errorClasses.setdefault(
            err_cls, ([], label, is_failure))
        target[0].extend(batch_storage)
    log.debug("Ran %s tests (total: %s)", testsRun, result.testsRun)
def multiprocess_runner(ix, testQueue, resultQueue, currentaddr, currentstart,
                        keyboardCaught, shouldStop, loaderClass, resultClass,
                        config):
    """To replace the test runner of multiprocess.

    * Setup gae services at the beginning of every process
    * Clean datastore after each test

    Runs inside a worker process: pulls (test_addr, arg) tasks from
    ``testQueue`` until the 'STOP' sentinel, runs each test, and puts
    picklable batched results onto ``resultQueue``.
    """
    from nose.pyversion import bytes_
    try:
        from cStringIO import StringIO
    except ImportError:
        # BUG FIX: this was `import StringIO`, which binds the *module*,
        # so the StringIO() call in makeResult() below would raise
        # TypeError on Pythons without cStringIO. Import the class.
        from StringIO import StringIO
    from nose.plugins.multiprocess import _instantiate_plugins, \
        NoSharedFixtureContextSuite, _WritelnDecorator, TestLet
    config = pickle.loads(config)
    dummy_parser = config.parserClass()
    if _instantiate_plugins is not None:
        for pluginclass in _instantiate_plugins:
            plugin = pluginclass()
            plugin.addOptions(dummy_parser, {})
            config.plugins.addPlugin(plugin)
    config.plugins.configure(config.options, config)
    config.plugins.begin()
    log.debug("Worker %s executing, pid=%d", ix, os.getpid())
    loader = loaderClass(config=config)
    loader.suiteClass.suiteClass = NoSharedFixtureContextSuite

    def get():
        # Blocks for at most multiprocess_timeout waiting for a task.
        return testQueue.get(timeout=config.multiprocess_timeout)

    def makeResult():
        # One fresh result per test; output is captured in-memory so it
        # can be shipped back through the result queue.
        stream = _WritelnDecorator(StringIO())
        result = resultClass(stream, descriptions=1,
                             verbosity=config.verbosity,
                             config=config)
        plug_result = config.plugins.prepareTestResult(result)
        return plug_result if plug_result else result

    def batch(result):
        # Reduce the result to a picklable tuple; TestLet strips test
        # objects down to transferable metadata.
        failures = [(TestLet(c), err) for c, err in result.failures]
        errors = [(TestLet(c), err) for c, err in result.errors]
        errorClasses = {}
        for key, (storage, label, isfail) in result.errorClasses.items():
            errorClasses[key] = ([(TestLet(c), err) for c, err in storage],
                                 label, isfail)
        return (result.stream.getvalue(), result.testsRun, failures, errors,
                errorClasses)

    def setup_process_env():
        """Runs just after the process starts to setup services."""
        setup_gae_services()

    def after_each_test():
        """Runs after each test to clean datastore."""
        clean_datastore()

    # Setup gae services at the beginning of every process
    setup_process_env()
    for test_addr, arg in iter(get, 'STOP'):
        if shouldStop.is_set():
            log.exception('Worker %d STOPPED', ix)
            break
        result = makeResult()
        test = loader.loadTestsFromNames([test_addr])
        test.testQueue = testQueue
        test.tasks = []
        test.arg = arg
        log.debug("Worker %s Test is %s (%s)", ix, test_addr, test)
        try:
            if arg is not None:
                test_addr = test_addr + str(arg)
            # currentaddr/currentstart let the parent detect hung tests.
            currentaddr.value = bytes_(test_addr)
            currentstart.value = time.time()
            test(result)
            currentaddr.value = bytes_('')
            resultQueue.put((ix, test_addr, test.tasks, batch(result)))
            # Clean datastore after each test
            after_each_test()
        except KeyboardInterrupt:
            keyboardCaught.set()
            if len(currentaddr.value) > 0:
                # Interrupt landed mid-test: report it as a failure of
                # the current test.
                log.exception(
                    'Worker %s keyboard interrupt, failing '
                    'current test %s', ix, test_addr)
                currentaddr.value = bytes_('')
                failure.Failure(*sys.exc_info())(result)
                resultQueue.put((ix, test_addr, test.tasks, batch(result)))
            else:
                log.debug('Worker %s test %s timed out', ix, test_addr)
                resultQueue.put((ix, test_addr, test.tasks, batch(result)))
        except SystemExit:
            currentaddr.value = bytes_('')
            log.exception('Worker %s system exit', ix)
            raise
        except:
            # Bare on purpose: report any other error back to the parent
            # as a failed test instead of killing the worker loop.
            currentaddr.value = bytes_('')
            log.exception(
                "Worker %s error running test or returning "
                "results", ix)
            failure.Failure(*sys.exc_info())(result)
            resultQueue.put((ix, test_addr, test.tasks, batch(result)))
        if config.multiprocess_restartworker:
            break
    log.debug("Worker %s ending", ix)
def __runner(ix, testQueue, resultQueue, procesingQueue, currentaddr,
             currentstart, keyboardCaught, shouldStop, loaderClass,
             resultClass, config):
    """Worker-process loop: pull (test_addr, arg) tasks from testQueue
    until the 'STOP' sentinel, run each test, and push picklable batched
    results onto resultQueue.

    NOTE(review): `procesingQueue` is a typo for "processingQueue" but is
    part of the signature — renaming would break callers.
    """
    # The config arrives pickled so it can cross the process boundary.
    config = pickle.loads(config)
    dummy_parser = config.parserClass()
    if _instantiate_plugins is not None:
        for pluginclass in _instantiate_plugins:
            plugin = pluginclass()
            plugin.addOptions(dummy_parser, {})
            config.plugins.addPlugin(plugin)
    config.plugins.configure(config.options, config)
    config.plugins.begin()
    log.debug("Worker %s executing, pid=%d", ix, os.getpid())
    loader = loaderClass(config=config)
    loader.suiteClass.suiteClass = NoSharedFixtureContextSuite

    def get():
        # Blocks for at most multiprocess_timeout waiting for a task.
        return testQueue.get(timeout=config.multiprocess_timeout)

    def makeResult():
        # One fresh result per test; output captured in-memory so it can
        # be shipped back through the result queue.
        stream = _WritelnDecorator(StringIO())
        result = resultClass(stream, descriptions=1,
                             verbosity=config.verbosity,
                             config=config)
        plug_result = config.plugins.prepareTestResult(result)
        if plug_result:
            return plug_result
        return result

    def batch(result):
        # Reduce the result to a picklable tuple; TestLet strips test
        # objects down to transferable metadata.
        failures = [(TestLet(c), err) for c, err in result.failures]
        errors = [(TestLet(c), err) for c, err in result.errors]
        errorClasses = {}
        for key, (storage, label, isfail) in result.errorClasses.items():
            errorClasses[key] = ([(TestLet(c), err) for c, err in storage],
                                 label, isfail)
        return (result.stream.getvalue(), result.testsRun, failures, errors,
                errorClasses)

    for test_addr, arg in iter(get, 'STOP'):
        if shouldStop.is_set():
            log.exception('Worker %d STOPPED', ix)
            break
        result = makeResult()
        test = loader.loadTestsFromNames([test_addr])
        test.testQueue = testQueue
        test.tasks = []
        test.arg = arg
        log.debug("Worker %s Test is %s (%s)", ix, test_addr, test)
        try:
            if arg is not None:
                # Suffix the argument so parametrized runs report
                # distinct addresses.
                test_addr = test_addr + str(arg)
            procesingQueue.put(test_addr)
            # currentaddr/currentstart let the parent detect hung tests.
            currentaddr.value = bytes_(test_addr)
            currentstart.value = time.time()
            test(result)
            currentaddr.value = bytes_('')
            resultQueue.put((ix, test_addr, test.tasks, batch(result)))
        # Python 2 syntax; a timeout is delivered as TimedOutException
        # through this same handler (see the inline comment).
        except KeyboardInterrupt, e:  # TimedOutException:
            timeout = isinstance(e, TimedOutException)
            if timeout:
                keyboardCaught.set()
            if len(currentaddr.value):
                # Interrupt/timeout landed mid-test: report it as a
                # failure of the current test.
                if timeout:
                    msg = 'Worker %s timed out, failing current test %s'
                else:
                    msg = 'Worker %s keyboard interrupt, failing current test %s'
                log.exception(msg, ix, test_addr)
                currentaddr.value = bytes_('')
                failure.Failure(*sys.exc_info())(result)
                resultQueue.put((ix, test_addr, test.tasks, batch(result)))
            else:
                # Between tests: just log and forward what we have.
                if timeout:
                    msg = 'Worker %s test %s timed out'
                else:
                    msg = 'Worker %s test %s keyboard interrupt'
                log.debug(msg, ix, test_addr)
                resultQueue.put((ix, test_addr, test.tasks, batch(result)))
            if not timeout:
                # A genuine Ctrl-C should still propagate and stop us.
                raise
        except SystemExit:
            currentaddr.value = bytes_('')
            log.exception('Worker %s system exit', ix)
            raise
else: msg = 'Worker %s test %s keyboard interrupt' log.debug(msg, ix, test_addr) resultQueue.put((ix, test_addr, test.tasks, batch(result))) if not timeout: raise except SystemExit: currentaddr.value = bytes_('') log.exception('Worker %s system exit', ix) raise except: currentaddr.value = bytes_('') log.exception( "Worker %s error running test or returning " "results", ix) failure.Failure(*sys.exc_info())(result) resultQueue.put((ix, test_addr, test.tasks, batch(result))) if config.multiprocess_restartworker: break log.debug("Worker %s ending", ix) class NoSharedFixtureContextSuite(ContextSuite): """ Context suite that never fires shared fixtures. When a context sets _multiprocess_shared_, fixtures in that context are executed by the main process. Using this suite class prevents them from executing in the runner process as well. """