def test_can_be_disabled(self):
    """The --no-skip command-line switch must turn the Skip plugin off."""
    parser = OptionParser()
    plugin = Skip()
    plugin.addOptions(parser)
    opts, _ = parser.parse_args(["--no-skip"])
    plugin.configure(opts, Config())
    assert not plugin.enabled, "Skip was not disabled by noSkip option"
def test_patches_only_when_needed(self):
    """A result that already supports errorClasses must not be monkeypatched."""
    class AlreadyCapable(unittest.TestResult):
        def __init__(self):
            self.errorClasses = {}

    result = AlreadyCapable()
    plugin = Skip()
    plugin.prepareTestResult(result)
    assert not hasattr(result, "_orig_addError"), "Skip patched a result class it didn't need to patch"
def test_patches_only_when_needed(self):
    """Skip leaves alone any result class that already has errorClasses."""
    class SelfSufficient(unittest.TestResult):
        def __init__(self):
            self.errorClasses = {}

    res = SelfSufficient()
    plugin = Skip()
    plugin.prepareTestResult(res)
    patched = hasattr(res, '_orig_addError')
    assert not patched, "Skip patched a result class it didn't need to patch"
def test_patched_result_handles_skip(self):
    """After patching, SkipTest is recorded as a skip rather than an error."""
    result = unittest.TestResult()
    Skip().prepareTestResult(result)

    class Skipper(unittest.TestCase):
        def test(self):
            raise SkipTest("skip me")

    case = Skipper("test")
    case(result)
    assert not result.errors, "Skip was not caught: %s" % result.errors
    assert result.skipped
    assert result.skipped[0][0] is case
def test_patched_result_handles_skip(self):
    """A patched TestResult files SkipTest under skipped, not errors."""
    outcome = unittest.TestResult()
    plugin = Skip()
    plugin.prepareTestResult(outcome)

    class WillSkip(unittest.TestCase):
        def test(self):
            raise SkipTest('skip me')

    the_test = WillSkip('test')
    the_test(outcome)
    assert not outcome.errors, "Skip was not caught: %s" % outcome.errors
    assert outcome.skipped
    assert outcome.skipped[0][0] is the_test
class TestXUnitPlugin(PluginTester, unittest.TestCase):
    """End-to-end check of the Xunit plugin: run a suite with errors,
    failures and skips, then verify the rendered XML report."""
    activate = '--with-xunit'
    args = ['-v', '--xunit-file=%s' % xml_results_filename]
    plugins = [Xunit(), Skip()]
    suitepath = os.path.join(support, 'xunit')

    def runTest(self):
        print(str(self.output))
        # Console-level outcomes first ...
        assert "ERROR: test_error" in self.output
        assert "FAIL: test_fail" in self.output
        assert "test_skip (test_xunit_as_suite.TestForXunit) ... SKIP: skipit" in self.output
        assert "XML: %s" % xml_results_filename in self.output
        # ... then the XML file itself.  Read via codecs for a py2/py3-safe
        # explicit UTF-8 decode.
        f = codecs.open(xml_results_filename, 'r', encoding='utf8')
        result = f.read()
        f.close()
        print(result.encode('utf8', 'replace'))
        assert '<?xml version="1.0" encoding="UTF-8"?>' in result
        assert '<testsuite name="nosetests" tests="6" errors="2" failures="1" skip="1">' in result
        assert '<testcase classname="test_xunit_as_suite.TestForXunit" name="test_error" time="' in result
        # TODO(Kumar) think of better x-platform code here that
        # does not confuse 2to3
        if sys.version_info[0:2] >= (3, 0):
            assert ('<error type="%s.Exception" message="日本">' % (Exception.__module__, )) in result
        else:
            # Python 2: the %-formatted bytes must be decoded before the
            # `in` check against the unicode report.
            assert ('<error type="%s.Exception" message="日本">' % (Exception.__module__, )).decode('utf8') in result
        assert '</testcase>' in result
        assert '</testsuite>' in result
class TestMultipleDriversWithBrowserPerTest(PluginTester, unittest.TestCase):
    """With --holmium-browser-per-test, each test gets (and quits) its own
    browser instance; verify construction/quit call counts on the mock."""
    activate = "--with-holmium"
    args = [
        '--holmium-environment=tenv',
        '--holmium-browser=chrome',
        '--holmium-browser-per-test'
    ]
    suitepath = os.path.join(support, 'multiple_drivers')
    plugins = [holmium.core.HolmiumNose(), Skip()]

    def setUp(self):
        # Swap the real browser mapping for mocks so no webdriver launches.
        self.old_mapping = holmium.core.noseplugin.BROWSER_MAPPING
        mock_browsers = build_mock_mapping("chrome")
        holmium.core.noseplugin.BROWSER_MAPPING.update(mock_browsers)
        ENV.clear()
        super(TestMultipleDriversWithBrowserPerTest, self).setUp()

    def runTest(self):
        assert "Ran 2 tests" in self.output, self.output
        assert "OK" in self.output, self.output
        # NOTE(review): assertAlmostEqual on integer call counts —
        # presumably assertEqual was intended; for ints the behaviour is
        # identical, so left as-is.
        self.assertAlmostEqual(
            holmium.core.noseplugin.BROWSER_MAPPING["chrome"].return_value.
            quit.call_count,  # noqa: E501
            3)
        self.assertAlmostEqual(
            holmium.core.noseplugin.BROWSER_MAPPING["chrome"].call_count, 3)

    def tearDown(self):
        # Restore the real browser mapping for subsequent tests.
        holmium.core.noseplugin.BROWSER_MAPPING = self.old_mapping
def _perform_one_test(self, directories, results_queue, address):
    """Run the single nose test identified by *address* and post the
    outcome (a 'test-output' or 'test-error' tuple) on *results_queue*.

    Always posts a final ``None`` sentinel and finalizes MPI.
    """
    try:
        print("start test run")
        # Detach stdin so interactive prompts inside tests cannot block.
        null_device = open('/dev/null')
        os.stdin = null_device
        select = SelectOneTestAndStoreOutput(address)
        plugins = [select, Skip()]
        argv = ['nose']
        argv.extend(directories)
        outcome = TestProgram(exit=False, argv=argv, plugins=plugins)
        if outcome.success:
            result = 'Success'
        else:
            result = 'Failure'
        if select.buffer:
            result += ' - '
            if len(select.buffer) > 1000:
                # Truncate very long captured output and say how much was
                # dropped.  (Fixed: the original sliced by len(result) and
                # computed `result - 1000` on the string itself, which
                # raised TypeError whenever this branch was reached.)
                result += select.buffer[:1000]
                result += ' ' + str(len(select.buffer) - 1000) + ' more ...'
            else:
                result += select.buffer
        results_queue.put(('test-output', result))
    except:
        # Deliberately broad: any failure in the runner itself is reported
        # back to the parent process rather than crashing silently.
        results_queue.put((
            'test-error',
            'Exception happened: ' + str(sys.exc_info()[0]) + " - " +
            str(sys.exc_info()[1]),
        ))
    finally:
        # Sentinel tells the consumer no more messages will arrive.
        results_queue.put(None)
        MPI.Finalize()
        print("calling finalize done")
class IntegrationTestCase(PluginTester, TestCase):
    """Base class for integration tests run with the progressive plugin."""
    activate = '--with-progressive'
    plugins = [ProgressivePlugin(), Skip()]

    def _count_eq(self, text, count):
        """Assert `text` appears `count` times in the captured output."""
        occurrences = str(self.output).count(text)
        eq_(occurrences, count)
class TestXUnitPlugin(PluginTester, unittest.TestCase):
    """End-to-end check of the Xunit plugin's XML report (Python 2 file:
    uses the print statement and implicit platform file decoding)."""
    activate = '--with-xunit'
    args = ['-v', '--xunit-file=%s' % xml_results_filename]
    plugins = [Xunit(), Skip()]
    suitepath = os.path.join(support, 'xunit')

    def runTest(self):
        print str(self.output)
        # Console-level outcomes first ...
        assert "ERROR: test_error" in self.output
        assert "FAIL: test_fail" in self.output
        assert "test_skip (test_xunit_as_suite.TestForXunit) ... SKIP: skipit" in self.output
        assert "XML: %s" % xml_results_filename in self.output
        # ... then the XML file contents.
        f = open(xml_results_filename, 'r')
        result = f.read()
        f.close()
        print result
        assert '<?xml version="1.0" encoding="UTF-8"?>' in result
        assert '<testsuite name="nosetests" tests="6" errors="2" failures="1" skip="1">' in result
        assert '<testcase classname="test_xunit_as_suite.TestForXunit" name="test_error" time="0">' in result
        assert '<error type="exceptions.Exception" message="日本">' in result
        assert '</testcase>' in result
        assert '</testsuite>' in result
def test_skip_prevents_pdb_call(self):
    """SkipTest must not trigger the debug plugin's post-mortem pdb."""
    class Skipper(unittest.TestCase):
        def test(self):
            raise SkipTest('not me')

    skip_plugin = Skip()
    skip_plugin.enabled = True
    pdb_plugin = debug.Pdb()
    pdb_plugin.enabled = True
    pdb_plugin.enabled_for_errors = True
    outcome = unittest.TestResult()
    conf = Config(plugins=PluginManager(plugins=[skip_plugin, pdb_plugin]))
    proxy_factory = ResultProxyFactory(conf)
    wrapped = case.Test(Skipper('test'), resultProxy=proxy_factory)
    wrapped(outcome)
    assert not outcome.errors, "Skip was recorded as error %s" % outcome.errors
    assert not debug.pdb.called, "pdb was called"
def test_skip_output(self):
    """A skipped test prints a bare 'S' and the run still counts as a pass."""
    class Skipper(unittest.TestCase):
        def test(self):
            raise SkipTest('skip me')

    stream = unittest._WritelnDecorator(StringIO())
    outcome = unittest._TextTestResult(stream, 0, 1)
    Skip().prepareTestResult(outcome)
    Skipper('test')(outcome)
    assert not outcome.errors, "Skip was not caught: %s" % outcome.errors
    assert outcome.skipped
    outcome.printErrors()
    text = stream.getvalue()
    assert text
    assert text.strip() == "S"
    assert outcome.wasSuccessful()
def test_skip_output(self):
    """Non-verbose output for a skipped test is exactly 'S'; run is green."""
    class WillSkip(unittest.TestCase):
        def test(self):
            raise SkipTest("skip me")

    stream = unittest._WritelnDecorator(StringIO())
    res = unittest._TextTestResult(stream, 0, 1)
    plugin = Skip()
    plugin.prepareTestResult(res)
    WillSkip("test")(res)
    assert not res.errors, "Skip was not caught: %s" % res.errors
    assert res.skipped
    res.printErrors()
    captured = stream.getvalue()
    assert captured
    assert captured.strip() == "S"
    assert res.wasSuccessful()
def test_skip_output_verbose(self):
    """Verbose output must show '... SKIP' plus the SkipTest reason."""
    class TC(unittest.TestCase):
        def test(self):
            raise SkipTest("skip me too")
    stream = unittest._WritelnDecorator(StringIO())
    res = unittest._TextTestResult(stream, 0, verbosity=2)
    sk = Skip()
    sk.prepareTestResult(res)
    test = TC("test")
    test(res)
    assert not res.errors, "Skip was not caught: %s" % res.errors
    assert res.skipped
    res.printErrors()
    out = stream.getvalue()
    # Python 2 print statement; this file predates print().
    print out
    assert out
    assert " ... SKIP" in out
    assert "skip me too" in out
def test_skip_output_verbose(self):
    """Verbose output must show '... SKIP' plus the SkipTest reason."""
    class TC(unittest.TestCase):
        def test(self):
            raise SkipTest('skip me too')
    stream = _WritelnDecorator(StringIO())
    res = _TextTestResult(stream, 0, verbosity=2)
    sk = Skip()
    sk.prepareTestResult(res)
    test = TC('test')
    test(res)
    assert not res.errors, "Skip was not caught: %s" % res.errors
    assert res.skipped
    res.printErrors()
    out = stream.getvalue()
    # Python 2 print statement; this file predates print().
    print out
    assert out
    assert ' ... SKIP' in out
    assert 'skip me too' in out
def test_can_be_disabled(self):
    """Passing --no-skip on the command line disables the plugin."""
    option_parser = OptionParser()
    sk = Skip()
    sk.addOptions(option_parser)
    parsed_options, extra_args = option_parser.parse_args(['--no-skip'])
    sk.configure(parsed_options, Config())
    assert not sk.enabled, "Skip was not disabled by noSkip option"
def test_mod_import_skip_one_test_no_errors(self):
    """A module whose import raises SkipTest yields one clean test run."""
    config = Config(plugins=PluginManager(plugins=[Skip()]))
    working_dir = os.path.join(support, 'ctx')
    test_loader = loader.TestLoader(workingDir=working_dir, config=config)
    suite = test_loader.loadTestsFromName('mod_import_skip.py')
    result = unittest.TestResult()
    suite(result)
    assert not result.errors, result.errors
    assert not result.failures, result.failures
    assert result.testsRun == 1, (
        "Expected to run 1 tests but ran %s" % result.testsRun)
class TestIssue680(PluginTester, unittest.TestCase):
    """Regression test for nose issue #680: counts in the XML report."""
    activate = '--with-xunit'
    args = ['-v', '--xunit-file=%s' % xml_results_filename]
    plugins = [Xunit(), Skip()]
    suitepath = os.path.join(support, 'issue680')

    def runTest(self):
        print str(self.output)
        # Read as bytes and decode explicitly, avoiding the platform's
        # default text encoding.
        f = open(xml_results_filename, 'rb')
        result = f.read().decode('utf-8')
        f.close()
        print result
        assert 'tests="1" errors="0" failures="0" skip="0"' in result
class TestIssue279(PluginTester, unittest.TestCase):
    """Regression test for nose issue #279: an error raised during the run
    must appear as an error (not a failure/skip) in the XML report."""
    activate = '--with-xunit'
    args = ['-v', '--xunit-file=%s' % xml_results_filename]
    plugins = [Xunit(), Skip()]
    suitepath = os.path.join(support, 'issue279')

    def runTest(self):
        print(str(self.output))
        # Context manager guarantees the handle is closed even if an
        # assertion below fails (the original leaked it on failure).
        with open(xml_results_filename, 'r') as f:
            result = f.read()
        print(result)
        assert 'tests="1" errors="1" failures="0" skip="0"' in result
        assert "Exception: I would prefer not to" in result
def test_mod_setup_skip_no_tests_run_no_errors(self):
    """SkipTest in module setup skips the whole module without errors."""
    config = Config(plugins=PluginManager(plugins=[Skip()]))
    working_dir = os.path.join(support, 'ctx')
    test_loader = loader.TestLoader(workingDir=working_dir, config=config)
    suite = test_loader.loadTestsFromName('mod_setup_skip.py')
    result = unittest.TestResult()
    suite(result)
    assert not suite.was_setup, "Suite setup did not fail"
    assert not result.errors, result.errors
    assert not result.failures, result.failures
    assert result.skipped
    assert result.testsRun == 0, (
        "Expected to run 0 tests but ran %s" % result.testsRun)
class TestNoDriver(PluginTester, unittest.TestCase):
    """Running without a configured browser should skip the test, not fail."""
    activate = "--with-holmium"
    suitepath = os.path.join(support, 'broken_driver')
    plugins = [holmium.core.HolmiumNose(), Skip()]

    def setUp(self):
        # Save and mock the browser mapping so no real webdriver starts.
        self.old_mapping = holmium.core.noseplugin.BROWSER_MAPPING
        mock_browsers = build_mock_mapping("chrome")
        holmium.core.noseplugin.BROWSER_MAPPING.update(mock_browsers)
        ENV.clear()
        super(TestNoDriver, self).setUp()

    def runTest(self):
        assert "Ran 1 test" in self.output, self.output
        assert "SKIP=1" in self.output, self.output

    def tearDown(self):
        # Restore the real browser mapping for subsequent tests.
        holmium.core.noseplugin.BROWSER_MAPPING = self.old_mapping
def test_prepare_patches_result(self):
    """prepareTestResult must graft skip support onto a text result.

    The bare attribute accesses below act as assertions: each raises
    AttributeError if Skip failed to patch the result object.
    """
    stream = _WritelnDecorator(StringIO())
    res = _TextTestResult(stream, 0, 1)
    sk = Skip()
    sk.prepareTestResult(res)
    res._orig_addError
    res._orig_printErrors
    res._orig_wasSuccessful
    res.skipped
    self.assertEqual(res.errorClasses,
                     {SkipTest: (res.skipped, 'SKIP', False)})
    # result w/out print works too
    res = unittest.TestResult()
    sk = Skip()
    sk.prepareTestResult(res)
    res._orig_addError
    res.skipped
    self.assertEqual(res.errorClasses,
                     {SkipTest: (res.skipped, 'SKIP', False)})
def _perform_the_testrun(self, directories, results_queue,
                         previous_report=None):
    """Run nose (with doctest support) over *directories* and post the
    resulting report — or an error description — on *results_queue*.

    Always posts a final ``None`` sentinel and finalizes MPI.
    """
    try:
        ensure_mpd_is_running()
        # Detach stdin so interactive prompts inside tests cannot block.
        null_device = open('/dev/null')
        os.stdin = null_device
        report = MakeAReportOfATestRun(previous_report, results_queue)
        doctest = Doctest()
        doctest.enabled = True
        plugins = [doctest, report, Skip(), Capture()]
        argv = ['nose', '-v']
        old_working_directory = os.getcwd()
        # Idiom fix: `not x is None` -> `x is not None`.
        if self.WORKING_DIRECTORY is not None:
            argv.extend(['-w', self.WORKING_DIRECTORY])
            os.chdir(self.WORKING_DIRECTORY)
        argv.extend(directories)
        argv.extend(['--with-doctest', '--doctest-extension=txt'])
        result = TestProgram(exit=False, argv=argv, plugins=plugins)
        os.chdir(old_working_directory)
        results_queue.put((
            'test-report',
            report,
        ))
    except:
        # Deliberately broad: report any runner failure to the parent
        # process instead of dying silently.
        results_queue.put((
            'test-error',
            'Exception happened: ' + str(sys.exc_info()[0]) + " - " +
            str(sys.exc_info()[1]),
        ))
    finally:
        # Sentinel tells the consumer no more messages will arrive.
        results_queue.put(None)
        MPI.Finalize()
class NoseDepPluginTester(PluginTester, unittest.TestCase):
    """Shared harness for running nose with the NoseDep plugin enabled."""
    activate = '--with-nosedep'
    args = ['-v']
    plugins = [NoseDep(), Skip()]

    # This is a bit odd. If using the absolute path on Windows
    # it tries to import module 'C' due to the ':' , if only relative path
    # it splits on the '.' before the file extension.
    # So I only got it to work with a relative path appended with a ':'.
    suitepath = None

    def makeSuite(self):
        raise Exception("Should not be used currently")

    def check(self, expect):
        """Compare output lines against *expect* and verify the test count."""
        # Capture the count before the loop below pops entries off expect.
        results = len(expect)
        for line in self.output:
            if expect:
                self.assertEqual(expect.pop(0), line.strip())
        # Verify that we ran the expected number of tests
        assert_in(
            "Ran {} test{} in".format(results, 's' if results > 1 else ''),
            str(self.output))
def test_prepare_patches_result(self):
    """prepareTestResult must graft skip support onto a text result.

    The bare attribute accesses below act as assertions: each raises
    AttributeError if Skip failed to patch the result object.
    """
    stream = unittest._WritelnDecorator(StringIO())
    res = unittest._TextTestResult(stream, 0, 1)
    sk = Skip()
    sk.prepareTestResult(res)
    res._orig_addError
    res._orig_printErrors
    res._orig_wasSuccessful
    res.skipped
    self.assertEqual(res.errorClasses,
                     {SkipTest: (res.skipped, "SKIP", False)})
    # result w/out print works too
    res = unittest.TestResult()
    sk = Skip()
    sk.prepareTestResult(res)
    res._orig_addError
    res.skipped
    self.assertEqual(res.errorClasses,
                     {SkipTest: (res.skipped, "SKIP", False)})
class TestDriverBroken(PluginTester, unittest.TestCase):
    """A browser that fails to construct should cause a skip, not an error."""
    activate = "--with-holmium"
    args = ['--holmium-environment=tenv', '--holmium-browser=chrome']
    suitepath = os.path.join(support, 'broken_driver')
    plugins = [holmium.core.HolmiumNose(), Skip()]

    def setUp(self):
        # Save the real mapping, then install mocks whose construction
        # always blows up.
        self.old_mapping = holmium.core.noseplugin.BROWSER_MAPPING
        mock_browsers = build_mock_mapping("chrome")

        def fake_construct(*a, **k):
            raise Exception("failed to initialize")
        mock_browsers["chrome"].side_effect = fake_construct
        holmium.core.noseplugin.BROWSER_MAPPING.update(mock_browsers)
        ENV.clear()
        super(TestDriverBroken, self).setUp()

    def runTest(self):
        assert "Ran 1 test" in self.output, self.output
        assert "SKIP=1" in self.output, self.output

    def tearDown(self):
        # Restore the real browser mapping for subsequent tests.
        holmium.core.noseplugin.BROWSER_MAPPING = self.old_mapping
"pymongo.change_stream", "pymongo.cursor", "pymongo.encryption", "pymongo.encryption_options", "pymongo.mongo_client", "pymongo.database", "gridfs", "gridfs.grid_file", ]: sys.modules.pop(n) if "--check-exclude-patterns" in sys.argv: check_exclude_patterns = True sys.argv.remove("--check-exclude-patterns") else: check_exclude_patterns = False success = nose.run( config=Config(plugins=PluginManager()), addplugins=[SynchroNosePlugin(), Skip(), Xunit()] ) if not success: sys.exit(1) if check_exclude_patterns: unused_module_pats = set(excluded_modules) - excluded_modules_matched assert not unused_module_pats, "Unused module patterns: %s" % (unused_module_pats,) unused_test_pats = set(excluded_tests) - excluded_tests_matched assert not unused_test_pats, "Unused test patterns: %s" % (unused_test_pats,)
def test_api_present(self):
    """The Skip plugin exposes the expected plugin API hooks."""
    sk = Skip()
    # getattr raises AttributeError exactly as bare attribute access would.
    for hook_name in ('addOptions', 'configure', 'prepareTestResult'):
        getattr(sk, hook_name)
def test_enabled_by_default(self):
    """A freshly constructed Skip plugin starts out enabled."""
    plugin = Skip()
    assert plugin.enabled, "Skip was not enabled by default"
class SynchroModuleFinder(object):
    """Import hook: route any pymongo module import to the Synchro loader."""

    def find_module(self, fullname, path=None):
        for module_name in pymongo_modules:
            if fullname.endswith(module_name):
                return SynchroModuleLoader(path)
        # Let regular module search continue.
        return None


class SynchroModuleLoader(object):
    """Loader that substitutes the synchro module for a pymongo module."""

    def __init__(self, path):
        self.path = path

    def load_module(self, fullname):
        # Every intercepted module resolves to the single synchro module.
        return synchro


if __name__ == '__main__':
    # Monkey-patch all pymongo's unittests so they think Synchro is the
    # real PyMongo.
    sys.meta_path[0:0] = [SynchroModuleFinder()]

    # Ensure time.sleep() acts as PyMongo's tests expect: background tasks
    # can run to completion while foreground pauses.
    sys.modules['time'] = synchro.TimeModule()

    nose.main(config=Config(plugins=PluginManager()),
              addplugins=[SynchroNosePlugin(), Skip(), Xunit()])
if __name__ == '__main__':
    # Monkey-patch all pymongo's unittests so they think Synchro is the
    # real PyMongo.
    sys.meta_path[0:0] = [SynchroModuleFinder()]

    # Ensure time.sleep() acts as PyMongo's tests expect: background tasks
    # can run to completion while foreground pauses.
    sys.modules['time'] = synchro.TimeModule()

    # Optional flag: after the run, verify every exclude pattern was used.
    if '--check-exclude-patterns' in sys.argv:
        check_exclude_patterns = True
        sys.argv.remove('--check-exclude-patterns')
    else:
        check_exclude_patterns = False

    success = nose.run(
        config=Config(plugins=PluginManager()),
        addplugins=[SynchroNosePlugin(), Skip(), Xunit()])
    if not success:
        sys.exit(1)

    if check_exclude_patterns:
        # Any pattern that matched nothing is stale — fail loudly.
        unused_module_pats = set(excluded_modules) - excluded_modules_matched
        assert not unused_module_pats, "Unused module patterns: %s" % (
            unused_module_pats, )
        unused_test_pats = set(excluded_tests) - excluded_tests_matched
        assert not unused_test_pats, "Unused test patterns: %s" % (
            unused_test_pats, )
# Monkey-patch all pymongo's unittests so they think Synchro is the # real PyMongo. sys.meta_path[0:0] = [SynchroModuleFinder()] # Ensure time.sleep() acts as PyMongo's tests expect: background tasks # can run to completion while foreground pauses. sys.modules['time'] = synchro.TimeModule() if '--check-exclude-patterns' in sys.argv: check_exclude_patterns = True sys.argv.remove('--check-exclude-patterns') else: check_exclude_patterns = False success = nose.run(config=Config(plugins=PluginManager()), addplugins=[SynchroNosePlugin(), Skip(), Xunit()]) if not success: sys.exit(1) if check_exclude_patterns: unused_module_pats = set(excluded_modules) - excluded_modules_matched assert not unused_module_pats, "Unused module patterns: %s" % ( unused_module_pats, ) unused_test_pats = set(excluded_tests) - excluded_tests_matched assert not unused_test_pats, "Unused test patterns: %s" % ( unused_test_pats, )
def makeSuite(self):
    """Build a one-test suite whose single test always raises SkipTest."""
    class Skip(TestCase):
        def runTest(self):
            raise SkipTest
    suite = TestSuite([Skip()])
    return suite