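# NOTE: the imports below were not part of the original listing; they are a
# best-effort reconstruction of what these two test classes need. The
# standard library and PyGithub imports are certain; the w3af-internal
# module paths are assumptions based on the w3af source layout and may
# differ in your checkout.
import os
import re
import shutil

from github import Github

from w3af import ROOT_PATH
from w3af.core.controllers.ci.moth import get_moth_http
from w3af.core.controllers.easy_contribution.github_issues import OAUTH_TOKEN
from w3af.core.ui.console.console_ui import ConsoleUI
from w3af.core.ui.console.tests.helper import ConsoleTestHelper
from w3af.plugins.tests.helper import PluginTest, PluginConfig
# Assumed location of w3af's file-based lock helper:
from w3af.core.controllers.misc.file_lock import FileLock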
class TestCrawlExceptions(PluginTest):

    target_url = get_moth_http('/grep/csp/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'crawl': (PluginConfig('failing_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),)
            }
        },
    }

    def setUp(self):
        """
        This is a rather complex setUp since I need to move the
        failing_spider.py plugin to the plugin directory in order to be able
        to run it afterwards.

        In the tearDown method, I'll remove the file.
        """
        self.src = os.path.join(ROOT_PATH, 'plugins', 'tests', 'crawl',
                                'failing_spider.py')
        self.dst = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                'failing_spider.py')

        # This lock prevents others (which also implement the locking) from
        # removing our file
        self.lock = FileLock(self.dst, timeout=60)
        self.lock.acquire()

        shutil.copy(self.src, self.dst)

        super(TestCrawlExceptions, self).setUp()

    def tearDown(self):
        if os.path.exists(self.dst):
            os.remove(self.dst)

        # Remove the compiled .pyc file as well
        if os.path.exists(self.dst + 'c'):
            os.remove(self.dst + 'c')

        # Allow others to create the failing_spider.py file
        self.lock.release()

        super(TestCrawlExceptions, self).tearDown()

    def test_spider_found_urls(self):
        cfg = self._run_configs['cfg']

        # This is a very special case in which I don't want the assertion in
        # the _scan() to trigger on me!
        self._scan(cfg['target'], cfg['plugins'], assert_exceptions=False)

        caught_exceptions = self.w3afcore.exception_handler.get_all_exceptions()
        self.assertEqual(len(caught_exceptions), 1)

        edata = caught_exceptions[0]
        self.assertEqual(edata.get_where(), 'crawl.failing_spider:45')

        # I tried to make some more advanced unittests here, but it was very
        # difficult to get a result that was NOT random from failing_spider
        # + exception_handler.
        #
        # Simply test that the scan was able to finish without a crash
        # generated by the failing_spider.py plugin.
        self.assertTrue(True)
class TestConsoleBugReport(ConsoleTestHelper):
    """
    Run a scan from the console UI (which fails with a bug) and report it to
    a github issue.
    """
    def setUp(self):
        """
        This is a rather complex setUp since I need to move the
        failing_spider.py plugin to the plugin directory in order to be able
        to run it afterwards.

        In the tearDown method, I'll remove the file.
        """
        self.src = os.path.join(ROOT_PATH, 'plugins', 'tests', 'crawl',
                                'failing_spider.py')
        self.dst = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                'failing_spider.py')

        # This lock prevents others (which also implement the locking) from
        # removing our file
        self.lock = FileLock(self.dst, timeout=60)
        self.lock.acquire()

        shutil.copy(self.src, self.dst)

        super(TestConsoleBugReport, self).setUp()

    def tearDown(self):
        if os.path.exists(self.dst):
            os.remove(self.dst)

        # Remove the compiled .pyc file as well
        if os.path.exists(self.dst + 'c'):
            os.remove(self.dst + 'c')

        # Allow others to create the failing_spider.py file
        self.lock.release()

        super(TestConsoleBugReport, self).tearDown()

    def test_buggy_scan(self):
        target = get_moth_http('/grep/csp/')
        commands_to_run = ['plugins',
                           'output console',

                           'crawl failing_spider',
                           'crawl config failing_spider',
                           'set only_forward true',
                           'back',

                           'grep path_disclosure',
                           'back',

                           'target',
                           'set target %s' % (target,),
                           'back',

                           'start',

                           'bug-report',
                           'summary',
                           'report',

                           'exit']

        expected = ('During the current scan (with id: ',
                    'A "Exception" exception was found while running crawl.failing_spider on ',
                    'New URL found by failing_spider plugin: ',
                    ' [1/1] Bug with id 0 reported at https://github.com/andresriancho/w3af/issues/')

        self.console = ConsoleUI(commands=commands_to_run, do_upd=False)
        self.console.sh()

        caught_exceptions = self.console._w3af.exception_handler.get_all_exceptions()
        self.assertEqual(len(caught_exceptions), 1, self._mock_stdout.messages)

        assert_result, msg = self.startswith_expected_in_output(expected)
        self.assertTrue(assert_result, msg)

        found_errors = self.error_in_output(['No such file or directory'])
        self.assertFalse(found_errors)

        # Clear the exceptions, we don't need them anymore.
        self.console._w3af.exception_handler.clear()

        # Close the issue that was just created at github, so the repository
        # doesn't fill up with test tickets
        issue_id_re = re.compile(r'https://github.com/andresriancho/w3af/issues/(\d+)')
        for line in self._mock_stdout.messages:
            mo = issue_id_re.search(line)
            if mo is not None:
                issue_id = mo.group(1)

                gh = Github(OAUTH_TOKEN)
                repo = gh.get_user('andresriancho').get_repo('w3af')
                issue = repo.get_issue(int(issue_id))
                issue.edit(state='closed')
                break
        else:
            self.assertTrue(False, 'Did NOT close test ticket.')