def test_that_links_in_the_events_page_return_200_code(self, mozwebqa):
    """Collect every link from the events page and assert they all respond OK."""
    crawler = LinkCrawler(mozwebqa)
    collected = crawler.collect_links("/events/")
    Assert.greater(
        len(collected), 0, "The link crawler did not find any urls to crawl")
    all_ok, bad_urls = crawler.verify_status_codes_are_ok(collected)
    failure_detail = "%s bad links found. " % len(bad_urls) + ", ".join(bad_urls)
    Assert.true(all_ok, failure_detail)
def test_that_links_in_the_labs_page_return_200_code(self, mozwebqa):
    """Collect every link from the labs page and assert they all respond OK."""
    crawler = LinkCrawler(mozwebqa)
    found_links = crawler.collect_links('/labs/')
    Assert.greater(
        len(found_links), 0, 'The link crawler did not find any urls to crawl')
    all_ok, bad_urls = crawler.verify_status_codes_are_ok(found_links)
    Assert.true(
        all_ok, '%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_events_page_return_200_code(self, mozwebqa):
    """Crawl the events page's wrapper element and assert every link is OK."""
    crawler = LinkCrawler(mozwebqa)
    # Restrict crawling to links inside the element with id="wrapper".
    wrapper_links = crawler.collect_links('/events/', id='wrapper')
    Assert.greater(
        len(wrapper_links), 0, 'The link crawler did not find any urls to crawl')
    all_ok, bad_urls = crawler.verify_status_codes_are_ok(wrapper_links)
    message = '%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
    Assert.true(all_ok, message)
def test_that_links_in_the_about_page_return_200_code(self, base_url):
    """Verify every link collected from the about page's main section is OK."""
    crawler = LinkCrawler(base_url)
    about_links = crawler.collect_links('/about', id='main')
    assert len(about_links) > 0
    # verify_status_code_is_ok returns True for an OK link, otherwise a
    # description of the failure; keep only the failures.
    bad_urls = [outcome
                for outcome in map(crawler.verify_status_code_is_ok, about_links)
                if outcome is not True]
    assert 0 == len(bad_urls), u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
def test_that_links_in_footer_return_200_code(self, base_url):
    """Check that each footer link on the home page responds with an OK status."""
    crawler = LinkCrawler(base_url)
    footer_links = crawler.collect_links("/", name="footer")
    assert len(footer_links) > 0
    # Anything other than True from the verifier describes a broken link.
    bad_urls = [result
                for result in (crawler.verify_status_code_is_ok(link) for link in footer_links)
                if result is not True]
    assert 0 == len(bad_urls), u"%s bad links found. " % len(bad_urls) + ", ".join(bad_urls)
def test_that_links_in_footer_return_200_code(self, base_url):
    """Crawl the home page footer and fail if any link is not reachable."""
    crawler = LinkCrawler(base_url)
    links = crawler.collect_links('/', name='footer')
    assert len(links) > 0
    bad_urls = []
    for link in links:
        verdict = crawler.verify_status_code_is_ok(link)
        if verdict is not True:
            # The verdict describes the broken link; collect it for the report.
            bad_urls.append(verdict)
    failure_report = u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
    assert 0 == len(bad_urls), failure_report
def test_that_links_in_the_about_page_return_200_code(self, mozwebqa):
    """Crawl the about page's main section and assert every link is reachable."""
    crawler = LinkCrawler(mozwebqa)
    main_links = crawler.collect_links("/about", id="main")
    Assert.greater(len(main_links), 0, u"something went wrong. no links found.")
    bad_urls = []
    for candidate in main_links:
        outcome = crawler.verify_status_code_is_ok(candidate)
        if outcome is not True:
            bad_urls.append(outcome)
    report = u"%s bad links found. " % len(bad_urls) + ", ".join(bad_urls)
    Assert.equal(0, len(bad_urls), report)
def test_that_links_in_the_faq_page_return_200_code(self, mozwebqa):
    """Verify every link inside the FAQ page wrapper returns an OK status."""
    crawler = LinkCrawler(mozwebqa)
    faq_links = crawler.collect_links("/faq/", id="wrapper")
    Assert.greater(
        len(faq_links), 0, "The link crawler did not find any urls to crawl")
    # Keep the failure description for every link that is not OK.
    bad_urls = [status
                for status in map(crawler.verify_status_code_is_ok, faq_links)
                if status is not True]
    Assert.equal(
        0, len(bad_urls),
        "%s bad links found. " % len(bad_urls) + ", ".join(bad_urls))
def test_home_page_links(self, mozwebqa):
    """Crawl all links in the home page content area and assert each is OK."""
    crawler = LinkCrawler(mozwebqa)
    content_links = crawler.collect_links('/', id='content')
    Assert.greater(len(content_links), 0, u'Something went wrong. No links found.')
    bad_urls = [check
                for check in (crawler.verify_status_code_is_ok(u) for u in content_links)
                if check is not True]
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_about_page_return_200_code(self, mozwebqa):
    """Verify each link collected from the about page's main section is OK."""
    crawler = LinkCrawler(mozwebqa)
    page_links = crawler.collect_links('/about', id='main')
    Assert.greater(len(page_links), 0, u'something went wrong. no links found.')
    bad_urls = []
    for page_link in page_links:
        verdict = crawler.verify_status_code_is_ok(page_link)
        if verdict is not True:
            # A non-True verdict is a description of the broken link.
            bad_urls.append(verdict)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_footer_return_200_code(self, base_url):
    """Check every footer link on the home page for an OK status code."""
    crawler = LinkCrawler(base_url)
    footer_urls = crawler.collect_links('/', name='footer')
    Assert.greater(len(footer_urls), 0, u'something went wrong. no links found.')
    bad_urls = [res
                for res in map(crawler.verify_status_code_is_ok, footer_urls)
                if res is not True]
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_community_page_links(self, mozwebqa):
    """Crawl links in the community page's activity stream and verify each
    crawlable link returns an OK status code.

    irc:// and mailto: links are skipped because the crawler can only
    check HTTP(S) status codes.
    """
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/community', id='activity-stream')
    bad_urls = []
    Assert.greater(len(urls), 0, u'Something went wrong. No links found.')
    for url in urls:
        # BUG FIX: the original condition was
        #   `if not 'irc://irc.mozilla.org' and not 'mailto:' in url:`
        # `not 'irc://irc.mozilla.org'` negates a truthy literal and is always
        # False, so the whole condition was always False and NO link was ever
        # verified. Use real membership tests on the url instead.
        if 'irc://irc.mozilla.org' not in url and 'mailto:' not in url:
            check_result = crawler.verify_status_code_is_ok(url)
            if check_result is not True:
                bad_urls.append(check_result)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_footer_return_200_code(self, mozwebqa):
    """Verify each footer link on the home page responds with an OK status."""
    crawler = LinkCrawler(mozwebqa)
    collected = crawler.collect_links('/', name='footer')
    Assert.greater(
        len(collected), 0, u'something went wrong. no links found.')
    bad_urls = []
    for link in collected:
        status = crawler.verify_status_code_is_ok(link)
        if status is not True:
            bad_urls.append(status)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_events_page_return_200_code(self, mozwebqa):
    """Crawl the events page wrapper and fail on any non-OK link."""
    crawler = LinkCrawler(mozwebqa)
    event_links = crawler.collect_links('/events/', id='wrapper')
    Assert.greater(
        len(event_links), 0, 'The link crawler did not find any urls to crawl')
    # Collect the failure description of every link that is not OK.
    bad_urls = [outcome
                for outcome in map(crawler.verify_status_code_is_ok, event_links)
                if outcome is not True]
    Assert.equal(
        0, len(bad_urls),
        '%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_about_page_return_200_code(self, base_url):
    """Crawl the about page's main section and assert every link is OK."""
    crawler = LinkCrawler(base_url)
    about_urls = crawler.collect_links('/about', id='main')
    Assert.greater(
        len(about_urls), 0, u'something went wrong. no links found.')
    bad_urls = [res
                for res in (crawler.verify_status_code_is_ok(u) for u in about_urls)
                if res is not True]
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_community_page_links(self, mozwebqa):
    """Crawl links in the community page's activity stream and verify each
    crawlable link returns an OK status code.

    irc:// and mailto: links are skipped because the crawler can only
    check HTTP(S) status codes.
    """
    crawler = LinkCrawler(mozwebqa)
    urls = crawler.collect_links('/community', id='activity-stream')
    bad_urls = []
    Assert.greater(
        len(urls), 0, u'something went wrong. no links found.')
    for url in urls:
        # BUG FIX: the original condition was
        #   `if not 'irc://irc.mozilla.org' and not 'mailto:' in url:`
        # `not 'irc://irc.mozilla.org'` negates a truthy literal and is always
        # False, so the whole condition was always False and NO link was ever
        # verified. Use real membership tests on the url instead.
        if 'irc://irc.mozilla.org' not in url and 'mailto:' not in url:
            check_result = crawler.verify_status_code_is_ok(url)
            if check_result is not True:
                bad_urls.append(check_result)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_labs_page_return_200_code(self, mozwebqa):
    """Crawl the labs page wrapper and fail on any non-OK link."""
    crawler = LinkCrawler(mozwebqa)
    labs_links = crawler.collect_links('/labs/', id='wrapper')
    Assert.greater(
        len(labs_links), 0, 'The link crawler did not find any urls to crawl')
    bad_urls = []
    for labs_link in labs_links:
        verdict = crawler.verify_status_code_is_ok(labs_link)
        if verdict is not True:
            # A non-True verdict describes the broken link.
            bad_urls.append(verdict)
    Assert.equal(
        0, len(bad_urls),
        '%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_services_page_return_200_code(self, base_url, selenium, vouched_user):
    """Log in as a vouched user, open developer settings, and verify that
    every listed service URL returns an OK status code."""
    home_page = Home(base_url, selenium)
    home_page.login(vouched_user['email'], vouched_user['password'])
    settings = home_page.header.click_settings_menu_item()
    developer = settings.developer
    crawler = LinkCrawler(base_url)
    service_urls = developer.get_services_urls()
    assert len(service_urls) > 0
    # Keep the failure description for any service URL that is not OK.
    bad_urls = [result
                for result in map(crawler.verify_status_code_is_ok, service_urls)
                if result is not True]
    assert 0 == len(bad_urls), u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
def test_that_links_in_the_services_page_return_200_code(self, base_url, selenium, vouched_user):
    """Log in as a vouched user, open developer settings, and verify that
    every listed service URL returns an OK status code.

    NOTE(review): this variant logs in with the email only — presumably a
    passwordless login flow; confirm against the Home page object.
    """
    home_page = Home(base_url, selenium)
    home_page.login(vouched_user['email'])
    settings = home_page.header.click_settings_menu_item()
    developer = settings.developer
    crawler = LinkCrawler(base_url)
    service_urls = developer.get_services_urls()
    assert len(service_urls) > 0
    bad_urls = [outcome
                for outcome in (crawler.verify_status_code_is_ok(u) for u in service_urls)
                if outcome is not True]
    assert 0 == len(bad_urls), u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls)
def test_that_links_in_the_services_page_return_200_code(self, mozwebqa):
    """Log in, open the edit-profile page, and verify every listed service
    URL returns an OK status code."""
    home_page = Home(mozwebqa)
    home_page.login()
    edit_profile_page = home_page.header.click_edit_profile_menu_item()
    crawler = LinkCrawler(mozwebqa)
    service_urls = edit_profile_page.get_services_urls()
    Assert.greater(
        len(service_urls), 0, u'something went wrong. no links found.')
    bad_urls = []
    for service_url in service_urls:
        verdict = crawler.verify_status_code_is_ok(service_url)
        if verdict is not True:
            bad_urls.append(verdict)
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))
def test_that_links_in_the_services_page_return_200_code(self, mozwebqa, vouched_user):
    """Log in as a vouched user, open the edit-profile page, and verify every
    listed service URL returns an OK status code."""
    home_page = Home(mozwebqa)
    home_page.login(vouched_user['email'], vouched_user['password'])
    edit_profile_page = home_page.header.click_edit_profile_menu_item()
    crawler = LinkCrawler(mozwebqa)
    service_urls = edit_profile_page.get_services_urls()
    Assert.greater(
        len(service_urls), 0, u'something went wrong. no links found.')
    # Keep the failure description for any service URL that is not OK.
    bad_urls = [res
                for res in map(crawler.verify_status_code_is_ok, service_urls)
                if res is not True]
    Assert.equal(
        0, len(bad_urls),
        u'%s bad links found. ' % len(bad_urls) + ', '.join(bad_urls))