Esempio n. 1
0
 def test_that_all_links_are_valid(self, mozwebqa):
     """Fetch the notes page and assert every anchor's href returns HTTP 200.

     Collects all failing URLs before asserting, so one run reports every
     broken link instead of stopping at the first one.
     """
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulStoneSoup(page_response.content)
     bad_urls = []
     links = html.findAll("a")
     for link in links:
         # Anchors without an href attribute (e.g. named anchors) would
         # raise KeyError on link["href"]; skip them instead of crashing.
         href = link.get("href")
         if not href:
             continue
         url = self.make_absolute(href, mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append("%s is not a valid url - status code: %s." % (url, response_code))
     Assert.equal(0, len(bad_urls), "%s bad urls found: " % len(bad_urls) + ", ".join(bad_urls))
Esempio n. 2
0
 def test_that_all_links_are_valid(self, mozwebqa):
     """Verify that every link on the notes page responds with HTTP 200."""
     notes_page = Notes(mozwebqa)
     page_url = mozwebqa.base_url + notes_page.notes_page_url
     # Parse the rendered notes page so we can walk its anchors.
     soup = BeautifulStoneSoup(requests.get(page_url).content)
     failures = []
     for anchor in soup.findAll('a'):
         target = self.make_absolute(anchor['href'], mozwebqa.base_url)
         status = notes_page.get_response_code(target)
         if status != requests.codes.ok:
             failures.append('%s is not a valid url - status code: %s.' % (target, status))
     # Report every broken link at once rather than failing on the first.
     Assert.equal(0, len(failures), '%s bad urls found: ' % len(failures) + ', '.join(failures))
Esempio n. 3
0
 def test_that_all_links_are_valid(self, mozwebqa):
     """Fetch the notes page and assert every anchor's href returns HTTP 200.

     Links pointing at blog.mozilla.* are deliberately skipped (issue 408:
     those links do not resolve from the jenkins environment).
     """
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulSoup(page_response.content)
     bad_urls = []
     links = html.findAll('a')
     for link in links:
         # Anchors without an href attribute (e.g. named anchors) would
         # raise KeyError on link['href']; skip them instead of crashing.
         # Fetch the attribute once rather than twice per iteration.
         href = link.get('href')
         if not href:
             continue
         # see both blog.mozilla.com and blog.mozilla.org domains
         if 'blog.mozilla.' in href:
             # skip for issue 408: blog.m.o links not working via jenkins
             continue
         url = self.make_absolute(href, mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
     Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
Esempio n. 4
0
 def test_that_all_links_are_valid(self, mozwebqa):
     """Every anchor on the notes page must resolve with an HTTP 200 status."""
     notes_page = Notes(mozwebqa)
     page_response = requests.get(mozwebqa.base_url + notes_page.notes_page_url)
     soup = BeautifulSoup(page_response.content)
     broken = []
     for anchor in soup.findAll('a'):
         href = anchor['href']
         # see both blog.mozilla.com and blog.mozilla.org domains
         if 'blog.mozilla.' in href:
             # skip for issue 408: blog.m.o links not working via jenkins
             continue
         absolute = self.make_absolute(href, mozwebqa.base_url)
         status = notes_page.get_response_code(absolute)
         if status != requests.codes.ok:
             broken.append('%s is not a valid url - status code: %s.' %
                           (absolute, status))
     # An empty failure list means all links validated.
     assert [] == broken