Example #1
 def test_all_links(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     notes_page.go_to_page()
     Assert.contains("Notes", notes_page.firefox_notes_header_text)
     bad_urls = []
     for url in notes_page.all_links:
         if not notes_page.is_valid_link(url):
             bad_urls.append('%s is not a valid url' % url)
     Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
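
Example #1 depends on two page-object helpers that the snippet does not define: `all_links` (presumably a property collecting the href of every anchor on the page) and `is_valid_link`. The sketch below is only an assumed stand-in for the latter, written with requests (HEAD first, with a GET fallback for servers that reject HEAD); it is not the project's actual implementation.

import requests

def is_valid_link(url, timeout=10):
    # Hypothetical stand-in for notes_page.is_valid_link: a URL counts as
    # valid when the server answers with HTTP 200. HEAD is tried first to
    # avoid downloading the body; servers that reject HEAD (405) get a GET.
    try:
        response = requests.head(url, allow_redirects=True, timeout=timeout)
        if response.status_code == 405:
            response = requests.get(url, allow_redirects=True, timeout=timeout)
        return response.status_code == requests.codes.ok
    except requests.RequestException:
        return False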
Example #2
 def test_that_all_links_are_valid(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulStoneSoup(page_response.content)
     bad_urls = []
     links = html.findAll("a")
     for link in links:
         url = self.make_absolute(link["href"], mozwebqa.base_url)
         if not notes_page.is_valid_link(url):
             bad_urls.append("%s is not a valid url" % url)
     Assert.equal(0, len(bad_urls), "%s bad urls found: " % len(bad_urls) + ", ".join(bad_urls))
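
From Example #2 onwards the tests resolve each href with `self.make_absolute(href, base_url)`, whose implementation is not part of the snippets. Assuming it does nothing more than resolve relative paths against the base URL, a minimal version is a thin wrapper around urljoin:

try:
    from urllib.parse import urljoin   # Python 3
except ImportError:
    from urlparse import urljoin       # Python 2, matching the era of these tests

def make_absolute(url, base_url):
    # Hypothetical stand-in for self.make_absolute: relative hrefs such as
    # '/en-US/firefox/notes/' are resolved against base_url, while hrefs that
    # already carry a scheme are returned unchanged by urljoin.
    return urljoin(base_url, url)

With that assumption, make_absolute('/en-US/firefox/', 'https://www.mozilla.org') yields 'https://www.mozilla.org/en-US/firefox/', and an already absolute href passes through untouched.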
Example #3
 def test_that_all_links_are_valid(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulSoup(page_response.content)
     bad_urls = []
     links = html.findAll('a')
     for link in links:
         url = self.make_absolute(link['href'], mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
     Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
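
Examples #3 to #6 replace the boolean `is_valid_link` check with `notes_page.get_response_code`, so the failure message can report the actual status code. That helper is not shown either; assuming it simply issues a GET and reports the status, a minimal sketch would be:

import requests

def get_response_code(url, timeout=10):
    # Hypothetical stand-in for notes_page.get_response_code: return the HTTP
    # status code for url, or 0 when the request fails outright (DNS error,
    # connection refused, timeout, ...).
    try:
        return requests.get(url, allow_redirects=True, timeout=timeout).status_code
    except requests.RequestException:
        return 0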
Example #4
 def test_that_all_links_are_valid(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulStoneSoup(page_response.content)
     bad_urls = []
     links = html.findAll('a')
     for link in links:
         url = self.make_absolute(link['href'], mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
     Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
Example #5
 def test_that_all_links_are_valid(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulSoup(page_response.content)
     bad_urls = []
     links = html.findAll('a')
     for link in links:
         # matches both the blog.mozilla.com and blog.mozilla.org domains
         if 'blog.mozilla.' in link['href']:
             # skipped for issue 408: blog.mozilla.* links are not reachable via Jenkins
             continue
         url = self.make_absolute(link['href'], mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
     Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
Example #6
 def test_that_all_links_are_valid(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     url = mozwebqa.base_url + notes_page.notes_page_url
     page_response = requests.get(url)
     html = BeautifulSoup(page_response.content)
     bad_urls = []
     links = html.findAll('a')
     for link in links:
         # matches both the blog.mozilla.com and blog.mozilla.org domains
         if 'blog.mozilla.' in link['href']:
             # skipped for issue 408: blog.mozilla.* links are not reachable via Jenkins
             continue
         url = self.make_absolute(link['href'], mozwebqa.base_url)
         response_code = notes_page.get_response_code(url)
         if response_code != requests.codes.ok:
             bad_urls.append('%s is not a valid url - status code: %s.' %
                             (url, response_code))
     assert [] == bad_urls
Example #7
 def test_that_notes_page_is_reachable(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     notes_page.go_to_page()
     Assert.contains("Notes", notes_page.firefox_notes_header_text)
Example #8
 def test_that_notes_page_is_reachable(self, mozwebqa):
     notes_page = Notes(mozwebqa)
     notes_page.go_to_page()
     assert 'Notes' in notes_page.firefox_notes_header_text
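
The reachability tests in Examples #7 and #8 only exercise the page object: `Notes(mozwebqa)`, `go_to_page()` and `firefox_notes_header_text` are defined elsewhere in the suite. A minimal Selenium-flavoured skeleton with that shape might look as follows; the URL path, the header locator, and the assumption that the mozwebqa fixture exposes a WebDriver as `.selenium` are guesses, not the project's actual code.

from selenium.webdriver.common.by import By

class Notes(object):
    # Hypothetical skeleton of the Notes page object used by the tests above.
    notes_page_url = '/en-US/firefox/notes/'                 # assumed path
    _header_locator = (By.CSS_SELECTOR, '#main-feature h1')  # assumed locator

    def __init__(self, mozwebqa):
        self.selenium = mozwebqa.selenium   # assumed attribute on the fixture
        self.base_url = mozwebqa.base_url

    def go_to_page(self):
        self.selenium.get(self.base_url + self.notes_page_url)

    @property
    def firefox_notes_header_text(self):
        return self.selenium.find_element(*self._header_locator).text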