def test_output_write(tmpdir):
    """output_write() appends a line to the file given via --output-errors."""
    log_file = tmpdir.join("errorlog.txt")
    args = link_checker.parse_argument(
        ["--output-errors", log_file.strpath]
    )
    link_checker.output_write(args, "Output enabled")
    args.output_errors.flush()
    # The message is written with a trailing newline
    assert log_file.read() == "Output enabled\n"
def test_write_response(tmpdir):
    """write_response() logs each broken link and its anchor to the
    --output-errors file and returns the number of errors caught.
    """
    # Route error output to a temporary file
    log_file = tmpdir.join("errorlog.txt")
    args = link_checker.parse_argument(
        ["--output-errors", log_file.strpath]
    )
    # Markup from which valid_anchors is extracted
    markup = (
        "<a href='http://httpbin.org/status/200'>Response 200</a>,"
        " <a href='file://link3'>Invalid Scheme</a>,"
        " <a href='http://httpbin.org/status/400'>Response 400</a>"
    )
    soup = BeautifulSoup(markup, "lxml")
    valid_anchors = soup.find_all("a")
    # Fetch each link so write_response() receives real response objects
    all_links = [
        "http://httpbin.org/status/200",
        "file://link3",
        "http://httpbin.org/status/400",
    ]
    pending = (grequests.get(link) for link in all_links)
    response = grequests.map(
        pending, exception_handler=link_checker.exception_handler
    )
    base_url = "https://baseurl/goes/here"
    license_name = "by-cc-nd_2.0"
    caught_errors = link_checker.write_response(
        args,
        all_links,
        response,
        base_url,
        license_name,
        valid_anchors,
        license_name,
        False,
    )
    # The invalid-scheme link and the 400 response both count as errors
    assert caught_errors == 2
    args.output_errors.flush()
    lines = log_file.readlines()
    assert lines[0] == "\n"
    assert lines[1] == "by-cc-nd_2.0\n"
    assert lines[2] == "URL: https://baseurl/goes/here\n"
    assert lines[3] == f' {"Invalid Schema":<24}file://link3\n'
    assert lines[4] == f'{"":<26}<a href="file://link3">Invalid Scheme</a>\n'
    assert lines[5] == f' {"400":<24}http://httpbin.org/status/400\n'
    assert lines[6] == (
        f'{"":<26}<a href="http://httpbin.org/status/400">Response 400</a>\n'
    )
def test_output_summary(reset_global, tmpdir):
    """output_summary() writes a formatted broken-link report to the
    --output-errors file.
    """
    log_file = tmpdir.join("errorlog.txt")
    args = link_checker.parse_argument(
        ["--output-errors", log_file.strpath]
    )
    # Pretend two distinct broken links were found across three files
    link_checker.MAP_BROKEN_LINKS = {
        "https://link1.demo": [
            "https://file1.url/here",
            "https://file2.url/goes/here",
        ],
        "https://link2.demo": ["https://file4.url/here"],
    }
    all_links = ["some link"] * 5
    link_checker.output_summary(args, all_links, 3)
    args.output_errors.flush()
    lines = log_file.readlines()
    assert lines[0] == "\n"
    assert lines[1] == "\n"
    assert lines[2] == "***************************************\n"
    # NOTE(review): leading whitespace in this literal may have been
    # collapsed by formatting — verify against output_summary()'s banner
    assert lines[3] == " SUMMARY\n"
    assert lines[4] == "***************************************\n"
    assert lines[5] == "\n"
    # Timestamp varies per run; only the prefix is stable
    assert str(lines[6]).startswith("Timestamp:")
    assert lines[7] == "Total files checked: 5\n"
    assert lines[8] == "Number of error links: 3\n"
    assert lines[9] == "Number of unique broken links: 2\n"
    assert lines[10] == "\n"
    assert lines[11] == "\n"
    assert lines[12] == "Broken link - https://link1.demo found in:\n"
    assert lines[13] == "https://file1.url/here\n"
    assert lines[14] == "https://file2.url/goes/here\n"
    assert lines[15] == "\n"
    assert lines[16] == "Broken link - https://link2.demo found in:\n"
    assert lines[17] == "https://file4.url/here\n"
def test_get_scrapable_links():
    """get_scrapable_links() keeps only scrapable anchors (absolute and
    relative http(s) links), resolving relative hrefs against base_url.
    """
    args = link_checker.parse_argument([])
    # One anchor of each kind: no href, fragment, mailto, absolute, relative
    markup = (
        "<a name='hello'>without href</a>,"
        " <a href='#hello'>internal link</a>,"
        " <a href='mailto:[email protected]'>mailto protocol</a>,"
        " <a href='https://creativecommons.ca'>Absolute link</a>,"
        " <a href='/index'>Relative Link</a>"
    )
    soup = BeautifulSoup(markup, "lxml")
    anchors = soup.find_all("a")
    base_url = "https://www.demourl.com/dir1/dir2"
    valid_anchors, valid_links, _ = link_checker.get_scrapable_links(
        args, base_url, anchors, None, False
    )
    # Only the absolute and relative anchors survive the filter
    assert str(valid_anchors) == (
        '[<a href="https://creativecommons.ca">Absolute link</a>,'
        ' <a href="/index">Relative Link</a>]'
    )
    assert str(valid_links) == (
        "['https://creativecommons.ca', 'https://www.demourl.com/index']"
    )
def test_parse_argument(tmpdir):
    """parse_argument() maps CLI flags onto the expected option values."""
    # Defaults
    args = link_checker.parse_argument([])
    assert args.log_level == 30
    assert bool(args.output_errors) is False
    assert args.local is False
    assert args.root_url == "https://creativecommons.org"
    # --local
    args = link_checker.parse_argument(["--local"])
    assert args.local is True
    # Log levels: -q/--quiet raises, -v/--verbose lowers, both are
    # capped, and mixed flags offset each other
    for argv, expected_level in [
        (["-q"], 40),
        (["-qq"], 50),
        (["-qqq"], 50),
        (["-q", "--quiet"], 50),
        (["-v"], 20),
        (["-vv"], 10),
        (["-vvv"], 10),
        (["-v", "--verbose"], 10),
        (["-vq"], 30),
        (["-vvq"], 20),
        (["-vqq"], 40),
    ]:
        args = link_checker.parse_argument(argv)
        assert args.log_level == expected_level
    # --output-errors without a value falls back to "errorlog.txt"
    args = link_checker.parse_argument(["--output-errors"])
    assert bool(args.output_errors) is True
    assert args.output_errors.name == "errorlog.txt"
    # --output-errors with an explicit path
    log_file = tmpdir.join("errorlog.txt")
    args = link_checker.parse_argument(
        ["--output-errors", log_file.strpath]
    )
    assert bool(args.output_errors) is True
    assert args.output_errors.name == log_file.strpath
def test_create_base_link(filename, result):
    """create_base_link() derives the expected base URL for a file path.

    Parametrized elsewhere: (filename, result) pairs come from the test's
    decorator, which is outside this view.
    """
    args = link_checker.parse_argument([])
    assert link_checker.create_base_link(args, filename) == result