Example No. 1
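The examples below are excerpts from trafilatura's command-line test suite and omit their imports. A minimal preamble that would make them runnable, assuming the helper functions live in trafilatura.cli and trafilatura.cli_utils as in that test suite and that RESOURCES_DIR points to the accompanying test resources folder, might look like this:

# hypothetical preamble, not part of the original snippets; module paths are assumed
import io
import os
import subprocess
import sys
from collections import deque
from contextlib import redirect_stdout
from datetime import datetime
from unittest.mock import patch

from trafilatura import cli, cli_utils
from trafilatura.cli import parse_args
from trafilatura.cli_utils import (add_to_compressed_dict, download_queue_processing,
                                   draw_backoff_url, load_download_buffer,
                                   url_processing_pipeline)
from trafilatura.settings import use_config

# assumed location of the test resources referenced throughout the examples
RESOURCES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'resources')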
def test_queue():
    'Test creation, modification and download of URL queues.'
    # test conversion and storage
    inputdict = add_to_compressed_dict(['ftps://www.example.org/'])
    assert inputdict == dict()
    inputdict = add_to_compressed_dict(['https://www.example.org/'])
    # CLI args
    testargs = ['', '--list']
    with patch.object(sys, 'argv', testargs):
        args = parse_args(testargs)
    assert url_processing_pipeline(args, inputdict) is None
    # single/multiprocessing
    testargs = ['', '-v']
    with patch.object(sys, 'argv', testargs):
        args = parse_args(testargs)
    domain_dict = dict()
    domain_dict['https://httpbin.org'] = deque([
        '/status/301', '/status/304', '/status/200', '/status/300',
        '/status/400', '/status/505'
    ])
    args.archived = True
    args.config_file = os.path.join(RESOURCES_DIR, 'newsettings.cfg')
    config = use_config(filename=args.config_file)
    results = download_queue_processing(domain_dict, args, None, config)
    assert len(results[0]) == 6 and results[1] is None
    # test backoff algorithm
    testdict = dict()
    backoffdict = dict()
    testdict['http://test.org'] = deque(['/1'])
    assert draw_backoff_url(testdict, backoffdict, 0,
                            set()) == ('http://test.org/1', dict(), dict(),
                                       'http://test.org')
    testdict['http://test.org'] = deque(['/1'])
    backoffdict['http://test.org'] = datetime(2019, 5, 18, 15, 17, 8, 132263)
    assert draw_backoff_url(testdict, backoffdict, 0,
                            set()) == ('http://test.org/1', dict(), dict(),
                                       'http://test.org')
    # the following case hangs, which is logical (the backoff date lies in the future):
    #testdict['http://test.org'] = deque(['/1'])
    #backoffdict['http://test.org'] = datetime(2030, 5, 18, 15, 17, 8, 132263)
    #assert cli_utils.draw_backoff_url(testdict, backoffdict, 0, 3) == ('http://test.org/1', dict(), dict(), 0)
    # download buffer
    domain_dict = {
        'https://test.org': deque(['/1', '/2', '/3']),
        'https://test2.org': deque(['/1', '/2', '/3']),
        'https://test3.org': deque(['/1', '/2', '/3']),
        'https://test4.org': deque(['/1', '/2', '/3']),
        'https://test5.org': deque(['/1', '/2', '/3']),
        'https://test6.org': deque(['/1', '/2', '/3'])
    }
    bufferlist, _, _, _ = load_download_buffer(domain_dict,
                                               dict(),
                                               0,
                                               threads=1)
    assert len(bufferlist) == 6
    bufferlist, _, _, _ = load_download_buffer(domain_dict,
                                               dict(),
                                               0,
                                               threads=2)
    assert len(bufferlist) == 6
Example No. 2
def test_climain():
    '''test arguments and main CLI entrypoint'''
    # exit status required: 0
    # Windows platforms
    if os.name == 'nt':
        trafilatura_bin = os.path.join(sys.prefix, "Scripts", "trafilatura")
    # other platforms
    else:
        trafilatura_bin = 'trafilatura'
    # help display
    assert subprocess.run([trafilatura_bin, '--help']).returncode == 0
    # piped input
    empty_input = b'<html><body></body></html>'
    assert subprocess.run([trafilatura_bin], input=empty_input).returncode == 0
    # input directory walking and processing
    env = os.environ.copy()
    if os.name == 'nt':
        # Force encoding to utf-8 for Windows (seems to be an issue only in GitHub Actions)
        env['PYTHONIOENCODING'] = 'utf-8'
    assert subprocess.run([trafilatura_bin, '--inputdir', RESOURCES_DIR],
                          env=env).returncode == 0
    # dump urls
    inputdict = add_to_compressed_dict(['https://www.example.org'])
    f = io.StringIO()
    with redirect_stdout(f):
        cli.dump_on_exit(inputdict)
    assert f.getvalue() == 'todo: https://www.example.org/\n'
Example No. 3
def test_input_filtering():
    '''test internal functions to filter urls'''
    testargs = ['']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    # load dictionary
    args.inputfile = os.path.join(RESOURCES_DIR, 'list-process.txt')
    inputdict = cli.load_input_dict(args)
    assert inputdict['https://httpbin.org'] == deque(['/status/200', '/status/404'])
    args.inputfile = os.path.join(RESOURCES_DIR, 'list-process.txt')
    args.blacklist = {'httpbin.org/status/404'}
    inputdict = cli.load_input_dict(args)
    assert inputdict['https://httpbin.org'] == deque(['/status/200'])
    # deduplication and filtering
    myinput = [
        'https://example.org/1', 'https://example.org/2', 'https://example.org/2',
        'https://example.org/3', 'https://example.org/4', 'https://example.org/5',
        'https://example.org/6'
    ]
    myblacklist = {'example.org/1', 'example.org/3', 'example.org/5'}
    inputdict = add_to_compressed_dict(myinput, myblacklist)
    assert inputdict['https://example.org'] == deque(['/2', '/4', '/6'])
    # URL in blacklist
    args.inputfile = os.path.join(RESOURCES_DIR, 'list-process.txt')
    my_urls = cli_utils.load_input_urls(args)
    my_blacklist = cli_utils.load_blacklist(os.path.join(RESOURCES_DIR, 'list-discard.txt'))
    inputdict = add_to_compressed_dict(my_urls, my_blacklist)
    assert len(inputdict) == 0
    # URL filter
    args.inputfile = os.path.join(RESOURCES_DIR, 'list-process.txt')
    my_urls = cli_utils.load_input_urls(args)
    assert len(add_to_compressed_dict(my_urls, None, ['status'], None)) == 1
    assert len(add_to_compressed_dict(my_urls, None, ['teststring'], None)) == 0
    assert len(add_to_compressed_dict(my_urls, None, ['status', 'teststring'], None)) == 1
    # malformed URLs
    inputdict = add_to_compressed_dict(['123345', 'https://www.example.org/1'], {}, None, None)
    assert len(inputdict) == 1
Example No. 4
def test_cli_pipeline():
    '''test command-line processing pipeline'''
    # straight command-line input
    #testargs = ['', '<html><body>Text</body></html>']
    #with patch.object(sys, 'argv', testargs):
    #    args = cli.parse_args(testargs)
    #f = io.StringIO()
    #with redirect_stdout(f):
    #    cli.process_args(args)
    #assert len(f.getvalue()) == 0
    # test URL listing

    # Force encoding to utf-8 for Windows in future processes spawned by multiprocessing.Pool
    os.environ['PYTHONIOENCODING'] = "utf-8"

    testargs = ['', '--list']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    assert cli_utils.url_processing_pipeline(args, dict()) is None
    # test inputlist + blacklist
    testargs = ['', '-i', os.path.join(RESOURCES_DIR, 'list-process.txt')]
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    my_urls = cli_utils.load_input_urls(args)
    assert my_urls is not None and len(my_urls) == 2
    testargs = [
        '', '-i',
        os.path.join(RESOURCES_DIR, 'list-process.txt'), '--blacklist',
        os.path.join(RESOURCES_DIR, 'list-discard.txt'), '--archived'
    ]
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    assert args.blacklist is not None
    # test backoff between domain requests
    inputdict = add_to_compressed_dict(my_urls, args.blacklist, None, None)
    reftime = datetime.now()
    cli_utils.url_processing_pipeline(args, inputdict)
    delta = (datetime.now() - reftime).total_seconds()
    assert delta > 2
    # test blacklist and empty dict
    args.blacklist = cli_utils.load_blacklist(args.blacklist)
    assert len(args.blacklist) == 2
    inputdict = add_to_compressed_dict(my_urls, args.blacklist, None, None)
    cli_utils.url_processing_pipeline(args, inputdict)
    # test backup
    testargs = ['', '--backup-dir', '/tmp/']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    cli_utils.archive_html('00Test', args)
    # test date-based exclusion
    testargs = ['', '-out', 'xml', '--with-metadata']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    with open(os.path.join(RESOURCES_DIR, 'httpbin_sample.html'), 'r') as f:
        teststring = f.read()
    assert cli.examine(teststring, args) is None
    testargs = ['', '-out', 'xml', '--only-with-metadata', '--precision']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    with open(os.path.join(RESOURCES_DIR, 'httpbin_sample.html'), 'r') as f:
        teststring = f.read()
    assert cli.examine(teststring, args) is None
    # test JSON output
    testargs = ['', '-out', 'json', '--recall']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    with open(os.path.join(RESOURCES_DIR, 'httpbin_sample.html'), 'r') as f:
        teststring = f.read()
    assert cli.examine(teststring, args) is not None
    # dry-run file processing pipeline
    testargs = ['', '--parallel', '1', '--inputdir', '/dev/null']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    cli_utils.file_processing_pipeline(args)
    # file processing pipeline on resources/
    args.inputdir = RESOURCES_DIR
    cli_utils.file_processing_pipeline(args)
    # sitemaps
    testargs = ['', '--sitemap', 'https://httpbin.org/', '--list']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    f = io.StringIO()
    with redirect_stdout(f):
        cli.process_args(args)
    assert len(f.getvalue()) == 0
    # config file
    testargs = [
        '', '--inputdir', '/dev/null', '--config-file', 'newsettings.cfg'
    ]
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    with open(os.path.join(RESOURCES_DIR, 'httpbin_sample.html'), 'r') as f:
        teststring = f.read()
    args.config_file = os.path.join(RESOURCES_DIR, args.config_file)
    # config = use_config(filename=args.config_file)
    assert cli.examine(teststring, args) is None
    # CLI options
    testargs = ['', '--links', '--images']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    #with open(os.path.join(RESOURCES_DIR, 'http_sample.html'), 'r') as f:
    #    teststring = f.read()
    #result = cli.examine(teststring, args)
    #assert '[link](testlink.html)' in result # and 'test.jpg' in result

    # Crawling
    testargs = ['', '--crawl', 'https://httpbin.org/html']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    f = io.StringIO()
    with redirect_stdout(f):
        cli_utils.cli_crawler(args)
    assert len(f.getvalue()) == 0
    # links permitted
    testargs = [
        '', '--crawl', 'https://httpbin.org/links/1/1', '--list', '--parallel',
        '1'
    ]
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    f = io.StringIO()
    with redirect_stdout(f):
        cli_utils.cli_crawler(args)
    assert f.getvalue() == 'https://httpbin.org/links/1/0\n'
    # 0 links permitted
    args.crawl = 'https://httpbin.org/links/4/4'
    f = io.StringIO()
    with redirect_stdout(f):
        cli_utils.cli_crawler(args, n=0)
    # print(f.getvalue())
    assert len(f.getvalue().split('\n')) == 5

    # Exploration (Sitemap + Crawl)
    testargs = ['', '--explore', 'https://httpbin.org/html']
    with patch.object(sys, 'argv', testargs):
        args = cli.parse_args(testargs)
    f = io.StringIO()
    with redirect_stdout(f):
        cli.process_args(args)
    assert len(f.getvalue()) == 0
Example No. 5
def test_queue():
    'Test creation, modification and download of URL queues.'
    # test conversion and storage
    inputdict = add_to_compressed_dict(['ftps://www.example.org/', 'http://'])
    assert inputdict == dict()
    inputdict = add_to_compressed_dict(['https://www.example.org/'])
    # CLI args
    testargs = ['', '--list']
    with patch.object(sys, 'argv', testargs):
        args = parse_args(testargs)
    assert url_processing_pipeline(args, inputdict) is None
    # single/multiprocessing
    testargs = ['', '-v']
    with patch.object(sys, 'argv', testargs):
        args = parse_args(testargs)
    domain_dict = {
        'https://httpbin.org': deque(
            [
                '/status/301',
                '/status/304',
                '/status/200',
                '/status/300',
                '/status/400',
                '/status/505',
            ]
        )
    }
    args.archived = True
    args.config_file = os.path.join(RESOURCES_DIR, 'newsettings.cfg')
    config = use_config(filename=args.config_file)
    config['DEFAULT']['SLEEP_TIME'] = '0.2'
    results = download_queue_processing(domain_dict, args, None, config)
    # fixed: the /301 response is missing from the results, probably for a good reason
    assert len(results[0]) == 5 and results[1] is None
    # test backoff algorithm
    backoffdict = {}
    testdict = {'http://test.org': deque(['/1'])}
    assert draw_backoff_url(testdict, backoffdict, 0) == ('http://test.org/1', dict(), dict())
    testdict['http://test.org'] = deque(['/1'])
    backoffdict['http://test.org'] = datetime(2019, 5, 18, 15, 17, 8, 132263)
    assert draw_backoff_url(testdict, backoffdict, 0) == ('http://test.org/1', dict(), dict())
    # concurrent domains
    testdict = {}
    backoffdict = {}
    testdict['http://test.org'] = deque(['/1'])
    testdict['http://example.org'] = deque(['/1'])
    # simulate recent request
    backoffdict['http://test.org'] = datetime.now()
    # must return the other domain
    test = draw_backoff_url(testdict, backoffdict, 5)
    assert (test[0], test[1]) == ('http://example.org/1', {'http://test.org': deque(['/1'])})
    print(test)
    assert test[2] != {}
    # sleeps and returns the rest
    assert draw_backoff_url(testdict, backoffdict, 1) == ('http://test.org/1', {}, {})
    # the following case hangs, which is logical (the backoff date lies in the future):
    #testdict['http://test.org'] = deque(['/1'])
    #backoffdict['http://test.org'] = datetime(2030, 5, 18, 15, 17, 8, 132263)
    #assert draw_backoff_url(testdict, backoffdict, 0) == ('http://test.org/1', dict(), dict())
    # download buffer
    domain_dict = {
        'https://test.org': deque(['/1', '/2', '/3']),
        'https://test2.org': deque(['/1', '/2', '/3']),
        'https://test3.org': deque(['/1', '/2', '/3']),
        'https://test4.org': deque(['/1', '/2', '/3']),
        'https://test5.org': deque(['/1', '/2', '/3']),
        'https://test6.org': deque(['/1', '/2', '/3'])
    }
    bufferlist, _, _, _ = load_download_buffer(domain_dict, dict(), sleep_time=5, threads=1)
    assert len(bufferlist) == 6
    bufferlist, _, _, _ = load_download_buffer(domain_dict, dict(), sleep_time=5, threads=2)
    assert len(bufferlist) == 6