Example #1
async def crawl():
    # one Task per configured burner thread; loop.create_task avoids the
    # deprecated loop= argument to asyncio.Task
    workers = [loop.create_task(work())
               for _ in range(int(config.read('Multiprocess', 'BurnerThreads')))]
    print('q count is {}'.format(queue.qsize()))
    await queue.join()
    print('join is done')
    for w in workers:
        if not w.done():
            w.cancel()
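A note on why this terminates: crawl() only gets past queue.join() because each work() coroutine is expected to call queue.task_done() for every item it pulls; otherwise join() would block forever. A minimal self-contained sketch of the pattern (the work body and items are illustrative, not CoCrawler's):

import asyncio

async def work(queue):
    while True:
        item = await queue.get()
        print('processing', item)  # stand-in for real crawl work
        queue.task_done()          # lets queue.join() make progress

async def crawl(items, n_workers=3):
    queue = asyncio.Queue()
    for item in items:
        queue.put_nowait(item)
    workers = [asyncio.ensure_future(work(queue)) for _ in range(n_workers)]
    await queue.join()  # returns once every item has been task_done()'d
    for w in workers:
        w.cancel()
    await asyncio.gather(*workers, return_exceptions=True)

asyncio.run(crawl(['a', 'b', 'c']))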
Example #2
def limit_resources():
    _, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    # XXX warn if too few compared to max_workers?
    resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))

    _, hard = resource.getrlimit(resource.RLIMIT_AS)  # RLIMIT_VMEM does not exist?!
    rlimit_as = int(config.read('System', 'RLIMIT_AS_gigabytes'))
    rlimit_as *= 1024 * 1024 * 1024
    if rlimit_as == 0:
        return
    if hard > 0 and rlimit_as > hard:
        LOGGER.error('RLIMIT_AS limited to %d bytes by system limit', hard)
        rlimit_as = hard
    resource.setrlimit(resource.RLIMIT_AS, (rlimit_as, hard))
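The same two steps in isolation, for reference: raise the soft open-files limit to the hard ceiling, then cap the address space without exceeding any existing hard limit. This is a Unix-only sketch; the 4 GiB figure is an arbitrary stand-in for the RLIMIT_AS_gigabytes config value:

import resource

# open files: raise the soft limit up to the hard ceiling
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
print('NOFILE soft limit raised from', soft, 'to', hard)

# address space: cap at 4 GiB, but never above the existing hard limit
wanted = 4 * 1024 * 1024 * 1024  # hypothetical stand-in for the config value
_, as_hard = resource.getrlimit(resource.RLIMIT_AS)
if as_hard > 0:  # hard limit is finite
    wanted = min(wanted, as_hard)
resource.setrlimit(resource.RLIMIT_AS, (wanted, as_hard))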
Example #3
import argparse

import cocrawler.dns as dns
import cocrawler.config as config

ARGS = argparse.ArgumentParser(description='CoCrawler dns fetcher')
ARGS.add_argument('--config', action='append')
ARGS.add_argument('--configfile', action='store')
ARGS.add_argument('--no-confighome', action='store_true')
ARGS.add_argument('--type', default='A')
ARGS.add_argument('hosts', nargs='+', help='list of hostnames to query')

args = ARGS.parse_args()

config.config(args.configfile, args.config, confighome=not args.no_confighome)

ns = config.read('Fetcher', 'Nameservers')
if not isinstance(ns, list):
    ns = [ns]

dns.setup_resolver(ns)
print('set nameservers to', ns)


async def main(hosts):
    for host in hosts:
        try:
            result = await dns.query(host, args.type)
            print(host, result)
        except Exception as e:
            result = None
            print('saw exception', e, 'but ignoring it')
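The excerpt stops before main() is actually driven; a plausible tail for the script, assuming no further setup is needed, would be:

import asyncio

asyncio.run(main(args.hosts))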
Example #4
def main():
    '''
    Main program: parse args, read config, set up event loop, run the crawler.
    '''

    args = ARGS.parse_args()

    if args.printdefault:
        config.print_default()
        sys.exit(1)

    loglevel = os.getenv('COCRAWLER_LOGLEVEL') or args.loglevel
    logging.basicConfig(level=loglevel)

    config.config(args.configfile, args.config)

    if args.printfinal:
        config.print_final()
        sys.exit(1)

    memory.limit_resources()

    if os.getenv('PYTHONASYNCIODEBUG') is not None:
        logging.captureWarnings(True)
        warnings.simplefilter('default', category=ResourceWarning)
        if LOGGER.getEffectiveLevel() > logging.WARNING:
            LOGGER.setLevel(logging.WARNING)
            LOGGER.warning(
                'Lowered logging level to WARNING because PYTHONASYNCIODEBUG env var is set'
            )
        LOGGER.warning(
            'Configured logging system to show ResourceWarning because PYTHONASYNCIODEBUG env var is set'
        )
        LOGGER.warning(
            'Note that this does have a significant impact on asyncio overhead'
        )
    if os.getenv('COCRAWLER_GC_DEBUG') is not None:
        LOGGER.warning('Configuring gc debugging')
        gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_UNCOLLECTABLE)

    kwargs = {}
    if args.load:
        kwargs['load'] = args.load
    if args.no_test:
        kwargs['no_test'] = True

    crawler = cocrawler.Crawler(**kwargs)
    loop = asyncio.get_event_loop()
    slow_callback_duration = os.getenv('ASYNCIO_SLOW_CALLBACK_DURATION')
    if slow_callback_duration:
        loop.slow_callback_duration = float(slow_callback_duration)
        LOGGER.warning('set slow_callback_duration to %f',
                       loop.slow_callback_duration)

    if config.read('CarbonStats'):
        timer.start_carbon()

    if config.read('REST'):
        app = webserver.make_app()
    else:
        app = None

    try:
        loop.run_until_complete(crawler.crawl())
    except KeyboardInterrupt:
        sys.stderr.flush()
        print('\nInterrupt. Exiting cleanly.\n')
        crawler.cancel_workers()
    finally:
        loop.run_until_complete(crawler.close())
        if app:
            webserver.close(app)
        if config.read('CarbonStats'):
            timer.close()
        # apparently this is needed for full aiohttp cleanup -- or is it cargo cult
        loop.stop()
        loop.run_forever()
        loop.close()
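For reference, the PYTHONASYNCIODEBUG and ASYNCIO_SLOW_CALLBACK_DURATION knobs used above map onto asyncio's debug mode and the loop's slow_callback_duration attribute. A standalone sketch of the effect (the threshold and the deliberately blocking sleep are illustrative):

import asyncio
import logging
import time

logging.basicConfig(level=logging.WARNING)

async def blocker():
    time.sleep(0.2)  # blocking call inside a coroutine; flagged in debug mode

loop = asyncio.new_event_loop()
loop.set_debug(True)                # what PYTHONASYNCIODEBUG=1 turns on
loop.slow_callback_duration = 0.1   # warn about callbacks slower than 100 ms
loop.run_until_complete(blocker())  # logs 'Executing <Task ...> took 0.200 seconds'
loop.close()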
Example #5
import argparse
import asyncio
import os

from cocrawler.urls import URL
import cocrawler.dns as dns
import cocrawler.config as config

ARGS = argparse.ArgumentParser(description='CoCrawler dns benchmark')
ARGS.add_argument('--config', action='append')
ARGS.add_argument('--configfile', action='store')
ARGS.add_argument('--count', type=int, default=1000)
ARGS.add_argument('--expect-not-suitable', action='store_true')

args = ARGS.parse_args()

config.config(args.configfile, args.config)
max_workers = config.read('Crawl', 'MaxWorkers')
ns = config.read('Fetcher', 'Nameservers')
if isinstance(ns, str):
    ns = [ns]
    config.write(ns, 'Fetcher', 'Nameservers')

exit_value = 0

resolver = dns.get_resolver()


def create_queue():
    queue = asyncio.Queue()

    # add a fake domain to make sure the DNS server doesn't redirect unknown hosts to a search page
    # note that mail.foo.com and mx.foo.com don't generally get bogus answers; it's foo.com or www.foo.com that do
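The function is cut off after the comment; a hedged sketch of how it might continue (the canary hostname and the use of args.count are guesses for illustration, not CoCrawler's actual values):

    queue.put_nowait('surely-does-not-exist-canary.com')  # hypothetical: should yield NXDOMAIN
    for i in range(args.count):
        queue.put_nowait('www.example{}.com'.format(i))  # placeholder hostnames
    return queue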
Example #6
def main():
    '''
    Main program: parse args, read config, set up event loop, run the crawler.
    '''

    args = ARGS.parse_args()

    if args.printdefault:
        config.print_default()
        sys.exit(1)

    loglevel = os.getenv('COCRAWLER_LOGLEVEL')
    if loglevel is None and args.loglevel:
        loglevel = args.loglevel
    if loglevel is None and args.verbose:
        loglevel = 'DEBUG'

    logging.basicConfig(level=loglevel)

    config.config(args.configfile, args.config)

    if args.printfinal:
        config.print_final()
        sys.exit(1)

    memory.limit_resources()

    if os.getenv('PYTHONASYNCIODEBUG') is not None:
        logging.captureWarnings(True)
        warnings.simplefilter('default', category=ResourceWarning)
        if LOGGER.getEffectiveLevel() > logging.WARNING:
            LOGGER.setLevel(logging.WARNING)
            LOGGER.warning('Lowered logging level to WARNING because PYTHONASYNCIODEBUG env var is set')
        LOGGER.warning('Configured logging system to show ResourceWarning because PYTHONASYNCIODEBUG env var is set')
        LOGGER.warning('Note that this does have a significant impact on asyncio overhead')
    if os.getenv('COCRAWLER_GC_DEBUG') is not None:
        LOGGER.warning('Configuring gc debugging')
        gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_UNCOLLECTABLE)

    kwargs = {}
    if args.load:
        kwargs['load'] = args.load
    if args.no_test:
        kwargs['no_test'] = True

    crawler = cocrawler.Crawler(**kwargs)
    loop = asyncio.get_event_loop()
    slow_callback_duration = os.getenv('ASYNCIO_SLOW_CALLBACK_DURATION')
    if slow_callback_duration:
        loop.slow_callback_duration = float(slow_callback_duration)
        LOGGER.warning('set slow_callback_duration to %f', loop.slow_callback_duration)

    if config.read('CarbonStats'):
        timer.start_carbon()

    if config.read('REST'):
        app = webserver.make_app()
    else:
        app = None

    try:
        loop.run_until_complete(crawler.crawl())
    except KeyboardInterrupt:
        sys.stderr.flush()
        print('\nInterrupt. Exiting cleanly.\n')
        crawler.cancel_workers()
    finally:
        loop.run_until_complete(crawler.close())
        if app:
            webserver.close(app)
        if config.read('CarbonStats'):
            timer.close()
        # voodoo recommended by advanced aiohttp docs for graceful shutdown
        # https://github.com/aio-libs/aiohttp/issues/1925
        loop.run_until_complete(asyncio.sleep(0.250))
        loop.close()
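The quarter-second sleep before loop.close() is the workaround from aiohttp issue #1925: it gives the underlying transports (SSL in particular) one more loop iteration to finish closing. The same shutdown tail around a minimal client, with the URL and request purely illustrative:

import asyncio
import aiohttp

async def fetch_status(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return resp.status

loop = asyncio.get_event_loop()
try:
    print(loop.run_until_complete(fetch_status('http://example.com/')))
finally:
    # let transports finish closing before tearing down the loop
    loop.run_until_complete(asyncio.sleep(0.250))
    loop.close()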