def main():
    options, args = parse_params()

    # set up logging
    if options.quiet:
        update_loggers_level(logging.CRITICAL)
    elif options.verbose:
        update_loggers_level(logging.DEBUG)

    # Report the command-line arguments as links with no source URL.
    item = {'url': None, 'links': args}
    heroshi.api.report_result(item)
def main():
    options, _args = parse_params()

    # set up logging
    if options.quiet:
        update_loggers_level(logging.CRITICAL)
    elif options.verbose:
        update_loggers_level(logging.DEBUG)

    # Run one step, then keep stepping indefinitely if requested.
    step(options.plain)
    if options.forever:
        while True:
            step(options.plain)
def main():
    options, args = parse_params()

    # set up logging
    if options.quiet:
        update_loggers_level(logging.CRITICAL)
    elif options.verbose:
        update_loggers_level(logging.DEBUG)

    crawler = Crawler(int(options.connections), options.plain)
    # Seed the queue with URLs from the command line.
    for url in args:
        crawler.queue.put({"url": url, "visited": None})
    try:
        # Without seed URLs, crawl indefinitely on queue-fed work.
        crawler.crawl(forever=not args)
    except KeyboardInterrupt:
        crawler.graceful_stop()
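# Hedged usage sketch: driving the crawler entry point above without
# command-line parsing. The Crawler interface and queue item layout are taken
# from main() above; the concrete values (10 connections, the seed URL, and
# False for the "plain" flag) are illustrative assumptions, not heroshi
# defaults.
def example_crawl_one_site():
    crawler = Crawler(10, False)  # connections count and "plain" flag, as in main()
    crawler.queue.put({"url": "http://example.com/", "visited": None})
    crawler.crawl(forever=False)  # a seed URL was given, so do not run forever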
def main():
    options, _args = parse_params()

    # set up logging
    if options.quiet:
        update_loggers_level(logging.CRITICAL)
    elif options.verbose:
        update_loggers_level(logging.DEBUG)

    # Read one JSON-encoded report per line from stdin and forward it to the API.
    for line in sys.stdin:
        line = line.strip()
        try:
            item = json.loads(line)
        except ValueError:
            log.error(u"Decoding report from '%s'", line)
            continue
        # Rename 'status' to 'result' and drop worker-side fields before reporting.
        item['result'] = item.pop('status')
        item.pop('cached', None)
        item.pop('success', None)
        heroshi.api.report_result(item)
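# Hedged sketch of the per-line transformation the loop above applies, pulled
# out as a standalone helper (a hypothetical name, not part of heroshi). Field
# names mirror the pops above; the sample values are illustrative assumptions,
# not a documented report schema.
def _convert_report(line):
    """E.g. '{"url": "http://example.com/", "status": "done", "cached": false}'
    becomes {"url": "http://example.com/", "result": "done"}."""
    item = json.loads(line)
    item['result'] = item.pop('status')  # reporting side expects 'result'
    item.pop('cached', None)             # dropped before reporting, as above
    item.pop('success', None)
    return item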
def main():
    options, _args = parse_params()

    # set up logging
    if options.quiet:
        update_loggers_level(logging.CRITICAL)
    elif options.verbose:
        update_loggers_level(logging.DEBUG)
    else:
        update_loggers_level(settings.log['level'])

    # eager initialization of manager instance and its storage connection
    with manager_pool.item() as manager:
        manager.active = True
        manager.ping_storage()

    sock = eventlet.listen(('0.0.0.0', 8080))
    eventlet.wsgi.server(sock, wsgi_app, log=Blackhole())
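# `Blackhole` is defined elsewhere in heroshi. A minimal stand-in, assuming
# eventlet.wsgi.server only calls write() on the object passed as `log` (true
# for eventlet versions of this era), might look like this hypothetical sketch:
class BlackholeSketch(object):
    """Discard everything written; silences eventlet's per-request access log."""
    def write(self, *args, **kwargs):
        pass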