Example #1
def get_fetch(log=False):
    """Start a background Scrapy crawler and return a thread-safe fetch helper.

    Args:
        log: Value for Scrapy's ``LOG_ENABLED`` setting (default: disabled).

    Returns:
        A callable ``fetch(url_or_request)`` that crawls the given URL or
        Request through a shared ``Shell`` and returns the resulting
        ``response`` object (``None`` if the shell set no such variable).
    """
    import threading  # hoisted from below: the lock is created right after setup

    settings = Settings()
    settings.set('LOG_ENABLED', log)

    crawler_process = CrawlerProcess(settings)
    crawler = crawler_process.create_crawler()
    crawler_process.start_crawling()

    # Run the Twisted reactor in a daemon thread so it neither blocks the
    # caller nor keeps the process alive on exit.
    t = Thread(target=crawler_process.start_reactor)
    t.daemon = True
    t.start()

    shell = Shell(crawler)
    # NOTE(review): 'adsf' looks like a placeholder — presumably any non-empty
    # value keeps the shell from entering interactive mode; confirm upstream.
    shell.code = 'adsf'

    # One Shell instance is shared by every caller of fetch(), so all access
    # must be serialized.
    lock = threading.Lock()

    def fetch(url_or_request):
        """Fetch *url_or_request* via the shared shell and return the response."""
        # `with` replaces the manual acquire()/try/finally-release() pattern
        # and guarantees release even if shell.fetch() raises.
        with lock:
            shell.fetch(url_or_request)
            return shell.vars.get('response')

    return fetch
Example #2
def shell(argv):
    """ Open a url in the scrapy shell """
    # The docstring above is surfaced at runtime as the argparse description,
    # so its text must stay exactly as written.
    arg_parser = argparse.ArgumentParser('ozzy shell',
                                         description=shell.__doc__)
    arg_parser.add_argument('url', help="URL to open in a shell")
    parsed = arg_parser.parse_args(argv)

    # Spin up the crawler machinery from project settings.
    process = CrawlerProcess(load_settings())
    spider_crawler = process.create_crawler()
    process.start_crawling()

    # Run the Twisted reactor on a daemon thread so it dies with the process.
    reactor_thread = Thread(target=process.start_reactor)
    reactor_thread.daemon = True
    reactor_thread.start()

    # Hand control to an interactive shell pointed at the requested URL.
    Shell(spider_crawler).start(url=parsed.url)