示例#1
0
    futures.append(handler.add(Worker.scrape, url, blacklist))
    add_visited(url['url'])

    #print("Status: q:" + str(len(queue)) + " | eqt:" + str(empty_queue_tick))
    time.sleep(.5)



"""






# Seed the crawl queue: scrape the start page loaded via Filehandler,
# collecting its outgoing links. `blacklist` is passed through to the
# scraper — presumably a URL filter; confirm in Worker.scrape.
queue = Worker.scrape(Filehandler.getpage(), blacklist)
i = 0  # NOTE(review): `i` is never used within this excerpt — possibly incremented later in the loop body.
# Crawl until no URLs remain to visit.
# NOTE(review): the loop body may continue beyond this excerpt (the
# commented-out variant above includes a sleep/status print after add_visited).
while len(queue) > 0:

    # Fetch url from queue
    # list.pop() removes from the END, so traversal order is LIFO (depth-first).
    url = queue.pop()

    # Scrape the url, and retrieve new urls
    new_urls = Worker.scrape(url, blacklist)

    # Add new URL's to the queue
    # NOTE(review): no visible de-duplication against the queue or the visited
    # set here — confirm Worker.scrape filters already-visited URLs, otherwise
    # the same page can be queued and scraped repeatedly.
    queue.extend(new_urls)

    # Mark this url as visited
    # `url` is indexed with the 'url' key, so queue entries appear to be dicts
    # carrying at least a 'url' field.
    add_visited(url['url'])