def crawl(max_page):
    """Crawl up to *max_page* URLs from Crawler.queue, echoing progress to the GUI.

    Pops URLs off ``Crawler.queue`` one at a time, hands each to
    ``Crawler.crawl``, and writes a running status line into the shared
    ``text`` widget.  When the budget is spent (or the queue empties) a
    summary is shown and the crawler's state is saved to disk.
    """
    text.delete('1.0', END)
    text.insert(END, 'Currently Crawling Please Wait\n')
    search_engine.update()

    remaining = int(max_page)
    # Pull URLs until the queue runs dry or the page budget is exhausted.
    while Crawler.queue and remaining > 0:
        url = str(Crawler.queue.pop())
        Crawler.crawl(url)
        remaining -= 1
        text.insert(END, 'Currently Crawling: ' + url + '\n')
        search_engine.update()  # keep the Tk window responsive mid-crawl

    print('Crawl Finished Can Now Search')
    text.delete('1.0', END)
    for status_line in (
        'Crawl Finished Can Now Search\n',
        str(len(Crawler.crawled)) + " Url's have been Crawled and Indexed \n",
        str(len(Crawler.queue)) + " Total Number of Url's In Queue\n",
    ):
        text.insert(END, status_line)
    search_engine.update()
    Crawler.save_lists()  # persist queue/crawled state
# NOTE(review): the original file's indentation was lost in extraction.  The
# widget-building statements below (up to mainloop) plausibly belong inside an
# init_gui() body — init_gui is called further down — TODO confirm against the
# complete file before relying on this top-level layout.
crawl_button.pack()

# Search controls: a label, an entry pre-filled with a sample query ('Shane'),
# and a button that runs search() on the entry's current contents.
label = Label(window, text='Enter a Word to Search')
label.pack()
search_input = Entry(window)
search_input.insert(END, 'Shane')
search_input.pack()
search_button = Button(window, text='Search', command=lambda: search(search_input.get()))
search_button.pack()

# Scrollable text area used by both crawl() and search() for status/results.
scrollbar = Scrollbar(window)
scrollbar.pack(side=RIGHT, fill=Y)
text = Text(window, wrap=WORD, yscrollcommand=scrollbar.set)
text.pack()
scrollbar.config(command=text.yview)

# Show the crawler's current state on startup.
text.insert(END, str(len(Crawler.crawled)) + " Url's have been Crawled and Indexed \n")
text.insert(END, str(len(Crawler.queue)) + " Total Number of Url's In Queue\n")

search_engine.mainloop()  # blocks until the Tk window is closed

# Script entry sequence: ensure the crawler's data files exist, run the GUI,
# then persist state and exit once the window is closed.
Crawler.create_file()
init_gui()
Crawler.save_lists()
sys.exit()