def new_server(repo_path, session_id):
    """Point the app at the given repository path and session, then
    return a web server bound to an ephemeral port (``port=0``).
    """
    app.config.update(
        REPOSITORY='file://' + repo_path,
        SESSION_ID=session_id,
    )
    # Start the background crawler lazily, exactly once per process.
    if not app._worker_running:
        spawn_worker()
        app._worker_running = True
    return create_server(app, port=0)
def fx_crawler(request):
    """Pytest fixture: give the test a freshly started crawler worker
    on an empty queue, and shut it down again afterwards.
    """
    empty_queue()
    spawn_worker()

    def shut_down():
        # Drain any pending jobs, then send the sentinel that tells the
        # worker thread to exit.
        empty_queue()
        crawling_queue.put((0, 'terminate'))

    request.addfinalizer(shut_down)
def serve():
    """Run the web server (blocking); meant for a background thread."""
    server.run()


def open_webbrowser(port):
    """Open the user's default browser at the locally served app.

    Uses the loopback address: the previous ``0.0.0.0`` target is a
    *bind* address, not a reliably browsable host (it fails in browsers
    on Windows, for example).
    """
    webbrowser.open('http://127.0.0.1:{}'.format(port))


if __name__ == "__main__":
    directory = os.path.expanduser('~/.earthreader')
    repository = urlparse.urljoin('file://', directory)
    session_id = Session().identifier
    app.config.update(REPOSITORY=repository, SESSION_ID=session_id)
    # port=0 lets the OS pick a free ephemeral port; read it back below.
    server = create_server(app, port=0)
    port = server.effective_port
    spawn_worker()
    # Serve in a daemon thread so the Tk main loop below owns the
    # process lifetime (exiting Tk exits the server too).
    proc = threading.Thread(target=serve)
    proc.daemon = True
    proc.start()
    open_webbrowser(port)
    root = tk.Tk()
    menubar = tk.Menu(root)
    filemenu = tk.Menu(menubar)
    filemenu.add_command(label="Open Browser",
                         command=lambda: open_webbrowser(port))
    menubar.add_cascade(label="File", menu=filemenu)
    root.config(menu=menubar)
    # Hide the root window; the app lives in the menubar/browser only.
    root.withdraw()
    root.mainloop()