def dev():
    """Run a livereload development server wrapping the Flask WSGI app."""
    srv = Server(app.wsgi_app)
    # Refresh the browser on any file change anywhere in the project tree.
    srv.watch('**/*.*')
    srv.serve()
from livereload import Server, shell
from app import app

# Serve the Flask application through livereload on localhost:5000.
# (Flask app objects are WSGI callables, so Server(app) works directly.)
reload_server = Server(app)
reload_server.serve(port=5000, host='localhost')
from livereload import Server, shell

if __name__ == "__main__":
    server = Server()
    # Rebuild the Sphinx docs whenever any source, static asset, or
    # template changes; the 1s delay coalesces rapid successive saves.
    # (The five duplicated watch() calls were collapsed into one loop —
    # same patterns, same command, same delay.)
    rebuild = shell("make html")
    for pattern in ("*.rst", "*.md", "*.py", "_static/*", "_templates/*"):
        server.watch(pattern, rebuild, delay=1)
    server.serve(root="_build/html")
def dev():
    """Start the app under livereload for development."""
    from livereload import Server

    # app.wsgi_app is the underlying WSGI callable (what app.run() serves).
    srv = Server(app.wsgi_app)
    srv.watch("**/*.*")
    srv.serve(open_url=True)
# NOTE(review): fragment of a larger build/serve routine — `build_folder`,
# `source_folder`, `configuration`, `Server`, and `sphinx_autobuild` are
# defined earlier in the file (not visible here). Indentation reconstructed.
if not os.path.exists(build_folder):
    os.makedirs(build_folder)
if configuration.get('autobuild'):
    # Resolve configured ignore paths to absolute paths for the watcher.
    ignored_files = []
    for path in configuration.get('ignore'):
        ignored_files.append(os.path.realpath(path))
    builder = sphinx_autobuild.SphinxBuilder(
        outdir=build_folder,
        args=['-b', 'html', source_folder, build_folder] + sys.argv[1:],
        ignored=ignored_files)
    # Watch sources to trigger rebuilds; watch the output so the browser
    # live-reloads when new HTML is written.
    server = Server(watcher=sphinx_autobuild.LivereloadWatchdogWatcher())
    server.watch(source_folder, builder)
    server.watch(build_folder)
    builder.build()
    server.serve(port=8000, host='0.0.0.0', root=build_folder)
else:
    # Building once when server starts
    builder = sphinx_autobuild.SphinxBuilder(
        outdir=build_folder,
        args=['-b', 'html', source_folder, build_folder] + sys.argv[1:])
    builder.build()
    # presumably consumed by an http.server invocation further down — TODO confirm
    sys.argv = ['nouser', '8000']
def dev():
    """Serve the app with livereload enabled."""
    from livereload import Server

    srv = Server(app.wsgi_app)
    # Watch every file in the project. Without a callback the whole app is
    # simply reloaded on change; a callback would run custom logic instead.
    srv.watch('**/*.*')
    srv.serve(open_url=True)
def dev():
    """Live-reloading dev server — refresh automatically while editing."""
    from livereload import Server

    srv = Server(app.wsgi_app)
    # Watch every file in the project tree...
    srv.watch('**/*.*')
    # ...and everything under static/ as well.
    srv.watch('static/*.*')
    # NOTE(review): open_url_delay is normally a number of seconds; True is
    # merely truthy here — confirm intent.
    srv.serve(open_url_delay=True)
def serve(port=DEFAULT_PORT):
    """Serve the deployed site from DEPLOY_PATH on localhost at *port*."""
    srv = Server()
    srv.serve(host='localhost', port=port, root=DEPLOY_PATH)
def watch_and_serve(args):
    """Rebuild the page on template/script changes and serve it locally."""
    srv = Server()
    srv.watch('template.html', lambda: build_page(args))
    # Re-exec this script first so edits to it take effect, then rebuild.
    srv.watch(__file__, lambda: reload_script() and build_page(args))
    webbrowser.open('http://localhost:5500/page.html')
    srv.serve(root=PARENT_DIR, port=5500)
def start_server():
    """Watch content and templates, regenerating the site via honeycake."""
    srv = Server()
    regen = 'python honeycake.py -r'
    # Content changes settle quickly; template edits get a longer delay.
    srv.watch('.published/*.md', shell(regen), delay=2)
    srv.watch('.templates/*.html', shell(regen), delay=5)
    # CSS only needs a browser refresh, no rebuild command.
    srv.watch('.static/*.css')
    srv.serve()
def dev():
    """Run the app under livereload."""
    srv = Server(app.wsgi_app)
    # Glob (and regex) patterns are accepted by watch().
    srv.watch('**/*.*')
    srv.serve(open_url=True)
def main():  # noqa
    """Entry point: build Sphinx docs, then serve them over HTTP.

    Loads /opt/sphinx-server/.sphinx-server.yml, optionally overlaid by a
    project-local .sphinx-server.yml, then either autobuilds with livereload
    or builds once and serves the output with http.server.

    NOTE(review): indentation reconstructed from a single collapsed line —
    verify the credentials branch really nests under the non-autobuild path.
    """
    key = ""  # base64 basic-auth token; presumably read by AuthHandler — TODO confirm
    config_file = ".sphinx-server.yml"
    install_folder = "/opt/sphinx-server/"
    build_folder = os.path.realpath("_build/html")
    source_folder = os.path.realpath(".")
    configuration = None
    # Base configuration shipped with the server install.
    with open(install_folder + config_file, "r") as config_stream:
        configuration = yaml.safe_load(config_stream)
    # Project-local configuration (if present) overrides the base settings.
    if os.path.isfile(source_folder + "/" + config_file):
        with open(source_folder + "/" + config_file, "r") as custom_stream:
            configuration.update(yaml.safe_load(custom_stream))
    if not os.path.exists(build_folder):
        os.makedirs(build_folder)
    # build only once, then exit.
    if "BUILD_ONCE" in os.environ:
        builder = sphinx_autobuild.SphinxBuilder(
            outdir=build_folder,
            args=["-b", "html", source_folder, build_folder] + sys.argv[1:],
            ignored=[],
        )
        builder.build()
        return 0
    if configuration.get("autobuild"):
        # Resolve configured ignore paths to absolute paths for the watcher.
        ignored_files = []
        for path in configuration.get("ignore"):
            ignored_files.append(os.path.realpath(path))
        builder = sphinx_autobuild.SphinxBuilder(
            outdir=build_folder,
            args=["-b", "html", source_folder, build_folder] + sys.argv[1:],
            ignored=ignored_files,
        )
        # Watch sources to rebuild; watch output to live-reload the browser.
        server = Server(watcher=sphinx_autobuild.LivereloadWatchdogWatcher())
        server.watch(source_folder, builder)
        server.watch(build_folder)
        builder.build()
        server.serve(port=8000, host="0.0.0.0", root=build_folder)
    else:
        # Building once when server starts
        builder = sphinx_autobuild.SphinxBuilder(
            outdir=build_folder,
            args=["-b", "html", source_folder, build_folder] + sys.argv[1:],
        )
        builder.build()
        # http.server.test() reads its port from sys.argv — fake it here.
        sys.argv = ["nouser", "8000"]
        if configuration.get("credentials")["username"] is not None:
            # Credentials configured: serve behind HTTP basic auth.
            auth = (configuration.get("credentials")["username"] + ":" +
                    configuration.get("credentials")["password"])
            key = base64.b64encode(auth.encode("utf-8"))
            with pushd(build_folder):
                http.server.test(AuthHandler, http.server.HTTPServer)
        else:
            # No credentials: plain static file server on port 8000.
            with pushd(build_folder):
                Handler = http.server.SimpleHTTPRequestHandler
                httpd = socketserver.TCPServer(("", 8000), Handler)
                httpd.serve_forever()
    return 0
def build_docs(self, path=None, fmt='html', outdir=None, auto_open=True,
               serve=True, http=None, archive=False, upload=False,
               jobs=None):
    """Build the Sphinx docs under *path*, then optionally archive, upload,
    open in a browser, or serve them with a livereload server.

    NOTE(review): when serve=True, `http` must be a "host:port" string —
    confirm against the CLI that supplies it.
    """
    # Bail out early if jsdoc is unavailable (needed for the JS docs).
    if self.check_jsdoc():
        return die(JSDOC_NOT_FOUND)

    self.activate_pipenv(os.path.join(here, 'Pipfile'))

    import webbrowser
    from livereload import Server
    from moztreedocs.package import create_tarball

    outdir = outdir or os.path.join(self.topobjdir, 'docs')
    savedir = os.path.join(outdir, fmt)

    path = path or self.topsrcdir
    path = os.path.normpath(os.path.abspath(path))
    docdir = self._find_doc_dir(path)
    if not docdir:
        print(self._dump_sphinx_backtrace())
        return die('failed to generate documentation:\n'
                   '%s: could not find docs at this location' % path)

    result = self._run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs)
    if result != 0:
        print(self._dump_sphinx_backtrace())
        return die('failed to generate documentation:\n'
                   '%s: sphinx return code %d' % (path, result))
    else:
        print('\nGenerated documentation:\n%s' % savedir)

    print('Post processing HTML files')
    self._post_process_html(savedir)

    if archive:
        archive_path = os.path.join(outdir, '%s.tar.gz' % self.project)
        create_tarball(archive_path, savedir)
        print('Archived to %s' % archive_path)

    if upload:
        self._s3_upload(savedir, self.project, self.version)

    if not serve:
        # No server requested: optionally open the generated index locally.
        index_path = os.path.join(savedir, 'index.html')
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(':', 1)
        port = int(port)
    except ValueError:
        return die('invalid address: %s' % http)

    server = Server()
    sphinx_trees = self.manager.trees or {savedir: docdir}
    for _, src in sphinx_trees.items():
        # Each doc tree gets its own rebuild callback bound to its source.
        run_sphinx = partial(self._run_sphinx, src, savedir, fmt=fmt,
                             jobs=jobs)
        server.watch(src, run_sphinx)
    server.serve(host=host, port=port, root=savedir,
                 open_url_delay=0.1 if auto_open else None)
def dev():
    """Attach a livereload server to the app and watch the whole tree."""
    srv = Server(app.wsgi_app)
    srv.watch('**/*.*')
    # NOTE(review): no serve() call here — confirm the server is started
    # elsewhere, or that this snippet was truncated.
def dev():
    """Run the app under livereload, opening the browser on start."""
    from livereload import Server

    srv = Server(app.wsgi_app)
    srv.watch('**/*.*')
    srv.serve(open_url=True)
def serve() -> None:
    """Build once, then serve BUILD_PATH and rebuild on any change."""
    build()
    srv = Server()
    # check_ignore filters out paths that should not trigger a rebuild.
    srv.watch("**/*", build, ignore=check_ignore)
    srv.serve(root=BUILD_PATH)
def docs_serve():
    """Serve the built Sphinx docs, rebuilding when any .rst file changes."""
    srv = Server()
    srv.watch('docs/*.rst', shell('make html', cwd='docs'))
    srv.serve(root='docs/_build/html', open_url=True)
def main():
    """Watch the page template and re-render on every save."""
    srv = Server()
    srv.watch('template.html', prepare_render)
    srv.serve(root='.')
def build_docs(self, path=None, fmt='html', outdir=None, auto_open=True,
               serve=True, http=None, archive=False, upload=False):
    """Build the Sphinx docs under *path*, then optionally archive, upload,
    open in a browser, or serve them with a livereload server.

    NOTE(review): when serve=True, `http` must be a "host:port" string —
    confirm against the CLI that supplies it.
    """
    # jsdoc is required for the JS documentation pages.
    try:
        which.which('jsdoc')
    except which.WhichError:
        return die('jsdoc not found - please install from npm.')

    self._activate_virtualenv()
    self.virtualenv_manager.install_pip_requirements(
        os.path.join(here, 'requirements.txt'), quiet=True)

    import webbrowser
    from livereload import Server
    from moztreedocs.package import create_tarball

    outdir = outdir or os.path.join(self.topobjdir, 'docs')
    savedir = os.path.join(outdir, fmt)

    path = path or os.path.join(self.topsrcdir, 'tools')
    path = os.path.normpath(os.path.abspath(path))
    docdir = self._find_doc_dir(path)
    if not docdir:
        return die('failed to generate documentation:\n'
                   '%s: could not find docs at this location' % path)

    result = self._run_sphinx(docdir, savedir, fmt=fmt)
    if result != 0:
        return die('failed to generate documentation:\n'
                   '%s: sphinx return code %d' % (path, result))
    else:
        print('\nGenerated documentation:\n%s' % savedir)

    if archive:
        archive_path = os.path.join(outdir, '%s.tar.gz' % self.project)
        create_tarball(archive_path, savedir)
        print('Archived to %s' % archive_path)

    if upload:
        self._s3_upload(savedir, self.project, self.version)

    if not serve:
        # No server requested: optionally open the generated index locally.
        index_path = os.path.join(savedir, 'index.html')
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(':', 1)
        port = int(port)
    except ValueError:
        return die('invalid address: %s' % http)

    server = Server()
    sphinx_trees = self.manager.trees or {savedir: docdir}
    for dest, src in sphinx_trees.items():
        # Each doc tree gets its own rebuild callback bound to its source.
        run_sphinx = partial(self._run_sphinx, src, savedir, fmt=fmt)
        server.watch(src, run_sphinx)
    server.serve(host=host, port=port, root=savedir,
                 open_url_delay=0.1 if auto_open else None)
def readme():
    """Live reload readme"""
    from livereload import Server

    srv = Server()
    # Re-run the readme build whenever the source file changes.
    srv.watch("README.rst", "python cute.py readme_build")
    srv.serve(open_url_delay=1, root="build/readme")
# change for docker rebuil # hack socketio location test #socketio = SocketIO(app, ssl_ctx=ssl_ctx) if __name__ == '__main__': # If launched as main then setup the Flask Server HOST = environ.get('SERVER_HOST', 'localhost') # HOST = environ.get('SERVER_HOST', '0.0.0.0') # HOST = environ.get('SERVER_HOST', '') PORT = int(environ.get('SERVER_PORT', '5000')) if (enableLiveReload): app.debug = True liveReloadServer = Server(app.wsgi_app) # certLocation = "website/certs/" # server.watch liveReloadServer.watch('website/') liveReloadServer.watch('website/templates/') liveReloadServer.serve(port=PORT, host=HOST) else: app.run(HOST, PORT, debug=False) # socketio.run(app, port=9090) # server = Server(app.wsgi_app) # certLocation = "website/certs/" # app.debug = True
def build_docs(
        self,
        path=None,
        fmt="html",
        outdir=None,
        auto_open=True,
        serve=True,
        http=None,
        archive=False,
        upload=False,
        jobs=None,
        write_url=None,
        verbose=None,
):
    """Build the Sphinx docs under *path*, then optionally write the S3
    artifact URL, archive, upload, open in a browser, or serve them with
    a livereload server.

    NOTE(review): when serve=True, `http` must be a "host:port" string —
    confirm against the CLI that supplies it.
    """
    # jsdoc is required for the JS documentation pages.
    if self.check_jsdoc():
        return die(JSDOC_NOT_FOUND)

    self.activate_virtualenv()
    self.virtualenv_manager.install_pip_requirements(
        os.path.join(here, "requirements.txt"))

    import webbrowser
    from livereload import Server
    from moztreedocs.package import create_tarball

    # Unique upload prefix so each build gets a distinct S3 location.
    unique_id = "%s/%s" % (self.project, str(uuid.uuid1()))

    outdir = outdir or os.path.join(self.topobjdir, "docs")
    savedir = os.path.join(outdir, fmt)

    path = path or self.topsrcdir
    path = os.path.normpath(os.path.abspath(path))
    docdir = self._find_doc_dir(path)
    if not docdir:
        print(self._dump_sphinx_backtrace())
        return die("failed to generate documentation:\n"
                   "%s: could not find docs at this location" % path)

    result = self._run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs,
                              verbose=verbose)
    if result != 0:
        print(self._dump_sphinx_backtrace())
        return die("failed to generate documentation:\n"
                   "%s: sphinx return code %d" % (path, result))
    else:
        print("\nGenerated documentation:\n%s" % savedir)

    # Upload the artifact containing the link to S3
    # This would be used by code-review to post the link to Phabricator
    if write_url is not None:
        unique_link = BASE_LINK + unique_id + "/index.html"
        with open(write_url, "w") as fp:
            fp.write(unique_link)
            fp.flush()
        print("Generated " + write_url)

    if archive:
        archive_path = os.path.join(outdir, "%s.tar.gz" % self.project)
        create_tarball(archive_path, savedir)
        print("Archived to %s" % archive_path)

    if upload:
        self._s3_upload(savedir, self.project, unique_id, self.version)

    if not serve:
        # No server requested: optionally open the generated index locally.
        index_path = os.path.join(savedir, "index.html")
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(":", 1)
        port = int(port)
    except ValueError:
        return die("invalid address: %s" % http)

    server = Server()
    sphinx_trees = self.manager.trees or {savedir: docdir}
    for _, src in sphinx_trees.items():
        # Each doc tree gets its own rebuild callback bound to its source.
        run_sphinx = partial(self._run_sphinx, src, savedir, fmt=fmt,
                             jobs=jobs, verbose=verbose)
        server.watch(src, run_sphinx)
    server.serve(
        host=host,
        port=port,
        root=savedir,
        open_url_delay=0.1 if auto_open else None,
    )
def dev():
    """Run the app under livereload, watching the whole project."""
    srv = Server(app.wsgi_app)
    srv.watch('**/*.*')
    # NOTE(review): open_url_delay is normally a number of seconds; True is
    # merely truthy here — confirm intent.
    srv.serve(open_url_delay=True)
def build_docs(
        command_context,
        path=None,
        fmt="html",
        outdir=None,
        auto_open=True,
        serve=True,
        http=None,
        archive=False,
        upload=False,
        jobs=None,
        write_url=None,
        verbose=None,
):
    """Build the Sphinx docs under *path* (mach-command form), then
    optionally write the artifact URL, archive, upload, open in a
    browser, or serve them with a livereload server.

    NOTE(review): when serve=True, `http` must be a "host:port" string —
    confirm against the CLI that supplies it.
    """
    # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
    sys.path.append(
        mozpath.join(command_context.topsrcdir, "tools", "lint", "eslint"))
    import setup_helper

    setup_helper.set_project_root(command_context.topsrcdir)

    if not setup_helper.check_node_executables_valid():
        return 1

    setup_helper.eslint_maybe_setup()

    # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
    # a way to pass this to Sphinx itself at the moment.
    os.environ["PATH"] = (
        mozpath.join(command_context.topsrcdir, "node_modules", ".bin")
        + os.pathsep
        + _node_path()
        + os.pathsep
        + os.environ["PATH"])

    command_context.activate_virtualenv()
    command_context.virtualenv_manager.install_pip_requirements(
        os.path.join(here, "requirements.txt"))

    import webbrowser
    from livereload import Server
    from moztreedocs.package import create_tarball

    # Unique upload prefix so each build gets a distinct S3 location.
    unique_id = "%s/%s" % (project(), str(uuid.uuid1()))

    outdir = outdir or os.path.join(command_context.topobjdir, "docs")
    savedir = os.path.join(outdir, fmt)

    path = path or command_context.topsrcdir
    path = os.path.normpath(os.path.abspath(path))
    docdir = _find_doc_dir(path)
    if not docdir:
        print(_dump_sphinx_backtrace())
        return die("failed to generate documentation:\n"
                   "%s: could not find docs at this location" % path)

    result = _run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs, verbose=verbose)
    if result != 0:
        print(_dump_sphinx_backtrace())
        return die("failed to generate documentation:\n"
                   "%s: sphinx return code %d" % (path, result))
    else:
        print("\nGenerated documentation:\n%s" % savedir)

    # Upload the artifact containing the link to S3
    # This would be used by code-review to post the link to Phabricator
    if write_url is not None:
        unique_link = BASE_LINK + unique_id + "/index.html"
        with open(write_url, "w") as fp:
            fp.write(unique_link)
            fp.flush()
        print("Generated " + write_url)

    if archive:
        archive_path = os.path.join(outdir, "%s.tar.gz" % project())
        create_tarball(archive_path, savedir)
        print("Archived to %s" % archive_path)

    if upload:
        _s3_upload(savedir, project(), unique_id, version())

    if not serve:
        # No server requested: optionally open the generated index locally.
        index_path = os.path.join(savedir, "index.html")
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(":", 1)
        port = int(port)
    except ValueError:
        return die("invalid address: %s" % http)

    server = Server()
    sphinx_trees = manager().trees or {savedir: docdir}
    for _, src in sphinx_trees.items():
        # Each doc tree gets its own rebuild callback bound to its source.
        run_sphinx = partial(_run_sphinx, src, savedir, fmt=fmt, jobs=jobs,
                             verbose=verbose)
        server.watch(src, run_sphinx)
    server.serve(
        host=host,
        port=port,
        root=savedir,
        open_url_delay=0.1 if auto_open else None,
    )
def live():
    """Rebuild the HTML docs on change and serve them with livereload."""
    srv = Server()
    srv.watch('docs', shell('paver build_html', cwd='.'))
    srv.serve(root='_build/html', open_url_delay=True)
from livereload import Server, shell
from livereload.watcher import Watcher

# provide_filename=True makes the watcher pass the changed file's path to
# watch callbacks instead of calling them with no arguments.
server = Server(watcher=Watcher(provide_filename=True))


def printfilename(filename):
    """Print the path of the file that triggered the reload."""
    print(filename)


server.watch('*.less', printfilename)
server.serve(open_url_delay=1)
@app.route('/login', methods=["GET", "POST"])
def login():
    """Redirect the user to the Auth0 hosted login page."""
    redirect_uri = Config.APP_URL + url_for('callback_handling')
    return auth0.authorize_redirect(redirect_uri=redirect_uri)


@app.route('/logout', methods=["GET"])
def logout():
    """Log out locally, then redirect through Auth0's logout endpoint."""
    # Invalidate the server-side session for this cookie's session id.
    session_id = request.cookies.get('session-id')
    login_manager.registerLogout(session_id)
    flash('Nastąpiło poprawne wylogowanie', 'alert-success')
    # Build redirect to Auth0
    return_url = Config.APP_URL + url_for('index')
    params = {'returnTo': return_url, 'client_id': Config.AUTH0_CLIENT_ID}
    response = redirect(auth0.api_base_url + '/v2/logout?' + urlencode(params))
    response.set_cookie('session-id', '', expires=0, httponly=True)  # Clear cookie
    return response


# Run app with live reload
if app.debug:
    server = Server(app.wsgi_app)
    # server.watch
    server.serve(port=5000)
def dev():
    """Enable debug mode and serve the app under livereload."""
    app.config.debug = True
    srv = Server(app.wsgi_app)
    # watch() takes a glob; e.g. 'static/*.*' would watch only static/.
    # Here the whole project is watched.
    srv.watch("**/*.*")
    srv.serve(open_url=True)
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wagtail_cookie.settings") from django.core.management import execute_from_command_line if 'livereload' in sys.argv: from django.core.wsgi import get_wsgi_application from livereload import Server application = get_wsgi_application() server = Server(application) # Add your watch # server.watch('path/to/file', 'your command') server.serve('8000') else: execute_from_command_line(sys.argv)
def serve_html():
    """Serve the current site, reloading when the CSS file changes."""
    srv = Server()
    srv.watch(css_file)
    srv.serve(open_url_delay=1)