class C(HasTraits):
    # Trait holding a class object; presumably Type(default_value=None, klass=B),
    # i.e. defaults to None and accepts B subclasses — TODO confirm against the
    # traitlets version in use.
    klass = Type(None, B)
class NotebookApp(JupyterApp):
    """The Jupyter HTML notebook server application (``jupyter notebook``).

    Owns the tornado web application and HTTP server, the kernel/contents/
    session/config managers, signal handling, and server-extension loading.
    """

    name = 'jupyter-notebook'
    version = __version__
    description = """ The Jupyter HTML Notebook. This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client. """
    examples = _examples
    aliases = aliases
    flags = flags

    # Configurable classes surfaced in --help-all and generated config files.
    classes = [
        KernelManager, Session, MappingKernelManager, ContentsManager,
        FileContentsManager, NotebookNotary, KernelSpecManager,
    ]
    flags = Dict(flags)
    aliases = Dict(aliases)

    subcommands = dict(
        list=(NbserverListApp, NbserverListApp.description.splitlines()[0]),
    )

    _log_formatter_cls = LogFormatter

    def _log_level_default(self):
        return logging.INFO

    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%H:%M:%S"

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"

    ignore_minified_js = Bool(
        False,
        config=True,
        help='Use minified JS file or not, mainly use during dev to avoid JS recompilation',
    )

    # file to be opened in the notebook server
    file_to_run = Unicode('', config=True)

    # Network related information
    allow_origin = Unicode('', config=True,
        help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. """)

    allow_origin_pat = Unicode('', config=True,
        help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. 
        """)

    allow_credentials = Bool(False, config=True,
        help="Set the Access-Control-Allow-Credentials: true header")

    default_url = Unicode('/tree', config=True,
        help="The default URL to redirect to from `/`")

    ip = Unicode('localhost', config=True,
        help="The IP address the notebook server will listen on.")

    def _ip_default(self):
        """Return localhost if available, 127.0.0.1 otherwise.

        On some (horribly broken) systems, localhost cannot be bound.
        """
        s = socket.socket()
        try:
            s.bind(('localhost', 0))
        except socket.error as e:
            self.log.warning(
                "Cannot bind to localhost, using 127.0.0.1 as default ip\n%s", e)
            return '127.0.0.1'
        else:
            s.close()
            return 'localhost'

    def _ip_changed(self, name, old, new):
        # '*' means "listen on all interfaces"; tornado expects '' for that.
        if new == u'*':
            self.ip = u''

    port = Integer(8888, config=True,
        help="The port the notebook server will listen on.")

    port_retries = Integer(50, config=True,
        help="The number of additional ports to try if the specified port is not available.")

    certfile = Unicode(u'', config=True,
        help="""The full path to an SSL/TLS certificate file.""")

    keyfile = Unicode(u'', config=True,
        help="""The full path to a private key file for usage with SSL/TLS.""")

    client_ca = Unicode(u'', config=True,
        help="""The full path to a certificate authority certificate for SSL/TLS client authentication.""")

    cookie_secret_file = Unicode(config=True,
        help="""The file where the cookie secret is stored.""")

    def _cookie_secret_file_default(self):
        return os.path.join(self.runtime_dir, 'notebook_cookie_secret')

    cookie_secret = Bytes(b'', config=True,
        help="""The random bytes used to secure cookies. By default this is a new random number every time you start the Notebook. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). 
        """)

    def _cookie_secret_default(self):
        """Load the cookie secret from disk, generating and persisting a new one if absent."""
        if os.path.exists(self.cookie_secret_file):
            with io.open(self.cookie_secret_file, 'rb') as f:
                return f.read()
        else:
            # base64.encodestring was deprecated in Python 3.1 and removed in
            # 3.9; encodebytes is the same function under its supported name.
            secret = base64.encodebytes(os.urandom(1024))
            self._write_cookie_secret_file(secret)
            return secret

    def _write_cookie_secret_file(self, secret):
        """write my secret to my secret_file"""
        self.log.info("Writing notebook server cookie secret to %s",
                      self.cookie_secret_file)
        with io.open(self.cookie_secret_file, 'wb') as f:
            f.write(secret)
        try:
            # keep the secret private: owner read/write only
            os.chmod(self.cookie_secret_file, 0o600)
        except OSError:
            self.log.warning("Could not set permissions on %s",
                             self.cookie_secret_file)

    password = Unicode(u'', config=True,
        help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from notebook.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. """)

    open_browser = Bool(True, config=True,
        help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (NotebookApp.browser) configuration option. """)

    browser = Unicode(u'', config=True,
        help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. """)

    webapp_settings = Dict(config=True,
        help="DEPRECATED, use tornado_settings")

    def _webapp_settings_changed(self, name, old, new):
        # forward the deprecated trait to its replacement
        self.log.warning(
            "\n webapp_settings is deprecated, use tornado_settings.\n")
        self.tornado_settings = new

    tornado_settings = Dict(config=True,
        help="Supply overrides for the tornado.web.Application that the "
             "Jupyter notebook uses.")

    ssl_options = Dict(config=True,
        help="""Supply SSL options for the tornado HTTPServer. 
        See the tornado docs for details.""")

    jinja_environment_options = Dict(config=True,
        help="Supply extra arguments that will be passed to Jinja environment.")

    jinja_template_vars = Dict(
        config=True,
        help="Extra variables to supply to jinja templates when rendering.",
    )

    enable_mathjax = Bool(True, config=True,
        help="""Whether to enable MathJax for typesetting math/TeX MathJax is the javascript library Jupyter uses to render math/LaTeX. It is very large, so you may want to disable it if you have a slow internet connection, or for offline use of the notebook. When disabled, equations etc. will appear as their untransformed TeX source. """)

    def _enable_mathjax_changed(self, name, old, new):
        """set mathjax url to empty if mathjax is disabled"""
        if not new:
            self.mathjax_url = u''

    base_url = Unicode('/', config=True,
        help='''The base URL for the notebook server. Leading and trailing slashes can be omitted, and will automatically be added. ''')

    def _base_url_changed(self, name, old, new):
        # normalize: guarantee a leading and trailing slash
        if not new.startswith('/'):
            self.base_url = '/' + new
        elif not new.endswith('/'):
            self.base_url = new + '/'

    base_project_url = Unicode('/', config=True,
        help="""DEPRECATED use base_url""")

    def _base_project_url_changed(self, name, old, new):
        self.log.warning("base_project_url is deprecated, use base_url")
        self.base_url = new

    extra_static_paths = List(Unicode(), config=True,
        help="""Extra paths to search for serving static files. 
        This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython""")

    @property
    def static_file_path(self):
        """return extra paths + the default location"""
        return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]

    static_custom_path = List(Unicode(),
        help="""Path to search for custom.js, css""")

    def _static_custom_path_default(self):
        return [
            os.path.join(d, 'custom')
            for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)
        ]

    extra_template_paths = List(Unicode(), config=True,
        help="""Extra paths to search for serving jinja templates. Can be used to override templates from notebook.templates.""")

    @property
    def template_file_path(self):
        """return extra paths + the default locations"""
        return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST

    extra_nbextensions_path = List(Unicode(), config=True,
        help="""extra paths to look for Javascript notebook extensions""")

    @property
    def nbextensions_path(self):
        """The path to look for Javascript notebook extensions"""
        path = self.extra_nbextensions_path + jupyter_path('nbextensions')
        # FIXME: remove IPython nbextensions path after a migration period
        try:
            from IPython.paths import get_ipython_dir
        except ImportError:
            pass
        else:
            path.append(os.path.join(get_ipython_dir(), 'nbextensions'))
        return path

    websocket_url = Unicode("", config=True,
        help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
        Should be in the form of an HTTP origin: ws[s]://hostname[:port] """)

    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js.""")

    def _mathjax_url_default(self):
        """Default MathJax URL: served from our own static files, honoring any
        custom static_url_prefix in tornado_settings."""
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.tornado_settings.get("static_url_prefix",
                                                      "static")
        return url_path_join(static_url_prefix, 'components', 'MathJax',
                             'MathJax.js')

    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    contents_manager_class = Type(default_value=FileContentsManager,
                                  klass=ContentsManager,
                                  config=True,
                                  help='The notebook manager class to use.')

    kernel_manager_class = Type(default_value=MappingKernelManager,
                                config=True,
                                help='The kernel manager class to use.')

    session_manager_class = Type(default_value=SessionManager,
                                 config=True,
                                 help='The session manager class to use.')

    config_manager_class = Type(default_value=ConfigManager,
                                config=True,
                                help='The config manager class to use')

    kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)

    kernel_spec_manager_class = Type(default_value=KernelSpecManager,
                                     config=True,
                                     help=""" The kernel spec manager class to use. Should be a subclass of `jupyter_client.kernelspec.KernelSpecManager`. The Api of KernelSpecManager is provisional and might change without warning between this version of Jupyter and the next stable one. """)

    login_handler_class = Type(
        default_value=LoginHandler,
        klass=web.RequestHandler,
        config=True,
        help='The login handler class to use.',
    )

    logout_handler_class = Type(
        default_value=LogoutHandler,
        klass=web.RequestHandler,
        config=True,
        help='The logout handler class to use.',
    )

    trust_xheaders = Bool(False, config=True,
        help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
              "sent by the upstream reverse proxy. Necessary if the proxy handles SSL"))

    # path of the nbserver-<pid>.json runtime info file (see server_info)
    info_file = Unicode()

    def _info_file_default(self):
        info_file = "nbserver-%s.json" % os.getpid()
        return os.path.join(self.runtime_dir, info_file)

    pylab = Unicode('disabled', config=True,
        help=""" DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. """)

    def _pylab_changed(self, name, old, new):
        """when --pylab is specified, display a warning and exit"""
        if new != 'warn':
            backend = ' %s' % new
        else:
            backend = ''
        self.log.error(
            "Support for specifying --pylab on the command line has been removed."
        )
        self.log.error(
            "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself."
            .format(backend))
        self.exit(1)

    notebook_dir = Unicode(
        config=True, help="The directory to use for notebooks and kernels.")

    def _notebook_dir_default(self):
        # serve the directory of the requested notebook, else the cwd
        if self.file_to_run:
            return os.path.dirname(os.path.abspath(self.file_to_run))
        else:
            return py3compat.getcwd()

    def _notebook_dir_validate(self, value, trait):
        # Strip any trailing slashes
        value = value.rstrip(os.sep)
        if not os.path.isabs(value):
            # If we receive a non-absolute path, make it absolute.
            value = os.path.abspath(value)
        if not os.path.isdir(value):
            raise TraitError("No such notebook dir: %r" % value)
        return value

    def _notebook_dir_changed(self, name, old, new):
        """Do a bit of validation of the notebook dir."""
        # setting App.notebook_dir implies setting notebook and kernel dirs as well
        self.config.FileContentsManager.root_dir = new
        self.config.MappingKernelManager.root_dir = new

    server_extensions = List(Unicode(), config=True,
        help=("Python modules to load as notebook server extensions. "
              "This is an experimental API, and may change in future releases."))

    reraise_server_extension_failures = Bool(
        False,
        config=True,
        help="Reraise exceptions encountered loading server extensions?",
    )

    def parse_command_line(self, argv=None):
        """Parse argv; a single extra argument is a notebook file or directory to serve."""
        super(NotebookApp, self).parse_command_line(argv)
        if self.extra_args:
            arg0 = self.extra_args[0]
            f = os.path.abspath(arg0)
            self.argv.remove(arg0)
            if not os.path.exists(f):
                self.log.critical("No such file or directory: %s", f)
                self.exit(1)
            # Use config here, to ensure that it takes higher priority than
            # anything that comes from the config dirs.
            c = Config()
            if os.path.isdir(f):
                c.NotebookApp.notebook_dir = f
            elif os.path.isfile(f):
                c.NotebookApp.file_to_run = f
            self.update_config(c)

    def init_configurables(self):
        """Instantiate the kernel spec/kernel/contents/session/config managers."""
        self.kernel_spec_manager = self.kernel_spec_manager_class(
            parent=self,
        )
        self.kernel_manager = self.kernel_manager_class(
            parent=self,
            log=self.log,
            connection_dir=self.runtime_dir,
            kernel_spec_manager=self.kernel_spec_manager,
        )
        self.contents_manager = self.contents_manager_class(
            parent=self,
            log=self.log,
        )
        self.session_manager = self.session_manager_class(
            parent=self,
            log=self.log,
            kernel_manager=self.kernel_manager,
            contents_manager=self.contents_manager,
        )
        self.config_manager = self.config_manager_class(
            parent=self,
            log=self.log,
            config_dir=os.path.join(self.config_dir, 'nbconfig'),
        )

    def init_logging(self):
        """Route tornado's loggers through self.log without double emission."""
        # This prevents double log messages because tornado use a root logger that
        # self.log is a child of. The logging module dipatches log messages to a log
        # and all of its ancenstors until propagate is set to False.
        self.log.propagate = False
        for log in app_log, access_log, gen_log:
            # consistent log output name (NotebookApp instead of tornado.access, etc.)
            log.name = self.log.name
        # hook up tornado 3's loggers to our app handlers
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.tornado_settings['allow_origin'] = self.allow_origin
        if self.allow_origin_pat:
            self.tornado_settings['allow_origin_pat'] = re.compile(
                self.allow_origin_pat)
        self.tornado_settings['allow_credentials'] = self.allow_credentials
        # ensure default_url starts with base_url
        if not self.default_url.startswith(self.base_url):
            self.default_url = url_path_join(self.base_url, self.default_url)

        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.contents_manager,
            self.session_manager, self.kernel_spec_manager,
            self.config_manager, self.log, self.base_url, self.default_url,
            self.tornado_settings, self.jinja_environment_options)

        ssl_options = self.ssl_options
        if self.certfile:
            ssl_options['certfile'] = self.certfile
        if self.keyfile:
            ssl_options['keyfile'] = self.keyfile
        if self.client_ca:
            ssl_options['ca_certs'] = self.client_ca
        if not ssl_options:
            # None indicates no SSL config
            ssl_options = None
        else:
            # SSL may be missing, so only import it if it's to be used
            import ssl
            # Disable SSLv3, since its use is discouraged.
            # NOTE(review): PROTOCOL_TLSv1 pins TLS 1.0 only and is itself
            # deprecated in modern `ssl`; consider PROTOCOL_TLS_SERVER when the
            # supported-Python floor allows.
            ssl_options['ssl_version'] = ssl.PROTOCOL_TLSv1
            if ssl_options.get('ca_certs', False):
                ssl_options['cert_reqs'] = ssl.CERT_REQUIRED

        self.login_handler_class.validate_security(self,
                                                   ssl_options=ssl_options)
        self.http_server = httpserver.HTTPServer(self.web_app,
                                                 ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)

        # try self.port first, then up to port_retries random fallbacks
        success = None
        for port in random_ports(self.port, self.port_retries + 1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    self.log.info(
                        'The port %i is already in use, trying another random port.'
                        % port)
                    continue
                elif e.errno in (errno.EACCES,
                                 getattr(errno, 'WSAEACCES', errno.EACCES)):
                    self.log.warning("Permission to listen on port %i denied" %
                                     port)
                    continue
                else:
                    raise
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical(
                'ERROR: the notebook server could not be started because '
                'no available port could be found.')
            self.exit(1)

    @property
    def display_url(self):
        # human-readable URL for log messages
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        return self._url(ip)

    @property
    def connection_url(self):
        # URL a client can actually connect to
        ip = self.ip if self.ip else 'localhost'
        return self._url(ip)

    def _url(self, ip):
        proto = 'https' if self.certfile else 'http'
        return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)

    def init_terminals(self):
        """Enable the terminals feature if its optional dependencies import."""
        try:
            from .terminal import initialize
            initialize(self.web_app, self.notebook_dir, self.connection_url)
            self.web_app.settings['terminals_available'] = True
        except ImportError as e:
            log = self.log.debug if sys.platform == 'win32' else self.log.warning
            log("Terminals not available (error was %s)", e)

    def init_signal(self):
        """Install handlers for SIGINT/SIGTERM and the info signals."""
        if not sys.platform.startswith('win') and sys.stdin.isatty():
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)

    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()

    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)

    def _confirm_exit(self):
        """confirm shutdown on ^C

        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.

        This doesn't work on Windows.
        """
        info = self.log.info
        info('interrupted')
        print(self.notebook_info())
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        r, w, x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y') and 'n' not in line.lower():
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.current().stop()
                return
        else:
            print("No answer for 5s:", end=' ')
            print("resuming operation...")
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)

    def _signal_stop(self, sig, frame):
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.current().stop()

    def _signal_info(self, sig, frame):
        print(self.notebook_info())

    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        # TODO: this should still check, but now we use bower, not git submodule
        pass

    def init_server_extensions(self):
        """Load any extensions specified by config.

        Import the module, then call the load_jupyter_server_extension
        function, if one exists.

        The extension API is experimental, and may change in future releases.
        """
        for modulename in self.server_extensions:
            try:
                mod = importlib.import_module(modulename)
                func = getattr(mod, 'load_jupyter_server_extension', None)
                if func is not None:
                    func(self)
            except Exception:
                if self.reraise_server_extension_failures:
                    raise
                self.log.warning("Error loading server extension %s",
                                 modulename,
                                 exc_info=True)

    @catch_config_error
    def initialize(self, argv=None):
        super(NotebookApp, self).initialize(argv)
        self.init_logging()
        if self._dispatching:
            # a subcommand will run instead; skip server setup
            return
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_terminals()
        self.init_signal()
        self.init_server_extensions()

    def cleanup_kernels(self):
        """Shutdown all kernels.

        The kernels will shutdown themselves when this process no longer
        exists, but explicit shutdown allows the KernelManagers to cleanup
        the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        info = self.contents_manager.info_string() + "\n"
        info += "%d active kernels \n" % len(self.kernel_manager._kernels)
        return info + "The Jupyter Notebook is running at: %s" % self.display_url

    def server_info(self):
        """Return a JSONable dict of information about this server."""
        return {
            'url': self.connection_url,
            'hostname': self.ip if self.ip else 'localhost',
            'port': self.port,
            'secure': bool(self.certfile),
            'base_url': self.base_url,
            'notebook_dir': os.path.abspath(self.notebook_dir),
            'pid': os.getpid()
        }

    def write_server_info_file(self):
        """Write the result of server_info() to the JSON file info_file."""
        with open(self.info_file, 'w') as f:
            json.dump(self.server_info(), f, indent=2)

    def remove_server_info_file(self):
        """Remove the nbserver-<pid>.json file created for this server.

        Ignores the error raised when the file has already been removed.
        """
        try:
            os.unlink(self.info_file)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def start(self):
        """ Start the Notebook server app, after initialization

        This method takes no arguments so all configuration and initialization
        must be done prior to calling this method."""
        super(NotebookApp, self).start()

        info = self.log.info
        for line in self.notebook_info().split("\n"):
            info(line)
        info(
            "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)."
        )

        self.write_server_info_file()

        if self.open_browser or self.file_to_run:
            try:
                browser = webbrowser.get(self.browser or None)
            except webbrowser.Error as e:
                self.log.warning('No web browser found: %s.' % e)
                browser = None
            if self.file_to_run:
                if not os.path.exists(self.file_to_run):
                    self.log.critical("%s does not exist" % self.file_to_run)
                    self.exit(1)
                relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
                uri = url_escape(
                    url_path_join('notebooks', *relpath.split(os.sep)))
            else:
                uri = self.default_url
            if browser:
                # open in a thread so a slow browser launch can't block startup
                b = lambda: browser.open(
                    url_path_join(self.connection_url, uri), new=2)
                threading.Thread(target=b).start()

        self.io_loop = ioloop.IOLoop.current()
        if sys.platform.startswith('win'):
            # add no-op to wake every 5s
            # to handle signals that may be ignored by the inner loop
            pc = ioloop.PeriodicCallback(lambda: None, 5000)
            pc.start()
        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            info("Interrupted...")
        finally:
            self.cleanup_kernels()
            self.remove_server_info_file()

    def stop(self):
        """Request shutdown of the HTTP server and IO loop (thread-safe)."""
        def _stop():
            self.http_server.stop()
            self.io_loop.stop()
        self.io_loop.add_callback(_stop)
class BaseConverter(LoggingConfigurable):
    """Base class for nbgrader converters: walks matched assignment
    directories, runs each notebook through an nbconvert exporter, and writes
    the results into an output directory tree.
    """

    # notebook filenames for the assignment currently being processed
    notebooks = List([])
    # mapping of assignment directory -> list of matched .ipynb paths
    assignments = Dict({})
    writer = Instance(FilesWriter)
    exporter = Instance(Exporter)
    exporter_class = Type(NotebookExporter, klass=Exporter)
    # preprocessor classes registered on the exporter in start()
    preprocessors = List([])

    force = Bool(
        False,
        help="Whether to overwrite existing assignments/submissions").tag(
            config=True)

    permissions = Integer(help=dedent(""" Permissions to set on files output by nbgrader. The default is generally read-only (444), with the exception of nbgrader assign and nbgrader feedback, in which case the user also has write permission. """)).tag(config=True)

    @default("permissions")
    def _permissions_default(self):
        # Decimal 444 on purpose: set_permissions() re-parses it as base 8 via
        # int(str(self.permissions), 8), yielding mode 0o444 (read-only).
        return 444

    coursedir = Instance(CourseDirectory, allow_none=True)

    def __init__(self, coursedir=None, **kwargs):
        """Store the course directory, inherit the parent app's logfile (if
        any), and clear nbconvert's default preprocessors."""
        self.coursedir = coursedir
        super(BaseConverter, self).__init__(**kwargs)
        if self.parent and hasattr(self.parent, "logfile"):
            self.logfile = self.parent.logfile
        else:
            self.logfile = None

        c = Config()
        c.Exporter.default_preprocessors = []
        self.update_config(c)

    def start(self):
        """Find notebooks, build the writer/exporter, register preprocessors,
        and convert everything from within the course root directory."""
        self.init_notebooks()
        self.writer = FilesWriter(parent=self, config=self.config)
        self.exporter = self.exporter_class(parent=self, config=self.config)
        for pp in self.preprocessors:
            self.exporter.register_preprocessor(pp)
        # convert relative to the course root, restoring the cwd afterwards
        currdir = os.getcwd()
        os.chdir(self.coursedir.root)
        try:
            self.convert_notebooks()
        finally:
            os.chdir(currdir)

    @default("classes")
    def _classes_default(self):
        """Expose writer/exporter and any configurable preprocessors in config."""
        classes = super(BaseConverter, self)._classes_default()
        classes.append(FilesWriter)
        classes.append(Exporter)
        for pp in self.preprocessors:
            if len(pp.class_traits(config=True)) > 0:
                classes.append(pp)
        return classes

    @property
    def _input_directory(self):
        # subclasses define where submissions are read from
        raise NotImplementedError

    @property
    def _output_directory(self):
        # subclasses define where converted output is written
        raise NotImplementedError

    def _format_source(self, assignment_id, student_id, escape=False):
        """Build the source path (or regex fragment, when escape=True) for a
        student/assignment pair."""
        return self.coursedir.format_path(self._input_directory, student_id,
                                          assignment_id, escape=escape)

    def _format_dest(self, assignment_id, student_id, escape=False):
        """Build the destination path (or regex fragment, when escape=True)
        for a student/assignment pair."""
        return self.coursedir.format_path(self._output_directory, student_id,
                                          assignment_id, escape=escape)

    def init_notebooks(self):
        """Populate self.assignments by globbing for matching notebooks;
        raises NbGraderException when nothing matches at all."""
        self.assignments = {}
        self.notebooks = []
        fullglob = self._format_source(self.coursedir.assignment_id,
                                       self.coursedir.student_id)
        for assignment in glob.glob(fullglob):
            found = glob.glob(
                os.path.join(assignment,
                             self.coursedir.notebook_id + ".ipynb"))
            if len(found) == 0:
                self.log.warning("No notebooks were matched in '%s'",
                                 assignment)
                continue
            self.assignments[assignment] = found

        if len(self.assignments) == 0:
            msg = "No notebooks were matched by '%s'" % fullglob
            self.log.error(msg)
            raise NbGraderException(msg)

    def init_single_notebook_resources(self, notebook_filename):
        """Extract student/assignment/notebook ids from the filename (by
        matching it against the source-path pattern) and build the nbconvert
        resources dict for this notebook."""
        regexp = re.escape(os.path.sep).join([
            self._format_source("(?P<assignment_id>.*)", "(?P<student_id>.*)",
                                escape=True),
            "(?P<notebook_id>.*).ipynb"
        ])

        m = re.match(regexp, notebook_filename)
        if m is None:
            msg = "Could not match '%s' with regexp '%s'" % (notebook_filename,
                                                             regexp)
            self.log.error(msg)
            raise NbGraderException(msg)

        gd = m.groupdict()
        self.log.debug("Student: %s", gd['student_id'])
        self.log.debug("Assignment: %s", gd['assignment_id'])
        self.log.debug("Notebook: %s", gd['notebook_id'])

        resources = {}
        resources['unique_key'] = gd['notebook_id']
        resources['output_files_dir'] = '%s_files' % gd['notebook_id']
        resources['nbgrader'] = {}
        resources['nbgrader']['student'] = gd['student_id']
        resources['nbgrader']['assignment'] = gd['assignment_id']
        resources['nbgrader']['notebook'] = gd['notebook_id']
        resources['nbgrader']['db_url'] = self.coursedir.db_url
        return resources

    def write_single_notebook(self, output, resources):
        """Write one exported notebook into the per-student destination dir."""
        # configure the writer build directory
        self.writer.build_directory = self._format_dest(
            resources['nbgrader']['assignment'],
            resources['nbgrader']['student'])
        # write out the results
        self.writer.write(output,
                          resources,
                          notebook_name=resources['unique_key'])

    def init_destination(self, assignment_id, student_id):
        """Initialize the destination for an assignment.

        Returns whether the assignment should actually be processed or not
        (i.e. whether the initialization was successful).
        """
        dest = os.path.normpath(self._format_dest(assignment_id, student_id))

        # the destination doesn't exist, so we haven't processed it
        if self.coursedir.notebook_id == "*":
            if not os.path.exists(dest):
                return True
        else:
            # if any of the notebooks don't exist, then we want to process them
            for notebook in self.notebooks:
                filename = os.path.splitext(os.path.basename(
                    notebook))[0] + self.exporter.file_extension
                path = os.path.join(dest, filename)
                if not os.path.exists(path):
                    return True

        # if we have specified --force, then always remove existing stuff
        if self.force:
            if self.coursedir.notebook_id == "*":
                self.log.warning(
                    "Removing existing assignment: {}".format(dest))
                rmtree(dest)
            else:
                for notebook in self.notebooks:
                    filename = os.path.splitext(os.path.basename(
                        notebook))[0] + self.exporter.file_extension
                    path = os.path.join(dest, filename)
                    if os.path.exists(path):
                        self.log.warning(
                            "Removing existing notebook: {}".format(path))
                        remove(path)
            return True

        src = self._format_source(assignment_id, student_id)
        new_timestamp = self.coursedir.get_existing_timestamp(src)
        old_timestamp = self.coursedir.get_existing_timestamp(dest)

        # if --force hasn't been specified, but the source assignment is newer,
        # then we want to overwrite it
        if new_timestamp is not None and old_timestamp is not None and new_timestamp > old_timestamp:
            if self.coursedir.notebook_id == "*":
                self.log.warning(
                    "Updating existing assignment: {}".format(dest))
                rmtree(dest)
            else:
                for notebook in self.notebooks:
                    filename = os.path.splitext(os.path.basename(
                        notebook))[0] + self.exporter.file_extension
                    path = os.path.join(dest, filename)
                    if os.path.exists(path):
                        self.log.warning(
                            "Updating existing notebook: {}".format(path))
                        remove(path)
            return True

        # otherwise, we should skip the assignment
        self.log.info("Skipping existing assignment: {}".format(dest))
        return False

    def init_assignment(self, assignment_id, student_id):
        """Initializes resources/dependencies/etc. that are common to all
        notebooks in an assignment: copies every non-notebook, non-ignored
        file from the source tree into the destination tree."""
        source = self._format_source(assignment_id, student_id)
        dest = self._format_dest(assignment_id, student_id)

        # detect other files in the source directory
        for filename in find_all_files(source,
                                       self.coursedir.ignore + ["*.ipynb"]):
            # Make sure folder exists.
            path = os.path.join(dest, os.path.relpath(filename, source))
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            if os.path.exists(path):
                remove(path)
            self.log.info("Copying %s -> %s", filename, path)
            shutil.copy(filename, path)

    def set_permissions(self, assignment_id, student_id):
        """Recursively chmod every output file to self.permissions
        (interpreted as an octal mode, e.g. 444 -> 0o444)."""
        self.log.info("Setting destination file permissions to %s",
                      self.permissions)
        dest = os.path.normpath(self._format_dest(assignment_id, student_id))
        permissions = int(str(self.permissions), 8)
        for dirname, _, filenames in os.walk(dest):
            for filename in filenames:
                os.chmod(os.path.join(dirname, filename), permissions)

    def convert_single_notebook(self, notebook_filename):
        """Convert a single notebook.

        Performs the following steps:
            1. Initialize notebook resources
            2. Export the notebook to a particular format
            3. Write the exported notebook to file
        """
        self.log.info("Converting notebook %s", notebook_filename)
        resources = self.init_single_notebook_resources(notebook_filename)
        output, resources = self.exporter.from_filename(notebook_filename,
                                                        resources=resources)
        self.write_single_notebook(output, resources)

    def convert_notebooks(self):
        """Convert every matched assignment, cleaning up partial output on
        failure; raises NbGraderException summarizing any per-assignment
        errors at the end."""
        errors = []

        def _handle_failure(gd):
            # remove whatever partial output the failed conversion produced
            dest = os.path.normpath(
                self._format_dest(gd['assignment_id'], gd['student_id']))
            if self.coursedir.notebook_id == "*":
                if os.path.exists(dest):
                    self.log.warning(
                        "Removing failed assignment: {}".format(dest))
                    rmtree(dest)
            else:
                for notebook in self.notebooks:
                    filename = os.path.splitext(os.path.basename(
                        notebook))[0] + self.exporter.file_extension
                    path = os.path.join(dest, filename)
                    if os.path.exists(path):
                        self.log.warning(
                            "Removing failed notebook: {}".format(path))
                        remove(path)

        for assignment in sorted(self.assignments.keys()):
            # initialize the list of notebooks and the exporter
            self.notebooks = sorted(self.assignments[assignment])

            # parse out the assignment and student ids
            regexp = self._format_source("(?P<assignment_id>.*)",
                                         "(?P<student_id>.*)",
                                         escape=True)
            m = re.match(regexp, assignment)
            if m is None:
                msg = "Could not match '%s' with regexp '%s'" % (assignment,
                                                                 regexp)
                self.log.error(msg)
                raise NbGraderException(msg)
            gd = m.groupdict()

            try:
                # determine whether we actually even want to process this submission
                should_process = self.init_destination(gd['assignment_id'],
                                                       gd['student_id'])
                if not should_process:
                    continue

                # initialize the destination
                self.init_assignment(gd['assignment_id'], gd['student_id'])

                # convert all the notebooks
                for notebook_filename in self.notebooks:
                    self.convert_single_notebook(notebook_filename)

                # set assignment permissions
                self.set_permissions(gd['assignment_id'], gd['student_id'])

            except UnresponsiveKernelError:
                self.log.error(
                    "While processing assignment %s, the kernel became "
                    "unresponsive and we could not interrupt it. This probably "
                    "means that the students' code has an infinite loop that "
                    "consumes a lot of memory or something similar. nbgrader "
                    "doesn't know how to deal with this problem, so you will "
                    "have to manually edit the students' code (for example, to "
                    "just throw an error rather than enter an infinite loop). ",
                    assignment)
                errors.append((gd['assignment_id'], gd['student_id']))
                _handle_failure(gd)

            except sqlalchemy.exc.OperationalError:
                _handle_failure(gd)
                self.log.error(traceback.format_exc())
                msg = (
                    "There was an error accessing the nbgrader database. This "
                    "may occur if you recently upgraded nbgrader. To resolve "
                    "the issue, first BACK UP your database and then run the "
                    "command `nbgrader db upgrade`.")
                self.log.error(msg)
                raise NbGraderException(msg)

            except KeyboardInterrupt:
                _handle_failure(gd)
                self.log.error("Canceled")
                raise

            except Exception:
                self.log.error("There was an error processing assignment: %s",
                               assignment)
                self.log.error(traceback.format_exc())
                errors.append((gd['assignment_id'], gd['student_id']))
                _handle_failure(gd)

        if len(errors) > 0:
            for assignment_id, student_id in errors:
                self.log.error(
                    "There was an error processing assignment '{}' for student '{}'"
                    .format(assignment_id, student_id))

            if self.logfile:
                msg = (
                    "Please see the error log ({}) for details on the specific "
                    "errors on the above failures.".format(self.logfile))
            else:
                msg = (
                    "Please see the the above traceback for details on the specific "
                    "errors on the above failures.")

            self.log.error(msg)
            raise NbGraderException(msg)
class NbConvertApp(JupyterApp):
    """Application used to convert from notebook file type (``*.ipynb``)

    Conversion of each notebook proceeds in four steps (see the
    ``*_single_notebook`` methods): initialize resources, export, write,
    and (optionally) post-process.
    """

    version = __version__
    name = 'jupyter-nbconvert'
    aliases = nbconvert_aliases
    flags = nbconvert_flags

    @default('log_level')
    def _log_level_default(self):
        # Conversion progress is reported at INFO by default.
        return logging.INFO

    classes = List()

    @default('classes')
    def _classes_default(self):
        # Gather every Configurable exposed by the exporter, preprocessor,
        # writer, and postprocessor packages so that their config options
        # show up in --help-all and the generated documentation.
        classes = [NbConvertBase]
        for pkg in (exporters, preprocessors, writers, postprocessors):
            for name in dir(pkg):
                cls = getattr(pkg, name)
                if isinstance(cls, type) and issubclass(cls, Configurable):
                    classes.append(cls)
        return classes

    description = Unicode(
        u"""This application is used to convert notebook files (*.ipynb) to various other formats. WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""")

    output_base = Unicode(
        '',
        help='''overwrite base name use for output files. can only be used when converting one notebook at a time. ''').tag(config=True)

    use_output_suffix = Bool(
        True,
        help="""Whether to apply a suffix prior to the extension (only relevant when converting to notebook format). The suffix is determined by the exporter, and is usually '.nbconvert'.""").tag(config=True)

    output_files_dir = Unicode(
        '{notebook_name}_files',
        help='''Directory to copy extra files (figures) to. '{notebook_name}' in the string will be converted to notebook basename.''').tag(config=True)

    examples = Unicode(u""" The simplest way to use nbconvert is > jupyter nbconvert mynotebook.ipynb which will convert mynotebook.ipynb to the default format (probably HTML). You can specify the export format with `--to`. Options include {formats}. > jupyter nbconvert --to latex mynotebook.ipynb Both HTML and LaTeX support multiple output templates. LaTeX includes 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You can specify the flavor of the format used. > jupyter nbconvert --to html --template lab mynotebook.ipynb You can also pipe the output to stdout, rather than a file > jupyter nbconvert mynotebook.ipynb --stdout PDF is generated via latex > jupyter nbconvert mynotebook.ipynb --to pdf You can get (and serve) a Reveal.js-powered slideshow > jupyter nbconvert myslides.ipynb --to slides --post serve Multiple notebooks can be given at the command line in a couple of different ways: > jupyter nbconvert notebook*.ipynb > jupyter nbconvert notebook1.ipynb notebook2.ipynb or you can specify the notebooks list in a config file, containing:: c.NbConvertApp.notebooks = ["my_notebook.ipynb"] > jupyter nbconvert --config mycfg.py """.format(formats=get_export_names()))

    # Writer specific variables
    writer = Instance('nbconvert.writers.base.WriterBase',
                      help="""Instance of the writer class used to write the results of the conversion.""",
                      allow_none=True)
    writer_class = DottedObjectName(
        'FilesWriter',
        help="""Writer class used to write the results of the conversion""").tag(
            config=True)
    # Short names accepted for --writer on the command line.
    writer_aliases = {
        'fileswriter': 'nbconvert.writers.files.FilesWriter',
        'debugwriter': 'nbconvert.writers.debug.DebugWriter',
        'stdoutwriter': 'nbconvert.writers.stdout.StdoutWriter'
    }
    writer_factory = Type(allow_none=True)

    @observe('writer_class')
    def _writer_class_changed(self, change):
        # Resolve a short alias to its full import path, then import the
        # writer class so init_writer can instantiate it.
        new = change['new']
        if new.lower() in self.writer_aliases:
            new = self.writer_aliases[new.lower()]
        self.writer_factory = import_item(new)

    # Post-processor specific variables
    postprocessor = Instance(
        'nbconvert.postprocessors.base.PostProcessorBase',
        help="""Instance of the PostProcessor class used to write the results of the conversion.""",
        allow_none=True)

    postprocessor_class = DottedOrNone(
        help="""PostProcessor class used to write the results of the conversion""").tag(
            config=True)
    postprocessor_aliases = {
        'serve': 'nbconvert.postprocessors.serve.ServePostProcessor'
    }
    postprocessor_factory = Type(None, allow_none=True)

    @observe('postprocessor_class')
    def _postprocessor_class_changed(self, change):
        # Resolve alias first; the postprocessor is optional, so only import
        # when a class name was actually given.
        new = change['new']
        if new.lower() in self.postprocessor_aliases:
            new = self.postprocessor_aliases[new.lower()]
        if new:
            self.postprocessor_factory = import_item(new)

    ipywidgets_base_url = Unicode(
        "https://unpkg.com/",
        help="URL base for ipywidgets package").tag(config=True)

    export_format = Unicode(
        'html',
        allow_none=False,
        help="""The export format to be used, either one of the built-in formats {formats} or a dotted object name that represents the import path for an `Exporter` class""".format(formats=get_export_names())).tag(
            config=True)

    notebooks = List([],
                     help="""List of notebooks to convert. Wildcards are supported. Filenames passed positionally will be added to the list. """).tag(config=True)

    from_stdin = Bool(
        False, help="read a single notebook from stdin.").tag(config=True)

    @catch_config_error
    def initialize(self, argv=None):
        """Initialize application, notebooks, writer, and postprocessor"""
        self.init_syspath()
        super(NbConvertApp, self).initialize(argv)
        self.init_notebooks()
        self.init_writer()
        self.init_postprocessor()

    def init_syspath(self):
        """Add the cwd to the sys.path ($PYTHONPATH)"""
        sys.path.insert(0, os.getcwd())

    def init_notebooks(self):
        """Construct the list of notebooks.

        If notebooks are passed on the command-line, they override (rather
        than add) notebooks specified in config files. Glob each notebook
        to replace notebook patterns with filenames.
        """
        # Specifying notebooks on the command-line overrides (rather than
        # adds) the notebook list
        if self.extra_args:
            patterns = self.extra_args
        else:
            patterns = self.notebooks

        # Use glob to replace all the notebook patterns with filenames.
        filenames = []
        for pattern in patterns:
            # Use glob to find matching filenames. Allow the user to convert
            # notebooks without having to type the extension.
            globbed_files = glob.glob(pattern)
            globbed_files.extend(glob.glob(pattern + '.ipynb'))
            if not globbed_files:
                self.log.warning("pattern %r matched no files", pattern)

            for filename in globbed_files:
                # Keep first occurrence only, preserving pattern order.
                if filename not in filenames:
                    filenames.append(filename)
        self.notebooks = filenames

    def init_writer(self):
        """Initialize the writer (which is stateless)"""
        self._writer_class_changed({'new': self.writer_class})
        self.writer = self.writer_factory(parent=self)
        if hasattr(self.writer,
                   'build_directory') and self.writer.build_directory != '':
            # A writer that manages its own build directory controls the
            # output names itself, so the '.nbconvert' suffix is not applied.
            self.use_output_suffix = False

    def init_postprocessor(self):
        """Initialize the postprocessor (which is stateless)"""
        self._postprocessor_class_changed({'new': self.postprocessor_class})
        if self.postprocessor_factory:
            self.postprocessor = self.postprocessor_factory(parent=self)

    def start(self):
        """Run start after initialization process has completed"""
        super(NbConvertApp, self).start()
        self.convert_notebooks()

    def init_single_notebook_resources(self, notebook_filename):
        """Step 1: Initialize resources

        This initializes the resources dictionary for a single notebook.

        Returns
        -------
        dict
            resources dictionary for a single notebook that MUST include the following keys:

            - config_dir: the location of the Jupyter config directory
            - unique_key: the notebook name
            - output_files_dir: a directory where output files (not
              including the notebook itself) should be saved
        """
        basename = os.path.basename(notebook_filename)
        notebook_name = basename[:basename.rfind('.')]
        if self.output_base:
            # strip duplicate extension from output_base, to avoid Basename.ext.ext
            if getattr(self.exporter, 'file_extension', False):
                base, ext = os.path.splitext(self.output_base)
                if ext == self.exporter.file_extension:
                    self.output_base = base
            notebook_name = self.output_base

        self.log.debug("Notebook name is '%s'", notebook_name)

        # first initialize the resources we want to use
        resources = {}
        resources['config_dir'] = self.config_dir
        resources['unique_key'] = notebook_name

        output_files_dir = (self.output_files_dir.format(
            notebook_name=notebook_name))

        resources['output_files_dir'] = output_files_dir
        resources['ipywidgets_base_url'] = self.ipywidgets_base_url

        return resources

    def export_single_notebook(self,
                               notebook_filename,
                               resources,
                               input_buffer=None):
        """Step 2: Export the notebook

        Exports the notebook to a particular format according to the specified
        exporter. This function returns the output and (possibly modified)
        resources from the exporter.

        Parameters
        ----------
        notebook_filename : str
            name of notebook file.
        resources : dict
        input_buffer :
            readable file-like object returning unicode.
            if not None, notebook_filename is ignored

        Returns
        -------
        output
        dict
            resources (possibly modified)
        """
        try:
            if input_buffer is not None:
                output, resources = self.exporter.from_file(
                    input_buffer, resources=resources)
            else:
                output, resources = self.exporter.from_filename(
                    notebook_filename, resources=resources)
        except ConversionException:
            self.log.error("Error while converting '%s'",
                           notebook_filename,
                           exc_info=True)
            self.exit(1)

        return output, resources

    def write_single_notebook(self, output, resources):
        """Step 3: Write the notebook to file

        This writes output from the exporter to file using the specified writer.
        It returns the results from the writer.

        Parameters
        ----------
        output :
        resources : dict
            resources for a single notebook including name, config directory
            and directory to save output

        Returns
        -------
        file
            results from the specified writer output of exporter
        """
        if 'unique_key' not in resources:
            raise KeyError(
                "unique_key MUST be specified in the resources, but it is not")

        notebook_name = resources['unique_key']
        # The '.nbconvert' suffix only applies when the user did not pick an
        # explicit output name.
        if self.use_output_suffix and not self.output_base:
            notebook_name += resources.get('output_suffix', '')

        write_results = self.writer.write(output,
                                          resources,
                                          notebook_name=notebook_name)
        return write_results

    def postprocess_single_notebook(self, write_results):
        """Step 4: Post-process the written file

        Only used if a postprocessor has been specified. After the
        converted notebook is written to a file in Step 3, this post-processes
        the notebook.
        """
        # Post-process if post processor has been defined.
        if hasattr(self, 'postprocessor') and self.postprocessor:
            self.postprocessor(write_results)

    def convert_single_notebook(self, notebook_filename, input_buffer=None):
        """Convert a single notebook.

        Performs the following steps:

        1. Initialize notebook resources
        2. Export the notebook to a particular format
        3. Write the exported notebook to file
        4. (Maybe) postprocess the written file

        Parameters
        ----------
        notebook_filename : str
        input_buffer :
            If input_buffer is not None, conversion is done and the buffer is
            used as source into a file basenamed by the notebook_filename
            argument.
        """
        if input_buffer is None:
            self.log.info("Converting notebook %s to %s", notebook_filename,
                          self.export_format)
        else:
            self.log.info("Converting notebook into %s", self.export_format)

        resources = self.init_single_notebook_resources(notebook_filename)
        output, resources = self.export_single_notebook(
            notebook_filename, resources, input_buffer=input_buffer)
        write_results = self.write_single_notebook(output, resources)
        self.postprocess_single_notebook(write_results)

    def convert_notebooks(self):
        """Convert the notebooks in the self.notebook traitlet """
        # check that the output base isn't specified if there is more than
        # one notebook to convert
        if self.output_base != '' and len(self.notebooks) > 1:
            self.log.error("""
                UsageError: --output flag or `NbConvertApp.output_base` config option
                cannot be used when converting multiple notebooks.
                """)
            self.exit(1)

        # initialize the exporter
        cls = get_exporter(self.export_format)
        self.exporter = cls(config=self.config)

        # no notebooks to convert!
        if len(self.notebooks) == 0 and not self.from_stdin:
            self.print_help()
            sys.exit(-1)

        # convert each notebook
        if not self.from_stdin:
            for notebook_filename in self.notebooks:
                self.convert_single_notebook(notebook_filename)
        else:
            input_buffer = unicode_stdin_stream()
            # default name when conversion from stdin
            self.convert_single_notebook("notebook.ipynb",
                                         input_buffer=input_buffer)

    def document_flag_help(self):
        """Return a string containing descriptions of all the flags."""
        flags = "The following flags are defined:\n\n"
        for flag, (cfg, fhelp) in self.flags.items():
            flags += "{}\n".format(flag)
            flags += indent(fill(fhelp, 80)) + '\n\n'
            flags += indent(fill("Long Form: " + str(cfg), 80)) + '\n\n'
        return flags

    def document_alias_help(self):
        """Return a string containing all of the aliases"""
        # NOTE: typo fix — was "The folowing aliases"
        aliases = "The following aliases are defined:\n\n"
        for alias, longname in self.aliases.items():
            aliases += "\t**{}** ({})\n\n".format(alias, longname)
        return aliases

    def document_config_options(self):
        """Provides a much improves version of the configuration documentation by
        breaking the configuration options into app, exporter, writer,
        preprocessor, postprocessor, and other sections.
        """
        # Bucket every configurable class by the role its name implies; a
        # class may land in several buckets (e.g. "NotebookExporter").
        categories = {
            category: [
                c for c in self._classes_inc_parents()
                if category in c.__name__.lower()
            ]
            for category in
            ['app', 'exporter', 'writer', 'preprocessor', 'postprocessor']
        }
        # Whatever matched no role name goes into "other".
        accounted_for = {
            c
            for category in categories.values() for c in category
        }
        categories['other'] = [
            c for c in self._classes_inc_parents() if c not in accounted_for
        ]

        header = dedent("""
        {section} Options
        -----------------------
        """)

        sections = ""
        for category in categories:
            sections += header.format(section=category.title())
            if category in ['exporter', 'preprocessor', 'writer']:
                sections += ".. image:: _static/{image}_inheritance.png\n\n".format(
                    image=category)
            sections += '\n'.join(c.class_config_rst_doc()
                                  for c in categories[category])

        # Escape bare colons so Sphinx does not parse them as field markers.
        return sections.replace(' : ', r' \: ')
class KernelClient(ConnectionFileMixin):
    """Communicates with a single kernel on any host via zmq channels.

    There are five channels associated with each kernel:

    * shell: for request/reply calls to the kernel.
    * iopub: for the kernel to publish results to frontends.
    * hb: for monitoring the kernel's heartbeat.
    * stdin: for frontends to reply to raw_input calls in the kernel.
    * control: for kernel management calls to the kernel.

    The messages that can be sent on these channels are exposed as methods of the
    client (KernelClient.execute, complete, history, etc.). These methods only
    send the message, they don't wait for a reply. To get results, use e.g.
    :meth:`get_shell_msg` to fetch messages from the shell channel.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.asyncio.Context)

    def _context_default(self) -> zmq.asyncio.Context:
        # A fresh asyncio-aware ZMQ context per client unless one is injected.
        return zmq.asyncio.Context()

    # The classes to use for the various channels
    shell_channel_class = Type(ChannelABC)
    iopub_channel_class = Type(ChannelABC)
    stdin_channel_class = Type(ChannelABC)
    hb_channel_class = Type(HBChannelABC)
    control_channel_class = Type(ChannelABC)

    # Protected traits — lazily populated by the channel properties below.
    _shell_channel = Any()
    _iopub_channel = Any()
    _stdin_channel = Any()
    _hb_channel = Any()
    _control_channel = Any()

    # flag for whether execute requests should be allowed to call raw_input:
    allow_stdin: bool = True

    # --------------------------------------------------------------------------
    # Channel proxy methods
    # --------------------------------------------------------------------------

    async def _async_get_shell_msg(self, *args, **kwargs) -> t.Dict[str, t.Any]:
        """Get a message from the shell channel"""
        return await self.shell_channel.get_msg(*args, **kwargs)

    async def _async_get_iopub_msg(self, *args, **kwargs) -> t.Dict[str, t.Any]:
        """Get a message from the iopub channel"""
        return await self.iopub_channel.get_msg(*args, **kwargs)

    async def _async_get_stdin_msg(self, *args, **kwargs) -> t.Dict[str, t.Any]:
        """Get a message from the stdin channel"""
        return await self.stdin_channel.get_msg(*args, **kwargs)

    async def _async_get_control_msg(self, *args, **kwargs) -> t.Dict[str, t.Any]:
        """Get a message from the control channel"""
        return await self.control_channel.get_msg(*args, **kwargs)

    async def _async_wait_for_ready(self, timeout: t.Optional[float] = None) -> None:
        """Waits for a response when a client is blocked

        - Sets future time for timeout
        - Blocks on shell channel until a message is received
        - Exit if the kernel has died
        - If client times out before receiving a message from the kernel, send RuntimeError
        - Flush the IOPub channel
        """
        if timeout is None:
            # No timeout requested: wait forever.
            timeout = float("inf")
        abs_timeout = time.time() + timeout

        from .manager import KernelManager

        if not isinstance(self.parent, KernelManager):
            # This Client was not created by a KernelManager,
            # so wait for kernel to become responsive to heartbeats
            # before checking for kernel_info reply
            while not await ensure_async(self.is_alive()):
                if time.time() > abs_timeout:
                    raise RuntimeError(
                        "Kernel didn't respond to heartbeats in %d seconds and timed out"
                        % timeout)
                await asyncio.sleep(0.2)

        # Wait for kernel info reply on shell channel
        while True:
            # Re-send the request each iteration in case earlier ones were
            # dropped before the kernel was listening.
            self._kernel_info()
            try:
                msg = await self.shell_channel.get_msg(timeout=1)
            except Empty:
                pass
            else:
                if msg["msg_type"] == "kernel_info_reply":
                    # Checking that IOPub is connected. If it is not connected, start over.
                    try:
                        await self.iopub_channel.get_msg(timeout=0.2)
                    except Empty:
                        pass
                    else:
                        self._handle_kernel_info_reply(msg)
                        break

            if not await ensure_async(self.is_alive()):
                raise RuntimeError(
                    "Kernel died before replying to kernel_info")

            # Check if current time is ready check time plus timeout
            if time.time() > abs_timeout:
                raise RuntimeError("Kernel didn't respond in %d seconds" %
                                   timeout)

        # Flush IOPub channel: drain any messages generated while waiting.
        while True:
            try:
                msg = await self.iopub_channel.get_msg(timeout=0.2)
            except Empty:
                break

    async def _async_recv_reply(self,
                                msg_id: str,
                                timeout: t.Optional[float] = None,
                                channel: str = "shell") -> t.Dict[str, t.Any]:
        """Receive and return the reply for a given request"""
        if timeout is not None:
            deadline = time.monotonic() + timeout
        while True:
            if timeout is not None:
                # Shrink the remaining budget on every retry.
                timeout = max(0, deadline - time.monotonic())
            try:
                if channel == "control":
                    reply = await self._async_get_control_msg(timeout=timeout)
                else:
                    reply = await self._async_get_shell_msg(timeout=timeout)
            except Empty as e:
                raise TimeoutError("Timeout waiting for reply") from e
            if reply["parent_header"].get("msg_id") != msg_id:
                # not my reply, someone may have forgotten to retrieve theirs
                continue
            return reply

    def _stdin_hook_default(self, msg: t.Dict[str, t.Any]) -> None:
        """Handle an input request"""
        content = msg["content"]
        if content.get("password", False):
            prompt = getpass
        else:
            prompt = input  # type: ignore

        try:
            raw_data = prompt(content["prompt"])
        except EOFError:
            # turn EOFError into EOF character
            raw_data = "\x04"
        except KeyboardInterrupt:
            sys.stdout.write("\n")
            return

        # only send stdin reply if there *was not* another request
        # or execution finished while we were reading.
        if not (self.stdin_channel.msg_ready()
                or self.shell_channel.msg_ready()):
            self.input(raw_data)

    def _output_hook_default(self, msg: t.Dict[str, t.Any]) -> None:
        """Default hook for redisplaying plain-text output"""
        msg_type = msg["header"]["msg_type"]
        content = msg["content"]
        if msg_type == "stream":
            # content["name"] is "stdout" or "stderr"; write to the matching
            # local stream.
            stream = getattr(sys, content["name"])
            stream.write(content["text"])
        elif msg_type in ("display_data", "execute_result"):
            sys.stdout.write(content["data"].get("text/plain", ""))
        elif msg_type == "error":
            print("\n".join(content["traceback"]), file=sys.stderr)

    def _output_hook_kernel(
        self,
        session: Session,
        socket: zmq.sugar.socket.Socket,
        parent_header,
        msg: t.Dict[str, t.Any],
    ) -> None:
        """Output hook when running inside an IPython kernel

        adds rich output support.
        """
        msg_type = msg["header"]["msg_type"]
        if msg_type in ("display_data", "execute_result", "error"):
            # Forward rich output to the kernel's own display publisher.
            session.send(socket, msg_type, msg["content"],
                         parent=parent_header)
        else:
            self._output_hook_default(msg)

    # --------------------------------------------------------------------------
    # Channel management methods
    # --------------------------------------------------------------------------

    def start_channels(
        self,
        shell: bool = True,
        iopub: bool = True,
        stdin: bool = True,
        hb: bool = True,
        control: bool = True,
    ) -> None:
        """Starts the channels for this kernel.

        This will create the channels if they do not exist and then start
        them (their activity runs in a thread). If port numbers of 0 are
        being used (random ports) then you must first call
        :meth:`start_kernel`. If the channels have been stopped and you
        call this, :class:`RuntimeError` will be raised.
        """
        if iopub:
            self.iopub_channel.start()
        if shell:
            self.shell_channel.start()
        if stdin:
            self.stdin_channel.start()
            self.allow_stdin = True
        else:
            # Without a stdin channel, the kernel must not send input requests.
            self.allow_stdin = False
        if hb:
            self.hb_channel.start()
        if control:
            self.control_channel.start()

    def stop_channels(self) -> None:
        """Stops all the running channels for this kernel.

        This stops their event loops and joins their threads.
        """
        if self.shell_channel.is_alive():
            self.shell_channel.stop()
        if self.iopub_channel.is_alive():
            self.iopub_channel.stop()
        if self.stdin_channel.is_alive():
            self.stdin_channel.stop()
        if self.hb_channel.is_alive():
            self.hb_channel.stop()
        if self.control_channel.is_alive():
            self.control_channel.stop()

    @property
    def channels_running(self) -> bool:
        """Are any of the channels created and running?"""
        return (self.shell_channel.is_alive() or self.iopub_channel.is_alive()
                or self.stdin_channel.is_alive() or self.hb_channel.is_alive()
                or self.control_channel.is_alive())

    ioloop = None  # Overridden in subclasses that use pyzmq event loop

    @property
    def shell_channel(self) -> t.Any:
        """Get the shell channel object for this kernel."""
        if self._shell_channel is None:
            url = self._make_url("shell")
            self.log.debug("connecting shell channel to %s", url)
            socket = self.connect_shell(identity=self.session.bsession)
            self._shell_channel = self.shell_channel_class(
                socket, self.session, self.ioloop)
        return self._shell_channel

    @property
    def iopub_channel(self) -> t.Any:
        """Get the iopub channel object for this kernel."""
        if self._iopub_channel is None:
            url = self._make_url("iopub")
            self.log.debug("connecting iopub channel to %s", url)
            socket = self.connect_iopub()
            self._iopub_channel = self.iopub_channel_class(
                socket, self.session, self.ioloop)
        return self._iopub_channel

    @property
    def stdin_channel(self) -> t.Any:
        """Get the stdin channel object for this kernel."""
        if self._stdin_channel is None:
            url = self._make_url("stdin")
            self.log.debug("connecting stdin channel to %s", url)
            socket = self.connect_stdin(identity=self.session.bsession)
            self._stdin_channel = self.stdin_channel_class(
                socket, self.session, self.ioloop)
        return self._stdin_channel

    @property
    def hb_channel(self) -> t.Any:
        """Get the hb channel object for this kernel."""
        if self._hb_channel is None:
            url = self._make_url("hb")
            self.log.debug("connecting heartbeat channel to %s", url)
            # Heartbeat channel takes the context/url, not a connected socket.
            self._hb_channel = self.hb_channel_class(self.context,
                                                     self.session, url)
        return self._hb_channel

    @property
    def control_channel(self) -> t.Any:
        """Get the control channel object for this kernel."""
        if self._control_channel is None:
            url = self._make_url("control")
            self.log.debug("connecting control channel to %s", url)
            socket = self.connect_control(identity=self.session.bsession)
            self._control_channel = self.control_channel_class(
                socket, self.session, self.ioloop)
        return self._control_channel

    async def _async_is_alive(self) -> bool:
        """Is the kernel process still running?"""
        from .manager import KernelManager

        if isinstance(self.parent, KernelManager):
            # This KernelClient was created by a KernelManager,
            # we can ask the parent KernelManager:
            return await ensure_async(self.parent.is_alive())
        if self._hb_channel is not None:
            # We don't have access to the KernelManager,
            # so we use the heartbeat.
            return self._hb_channel.is_beating()
        else:
            # no heartbeat and not local, we can't tell if it's running,
            # so naively return True
            return True

    async def _async_execute_interactive(
        self,
        code: str,
        silent: bool = False,
        store_history: bool = True,
        user_expressions: t.Optional[t.Dict[str, t.Any]] = None,
        allow_stdin: t.Optional[bool] = None,
        stop_on_error: bool = True,
        timeout: t.Optional[float] = None,
        output_hook: t.Optional[t.Callable] = None,
        stdin_hook: t.Optional[t.Callable] = None,
    ) -> t.Dict[str, t.Any]:
        """Execute code in the kernel interactively

        Output will be redisplayed, and stdin prompts will be relayed as well.
        If an IPython kernel is detected, rich output will be displayed.

        You can pass a custom output_hook callable that will be called
        with every IOPub message that is produced instead of the default redisplay.

        .. versionadded:: 5.0

        Parameters
        ----------
        code : str
            A string of code in the kernel's language.
        silent : bool, optional (default False)
            If set, the kernel will execute the code as quietly possible, and
            will force store_history to be False.
        store_history : bool, optional (default True)
            If set, the kernel will store command history. This is forced
            to be False if silent is True.
        user_expressions : dict, optional
            A dict mapping names to expressions to be evaluated in the user's
            dict. The expression values are returned as strings formatted using
            :func:`repr`.
        allow_stdin : bool, optional (default self.allow_stdin)
            Flag for whether the kernel can send stdin requests to frontends.

            Some frontends (e.g. the Notebook) do not support stdin requests.
            If raw_input is called from code executed from such a frontend, a
            StdinNotImplementedError will be raised.
        stop_on_error: bool, optional (default True)
            Flag whether to abort the execution queue, if an exception is encountered.
        timeout: float or None (default: None)
            Timeout to use when waiting for a reply
        output_hook: callable(msg)
            Function to be called with output messages.
            If not specified, output will be redisplayed.
        stdin_hook: callable(msg)
            Function to be called with stdin_request messages.
            If not specified, input/getpass will be called.

        Returns
        -------
        reply: dict
            The reply message for this request
        """
        if not self.iopub_channel.is_alive():
            raise RuntimeError(
                "IOPub channel must be running to receive output")
        if allow_stdin is None:
            allow_stdin = self.allow_stdin
        if allow_stdin and not self.stdin_channel.is_alive():
            raise RuntimeError("stdin channel must be running to allow input")
        msg_id = self._execute(
            code,
            silent=silent,
            store_history=store_history,
            user_expressions=user_expressions,
            allow_stdin=allow_stdin,
            stop_on_error=stop_on_error,
        )
        if stdin_hook is None:
            stdin_hook = self._stdin_hook_default
        if output_hook is None:
            # detect IPython kernel
            if "IPython" in sys.modules:
                from IPython import get_ipython  # type: ignore

                ip = get_ipython()
                in_kernel = getattr(ip, "kernel", False)
                if in_kernel:
                    # Running inside a kernel: forward output through its
                    # display publisher for rich output support.
                    output_hook = partial(
                        self._output_hook_kernel,
                        ip.display_pub.session,
                        ip.display_pub.pub_socket,
                        ip.display_pub.parent_header,
                    )
        if output_hook is None:
            # default: redisplay plain-text outputs
            output_hook = self._output_hook_default

        # set deadline based on timeout
        if timeout is not None:
            deadline = time.monotonic() + timeout
        else:
            timeout_ms = None

        poller = zmq.Poller()
        iopub_socket = self.iopub_channel.socket
        poller.register(iopub_socket, zmq.POLLIN)
        if allow_stdin:
            stdin_socket = self.stdin_channel.socket
            poller.register(stdin_socket, zmq.POLLIN)
        else:
            stdin_socket = None

        # wait for output and redisplay it
        while True:
            if timeout is not None:
                # Recompute the remaining budget each pass through the loop.
                timeout = max(0, deadline - time.monotonic())
                timeout_ms = int(1000 * timeout)
            events = dict(poller.poll(timeout_ms))
            if not events:
                raise TimeoutError("Timeout waiting for output")
            if stdin_socket in events:
                req = await self.stdin_channel.get_msg(timeout=0)
                stdin_hook(req)
                continue
            if iopub_socket not in events:
                continue
            msg = await self.iopub_channel.get_msg(timeout=0)

            if msg["parent_header"].get("msg_id") != msg_id:
                # not from my request
                continue
            output_hook(msg)

            # stop on idle
            if (msg["header"]["msg_type"] == "status"
                    and msg["content"]["execution_state"] == "idle"):
                break

        # output is done, get the reply
        if timeout is not None:
            timeout = max(0, deadline - time.monotonic())
        return await self._async_recv_reply(msg_id, timeout=timeout)

    # Methods to send specific messages on channels
    def _execute(
        self,
        code: str,
        silent: bool = False,
        store_history: bool = True,
        user_expressions: t.Optional[t.Dict[str, t.Any]] = None,
        allow_stdin: t.Optional[bool] = None,
        stop_on_error: bool = True,
    ) -> str:
        """Execute code in the kernel.

        Parameters
        ----------
        code : str
            A string of code in the kernel's language.
        silent : bool, optional (default False)
            If set, the kernel will execute the code as quietly possible, and
            will force store_history to be False.
        store_history : bool, optional (default True)
            If set, the kernel will store command history. This is forced
            to be False if silent is True.
        user_expressions : dict, optional
            A dict mapping names to expressions to be evaluated in the user's
            dict. The expression values are returned as strings formatted using
            :func:`repr`.
        allow_stdin : bool, optional (default self.allow_stdin)
            Flag for whether the kernel can send stdin requests to frontends.

            Some frontends (e.g. the Notebook) do not support stdin requests.
            If raw_input is called from code executed from such a frontend, a
            StdinNotImplementedError will be raised.
        stop_on_error: bool, optional (default True)
            Flag whether to abort the execution queue, if an exception is encountered.

        Returns
        -------
        The msg_id of the message sent.
        """
        if user_expressions is None:
            user_expressions = {}
        if allow_stdin is None:
            allow_stdin = self.allow_stdin

        # Don't waste network traffic if inputs are invalid
        if not isinstance(code, str):
            raise ValueError("code %r must be a string" % code)
        validate_string_dict(user_expressions)

        # Create class for content/msg creation. Related to, but possibly
        # not in Session.
        content = dict(
            code=code,
            silent=silent,
            store_history=store_history,
            user_expressions=user_expressions,
            allow_stdin=allow_stdin,
            stop_on_error=stop_on_error,
        )
        msg = self.session.msg("execute_request", content)
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _complete(self, code: str, cursor_pos: t.Optional[int] = None) -> str:
        """Tab complete text in the kernel's namespace.

        Parameters
        ----------
        code : str
            The context in which completion is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the completion was requested.
            Default: ``len(code)``

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos)
        msg = self.session.msg("complete_request", content)
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _inspect(self,
                 code: str,
                 cursor_pos: t.Optional[int] = None,
                 detail_level: int = 0) -> str:
        """Get metadata information about an object in the kernel's namespace.

        It is up to the kernel to determine the appropriate object to inspect.

        Parameters
        ----------
        code : str
            The context in which info is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the info was requested.
            Default: ``len(code)``
        detail_level : int, optional
            The level of detail for the introspection (0-2)

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(
            code=code,
            cursor_pos=cursor_pos,
            detail_level=detail_level,
        )
        msg = self.session.msg("inspect_request", content)
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _history(
        self,
        raw: bool = True,
        output: bool = False,
        hist_access_type: str = "range",
        **kwargs,
    ) -> str:
        """Get entries from the kernel's history list.

        Parameters
        ----------
        raw : bool
            If True, return the raw input.
        output : bool
            If True, then return the output as well.
        hist_access_type : str
            'range' (fill in session, start and stop params),
            'tail' (fill in n) or 'search' (fill in pattern param).
        session : int
            For a range request, the session from which to get lines. Session
            numbers are positive integers; negative ones count back from the
            current session.
        start : int
            The first line number of a history range.
        stop : int
            The final (excluded) line number of a history range.
        n : int
            The number of lines of history to get for a tail request.
        pattern : str
            The glob-syntax pattern for a search request.

        Returns
        -------
        The ID of the message sent.
        """
        if hist_access_type == "range":
            # Range requests need session/start defaults so the kernel does
            # not reject a partially specified request.
            kwargs.setdefault("session", 0)
            kwargs.setdefault("start", 0)
        content = dict(raw=raw,
                       output=output,
                       hist_access_type=hist_access_type,
                       **kwargs)
        msg = self.session.msg("history_request", content)
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _kernel_info(self) -> str:
        """Request kernel info

        Returns
        -------
        The msg_id of the message sent
        """
        msg = self.session.msg("kernel_info_request")
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _comm_info(self, target_name: t.Optional[str] = None) -> str:
        """Request comm info

        Returns
        -------
        The msg_id of the message sent
        """
        if target_name is None:
            content = {}
        else:
            content = dict(target_name=target_name)
        msg = self.session.msg("comm_info_request", content)
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def _handle_kernel_info_reply(self, msg: t.Dict[str, t.Any]) -> None:
        """handle kernel info reply

        sets protocol adaptation version. This might
        be run from a separate thread.
        """
        adapt_version = int(msg["content"]["protocol_version"].split(".")[0])
        if adapt_version != major_protocol_version:
            # Adapt messages to/from the kernel's (older) protocol version.
            self.session.adapt_version = adapt_version

    def is_complete(self, code: str) -> str:
        """Ask the kernel whether some code is complete and ready to execute."""
        msg = self.session.msg("is_complete_request", {"code": code})
        self.shell_channel.send(msg)
        return msg["header"]["msg_id"]

    def input(self, string: str) -> None:
        """Send a string of raw input to the kernel.

        This should only be called in response to the kernel sending an
        ``input_request`` message on the stdin channel.
        """
        content = dict(value=string)
        msg = self.session.msg("input_reply", content)
        self.stdin_channel.send(msg)

    def _shutdown(self, restart: bool = False) -> str:
        """Request an immediate kernel shutdown on the control channel.

        Upon receipt of the (empty) reply, client code can safely assume that
        the kernel has shut down and it's safe to forcefully terminate it if
        it's still alive.

        The kernel will send the reply via a function registered with Python's
        atexit module, ensuring it's truly done as the kernel is done with all
        normal operation.

        Returns
        -------
        The msg_id of the message sent
        """
        # Send quit message to kernel. Once we implement kernel-side setattr,
        # this should probably be done that way, but for now this will do.
        msg = self.session.msg("shutdown_request", {"restart": restart})
        self.control_channel.send(msg)
        return msg["header"]["msg_id"]
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, ConnectionFileMixin):
    """Application that launches an IPython kernel: binds the ZMQ sockets,
    starts the heartbeat, redirects the std streams over IOPub, creates the
    Kernel object, and runs the IO loop.
    """

    name = 'ipython-kernel'
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
    # the kernel class, as an importstring
    kernel_class = Type('ipykernel.ipkernel.IPythonKernel',
                        config=True,
                        klass='ipykernel.kernelbase.Kernel',
                        help="""The Kernel subclass to be used.

    This should allow easy re-use of the IPKernelApp entry point
    to configure and launch kernels other than IPython's own.
    """)
    # the Kernel instance created by init_kernel()
    kernel = Any()
    poller = Any()  # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat, allow_none=True)
    # populated by log_connection_info() with the bound port numbers
    ports = Dict()

    subcommands = {
        'install': ('ipykernel.kernelspec.InstallIPythonKernelSpecApp',
                    'Install the IPython kernel'),
    }

    # connection info:
    connection_dir = Unicode()

    def _connection_dir_default(self):
        # default location for connection files: the Jupyter runtime dir
        return jupyter_runtime_dir()

    @property
    def abs_connection_file(self):
        # a bare filename is resolved relative to connection_dir;
        # anything with a directory component is returned as-is
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.connection_dir, self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
    no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
    outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
                                       config=True,
                                       help="The importstring for the OutStream factory")
    displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
                                         config=True,
                                         help="The importstring for the DisplayHook factory")

    # polling
    parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
                            config=True,
                            help="""kill this process if its parent dies. On Windows, the argument
        specifies the HANDLE of the parent process, otherwise it is simply boolean.
        """)
    interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
                        config=True,
                        help="""ONLY USED ON WINDOWS
        Interrupt this process when the parent is signaled.
        """)

    def init_crash_handler(self):
        # replace the BaseIPythonApplication crash handler with a plain
        # stderr traceback (see excepthook below)
        sys.excepthook = self.excepthook

    def excepthook(self, etype, evalue, tb):
        # write uncaught traceback to 'real' stderr, not zmq-forwarder
        traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)

    def init_poller(self):
        # start a thread that watches the parent process and kills/interrupts
        # this kernel when the parent goes away
        if sys.platform == 'win32':
            if self.interrupt or self.parent_handle:
                self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
        elif self.parent_handle:
            self.poller = ParentPollerUnix()

    def _bind_socket(self, s, port):
        """Bind socket ``s`` on the configured transport.

        port <= 0 means "pick one": a random port for tcp, the first free
        ``ip-<n>`` path for ipc.  Returns the port actually bound.
        """
        iface = '%s://%s' % (self.transport, self.ip)
        if self.transport == 'tcp':
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == 'ipc':
            if port <= 0:
                # probe for the first unused ipc path of the form "<ip>-<n>"
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(cf, ip=self.ip, key=self.session.key,
                              transport=self.transport,
                              shell_port=self.shell_port,
                              stdin_port=self.stdin_port,
                              hb_port=self.hb_port,
                              iopub_port=self.iopub_port,
                              control_port=self.control_port)

    def cleanup_connection_file(self):
        # best-effort removal of the connection file and any ipc sockets;
        # registered with atexit when we created the file ourselves
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except (IOError, OSError):
            pass
        self.cleanup_ipc_files()

    def init_connection_file(self):
        if not self.connection_file:
            self.connection_file = "kernel-%s.json" % os.getpid()
        try:
            self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
        except IOError:
            self.log.debug("Connection file not found: %s", self.connection_file)
            # This means I own it, and I'll create it in this directory:
            ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
            # Also, I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        context = zmq.Context.instance()
        # Uncomment this to try closing the context.
        # atexit.register(context.term)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_socket.linger = 1000
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_socket.linger = 1000
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_socket.linger = 1000
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

        self.control_socket = context.socket(zmq.ROUTER)
        self.control_socket.linger = 1000
        self.control_port = self._bind_socket(self.control_socket, self.control_port)
        self.log.debug("control ROUTER Channel on port: %i" % self.control_port)

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if basename == self.connection_file or \
           os.path.dirname(self.connection_file) == self.connection_dir:
            # use shortname
            tail = basename
        else:
            tail = self.connection_file
        lines = [
            "To connect another client to this kernel, use:",
            " --existing %s" % tail,
        ]
        # log connection info
        # info-level, so often not shown.
        # frontends should use the %connect_info magic
        # to see the connection info
        for line in lines:
            self.log.info(line)
        # also raw print to the terminal if no parent_handle (`ipython kernel`)
        if not self.parent_handle:
            io.rprint(_ctrl_c_message)
            for line in lines:
                io.rprint(line)

        self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
                          stdin=self.stdin_port, hb=self.hb_port,
                          control=self.control_port)

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, 'w')
            # NOTE(review): this also rebinds sys.__stdout__/__stderr__,
            # so the originals are unrecoverable afterwards — TODO confirm intended
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            # forward stdout/stderr over the iopub socket
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
            sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            sys.displayhook = displayhook_factory(self.session, self.iopub_socket)

    def init_signal(self):
        # the kernel must survive Ctrl-C delivered to its process group;
        # interrupts arrive as messages or via the parent poller instead
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)
        control_stream = ZMQStream(self.control_socket)
        kernel_factory = self.kernel_class.instance
        kernel = kernel_factory(parent=self, session=self.session,
                                shell_streams=[shell_stream, control_stream],
                                iopub_socket=self.iopub_socket,
                                stdin_socket=self.stdin_socket,
                                log=self.log,
                                profile_dir=self.profile_dir,
                                user_ns=self.user_ns,
                                )
        kernel.record_ports(self.ports)
        self.kernel = kernel

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""
        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.

        shell = self.shell
        _showtraceback = shell._showtraceback
        try:
            # replace error-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print("GUI event loop or pylab initialization failed", file=io.stderr)
                print(shell.InteractiveTB.stb2text(stb), file=io.stderr)
            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            # always restore the original traceback handler
            shell._showtraceback = _showtraceback

    def init_shell(self):
        # not every kernel_class has a shell; tolerate its absence
        self.shell = getattr(self.kernel, 'shell', None)
        if self.shell:
            self.shell.configurables.append(self)

    def init_extensions(self):
        super(IPKernelApp, self).init_extensions()
        # BEGIN HARDCODED WIDGETS HACK
        # Ensure ipywidgets extension is loaded if available
        extension_man = self.shell.extension_manager
        if 'ipywidgets' not in extension_man.loaded:
            try:
                extension_man.load_extension('ipywidgets')
            except ImportError as e:
                self.log.debug('ipywidgets package not installed. Widgets will not be available.')
        # END HARDCODED WIDGETS HACK

    @catch_config_error
    def initialize(self, argv=None):
        super(IPKernelApp, self).initialize(argv)
        if self.subapp is not None:
            # a subcommand (e.g. `install`) is taking over; skip kernel setup
            return
        self.init_blackhole()
        self.init_connection_file()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.write_connection_file()
        # Log connection info after writing connection file, so that the connection
        # file is definitely available at the time someone reads the log.
        self.log_connection_info()
        self.init_io()
        self.init_signal()
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        if self.shell:
            self.init_gui_pylab()
        self.init_extensions()
        self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization do not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        if self.subapp is not None:
            return self.subapp.start()
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        try:
            # blocks until the kernel is shut down
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            pass
class YAPKernel(KernelBase):
    """Jupyter kernel for YAP Prolog, built on an embedded
    ZMQInteractiveShell that handles execution, completion, inspection,
    history, and comm messages.
    """

    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
    shell_class = Type(ZMQInteractiveShell)
    user_ns = Instance(dict, args=None, allow_none=True)

    def _user_ns_changed(self, name, old, new):
        # keep the shell's namespace in sync when ours is replaced
        if self.shell is not None:
            self.shell.user_ns = new
            self.shell.init_user_ns()

    # A reference to the Python builtin 'raw_input' function.
    # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
    _sys_raw_input = Any()
    _sys_eval_input = Any()

    implementation = 'YAP Kernel'
    implementation_version = '1.0'
    language = 'text'
    language_version = '6.3'
    banner = "YAP-6.3"
    language_info = {
        'mimetype': 'text/prolog',
        'name': 'text',
        # ------ If different from 'language':
        'codemirror_mode': {
            "version": 2,
            "name": "prolog"
        },
        'pygments_lexer': 'prolog',
        'version': "0.0.1",
        'file_extension': '.yap',
    }

    #-------------------------------------------------------------------------
    # Things related to history management
    #-------------------------------------------------------------------------

    def __init__(self, **kwargs):
        # sp = super(YAPKernel, self)
        super(YAPKernel, self).__init__(**kwargs)
        # Initialize the InteractiveShell subclass
        self.shell = self.shell_class.instance(parent=self,
                                               profile_dir=self.profile_dir,
                                               user_ns=self.user_ns,
                                               kernel=self,
                                               )
        # wire the shell's display machinery to this kernel's session/iopub
        self.shell.displayhook.session = self.session
        self.shell.displayhook.pub_socket = self.iopub_socket
        self.shell.displayhook.topic = self._topic('execute_result')
        self.shell.display_pub.session = self.session
        self.shell.display_pub.pub_socket = self.iopub_socket

        self.comm_manager = CommManager(parent=self, kernel=self)
        # self.shell._last_traceback = None
        self.shell.configurables.append(self.comm_manager)
        # route comm messages straight to the comm manager
        comm_msg_types = ['comm_open', 'comm_msg', 'comm_close']
        for msg_type in comm_msg_types:
            self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
        self.yap_shell = YAPInteractiveShell(self)

    def get_usage(self):
        return "This is the YAP kernel."

    help_links = List([
        {
            'text': "Python",
            'url': "http://docs.python.org/%i.%i" % sys.version_info[:2],
        },
        {
            'text': "YAP",
            'url': "http://YAP.org/documentation.html",
        },
        {
            'text': "NumPy",
            'url': "http://docs.scipy.org/doc/numpy/reference/",
        },
        {
            'text': "SciPy",
            'url': "http://docs.scipy.org/doc/scipy/reference/",
        },
        {
            'text': "Matplotlib",
            'url': "http://matplotlib.org/contents.html",
        },
        {
            'text': "SymPy",
            'url': "http://docs.sympy.org/latest/index.html",
        },
        {
            'text': "pandas",
            'url': "http://pandas.pydata.org/pandas-docs/stable/",
        },
    ]).tag(config=True)

    # Kernel info fields
    # NOTE(review): these redefine `implementation` and `language_info` set
    # earlier in the class body — the later definitions win; confirm which
    # set is actually intended
    implementation = 'YAP'
    implementation_version = release.version
    language_info = {
        'name': 'python',
        'version': sys.version.split()[0],
        'mimetype': 'text/x-python',
        'codemirror_mode': {
            'name': 'prolog',
            'version': sys.version_info[0]
        },
        'pygments_lexer': 'prolog',
        'nbconvert_exporter': 'python',
        'file_extension': '.yap'
    }

    @property
    def banner(self):
        # shadows the earlier `banner = "YAP-6.3"` class attribute
        return self.shell.banner

    def start(self):
        self.shell.exit_now = False
        super(YAPKernel, self).start()

    def set_parent(self, ident, parent):
        """Overridden from parent to tell the display hook and output streams
        about the parent message.
        """
        super(YAPKernel, self).set_parent(ident, parent)
        self.shell.set_parent(parent)

    def init_metadata(self, parent):
        """Initialize metadata.

        Run at the beginning of each execution request.
        """
        md = super(YAPKernel, self).init_metadata(parent)
        # FIXME: remove deprecated ipyparallel-specific code
        # This is required for ipyparallel < 5.0
        md.update({
            'dependencies_met': True,
            'engine': self.ident,
        })
        return md

    def finish_metadata(self, parent, metadata, reply_content):
        """Finish populating metadata.

        Run after completing an execution request.
        """
        # FIXME: remove deprecated ipyparallel-specific code
        # This is required by ipyparallel < 5.0
        metadata['status'] = reply_content['status']
        if reply_content['status'] == 'error' and reply_content['ename'] == 'UnmetDependency':
            metadata['dependencies_met'] = False
        return metadata

    def _forward_input(self, allow_stdin=False):
        """Forward raw_input and getpass to the current frontend.

        via input_request
        """
        self._allow_stdin = allow_stdin

        if PY3:
            self._sys_raw_input = builtin_mod.input
            builtin_mod.input = self.raw_input
        else:
            self._sys_raw_input = builtin_mod.raw_input
            self._sys_eval_input = builtin_mod.input
            builtin_mod.raw_input = self.raw_input
            # Python 2 input() == eval(raw_input())
            builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt))
        self._save_getpass = getpass.getpass
        getpass.getpass = self.getpass

    def _restore_input(self):
        """Restore raw_input, getpass"""
        if PY3:
            builtin_mod.input = self._sys_raw_input
        else:
            builtin_mod.raw_input = self._sys_raw_input
            builtin_mod.input = self._sys_eval_input

        getpass.getpass = self._save_getpass

    @property
    def execution_count(self):
        return self.shell.execution_count

    @execution_count.setter
    def execution_count(self, value):
        # Ignore the incrememnting done by KernelBase, in favour of our shell's
        # execution counter.
        pass

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        shell = self.shell  # we'll need this a lot here

        self._forward_input(allow_stdin)

        reply_content = {}
        try:
            res = shell.run_cell(code, store_history=store_history, silent=silent)
        finally:
            self._restore_input()

        if res.error_before_exec is not None:
            err = res.error_before_exec
        else:
            err = res.error_in_exec

        if res.success:
            reply_content[u'status'] = u'ok'
        elif isinstance(err, KeyboardInterrupt):
            reply_content[u'status'] = u'aborted'
        else:
            reply_content[u'status'] = u'error'

            reply_content.update({
                # u'traceback': shell._last_traceback or [],
                u'ename': unicode_type(type(err).__name__),
                u'evalue': safe_unicode(err),
            })

            # FIXME: deprecate piece for ipyparallel:
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
            reply_content['engine_info'] = e_info

        # Return the execution counter so clients can display prompts
        reply_content['execution_count'] = shell.execution_count - 1

        if 'traceback' in reply_content:
            self.log.info("Exception in execute request:\n%s",
                          '\n'.join(reply_content['traceback']))

        # At this point, we can tell whether the main code execution succeeded
        # or not. If it did, we proceed to evaluate user_expressions
        if reply_content['status'] == 'ok':
            reply_content[u'user_expressions'] = \
                shell.user_expressions(user_expressions or {})
        else:
            # If there was an error, don't even try to compute expressions
            reply_content[u'user_expressions'] = {}

        # Payloads should be retrieved regardless of outcome, so we can both
        # recover partial output (that could have been generated early in a
        # block, before an error) and always clear the payload system.
        reply_content[u'payload'] = shell.payload_manager.read_payload()
        # Be aggressive about clearing the payload because we don't want
        # it to sit in memory until the next execute_request comes in.
        shell.payload_manager.clear_payload()

        return reply_content

    def do_complete(self, code, cursor_pos):
        # FIXME: YAP completers currently assume single line,
        # but completion messages give multi-line context
        # For now, extract line from cell, based on cursor_pos:
        if cursor_pos is None:
            cursor_pos = len(code)
        line, offset = line_at_cursor(code, cursor_pos)
        line_cursor = cursor_pos - offset

        txt, matches = self.shell.complete('', line, line_cursor)
        return {
            'matches': matches,
            'cursor_end': cursor_pos,
            'cursor_start': cursor_pos - len(txt),
            'metadata': {},
            'status': 'ok'
        }

    def do_inspect(self, code, cursor_pos, detail_level=0):
        name = token_at_cursor(code, cursor_pos)
        info = self.shell.object_inspect(name)

        reply_content = {'status': 'ok'}
        reply_content['data'] = data = {}
        reply_content['metadata'] = {}
        reply_content['found'] = info['found']
        if info['found']:
            info_text = self.shell.object_inspect_text(
                name,
                detail_level=detail_level,
            )
            data['text/plain'] = info_text
        return reply_content

    def do_history(self, hist_access_type, output, raw, session=0, start=0,
                   stop=None, n=None, pattern=None, unique=False):
        # dispatch on the three protocol-defined access types
        if hist_access_type == 'tail':
            hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
                                                       include_latest=True)
        elif hist_access_type == 'range':
            hist = self.shell.history_manager.get_range(session, start, stop,
                                                        raw=raw, output=output)
        elif hist_access_type == 'search':
            hist = self.shell.history_manager.search(pattern, raw=raw, output=output,
                                                     n=n, unique=unique)
        else:
            hist = []

        return {
            'status': 'ok',
            'history': list(hist),
        }

    def do_shutdown(self, restart):
        self.shell.exit_now = True
        return dict(status='ok', restart=restart)

    def do_is_complete(self, code):
        status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
        r = {'status': status}
        if status == 'incomplete':
            r['indent'] = ' ' * indent_spaces
        return r

    def do_apply(self, content, bufs, msg_id, reply_metadata):
        # ipyparallel-style apply: unpack (f, args, kwargs), run in user_ns,
        # serialize the result
        from .serialize import serialize_object, unpack_apply_message
        shell = self.shell
        try:
            working = shell.user_ns

            prefix = "_" + str(msg_id).replace("-", "") + "_"

            f, args, kwargs = unpack_apply_message(bufs, working, copy=False)

            fname = getattr(f, '__name__', 'f')

            # msg_id-based names keep the temporaries unique in user_ns
            fname = prefix + "f"
            argname = prefix + "args"
            kwargname = prefix + "kwargs"
            resultname = prefix + "result"

            ns = {fname: f, argname: args, kwargname: kwargs, resultname: None}
            # print ns
            working.update(ns)
            code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
            try:
                exec(code, shell.user_global_ns, shell.user_ns)
                result = working.get(resultname)
            finally:
                # always remove the temporaries from user_ns
                for key in ns:
                    working.pop(key)

            result_buf = serialize_object(result,
                                          buffer_threshold=self.session.buffer_threshold,
                                          item_threshold=self.session.item_threshold,
                                          )

        except BaseException as e:
            # invoke YAP traceback formatting
            shell.showtraceback()
            reply_content = {
                u'traceback': shell._last_traceback or [],
                u'ename': unicode_type(type(e).__name__),
                u'evalue': safe_unicode(e),
            }
            # FIXME: deprecate piece for ipyparallel:
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
            reply_content['engine_info'] = e_info

            self.send_response(self.iopub_socket, u'error', reply_content,
                               ident=self._topic('error'))
            self.log.info("Exception in apply request:\n%s",
                          '\n'.join(reply_content['traceback']))
            result_buf = []
            reply_content['status'] = 'error'
        else:
            reply_content = {'status': 'ok'}

        return reply_content, result_buf

    def do_clear(self):
        self.shell.reset(False)
        return dict(status='ok')
class BaseIPythonApplication(Application):
    """Base class for IPython applications.

    Handles profile/config-file discovery and loading, the IPython directory,
    and crash-handler installation; concrete applications subclass this and
    extend :meth:`initialize`.
    """

    name = Unicode(u'ipython')
    description = Unicode(u'IPython: an enhanced interactive Python shell.')
    version = Unicode(release.version)

    aliases = Dict(base_aliases)
    flags = Dict(base_flags)
    classes = List([ProfileDir])

    # enable `load_subconfig('cfg.py', profile='name')`
    python_config_loader_class = ProfileAwareConfigLoader

    # Track whether the config_file has changed,
    # because some logic happens only if we aren't using the default.
    config_file_specified = Set()

    config_file_name = Unicode()

    @default('config_file_name')
    def _config_file_name_default(self):
        # e.g. 'ipython-kernel' -> 'ipython_kernel_config.py'
        return self.name.replace('-', '_') + u'_config.py'

    @observe('config_file_name')
    def _config_file_name_changed(self, change):
        if change['new'] != change['old']:
            self.config_file_specified.add(change['new'])

    # The directory that contains IPython's builtin profiles.
    builtin_profile_dir = Unicode(
        os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default'))

    config_file_paths = List(Unicode())

    @default('config_file_paths')
    def _config_file_paths_default(self):
        return [os.getcwd()]

    extra_config_file = Unicode(help="""Path to an extra config file to load.

    If specified, load this config file in addition to any other IPython config.
    """).tag(config=True)

    @observe('extra_config_file')
    def _extra_config_file_changed(self, change):
        old = change['old']
        new = change['new']
        try:
            self.config_files.remove(old)
        except ValueError:
            pass
        self.config_file_specified.add(new)
        self.config_files.append(new)

    profile = Unicode(u'default',
                      help="""The IPython profile to use.""").tag(config=True)

    @observe('profile')
    def _profile_changed(self, change):
        self.builtin_profile_dir = os.path.join(get_ipython_package_dir(),
                                                u'config', u'profile',
                                                change['new'])

    ipython_dir = Unicode(help="""
        The name of the IPython directory. This directory is used for logging
        configuration (through profiles), history storage, etc. The default
        is usually $HOME/.ipython. This option can also be specified through
        the environment variable IPYTHONDIR.
        """).tag(config=True)

    @default('ipython_dir')
    def _ipython_dir_default(self):
        d = get_ipython_dir()
        # run the change handler by hand so sys.path / README setup happens
        # even for the default value
        self._ipython_dir_changed({
            'name': 'ipython_dir',
            'old': d,
            'new': d,
        })
        return d

    _in_init_profile_dir = False

    profile_dir = Instance(ProfileDir, allow_none=True)

    @default('profile_dir')
    def _profile_dir_default(self):
        # avoid recursion
        if self._in_init_profile_dir:
            return
        # profile_dir requested early, force initialization
        self.init_profile_dir()
        return self.profile_dir

    overwrite = Bool(
        False,
        help="""Whether to overwrite existing config files when copying""").tag(config=True)

    auto_create = Bool(
        False,
        help="""Whether to create profile dir if it doesn't exist""").tag(config=True)

    config_files = List(Unicode())

    @default('config_files')
    def _config_files_default(self):
        return [self.config_file_name]

    copy_config_files = Bool(
        False,
        help="""Whether to install the default config files into the profile dir.
        If a new profile is being created, and IPython contains config files for that
        profile, then they will be staged into the new directory.  Otherwise,
        default config files will be automatically generated.
        """).tag(config=True)

    verbose_crash = Bool(
        False,
        help="""Create a massive crash report when IPython encounters what may be an
        internal error.  The default is to append a short message to the
        usual traceback""").tag(config=True)

    # The class to use as the crash handler.
    crash_handler_class = Type(crashhandler.CrashHandler)

    @catch_config_error
    def __init__(self, **kwargs):
        super(BaseIPythonApplication, self).__init__(**kwargs)
        # ensure current working directory exists
        try:
            os.getcwd()
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; exit if cwd doesn't exist
            self.log.error("Current working directory doesn't exist.")
            self.exit(1)

    #-------------------------------------------------------------------------
    # Various stages of Application creation
    #-------------------------------------------------------------------------

    deprecated_subcommands = {}

    def initialize_subcommand(self, subc, argv=None):
        if subc in self.deprecated_subcommands:
            self.log.warning(
                "Subcommand `ipython {sub}` is deprecated and will be removed "
                "in future versions.".format(sub=subc))
            self.log.warning("You likely want to use `jupyter {sub}` in the "
                             "future".format(sub=subc))
        return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv)

    def init_crash_handler(self):
        """Create a crash handler, typically setting sys.excepthook to it."""
        self.crash_handler = self.crash_handler_class(self)
        sys.excepthook = self.excepthook

        def unset_crashhandler():
            sys.excepthook = sys.__excepthook__

        atexit.register(unset_crashhandler)

    def excepthook(self, etype, evalue, tb):
        """this is sys.excepthook after init_crashhandler

        set self.verbose_crash=True to use our full crashhandler, instead of
        a regular traceback with a short message (crash_handler_lite)
        """
        if self.verbose_crash:
            return self.crash_handler(etype, evalue, tb)
        else:
            return crashhandler.crash_handler_lite(etype, evalue, tb)

    @observe('ipython_dir')
    def _ipython_dir_changed(self, change):
        old = change['old']
        new = change['new']
        if old is not Undefined:
            # drop the previous dir from sys.path before adding the new one
            str_old = py3compat.cast_bytes_py2(os.path.abspath(old),
                                               sys.getfilesystemencoding())
            if str_old in sys.path:
                sys.path.remove(str_old)
        str_path = py3compat.cast_bytes_py2(os.path.abspath(new),
                                            sys.getfilesystemencoding())
        sys.path.append(str_path)
        ensure_dir_exists(new)
        readme = os.path.join(new, 'README')
        readme_src = os.path.join(get_ipython_package_dir(), u'config',
                                  u'profile', 'README')
        if not os.path.exists(readme) and os.path.exists(readme_src):
            shutil.copy(readme_src, readme)
        for d in ('extensions', 'nbextensions'):
            path = os.path.join(new, d)
            try:
                ensure_dir_exists(path)
            except OSError as e:
                # this will not be EEXIST
                self.log.error("couldn't create path %s: %s", path, e)
        self.log.debug("IPYTHONDIR set to: %s" % new)

    def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
        """Load the config file.

        By default, errors in loading config are handled, and a warning
        printed on screen. For testing, the suppress_errors option is set
        to False, so errors will make tests fail.

        `suppress_errors` defaults to `None`, in which case the behavior
        defaults to that of `traitlets.Application`. The default value can
        be set:
           - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS'
             environment variable to '0', or 'false' (case insensitive).
           - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS'
             environment variable to '1' or 'true' (case insensitive).
           - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS'
             environment variable to '' (empty string) or leaving it unset.

        Any other value is invalid, and will make IPython exit with a
        non-zero return code.
        """

        self.log.debug("Searching path %s for config files", self.config_file_paths)
        base_config = 'ipython_config.py'
        self.log.debug("Attempting to load config file: %s" % base_config)
        try:
            if suppress_errors is not None:
                # temporarily override the Application-level error policy
                old_value = Application.raise_config_file_errors
                Application.raise_config_file_errors = not suppress_errors
            Application.load_config_file(self, base_config,
                                         path=self.config_file_paths)
        except ConfigFileNotFound:
            # ignore errors loading parent
            self.log.debug("Config file %s not found", base_config)
        if suppress_errors is not None:
            Application.raise_config_file_errors = old_value

        for config_file_name in self.config_files:
            if not config_file_name or config_file_name == base_config:
                continue
            # BUGFIX: log the file actually being loaded (the loop variable),
            # not self.config_file_name
            self.log.debug("Attempting to load config file: %s" % config_file_name)
            try:
                Application.load_config_file(self, config_file_name,
                                             path=self.config_file_paths)
            except ConfigFileNotFound:
                # Only warn if the default config file was NOT being used.
                if config_file_name in self.config_file_specified:
                    msg = self.log.warning
                else:
                    msg = self.log.debug
                msg("Config file not found, skipping: %s", config_file_name)
            except Exception:
                # For testing purposes.
                if not suppress_errors:
                    raise
                # BUGFIX: report the file that actually failed to load
                self.log.warning("Error loading config file: %s" % config_file_name,
                                 exc_info=True)

    def init_profile_dir(self):
        """initialize the profile dir"""
        self._in_init_profile_dir = True
        if self.profile_dir is not None:
            # already ran
            return
        if 'ProfileDir.location' not in self.config:
            # location not specified, find by profile name
            try:
                p = ProfileDir.find_profile_dir_by_name(
                    self.ipython_dir, self.profile, self.config)
            except ProfileDirError:
                # not found, maybe create it (always create default profile)
                if self.auto_create or self.profile == 'default':
                    try:
                        p = ProfileDir.create_profile_dir_by_name(
                            self.ipython_dir, self.profile, self.config)
                    except ProfileDirError:
                        self.log.fatal("Could not create profile: %r" % self.profile)
                        self.exit(1)
                    else:
                        self.log.info("Created profile dir: %r" % p.location)
                else:
                    self.log.fatal("Profile %r not found." % self.profile)
                    self.exit(1)
            else:
                self.log.debug("Using existing profile dir: %r" % p.location)
        else:
            location = self.config.ProfileDir.location
            # location is fully specified
            try:
                p = ProfileDir.find_profile_dir(location, self.config)
            except ProfileDirError:
                # not found, maybe create it
                if self.auto_create:
                    try:
                        p = ProfileDir.create_profile_dir(location, self.config)
                    except ProfileDirError:
                        self.log.fatal(
                            "Could not create profile directory: %r" % location)
                        self.exit(1)
                    else:
                        self.log.debug("Creating new profile dir: %r" % location)
                else:
                    self.log.fatal("Profile directory %r not found." % location)
                    self.exit(1)
            else:
                self.log.info("Using existing profile dir: %r" % location)
            # if profile_dir is specified explicitly, set profile name
            dir_name = os.path.basename(p.location)
            if dir_name.startswith('profile_'):
                self.profile = dir_name[8:]

        self.profile_dir = p
        self.config_file_paths.append(p.location)
        self._in_init_profile_dir = False

    def init_config_files(self):
        """[optionally] copy default config files into profile dir."""
        self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
        # copy config files
        path = self.builtin_profile_dir
        if self.copy_config_files:
            src = self.profile

            cfg = self.config_file_name
            if path and os.path.exists(os.path.join(path, cfg)):
                self.log.warning(
                    "Staging %r from %s into %r [overwrite=%s]" %
                    (cfg, src, self.profile_dir.location, self.overwrite))
                self.profile_dir.copy_config_file(cfg, path=path,
                                                  overwrite=self.overwrite)
            else:
                self.stage_default_config_file()
        else:
            # Still stage *bundled* config files, but not generated ones
            # This is necessary for `ipython profile=sympy` to load the profile
            # on the first go
            files = glob.glob(os.path.join(path, '*.py'))
            for fullpath in files:
                cfg = os.path.basename(fullpath)
                if self.profile_dir.copy_config_file(cfg, path=path,
                                                     overwrite=False):
                    # file was copied
                    self.log.warning(
                        "Staging bundled %s from %s into %r" %
                        (cfg, self.profile, self.profile_dir.location))

    def stage_default_config_file(self):
        """auto generate default config file, and stage it into the profile."""
        s = self.generate_config_file()
        fname = os.path.join(self.profile_dir.location, self.config_file_name)
        if self.overwrite or not os.path.exists(fname):
            self.log.warning("Generating default config file: %r" % (fname))
            with open(fname, 'w') as f:
                f.write(s)

    @catch_config_error
    def initialize(self, argv=None):
        # don't hook up crash handler before parsing command-line
        self.parse_command_line(argv)
        self.init_crash_handler()
        if self.subapp is not None:
            # stop here if subapp is taking over
            return
        # save a copy of CLI config to re-load after config files
        # so that it has highest priority
        cl_config = deepcopy(self.config)
        self.init_profile_dir()
        self.init_config_files()
        self.load_config_file()
        # enforce cl-opts override configfile opts:
        self.update_config(cl_config)
class KernelManager(ConnectionFileMixin):
    """Manages a single kernel in a subprocess on this host.

    This version starts kernels with Popen.

    Lifecycle: ``start_kernel`` launches the subprocess and connects the
    control socket; ``shutdown_kernel`` / ``restart_kernel`` manage polite
    (message-based) and forceful (signal-based) termination.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)

    def _context_default(self):
        # Use the process-wide shared zmq context rather than a private one.
        return zmq.Context.instance()

    # the class to create with our `client` method
    client_class = DottedObjectName(
        'jupyter_client.blocking.BlockingKernelClient')
    client_factory = Type(klass='jupyter_client.KernelClient')

    def _client_factory_default(self):
        # Resolve the dotted name lazily so importing this module stays cheap.
        return import_item(self.client_class)

    def _client_class_changed(self, name, old, new):
        # Keep the factory in sync whenever client_class is reassigned.
        self.client_factory = import_item(str(new))

    # The kernel process with which the KernelManager is communicating.
    # generally a Popen instance
    kernel = Any()

    # Manager used to look up kernel specs (argv, env, resource dir).
    kernel_spec_manager = Instance(kernelspec.KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return kernelspec.KernelSpecManager(data_dir=self.data_dir)

    def _kernel_spec_manager_changed(self):
        # Invalidate the cached spec so it is re-fetched from the new manager.
        self._kernel_spec = None

    shutdown_wait_time = Float(
        5.0, config=True,
        help="Time to wait for a kernel to terminate before killing it, "
        "in seconds.")

    kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)

    def _kernel_name_changed(self, name, old, new):
        # Invalidate cached spec; map the generic 'python' alias to the
        # native kernel for this interpreter (python2/python3).
        self._kernel_spec = None
        if new == 'python':
            self.kernel_name = kernelspec.NATIVE_KERNEL_NAME

    # Cache for the resolved KernelSpec; None means "not looked up yet".
    _kernel_spec = None

    @property
    def kernel_spec(self):
        # Lazily resolve and cache the spec for the current kernel_name.
        if self._kernel_spec is None:
            self._kernel_spec = self.kernel_spec_manager.get_kernel_spec(
                self.kernel_name)
        return self._kernel_spec

    kernel_cmd = List(Unicode(),
                      help="""The Popen Command to launch the kernel.""")

    extra_env = Dict(
        help="""Extra environment variables to be set for the kernel.""")

    @property
    def ipykernel(self):
        # True when the managed kernel is a (native) IPython kernel.
        return self.kernel_name in {'python', 'python2', 'python3'}

    # Protected traits
    _launch_args = Any()      # kwargs from start_kernel, reused on restart
    _control_socket = Any()   # zmq socket for shutdown/interrupt requests
    _restarter = Any()        # optional restarter (set by subclasses)

    autorestart = Bool(True, config=True,
                       help="""Should we autorestart the kernel if it dies.""")

    def __del__(self):
        # Best-effort cleanup if the manager is garbage-collected while
        # resources are still open.
        self._close_control_socket()
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Kernel restarter
    #--------------------------------------------------------------------------

    def start_restarter(self):
        """No-op here; subclasses with a restarter override this."""
        pass

    def stop_restarter(self):
        """No-op here; subclasses with a restarter override this."""
        pass

    def add_restart_callback(self, callback, event='restart'):
        """register a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.add_callback(callback, event)

    def remove_restart_callback(self, callback, event='restart'):
        """unregister a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.remove_callback(callback, event)

    #--------------------------------------------------------------------------
    # create a Client connected to our Kernel
    #--------------------------------------------------------------------------

    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        kw = {}
        kw.update(self.get_connection_info(session=True))
        kw.update(dict(
            connection_file=self.connection_file,
            parent=self,
        ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    def format_kernel_cmd(self, extra_arguments=None):
        """replace templated args (e.g. {connection_file})"""
        extra_arguments = extra_arguments or []
        # Manual kernel_cmd takes precedence over the kernel spec's argv.
        if self.kernel_cmd:
            cmd = self.kernel_cmd + extra_arguments
        else:
            cmd = self.kernel_spec.argv + extra_arguments

        if cmd and cmd[0] in {'python',
                              'python%i' % sys.version_info[0],
                              'python%i.%i' % sys.version_info[:2]}:
            # executable is 'python' or 'python3', use sys.executable.
            # These will typically be the same,
            # but if the current process is in an env
            # and has been launched by abspath without
            # activating the env, python on PATH may not be sys.executable,
            # but it should be.
            cmd[0] = sys.executable

        # Namespace for {placeholder} substitution in the command line.
        ns = dict(connection_file=self.connection_file,
                  prefix=sys.prefix,
                  )
        if self.kernel_spec:
            ns["resource_dir"] = self.kernel_spec.resource_dir
        ns.update(self._launch_args)

        pat = re.compile(r'\{([A-Za-z0-9_]+)\}')

        def from_ns(match):
            """Get the key out of ns if it's there, otherwise no change."""
            return ns.get(match.group(1), match.group())

        return [pat.sub(from_ns, arg) for arg in cmd]

    def _launch_kernel(self, kernel_cmd, **kw):
        """actually launch the kernel

        override in a subclass to launch kernel subprocesses differently
        """
        return launch_kernel(kernel_cmd, **kw)

    # Control socket used for polite kernel shutdown

    def _connect_control_socket(self):
        # Idempotent: only connect once; short linger so close() won't block.
        if self._control_socket is None:
            self._control_socket = self.connect_control()
            self._control_socket.linger = 100

    def _close_control_socket(self):
        if self._control_socket is None:
            return
        self._control_socket.close()
        self._control_socket = None

    def start_kernel(self, **kw):
        """Starts a kernel on this host in a separate process.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        `**kw` : optional
             keyword arguments that are passed down to build the kernel_cmd
             and launching the kernel (e.g. Popen kwargs).
        """
        if self.transport == 'tcp' and not is_local_ip(self.ip):
            raise RuntimeError("Can only launch a kernel on a local interface. "
                               "Make sure that the '*_address' attributes are "
                               "configured properly. "
                               "Currently valid addresses are: %s" % local_ips()
                               )

        # write connection file / get default ports
        self.write_connection_file()

        # save kwargs for use in restart
        self._launch_args = kw.copy()
        # build the Popen cmd
        extra_arguments = kw.pop('extra_arguments', [])
        kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
        env = kw.pop('env', os.environ).copy()
        # Don't allow PYTHONEXECUTABLE to be passed to kernel process.
        # If set, it can bork all the things.
        env.pop('PYTHONEXECUTABLE', None)
        if not self.kernel_cmd:
            # If kernel_cmd has been set manually, don't refer to a kernel spec
            # Environment variables from kernel spec are added to os.environ
            env.update(self.kernel_spec.env or {})
        elif self.extra_env:
            env.update(self.extra_env)

        # launch the kernel subprocess
        self.log.debug("Starting kernel: %s", kernel_cmd)
        self.kernel = self._launch_kernel(kernel_cmd, env=env, **kw)
        self.start_restarter()
        self._connect_control_socket()

    def request_shutdown(self, restart=False):
        """Send a shutdown request via control channel
        """
        content = dict(restart=restart)
        msg = self.session.msg("shutdown_request", content=content)
        # ensure control socket is connected
        self._connect_control_socket()
        self.session.send(self._control_socket, msg)

    def finish_shutdown(self, waittime=None, pollinterval=0.1):
        """Wait for kernel shutdown, then kill process if it doesn't shutdown.

        This does not send shutdown requests - use :meth:`request_shutdown`
        first.
        """
        if waittime is None:
            waittime = max(self.shutdown_wait_time, 0)
        # Poll until the process exits or the wait budget is exhausted.
        for i in range(int(waittime / pollinterval)):
            if self.is_alive():
                time.sleep(pollinterval)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self.log.debug("Kernel is taking too long to finish, killing")
                self._kill_kernel()

    def cleanup(self, connection_file=True):
        """Clean up resources when the kernel is shut down"""
        if connection_file:
            self.cleanup_connection_file()

        self.cleanup_ipc_files()
        self._close_control_socket()

    def shutdown_kernel(self, now=False, restart=False):
        """Attempts to stop the kernel process cleanly.

        This attempts to shutdown the kernels cleanly by:

        1. Sending it a shutdown message over the shell channel.
        2. If that fails, the kernel is shutdown forcibly by sending it
           a signal.

        Parameters
        ----------
        now : bool
            Should the kernel be forcible killed *now*. This skips the
            first, nice shutdown attempt.
        restart: bool
            Will this kernel be restarted after it is shutdown. When this
            is True, connection files will not be cleaned up.
        """
        # Stop monitoring for restarting while we shutdown.
        self.stop_restarter()

        if now:
            self._kill_kernel()
        else:
            self.request_shutdown(restart=restart)
            # Don't send any additional kernel kill messages immediately, to give
            # the kernel a chance to properly execute shutdown actions. Wait for at
            # most 1s, checking every 0.1s.
            self.finish_shutdown()

        self.cleanup(connection_file=not restart)

    def restart_kernel(self, now=False, newports=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action.  Otherwise the kernel is
            given 1s to clean up before a forceful restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        newports : bool, optional
            If the old kernel was launched with random ports, this flag decides
            whether the same ports and connection file will be used again.
            If False, the same ports and connection file are used. This is
            the default. If True, new random port numbers are chosen and a
            new connection file is written. It is still possible that the newly
            chosen random port numbers happen to be the same as the old ones.

        `**kw` : optional
            Any options specified here will overwrite those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            self.shutdown_kernel(now=now, restart=True)

            if newports:
                self.cleanup_random_ports()

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        return self.kernel is not None

    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:
            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """Interrupts the kernel by sending it a signal.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            # The spec decides between POSIX-signal and message interrupts.
            interrupt_mode = self.kernel_spec.interrupt_mode
            if interrupt_mode == 'signal':
                if sys.platform == 'win32':
                    from .win_interrupt import send_interrupt
                    send_interrupt(self.kernel.win32_interrupt_event)
                else:
                    self.signal_kernel(signal.SIGINT)

            elif interrupt_mode == 'message':
                msg = self.session.msg("interrupt_request", content={})
                self._connect_control_socket()
                self.session.send(self._control_socket, msg)
        else:
            raise RuntimeError("Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """Sends a signal to the process group of the kernel (this
        usually includes the kernel and any subprocesses spawned by
        the kernel).

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            # Prefer signaling the whole process group so kernel-spawned
            # children get the signal too; fall back to the kernel alone.
            if hasattr(os, "getpgid") and hasattr(os, "killpg"):
                try:
                    pgid = os.getpgid(self.kernel.pid)
                    os.killpg(pgid, signum)
                    return
                except OSError:
                    pass
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            # poll() returns None while the subprocess is still running.
            if self.kernel.poll() is None:
                return True
            else:
                return False
        else:
            # we don't have a kernel
            return False
class TerminalInteractiveShell(InteractiveShell):
    """Interactive IPython shell for the terminal, built on prompt_toolkit.

    Wires IPython's execution machinery to a ``PromptSession`` (history,
    completion, syntax highlighting, key bindings), with a plain ``input()``
    fallback when ``simple_prompt`` is enabled (tests, pipes, emacs).
    """

    space_for_menu = Integer(6, help='Number of line at the bottom of the screen '
                             'to reserve for the completion menu').tag(config=True)

    # The PromptSession instance (None until init_prompt_toolkit_cli runs,
    # or permanently None in simple_prompt mode).
    pt_app = None
    debugger_history = None

    simple_prompt = Bool(_use_simple_prompt,
        help="""Use `raw_input` for the REPL, without completion and prompt colors.

            Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
            IPython own testing machinery, and emacs inferior-shell integration through elpy.

            This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT`
            environment variable is set, or the current terminal is not a tty."""
    ).tag(config=True)

    @property
    def debugger_cls(self):
        # The prompt_toolkit debugger needs a real terminal; use plain Pdb
        # in simple-prompt mode.
        return Pdb if self.simple_prompt else TerminalPdb

    confirm_exit = Bool(True,
        help="""
        Set to confirm when you try to exit IPython with an EOF (Control-D
        in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
        you can force a direct exit without any confirmation.""",
    ).tag(config=True)

    editing_mode = Unicode('emacs',
        help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",
    ).tag(config=True)

    mouse_support = Bool(False,
        help="Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)"
    ).tag(config=True)

    # We don't load the list of styles for the help string, because loading
    # Pygments plugins takes time and can cause unexpected errors.
    highlighting_style = Union([Unicode('legacy'), Type(klass=Style)],
        help="""The name or class of a Pygments style to use for syntax
        highlighting. To see available styles, run `pygmentize -L styles`."""
    ).tag(config=True)

    @observe('highlighting_style')
    @observe('colors')
    def _highlighting_style_changed(self, change):
        self.refresh_style()

    def refresh_style(self):
        """Rebuild the cached prompt_toolkit style from the current config."""
        self._style = self._make_style_from_name_or_cls(self.highlighting_style)

    highlighting_style_overrides = Dict(
        help="Override highlighting format for specific tokens"
    ).tag(config=True)

    true_color = Bool(False,
        help=("Use 24bit colors instead of 256 colors in prompt highlighting. "
              "If your terminal supports true color, the following command "
              "should print 'TRUECOLOR' in orange: "
              "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"")
    ).tag(config=True)

    editor = Unicode(get_default_editor(),
        help="Set the editor used by IPython (default to $EDITOR/vi/notepad)."
    ).tag(config=True)

    prompts_class = Type(Prompts,
        help='Class used to generate Prompt token for prompt_toolkit'
    ).tag(config=True)

    prompts = Instance(Prompts)

    @default('prompts')
    def _prompts_default(self):
        return self.prompts_class(self)

    # @observe('prompts')
    # def _(self, change):
    #     self._update_layout()

    @default('displayhook_class')
    def _displayhook_class_default(self):
        return RichPromptDisplayHook

    term_title = Bool(True,
        help="Automatically set the terminal title"
    ).tag(config=True)

    term_title_format = Unicode("IPython: {cwd}",
        help="Customize the terminal title format. This is a python format string. " +
             "Available substitutions are: {cwd}."
    ).tag(config=True)

    display_completions = Enum(('column', 'multicolumn', 'readlinelike'),
        help=("Options for displaying tab completions, 'column', 'multicolumn', and "
              "'readlinelike'. These options are for `prompt_toolkit`, see "
              "`prompt_toolkit` documentation for more information."),
        default_value='multicolumn').tag(config=True)

    highlight_matching_brackets = Bool(True,
        help="Highlight matching brackets.",
    ).tag(config=True)

    extra_open_editor_shortcuts = Bool(False,
        help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. "
             "This is in addition to the F2 binding, which is always enabled."
    ).tag(config=True)

    handle_return = Any(None,
        help="Provide an alternative handler to be called when the user presses "
             "Return. This is an advanced option intended for debugging, which "
             "may be changed or removed in later releases."
    ).tag(config=True)

    enable_history_search = Bool(True,
        help="Allows to enable/disable the prompt toolkit history search"
    ).tag(config=True)

    @observe('term_title')
    def init_term_title(self, change=None):
        # Enable or disable the terminal title.
        if self.term_title:
            toggle_set_term_title(True)
            set_term_title(self.term_title_format.format(cwd=abbrev_cwd()))
        else:
            toggle_set_term_title(False)

    def init_display_formatter(self):
        super(TerminalInteractiveShell, self).init_display_formatter()
        # terminal only supports plain text
        self.display_formatter.active_types = ['text/plain']
        # disable `_ipython_display_`
        self.display_formatter.ipython_display_formatter.enabled = False

    def init_prompt_toolkit_cli(self):
        """Build the PromptSession, or install the plain-input fallback."""
        if self.simple_prompt:
            # Fall back to plain non-interactive output for tests.
            # This is very limited, and only accepts a single line.
            def prompt():
                isp = self.input_splitter
                prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens())
                prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens())
                while isp.push_accepts_more():
                    line = input(prompt_text)
                    isp.push(line)
                    prompt_text = prompt_continuation
                return isp.source_reset()
            self.prompt_for_code = prompt
            return

        # Set up keyboard shortcuts
        key_bindings = create_ipython_shortcuts(self)

        # Pre-populate history from IPython's history database
        history = InMemoryHistory()
        last_cell = u""
        for __, ___, cell in self.history_manager.get_tail(self.history_load_length,
                                                           include_latest=True):
            # Ignore blank lines and consecutive duplicates
            cell = cell.rstrip()
            if cell and (cell != last_cell):
                history.append_string(cell)
                last_cell = cell

        self._style = self._make_style_from_name_or_cls(self.highlighting_style)
        self.style = DynamicStyle(lambda: self._style)

        editing_mode = getattr(EditingMode, self.editing_mode.upper())

        self.pt_app = PromptSession(
            editing_mode=editing_mode,
            key_bindings=key_bindings,
            history=history,
            completer=IPythonPTCompleter(shell=self),
            enable_history_search=self.enable_history_search,
            style=self.style,
            include_default_pygments_style=False,
            mouse_support=self.mouse_support,
            enable_open_in_editor=self.extra_open_editor_shortcuts,
            color_depth=(ColorDepth.TRUE_COLOR if self.true_color else None),
            **self._extra_prompt_options())

    def _make_style_from_name_or_cls(self, name_or_cls):
        """
        Small wrapper that make an IPython compatible style from a style name

        We need that to add style for prompt ... etc.
        """
        style_overrides = {}
        if name_or_cls == 'legacy':
            legacy = self.colors.lower()
            if legacy == 'linux':
                style_cls = get_style_by_name('monokai')
                style_overrides = _style_overrides_linux
            elif legacy == 'lightbg':
                style_overrides = _style_overrides_light_bg
                style_cls = get_style_by_name('pastie')
            elif legacy == 'neutral':
                # The default theme needs to be visible on both a dark background
                # and a light background, because we can't tell what the terminal
                # looks like. These tweaks to the default theme help with that.
                style_cls = get_style_by_name('default')
                style_overrides.update({
                    Token.Number: '#007700',
                    Token.Operator: 'noinherit',
                    Token.String: '#BB6622',
                    Token.Name.Function: '#2080D0',
                    Token.Name.Class: 'bold #2080D0',
                    Token.Name.Namespace: 'bold #2080D0',
                    Token.Prompt: '#009900',
                    Token.PromptNum: '#00ff00 bold',
                    Token.OutPrompt: '#990000',
                    Token.OutPromptNum: '#ff0000 bold',
                })

                # Hack: Due to limited color support on the Windows console
                # the prompt colors will be wrong without this
                if os.name == 'nt':
                    style_overrides.update({
                        Token.Prompt: '#ansidarkgreen',
                        Token.PromptNum: '#ansigreen bold',
                        Token.OutPrompt: '#ansidarkred',
                        Token.OutPromptNum: '#ansired bold',
                    })
            elif legacy == 'nocolor':
                style_cls = _NoStyle
                style_overrides = {}
            else:
                raise ValueError('Got unknown colors: ', legacy)
        else:
            if isinstance(name_or_cls, str):
                style_cls = get_style_by_name(name_or_cls)
            else:
                style_cls = name_or_cls
            style_overrides = {
                Token.Prompt: '#009900',
                Token.PromptNum: '#00ff00 bold',
                Token.OutPrompt: '#990000',
                Token.OutPromptNum: '#ff0000 bold',
            }
        style_overrides.update(self.highlighting_style_overrides)
        style = merge_styles([
            style_from_pygments_cls(style_cls),
            style_from_pygments_dict(style_overrides),
        ])

        return style

    @property
    def pt_complete_style(self):
        # Map the configured display mode to prompt_toolkit's CompleteStyle.
        # BUGFIX: the previous version had a trailing comma after the
        # subscript, so the property returned a 1-tuple instead of the
        # CompleteStyle member, breaking PromptSession(complete_style=...).
        return {
            'multicolumn': CompleteStyle.MULTI_COLUMN,
            'column': CompleteStyle.COLUMN,
            'readlinelike': CompleteStyle.READLINE_LIKE,
        }[self.display_completions]

    def _extra_prompt_options(self):
        """
        Return the current layout option for the current Terminal InteractiveShell
        """
        def get_message():
            return PygmentsTokens(self.prompts.in_prompt_tokens())

        return {
            'complete_in_thread': False,
            'lexer': IPythonPTLexer(),
            'reserve_space_for_menu': self.space_for_menu,
            'message': get_message,
            'prompt_continuation': (
                lambda width, lineno, is_soft_wrap:
                    PygmentsTokens(self.prompts.continuation_prompt_tokens(width))),
            'multiline': True,
            'complete_style': self.pt_complete_style,

            # Highlight matching brackets, but only when this setting is
            # enabled, and only when the DEFAULT_BUFFER has the focus.
            'input_processors': [ConditionalProcessor(
                processor=HighlightMatchingBracketProcessor(chars='[](){}'),
                filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() &
                    Condition(lambda: self.highlight_matching_brackets))],
        }

    def prompt_for_code(self):
        """Read one complete cell of code from the prompt_toolkit session."""
        if self.rl_next_input:
            default = self.rl_next_input
            self.rl_next_input = None
        else:
            default = ''

        with patch_stdout(raw=True):
            text = self.pt_app.prompt(
                default=default,
                # pre_run=self.pre_prompt,# reset_current_buffer=True,
                **self._extra_prompt_options())
        return text

    def enable_win_unicode_console(self):
        """Enable Unicode output on old Windows consoles (pre-3.6 only)."""
        if sys.version_info >= (3, 6):
            # Since PEP 528, Python uses the unicode APIs for the Windows
            # console by default, so WUC shouldn't be needed.
            return

        import win_unicode_console
        win_unicode_console.enable()

    def init_io(self):
        if sys.platform not in {'win32', 'cli'}:
            return

        self.enable_win_unicode_console()

        import colorama
        colorama.init()

        # For some reason we make these wrappers around stdout/stderr.
        # For now, we need to reset them so all output gets coloured.
        # https://github.com/ipython/ipython/issues/8669
        # io.std* are deprecated, but don't show our own deprecation warnings
        # during initialization of the deprecated API.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            io.stdout = io.IOStream(sys.stdout)
            io.stderr = io.IOStream(sys.stderr)

    def init_magics(self):
        super(TerminalInteractiveShell, self).init_magics()
        self.register_magics(TerminalMagics)

    def init_alias(self):
        # The parent class defines aliases that can be safely used with any
        # frontend.
        super(TerminalInteractiveShell, self).init_alias()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ['clear', 'more', 'less', 'man']:
                self.alias_manager.soft_define_alias(cmd, cmd)

    def __init__(self, *args, **kwargs):
        super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
        self.init_prompt_toolkit_cli()
        self.init_term_title()
        self.keep_running = True

        self.debugger_history = InMemoryHistory()

    def ask_exit(self):
        """Mark the REPL loop for termination (checked by ``interact``)."""
        self.keep_running = False

    rl_next_input = None

    def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        """Run the read-eval-print loop until ``ask_exit`` is called."""
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn('interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)

        self.keep_running = True
        while self.keep_running:
            print(self.separate_in, end='')

            try:
                code = self.prompt_for_code()
            except EOFError:
                if (not self.confirm_exit) \
                        or self.ask_yes_no('Do you really want to exit ([y]/n)?', 'y', 'n'):
                    self.ask_exit()
            else:
                if code:
                    self.run_cell(code, store_history=True)

    def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        # An extra layer of protection in case someone mashing Ctrl-C breaks
        # out of our internal code.
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn('mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)
        while True:
            try:
                self.interact()
                break
            except KeyboardInterrupt as e:
                print("\n%s escaped interact()\n" % type(e).__name__)
            finally:
                # An interrupt during the eventloop will mess up the
                # internal state of the prompt_toolkit library.
                # Stopping the eventloop fixes this, see
                # https://github.com/ipython/ipython/pull/9867
                if hasattr(self, '_eventloop'):
                    self._eventloop.stop()

    _inputhook = None

    def inputhook(self, context):
        if self._inputhook is not None:
            self._inputhook(context)

    active_eventloop = None

    def enable_gui(self, gui=None):
        """Install (or clear, when gui is falsy) the GUI event-loop hook."""
        if gui:
            self.active_eventloop, self._inputhook = \
                get_inputhook_name_and_func(gui)
        else:
            self.active_eventloop = self._inputhook = None

    # Run !system commands directly, not through pipes, so terminal programs
    # work correctly.
    system = InteractiveShell.system_raw

    def auto_rewrite_input(self, cmd):
        """Overridden from the parent class to use fancy rewriting prompt"""
        if not self.show_rewritten_input:
            return

        tokens = self.prompts.rewrite_prompt_tokens()
        if self.pt_app:
            print_formatted_text(PygmentsTokens(tokens), end='',
                                 style=self.pt_app.app.style)
            print(cmd)
        else:
            prompt = ''.join(s for t, s in tokens)
            print(prompt, cmd, sep='')

    _prompts_before = None

    def switch_doctest_mode(self, mode):
        """Switch prompts to classic for %doctest_mode"""
        if mode:
            self._prompts_before = self.prompts
            self.prompts = ClassicPrompts(self)
        elif self._prompts_before:
            self.prompts = self._prompts_before
            self._prompts_before = None
class KernelGatewayApp(JupyterApp): """Application that provisions Jupyter kernels and proxies HTTP/Websocket traffic to the kernels. - reads command line and environment variable settings - initializes managers and routes - creates a Tornado HTTP server - starts the Tornado event loop """ name = 'jupyter-kernel-gateway' version = __version__ description = """ Jupyter Kernel Gateway Provisions Jupyter kernels and proxies HTTP/Websocket traffic to them. """ # Also include when generating help options classes = [NotebookHTTPPersonality, JupyterWebsocketPersonality] # Enable some command line shortcuts aliases = aliases # Server IP / PORT binding port_env = 'KG_PORT' port_default_value = 8888 port = Integer(port_default_value, config=True, help="Port on which to listen (KG_PORT env var)") @default('port') def port_default(self): return int(os.getenv(self.port_env, self.port_default_value)) port_retries_env = 'KG_PORT_RETRIES' port_retries_default_value = 50 port_retries = Integer( port_retries_default_value, config=True, help= "Number of ports to try if the specified port is not available (KG_PORT_RETRIES env var)" ) @default('port_retries') def port_retries_default(self): return int( os.getenv(self.port_retries_env, self.port_retries_default_value)) ip_env = 'KG_IP' ip_default_value = '127.0.0.1' ip = Unicode(ip_default_value, config=True, help="IP address on which to listen (KG_IP env var)") @default('ip') def ip_default(self): return os.getenv(self.ip_env, self.ip_default_value) # Base URL base_url_env = 'KG_BASE_URL' base_url_default_value = '/' base_url = Unicode( base_url_default_value, config=True, help= """The base path for mounting all API resources (KG_BASE_URL env var)""" ) @default('base_url') def base_url_default(self): return os.getenv(self.base_url_env, self.base_url_default_value) # Token authorization auth_token_env = 'KG_AUTH_TOKEN' auth_token = Unicode( config=True, help= 'Authorization token required for all requests (KG_AUTH_TOKEN env var)' ) 
@default('auth_token') def _auth_token_default(self): return os.getenv(self.auth_token_env, '') # CORS headers allow_credentials_env = 'KG_ALLOW_CREDENTIALS' allow_credentials = Unicode( config=True, help= 'Sets the Access-Control-Allow-Credentials header. (KG_ALLOW_CREDENTIALS env var)' ) @default('allow_credentials') def allow_credentials_default(self): return os.getenv(self.allow_credentials_env, '') allow_headers_env = 'KG_ALLOW_HEADERS' allow_headers = Unicode( config=True, help= 'Sets the Access-Control-Allow-Headers header. (KG_ALLOW_HEADERS env var)' ) @default('allow_headers') def allow_headers_default(self): return os.getenv(self.allow_headers_env, '') allow_methods_env = 'KG_ALLOW_METHODS' allow_methods = Unicode( config=True, help= 'Sets the Access-Control-Allow-Methods header. (KG_ALLOW_METHODS env var)' ) @default('allow_methods') def allow_methods_default(self): return os.getenv(self.allow_methods_env, '') allow_origin_env = 'KG_ALLOW_ORIGIN' allow_origin = Unicode( config=True, help= 'Sets the Access-Control-Allow-Origin header. (KG_ALLOW_ORIGIN env var)' ) @default('allow_origin') def allow_origin_default(self): return os.getenv(self.allow_origin_env, '') expose_headers_env = 'KG_EXPOSE_HEADERS' expose_headers = Unicode( config=True, help= 'Sets the Access-Control-Expose-Headers header. (KG_EXPOSE_HEADERS env var)' ) @default('expose_headers') def expose_headers_default(self): return os.getenv(self.expose_headers_env, '') trust_xheaders_env = 'KG_TRUST_XHEADERS' trust_xheaders = CBool( False, config=True, help= 'Use x-* header values for overriding the remote-ip, useful when application is behing a proxy. (KG_TRUST_XHEADERS env var)' ) @default('trust_xheaders') def trust_xheaders_default(self): return strtobool(os.getenv(self.trust_xheaders_env, 'False')) max_age_env = 'KG_MAX_AGE' max_age = Unicode( config=True, help='Sets the Access-Control-Max-Age header. 
(KG_MAX_AGE env var)') @default('max_age') def max_age_default(self): return os.getenv(self.max_age_env, '') max_kernels_env = 'KG_MAX_KERNELS' max_kernels = Integer( None, config=True, allow_none=True, help= 'Limits the number of kernel instances allowed to run by this gateway. Unbounded by default. (KG_MAX_KERNELS env var)' ) @default('max_kernels') def max_kernels_default(self): val = os.getenv(self.max_kernels_env) return val if val is None else int(val) seed_uri_env = 'KG_SEED_URI' seed_uri = Unicode( None, config=True, allow_none=True, help= 'Runs the notebook (.ipynb) at the given URI on every kernel launched. No seed by default. (KG_SEED_URI env var)' ) @default('seed_uri') def seed_uri_default(self): return os.getenv(self.seed_uri_env) prespawn_count_env = 'KG_PRESPAWN_COUNT' prespawn_count = Integer( None, config=True, allow_none=True, help= 'Number of kernels to prespawn using the default language. No prespawn by default. (KG_PRESPAWN_COUNT env var)' ) @default('prespawn_count') def prespawn_count_default(self): val = os.getenv(self.prespawn_count_env) return val if val is None else int(val) default_kernel_name_env = 'KG_DEFAULT_KERNEL_NAME' default_kernel_name = Unicode( config=True, help= 'Default kernel name when spawning a kernel (KG_DEFAULT_KERNEL_NAME env var)' ) @default('default_kernel_name') def default_kernel_name_default(self): # defaults to Jupyter's default kernel name on empty string return os.getenv(self.default_kernel_name_env, '') force_kernel_name_env = 'KG_FORCE_KERNEL_NAME' force_kernel_name = Unicode( config=True, help= 'Override any kernel name specified in a notebook or request (KG_FORCE_KERNEL_NAME env var)' ) @default('force_kernel_name') def force_kernel_name_default(self): return os.getenv(self.force_kernel_name_env, '') env_process_whitelist_env = 'KG_ENV_PROCESS_WHITELIST' env_process_whitelist = List( config=True, help= """Environment variables allowed to be inherited from the spawning process by the kernel""" ) 
    @default('env_process_whitelist')
    def env_process_whitelist_default(self):
        # NOTE(review): when the env var is unset this returns [''] (splitting
        # the empty string), not an empty list -- confirm downstream consumers
        # tolerate the single empty entry.
        return os.getenv(self.env_process_whitelist_env, '').split(',')

    api_env = 'KG_API'
    api_default_value = 'kernel_gateway.jupyter_websocket'
    api = Unicode(
        api_default_value,
        config=True,
        help="""Controls which API to expose, that of a Jupyter notebook server, the seed
        notebook's, or one provided by another module, respectively using values
        'kernel_gateway.jupyter_websocket', 'kernel_gateway.notebook_http', or
        another fully qualified module name (KG_API env var)
        """
    )

    @default('api')
    def api_default(self):
        return os.getenv(self.api_env, self.api_default_value)

    @observe('api')
    def api_changed(self, event):
        """Validate a new api value eagerly so a bad module name fails at
        configuration time rather than at server startup."""
        try:
            self._load_api_module(event['new'])
        except ImportError:
            # re-raise with more sensible message to help the user
            raise ImportError('API module {} not found'.format(event['new']))

    certfile_env = 'KG_CERTFILE'
    certfile = Unicode(
        None, config=True, allow_none=True,
        help="""The full path to an SSL/TLS certificate file. (KG_CERTFILE env var)"""
    )

    @default('certfile')
    def certfile_default(self):
        return os.getenv(self.certfile_env)

    keyfile_env = 'KG_KEYFILE'
    keyfile = Unicode(
        None, config=True, allow_none=True,
        help="""The full path to a private key file for usage with SSL/TLS. (KG_KEYFILE env var)"""
    )

    @default('keyfile')
    def keyfile_default(self):
        return os.getenv(self.keyfile_env)

    client_ca_env = 'KG_CLIENT_CA'
    client_ca = Unicode(
        None, config=True, allow_none=True,
        help="""The full path to a certificate authority certificate for SSL/TLS client authentication. (KG_CLIENT_CA env var)"""
    )

    @default('client_ca')
    def client_ca_default(self):
        return os.getenv(self.client_ca_env)

    ssl_version_env = 'KG_SSL_VERSION'
    ssl_version_default_value = ssl.PROTOCOL_TLSv1_2
    ssl_version = Integer(
        None, config=True, allow_none=True,
        help="""Sets the SSL version to use for the web socket connection. (KG_SSL_VERSION env var)"""
    )

    @default('ssl_version')
    def ssl_version_default(self):
        # None (env var unset) defers to ssl_version_default_value inside
        # _build_ssl_options; otherwise coerce the env string to int.
        ssl_from_env = os.getenv(self.ssl_version_env)
        return ssl_from_env if ssl_from_env is None else int(ssl_from_env)

    # Populated in init_configurables; allow_none so the app can exist
    # before initialization.
    kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)

    kernel_spec_manager_class = Type(
        default_value=KernelSpecManager,
        config=True,
        help="""
        The kernel spec manager class to use. Should be a subclass
        of `jupyter_client.kernelspec.KernelSpecManager`.
        """
    )

    kernel_manager_class = Type(
        klass=MappingKernelManager,
        default_value=SeedingMappingKernelManager,
        config=True,
        help="""The kernel manager class to use."""
    )

    def _load_api_module(self, module_name):
        """Tries to import the given module name.

        Parameters
        ----------
        module_name: str
            Module name to import

        Returns
        -------
        module
            Module with the given name loaded using importlib.import_module
        """
        # some compatibility allowances: short aliases map to the two
        # bundled personality modules
        if module_name == 'jupyter-websocket':
            module_name = 'kernel_gateway.jupyter_websocket'
        elif module_name == 'notebook-http':
            module_name = 'kernel_gateway.notebook_http'
        return importlib.import_module(module_name)

    def _load_notebook(self, uri):
        """Loads a notebook from the local filesystem or HTTP(S) URL.

        Raises
        ------
        RuntimeError if there is no kernel spec matching the one specified in
        the notebook or forced via configuration.

        Returns
        -------
        object
            Notebook object from nbformat
        """
        parts = urlparse(uri)

        if parts.scheme not in ('http', 'https'):
            # Local file: drop any scheme/netloc fragments to recover a
            # plain filesystem path.
            path = parts._replace(scheme='', netloc='').geturl()
            with open(path) as nb_fh:
                notebook = nbformat.read(nb_fh, 4)
        else:
            # Remote file
            import requests
            resp = requests.get(uri)
            resp.raise_for_status()
            notebook = nbformat.reads(resp.text, 4)

        # Error if no kernel spec can handle the language requested
        kernel_name = self.force_kernel_name if self.force_kernel_name \
            else notebook['metadata']['kernelspec']['name']
        self.kernel_spec_manager.get_kernel_spec(kernel_name)

        return notebook

    def initialize(self, argv=None):
        """Initializes the base class, configurable manager instances, the
        Tornado web app, and the tornado HTTP server.

        Parameters
        ----------
        argv
            Command line arguments
        """
        super(KernelGatewayApp, self).initialize(argv)
        self.init_configurables()
        self.init_webapp()
        self.init_http_server()

    def init_configurables(self):
        """Initializes all configurable objects including a kernel manager,
        kernel spec manager, session manager, and personality.

        Any kernel pool configured by the personality will be its
        responsibility to shut down.

        Optionally, loads a notebook and prespawns the configured number of
        kernels.
        """
        # A plain KernelSpecManager is created first so _load_notebook can
        # validate the seed notebook's kernel spec; it is replaced below by
        # an instance of the configurable kernel_spec_manager_class.
        self.kernel_spec_manager = KernelSpecManager(parent=self)

        self.seed_notebook = None
        if self.seed_uri is not None:
            # Note: must be set before instantiating a SeedingMappingKernelManager
            self.seed_notebook = self._load_notebook(self.seed_uri)

        # Only pass a default kernel name when one is provided. Otherwise,
        # adopt whatever default the kernel manager wants to use.
        kwargs = {}
        if self.default_kernel_name:
            kwargs['default_kernel_name'] = self.default_kernel_name

        self.kernel_spec_manager = self.kernel_spec_manager_class(
            parent=self,
        )
        self.kernel_manager = self.kernel_manager_class(
            parent=self,
            log=self.log,
            connection_dir=self.runtime_dir,
            kernel_spec_manager=self.kernel_spec_manager,
            **kwargs
        )

        self.session_manager = SessionManager(
            log=self.log,
            kernel_manager=self.kernel_manager
        )
        # The gateway serves no file contents.
        self.contents_manager = None

        # Fail fast if the prespawn request can never be satisfied.
        if self.prespawn_count:
            if self.max_kernels and self.prespawn_count > self.max_kernels:
                raise RuntimeError(
                    'cannot prespawn {}; more than max kernels {}'.format(
                        self.prespawn_count, self.max_kernels))

        # Load the personality module named by the `api` trait and let it
        # build its own configurables (which may prespawn/seed kernels).
        api_module = self._load_api_module(self.api)
        func = getattr(api_module, 'create_personality')
        self.personality = func(parent=self, log=self.log)

        self.personality.init_configurables()

    def init_webapp(self):
        """Initializes Tornado web application with uri handlers.

        Adds the various managers and web-front configuration values to the
        Tornado settings for reference by the handlers.
        """
        # Enable the same pretty logging the notebook uses
        enable_pretty_logging()

        # Configure the tornado logging level too
        logging.getLogger().setLevel(self.log_level)

        handlers = self.personality.create_request_handlers()

        self.web_app = web.Application(
            handlers=handlers,
            kernel_manager=self.kernel_manager,
            session_manager=self.session_manager,
            contents_manager=self.contents_manager,
            kernel_spec_manager=self.kernel_spec_manager,
            kg_auth_token=self.auth_token,
            kg_allow_credentials=self.allow_credentials,
            kg_allow_headers=self.allow_headers,
            kg_allow_methods=self.allow_methods,
            kg_allow_origin=self.allow_origin,
            kg_expose_headers=self.expose_headers,
            kg_max_age=self.max_age,
            kg_max_kernels=self.max_kernels,
            kg_env_process_whitelist=self.env_process_whitelist,
            kg_api=self.api,
            kg_personality=self.personality,
            # Also set the allow_origin setting used by notebook so that the
            # check_origin method used everywhere respects the value
            allow_origin=self.allow_origin,
            # Always allow remote access (has been limited to localhost >= notebook 5.6)
            allow_remote_access=True
        )

        # promote the current personality's "config" tagged traitlet values to webapp settings
        for trait_name, trait_value in self.personality.class_traits(
                config=True).items():
            kg_name = 'kg_' + trait_name
            # a personality's traitlets may not overwrite the kernel gateway's
            if kg_name not in self.web_app.settings:
                self.web_app.settings[kg_name] = trait_value.get(
                    obj=self.personality)
            else:
                self.log.warning(
                    'The personality trait name, %s, conflicts with a kernel gateway trait.',
                    trait_name)

    def _build_ssl_options(self):
        """Build a dictionary of SSL options for the tornado HTTP server.

        Taken directly from jupyter/notebook code.

        Returns
        -------
        dict or None
            Options suitable for HTTPServer(ssl_options=...), or None when
            no SSL-related trait is set.
        """
        ssl_options = {}
        if self.certfile:
            ssl_options['certfile'] = self.certfile
        if self.keyfile:
            ssl_options['keyfile'] = self.keyfile
        if self.client_ca:
            ssl_options['ca_certs'] = self.client_ca
        if self.ssl_version:
            ssl_options['ssl_version'] = self.ssl_version
        if not ssl_options:
            # None indicates no SSL config
            ssl_options = None
        else:
            ssl_options.setdefault('ssl_version',
                                   self.ssl_version_default_value)
            # When a CA bundle is supplied, require client certificates.
            if ssl_options.get('ca_certs', False):
                ssl_options.setdefault('cert_reqs', ssl.CERT_REQUIRED)
        return ssl_options

    def init_http_server(self):
        """Initializes a HTTP server for the Tornado web application on the
        configured interface and port.

        Tries to find an open port if the one configured is not available
        using the same logic as the Jupyer Notebook server.
        """
        ssl_options = self._build_ssl_options()
        self.http_server = httpserver.HTTPServer(self.web_app,
                                                 xheaders=self.trust_xheaders,
                                                 ssl_options=ssl_options)

        # Try the configured port first, then up to port_retries more.
        for port in random_ports(self.port, self.port_retries + 1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    self.log.info(
                        'The port %i is already in use, trying another port.'
                        % port)
                    continue
                elif e.errno in (errno.EACCES,
                                 getattr(errno, 'WSAEACCES', errno.EACCES)):
                    self.log.warning("Permission to listen on port %i denied"
                                     % port)
                    continue
                else:
                    raise
            else:
                # Successful bind: remember the port actually used.
                self.port = port
                break
        else:
            # for/else: every candidate port failed.
            self.log.critical(
                'ERROR: the notebook server could not be started because '
                'no available port could be found.')
            self.exit(1)

    def start(self):
        """Starts an IO loop for the application."""
        super(KernelGatewayApp, self).start()

        self.log.info('Jupyter Kernel Gateway at http{}://{}:{}'.format(
            's' if self.keyfile else '', self.ip, self.port))

        self.io_loop = ioloop.IOLoop.current()

        if sys.platform != 'win32':
            # SIGHUP does not exist on Windows; elsewhere, ignore it so a
            # disconnected controlling terminal does not kill the server.
            signal.signal(signal.SIGHUP, signal.SIG_IGN)

        signal.signal(signal.SIGTERM, self._signal_stop)

        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            self.log.info("Interrupted...")
        finally:
            self.shutdown()

    def stop(self):
        """
        Stops the HTTP server and IO loop associated with the application.
        """
        def _stop():
            self.http_server.stop()
            self.io_loop.stop()
        # Schedule on the loop itself so the stop runs on the loop's thread.
        self.io_loop.add_callback(_stop)

    def shutdown(self):
        """Stop all kernels in the pool."""
        self.personality.shutdown()

    def _signal_stop(self, sig, frame):
        """SIGTERM handler: log and stop the IO loop."""
        self.log.info("Received signal to terminate.")
        self.io_loop.stop()
class UnionTrait(HasTraits):
    """Trait owner whose ``value`` accepts either a class or a boolean."""

    # Union validates against each candidate trait in the given order:
    # first as a Type, then as a Bool.
    value = Union([Type(), Bool()])
class A(HasTraits):
    """Trait owner with an unconstrained ``Type`` trait."""

    # No klass argument is given, so any class value is acceptable.
    klass = Type()
class A(HasTraits):
    """Trait owner whose ``klass`` trait names its class by import string."""

    # The target class is specified as a dotted import path rather than a
    # class object.
    klass = Type('ipython_genutils.ipstruct.Struct')
class TerminalInteractiveShell(InteractiveShell):
    """IPython interactive shell for the terminal, built on prompt_toolkit."""

    # Rows reserved below the prompt for the completion menu.
    space_for_menu = Integer(
        6,
        help='Number of line at the bottom of the screen '
             'to reserve for the completion menu').tag(config=True)

    def _space_for_menu_changed(self, old, new):
        # Changing the reserved space requires a layout rebuild.
        self._update_layout()

    # prompt_toolkit CommandLineInterface; None until init_prompt_toolkit_cli.
    pt_cli = None
    debugger_history = None
    # prompt_toolkit Application; None until init_prompt_toolkit_cli.
    _pt_app = None

    simple_prompt = Bool(
        _use_simple_prompt,
        help="""Use `raw_input` for the REPL, without completion, multiline input, and prompt colors.

            Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
            IPython own testing machinery, and emacs inferior-shell integration through elpy.

            This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT`
            environment variable is set, or the current terminal is not a tty.
            """).tag(config=True)

    @property
    def debugger_cls(self):
        # Plain Pdb when running without prompt_toolkit features.
        return Pdb if self.simple_prompt else TerminalPdb

    confirm_exit = Bool(
        True,
        help="""
        Set to confirm when you try to exit IPython with an EOF (Control-D
        in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
        you can force a direct exit without any confirmation.""",
    ).tag(config=True)

    editing_mode = Unicode(
        'emacs',
        help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",
    ).tag(config=True)

    mouse_support = Bool(
        False,
        help="Enable mouse support in the prompt").tag(config=True)

    # Either the string name of a Pygments style (or 'legacy') or a
    # Pygments Style subclass.
    highlighting_style = Union(
        [Unicode('legacy'), Type(klass=Style)],
        help="""The name or class of a Pygments style to use for syntax
        highlighting: \n %s""" % ', '.join(get_all_styles())).tag(config=True)

    @observe('highlighting_style')
    @observe('colors')
    def _highlighting_style_changed(self, change):
        self.refresh_style()

    def refresh_style(self):
        """Rebuild the cached style from the current highlighting_style."""
        self._style = self._make_style_from_name_or_cls(
            self.highlighting_style)

    highlighting_style_overrides = Dict(
        help="Override highlighting format for specific tokens").tag(
            config=True)

    true_color = Bool(
        False,
        help=("Use 24bit colors instead of 256 colors in prompt highlighting. "
              "If your terminal supports true color, the following command "
              "should print 'TRUECOLOR' in orange: "
              "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"")).tag(
                  config=True)

    editor = Unicode(
        get_default_editor(),
        help="Set the editor used by IPython (default to $EDITOR/vi/notepad)."
    ).tag(config=True)

    prompts_class = Type(
        Prompts,
        help='Class used to generate Prompt token for prompt_toolkit').tag(
            config=True)

    prompts = Instance(Prompts)

    @default('prompts')
    def _prompts_default(self):
        return self.prompts_class(self)

    @observe('prompts')
    def _(self, change):
        # New prompts require a layout rebuild.
        self._update_layout()

    @default('displayhook_class')
    def _displayhook_class_default(self):
        return RichPromptDisplayHook

    term_title = Bool(
        True,
        help="Automatically set the terminal title").tag(config=True)

    display_completions = Enum(
        ('column', 'multicolumn', 'readlinelike'),
        help=
        ("Options for displaying tab completions, 'column', 'multicolumn', and "
         "'readlinelike'. These options are for `prompt_toolkit`, see "
         "`prompt_toolkit` documentation for more information."),
        default_value='multicolumn').tag(config=True)

    highlight_matching_brackets = Bool(
        True,
        help="Highlight matching brackets .",
    ).tag(config=True)

    @observe('term_title')
    def init_term_title(self, change=None):
        # Enable or disable the terminal title.
        if self.term_title:
            toggle_set_term_title(True)
            set_term_title('IPython: ' + abbrev_cwd())
        else:
            toggle_set_term_title(False)

    def init_display_formatter(self):
        super(TerminalInteractiveShell, self).init_display_formatter()
        # terminal only supports plain text
        self.display_formatter.active_types = ['text/plain']

    def init_prompt_toolkit_cli(self):
        """Build the prompt_toolkit application, history, and event loop."""
        if self.simple_prompt:
            # Fall back to plain non-interactive output for tests.
            # This is very limited, and only accepts a single line.
            def prompt():
                return cast_unicode_py2(
                    input('In [%d]: ' % self.execution_count))
            self.prompt_for_code = prompt
            return

        # Set up keyboard shortcuts
        kbmanager = KeyBindingManager.for_prompt()
        register_ipython_shortcuts(kbmanager.registry, self)

        # Pre-populate history from IPython's history database
        history = InMemoryHistory()
        last_cell = u""
        for __, ___, cell in self.history_manager.get_tail(
                self.history_load_length, include_latest=True):
            # Ignore blank lines and consecutive duplicates
            cell = cell.rstrip()
            if cell and (cell != last_cell):
                history.append(cell)
                last_cell = cell

        self._style = self._make_style_from_name_or_cls(
            self.highlighting_style)
        # DynamicStyle re-reads self._style each redraw, so refresh_style
        # takes effect without rebuilding the application.
        style = DynamicStyle(lambda: self._style)

        editing_mode = getattr(EditingMode, self.editing_mode.upper())

        self._pt_app = create_prompt_application(
            editing_mode=editing_mode,
            key_bindings_registry=kbmanager.registry,
            history=history,
            completer=IPythonPTCompleter(shell=self),
            enable_history_search=True,
            style=style,
            mouse_support=self.mouse_support,
            **self._layout_options())
        self._eventloop = create_eventloop(self.inputhook)
        self.pt_cli = CommandLineInterface(
            self._pt_app,
            eventloop=self._eventloop,
            output=create_output(true_color=self.true_color))

    def _make_style_from_name_or_cls(self, name_or_cls):
        """
        Small wrapper that make an IPython compatible style from a style name

        We need that to add style for prompt ... etc.
        """
        style_overrides = {}
        if name_or_cls == 'legacy':
            # 'legacy' maps the old `colors` setting to a Pygments style.
            legacy = self.colors.lower()
            if legacy == 'linux':
                style_cls = get_style_by_name('monokai')
                style_overrides = _style_overrides_linux
            elif legacy == 'lightbg':
                style_overrides = _style_overrides_light_bg
                style_cls = get_style_by_name('pastie')
            elif legacy == 'neutral':
                # The default theme needs to be visible on both a dark background
                # and a light background, because we can't tell what the terminal
                # looks like. These tweaks to the default theme help with that.
                style_cls = get_style_by_name('default')
                style_overrides.update({
                    Token.Number: '#007700',
                    Token.Operator: 'noinherit',
                    Token.String: '#BB6622',
                    Token.Name.Function: '#2080D0',
                    Token.Name.Class: 'bold #2080D0',
                    Token.Name.Namespace: 'bold #2080D0',
                    Token.Prompt: '#009900',
                    Token.PromptNum: '#00ff00 bold',
                    Token.OutPrompt: '#990000',
                    Token.OutPromptNum: '#ff0000 bold',
                })
            elif legacy == 'nocolor':
                style_cls = _NoStyle
                style_overrides = {}
            else:
                raise ValueError('Got unknown colors: ', legacy)
        else:
            # A Pygments style name or a Style subclass was given directly.
            if isinstance(name_or_cls, string_types):
                style_cls = get_style_by_name(name_or_cls)
            else:
                style_cls = name_or_cls
            style_overrides = {
                Token.Prompt: '#009900',
                Token.PromptNum: '#00ff00 bold',
                Token.OutPrompt: '#990000',
                Token.OutPromptNum: '#ff0000 bold',
            }
        # User-configured token overrides always win.
        style_overrides.update(self.highlighting_style_overrides)
        style = PygmentsStyle.from_defaults(pygments_style_cls=style_cls,
                                            style_dict=style_overrides)

        return style

    def _layout_options(self):
        """
        Return the current layout option for the current Terminal InteractiveShell
        """
        return {
            'lexer': IPythonPTLexer(),
            'reserve_space_for_menu': self.space_for_menu,
            'get_prompt_tokens': self.prompts.in_prompt_tokens,
            'get_continuation_tokens':
                self.prompts.continuation_prompt_tokens,
            'multiline': True,
            'display_completions_in_columns':
                (self.display_completions == 'multicolumn'),

            # Highlight matching brackets, but only when this setting is
            # enabled, and only when the DEFAULT_BUFFER has the focus.
            'extra_input_processors': [
                ConditionalProcessor(
                    processor=HighlightMatchingBracketProcessor(
                        chars='[](){}'),
                    filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() &
                        Condition(lambda cli: self.highlight_matching_brackets))
            ],
        }

    def _update_layout(self):
        """
        Ask for a re computation of the application layout, if for example ,
        some configuration options have changed.
        """
        if self._pt_app:
            self._pt_app.layout = create_prompt_layout(
                **self._layout_options())

    def prompt_for_code(self):
        """Run one prompt_toolkit read cycle and return the entered text."""
        document = self.pt_cli.run(pre_run=self.pre_prompt,
                                   reset_current_buffer=True)
        return document.text

    def enable_win_unicode_console(self):
        """Enable win_unicode_console on Windows Pythons that need it."""
        if sys.version_info >= (3, 6):
            # Since PEP 528, Python uses the unicode APIs for the Windows
            # console by default, so WUC shouldn't be needed.
            return

        import win_unicode_console

        if PY3:
            win_unicode_console.enable()
        else:
            # https://github.com/ipython/ipython/issues/9768
            from win_unicode_console.streams import (TextStreamWrapper,
                                                     stdout_text_transcoded,
                                                     stderr_text_transcoded)

            class LenientStrStreamWrapper(TextStreamWrapper):
                # Accept bytes as well as text; decode with replacement.
                def write(self, s):
                    if isinstance(s, bytes):
                        s = s.decode(self.encoding, 'replace')
                    self.base.write(s)

            stdout_text_str = LenientStrStreamWrapper(stdout_text_transcoded)
            stderr_text_str = LenientStrStreamWrapper(stderr_text_transcoded)

            win_unicode_console.enable(stdout=stdout_text_str,
                                       stderr=stderr_text_str)

    def init_io(self):
        # Only Windows/.NET platforms need the console shims below.
        if sys.platform not in {'win32', 'cli'}:
            return

        self.enable_win_unicode_console()

        import colorama
        colorama.init()

        # For some reason we make these wrappers around stdout/stderr.
        # For now, we need to reset them so all output gets coloured.
        # https://github.com/ipython/ipython/issues/8669
        # io.std* are deprecated, but don't show our own deprecation warnings
        # during initialization of the deprecated API.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            io.stdout = io.IOStream(sys.stdout)
            io.stderr = io.IOStream(sys.stderr)

    def init_magics(self):
        super(TerminalInteractiveShell, self).init_magics()
        self.register_magics(TerminalMagics)

    def init_alias(self):
        # The parent class defines aliases that can be safely used with any
        # frontend.
        super(TerminalInteractiveShell, self).init_alias()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ['clear', 'more', 'less', 'man']:
                self.alias_manager.soft_define_alias(cmd, cmd)

    def __init__(self, *args, **kwargs):
        super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
        self.init_prompt_toolkit_cli()
        self.init_term_title()
        self.keep_running = True

        self.debugger_history = InMemoryHistory()

    def ask_exit(self):
        # interact() checks this flag each iteration.
        self.keep_running = False

    rl_next_input = None

    def pre_prompt(self):
        # Pre-fill the next prompt with text queued via rl_next_input.
        if self.rl_next_input:
            self.pt_cli.application.buffer.text = cast_unicode_py2(
                self.rl_next_input)
            self.rl_next_input = None

    def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        """Read-eval-print loop: prompt for code and run it until exit."""
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn(
                'interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.',
                DeprecationWarning,
                stacklevel=2)

        self.keep_running = True
        while self.keep_running:
            print(self.separate_in, end='')

            try:
                code = self.prompt_for_code()
            except EOFError:
                # Ctrl-D: maybe confirm, then exit the loop.
                if (not self.confirm_exit) \
                        or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'):
                    self.ask_exit()
            else:
                if code:
                    self.run_cell(code, store_history=True)

    def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        # An extra layer of protection in case someone mashing Ctrl-C breaks
        # out of our internal code.
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn(
                'mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.',
                DeprecationWarning,
                stacklevel=2)

        while True:
            try:
                self.interact()
                break
            except KeyboardInterrupt:
                print("\nKeyboardInterrupt escaped interact()\n")

    _inputhook = None

    def inputhook(self, context):
        # Delegate to the GUI-specific input hook, if one is enabled.
        if self._inputhook is not None:
            self._inputhook(context)

    def enable_gui(self, gui=None):
        """Install (or clear, when gui is falsy) the GUI event-loop hook."""
        if gui:
            self._inputhook = get_inputhook_func(gui)
        else:
            self._inputhook = None

    # Run !system commands directly, not through pipes, so terminal programs
    # work correctly.
    system = InteractiveShell.system_raw

    def auto_rewrite_input(self, cmd):
        """Overridden from the parent class to use fancy rewriting prompt"""
        if not self.show_rewritten_input:
            return

        tokens = self.prompts.rewrite_prompt_tokens()
        if self.pt_cli:
            self.pt_cli.print_tokens(tokens)
            print(cmd)
        else:
            # No prompt_toolkit CLI (simple_prompt): print plain text.
            prompt = ''.join(s for t, s in tokens)
            print(prompt, cmd, sep='')

    _prompts_before = None

    def switch_doctest_mode(self, mode):
        """Switch prompts to classic for %doctest_mode"""
        if mode:
            self._prompts_before = self.prompts
            self.prompts = ClassicPrompts(self)
        elif self._prompts_before:
            self.prompts = self._prompts_before
            self._prompts_before = None
        self._update_layout()
class NotebookClient(LoggingConfigurable): """ Encompasses a Client for executing cells in a notebook """ timeout: int = Integer( None, allow_none=True, help=dedent( """ The time to wait (in seconds) for output from executions. If a cell execution takes longer, a TimeoutError is raised. ``None`` or ``-1`` will disable the timeout. If ``timeout_func`` is set, it overrides ``timeout``. """ ), ).tag(config=True) timeout_func: t.Any = Any( default_value=None, allow_none=True, help=dedent( """ A callable which, when given the cell source as input, returns the time to wait (in seconds) for output from cell executions. If a cell execution takes longer, a TimeoutError is raised. Returning ``None`` or ``-1`` will disable the timeout for the cell. Not setting ``timeout_func`` will cause the client to default to using the ``timeout`` trait for all cells. The ``timeout_func`` trait overrides ``timeout`` if it is not ``None``. """ ), ).tag(config=True) interrupt_on_timeout: bool = Bool( False, help=dedent( """ If execution of a cell times out, interrupt the kernel and continue executing other cells rather than throwing an error and stopping. """ ), ).tag(config=True) startup_timeout: int = Integer( 60, help=dedent( """ The time to wait (in seconds) for the kernel to start. If kernel startup takes longer, a RuntimeError is raised. """ ), ).tag(config=True) allow_errors: bool = Bool( False, help=dedent( """ If ``False`` (default), when a cell raises an error the execution is stopped and a `CellExecutionError` is raised, except if the error name is in ``allow_error_names``. If ``True``, execution errors are ignored and the execution is continued until the end of the notebook. Output from exceptions is included in the cell output in both cases. """ ), ).tag(config=True) allow_error_names: t.List[str] = List( Unicode(), help=dedent( """ List of error names which won't stop the execution. 
Use this if the ``allow_errors`` option it too general and you want to allow only specific kinds of errors. """ ), ).tag(config=True) force_raise_errors: bool = Bool( False, help=dedent( """ If False (default), errors from executing the notebook can be allowed with a ``raises-exception`` tag on a single cell, or the ``allow_errors`` or ``allow_error_names`` configurable options for all cells. An allowed error will be recorded in notebook output, and execution will continue. If an error occurs when it is not explicitly allowed, a `CellExecutionError` will be raised. If True, `CellExecutionError` will be raised for any error that occurs while executing the notebook. This overrides the ``allow_errors`` and ``allow_error_names`` options and the ``raises-exception`` cell tag. """ ), ).tag(config=True) extra_arguments: t.List = List(Unicode()).tag(config=True) kernel_name: str = Unicode( '', help=dedent( """ Name of kernel to use to execute the cells. If not set, use the kernel_spec embedded in the notebook. """ ), ).tag(config=True) raise_on_iopub_timeout: bool = Bool( False, help=dedent( """ If ``False`` (default), then the kernel will continue waiting for iopub messages until it receives a kernel idle message, or until a timeout occurs, at which point the currently executing cell will be skipped. If ``True``, then an error will be raised after the first timeout. This option generally does not need to be used, but may be useful in contexts where there is the possibility of executing notebooks with memory-consuming infinite loops. """ ), ).tag(config=True) store_widget_state: bool = Bool( True, help=dedent( """ If ``True`` (default), then the state of the Jupyter widgets created at the kernel will be stored in the metadata of the notebook. """ ), ).tag(config=True) record_timing: bool = Bool( True, help=dedent( """ If ``True`` (default), then the execution timings of each cell will be stored in the metadata of the notebook. 
""" ), ).tag(config=True) iopub_timeout: int = Integer( 4, allow_none=False, help=dedent( """ The time to wait (in seconds) for IOPub output. This generally doesn't need to be set, but on some slow networks (such as CI systems) the default timeout might not be long enough to get all messages. """ ), ).tag(config=True) shell_timeout_interval: int = Integer( 5, allow_none=False, help=dedent( """ The time to wait (in seconds) for Shell output before retrying. This generally doesn't need to be set, but if one needs to check for dead kernels at a faster rate this can help. """ ), ).tag(config=True) shutdown_kernel = Enum( ['graceful', 'immediate'], default_value='graceful', help=dedent( """ If ``graceful`` (default), then the kernel is given time to clean up after executing all cells, e.g., to execute its ``atexit`` hooks. If ``immediate``, then the kernel is signaled to immediately terminate. """ ), ).tag(config=True) ipython_hist_file: str = Unicode( default_value=':memory:', help="""Path to file to use for SQLite history database for an IPython kernel. The specific value ``:memory:`` (including the colon at both end but not the back ticks), avoids creating a history file. Otherwise, IPython will create a history file for each kernel. When running kernels simultaneously (e.g. via multiprocessing) saving history a single SQLite file can result in database errors, so using ``:memory:`` is recommended in non-interactive contexts. 
""", ).tag(config=True) kernel_manager_class: KernelManager = Type(config=True, help='The kernel manager class to use.') @default('kernel_manager_class') def _kernel_manager_class_default(self) -> KernelManager: """Use a dynamic default to avoid importing jupyter_client at startup""" from jupyter_client import AsyncKernelManager return AsyncKernelManager _display_id_map: t.Dict[str, t.Dict] = Dict( help=dedent( """ mapping of locations of outputs with a given display_id tracks cell index and output index within cell.outputs for each appearance of the display_id { 'display_id': { cell_idx: [output_idx,] } } """ ) ) display_data_priority: t.List = List( [ 'text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/markdown', 'text/plain', ], help=""" An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """, ).tag(config=True) resources: t.Dict = Dict( help=dedent( """ Additional resources used in the conversion process. For example, passing ``{'metadata': {'path': run_path}}`` sets the execution path to ``run_path``. """ ) ) def __init__( self, nb: NotebookNode, km: t.Optional[KernelManager] = None, **kw) -> None: """Initializes the execution manager. Parameters ---------- nb : NotebookNode Notebook being executed. km : KernelManager (optional) Optional kernel manager. If none is provided, a kernel manager will be created. 
""" super().__init__(**kw) self.nb: NotebookNode = nb self.km: t.Optional[KernelManager] = km self.owns_km: bool = km is None # whether the NotebookClient owns the kernel manager self.kc: t.Optional[KernelClient] = None self.reset_execution_trackers() self.widget_registry: t.Dict[str, t.Dict] = { '@jupyter-widgets/output': { 'OutputModel': OutputWidget } } # comm_open_handlers should return an object with a .handle_msg(msg) method or None self.comm_open_handlers: t.Dict[str, t.Any] = { 'jupyter.widget': self.on_comm_open_jupyter_widget } def reset_execution_trackers(self) -> None: """Resets any per-execution trackers. """ self.task_poll_for_reply: t.Optional[asyncio.Future] = None self.code_cells_executed = 0 self._display_id_map = {} self.widget_state: t.Dict[str, t.Dict] = {} self.widget_buffers: t.Dict[str, t.List[t.Dict[str, str]]] = {} # maps to list of hooks, where the last is used, this is used # to support nested use of output widgets. self.output_hook_stack: t.Any = collections.defaultdict(list) # our front-end mimicing Output widgets self.comm_objects: t.Dict[str, t.Any] = {} def create_kernel_manager(self) -> KernelManager: """Creates a new kernel manager. Returns ------- km : KernelManager Kernel manager whose client class is asynchronous. """ if not self.kernel_name: kn = self.nb.metadata.get('kernelspec', {}).get('name') if kn is not None: self.kernel_name = kn if not self.kernel_name: self.km = self.kernel_manager_class(config=self.config) else: self.km = self.kernel_manager_class(kernel_name=self.kernel_name, config=self.config) # If the current kernel manager is still using the default (synchronous) KernelClient class, # switch to the async version since that's what NBClient prefers. 
if self.km.client_class == 'jupyter_client.client.KernelClient': self.km.client_class = 'jupyter_client.asynchronous.AsyncKernelClient' return self.km async def _async_cleanup_kernel(self) -> None: assert self.km is not None now = self.shutdown_kernel == "immediate" try: # Queue the manager to kill the process, and recover gracefully if it's already dead. if await ensure_async(self.km.is_alive()): await ensure_async(self.km.shutdown_kernel(now=now)) except RuntimeError as e: # The error isn't specialized, so we have to check the message if 'No kernel is running!' not in str(e): raise finally: # Remove any state left over even if we failed to stop the kernel await ensure_async(self.km.cleanup_resources()) if getattr(self, "kc") and self.kc is not None: await ensure_async(self.kc.stop_channels()) self.kc = None self.km = None _cleanup_kernel = run_sync(_async_cleanup_kernel) async def async_start_new_kernel(self, **kwargs) -> None: """Creates a new kernel. Parameters ---------- kwargs : Any options for ``self.kernel_manager_class.start_kernel()``. Because that defaults to AsyncKernelManager, this will likely include options accepted by ``AsyncKernelManager.start_kernel()``, which includes ``cwd``. """ assert self.km is not None resource_path = self.resources.get('metadata', {}).get('path') or None if resource_path and 'cwd' not in kwargs: kwargs["cwd"] = resource_path has_history_manager_arg = any( arg.startswith('--HistoryManager.hist_file') for arg in self.extra_arguments) if (hasattr(self.km, 'ipykernel') and self.km.ipykernel and self.ipython_hist_file and not has_history_manager_arg): self.extra_arguments += ['--HistoryManager.hist_file={}'.format(self.ipython_hist_file)] await ensure_async(self.km.start_kernel(extra_arguments=self.extra_arguments, **kwargs)) start_new_kernel = run_sync(async_start_new_kernel) async def async_start_new_kernel_client(self) -> KernelClient: """Creates a new kernel client. 
Returns ------- kc : KernelClient Kernel client as created by the kernel manager ``km``. """ assert self.km is not None self.kc = self.km.client() await ensure_async(self.kc.start_channels()) try: await ensure_async(self.kc.wait_for_ready(timeout=self.startup_timeout)) except RuntimeError: await self._async_cleanup_kernel() raise self.kc.allow_stdin = False return self.kc start_new_kernel_client = run_sync(async_start_new_kernel_client) @contextmanager def setup_kernel(self, **kwargs) -> t.Generator: """ Context manager for setting up the kernel to execute a notebook. The assigns the Kernel Manager (``self.km``) if missing and Kernel Client(``self.kc``). When control returns from the yield it stops the client's zmq channels, and shuts down the kernel. """ # by default, cleanup the kernel client if we own the kernel manager # and keep it alive if we don't cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km) # Can't use run_until_complete on an asynccontextmanager function :( if self.km is None: self.km = self.create_kernel_manager() if not self.km.has_kernel: self.start_new_kernel(**kwargs) self.start_new_kernel_client() try: yield finally: if cleanup_kc: self._cleanup_kernel() @asynccontextmanager async def async_setup_kernel(self, **kwargs) -> t.AsyncGenerator: """ Context manager for setting up the kernel to execute a notebook. This assigns the Kernel Manager (``self.km``) if missing and Kernel Client(``self.kc``). When control returns from the yield it stops the client's zmq channels, and shuts down the kernel. Handlers for SIGINT and SIGTERM are also added to cleanup in case of unexpected shutdown. """ # by default, cleanup the kernel client if we own the kernel manager # and keep it alive if we don't cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km) if self.km is None: self.km = self.create_kernel_manager() # self._cleanup_kernel uses run_async, which ensures the ioloop is running again. # This is necessary as the ioloop has stopped once atexit fires. 
atexit.register(self._cleanup_kernel) def on_signal(): asyncio.ensure_future(self._async_cleanup_kernel()) atexit.unregister(self._cleanup_kernel) loop = asyncio.get_event_loop() try: loop.add_signal_handler(signal.SIGINT, on_signal) loop.add_signal_handler(signal.SIGTERM, on_signal) except (NotImplementedError, RuntimeError): # NotImplementedError: Windows does not support signals. # RuntimeError: Raised when add_signal_handler is called outside the main thread pass if not self.km.has_kernel: await self.async_start_new_kernel(**kwargs) await self.async_start_new_kernel_client() try: yield finally: if cleanup_kc: await self._async_cleanup_kernel() atexit.unregister(self._cleanup_kernel) try: loop.remove_signal_handler(signal.SIGINT) loop.remove_signal_handler(signal.SIGTERM) except (NotImplementedError, RuntimeError): pass async def async_execute( self, reset_kc: bool = False, **kwargs) -> NotebookNode: """ Executes each code cell. Parameters ---------- kwargs : Any option for ``self.kernel_manager_class.start_kernel()``. Because that defaults to AsyncKernelManager, this will likely include options accepted by ``jupyter_client.AsyncKernelManager.start_kernel()``, which includes ``cwd``. ``reset_kc`` if True, the kernel client will be reset and a new one will be created (default: False). Returns ------- nb : NotebookNode The executed notebook. """ if reset_kc and self.owns_km: await self._async_cleanup_kernel() self.reset_execution_trackers() async with self.async_setup_kernel(**kwargs): assert self.kc is not None self.log.info("Executing notebook with kernel: %s" % self.kernel_name) msg_id = await ensure_async(self.kc.kernel_info()) info_msg = await self.async_wait_for_reply(msg_id) if info_msg is not None: if 'language_info' in info_msg['content']: self.nb.metadata['language_info'] = info_msg['content']['language_info'] else: raise RuntimeError( 'Kernel info received message content has no "language_info" key. 
' 'Content is:\n' + str(info_msg['content']) ) for index, cell in enumerate(self.nb.cells): # Ignore `'execution_count' in content` as it's always 1 # when store_history is False await self.async_execute_cell( cell, index, execution_count=self.code_cells_executed + 1 ) self.set_widgets_metadata() return self.nb execute = run_sync(async_execute) def set_widgets_metadata(self) -> None: if self.widget_state: self.nb.metadata.widgets = { 'application/vnd.jupyter.widget-state+json': { 'state': { model_id: self._serialize_widget_state(state) for model_id, state in self.widget_state.items() if '_model_name' in state }, 'version_major': 2, 'version_minor': 0, } } for key, widget in self.nb.metadata.widgets[ 'application/vnd.jupyter.widget-state+json' ]['state'].items(): buffers = self.widget_buffers.get(key) if buffers: widget['buffers'] = buffers def _update_display_id( self, display_id: str, msg: t.Dict) -> None: """Update outputs with a given display_id""" if display_id not in self._display_id_map: self.log.debug("display id %r not in %s", display_id, self._display_id_map) return if msg['header']['msg_type'] == 'update_display_data': msg['header']['msg_type'] = 'display_data' try: out = output_from_msg(msg) except ValueError: self.log.error("unhandled iopub msg: " + msg['msg_type']) return for cell_idx, output_indices in self._display_id_map[display_id].items(): cell = self.nb['cells'][cell_idx] outputs = cell['outputs'] for output_idx in output_indices: outputs[output_idx]['data'] = out['data'] outputs[output_idx]['metadata'] = out['metadata'] async def _async_poll_for_reply( self, msg_id: str, cell: NotebookNode, timeout: t.Optional[int], task_poll_output_msg: asyncio.Future, task_poll_kernel_alive: asyncio.Future) -> t.Dict: assert self.kc is not None new_timeout: t.Optional[float] = None if timeout is not None: deadline = monotonic() + timeout new_timeout = float(timeout) while True: try: msg = await ensure_async(self.kc.shell_channel.get_msg(timeout=new_timeout)) 
if msg['parent_header'].get('msg_id') == msg_id: if self.record_timing: cell['metadata']['execution']['shell.execute_reply'] = timestamp() try: await asyncio.wait_for(task_poll_output_msg, self.iopub_timeout) except (asyncio.TimeoutError, Empty): if self.raise_on_iopub_timeout: task_poll_kernel_alive.cancel() raise CellTimeoutError.error_from_timeout_and_cell( "Timeout waiting for IOPub output", self.iopub_timeout, cell ) else: self.log.warning("Timeout waiting for IOPub output") task_poll_kernel_alive.cancel() return msg else: if new_timeout is not None: new_timeout = max(0, deadline - monotonic()) except Empty: # received no message, check if kernel is still alive assert timeout is not None task_poll_kernel_alive.cancel() await self._async_check_alive() await self._async_handle_timeout(timeout, cell) async def _async_poll_output_msg( self, parent_msg_id: str, cell: NotebookNode, cell_index: int) -> None: assert self.kc is not None while True: msg = await ensure_async(self.kc.iopub_channel.get_msg(timeout=None)) if msg['parent_header'].get('msg_id') == parent_msg_id: try: # Will raise CellExecutionComplete when completed self.process_message(msg, cell, cell_index) except CellExecutionComplete: return async def _async_poll_kernel_alive(self) -> None: while True: await asyncio.sleep(1) try: await self._async_check_alive() except DeadKernelError: assert self.task_poll_for_reply is not None self.task_poll_for_reply.cancel() return def _get_timeout(self, cell: t.Optional[NotebookNode]) -> int: if self.timeout_func is not None and cell is not None: timeout = self.timeout_func(cell) else: timeout = self.timeout if not timeout or timeout < 0: timeout = None return timeout async def _async_handle_timeout( self, timeout: int, cell: t.Optional[NotebookNode] = None) -> None: self.log.error("Timeout waiting for execute reply (%is)." 
% timeout) if self.interrupt_on_timeout: self.log.error("Interrupting kernel") assert self.km is not None await ensure_async(self.km.interrupt_kernel()) else: raise CellTimeoutError.error_from_timeout_and_cell( "Cell execution timed out", timeout, cell ) async def _async_check_alive(self) -> None: assert self.kc is not None if not await ensure_async(self.kc.is_alive()): self.log.error("Kernel died while waiting for execute reply.") raise DeadKernelError("Kernel died") async def async_wait_for_reply( self, msg_id: str, cell: t.Optional[NotebookNode] = None) -> t.Optional[t.Dict]: assert self.kc is not None # wait for finish, with timeout timeout = self._get_timeout(cell) cummulative_time = 0 while True: try: msg = await ensure_async( self.kc.shell_channel.get_msg( timeout=self.shell_timeout_interval ) ) except Empty: await self._async_check_alive() cummulative_time += self.shell_timeout_interval if timeout and cummulative_time > timeout: await self._async_async_handle_timeout(timeout, cell) break else: if msg['parent_header'].get('msg_id') == msg_id: return msg return None wait_for_reply = run_sync(async_wait_for_reply) # Backwards compatability naming for papermill _wait_for_reply = wait_for_reply def _passed_deadline(self, deadline: int) -> bool: if deadline is not None and deadline - monotonic() <= 0: return True return False def _check_raise_for_error( self, cell: NotebookNode, exec_reply: t.Optional[t.Dict]) -> None: if exec_reply is None: return None exec_reply_content = exec_reply['content'] if exec_reply_content['status'] != 'error': return None cell_allows_errors = (not self.force_raise_errors) and ( self.allow_errors or exec_reply_content.get('ename') in self.allow_error_names or "raises-exception" in cell.metadata.get("tags", [])) if not cell_allows_errors: raise CellExecutionError.from_cell_and_msg(cell, exec_reply_content) async def async_execute_cell( self, cell: NotebookNode, cell_index: int, execution_count: t.Optional[int] = None, store_history: 
bool = True) -> NotebookNode: """ Executes a single code cell. To execute all cells see :meth:`execute`. Parameters ---------- cell : nbformat.NotebookNode The cell which is currently being processed. cell_index : int The position of the cell within the notebook object. execution_count : int The execution count to be assigned to the cell (default: Use kernel response) store_history : bool Determines if history should be stored in the kernel (default: False). Specific to ipython kernels, which can store command histories. Returns ------- output : dict The execution output payload (or None for no output). Raises ------ CellExecutionError If execution failed and should raise an exception, this will be raised with defaults about the failure. Returns ------- cell : NotebookNode The cell which was just processed. """ assert self.kc is not None if cell.cell_type != 'code' or not cell.source.strip(): self.log.debug("Skipping non-executing cell %s", cell_index) return cell if self.record_timing and 'execution' not in cell['metadata']: cell['metadata']['execution'] = {} self.log.debug("Executing cell:\n%s", cell.source) parent_msg_id = await ensure_async( self.kc.execute( cell.source, store_history=store_history, stop_on_error=not self.allow_errors ) ) # We launched a code cell to execute self.code_cells_executed += 1 exec_timeout = self._get_timeout(cell) cell.outputs = [] self.clear_before_next_output = False task_poll_kernel_alive = asyncio.ensure_future( self._async_poll_kernel_alive() ) task_poll_output_msg = asyncio.ensure_future( self._async_poll_output_msg(parent_msg_id, cell, cell_index) ) self.task_poll_for_reply = asyncio.ensure_future( self._async_poll_for_reply( parent_msg_id, cell, exec_timeout, task_poll_output_msg, task_poll_kernel_alive ) ) try: exec_reply = await self.task_poll_for_reply except asyncio.CancelledError: # can only be cancelled by task_poll_kernel_alive when the kernel is dead task_poll_output_msg.cancel() raise DeadKernelError("Kernel died") 
except Exception as e: # Best effort to cancel request if it hasn't been resolved try: # Check if the task_poll_output is doing the raising for us if not isinstance(e, CellControlSignal): task_poll_output_msg.cancel() finally: raise if execution_count: cell['execution_count'] = execution_count self._check_raise_for_error(cell, exec_reply) self.nb['cells'][cell_index] = cell return cell execute_cell = run_sync(async_execute_cell) def process_message( self, msg: t.Dict, cell: NotebookNode, cell_index: int) -> t.Optional[t.List]: """ Processes a kernel message, updates cell state, and returns the resulting output object that was appended to cell.outputs. The input argument *cell* is modified in-place. Parameters ---------- msg : dict The kernel message being processed. cell : nbformat.NotebookNode The cell which is currently being processed. cell_index : int The position of the cell within the notebook object. Returns ------- output : dict The execution output payload (or None for no output). Raises ------ CellExecutionComplete Once a message arrives which indicates computation completeness. 
""" msg_type = msg['msg_type'] self.log.debug("msg_type: %s", msg_type) content = msg['content'] self.log.debug("content: %s", content) display_id = content.get('transient', {}).get('display_id', None) if display_id and msg_type in {'execute_result', 'display_data', 'update_display_data'}: self._update_display_id(display_id, msg) # set the prompt number for the input and the output if 'execution_count' in content: cell['execution_count'] = content['execution_count'] if self.record_timing: if msg_type == 'status': if content['execution_state'] == 'idle': cell['metadata']['execution']['iopub.status.idle'] = timestamp() elif content['execution_state'] == 'busy': cell['metadata']['execution']['iopub.status.busy'] = timestamp() elif msg_type == 'execute_input': cell['metadata']['execution']['iopub.execute_input'] = timestamp() if msg_type == 'status': if content['execution_state'] == 'idle': raise CellExecutionComplete() elif msg_type == 'clear_output': self.clear_output(cell.outputs, msg, cell_index) elif msg_type.startswith('comm'): self.handle_comm_msg(cell.outputs, msg, cell_index) # Check for remaining messages we don't process elif msg_type not in ['execute_input', 'update_display_data']: # Assign output as our processed "result" return self.output(cell.outputs, msg, display_id, cell_index) return None def output( self, outs: t.List, msg: t.Dict, display_id: str, cell_index: int) -> t.Optional[t.List]: msg_type = msg['msg_type'] parent_msg_id = msg['parent_header'].get('msg_id') if self.output_hook_stack[parent_msg_id]: # if we have a hook registered, it will overrride our # default output behaviour (e.g. 
OutputWidget) hook = self.output_hook_stack[parent_msg_id][-1] hook.output(outs, msg, display_id, cell_index) return None try: out = output_from_msg(msg) except ValueError: self.log.error("unhandled iopub msg: " + msg_type) return None if self.clear_before_next_output: self.log.debug('Executing delayed clear_output') outs[:] = [] self.clear_display_id_mapping(cell_index) self.clear_before_next_output = False if display_id: # record output index in: # _display_id_map[display_id][cell_idx] cell_map = self._display_id_map.setdefault(display_id, {}) output_idx_list = cell_map.setdefault(cell_index, []) output_idx_list.append(len(outs)) outs.append(out) return out def clear_output( self, outs: t.List, msg: t.Dict, cell_index: int) -> None: content = msg['content'] parent_msg_id = msg['parent_header'].get('msg_id') if self.output_hook_stack[parent_msg_id]: # if we have a hook registered, it will overrride our # default clear_output behaviour (e.g. OutputWidget) hook = self.output_hook_stack[parent_msg_id][-1] hook.clear_output(outs, msg, cell_index) return if content.get('wait'): self.log.debug('Wait to clear output') self.clear_before_next_output = True else: self.log.debug('Immediate clear output') outs[:] = [] self.clear_display_id_mapping(cell_index) def clear_display_id_mapping( self, cell_index: int) -> None: for display_id, cell_map in self._display_id_map.items(): if cell_index in cell_map: cell_map[cell_index] = [] def handle_comm_msg( self, outs: t.List, msg: t.Dict, cell_index: int) -> None: content = msg['content'] data = content['data'] if self.store_widget_state and 'state' in data: # ignore custom msg'es self.widget_state.setdefault(content['comm_id'], {}).update(data['state']) if 'buffer_paths' in data and data['buffer_paths']: self.widget_buffers[content['comm_id']] = self._get_buffer_data(msg) # There are cases where we need to mimic a frontend, to get similar behaviour as # when using the Output widget from Jupyter lab/notebook if msg['msg_type'] == 
'comm_open': target = msg['content'].get('target_name') handler = self.comm_open_handlers.get(target) if handler: comm_id = msg['content']['comm_id'] comm_object = handler(msg) if comm_object: self.comm_objects[comm_id] = comm_object else: self.log.warning(f'No handler found for comm target {target!r}') elif msg['msg_type'] == 'comm_msg': content = msg['content'] comm_id = msg['content']['comm_id'] if comm_id in self.comm_objects: self.comm_objects[comm_id].handle_msg(msg) def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]: """Serialize a widget state, following format in @jupyter-widgets/schema.""" return { 'model_name': state.get('_model_name'), 'model_module': state.get('_model_module'), 'model_module_version': state.get('_model_module_version'), 'state': state, } def _get_buffer_data(self, msg: t.Dict) -> t.List[t.Dict[str, str]]: encoded_buffers = [] paths = msg['content']['data']['buffer_paths'] buffers = msg['buffers'] for path, buffer in zip(paths, buffers): encoded_buffers.append( { 'data': base64.b64encode(buffer).decode('utf-8'), 'encoding': 'base64', 'path': path, } ) return encoded_buffers def register_output_hook( self, msg_id: str, hook: OutputWidget) -> None: """Registers an override object that handles output/clear_output instead. 
Multiple hooks can be registered, where the last one will be used (stack based) """ # mimics # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#registermessagehook self.output_hook_stack[msg_id].append(hook) def remove_output_hook( self, msg_id: str, hook: OutputWidget) -> None: """Unregisters an override object that handles output/clear_output instead""" # mimics # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#removemessagehook removed_hook = self.output_hook_stack[msg_id].pop() assert removed_hook == hook def on_comm_open_jupyter_widget(self, msg: t.Dict): content = msg['content'] data = content['data'] state = data['state'] comm_id = msg['content']['comm_id'] module = self.widget_registry.get(state['_model_module']) if module: widget_class = module.get(state['_model_name']) if widget_class: return widget_class(comm_id, state, self.kc, self)
class IPythonKernel(KernelBase):
    """Jupyter kernel wrapping an IPython ``InteractiveShell``.

    Wires the shell's display/exec machinery to ZMQ sockets, adds comm
    support, optional debugpy-based debugging, and implements the kernel
    protocol handlers (``do_execute``, ``do_complete``, ...).
    """

    # The wrapped InteractiveShell instance (created in __init__).
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    # Class used to instantiate the shell; subclasses may override.
    shell_class = Type(ZMQInteractiveShell)

    use_experimental_completions = Bool(
        True,
        help=
        "Set this flag to False to deactivate the use of experimental IPython completion APIs.",
    ).tag(config=True)

    # Stream carrying DAP frames; only created when debugpy is importable.
    debugpy_stream = Instance(
        ZMQStream, allow_none=True) if _is_debugpy_available else None

    user_module = Any()

    @observe('user_module')
    @observe_compat
    def _user_module_changed(self, change):
        # Keep the shell's module in sync with the trait.
        if self.shell is not None:
            self.shell.user_module = change['new']

    user_ns = Instance(dict, args=None, allow_none=True)

    @observe('user_ns')
    @observe_compat
    def _user_ns_changed(self, change):
        # Propagate the new namespace and re-seed IPython's default names.
        if self.shell is not None:
            self.shell.user_ns = change['new']
            self.shell.init_user_ns()

    # A reference to the Python builtin 'raw_input' function.
    # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
    _sys_raw_input = Any()
    _sys_eval_input = Any()

    def __init__(self, **kwargs):
        """Create the shell, hook up display/comm plumbing and the debugger."""
        super().__init__(**kwargs)

        # Initialize the Debugger
        if _is_debugpy_available:
            self.debugger = Debugger(self.log, self.debugpy_stream,
                                     self._publish_debug_event,
                                     self.debug_shell_socket, self.session)

        # Initialize the InteractiveShell subclass
        self.shell = self.shell_class.instance(
            parent=self,
            profile_dir=self.profile_dir,
            user_module=self.user_module,
            user_ns=self.user_ns,
            kernel=self,
            compiler_class=XCachingCompiler,
        )
        # Route execute_result / display_data through our ZMQ iopub socket.
        self.shell.displayhook.session = self.session
        self.shell.displayhook.pub_socket = self.iopub_socket
        self.shell.displayhook.topic = self._topic('execute_result')
        self.shell.display_pub.session = self.session
        self.shell.display_pub.pub_socket = self.iopub_socket

        self.comm_manager = CommManager(parent=self, kernel=self)

        self.shell.configurables.append(self.comm_manager)
        # Dispatch comm_* shell messages straight to the comm manager.
        comm_msg_types = ['comm_open', 'comm_msg', 'comm_close']
        for msg_type in comm_msg_types:
            self.shell_handlers[msg_type] = getattr(self.comm_manager,
                                                    msg_type)

        if _use_appnope() and self._darwin_app_nap:
            # Disable app-nap as the kernel is not a gui but can have guis
            import appnope
            appnope.nope()

    # Help menu entries shown by the notebook front-end.
    help_links = List([
        {
            'text': "Python Reference",
            'url': "https://docs.python.org/%i.%i" % sys.version_info[:2],
        },
        {
            'text': "IPython Reference",
            'url': "https://ipython.org/documentation.html",
        },
        {
            'text': "NumPy Reference",
            'url': "https://docs.scipy.org/doc/numpy/reference/",
        },
        {
            'text': "SciPy Reference",
            'url': "https://docs.scipy.org/doc/scipy/reference/",
        },
        {
            'text': "Matplotlib Reference",
            'url': "https://matplotlib.org/contents.html",
        },
        {
            'text': "SymPy Reference",
            'url': "http://docs.sympy.org/latest/index.html",
        },
        {
            'text': "pandas Reference",
            'url': "https://pandas.pydata.org/pandas-docs/stable/",
        },
    ]).tag(config=True)

    # Kernel info fields
    implementation = 'ipython'
    implementation_version = release.version
    language_info = {
        'name': 'python',
        'version': sys.version.split()[0],
        'mimetype': 'text/x-python',
        'codemirror_mode': {
            'name': 'ipython',
            'version': sys.version_info[0]
        },
        'pygments_lexer': 'ipython%d' % 3,
        'nbconvert_exporter': 'python',
        'file_extension': '.py'
    }

    def dispatch_debugpy(self, msg):
        """Forward a raw debugpy (DAP) frame from ZMQ to the debugger client."""
        if _is_debugpy_available:
            # The first frame is the socket id, we can drop it
            frame = msg[1].bytes.decode('utf-8')
            self.log.debug("Debugpy received: %s", frame)
            self.debugger.tcp_client.receive_dap_frame(frame)

    @property
    def banner(self):
        # Delegate to the shell's banner.
        return self.shell.banner

    async def poll_stopped_queue(self):
        """Continuously forward debugger 'stopped' events (runs forever)."""
        while True:
            await self.debugger.handle_stopped_event()

    def start(self):
        """Start the kernel: reset exit flag, wire debugpy, start base kernel."""
        self.shell.exit_now = False
        if self.debugpy_stream is None:
            self.log.warning(
                "debugpy_stream undefined, debugging will not be enabled")
        else:
            self.debugpy_stream.on_recv(self.dispatch_debugpy, copy=False)
        super().start()
        if self.debugpy_stream:
            # Poll debugger events on the control thread's asyncio loop.
            asyncio.run_coroutine_threadsafe(
                self.poll_stopped_queue(),
                self.control_thread.io_loop.asyncio_loop)

    def set_parent(self, ident, parent, channel='shell'):
        """Overridden from parent to tell the display hook and output streams
        about the parent message.
        """
        super().set_parent(ident, parent, channel)
        if channel == 'shell':
            self.shell.set_parent(parent)

    def init_metadata(self, parent):
        """Initialize metadata.

        Run at the beginning of each execution request.
        """
        md = super().init_metadata(parent)
        # FIXME: remove deprecated ipyparallel-specific code
        # This is required for ipyparallel < 5.0
        md.update({
            'dependencies_met': True,
            'engine': self.ident,
        })
        return md

    def finish_metadata(self, parent, metadata, reply_content):
        """Finish populating metadata.

        Run after completing an execution request.
        """
        # FIXME: remove deprecated ipyparallel-specific code
        # This is required by ipyparallel < 5.0
        metadata["status"] = reply_content["status"]
        if (reply_content["status"] == "error"
                and reply_content["ename"] == "UnmetDependency"):
            metadata["dependencies_met"] = False

        return metadata

    def _forward_input(self, allow_stdin=False):
        """Forward raw_input and getpass to the current frontend.

        via input_request
        """
        self._allow_stdin = allow_stdin

        # Save the real builtins so _restore_input can put them back.
        self._sys_raw_input = builtins.input
        builtins.input = self.raw_input

        self._save_getpass = getpass.getpass
        getpass.getpass = self.getpass

    def _restore_input(self):
        """Restore raw_input, getpass"""
        builtins.input = self._sys_raw_input
        getpass.getpass = self._save_getpass

    @property
    def execution_count(self):
        # The shell's counter is authoritative.
        return self.shell.execution_count

    @execution_count.setter
    def execution_count(self, value):
        # Ignore the incrementing done by KernelBase, in favour of our shell's
        # execution counter.
        pass

    @contextmanager
    def _cancel_on_sigint(self, future):
        """ContextManager for capturing SIGINT and cancelling a future

        SIGINT raises in the event loop when running async code,
        but we want it to halt a coroutine.

        Ideally, it would raise KeyboardInterrupt,
        but this turns it into a CancelledError.
        At least it gets a decent traceback to the user.
        """
        sigint_future = asyncio.Future()

        # whichever future finishes first,
        # cancel the other one
        def cancel_unless_done(f, _ignored):
            if f.cancelled() or f.done():
                return
            f.cancel()

        # when sigint finishes,
        # abort the coroutine with CancelledError
        sigint_future.add_done_callback(partial(cancel_unless_done, future))
        # when the main future finishes,
        # stop watching for SIGINT events
        future.add_done_callback(partial(cancel_unless_done, sigint_future))

        def handle_sigint(*args):
            def set_sigint_result():
                if sigint_future.cancelled() or sigint_future.done():
                    return
                sigint_future.set_result(1)

            # use add_callback for thread safety
            self.io_loop.add_callback(set_sigint_result)

        # set the custom sigint handler during this context
        save_sigint = signal.signal(signal.SIGINT, handle_sigint)
        try:
            yield
        finally:
            # restore the previous sigint handler
            signal.signal(signal.SIGINT, save_sigint)

    async def do_execute(self, code, silent, store_history=True,
                         user_expressions=None, allow_stdin=False):
        """Execute user code via the shell and build the execute_reply content."""
        shell = self.shell  # we'll need this a lot here

        self._forward_input(allow_stdin)

        reply_content = {}
        if hasattr(shell, 'run_cell_async') and hasattr(
                shell, 'should_run_async'):
            run_cell = shell.run_cell_async
            should_run_async = shell.should_run_async
        else:
            should_run_async = lambda cell: False

            # older IPython,
            # use blocking run_cell and wrap it in coroutine
            async def run_cell(*args, **kwargs):
                return shell.run_cell(*args, **kwargs)

        try:
            # default case: runner is asyncio and asyncio is already running
            # TODO: this should check every case for "are we inside the runner",
            # not just asyncio
            preprocessing_exc_tuple = None
            try:
                transformed_cell = self.shell.transform_cell(code)
            except Exception:
                # Preserve the transform failure; run_cell_async reports it.
                transformed_cell = code
                preprocessing_exc_tuple = sys.exc_info()

            if (_asyncio_runner and shell.loop_runner is _asyncio_runner
                    and asyncio.get_event_loop().is_running()
                    and should_run_async(
                        code,
                        transformed_cell=transformed_cell,
                        preprocessing_exc_tuple=preprocessing_exc_tuple,
                    )):
                coro = run_cell(
                    code,
                    store_history=store_history,
                    silent=silent,
                    transformed_cell=transformed_cell,
                    preprocessing_exc_tuple=preprocessing_exc_tuple)
                coro_future = asyncio.ensure_future(coro)

                with self._cancel_on_sigint(coro_future):
                    res = None
                    try:
                        res = await coro_future
                    finally:
                        # Fire post-execution events even on cancellation.
                        shell.events.trigger('post_execute')
                        if not silent:
                            shell.events.trigger('post_run_cell', res)
            else:
                # runner isn't already running,
                # make synchronous call,
                # letting shell dispatch to loop runners
                res = shell.run_cell(code, store_history=store_history,
                                     silent=silent)
        finally:
            self._restore_input()

        if res.error_before_exec is not None:
            err = res.error_before_exec
        else:
            err = res.error_in_exec

        if res.success:
            reply_content['status'] = 'ok'
        else:
            reply_content['status'] = 'error'

            reply_content.update({
                'traceback': shell._last_traceback or [],
                'ename': str(type(err).__name__),
                'evalue': str(err),
            })

            # FIXME: deprecated piece for ipyparallel (remove in 5.0):
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id,
                          method='execute')
            reply_content['engine_info'] = e_info

        # Return the execution counter so clients can display prompts
        reply_content['execution_count'] = shell.execution_count - 1

        if 'traceback' in reply_content:
            self.log.info("Exception in execute request:\n%s",
                          '\n'.join(reply_content['traceback']))

        # At this point, we can tell whether the main code execution succeeded
        # or not. If it did, we proceed to evaluate user_expressions
        if reply_content['status'] == 'ok':
            reply_content['user_expressions'] = \
                shell.user_expressions(user_expressions or {})
        else:
            # If there was an error, don't even try to compute expressions
            reply_content['user_expressions'] = {}

        # Payloads should be retrieved regardless of outcome, so we can both
        # recover partial output (that could have been generated early in a
        # block, before an error) and always clear the payload system.
        reply_content['payload'] = shell.payload_manager.read_payload()
        # Be aggressive about clearing the payload because we don't want
        # it to sit in memory until the next execute_request comes in.
        shell.payload_manager.clear_payload()

        return reply_content

    def do_complete(self, code, cursor_pos):
        """Return completion matches at ``cursor_pos`` in ``code``."""
        if _use_experimental_60_completion and self.use_experimental_completions:
            return self._experimental_do_complete(code, cursor_pos)

        # FIXME: IPython completers currently assume single line,
        # but completion messages give multi-line context
        # For now, extract line from cell, based on cursor_pos:
        if cursor_pos is None:
            cursor_pos = len(code)
        line, offset = line_at_cursor(code, cursor_pos)
        line_cursor = cursor_pos - offset

        txt, matches = self.shell.complete('', line, line_cursor)
        return {
            'matches': matches,
            'cursor_end': cursor_pos,
            'cursor_start': cursor_pos - len(txt),
            'metadata': {},
            'status': 'ok'
        }

    async def do_debug_request(self, msg):
        """Delegate a debug request to the debugger, when available."""
        if _is_debugpy_available:
            return await self.debugger.process_request(msg)

    def _experimental_do_complete(self, code, cursor_pos):
        """
        Experimental completions from IPython, using Jedi.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        with _provisionalcompleter():
            raw_completions = self.shell.Completer.completions(
                code, cursor_pos)
            completions = list(_rectify_completions(code, raw_completions))

            comps = []
            for comp in completions:
                comps.append(
                    dict(
                        start=comp.start,
                        end=comp.end,
                        text=comp.text,
                        type=comp.type,
                    ))

        if completions:
            # All rectified completions share the same replacement range.
            s = completions[0].start
            e = completions[0].end
            matches = [c.text for c in completions]
        else:
            s = cursor_pos
            e = cursor_pos
            matches = []

        return {
            'matches': matches,
            'cursor_end': e,
            'cursor_start': s,
            'metadata': {
                _EXPERIMENTAL_KEY_NAME: comps
            },
            'status': 'ok'
        }

    def do_inspect(self, code, cursor_pos, detail_level=0, omit_sections=()):
        """Return an object-inspection mimebundle for the token at the cursor."""
        name = token_at_cursor(code, cursor_pos)

        reply_content = {'status': 'ok'}
        reply_content['data'] = {}
        reply_content['metadata'] = {}
        try:
            if release.version_info >= (8, ):
                # `omit_sections` keyword will be available in IPython 8, see
                # https://github.com/ipython/ipython/pull/13343
                bundle = self.shell.object_inspect_mime(
                    name,
                    detail_level=detail_level,
                    omit_sections=omit_sections,
                )
            else:
                bundle = self.shell.object_inspect_mime(
                    name, detail_level=detail_level)
            reply_content['data'].update(bundle)
            if not self.shell.enable_html_pager:
                reply_content['data'].pop('text/html')
            reply_content['found'] = True
        except KeyError:
            # No such object in the namespace.
            reply_content['found'] = False

        return reply_content

    def do_history(self, hist_access_type, output, raw, session=0, start=0,
                   stop=None, n=None, pattern=None, unique=False):
        """Query the shell's history manager (tail / range / search)."""
        if hist_access_type == 'tail':
            hist = self.shell.history_manager.get_tail(n, raw=raw,
                                                       output=output,
                                                       include_latest=True)
        elif hist_access_type == 'range':
            hist = self.shell.history_manager.get_range(session, start, stop,
                                                        raw=raw,
                                                        output=output)
        elif hist_access_type == 'search':
            hist = self.shell.history_manager.search(pattern, raw=raw,
                                                     output=output, n=n,
                                                     unique=unique)
        else:
            hist = []

        return {
            'status': 'ok',
            'history': list(hist),
        }

    def do_shutdown(self, restart):
        """Ask the shell loop to exit and acknowledge the shutdown request."""
        self.shell.exit_now = True
        return dict(status='ok', restart=restart)

    def do_is_complete(self, code):
        """Report whether ``code`` is a complete statement (for consoles)."""
        transformer_manager = getattr(self.shell,
                                      'input_transformer_manager', None)
        if transformer_manager is None:
            # input_splitter attribute is deprecated
            transformer_manager = self.shell.input_splitter
        status, indent_spaces = transformer_manager.check_complete(code)
        r = {'status': status}
        if status == 'incomplete':
            r['indent'] = ' ' * indent_spaces
        return r

    def do_apply(self, content, bufs, msg_id, reply_metadata):
        """Execute a serialized function call (ipyparallel apply protocol)."""
        from .serialize import serialize_object, unpack_apply_message
        shell = self.shell
        try:
            working = shell.user_ns

            # Unique per-request prefix so temp names can't collide.
            prefix = "_" + str(msg_id).replace("-", "") + "_"

            f, args, kwargs = unpack_apply_message(bufs, working, copy=False)

            fname = getattr(f, '__name__', 'f')

            # NOTE(review): this overwrites the getattr result above — fname is
            # always prefix + "f"; the getattr is a dead store.
            fname = prefix + "f"
            argname = prefix + "args"
            kwargname = prefix + "kwargs"
            resultname = prefix + "result"

            ns = {fname: f, argname: args, kwargname: kwargs, resultname: None}
            # print ns
            working.update(ns)
            code = "%s = %s(*%s,**%s)" % (resultname, fname, argname,
                                          kwargname)
            try:
                exec(code, shell.user_global_ns, shell.user_ns)
                result = working.get(resultname)
            finally:
                # Always remove the temp names from the user namespace.
                for key in ns:
                    working.pop(key)

            result_buf = serialize_object(
                result,
                buffer_threshold=self.session.buffer_threshold,
                item_threshold=self.session.item_threshold,
            )

        except BaseException as e:
            # invoke IPython traceback formatting
            shell.showtraceback()
            reply_content = {
                "traceback": shell._last_traceback or [],
                "ename": str(type(e).__name__),
                "evalue": str(e),
            }
            # FIXME: deprecated piece for ipyparallel (remove in 5.0):
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id,
                          method='apply')
            reply_content['engine_info'] = e_info

            self.send_response(self.iopub_socket, 'error', reply_content,
                               ident=self._topic('error'), channel='shell')
            self.log.info("Exception in apply request:\n%s",
                          '\n'.join(reply_content['traceback']))
            result_buf = []
            reply_content['status'] = 'error'
        else:
            reply_content = {'status': 'ok'}

        return reply_content, result_buf

    def do_clear(self):
        """Reset the shell's user namespace (soft reset)."""
        self.shell.reset(False)
        return dict(status='ok')
class SageKernel(IPythonKernel):
    """The Sage Jupyter kernel: an IPython kernel using Sage's ZMQ shell
    with Sage-specific customizations, banner, and help links."""

    implementation = 'sage'
    implementation_version = SAGE_VERSION

    # Use the Sage-flavoured shell class instead of the stock IPython one.
    shell_class = Type(SageZMQInteractiveShell)

    def __init__(self, **kwds):
        """
        The Sage Jupyter Kernel

        INPUT:

        See the Jupyter documentation

        EXAMPLES::

            sage: from sage.repl.ipython_kernel.kernel import SageKernel
            sage: SageKernel.__new__(SageKernel)
            <sage.repl.ipython_kernel.kernel.SageKernel object at 0x...>
        """
        super(SageKernel, self).__init__(**kwds)
        # Apply Sage's shell customizations (executed for side effects).
        SageCustomizations(self.shell)

    @property
    def banner(self):
        r"""
        The Sage Banner

        The value of this property is displayed in the Jupyter notebook.

        OUTPUT:

        String.

        EXAMPLES::

            sage: from sage.repl.ipython_kernel.kernel import SageKernel
            sage: sk = SageKernel.__new__(SageKernel)
            sage: sk.banner
            '\xe2\x94\x8c\xe2...SageMath Version...'
        """
        from sage.misc.banner import banner_text
        return banner_text()

    @property
    def help_links(self):
        r"""
        Help in the Jupyter Notebook

        OUTPUT:

        See the Jupyter documentation.

        EXAMPLES::

            sage: from sage.repl.ipython_kernel.kernel import SageKernel
            sage: sk = SageKernel.__new__(SageKernel)
            sage: sk.help_links
            [{'text': 'Sage Documentation',
              'url': '../kernelspecs/sagemath/doc/index.html'},
             ...]
        """
        from sage.repl.ipython_kernel.install import SageKernelSpec
        identifier = SageKernelSpec.identifier()
        # URLs are resolved relative to the kernelspec's resource directory.
        kernel_url = lambda x: '../kernelspecs/{0}/{1}'.format(identifier, x)
        return [
            {
                'text': 'Sage Documentation',
                'url': kernel_url('doc/index.html')
            },
            {
                'text': 'Sage Tutorial',
                'url': kernel_url('doc/tutorial/index.html'),
            },
            {
                'text': 'Thematic Tutorials',
                'url': kernel_url('doc/thematic_tutorials/index.html'),
            },
            {
                'text': 'FAQs',
                'url': kernel_url('doc/faq/index.html'),
            },
            {
                'text': 'PREP Tutorials',
                'url': kernel_url('doc/prep/index.html'),
            },
            {
                'text': 'Sage Reference',
                'url': kernel_url('doc/reference/index.html'),
            },
            {
                'text': "Developer's Guide",
                'url': kernel_url('doc/developer/index.html'),
            },
            {
                'text': "Python",
                'url': "http://docs.python.org/%i.%i" % sys.version_info[:2],
            },
            {
                'text': "IPython",
                'url': "http://ipython.org/documentation.html",
            },
            {
                'text': 'Singular',
                'url': 'http://www.singular.uni-kl.de/Manual/latest/index.htm',
            },
            {
                'text': 'GAP',
                'url': 'http://gap-system.org/Manuals/doc/ref/chap0.html',
            },
            {
                'text': "NumPy",
                'url': "http://docs.scipy.org/doc/numpy/reference/",
            },
            {
                'text': "SciPy",
                'url': "http://docs.scipy.org/doc/scipy/reference/",
            },
            {
                'text': "SymPy",
                'url': 'http://docs.sympy.org/latest/index.html',
            },
            {
                'text': "Matplotlib",
                'url': "http://matplotlib.org/contents.html",
            },
            {
                'text': "Markdown",
                'url': "http://help.github.com/articles/github-flavored-markdown",
            },
        ]

    def pre_handler_hook(self):
        """Install Sage's interrupt handling before processing a message,
        keeping the previously installed SIGINT handler for restoration."""
        from sage.ext.interrupt.interrupt import init_interrupts
        self.saved_sigint_handler = init_interrupts()
class AssignLatePenalties(NbGraderPreprocessor):
    """Preprocessor for assigning penalties for late submissions to the database"""

    # Configurable plugin class that computes the penalty for a late notebook.
    plugin_class = Type(
        LateSubmissionPlugin,
        klass=BasePlugin,
        help="The plugin class for assigning the late penalty for each notebook."
    ).tag(config=True)

    # Instance of the plugin, created by init_plugin().
    plugin_inst = Instance(BasePlugin).tag(config=False)

    def init_plugin(self):
        """Instantiate the configured late-submission plugin."""
        self.plugin_inst = self.plugin_class(parent=self)

    def _check_late_penalty(self, notebook, penalty):
        """Clamp ``penalty`` into [0, notebook.score], warning on adjustment."""
        msg = "(Penalty {}) Adjusting late submission penalty from {} to {}."

        if penalty < 0:
            self.log.warning(msg.format("< 0", penalty, 0))
            return 0

        if penalty > notebook.score:
            self.log.warning(msg.format("> score", penalty, notebook.score))
            return notebook.score

        return penalty

    def preprocess(self, nb, resources):
        """Assign a late-submission penalty for the current notebook.

        Looks up the submission in the gradebook; if it is late, asks the
        plugin for a penalty, clamps it, and commits it to the database.
        Returns the (unchanged) notebook and resources.
        """
        # pull information from the resources
        self.notebook_id = resources['nbgrader']['notebook']
        self.assignment_id = resources['nbgrader']['assignment']
        self.student_id = resources['nbgrader']['student']
        self.db_url = resources['nbgrader']['db_url']

        # init the plugin
        self.init_plugin()

        # connect to the database
        self.gradebook = Gradebook(self.db_url)

        with self.gradebook:
            # process the late submissions
            nb, resources = super(AssignLatePenalties, self).preprocess(nb, resources)

            assignment = self.gradebook.find_submission(
                self.assignment_id, self.student_id)
            notebook = self.gradebook.find_submission_notebook(
                self.notebook_id, self.assignment_id, self.student_id)

            # reset to None (zero)
            notebook.late_submission_penalty = None

            if assignment.total_seconds_late > 0:
                self.log.warning("{} is {} seconds late".format(
                    assignment, assignment.total_seconds_late))

                late_penalty = self.plugin_inst.late_submission_penalty(
                    self.student_id, notebook.score,
                    assignment.total_seconds_late)
                self.log.warning(
                    "Late submission penalty: {}".format(late_penalty))

                if late_penalty is not None:
                    # never allow negative penalties or penalties above the score
                    late_penalty = self._check_late_penalty(
                        notebook, late_penalty)
                    notebook.late_submission_penalty = late_penalty

            self.gradebook.db.commit()

        return nb, resources

    def preprocess_cell(self, cell, resources, cell_index):
        """No per-cell work: penalties are assigned at the notebook level."""
        return cell, resources
class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
                   ConnectionFileMixin):
    """Application object that configures and launches the YAP kernel:
    sets up ZMQ sockets, the heartbeat, IO redirection, and the kernel
    object itself."""

    name = 'YAP-kernel'
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session]

    # the kernel class, as an importstring
    kernel_class = Type('yap_kernel.yap_kernel.YAPKernel',
                        klass='ipykernel.kernelbase.Kernel',
                        help="""The Kernel subclass to be used.

    This should allow easy re-use of the YAPKernelApp entry point
    to configure and launch kernels other than IPython's own.
    """).tag(config=True)

    kernel = Any()
    poller = Any()  # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat, allow_none=True)
    ports = Dict()

    subcommands = {
        'install': ('yap_kernel.kernelspec.InstallYAPKernelSpecApp',
                    'Install the YAP kernel'),
    }

    # connection info:
    connection_dir = Unicode()

    @default('connection_dir')
    def _default_connection_dir(self):
        return jupyter_runtime_dir()

    @property
    def abs_connection_file(self):
        # bare filenames are resolved relative to connection_dir
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.connection_dir, self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(
        False, help="redirect stdout to the null device").tag(config=True)
    no_stderr = Bool(
        False, help="redirect stderr to the null device").tag(config=True)
    outstream_class = DottedObjectName(
        'ipykernel.iostream.OutStream',
        help="The importstring for the OutStream factory").tag(config=True)
    displayhook_class = DottedObjectName(
        'ipykernel.displayhook.ZMQDisplayHook',
        help="The importstring for the DisplayHook factory").tag(config=True)

    # polling
    parent_handle = Integer(
        int(os.environ.get('JPY_PARENT_PID') or 0),
        help="""kill this process if its parent dies.  On Windows, the argument
        specifies the HANDLE of the parent process, otherwise it is simply boolean.
        """).tag(config=True)
    interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
                        help="""ONLY USED ON WINDOWS
        Interrupt this process when the parent is signaled.
        """).tag(config=True)

    def init_crash_handler(self):
        """Route uncaught exceptions through our own excepthook."""
        sys.excepthook = self.excepthook

    def excepthook(self, etype, evalue, tb):
        # write uncaught traceback to 'real' stderr, not zmq-forwarder
        traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)

    def init_poller(self):
        """Start watching the parent process so we die when it does."""
        if sys.platform == 'win32':
            if self.interrupt or self.parent_handle:
                self.poller = ParentPollerWindows(self.interrupt,
                                                  self.parent_handle)
        elif self.parent_handle:
            self.poller = ParentPollerUnix()

    def _bind_socket(self, s, port):
        """Bind socket ``s`` on the configured transport; return the port used.

        With ``port <= 0`` a free port (tcp) or an unused ipc path is chosen.
        """
        iface = '%s://%s' % (self.transport, self.ip)
        if self.transport == 'tcp':
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == 'ipc':
            if port <= 0:
                # probe for the first unused "<ip>-<n>" ipc path
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(cf,
                              ip=self.ip,
                              key=self.session.key,
                              transport=self.transport,
                              shell_port=self.shell_port,
                              stdin_port=self.stdin_port,
                              hb_port=self.hb_port,
                              iopub_port=self.iopub_port,
                              control_port=self.control_port)

    def cleanup_connection_file(self):
        """Remove the connection file and any ipc files, best-effort."""
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except (IOError, OSError):
            pass
        self.cleanup_ipc_files()

    def init_connection_file(self):
        """Locate or create the connection file, loading it if it exists."""
        if not self.connection_file:
            self.connection_file = "kernel-%s.json" % os.getpid()
        try:
            self.connection_file = filefind(self.connection_file,
                                            ['.', self.connection_dir])
        except IOError:
            self.log.debug("Connection file not found: %s",
                           self.connection_file)
            # This means I own it, and I'll create it in this directory:
            ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
            # Also, I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r",
                           self.connection_file, exc_info=True)
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        context = zmq.Context.instance()
        # Uncomment this to try closing the context.
        # atexit.register(context.term)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_socket.linger = 1000
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_socket.linger = 1000
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

        self.control_socket = context.socket(zmq.ROUTER)
        self.control_socket.linger = 1000
        self.control_port = self._bind_socket(self.control_socket,
                                              self.control_port)
        self.log.debug("control ROUTER Channel on port: %i" % self.control_port)

        self.init_iopub(context)

    def init_iopub(self, context):
        """Set up the iopub PUB socket and its forwarding thread."""
        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_socket.linger = 1000
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
        self.configure_tornado_logger()
        self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
        self.iopub_thread.start()
        # backward-compat: wrap iopub socket API in background thread
        self.iopub_socket = self.iopub_thread.background_socket

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx,
                                   (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if basename == self.connection_file or \
                os.path.dirname(self.connection_file) == self.connection_dir:
            # use shortname
            tail = basename
        else:
            tail = self.connection_file
        lines = [
            "To connect another client to this kernel, use:",
            "    --existing %s" % tail,
        ]
        # log connection info
        # info-level, so often not shown.
        # frontends should use the %connect_info magic
        # to see the connection info
        for line in lines:
            self.log.info(line)
        # also raw print to the terminal if no parent_handle (`ipython kernel`)
        # unless log-level is CRITICAL (--quiet)
        if not self.parent_handle and self.log_level < logging.CRITICAL:
            io.rprint(_ctrl_c_message)
            for line in lines:
                io.rprint(line)

        self.ports = dict(shell=self.shell_port,
                          iopub=self.iopub_port,
                          stdin=self.stdin_port,
                          hb=self.hb_port,
                          control=self.control_port)

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, 'w')
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout = outstream_factory(self.session, self.iopub_thread,
                                           u'stdout')
            sys.stderr = outstream_factory(self.session, self.iopub_thread,
                                           u'stderr')
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            self.displayhook = displayhook_factory(self.session,
                                                   self.iopub_socket)
            sys.displayhook = self.displayhook

        self.patch_io()

    def patch_io(self):
        """Patch important libraries that can't handle sys.stdout forwarding"""
        try:
            import faulthandler
        except ImportError:
            pass
        else:
            # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
            # updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
            # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
            # change default file to __stderr__ from forwarded stderr
            faulthandler_enable = faulthandler.enable

            def enable(file=sys.__stderr__, all_threads=True, **kwargs):
                return faulthandler_enable(file=file,
                                           all_threads=all_threads,
                                           **kwargs)

            faulthandler.enable = enable

            if hasattr(faulthandler, 'register'):
                faulthandler_register = faulthandler.register

                def register(signum,
                             file=sys.__stderr__,
                             all_threads=True,
                             chain=False,
                             **kwargs):
                    return faulthandler_register(signum,
                                                 file=file,
                                                 all_threads=all_threads,
                                                 chain=chain,
                                                 **kwargs)

                faulthandler.register = register

    def init_signal(self):
        # the kernel must not die on Ctrl-C; interrupts are delivered via messages
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)
        control_stream = ZMQStream(self.control_socket)

        kernel_factory = self.kernel_class.instance

        kernel = kernel_factory(
            parent=self,
            session=self.session,
            shell_streams=[shell_stream, control_stream],
            iopub_thread=self.iopub_thread,
            iopub_socket=self.iopub_socket,
            stdin_socket=self.stdin_socket,
            log=self.log,
            profile_dir=self.profile_dir,
            user_ns=self.user_ns,
        )
        kernel.record_ports(
            {name + '_port': port for name, port in self.ports.items()})
        self.kernel = kernel
        # Allow the displayhook to get the execution count
        self.displayhook.get_execution_count = lambda: kernel.execution_count

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""

        # Register inline backend as default
        # this is higher priority than matplotlibrc,
        # but lower priority than anything else (mpl.use() for instance).
        # This only affects matplotlib >= 1.5
        if not os.environ.get('MPLBACKEND'):
            os.environ[
                'MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'

        # Provide a wrapper for :meth:`YAPInteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.

        shell = self.shell
        _showtraceback = shell._showtraceback
        try:
            # replace error-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print("GUI event loop or pylab initialization failed",
                      file=sys.stderr)
                print(shell.InteractiveTB.stb2text(stb), file=sys.stderr)

            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            shell._showtraceback = _showtraceback

    def init_shell(self):
        self.shell = getattr(self.kernel, 'shell', None)
        if self.shell:
            self.shell.configurables.append(self)

    def init_extensions(self):
        super(YAPKernelApp, self).init_extensions()
        # BEGIN HARDCODED WIDGETS HACK
        # Ensure ipywidgets extension is loaded if available
        extension_man = self.shell.extension_manager
        if 'ipywidgets' not in extension_man.loaded:
            try:
                extension_man.load_extension('ipywidgets')
            except ImportError as e:
                self.log.debug(
                    'ipywidgets package not installed.  Widgets will not be available.'
                )
        # END HARDCODED WIDGETS HACK

    def configure_tornado_logger(self):
        """ Configure the tornado logging.Logger.

            Must set up the tornado logger or else tornado will call
            basicConfig for the root logger which makes the root logger
            go to the real sys.stderr instead of the capture streams.
            This function mimics the setup of logging.basicConfig.
        """
        logger = logging.getLogger('tornado')
        handler = logging.StreamHandler()
        formatter = logging.Formatter(logging.BASIC_FORMAT)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    @catch_config_error
    def initialize(self, argv=None):
        """Full initialization sequence; ordering of the init_* calls matters."""
        super(YAPKernelApp, self).initialize(argv)
        if self.subapp is not None:
            return
        # register zmq IOLoop with tornado
        zmq_ioloop.install()
        self.init_blackhole()
        self.init_connection_file()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.write_connection_file()
        # Log connection info after writing connection file, so that the connection
        # file is definitely available at the time someone reads the log.
        self.log_connection_info()
        self.init_io()
        self.init_signal()
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        if self.shell:
            self.init_gui_pylab()
            self.init_extensions()
        self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization do not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        """Run the subapp, or start pollers/kernel and enter the IO loop."""
        if self.subapp is not None:
            return self.subapp.start()
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            pass
class TerminalInteractiveShell(InteractiveShell):
    """Terminal-based interactive shell built on prompt_toolkit.

    Fixes applied relative to the previous revision:
    * ``_update_layout`` tested ``getattr(self, '._app', None)`` — the stray
      leading dot made the lookup always fail, so the prompt layout was never
      refreshed when options changed.  It now checks ``'_app'``.
    * The history pre-population loop never updated ``last_cell``, so the
      "ignore consecutive duplicates" logic never fired.  It now does.
    """

    colors_force = True

    space_for_menu = Integer(
        6,
        help='Number of line at the bottom of the screen '
        'to reserve for the completion menu').tag(config=True)

    def _space_for_menu_changed(self, old, new):
        self._update_layout()

    pt_cli = None
    debugger_history = None

    simple_prompt = Bool(
        _use_simple_prompt,
        help="""Use `raw_input` for the REPL, without completion, multiline input, and prompt colors.

            Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
            IPython own testing machinery, and emacs inferior-shell integration through elpy.

            This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT`
            environment variable is set, or the current terminal is not a tty.
            """).tag(config=True)

    @property
    def debugger_cls(self):
        # plain Pdb in simple-prompt mode; prompt_toolkit debugger otherwise
        return Pdb if self.simple_prompt else TerminalPdb

    autoedit_syntax = Bool(
        False,
        help="auto editing of files with syntax errors.",
    ).tag(config=True)

    confirm_exit = Bool(
        True,
        help="""
        Set to confirm when you try to exit IPython with an EOF (Control-D
        in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
        you can force a direct exit without any confirmation.""",
    ).tag(config=True)

    editing_mode = Unicode(
        'emacs',
        help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",
    ).tag(config=True)

    mouse_support = Bool(
        False, help="Enable mouse support in the prompt").tag(config=True)

    highlighting_style = Unicode(
        'default',
        help="The name of a Pygments style to use for syntax highlighting: \n %s"
        % ', '.join(get_all_styles())).tag(config=True)

    @observe('highlighting_style')
    def _highlighting_style_changed(self, change):
        # rebuild the cached Pygments style when the configured name changes
        self._style = self._make_style_from_name(self.highlighting_style)

    highlighting_style_overrides = Dict(
        help="Override highlighting format for specific tokens").tag(
            config=True)

    editor = Unicode(
        get_default_editor(),
        help="Set the editor used by IPython (default to $EDITOR/vi/notepad)."
    ).tag(config=True)

    prompts_class = Type(
        Prompts,
        help='Class used to generate Prompt token for prompt_toolkit').tag(
            config=True)

    prompts = Instance(Prompts)

    @default('prompts')
    def _prompts_default(self):
        return self.prompts_class(self)

    @observe('prompts')
    def _(self, change):
        self._update_layout()

    @default('displayhook_class')
    def _displayhook_class_default(self):
        return RichPromptDisplayHook

    term_title = Bool(
        True, help="Automatically set the terminal title").tag(config=True)

    display_completions_in_columns = Bool(
        False,
        help="Display a multi column completion menu.",
    ).tag(config=True)

    highlight_matching_brackets = Bool(
        True,
        help="Highlight matching brackets .",
    ).tag(config=True)

    @observe('term_title')
    def init_term_title(self, change=None):
        # Enable or disable the terminal title.
        if self.term_title:
            toggle_set_term_title(True)
            set_term_title('IPython: ' + abbrev_cwd())
        else:
            toggle_set_term_title(False)

    def init_prompt_toolkit_cli(self):
        """Build the prompt_toolkit application, key bindings and CLI."""
        self._app = None
        if self.simple_prompt:
            # Fall back to plain non-interactive output for tests.
            # This is very limited, and only accepts a single line.
            def prompt():
                return cast_unicode_py2(
                    input('In [%d]: ' % self.execution_count))

            self.prompt_for_code = prompt
            return

        kbmanager = KeyBindingManager.for_prompt()
        insert_mode = ViInsertMode() | EmacsInsertMode()

        # Ctrl+J == Enter, seemingly
        @kbmanager.registry.add_binding(Keys.ControlJ,
                                        filter=(HasFocus(DEFAULT_BUFFER)
                                                & ~HasSelection()
                                                & insert_mode))
        def _(event):
            b = event.current_buffer
            d = b.document
            if b.complete_state:
                cc = b.complete_state.current_completion
                if cc:
                    b.apply_completion(cc)
                return

            if not (d.on_last_line or d.cursor_position_row >=
                    d.line_count - d.empty_line_count_at_the_end()):
                b.newline()
                return

            status, indent = self.input_splitter.check_complete(d.text)

            if (status != 'incomplete') and b.accept_action.is_returnable:
                b.accept_action.validate_and_handle(event.cli, b)
            else:
                b.insert_text('\n' + (' ' * (indent or 0)))

        @kbmanager.registry.add_binding(Keys.ControlP,
                                        filter=(ViInsertMode()
                                                & HasFocus(DEFAULT_BUFFER)))
        def _previous_history_or_previous_completion(event):
            """
            Control-P in vi edit mode on readline is history next, unlike default prompt toolkit.

            If completer is open this still select previous completion.
            """
            event.current_buffer.auto_up()

        @kbmanager.registry.add_binding(Keys.ControlN,
                                        filter=(ViInsertMode()
                                                & HasFocus(DEFAULT_BUFFER)))
        def _next_history_or_next_completion(event):
            """
            Control-N in vi edit mode on readline is history previous, unlike default prompt toolkit.

            If completer is open this still select next completion.
            """
            event.current_buffer.auto_down()

        @kbmanager.registry.add_binding(Keys.ControlG,
                                        filter=(HasFocus(DEFAULT_BUFFER)
                                                & HasCompletions()))
        def _dismiss_completion(event):
            b = event.current_buffer
            if b.complete_state:
                b.cancel_completion()

        @kbmanager.registry.add_binding(Keys.ControlC,
                                        filter=HasFocus(DEFAULT_BUFFER))
        def _reset_buffer(event):
            b = event.current_buffer
            if b.complete_state:
                b.cancel_completion()
            else:
                b.reset()

        @kbmanager.registry.add_binding(Keys.ControlC,
                                        filter=HasFocus(SEARCH_BUFFER))
        def _reset_search_buffer(event):
            if event.current_buffer.document.text:
                event.current_buffer.reset()
            else:
                event.cli.push_focus(DEFAULT_BUFFER)

        supports_suspend = Condition(lambda cli: hasattr(signal, 'SIGTSTP'))

        @kbmanager.registry.add_binding(Keys.ControlZ,
                                        filter=supports_suspend)
        def _suspend_to_bg(event):
            event.cli.suspend_to_background()

        @Condition
        def cursor_in_leading_ws(cli):
            before = cli.application.buffer.document.current_line_before_cursor
            return (not before) or before.isspace()

        # Ctrl+I == Tab
        @kbmanager.registry.add_binding(Keys.ControlI,
                                        filter=(HasFocus(DEFAULT_BUFFER)
                                                & ~HasSelection()
                                                & insert_mode
                                                & cursor_in_leading_ws))
        def _indent_buffer(event):
            event.current_buffer.insert_text(' ' * 4)

        # Pre-populate history from IPython's history database
        history = InMemoryHistory()
        last_cell = u""
        for __, ___, cell in self.history_manager.get_tail(
                self.history_load_length, include_latest=True):
            # Ignore blank lines and consecutive duplicates
            cell = cell.rstrip()
            if cell and (cell != last_cell):
                history.append(cell)
                # remember the last cell so consecutive duplicates are skipped
                last_cell = cell

        self._style = self._make_style_from_name(self.highlighting_style)
        style = DynamicStyle(lambda: self._style)

        editing_mode = getattr(EditingMode, self.editing_mode.upper())

        self._app = create_prompt_application(
            editing_mode=editing_mode,
            key_bindings_registry=kbmanager.registry,
            history=history,
            completer=IPythonPTCompleter(self.Completer),
            enable_history_search=True,
            style=style,
            mouse_support=self.mouse_support,
            **self._layout_options())
        self._eventloop = create_eventloop(self.inputhook)
        self.pt_cli = CommandLineInterface(self._app,
                                           eventloop=self._eventloop)

    def _make_style_from_name(self, name):
        """
        Small wrapper that make an IPython compatible style from a style name

        We need that to add style for prompt ... etc.
        """
        style_cls = get_style_by_name(name)
        style_overrides = {
            Token.Prompt: '#009900',
            Token.PromptNum: '#00ff00 bold',
            Token.OutPrompt: '#990000',
            Token.OutPromptNum: '#ff0000 bold',
        }
        if name == 'default':
            style_cls = get_style_by_name('default')
            # The default theme needs to be visible on both a dark background
            # and a light background, because we can't tell what the terminal
            # looks like. These tweaks to the default theme help with that.
            style_overrides.update({
                Token.Number: '#007700',
                Token.Operator: 'noinherit',
                Token.String: '#BB6622',
                Token.Name.Function: '#2080D0',
                Token.Name.Class: 'bold #2080D0',
                Token.Name.Namespace: 'bold #2080D0',
            })
        style_overrides.update(self.highlighting_style_overrides)
        style = PygmentsStyle.from_defaults(pygments_style_cls=style_cls,
                                            style_dict=style_overrides)

        return style

    def _layout_options(self):
        """
        Return the current layout option for the current Terminal InteractiveShell
        """
        return {
            'lexer': IPythonPTLexer(),
            'reserve_space_for_menu': self.space_for_menu,
            'get_prompt_tokens': self.prompts.in_prompt_tokens,
            'get_continuation_tokens': self.prompts.continuation_prompt_tokens,
            'multiline': True,
            'display_completions_in_columns':
            self.display_completions_in_columns,

            # Highlight matching brackets, but only when this setting is
            # enabled, and only when the DEFAULT_BUFFER has the focus.
            'extra_input_processors': [
                ConditionalProcessor(
                    processor=HighlightMatchingBracketProcessor(
                        chars='[](){}'),
                    filter=HasFocus(DEFAULT_BUFFER) & ~IsDone()
                    & Condition(lambda cli: self.highlight_matching_brackets))
            ],
        }

    def _update_layout(self):
        """
        Ask for a re computation of the application layout, if for example ,
        some configuration options have changed.
        """
        # BUGFIX: was getattr(self, '._app', None) — the stray dot meant the
        # attribute was never found and the layout was never refreshed.
        if getattr(self, '_app', None):
            self._app.layout = create_prompt_layout(**self._layout_options())

    def prompt_for_code(self):
        """Run one prompt cycle and return the text the user entered."""
        document = self.pt_cli.run(pre_run=self.pre_prompt,
                                   reset_current_buffer=True)
        return document.text

    def init_io(self):
        if sys.platform not in {'win32', 'cli'}:
            return

        import colorama
        colorama.init()

        # For some reason we make these wrappers around stdout/stderr.
        # For now, we need to reset them so all output gets coloured.
        # https://github.com/ipython/ipython/issues/8669
        from IPython.utils import io
        io.stdout = io.IOStream(sys.stdout)
        io.stderr = io.IOStream(sys.stderr)

    def init_magics(self):
        super(TerminalInteractiveShell, self).init_magics()
        self.register_magics(TerminalMagics)

    def init_alias(self):
        # The parent class defines aliases that can be safely used with any
        # frontend.
        super(TerminalInteractiveShell, self).init_alias()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ['clear', 'more', 'less', 'man']:
                self.alias_manager.soft_define_alias(cmd, cmd)

    def __init__(self, *args, **kwargs):
        super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
        self.init_prompt_toolkit_cli()
        self.init_term_title()
        self.keep_running = True

        self.debugger_history = InMemoryHistory()

    def ask_exit(self):
        self.keep_running = False

    rl_next_input = None

    def pre_prompt(self):
        # inject any pending input (e.g. from %rerun) into the buffer
        if self.rl_next_input:
            self.pt_cli.application.buffer.text = cast_unicode_py2(
                self.rl_next_input)
            self.rl_next_input = None

    def interact(self):
        """Main REPL loop: prompt, run, repeat until ask_exit()."""
        while self.keep_running:
            print(self.separate_in, end='')

            try:
                code = self.prompt_for_code()
            except EOFError:
                if (not self.confirm_exit) \
                        or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'):
                    self.ask_exit()
            else:
                if code:
                    self.run_cell(code, store_history=True)
                    if self.autoedit_syntax and self.SyntaxTB.last_syntax_error:
                        self.edit_syntax_error()

    def mainloop(self):
        # An extra layer of protection in case someone mashing Ctrl-C breaks
        # out of our internal code.
        while True:
            try:
                self.interact()
                break
            except KeyboardInterrupt:
                print("\nKeyboardInterrupt escaped interact()\n")

        if hasattr(self, '_eventloop'):
            self._eventloop.close()

    _inputhook = None

    def inputhook(self, context):
        if self._inputhook is not None:
            self._inputhook(context)

    def enable_gui(self, gui=None):
        if gui:
            self._inputhook = get_inputhook_func(gui)
        else:
            self._inputhook = None

    # Methods to support auto-editing of SyntaxErrors:

    def edit_syntax_error(self):
        """The bottom half of the syntax error handler called in the main loop.

        Loop until syntax error is fixed or user cancels.
        """

        while self.SyntaxTB.last_syntax_error:
            # copy and clear last_syntax_error
            err = self.SyntaxTB.clear_err_state()
            if not self._should_recompile(err):
                return
            try:
                # may set last_syntax_error again if a SyntaxError is raised
                self.safe_execfile(err.filename, self.user_ns)
            except:
                self.showtraceback()
            else:
                try:
                    with open(err.filename) as f:
                        # This should be inside a display_trap block and I
                        # think it is.
                        sys.displayhook(f.read())
                except:
                    self.showtraceback()

    def _should_recompile(self, e):
        """Utility routine for edit_syntax_error"""
        if e.filename in ('<ipython console>', '<input>', '<string>',
                          '<console>', '<BackgroundJob compilation>', None):
            return False
        try:
            if (self.autoedit_syntax and not self.ask_yes_no(
                    'Return to editor to correct syntax error? '
                    '[Y/n] ', 'y')):
                return False
        except EOFError:
            return False

        def int0(x):
            try:
                return int(x)
            except TypeError:
                return 0

        # always pass integer line and offset values to editor hook
        try:
            self.hooks.fix_error_editor(e.filename, int0(e.lineno),
                                        int0(e.offset), e.msg)
        except TryNext:
            warn('Could not open editor')
            return False
        return True

    # Run !system commands directly, not through pipes, so terminal programs
    # work correctly.
    system = InteractiveShell.system_raw

    def auto_rewrite_input(self, cmd):
        """Overridden from the parent class to use fancy rewriting prompt"""
        if not self.show_rewritten_input:
            return

        tokens = self.prompts.rewrite_prompt_tokens()
        if self.pt_cli:
            self.pt_cli.print_tokens(tokens)
            print(cmd)
        else:
            prompt = ''.join(s for t, s in tokens)
            print(prompt, cmd, sep='')

    _prompts_before = None

    def switch_doctest_mode(self, mode):
        """Switch prompts to classic for %doctest_mode"""
        if mode:
            self._prompts_before = self.prompts
            self.prompts = ClassicPrompts(self)
        elif self._prompts_before:
            self.prompts = self._prompts_before
            self._prompts_before = None
class KernelSpecManager(LoggingConfigurable):
    """Discovers, resolves, installs and removes Jupyter kernel specs on disk."""

    # Concrete class used to represent each kernel spec; configurable so
    # subclasses of KernelSpecManager can customize behavior.
    kernel_spec_class = Type(KernelSpec, config=True, help="""The kernel spec class. This is configurable to allow subclassing of the KernelSpecManager for customized behavior. """ )

    # When True, fall back to the in-process ipykernel resources if no
    # native-Python kernelspec is installed on disk.
    ensure_native_kernel = Bool(True, config=True, help="""If there is no Python kernelspec registered and the IPython kernel is available, ensure it is added to the spec list. """ )

    # Root Jupyter data directory (lazily defaulted below).
    data_dir = Unicode()

    def _data_dir_default(self):
        return jupyter_data_dir()

    # Per-user kernels directory: <data_dir>/kernels
    user_kernel_dir = Unicode()

    def _user_kernel_dir_default(self):
        return pjoin(self.data_dir, 'kernels')

    whitelist = Set(config=True, help="""Whitelist of allowed kernel names. By default, all installed kernels are allowed. """ )

    kernel_dirs = List( help="List of kernel directories to search. Later ones take priority over earlier." )

    def _kernel_dirs_default(self):
        """Default search path: jupyter_path('kernels') plus the legacy .ipython/kernels dir."""
        dirs = jupyter_path('kernels')
        # At some point, we should stop adding .ipython/kernels to the path,
        # but the cost to keeping it is very small.
        try:
            from IPython.paths import get_ipython_dir
        except ImportError:
            try:
                from IPython.utils.path import get_ipython_dir
            except ImportError:
                # no IPython, no ipython dir
                get_ipython_dir = None
        if get_ipython_dir is not None:
            dirs.append(os.path.join(get_ipython_dir(), 'kernels'))
        return dirs

    def find_kernel_specs(self):
        """Returns a dict mapping kernel names to resource directories."""
        d = {}
        # First hit wins: a name found in an earlier kernel_dir is not
        # overwritten by later directories.
        for kernel_dir in self.kernel_dirs:
            kernels = _list_kernels_in(kernel_dir)
            for kname, spec in kernels.items():
                if kname not in d:
                    self.log.debug("Found kernel %s in %s", kname, kernel_dir)
                    d[kname] = spec
        # Optionally add the in-process ipykernel when no native spec exists.
        if self.ensure_native_kernel and NATIVE_KERNEL_NAME not in d:
            try:
                from ipykernel.kernelspec import RESOURCES
                self.log.debug("Native kernel (%s) available from %s", NATIVE_KERNEL_NAME, RESOURCES)
                d[NATIVE_KERNEL_NAME] = RESOURCES
            except ImportError:
                self.log.warning("Native kernel (%s) is not available", NATIVE_KERNEL_NAME)
        if self.whitelist:
            # filter if there's a whitelist
            d = {name:spec for name,spec in d.items() if name in self.whitelist}
        return d

    # TODO: Caching?
    def _get_kernel_spec_by_name(self, kernel_name, resource_dir):
        """Return a :class:`KernelSpec` instance for a given kernel_name and resource_dir."""
        if kernel_name == NATIVE_KERNEL_NAME:
            try:
                from ipykernel.kernelspec import RESOURCES, get_kernel_dict
            except ImportError:
                # It should be impossible to reach this, but let's play it safe
                pass
            else:
                # Only short-circuit when the resolved dir is ipykernel's own
                # resources; otherwise load whatever spec is on disk.
                if resource_dir == RESOURCES:
                    return self.kernel_spec_class(resource_dir=resource_dir, **get_kernel_dict())
        return self.kernel_spec_class.from_resource_dir(resource_dir)

    def get_kernel_spec(self, kernel_name):
        """Return a :class:`KernelSpec` instance for the given kernel_name.

        Raises :exc:`NoSuchKernel` if the given kernel name is not found.
        """
        d = self.find_kernel_specs()
        try:
            # Lookup is by lowercased name; install_kernel_spec normalizes
            # names to lowercase on the way in.
            resource_dir = d[kernel_name.lower()]
        except KeyError:
            raise NoSuchKernel(kernel_name)
        return self._get_kernel_spec_by_name(kernel_name, resource_dir)

    def get_all_specs(self):
        """Return a dict mapping kernel names to kernelspecs.

        Returns a dict of the form::

            { 'kernel_name': { 'resource_dir': '/path/to/kernel_name', 'spec': {"the spec itself": ...} }, ... }
        """
        d = self.find_kernel_specs()
        return {kname: { "resource_dir": d[kname], "spec": self._get_kernel_spec_by_name(kname, d[kname]).to_dict() } for kname in d}

    def remove_kernel_spec(self, name):
        """Remove a kernel spec directory by name.

        Returns the path that was deleted.
        """
        # Temporarily disable the native-kernel fallback so we only consider
        # specs that actually exist on disk (the fallback has no dir to remove).
        save_native = self.ensure_native_kernel
        try:
            self.ensure_native_kernel = False
            specs = self.find_kernel_specs()
        finally:
            self.ensure_native_kernel = save_native
        spec_dir = specs[name]
        self.log.debug("Removing %s", spec_dir)
        # A symlinked spec dir is unlinked rather than recursively deleted, so
        # the link target is left intact.
        if os.path.islink(spec_dir):
            os.remove(spec_dir)
        else:
            shutil.rmtree(spec_dir)
        return spec_dir

    def _get_destination_dir(self, kernel_name, user=False, prefix=None):
        # Resolve the install target: per-user dir, explicit prefix
        # (PREFIX/share/jupyter/kernels), or the system-wide location.
        if user:
            return os.path.join(self.user_kernel_dir, kernel_name)
        elif prefix:
            return os.path.join(os.path.abspath(prefix), 'share', 'jupyter', 'kernels', kernel_name)
        else:
            return os.path.join(SYSTEM_JUPYTER_PATH[0], 'kernels', kernel_name)

    def install_kernel_spec(self, source_dir, kernel_name=None, user=False, replace=None, prefix=None):
        """Install a kernel spec by copying its directory.

        If ``kernel_name`` is not given, the basename of ``source_dir`` will be used.

        If ``user`` is False, it will attempt to install into the systemwide kernel registry. If the process does not have appropriate permissions, an :exc:`OSError` will be raised.

        If ``prefix`` is given, the kernelspec will be installed to PREFIX/share/jupyter/kernels/KERNEL_NAME. This can be sys.prefix for installation inside virtual or conda envs.
        """
        # Strip trailing path separators so basename() yields the dir name.
        source_dir = source_dir.rstrip('/\\')
        if not kernel_name:
            kernel_name = os.path.basename(source_dir)
        # Kernel names are normalized to lowercase on install.
        kernel_name = kernel_name.lower()
        if not _is_valid_kernel_name(kernel_name):
            raise ValueError("Invalid kernel name %r. %s" % (kernel_name, _kernel_name_description))

        if user and prefix:
            raise ValueError("Can't specify both user and prefix. Please choose one or the other.")

        # ``replace`` is accepted only for backward compatibility.
        if replace is not None:
            warnings.warn(
                "replace is ignored. Installing a kernelspec always replaces an existing installation",
                DeprecationWarning,
                stacklevel=2,
            )

        destination = self._get_destination_dir(kernel_name, user=user, prefix=prefix)
        self.log.debug('Installing kernelspec in %s', destination)

        kernel_dir = os.path.dirname(destination)
        if kernel_dir not in self.kernel_dirs:
            # Warn (but proceed) when installing somewhere the search path
            # will not pick up.
            self.log.warning("Installing to %s, which is not in %s. The kernelspec may not be found.", kernel_dir, self.kernel_dirs, )

        # Installation always replaces: wipe any existing spec dir first.
        if os.path.isdir(destination):
            self.log.info('Removing existing kernelspec in %s', destination)
            shutil.rmtree(destination)

        shutil.copytree(source_dir, destination)
        self.log.info('Installed kernelspec %s in %s', kernel_name, destination)
        return destination

    def install_native_kernel_spec(self, user=False):
        """DEPRECATED: Use ipykernel.kernelspec.install"""
        warnings.warn("install_native_kernel_spec is deprecated." " Use ipykernel.kernelspec import install.", stacklevel=2)
        from ipykernel.kernelspec import install
        install(self, user=user)
class FargateSpawner(Spawner):
    """JupyterHub spawner that runs each user's server as an AWS Fargate (ECS) task."""

    aws_region = Unicode(config=True)
    aws_ecs_host = Unicode(config=True)
    # Callable producing the argument dict for the ECS RunTask API call.
    get_run_task_args = Callable(config=True)
    notebook_port = Int(config=True)
    notebook_scheme = Unicode(config=True)

    # Pluggable credential provider used to sign AWS API requests.
    authentication_class = Type(FargateSpawnerAuthentication, config=True)
    authentication = Instance(FargateSpawnerAuthentication)

    @default('authentication')
    def _default_authentication(self):
        return self.authentication_class(parent=self)

    # Identifiers of the running ECS task; empty string means "no task".
    task_arn = Unicode('')
    task_cluster_arn = Unicode('')

    # We mostly are able to call the AWS API to determine status. However, when we yield the
    # event loop to create the task, if there is a poll before the creation is complete,
    # we must behave as though we are running/starting, but we have no IDs to use with which
    # to check the task.
    calling_run_task = Bool(False)

    progress_buffer = None

    def load_state(self, state):
        ''' Misleading name: this "loads" the state onto self, to be used by other methods '''
        super().load_state(state)
        # Called when first created: we might have no state from a previous invocation
        self.task_arn = state.get('task_arn', '')
        self.task_cluster_arn = state.get('task_cluster_arn', '')

    def get_state(self):
        ''' Misleading name: the return value of get_state is saved to the database in order to be able to restore after the hub went down '''
        state = super().get_state()
        state['task_arn'] = self.task_arn
        state['task_cluster_arn'] = self.task_cluster_arn
        return state

    async def poll(self):
        # Return values, as dictated by the Jupyterhub framework:
        # 0 == not running, or not starting up, i.e. we need to call start
        # None == running, or not finished starting
        # 1, or anything else == error
        return \
            None if self.calling_run_task else \
            0 if self.task_arn == '' else \
            None if (await _get_task_status(self.log, self._aws_endpoint(), self.task_cluster_arn, self.task_arn)) in ALLOWED_STATUSES else \
            1

    async def start(self):
        """Launch the ECS task, wait for its IP and RUNNING status, and return the server URL."""
        progress_buffer = self.progress_buffer

        self.log.debug('Starting spawner')

        progress_buffer.write({'progress': 0.5, 'message': 'Starting server...'})
        try:
            # Flag set so poll() reports "starting" while RunTask is in flight
            # and we have no ARNs to query yet.
            self.calling_run_task = True
            run_response = await _run_task(self.log, self._aws_endpoint(), self.get_run_task_args(self))
            task_arn = run_response['tasks'][0]['taskArn']
            task_cluster_arn = run_response['tasks'][0]['clusterArn']
            progress_buffer.write({'progress': 1})
        finally:
            self.calling_run_task = False

        self.task_arn = task_arn
        self.task_cluster_arn = task_cluster_arn

        # Phase 1: poll (up to 50s) until the task reports an IP address.
        max_polls = 50
        num_polls = 0
        task_ip = ''
        while task_ip == '':
            num_polls += 1
            if num_polls >= max_polls:
                raise Exception('Task {} took too long to find IP address'.format(task_arn))

            task_ip = await _get_task_ip(self.log, self._aws_endpoint(), task_cluster_arn, task_arn)
            await gen.sleep(1)
            progress_buffer.write({'progress': 1 + num_polls / max_polls})

        progress_buffer.write({'progress': 2})

        # Phase 2: poll (bounded by start_timeout) until status is RUNNING;
        # any status outside ALLOWED_STATUSES is treated as a launch failure.
        max_polls = self.start_timeout
        num_polls = 0
        status = ''
        while status != 'RUNNING':
            num_polls += 1
            if num_polls >= max_polls:
                raise Exception('Task {} took too long to become running'.format(task_arn))

            status = await _get_task_status(self.log, self._aws_endpoint(), task_cluster_arn, task_arn)
            if status not in ALLOWED_STATUSES:
                raise Exception('Task {} is {}'.format(task_arn, status))

            await gen.sleep(1)
            progress_buffer.write({'progress': 2 + num_polls / max_polls * 98})

        progress_buffer.write({'progress': 100, 'message': 'Server started'})
        await gen.sleep(1)

        progress_buffer.close()

        return f'{self.notebook_scheme}://{task_ip}:{self.notebook_port}'

    async def stop(self, now=False):
        # No-op when no task was ever started (or state was cleared).
        if self.task_arn == '':
            return

        self.log.debug('Stopping task (%s)...', self.task_arn)
        await _ensure_stopped_task(self.log, self._aws_endpoint(), self.task_cluster_arn, self.task_arn)
        self.log.debug('Stopped task (%s)... (done)', self.task_arn)

    def clear_state(self):
        super().clear_state()
        self.log.debug('Clearing state: (%s)', self.task_arn)
        self.task_arn = ''
        self.task_cluster_arn = ''
        # Fresh buffer so a subsequent start() streams progress from scratch.
        self.progress_buffer = AsyncIteratorBuffer()

    async def progress(self):
        # Stream progress events written by start() to the Hub's progress API.
        async for progress_message in self.progress_buffer:
            yield progress_message

    def _aws_endpoint(self):
        # Bundle of connection/auth info passed to every AWS helper call.
        return {
            'region': self.aws_region,
            'ecs_host': self.aws_ecs_host,
            'ecs_auth': self.authentication.get_credentials,
        }
class ExportApp(NbGrader):
    """nbgrader subcommand that exports grades from the database via a pluggable exporter."""

    name = u'nbgrader-export'
    description = u'Export information from the database to another format.'

    aliases = aliases
    flags = flags

    examples = """ The default is to export to a file called "grades.csv", i.e.: nbgrader export You can customize the filename with the --to flag: nbgrader export --to mygrades.csv You can export the grades for a single (or limited set) of students or assignments with the --assignment and/or --student flag: nbgrader export --assignment [assignmentID] --student [studentID1,studentID2] Where the studentIDs and assignmentIDs are a list of IDs and assignments. The assignments or studentIDs need to quoted if they contain not only numbers. The square brackets are obligatory. If instead the flags: nbgrader export --skip-assignment [assignmentID] --skip-student [studentID1,studentID2] are used, the assignment and/or student is skipped. The assignment and student options take priority. To change the export type, you will need a class that inherits from nbgrader.plugins.ExportPlugin. If your exporter is named `MyCustomExporter` and is saved in the file `myexporter.py`, then: nbgrader export --exporter=myexporter.MyCustomExporter """

    # Exporter implementation; must subclass ExportPlugin (CSV by default).
    plugin_class = Type( CsvExportPlugin, klass=ExportPlugin, help="The plugin class for exporting the grades.").tag(config=True)

    # Instantiated exporter (populated by init_plugin).
    plugin_inst = Instance(ExportPlugin).tag(config=False)

    def init_plugin(self):
        """Instantiate the configured exporter with this app as its Configurable parent."""
        self.log.info("Using exporter: %s", self.plugin_class.__name__)
        self.plugin_inst = self.plugin_class(parent=self)

    @default("classes")
    def _classes_default(self):
        # Expose this app and the plugin base class in --help-all / config docs.
        classes = super(ExportApp, self)._classes_default()
        classes.append(ExportApp)
        classes.append(ExportPlugin)
        return classes

    def start(self):
        super(ExportApp, self).start()
        self.init_plugin()
        # Open the gradebook for this course and hand it to the exporter.
        with Gradebook(self.coursedir.db_url, self.coursedir.course_id) as gb:
            self.plugin_inst.export(gb)
class EnterpriseGatewayApp(KernelGatewayApp):
    """Application that provisions Jupyter kernels and proxies HTTP/Websocket traffic to the kernels.

    - reads command line and environment variable settings
    - initializes managers and routes
    - creates a Tornado HTTP server
    - starts the Tornado event loop
    """

    name = 'jupyter-enterprise-gateway'
    version = __version__
    description = """ Jupyter Enterprise Gateway Provisions remote Jupyter kernels and proxies HTTP/Websocket traffic to them. """

    # Remote hosts
    remote_hosts_env = 'EG_REMOTE_HOSTS'
    remote_hosts_default_value = 'localhost'
    remote_hosts = List(default_value=[remote_hosts_default_value], config=True, help="""Bracketed comma-separated list of hosts on which DistributedProcessProxy kernels will be launched e.g., ['host1','host2']. (EG_REMOTE_HOSTS env var - non-bracketed, just comma-separated)""" )

    @default('remote_hosts')
    def remote_hosts_default(self):
        return os.getenv(self.remote_hosts_env, self.remote_hosts_default_value).split(',')

    # Yarn endpoint
    yarn_endpoint_env = 'EG_YARN_ENDPOINT'
    # NOTE(review): the following line appears redacted/garbled in this source
    # ('*****' masking) — the `yarn_endpoint` Unicode trait declaration between
    # the default value and the @default decorator seems to have been lost.
    # Restore this region from upstream before shipping; left byte-identical here.
    yarn_endpoint_default_value = 'http://*****:*****@default('yarn_endpoint')
    def yarn_endpoint_default(self):
        return os.getenv(self.yarn_endpoint_env, self.yarn_endpoint_default_value)

    # Conductor endpoint
    conductor_endpoint_env = 'EG_CONDUCTOR_ENDPOINT'
    conductor_endpoint_default_value = None
    # NOTE(review): default is None but the trait is declared without
    # allow_none=True — confirm against the traitlets version in use; upstream
    # declares this trait with allow_none=True.
    conductor_endpoint = Unicode(conductor_endpoint_default_value, config=True, help="""The http url for accessing the Conductor REST API. (EG_CONDUCTOR_ENDPOINT env var)""" )

    @default('conductor_endpoint')
    def conductor_endpoint_default(self):
        return os.getenv(self.conductor_endpoint_env, self.conductor_endpoint_default_value)

    _log_formatter_cls = LogFormatter

    @default('log_format')
    def _default_log_format(self):
        """override default log format to include milliseconds"""
        return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"

    # Impersonation enabled
    impersonation_enabled_env = 'EG_IMPERSONATION_ENABLED'
    impersonation_enabled = Bool(False, config=True, help="""Indicates whether impersonation will be performed during kernel launch. (EG_IMPERSONATION_ENABLED env var)""")

    @default('impersonation_enabled')
    def impersonation_enabled_default(self):
        # Only the literal (case-insensitive) string 'true' enables it.
        return bool(os.getenv(self.impersonation_enabled_env, 'false').lower() == 'true')

    # Unauthorized users
    unauthorized_users_env = 'EG_UNAUTHORIZED_USERS'
    unauthorized_users_default_value = 'root'
    unauthorized_users = Set(default_value={unauthorized_users_default_value}, config=True, help="""Comma-separated list of user names (e.g., ['root','admin']) against which KERNEL_USERNAME will be compared. Any match (case-sensitive) will prevent the kernel's launch and result in an HTTP 403 (Forbidden) error. (EG_UNAUTHORIZED_USERS env var - non-bracketed, just comma-separated)""")

    @default('unauthorized_users')
    def unauthorized_users_default(self):
        return os.getenv(self.unauthorized_users_env, self.unauthorized_users_default_value).split(',')

    # Authorized users
    authorized_users_env = 'EG_AUTHORIZED_USERS'
    authorized_users = Set(config=True, help="""Comma-separated list of user names (e.g., ['bob','alice']) against which KERNEL_USERNAME will be compared. Any match (case-sensitive) will allow the kernel's launch, otherwise an HTTP 403 (Forbidden) error will be raised. The set of unauthorized users takes precedence. This option should be used carefully as it can dramatically limit who can launch kernels. (EG_AUTHORIZED_USERS env var - non-bracketed, just comma-separated)""")

    @default('authorized_users')
    def authorized_users_default(self):
        # Unset env var means "no allow-list" (empty set), not "allow nobody".
        au_env = os.getenv(self.authorized_users_env)
        return au_env.split(',') if au_env is not None else []

    # Port range
    port_range_env = 'EG_PORT_RANGE'
    port_range_default_value = "0..0"
    port_range = Unicode(port_range_default_value, config=True, help="""Specifies the lower and upper port numbers from which ports are created. The bounded values are separated by '..' (e.g., 33245..34245 specifies a range of 1000 ports to be randomly selected). A range of zero (e.g., 33245..33245 or 0..0) disables port-range enforcement. (EG_PORT_RANGE env var)""" )

    @default('port_range')
    def port_range_default(self):
        return os.getenv(self.port_range_env, self.port_range_default_value)

    # Max Kernels per User
    max_kernels_per_user_env = 'EG_MAX_KERNELS_PER_USER'
    max_kernels_per_user_default_value = -1
    max_kernels_per_user = Integer(max_kernels_per_user_default_value, config=True, help="""Specifies the maximum number of kernels a user can have active simultaneously. A value of -1 disables enforcement. (EG_MAX_KERNELS_PER_USER env var)""")

    @default('max_kernels_per_user')
    def max_kernels_per_user_default(self):
        return int(os.getenv(self.max_kernels_per_user_env, self.max_kernels_per_user_default_value))

    kernel_spec_manager = Instance(RemoteKernelSpecManager, allow_none=True)

    kernel_spec_manager_class = Type(klass=KernelSpecManager, default_value=RemoteKernelSpecManager, config=True, help=""" The kernel spec manager class to use. Should be a subclass of `jupyter_client.kernelspec.KernelSpecManager`. """)

    kernel_manager_class = Type(klass=MappingKernelManager, default_value=RemoteMappingKernelManager, config=True, help=""" The kernel manager class to use. Should be a subclass of `notebook.services.kernels.MappingKernelManager`. """)

    def init_configurables(self):
        """Initializes all configurable objects including a kernel manager, kernel spec manager, session manager, and personality.

        Any kernel pool configured by the personality will be its responsibility to shut down.

        Optionally, loads a notebook and prespawns the configured number of kernels.
        """
        # NOTE(review): this assignment is overwritten unconditionally by the
        # kernel_spec_manager_class instantiation below — looks redundant.
        self.kernel_spec_manager = RemoteKernelSpecManager(parent=self)

        self.seed_notebook = None
        if self.seed_uri is not None:
            # Note: must be set before instantiating a SeedingMappingKernelManager
            self.seed_notebook = self._load_notebook(self.seed_uri)

        # Only pass a default kernel name when one is provided. Otherwise,
        # adopt whatever default the kernel manager wants to use.
        kwargs = {}
        if self.default_kernel_name:
            kwargs['default_kernel_name'] = self.default_kernel_name

        self.kernel_spec_manager = self.kernel_spec_manager_class(parent=self, )

        self.kernel_manager = self.kernel_manager_class(parent=self, log=self.log, connection_dir=self.runtime_dir, kernel_spec_manager=self.kernel_spec_manager, **kwargs)

        # Detect older version of notebook
        func = getattr(self.kernel_manager, 'initialize_culler', None)
        if not func:
            self.log.warning("Older version of Notebook detected - idle kernels will not be culled. " "Culling requires Notebook >= 5.1.0.")

        self.session_manager = SessionManager(log=self.log, kernel_manager=self.kernel_manager)

        self.kernel_session_manager = KernelSessionManager(log=self.log, kernel_manager=self.kernel_manager, config=self.config, # required to get command-line options visible
            **kwargs)
        # Attempt to start persisted sessions
        self.kernel_session_manager.start_sessions()

        self.contents_manager = None

        if self.prespawn_count:
            if self.max_kernels and self.prespawn_count > self.max_kernels:
                raise RuntimeError('cannot prespawn {}; more than max kernels {}'.format(self.prespawn_count, self.max_kernels))

        # Load the REST API personality module and let it finish configuration.
        api_module = self._load_api_module(self.api)
        func = getattr(api_module, 'create_personality')
        self.personality = func(parent=self, log=self.log)

        self.personality.init_configurables()

    def init_webapp(self):
        super(EnterpriseGatewayApp, self).init_webapp()

        # As of Notebook 5.6, remote kernels are prevented: https://github.com/jupyter/notebook/pull/3714/ unless
        # 'allow_remote_access' is enabled. Since this is the entire purpose of EG, we'll unconditionally set that
        # here. Because this is a dictionary, we shouldn't have to worry about older versions as this will be ignored.
        self.web_app.settings['allow_remote_access'] = True

    def start(self):
        """Starts an IO loop for the application. """
        # Note that we *intentionally* reference the KernelGatewayApp so that we bypass
        # its start() logic and just call that of JKG's superclass.
        super(KernelGatewayApp, self).start()

        self.log.info('Jupyter Enterprise Gateway at http{}://{}:{}'.format('s' if self.keyfile else '', self.ip, self.port))

        # If impersonation is enabled, issue a warning message if the gateway user is not in unauthorized_users.
        if self.impersonation_enabled:
            gateway_user = getpass.getuser()
            if gateway_user.lower() not in self.unauthorized_users:
                self.log.warning("Impersonation is enabled and gateway user '{}' is NOT specified in the set of " "unauthorized users! Kernels may execute as that user with elevated privileges." .format(gateway_user))

        self.io_loop = ioloop.IOLoop.current()

        # Allow clean shutdown on SIGTERM.
        signal.signal(signal.SIGTERM, self._signal_stop)

        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            self.log.info("Interrupted...")
            # Ignore further interrupts (ctrl-c)
            signal.signal(signal.SIGINT, signal.SIG_IGN)
        finally:
            self.shutdown()

    def stop(self):
        """ Stops the HTTP server and IO loop associated with the application. """
        def _stop():
            self.http_server.stop()
            self.io_loop.stop()
        # Schedule on the loop's own thread rather than stopping directly.
        self.io_loop.add_callback(_stop)

    def _signal_stop(self, sig, frame):
        self.log.info("Received signal to terminate Enterprise Gateway.")
        self.io_loop.stop()
class TerminalInteractiveShell(InteractiveShell): mime_renderers = Dict().tag(config=True) space_for_menu = Integer(6, help='Number of line at the bottom of the screen ' 'to reserve for the tab completion menu, ' 'search history, ...etc, the height of ' 'these menus will at most this value. ' 'Increase it is you prefer long and skinny ' 'menus, decrease for short and wide.' ).tag(config=True) pt_app = None debugger_history = None debugger_history_file = Unicode( "~/.pdbhistory", help="File in which to store and read history" ).tag(config=True) simple_prompt = Bool(_use_simple_prompt, help="""Use `raw_input` for the REPL, without completion and prompt colors. Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are: IPython own testing machinery, and emacs inferior-shell integration through elpy. This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT` environment variable is set, or the current terminal is not a tty.""" ).tag(config=True) @property def debugger_cls(self): return Pdb if self.simple_prompt else TerminalPdb confirm_exit = Bool(True, help=""" Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a direct exit without any confirmation.""", ).tag(config=True) editing_mode = Unicode('emacs', help="Shortcut style to use at the prompt. 
'vi' or 'emacs'.", ).tag(config=True) emacs_bindings_in_vi_insert_mode = Bool( True, help="Add shortcuts from 'emacs' insert mode to 'vi' insert mode.", ).tag(config=True) modal_cursor = Bool( True, help=""" Cursor shape changes depending on vi mode: beam in vi insert mode, block in nav mode, underscore in replace mode.""", ).tag(config=True) ttimeoutlen = Float( 0.01, help="""The time in milliseconds that is waited for a key code to complete.""", ).tag(config=True) timeoutlen = Float( 0.5, help="""The time in milliseconds that is waited for a mapped key sequence to complete.""", ).tag(config=True) autoformatter = Unicode(None, help="Autoformatter to reformat Terminal code. Can be `'black'` or `None`", allow_none=True ).tag(config=True) mouse_support = Bool(False, help="Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)" ).tag(config=True) # We don't load the list of styles for the help string, because loading # Pygments plugins takes time and can cause unexpected errors. highlighting_style = Union([Unicode('legacy'), Type(klass=Style)], help="""The name or class of a Pygments style to use for syntax highlighting. 
To see available styles, run `pygmentize -L styles`.""" ).tag(config=True) @validate('editing_mode') def _validate_editing_mode(self, proposal): if proposal['value'].lower() == 'vim': proposal['value']= 'vi' elif proposal['value'].lower() == 'default': proposal['value']= 'emacs' if hasattr(EditingMode, proposal['value'].upper()): return proposal['value'].lower() return self.editing_mode @observe('editing_mode') def _editing_mode(self, change): if self.pt_app: self.pt_app.editing_mode = getattr(EditingMode, change.new.upper()) @observe('autoformatter') def _autoformatter_changed(self, change): formatter = change.new if formatter is None: self.reformat_handler = lambda x:x elif formatter == 'black': self.reformat_handler = black_reformat_handler else: raise ValueError @observe('highlighting_style') @observe('colors') def _highlighting_style_changed(self, change): self.refresh_style() def refresh_style(self): self._style = self._make_style_from_name_or_cls(self.highlighting_style) highlighting_style_overrides = Dict( help="Override highlighting format for specific tokens" ).tag(config=True) true_color = Bool(False, help="""Use 24bit colors instead of 256 colors in prompt highlighting. If your terminal supports true color, the following command should print ``TRUECOLOR`` in orange:: printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\" """, ).tag(config=True) editor = Unicode(get_default_editor(), help="Set the editor used by IPython (default to $EDITOR/vi/notepad)." 
).tag(config=True) prompts_class = Type(Prompts, help='Class used to generate Prompt token for prompt_toolkit').tag(config=True) prompts = Instance(Prompts) @default('prompts') def _prompts_default(self): return self.prompts_class(self) # @observe('prompts') # def _(self, change): # self._update_layout() @default('displayhook_class') def _displayhook_class_default(self): return RichPromptDisplayHook term_title = Bool(True, help="Automatically set the terminal title" ).tag(config=True) term_title_format = Unicode("IPython: {cwd}", help="Customize the terminal title format. This is a python format string. " + "Available substitutions are: {cwd}." ).tag(config=True) display_completions = Enum(('column', 'multicolumn','readlinelike'), help= ( "Options for displaying tab completions, 'column', 'multicolumn', and " "'readlinelike'. These options are for `prompt_toolkit`, see " "`prompt_toolkit` documentation for more information." ), default_value='multicolumn').tag(config=True) highlight_matching_brackets = Bool(True, help="Highlight matching brackets.", ).tag(config=True) extra_open_editor_shortcuts = Bool(False, help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. " "This is in addition to the F2 binding, which is always enabled." ).tag(config=True) handle_return = Any(None, help="Provide an alternative handler to be called when the user presses " "Return. This is an advanced option intended for debugging, which " "may be changed or removed in later releases." ).tag(config=True) enable_history_search = Bool(True, help="Allows to enable/disable the prompt toolkit history search" ).tag(config=True) prompt_includes_vi_mode = Bool(True, help="Display the current vi mode (when using vi editing mode)." ).tag(config=True) @observe('term_title') def init_term_title(self, change=None): # Enable or disable the terminal title. 
if self.term_title: toggle_set_term_title(True) set_term_title(self.term_title_format.format(cwd=abbrev_cwd())) else: toggle_set_term_title(False) def restore_term_title(self): if self.term_title: restore_term_title() def init_display_formatter(self): super(TerminalInteractiveShell, self).init_display_formatter() # terminal only supports plain text self.display_formatter.active_types = ['text/plain'] # disable `_ipython_display_` self.display_formatter.ipython_display_formatter.enabled = False def init_prompt_toolkit_cli(self): if self.simple_prompt: # Fall back to plain non-interactive output for tests. # This is very limited. def prompt(): prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens()) lines = [input(prompt_text)] prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens()) while self.check_complete('\n'.join(lines))[0] == 'incomplete': lines.append( input(prompt_continuation) ) return '\n'.join(lines) self.prompt_for_code = prompt return # Set up keyboard shortcuts key_bindings = create_ipython_shortcuts(self) # Pre-populate history from IPython's history database history = InMemoryHistory() last_cell = u"" for __, ___, cell in self.history_manager.get_tail(self.history_load_length, include_latest=True): # Ignore blank lines and consecutive duplicates cell = cell.rstrip() if cell and (cell != last_cell): history.append_string(cell) last_cell = cell self._style = self._make_style_from_name_or_cls(self.highlighting_style) self.style = DynamicStyle(lambda: self._style) editing_mode = getattr(EditingMode, self.editing_mode.upper()) self.pt_loop = asyncio.new_event_loop() self.pt_app = PromptSession( auto_suggest=AutoSuggestFromHistory(), editing_mode=editing_mode, key_bindings=key_bindings, history=history, completer=IPythonPTCompleter(shell=self), enable_history_search=self.enable_history_search, style=self.style, include_default_pygments_style=False, mouse_support=self.mouse_support, 
enable_open_in_editor=self.extra_open_editor_shortcuts,
                color_depth=self.color_depth,
                tempfile_suffix=".py",
                **self._extra_prompt_options()
            )

    def _make_style_from_name_or_cls(self, name_or_cls):
        """
        Small wrapper that make an IPython compatible style from a style name

        We need that to add style for prompt ... etc.
        """
        # Extra token styles layered on top of the chosen pygments style
        # (prompt colors, legacy-theme tweaks, ...).
        style_overrides = {}
        if name_or_cls == 'legacy':
            # 'legacy' defers to the classic ``colors`` trait
            # ('linux', 'lightbg', 'neutral' or 'nocolor').
            legacy = self.colors.lower()
            if legacy == 'linux':
                style_cls = get_style_by_name('monokai')
                style_overrides = _style_overrides_linux
            elif legacy == 'lightbg':
                style_overrides = _style_overrides_light_bg
                style_cls = get_style_by_name('pastie')
            elif legacy == 'neutral':
                # The default theme needs to be visible on both a dark background
                # and a light background, because we can't tell what the terminal
                # looks like. These tweaks to the default theme help with that.
                style_cls = get_style_by_name('default')
                style_overrides.update({
                    Token.Number: '#ansigreen',
                    Token.Operator: 'noinherit',
                    Token.String: '#ansiyellow',
                    Token.Name.Function: '#ansiblue',
                    Token.Name.Class: 'bold #ansiblue',
                    Token.Name.Namespace: 'bold #ansiblue',
                    Token.Name.Variable.Magic: '#ansiblue',
                    Token.Prompt: '#ansigreen',
                    Token.PromptNum: '#ansibrightgreen bold',
                    Token.OutPrompt: '#ansired',
                    Token.OutPromptNum: '#ansibrightred bold',
                })
                # Hack: Due to limited color support on the Windows console
                # the prompt colors will be wrong without this
                if os.name == 'nt':
                    style_overrides.update({
                        Token.Prompt: '#ansidarkgreen',
                        Token.PromptNum: '#ansigreen bold',
                        Token.OutPrompt: '#ansidarkred',
                        Token.OutPromptNum: '#ansired bold',
                    })
            elif legacy =='nocolor':
                style_cls=_NoStyle
                style_overrides = {}
            else :
                # NOTE(review): ValueError is given two positional arguments
                # here, so the rendered message looks like a tuple -- probably
                # unintentional; confirm before changing.
                raise ValueError('Got unknown colors: ', legacy)
        else :
            # A pygments style name, or an already-built style class.
            if isinstance(name_or_cls, str):
                style_cls = get_style_by_name(name_or_cls)
            else:
                style_cls = name_or_cls
            style_overrides = {
                Token.Prompt: '#ansigreen',
                Token.PromptNum: '#ansibrightgreen bold',
                Token.OutPrompt: '#ansired',
                Token.OutPromptNum: '#ansibrightred bold',
            }

        # User-configured overrides always win over the computed ones.
        style_overrides.update(self.highlighting_style_overrides)
        style = merge_styles([
            style_from_pygments_cls(style_cls),
            style_from_pygments_dict(style_overrides),
        ])

        return style

    @property
    def pt_complete_style(self):
        # Translate the user-facing ``display_completions`` trait value into
        # the corresponding prompt_toolkit CompleteStyle member.
        return {
            'multicolumn': CompleteStyle.MULTI_COLUMN,
            'column': CompleteStyle.COLUMN,
            'readlinelike': CompleteStyle.READLINE_LIKE,
        }[self.display_completions]

    @property
    def color_depth(self):
        # Only force true color when the user asked for it; None leaves the
        # choice to prompt_toolkit (presumably its own depth detection --
        # confirm against the prompt_toolkit docs).
        return (ColorDepth.TRUE_COLOR if self.true_color else None)

    def _extra_prompt_options(self):
        """
        Return the current layout option for the current
        Terminal InteractiveShell
        """
        def get_message():
            return PygmentsTokens(self.prompts.in_prompt_tokens())

        if self.editing_mode == 'emacs':
            # with emacs mode the prompt is (usually) static, so we call only
            # the function once. With VI mode it can toggle between [ins] and
            # [nor] so we can't precompute.
            # here I'm going to favor the default keybinding which almost
            # everybody uses to decrease CPU usage.
            # if we have issues with users with custom Prompts we can see how to
            # work around this.
            get_message = get_message()

        options = {
            'complete_in_thread': False,
            'lexer':IPythonPTLexer(),
            'reserve_space_for_menu':self.space_for_menu,
            'message': get_message,
            'prompt_continuation': (
                lambda width, lineno, is_soft_wrap:
                    PygmentsTokens(self.prompts.continuation_prompt_tokens(width))),
            'multiline': True,
            'complete_style': self.pt_complete_style,

            # Highlight matching brackets, but only when this setting is
            # enabled, and only when the DEFAULT_BUFFER has the focus.
            'input_processors': [ConditionalProcessor(
                processor=HighlightMatchingBracketProcessor(chars='[](){}'),
                filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() &
                    Condition(lambda: self.highlight_matching_brackets))],
        }
        if not PTK3:
            # prompt_toolkit 2.x takes the inputhook directly; with 3.x the
            # hook is wired through the asyncio loop instead (see enable_gui).
            options['inputhook'] = self.inputhook

        return options

    def prompt_for_code(self):
        """Prompt the user for one cell of input and return it as text."""
        if self.rl_next_input:
            # Input was pre-seeded (rl_next_input); consume it exactly once.
            default = self.rl_next_input
            self.rl_next_input = None
        else:
            default = ''

        # In order to make sure that asyncio code written in the
        # interactive shell doesn't interfere with the prompt, we run the
        # prompt in a different event loop.
        # If we don't do this, people could spawn coroutine with a
        # while/true inside which will freeze the prompt.

        try:
            old_loop = asyncio.get_running_loop()
        except RuntimeError:
            # This happens when the user used `asyncio.run()`.
            old_loop = None

        asyncio.set_event_loop(self.pt_loop)
        try:
            with patch_stdout(raw=True):
                text = self.pt_app.prompt(
                    default=default,
                    **self._extra_prompt_options())
        finally:
            # Restore the original event loop.
            asyncio.set_event_loop(old_loop)

        return text

    def enable_win_unicode_console(self):
        # Since IPython 7.10 doesn't support python < 3.6 and PEP 528, Python uses the unicode APIs for the Windows
        # console by default, so WUC shouldn't be needed.
        from warnings import warn
        warn("`enable_win_unicode_console` is deprecated since IPython 7.10, does not do anything and will be removed in the future",
             DeprecationWarning,
             stacklevel=2)

    def init_io(self):
        """Wrap stdout/stderr with colorama on Windows; no-op elsewhere."""
        if sys.platform not in {'win32', 'cli'}:
            return

        import colorama
        colorama.init()

        # For some reason we make these wrappers around stdout/stderr.
        # For now, we need to reset them so all output gets coloured.
        # https://github.com/ipython/ipython/issues/8669
        # io.std* are deprecated, but don't show our own deprecation warnings
        # during initialization of the deprecated API.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            io.stdout = io.IOStream(sys.stdout)
            io.stderr = io.IOStream(sys.stderr)

    def init_magics(self):
        # Register the terminal-only magics on top of the parent's set.
        super(TerminalInteractiveShell, self).init_magics()
        self.register_magics(TerminalMagics)

    def init_alias(self):
        # The parent class defines aliases that can be safely used with any
        # frontend.
        super(TerminalInteractiveShell, self).init_alias()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ('clear', 'more', 'less', 'man'):
                self.alias_manager.soft_define_alias(cmd, cmd)

    def __init__(self, *args, **kwargs):
        super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
        self.init_prompt_toolkit_cli()
        self.init_term_title()
        self.keep_running = True

    def ask_exit(self):
        # interact() checks this flag at the top of its REPL loop.
        self.keep_running = False

    # Pre-seeded text for the next prompt; consumed by prompt_for_code().
    rl_next_input = None

    def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        """Run the read-eval-print loop until ask_exit() clears keep_running."""
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn('interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)

        self.keep_running = True
        while self.keep_running:
            print(self.separate_in, end='')

            try:
                code = self.prompt_for_code()
            except EOFError:
                # Ctrl-D: optionally confirm before exiting the shell.
                if (not self.confirm_exit) \
                        or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'):
                    self.ask_exit()

            else:
                if code:
                    self.run_cell(code, store_history=True)

    def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        """Outer loop around interact(), surviving stray KeyboardInterrupts."""
        # An extra layer of protection in case someone mashing Ctrl-C breaks
        # out of our internal code.
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn('mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)
        while True:
            try:
                self.interact()
                break
            except KeyboardInterrupt as e:
                print("\n%s escaped interact()\n" % type(e).__name__)
            finally:
                # An interrupt during the eventloop will mess up the
                # internal state of the prompt_toolkit library.
                # Stopping the eventloop fixes this, see
                # https://github.com/ipython/ipython/pull/9867
                if hasattr(self, '_eventloop'):
                    self._eventloop.stop()

                self.restore_term_title()

        # try to call some at-exit operation optimistically as some things can't
        # be done during interpreter shutdown. this is technically inaccurate as
        # this make mainloop not re-callable, but that should be a rare if not
        # in existent use case.
        self._atexit_once()

    # Input hook installed by enable_gui(); prompt_toolkit calls inputhook()
    # while idle so GUI event loops keep spinning.
    _inputhook = None

    def inputhook(self, context):
        if self._inputhook is not None:
            self._inputhook(context)

    active_eventloop = None

    def enable_gui(self, gui=None):
        """Switch the active GUI integration (and matching inputhook) on or off."""
        if gui and (gui != 'inline') :
            self.active_eventloop, self._inputhook =\
                get_inputhook_name_and_func(gui)
        else:
            self.active_eventloop = self._inputhook = None

        # For prompt_toolkit 3.0. We have to create an asyncio event loop with
        # this inputhook.
        if PTK3:
            import asyncio
            from prompt_toolkit.eventloop import new_eventloop_with_inputhook

            if gui == 'asyncio':
                # When we integrate the asyncio event loop, run the UI in the
                # same event loop as the rest of the code. don't use an actual
                # input hook. (Asyncio is not made for nesting event loops.)
                self.pt_loop = asyncio.get_event_loop()

            elif self._inputhook:
                # If an inputhook was set, create a new asyncio event loop with
                # this inputhook for the prompt.
                self.pt_loop = new_eventloop_with_inputhook(self._inputhook)
            else:
                # When there's no inputhook, run the prompt in a separate
                # asyncio event loop.
                self.pt_loop = asyncio.new_event_loop()

    # Run !system commands directly, not through pipes, so terminal programs
    # work correctly.
    system = InteractiveShell.system_raw

    def auto_rewrite_input(self, cmd):
        """Overridden from the parent class to use fancy rewriting prompt"""
        if not self.show_rewritten_input:
            return

        tokens = self.prompts.rewrite_prompt_tokens()
        if self.pt_app:
            # Render the rewrite prompt with the running app's style.
            print_formatted_text(PygmentsTokens(tokens), end='',
                                 style=self.pt_app.app.style)
            print(cmd)
        else:
            prompt = ''.join(s for t, s in tokens)
            print(prompt, cmd, sep='')

    # Prompt object saved while %doctest_mode is active, restored on exit.
    _prompts_before = None

    def switch_doctest_mode(self, mode):
        """Switch prompts to classic for %doctest_mode"""
        if mode:
            self._prompts_before = self.prompts
            self.prompts = ClassicPrompts(self)
        elif self._prompts_before:
            self.prompts = self._prompts_before
            self._prompts_before = None
class JupyterHub(Application): """An Application for starting a Multi-User Jupyter Notebook server.""" name = 'jupyterhub' version = jupyterhub.__version__ description = """Start a multi-user Jupyter Notebook server Spawns a configurable-http-proxy and multi-user Hub, which authenticates users and spawns single-user Notebook servers on behalf of users. """ examples = """ generate default config file: jupyterhub --generate-config -f /etc/jupyterhub/jupyterhub.py spawn the server on 10.0.1.2:443 with https: jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert """ aliases = Dict(aliases) flags = Dict(flags) subcommands = {'token': (NewToken, "Generate an API token for a user")} classes = List([ Spawner, LocalProcessSpawner, Authenticator, PAMAuthenticator, ]) config_file = Unicode( 'jupyterhub_config.py', help="The config file to load", ).tag(config=True) generate_config = Bool( False, help="Generate default config file", ).tag(config=True) answer_yes = Bool( False, help="Answer yes to any questions (e.g. confirm overwrite)").tag( config=True) pid_file = Unicode('', help="""File to write PID Useful for daemonizing jupyterhub. """).tag(config=True) cookie_max_age_days = Float( 14, help="""Number of days for a login cookie to be valid. Default is two weeks. """).tag(config=True) last_activity_interval = Integer( 300, help="Interval (in seconds) at which to update last-activity timestamps." ).tag(config=True) proxy_check_interval = Integer( 30, help="Interval (in seconds) at which to check if the proxy is running." ).tag(config=True) data_files_path = Unicode( DATA_FILES_PATH, help= "The location of jupyterhub data files (e.g. 
/usr/local/share/jupyter/hub)" ).tag(config=True) template_paths = List( help="Paths to search for jinja templates.", ).tag(config=True) @default('template_paths') def _template_paths_default(self): return [os.path.join(self.data_files_path, 'templates')] confirm_no_ssl = Bool( False, help="""Confirm that JupyterHub should be run without SSL. This is **NOT RECOMMENDED** unless SSL termination is being handled by another layer. """).tag(config=True) ssl_key = Unicode( '', help="""Path to SSL key file for the public facing interface of the proxy Use with ssl_cert """).tag(config=True) ssl_cert = Unicode( '', help= """Path to SSL certificate file for the public facing interface of the proxy Use with ssl_key """).tag(config=True) ip = Unicode( '', help="The public facing ip of the whole application (the proxy)").tag( config=True) subdomain_host = Unicode( '', help="""Run single-user servers on subdomains of this host. This should be the full https://hub.domain.tld[:port] Provides additional cross-site protections for javascript served by single-user servers. Requires <username>.hub.domain.tld to resolve to the same host as hub.domain.tld. In general, this is most easily achieved with wildcard DNS. When using SSL (i.e. always) this also requires a wildcard SSL certificate. """).tag(config=True) def _subdomain_host_changed(self, name, old, new): if new and '://' not in new: # host should include '://' # if not specified, assume https: You have to be really explicit about HTTP! self.subdomain_host = 'https://' + new port = Integer(8000, help="The public facing port of the proxy").tag(config=True) base_url = URLPrefix( '/', help="The base URL of the entire application").tag(config=True) logo_file = Unicode( '', help= "Specify path to a logo image to override the Jupyter logo in the banner." 
).tag(config=True) @default('logo_file') def _logo_file_default(self): return os.path.join(self.data_files_path, 'static', 'images', 'jupyter.png') jinja_environment_options = Dict( help="Supply extra arguments that will be passed to Jinja environment." ).tag(config=True) proxy_cmd = Command('configurable-http-proxy', help="""The command to start the http proxy. Only override if configurable-http-proxy is not on your PATH """).tag(config=True) debug_proxy = Bool( False, help="show debug output in configurable-http-proxy").tag(config=True) proxy_auth_token = Unicode(help="""The Proxy Auth token. Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default. """).tag(config=True) @default('proxy_auth_token') def _proxy_auth_token_default(self): token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None) if not token: self.log.warning('\n'.join([ "", "Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.", "Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.", "", ])) token = orm.new_token() return token proxy_api_ip = Unicode( '127.0.0.1', help="The ip for the proxy API handlers").tag(config=True) proxy_api_port = Integer(help="The port for the proxy API handlers").tag( config=True) @default('proxy_api_port') def _proxy_api_port_default(self): return self.port + 1 hub_port = Integer(8081, help="The port for this process").tag(config=True) hub_ip = Unicode('127.0.0.1', help="The ip for this process").tag(config=True) hub_prefix = URLPrefix( '/hub/', help="The prefix for the hub server. 
Must not be '/'").tag(config=True) @default('hub_prefix') def _hub_prefix_default(self): return url_path_join(self.base_url, '/hub/') @observe('hub_prefix') def _hub_prefix_changed(self, name, old, new): if new == '/': raise TraitError("'/' is not a valid hub prefix") if not new.startswith(self.base_url): self.hub_prefix = url_path_join(self.base_url, new) cookie_secret = Bytes(help="""The cookie secret to use to encrypt cookies. Loaded from the JPY_COOKIE_SECRET env variable by default. """).tag( config=True, env='JPY_COOKIE_SECRET', ) cookie_secret_file = Unicode( 'jupyterhub_cookie_secret', help="""File in which to store the cookie secret.""").tag(config=True) authenticator_class = Type(PAMAuthenticator, Authenticator, help="""Class for authenticating users. This should be a class with the following form: - constructor takes one kwarg: `config`, the IPython config object. - is a tornado.gen.coroutine - returns username on success, None on failure - takes two arguments: (handler, data), where `handler` is the calling web.RequestHandler, and `data` is the POST form data from the login page. """).tag(config=True) authenticator = Instance(Authenticator) @default('authenticator') def _authenticator_default(self): return self.authenticator_class(parent=self, db=self.db) # class for spawning single-user servers spawner_class = Type( LocalProcessSpawner, Spawner, help="""The class to use for spawning single-user servers. Should be a subclass of Spawner. """).tag(config=True) db_url = Unicode( 'sqlite:///jupyterhub.sqlite', help="url for the database. e.g. `sqlite:///jupyterhub.sqlite`").tag( config=True) @observe('db_url') def _db_url_changed(self, change): new = change['new'] if '://' not in new: # assume sqlite, if given as a plain filename self.db_url = 'sqlite:///%s' % new db_kwargs = Dict( help="""Include any kwargs to pass to the database connection. See sqlalchemy.create_engine for details. 
""").tag(config=True) reset_db = Bool(False, help="Purge and reset the database.").tag(config=True) debug_db = Bool( False, help="log all database transactions. This has A LOT of output").tag( config=True) session_factory = Any() users = Instance(UserDict) @default('users') def _users_default(self): assert self.tornado_settings return UserDict(db_factory=lambda: self.db, settings=self.tornado_settings) admin_access = Bool( False, help="""Grant admin users permission to access single-user servers. Users should be properly informed if this is enabled. """).tag(config=True) admin_users = Set( help="""DEPRECATED, use Authenticator.admin_users instead.""").tag( config=True) tornado_settings = Dict( help="Extra settings overrides to pass to the tornado application." ).tag(config=True) cleanup_servers = Bool( True, help="""Whether to shutdown single-user servers when the Hub shuts down. Disable if you want to be able to teardown the Hub while leaving the single-user servers running. If both this and cleanup_proxy are False, sending SIGINT to the Hub will only shutdown the Hub, leaving everything else running. The Hub should be able to resume from database state. """).tag(config=True) cleanup_proxy = Bool( True, help="""Whether to shutdown the proxy when the Hub shuts down. Disable if you want to be able to teardown the Hub while leaving the proxy running. Only valid if the proxy was starting by the Hub process. If both this and cleanup_servers are False, sending SIGINT to the Hub will only shutdown the Hub, leaving everything else running. The Hub should be able to resume from database state. 
""").tag(config=True) statsd_host = Unicode(help="Host to send statds metrics to").tag( config=True) statsd_port = Integer( 8125, help="Port on which to send statsd metrics about the hub").tag( config=True) statsd_prefix = Unicode( 'jupyterhub', help="Prefix to use for all metrics sent by jupyterhub to statsd").tag( config=True) handlers = List() _log_formatter_cls = CoroutineLogFormatter http_server = None proxy_process = None io_loop = None @default('log_level') def _log_level_default(self): return logging.INFO @default('log_datefmt') def _log_datefmt_default(self): """Exclude date from default date format""" return "%Y-%m-%d %H:%M:%S" @default('log_format') def _log_format_default(self): """override default log format to include time""" return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s" extra_log_file = Unicode( help="Set a logging.FileHandler on this file.").tag(config=True) extra_log_handlers = List( Instance(logging.Handler), help="Extra log handlers to set on JupyterHub logger", ).tag(config=True) @property def statsd(self): if hasattr(self, '_statsd'): return self._statsd if self.statsd_host: self._statsd = statsd.StatsClient(self.statsd_host, self.statsd_port, self.statsd_prefix) return self._statsd else: # return an empty mock object! self._statsd = EmptyClass() return self._statsd def init_logging(self): # This prevents double log messages because tornado use a root logger that # self.log is a child of. The logging module dipatches log messages to a log # and all of its ancenstors until propagate is set to False. 
self.log.propagate = False if self.extra_log_file: self.extra_log_handlers.append( logging.FileHandler(self.extra_log_file)) _formatter = self._log_formatter_cls( fmt=self.log_format, datefmt=self.log_datefmt, ) for handler in self.extra_log_handlers: if handler.formatter is None: handler.setFormatter(_formatter) self.log.addHandler(handler) # hook up tornado 3's loggers to our app handlers for log in (app_log, access_log, gen_log): # ensure all log statements identify the application they come from log.name = self.log.name logger = logging.getLogger('tornado') logger.propagate = True logger.parent = self.log logger.setLevel(self.log.level) def init_ports(self): if self.hub_port == self.port: raise TraitError( "The hub and proxy cannot both listen on port %i" % self.port) if self.hub_port == self.proxy_api_port: raise TraitError( "The hub and proxy API cannot both listen on port %i" % self.hub_port) if self.proxy_api_port == self.port: raise TraitError( "The proxy's public and API ports cannot both be %i" % self.port) @staticmethod def add_url_prefix(prefix, handlers): """add a url prefix to handlers""" for i, tup in enumerate(handlers): lis = list(tup) lis[0] = url_path_join(prefix, tup[0]) handlers[i] = tuple(lis) return handlers def init_handlers(self): h = [] # load handlers from the authenticator h.extend(self.authenticator.get_handlers(self)) # set default handlers h.extend(handlers.default_handlers) h.extend(apihandlers.default_handlers) h.append((r'/logo', LogoHandler, {'path': self.logo_file})) self.handlers = self.add_url_prefix(self.hub_prefix, h) # some extra handlers, outside hub_prefix self.handlers.extend([ (r"%s" % self.hub_prefix.rstrip('/'), web.RedirectHandler, { "url": self.hub_prefix, "permanent": False, }), (r"(?!%s).*" % self.hub_prefix, handlers.PrefixRedirectHandler), (r'(.*)', handlers.Template404), ]) def _check_db_path(self, path): """More informative log messages for failed filesystem access""" path = os.path.abspath(path) parent, fname 
= os.path.split(path) user = getuser() if not os.path.isdir(parent): self.log.error("Directory %s does not exist", parent) if os.path.exists(parent) and not os.access(parent, os.W_OK): self.log.error("%s cannot create files in %s", user, parent) if os.path.exists(path) and not os.access(path, os.W_OK): self.log.error("%s cannot edit %s", user, path) def init_secrets(self): trait_name = 'cookie_secret' trait = self.traits()[trait_name] env_name = trait.metadata.get('env') secret_file = os.path.abspath( os.path.expanduser(self.cookie_secret_file)) secret = self.cookie_secret secret_from = 'config' # load priority: 1. config, 2. env, 3. file if not secret and os.environ.get(env_name): secret_from = 'env' self.log.info("Loading %s from env[%s]", trait_name, env_name) secret = binascii.a2b_hex(os.environ[env_name]) if not secret and os.path.exists(secret_file): secret_from = 'file' perm = os.stat(secret_file).st_mode if perm & 0o077: self.log.error("Bad permissions on %s", secret_file) else: self.log.info("Loading %s from %s", trait_name, secret_file) with open(secret_file) as f: b64_secret = f.read() try: secret = binascii.a2b_base64(b64_secret) except Exception as e: self.log.error("%s does not contain b64 key: %s", secret_file, e) if not secret: secret_from = 'new' self.log.debug("Generating new %s", trait_name) secret = os.urandom(SECRET_BYTES) if secret_file and secret_from == 'new': # if we generated a new secret, store it in the secret_file self.log.info("Writing %s to %s", trait_name, secret_file) b64_secret = binascii.b2a_base64(secret).decode('ascii') with open(secret_file, 'w') as f: f.write(b64_secret) try: os.chmod(secret_file, 0o600) except OSError: self.log.warning("Failed to set permissions on %s", secret_file) # store the loaded trait value self.cookie_secret = secret # thread-local storage of db objects _local = Instance(threading.local, ()) @property def db(self): if not hasattr(self._local, 'db'): self._local.db = 
scoped_session(self.session_factory)() return self._local.db @property def hub(self): if not getattr(self._local, 'hub', None): q = self.db.query(orm.Hub) assert q.count() <= 1 self._local.hub = q.first() if self.subdomain_host and self._local.hub: self._local.hub.host = self.subdomain_host return self._local.hub @hub.setter def hub(self, hub): self._local.hub = hub if hub and self.subdomain_host: hub.host = self.subdomain_host @property def proxy(self): if not getattr(self._local, 'proxy', None): q = self.db.query(orm.Proxy) assert q.count() <= 1 p = self._local.proxy = q.first() if p: p.auth_token = self.proxy_auth_token return self._local.proxy @proxy.setter def proxy(self, proxy): self._local.proxy = proxy def init_db(self): """Create the database connection""" self.log.debug("Connecting to db: %s", self.db_url) try: self.session_factory = orm.new_session_factory(self.db_url, reset=self.reset_db, echo=self.debug_db, **self.db_kwargs) # trigger constructing thread local db property _ = self.db except OperationalError as e: self.log.error("Failed to connect to db: %s", self.db_url) self.log.debug("Database error was:", exc_info=True) if self.db_url.startswith('sqlite:///'): self._check_db_path(self.db_url.split(':///', 1)[1]) self.exit(1) def init_hub(self): """Load the Hub config into the database""" self.hub = self.db.query(orm.Hub).first() if self.hub is None: self.hub = orm.Hub(server=orm.Server( ip=self.hub_ip, port=self.hub_port, base_url=self.hub_prefix, cookie_name='jupyter-hub-token', )) self.db.add(self.hub) else: server = self.hub.server server.ip = self.hub_ip server.port = self.hub_port server.base_url = self.hub_prefix if self.subdomain_host: if not self.subdomain_host: raise ValueError( "Must specify subdomain_host when using subdomains." 
" This should be the public domain[:port] of the Hub.") self.db.commit() @gen.coroutine def init_users(self): """Load users into and from the database""" db = self.db if self.admin_users and not self.authenticator.admin_users: self.log.warning("\nJupyterHub.admin_users is deprecated." "\nUse Authenticator.admin_users instead.") self.authenticator.admin_users = self.admin_users admin_users = [ self.authenticator.normalize_username(name) for name in self.authenticator.admin_users ] self.authenticator.admin_users = set( admin_users) # force normalization for username in admin_users: if not self.authenticator.validate_username(username): raise ValueError("username %r is not valid" % username) if not admin_users: self.log.warning( "No admin users, admin interface will be unavailable.") self.log.warning( "Add any administrative users to `c.Authenticator.admin_users` in config." ) new_users = [] for name in admin_users: # ensure anyone specified as admin in config is admin in db user = orm.User.find(db, name) if user is None: user = orm.User(name=name, admin=True) new_users.append(user) db.add(user) else: user.admin = True # the admin_users config variable will never be used after this point. # only the database values will be referenced. whitelist = [ self.authenticator.normalize_username(name) for name in self.authenticator.whitelist ] self.authenticator.whitelist = set(whitelist) # force normalization for username in whitelist: if not self.authenticator.validate_username(username): raise ValueError("username %r is not valid" % username) if not whitelist: self.log.info( "Not using whitelist. Any authenticated user will be allowed.") # add whitelisted users to the db for name in whitelist: user = orm.User.find(db, name) if user is None: user = orm.User(name=name) new_users.append(user) db.add(user) db.commit() # Notify authenticator of all users. # This ensures Auth whitelist is up-to-date with the database. 
# This lets whitelist be used to set up initial list, # but changes to the whitelist can occur in the database, # and persist across sessions. for user in db.query(orm.User): yield gen.maybe_future(self.authenticator.add_user(user)) db.commit() # can add_user touch the db? # The whitelist set and the users in the db are now the same. # From this point on, any user changes should be done simultaneously # to the whitelist set and user db, unless the whitelist is empty (all users allowed). @gen.coroutine def init_spawners(self): db = self.db user_summaries = [''] def _user_summary(user): parts = ['{0: >8}'.format(user.name)] if user.admin: parts.append('admin') if user.server: parts.append('running at %s' % user.server) return ' '.join(parts) @gen.coroutine def user_stopped(user): status = yield user.spawner.poll() self.log.warning( "User %s server stopped with exit code: %s", user.name, status, ) yield self.proxy.delete_user(user) yield user.stop() for orm_user in db.query(orm.User): self.users[orm_user.id] = user = User(orm_user, self.tornado_settings) if not user.state: # without spawner state, server isn't valid user.server = None user_summaries.append(_user_summary(user)) continue self.log.debug("Loading state for %s from db", user.name) spawner = user.spawner status = yield spawner.poll() if status is None: self.log.info("%s still running", user.name) spawner.add_poll_callback(user_stopped, user) spawner.start_polling() else: # user not running. This is expected if server is None, # but indicates the user's server died while the Hub wasn't running # if user.server is defined. 
log = self.log.warning if user.server else self.log.debug log("%s not running.", user.name) user.server = None user_summaries.append(_user_summary(user)) self.log.debug("Loaded users: %s", '\n'.join(user_summaries)) db.commit() def init_proxy(self): """Load the Proxy config into the database""" self.proxy = self.db.query(orm.Proxy).first() if self.proxy is None: self.proxy = orm.Proxy( public_server=orm.Server(), api_server=orm.Server(), ) self.db.add(self.proxy) self.db.commit() self.proxy.auth_token = self.proxy_auth_token # not persisted self.proxy.log = self.log self.proxy.public_server.ip = self.ip self.proxy.public_server.port = self.port self.proxy.api_server.ip = self.proxy_api_ip self.proxy.api_server.port = self.proxy_api_port self.proxy.api_server.base_url = '/api/routes/' self.db.commit() @gen.coroutine def start_proxy(self): """Actually start the configurable-http-proxy""" # check for proxy if self.proxy.public_server.is_up() or self.proxy.api_server.is_up(): # check for *authenticated* access to the proxy (auth token can change) try: yield self.proxy.get_routes() except (HTTPError, OSError, socket.error) as e: if isinstance(e, HTTPError) and e.code == 403: msg = "Did CONFIGPROXY_AUTH_TOKEN change?" else: msg = "Is something else using %s?" 
% self.proxy.public_server.bind_url self.log.error( "Proxy appears to be running at %s, but I can't access it (%s)\n%s", self.proxy.public_server.bind_url, e, msg) self.exit(1) return else: self.log.info("Proxy already running at: %s", self.proxy.public_server.bind_url) self.proxy_process = None return env = os.environ.copy() env['CONFIGPROXY_AUTH_TOKEN'] = self.proxy.auth_token cmd = self.proxy_cmd + [ '--ip', self.proxy.public_server.ip, '--port', str(self.proxy.public_server.port), '--api-ip', self.proxy.api_server.ip, '--api-port', str(self.proxy.api_server.port), '--default-target', self.hub.server.host, ] if self.subdomain_host: cmd.append('--host-routing') if self.debug_proxy: cmd.extend(['--log-level', 'debug']) if self.ssl_key: cmd.extend(['--ssl-key', self.ssl_key]) if self.ssl_cert: cmd.extend(['--ssl-cert', self.ssl_cert]) if self.statsd_host: cmd.extend([ '--statsd-host', self.statsd_host, '--statsd-port', str(self.statsd_port), '--statsd-prefix', self.statsd_prefix + '.chp' ]) # Require SSL to be used or `--no-ssl` to confirm no SSL on if ' --ssl' not in ' '.join(cmd): if self.confirm_no_ssl: self.log.warning( "Running JupyterHub without SSL." " There better be SSL termination happening somewhere else..." ) else: self.log.error( "Refusing to run JuptyterHub without SSL." " If you are terminating SSL in another layer," " pass --no-ssl to tell JupyterHub to allow the proxy to listen on HTTP." 
) self.exit(1) self.log.info("Starting proxy @ %s", self.proxy.public_server.bind_url) self.log.debug("Proxy cmd: %s", cmd) try: self.proxy_process = Popen(cmd, env=env) except FileNotFoundError as e: self.log.error( "Failed to find proxy %r\n" "The proxy can be installed with `npm install -g configurable-http-proxy`" % self.proxy_cmd) self.exit(1) def _check(): status = self.proxy_process.poll() if status is not None: e = RuntimeError("Proxy failed to start with exit code %i" % status) # py2-compatible `raise e from None` e.__cause__ = None raise e for server in (self.proxy.public_server, self.proxy.api_server): for i in range(10): _check() try: yield server.wait_up(1) except TimeoutError: continue else: break yield server.wait_up(1) self.log.debug("Proxy started and appears to be up") @gen.coroutine def check_proxy(self): if self.proxy_process.poll() is None: return self.log.error( "Proxy stopped with exit code %r", 'unknown' if self.proxy_process is None else self.proxy_process.poll()) yield self.start_proxy() self.log.info("Setting up routes on new proxy") yield self.proxy.add_all_users(self.users) self.log.info("New proxy back up, and good to go") def init_tornado_settings(self): """Set up the tornado settings dict.""" base_url = self.hub.server.base_url jinja_options = dict(autoescape=True, ) jinja_options.update(self.jinja_environment_options) jinja_env = Environment(loader=FileSystemLoader(self.template_paths), **jinja_options) login_url = self.authenticator.login_url(base_url) logout_url = self.authenticator.logout_url(base_url) # if running from git, disable caching of require.js # otherwise cache based on server start time parent = os.path.dirname(os.path.dirname(jupyterhub.__file__)) if os.path.isdir(os.path.join(parent, '.git')): version_hash = '' else: version_hash = datetime.now().strftime("%Y%m%d%H%M%S"), subdomain_host = self.subdomain_host domain = urlparse(subdomain_host).hostname settings = dict( log_function=log_request, config=self.config, 
log=self.log,
            db=self.db,
            proxy=self.proxy,
            hub=self.hub,
            admin_users=self.authenticator.admin_users,
            admin_access=self.admin_access,
            authenticator=self.authenticator,
            spawner_class=self.spawner_class,
            base_url=self.base_url,
            cookie_secret=self.cookie_secret,
            cookie_max_age_days=self.cookie_max_age_days,
            login_url=login_url,
            logout_url=logout_url,
            static_path=os.path.join(self.data_files_path, 'static'),
            static_url_prefix=url_path_join(self.hub.server.base_url, 'static/'),
            static_handler_class=CacheControlStaticFilesHandler,
            template_path=self.template_paths,
            jinja2_env=jinja_env,
            version_hash=version_hash,
            subdomain_host=subdomain_host,
            domain=domain,
            statsd=self.statsd,
        )
        # allow configured settings to have priority over the defaults computed above
        settings.update(self.tornado_settings)
        self.tornado_settings = settings
        # constructing users requires access to tornado_settings,
        # so it can only happen after the settings dict is final
        self.tornado_settings['users'] = self.users

    def init_tornado_application(self):
        """Instantiate the tornado Application object"""
        self.tornado_application = web.Application(self.handlers, **self.tornado_settings)

    def write_pid_file(self):
        """Write the current process id to ``self.pid_file``, if one is configured."""
        pid = os.getpid()
        if self.pid_file:
            self.log.debug("Writing PID %i to %s", pid, self.pid_file)
            with open(self.pid_file, 'w') as f:
                f.write('%i' % pid)

    @gen.coroutine
    @catch_config_error
    def initialize(self, *args, **kwargs):
        """Load configuration and initialize every subsystem in dependency order.

        Skips all setup when only generating config or running a subcommand.
        ``init_users`` and ``init_spawners`` are yielded because they are
        coroutines; the rest are plain synchronous initializers.
        """
        super().initialize(*args, **kwargs)
        if self.generate_config or self.subapp:
            # nothing to initialize: either we only write a config file,
            # or a subcommand (e.g. `jupyterhub token`) takes over
            return
        self.load_config_file(self.config_file)
        self.init_logging()
        if 'JupyterHubApp' in self.config:
            # legacy section name: warn, then merge it into the current one
            self.log.warning("Use JupyterHub in config, not JupyterHubApp. Outdated config:\n%s",
                '\n'.join('JupyterHubApp.{key} = {value!r}'.format(key=key, value=value)
                    for key, value in self.config.JupyterHubApp.items()))
            cfg = self.config.copy()
            cfg.JupyterHub.merge(cfg.JupyterHubApp)
            self.update_config(cfg)
        self.write_pid_file()
        # order matters: secrets/db before hub/proxy, users before spawners,
        # tornado settings before handlers/application
        self.init_ports()
        self.init_secrets()
        self.init_db()
        self.init_hub()
        self.init_proxy()
        yield self.init_users()
        self.init_tornado_settings()
        yield self.init_spawners()
        self.init_handlers()
        self.init_tornado_application()

    @gen.coroutine
    def cleanup(self):
        """Shutdown our various subprocesses and cleanup runtime files."""
        futures = []
        if self.cleanup_servers:
            self.log.info("Cleaning up single-user servers...")
            # request (async) process termination; collected futures are
            # awaited below so servers shut down concurrently
            for uid, user in self.users.items():
                if user.spawner is not None:
                    futures.append(user.stop())
        else:
            self.log.info("Leaving single-user servers running")

        # clean up proxy while single-user servers are shutting down
        if self.cleanup_proxy:
            if self.proxy_process:
                self.log.info("Cleaning up proxy[%i]...", self.proxy_process.pid)
                # poll() is None means the proxy process is still alive
                if self.proxy_process.poll() is None:
                    try:
                        self.proxy_process.terminate()
                    except Exception as e:
                        self.log.error("Failed to terminate proxy process: %s", e)
            else:
                self.log.info("I didn't start the proxy, I can't clean it up")
        else:
            self.log.info("Leaving proxy running")

        # wait for the stop requests issued above to finish
        for f in futures:
            try:
                yield f
            except Exception as e:
                self.log.error("Failed to stop user: %s", e)

        self.db.commit()

        if self.pid_file and os.path.exists(self.pid_file):
            self.log.info("Cleaning up PID file %s", self.pid_file)
            os.remove(self.pid_file)

        # finally stop the loop once we are all cleaned up
        self.log.info("...done")

    def write_config_file(self):
        """Write our default config to a .py config file"""
        if os.path.exists(self.config_file) and not self.answer_yes:
            # interactive overwrite confirmation; default answer is 'n'
            answer = ''
            def ask():
                prompt = "Overwrite %s with default config? [y/N]" % self.config_file
                try:
                    return input(prompt).lower() or 'n'
                except KeyboardInterrupt:
                    print('')  # empty line
                    return 'n'
            answer = ask()
            while not answer.startswith(('y', 'n')):
                print("Please answer 'yes' or 'no'")
                answer = ask()
            if answer.startswith('n'):
                return

        config_text = self.generate_config_file()
        if isinstance(config_text, bytes):
            config_text = config_text.decode('utf8')
        print("Writing default config to: %s" % self.config_file)
        with open(self.config_file, mode='w') as f:
            f.write(config_text)

    @gen.coroutine
    def update_last_activity(self):
        """Update User.last_activity timestamps from the proxy"""
        routes = yield self.proxy.get_routes()
        users_count = 0
        active_users_count = 0
        for prefix, route in routes.items():
            if 'user' not in route:
                # not a user route, ignore it
                continue
            user = orm.User.find(self.db, route['user'])
            if user is None:
                self.log.warning("Found no user for route: %s", route)
                continue
            # the proxy may report timestamps with or without milliseconds;
            # try the ms format first and fall back to whole seconds
            try:
                dt = datetime.strptime(route['last_activity'], ISO8601_ms)
            except Exception:
                dt = datetime.strptime(route['last_activity'], ISO8601_s)
            # never move last_activity backwards
            user.last_activity = max(user.last_activity, dt)
            # FIXME: Make this a configurable duration. 30 minutes for now!
            if (datetime.now() - user.last_activity).total_seconds() < 30 * 60:
                active_users_count += 1
            users_count += 1
        self.statsd.gauge('users.running', users_count)
        self.statsd.gauge('users.active', active_users_count)
        self.db.commit()
        yield self.proxy.check_routes(self.users, routes)

    @gen.coroutine
    def start(self):
        """Start the whole thing"""
        self.io_loop = loop = IOLoop.current()

        if self.subapp:
            # a subcommand owns the process; stop our loop and let it run
            self.subapp.start()
            loop.stop()
            return

        if self.generate_config:
            self.write_config_file()
            loop.stop()
            return

        # start the webserver
        self.http_server = tornado.httpserver.HTTPServer(self.tornado_application, xheaders=True)
        try:
            self.http_server.listen(self.hub_port, address=self.hub_ip)
        except Exception:
            self.log.error("Failed to bind hub to %s", self.hub.server.bind_url)
            raise
        else:
            self.log.info("Hub API listening on %s", self.hub.server.bind_url)

        # start the proxy
        try:
            yield self.start_proxy()
        except Exception as e:
            self.log.critical("Failed to start proxy", exc_info=True)
            self.exit(1)
            return

        loop.add_callback(self.proxy.add_all_users, self.users)

        if self.proxy_process:
            # only check / restart the proxy if we started it in the first place.
            # this means a restarted Hub cannot restart a Proxy that its
            # predecessor started.
            pc = PeriodicCallback(self.check_proxy, 1e3 * self.proxy_check_interval)
            pc.start()

        if self.last_activity_interval:
            pc = PeriodicCallback(self.update_last_activity, 1e3 * self.last_activity_interval)
            pc.start()

        self.log.info("JupyterHub is now running at %s", self.proxy.public_server.url)
        # register cleanup on both TERM and INT
        atexit.register(self.atexit)
        self.init_signal()

    def init_signal(self):
        """Install the SIGTERM handler for graceful shutdown."""
        signal.signal(signal.SIGTERM, self.sigterm)

    def sigterm(self, signum, frame):
        """SIGTERM handler: stop the loop and run cleanup via atexit."""
        self.log.critical("Received SIGTERM, shutting down")
        self.io_loop.stop()
        self.atexit()

    # guard so cleanup runs at most once, whether triggered by SIGTERM,
    # KeyboardInterrupt, or interpreter exit
    _atexit_ran = False

    def atexit(self):
        """atexit callback"""
        if self._atexit_ran:
            return
        self._atexit_ran = True
        # run the cleanup step (in a new loop, because the interrupted one is unclean)
        IOLoop.clear_current()
        loop = IOLoop()
        loop.make_current()
        loop.run_sync(self.cleanup)

    def stop(self):
        """Stop the HTTP server and the IO loop (safe to call from any state)."""
        if not self.io_loop:
            return
        if self.http_server:
            # if the loop is running, stopping the server must happen on it
            if self.io_loop._running:
                self.io_loop.add_callback(self.http_server.stop)
            else:
                self.http_server.stop()
        self.io_loop.add_callback(self.io_loop.stop)

    @gen.coroutine
    def launch_instance_async(self, argv=None):
        """Coroutine entry point: initialize then start, exiting on any failure."""
        try:
            yield self.initialize(argv)
            yield self.start()
        except Exception as e:
            self.log.exception("")
            self.exit(1)

    @classmethod
    def launch_instance(cls, argv=None):
        """Synchronous entry point: schedule the async launcher and run the loop."""
        self = cls.instance()
        loop = IOLoop.current()
        loop.add_callback(self.launch_instance_async, argv)
        try:
            loop.start()
        except KeyboardInterrupt:
            print("\nInterrupted")
class BlockingKernelClient(KernelClient):
    """A KernelClient with blocking APIs

    ``get_[channel]_msg()`` methods wait for and return messages on channels,
    raising :exc:`queue.Empty` if no message arrives within ``timeout`` seconds.
    """

    def wait_for_ready(self, timeout=None):
        """Block until the kernel is ready to accept requests.

        - Sets an absolute deadline from ``timeout`` (``None`` means wait forever)
        - Blocks on the shell channel until a kernel_info_reply is received
        - Raises RuntimeError if the kernel dies or the deadline passes
        - Flushes the IOPub channel afterwards
        """
        if timeout is None:
            abs_timeout = float('inf')
        else:
            abs_timeout = time.time() + timeout

        from ..manager import KernelManager
        if not isinstance(self.parent, KernelManager):
            # This Client was not created by a KernelManager,
            # so wait for kernel to become responsive to heartbeats
            # before checking for kernel_info reply
            while not self.is_alive():
                if time.time() > abs_timeout:
                    raise RuntimeError("Kernel didn't respond to heartbeats in %d seconds and timed out" % timeout)
                time.sleep(0.2)

        # Wait for kernel info reply on shell channel
        while True:
            try:
                # short poll so we can re-check liveness and the deadline
                msg = self.shell_channel.get_msg(block=True, timeout=1)
            except Empty:
                pass
            else:
                if msg['msg_type'] == 'kernel_info_reply':
                    self._handle_kernel_info_reply(msg)
                    break
            if not self.is_alive():
                raise RuntimeError('Kernel died before replying to kernel_info')
            # Check if current time has passed the deadline
            if time.time() > abs_timeout:
                raise RuntimeError("Kernel didn't respond in %d seconds" % timeout)

        # Flush IOPub channel: drain any startup messages so callers start clean
        while True:
            try:
                msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
            except Empty:
                break

    # The classes to use for the various channels
    shell_channel_class = Type(ZMQSocketChannel)
    iopub_channel_class = Type(ZMQSocketChannel)
    stdin_channel_class = Type(ZMQSocketChannel)
    hb_channel_class = Type(HBChannel)
    control_channel_class = Type(ZMQSocketChannel)

    def _recv_reply(self, msg_id, timeout=None, channel='shell'):
        """Receive and return the reply for a given request

        Skips replies whose parent msg_id doesn't match; raises TimeoutError
        if no matching reply arrives before ``timeout`` elapses.
        """
        if timeout is not None:
            deadline = monotonic() + timeout
        while True:
            if timeout is not None:
                # shrink the remaining timeout each iteration
                timeout = max(0, deadline - monotonic())
            try:
                if channel == 'control':
                    reply = self.get_control_msg(timeout=timeout)
                else:
                    reply = self.get_shell_msg(timeout=timeout)
            except Empty:
                raise TimeoutError("Timeout waiting for reply")
            if reply['parent_header'].get('msg_id') != msg_id:
                # not my reply, someone may have forgotten to retrieve theirs
                continue
            return reply

    # replies come on the shell channel
    execute = reqrep(KernelClient.execute)
    history = reqrep(KernelClient.history)
    complete = reqrep(KernelClient.complete)
    inspect = reqrep(KernelClient.inspect)
    kernel_info = reqrep(KernelClient.kernel_info)
    comm_info = reqrep(KernelClient.comm_info)

    # replies come on the control channel
    shutdown = reqrep(KernelClient.shutdown, channel='control')

    def _stdin_hook_default(self, msg):
        """Handle an input request by prompting the user on the local terminal."""
        content = msg['content']
        if content.get('password', False):
            # hide echo for password-style prompts
            prompt = getpass
        elif sys.version_info < (3,):
            prompt = raw_input
        else:
            prompt = input

        try:
            raw_data = prompt(content["prompt"])
        except EOFError:
            # turn EOFError into EOF character
            raw_data = '\x04'
        except KeyboardInterrupt:
            sys.stdout.write('\n')
            return

        # only send stdin reply if there *was not* another request
        # or execution finished while we were reading.
        if not (self.stdin_channel.msg_ready() or self.shell_channel.msg_ready()):
            self.input(raw_data)

    def _output_hook_default(self, msg):
        """Default hook for redisplaying plain-text output"""
        msg_type = msg['header']['msg_type']
        content = msg['content']
        if msg_type == 'stream':
            # route to the same-named local stream (stdout/stderr)
            stream = getattr(sys, content['name'])
            stream.write(content['text'])
        elif msg_type in ('display_data', 'execute_result'):
            sys.stdout.write(content['data'].get('text/plain', ''))
        elif msg_type == 'error':
            print('\n'.join(content['traceback']), file=sys.stderr)

    def _output_hook_kernel(self, session, socket, parent_header, msg):
        """Output hook when running inside an IPython kernel

        adds rich output support.
        """
        msg_type = msg['header']['msg_type']
        if msg_type in ('display_data', 'execute_result', 'error'):
            # forward rich output to the frontend instead of flattening it
            session.send(socket, msg_type, msg['content'], parent=parent_header)
        else:
            self._output_hook_default(msg)

    def execute_interactive(self, code, silent=False, store_history=True,
                            user_expressions=None, allow_stdin=None, stop_on_error=True,
                            timeout=None, output_hook=None, stdin_hook=None,
                            ):
        """Execute code in the kernel interactively

        Output will be redisplayed, and stdin prompts will be relayed as well.
        If an IPython kernel is detected, rich output will be displayed.

        You can pass a custom output_hook callable that will be called
        with every IOPub message that is produced instead of the default redisplay.

        .. versionadded:: 5.0

        Parameters
        ----------
        code : str
            A string of code in the kernel's language.

        silent : bool, optional (default False)
            If set, the kernel will execute the code as quietly possible, and
            will force store_history to be False.

        store_history : bool, optional (default True)
            If set, the kernel will store command history.  This is forced
            to be False if silent is True.

        user_expressions : dict, optional
            A dict mapping names to expressions to be evaluated in the user's
            dict. The expression values are returned as strings formatted using
            :func:`repr`.

        allow_stdin : bool, optional (default self.allow_stdin)
            Flag for whether the kernel can send stdin requests to frontends.

            Some frontends (e.g. the Notebook) do not support stdin requests.
            If raw_input is called from code executed from such a frontend, a
            StdinNotImplementedError will be raised.

        stop_on_error: bool, optional (default True)
            Flag whether to abort the execution queue, if an exception is encountered.

        timeout: float or None (default: None)
            Timeout to use when waiting for a reply

        output_hook: callable(msg)
            Function to be called with output messages.
            If not specified, output will be redisplayed.

        stdin_hook: callable(msg)
            Function to be called with stdin_request messages.
            If not specified, input/getpass will be called.

        Returns
        -------
        reply: dict
            The reply message for this request
        """
        if not self.iopub_channel.is_alive():
            raise RuntimeError("IOPub channel must be running to receive output")
        if allow_stdin is None:
            allow_stdin = self.allow_stdin
        if allow_stdin and not self.stdin_channel.is_alive():
            raise RuntimeError("stdin channel must be running to allow input")
        msg_id = self.execute(code,
                              silent=silent,
                              store_history=store_history,
                              user_expressions=user_expressions,
                              allow_stdin=allow_stdin,
                              stop_on_error=stop_on_error,
                              )
        if stdin_hook is None:
            stdin_hook = self._stdin_hook_default
        if output_hook is None:
            # detect IPython kernel: when running inside one, forward rich
            # output through its display publisher instead of plain redisplay
            if 'IPython' in sys.modules:
                from IPython import get_ipython
                ip = get_ipython()
                in_kernel = getattr(ip, 'kernel', False)
                if in_kernel:
                    output_hook = partial(
                        self._output_hook_kernel,
                        ip.display_pub.session,
                        ip.display_pub.pub_socket,
                        ip.display_pub.parent_header,
                    )
        if output_hook is None:
            # default: redisplay plain-text outputs
            output_hook = self._output_hook_default

        # set deadline based on timeout
        if timeout is not None:
            deadline = monotonic() + timeout
        else:
            timeout_ms = None

        poller = zmq.Poller()
        iopub_socket = self.iopub_channel.socket
        poller.register(iopub_socket, zmq.POLLIN)
        if allow_stdin:
            stdin_socket = self.stdin_channel.socket
            poller.register(stdin_socket, zmq.POLLIN)
        else:
            stdin_socket = None

        # wait for output and redisplay it
        while True:
            if timeout is not None:
                # recompute the remaining time before each poll
                timeout = max(0, deadline - monotonic())
                timeout_ms = 1e3 * timeout
            events = dict(poller.poll(timeout_ms))
            if not events:
                raise TimeoutError("Timeout waiting for output")
            if stdin_socket in events:
                req = self.stdin_channel.get_msg(timeout=0)
                stdin_hook(req)
                continue
            if iopub_socket not in events:
                continue
            msg = self.iopub_channel.get_msg(timeout=0)
            if msg['parent_header'].get('msg_id') != msg_id:
                # not from my request
                continue
            output_hook(msg)

            # stop on idle: the kernel signals execution is finished
            if msg['header']['msg_type'] == 'status' and \
               msg['content']['execution_state'] == 'idle':
                break

        # output is done, get the reply
        if timeout is not None:
            timeout = max(0, deadline - monotonic())
        return self._recv_reply(msg_id, timeout=timeout)
class InProcessKernelClient(KernelClient):
    """A client for an in-process kernel.

    This class implements the interface of
    `jupyter_client.clientabc.KernelClientABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `jupyter_client.client.KernelClient` for docstrings.
    """

    # The classes to use for the various channels.
    shell_channel_class = Type(InProcessChannel)
    iopub_channel_class = Type(InProcessChannel)
    stdin_channel_class = Type(InProcessChannel)
    control_channel_class = Type(InProcessChannel)
    hb_channel_class = Type(InProcessHBChannel)

    # the in-process kernel this client talks to; None until attached
    kernel = Instance("ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True)

    # --------------------------------------------------------------------------
    # Channel management methods
    # --------------------------------------------------------------------------

    @default("blocking_class")
    def _default_blocking_class(self):
        # imported lazily to avoid a circular import with .blocking
        from .blocking import BlockingInProcessKernelClient

        return BlockingInProcessKernelClient

    def get_connection_info(self):
        """Return connection info, extended with the kernel object itself."""
        d = super().get_connection_info()
        d["kernel"] = self.kernel
        return d

    def start_channels(self, *args, **kwargs):
        """Start channels and register this client as a kernel frontend."""
        super().start_channels()
        self.kernel.frontends.append(self)

    # each channel is created lazily on first access and cached
    @property
    def shell_channel(self):
        if self._shell_channel is None:
            self._shell_channel = self.shell_channel_class(self)
        return self._shell_channel

    @property
    def iopub_channel(self):
        if self._iopub_channel is None:
            self._iopub_channel = self.iopub_channel_class(self)
        return self._iopub_channel

    @property
    def stdin_channel(self):
        if self._stdin_channel is None:
            self._stdin_channel = self.stdin_channel_class(self)
        return self._stdin_channel

    @property
    def control_channel(self):
        if self._control_channel is None:
            self._control_channel = self.control_channel_class(self)
        return self._control_channel

    @property
    def hb_channel(self):
        if self._hb_channel is None:
            self._hb_channel = self.hb_channel_class(self)
        return self._hb_channel

    # --------------------------------------------------------------------------
    # Methods for sending specific messages
    # --------------------------------------------------------------------------

    def execute(
        self, code, silent=False, store_history=True, user_expressions=None, allow_stdin=None
    ):
        """Send an execute_request to the in-process kernel; return the msg_id."""
        if allow_stdin is None:
            allow_stdin = self.allow_stdin
        content = dict(
            code=code,
            silent=silent,
            store_history=store_history,
            user_expressions=user_expressions or {},
            allow_stdin=allow_stdin,
        )
        msg = self.session.msg("execute_request", content)
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def complete(self, code, cursor_pos=None):
        """Send a complete_request; cursor defaults to the end of ``code``."""
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos)
        msg = self.session.msg("complete_request", content)
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def inspect(self, code, cursor_pos=None, detail_level=0):
        """Send an inspect_request; cursor defaults to the end of ``code``."""
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(
            code=code,
            cursor_pos=cursor_pos,
            detail_level=detail_level,
        )
        msg = self.session.msg("inspect_request", content)
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def history(self, raw=True, output=False, hist_access_type="range", **kwds):
        """Send a history_request; extra keywords are passed through as content."""
        content = dict(raw=raw, output=output, hist_access_type=hist_access_type, **kwds)
        msg = self.session.msg("history_request", content)
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def shutdown(self, restart=False):
        # FIXME: What to do here?
        raise NotImplementedError("Cannot shutdown in-process kernel")

    def kernel_info(self):
        """Request kernel info."""
        msg = self.session.msg("kernel_info_request")
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def comm_info(self, target_name=None):
        """Request a dictionary of valid comms and their targets."""
        if target_name is None:
            content = {}
        else:
            content = dict(target_name=target_name)
        msg = self.session.msg("comm_info_request", content)
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def input(self, string):
        """Deliver a stdin reply directly to the in-process kernel."""
        if self.kernel is None:
            raise RuntimeError("Cannot send input reply. No kernel exists.")
        self.kernel.raw_input_str = string

    def is_complete(self, code):
        """Send an is_complete_request for ``code``; return the msg_id."""
        msg = self.session.msg("is_complete_request", {"code": code})
        self._dispatch_to_kernel(msg)
        return msg["header"]["msg_id"]

    def _dispatch_to_kernel(self, msg):
        """Send a message to the kernel and handle a reply."""
        kernel = self.kernel
        if kernel is None:
            raise RuntimeError("Cannot send request. No kernel exists.")

        stream = kernel.shell_stream
        self.session.send(stream, msg)
        msg_parts = stream.recv_multipart()
        # dispatch_shell is a coroutine; run it to completion synchronously
        loop = asyncio.get_event_loop()
        loop.run_until_complete(kernel.dispatch_shell(msg_parts))
        idents, reply_msg = self.session.recv(stream, copy=False)
        self.shell_channel.call_handlers_later(reply_msg)

    # blocking get_* accessors simply delegate to the matching channel
    def get_shell_msg(self, block=True, timeout=None):
        return self.shell_channel.get_msg(block, timeout)

    def get_iopub_msg(self, block=True, timeout=None):
        return self.iopub_channel.get_msg(block, timeout)

    def get_stdin_msg(self, block=True, timeout=None):
        return self.stdin_channel.get_msg(block, timeout)

    def get_control_msg(self, block=True, timeout=None):
        return self.control_channel.get_msg(block, timeout)
class A(HasTraits):
    """Fixture with a Type trait whose default value is not a class.

    The string default is intentionally invalid — presumably used to
    exercise Type-trait default validation (TODO confirm against callers).
    """

    klass = Type('bad default', B)