def test_runtime_dir_windows():
    """Runtime dir on Windows lives under %APPDATA%, with or without XDG vars."""
    def expected():
        return realpath(pjoin(os.environ.get('APPDATA', None), 'jupyter', 'runtime'))

    assert jupyter_runtime_dir() == expected()
    with xdg:
        # windows should ignore xdg
        assert jupyter_runtime_dir() == expected()
def test_runtime_dir_linux():
    """Linux runtime dir: XDG-aware, with a ~/.local/share fallback."""
    # Without XDG variables set, fall back under ~/.local/share.
    with linux, no_xdg:
        runtime = jupyter_runtime_dir()
        assert runtime == realpath('~/.local/share/jupyter/runtime')
    # With XDG vars set, derive the runtime dir from XDG_RUNTIME_HOME.
    # NOTE(review): a sibling variant of this test asserts against
    # XDG_DATA_HOME instead -- confirm which env var the jupyter_core
    # version under test actually consults.
    with linux, xdg:
        runtime = jupyter_runtime_dir()
        assert runtime == pjoin(xdg_env['XDG_RUNTIME_HOME'], 'jupyter')
def test_runtime_dir_linux():
    """Linux honours XDG_DATA_HOME for the runtime dir, defaulting to ~/.local/share."""
    with linux, no_xdg:
        assert jupyter_runtime_dir() == realpath('~/.local/share/jupyter/runtime')
    with linux, xdg:
        assert jupyter_runtime_dir() == pjoin(xdg_env['XDG_DATA_HOME'], 'jupyter', 'runtime')
def test_runtime_dir_darwin():
    """macOS always uses ~/Library/Jupyter/runtime, even when XDG vars are set."""
    expected = realpath('~/Library/Jupyter/runtime')
    with darwin:
        assert jupyter_runtime_dir() == expected
    with darwin, xdg:
        # darwin should ignore xdg
        assert jupyter_runtime_dir() == expected
def test_runtime_dir_windows():
    """Windows places the runtime dir under %APPDATA%, ignoring XDG variables."""
    expected = pjoin('appdata', 'jupyter', 'runtime')
    with windows, appdata:
        assert jupyter_runtime_dir() == expected
    with windows, appdata, xdg:
        # windows should ignore xdg
        assert jupyter_runtime_dir() == expected
def _list_running_servers_jl3():
    """Yield info dicts for running Jupyter servers (notebook and jupyter_server).

    Scans the Jupyter runtime dir for both ``nbserver-*.json`` (classic
    notebook) and ``jpserver-*.json`` (jupyter_server / JupyterLab 3) files.
    Entries whose recorded pid is no longer alive have their stale info file
    removed instead of being yielded.
    """
    import io
    import json
    from notebook.utils import check_pid
    from jupyter_core.paths import jupyter_runtime_dir
    import os.path
    import re

    runtime_dir = jupyter_runtime_dir()
    # The runtime dir might not exist
    if not os.path.isdir(runtime_dir):
        return
    for file_name in os.listdir(runtime_dir):
        # here is the fix: JupyterLab 3 writes jpserver-*.json instead of
        # nbserver-*.json, so accept both prefixes.
        # BUG FIX: use raw strings and escape the '.' before 'json' so a
        # name like 'nbserver-1-jsonx' cannot accidentally match.
        if re.match(r'nbserver-(.+)\.json', file_name) or re.match(
                r'jpserver-(.+)\.json', file_name):
            with io.open(os.path.join(runtime_dir, file_name),
                         encoding='utf-8') as f:
                info = json.load(f)
            if ('pid' in info) and check_pid(info['pid']):
                yield info
            else:
                # Process has died: remove the stale info file.
                try:
                    os.unlink(os.path.join(runtime_dir, file_name))
                except OSError:
                    pass
def find_connection_file(
    filename: str = "kernel-*.json",
    path: Optional[Union[str, List[str]]] = None,
    profile: Optional[str] = None,
) -> str:
    """find a connection file, and return its absolute path.

    The current working directory and optional search path will be searched
    for the file if it is not given by absolute path.  If the argument does
    not match an existing file, it is treated as a fileglob and the matching
    file with the latest access time is returned.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    path : str or list of strs[optional]
        Paths in which to search for connection files.

    Returns
    -------
    str : The absolute path of the connection file.
    """
    if profile is not None:
        warnings.warn("Jupyter has no profiles. profile=%s has been ignored." % profile)
    if path is None:
        path = [".", jupyter_runtime_dir()]
    if isinstance(path, str):
        path = [path]

    # An exact (possibly relative) filename wins outright.
    try:
        return _filefind(filename, path)
    except IOError:
        pass

    # Not found by full name: fall back to glob matching, wrapping a bare
    # substring in wildcards.
    pat = filename if "*" in filename else "*%s*" % filename
    matches = [
        os.path.abspath(m)
        for p in path
        for m in glob.glob(os.path.join(p, pat))
    ]

    if not matches:
        raise IOError("Could not find %r in %r" % (filename, path))
    if len(matches) == 1:
        return matches[0]
    # get most recent match, by access time:
    return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
def list_running_servers(runtime_dir=None):
    """Iterate over the server info files of running notebook servers.

    Given a runtime directory, find nbserver-* files in the security directory,
    and yield dicts of their information, each one pertaining to
    a currently running notebook server instance.

    Parameters
    ----------
    runtime_dir : str, optional
        Directory to scan; defaults to the Jupyter runtime dir.
    """
    if runtime_dir is None:
        runtime_dir = jupyter_runtime_dir()

    # The runtime dir might not exist
    if not os.path.isdir(runtime_dir):
        return

    for file in os.listdir(runtime_dir):
        if file.startswith('nbserver-'):
            with io.open(os.path.join(runtime_dir, file), encoding='utf-8') as f:
                info = json.load(f)

            # Simple check whether that process is really still running
            # Also remove leftover files from IPython 2.x without a pid field
            if ('pid' in info) and check_pid(info['pid']):
                yield info
            else:
                # If the process has died, try to delete its info file
                try:
                    # BUG FIX: unlink the full path -- the bare filename is
                    # relative to the CWD, not the runtime dir, so the stale
                    # file was never actually removed.
                    os.unlink(os.path.join(runtime_dir, file))
                except OSError:
                    pass  # TODO: This should warn or log or something
def _list_running_servers(runtime_dir: str = None) -> Generator:
    """Iterate over the server info files of running notebook servers.

    Given a runtime directory, find nbserver-* files in the security directory,
    and yield dicts of their information, each one pertaining to
    a currently running notebook server instance.

    Copied from notebook.notebookapp.list_running_servers() (version 5.7.8)
    since the highest version compatible with Python 3.5 (version 5.6.0)
    has a bug.
    """
    runtime_dir = jupyter_runtime_dir() if runtime_dir is None else runtime_dir

    # The runtime dir might not exist
    if not os.path.isdir(runtime_dir):
        return

    for file_name in os.listdir(runtime_dir):
        if not re.match('nbserver-(.+).json', file_name):
            continue
        info_path = os.path.join(runtime_dir, file_name)
        with io.open(info_path, encoding='utf-8') as f:
            info = json.load(f)

        # Simple check whether that process is really still running
        # Also remove leftover files from IPython 2.x without a pid field
        if ('pid' in info) and check_pid(info['pid']):
            yield info
        else:
            # If the process has died, try to delete its info file
            try:
                os.unlink(info_path)
            except OSError:
                pass  # TODO: This should warn or log or something
def get_connection_parameters(parent=None, dialog=None):
    """Show the kernel-connection dialog and return the user's choices.

    Returns a 5-tuple ``(connection_file_or_path, hostname, keyfile,
    password, accepted)`` where ``accepted`` is True when the dialog was
    confirmed.  For local kernels the middle three entries are None.
    """
    if not dialog:
        dialog = KernelConnectionDialog(parent)
    result = dialog.exec_()
    is_remote = bool(dialog.rm_group.isChecked())
    accepted = result == QDialog.Accepted
    if is_remote:
        def falsy_to_none(arg):
            # Normalize empty strings from the dialog fields to None.
            return arg if arg else None
        if dialog.hn.text() and dialog.un.text():
            # Default to SSH's standard port when none was entered.
            port = dialog.pn.text() if dialog.pn.text() else '22'
            hostname = "{0}@{1}:{2}".format(dialog.un.text(),
                                            dialog.hn.text(),
                                            port)
        else:
            hostname = None
        if dialog.pw_radio.isChecked():
            # Password authentication selected.
            password = falsy_to_none(dialog.pw.text())
            keyfile = None
        elif dialog.kf_radio.isChecked():
            # Key-file authentication; the passphrase field may be empty.
            keyfile = falsy_to_none(dialog.kf.text())
            password = falsy_to_none(dialog.kfp.text())
        else:  # impossible?
            keyfile = None
            password = None
        return (dialog.cf.text(), hostname, keyfile, password, accepted)
    else:
        path = dialog.cf.text()
        _dir, filename = osp.dirname(path), osp.basename(path)
        # A bare kernel id (no directory component, no .json suffix) is
        # resolved inside the Jupyter runtime directory.
        if _dir == '' and not filename.endswith('.json'):
            path = osp.join(jupyter_runtime_dir(), 'kernel-'+path+'.json')
        return (path, None, None, None, accepted)
def get_connection_parameters(parent=None, dialog=None):
    """Show the kernel-connection dialog and return the user's choices.

    Returns a 5-tuple ``(connection_file_or_path, hostname, keyfile,
    password, accepted)`` where ``accepted`` is True when the dialog was
    confirmed.  For local kernels the middle three entries are None.
    """
    if not dialog:
        dialog = KernelConnectionDialog(parent)
    result = dialog.exec_()
    is_remote = bool(dialog.rm_group.isChecked())
    accepted = result == QDialog.Accepted
    if is_remote:
        def falsy_to_none(arg):
            # Normalize empty strings from the dialog fields to None.
            return arg if arg else None
        if dialog.hn.text() and dialog.un.text():
            # Default to SSH's standard port when none was entered.
            port = dialog.pn.text() if dialog.pn.text() else '22'
            hostname = "{0}@{1}:{2}".format(dialog.un.text(),
                                            dialog.hn.text(),
                                            port)
        else:
            hostname = None
        if dialog.pw_radio.isChecked():
            # Password authentication selected.
            password = falsy_to_none(dialog.pw.text())
            keyfile = None
        elif dialog.kf_radio.isChecked():
            # Key-file authentication; the passphrase field may be empty.
            keyfile = falsy_to_none(dialog.kf.text())
            password = falsy_to_none(dialog.kfp.text())
        else:  # impossible?
            keyfile = None
            password = None
        return (dialog.cf.text(), hostname, keyfile, password, accepted)
    else:
        path = dialog.cf.text()
        _dir, filename = osp.dirname(path), osp.basename(path)
        # A bare kernel id (no directory component, no .json suffix) is
        # resolved inside the Jupyter runtime directory.
        if _dir == '' and not filename.endswith('.json'):
            path = osp.join(jupyter_runtime_dir(), 'kernel-' + path + '.json')
        return (path, None, None, None, accepted)
def _init_server(self):
    "Start the notebook server in a separate process"
    self.server_command = command = [
        sys.executable,
        '-m',
        'notebook',
        '--no-browser',
        '--notebook-dir',
        self.nbdir.name,
        # Disable token auth so the test harness can connect freely.
        '--NotebookApp.token=',
        '--NotebookApp.base_url=%s' % self.base_url,
    ]
    # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
    # which run afoul of ipc's maximum path length.
    if sys.platform.startswith('linux'):
        command.append('--KernelManager.transport=ipc')
    # Capture the server's combined stdout/stderr in the background.
    self.stream_capturer = c = StreamCapturer()
    c.start()
    env = os.environ.copy()
    env.update(self.env)
    self.server = subprocess.Popen(
        command,
        stdout=c.writefd,
        stderr=subprocess.STDOUT,
        cwd=self.nbdir.name,
        env=env,
    )
    # Resolve the runtime dir with HOME patched to the test home so the
    # path matches what the child server process will compute, then locate
    # its nbserver-<pid>.json info file.
    with patch.dict('os.environ', {'HOME': self.home.name}):
        runtime_dir = jupyter_runtime_dir()
    self.server_info_file = os.path.join(
        runtime_dir, 'nbserver-%i.json' % self.server.pid)
    self._wait_for_server()
def __init__(self, **kwargs):
    """Initialize the component cache.

    Server processes get background refresh/update queues, a file-system
    watcher on the manifest directory, and a cache-update manager thread;
    non-server processes only record a per-pid manifest filename.
    """
    super().__init__(**kwargs)
    self._component_cache = {}
    self.is_server_process = ComponentCache._determine_server_process(**kwargs)
    # Manifest files live in the Jupyter runtime directory.
    self.manifest_dir = jupyter_runtime_dir()
    # Ensure queue attribute exists for non-server instances as well.
    self.refresh_queue: Optional[RefreshQueue] = None
    self.update_queue: Optional[UpdateQueue] = None
    if self.is_server_process:
        self.refresh_queue = RefreshQueue()
        self.update_queue = UpdateQueue()

        # Set up watchdog for manifest file for out-of-process updates
        self.observer = Observer()
        self.observer.schedule(ManifestFileChangeHandler(self), self.manifest_dir)

        # Start a thread to manage updates to the component cache
        manager = CacheUpdateManager(
            self.log, self._component_cache, self.refresh_queue, self.update_queue)
        self.cache_manager = manager
        self.cache_manager.start()
        self.log.debug("CacheUpdateManager started...")
    else:
        # Out-of-process writers publish into their own per-pid manifest.
        self.manifest_filename = os.path.join(
            self.manifest_dir, f"elyra-component-manifest-{os.getpid()}.json")
def launch(image, cwd):
    """Start a Jupyter kernel inside a docker container.

    Writes a connection file into a temp dir under the Jupyter runtime dir,
    bind-mounts that dir (and ``cwd``) into the container, and returns
    ``(connection_info, DockerKernelManager)``.

    Raises
    ------
    RuntimeError
        If the started container reports no bridge-network IP address.
    """
    d = os.path.join(jupyter_runtime_dir(), 'docker_kernels')
    ensure_dir_exists(d)
    set_sticky_bit(d)
    # The temp dir (holding the connection file) is mounted at /connect.
    conn_file_tmpdir = TemporaryDirectory(dir=d)
    conn_info = make_connection_file(conn_file_tmpdir.name)
    container = docker.from_env().containers.run(
        image,
        detach=True,
        volumes={
            conn_file_tmpdir.name: {'bind': '/connect', 'mode': 'rw'},
            cwd: {'bind': '/working', 'mode': 'rw'},
        })
    container.reload()  # Need this to get the IP address
    ip = container.attrs['NetworkSettings']['Networks']['bridge']['IPAddress']
    if not ip:
        raise RuntimeError("No IP address for docker container")
    print(container.attrs['NetworkSettings']['Networks'])
    # Point the connection info at the container so clients can reach it.
    conn_info['ip'] = ip
    return conn_info, DockerKernelManager(container, conn_file_tmpdir)
def initialize(self, existing="", argv=None):
    """Prepare the connection file, kernel manager/client, and shell.

    Parameters
    ----------
    existing : str
        Connection file of an already-running kernel to attach to; when
        empty, a new connection file (and kernel) is created.
    argv :
        # NOTE(review): accepted but never read here -- confirm callers.
    """
    # if self._dispatching:
    #    return
    self.existing = existing
    self.runtime_dir = jupyter_runtime_dir()
    if not self.existing:
        self.connection_file = self._new_connection_file()
        # NOTE(review): os.mkdir fails when parent dirs are missing;
        # os.makedirs(..., exist_ok=True) would be more robust -- confirm.
        if not os.path.isdir(self.runtime_dir):
            os.mkdir(self.runtime_dir)
    if self.kernel_manager is not None and\
            self.kernel_manager.is_alive():
        # first, shutdown the old kernel if own one
        self.kernel_manager.shutdown_kernel(restart=False)
    self.init_connection_file()
    self.init_ssh()
    self.init_kernel_manager()
    self.init_kernel_client()
    self.init_shell()
def running_kernels(self, args):
    """Return kernel-*.json filenames in the runtime dir, tagging the newest."""
    runtime_dir = jupyter_runtime_dir()
    kernel_files = fnmatch.filter(os.listdir(runtime_dir), 'kernel-*.json')
    if len(kernel_files) > 1:
        # Mark the most recently used connection file so completion shows it.
        newest = os.path.relpath(find_connection_file(), runtime_dir)
        kernel_files = [
            name + '(newest)' if name == newest else name
            for name in kernel_files
        ]
    return kernel_files
def connect_qtconsole(connection_info, name=None, extra_args=None):
    """Open a QtConsole connected to a worker who has the given future

    - identify worker with who_has
    - start IPython kernel on the worker
    - start qtconsole connected to the kernel
    """
    runtime_dir = jupyter_runtime_dir()
    if name is None:
        name = uuid4().hex

    path = os.path.join(runtime_dir, name + ".json")
    write_connection_file(path, **connection_info)
    cmd = ["jupyter", "qtconsole", "--existing", path]
    if extra_args:
        cmd.extend(extra_args)
    Popen(cmd)

    def _cleanup_connection_file():
        """Cleanup our connection file when we exit."""
        try:
            os.remove(path)
        except OSError:
            pass

    # Register explicitly rather than via decorator syntax.
    atexit.register(_cleanup_connection_file)
def connect_info(self, arg_s):
    """Print information for connecting other clients to this kernel

    It will print the contents of this session's connection file, as well as
    shortcuts for local clients.

    In the simplest case, when called from the most recently launched kernel,
    secondary clients can be connected, simply with:

    $> jupyter <app> --existing
    """
    try:
        connection_file = get_connection_file()
        info = get_connection_info(unpack=False)
    except Exception as e:
        # Without a connection file there is nothing to show; warn and bail.
        warnings.warn("Could not get connection info: %r" % e)
        return

    # if it's in the default dir, truncate to basename
    if jupyter_runtime_dir() == os.path.dirname(connection_file):
        connection_file = os.path.basename(connection_file)

    print(info + '\n')
    print("Paste the above JSON into a file, and connect with:\n"
          " $> jupyter <app> --existing <file>\n"
          "or, if you are local, you can connect with just:\n"
          " $> jupyter <app> --existing {0}\n"
          "or even just:\n"
          " $> jupyter <app> --existing\n"
          "if this is the most recent Jupyter kernel you have started.".
          format(connection_file))
def _init_server(self):
    "Start the notebook server in a separate process"
    self.server_command = command = [sys.executable,
        '-m', 'notebook',
        '--no-browser',
        '--notebook-dir', self.nbdir.name,
        '--NotebookApp.base_url=%s' % self.base_url,
    ]
    # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
    # which run afoul of ipc's maximum path length.
    if sys.platform.startswith('linux'):
        command.append('--KernelManager.transport=ipc')
    # Capture the server's combined stdout/stderr in the background.
    self.stream_capturer = c = StreamCapturer()
    c.start()
    env = os.environ.copy()
    env.update(self.env)
    # phantomjs cannot speak the newer websocket drafts; opt in to the
    # compatibility shim via this environment flag.
    if self.engine == 'phantomjs':
        env['IPYTHON_ALLOW_DRAFT_WEBSOCKETS_FOR_PHANTOMJS'] = '1'
    self.server = subprocess.Popen(command,
        stdout=c.writefd,
        stderr=subprocess.STDOUT,
        cwd=self.nbdir.name,
        env=env,
    )
    # Resolve the runtime dir with HOME patched to the test home dir so the
    # path matches what the child server process computes.
    with patch.dict('os.environ', {'HOME': self.home.name}):
        runtime_dir = jupyter_runtime_dir()
    self.server_info_file = os.path.join(runtime_dir,
        'nbserver-%i.json' % self.server.pid
    )
    self._wait_for_server()
def test_find_connection_file_abspath():
    """An absolute filename is returned directly, without consulting path."""
    with TemporaryDirectory():
        cf = 'absolute.json'
        abs_cf = os.path.abspath(cf)
        with open(cf, 'w') as f:
            f.write('{}')
        assert connect.find_connection_file(abs_cf, path=jupyter_runtime_dir()) == abs_cf
        # BUG FIX: the file is created in the CWD, not the temp dir, so it
        # must be removed explicitly or the test leaves residue behind.
        # (Also dropped the unused `as d` binding.)
        os.remove(abs_cf)
def test_find_connection_file_abspath():
    """An absolute filename is returned directly, without consulting path."""
    with TemporaryDirectory():
        cf = 'absolute.json'
        abs_cf = os.path.abspath(cf)
        with open(cf, 'w') as f:
            f.write('{}')
        assert connect.find_connection_file(
            abs_cf, path=jupyter_runtime_dir()) == abs_cf
        # BUG FIX: the file lives in the CWD (not the temp dir); remove it
        # so the test leaves no residue.  (Unused `as d` binding dropped.)
        os.remove(abs_cf)
def test_find_connection_file_abspath():
    """find_connection_file returns an absolute filename as-is."""
    with TemporaryDirectory():
        abs_cf = os.path.abspath("absolute.json")
        with open("absolute.json", "w") as f:
            f.write("{}")
        found = connect.find_connection_file(abs_cf, path=jupyter_runtime_dir())
        assert found == abs_cf
        os.remove(abs_cf)
def find_connection_file(filename='kernel-*.json', path=None):
    """find a connection file, and return its absolute path.

    The current working directory and the profile's security
    directory will be searched for the file if it is not given by
    absolute path.

    If profile is unspecified, then the current running application's
    profile will be used, or 'default', if not run from IPython.

    If the argument does not match an existing file, it will be interpreted as a
    fileglob, and the matching file in the profile's security dir with
    the latest access time will be used.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    path : str or list of strs[optional]
        Paths in which to search for connection files.

    Returns
    -------
    str : The absolute path of the connection file.
    """
    if path is None:
        path = ['.', jupyter_runtime_dir()]
    if isinstance(path, string_types):
        path = [path]

    try:
        # first, try explicit name
        return filefind(filename, path)
    except IOError:
        pass

    # not found by full name

    if '*' in filename:
        # given as a glob already
        pat = filename
    else:
        # accept any substring match
        pat = '*%s*' % filename

    matches = []
    for p in path:
        matches.extend(glob.glob(os.path.join(p, pat)))

    # BUG FIX: normalize to absolute paths, as the docstring promises --
    # glob results for relative search dirs (e.g. '.') are relative paths.
    matches = [os.path.abspath(m) for m in matches]

    if not matches:
        raise IOError("Could not find %r in %r" % (filename, path))
    elif len(matches) == 1:
        return matches[0]
    else:
        # get most recent match, by access time:
        return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
def create_tab_with_existing_kernel(self):
    """create a new frontend attached to an external kernel in a new tab"""
    # Ask the user for a connection file, starting the file dialog in the
    # Jupyter runtime dir where kernels write their kernel-*.json files.
    connection_file, file_type = QtWidgets.QFileDialog.getOpenFileName(
        self, "Connect to Existing Kernel", jupyter_runtime_dir(),
        "Connection file (*.json)")
    if not connection_file:
        # Dialog was cancelled.
        return
    widget = self.connection_frontend_factory(connection_file)
    name = "external {}".format(self.next_external_kernel_id)
    self.add_tab_with_frontend(widget, name=name)
def test_paths():
    """`jupyter --paths` lists every config/data/runtime dir with headers."""
    output = get_jupyter_output("--paths")
    expected_dirs = (jupyter_config_dir(), jupyter_data_dir(), jupyter_runtime_dir())
    for directory in expected_dirs:
        assert directory in output
    for section in ("config", "data", "runtime"):
        assert ("%s:" % section) in output
    for search_path in (jupyter_config_path(), jupyter_path()):
        for directory in search_path:
            assert directory in output
def write_connection_file(self):
    """Write the nanny's connection info to a kernelnanny-<pid>.json file.

    The file holds connection secrets, so it is created with owner-only
    permissions (cf. the secure_write pattern used elsewhere in Jupyter).

    Returns
    -------
    str
        Path of the connection file written.
    """
    runtime_dir = jupyter_runtime_dir()
    ensure_dir_exists(runtime_dir)
    fname = os.path.join(runtime_dir, 'kernelnanny-%s.json' % os.getpid())
    # SECURITY FIX: restrict the file to 0o600 -- the connection info
    # contains the session key, and a world-readable file would let other
    # local users connect to (and execute code via) this kernel.
    fd = os.open(fname, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with open(fd, 'w') as f:
        f.write(json.dumps(self.connection_info, indent=2))
    set_sticky_bit(fname)
    return fname
def test_paths():
    """Every known Jupyter directory should appear in `jupyter --paths`."""
    output = get_jupyter_output('--paths')
    assert jupyter_config_dir() in output
    assert jupyter_data_dir() in output
    assert jupyter_runtime_dir() in output
    for key in ('config', 'data', 'runtime'):
        assert ('%s:' % key) in output
    for path_list in (jupyter_config_path(), jupyter_path()):
        for d in path_list:
            assert d in output
def setup_kernel(cmd):
    """start an embedded kernel in a subprocess, and wait for it to be ready

    Yields
    ------
    client: connected BlockingKernelClient instance
    """
    def connection_file_ready(connection_file):
        """Check if connection_file is a readable json file."""
        if not os.path.exists(connection_file):
            return False
        try:
            # The kernel may still be mid-write; only consider the file
            # ready once it parses as complete JSON.
            with open(connection_file) as f:
                json.load(f)
            return True
        except ValueError:
            return False

    kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE)
    try:
        # The embedded kernel writes kernel-<pid>.json into the runtime dir.
        connection_file = os.path.join(
            paths.jupyter_runtime_dir(),
            'kernel-%i.json' % kernel.pid,
        )
        # wait for connection file to exist, timeout after 5s
        tic = time.time()
        while not connection_file_ready(connection_file) \
                and kernel.poll() is None \
                and time.time() < tic + SETUP_TIMEOUT:
            time.sleep(0.1)

        # Wait 100ms for the writing to finish
        time.sleep(0.1)

        if kernel.poll() is not None:
            # Kernel died during startup: surface its stderr.
            o, e = kernel.communicate()
            e = py3compat.cast_unicode(e)
            raise IOError("Kernel failed to start:\n%s" % e)

        if not os.path.exists(connection_file):
            if kernel.poll() is None:
                kernel.terminate()
            raise IOError("Connection file %r never arrived" % connection_file)

        client = BlockingKernelClient(connection_file=connection_file)
        client.load_connection_file()
        client.start_channels()
        client.wait_for_ready()
        try:
            yield client
        finally:
            client.stop_channels()
    finally:
        # Always reap the kernel subprocess, even on setup failure.
        kernel.terminate()
def create_tab_with_existing_kernel(self):
    """create a new frontend attached to an external kernel in a new tab"""
    # Ask the user for a connection file, starting the file dialog in the
    # Jupyter runtime dir where kernels write their kernel-*.json files.
    connection_file, file_type = QtGui.QFileDialog.getOpenFileName(
        self, "Connect to Existing Kernel", jupyter_runtime_dir(),
        "Connection file (*.json)")
    if not connection_file:
        # Dialog was cancelled.
        return
    widget = self.connection_frontend_factory(connection_file)
    name = "external {}".format(self.next_external_kernel_id)
    self.add_tab_with_frontend(widget, name=name)
def find_jupyter_kernel_ids():
    """Find opened kernel json files.

    .. note:: called by vim command completion.

    Returns
    -------
    list(str)
        List of strings of kernel ids.
    """
    # TODO Get type of kernel (python, julia, etc.)
    runtime = Path(jupyter_runtime_dir())
    return [match_kernel_id(conn_file) for conn_file in runtime.glob('kernel*.json')]
def _read_latest_connection_file(cls):
    """
    Reads the latest jupyter kernel connection file.

    https://jupyter.readthedocs.io/en/latest/projects/jupyter-directories.html.
    """
    pattern = os.path.join(jupyter_runtime_dir(), 'kernel-*.json')
    candidates = glob.glob(pattern)
    if not candidates:
        return None

    # use the latest connection file
    newest = max(candidates, key=os.path.getctime)
    with open(newest, 'r') as f:
        return json.load(f)
def list_running_servers_v2(runtime_dir=None):
    """Iterate over the server info files of running notebook servers.

    Given a runtime directory, find nbserver-* files in the security directory,
    and yield dicts of their information, each one pertaining to
    a currently running notebook server instance.

    NOTE(review): despite the docstring, this variant never yields -- the
    live-server branch is `pass` with the `yield info` commented out; it only
    performs cleanup of stale nbserver-*.json / nbserver-*-open.html files.
    Confirm whether the commented-out yield should be restored.
    """
    if runtime_dir is None:
        runtime_dir = jupyter_runtime_dir()

    # The runtime dir might not exist
    if not os.path.isdir(runtime_dir):
        return

    for file_name in os.listdir(runtime_dir):
        if re.match('nbserver-(.+).json', file_name):
            with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f:
                info = json.load(f)

            # Simple check whether that process is really still running
            # Also remove leftover files from IPython 2.x without a pid field
            if ('pid' in info) and check_pid(info['pid']):
                # yield info
                pass
            else:
                # If the process has died, try to delete its info file
                try:
                    os.unlink(os.path.join(runtime_dir, file_name))
                except OSError:
                    pass  # TODO: This should warn or log or something
                # Also delete the matching nbserver-*-open.html launcher page
                # ('[:-5]' strips the '.json' suffix).
                try:
                    os.unlink(
                        os.path.join(runtime_dir, file_name)[:-5] + "-open.html")
                except OSError:
                    pass  # TODO: This should warn or log or something

        # Delete all "*-open.html" files which hasn't pair json
        # ('[:-10]' strips the '-open.html' suffix).
        if re.match('nbserver-(.+)-open.html', file_name):
            if os.path.isfile(
                    os.path.join(runtime_dir, file_name)[:-10] + ".json"):
                pass
            else:
                try:
                    os.unlink(os.path.join(runtime_dir, file_name))
                except OSError:
                    pass  # TODO: This should warn or log or something
def test_find_connection_file_local():
    """Bare names, substrings and absolute paths all resolve to the local file."""
    with TemporaryWorkingDirectory() as d:
        cf = 'test.json'
        abs_cf = os.path.abspath(cf)
        with open(cf, 'w') as f:
            f.write('{}')

        queries = ('test.json', 'test', abs_cf, os.path.join('.', 'test.json'))
        for query in queries:
            found = connect.find_connection_file(query, path=['.', jupyter_runtime_dir()])
            assert found == abs_cf
def find_kernels(sep="\t"):
    """Describe each running kernel's connection file and declared purpose."""
    from jupyter_core.paths import jupyter_runtime_dir
    import glob

    connection_files = glob.glob(
        os.path.join(jupyter_runtime_dir(), "kernel-*.json"))
    described = []
    for conn_file in connection_files:
        # Query the kernel itself for a user-declared `purpose` variable.
        purpose = str(
            temporary_connect_query(
                conn_file,
                "_purpose = purpose if 'purpose' in globals() else 'undefined'",
                "_purpose"))
        described.append(
            "{}{}purpose: {}".format(os.path.basename(conn_file), sep, purpose))
    return described
def record_connection_info(self, conn_info):
    """Persist kernel connection info to a kernel-<uuid>.json file.

    Returns the path of the file written, and logs the --existing hint a
    client needs to connect.
    """
    log.info("Connection info: %s", conn_info)
    runtime_dir = jupyter_runtime_dir()
    ensure_dir_exists(runtime_dir)
    fname = os.path.join(runtime_dir, 'kernel-%s.json' % uuid4())

    # Only ever write this file as user read/writeable
    # This would otherwise introduce a vulnerability as a file has secrets
    # which would let others execute arbitrarily code as you
    with secure_write(fname) as f:
        f.write(json.dumps(conn_info, indent=2))
    log.info("To connect a client: --existing %s", os.path.basename(fname))
    return fname
def test_find_connection_file_relative():
    """Paths with a directory component resolve relative to the CWD."""
    with TemporaryWorkingDirectory() as d:
        os.mkdir('subdir')
        cf = os.path.join('subdir', 'test.json')
        abs_cf = os.path.abspath(cf)
        with open(cf, 'w') as f:
            f.write('{}')

        for query in (os.path.join('.', 'subdir', 'test.json'),
                      os.path.join('subdir', 'test.json'),
                      abs_cf):
            found = connect.find_connection_file(query, path=['.', jupyter_runtime_dir()])
            assert found == abs_cf
def test_find_connection_file_local():
    """Bare names, substrings and absolute paths all find the local file."""
    with TemporaryWorkingDirectory():
        abs_cf = os.path.abspath("test.json")
        with open("test.json", "w") as f:
            f.write("{}")

        search = [".", jupyter_runtime_dir()]
        for query in ("test.json", "test", abs_cf, os.path.join(".", "test.json")):
            assert connect.find_connection_file(query, path=search) == abs_cf
def load_ipython_extension(ipython):
    """Register the notebook-loader magics on the given IPython instance."""
    # use the configured working directory if we can find it
    work_dir = None
    # Scan the runtime dir for a running notebook server's info file and
    # adopt its notebook_dir as our working directory (first match wins).
    for filename in scandir.scandir(jupyter_runtime_dir()):
        if filename.name.startswith('nbserver-') and filename.name.endswith('.json'):
            with open(filename.path, 'r') as fh:
                nbserver = json.load(fh)
                # NOTE(review): assumes every server info file carries a
                # 'notebook_dir' key -- confirm for older server versions.
                work_dir = nbserver['notebook_dir']
                break
    if work_dir is None:
        # fall back on an environment variable or ultimately the pwd
        work_dir = os.getenv('WORK', '.')
    loader.enable(work_dir)
    ipython.push({'load_notebook': loader.load_notebook})
    ipython.register_magics(InjectMagic(ipython))
def setup_kernel(cmd):
    """start an embedded kernel in a subprocess, and wait for it to be ready

    Yields
    ------
    client: connected BlockingKernelClient instance
    """
    kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE)
    # The embedded kernel writes kernel-<pid>.json into the runtime dir.
    connection_file = os.path.join(
        paths.jupyter_runtime_dir(),
        'kernel-%i.json' % kernel.pid,
    )
    # wait for connection file to exist, timeout after 5s
    tic = time.time()
    while not os.path.exists(connection_file) \
            and kernel.poll() is None \
            and time.time() < tic + SETUP_TIMEOUT:
        time.sleep(0.1)

    if kernel.poll() is not None:
        # Kernel died during startup: surface its stderr.
        o, e = kernel.communicate()
        e = py3compat.cast_unicode(e)
        raise IOError("Kernel failed to start:\n%s" % e)

    if not os.path.exists(connection_file):
        if kernel.poll() is None:
            kernel.terminate()
        raise IOError("Connection file %r never arrived" % connection_file)

    client = BlockingKernelClient(connection_file=connection_file)
    client.load_connection_file()
    client.start_channels()
    client.wait_for_ready()
    try:
        yield client
    finally:
        # Tear down channels and reap the kernel when the caller is done.
        client.stop_channels()
        kernel.terminate()
def connect_info(self, arg_s):
    """Print information for connecting other clients to this kernel

    It will print the contents of this session's connection file, as well as
    shortcuts for local clients.

    In the simplest case, when called from the most recently launched kernel,
    secondary clients can be connected, simply with:

    $> jupyter <app> --existing
    """
    try:
        connection_file = get_connection_file()
        info = get_connection_info(unpack=False)
    except Exception as e:
        # Without a connection file there is nothing to show; warn and bail.
        warnings.warn("Could not get connection info: %r" % e)
        return

    # if it's in the default dir, truncate to basename
    if jupyter_runtime_dir() == os.path.dirname(connection_file):
        connection_file = os.path.basename(connection_file)

    print (info + '\n')
    print ("Paste the above JSON into a file, and connect with:\n"
        " $> jupyter <app> --existing <file>\n"
        "or, if you are local, you can connect with just:\n"
        " $> jupyter <app> --existing {0}\n"
        "or even just:\n"
        " $> jupyter <app> --existing\n"
        "if this is the most recent Jupyter kernel you have started.".format(
            connection_file
        )
    )
def connect_qtconsole(connection_info, name=None, extra_args=None):
    """Open a QtConsole connected to a worker who has the given future

    - identify worker with who_has
    - start IPython kernel on the worker
    - start qtconsole connected to the kernel
    """
    runtime_dir = jupyter_runtime_dir()
    if name is None:
        name = uuid4().hex
    path = os.path.join(runtime_dir, name + '.json')

    write_connection_file(path, **connection_info)
    cmd = ['jupyter', 'qtconsole', '--existing', path]
    if extra_args:
        cmd.extend(extra_args)
    Popen(cmd)

    # Register cleanup via decorator syntax rather than an explicit call.
    @atexit.register
    def _cleanup_connection_file():
        """Cleanup our connection file when we exit."""
        try:
            os.remove(path)
        except OSError:
            pass
def select_connection_file(self):
    """Prompt for a kernel connection file and fill the text field with it."""
    selected = getopenfilename(
        self,
        _('Select kernel connection file'),
        jupyter_runtime_dir(),
        '*.json;;*.*',
    )[0]
    self.cf.setText(selected)
def _connection_dir_default(self):
    """Default the connection dir to the Jupyter runtime directory."""
    return jupyter_runtime_dir()
def test_runtime_dir():
    """`--runtime-dir` output is exactly jupyter_runtime_dir()."""
    expected = jupyter_runtime_dir()
    assert_output('--runtime-dir', expected)
def _connection_dir_default(self):
    """Default connection dir: the Jupyter runtime dir, created mode 0o700."""
    runtime_dir = jupyter_runtime_dir()
    ensure_dir_exists(runtime_dir, 0o700)
    return runtime_dir
def test_runtime_dir_env():
    """JUPYTER_RUNTIME_DIR overrides the computed runtime dir verbatim."""
    rtd_env = 'runtime-dir'
    with patch.dict('os.environ', {'JUPYTER_RUNTIME_DIR': rtd_env}):
        assert jupyter_runtime_dir() == rtd_env