def install(kernel_spec_manager=None, user=False, kernel_name=None):
    """Install the IPython kernelspec for Jupyter.

    Parameters
    ----------
    kernel_spec_manager : KernelSpecManager, optional
        Manager used to perform the installation; a default instance is
        created when omitted.
    user : bool, default False
        Do a user-only install instead of a system-wide one.
    kernel_name : str, optional
        Name to register the kernelspec under; useful for keeping multiple
        IPython kernels for different environments.  Defaults to KERNEL_NAME.

    Returns
    -------
    The path where the kernelspec was installed.
    """
    manager = KernelSpecManager() if kernel_spec_manager is None else kernel_spec_manager
    name = KERNEL_NAME if kernel_name is None else kernel_name

    # Write the spec to a scratch directory, install it, then remove the
    # scratch copy.
    spec_dir = write_kernel_spec()
    install_path = manager.install_kernel_spec(spec_dir, kernel_name=name, user=user)
    shutil.rmtree(spec_dir)
    return install_path
def install_kernelspec(self):
    """Write the kernel spec and register it with Jupyter.

    Tries first with ``user=self.user``; if that installation fails, a
    second attempt is made with the flag inverted.  Failures are logged
    rather than raised.
    """
    from ipykernel.kernelspec import write_kernel_spec
    from jupyter_client.kernelspec import KernelSpecManager

    spec_manager = KernelSpecManager()
    log.info('Writing kernel spec')
    spec_path = write_kernel_spec(overrides=kernel_json)
    log.info('Installing kernel spec ' + spec_path)

    # attempt 0: as requested; attempt 1: with the user flag flipped
    for attempt, as_user in enumerate((self.user, not self.user)):
        if attempt:
            log.info('Retry install kernel spec')
        try:
            spec_manager.install_kernel_spec(
                spec_path, kernel_name=kernel_json['name'], user=as_user)
        except Exception as e:
            log.error(str(e.args))
            log.error('Failed to install kernel spec')
        else:
            return
def install(kernel_spec_manager=None, user=False, kernel_name=KERNEL_NAME,
            display_name=None, prefix=None, profile=None):
    """Install the IPython kernelspec for Jupyter.

    Parameters
    ----------
    kernel_spec_manager : KernelSpecManager, optional
        Manager used for installation; a default instance is created when
        omitted.
    user : bool, default False
        Do a user-only install instead of a system-wide one.
    kernel_name : str, optional
        Name for the kernelspec; needed when keeping multiple IPython
        kernels for different environments.
    display_name : str, optional
        Display name for the kernelspec.
    prefix : str, optional
        Install prefix, for non-default locations such as a
        conda/virtual-env.
    profile : str, optional
        Custom profile to be loaded by the kernel.

    Returns
    -------
    The path where the kernelspec was installed.
    """
    if kernel_spec_manager is None:
        kernel_spec_manager = KernelSpecManager()

    # A custom kernel name with no explicit display name: reuse the kernel
    # name for display.
    if kernel_name != KERNEL_NAME and display_name is None:
        display_name = kernel_name

    overrides = {}
    if display_name:
        overrides["display_name"] = display_name

    extra_arguments = None
    if profile:
        extra_arguments = ["--profile", profile]
        if not display_name:
            # fold the profile into the default display name
            overrides["display_name"] = 'Python %i [profile=%s]' % (
                sys.version_info[0], profile)

    spec_dir = write_kernel_spec(overrides=overrides,
                                 extra_arguments=extra_arguments)
    destination = kernel_spec_manager.install_kernel_spec(
        spec_dir, kernel_name=kernel_name, user=user, prefix=prefix)
    # drop the temporary spec directory
    shutil.rmtree(spec_dir)
    return destination
def run(self):
    """Perform the regular install, then register the kernelspec."""
    # Regular installation first.
    install.run(self)

    # Now write and register the kernelspec.
    from jupyter_client.kernelspec import KernelSpecManager
    from tempfile import TemporaryDirectory

    spec_manager = KernelSpecManager()
    with TemporaryDirectory() as staging:
        # The directory starts off as 700, not user readable.
        os.chmod(staging, 0o755)
        with open(os.path.join(staging, 'kernel.json'), 'w') as f:
            json.dump(kernel_json, f, sort_keys=True)
        # TODO: Copy resources once they're specified
        spec_manager.install_kernel_spec(staging, 'postgres', user=self.user)
def __init__(self, settings, document_language):
    """Initializes available kernel names."""
    self.__settings = settings
    self.__doc_lang = document_language

    # Map each kernelspec's display name back to its registered name.
    ksm = KernelSpecManager()
    self.__available_kernel_names_mappings = {
        ksm.get_kernel_spec(name).display_name: name
        for name in ksm.find_kernel_specs()
    }

    self.__manager = MultiKernelManager()
    self.__client_managers = {}
def get_notebook_kernel(kernel_name, kernel_spec_manager=None):
    """
    return a `KernelSpec <https://ipython.org/ipython-doc/dev/api/generated/IPython.kernel.kernelspec.html>`_

    @param      kernel_spec_manager     see `KernelSpecManager <http://jupyter-client.readthedocs.org/en/latest/api/kernelspec.html#jupyter_client.kernelspec.KernelSpecManager>`_
                                        A KernelSpecManager to use for installation.
                                        If none provided, a default instance will be created.
    @param      kernel_name             kernel name
    @return                             KernelSpec

    The function only works with Jupyter>=4.0.

    .. versionadded:: 1.3
    """
    manager = kernel_spec_manager
    if manager is None:
        # jupyter_client is only needed when no manager was supplied
        from jupyter_client.kernelspec import KernelSpecManager
        manager = KernelSpecManager()
    return manager.get_kernel_spec(kernel_name)
def find_notebook_kernel(kernel_spec_manager=None):
    """
    return a dict mapping kernel names to resource directories

    @param      kernel_spec_manager     see `KernelSpecManager <http://jupyter-client.readthedocs.org/en/latest/api/kernelspec.html#jupyter_client.kernelspec.KernelSpecManager>`_
                                        A KernelSpecManager to use for installation.
                                        If none provided, a default instance will be created.
    @return                             dict

    The list of installed kernels is described at
    `Making kernel for Jupyter <http://jupyter-client.readthedocs.org/en/latest/kernels.html#kernelspecs>`_.
    The function only works with Jupyter>=4.0.

    .. versionadded:: 1.3
    """
    manager = kernel_spec_manager
    if manager is None:
        # jupyter_client is only needed when no manager was supplied
        from jupyter_client.kernelspec import KernelSpecManager
        manager = KernelSpecManager()
    return manager.find_kernel_specs()
def install(kernel_spec_manager=None, user=False, kernel_name=None,
            display_name=None, prefix=None):
    """Install the IPython kernelspec for Jupyter.

    Parameters
    ----------
    kernel_spec_manager : KernelSpecManager, optional
        Manager used for installation; a default instance is created when
        omitted.
    user : bool, default False
        Do a user-only install instead of a system-wide one.
    kernel_name : str, optional
        Name for the kernelspec; needed when keeping multiple IPython
        kernels for different environments.  Defaults to KERNEL_NAME.
    display_name : str, optional
        Display name for the kernelspec; defaults to ``kernel_name`` when a
        custom kernel name is given.
    prefix : str, optional
        Install prefix, for non-default locations such as a
        conda/virtual-env.

    Returns
    -------
    The path where the kernelspec was installed.
    """
    if kernel_spec_manager is None:
        kernel_spec_manager = KernelSpecManager()

    if kernel_name is None:
        kernel_name = KERNEL_NAME
    elif display_name is None:
        # custom kernel name without a display name: reuse the kernel name
        display_name = kernel_name

    overrides = {"display_name": display_name} if display_name else None

    spec_dir = write_kernel_spec(overrides=overrides)
    destination = kernel_spec_manager.install_kernel_spec(
        spec_dir, kernel_name=kernel_name, user=user, prefix=prefix)
    # remove the temporary spec directory
    shutil.rmtree(spec_dir)
    return destination
def install(kernel_spec_manager=None, user=False):
    """Install the IPython kernelspec for Jupyter.

    Parameters
    ----------
    kernel_spec_manager : KernelSpecManager, optional
        Manager used for installation; a default instance is created when
        omitted.
    user : bool, default False
        Do a user-only install instead of a system-wide one.
    """
    manager = KernelSpecManager() if kernel_spec_manager is None else kernel_spec_manager

    # Write the spec to a scratch directory, install it (replacing any
    # existing spec of the same name), then remove the scratch copy.
    spec_dir = write_kernel_spec()
    manager.install_kernel_spec(spec_dir, kernel_name=KERNEL_NAME,
                                user=user, replace=True)
    shutil.rmtree(spec_dir)
def setup(self):
    """Prepare an isolated environment for the test run.

    Creates throwaway IPython/working directories, points the relevant
    environment variables at them, blocks access to a PATH entry, and
    applies the user's command-line options.
    """
    # Isolated IPython configuration directory for the test run.
    ipydir = TemporaryDirectory()
    self.dirs.append(ipydir)
    self.env['IPYTHONDIR'] = ipydir.name
    # FIXME: install IPython kernel in temporary IPython dir
    # remove after big split
    try:
        from jupyter_client.kernelspec import KernelSpecManager
    except ImportError:
        # jupyter_client not available; skip kernelspec installation
        pass
    else:
        ksm = KernelSpecManager(ipython_dir=ipydir.name)
        ksm.install_native_kernel_spec(user=True)

    # Fresh working directory, also used for matplotlib config and TMPDIR.
    self.workingdir = workingdir = TemporaryDirectory()
    self.dirs.append(workingdir)
    self.env['IPTEST_WORKING_DIR'] = workingdir.name
    # This means we won't get odd effects from our own matplotlib config
    self.env['MPLCONFIGDIR'] = workingdir.name
    # For security reasons (http://bugs.python.org/issue16202), use
    # a temporary directory to which other users have no access.
    self.env['TMPDIR'] = workingdir.name

    # Add a non-accessible directory to PATH (see gh-7053)
    noaccess = os.path.join(self.workingdir.name, "_no_access_")
    self.noaccess = noaccess
    # mode 0: nobody (including us) can read or enter this directory
    os.mkdir(noaccess, 0)

    PATH = os.environ.get('PATH', '')
    if PATH:
        PATH = noaccess + os.pathsep + PATH
    else:
        PATH = noaccess
    self.env['PATH'] = PATH

    # From options:
    if self.options.xunit:
        self.add_xunit()
    if self.options.coverage:
        self.add_coverage()
    self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
    self.cmd.extend(self.options.extra_args)
def test_kernelspec_installed(self):
    """Test that the jupyter kernelspec installations work, and that all
    the examples can be installed as expected."""
    manager = KernelSpecManager()
    kernel_names = ["basic", "calculator", "stdin"]

    # Remove all kernelspecs first.
    subprocess.check_call(
        ["jupyter", "kernelspec", "remove", "-f"] + kernel_names)

    # Check that kernelspec installation works, the kernel exists after
    # it has been installed, and the executable file is found.  The same
    # install/lookup/assert sequence applies to every example kernel, so
    # run it in a loop instead of repeating it three times.
    for name in kernel_names:
        subprocess.check_call(["kernel-{}".format(name), "install"])
        kernelspec = manager.get_kernel_spec(name)
        self.assertTrue(os.path.isfile(kernelspec.argv[0]))
def install_kernel():
    """Install the Qudi Jupyter kernelspec for the current user.

    Builds a temporary kernelspec directory (kernel.json plus the logo
    resource), registers it via KernelSpecManager, and always removes the
    temporary directory afterwards.  Exits with status 1 on a permission
    error; other OSErrors are logged-and-ignored as before.
    """
    from jupyter_client.kernelspec import KernelSpecManager
    logging.info('Installing Qudi kernel.')
    # Initialise before the try block so the finally clause cannot hit a
    # NameError when mkdtemp itself fails.
    tempdir = None
    try:
        # prepare temporary kernelspec folder
        tempdir = tempfile.mkdtemp(suffix='_kernels')
        path = os.path.join(tempdir, 'qudi')
        resourcepath = os.path.join(path, 'resources')
        kernelpath = os.path.abspath(__file__)
        os.mkdir(path)
        os.mkdir(resourcepath)

        kernel_dict = {
            'argv': [sys.executable, kernelpath, '{connection_file}'],
            'display_name': 'Qudi',
            'language': 'python',
        }
        # write the kernelspec file
        with open(os.path.join(path, 'kernel.json'), 'w') as f:
            json.dump(kernel_dict, f, indent=1)

        # copy logo -- a single copy suffices (the previous code copied the
        # same source to the same destination twice)
        logopath = os.path.abspath(
            os.path.join(os.path.dirname(kernelpath), '..', 'artwork', 'logo'))
        shutil.copy(os.path.join(logopath, 'logo-qudi-32x32.png'),
                    os.path.join(resourcepath, 'logo-32x32.png'))

        # install kernelspec folder
        kernel_spec_manager = KernelSpecManager()
        dest = kernel_spec_manager.install_kernel_spec(
            path, kernel_name='qudi', user=True)
        logging.info('Installed kernelspec qudi in {}'.format(dest))
    except OSError as e:
        if e.errno == errno.EACCES:
            print(e, file=sys.stderr)
            sys.exit(1)
    finally:
        # clean up the staging directory, even on failure
        if tempdir is not None and os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
def get_kernel_specs(self):
    """Return the mapping of all installed Jupyter kernel specs."""
    spec_manager = KernelSpecManager()
    return spec_manager.get_all_specs()
# Ad-hoc smoke script: start a python3 kernel, execute a statement
# interactively while capturing output, then exercise the raw
# execute/get_shell_msg path.
import jupyter_client
import time
from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, NATIVE_KERNEL_NAME
from IPython.utils.capture import capture_output
from ipython_genutils.tempdir import TemporaryDirectory

# seconds to wait for the interactive execution below
TIMEOUT = 30

td = TemporaryDirectory()
km, kc = jupyter_client.manager.start_new_kernel(kernel_name='python3')
# raises NoSuchKernel if the python3 kernelspec is not installed
KernelSpecManager().get_kernel_spec('python3')

with capture_output() as io:
    reply = kc.execute_interactive("print('hello')", timeout=TIMEOUT)
print(reply)
print(io.stdout)
# assert 'hello' in io.stdout
# assert reply['content']['status'] == 'ok'

# Fire-and-forget execution on the shell channel.
kc.execute("print('hello world')")
kc.get_shell_msg()
time.sleep(10)
# Smoke check: verify that the scijava kernelspec is registered.
from jupyter_client.kernelspec import KernelSpecManager

spec_manager = KernelSpecManager()
installed = spec_manager.get_all_specs().keys()
assert "scijava" in installed
print("Scijava kernel correctly installed.")
def get(self):
    """HTTP GET handler: respond with all installed kernel specs as JSON."""
    specs = KernelSpecManager().get_all_specs()
    self.write(json.dumps(specs))
class TestExecute(PreprocessorTestsBase):
    """Contains test functions for execute.py"""

    # Show full diffs on assertion failures.
    maxDiff = None

    def test_constructor(self):
        """Can a ExecutePreprocessor be constructed?"""
        build_preprocessor({})

    def test_populate_language_info(self):
        # Preprocessing should fill in metadata.language_info.
        preprocessor = build_preprocessor(opts=dict(kernel_name="python"))
        nb = nbformat.v4.new_notebook()  # Certainly has no language_info.
        nb, _ = preprocessor.preprocess(nb, resources={})
        assert 'language_info' in nb.metadata

    def test_empty_path(self):
        """Can the kernel be started when the path is empty?"""
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = ''
        input_nb, output_nb = run_notebook(filename, {}, res)
        assert_notebooks_equal(input_nb, output_nb)

    @pytest.mark.xfail("python3" not in KernelSpecManager().find_kernel_specs(),
                       reason="requires a python3 kernelspec")
    def test_empty_kernel_name(self):
        """Can kernel in nb metadata be found when an empty string is passed?

        Note: this pattern should be discouraged in practice.
        Passing in no kernel_name to ExecutePreprocessor is recommended instead.
        """
        filename = os.path.join(current_dir, 'files', 'UnicodePy3.ipynb')
        res = self.build_resources()
        input_nb, output_nb = run_notebook(filename, {"kernel_name": ""}, res)
        assert_notebooks_equal(input_nb, output_nb)
        # None (as opposed to "") is rejected outright.
        with pytest.raises(TraitError):
            input_nb, output_nb = run_notebook(filename, {"kernel_name": None}, res)

    def test_disable_stdin(self):
        """Test disabling standard input"""
        filename = os.path.join(current_dir, 'files', 'Disable Stdin.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        input_nb, output_nb = run_notebook(filename, dict(allow_errors=True), res)

        # We need to special-case this particular notebook, because the
        # traceback contains machine-specific stuff like where IPython
        # is installed. It is sufficient here to just check that an error
        # was thrown, and that it was a StdinNotImplementedError
        self.assertEqual(len(output_nb['cells']), 1)
        self.assertEqual(len(output_nb['cells'][0]['outputs']), 1)
        output = output_nb['cells'][0]['outputs'][0]
        self.assertEqual(output['output_type'], 'error')
        self.assertEqual(output['ename'], 'StdinNotImplementedError')
        self.assertEqual(output['evalue'],
                         'raw_input was called, but this frontend does not support input requests.')

    def test_timeout(self):
        """Check that an error is raised when a computation times out"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(TimeoutError):
            run_notebook(filename, dict(timeout=1), res)

    def test_timeout_func(self):
        """Check that an error is raised when a computation times out"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)

        def timeout_func(source):
            return 10

        with pytest.raises(TimeoutError):
            run_notebook(filename, dict(timeout_func=timeout_func), res)

    def test_kernel_death(self):
        """Check that an error is raised when the kernel is_alive is false"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        with io.open(filename, 'r') as f:
            input_nb = nbformat.read(f, 4)
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        preprocessor = build_preprocessor({"timeout": 5})

        # The notebook intentionally hangs; a timeout here is expected.
        try:
            input_nb, output_nb = preprocessor(input_nb, {})
        except TimeoutError:
            pass
        km, kc = preprocessor.start_new_kernel()

        # Force is_alive to report a dead kernel and check the error path.
        with patch.object(km, "is_alive") as alive_mock:
            alive_mock.return_value = False
            with pytest.raises(DeadKernelError):
                input_nb, output_nb = preprocessor.preprocess(input_nb, {}, km=km)

    def test_allow_errors(self):
        """
        Check that conversion halts if ``allow_errors`` is False.
        """
        filename = os.path.join(current_dir, 'files', 'Skip Exceptions.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(CellExecutionError) as exc:
            run_notebook(filename, dict(allow_errors=False), res)
        self.assertIsInstance(str(exc.value), str)
        # The offending source line must survive into the exception text.
        if sys.version_info >= (3, 0):
            assert u"# üñîçø∂é" in str(exc.value)
        else:
            assert u"# üñîçø∂é".encode('utf8', 'replace') in str(exc.value)

    def test_force_raise_errors(self):
        """
        Check that conversion halts if the ``force_raise_errors`` traitlet on
        ExecutePreprocessor is set to True.
        """
        filename = os.path.join(current_dir, 'files',
                                'Skip Exceptions with Cell Tags.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(CellExecutionError) as exc:
            run_notebook(filename, dict(force_raise_errors=True), res)
        self.assertIsInstance(str(exc.value), str)
        if sys.version_info >= (3, 0):
            assert u"# üñîçø∂é" in str(exc.value)
        else:
            assert u"# üñîçø∂é".encode('utf8', 'replace') in str(exc.value)

    def test_custom_kernel_manager(self):
        # Verify that a user-supplied kernel_manager_class is actually used.
        from .fake_kernelmanager import FakeCustomKernelManager
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)
        preprocessor = build_preprocessor({
            'kernel_manager_class': FakeCustomKernelManager
        })
        cleaned_input_nb = copy.deepcopy(input_nb)
        for cell in cleaned_input_nb.cells:
            if 'execution_count' in cell:
                del cell['execution_count']
            cell['outputs'] = []

        # Override terminal size to standardise traceback format
        with modified_env({'COLUMNS': '80', 'LINES': '24'}):
            output_nb, _ = preprocessor(cleaned_input_nb, self.build_resources())

        expected = FakeCustomKernelManager.expected_methods.items()
        for method, call_count in expected:
            self.assertNotEqual(call_count, 0, '{} was called'.format(method))

    def test_process_message_wrapper(self):
        outputs = []

        class WrappedPreProc(ExecutePreprocessor):
            # Record every non-empty result process_message produces.
            def process_message(self, msg, cell, cell_index):
                result = super(WrappedPreProc, self).process_message(msg, cell, cell_index)
                if result:
                    outputs.append(result)
                return result

        current_dir = os.path.dirname(__file__)
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)
        original = copy.deepcopy(input_nb)
        wpp = WrappedPreProc()
        executed = wpp.preprocess(input_nb, {})[0]
        assert outputs == [
            {'name': 'stdout', 'output_type': 'stream', 'text': 'Hello World\n'}
        ]
        assert_notebooks_equal(original, executed)

    def test_execute_function(self):
        # Test the executenb() convenience API
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)
        original = copy.deepcopy(input_nb)
        executed = executenb(original, os.path.dirname(filename))
        assert_notebooks_equal(original, executed)

    def test_widgets(self):
        """Runs a test notebook with widgets and checks the widget state is saved."""
        input_file = os.path.join(current_dir, 'files', 'JupyterWidgets.ipynb')
        opts = dict(kernel_name="python")
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(input_file)
        input_nb, output_nb = run_notebook(input_file, opts, res)

        # Collect every output data dict, then the widget-view model ids.
        output_data = [
            output.get('data', {})
            for cell in output_nb['cells']
            for output in cell['outputs']
        ]
        model_ids = [
            data['application/vnd.jupyter.widget-view+json']['model_id']
            for data in output_data
            if 'application/vnd.jupyter.widget-view+json' in data
        ]
        wdata = output_nb['metadata']['widgets'] \
            ['application/vnd.jupyter.widget-state+json']
        # Each referenced model must be present in the saved widget state.
        for k in model_ids:
            d = wdata['state'][k]
            assert 'model_name' in d
            assert 'model_module' in d
            assert 'state' in d
        assert 'version_major' in wdata
        assert 'version_minor' in wdata
def get_kernelspec(name):
    """Return the kernelspec *name* as a dict.

    The kernel's registered name is recorded under the ``'name'`` key and
    the ``'argv'`` launch command is removed from the result.
    """
    spec = KernelSpecManager().get_kernel_spec(name).to_dict()
    spec['name'] = name
    del spec['argv']
    return spec
import os
import unittest
import pytest
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors.execute import CellExecutionError
from popmon import resources
from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel

kernel_name = 'python3'

# check if jupyter python3 kernel can be opened. if kernel not found, skip unit tests below.
try:
    km = KernelSpecManager()
    km.get_kernel_spec(kernel_name)
    kernel_found = True
except NoSuchKernel:
    kernel_found = False


class NotebookTest(unittest.TestCase):
    """Unit test notebook"""

    def run_notebook(self, notebook):
        """ Test notebook """
        # load notebook
        with open(notebook) as f:
            nb = nbformat.read(f, as_version=4)
        # NOTE(review): the method appears truncated in this chunk -- the
        # loaded notebook is never executed in the visible code; confirm
        # against the full source.
def uninstall_kernel():
    """
    Uninstall the kernel.
    """
    spec_manager = KernelSpecManager()
    spec_manager.remove_kernel_spec(KERNEL_NAME)
def init_kernel_manager(self):
    """Create the kernel and kernelspec managers and reset the registry."""
    # bookkeeping for running kernels
    self._kernels = {}
    self._ksm = KernelSpecManager(log=self.log, parent=self)
    self._km = MultiKernelManager(log=self.log, parent=self)
def _jupyter_kernel(
    user=False,
    prefix=None,
    root=None,
):
    """Generate xonsh kernel for jupyter.

    Parameters
    ----------
    user : -u, --user
        Install kernel spec in user config directory.
    prefix : -p, --prefix
        Installation prefix for bin, lib, etc.
    root : -r, --root
        Install relative to this alternate root directory.

    Returns
    -------
    int
        0 on success; raises ImportError when Jupyter is unavailable.
    """
    try:
        from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel
    except ImportError as e:
        raise ImportError("Jupyter not found in current Python environment") from e

    ksm = KernelSpecManager()
    prefix = prefix or sys.prefix
    # kernel.json contents for launching the xonsh kernel module
    spec = {
        "argv": [
            sys.executable,
            "-m",
            "xonsh.jupyter_kernel",
            "-f",
            "{connection_file}",
        ],
        "display_name": "Xonsh",
        "language": "xonsh",
        "codemirror_mode": "shell",
    }
    if root and prefix:
        # os.path.join isn't used since prefix is probably absolute
        prefix = root + prefix
    # Remove a previously installed xonsh kernel if it lives outside the
    # target prefix, so the new installation is authoritative.
    try:
        old_jup_kernel = ksm.get_kernel_spec(XONSH_JUPYTER_KERNEL)
        if not old_jup_kernel.resource_dir.startswith(prefix):
            print(
                "Removing existing Jupyter kernel found at {}".format(
                    old_jup_kernel.resource_dir
                )
            )
            ksm.remove_kernel_spec(XONSH_JUPYTER_KERNEL)
    except NoSuchKernel:
        pass
    if sys.platform == "win32":
        # Ensure that conda-build detects the hard coded prefix
        spec["argv"][0] = spec["argv"][0].replace(os.sep, os.altsep)
        prefix = prefix.replace(os.sep, os.altsep)
    # Stage kernel.json in a temp dir and hand it to jupyter_client.
    with tempfile.TemporaryDirectory() as d:
        os.chmod(d, 0o755)  # Starts off as 700, not user readable
        with open(os.path.join(d, "kernel.json"), "w") as f:
            json.dump(spec, f, sort_keys=True)
        print("Installing Jupyter kernel spec:")
        print(f" root: {root!r}")
        if user:
            print(f" as user: {user}")
        elif root and prefix:
            print(f" combined prefix {prefix!r}")
        else:
            print(f" prefix: {prefix!r}")
        ksm.install_kernel_spec(
            d, XONSH_JUPYTER_KERNEL, user=user, prefix=(None if user else prefix)
        )
    return 0
def _info(
    to_json=False,
) -> str:
    """Displays configuration information

    Parameters
    ----------
    to_json : -j, --json
        reports results as json
    """
    env = XSH.env
    # Build an ordered list of (label, value) pairs, then hand it to a
    # formatter at the end.
    data: tp.List[tp.Any] = [("xonsh", XONSH_VERSION)]
    hash_, date_ = githash()
    if hash_:
        data.append(("Git SHA", hash_))
        data.append(("Commit Date", date_))
    data.extend(
        [
            ("Python", "{}.{}.{}".format(*PYTHON_VERSION_INFO)),
            ("PLY", ply.__version__),
            ("have readline", is_readline_available()),
            ("prompt toolkit", ptk_version() or None),
            ("shell type", env.get("SHELL_TYPE")),
            ("history backend", env.get("XONSH_HISTORY_BACKEND")),
            ("pygments", pygments_version()),
            ("on posix", bool(ON_POSIX)),
            ("on linux", bool(ON_LINUX)),
        ]
    )
    if ON_LINUX:
        data.append(("distro", linux_distro()))
        data.append(("on wsl", bool(ON_WSL)))
        if ON_WSL:
            data.append(("wsl version", 1 if ON_WSL1 else 2))
    data.extend(
        [
            ("on darwin", bool(ON_DARWIN)),
            ("on windows", bool(ON_WINDOWS)),
            ("on cygwin", bool(ON_CYGWIN)),
            ("on msys2", bool(ON_MSYS)),
            ("is superuser", is_superuser()),
            ("default encoding", DEFAULT_ENCODING),
            ("xonsh encoding", env.get("XONSH_ENCODING")),
            ("encoding errors", env.get("XONSH_ENCODING_ERRORS")),
        ]
    )
    # Jupyter information is best-effort: any failure just reports "not
    # available" rather than breaking the info display.
    jup_ksm = jup_kernel = None
    try:
        from jupyter_client.kernelspec import KernelSpecManager

        jup_ksm = KernelSpecManager()
        jup_kernel = jup_ksm.find_kernel_specs().get(XONSH_JUPYTER_KERNEL)
    except Exception:
        pass
    data.extend([("on jupyter", jup_ksm is not None), ("jupyter kernel", jup_kernel)])
    data.extend([("xontrib", xontribs_loaded())])
    data.extend([("RC file", XSH.rc_files)])
    formatter = _xonfig_format_json if to_json else _xonfig_format_human
    s = formatter(data)
    return s
# The great "support IPython 2, 3, 4" strat begins
if not TEST:
    try:
        import jupyter
    except ImportError:
        jupyter_era = False
    else:
        jupyter_era = True

    if jupyter_era:
        # Jupyter / IPython 4.x
        from jupyter_client import KernelManager
        from jupyter_client.kernelspec import KernelSpecManager
        from jupyter_client import MultiKernelManager
    else:
        from IPython.kernel import KernelManager
        from IPython.kernel.kernelspec import KernelSpecManager
        from IPython.kernel.multikernelmanager import MultiKernelManager

    # Whichever package supplied the classes, they are bound to the same
    # names, so instantiate them once here instead of duplicating the two
    # constructor calls in each branch.
    kernelSpecManager = KernelSpecManager()
    multiKernelManager = MultiKernelManager()
# End of the great "support IPython 2, 3, 4" strat


def _debug_write(out):
    """Write *out* (plus a newline) to the real stdout when DEBUG is set."""
    if DEBUG:
        sys.__stdout__.write(out)
        sys.__stdout__.write("\n")
def _jupyter_kernel(args):
    """Make xonsh available as a Jupyter kernel.

    Reads ``root``, ``prefix`` and ``user`` from *args*, writes a
    kernel.json into a temporary directory, and installs it with
    jupyter_client.  Returns 0 on success; raises ImportError when Jupyter
    is unavailable.
    """
    try:
        from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel
    except ImportError as e:
        raise ImportError("Jupyter not found in current Python environment") from e

    ksm = KernelSpecManager()
    root = args.root
    prefix = args.prefix if args.prefix else sys.prefix
    user = args.user
    # kernel.json contents for launching the xonsh kernel module
    spec = {
        "argv": [
            sys.executable,
            "-m",
            "xonsh.jupyter_kernel",
            "-f",
            "{connection_file}",
        ],
        "display_name": "Xonsh",
        "language": "xonsh",
        "codemirror_mode": "shell",
    }
    if root and prefix:
        # os.path.join isn't used since prefix is probably absolute
        prefix = root + prefix
    # Remove a previously installed xonsh kernel if it lives outside the
    # target prefix, so the new installation is authoritative.
    try:
        old_jup_kernel = ksm.get_kernel_spec(XONSH_JUPYTER_KERNEL)
        if not old_jup_kernel.resource_dir.startswith(prefix):
            print(
                "Removing existing Jupyter kernel found at {0}".format(
                    old_jup_kernel.resource_dir
                )
            )
            ksm.remove_kernel_spec(XONSH_JUPYTER_KERNEL)
    except NoSuchKernel:
        pass
    if sys.platform == "win32":
        # Ensure that conda-build detects the hard coded prefix
        spec["argv"][0] = spec["argv"][0].replace(os.sep, os.altsep)
        prefix = prefix.replace(os.sep, os.altsep)
    # Stage kernel.json in a temp dir and hand it to jupyter_client.
    with tempfile.TemporaryDirectory() as d:
        os.chmod(d, 0o755)  # Starts off as 700, not user readable
        with open(os.path.join(d, "kernel.json"), "w") as f:
            json.dump(spec, f, sort_keys=True)
        print("Installing Jupyter kernel spec:")
        print(" root: {0!r}".format(root))
        if user:
            print(" as user: {0}".format(user))
        elif root and prefix:
            print(" combined prefix {0!r}".format(prefix))
        else:
            print(" prefix: {0!r}".format(prefix))
        ksm.install_kernel_spec(
            d, XONSH_JUPYTER_KERNEL, user=user, prefix=(None if user else prefix)
        )
    return 0
class TestExecute(ExecuteTestBase):
    """Contains test functions for execute.py"""

    # Let unittest show full diffs on assertion failures.
    maxDiff = None

    @staticmethod
    def normalize_output(output):
        """
        Normalizes outputs for comparison.

        Strips metadata, replaces hex memory addresses and IPython input
        markers with stable placeholders, normalizes base64 payloads, and
        strips ANSI escapes from tracebacks so notebook outputs can be
        compared across runs/machines.
        """
        output = dict(output)
        if 'metadata' in output:
            del output['metadata']
        if 'text' in output:
            output['text'] = re.sub(addr_pat, '<HEXADDR>', output['text'])
        if 'text/plain' in output.get('data', {}):
            output['data']['text/plain'] = \
                re.sub(addr_pat, '<HEXADDR>', output['data']['text/plain'])
        if 'application/vnd.jupyter.widget-view+json' in output.get('data', {}):
            # Widget model ids are random per run; mask them.
            output['data']['application/vnd.jupyter.widget-view+json'] \
                ['model_id'] = '<MODEL_ID>'
        for key, value in output.get('data', {}).items():
            if isinstance(value, string_types):
                if sys.version_info.major == 2:
                    # Py2 reprs prefix unicode literals with u'' — drop it.
                    value = value.replace('u\'', '\'')
                output['data'][key] = _normalize_base64(value)
        if 'traceback' in output:
            tb = [
                re.sub(ipython_input_pat, '<IPY-INPUT>', strip_ansi(line))
                for line in output['traceback']
            ]
            output['traceback'] = tb
        return output

    def assert_notebooks_equal(self, expected, actual):
        # Compare two notebooks cell-by-cell on normalized outputs and
        # execution counts only (other cell fields are ignored).
        expected_cells = expected['cells']
        actual_cells = actual['cells']
        assert len(expected_cells) == len(actual_cells)

        for expected_cell, actual_cell in zip(expected_cells, actual_cells):
            expected_outputs = expected_cell.get('outputs', [])
            actual_outputs = actual_cell.get('outputs', [])
            normalized_expected_outputs = list(
                map(self.normalize_output, expected_outputs))
            normalized_actual_outputs = list(
                map(self.normalize_output, actual_outputs))
            assert normalized_expected_outputs == normalized_actual_outputs

            expected_execution_count = expected_cell.get('execution_count', None)
            actual_execution_count = actual_cell.get('execution_count', None)
            assert expected_execution_count == actual_execution_count

    def test_constructor(self):
        """Can a ExecutePreprocessor be constructed?"""
        self.build_preprocessor({})

    def run_notebook(self, filename, opts, resources):
        """Loads and runs a notebook, returning both the version prior to
        running it and the version after running it.
        """
        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)

        preprocessor = self.build_preprocessor(opts)
        # Strip execution counts/outputs so the run starts from a clean slate.
        cleaned_input_nb = copy.deepcopy(input_nb)
        for cell in cleaned_input_nb.cells:
            if 'execution_count' in cell:
                del cell['execution_count']
            cell['outputs'] = []

        # Override terminal size to standardise traceback format
        with modified_env({'COLUMNS': '80', 'LINES': '24'}):
            output_nb, _ = preprocessor(cleaned_input_nb, resources)

        return input_nb, output_nb

    def test_run_notebooks(self):
        """Runs a series of test notebooks and compares them to their actual output"""
        input_files = glob.glob(os.path.join(current_dir, 'files', '*.ipynb'))
        shared_opts = dict(kernel_name="python")
        for filename in input_files:
            # There is some slight differences between the output in IPython 6 and IPython 7.
            IPY_MAJOR = IPython.version_info[0]
            if os.path.basename(filename).endswith("-IPY6.ipynb"):
                print(filename, IPY_MAJOR)
                if IPY_MAJOR >= 7:
                    continue
            elif os.path.basename(filename) in (
                    "Interrupt.ipynb",
                    "Skip Exceptions with Cell Tags.ipynb",
                    "Skip Exceptions.ipynb"):
                # These fixtures encode IPython>=7 output.
                if IPY_MAJOR < 7:
                    continue
            # Special arguments for the notebooks
            if os.path.basename(filename) == "Disable Stdin.ipynb":
                # Covered separately by test_disable_stdin.
                continue
            elif os.path.basename(filename) in ("Interrupt.ipynb",
                                                "Interrupt-IPY6.ipynb"):
                opts = dict(timeout=1, interrupt_on_timeout=True,
                            allow_errors=True)
            elif os.path.basename(filename) in ("Skip Exceptions.ipynb",
                                                "Skip Exceptions-IPY6.ipynb"):
                opts = dict(allow_errors=True)
            else:
                opts = dict()
            res = self.build_resources()
            res['metadata']['path'] = os.path.dirname(filename)
            opts.update(shared_opts)
            input_nb, output_nb = self.run_notebook(filename, opts, res)
            self.assert_notebooks_equal(input_nb, output_nb)

    def test_populate_language_info(self):
        # Executing any notebook should populate metadata.language_info.
        preprocessor = self.build_preprocessor(opts=dict(kernel_name="python"))
        nb = nbformat.v4.new_notebook()  # Certainly has no language_info.
        nb, _ = preprocessor.preprocess(nb, resources={})
        assert 'language_info' in nb.metadata

    def test_empty_path(self):
        """Can the kernel be started when the path is empty?"""
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = ''
        input_nb, output_nb = self.run_notebook(filename, {}, res)
        self.assert_notebooks_equal(input_nb, output_nb)

    @pytest.mark.xfail("python3" not in KernelSpecManager().find_kernel_specs(),
                       reason="requires a python3 kernelspec")
    def test_empty_kernel_name(self):
        """Can kernel in nb metadata be found when an empty string is passed?

        Note: this pattern should be discouraged in practice.
        Passing in no kernel_name to ExecutePreprocessor is recommended instead.
        """
        filename = os.path.join(current_dir, 'files', 'UnicodePy3.ipynb')
        res = self.build_resources()
        input_nb, output_nb = self.run_notebook(filename, {"kernel_name": ""}, res)
        self.assert_notebooks_equal(input_nb, output_nb)
        # None (as opposed to "") is rejected by the traitlet.
        with pytest.raises(TraitError):
            input_nb, output_nb = self.run_notebook(filename,
                                                    {"kernel_name": None}, res)

    def test_disable_stdin(self):
        """Test disabling standard input"""
        filename = os.path.join(current_dir, 'files', 'Disable Stdin.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        input_nb, output_nb = self.run_notebook(filename,
                                                dict(allow_errors=True), res)

        # We need to special-case this particular notebook, because the
        # traceback contains machine-specific stuff like where IPython
        # is installed. It is sufficient here to just check that an error
        # was thrown, and that it was a StdinNotImplementedError
        self.assertEqual(len(output_nb['cells']), 1)
        self.assertEqual(len(output_nb['cells'][0]['outputs']), 1)
        output = output_nb['cells'][0]['outputs'][0]
        self.assertEqual(output['output_type'], 'error')
        self.assertEqual(output['ename'], 'StdinNotImplementedError')
        self.assertEqual(
            output['evalue'],
            'raw_input was called, but this frontend does not support input requests.'
        )

    def test_timeout(self):
        """Check that an error is raised when a computation times out"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(TimeoutError):
            self.run_notebook(filename, dict(timeout=1), res)

    def test_timeout_func(self):
        """Check that an error is raised when a computation times out"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)

        # Per-cell timeout callback variant of the timeout option.
        def timeout_func(source):
            return 10

        with pytest.raises(TimeoutError):
            self.run_notebook(filename, dict(timeout_func=timeout_func), res)

    def test_kernel_death(self):
        """Check that an error is raised when the kernel is_alive is false"""
        filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb')
        with io.open(filename, 'r') as f:
            input_nb = nbformat.read(f, 4)
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        preprocessor = self.build_preprocessor({"timeout": 5})

        try:
            input_nb, output_nb = preprocessor(input_nb, {})
        except TimeoutError:
            pass
        km, kc = preprocessor.start_new_kernel()

        # Simulate a dead kernel by forcing is_alive() to report False.
        with patch.object(km, "is_alive") as alive_mock:
            alive_mock.return_value = False
            with pytest.raises(DeadKernelError):
                input_nb, output_nb = preprocessor.preprocess(input_nb, {}, km=km)

    def test_allow_errors(self):
        """
        Check that conversion halts if ``allow_errors`` is False.
        """
        filename = os.path.join(current_dir, 'files', 'Skip Exceptions.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(CellExecutionError) as exc:
            self.run_notebook(filename, dict(allow_errors=False), res)
            self.assertIsInstance(str(exc.value), str)
            # The error message must carry the failing cell's (unicode) source.
            if sys.version_info >= (3, 0):
                assert u"# üñîçø∂é" in str(exc.value)
            else:
                assert u"# üñîçø∂é".encode('utf8', 'replace') in str(exc.value)

    def test_force_raise_errors(self):
        """
        Check that conversion halts if the ``force_raise_errors`` traitlet on
        ExecutePreprocessor is set to True.
        """
        filename = os.path.join(current_dir, 'files',
                                'Skip Exceptions with Cell Tags.ipynb')
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(filename)
        with pytest.raises(CellExecutionError) as exc:
            self.run_notebook(filename, dict(force_raise_errors=True), res)
            self.assertIsInstance(str(exc.value), str)
            if sys.version_info >= (3, 0):
                assert u"# üñîçø∂é" in str(exc.value)
            else:
                assert u"# üñîçø∂é".encode('utf8', 'replace') in str(exc.value)

    def test_custom_kernel_manager(self):
        # Verify a user-supplied kernel_manager_class is actually used:
        # the fake manager counts calls to its overridden methods.
        from .fake_kernelmanager import FakeCustomKernelManager

        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')

        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)

        preprocessor = self.build_preprocessor(
            {'kernel_manager_class': FakeCustomKernelManager})

        cleaned_input_nb = copy.deepcopy(input_nb)
        for cell in cleaned_input_nb.cells:
            if 'execution_count' in cell:
                del cell['execution_count']
            cell['outputs'] = []

        # Override terminal size to standardise traceback format
        with modified_env({'COLUMNS': '80', 'LINES': '24'}):
            output_nb, _ = preprocessor(cleaned_input_nb,
                                        self.build_resources())

        expected = FakeCustomKernelManager.expected_methods.items()

        for method, call_count in expected:
            self.assertNotEqual(call_count, 0, '{} was called'.format(method))

    def test_process_message_wrapper(self):
        outputs = []

        # Subclass that records every non-empty result of process_message.
        class WrappedPreProc(ExecutePreprocessor):
            def process_message(self, msg, cell, cell_index):
                result = super(WrappedPreProc, self).process_message(
                    msg, cell, cell_index)
                if result:
                    outputs.append(result)
                return result

        current_dir = os.path.dirname(__file__)
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')

        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)

        original = copy.deepcopy(input_nb)
        wpp = WrappedPreProc()
        executed = wpp.preprocess(input_nb, {})[0]
        assert outputs == [{
            'name': 'stdout',
            'output_type': 'stream',
            'text': 'Hello World\n'
        }]
        self.assert_notebooks_equal(original, executed)

    def test_execute_function(self):
        # Test the executenb() convenience API
        filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb')
        with io.open(filename) as f:
            input_nb = nbformat.read(f, 4)
        original = copy.deepcopy(input_nb)
        executed = executenb(original, os.path.dirname(filename))
        self.assert_notebooks_equal(original, executed)

    def test_widgets(self):
        """Runs a test notebook with widgets and checks the widget state is saved."""
        input_file = os.path.join(current_dir, 'files', 'JupyterWidgets.ipynb')
        opts = dict(kernel_name="python")
        res = self.build_resources()
        res['metadata']['path'] = os.path.dirname(input_file)
        input_nb, output_nb = self.run_notebook(input_file, opts, res)

        output_data = [
            output.get('data', {})
            for cell in output_nb['cells']
            for output in cell['outputs']
        ]
        model_ids = [
            data['application/vnd.jupyter.widget-view+json']['model_id']
            for data in output_data
            if 'application/vnd.jupyter.widget-view+json' in data
        ]
        wdata = output_nb['metadata']['widgets'] \
            ['application/vnd.jupyter.widget-state+json']
        # Every displayed widget view must have persisted state.
        for k in model_ids:
            d = wdata['state'][k]
            assert 'model_name' in d
            assert 'model_module' in d
            assert 'state' in d
        assert 'version_major' in wdata
        assert 'version_minor' in wdata
def install(kernel_spec_manager=None, user=False, kernel_name=KERNEL_NAME,
            display_name=None, prefix=None, profile=None, env=None):
    """Install the IPython kernelspec for Jupyter

    Parameters
    ----------
    kernel_spec_manager : KernelSpecManager [optional]
        A KernelSpecManager to use for installation.
        If none provided, a default instance will be created.
    user : bool [default: False]
        Whether to do a user-only install, or system-wide.
    kernel_name : str, optional
        Specify a name for the kernelspec.
        This is needed for having multiple IPython kernels for different environments.
    display_name : str, optional
        Specify the display name for the kernelspec
    profile : str, optional
        Specify a custom profile to be loaded by the kernel.
    prefix : str, optional
        Specify an install prefix for the kernelspec.
        This is needed to install into a non-default location, such as a conda/virtual-env.
    env : dict, optional
        A dictionary of extra environment variables for the kernel.
        These will be added to the current environment variables before the
        kernel is started

    Returns
    -------
    The path where the kernelspec was installed.
    """
    if kernel_spec_manager is None:
        kernel_spec_manager = KernelSpecManager()

    # A custom kernel name with no explicit display name: reuse the name
    # so the two stay in sync in the Jupyter UI.
    if display_name is None and kernel_name != KERNEL_NAME:
        display_name = kernel_name

    overrides = {}
    if display_name:
        overrides["display_name"] = display_name

    extra_arguments = None
    if profile:
        extra_arguments = ["--profile", profile]
        if "display_name" not in overrides:
            # No explicit display name; surface the profile in the default one.
            overrides["display_name"] = 'Python %i [profile=%s]' % (
                sys.version_info[0], profile)

    if env:
        overrides['env'] = env

    spec_dir = write_kernel_spec(overrides=overrides,
                                 extra_arguments=extra_arguments)
    destination = kernel_spec_manager.install_kernel_spec(
        spec_dir, kernel_name=kernel_name, user=user, prefix=prefix)
    # The staged spec directory is no longer needed once installed.
    shutil.rmtree(spec_dir)
    return destination
def install(self):
    """Stage this kernel's kernel.json in a temp dir and register it.

    Returns the path where the kernelspec was installed; any existing
    spec with the same name is replaced.
    """
    with TemporaryDirectory() as staging:
        os.chmod(staging, 0o755)  # check if this is needed
        spec_file = os.path.join(staging, 'kernel.json')
        with open(spec_file, 'w') as fh:
            json.dump(self.kernel_spec, fh, sort_keys=True)
        manager = KernelSpecManager()
        return manager.install_kernel_spec(staging, self.kernel_name,
                                           replace=True)
# from jinja2 import FileSystemLoader # from notebook.base.handlers import IPythonHandler, FileFindHandler import logging from tornado import web, ioloop from notebook.services.kernels.kernelmanager import MappingKernelManager import os from jupyter_client.kernelspec import KernelSpecManager from .handlers import pages_handler, kernels_handler, kernelspecs_handler, content_handeler logger = logging.getLogger("lab_dash") settings = dict(kernel_manager=MappingKernelManager(), kernel_spec_manager=KernelSpecManager(), logger=logger) default_handlers = (pages_handler.default_handlers + kernels_handler.default_handlers + kernelspecs_handler.default_handlers + content_handeler.default_handlers) static_handler = (r'/(.*)', web.StaticFileHandler, { 'path': os.path.join(os.path.dirname(__file__), '..', '..', 'build') }) default_handlers += [static_handler] def main(): logger.setLevel(logging.INFO) ch = logging.StreamHandler()
def install_my_kernel_javascript():
    """Copy the bundled Seq kernel.js into the installed 'seq' kernelspec.

    Reads the source location from the ``SEQ_PATH`` environment variable and
    the destination from the resource directory of the registered ``seq``
    kernel spec.

    Raises
    ------
    KeyError
        If ``SEQ_PATH`` is not set.
    jupyter_client.kernelspec.NoSuchKernel
        If no 'seq' kernelspec is installed.
    """
    import shutil

    # NOTE(review): [:-7] strips a fixed-length trailing path component from
    # SEQ_PATH — confirm this matches the actual environment layout.
    seq_js_file = os.path.join(os.environ['SEQ_PATH'][:-7], 'jupyter',
                               'seq_kernel', 'kernel.js')
    kernel_js_file = os.path.join(
        KernelSpecManager().get_kernel_spec('seq').resource_dir, 'kernel.js')
    # Use shutil.copy instead of os.system(f'cp ...'): portable (works on
    # Windows), raises on failure instead of silently returning a nonzero
    # exit code, and is immune to spaces/shell metacharacters in the paths.
    shutil.copy(seq_js_file, kernel_js_file)
    '--replace',
    help='Replace any existing kernel spec with this name.',
    action='store_true'
)
# Template marker: expanded to generated argparse arguments at build time.
@/s/GENERATED_ARGS@

args = parser.parse_args()

# Template marker: expanded to generated default-value handling at build time.
@/s/GENERATED_DEFAULT_REPLACEMENT@

# Install the kernel
# NOTE(review): the @...@ tokens below are placeholders substituted when this
# installer template is generated; this fragment is not runnable as-is.
install_dest = KernelSpecManager().install_kernel_spec(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), '@KERNEL_DIRECTORY@'),
    kernel_name='@KERNEL_NAME@',
    user=args.user,
    prefix=sys.prefix if args.sys_prefix else args.prefix,
    replace=args.replace
)

# Connect the self referencing token left in the kernel.json to point to it's install location.
# Prepare the token replacement string which should be properly escaped for use in a JSON string
# The [1:-1] trims the first and last " json.dumps adds for strings.
install_dest_json_fragment = json.dumps(install_dest)[1:-1]

# Prepare the paths to the installed kernel.json and the one bundled with this installer.
local_kernel_json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                      '@KERNEL_DIRECTORY@', 'kernel.json')
installed_kernel_json_path = os.path.join(install_dest, 'kernel.json')

# Replace the @KERNEL_INSTALL_DIRECTORY@ token with the path to where the kernel was installed
def uninstall_kernel():
    """Remove this project's Jupyter kernelspec, reporting progress on stdout.

    Uses the module-level ``kernel_name`` to locate the spec to remove.
    """
    print("Uninstalling", kernel_name, "kernel...")
    manager = KernelSpecManager()
    manager.remove_kernel_spec(kernel_name)
    print(kernel_name.capitalize(), "kernel uninstallation complete")
def init_configurables(self):
    """Initializes all configurable objects including a kernel manager, kernel spec manager,
    session manager, and personality.

    Any kernel pool configured by the personality will be its responsibility
    to shut down.

    Optionally, loads a notebook and pre-spawns the configured number of kernels.
    """
    # NOTE(review): kernel_spec_manager is assigned twice — this default is
    # replaced by kernel_spec_manager_class below. Possibly _load_notebook
    # relies on it in between; confirm before removing either assignment.
    self.kernel_spec_manager = KernelSpecManager(parent=self)

    self.seed_notebook = None
    if self.seed_uri is not None:
        # Note: must be set before instantiating a SeedingMappingKernelManager
        self.seed_notebook = self._load_notebook(self.seed_uri)

    # Only pass a default kernel name when one is provided. Otherwise,
    # adopt whatever default the kernel manager wants to use.
    kwargs = {}
    if self.default_kernel_name:
        kwargs['default_kernel_name'] = self.default_kernel_name

    self.kernel_spec_manager = self.kernel_spec_manager_class(
        parent=self,
    )

    self.kernel_manager = self.kernel_manager_class(
        parent=self,
        log=self.log,
        connection_dir=self.runtime_dir,
        kernel_spec_manager=self.kernel_spec_manager,
        **kwargs
    )

    # Detect older version of notebook
    func = getattr(self.kernel_manager, 'initialize_culler', None)
    if not func:
        self.log.warning("Older version of Notebook detected - idle kernels will not be culled. "
                         "Culling requires Notebook >= 5.1.0.")

    self.session_manager = SessionManager(
        log=self.log,
        kernel_manager=self.kernel_manager
    )
    self.kernel_session_manager = self.kernel_session_manager_class(
        parent=self,
        log=self.log,
        kernel_manager=self.kernel_manager,
        config=self.config,  # required to get command-line options visible
        **kwargs
    )
    # Attempt to start persisted sessions
    self.kernel_session_manager.start_sessions()

    self.contents_manager = None  # Gateways don't use contents manager

    # Validate prespawn config before the personality starts any kernels.
    if self.prespawn_count:
        if self.max_kernels and self.prespawn_count > self.max_kernels:
            raise RuntimeError('cannot prespawn {}; more than max kernels {}'.format(
                self.prespawn_count, self.max_kernels)
            )

    # The personality module (selected via self.api) owns request handling
    # and any kernel pool it creates.
    api_module = self._load_api_module(self.api)
    func = getattr(api_module, 'create_personality')
    self.personality = func(parent=self, log=self.log)

    self.personality.init_configurables()
def run_notebook(nb_path, output_dir):
    """Run a notebook tests

    executes the notebook and stores the output in a file

    Parameters
    ----------
    nb_path : str
        Path of the notebook to execute.
    output_dir : str
        Directory under which the executed notebook is written
        (mirrors the notebook's path relative to the CWD under
        ``<output_dir>/notebooks/``).
    """
    import nbformat
    from jupyter_client.kernelspec import KernelSpecManager
    from nbconvert.preprocessors.execute import executenb

    log.info(f"Testing notebook {nb_path}")
    with open(nb_path) as f:
        nb = nbformat.read(f, as_version=4)

    kernel_specs = KernelSpecManager().get_all_specs()
    kernel_info = nb.metadata.get("kernelspec") or {}
    kernel_name = kernel_info.get("name", "")
    kernel_language = kernel_info.get("language") or ""
    if kernel_name in kernel_specs:
        log.info(f"Found kernel {kernel_name}")
    elif kernel_language:
        log.warning(
            f"No such kernel {kernel_name}, falling back on kernel language={kernel_language}"
        )
        kernel_language = kernel_language.lower()
        # no exact name match, re-implement js notebook fallback,
        # using kernel language instead
        # nbconvert does not implement this, but it should
        for kernel_spec_name, spec_info in kernel_specs.items():
            if (spec_info.get("spec", {}).get("language", "").lower()
                    == kernel_language):
                log.warning(
                    f"Using kernel {kernel_spec_name} to provide language: {kernel_language}"
                )
                kernel_name = kernel_spec_name
                break
        else:
            # BUG FIX: this warning was missing its f-prefix, so the
            # placeholders were logged literally instead of interpolated.
            log.warning(
                f"Found no matching kernel for name={kernel_name}, language={kernel_language}"
            )
            summary_specs = [
                f"name={name}, language={info['spec'].get('language')}"
                for name, info in kernel_specs.items()
            ]
            log.warning(f"Found kernel specs: {'; '.join(summary_specs)}")

    exported = executenb(nb,
                         cwd=os.path.dirname(nb_path),
                         kernel_name=kernel_name,
                         timeout=600)

    # Mirror the notebook's path (relative to CWD) under output_dir/notebooks.
    rel_path = os.path.relpath(nb_path, os.getcwd())
    dest_path = os.path.join(output_dir, "notebooks", rel_path)
    log.info(f"Saving exported notebook to {dest_path}")
    try:
        os.makedirs(os.path.dirname(dest_path))
    except FileExistsError:
        pass
    with open(dest_path, "w") as f:
        nbformat.write(exported, f)
def start(self):
    """Build the tornado application (kernel managers, Jinja env, handlers)
    and begin listening for requests."""
    # Per-process directory for kernel connection files.
    self.connection_dir = tempfile.mkdtemp(prefix='voila_',
                                           dir=self.connection_dir_root)
    self.log.info('Storing connection files in %s.' % self.connection_dir)
    self.log.info('Serving static files from %s.' % self.static_root)

    self.kernel_spec_manager = KernelSpecManager(parent=self)
    # Restrict the websocket channel to the message types voila needs.
    self.kernel_manager = MappingKernelManager(
        parent=self,
        connection_dir=self.connection_dir,
        kernel_spec_manager=self.kernel_spec_manager,
        allowed_message_types=[
            'comm_msg',
            'comm_info_request',
            'kernel_info_request',
            'shutdown_request'
        ])

    jenv_opt = {"autoescape": True}  # we might want extra options via cmd line like notebook server

    env = jinja2.Environment(loader=jinja2.FileSystemLoader(self.template_paths),
                             extensions=['jinja2.ext.i18n'], **jenv_opt)
    nbui = gettext.translation('nbui', localedir=os.path.join(ROOT, 'i18n'),
                               fallback=True)
    env.install_gettext_translations(nbui, newstyle=False)
    self.contents_manager = LargeFileManager(parent=self)

    # we create a config manager that load both the serverconfig and nbconfig (classical notebook)
    read_config_path = [os.path.join(p, 'serverconfig')
                        for p in jupyter_config_path()]
    read_config_path += [os.path.join(p, 'nbconfig')
                         for p in jupyter_config_path()]
    self.config_manager = ConfigManager(parent=self,
                                        read_config_path=read_config_path)

    # default server_url to base_url
    self.server_url = self.server_url or self.base_url

    self.app = tornado.web.Application(
        base_url=self.base_url,
        server_url=self.server_url or self.base_url,
        kernel_manager=self.kernel_manager,
        kernel_spec_manager=self.kernel_spec_manager,
        allow_remote_access=True,
        autoreload=self.autoreload,
        voila_jinja2_env=env,
        jinja2_env=env,
        static_path='/',
        server_root_dir='/',
        contents_manager=self.contents_manager,
        config_manager=self.config_manager)

    self.app.settings.update(self.tornado_settings)

    handlers = []

    # Kernel REST + websocket endpoints, then voila's own static assets.
    handlers.extend([
        (url_path_join(self.server_url,
                       r'/api/kernels/%s' % _kernel_id_regex),
         KernelHandler),
        (url_path_join(self.server_url,
                       r'/api/kernels/%s/channels' % _kernel_id_regex),
         ZMQChannelsHandler),
        (url_path_join(self.server_url, r'/voila/static/(.*)'),
         MultiStaticFileHandler,
         {
             'paths': self.static_paths,
             'default_filename': 'index.html'
         })
    ])

    # Serving notebook extensions
    if self.voila_configuration.enable_nbextensions:
        handlers.append((
            url_path_join(self.server_url, r'/voila/nbextensions/(.*)'),
            FileFindHandler,
            {
                'path': self.nbextensions_path,
                'no_cache_paths': ['/'],  # don't cache anything in nbextensions
            },
        ))
    # File access is filtered through white/black lists.
    handlers.append((
        url_path_join(self.server_url, r'/voila/files/(.*)'),
        WhiteListFileHandler,
        {
            'whitelist': self.voila_configuration.file_whitelist,
            'blacklist': self.voila_configuration.file_blacklist,
            'path': self.root_dir,
        },
    ))

    tree_handler_conf = {'voila_configuration': self.voila_configuration}
    if self.notebook_path:
        # Single-notebook mode: everything renders that one notebook.
        handlers.append((url_path_join(self.server_url, r'/(.*)'),
                         VoilaHandler,
                         {
                             'notebook_path': os.path.relpath(self.notebook_path,
                                                              self.root_dir),
                             'nbconvert_template_paths': self.nbconvert_template_paths,
                             'config': self.config,
                             'voila_configuration': self.voila_configuration
                         }))
    else:
        # Directory mode: tree browser plus per-notebook render endpoint.
        self.log.debug('serving directory: %r', self.root_dir)
        handlers.extend([
            (self.server_url, VoilaTreeHandler, tree_handler_conf),
            (url_path_join(self.server_url, r'/voila/tree' + path_regex),
             VoilaTreeHandler, tree_handler_conf),
            (url_path_join(self.server_url, r'/voila/render/(.*)'),
             VoilaHandler,
             {
                 'nbconvert_template_paths': self.nbconvert_template_paths,
                 'config': self.config,
                 'voila_configuration': self.voila_configuration
             }),
        ])

    self.app.add_handlers('.*$', handlers)
    self.listen()
from jupyter_client.kernelspec import KernelSpecManager
from setuptools import setup

name = 'cwlkernel'

setup(
    name=name,
    version='0.1',
    py_modules=['cwlkernel'],
    url='https://github.com/giannisdoukas/CWLJNIKernel',
    author='Yannis Doukas',
    author_email='',
    description='',
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Development Status :: 2 - Pre-Alpha"
    ],
)

import sys
import os

# The kernel.json and related metadata ship in the 'kernelmeta' directory
# next to this setup script.
kernel_requirements_directory = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), 'kernelmeta')

# NOTE(review): this install runs on *every* setup.py invocation (sdist,
# egg_info, ...), not only on `install` — confirm that is intended.
print('Installing IPython kernel spec')
KernelSpecManager().install_kernel_spec(kernel_requirements_directory,
                                        name,
                                        user=False,
                                        prefix=sys.prefix)
def kernel_spec_manager(environ, setup_kernelspecs):
    """Fixture: yield a KernelSpecManager that does not require the
    native (python) kernel to be present."""
    manager = KernelSpecManager(ensure_native_kernel=False)
    yield manager
def remove(_ctx) -> None:
    """Unregister the 'michelson' kernelspec from Jupyter."""
    manager = KernelSpecManager()
    manager.remove_kernel_spec('michelson')
def uninstall(self):
    """Remove this kernel's spec (looked up by lower-cased name) and
    return whatever KernelSpecManager.remove_kernel_spec returns."""
    spec_name = self.kernel_name.lower()
    return KernelSpecManager().remove_kernel_spec(spec_name)
    aliases=ALIASES,
    name_map=NAME_MAP,
    help=
    "The path to the Graphviz dot executable that is used for the %viz magic command.",
    type=type_assertion("graphviz-path", str),
)

args = parser.parse_args()
# Guarantee args.env exists and is a dict even when the flag was not given.
if not hasattr(args, "env") or getattr(args, "env") is None:
    setattr(args, "env", {})

# Install the kernel
install_dest = KernelSpecManager().install_kernel_spec(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sysml'),
    kernel_name='sysml',
    user=args.user,
    prefix=sys.prefix if args.sys_prefix else args.prefix,
    replace=args.replace)

# Connect the self referencing token left in the kernel.json to point to it's install location.
# Prepare the token replacement string which should be properly escaped for use in a JSON string
# The [1:-1] trims the first and last " json.dumps adds for strings.
install_dest_json_fragment = json.dumps(install_dest)[1:-1]

# Prepare the paths to the installed kernel.json and the one bundled with this installer.
local_kernel_json_path = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'sysml', 'kernel.json')
installed_kernel_json_path = os.path.join(install_dest, 'kernel.json')

# Replace the @KERNEL_INSTALL_DIRECTORY@ token with the path to where the kernel was installed
def __init__(self):
    """Parse CLI arguments and the kernel connection file, then select the
    scheduler-specific spawn method and load the requested kernel spec.
    """
    self.args = self.parse_arguments()
    self.connection = self.parse_connection_file(self.args.connection_file)
    # getattr instead of eval(f"self._spawn_{...}"): identical dispatch to a
    # bound method, but no arbitrary-code-execution risk if the scheduler
    # name comes from user input, and an unknown scheduler raises a clear
    # AttributeError rather than a NameError/SyntaxError from eval.
    self.spawn = getattr(self, f"_spawn_{self.args.scheduler}")
    self.kernel_spec = KernelSpecManager().get_kernel_spec(self.args.kernel)