Example #1
    def create_kernel_json(self, location, interpreter):

        python_lib_contents = listdir("{0}/python/lib".format(self.spark_home))
        py4j_zip = ''
        try:
            py4j_zip = list(filter(lambda filename: "py4j" in filename, python_lib_contents))[0]
        except IndexError:
            # py4j not found: still build the spec, just without PySpark support
            self.log.warning('Unable to find py4j, installing without PySpark support.')

        kernel_spec = KernelSpec()
        interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
        kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
        kernel_spec.language = interpreter_lang
        kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
        kernel_spec.env = {
            DEFAULT_INTERPRETER : interpreter,
            # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged when kernels
            # are run. This allows values to be specified during install, but also during runtime.
            TOREE_SPARK_OPTS : self.spark_opts,
            SPARK_HOME : self.spark_home,
            PYTHON_PATH : '{0}/python:{0}/python/lib/{1}'.format(self.spark_home, py4j_zip)
        }

        kernel_json_file = os.path.join(location, 'kernel.json')
        self.log.debug('Creating kernel json file for {}'.format(interpreter))
        with open(kernel_json_file, 'w+') as f:
            json.dump(kernel_spec.to_dict(), f, indent=2)
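Note: a spec built this way serializes (via to_dict) into the kernel.json written above. The sketch below is illustrative only; the paths, kernel name, and env values are hypothetical placeholders, not output from an actual install:

import json
from jupyter_client.kernelspec import KernelSpec

# Illustrative only: mirrors the structure create_kernel_json writes above.
spec = KernelSpec(
    display_name='Apache Toree - Scala',   # hypothetical kernel_name / interpreter
    language='scala',
    argv=['/usr/local/share/jupyter/kernels/apache_toree_scala/bin/run.sh',
          '--profile', '{connection_file}'],
    env={'SPARK_HOME': '/opt/spark'},      # plus the TOREE_* / PYTHONPATH entries shown above
)
print(json.dumps(spec.to_dict(), indent=2))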
Example #2
def julia_spec(kernel_path=None, *, julia_path='/usr/bin/julia'):
    if kernel_path is None:
        kernel_path = os.path.join(os.path.dirname(find_ijulia()), "kernel.jl")
    julia_argv = [
        julia_path, '-i', '--startup-file=yes', '--color=yes', kernel_path,
        '{connection_file}'
    ]
    return KernelSpec(display_name="julia",
                      env={},
                      language="julia",
                      argv=julia_argv)
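A minimal usage sketch (assuming jupyter_client's KernelSpec API and an example kernels directory): the spec returned by julia_spec() can be written out as kernel.json so Jupyter can discover it.

import os

def write_kernel_json(spec, kernel_dir):
    """Persist a KernelSpec as <kernel_dir>/kernel.json (hypothetical helper)."""
    os.makedirs(kernel_dir, exist_ok=True)
    with open(os.path.join(kernel_dir, 'kernel.json'), 'w') as f:
        f.write(spec.to_json())

# Example path; adjust to your Jupyter data directory.
write_kernel_json(julia_spec(), os.path.expanduser('~/.local/share/jupyter/kernels/julia'))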
Example #3
    def create_kernel_json(self, location, interpreter):

        python_lib_contents = listdir("{0}/python/lib".format(self.spark_home))
        py4j_zip = ''
        try:
            py4j_zip = list(filter(lambda filename: "py4j" in filename, python_lib_contents))[0]
        except IndexError:
            # py4j not found: still build the spec, just without PySpark support
            self.log.warning('Unable to find py4j, installing without PySpark support.')

        kernel_spec = KernelSpec()
        interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
        kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
        kernel_spec.language = interpreter_lang
        kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
        kernel_spec.env = {
            DEFAULT_INTERPRETER : interpreter,
            # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged when kernels
            # are run. This allows values to be specified during install, but also during runtime.
            TOREE_SPARK_OPTS : self.spark_opts,
            TOREE_OPTS : self.toree_opts,
            SPARK_HOME : self.spark_home,
            PYTHON_PATH : '{0}/python:{0}/python/lib/{1}'.format(self.spark_home, py4j_zip),
            PYTHON_EXEC : self.python_exec
        }

        kernel_json_file = os.path.join(location, 'kernel.json')
        self.log.debug('Creating kernel json file for {}'.format(interpreter))
        with open(kernel_json_file, 'w+') as f:
            json.dump(kernel_spec.to_dict(), f, indent=2)
Example #4
    def venv_kernel_specs(self):
        python_dirs = self.find_python_paths()
        kspecs = {}
        for venv_name, venv_dir in python_dirs.items():
            exe_name = os.path.join(venv_dir, 'bin/python')
            kspec_dict = {
                "argv":
                [exe_name, "-m", "IPython.kernel", "-f", "{connection_file}"],
                "display_name":
                "Environment ({})".format(venv_name),
                "env": {}
            }

            kspecs.update({venv_name: KernelSpec(**kspec_dict)})
        return kspecs
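A brief usage sketch (hypothetical: `finder` is an instance of the class defining venv_kernel_specs): the returned mapping pairs each environment name with a ready-to-use KernelSpec.

for venv_name, spec in finder.venv_kernel_specs().items():
    print(venv_name, '->', spec.argv[0], spec.display_name)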
Example #5
    def _conda_kspecs(self):
        """ Get (or refresh) the cache of conda kernels
        """
        if self._conda_info is None:
            return {}

        expiry = self._conda_kernels_cache_expiry
        if expiry is not None and expiry >= time.time():
            return self._conda_kernels_cache

        kspecs = {}
        for name, info in self._all_specs().items():
            kspecs[name] = KernelSpec(**info)

        self._conda_kernels_cache_expiry = time.time() + CACHE_TIMEOUT
        self._conda_kernels_cache = kspecs
        return kspecs
Example #6
    def create_kernel_json(self, location, interpreter):

        kernel_spec = KernelSpec()
        interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
        kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
        kernel_spec.language = interpreter_lang
        kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
        kernel_spec.env = {
            DEFAULT_INTERPRETER : interpreter,
            # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged when kernels
            # are run. This allows values to be specified during install, but also during runtime.
            TOREE_SPARK_OPTS : self.spark_opts,
            TOREE_OPTS : self.toree_opts,
            SPARK_HOME : self.spark_home,
        }

        kernel_json_file = os.path.join(location, 'kernel.json')
        self.log.debug('Creating kernel json file for {}'.format(interpreter))
        with open(kernel_json_file, 'w+') as f:
            json.dump(kernel_spec.to_dict(), f, indent=2)
Example #7
    def _load_conda_kspecs(self):
        """ Create a kernelspec for each of the envs where jupyter is installed
        """
        kspecs = {}
        for name, info in self._all_envs().items():
            executable = info['executable']
            display_name = info['display_name']

            if info['language_key'] == 'py':
                kspec = {
                    "argv":
                    [executable, "-m", "ipykernel", "-f", "{connection_file}"],
                    "display_name":
                    display_name,
                    "language":
                    "python",
                    "env": {},
                    "resource_dir":
                    join(dirname(abspath(__file__)), "logos", "python")
                }
            elif info['language_key'] == 'r':
                kspec = {
                    "argv": [
                        executable, "--slave", "-e", "IRkernel::main()",
                        "--args", "{connection_file}"
                    ],
                    "display_name":
                    display_name,
                    "language":
                    "R",
                    "env": {},
                    "resource_dir":
                    join(dirname(abspath(__file__)), "logos", "r")
                }
            else:
                # Unrecognized language: skip the env so a stale `kspec` from a
                # previous iteration is never reused
                continue

            kspecs.update({name: KernelSpec(**kspec)})

        return kspecs
Example #8
def test_connection_file_real_path():
    """Verify realpath is used when formatting connection file"""
    with mock.patch("os.path.realpath") as patched_realpath:
        patched_realpath.return_value = "foobar"
        km = KernelManager(
            connection_file=os.path.join(tempfile.gettempdir(), "kernel-test.json"),
            kernel_name="test_kernel",
        )

        # KernelSpec and launch args have to be mocked as we don't have an actual kernel on disk
        km._kernel_spec = KernelSpec(
            resource_dir="test",
            **{
                "argv": ["python.exe", "-m", "test_kernel", "-f", "{connection_file}"],
                "env": {},
                "display_name": "test_kernel",
                "language": "python",
                "metadata": {},
            },
        )
        km._launch_args = {}
        cmds = km.format_kernel_cmd()
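        # Index 4 is the '{connection_file}' placeholder in the mocked argv;
        # format_kernel_cmd() substitutes the (mocked) realpath there.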
        assert cmds[4] == "foobar"
Example #9
    def create_kernel_json(self, location, interpreter):
        kernel_spec = KernelSpec()
        interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
        kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
        kernel_spec.language = interpreter_lang
        kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
        kernel_spec.env = {
            DEFAULT_INTERPRETER : interpreter,
            # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged when kernels
            # are run. This allows values to be specified during install, but also during runtime.
            TOREE_SPARK_OPTS : self.spark_opts,
            SPARK_HOME : self.spark_home,
            PYTHON_PATH : '{0}/python:{0}/python/lib/py4j-0.8.2.1-src.zip'.format(self.spark_home)
        }

        kernel_json_file = os.path.join(location, 'kernel.json')
        self.log.debug('Creating kernel json file for {}'.format(interpreter))
        with open(kernel_json_file, 'w+') as f:
            json.dump(kernel_spec.to_dict(), f, indent=2)
Example #10
def cache_item_to_kernel_spec(cache_item: CacheItemType) -> KernelSpec:
    """Converts a CacheItemType to a KernelSpec instance for user consumption."""
    kernel_spec = KernelSpec(resource_dir=cache_item["resource_dir"], **cache_item["spec"])
    return kernel_spec
Example #11
def kernel_spec_to_cache_item(kernelspec: KernelSpec) -> CacheItemType:
    """Converts a KernelSpec instance to a CacheItemType for storage into the cache."""
    cache_item = dict()
    cache_item["spec"] = kernelspec.to_dict()
    cache_item["resource_dir"] = kernelspec.resource_dir
    return cache_item
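A quick round-trip sketch (assuming the helpers from Examples #10 and #11, plus jupyter_client's KernelSpec): converting a spec to a cache item and back should preserve both the spec fields and the resource directory.

from jupyter_client.kernelspec import KernelSpec

original = KernelSpec(
    resource_dir='/tmp/kernels/demo',  # hypothetical location
    argv=['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'],
    display_name='Demo',
    language='python',
)
restored = cache_item_to_kernel_spec(kernel_spec_to_cache_item(original))
assert restored.to_dict() == original.to_dict()
assert restored.resource_dir == original.resource_dir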
Example #12
def cache_item_to_kernel_spec(cache_item: CacheItemType) -> KernelSpec:
    """Converts a CacheItemType to a KernelSpec instance for user consumption."""
    return KernelSpec.from_resource_dir(cache_item['resource_dir'])
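Unlike Example #10, this variant ignores any cached spec payload and re-reads kernel.json from the resource directory. A minimal sketch of what from_resource_dir expects (a directory containing a kernel.json), assuming jupyter_client's KernelSpec:

import json
import os
import tempfile
from jupyter_client.kernelspec import KernelSpec

# Hypothetical resource dir holding a minimal kernel.json.
resource_dir = tempfile.mkdtemp()
with open(os.path.join(resource_dir, 'kernel.json'), 'w') as f:
    json.dump({
        'argv': ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'],
        'display_name': 'Demo',
        'language': 'python',
    }, f)

spec = KernelSpec.from_resource_dir(resource_dir)
print(spec.display_name, spec.resource_dir)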