def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            if sys.stdout is not None:
                sys.stdout.flush()

            e_stdout = None if self.quiet else sys.__stdout__
            e_stderr = None if self.quiet else sys.__stderr__

            sys.stdout = outstream_factory(self.session,
                                           self.iopub_thread,
                                           'stdout',
                                           echo=e_stdout)
            if sys.stderr is not None:
                sys.stderr.flush()
            sys.stderr = outstream_factory(self.session,
                                           self.iopub_thread,
                                           'stderr',
                                           echo=e_stderr)
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            self.displayhook = displayhook_factory(self.session,
                                                   self.iopub_socket)
            sys.displayhook = self.displayhook

        self.patch_io()
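Every example on this page funnels a dotted string through import_item, which resolves the string to the named module attribute. A minimal sketch of that helper, assuming the traitlets implementation (IPython re-exports the same function):

from traitlets.utils.importstring import import_item

join = import_item("os.path.join")   # returns the same object as os.path.join
print(join("a", "b"))                # 'a/b' (or 'a\\b' on Windows)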
Example #2
 def init_io(self):
     """Redirect input streams and set a display hook."""
     if self.outstream_class:
         outstream_factory = import_item(str(self.outstream_class))
         sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
         sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
     if self.displayhook_class:
         displayhook_factory = import_item(str(self.displayhook_class))
         sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
Example #3
 def init_io(self):
     """Redirect input streams and set a display hook."""
     if self.outstream_class:
         outstream_factory = import_item(str(self.outstream_class))
         sys.stdout = outstream_factory(self.session, self.iopub_socket,
                                        u'stdout')
         sys.stderr = outstream_factory(self.session, self.iopub_socket,
                                        u'stderr')
     if self.displayhook_class:
         displayhook_factory = import_item(str(self.displayhook_class))
         sys.displayhook = displayhook_factory(self.session,
                                               self.iopub_socket)
Example #4
    def get(self, bundler_id):
        '''
        Executes the requested bundler on the given notebook.

        :param bundler_id: Unique ID of an installed bundler
        :arg notebook: Path to the notebook relative to the notebook directory
            root
        '''
        notebook = self.get_query_argument('notebook')
        abs_nb_path = os.path.join(self.notebook_dir, url2path(notebook))
        try:
            bundler = self.get_bundler(bundler_id)
        except KeyError:
            raise web.HTTPError(404, 'Bundler %s not found' % bundler_id)

        module_name = bundler['module_name']
        try:
            # no-op in python3, decode error in python2
            module_name = str(module_name)
        except UnicodeEncodeError:
            # Encode unicode as utf-8 in python2 else import_item fails
            module_name = module_name.encode('utf-8')

        try:
            bundler_mod = import_item(module_name)
        except ImportError:
            raise web.HTTPError(500,
                                'Could not import bundler %s ' % bundler_id)

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, abs_nb_path))
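The imported bundler module is expected to expose a bundle() callable, which this handler awaits. A hedged sketch of such a module, following the call above; the module name and response choices are illustrative only:

# mybundler.py -- hypothetical module referenced by the bundler's 'module_name'
def bundle(handler, abs_nb_path):
    """Respond to the request in whatever way the bundler sees fit."""
    with open(abs_nb_path) as f:
        handler.set_header('Content-Type', 'application/json')
        handler.write(f.read())
    handler.finish()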
Example #5
    def get(self, path):
        """Bundle the given notebook.

        Parameters
        ----------
        path: str
            Path to the notebook (path parameter)
        bundler: str
            Bundler ID to use (query parameter)
        """
        bundler_id = self.get_query_argument('bundler')
        model = self.contents_manager.get(path=url2path(path))

        try:
            bundler = self.get_bundler(bundler_id)
        except KeyError:
            raise web.HTTPError(400, 'Bundler %s not enabled' % bundler_id)

        module_name = bundler['module_name']
        try:
            # no-op in python3, decode error in python2
            module_name = str(module_name)
        except UnicodeEncodeError:
            # Encode unicode as utf-8 in python2 else import_item fails
            module_name = module_name.encode('utf-8')

        try:
            bundler_mod = import_item(module_name)
        except ImportError:
            raise web.HTTPError(500,
                                'Could not import bundler %s ' % bundler_id)

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, model))
Example #6
def get_validator(version=None, version_minor=None):
    """Load the JSON schema into a Validator"""
    if version is None:
        from .. import current_nbformat
        version = current_nbformat

    v = import_item("nbformat.v%s" % version)
    current_minor = v.nbformat_minor
    if version_minor is None:
        version_minor = current_minor

    version_tuple = (version, version_minor)

    if version_tuple not in validators:
        try:
            v.nbformat_schema
        except AttributeError:
            # no validator
            return None
        schema_path = os.path.join(os.path.dirname(v.__file__),
                                   v.nbformat_schema)
        with open(schema_path) as f:
            schema_json = json.load(f)

        if current_minor < version_minor:
            # notebook from the future, relax all `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)

        validators[version_tuple] = Validator(schema_json)
    return validators[version_tuple]
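A hedged usage sketch of the lookup above, assuming nbformat is installed; the cached jsonschema validator (or None when the version ships no schema) is then applied to a notebook dict:

import nbformat

nb = nbformat.v4.new_notebook()      # an empty, schema-valid v4 notebook
validator = get_validator(version=4)
if validator is not None:            # None means this version has no schema
    validator.validate(nb)           # raises jsonschema.ValidationError on failure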
Example #7
    def get(self, bundler_id):
        """
        Executes the requested bundler on the given notebook.

        :param bundler_id: Unique ID of an installed bundler
        :arg notebook: Path to the notebook relative to the notebook directory
            root
        """
        notebook = self.get_query_argument("notebook")
        abs_nb_path = os.path.join(self.notebook_dir, url2path(notebook))
        try:
            bundler = self.get_bundler(bundler_id)
        except KeyError:
            raise web.HTTPError(404, "Bundler %s not found" % bundler_id)

        module_name = bundler["module_name"]
        try:
            # no-op in python3, decode error in python2
            module_name = str(module_name)
        except UnicodeEncodeError:
            # Encode unicode as utf-8 in python2 else import_item fails
            module_name = module_name.encode("utf-8")

        try:
            bundler_mod = import_item(module_name)
        except ImportError:
            raise web.HTTPError(500, "Could not import bundler %s " % bundler_id)

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, abs_nb_path))
Example #8
def get_exporter(name):
    """ given an exporter name, return a class ready to be instantiate
    
    Raises ValueError if exporter is not found
    """
    if name.lower() in exporter_map:
        return exporter_map[name.lower()]

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = logging.getLogger()
            log.error("Error importing %s" % name, exc_info=True)
            pass
    else:
        try:
            return entrypoints.get_single('nbconvert.exporter', name).load()
        except entrypoints.NoSuchEntryPoint:
            pass

    valid_names = sorted(
        get_export_names() +
        list(entrypoints.get_group_named('nbconvert.exporter')))
    raise ValueError('Unknown exporter "%s", did you mean one of: %s?' %
                     (name, ', '.join(valid_names)))
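For illustration, a sketch of the two lookup paths above, assuming get_exporter is imported from nbconvert.exporters: a registered short name and a dotted import path both resolve to an exporter class.

from nbconvert.exporters import get_exporter

html_cls = get_exporter("html")                                   # registered name (exporter_map / entry point)
script_cls = get_exporter("nbconvert.exporters.ScriptExporter")   # dotted path, resolved via import_item
exporter = html_cls()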
Example #9
    def get(self, path):
        """Bundle the given notebook.
        
        Parameters
        ----------
        path: str
            Path to the notebook (path parameter)
        bundler: str
            Bundler ID to use (query parameter)
        """
        bundler_id = self.get_query_argument('bundler')
        model = self.contents_manager.get(path=url2path(path))

        try:
            bundler = self.get_bundler(bundler_id)
        except KeyError:
            raise web.HTTPError(400, 'Bundler %s not enabled' % bundler_id)
        
        module_name = bundler['module_name']
        try:
            # no-op in python3, decode error in python2
            module_name = str(module_name)
        except UnicodeEncodeError:
            # Encode unicode as utf-8 in python2 else import_item fails
            module_name = module_name.encode('utf-8')
        
        try:
            bundler_mod = import_item(module_name)
        except ImportError:
            raise web.HTTPError(500, 'Could not import bundler %s ' % bundler_id)

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, model))
Example #10
    def start_kernel(self):
        """Starts a kernel in a separate process.

        Where the started kernel resides depends on the configured lifecycle manager.

        Parameters
        ----------
        `**kwargs` : optional
             keyword arguments that are passed down to build the kernel_cmd
             and launching the kernel (e.g. Popen kwargs).
        """

        lifecycle_manager_class_name = self.lifecycle_info.get('class_name')
        self.log.debug(
            "Instantiating kernel '{}' with lifecycle manager: {}".format(
                self.kernel_spec.display_name, lifecycle_manager_class_name))
        lifecycle_manager_class = import_item(lifecycle_manager_class_name)
        self.lifecycle_manager = lifecycle_manager_class(
            kernel_manager=self,
            lifecycle_config=self.lifecycle_info.get('config', {}))

        # format command
        kernel_cmd = self.format_kernel_cmd()

        self.log.debug("Launching kernel: {} with command: {}".format(
            self.kernel_spec.display_name, kernel_cmd))
        self.kernel = self.lifecycle_manager.launch_process(kernel_cmd,
                                                            env=self.env)
Example #11
 def _validate_pre_save_hook(self, proposal):
     value = proposal["value"]
     if isinstance(value, str):
         value = import_item(value)
     if not callable(value):
         raise TraitError("pre_save_hook must be callable")
     return value
Example #12
def get_validator(version=None, version_minor=None):
    """Load the JSON schema into a Validator"""
    if version is None:
        from .. import current_nbformat
        version = current_nbformat

    v = import_item("jupyter_nbformat.v%s" % version)
    current_minor = v.nbformat_minor
    if version_minor is None:
        version_minor = current_minor

    version_tuple = (version, version_minor)

    if version_tuple not in validators:
        try:
            v.nbformat_schema
        except AttributeError:
            # no validator
            return None
        schema_path = os.path.join(os.path.dirname(v.__file__), v.nbformat_schema)
        with open(schema_path) as f:
            schema_json = json.load(f)

        if current_minor < version_minor:
            # notebook from the future, relax all `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)

        validators[version_tuple] = Validator(schema_json)
    return validators[version_tuple]
Example #13
def get_exporter(name):
    """ given an exporter name, return a class ready to be instantiate
    
    Raises ValueError if exporter is not found
    """
    if name.lower() in exporter_map:
        return exporter_map[name.lower()]

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = logging.getLogger()
            log.error("Error importing %s" % name, exc_info=True)
            pass
    else:
        try:
            return entrypoints.get_single('nbconvert.exporter', name).load()
        except entrypoints.NoSuchEntryPoint:
            pass

    valid_names = sorted(get_export_names() +
                     list(entrypoints.get_group_named('nbconvert.exporter')))
    raise ValueError('Unknown exporter "%s", did you mean one of: %s?'
                     % (name, ', '.join(valid_names)))
Example #14
    def from_dict(cls: Type[M], schemaspace: str, metadata_dict: dict) -> M:
        """Creates an appropriate instance of Metadata from a dictionary instance"""

        # Get the schema and look for metadata_class entry and use that, else Metadata.
        metadata_class_name = "elyra.metadata.metadata.Metadata"
        schema_name = metadata_dict.get("schema_name")
        if schema_name:
            schema = SchemaManager.instance().get_schema(
                schemaspace, schema_name)
            metadata_class_name = schema.get("metadata_class_name",
                                             metadata_class_name)
        metadata_class = import_item(metadata_class_name)
        try:
            instance = metadata_class(**metadata_dict)
            if not isinstance(instance, Metadata):
                raise ValueError(
                    f"The metadata_class_name ('{metadata_class_name}') for "
                    f"schema '{schema_name}' must be a subclass of '{cls.__name__}'!"
                )
        except TypeError as te:
            raise ValueError(
                f"The metadata_class_name ('{metadata_class_name}') for "
                f"schema '{schema_name}' must be a subclass of '{cls.__name__}'!"
            ) from te
        return instance
Example #15
    def from_dict(cls: Type[M], namespace: str, metadata_dict: dict) -> M:
        """Creates an appropriate instance of Metadata from a dictionary instance """

        # Get the schema and look for metadata_class entry and use that, else Metadata.
        metadata_class_name = 'elyra.metadata.Metadata'
        schema_name = metadata_dict.get('schema_name')
        if schema_name:
            try:
                schema = SchemaManager.instance().get_schema(
                    namespace, schema_name)
                metadata_class_name = schema.get('metadata_class_name',
                                                 metadata_class_name)
            except Exception:  # just use the default
                pass
        metadata_class = import_item(metadata_class_name)
        try:
            instance = metadata_class(**metadata_dict)
            if not isinstance(instance, Metadata):
                raise ValueError(
                    "The metadata_class_name ('{}') for schema '{}' must be a subclass of '{}'!"
                    .format(metadata_class_name, schema_name, cls.__name__))
        except TypeError as te:
            raise ValueError(
                "The metadata_class_name ('{}') for schema '{}' must be a subclass of '{}'!"
                .format(metadata_class_name, schema_name,
                        cls.__name__)) from te
        return instance
Example #16
 def _validate_post_save_hook(self, proposal):
     value = proposal['value']
     if isinstance(value, string_types):
         value = import_item(value)
     if not callable(value):
         raise TraitError("post_save_hook must be callable")
     return value
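Because the validator accepts an import string, the hook can be supplied from configuration as a dotted path. A sketch for jupyter_notebook_config.py, where c is the config object; the module path is purely illustrative:

# "mypackage.hooks.post_save" is a hypothetical attribute that must resolve to a callable
c.FileContentsManager.post_save_hook = "mypackage.hooks.post_save"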
Example #17
    def start_kernel(self, **kw):
        if self.kernel_spec.process_proxy_class:
            self.log.debug("Instantiating kernel '{}' with process proxy: {}".
                           format(self.kernel_spec.display_name, self.kernel_spec.process_proxy_class))
            process_proxy_class = import_item(self.kernel_spec.process_proxy_class)
            self.process_proxy = process_proxy_class(kernel_manager=self, proxy_config=self.kernel_spec.process_proxy_config)

        return super(RemoteKernelManager, self).start_kernel(**kw)
Example #18
    def start_kernel(self, **kw):
        process_proxy = get_process_proxy_config(self.kernel_spec)
        process_proxy_class_name = process_proxy.get('class_name')
        self.log.debug("Instantiating kernel '{}' with process proxy: {}".
                       format(self.kernel_spec.display_name, process_proxy_class_name))
        process_proxy_class = import_item(process_proxy_class_name)
        self.process_proxy = process_proxy_class(kernel_manager=self, proxy_config=process_proxy.get('config'))
        self._capture_user_overrides(**kw)

        return super(RemoteKernelManager, self).start_kernel(**kw)
Example #19
 def _unpacker_changed(self, name, old, new):
     if new.lower() == 'json':
         self.pack = json_packer
         self.unpack = json_unpacker
         self.packer = new
     elif new.lower() == 'pickle':
         self.pack = pickle_packer
         self.unpack = pickle_unpacker
         self.packer = new
     else:
         self.unpack = import_item(str(new))
Example #20
 def _unpacker_changed(self, name, old, new):
     if new.lower() == 'json':
         self.pack = json_packer
         self.unpack = json_unpacker
         self.packer = new
     elif new.lower() == 'pickle':
         self.pack = pickle_packer
         self.unpack = pickle_unpacker
         self.packer = new
     else:
         self.unpack = import_item(str(new))
Example #21
 def _unpacker_changed(self, change):
     new = change['new']
     if new.lower() == 'json':
         self.pack = json_packer
         self.unpack = json_unpacker
         self.packer = new
     elif new.lower() == 'pickle':
         self.pack = pickle_packer
         self.unpack = pickle_unpacker
         self.packer = new
     else:
         self.unpack = import_item(str(new))
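The same pattern backs the Session serialization traits: 'json', 'pickle', or a dotted import string naming a custom callable. A sketch assuming jupyter_client; the custom module below is hypothetical:

from jupyter_client.session import Session

session = Session(packer="json")    # built-in JSON packer/unpacker pair
# a custom serializer is resolved through import_item:
# session = Session(packer="mypkg.serial.pack", unpacker="mypkg.serial.unpack")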
Example #22
    def start_kernel(self, **kw):
        process_proxy = get_process_proxy_config(self.kernel_spec)
        process_proxy_class_name = process_proxy.get('class_name')
        self.log.debug(
            "Instantiating kernel '{}' with process proxy: {}".format(
                self.kernel_spec.display_name, process_proxy_class_name))
        process_proxy_class = import_item(process_proxy_class_name)
        self.process_proxy = process_proxy_class(
            kernel_manager=self, proxy_config=process_proxy.get('config'))
        self._capture_user_overrides(**kw)

        return super(RemoteKernelManager, self).start_kernel(**kw)
Example #23
 def _get_process_proxy(self):
     """Reads the associated kernelspec and to see if has a process proxy stanza.
        If one exists, it instantiates an instance.  If a process proxy is not
        specified in the kernelspec, a LocalProcessProxy stanza is fabricated and
        instantiated.
     """
     process_proxy_cfg = get_process_proxy_config(self.kernel_spec)
     process_proxy_class_name = process_proxy_cfg.get('class_name')
     self.log.debug("Instantiating kernel '{}' with process proxy: {}".
                    format(self.kernel_spec.display_name, process_proxy_class_name))
     process_proxy_class = import_item(process_proxy_class_name)
     self.process_proxy = process_proxy_class(kernel_manager=self, proxy_config=process_proxy_cfg.get('config'))
Example #24
    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout.flush()

            e_stdout = None if self.quiet else sys.__stdout__
            e_stderr = None if self.quiet else sys.__stderr__

            sys.stdout = outstream_factory(self.session, self.iopub_thread,
                                           u'stdout',
                                           echo=e_stdout)
            sys.stderr.flush()
            sys.stderr = outstream_factory(self.session, self.iopub_thread,
                                           u'stderr',
                                           echo=e_stderr)
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            self.displayhook = displayhook_factory(self.session, self.iopub_socket)
            sys.displayhook = self.displayhook

        self.patch_io()
Example #25
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, help = self.subcommands.get(subc)

        if isinstance(subapp, six.string_types):
            subapp = import_item(subapp)

        # clear existing instances
        self.__class__.clear_instance()
        # instantiate
        self.subapp = subapp.instance(config=self.config)
        # and initialize subapp
        self.subapp.initialize(argv)
Example #26
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, help = self.subcommands.get(subc)

        if isinstance(subapp, string_types):
            subapp = import_item(subapp)

        # clear existing instances
        self.__class__.clear_instance()
        # instantiate
        self.subapp = subapp.instance(config=self.config)
        # and initialize subapp
        self.subapp.initialize(argv)
Example #27
    def _create_kernel_manager_factory(self):
        kernel_manager_ctor = import_item(self.kernel_manager_class)

        def create_kernel_manager(*args, **kwargs):
            if self.shared_context:
                if self.context.closed:
                    # recreate context if closed
                    self.context = self._context_default()
                kwargs.setdefault("context", self.context)
            km = kernel_manager_ctor(*args, **kwargs)
            return km

        return create_kernel_manager
Example #28
    def start_kernel_from_session(self, kernel_id, kernel_name,
                                  connection_info, process_info, launch_args):
        # Create a KernelManager instance and load connection and process info, then confirm the kernel is still
        # alive.
        constructor_kwargs = {}
        if self.kernel_spec_manager:
            constructor_kwargs[
                'kernel_spec_manager'] = self.kernel_spec_manager

        # Construct a kernel manager...
        km = self.kernel_manager_factory(connection_file=os.path.join(
            self.connection_dir, "kernel-%s.json" % kernel_id),
                                         parent=self,
                                         log=self.log,
                                         kernel_name=kernel_name,
                                         **constructor_kwargs)

        # Load connection info into member vars - no need to write out connection file
        km.load_connection_info(connection_info)

        km._launch_args = launch_args

        # Construct a process-proxy
        if km.kernel_spec.process_proxy_class:
            process_proxy_class = import_item(
                km.kernel_spec.process_proxy_class)
            km.process_proxy = process_proxy_class(
                km, proxy_config=km.kernel_spec.process_proxy_config)
            km.process_proxy.load_process_info(process_info)

            # Confirm we can even poll the process.  If not, remove the persisted session.
            if km.process_proxy.poll() is False:
                return False

        km.kernel = km.process_proxy
        km.start_restarter()
        km._connect_control_socket()
        self._kernels[kernel_id] = km
        self._kernel_connections[kernel_id] = 0
        self.start_watching_activity(kernel_id)
        self.add_restart_callback(
            kernel_id,
            lambda: self._handle_kernel_died(kernel_id),
            'dead',
        )
        # Only initialize culling if available.  Warning message will be issued in gatewayapp at startup.
        func = getattr(self, 'initialize_culler', None)
        if func:
            func()
        return True
Example #29
    def register_target(self, target_name, f):
        """Register a callable f for a given target name

        f will be called with two arguments when a comm_open message is received with `target`:

        - the Comm instance
        - the `comm_open` message itself.

        f can be a Python callable or an import string for one.
        """
        if isinstance(f, string_types):
            f = import_item(f)

        self.targets[target_name] = f
Example #30
    def register_target(self, target_name, f):
        """Register a callable f for a given target name

        f will be called with two arguments when a comm_open message is received with `target`:

        - the Comm instance
        - the `comm_open` message itself.

        f can be a Python callable or an import string for one.
        """
        if isinstance(f, string_types):
            f = import_item(f)

        self.targets[target_name] = f
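As the docstring says, f can be an import string; a sketch of registering such a target from inside a running IPython kernel (the target name and module path are illustrative):

from IPython import get_ipython

# the string form is resolved with import_item at registration time
get_ipython().kernel.comm_manager.register_target(
    "my_target", "mypackage.comms.handle_comm_opened")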
Example #31
    def register_preprocessor(self, preprocessor, enabled=False):
        """
        Register a preprocessor.
        Preprocessors are classes that act upon the notebook before it is
        passed into the Jinja templating engine.  preprocessors are also
        capable of passing additional information to the Jinja
        templating engine.

        Parameters
        ----------
        preprocessor : :class:`~nbconvert.preprocessors.Preprocessor`
            A dotted module name, a type, or an instance
        enabled : bool
            Mark the preprocessor as enabled

        """
        if preprocessor is None:
            raise TypeError("preprocessor")
        isclass = isinstance(preprocessor, type)
        constructed = not isclass

        # Handle preprocessor's registration based on its type
        if constructed and isinstance(preprocessor, py3compat.string_types):
            # Preprocessor is a string, import the namespace and recursively call
            # this register_preprocessor method
            preprocessor_cls = import_item(preprocessor)
            return self.register_preprocessor(preprocessor_cls, enabled)

        if constructed and hasattr(preprocessor, "__call__"):
            # Preprocessor is a function, no need to construct it.
            # Register and return the preprocessor.
            if enabled:
                preprocessor.enabled = True
            self._preprocessors.append(preprocessor)
            return preprocessor

        elif isclass and isinstance(preprocessor, MetaHasTraits):
            # Preprocessor is configurable.  Make sure to pass in new default for
            # the enabled flag if one was specified.
            self.register_preprocessor(preprocessor(parent=self), enabled)

        elif isclass:
            # Preprocessor is not configurable, construct it
            self.register_preprocessor(preprocessor(), enabled)

        else:
            # Preprocessor is an instance of something without a __call__
            # attribute.
            raise TypeError("preprocessor")
Example #32
    def register_preprocessor(self, preprocessor, enabled=False):
        """
        Register a preprocessor.
        Preprocessors are classes that act upon the notebook before it is
        passed into the Jinja templating engine.  preprocessors are also
        capable of passing additional information to the Jinja
        templating engine.

        Parameters
        ----------
        preprocessor : :class:`~nbconvert.preprocessors.Preprocessor`
            A dotted module name, a type, or an instance
        enabled : bool
            Mark the preprocessor as enabled

        """
        if preprocessor is None:
            raise TypeError('preprocessor')
        isclass = isinstance(preprocessor, type)
        constructed = not isclass

        # Handle preprocessor's registration based on its type
        if constructed and isinstance(preprocessor, py3compat.string_types):
            # Preprocessor is a string, import the namespace and recursively call
            # this register_preprocessor method
            preprocessor_cls = import_item(preprocessor)
            return self.register_preprocessor(preprocessor_cls, enabled)

        if constructed and hasattr(preprocessor, '__call__'):
            # Preprocessor is a function, no need to construct it.
            # Register and return the preprocessor.
            if enabled:
                preprocessor.enabled = True
            self._preprocessors.append(preprocessor)
            return preprocessor

        elif isclass and issubclass(preprocessor, HasTraits):
            # Preprocessor is configurable.  Make sure to pass in new default for
            # the enabled flag if one was specified.
            self.register_preprocessor(preprocessor(parent=self), enabled)

        elif isclass:
            # Preprocessor is not configurable, construct it
            self.register_preprocessor(preprocessor(), enabled)

        else:
            # Preprocessor is an instance of something without a __call__
            # attribute.
            raise TypeError('preprocessor')
Example #33
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping"""
    log = get_logger()
    log.debug("Importing canning map")
    for key, value in list(mapping.items()):
        if isinstance(key, string_types):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("canning class not importable: %r", key, exc_info=True)
                mapping.pop(key)
            else:
                mapping[cls] = mapping.pop(key)
Example #34
    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        if self.client_factory is None:
            self.client_factory = import_item(self.client_class)

        kw = {}
        kw.update(self.get_connection_info())
        kw.update(dict(
            connection_file=self.connection_file,
            session=self.session,
            parent=self,
        ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)
Example #35
    def _create_kernel_manager_factory(self):
        kernel_manager_ctor = import_item(self.kernel_manager_class)

        def create_kernel_manager(*args, **kwargs):
            km = kernel_manager_ctor(*args, **kwargs)

            if km.cache_ports:
                km.shell_port = self._find_available_port(km.ip)
                km.iopub_port = self._find_available_port(km.ip)
                km.stdin_port = self._find_available_port(km.ip)
                km.hb_port = self._find_available_port(km.ip)
                km.control_port = self._find_available_port(km.ip)

            return km

        return create_kernel_manager
Example #36
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping
    
    """
    log = get_logger()
    log.debug("Importing canning map")
    for key,value in list(mapping.items()):
        if isinstance(key, string_types):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("canning class not importable: %r", key, exc_info=True)
                mapping.pop(key)
            else:
                mapping[cls] = mapping.pop(key)
Example #37
def get_exporter(name):
    """ given an exporter name, return a class ready to be instantiate
    
    Raises ValueError if exporter is not found
    """
    if name.lower() in exporter_map:
        return exporter_map[name.lower()]

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = logging.getLogger()
            log.error("Error importing %s" % name, exc_info=True)
            pass

    raise ValueError('Unknown exporter "%s", did you mean one of: %s?'
                     % (name, ', '.join(sorted(get_export_names()))))
Example #38
    def _register_filter(self, environ, name, jinja_filter):
        """
        Register a filter.
        A filter is a function that accepts and acts on one string.
        The filters are accessible within the Jinja templating engine.

        Parameters
        ----------
        name : str
            name to give the filter in the Jinja engine
        filter : filter
        """
        if jinja_filter is None:
            raise TypeError('filter')
        isclass = isinstance(jinja_filter, type)
        constructed = not isclass

        #Handle filter's registration based on its type
        if constructed and isinstance(jinja_filter, py3compat.string_types):
            #filter is a string, import the namespace and recursively call
            #this register_filter method
            filter_cls = import_item(jinja_filter)
            return self._register_filter(environ, name, filter_cls)

        if constructed and hasattr(jinja_filter, '__call__'):
            #filter is a function, no need to construct it.
            environ.filters[name] = jinja_filter
            return jinja_filter

        elif isclass and issubclass(jinja_filter, HasTraits):
            #filter is configurable.  Make sure to pass in new default for
            #the enabled flag if one was specified.
            filter_instance = jinja_filter(parent=self)
            self._register_filter(environ, name, filter_instance)

        elif isclass:
            #filter is not configurable, construct it
            filter_instance = jinja_filter()
            self._register_filter(environ, name, filter_instance)

        else:
            #filter is an instance of something without a __call__
            #attribute.
            raise TypeError('filter')
Example #39
    def _register_filter(self, environ, name, jinja_filter):
        """
        Register a filter.
        A filter is a function that accepts and acts on one string.
        The filters are accessible within the Jinja templating engine.

        Parameters
        ----------
        name : str
            name to give the filter in the Jinja engine
        filter : filter
        """
        if jinja_filter is None:
            raise TypeError('filter')
        isclass = isinstance(jinja_filter, type)
        constructed = not isclass

        #Handle filter's registration based on its type
        if constructed and isinstance(jinja_filter, py3compat.string_types):
            #filter is a string, import the namespace and recursively call
            #this register_filter method
            filter_cls = import_item(jinja_filter)
            return self._register_filter(environ, name, filter_cls)

        if constructed and hasattr(jinja_filter, '__call__'):
            #filter is a function, no need to construct it.
            environ.filters[name] = jinja_filter
            return jinja_filter

        elif isclass and issubclass(jinja_filter, HasTraits):
            #filter is configurable.  Make sure to pass in new default for
            #the enabled flag if one was specified.
            filter_instance = jinja_filter(parent=self)
            self._register_filter(environ, name, filter_instance)

        elif isclass:
            #filter is not configurable, construct it
            filter_instance = jinja_filter()
            self._register_filter(environ, name, filter_instance)

        else:
            #filter is an instance of something without a __call__
            #attribute.
            raise TypeError('filter')
Example #40
def get_validator(version=None, version_minor=None, relax_add_props=False):
    """Load the JSON schema into a Validator"""
    if version is None:
        from . import current_nbformat
        version = current_nbformat

    v = import_item("nbformat.v%s" % version)
    current_minor = getattr(v, 'nbformat_minor', 0)
    if version_minor is None:
        version_minor = current_minor

    current_validator = get_current_validator()
    version_tuple = (current_validator.name, version, version_minor)

    if version_tuple not in validators:
        try:
            schema_json = _get_schema_json(v,
                                           version=version,
                                           version_minor=version_minor)
        except AttributeError:
            return None

        if current_minor < version_minor:
            # notebook from the future, relax all `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)

        validators[version_tuple] = current_validator(schema_json)

    if relax_add_props:
        try:
            schema_json = _get_schema_json(v,
                                           version=version,
                                           version_minor=version_minor)
        except AttributeError:
            return None

        # this allows properties to be added for intermediate
        # representations while validating for all other kinds of errors
        schema_json = _relax_additional_properties(schema_json)
        validators[version_tuple] = current_validator(schema_json)

    return validators[version_tuple]
Example #41
    def start_kernel_from_session(self, kernel_id, kernel_name, connection_info, process_info, launch_args):
        # Create a KernelManager instance and load connection and process info, then confirm the kernel is still
        # alive.
        constructor_kwargs = {}
        if self.kernel_spec_manager:
            constructor_kwargs['kernel_spec_manager'] = self.kernel_spec_manager

        # Construct a kernel manager...
        km = self.kernel_manager_factory(connection_file=os.path.join(
            self.connection_dir, "kernel-%s.json" % kernel_id),
            parent=self, log=self.log, kernel_name=kernel_name,
            **constructor_kwargs)

        # Load connection info into member vars - no need to write out connection file
        km.load_connection_info(connection_info)

        km._launch_args = launch_args

        # Construct a process-proxy
        process_proxy = get_process_proxy_config(km.kernel_spec)
        process_proxy_class = import_item(process_proxy.get('class_name'))
        km.process_proxy = process_proxy_class(km, proxy_config=process_proxy.get('config'))
        km.process_proxy.load_process_info(process_info)

        # Confirm we can even poll the process.  If not, remove the persisted session.
        if km.process_proxy.poll() is False:
            return False

        km.kernel = km.process_proxy
        km.start_restarter()
        km._connect_control_socket()
        self._kernels[kernel_id] = km
        self._kernel_connections[kernel_id] = 0
        self.start_watching_activity(kernel_id)
        self.add_restart_callback(kernel_id,
            lambda: self._handle_kernel_died(kernel_id),
            'dead',
        )
        # Only initialize culling if available.  Warning message will be issued in gatewayapp at startup.
        func = getattr(self, 'initialize_culler', None)
        if func:
            func()
        return True
Example #42
def find_launcher_class(clsname, kind):
    """Return a launcher for a given clsname and kind.

    Parameters
    ==========
    clsname : str
        The full name of the launcher class, either with or without the
        module path, or an abbreviation (MPI, SSH, SGE, PBS, LSF, HTCondor
        WindowsHPC).
    kind : str
        Either 'EngineSet' or 'Controller'.
    """
    if '.' not in clsname:
        # not a module, presume it's the raw name in apps.launcher
        if kind and kind not in clsname:
            # doesn't match necessary full class name, assume it's
            # just 'PBS' or 'MPI' etc prefix:
            clsname = clsname + kind + 'Launcher'
        clsname = 'ipyparallel.apps.launcher.'+clsname
    klass = import_item(clsname)
    return klass
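Illustrative calls, following the docstring above; the abbreviation is expanded to a full class name under ipyparallel.apps.launcher before import_item loads it:

mpi_engines = find_launcher_class("MPI", "EngineSet")       # -> MPIEngineSetLauncher
ssh_controller = find_launcher_class("SSH", "Controller")   # -> SSHControllerLauncher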
Example #43
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, _ = self.subcommands.get(subc)

        if isinstance(subapp, str):
            subapp = import_item(subapp)

        ## Cannot issubclass() on a non-type (SO: http://stackoverflow.com/questions/8692430)
        if isinstance(subapp, type) and issubclass(subapp, Application):
            # Clear existing instances before...
            self.__class__.clear_instance()
            # instantiating subapp...
            self.subapp = subapp.instance(parent=self)
        elif callable(subapp):
            # or ask factory to create it...
            self.subapp = subapp(self)
        else:
            raise AssertionError("Invalid mappings for subcommand '%s'!" % subc)

        # ... and finally initialize subapp.
        self.subapp.initialize(argv)
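For context, a hedged sketch of the subcommands mapping this method consumes: each entry pairs an Application subclass, or a dotted import string for one, with a help string (the class path below is hypothetical):

from traitlets.config import Application

class MyApp(Application):
    # value is (Application subclass or import string, help text);
    # strings are resolved lazily via import_item in initialize_subcommand
    subcommands = {
        "list": ("mypkg.apps.ListApp", "List the available items"),
    }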
Example #44
def find_launcher_class(clsname, kind):
    """Return a launcher for a given clsname and kind.

    Parameters
    ==========
    clsname : str
        The full name of the launcher class, either with or without the
        module path, or an abbreviation (MPI, SSH, SGE, PBS, LSF, HTCondor
        Slurm, WindowsHPC).
    kind : str
        Either 'EngineSet' or 'Controller'.
    """
    if '.' not in clsname:
        # not a module, presume it's the raw name in apps.launcher
        if kind and kind not in clsname:
            # doesn't match necessary full class name, assume it's
            # just 'PBS' or 'MPI' etc prefix:
            clsname = clsname + kind + 'Launcher'
        clsname = 'ipyparallel.apps.launcher.' + clsname
    klass = import_item(clsname)
    return klass
Example #45
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, _ = self.subcommands.get(subc)

        if isinstance(subapp, six.string_types):
            subapp = import_item(subapp)

        ## Cannot issubclass() on a non-type (SO: http://stackoverflow.com/questions/8692430)
        if isinstance(subapp, type) and issubclass(subapp, Application):
            # Clear existing instances before...
            self.__class__.clear_instance()
            # instantiating subapp...
            self.subapp = subapp.instance(parent=self)
        elif callable(subapp):
            # or ask factory to create it...
            self.subapp = subapp(self)
        else:
            raise AssertionError("Invalid mappings for subcommand '%s'!" % subc)

        # ... and finally initialize subapp.
        self.subapp.initialize(argv)
Example #46
    def _create_kernel_manager_factory(self):
        kernel_manager_ctor = import_item(self.kernel_manager_class)

        def create_kernel_manager(*args, **kwargs):
            if self.shared_context:
                if self.context.closed:
                    # recreate context if closed
                    self.context = self._context_default()
                kwargs.setdefault("context", self.context)
            km = kernel_manager_ctor(*args, **kwargs)

            if km.cache_ports:
                km.shell_port = self._find_available_port(km.ip)
                km.iopub_port = self._find_available_port(km.ip)
                km.stdin_port = self._find_available_port(km.ip)
                km.hb_port = self._find_available_port(km.ip)
                km.control_port = self._find_available_port(km.ip)

            return km

        return create_kernel_manager
Example #47
    def get(self, bundler_id):
        '''
        Executes the requested bundler on the given notebook.

        :param bundler_id: Unique ID of an installed bundler
        :arg notebook: Path to the notebook relative to the notebook directory
            root
        '''
        notebook = self.get_query_argument('notebook')
        abs_nb_path = os.path.join(self.notebook_dir, url2path(notebook))
        try:
            bundler = self.get_bundler(bundler_id)
        except KeyError:
            raise web.HTTPError(404, 'Bundler %s not found' % bundler_id)
        try:
            bundler_mod = import_item(bundler['module_name'])
        except ImportError:
            raise web.HTTPError(500, 'Could not import bundler %s ' % bundler_id)

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, abs_nb_path))
Example #48
    def start_kernel(self, **kwargs):
        """Starts a kernel in a separate process.

        Where the started kernel resides depends on the configured process proxy.

        Parameters
        ----------
        `**kwargs` : optional
             keyword arguments that are passed down to build the kernel_cmd
             and launching the kernel (e.g. Popen kwargs).
        """

        process_proxy = get_process_proxy_config(self.kernel_spec)
        process_proxy_class_name = process_proxy.get('class_name')
        self.log.debug(
            "Instantiating kernel '{}' with process proxy: {}".format(
                self.kernel_spec.display_name, process_proxy_class_name))
        process_proxy_class = import_item(process_proxy_class_name)
        self.process_proxy = process_proxy_class(
            kernel_manager=self, proxy_config=process_proxy.get('config'))
        self._capture_user_overrides(**kwargs)
        super(RemoteKernelManager, self).start_kernel(**kwargs)
Example #49
def get_validator(version=None, version_minor=None, relax_add_props=False):
    """Load the JSON schema into a Validator"""
    if version is None:
        from .. import current_nbformat
        version = current_nbformat

    v = import_item("nbformat.v%s" % version)
    current_minor = getattr(v, 'nbformat_minor', 0)
    if version_minor is None:
        version_minor = current_minor

    version_tuple = (version, version_minor)

    if version_tuple not in validators:
        try:
            schema_json = _get_schema_json(v)
        except AttributeError:
            return None

        if current_minor < version_minor:
            # notebook from the future, relax all `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)

        validators[version_tuple] = Validator(schema_json)

    if relax_add_props:
        try:
            schema_json = _get_schema_json(v)
        except AttributeError:
            return None

        # this allows properties to be added for intermediate
        # representations while validating for all other kinds of errors
        schema_json = _relax_additional_properties(schema_json)

        validators[version_tuple] = Validator(schema_json)
    return validators[version_tuple]
Example #50
 def _postprocessor_class_changed(self, name, old, new):
     if new.lower() in self.postprocessor_aliases:
         new = self.postprocessor_aliases[new.lower()]
     if new:
         self.postprocessor_factory = import_item(new)
Example #51
 def _writer_class_changed(self, name, old, new):
     if new.lower() in self.writer_aliases:
         new = self.writer_aliases[new.lower()]
     self.writer_factory = import_item(new)
Example #52
 def _post_save_hook_changed(self, name, old, new):
     if new and isinstance(new, string_types):
         self.post_save_hook = import_item(self.post_save_hook)
     elif new:
         if not callable(new):
             raise TraitError("post_save_hook must be callable")
Example #53
 def _lexer_default(self):
     lexer_class = import_item(self.lexer_class)
     return lexer_class()
Example #54
 def _lexer_class_changed(self, name, old, new):
     lexer_class = import_item(new)
     self.lexer = lexer_class()
Example #55
    def init_schedulers(self):
        children = self.children
        mq = import_item(str(self.mq_class))
        
        f = self.factory
        ident = f.session.bsession
        # disambiguate url, in case of *
        monitor_url = disambiguate_url(f.monitor_url)
        # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
        # IOPub relay (in a Process)
        q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
        q.bind_in(f.client_url('iopub'))
        q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
        q.bind_out(f.engine_url('iopub'))
        q.setsockopt_out(zmq.SUBSCRIBE, b'')
        q.connect_mon(monitor_url)
        q.daemon=True
        children.append(q)

        # Multiplexer Queue (in a Process)
        q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
        
        q.bind_in(f.client_url('mux'))
        q.setsockopt_in(zmq.IDENTITY, b'mux_in')
        q.bind_out(f.engine_url('mux'))
        q.setsockopt_out(zmq.IDENTITY, b'mux_out')
        q.connect_mon(monitor_url)
        q.daemon=True
        children.append(q)

        # Control Queue (in a Process)
        q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
        q.bind_in(f.client_url('control'))
        q.setsockopt_in(zmq.IDENTITY, b'control_in')
        q.bind_out(f.engine_url('control'))
        q.setsockopt_out(zmq.IDENTITY, b'control_out')
        q.connect_mon(monitor_url)
        q.daemon=True
        children.append(q)
        if 'TaskScheduler.scheme_name' in self.config:
            scheme = self.config.TaskScheduler.scheme_name
        else:
            scheme = TaskScheduler.scheme_name.get_default_value()
        # Task Queue (in a Process)
        if scheme == 'pure':
            self.log.warn("task::using pure DEALER Task scheduler")
            q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
            # q.setsockopt_out(zmq.HWM, hub.hwm)
            q.bind_in(f.client_url('task'))
            q.setsockopt_in(zmq.IDENTITY, b'task_in')
            q.bind_out(f.engine_url('task'))
            q.setsockopt_out(zmq.IDENTITY, b'task_out')
            q.connect_mon(monitor_url)
            q.daemon=True
            children.append(q)
        elif scheme == 'none':
            self.log.warn("task::using no Task scheduler")

        else:
            self.log.info("task::using Python %s Task scheduler"%scheme)
            sargs = (f.client_url('task'), f.engine_url('task'),
                    monitor_url, disambiguate_url(f.client_url('notification')),
                    disambiguate_url(f.client_url('registration')),
            )
            kwargs = dict(logname='scheduler', loglevel=self.log_level,
                            log_url = self.log_url, config=dict(self.config))
            if 'Process' in self.mq_class:
                # run the Python scheduler in a Process
                q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
                q.daemon=True
                children.append(q)
            else:
                # single-threaded Controller
                kwargs['in_thread'] = True
                launch_scheduler(*sargs, **kwargs)
        
        # set unlimited HWM for all relay devices
        if hasattr(zmq, 'SNDHWM'):
            q = children[0]
            q.setsockopt_in(zmq.RCVHWM, 0)
            q.setsockopt_out(zmq.SNDHWM, 0)
            
            for q in children[1:]:
                if not hasattr(q, 'setsockopt_in'):
                    continue
                q.setsockopt_in(zmq.SNDHWM, 0)
                q.setsockopt_in(zmq.RCVHWM, 0)
                q.setsockopt_out(zmq.SNDHWM, 0)
                q.setsockopt_out(zmq.RCVHWM, 0)
                q.setsockopt_mon(zmq.SNDHWM, 0)
Example #56
 def handle_comm_opened(comm, msg):
     """Static method, called when a widget is constructed."""
     widget_class = import_item(str(msg['content']['data']['widget_class']))
     widget = widget_class(comm=comm)
Example #57
 def _client_factory_default(self):
     return import_item(self.client_class)
Example #58
 def _client_class_changed(self, name, old, new):
     self.client_factory = import_item(str(new))
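The client_class trait follows the same convention: a dotted path resolved into the client factory. A sketch assuming jupyter_client's KernelManager, with its default value written out explicitly; the client is not connected until a kernel is actually started:

from jupyter_client import KernelManager

km = KernelManager(
    client_class="jupyter_client.blocking.BlockingKernelClient")  # the default
client = km.client()   # an instance of the class import_item resolved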