Code example #1
def writes(nb, version=NO_CONVERT, **kwargs):
    """Write a notebook to a string in a given format in the given nbformat version.

    Any notebook format errors will be logged.

    Parameters
    ----------
    nb : NotebookNode
        The notebook to write.
    version : int, optional
        The nbformat version to write.
        If unspecified, or specified as nbformat.NO_CONVERT,
        the notebook's own version will be used and no conversion performed.

    Returns
    -------
    s : unicode
        The notebook as a JSON string.
    """
    if version is not NO_CONVERT:
        nb = convert(nb, version)
    else:
        version, _ = reader.get_version(nb)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return versions[version].writes_json(nb, **kwargs)
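For context, a minimal usage sketch of the writes/reads pair shown in these examples, assuming the public nbformat API (nbformat.v4.new_notebook, new_code_cell, writes, reads, NO_CONVERT):

import nbformat
from nbformat import v4

# Build a small v4 notebook with one code cell.
nb = v4.new_notebook(cells=[v4.new_code_cell("print('hello')")])

# Serialize without conversion; NO_CONVERT keeps the notebook's own version.
s = nbformat.writes(nb, version=nbformat.NO_CONVERT)

# Round-trip back into a NotebookNode, requesting format version 4.
nb2 = nbformat.reads(s, as_version=4)
assert nb2.cells[0].source == "print('hello')"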
Code example #2
File: coalescestreams.py, Project: 3kwa/nbconvert
 def wrappedfunc(nb, resources):
     get_logger().debug("Applying preprocessor: %s", function.__name__)
     for index, cell in enumerate(nb.cells):
         nb.cells[index], resources = function(cell, resources, index)
     return nb, resources
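This wrappedfunc closes over a function argument, so it only makes sense inside a decorator. A hedged sketch of what that enclosing decorator might look like (nbconvert's coalescestreams module uses this cell-preprocessor pattern; treat the exact names here as illustrative):

import functools
from traitlets.log import get_logger

def cell_preprocessor(function):
    """Wrap a per-cell function(cell, resources, index) into a
    notebook-level preprocessor(nb, resources)."""
    @functools.wraps(function)
    def wrappedfunc(nb, resources):
        get_logger().debug("Applying preprocessor: %s", function.__name__)
        for index, cell in enumerate(nb.cells):
            nb.cells[index], resources = function(cell, resources, index)
        return nb, resources
    return wrappedfunc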
Code example #3
File: current.py, Project: BarnetteME1/DnD-stuff
def writes(nb, format='DEPRECATED', version=current_nbformat, **kwargs):
    """Write a notebook to a string in a given format in the current nbformat version.

    This function always writes the notebook in the current nbformat version.

    Parameters
    ----------
    nb : NotebookNode
        The notebook to write.
    version : int
        The nbformat version to write.
        Used for downgrading notebooks.

    Returns
    -------
    s : unicode
        The notebook string.
    """
    if format not in {'DEPRECATED', 'json'}:
        _warn_format()
    nb = convert(nb, version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return versions[version].writes_json(nb, **kwargs)
Code example #4
File: __init__.py, Project: BarnetteME1/DnD-stuff
def reads(s, as_version, **kwargs):
    """Read a notebook from a string and return the NotebookNode object as the given version.
    
    The string can contain a notebook of any version.
    The notebook will be returned `as_version`, converting, if necessary.

    Notebook format errors will be logged.

    Parameters
    ----------
    s : unicode
        The raw unicode string to read the notebook from.
    as_version : int
        The version of the notebook format to return.
        The notebook will be converted, if necessary.
        Pass nbformat.NO_CONVERT to prevent conversion.

    Returns
    -------
    nb : NotebookNode
        The notebook that was read.
    """
    nb = reader.reads(s, **kwargs)
    if as_version is not NO_CONVERT:
        nb = convert(nb, as_version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return nb
Code example #5
def writes(nb, version=NO_CONVERT, capture_validation_error=None, **kwargs):
    """Write a notebook to a string in a given format in the given nbformat version.

    Any notebook format errors will be logged.

    Parameters
    ----------
    nb : NotebookNode
        The notebook to write.
    version : int, optional
        The nbformat version to write.
        If unspecified, or specified as nbformat.NO_CONVERT,
        the notebook's own version will be used and no conversion performed.
    capture_validation_error : dict, optional
        If provided, a key of "ValidationError" with a
        value of the ValidationError instance will be added
        to the dictionary.

    Returns
    -------
    s : unicode
        The notebook as a JSON string.
    """
    if version is not NO_CONVERT:
        nb = convert(nb, version)
    else:
        version, _ = reader.get_version(nb)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
        if isinstance(capture_validation_error, dict):
            capture_validation_error['ValidationError'] = e
    return versions[version].writes_json(nb, **kwargs)
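A usage sketch for the capture_validation_error argument, assuming an nbformat release that supports it (as the function above does); the deliberately broken execution_count is just a convenient way to trigger a schema error:

import nbformat
from nbformat import v4

cell = v4.new_code_cell("print('hi')")
cell.execution_count = "not an int"   # violates the v4 schema (int or null expected)
nb = v4.new_notebook(cells=[cell])

errors = {}
s = nbformat.writes(nb, version=nbformat.NO_CONVERT, capture_validation_error=errors)
if "ValidationError" in errors:
    print("notebook written anyway, but invalid:", errors["ValidationError"])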
Code example #6
def reads(s, format='DEPRECATED', version=current_nbformat, **kwargs):
    """Read a notebook from a string and return the NotebookNode object.

    This function properly handles notebooks of any version. The notebook
    returned will always be in the current version's format.

    Parameters
    ----------
    s : unicode
        The raw unicode string to read the notebook from.

    Returns
    -------
    nb : NotebookNode
        The notebook that was read.
    """
    if format not in {'DEPRECATED', 'json'}:
        _warn_format()
    nb = reader_reads(s, **kwargs)
    nb = convert(nb, version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return nb
Code example #7
def writes(nb, format='DEPRECATED', version=current_nbformat, **kwargs):
    """Write a notebook to a string in a given format in the current nbformat version.

    This function always writes the notebook in the current nbformat version.

    Parameters
    ----------
    nb : NotebookNode
        The notebook to write.
    version : int
        The nbformat version to write.
        Used for downgrading notebooks.

    Returns
    -------
    s : unicode
        The notebook string.
    """
    if format not in {'DEPRECATED', 'json'}:
        _warn_format()
    nb = convert(nb, version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return versions[version].writes_json(nb, **kwargs)
Code example #8
File: convert.py, Project: Gabodp/TorreInternshipTest
def _warn_if_invalid(nb, version):
    """Log validation errors, if there are any."""
    from nbformat import validate, ValidationError
    try:
        validate(nb, version=version)
    except ValidationError as e:
        get_logger().error("Notebook JSON is not valid v%i: %s", version, e)
Code example #9
File: __init__.py, Project: BarnetteME1/DnD-stuff
def writes(nb, version=NO_CONVERT, **kwargs):
    """Write a notebook to a string in a given format in the given nbformat version.

    Any notebook format errors will be logged.

    Parameters
    ----------
    nb : NotebookNode
        The notebook to write.
    version : int, optional
        The nbformat version to write.
        If unspecified, or specified as nbformat.NO_CONVERT,
        the notebook's own version will be used and no conversion performed.

    Returns
    -------
    s : unicode
        The notebook as a JSON string.
    """
    if version is not NO_CONVERT:
        nb = convert(nb, version)
    else:
        version, _ = reader.get_version(nb)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return versions[version].writes_json(nb, **kwargs)
Code example #10
File: current.py, Project: BarnetteME1/DnD-stuff
def reads(s, format='DEPRECATED', version=current_nbformat, **kwargs):
    """Read a notebook from a string and return the NotebookNode object.

    This function properly handles notebooks of any version. The notebook
    returned will always be in the current version's format.

    Parameters
    ----------
    s : unicode
        The raw unicode string to read the notebook from.

    Returns
    -------
    nb : NotebookNode
        The notebook that was read.
    """
    if format not in {'DEPRECATED', 'json'}:
        _warn_format()
    nb = reader_reads(s, **kwargs)
    nb = convert(nb, version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return nb
Code example #11
def reads(s, as_version, **kwargs):
    """Read a notebook from a string and return the NotebookNode object as the given version.

    The string can contain a notebook of any version.
    The notebook will be returned `as_version`, converting, if necessary.

    Notebook format errors will be logged.

    Parameters
    ----------
    s : unicode
        The raw unicode string to read the notebook from.
    as_version : int
        The version of the notebook format to return.
        The notebook will be converted, if necessary.
        Pass nbformat.NO_CONVERT to prevent conversion.

    Returns
    -------
    nb : NotebookNode
        The notebook that was read.
    """
    nb = reader.reads(s, **kwargs)
    if as_version is not NO_CONVERT:
        nb = convert(nb, as_version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
    return nb
Code example #12
def _warn_if_invalid(nb, version):
    """Log validation errors, if there are any."""
    from jupyter_nbformat import validate, ValidationError
    try:
        validate(nb, version=version)
    except ValidationError as e:
        get_logger().error("Notebook JSON is not valid v%i: %s", version, e)
Code example #13
File: parentpoller.py, Project: gokhansolak/yap-6.3
 def run(self):
     # We cannot use os.waitpid because it works only for child processes.
     from errno import EINTR
     while True:
         try:
             if os.getppid() == 1:
                 get_logger().warning("Parent appears to have exited, shutting down.")
                 os._exit(1)
             time.sleep(1.0)
         except OSError as e:
             if e.errno == EINTR:
                 continue
             raise
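For reference, a sketch of how a poller like this is typically started, assuming the run() above belongs to ipykernel's ParentPollerUnix (a daemon Thread subclass):

from ipykernel.parentpoller import ParentPollerUnix

# Start the background watchdog; it calls os._exit(1) if the parent process
# disappears (i.e. the kernel is re-parented to init and getppid() == 1).
poller = ParentPollerUnix()
poller.start()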
Code example #14
 def run(self):
     # We cannot use os.waitpid because it works only for child processes.
     from errno import EINTR
     while True:
         try:
             if os.getppid() == 1:
                 get_logger().warning("Parent appears to have exited, shutting down.")
                 os._exit(1)
             time.sleep(1.0)
         except OSError as e:
             if e.errno == EINTR:
                 continue
             raise
Code example #15
    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        self.pid = os.getpid()
        self._new_auth()
        if not self.key:
            get_logger().warning(
                "Message signing is disabled.  This is insecure and not recommended!"
            )
Code example #16
    def test_parent_logger(self):
        class Parent(LoggingConfigurable): pass
        class Child(LoggingConfigurable): pass
        log = get_logger().getChild("TestLoggingConfigurable")

        parent = Parent(log=log)
        child = Child(parent=parent)
        self.assertEqual(parent.log, log)
        self.assertEqual(child.log, log)

        parent = Parent()
        child = Child(parent=parent, log=log)
        self.assertEqual(parent.log, get_logger())
        self.assertEqual(child.log, log)
Code example #17
    def test_parent_logger(self):
        class Parent(LoggingConfigurable): pass
        class Child(LoggingConfigurable): pass
        log = get_logger().getChild("TestLoggingConfigurable")

        parent = Parent(log=log)
        child = Child(parent=parent)
        self.assertEqual(parent.log, log)
        self.assertEqual(child.log, log)

        parent = Parent()
        child = Child(parent=parent, log=log)
        self.assertEqual(parent.log, get_logger())
        self.assertEqual(child.log, log)
Code example #18
File: session.py, Project: ngoldbaum/jupyter_client
    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        self.pid = os.getpid()
        self._new_auth()
        if not self.key:
            get_logger().warning("Message signing is disabled.  This is insecure and not recommended!")
Code example #19
File: session.py, Project: takluyver/jupyter_protocol
 def __init__(self, key, signature_scheme='hmac-sha256'):
     self.key = key
     self.signature_scheme = signature_scheme
     self.session_id = new_id()
     self.username = os.environ.get('USER', 'username')
     if self.key:
         hash_name = self.signature_scheme.split('-', 1)[1]
         digest_mod = getattr(hashlib, hash_name)
         self.auth = hmac.HMAC(self.key, digestmod=digest_mod)
     else:
         self.auth = None
         get_logger().warning(
             "Message signing is disabled.  This is insecure and not recommended!"
         )
     self.digest_history = set()
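The 'hmac-HASH' scheme above maps directly onto hashlib. A standalone sketch of how such a signature is computed over serialized message parts in the Jupyter wire protocol (the key and message parts are made up for illustration):

import hashlib
import hmac

key = b"secret-key"
signature_scheme = "hmac-sha256"

# 'hmac-sha256' -> hashlib.sha256
digest_mod = getattr(hashlib, signature_scheme.split("-", 1)[1])
auth = hmac.HMAC(key, digestmod=digest_mod)

def sign(parts):
    """Update a per-message copy of the HMAC with each serialized part."""
    h = auth.copy()
    for part in parts:
        h.update(part)
    return h.hexdigest().encode("ascii")

signature = sign([b'{"msg_type": "kernel_info_request"}', b"{}", b"{}", b"{}"])
print(signature)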
Code example #20
File: util.py, Project: AminJamalzadeh/ipyparallel
 def terminate_children(sig, frame):
     log = get_logger()
     log.critical("Got signal %i, terminating children..."%sig)
     for child in children:
         child.terminate()
     
     sys.exit(sig != SIGINT)
Code example #21
def fetch_listings(logger):
    if not logger:
        from traitlets import log
        logger = log.get_logger()
    if len(ListingsHandler.blocked_extensions_uris) > 0:
        blocked_extensions = []
        for blocked_extensions_uri in ListingsHandler.blocked_extensions_uris:
            logger.info('Fetching blocked_extensions from {}'.format(ListingsHandler.blocked_extensions_uris))
            r = requests.request('GET', blocked_extensions_uri, **ListingsHandler.listings_request_opts)
            j = json.loads(r.text)
            for b in j['blocked_extensions']:
                blocked_extensions.append(b)
            ListingsHandler.blocked_extensions = blocked_extensions
    if len(ListingsHandler.allowed_extensions_uris) > 0:
        allowed_extensions = []
        for allowed_extensions_uri in ListingsHandler.allowed_extensions_uris:
            logger.info('Fetching allowed_extensions from {}'.format(ListingsHandler.allowed_extensions_uris))
            r = requests.request('GET', allowed_extensions_uri, **ListingsHandler.listings_request_opts)
            j = json.loads(r.text)
            for w in j['allowed_extensions']:
                allowed_extensions.append(w)
        ListingsHandler.allowed_extensions = allowed_extensions
    ListingsHandler.listings = json.dumps({
        'blocked_extensions_uris': list(ListingsHandler.blocked_extensions_uris),
        'allowed_extensions_uris': list(ListingsHandler.allowed_extensions_uris),
        'blocked_extensions': ListingsHandler.blocked_extensions,
        'allowed_extensions': ListingsHandler.allowed_extensions,
    })
Code example #22
def migrate_config(name, env):
    """Migrate a config file
    
    Includes substitutions for updated configurable names.
    """
    log = get_logger()
    src_base = pjoin('{profile}', 'ipython_{name}_config').format(name=name, **env)
    dst_base = pjoin('{jupyter_config}', 'jupyter_{name}_config').format(name=name, **env)
    loaders = {
        '.py': PyFileConfigLoader,
        '.json': JSONFileConfigLoader,
    }
    migrated = []
    for ext in ('.py', '.json'):
        src = src_base + ext
        dst = dst_base + ext
        if os.path.exists(src):
            cfg = loaders[ext](src).load_config()
            if cfg:
                if migrate_file(src, dst, substitutions=config_substitutions):
                    migrated.append(src)
            else:
                # don't migrate empty config files
                log.debug("Not migrating empty config file: %s" % src)
    return migrated
Code example #23
    def terminate_children(sig, frame):
        log = get_logger()
        log.critical("Got signal %i, terminating children..." % sig)
        for child in children:
            child.terminate()

        sys.exit(sig != SIGINT)
Code example #24
File: base.py, Project: stjguns1/nbconvert
def get_exporter(name):
    """Given an exporter name or import path, return a class ready to be instantiated
    
    Raises ValueError if exporter is not found
    """

    if name == 'ipynb':
        name = 'notebook'

    try:
        return entrypoints.get_single('nbconvert.exporters', name).load()
    except entrypoints.NoSuchEntryPoint:
        try:
            return entrypoints.get_single('nbconvert.exporters',
                                          name.lower()).load()
        except entrypoints.NoSuchEntryPoint:
            pass

    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = get_logger()
            log.error("Error importing %s" % name, exc_info=True)

    raise ExporterNameError('Unknown exporter "%s", did you mean one of: %s?' %
                            (name, ', '.join(get_export_names())))
Code example #25
File: comm.py, Project: vidartf/ipytunnel
    def __init__(self,
                 target_name='',
                 data=None,
                 metadata=None,
                 buffers=None,
                 **kwargs):
        self.primary = True  # Am I the primary or secondary Comm?
        self.target_name = target_name
        # requirejs module from which to load comm target
        self.target_module = kwargs.get('target_module', None)
        self.open_hook = None
        self._closed = True
        self._close_callback = None
        self._msg_callback = None
        try:
            self.kernel = kwargs['kernel']
        except KeyError:
            if Kernel.initialized():
                self.kernel = Kernel.instance()
            else:
                self.kernel = None
        try:
            self.comm_id = kwargs['comm_id']
        except KeyError:
            self.comm_id = uuid.uuid4().hex
        self.topic = kwargs.get('topic',
                                ('comm-%s' % self.comm_id).encode('ascii'))
        self.log = log.get_logger()

        if self.kernel:
            if self.primary:
                # I am primary, open my peer.
                self.open(data=data, metadata=metadata, buffers=buffers)
            else:
                self._closed = False
Code example #26
File: base.py, Project: jupyter/nbconvert
def get_exporter(name):
    """Given an exporter name or import path, return a class ready to be instantiated
    
    Raises ValueError if exporter is not found
    """
    
    if name == 'ipynb':
        name = 'notebook'

    try:
        return entrypoints.get_single('nbconvert.exporters', name).load()
    except entrypoints.NoSuchEntryPoint:
        try:
            return entrypoints.get_single('nbconvert.exporters', name.lower()).load()
        except entrypoints.NoSuchEntryPoint:
            pass
        
    if '.' in name:
        try:
            return import_item(name)
        except ImportError:
            log = get_logger()
            log.error("Error importing %s" % name, exc_info=True)

    raise ValueError('Unknown exporter "%s", did you mean one of: %s?'
                     % (name, ', '.join(get_export_names())))
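A usage sketch for get_exporter, assuming nbconvert is installed, exposes get_exporter at the package level, and that notebook.ipynb exists on disk:

from nbconvert import get_exporter

Exporter = get_exporter("html")          # an entrypoint name or a dotted import path
body, resources = Exporter().from_filename("notebook.ipynb")
with open("notebook.html", "w", encoding="utf-8") as f:
    f.write(body)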
Code example #27
File: migrate.py, Project: Gabodp/TorreInternshipTest
def migrate_config(name, env):
    """Migrate a config file
    
    Includes substitutions for updated configurable names.
    """
    log = get_logger()
    src_base = pjoin('{profile}', 'ipython_{name}_config').format(name=name,
                                                                  **env)
    dst_base = pjoin('{jupyter_config}',
                     'jupyter_{name}_config').format(name=name, **env)
    loaders = {
        '.py': PyFileConfigLoader,
        '.json': JSONFileConfigLoader,
    }
    migrated = []
    for ext in ('.py', '.json'):
        src = src_base + ext
        dst = dst_base + ext
        if os.path.exists(src):
            cfg = loaders[ext](src).load_config()
            if cfg:
                if migrate_file(src, dst, substitutions=config_substitutions):
                    migrated.append(src)
            else:
                # don't migrate empty config files
                log.debug("Not migrating empty config file: %s" % src)
    return migrated
Code example #28
    def run(self):

        try:
            from _winapi import WAIT_OBJECT_0, INFINITE
        except ImportError:
            from _subprocess import WAIT_OBJECT_0, INFINITE

        # Build the list of handle to listen on.
        handles = []
        if self.interrupt_handle:
            handles.append(self.interrupt_handle)
        if self.parent_handle:
            handles.append(self.parent_handle)
        arch = platform.architecture()[0]
        c_int = ctypes.c_int64 if arch.startswith('64') else ctypes.c_int

        # Listen forever.
        while True:
            result = ctypes.windll.kernel32.WaitForMultipleObjects(
                len(handles),                            # nCount
                (c_int * len(handles))(*handles),        # lpHandles
                False,                                   # bWaitAll
                INFINITE)                                # dwMilliseconds

            if WAIT_OBJECT_0 <= result < len(handles):
                handle = handles[result - WAIT_OBJECT_0]

                if handle == self.interrupt_handle:
                    # check if signal handler is callable
                    # to avoid 'int not callable' error (Python issue #23395)
                    if callable(signal.getsignal(signal.SIGINT)):
                        interrupt_main()

                elif handle == self.parent_handle:
                    get_logger().warning("Parent appears to have exited, "
                                         "shutting down.")
                    if self.exit_call:
                        self.exit_call()
                    else:
                        os._exit(1)
            elif result < 0:
                # wait failed, just give up and stop polling.
                warnings.warn("""Parent poll failed.  If the frontend dies,
                the kernel may be left running.  Please let us know
                about your system (bitness, Python, etc.) at
                [email protected]""")
                return
Code example #29
    def __init__(self, namespace, **kwargs):
        self.schema_mgr = SchemaManager.instance()
        if not self.schema_mgr.is_valid_namespace(namespace):
            raise ValueError("Namespace '{}' is not in the list of valid namespaces: {}".
                             format(namespace, self.schema_mgr.get_namespaces()))

        self.namespace = namespace
        self.log = log.get_logger()
Code example #30
File: parentpoller.py, Project: gokhansolak/yap-6.3
    def run(self):
        """ Run the poll loop. This method never returns.
        """
        try:
            from _winapi import WAIT_OBJECT_0, INFINITE
        except ImportError:
            from _subprocess import WAIT_OBJECT_0, INFINITE

        # Build the list of handle to listen on.
        handles = []
        if self.interrupt_handle:
            handles.append(self.interrupt_handle)
        if self.parent_handle:
            handles.append(self.parent_handle)
        arch = platform.architecture()[0]
        c_int = ctypes.c_int64 if arch.startswith('64') else ctypes.c_int

        # Listen forever.
        while True:
            result = ctypes.windll.kernel32.WaitForMultipleObjects(
                len(handles),                            # nCount
                (c_int * len(handles))(*handles),        # lpHandles
                False,                                   # bWaitAll
                INFINITE)                                # dwMilliseconds

            if WAIT_OBJECT_0 <= result < len(handles):
                handle = handles[result - WAIT_OBJECT_0]

                if handle == self.interrupt_handle:
                    # check if signal handler is callable
                    # to avoid 'int not callable' error (Python issue #23395)
                    if callable(signal.getsignal(signal.SIGINT)):
                        interrupt_main()

                elif handle == self.parent_handle:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
            elif result < 0:
                # wait failed, just give up and stop polling.
                warnings.warn("""Parent poll failed.  If the frontend dies,
                the kernel may be left running.  Please let us know
                about your system (bitness, Python, etc.) at
                [email protected]""")
                return
Code example #31
 def __init__(self):
     self.log = log.get_logger()
     # get set of registered runtimes
     self._runtime_processor_names = set()
     for processor in entrypoints.get_group_all(
             "elyra.pipeline.processors"):
         # load the names of the runtime processors (skip 'local')
         if processor.name == "local":
             continue
         self._runtime_processor_names.add(processor.name)
Code example #32
    def test_parent_not_logging_configurable(self):
        class Parent(Configurable):
            pass

        class Child(LoggingConfigurable):
            pass

        parent = Parent()
        child = Child(parent=parent)
        self.assertEqual(child.log, get_logger())
Code example #33
File: migrate.py, Project: Gabodp/TorreInternshipTest
def migrate_one(src, dst):
    """Migrate one item
    
    dispatches to migrate_dir/_file
    """
    log = get_logger()
    if os.path.isfile(src):
        return migrate_file(src, dst)
    elif os.path.isdir(src):
        return migrate_dir(src, dst)
    else:
        log.debug("Nothing to migrate for %s" % src)
        return False
Code example #34
def get_export_names(config=get_config()):
    """Return a list of the currently supported export targets

    Exporters can be found in external packages by registering
    them as an nbconvert.exporter entrypoint.
    """
    exporters = sorted(entrypoints.get_group_named('nbconvert.exporters'))
    if os.environ.get("NBCONVERT_DISABLE_CONFIG_EXPORTERS"):
        get_logger().info(
            "Config exporter loading disabled, no additional exporters will be automatically included."
        )
        return exporters

    enabled_exporters = []
    for exporter_name in exporters:
        try:
            e = get_exporter(exporter_name)(config=config)
            if e.enabled:
                enabled_exporters.append(exporter_name)
        except (ExporterDisabledError, ValueError):
            pass
    return enabled_exporters
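A quick usage sketch for get_export_names, assuming nbconvert exposes it at the package level; the output varies with the installed exporters and configuration:

from nbconvert import get_export_names

# Lists only exporters whose entrypoints load and report enabled=True.
print(get_export_names())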
Code example #35
def migrate_one(src, dst):
    """Migrate one item
    
    dispatches to migrate_dir/_file
    """
    log = get_logger()
    if os.path.isfile(src):
        return migrate_file(src, dst)
    elif os.path.isdir(src):
        return migrate_dir(src, dst)
    else:
        log.debug("Nothing to migrate for %s" % src)
        return False
Code example #36
def migrate_static_custom(src, dst):
    """Migrate non-empty custom.js,css from src to dst
    
    src, dst are 'custom' directories containing custom.{js,css}
    """
    log = get_logger()
    migrated = False
    
    custom_js = pjoin(src, 'custom.js')
    custom_css = pjoin(src, 'custom.css')
    # check if custom_js is empty:
    custom_js_empty = True
    if os.path.isfile(custom_js):
        with open(custom_js) as f:
            js = f.read().strip()
            for line in js.splitlines():
                if not (
                    line.isspace()
                    or line.strip().startswith(('/*', '*', '//'))
                ):
                    custom_js_empty = False
                    break
    
    # check if custom_css is empty:
    custom_css_empty = True
    if os.path.isfile(custom_css):
        with open(custom_css) as f:
            css = f.read().strip()
            custom_css_empty = css.startswith('/*') and css.endswith('*/')
    
    if custom_js_empty:
        log.debug("Ignoring empty %s" % custom_js)
    if custom_css_empty:
        log.debug("Ignoring empty %s" % custom_css)
    
    if custom_js_empty and custom_css_empty:
        # nothing to migrate
        return False
    ensure_dir_exists(dst)
    
    if not custom_js_empty or not custom_css_empty:
        ensure_dir_exists(dst)
    
    if not custom_js_empty:
        if migrate_file(custom_js, pjoin(dst, 'custom.js')):
            migrated = True
    if not custom_css_empty:
        if migrate_file(custom_css, pjoin(dst, 'custom.css')):
            migrated = True
    
    return migrated
Code example #37
File: migrate.py, Project: Gabodp/TorreInternshipTest
def migrate_static_custom(src, dst):
    """Migrate non-empty custom.js,css from src to dst
    
    src, dst are 'custom' directories containing custom.{js,css}
    """
    log = get_logger()
    migrated = False

    custom_js = pjoin(src, 'custom.js')
    custom_css = pjoin(src, 'custom.css')
    # check if custom_js is empty:
    custom_js_empty = True
    if os.path.isfile(custom_js):
        with open(custom_js) as f:
            js = f.read().strip()
            for line in js.splitlines():
                if not (line.isspace() or line.strip().startswith(
                    ('/*', '*', '//'))):
                    custom_js_empty = False
                    break

    # check if custom_css is empty:
    custom_css_empty = True
    if os.path.isfile(custom_css):
        with open(custom_css) as f:
            css = f.read().strip()
            custom_css_empty = css.startswith('/*') and css.endswith('*/')

    if custom_js_empty:
        log.debug("Ignoring empty %s" % custom_js)
    if custom_css_empty:
        log.debug("Ignoring empty %s" % custom_css)

    if custom_js_empty and custom_css_empty:
        # nothing to migrate
        return False
    ensure_dir_exists(dst)

    if not custom_js_empty or not custom_css_empty:
        ensure_dir_exists(dst)

    if not custom_js_empty:
        if migrate_file(custom_js, pjoin(dst, 'custom.js')):
            migrated = True
    if not custom_css_empty:
        if migrate_file(custom_css, pjoin(dst, 'custom.css')):
            migrated = True

    return migrated
Code example #38
File: canning.py, Project: kapukiran94/ipyparallel
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping"""
    log = get_logger()
    log.debug("Importing canning map")
    for key, value in list(mapping.items()):
        if isinstance(key, string_types):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("canning class not importable: %r", key, exc_info=True)
                mapping.pop(key)
            else:
                mapping[cls] = mapping.pop(key)
Code example #39
def reads(s, as_version, capture_validation_error=None, **kwargs):
    """Read a notebook from a string and return the NotebookNode object as the given version.

    The string can contain a notebook of any version.
    The notebook will be returned `as_version`, converting, if necessary.

    Notebook format errors will be logged.

    Parameters
    ----------
    s : unicode
        The raw unicode string to read the notebook from.
    as_version : int
        The version of the notebook format to return.
        The notebook will be converted, if necessary.
        Pass nbformat.NO_CONVERT to prevent conversion.
    capture_validation_error : dict, optional
        If provided, a key of "ValidationError" with a
        value of the ValidationError instance will be added
        to the dictionary.

    Returns
    -------
    nb : NotebookNode
        The notebook that was read.
    """
    nb = reader.reads(s, **kwargs)
    if as_version is not NO_CONVERT:
        nb = convert(nb, as_version)
    try:
        validate(nb)
    except ValidationError as e:
        get_logger().error("Notebook JSON is invalid: %s", e)
        if isinstance(capture_validation_error, dict):
            capture_validation_error['ValidationError'] = e
    return nb
Code example #40
def migrate_dir(src, dst):
    """Migrate a directory from src to dst"""
    log = get_logger()
    if not os.listdir(src):
        log.debug("No files in %s" % src)
        return False
    if os.path.exists(dst):
        if os.listdir(dst):
            # already exists, non-empty
            log.debug("%s already exists" % dst)
            return False
        else:
            os.rmdir(dst)
    log.info("Copying %s -> %s" % (src, dst))
    ensure_dir_exists(os.path.dirname(dst))
    shutil.copytree(src, dst, symlinks=True)
    return True
Code example #41
File: migrate.py, Project: Gabodp/TorreInternshipTest
def migrate_dir(src, dst):
    """Migrate a directory from src to dst"""
    log = get_logger()
    if not os.listdir(src):
        log.debug("No files in %s" % src)
        return False
    if os.path.exists(dst):
        if os.listdir(dst):
            # already exists, non-empty
            log.debug("%s already exists" % dst)
            return False
        else:
            os.rmdir(dst)
    log.info("Copying %s -> %s" % (src, dst))
    ensure_dir_exists(os.path.dirname(dst))
    shutil.copytree(src, dst, symlinks=True)
    return True
Code example #42
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping
    
    """
    log = get_logger()
    log.debug("Importing canning map")
    for key,value in list(mapping.items()):
        if isinstance(key, string_types):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("canning class not importable: %r", key, exc_info=True)
                mapping.pop(key)
            else:
                mapping[cls] = mapping.pop(key)
Code example #43
def get_exporter(name, config=get_config()):
    """Given an exporter name or import path, return a class ready to be instantiated

    Raises ExporterNameError if the exporter is not found, or ExporterDisabledError if it is not enabled
    """

    if name == 'ipynb':
        name = 'notebook'

    try:
        exporter = entrypoints.get_single('nbconvert.exporters', name).load()
        if getattr(exporter(config=config), 'enabled', True):
            return exporter
        else:
            raise ExporterDisabledError(
                'Exporter "%s" disabled in configuration' % (name))
    except entrypoints.NoSuchEntryPoint:
        try:
            exporter = entrypoints.get_single('nbconvert.exporters',
                                              name.lower()).load()
            if getattr(exporter(config=config), 'enabled', True):
                return exporter
            else:
                raise ExporterDisabledError(
                    'Exporter "%s" disabled in configuration' % (name))
        except entrypoints.NoSuchEntryPoint:
            pass

    if '.' in name:
        try:
            exporter = import_item(name)
            if getattr(exporter(config=config), 'enabled', True):
                return exporter
            else:
                raise ExporterDisabledError(
                    'Exporter "%s" disabled in configuration' % (name))
        except ImportError:
            log = get_logger()
            log.error("Error importing %s" % name, exc_info=True)

    raise ExporterNameError('Unknown exporter "%s", did you mean one of: %s?' %
                            (name, ', '.join(get_export_names())))
Code example #44
def new_kernel_id(**kwargs):
    """
    This method provides a mechanism by which clients can specify a kernel's id.  In this case
    that mechanism is via the per-kernel environment variable: KERNEL_ID.  If specified, its value
    will be validated and returned, otherwise the result from the provided method is returned.

    NOTE: This method exists in jupyter_client.multikernelmanager.py for releases > 5.2.3.  If you
    find that this method is not getting invoked, then you likely need to update the version of
    jupyter_client.  The Enterprise Gateway dependency will be updated once new releases of
    jupyter_client are more prevalent.

    Returns
    -------
    kernel_id : str
        The uuid string to associate with the new kernel
    """
    log = kwargs.pop("log", None) or traitlets_log.get_logger()
    kernel_id_fn = kwargs.pop("kernel_id_fn",
                              None) or (lambda: unicode_type(uuid.uuid4()))

    env = kwargs.get('env')
    if env and env.get(
            'KERNEL_ID'):  # If there's a KERNEL_ID in the env, check it out
        # convert string back to UUID - validating string in the process.
        str_kernel_id = env.get('KERNEL_ID')
        try:
            str_v4_kernel_id = str(uuid.UUID(str_kernel_id, version=4))
            if str_kernel_id != str_v4_kernel_id:  # Given string is not uuid v4 compliant
                raise ValueError("value is not uuid v4 compliant")
        except ValueError as ve:
            log.error(
                "Invalid v4 UUID value detected in ['env']['KERNEL_ID']: '{}'!  Error: {}"
                .format(str_kernel_id, ve))
            raise ve
        # user-provided id is valid, use it
        kernel_id = unicode_type(str_kernel_id)
        log.debug("Using user-provided kernel_id: {}".format(kernel_id))
    else:
        kernel_id = kernel_id_fn(**kwargs)

    return kernel_id
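A usage sketch for new_kernel_id as defined above. The values are hypothetical, and the function's module is assumed to provide its other imports (uuid, unicode_type, traitlets_log):

import uuid

# Client supplies the id via the per-kernel env; it must be a v4 UUID string.
env = {"KERNEL_ID": str(uuid.uuid4())}
kernel_id = new_kernel_id(env=env)        # returns the validated KERNEL_ID

# Without KERNEL_ID, the provided kernel_id_fn is used to generate one.
kernel_id = new_kernel_id(kernel_id_fn=lambda **kwargs: str(uuid.uuid4()), env={})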
Code example #45
def migrate_file(src, dst, substitutions=None):
    """Migrate a single file from src to dst
    
    substitutions is an optional dict of {regex: replacement} for performing replacements on the file.
    """
    log = get_logger()
    if os.path.exists(dst):
        # already exists
        log.debug("%s already exists" % dst)
        return False
    log.info("Copying %s -> %s" % (src, dst))
    ensure_dir_exists(os.path.dirname(dst))
    shutil.copy(src, dst)
    if substitutions:
        with open(dst) as f:
            text = f.read()
        for pat, replacement in substitutions.items():
            text = pat.sub(replacement, text)
        with open(dst, 'w') as f:
            f.write(text)
    return True
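A sketch of the substitutions mapping the docstring describes: compiled regexes mapped to replacement strings. The patterns and paths below are made up for illustration; the real config_substitutions live in the migrate module itself.

import re

substitutions = {
    re.compile(r"\bIPythonQtConsoleApp\b"): "JupyterQtConsoleApp",
    re.compile(r"\bIPythonConsoleApp\b"): "JupyterConsoleApp",
}

# Copies the file and applies each regex -> replacement pair to the copy.
migrated = migrate_file(
    "/path/to/ipython_config.py",
    "/path/to/jupyter_config.py",
    substitutions=substitutions,
)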
Code example #46
    def __init__(self, **kwargs):
        self.kernel = None
        self.lifecycle_manager = None
        self.response_address = None
        self.sigint_value = None
        self.port_range = None
        self.user_overrides = {}
        self.restarting = False  # need to track whether we're in a restart situation or not
        self.log = get_logger()
        self.kernel_spec = kwargs.get('kernelspec')
        self.lifecycle_info = kwargs.get('lifecycle_info')
        self.cwd = kwargs.get('cwd')
        self.app_config = kwargs.get('app_config', {})
        self.provider_config = kwargs.get('provider_config', {})
        self.launch_params = kwargs.get('launch_params', {})
        self.env = self.kernel_spec.env.copy()  # Seed env from kernelspec
        self.env.update(self._capture_user_overrides(kwargs.get('env', {}), self.launch_params.get('env', {})))

        self.kernel_id = RemoteKernelManager.get_kernel_id(self.env)
        self.kernel_username = RemoteKernelManager.get_kernel_username(self.env)
        self.shutdown_wait_time = 5.0  # TODO - handle this better
Code example #47
File: migrate.py, Project: Gabodp/TorreInternshipTest
def migrate_file(src, dst, substitutions=None):
    """Migrate a single file from src to dst
    
    substitutions is an optional dict of {regex: replacement} for performing replacements on the file.
    """
    log = get_logger()
    if os.path.exists(dst):
        # already exists
        log.debug("%s already exists" % dst)
        return False
    log.info("Copying %s -> %s" % (src, dst))
    ensure_dir_exists(os.path.dirname(dst))
    shutil.copy(src, dst)
    if substitutions:
        with open(dst) as f:
            text = f.read()
        for pat, replacement in substitutions.items():
            text = pat.sub(replacement, text)
        with open(dst, 'w') as f:
            f.write(text)
    return True
Code example #48
def fetch_listings(logger):
    """Fetch the listings for the extension manager."""
    if not logger:
        from traitlets import log

        logger = log.get_logger()
    if len(ListingsHandler.blocked_extensions_uris) > 0:
        blocked_extensions = []
        for blocked_extensions_uri in ListingsHandler.blocked_extensions_uris:
            logger.info("Fetching blocked_extensions from {}".format(
                ListingsHandler.blocked_extensions_uris))
            r = requests.request("GET", blocked_extensions_uri,
                                 **ListingsHandler.listings_request_opts)
            j = json.loads(r.text)
            for b in j["blocked_extensions"]:
                blocked_extensions.append(b)
            ListingsHandler.blocked_extensions = blocked_extensions
    if len(ListingsHandler.allowed_extensions_uris) > 0:
        allowed_extensions = []
        for allowed_extensions_uri in ListingsHandler.allowed_extensions_uris:
            logger.info("Fetching allowed_extensions from {}".format(
                ListingsHandler.allowed_extensions_uris))
            r = requests.request("GET", allowed_extensions_uri,
                                 **ListingsHandler.listings_request_opts)
            j = json.loads(r.text)
            for w in j["allowed_extensions"]:
                allowed_extensions.append(w)
        ListingsHandler.allowed_extensions = allowed_extensions
    ListingsHandler.listings = json.dumps({
        "blocked_extensions_uris":
        list(ListingsHandler.blocked_extensions_uris),
        "allowed_extensions_uris":
        list(ListingsHandler.allowed_extensions_uris),
        "blocked_extensions":
        ListingsHandler.blocked_extensions,
        "allowed_extensions":
        ListingsHandler.allowed_extensions,
    })
Code example #49
File: session.py, Project: WesternStar/jupyter_client
    def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
             buffers=None, track=False, header=None, metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/deserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_or_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.


        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
            buffers = buffers or msg.get('buffers', [])
        else:
            msg = self.msg(msg_or_type, content=content, parent=parent,
                           header=header, metadata=metadata)
        if not os.getpid() == self.pid:
            get_logger().warn("WARNING: attempted to send message from fork\n%s",
                msg
            )
            return
        buffers = [] if buffers is None else buffers
        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([ len(s) for s in to_send ])
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg
Code example #50
 def _log_default(self):
     from traitlets.log import get_logger
     return get_logger()
Code example #51
 def test_parent_not_logging_configurable(self):
     class Parent(Configurable): pass
     class Child(LoggingConfigurable): pass
     parent = Parent()
     child = Child(parent=parent)
     self.assertEqual(child.log, get_logger())
Code example #52
File: launcher.py, Project: jupyter/jupyter_client
def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
                  independent=False, cwd=None, **kw):
    """ Launches a localhost kernel, binding to the specified ports.

    Parameters
    ----------
    cmd : Popen list,
        The command sequence (as passed to subprocess.Popen) that launches a kernel entry point.

    stdin, stdout, stderr : optional (default None)
        Standards streams, as defined in subprocess.Popen.

    env: dict, optional
        Environment variables passed to the kernel

    independent : bool, optional (default False)
        If set, the kernel process is guaranteed to survive if this process
        dies. If not set, an effort is made to ensure that the kernel is killed
        when this process dies. Note that in this case it is still good practice
        to kill kernels manually before exiting.

    cwd : path, optional
        The working dir of the kernel process (default: cwd of this process).

    **kw: optional
        Additional arguments for Popen

    Returns
    -------

    Popen instance for the kernel subprocess
    """

    # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
    # are invalid. Unfortunately, there is in general no way to detect whether
    # they are valid.  The following two blocks redirect them to (temporary)
    # pipes in certain important cases.

    # If this process has been backgrounded, our stdin is invalid. Since there
    # is no compelling reason for the kernel to inherit our stdin anyway, we'll
    # play it safe and always redirect.
    redirect_in = True
    _stdin = PIPE if stdin is None else stdin

    # If this process is running on pythonw, we know that stdin, stdout, and
    # stderr are all invalid.
    redirect_out = sys.executable.endswith('pythonw.exe')
    if redirect_out:
        blackhole = open(os.devnull, 'w')
        _stdout = blackhole if stdout is None else stdout
        _stderr = blackhole if stderr is None else stderr
    else:
        _stdout, _stderr = stdout, stderr

    env = env if (env is not None) else os.environ.copy()

    encoding = getdefaultencoding(prefer_stream=False)
    kwargs = kw.copy()
    main_args = dict(
        stdin=_stdin,
        stdout=_stdout,
        stderr=_stderr,
        cwd=cwd,
        env=env,
    )
    kwargs.update(main_args)

    # Spawn a kernel.
    if sys.platform == 'win32':
        # Popen on Python 2 on Windows cannot handle unicode args or cwd
        cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
        if cwd:
            cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
            kwargs['cwd'] = cwd

        from .win_interrupt import create_interrupt_event
        # Create a Win32 event for interrupting the kernel
        # and store it in an environment variable.
        interrupt_event = create_interrupt_event()
        env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
        # deprecated old env name:
        env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]

        try:
            from _winapi import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        except:
            from _subprocess import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP

        # create a handle on the parent to be inherited
        if independent:
            kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
        else:
            pid = GetCurrentProcess()
            handle = DuplicateHandle(pid, pid, pid, 0,
                                     True, # Inheritable by new processes.
                                     DUPLICATE_SAME_ACCESS)
            env['JPY_PARENT_PID'] = str(int(handle))

        # Prevent creating new console window on pythonw
        if redirect_out:
            kwargs['creationflags'] = kwargs.setdefault('creationflags', 0) | 0x08000000 # CREATE_NO_WINDOW

        # Avoid closing the above parent and interrupt handles.
        # close_fds is True by default on Python >=3.7
        # or when no stream is captured on Python <3.7
        # (we always capture stdin, so this is already False by default on <3.7)
        kwargs['close_fds'] = False
    else:
        # Create a new session.
        # This makes it easier to interrupt the kernel,
        # because we want to interrupt the whole process group.
        # We don't use setpgrp, which is known to cause problems for kernels starting
        # certain interactive subprocesses, such as bash -i.
        if PY3:
            kwargs['start_new_session'] = True
        else:
            kwargs['preexec_fn'] = lambda: os.setsid()
        if not independent:
            env['JPY_PARENT_PID'] = str(os.getpid())

    try:
        proc = Popen(cmd, **kwargs)
    except Exception as exc:
        msg = (
            "Failed to run command:\n{}\n"
            "    PATH={!r}\n"
            "    with kwargs:\n{!r}\n"
        )
        # exclude environment variables,
        # which may contain access tokens and the like.
        without_env = {key:value for key, value in kwargs.items() if key != 'env'}
        msg = msg.format(cmd, env.get('PATH', os.defpath), without_env)
        get_logger().error(msg)
        raise

    if sys.platform == 'win32':
        # Attach the interrupt event to the Popen object so it can be used later.
        proc.win32_interrupt_event = interrupt_event

    # Clean up pipes created to work around Popen bug.
    if redirect_in:
        if stdin is None:
            proc.stdin.close()

    return proc
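A usage sketch for launch_kernel, assuming jupyter_client and ipykernel are installed; the connection-file path is a placeholder of the kind a kernelspec's argv would supply:

import sys
from jupyter_client.launcher import launch_kernel

cmd = [sys.executable, "-m", "ipykernel_launcher", "-f", "/tmp/kernel-example.json"]
proc = launch_kernel(cmd, cwd="/tmp")     # returns a subprocess.Popen
print("kernel pid:", proc.pid)
proc.terminate()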
Code example #53
File: configurable.py, Project: ipython/traitlets
 def _log_default(self):
     if isinstance(self.parent, LoggingConfigurable):
         return self.parent.log
     from traitlets import log
     return log.get_logger()
Code example #54
File: configurable.py, Project: ghosthamlet/traitlets
 def _log_default(self):
     from traitlets import log
     return log.get_logger()
Code example #55
def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None, independent=False, cwd=None, **kw):
    """ Launches a localhost kernel, binding to the specified ports.

    Parameters
    ----------
    cmd : Popen list,
        The command sequence (as passed to subprocess.Popen) that launches a kernel entry point.

    stdin, stdout, stderr : optional (default None)
        Standards streams, as defined in subprocess.Popen.

    independent : bool, optional (default False)
        If set, the kernel process is guaranteed to survive if this process
        dies. If not set, an effort is made to ensure that the kernel is killed
        when this process dies. Note that in this case it is still good practice
        to kill kernels manually before exiting.

    cwd : path, optional
        The working dir of the kernel process (default: cwd of this process).

    Returns
    -------

    Popen instance for the kernel subprocess
    """

    # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
    # are invalid. Unfortunately, there is in general no way to detect whether
    # they are valid.  The following two blocks redirect them to (temporary)
    # pipes in certain important cases.

    # If this process has been backgrounded, our stdin is invalid. Since there
    # is no compelling reason for the kernel to inherit our stdin anyway, we'll
    # play it safe and always redirect.
    redirect_in = True
    _stdin = PIPE if stdin is None else stdin

    # If this process is running on pythonw, we know that stdin, stdout, and
    # stderr are all invalid.
    redirect_out = sys.executable.endswith("pythonw.exe")
    if redirect_out:
        blackhole = open(os.devnull, "w")
        _stdout = blackhole if stdout is None else stdout
        _stderr = blackhole if stderr is None else stderr
    else:
        _stdout, _stderr = stdout, stderr

    env = env if (env is not None) else os.environ.copy()

    encoding = getdefaultencoding(prefer_stream=False)
    kwargs = dict(stdin=_stdin, stdout=_stdout, stderr=_stderr, cwd=cwd, env=env)

    # Spawn a kernel.
    if sys.platform == "win32":
        # Popen on Python 2 on Windows cannot handle unicode args or cwd
        cmd = [cast_bytes_py2(c, encoding) for c in cmd]
        if cwd:
            cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or "ascii")
            kwargs["cwd"] = cwd

        from .win_interrupt import create_interrupt_event

        # Create a Win32 event for interrupting the kernel
        # and store it in an environment variable.
        interrupt_event = create_interrupt_event()
        env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
        # deprecated old env name:
        env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]

        try:
            from _winapi import DuplicateHandle, GetCurrentProcess, DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        except:
            from _subprocess import DuplicateHandle, GetCurrentProcess, DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        # Launch the kernel process
        if independent:
            kwargs["creationflags"] = CREATE_NEW_PROCESS_GROUP
        else:
            pid = GetCurrentProcess()
            handle = DuplicateHandle(pid, pid, pid, 0, True, DUPLICATE_SAME_ACCESS)  # Inheritable by new processes.
            env["JPY_PARENT_PID"] = str(int(handle))

    else:
        if independent:
            kwargs["preexec_fn"] = lambda: os.setsid()
        else:
            # Create a new process group. This makes it easier to
            # interrupt the kernel, because we want to interrupt the
            # children of the kernel process also.
            kwargs["preexec_fn"] = lambda: os.setpgrp()
            env["JPY_PARENT_PID"] = str(os.getpid())

    try:
        proc = Popen(cmd, **kwargs)
    except Exception as exc:
        msg = "Failed to run command:\n{}\n" "with kwargs:\n{!r}\n"
        msg = msg.format(cmd, kwargs)
        get_logger().error(msg)
        raise

    if sys.platform == "win32":
        # Attach the interrupt event to the Popen object so it can be used later.
        proc.win32_interrupt_event = interrupt_event

    # Clean up pipes created to work around Popen bug.
    if redirect_in:
        if stdin is None:
            proc.stdin.close()

    return proc